Showing 11 changed files with 908 additions and 0 deletions.
Empty file.
Empty file.
@@ -0,0 +1,69 @@
import torch
import numpy as np
from shapely.geometry import LineString
from scipy.special import comb as n_over_k


class PiecewiseBezierCurve(object):
    def __init__(self, num_points=100, num_degree=2, margin=0.05, threshold=0.1):
        super().__init__()
        self.num_points = num_points
        self.num_degree = num_degree
        self.margin = margin
        self.bezier_coefficient = self._get_bezier_coefficients(np.linspace(0, 1, self.num_points))
        self.threshold = threshold

    def _get_bezier_coefficients(self, t_list):
        # Bernstein basis: B_{k,n}(t) = C(n, k) * t^k * (1 - t)^(n - k)
        bernstein_fn = lambda n, t, k: (t ** k) * ((1 - t) ** (n - k)) * n_over_k(n, k)
        bezier_coefficient_fn = \
            lambda ts: [[bernstein_fn(self.num_degree, t, k) for k in range(self.num_degree + 1)] for t in ts]
        return np.array(bezier_coefficient_fn(t_list))

    def _get_interpolated_points(self, points):
        # Resample the polyline to a fixed number of equally spaced points.
        line = LineString(points)
        distances = np.linspace(0, line.length, self.num_points)
        sampled_points = np.array([list(line.interpolate(distance).coords) for distance in distances]).reshape(-1, 2)
        return sampled_points

    def _get_chamfer_distance(self, points_before, points_after):
        # Symmetric Chamfer distance; residuals below `margin` are ignored.
        points_before = torch.from_numpy(points_before).float()
        points_after = torch.from_numpy(points_after).float()
        dist = torch.cdist(points_before, points_after)
        dist1, _ = torch.min(dist, 2)
        dist1 = (dist1 * (dist1 > self.margin).float())
        dist2, _ = torch.min(dist, 1)
        dist2 = (dist2 * (dist2 > self.margin).float())
        return (dist1.mean(-1) + dist2.mean(-1)) / 2

    def bezier_fitting(self, curve_pts):
        # Least-squares fit of control points via the pseudoinverse, keeping the original endpoints fixed.
        curve_pts_intered = self._get_interpolated_points(curve_pts)
        bezier_ctrl_pts = np.linalg.pinv(self.bezier_coefficient).dot(curve_pts_intered)
        bezier_ctrl_pts = np.concatenate([curve_pts[0:1], bezier_ctrl_pts[1:-1], curve_pts[-1:]], axis=0)
        curve_pts_recovery = self.bezier_coefficient.dot(bezier_ctrl_pts)
        criterion = self._get_chamfer_distance(curve_pts_intered[None, :, :], curve_pts_recovery[None, :, :]).item()
        return bezier_ctrl_pts, criterion

    @staticmethod
    def sequence_reverse(ctr_points):
        # Orient the control-point sequence so it runs from larger y to smaller y.
        ctr_points = np.array(ctr_points)
        (xs, ys), (xe, ye) = ctr_points[0], ctr_points[-1]
        if ys > ye:
            ctr_points = ctr_points[::-1]
        return ctr_points

    def __call__(self, curve_pts):
        # Greedily fit the longest segment whose Chamfer error stays under the threshold,
        # then continue from its end point until the whole curve is covered.
        ctr_points_piecewise = []
        num_points = curve_pts.shape[0]
        start, end = 0, num_points - 1
        while start < end:
            ctr_points, loss = self.bezier_fitting(curve_pts[start: end + 1])
            if loss < self.threshold:
                start, end = end, num_points - 1
                if start >= end:
                    ctr_points_piecewise += ctr_points.tolist()
                else:
                    ctr_points_piecewise += ctr_points.tolist()[:-1]
            else:
                end = end - 1
        ctr_points_piecewise = self.sequence_reverse(ctr_points_piecewise)
        return ctr_points_piecewise
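
A minimal usage sketch for `PiecewiseBezierCurve`: the class resamples an input polyline, solves for Bézier control points through the pseudoinverse of the Bernstein coefficient matrix, and greedily splits the curve whenever the Chamfer error exceeds `threshold`. The sine-shaped polyline below is made-up test data, not something from the repository; the import path is the one used by the rasterize module in this commit.

```python
import numpy as np
from tools.anno_converter.bezier import PiecewiseBezierCurve

# Synthetic polyline: a gentle S-curve sampled at 50 points (illustrative only).
xs = np.linspace(0.0, 10.0, 50)
ys = np.sin(xs / 3.0)
curve_pts = np.stack([xs, ys], axis=1)

pbc = PiecewiseBezierCurve(num_points=100, num_degree=2, margin=0.05, threshold=0.1)

# Single-segment fit: (num_degree + 1) control points plus the Chamfer fitting error.
ctrl_single, err = pbc.bezier_fitting(curve_pts)
print(ctrl_single.shape, err)            # (3, 2) and a scalar error

# Piecewise fit: control points of consecutive segments, oriented by sequence_reverse.
ctrl_piecewise = pbc(curve_pts)
print(np.asarray(ctrl_piecewise).shape)
```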
@@ -0,0 +1,46 @@
import numpy as np
import visvalingamwyatt as vw


class GenPivots:
    def __init__(self, max_pts=[10, 2, 30], map_region=(30, -30, 15, -15), vm_thre=2.0, resolution=0.15):
        self.max_pts = max_pts
        self.map_region = map_region
        self.vm_thre = vm_thre
        self.resolution = resolution

    def pivots_generate(self, map_vectors):
        pivots_single_frame = {0: [], 1: [], 2: []}
        lengths_single_frame = {0: [], 1: [], 2: []}
        for ii, vec in enumerate(map_vectors):
            pts = np.array(vec["pts"]) * self.resolution  # convert from pixels to meters
            pts = pts[:, ::-1]  # swap coordinate columns
            cls = vec["type"]

            # If the difference in x is significant (more than 1 m), order the polyline by x;
            # otherwise order it by y.
            if (np.abs(pts[0][0] - pts[-1][0]) > 1 and pts[0][0] < pts[-1][0]) \
                    or (np.abs(pts[0][0] - pts[-1][0]) <= 1 and pts[0][1] < pts[-1][1]):
                pts = pts[::-1]

            # Simplify with the Visvalingam-Whyatt algorithm, then pad to a fixed length per class.
            simplifier = vw.Simplifier(pts)
            sim_pts = simplifier.simplify(threshold=self.vm_thre)
            length = min(self.max_pts[cls], len(sim_pts))
            padded_pts = self.pad_pts(sim_pts, self.max_pts[cls])
            pivots_single_frame[cls].append(padded_pts)
            lengths_single_frame[cls].append(length)

        for cls in [0, 1, 2]:
            new_pts = np.array(pivots_single_frame[cls])
            if new_pts.size > 0:
                new_pts[:, :, 0] = new_pts[:, :, 0] / (2 * self.map_region[0])  # normalize by the full map extent
                new_pts[:, :, 1] = new_pts[:, :, 1] / (2 * self.map_region[2])
            pivots_single_frame[cls] = new_pts
            lengths_single_frame[cls] = np.array(lengths_single_frame[cls])

        return pivots_single_frame, lengths_single_frame

    def pad_pts(self, pts, tgt_length):
        if len(pts) >= tgt_length:
            return pts[:tgt_length]
        pts = np.concatenate([pts, np.zeros((tgt_length - len(pts), 2))], axis=0)
        return pts
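
A small sketch of calling `pivots_generate` directly, using two hand-made map vectors in the `{'pts', 'pts_num', 'type'}` format that `RasterizedLocalMap.convert_vec_to_mask` (below) produces; the pixel coordinates and class assignments are illustrative, and the import path comes from the dataset script in this commit.

```python
import numpy as np
from tools.anno_converter.generate_pivots import GenPivots

# Toy map elements in canvas (pixel) coordinates: a divider (type 0) and a contour (type 2).
map_vectors = [
    {"pts": [[10, 20], [60, 25], [120, 30], [200, 40]], "pts_num": 4, "type": 0},
    {"pts": [[5, 5], [5, 100], [100, 100], [100, 5]], "pts_num": 4, "type": 2},
]

gen = GenPivots(max_pts=[10, 2, 30], map_region=(30, -30, 15, -15), vm_thre=2.0, resolution=0.15)
pivots, lengths = gen.pivots_generate(map_vectors)

for cls in (0, 1, 2):
    # Padded, normalized pivot arrays per class and the number of valid pivots in each.
    print(cls, np.shape(pivots[cls]), lengths[cls])
```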
Empty file.
@@ -0,0 +1,99 @@
import os
import argparse
import numpy as np
from tqdm import tqdm
from nuscenes import NuScenes
from pyquaternion import Quaternion
from torch.utils.data import Dataset
from rasterize import RasterizedLocalMap
from vectorize import VectorizedLocalMap
from tools.anno_converter.generate_pivots import GenPivots


class NuScenesDataset(Dataset):
    def __init__(self, version, dataroot, xbound=(-30., 30., 0.15), ybound=(-15., 15., 0.15)):
        super(NuScenesDataset, self).__init__()
        patch_h = ybound[1] - ybound[0]
        patch_w = xbound[1] - xbound[0]
        canvas_h = int(patch_h / ybound[2])
        canvas_w = int(patch_w / xbound[2])
        self.patch_size = (patch_h, patch_w)
        self.canvas_size = (canvas_h, canvas_w)
        self.nusc = NuScenes(version=version, dataroot=dataroot, verbose=False)
        self.vector_map = VectorizedLocalMap(dataroot, patch_size=self.patch_size, canvas_size=self.canvas_size)

    def __len__(self):
        return len(self.nusc.sample)

    def __getitem__(self, idx):
        record = self.nusc.sample[idx]
        location = self.nusc.get('log', self.nusc.get('scene', record['scene_token'])['log_token'])['location']
        ego_pose = self.nusc.get('ego_pose',
                                 self.nusc.get('sample_data', record['data']['LIDAR_TOP'])['ego_pose_token'])
        vectors = self.vector_map.gen_vectorized_samples(location, ego_pose['translation'], ego_pose['rotation'])
        imgs, trans, rots, intrins = self.get_data_info(record)
        return imgs, np.stack(trans), np.stack(rots), np.stack(intrins), vectors

    def get_data_info(self, record):
        imgs, trans, rots, intrins = [], [], [], []
        for cam in ['CAM_FRONT_LEFT', 'CAM_FRONT', 'CAM_FRONT_RIGHT', 'CAM_BACK_LEFT', 'CAM_BACK', 'CAM_BACK_RIGHT']:
            samp = self.nusc.get('sample_data', record['data'][cam])
            imgs.append(samp['filename'])
            sens = self.nusc.get('calibrated_sensor', samp['calibrated_sensor_token'])
            trans.append(sens['translation'])
            rots.append(Quaternion(sens['rotation']).rotation_matrix)
            intrins.append(sens['camera_intrinsic'])
        return imgs, trans, rots, intrins


class NuScenesSemanticDataset(NuScenesDataset):
    def __init__(self, version, dataroot, xbound, ybound, thickness, num_degrees, max_channel=3, bezier=False):
        super(NuScenesSemanticDataset, self).__init__(version, dataroot, xbound, ybound)
        self.raster_map = RasterizedLocalMap(self.patch_size, self.canvas_size, num_degrees, max_channel, thickness, bezier=bezier)
        self.pivot_gen = GenPivots(map_region=(xbound[1], xbound[0], ybound[1], ybound[0]), resolution=xbound[2])

    def __getitem__(self, idx):
        record = self.nusc.sample[idx]
        location = self.nusc.get('log', self.nusc.get('scene', record['scene_token'])['log_token'])['location']
        ego_pose = self.nusc.get('ego_pose', self.nusc.get('sample_data', record['data']['LIDAR_TOP'])['ego_pose_token'])
        vectors = self.vector_map.gen_vectorized_samples(location, ego_pose['translation'], ego_pose['rotation'])
        imgs, trans, rots, intrins = self.get_data_info(record)
        semantic_masks, instance_masks, instance_vec_points, instance_ctr_points = \
            self.raster_map.convert_vec_to_mask(vectors)
        pivots, pivot_lengths = self.pivot_gen.pivots_generate(instance_vec_points)

        return imgs, np.stack(trans), np.stack(rots), np.stack(intrins), semantic_masks, instance_masks, \
            vectors, instance_vec_points, instance_ctr_points, pivots, pivot_lengths


def main():
    parser = argparse.ArgumentParser(description='Pivot-Bezier GT Generator.')
    parser.add_argument('-d', '--data_root', type=str, default='/data/dataset/public/nuScenes-tt')
    parser.add_argument('-v', '--version', nargs='+', type=str, default=['v1.0-trainval'])
    parser.add_argument("--num_degrees", nargs='+', type=int, default=[2, 1, 3])
    parser.add_argument("--thickness", nargs='+', type=int, default=[1, 8])
    parser.add_argument("--xbound", nargs=3, type=float, default=[-30.0, 30.0, 0.15])
    parser.add_argument("--ybound", nargs=3, type=float, default=[-15.0, 15.0, 0.15])
    parser.add_argument("--bezier", default=False, action='store_true')  # whether to generate Bezier GT
    args = parser.parse_args()

    n_classes = len(args.num_degrees)  # 0 --> divider (d=2), 1 --> crossing (d=1), 2 --> contour (d=3)
    save_dir = os.path.join(args.data_root, 'customer', "pivot-bezier")
    os.makedirs(save_dir, exist_ok=True)
    for version in args.version:
        dataset = NuScenesSemanticDataset(
            version, args.data_root, args.xbound, args.ybound, args.thickness, args.num_degrees, max_channel=n_classes, bezier=args.bezier)
        for idx in tqdm(range(len(dataset))):
            file_path = os.path.join(save_dir, dataset.nusc.sample[idx]['token'] + '.npz')
            # if os.path.exists(file_path):
            #     continue
            item = dataset[idx]
            np.savez_compressed(
                file_path, image_paths=np.array(item[0]), trans=item[1], rots=item[2], intrins=item[3],
                semantic_mask=item[4][0], instance_mask=item[5][0], instance_mask8=item[5][1],
                ego_vectors=item[6], map_vectors=item[7], ctr_points=item[8], pivot_pts=item[9], pivot_length=item[10],
            )


if __name__ == '__main__':
    main()
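
For reference, a sketch of reading one of the generated annotation files back; the path is a placeholder, and the key names come from the `np.savez_compressed` call above. Lists and dicts (vectors, pivots) are stored as pickled object arrays, so `allow_pickle=True` is needed.

```python
import numpy as np

# Placeholder: <data_root>/customer/pivot-bezier/<sample_token>.npz written by main() above.
gt = np.load("path/to/<sample_token>.npz", allow_pickle=True)

print(gt["image_paths"])           # six camera image paths for this sample
print(gt["semantic_mask"].shape)   # per-class semantic mask rasterized with thickness 1
print(gt["instance_mask"].shape)   # instance-indexed mask (thickness 1); instance_mask8 uses thickness 8

pivots = gt["pivot_pts"].item()    # dict {class_id: normalized, padded pivot points}
print({cls: np.shape(p) for cls, p in pivots.items()})
```

The converter itself runs as a plain script through its argparse interface (for example with `-d <nuScenes root> -v v1.0-mini`); the file's path within the repository is not shown in this diff.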
@@ -0,0 +1,107 @@
import cv2
import numpy as np
from shapely import affinity
from shapely.geometry import LineString, box
from tools.anno_converter.bezier import PiecewiseBezierCurve


class RasterizedLocalMap(object):
    def __init__(self, patch_size, canvas_size, num_degrees, max_channel, thickness, patch_angle=0.0, bezier=False):
        super().__init__()
        self.patch_size = patch_size
        self.canvas_size = canvas_size
        self.max_channel = max_channel
        self.num_degrees = num_degrees
        self.thickness = thickness
        assert self.thickness[0] == 1
        self.patch_box = (0.0, 0.0, self.patch_size[0], self.patch_size[1])
        self.patch_angle = patch_angle
        self.patch = self.get_patch_coord()
        self.bezier = bezier
        if bezier:
            self.pbc_funcs = {
                d: PiecewiseBezierCurve(num_points=100, num_degree=d, margin=0.05, threshold=0.1) for d in num_degrees
            }

    def convert_vec_to_mask(self, vectors):
        vector_num_list = {cls_idx: [] for cls_idx in range(self.max_channel)}  # map type -> list of LineStrings
        for vector in vectors:
            if vector['pts_num'] >= 2:
                vector_num_list[vector['type']].append(LineString(vector['pts'][:vector['pts_num']]))
        ins_idx = 1  # instance index; 0 is reserved for background
        instance_masks = np.zeros(
            (len(self.thickness), self.max_channel, self.canvas_size[1], self.canvas_size[0]), np.uint8)
        instance_vec_points, instance_ctr_points = [], []
        for cls_idx in range(self.max_channel):
            if self.bezier:
                pbc_func = self.pbc_funcs[self.num_degrees[cls_idx]]
            else:
                pbc_func = None
            masks, map_points, ctr_points, ins_idx = self.line_geom_to_mask(vector_num_list[cls_idx], ins_idx, pbc_func)
            instance_masks[:, cls_idx, :, :] = masks
            for pts in map_points:
                instance_vec_points.append({'pts': pts, 'pts_num': len(pts), 'type': cls_idx})
            for pts in ctr_points:
                instance_ctr_points.append({'pts': pts, 'pts_num': len(pts), 'type': cls_idx})
        instance_masks = np.stack(instance_masks).astype(np.uint8)
        semantic_masks = (instance_masks != 0).astype(np.uint8)
        return semantic_masks, instance_masks, instance_vec_points, instance_ctr_points

    def line_geom_to_mask(self, layer_geom, idx, pbc_func, trans_type='index'):
        patch_x, patch_y, patch_h, patch_w = self.patch_box
        canvas_h = self.canvas_size[0]
        canvas_w = self.canvas_size[1]
        scale_height = canvas_h / patch_h
        scale_width = canvas_w / patch_w
        trans_x = -patch_x + patch_w / 2.0
        trans_y = -patch_y + patch_h / 2.0
        map_masks = np.zeros((len(self.thickness), *self.canvas_size), np.uint8)
        map_points, ctr_points = [], []
        for line in layer_geom:
            new_line = line.intersection(self.patch)
            if not new_line.is_empty:
                # Shift the line so the patch corner lands at the canvas origin.
                new_line = affinity.affine_transform(new_line, [1.0, 0.0, 0.0, 1.0, trans_x, trans_y])
                if new_line.geom_type == 'MultiLineString':
                    for single_line in new_line.geoms:  # .geoms is required by Shapely 2.x and also works in 1.8
                        pts2 = self.patch_size - np.array(single_line.coords[:])[:, ::-1]
                        if pbc_func is not None:
                            ctr_points.append(pbc_func(pts2))
                        single_line = affinity.scale(single_line, xfact=scale_width, yfact=scale_height, origin=(0, 0))
                        map_masks, idx = self.mask_for_lines(single_line, map_masks, self.thickness, idx, trans_type)
                        pts = self.canvas_size - np.array(single_line.coords[:])[:, ::-1]
                        map_points.append(pts.tolist())
                else:
                    pts2 = self.patch_size - np.array(new_line.coords[:])[:, ::-1]
                    if pbc_func is not None:
                        ctr_points.append(pbc_func(pts2))
                    new_line = affinity.scale(new_line, xfact=scale_width, yfact=scale_height, origin=(0, 0))
                    map_masks, idx = self.mask_for_lines(new_line, map_masks, self.thickness, idx, trans_type)
                    pts = self.canvas_size - np.array(new_line.coords[:])[:, ::-1]
                    map_points.append(pts.tolist())
        map_masks_ret = []
        for i in range(len(self.thickness)):
            map_masks_ret.append(np.flip(np.rot90(map_masks[i][None], k=1, axes=(1, 2)), axis=2)[0])
        map_masks_ret = np.array(map_masks_ret)
        return map_masks_ret, map_points, ctr_points, idx

    @staticmethod
    def mask_for_lines(lines, mask, thickness, idx, trans_type='index'):
        coords = np.asarray(list(lines.coords), np.int32)
        coords = coords.reshape((-1, 2))
        if len(coords) < 2:
            return mask, idx
        for i, t in enumerate(thickness):
            if trans_type == 'index':
                # Draw the polyline with the current instance index as the pixel value.
                cv2.polylines(mask[i], [coords], False, color=idx, thickness=t)
        idx += 1
        return mask, idx

    def get_patch_coord(self):
        patch_x, patch_y, patch_h, patch_w = self.patch_box
        x_min = patch_x - patch_w / 2.0
        y_min = patch_y - patch_h / 2.0
        x_max = patch_x + patch_w / 2.0
        y_max = patch_y + patch_h / 2.0
        patch = box(x_min, y_min, x_max, y_max)
        patch = affinity.rotate(patch, self.patch_angle, origin=(patch_x, patch_y), use_radians=False)
        return patch
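
A self-contained sketch of exercising `RasterizedLocalMap` without nuScenes, assuming the default 60 m x 30 m patch at 0.15 m resolution used elsewhere in this commit; the two toy vectors are invented and given in ego-frame meters, and the import mirrors the one in the dataset script above (which assumes running from the same directory).

```python
import numpy as np
from rasterize import RasterizedLocalMap

patch_size = (30.0, 60.0)   # (patch_h, patch_w) in meters
canvas_size = (200, 400)    # (canvas_h, canvas_w) at 0.15 m per pixel

raster = RasterizedLocalMap(patch_size, canvas_size, num_degrees=[2, 1, 3],
                            max_channel=3, thickness=[1, 8], bezier=False)

# Toy polylines in ego coordinates: a lane divider (type 0) and a crossing edge (type 1).
vectors = [
    {'pts': np.array([[-25.0, -5.0], [0.0, -4.0], [25.0, -3.0]]), 'pts_num': 3, 'type': 0},
    {'pts': np.array([[-10.0, 5.0], [10.0, 5.0]]), 'pts_num': 2, 'type': 1},
]

semantic, instance, vec_pts, ctr_pts = raster.convert_vec_to_mask(vectors)
print(semantic.shape, instance.shape)   # (2, 3, 400, 200): thickness x class x canvas_w x canvas_h
print(len(vec_pts), len(ctr_pts))       # per-instance polylines; ctr_pts stays empty with bezier=False
```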