diff --git a/.gitignore b/.gitignore
index ba0abc3..ff3dc94 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,5 @@
 __pycache__/
 .vscode/
-data/output/*.png
-data/output/*/*.json
-data/*.bag
-data/
-test.cpp
\ No newline at end of file
+data/*
+test.cpp
+*.bag
\ No newline at end of file
diff --git a/docs/RUN.md b/docs/RUN.md
index d944121..9d95e19 100644
--- a/docs/RUN.md
+++ b/docs/RUN.md
@@ -16,7 +16,7 @@ Installation and execution of TrackDLO was verified with the below dependencies
 * [Pillow](https://pillow.readthedocs.io/en/stable/installation.html)
 * [ROS Numpy](https://pypi.org/project/rosnumpy/)

-We also provide Docker files for compatibility with other system configurations. Refer to the [DOCKER.md](https://github.com/RMDLO/trackdlo/blob/master/docs/DOCKER.md) for more information on using docker, and see the docker [requirements.txt](https://github.com/RMDLO/trackdlo/blob/master/docker/requirements.txt) file for a list of the tested versions of TrackDLO package dependencies.
+We also provide Docker files for compatibility with other system configurations. Refer to the ![DOCKER.md](https://github.com/RMDLO/trackdlo/blob/master/docs/DOCKER.md) for more information on using docker, and see the docker ![requirements.txt](https://github.com/RMDLO/trackdlo/blob/master/docker/requirements.txt) file for a list of the tested versions of TrackDLO package dependencies.

 ## Other Requirements
diff --git a/launch/realsense_node.launch b/launch/realsense_node.launch
index 88dd846..4ecd716 100644
--- a/launch/realsense_node.launch
+++ b/launch/realsense_node.launch
@@ -2,25 +2,23 @@
-
-
+
-
+
-
-
-
-
-
+
+
+
+
-
+
diff --git a/launch/trackdlo.launch b/launch/trackdlo.launch
index d8dc402..2832f35 100644
--- a/launch/trackdlo.launch
+++ b/launch/trackdlo.launch
@@ -9,12 +9,12 @@
-
+
-
+
@@ -61,7 +61,7 @@
-
+
diff --git a/trackdlo/src/initialize.py b/trackdlo/src/initialize.py
index 401b227..bcdff88 100755
--- a/trackdlo/src/initialize.py
+++ b/trackdlo/src/initialize.py
@@ -11,7 +11,6 @@ import time
 import cv2
 import numpy as np
-import time
 from visualization_msgs.msg import MarkerArray
 from scipy import interpolate
@@ -68,75 +67,81 @@ def callback (rgb, depth):
         # color thresholding
         mask = color_thresholding(hsv_image, cur_depth)

-    start_time = time.time()
-    mask = cv2.cvtColor(mask.copy(), cv2.COLOR_GRAY2BGR)
-
-    # returns the pixel coord of points (in order). a list of lists
-    img_scale = 1
-    extracted_chains = extract_connected_skeleton(visualize_initialization_process, mask, img_scale=img_scale, seg_length=8, max_curvature=25)
-
-    all_pixel_coords = []
-    for chain in extracted_chains:
-        all_pixel_coords += chain
-    print('Finished extracting chains. Time taken:', time.time()-start_time)
-
-    all_pixel_coords = np.array(all_pixel_coords) * img_scale
-    all_pixel_coords = np.flip(all_pixel_coords, 1)
-
-    pc_z = cur_depth[tuple(map(tuple, all_pixel_coords.T))] / 1000.0
-    f = proj_matrix[0, 0]
-    cx = proj_matrix[0, 2]
-    cy = proj_matrix[1, 2]
-    pixel_x = all_pixel_coords[:, 1]
-    pixel_y = all_pixel_coords[:, 0]
-
-    pc_x = (pixel_x - cx) * pc_z / f
-    pc_y = (pixel_y - cy) * pc_z / f
-    extracted_chains_3d = np.vstack((pc_x, pc_y))
-    extracted_chains_3d = np.vstack((extracted_chains_3d, pc_z))
-    extracted_chains_3d = extracted_chains_3d.T
-
-    # do not include those without depth values
-    extracted_chains_3d = extracted_chains_3d[((extracted_chains_3d[:, 0] != 0) | (extracted_chains_3d[:, 1] != 0) | (extracted_chains_3d[:, 2] != 0))]
-
-    if multi_color_dlo:
-        depth_threshold = 0.58 # m
-        extracted_chains_3d = extracted_chains_3d[extracted_chains_3d[:, 2] > depth_threshold]
-
-    # tck, u = interpolate.splprep(extracted_chains_3d.T, s=0.001)
-    tck, u = interpolate.splprep(extracted_chains_3d.T, s=0.0005)
-    # 1st fit, less points
-    u_fine = np.linspace(0, 1, 300) # <-- num fit points
-    x_fine, y_fine, z_fine = interpolate.splev(u_fine, tck)
-    spline_pts = np.vstack((x_fine, y_fine, z_fine)).T
-
-    # 2nd fit, higher accuracy
-    num_true_pts = int(np.sum(np.sqrt(np.sum(np.square(np.diff(spline_pts, axis=0)), axis=1))) * 1000)
-    u_fine = np.linspace(0, 1, num_true_pts) # <-- num true points
-    x_fine, y_fine, z_fine = interpolate.splev(u_fine, tck)
-    spline_pts = np.vstack((x_fine, y_fine, z_fine)).T
-    total_spline_len = np.sum(np.sqrt(np.sum(np.square(np.diff(spline_pts, axis=0)), axis=1)))
-
-    init_nodes = spline_pts[np.linspace(0, num_true_pts-1, num_of_nodes).astype(int)]
-
-    results = ndarray2MarkerArray(init_nodes, result_frame_id, [1, 150/255, 0, 0.75], [0, 1, 0, 0.75])
-    results_pub.publish(results)
-
-    # add color
-    pc_rgba = struct.unpack('I', struct.pack('BBBB', 255, 40, 40, 255))[0]
-    pc_rgba_arr = np.full((len(init_nodes), 1), pc_rgba)
-    pc_colored = np.hstack((init_nodes, pc_rgba_arr)).astype(object)
-    pc_colored[:, 3] = pc_colored[:, 3].astype(int)
-
-    header.stamp = rospy.Time.now()
-    converted_points = pcl2.create_cloud(header, fields, pc_colored)
-    pc_pub.publish(converted_points)
-
-    rospy.signal_shutdown('Finished initial node set computation.')
+    try:
+        start_time = time.time()
+        mask = cv2.cvtColor(mask.copy(), cv2.COLOR_GRAY2BGR)
+
+        # returns the pixel coord of points (in order). a list of lists
+        img_scale = 1
+        extracted_chains = extract_connected_skeleton(visualize_initialization_process, mask, img_scale=img_scale, seg_length=8, max_curvature=25)
+
+        all_pixel_coords = []
+        for chain in extracted_chains:
+            all_pixel_coords += chain
+        print('Finished extracting chains. Time taken:', time.time()-start_time)
+
+        all_pixel_coords = np.array(all_pixel_coords) * img_scale
+        all_pixel_coords = np.flip(all_pixel_coords, 1)
+
+        pc_z = cur_depth[tuple(map(tuple, all_pixel_coords.T))] / 1000.0
+        f = proj_matrix[0, 0]
+        cx = proj_matrix[0, 2]
+        cy = proj_matrix[1, 2]
+        pixel_x = all_pixel_coords[:, 1]
+        pixel_y = all_pixel_coords[:, 0]
+
+        pc_x = (pixel_x - cx) * pc_z / f
+        pc_y = (pixel_y - cy) * pc_z / f
+        extracted_chains_3d = np.vstack((pc_x, pc_y))
+        extracted_chains_3d = np.vstack((extracted_chains_3d, pc_z))
+        extracted_chains_3d = extracted_chains_3d.T
+
+        # do not include those without depth values
+        extracted_chains_3d = extracted_chains_3d[((extracted_chains_3d[:, 0] != 0) | (extracted_chains_3d[:, 1] != 0) | (extracted_chains_3d[:, 2] != 0))]
+
+        if multi_color_dlo:
+            depth_threshold = 0.58 # m
+            extracted_chains_3d = extracted_chains_3d[extracted_chains_3d[:, 2] > depth_threshold]
+
+        # tck, u = interpolate.splprep(extracted_chains_3d.T, s=0.001)
+        tck, u = interpolate.splprep(extracted_chains_3d.T, s=0.0005)
+        # 1st fit, less points
+        u_fine = np.linspace(0, 1, 300) # <-- num fit points
+        x_fine, y_fine, z_fine = interpolate.splev(u_fine, tck)
+        spline_pts = np.vstack((x_fine, y_fine, z_fine)).T
+
+        # 2nd fit, higher accuracy
+        num_true_pts = int(np.sum(np.sqrt(np.sum(np.square(np.diff(spline_pts, axis=0)), axis=1))) * 1000)
+        u_fine = np.linspace(0, 1, num_true_pts) # <-- num true points
+        x_fine, y_fine, z_fine = interpolate.splev(u_fine, tck)
+        spline_pts = np.vstack((x_fine, y_fine, z_fine)).T
+        total_spline_len = np.sum(np.sqrt(np.sum(np.square(np.diff(spline_pts, axis=0)), axis=1)))
+
+        init_nodes = spline_pts[np.linspace(0, num_true_pts-1, num_of_nodes).astype(int)]
+
+        results = ndarray2MarkerArray(init_nodes, result_frame_id, [1, 150/255, 0, 0.75], [0, 1, 0, 0.75])
+        results_pub.publish(results)
+
+        # add color
+        pc_rgba = struct.unpack('I', struct.pack('BBBB', 255, 40, 40, 255))[0]
+        pc_rgba_arr = np.full((len(init_nodes), 1), pc_rgba)
+        pc_colored = np.hstack((init_nodes, pc_rgba_arr)).astype(object)
+        pc_colored[:, 3] = pc_colored[:, 3].astype(int)
+
+        header.stamp = rospy.Time.now()
+        converted_points = pcl2.create_cloud(header, fields, pc_colored)
+        pc_pub.publish(converted_points)
+        # rospy.signal_shutdown('Finished initial node set computation.')
+    except:
+        rospy.logerr("Failed to extract splines.")
+        rospy.signal_shutdown('Stopping initialization.')


 if __name__=='__main__':
     rospy.init_node('init_tracker', anonymous=True)

+    global new_messages
+    new_messages=False
+
     num_of_nodes = rospy.get_param('/init_tracker/num_of_nodes')
     multi_color_dlo = rospy.get_param('/init_tracker/multi_color_dlo')
     camera_info_topic = rospy.get_param('/init_tracker/camera_info_topic')
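The re-indented block above lifts the masked skeleton pixels into 3D with the pinhole camera model, using the focal length and principal point from the projection matrix, discards pixels with no depth return, fits a smoothing spline through the resulting chain, and resamples it into num_of_nodes evenly spaced initial nodes. The snippet below is a minimal sketch of that pipeline distilled from the patch, not part of it: the function name and the single-pass resampling are illustrative (the patch evaluates the spline twice for higher accuracy), and it assumes depth stored in millimeters and fx ≈ fy, as the script does.

    import numpy as np
    from scipy import interpolate

    def pixels_to_nodes(pixel_coords, cur_depth, proj_matrix, num_of_nodes):
        # pixel_coords is (N, 2) in (row, col) order, matching the flipped
        # all_pixel_coords in initialize.py; cur_depth is in millimeters.
        pc_z = cur_depth[pixel_coords[:, 0], pixel_coords[:, 1]] / 1000.0
        f, cx, cy = proj_matrix[0, 0], proj_matrix[0, 2], proj_matrix[1, 2]
        pc_x = (pixel_coords[:, 1] - cx) * pc_z / f
        pc_y = (pixel_coords[:, 0] - cy) * pc_z / f
        pts = np.vstack((pc_x, pc_y, pc_z)).T
        pts = pts[np.any(pts != 0, axis=1)]            # drop pixels without depth
        tck, _ = interpolate.splprep(pts.T, s=0.0005)  # smoothing spline through the chain
        u_fine = np.linspace(0, 1, 1000)
        spline_pts = np.vstack(interpolate.splev(u_fine, tck)).T
        # sample evenly spaced nodes along the fitted spline
        return spline_pts[np.linspace(0, len(spline_pts) - 1, num_of_nodes).astype(int)]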
diff --git a/trackdlo/src/utils.py b/trackdlo/src/utils.py
index 8e23ec5..c6fa027 100644
--- a/trackdlo/src/utils.py
+++ b/trackdlo/src/utils.py
@@ -40,7 +40,7 @@ def orientation(p, q, r):
     # See https://www.geeksforgeeks.org/orientation-3-ordered-points/amp/
     # for details of below formula.

-    val = (float(q.y - p.y) * (r.x - q.x)) - (float(q.x - p.x) * (r.y - q.y))
+    val = (np.float64(q.y - p.y) * (r.x - q.x)) - (np.float64(q.x - p.x) * (r.y - q.y))

     if (val > 0):
         # Clockwise orientation
@@ -213,7 +213,7 @@ def extract_connected_skeleton (visualize_process, mask, img_scale=10, seg_lengt
             break

         # graph for visualization
-        mask = cv2.line(mask, contour[i][0], contour[i+1][0], 255, 1)
+        mask = cv2.line(mask, tuple(contour[i][0]), tuple(contour[i+1][0]), 255, 1)

         # if haven't initialized, perform initialization
         if cur_seg_start_point is None:
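For reference, the orientation helper touched in the first utils.py hunk is the standard three-point orientation test: the sign of the cross product of (q - p) and (r - q) tells whether the ordered triple turns clockwise, turns counterclockwise, or is collinear, and casting to a 64-bit float before the multiplications keeps the products from overflowing when the coordinates arrive as fixed-width NumPy integers. A small self-contained sketch, assuming points with .x/.y attributes and the return convention of the referenced article (the patch itself does not show the return values):

    import numpy as np
    from dataclasses import dataclass

    @dataclass
    class Point:
        x: float
        y: float

    def orientation(p, q, r):
        # Sign of the cross product of (q - p) and (r - q).
        val = (np.float64(q.y - p.y) * (r.x - q.x)) - (np.float64(q.x - p.x) * (r.y - q.y))
        if val > 0:
            return 1   # clockwise
        elif val < 0:
            return 2   # counterclockwise
        return 0       # collinear

    # (0, 0) -> (1, 0) -> (1, 1) is a counterclockwise turn under this convention.
    print(orientation(Point(0, 0), Point(1, 0), Point(1, 1)))  # 2

The second utils.py hunk is a portability fix rather than a logic change: newer OpenCV releases are stricter about the point arguments accepted by cv2.line, so passing plain tuples instead of NumPy index arrays keeps the skeleton visualization working across versions.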