diff --git a/src/deep_neurographs/densegraph.py b/src/deep_neurographs/densegraph.py index fa1c651..f2c7610 100644 --- a/src/deep_neurographs/densegraph.py +++ b/src/deep_neurographs/densegraph.py @@ -15,7 +15,7 @@ from scipy.spatial import KDTree from deep_neurographs import swc_utils, utils -from deep_neurographs.geometry_utils import dist as get_dist +from deep_neurographs.geometry import dist as get_dist class DenseGraph: diff --git a/src/deep_neurographs/feature_extraction.py b/src/deep_neurographs/feature_extraction.py index 4af758c..118f660 100644 --- a/src/deep_neurographs/feature_extraction.py +++ b/src/deep_neurographs/feature_extraction.py @@ -13,8 +13,11 @@ from random import sample import numpy as np +import tensorstore as ts -from deep_neurographs import geometry_utils, utils +from deep_neurographs import geometry +from deep_neurographs import graph_utils as gutils +from deep_neurographs import utils CHUNK_SIZE = [64, 64, 64] WINDOW = [5, 5, 5] @@ -31,12 +34,7 @@ # -- Wrappers -- def generate_mutable_features( - neurograph, - model_type, - anisotropy=[1.0, 1.0, 1.0], - img_path=None, - labels_path=None, - proposal_list=None, + neurograph, model_type, img_path=None, labels_path=None, proposals=None ): """ Generates feature vectors for every edge proposal in a neurograph. @@ -47,16 +45,13 @@ def generate_mutable_features( NeuroGraph generated from a directory of swcs generated from a predicted segmentation. model_type : str - Indication of model to be trained. Options include: AdaBoost, - RandomForest, FeedForwardNet, ConvNet, MultiModalNet. - anisotropy : list[float], optional - Real-world to image coordinates scaling factor for (x, y, z). - The default is [1.0, 1.0, 1.0]. + Type of model to be trained. Options include: AdaBoost, RandomForest, + FeedForwardNet, ConvNet, MultiModalNet. img_path : str, optional Path to raw image. The default is None. labels_path : str, optional Path to predicted segmentation. The default is None. - proposal_list : list[frozenset], optional + proposals : list[frozenset], optional List of edge proposals for which features will be generated. The default is None. 
@@ -68,83 +63,63 @@ def generate_mutable_features( """ features = { - "skel": generate_mutable_skel_features( - neurograph, proposal_list=proposal_list - ) + "skel": generate_skel_features(neurograph, proposals=proposals) } if model_type in ["ConvNet", "MultiModalNet"]: - features["img_chunks"] = generate_img_chunks( + features["img_chunks"], features["img_profile"] = generate_img_chunks( neurograph, img_path, labels_path, - anisotropy=anisotropy, - proposal_list=proposal_list, + model_type=model_type, + proposals=proposals, ) - if model_type != "ConvNet": + if model_type in ["AdaBoost", "RandomForest", "FeedForwardNet"]: features["img_profile"] = generate_img_profiles( - neurograph, - img_path, - anisotropy=anisotropy, - proposal_list=proposal_list, + neurograph, img_path, proposals=proposals ) return features # -- Edge feature extraction -- def generate_img_chunks( - neurograph, - img_path, - labels_path, - anisotropy=[1.0, 1.0, 1.0], - proposal_list=None, + neurograph, img_path, labels_path, model_type=None, proposals=None ): if neurograph.bbox: return generate_img_chunks_via_superchunk( - neurograph, - img_path, - labels_path, - anisotropy=[1.0, 1.0, 1.0], - proposal_list=proposal_list, + neurograph, img_path, labels_path, proposals=proposals ) else: - return generate_img_chunks_via_multithreading( - neurograph, - img_path, - labels_path, - anisotropy=[1.0, 1.0, 1.0], - proposal_list=proposal_list, + return generate_img_chunks_via_multithreads( + neurograph, img_path, labels_path, proposals=proposals ) -def generate_img_chunks_via_multithreading( - neurograph, - img_path, - labels_path, - anisotropy=[1.0, 1.0, 1.0], - proposal_list=None, +def generate_img_chunks_via_multithreads( + neurograph, img_path, labels_path, proposals=None ): - features = dict() - img = utils.open_tensorstore(img_path, 'neuroglancer_precomputed') - labels = utils.open_tensorstore(labels_path, 'neuroglancer_precomputed') + img = utils.open_tensorstore(img_path, "zarr") + labels = utils.open_tensorstore(labels_path, "neuroglancer_precomputed") with ThreadPoolExecutor() as executor: - threads = [None] * len(proposal_list) - for i, edge in enumerate(proposal_list): + # Assign Threads + threads = [None] * len(proposals) + for i, edge in enumerate(proposals): xyz_i, xyz_j = gutils.get_edge_attr(neurograph, edge, "xyz") threads[i] = executor.submit( - get_img_chunk, img, labels, xyz_i, xyz_j, edge + get_img_chunk_features, img, labels, xyz_i, xyz_j, edge ) + + # Save result + chunks = dict() + profiles = dict() for thread in as_completed(threads): - edge, result = thread.result() - features[edge] = result - return features + edge, chunk, profile = thread.result() + chunks[edge] = chunk + profiles[edge] = profile + return chunks, profiles def generate_img_chunks_via_superchunk( - neurograph, - img_path, - labels_path, - anisotropy=[1.0, 1.0, 1.0], - proposal_list=None, + neurograph, img_path, labels_path, proposals=None ): """ Generates an image chunk for each edge proposal such that the centroid of @@ -160,10 +135,7 @@ def generate_img_chunks_via_superchunk( Path to raw image. labels_path : str Path to predicted segmentation. - anisotropy : list[float], optional - Real-world to image coordinates scaling factor for (x, y, z). - The default is [1.0, 1.0, 1.0]. - proposal_list : list[frozenset], optional + proposals : list[frozenset], optional List of edge proposals for which features will be generated. The default is None. 
@@ -173,47 +145,90 @@ def generate_img_chunks_via_superchunk( Dictonary such that each pair is the edge id and image chunk. """ - features = dict() + chunk_features = dict() + profile_features = dict() origin = utils.apply_anisotropy(neurograph.origin, return_int=True) img, labels = utils.get_superchunks( img_path, labels_path, origin, neurograph.shape, from_center=False ) - img = utils.normalize_img(img) for edge in neurograph.mutable_edges: # Compute image coordinates i, j = tuple(edge) - xyz = get_edge_attr(neurograph, edge, "xyz") + xyz = gutils.get_edge_attr(neurograph, edge, "xyz") xyz_i = utils.world_to_img(neurograph, xyz[0]) xyz_j = utils.world_to_img(neurograph, xyz[1]) - features[edge] = get_img_chunk(img, labels, xyz_i, xyz_j) - return features + chunk, profile = get_img_chunk_features(img, labels, xyz_i, xyz_j) + chunk_features[edge] = chunk + profile_features[edge] = profile + return chunk_features, profile_features -def get_img_chunk(img, labels, xyz_i, xyz_j, process_id=None): +def get_img_chunk_features(img, labels, xyz_i, xyz_j, process_id=None): # Extract chunks - midpoint = geometry_utils.get_midpoint(xyz_i, xyz_j).astype(int) - img_chunk = utils.get_chunk(img, midpoint, CHUNK_SIZE) - labels_chunk = utils.get_chunk(labels, midpoint, CHUNK_SIZE) + midpoint = geometry.get_midpoint(xyz_i, xyz_j).astype(int) + if type(img) == ts.TensorStore: + img_chunk = utils.read_tensorstore(img, midpoint, CHUNK_SIZE) + labels_chunk = utils.read_tensorstore(labels, midpoint, CHUNK_SIZE) + else: + img_chunk = utils.get_chunk(img, midpoint, CHUNK_SIZE) + labels_chunk = utils.get_chunk(labels, midpoint, CHUNK_SIZE) - # Mark path - d = int(geometry_utils.dist(xyz_i, xyz_j) + 5) - img_coords_i = utils.img_to_patch(xyz_i, midpoint, CHUNK_SIZE) - img_coords_j = utils.img_to_patch(xyz_j, midpoint, CHUNK_SIZE) - path = geometry_utils.make_line(img_coords_i, img_coords_j, d) + # Coordinate transform + img_chunk = utils.normalize_img(img_chunk) + xyz_i = utils.img_to_patch(xyz_i, midpoint, CHUNK_SIZE) + xyz_j = utils.img_to_patch(xyz_j, midpoint, CHUNK_SIZE) + # Generate features + path = geometry.make_line(xyz_i, xyz_j, N_PROFILE_POINTS) + profile = geometry.get_profile(img_chunk, path, window=WINDOW) labels_chunk[labels_chunk > 0] = 1 - labels_chunk = geometry_utils.fill_path(labels_chunk, path, val=2) + labels_chunk = geometry.fill_path(labels_chunk, path, val=2) + chunk = np.stack([img_chunk, labels_chunk], axis=0) + + # Output if process_id: - return np.stack([img_chunk, labels_chunk], axis=0), process_id + return process_id, chunk, profile + else: + return chunk, profile + + +def generate_img_profiles(neurograph, path, proposals=None): + if neurograph.bbox: + return generate_img_profiles_via_superchunk( + neurograph, path, proposals=proposals + ) else: - return np.stack([img_chunk, labels_chunk], axis=0) + return generate_img_profiles_via_multithreads( + neurograph, path, proposals=proposals + ) -def generate_img_profiles( - neurograph, path, anisotropy=[1.0, 1.0, 1.0], proposal_list=None +def generate_img_profiles_via_multithreads( + neurograph, img_path, proposals=None ): + profile_features = dict() + img = utils.open_tensorstore(img_path, "zarr") + with ThreadPoolExecutor() as executor: + # Assign threads + threads = [None] * len(proposals) + for i, edge in enumerate(proposals): + xyz_i, xyz_j = gutils.get_edge_attr(neurograph, edge, "xyz") + xyz_i = utils.world_to_img(neurograph, xyz_i) + xyz_j = utils.world_to_img(neurograph, xyz_j) + line = geometry.make_line(xyz_i, xyz_j, 
N_PROFILE_POINTS) + threads[i] = executor.submit(geometry.get_profile, img, line, edge) + + # Store result + for thread in as_completed(threads): + edge, profile = thread.result() + profile_features[edge] = profile + return profile_features + + +def generate_img_profiles_via_superchunk(neurograph, path, proposals=None): """ - Generates an image intensity profile along each edge proposal. + Generates an image intensity profile along each edge proposal by reading + a single superchunk from cloud that contains all proposals. Parameters ---------- @@ -222,10 +237,7 @@ def generate_img_profiles( predicted segmentation. path : str Path to raw image. - anisotropy : list[float], optional - Real-world to image coordinates scaling factor for (x, y, z). - The default is [1.0, 1.0, 1.0]. - proposal_list : list[frozenset], optional + proposals : list[frozenset], optional List of edge proposals for which features will be generated. The default is None. @@ -246,12 +258,12 @@ def generate_img_profiles( xyz_i, xyz_j = neurograph.get_edge_attr(edge, "xyz") xyz_i = utils.world_to_img(neurograph, xyz_i) xyz_j = utils.world_to_img(neurograph, xyz_j) - path = geometry_utils.make_line(xyz_i, xyz_j, N_PROFILE_POINTS) - features[edge] = geometry_utils.get_profile(img, path, window=WINDOW) + path = geometry.make_line(xyz_i, xyz_j, N_PROFILE_POINTS) + features[edge] = geometry.get_profile(img, path, window=WINDOW) return features -def generate_mutable_skel_features(neurograph, proposal_list=None): +def generate_skel_features(neurograph, proposals=None): features = dict() for edge in neurograph.mutable_edges: i, j = tuple(edge) @@ -275,20 +287,18 @@ def generate_mutable_skel_features(neurograph, proposal_list=None): def get_directionals(neurograph, edge, window): # Compute tangent vectors i, j = tuple(edge) - edge_directional = geometry_utils.compute_tangent( - neurograph.edges[edge]["xyz"] - ) - directional_i = geometry_utils.get_directional( - neurograph, i, edge_directional, window=window + edge_direction = geometry.compute_tangent(neurograph.edges[edge]["xyz"]) + direction_i = geometry.get_directional( + neurograph, i, edge_direction, window=window ) - directional_j = geometry_utils.get_directional( - neurograph, j, edge_directional, window=window + direction_j = geometry.get_directional( + neurograph, j, edge_direction, window=window ) # Compute features - inner_product_1 = abs(np.dot(edge_directional, directional_i)) - inner_product_2 = abs(np.dot(edge_directional, directional_j)) - inner_product_3 = np.dot(directional_i, directional_j) + inner_product_1 = abs(np.dot(edge_direction, direction_i)) + inner_product_2 = abs(np.dot(edge_direction, direction_j)) + inner_product_3 = np.dot(direction_i, direction_j) return np.array([inner_product_1, inner_product_2, inner_product_3]) diff --git a/src/deep_neurographs/geometry_utils.py b/src/deep_neurographs/geometry.py similarity index 90% rename from src/deep_neurographs/geometry_utils.py rename to src/deep_neurographs/geometry.py index c0b9b1e..72ae741 100644 --- a/src/deep_neurographs/geometry_utils.py +++ b/src/deep_neurographs/geometry.py @@ -1,11 +1,12 @@ import heapq -import math from copy import deepcopy import numpy as np +import tensorstore as ts from scipy.interpolate import UnivariateSpline from scipy.linalg import svd -from scipy.spatial import distance +from scipy.spatial import distance + from deep_neurographs import utils @@ -67,7 +68,7 @@ def smooth_branch(xyz, s=None): t = np.linspace(0, 1, xyz.shape[0]) spline_x, spline_y, spline_z = fit_spline(xyz, 
s=s) xyz = np.column_stack((spline_x(t), spline_y(t), spline_z(t))) - return xyz + return xyz.astype(np.float32) def fit_spline(xyz, s=None): @@ -90,14 +91,24 @@ def sample_path(path, n_points): # Image feature extraction -def get_profile(img, xyz_arr, window=[5, 5, 5]): - return [np.max(utils.get_chunk(img, xyz, window)) for xyz in xyz_arr] +def get_profile(img, xyz_arr, process_id=None, window=[5, 5, 5]): + profile = [] + for xyz in xyz_arr: + if type(img) == ts.TensorStore: + profile.append(np.max(utils.read_tensorstore(img, xyz, window))) + else: + profile.append(np.max(utils.get_chunk(img, xyz, window))) + + if process_id: + return process_id, profile + else: + return profile def fill_path(img, path, val=-1): for xyz in path: x, y, z = tuple(np.floor(xyz).astype(int)) - img[x - 1 : x + 2, y - 1 : y + 2, z - 1 : z + 2] = val + img[x - 1: x + 2, y - 1: y + 2, z - 1: z + 2] = val return img @@ -156,8 +167,8 @@ def optimize_simple_alignment(neurograph, img, edge, depth=15): i, j = tuple(edge) branch_i = neurograph.get_branch(i) branch_j = neurograph.get_branch(j) - xyz_i, xyz_j, _ = align(neurograph, img, branch_i, branch_j, depth) - return xyz_i, xyz_j + d_i, d_j, _ = align(neurograph, img, branch_i, branch_j, depth) + return branch_i[d_i], branch_j[d_j] def optimize_complex_alignment(neurograph, img, edge, depth=15): @@ -185,9 +196,11 @@ def optimize_complex_alignment(neurograph, img, edge, depth=15): i, j = tuple(edge) branch = neurograph.get_branch(i if neurograph.is_leaf(i) else j) branches = neurograph.get_branches(j if neurograph.is_leaf(i) else i) - xyz_1, leaf_1, val_1 = align(neurograph, img, branch, branches[0], depth) - xyz_2, leaf_2, val_2 = align(neurograph, img, branch, branches[1], depth) - return (xyz_1, leaf_1) if val_1 > val_2 else (xyz_2, leaf_2) + d1, e1, val_1 = align(neurograph, img, branch, branches[0], depth) + d2, e2, val_2 = align(neurograph, img, branch, branches[1], depth) + pair_1 = (branch[d1], branches[0][e1]) + pair_2 = (branch[d2], branches[1][e2]) + return pair_1 if val_1 > val_2 else pair_2 def align(neurograph, img, branch_1, branch_2, depth): @@ -225,20 +238,20 @@ def align(neurograph, img, branch_1, branch_2, depth): and "best_xyz_2". """ - best_xyz_1 = None - best_xyz_2 = None + best_d1 = None + best_d2 = None best_score = 0 - for d_1 in range(min(depth, len(branch_1) - 1)): - xyz_1 = neurograph.to_img(branch_1[d_1]) - for d_2 in range(min(depth, len(branch_2) - 1)): - xyz_2 = neurograph.to_img(branch_2[d_2]) + for d1 in range(min(depth, len(branch_1) - 1)): + xyz_1 = neurograph.to_img(branch_1[d1]) + for d2 in range(min(depth, len(branch_2) - 1)): + xyz_2 = neurograph.to_img(branch_2[d2]) line = make_line(xyz_1, xyz_2, 10) score = np.mean(get_profile(img, line, window=[3, 3, 3])) if score > best_score: best_score = score - best_xyz_1 = deepcopy(xyz_1) - best_xyz_2 = deepcopy(xyz_2) - return best_xyz_1, best_xyz_2, best_score + best_d1 = d1 + best_d2 = d2 + return best_d1, best_d2, best_score def optimize_path(img, origin, xyz_1, xyz_2): @@ -433,8 +446,8 @@ def dist(v_1, v_2, metric="l2"): def check_dists(xyz_1, xyz_2, xyz_3, radius): """ - Checks whether distance between "xyz_1", "xyz_3" and "xyz_2", "xyz_3" is - sufficiently small. Routine is used during edge proposal generation to + Checks whether distance between "xyz_1", "xyz_3" and "xyz_2", "xyz_3" is + sufficiently small. Routine is used during edge proposal generation to determine whether to create new vertex at "xyz_2" or draw proposal between "xyz_1" and existing node at "xyz_3". 
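# A minimal, self-contained sketch of the fan-out/collect threading pattern
# that the new *_via_multithreads helpers rely on: one thread per proposal,
# each future returning its edge id alongside the computed result so the
# caller can key the output dictionary by edge. `compute_profile` is a
# stand-in for geometry.get_profile and the `proposals` mapping is purely
# illustrative; the real helpers read intensities from an image volume.
from concurrent.futures import ThreadPoolExecutor, as_completed

import numpy as np


def compute_profile(edge, xyz_i, xyz_j, n_points=16):
    # Stand-in computation: sample points along the line between the two
    # proposal endpoints and reduce them to a 1d "profile".
    line = np.linspace(xyz_i, xyz_j, n_points)
    return edge, line.sum(axis=1)


def profiles_via_threads(proposals):
    # proposals: dict mapping a frozenset edge id -> (xyz_i, xyz_j)
    profiles = dict()
    with ThreadPoolExecutor() as executor:
        threads = [
            executor.submit(compute_profile, edge, xyz_i, xyz_j)
            for edge, (xyz_i, xyz_j) in proposals.items()
        ]
        for thread in as_completed(threads):
            edge, profile = thread.result()
            profiles[edge] = profile
    return profiles


if __name__ == "__main__":
    toy = {frozenset((0, 1)): (np.zeros(3), 10 * np.ones(3))}
    print(profiles_via_threads(toy))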
diff --git a/src/deep_neurographs/graph_utils.py b/src/deep_neurographs/graph_utils.py index 157c4c4..361e564 100644 --- a/src/deep_neurographs/graph_utils.py +++ b/src/deep_neurographs/graph_utils.py @@ -27,7 +27,7 @@ import networkx as nx import numpy as np -from deep_neurographs import geometry_utils, swc_utils, utils +from deep_neurographs import geometry, swc_utils, utils def get_irreducibles(swc_dict, swc_id=None, prune=True, depth=16, smooth=True): @@ -82,7 +82,7 @@ def get_irreducibles(swc_dict, swc_id=None, prune=True, depth=16, smooth=True): # Visit j attrs = upd_edge_attrs(swc_dict, attrs, j) if j in leafs or j in junctions: - attrs = set_edge_attrs(attrs) + attrs = to_numpy(attrs) if smooth: swc_dict, edges = __smooth_branch( swc_dict, attrs, edges, nbs, root, j @@ -221,7 +221,7 @@ def __smooth_branch(swc_dict, attrs, edges, nbs, root, j): j : int End point of branch to be smoothed. """ - attrs["xyz"] = geometry_utils.smooth_branch(attrs["xyz"], s=10) + attrs["xyz"] = geometry.smooth_branch(attrs["xyz"], s=5) swc_dict, edges = upd_xyz(swc_dict, attrs, edges, nbs, root, 0) swc_dict, edges = upd_xyz(swc_dict, attrs, edges, nbs, j, -1) edges[(root, j)] = attrs @@ -363,13 +363,44 @@ def get_edge_attr(graph, edge, attr): return graph.edges[edge][attr] -def set_edge_attrs(attrs): +def to_numpy(attrs): + """ + Converts edge attributes from a list to NumPy array. + + Parameters + ---------- + attrs : dict + Dictionary containing attributes of some edge. + + Returns + ------- + attrs : dict + Updated edge attribute dictionary. + + """ attrs["xyz"] = np.array(attrs["xyz"], dtype=np.float32) attrs["radius"] = np.array(attrs["radius"], dtype=np.float16) return attrs def set_node_attrs(swc_dict, nodes): + """ + Set node attributes by extracting values from "swc_dict". + + Parameters + ---------- + swc_dict : dict + Contents of an swc file. + nodes : list + List of nodes to set attributes. + + Returns + ------- + attrs : dict + Dictionary in which keys are node ids and values are a dictionary of + attributes extracted from "swc_dict". + + """ attrs = dict() for i in nodes: attrs[i] = {"radius": swc_dict["radius"][i], "xyz": swc_dict["xyz"][i]} @@ -377,6 +408,32 @@ def set_node_attrs(swc_dict, nodes): def upd_node_attrs(swc_dict, leafs, junctions, i): + """ + Updates node attributes by extracting values from "swc_dict". + + Parameters + ---------- + swc_dict : dict + Contents of an swc file that contains the smoothed xyz coordinates of + corresponding to "leafs" and "junctions". Note xyz coordinates are + smoothed during edge extraction. + leafs : dict + Dictionary where keys are leaf node ids and values are attribute + dictionaries. + junctions : dict + Dictionary where keys are junction node ids and values are attribute + dictionaries. + i : int + Node to be updated. + + Returns + ------- + leafs : dict + Updated dictionary if "i" was contained in "leafs.keys()". + junctions : dict + Updated dictionary if "i" was contained in "junctions.keys()". 
+ + """ upd_attrs = {"radius": swc_dict["radius"][i], "xyz": swc_dict["xyz"][i]} if i in leafs: leafs[i] = upd_attrs diff --git a/src/deep_neurographs/neurograph.py b/src/deep_neurographs/neurograph.py index 9fc3906..e65ed96 100644 --- a/src/deep_neurographs/neurograph.py +++ b/src/deep_neurographs/neurograph.py @@ -15,11 +15,12 @@ import tensorstore as ts from scipy.spatial import KDTree -from deep_neurographs import geometry_utils +from deep_neurographs import geometry from deep_neurographs import graph_utils as gutils from deep_neurographs import utils from deep_neurographs.densegraph import DenseGraph -from deep_neurographs.geometry_utils import dist as get_dist, check_dists +from deep_neurographs.geometry import check_dists +from deep_neurographs.geometry import dist as get_dist SUPPORTED_LABEL_MASK_TYPES = [dict, np.array, ts.TensorStore] @@ -57,9 +58,10 @@ def __init__( # Initialize bounding box (if exists) self.bbox = bbox if self.bbox: - self.origin = bbox["min"] + self.origin = bbox["min"].astype(int) self.shape = (bbox["max"] - bbox["min"]).astype(int) else: + self.origin = np.array([0, 0, 0], dtype=int) self.shape = None def init_immutable_graph(self, add_attrs=False): @@ -305,9 +307,8 @@ def run_optimization(self): self.img_path, "zarr", origin, self.shape, from_center=False ) for edge in self.mutable_edges: - xyz_1, xyz_2 = geometry_utils.optimize_alignment(self, img, edge) - proposal = [self.to_world(xyz_1), self.to_world(xyz_2)] - self.edges[edge]["xyz"] = np.vstack(proposal) + xyz_1, xyz_2 = geometry.optimize_alignment(self, img, edge) + self.edges[edge]["xyz"] = np.array([xyz_1, xyz_2]) def get_branch(self, xyz_or_node, key="xyz"): if type(xyz_or_node) == int: @@ -351,7 +352,7 @@ def init_targets(self, target_neurograph): edge = proposals[idx] if self.is_simple(edge): add_bool = self.is_target( - target_neurograph, edge, dist=3, ratio=0.7, exclude=5 + target_neurograph, edge, dist=7, ratio=0.7, exclude=10 ) if add_bool: self.target_edges.add(edge) @@ -363,7 +364,7 @@ def init_targets(self, target_neurograph): for idx in np.argsort(dists): edge = remaining_proposals[idx] add_bool = self.is_target( - target_neurograph, edge, dist=5, ratio=0.5, exclude=5 + target_neurograph, edge, dist=8, ratio=0.5, exclude=10 ) if add_bool: self.target_edges.add(edge) diff --git a/src/deep_neurographs/swc_utils.py b/src/deep_neurographs/swc_utils.py index 7b60783..d8e6a0c 100644 --- a/src/deep_neurographs/swc_utils.py +++ b/src/deep_neurographs/swc_utils.py @@ -16,7 +16,7 @@ import networkx as nx import numpy as np -from deep_neurographs import geometry_utils +from deep_neurographs import geometry from deep_neurographs import graph_utils as gutils from deep_neurographs import utils @@ -130,7 +130,7 @@ def fast_parse(contents): "id": np.zeros((len(contents)), dtype=np.int32), "radius": np.zeros((len(contents)), dtype=np.float32), "pid": np.zeros((len(contents)), dtype=np.int32), - "xyz": np.zeros((len(contents), 3), dtype=np.int32), + "xyz": np.zeros((len(contents), 3), dtype=np.float32), } for i, line in enumerate(contents): parts = line.split() @@ -347,7 +347,7 @@ def smooth(swc_dict): graph = to_graph(swc_dict) leafs, junctions = gutils.get_irreducible_nodes(graph) if len(junctions) == 0: - xyz = geometry_utils.smooth_branch(xyz) + xyz = geometry.smooth_branch(xyz) else: idxs = [] root = None @@ -369,5 +369,5 @@ def smooth(swc_dict): def upd_edge(xyz, idxs): idxs = np.array(idxs) - xyz[idxs] = geometry_utils.smooth_branch(xyz[idxs], s=10) + xyz[idxs] = geometry.smooth_branch(xyz[idxs], 
s=10) return xyz diff --git a/src/deep_neurographs/utils.py b/src/deep_neurographs/utils.py index d752792..a43bfac 100644 --- a/src/deep_neurographs/utils.py +++ b/src/deep_neurographs/utils.py @@ -12,7 +12,6 @@ import json import math import os -import psutil import shutil from copy import deepcopy from io import BytesIO @@ -20,6 +19,7 @@ from zipfile import ZipFile import numpy as np +import psutil import tensorstore as ts import zarr @@ -310,21 +310,21 @@ def open_tensorstore(path, driver): def read_img_chunk(img, xyz, shape): start, end = get_start_end(xyz, shape) return img[ - start[2] : end[2], start[1] : end[1], start[0] : end[0] + start[2]: end[2], start[1]: end[1], start[0]: end[0] ].transpose(2, 1, 0) def get_chunk(arr, xyz, shape): start, end = get_start_end(xyz, shape) return deepcopy( - arr[start[0] : end[0], start[1] : end[1], start[2] : end[2]] + arr[start[0]: end[0], start[1]: end[1], start[2]: end[2]] ) def read_tensorstore(ts_arr, xyz, shape): start, end = get_start_end(xyz, shape) return ( - ts_arr[start[0] : end[0], start[1] : end[1], start[2] : end[2]] + ts_arr[start[0]: end[0], start[1]: end[1], start[2]: end[2]] .read() .result() )
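# A small sketch of the array-type dispatch that get_img_chunk_features and
# geometry.get_profile now perform: TensorStore handles need an explicit
# .read().result() to materialize a window, while in-memory ndarrays can be
# sliced directly. The centering convention in get_start_end below
# (start = center - shape // 2) is an assumption for illustration and may
# differ from the one in utils.get_start_end.
import numpy as np

try:
    import tensorstore as ts
except ImportError:  # keep the sketch importable without tensorstore
    ts = None


def get_start_end(center, shape):
    start = [int(c) - s // 2 for c, s in zip(center, shape)]
    end = [s0 + s for s0, s in zip(start, shape)]
    return start, end


def read_window(img, center, shape):
    start, end = get_start_end(center, shape)
    window = img[start[0]: end[0], start[1]: end[1], start[2]: end[2]]
    if ts is not None and isinstance(img, ts.TensorStore):
        # Lazy TensorStore view -> trigger the read and block on the result.
        return window.read().result()
    return np.asarray(window)


if __name__ == "__main__":
    volume = np.arange(4 ** 3).reshape(4, 4, 4)
    print(read_window(volume, center=[2, 2, 2], shape=[3, 3, 3]))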