From c3e966bf8fcf934dde32c9a3d7846cc1aadd9e4e Mon Sep 17 00:00:00 2001 From: arnaudon Date: Tue, 2 May 2023 15:30:44 +0200 Subject: [PATCH 01/37] wip --- .../run_boundary_new/create_mesh.py | 39 ++++++++++++ .../run_boundary_new/luigi.cfg | 60 ++++++++++++++++++ .../run_boundary_new/plot_debug.py | 62 +++++++++++++++++++ .../boundary_example/run_boundary_new/run.sh | 12 ++++ 4 files changed, 173 insertions(+) create mode 100644 examples/boundary_example/run_boundary_new/create_mesh.py create mode 100644 examples/boundary_example/run_boundary_new/luigi.cfg create mode 100644 examples/boundary_example/run_boundary_new/plot_debug.py create mode 100755 examples/boundary_example/run_boundary_new/run.sh diff --git a/examples/boundary_example/run_boundary_new/create_mesh.py b/examples/boundary_example/run_boundary_new/create_mesh.py new file mode 100644 index 0000000..1282f44 --- /dev/null +++ b/examples/boundary_example/run_boundary_new/create_mesh.py @@ -0,0 +1,39 @@ +from neurocollage.mesh_helper import MeshHelper +from voxcell.cell_collection import CellCollection +import numpy as np +import trimesh + + +def get_distances_to_mesh(mesh_helper, mesh, ray_origins, ray_directions): + """Compute distances from point/directions to a mesh.""" + vox_ray_origins = mesh_helper.positions_to_indices(ray_origins) + locations, index_ray, index_tri = mesh.ray.intersects_location( + ray_origins=vox_ray_origins, ray_directions=ray_directions + ) + return np.linalg.norm( + mesh_helper.indices_to_positions(locations[index_ray]) - ray_origins, axis=1 + ) + + +if __name__ == "__main__": + atlas = {"atlas": "atlas", "structure": "out/synthesis_input/region_structure.yaml"} + mesh_helper = MeshHelper(atlas, None) + pia_mesh = mesh_helper.get_pia_mesh() + pia_mesh = mesh_helper.get_boundary_mesh() + #pia_mesh = mesh_helper.get_layer_meshes()[1] + print(pia_mesh) + pia_mesh.export("mesh.obj") + + morph_df = CellCollection().load("out/synthesis/circuit.h5").as_dataframe() + + ray_origins = 
morph_df[["x", "y", "z"]].to_numpy() + ray_directions = np.array(len(morph_df) * [[0.0, 1, 0.0]]) + dists = get_distances_to_mesh(mesh_helper, pia_mesh, ray_origins, ray_directions) + print(dists) + ray_visualize = trimesh.load_path( + np.hstack((ray_origins, ray_origins + ray_directions * 500.0)).reshape(-1, 2, 3) + ) + + scene = trimesh.Scene([pia_mesh, ray_visualize]) + scene.show() + mesh_helper.show() diff --git a/examples/boundary_example/run_boundary_new/luigi.cfg b/examples/boundary_example/run_boundary_new/luigi.cfg new file mode 100644 index 0000000..7477846 --- /dev/null +++ b/examples/boundary_example/run_boundary_new/luigi.cfg @@ -0,0 +1,60 @@ +[core] +workers = 1 +logging_conf_file = logging.conf + +[RunnerConfig] +nb_jobs = 10 +joblib_verbose = 10 + +[SynthesisConfig] +mtypes = ["L5_TPC:A"] +axon_method = no_axon + +[CircuitConfig] +circuit_somata_path = circuit.mvd3 +atlas_path = atlas + +[PathConfig] +result_path = out +morphology_path = repaired_morphology_path_h5 + +[BuildCircuit] +density_factor = 1.0 + +[SliceCircuit] +n_cells = 20 + +[CreateAtlasPlanes] +plane_type = aligned +plane_count = 1 +centerline_axis = 2 +slice_thickness = 200 + +[GetSynthesisInputs] +url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git +branch = boundary +git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_sscx + +[BuildMorphsDF] +neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml +morphology_dirs = {"repaired_morphology_path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc","repaired_morphology_path_h5": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-h5"} + +#[BuildAxonMorphologies] +#axon_cells_path = 
/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-h5 + +[Synthesize] +rerun = False +context_config_path = context_config.yaml +debug_region_grower_scales = True + +#################################### +# ########## Validation ########## # +#################################### +[ValidateSynthesis] +with_collage = True +with_path_distance_fits = False +with_morphometrics = False +with_density_profiles = False +with_scale_statistics = False +with_score_matrix_reports=False +with_morphology_validation_reports = False diff --git a/examples/boundary_example/run_boundary_new/plot_debug.py b/examples/boundary_example/run_boundary_new/plot_debug.py new file mode 100644 index 0000000..021b880 --- /dev/null +++ b/examples/boundary_example/run_boundary_new/plot_debug.py @@ -0,0 +1,62 @@ +import pandas as pd +from tqdm import tqdm +import matplotlib.pyplot as plt + +if __name__ == "__main__": + df = pd.read_csv("data.csv", header=None) + + d = df[df[6] < 0] + dd = df[df[6] > 0] + + plt.figure() + plt.scatter(d[0], d[1], c="r", s=0.02, marker=".", rasterized=True) + plt.scatter(dd[0], dd[1], c=dd[6], s=0.02, marker=".", rasterized=True) + plt.axis("equal") + plt.colorbar() + plt.savefig("dist_xy.pdf") + + plt.figure() + plt.scatter(d[2], d[1], c="r", s=0.02, marker=".", rasterized=True) + plt.scatter(dd[2], dd[1], c=dd[6], s=0.02, marker=".", rasterized=True) + plt.axis("equal") + plt.colorbar() + plt.savefig("dist_zy.pdf") + + plt.figure() + plt.scatter(d[0], d[2], c="r", s=0.02, marker=".", rasterized=True) + plt.scatter(dd[0], dd[2], c=dd[6], s=0.02, marker=".", rasterized=True) + plt.axis("equal") + plt.colorbar() + plt.savefig("dist_xz.pdf") + + n_steps = 200 + n = int(len(df) / n_steps) + for step in tqdm(range(n_steps + 1)): + d = df.loc[: step * (n + 1)] + + _d = d[d[6] < 0] + _dd = d[d[6] > 0] + plt.figure() + plt.scatter(_d[0], _d[1], c="r", s=0.01, marker=".", rasterized=True) + plt.scatter( + 
_dd[0], _dd[1], c=_dd[6], s=0.01, marker=".", rasterized=True, vmin=0, vmax=1000 + ) + plt.suptitle(f"computational time [a.u]: {step}") + plt.colorbar(shrink=0.5, label="distance to boundary [microns]") + plt.axis([-1000, 750, 500, 2200]) + plt.tight_layout() + + plt.savefig(f"steps/side_step_{step:04d}.png") + plt.close() + + plt.figure() + plt.scatter(_d[0], _d[2], c="r", s=0.01, marker=".", rasterized=True) + plt.scatter( + _dd[0], _dd[2], c=_dd[6], s=0.01, marker=".", rasterized=True, vmin=0, vmax=1000 + ) + plt.colorbar(shrink=0.5, label="distance to boundary [microns]") + plt.axis([-800, 700, -1400, 0]) + plt.suptitle(f"computational time [a.u]: {step}") + plt.tight_layout() + plt.savefig(f"steps/top_step_{step:04d}.png") + plt.close() diff --git a/examples/boundary_example/run_boundary_new/run.sh b/examples/boundary_example/run_boundary_new/run.sh new file mode 100755 index 0000000..d2e6f8c --- /dev/null +++ b/examples/boundary_example/run_boundary_new/run.sh @@ -0,0 +1,12 @@ +#!/usr/bin/bash + +export NUMEXPR_MAX_THREADS=1 +export OMP_NUM_THREADS=1 +export OPENBLAS_NUM_THREADS=1 + +#rm -rf atlas +#brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 + +python -m luigi --module synthesis_workflow.tasks.workflows ValidateSynthesis \ + --local-scheduler \ + --log-level INFO \ From bc0491db2fe633dac7c64ae55968733c2c957905 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Thu, 22 Jun 2023 21:22:58 +0200 Subject: [PATCH 02/37] more examples --- .../run_boundary_new/create_mesh.py | 39 ------------ .../run_boundary_new/luigi.cfg | 60 ------------------ .../run_boundary_new/plot_debug.py | 62 ------------------- .../boundary_example/run_boundary_new/run.sh | 12 ---- src/synthesis_workflow/tasks/validation.py | 7 +++ src/synthesis_workflow/validation.py | 5 +- 6 files changed, 11 insertions(+), 174 deletions(-) delete mode 100644 examples/boundary_example/run_boundary_new/create_mesh.py delete mode 100644 
examples/boundary_example/run_boundary_new/luigi.cfg delete mode 100644 examples/boundary_example/run_boundary_new/plot_debug.py delete mode 100755 examples/boundary_example/run_boundary_new/run.sh diff --git a/examples/boundary_example/run_boundary_new/create_mesh.py b/examples/boundary_example/run_boundary_new/create_mesh.py deleted file mode 100644 index 1282f44..0000000 --- a/examples/boundary_example/run_boundary_new/create_mesh.py +++ /dev/null @@ -1,39 +0,0 @@ -from neurocollage.mesh_helper import MeshHelper -from voxcell.cell_collection import CellCollection -import numpy as np -import trimesh - - -def get_distances_to_mesh(mesh_helper, mesh, ray_origins, ray_directions): - """Compute distances from point/directions to a mesh.""" - vox_ray_origins = mesh_helper.positions_to_indices(ray_origins) - locations, index_ray, index_tri = mesh.ray.intersects_location( - ray_origins=vox_ray_origins, ray_directions=ray_directions - ) - return np.linalg.norm( - mesh_helper.indices_to_positions(locations[index_ray]) - ray_origins, axis=1 - ) - - -if __name__ == "__main__": - atlas = {"atlas": "atlas", "structure": "out/synthesis_input/region_structure.yaml"} - mesh_helper = MeshHelper(atlas, None) - pia_mesh = mesh_helper.get_pia_mesh() - pia_mesh = mesh_helper.get_boundary_mesh() - #pia_mesh = mesh_helper.get_layer_meshes()[1] - print(pia_mesh) - pia_mesh.export("mesh.obj") - - morph_df = CellCollection().load("out/synthesis/circuit.h5").as_dataframe() - - ray_origins = morph_df[["x", "y", "z"]].to_numpy() - ray_directions = np.array(len(morph_df) * [[0.0, 1, 0.0]]) - dists = get_distances_to_mesh(mesh_helper, pia_mesh, ray_origins, ray_directions) - print(dists) - ray_visualize = trimesh.load_path( - np.hstack((ray_origins, ray_origins + ray_directions * 500.0)).reshape(-1, 2, 3) - ) - - scene = trimesh.Scene([pia_mesh, ray_visualize]) - scene.show() - mesh_helper.show() diff --git a/examples/boundary_example/run_boundary_new/luigi.cfg 
b/examples/boundary_example/run_boundary_new/luigi.cfg deleted file mode 100644 index 7477846..0000000 --- a/examples/boundary_example/run_boundary_new/luigi.cfg +++ /dev/null @@ -1,60 +0,0 @@ -[core] -workers = 1 -logging_conf_file = logging.conf - -[RunnerConfig] -nb_jobs = 10 -joblib_verbose = 10 - -[SynthesisConfig] -mtypes = ["L5_TPC:A"] -axon_method = no_axon - -[CircuitConfig] -circuit_somata_path = circuit.mvd3 -atlas_path = atlas - -[PathConfig] -result_path = out -morphology_path = repaired_morphology_path_h5 - -[BuildCircuit] -density_factor = 1.0 - -[SliceCircuit] -n_cells = 20 - -[CreateAtlasPlanes] -plane_type = aligned -plane_count = 1 -centerline_axis = 2 -slice_thickness = 200 - -[GetSynthesisInputs] -url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git -branch = boundary -git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_sscx - -[BuildMorphsDF] -neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml -morphology_dirs = {"repaired_morphology_path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc","repaired_morphology_path_h5": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-h5"} - -#[BuildAxonMorphologies] -#axon_cells_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-h5 - -[Synthesize] -rerun = False -context_config_path = context_config.yaml -debug_region_grower_scales = True - -#################################### -# ########## Validation ########## # -#################################### -[ValidateSynthesis] -with_collage = True -with_path_distance_fits = False -with_morphometrics = False -with_density_profiles = False -with_scale_statistics = False -with_score_matrix_reports=False -with_morphology_validation_reports = False 
diff --git a/examples/boundary_example/run_boundary_new/plot_debug.py b/examples/boundary_example/run_boundary_new/plot_debug.py deleted file mode 100644 index 021b880..0000000 --- a/examples/boundary_example/run_boundary_new/plot_debug.py +++ /dev/null @@ -1,62 +0,0 @@ -import pandas as pd -from tqdm import tqdm -import matplotlib.pyplot as plt - -if __name__ == "__main__": - df = pd.read_csv("data.csv", header=None) - - d = df[df[6] < 0] - dd = df[df[6] > 0] - - plt.figure() - plt.scatter(d[0], d[1], c="r", s=0.02, marker=".", rasterized=True) - plt.scatter(dd[0], dd[1], c=dd[6], s=0.02, marker=".", rasterized=True) - plt.axis("equal") - plt.colorbar() - plt.savefig("dist_xy.pdf") - - plt.figure() - plt.scatter(d[2], d[1], c="r", s=0.02, marker=".", rasterized=True) - plt.scatter(dd[2], dd[1], c=dd[6], s=0.02, marker=".", rasterized=True) - plt.axis("equal") - plt.colorbar() - plt.savefig("dist_zy.pdf") - - plt.figure() - plt.scatter(d[0], d[2], c="r", s=0.02, marker=".", rasterized=True) - plt.scatter(dd[0], dd[2], c=dd[6], s=0.02, marker=".", rasterized=True) - plt.axis("equal") - plt.colorbar() - plt.savefig("dist_xz.pdf") - - n_steps = 200 - n = int(len(df) / n_steps) - for step in tqdm(range(n_steps + 1)): - d = df.loc[: step * (n + 1)] - - _d = d[d[6] < 0] - _dd = d[d[6] > 0] - plt.figure() - plt.scatter(_d[0], _d[1], c="r", s=0.01, marker=".", rasterized=True) - plt.scatter( - _dd[0], _dd[1], c=_dd[6], s=0.01, marker=".", rasterized=True, vmin=0, vmax=1000 - ) - plt.suptitle(f"computational time [a.u]: {step}") - plt.colorbar(shrink=0.5, label="distance to boundary [microns]") - plt.axis([-1000, 750, 500, 2200]) - plt.tight_layout() - - plt.savefig(f"steps/side_step_{step:04d}.png") - plt.close() - - plt.figure() - plt.scatter(_d[0], _d[2], c="r", s=0.01, marker=".", rasterized=True) - plt.scatter( - _dd[0], _dd[2], c=_dd[6], s=0.01, marker=".", rasterized=True, vmin=0, vmax=1000 - ) - plt.colorbar(shrink=0.5, label="distance to boundary [microns]") - 
plt.axis([-800, 700, -1400, 0]) - plt.suptitle(f"computational time [a.u]: {step}") - plt.tight_layout() - plt.savefig(f"steps/top_step_{step:04d}.png") - plt.close() diff --git a/examples/boundary_example/run_boundary_new/run.sh b/examples/boundary_example/run_boundary_new/run.sh deleted file mode 100755 index d2e6f8c..0000000 --- a/examples/boundary_example/run_boundary_new/run.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/bash - -export NUMEXPR_MAX_THREADS=1 -export OMP_NUM_THREADS=1 -export OPENBLAS_NUM_THREADS=1 - -#rm -rf atlas -#brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 - -python -m luigi --module synthesis_workflow.tasks.workflows ValidateSynthesis \ - --local-scheduler \ - --log-level INFO \ diff --git a/src/synthesis_workflow/tasks/validation.py b/src/synthesis_workflow/tasks/validation.py index f4ae91c..4abb432 100644 --- a/src/synthesis_workflow/tasks/validation.py +++ b/src/synthesis_workflow/tasks/validation.py @@ -308,6 +308,7 @@ class PlotSingleCollage(WorkflowTask): """ mtype = luigi.Parameter(description=":str: The mtype to plot.") + n_pixels_y = luigi.IntParameter(default=20, description=":str: Number of orientations arrows") def requires(self): """Required input tasks.""" @@ -355,6 +356,7 @@ def run(self): nb_jobs=self.nb_jobs, joblib_verbose=self.joblib_verbose, dpi=self.dpi, + n_pixels_y=self.n_pixels_y, plot_neuron_kwargs={ "realistic_diameters": self.realistic_diameters, "linewidth": self.linewidth, @@ -706,9 +708,14 @@ def run(self): synth_morphs_df = pd.read_csv(self.input()["vacuum"]["out_morphs_df"].path) comp_key = self.requires()["vacuum"].vacuum_synth_morphology_path + mtypes = None + if self.in_atlas: + mtypes = SynthesisConfig().mtypes + trunk_validation( morphs_df, synth_morphs_df, + mtypes, self.output().pathlib_path, self.base_key, comp_key, diff --git a/src/synthesis_workflow/validation.py b/src/synthesis_workflow/validation.py index fd5bf67..8917d1a 100644 --- 
a/src/synthesis_workflow/validation.py +++ b/src/synthesis_workflow/validation.py @@ -1113,6 +1113,7 @@ def _get_hist(data, bins=50): def trunk_validation( morphs_df, synth_morphs_df, + mtypes, output_dir, base_key, comp_key, @@ -1130,7 +1131,9 @@ def trunk_validation( tmd_distributions = json.load(f) output_dir.mkdir(parents=True, exist_ok=True) - for mtype in morphs_df.mtype.unique(): + if mtypes is None: + mtypes = morphs_df.mtype.unique() + for mtype in mtypes: data_bio = extract_angle_data(morphs_df[morphs_df.mtype == mtype], base_key) data_synth = extract_angle_data(synth_morphs_df[synth_morphs_df.mtype == mtype], comp_key) with PdfPages(output_dir / f"trunk_validation_{mtype}.pdf") as pdf: From 08d06ef93a7e13fb4dcb9988fbff048697c1143b Mon Sep 17 00:00:00 2001 From: Alexis Arnaudon Date: Mon, 3 Jul 2023 12:47:58 +0000 Subject: [PATCH 03/37] Feat: Update examples for tutorial --- src/version.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/version.py b/src/version.py index cedc516..4eb3be2 100644 --- a/src/version.py +++ b/src/version.py @@ -1,3 +1,2 @@ """Package version.""" - -VERSION = "1.2.0.dev1" +VERSION = "1.2.1.dev1" From 4229e282f4cdc2c7b70c4dace18f155809ccd398 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Fri, 22 Sep 2023 16:19:50 +0200 Subject: [PATCH 04/37] Fix: small --- examples/O1_example/luigi.cfg | 14 ++++++++++---- examples/O1_example/run.sh | 4 ++-- src/synthesis_workflow/tasks/validation.py | 5 ++--- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index fa5a265..8de697e 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -5,7 +5,9 @@ workers = 1 logging_conf_file = logging.conf [SynthesisConfig] -mtypes = ["L5_TPC:A"] +#mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] +mtypes = ["L1_HAC"] +axon_method = synthesis [RunnerConfig] nb_jobs = 5 @@ -17,13 +19,16 @@ region = O0 
[BuildCircuit] density_factor = 1 +[CreateBoundaryMask] +boundary_thickness = 2 + [SliceCircuit] -n_cells = 5 +n_cells = 20 [CreateAtlasPlanes] plane_type = centerline_straight -plane_count = 5 -slice_thickness = 50 +plane_count = 3 +slice_thickness = 200 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git @@ -36,6 +41,7 @@ morphology_dirs = {"morphology_path": "/gpfs/bbp.cscs.ch/project/proj83/home/gev [ValidateSynthesis] with_collage = True +with_trunk_validation = True with_path_distance_fits = False with_morphometrics = False with_density_profiles = False diff --git a/examples/O1_example/run.sh b/examples/O1_example/run.sh index e7ee2d9..b94957a 100755 --- a/examples/O1_example/run.sh +++ b/examples/O1_example/run.sh @@ -2,8 +2,8 @@ export OMP_NUM_THREADS=1 -rm -rf atlas -brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 +#rm -rf atlas +#brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 python -m luigi --module synthesis_workflow.tasks.workflows ValidateSynthesis \ --local-scheduler \ diff --git a/src/synthesis_workflow/tasks/validation.py b/src/synthesis_workflow/tasks/validation.py index 4abb432..f12746f 100644 --- a/src/synthesis_workflow/tasks/validation.py +++ b/src/synthesis_workflow/tasks/validation.py @@ -34,7 +34,6 @@ from synthesis_workflow.tasks.config import SynthesisConfig from synthesis_workflow.tasks.config import ValidationLocalTarget from synthesis_workflow.tasks.synthesis import ApplySubstitutionRules -from synthesis_workflow.tasks.synthesis import BuildMorphsDF from synthesis_workflow.tasks.synthesis import BuildSynthesisDistributions from synthesis_workflow.tasks.synthesis import BuildSynthesisParameters from synthesis_workflow.tasks.synthesis import Synthesize @@ -120,7 +119,7 @@ class PlotMorphometrics(WorkflowTask): def requires(self): """Required input tasks.""" if self.in_atlas: - return {"morphs": BuildMorphsDF(), "circuit": 
ConvertCircuit()} + return {"morphs": ApplySubstitutionRules(), "circuit": ConvertCircuit()} else: return {"vacuum": VacuumSynthesize(), "morphs": ApplySubstitutionRules()} @@ -693,7 +692,7 @@ def requires(self): "parameters": BuildSynthesisParameters(), } if self.in_atlas: - tasks.update({"morphs": BuildMorphsDF(), "circuit": ConvertCircuit()}) + tasks.update({"morphs": ApplySubstitutionRules(), "circuit": ConvertCircuit()}) else: tasks.update({"vacuum": VacuumSynthesize(), "morphs": ApplySubstitutionRules()}) return tasks From bf41a7fe22409efb60f193e5bcb13ed66c1002ac Mon Sep 17 00:00:00 2001 From: arnaudon Date: Wed, 27 Sep 2023 17:44:14 +0200 Subject: [PATCH 05/37] more --- examples/O1_example/luigi.cfg | 22 +++++++++++++++------- src/synthesis_workflow/tasks/circuit.py | 13 +++++++++---- 2 files changed, 24 insertions(+), 11 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index 8de697e..98cbdb9 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -1,14 +1,22 @@ # run syntheis in an O1 atlas [core] -workers = 1 +workers = 5 logging_conf_file = logging.conf [SynthesisConfig] -#mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] -mtypes = ["L1_HAC"] axon_method = synthesis +# L1 IN +#mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] + +# L23 IN +#mtypes = ["L23_MC"] +#, "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP"] + +# L23 PC +mtypes = ["L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C"] + [RunnerConfig] nb_jobs = 5 @@ -17,13 +25,13 @@ atlas_path = atlas region = O0 [BuildCircuit] -density_factor = 1 +density_factor = 1.0 [CreateBoundaryMask] boundary_thickness = 2 [SliceCircuit] -n_cells = 20 +n_cells = 10 [CreateAtlasPlanes] plane_type = centerline_straight @@ -33,11 +41,11 @@ slice_thickness = 200 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git git_synthesis_input_path = 
synthdb/insitu_synthesis_inputs/rat_O1 -branch = main +branch = axon_rat_O1 [BuildMorphsDF] neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml -morphology_dirs = {"morphology_path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc"} +morphology_dirs = {"path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc"} [ValidateSynthesis] with_collage = True diff --git a/src/synthesis_workflow/tasks/circuit.py b/src/synthesis_workflow/tasks/circuit.py index abcef31..ded4f0a 100644 --- a/src/synthesis_workflow/tasks/circuit.py +++ b/src/synthesis_workflow/tasks/circuit.py @@ -4,6 +4,7 @@ from copy import deepcopy from functools import partial from pathlib import Path +import numpy as np import luigi import pandas as pd @@ -232,11 +233,15 @@ def run(self): CircuitConfig().region, CircuitConfig().hemisphere, ) - layer = mtype[1] - keys = [k + 1 for k, d in annotation["mapping"].items() if d.endswith(layer)] density_annotation = deepcopy(annotation["annotation"]) - density_annotation.raw[annotation["annotation"].raw == keys[0]] = 100000 - density_annotation.raw[annotation["annotation"].raw != keys[0]] = 0 + density_annotation.raw = np.zeros_like(density_annotation.raw, dtype=float) + for data in composition: + if data["traits"]["mtype"] == mtype: + layer = str(data["traits"]["layer"]) + keys = [ + k + 1 for k, d in annotation["mapping"].items() if d.endswith(layer) + ] + density_annotation.raw[annotation["annotation"].raw == keys[0]] = 100000 density_annotation.save_nrrd(str(nrrd_path)) cells = build_circuit( From 710aa514b7a53186bc1af2e59e7ed8f1d861c05f Mon Sep 17 00:00:00 2001 From: arnaudon Date: Mon, 9 Oct 2023 15:44:56 +0200 Subject: [PATCH 06/37] example luigi --- examples/O1_example/luigi.cfg | 38 
++++++++++++++++++++++++++--------- 1 file changed, 29 insertions(+), 9 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index 98cbdb9..b40dbfe 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -1,24 +1,44 @@ # run syntheis in an O1 atlas [core] -workers = 5 +workers = 10 logging_conf_file = logging.conf +[RunnerConfig] +nb_jobs = 5 + [SynthesisConfig] axon_method = synthesis # L1 IN -#mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] +# mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] # L23 IN -#mtypes = ["L23_MC"] -#, "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP"] +# mtypes = ["L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP"] # L23 PC -mtypes = ["L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C"] +# mtypes = ["L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C"] -[RunnerConfig] -nb_jobs = 5 +# L4 IN +# mtypes = ["L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC"] + +# L4 PC +# mtypes = ["L4_TPC", "L4_UPC"] + +# L5 IN +# mtypes = ["L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC"] + +# L5 PC +# mtypes = ["L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC"] + +# L6 PC +# mtypes = ["L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC"] + +# L6 IN +#mtypes = ["L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] + +# all +mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", "L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", 
"L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] [CircuitConfig] atlas_path = atlas @@ -35,8 +55,8 @@ n_cells = 10 [CreateAtlasPlanes] plane_type = centerline_straight -plane_count = 3 -slice_thickness = 200 +plane_count = 5 +slice_thickness = 50 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git From 650061770c5be6fb020c51f79c45a627c535bc84 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Mon, 9 Oct 2023 16:20:09 +0200 Subject: [PATCH 07/37] more --- README.rst | 4 ++-- docs/source/synthesis_methodology.rst | 32 ++++++++++++++++--------- src/synthesis_workflow/tasks/circuit.py | 2 +- src/version.py | 2 +- 4 files changed, 25 insertions(+), 15 deletions(-) diff --git a/README.rst b/README.rst index f52ed12..60858c7 100644 --- a/README.rst +++ b/README.rst @@ -4,8 +4,8 @@ Synthesis Workflow This project contains several workflows used for neuron synthesis and the validation of this process. It is divided into two packages: -* **synthesis-workflow**, which contains the workflow tasks and tools. -* **MorphVal**, which is a library used for morphology validation and can be used as a standalone. +* **synthesis-workflow**, the main package, which contains the workflow tasks and tools. +* **MorphVal**, which is a legacy library used for morphology validation. The complete documentation of this project is available here: ``_ diff --git a/docs/source/synthesis_methodology.rst b/docs/source/synthesis_methodology.rst index a1c8dae..eec8960 100644 --- a/docs/source/synthesis_methodology.rst +++ b/docs/source/synthesis_methodology.rst @@ -1,26 +1,26 @@ Synthesis methodology ===================== -This page presents the scientific procedure used to synthesize the cells. +This page presents the scientific procedure to follow in order to synthesize morphologies. 
Overview -------- Production workflow ~~~~~~~~~~~~~~~~~~~ +For specific mtypes and brain regions, the standard procedure to synthesize morphologiesis the following: -Basically, the workflow used in BBP to generate the cell morphologies is the following: +* collect biological data via an at least curated morphology release with the code + available at https://github.com/BlueBrain/morphology-workflows. +* run this workflow and change default parameters via files in the git repository SynthDB + available at https://bbpgitlab.epfl.ch/neuromath/synthdb. +* once the parametrisation is satisfactory, save the parameter and distributions json + files on the SynthDB database. +* these files are then available in SynthDB to run production workflows, such as circuit-build -* collect biological data, -* calibrate the models on these data, -* synthesized the cells according to the calibration parameters. -This package aims at running the calibration process, then calling each part of the -production workflow on a small use case and computing some validation metrics. Once the -calibration parameters are correct, the actual production workflow can be run using them. - -Internal workflow -~~~~~~~~~~~~~~~~~ +Internal workflows +~~~~~~~~~~~~~~~~~~ The main workflow that should be used is: :py:class:`tasks.workflows.ValidateSynthesis`. This workflow runs all the required tasks to calibrate and validate the synthesis models. @@ -29,9 +29,19 @@ It is divided in several subtasks (more details on these tasks are given in .. graphviz:: autoapi/tasks/workflows/ValidateSynthesis.dot + +If no information on the atlas is available, one can instead run the simpler vacuum +synthesis workflow: :py:class:`tasks.workflows.ValidateVaccumSynthesis`. It is also +divided in several subtasks (more details on these tasks are given in +:doc:`autoapi/tasks/index`): + +.. graphviz:: autoapi/tasks/workflows/ValidateSynthesis.dot + Calibration parameters ---------------------- +!!! 
Discuss custom_parameters.csv file here. + The calibration step should create the two parameter files used by :py:class:`placement_algorithm.app.synthesize_morphologies.Master`: diff --git a/src/synthesis_workflow/tasks/circuit.py b/src/synthesis_workflow/tasks/circuit.py index ded4f0a..5317804 100644 --- a/src/synthesis_workflow/tasks/circuit.py +++ b/src/synthesis_workflow/tasks/circuit.py @@ -4,9 +4,9 @@ from copy import deepcopy from functools import partial from pathlib import Path -import numpy as np import luigi +import numpy as np import pandas as pd import yaml from luigi.parameter import PathParameter diff --git a/src/version.py b/src/version.py index 4eb3be2..90ec225 100644 --- a/src/version.py +++ b/src/version.py @@ -1,2 +1,2 @@ """Package version.""" -VERSION = "1.2.1.dev1" +VERSION = "1.2.2.dev0" From 73eace96df1652852686d38a411b5e256b6d0fbf Mon Sep 17 00:00:00 2001 From: arnaudon Date: Mon, 9 Oct 2023 16:32:52 +0200 Subject: [PATCH 08/37] lint --- .pylintrc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pylintrc b/.pylintrc index 7db6576..13bd0f8 100644 --- a/.pylintrc +++ b/.pylintrc @@ -15,7 +15,7 @@ max-line-length=100 [DESIGN] # Maximum number of arguments for function / method -max-args=10 +max-args=12 # Argument names that match this expression will be ignored. 
Default to name # with leading underscore ignored-argument-names=_.* From 0ec159b970539087b874904859d2dc911091c6d2 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Thu, 19 Oct 2023 16:10:45 +0200 Subject: [PATCH 09/37] add example --- examples/boundary_example/logging.conf | 38 ++++++++++++++++++++ examples/boundary_example/luigi.cfg | 50 ++++++++++++++++++++++++++ examples/boundary_example/run.sh | 12 +++++++ 3 files changed, 100 insertions(+) create mode 100644 examples/boundary_example/logging.conf create mode 100644 examples/boundary_example/luigi.cfg create mode 100755 examples/boundary_example/run.sh diff --git a/examples/boundary_example/logging.conf b/examples/boundary_example/logging.conf new file mode 100644 index 0000000..802318d --- /dev/null +++ b/examples/boundary_example/logging.conf @@ -0,0 +1,38 @@ +[loggers] +keys=root,luigi,luigi_interface + +[handlers] +keys=consoleHandler,fileHandler + +[formatters] +keys=PrettyFormatter + +[logger_root] +level=INFO +handlers=consoleHandler,fileHandler + +[logger_luigi] +level=INFO +handlers=consoleHandler,fileHandler +qualname=luigi +propagate=0 + +[logger_luigi_interface] +level=INFO +handlers=consoleHandler,fileHandler +qualname=luigi-interface +propagate=0 + +[handler_consoleHandler] +class=StreamHandler +formatter=PrettyFormatter +args=(sys.stdout,) + +[handler_fileHandler] +class=FileHandler +formatter=PrettyFormatter +args=('synthesis_workflow.log',) + +[formatter_PrettyFormatter] +format=%(asctime)s - %(name)s - %(levelname)s - %(message)s +datefmt=%Y-%m-%d %H:%M:%S diff --git a/examples/boundary_example/luigi.cfg b/examples/boundary_example/luigi.cfg new file mode 100644 index 0000000..5f84d0d --- /dev/null +++ b/examples/boundary_example/luigi.cfg @@ -0,0 +1,50 @@ +# run syntheis in an O1 atlas + +[core] +workers = 10 +logging_conf_file = logging.conf + +[RunnerConfig] +nb_jobs = 5 + +[SynthesisConfig] +axon_method = synthesis + +mtypes = ["L1_open", "L1_hard", "L1_leaky"] + +[CircuitConfig] +atlas_path 
= atlas +region = O0 + +[BuildCircuit] +density_factor = 1.0 + +[CreateBoundaryMask] +boundary_thickness = 1 + +[SliceCircuit] +n_cells = 10 + +[CreateAtlasPlanes] +plane_type = centerline_straight +plane_count = 1 +slice_thickness = 50 + +[GetSynthesisInputs] +url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git +git_synthesis_input_path = synthdb/insitu_synthesis_inputs/boundary_example +branch = boundary_example + +[BuildMorphsDF] +neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml +morphology_dirs = {"path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc"} + +[ValidateSynthesis] +with_collage = True +with_trunk_validation = True +with_path_distance_fits = False +with_morphometrics = False +with_density_profiles = False +with_scale_statistics = False +with_score_matrix_reports=False +with_morphology_validation_reports = False diff --git a/examples/boundary_example/run.sh b/examples/boundary_example/run.sh new file mode 100755 index 0000000..23e66a9 --- /dev/null +++ b/examples/boundary_example/run.sh @@ -0,0 +1,12 @@ +#!/bin/bash -l + +export OMP_NUM_THREADS=1 +export REGION_GROWER_BOUNDARY_DEBUG=1 + +rm -rf atlas +brainbuilder atlases -n 1 -t 100 -d 10 -o atlas column -a 200 + + +python -m luigi --module synthesis_workflow.tasks.workflows ValidateSynthesis \ + --local-scheduler \ + --log-level INFO \ From 5419fb1d3e41d78363ead41e52ede977ca501bad Mon Sep 17 00:00:00 2001 From: arnaudon Date: Fri, 20 Oct 2023 16:31:31 +0200 Subject: [PATCH 10/37] more --- examples/boundary_example/luigi.cfg | 3 ++- examples/boundary_example/run.sh | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/examples/boundary_example/luigi.cfg b/examples/boundary_example/luigi.cfg index 5f84d0d..a077eb0 100644 --- a/examples/boundary_example/luigi.cfg +++ b/examples/boundary_example/luigi.cfg @@ -10,7 
+10,8 @@ nb_jobs = 5 [SynthesisConfig] axon_method = synthesis -mtypes = ["L1_open", "L1_hard", "L1_leaky"] +#mtypes = ["L1_open", "L1_hard", "L1_leaky", "L1_hard_trunk"] +mtypes = ["L1_attractive"] [CircuitConfig] atlas_path = atlas diff --git a/examples/boundary_example/run.sh b/examples/boundary_example/run.sh index 23e66a9..5992c91 100755 --- a/examples/boundary_example/run.sh +++ b/examples/boundary_example/run.sh @@ -4,7 +4,7 @@ export OMP_NUM_THREADS=1 export REGION_GROWER_BOUNDARY_DEBUG=1 rm -rf atlas -brainbuilder atlases -n 1 -t 100 -d 10 -o atlas column -a 200 +brainbuilder atlases -n 2,1 -t 100,100 -d 10 -o atlas column -a 200 python -m luigi --module synthesis_workflow.tasks.workflows ValidateSynthesis \ From 4324f0b12cac09ce646734aed5963f430a14edda Mon Sep 17 00:00:00 2001 From: arnaudon Date: Tue, 31 Oct 2023 16:15:02 +0100 Subject: [PATCH 11/37] more --- examples/O1_example/luigi.cfg | 8 ++++---- examples/boundary_example/luigi.cfg | 3 +-- src/synthesis_workflow/synthesis.py | 10 ++++++++-- src/synthesis_workflow/tasks/validation.py | 4 ++-- src/synthesis_workflow/validation.py | 1 + 5 files changed, 16 insertions(+), 10 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index b40dbfe..9a35c91 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -68,11 +68,11 @@ neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morp morphology_dirs = {"path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc"} [ValidateSynthesis] -with_collage = True -with_trunk_validation = True +with_collage = False +with_trunk_validation = False with_path_distance_fits = False -with_morphometrics = False -with_density_profiles = False +with_morphometrics = True +with_density_profiles = True with_scale_statistics = False with_score_matrix_reports=False with_morphology_validation_reports = False diff --git 
a/examples/boundary_example/luigi.cfg b/examples/boundary_example/luigi.cfg index a077eb0..a24546f 100644 --- a/examples/boundary_example/luigi.cfg +++ b/examples/boundary_example/luigi.cfg @@ -10,8 +10,7 @@ nb_jobs = 5 [SynthesisConfig] axon_method = synthesis -#mtypes = ["L1_open", "L1_hard", "L1_leaky", "L1_hard_trunk"] -mtypes = ["L1_attractive"] +mtypes = ["L1_open", "L1_hard", "L1_leaky", "L1_hard_trunk", "L1_attractive"] [CircuitConfig] atlas_path = atlas diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py index 1a12be7..ea2572a 100644 --- a/src/synthesis_workflow/synthesis.py +++ b/src/synthesis_workflow/synthesis.py @@ -13,6 +13,7 @@ from joblib import Parallel from joblib import delayed from morphio.mut import Morphology +from morphio import SectionType from neuroc.scale import ScaleParameters from neuroc.scale import scale_section from neurom import load_morphology @@ -33,7 +34,12 @@ def _get_morph_class(path): - return "PC" if has_apical_dendrite(load_morphology(path)) else "IN" + m = load_morphology(path) + + if len(["" for neurite in m.root_sections if neurite.type == SectionType.basal_dendrite]) == 1: + raise ValueError(f"{path} bas a single basal, it will break synthesis, we stop here") + + return "PC" if has_apical_dendrite(m) else "IN" def get_neurite_types(morphs_df): @@ -44,7 +50,7 @@ def get_neurite_types(morphs_df): morph_class = list(set(_df["morph_class"])) if len(morph_class) > 1: - raise ValueError("f{mtype} has not consistent morph_class, we stop here") + raise ValueError(f"{mtype} has not consistent morph_class, we stop here, see {_df}") if morph_class[0] == "IN": neurite_types[mtype] = ["basal_dendrite"] diff --git a/src/synthesis_workflow/tasks/validation.py b/src/synthesis_workflow/tasks/validation.py index f12746f..cb77188 100644 --- a/src/synthesis_workflow/tasks/validation.py +++ b/src/synthesis_workflow/tasks/validation.py @@ -151,7 +151,7 @@ def output(self): return 
ValidationLocalTarget(self.morphometrics_path) -@copy_params(nb_jobs=ParamRef(RunnerConfig)) +@copy_params(nb_jobs=ParamRef(RunnerConfig), region=ParamRef(CircuitConfig)) class PlotDensityProfiles(WorkflowTask): """Plot density profiles of neurites in an atlas. @@ -200,7 +200,7 @@ def run(self): plot_density_profiles( circuit, self.n_cells, - self.in_atlas, + self.region, self.sample_distance, self.output().path, self.nb_jobs, diff --git a/src/synthesis_workflow/validation.py b/src/synthesis_workflow/validation.py index 8917d1a..210d50b 100644 --- a/src/synthesis_workflow/validation.py +++ b/src/synthesis_workflow/validation.py @@ -523,6 +523,7 @@ def relative_depth_volume( voxel_size = y.voxel_dimensions[0] if in_region is None: raise ValueError("in_region should not be None") + region = atlas.get_region_mask(in_region).raw to_filter = np.zeros(region.shape) to_filter[np.logical_and(region, reldepth < 0.5)] = -1 From 816679a6ddfee0c8a06cd1f4af904a2a75a954e8 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Mon, 13 Nov 2023 16:36:02 +0100 Subject: [PATCH 12/37] use_* --- examples/O1_example/luigi.cfg | 4 ++-- examples/boundary_example/luigi.cfg | 5 +++-- src/synthesis_workflow/synthesis.py | 23 +++++++++++------------ src/synthesis_workflow/tasks/synthesis.py | 9 +++++++++ src/synthesis_workflow/validation.py | 3 ++- 5 files changed, 27 insertions(+), 17 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index 9a35c91..bf6440e 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -68,8 +68,8 @@ neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morp morphology_dirs = {"path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc"} [ValidateSynthesis] -with_collage = False -with_trunk_validation = False +with_collage = True +with_trunk_validation = True with_path_distance_fits = False with_morphometrics = True 
with_density_profiles = True diff --git a/examples/boundary_example/luigi.cfg b/examples/boundary_example/luigi.cfg index a24546f..d21573e 100644 --- a/examples/boundary_example/luigi.cfg +++ b/examples/boundary_example/luigi.cfg @@ -10,7 +10,8 @@ nb_jobs = 5 [SynthesisConfig] axon_method = synthesis -mtypes = ["L1_open", "L1_hard", "L1_leaky", "L1_hard_trunk", "L1_attractive"] +#mtypes = ["L1_open", "L1_hard", "L1_leaky", "L1_hard_trunk", "L1_attractive"] +mtypes = ["L1_open", "L1_left", "L1_down"] [CircuitConfig] atlas_path = atlas @@ -28,7 +29,7 @@ n_cells = 10 [CreateAtlasPlanes] plane_type = centerline_straight plane_count = 1 -slice_thickness = 50 +slice_thickness = 20 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py index ea2572a..1bc3248 100644 --- a/src/synthesis_workflow/synthesis.py +++ b/src/synthesis_workflow/synthesis.py @@ -34,12 +34,7 @@ def _get_morph_class(path): - m = load_morphology(path) - - if len(["" for neurite in m.root_sections if neurite.type == SectionType.basal_dendrite]) == 1: - raise ValueError(f"{path} bas a single basal, it will break synthesis, we stop here") - - return "PC" if has_apical_dendrite(m) else "IN" + return "PC" if has_apical_dendrite(load_morphology(path)) else "IN" def get_neurite_types(morphs_df): @@ -92,12 +87,16 @@ def _build_distributions_single_mtype( """Internal function for multiprocessing of tmd_distribution building.""" data = {} for neurite_type in neurite_types[mtype]: - if "use_axon" in morphs_df.columns: - morphology_paths = morphs_df.loc[ - (morphs_df.mtype == mtype) & morphs_df.use_axon, morphology_path - ].to_list() - else: - morphology_paths = morphs_df.loc[morphs_df.mtype == mtype, morphology_path].to_list() + _morphs_df = morphs_df[morphs_df.mtype == mtype] + if neurite_type == "axon" and "use_axon" in morphs_df.columns: + _morphs_df = _morphs_df.loc[morphs_df.use_axon] + if neurite_type == 
"apical_dendrite" and "use_apical" in morphs_df.columns: + _morphs_df = _morphs_df.loc[morphs_df.use_apical] + if neurite_type == "basal_dendrite" and "use_basal" in morphs_df.columns: + _morphs_df = _morphs_df.loc[morphs_df.use_basal] + print(_morphs_df) + morphology_paths = _morphs_df[morphology_path].to_list() + config["neurite_types"] = neurite_types[mtype] kwargs = { "neurite_types": neurite_types[mtype], diff --git a/src/synthesis_workflow/tasks/synthesis.py b/src/synthesis_workflow/tasks/synthesis.py index 76a26f0..adc2a78 100644 --- a/src/synthesis_workflow/tasks/synthesis.py +++ b/src/synthesis_workflow/tasks/synthesis.py @@ -64,6 +64,10 @@ class BuildMorphsDF(WorkflowTask): apical_points_path = luigi.OptionalParameter( default=None, description=":str: Path to the apical points file (JSON)." ) + neurite_selection_path = luigi.OptionalPathParameter( + default=None, + description=":path: Path to a .csv file with use_* and (morph_name) columns to select only subsect of neurites.", + ) def requires(self): """Required input tasks.""" @@ -78,6 +82,11 @@ def run(self): morphs_df = load_neurondb_to_dataframe( neurondb_path, self.morphology_dirs, self.apical_points_path ) + if self.neurite_selection_path is not None: + neurite_selection_df = pd.read_csv(self.neurite_selection_path).set_index("morph_name") + for col in neurite_selection_df.columns: + if col.startswith("use_"): + morphs_df[col] = neurite_selection_df.loc[morphs_df["name"], col].to_list() # Remove possibly duplicated morphologies morphs_df = morphs_df.drop_duplicates(subset=["name"]) diff --git a/src/synthesis_workflow/validation.py b/src/synthesis_workflow/validation.py index 210d50b..9a96c4f 100644 --- a/src/synthesis_workflow/validation.py +++ b/src/synthesis_workflow/validation.py @@ -28,6 +28,7 @@ from matplotlib.backends.backend_pdf import PdfPages from morphio.mut import Morphology from neurom import load_morphologies +from neurom import load_morphology from neurom.apps import morph_stats from 
neurom.apps.morph_stats import extract_dataframe from neurom.check.morphology_checks import has_apical_dendrite @@ -403,7 +404,7 @@ def _get_depths_df(circuit, mtype, sample, voxeldata, sample_distance): point_depths = defaultdict(list) for gid in gids: - morphology = circuit.morph.get(gid, transform=True, source="ascii") + morphology = load_morphology(circuit.morph.get(gid, transform=True, source="ascii")) point_depth_tmp = sample_morph_voxel_values( morphology, sample_distance, voxeldata, out_of_bounds_value ) From a4d258afa1031c8d73ad8122faf3cd741999c5a0 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Wed, 15 Nov 2023 14:51:20 +0100 Subject: [PATCH 13/37] fix some validation --- examples/O1_example/luigi.cfg | 15 +- src/synthesis_workflow/synthesis.py | 2 +- src/synthesis_workflow/tasks/synthesis.py | 2 + src/synthesis_workflow/tasks/workflows.py | 1 + src/synthesis_workflow/validation.py | 178 ++++------------------ 5 files changed, 43 insertions(+), 155 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index bf6440e..d9f1503 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -11,7 +11,8 @@ nb_jobs = 5 axon_method = synthesis # L1 IN -# mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] +mtypes = ["L1_DAC", "L5_TPC:A", "L6_TPC:C"] +#, "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] # L23 IN # mtypes = ["L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP"] @@ -38,7 +39,7 @@ axon_method = synthesis #mtypes = ["L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] # all -mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", 
"L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] +#mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", "L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] [CircuitConfig] atlas_path = atlas @@ -61,7 +62,7 @@ slice_thickness = 50 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_O1 -branch = axon_rat_O1 +branch = main [BuildMorphsDF] neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml @@ -70,9 +71,9 @@ morphology_dirs = {"path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph- [ValidateSynthesis] with_collage = True with_trunk_validation = True -with_path_distance_fits = False +with_path_distance_fits = True with_morphometrics = True with_density_profiles = True -with_scale_statistics = False -with_score_matrix_reports=False -with_morphology_validation_reports = False +with_scale_statistics = True +with_score_matrix_reports = True +with_morphology_validation_reports = True diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py index 1bc3248..b72ec42 100644 --- a/src/synthesis_workflow/synthesis.py +++ 
b/src/synthesis_workflow/synthesis.py @@ -94,7 +94,7 @@ def _build_distributions_single_mtype( _morphs_df = _morphs_df.loc[morphs_df.use_apical] if neurite_type == "basal_dendrite" and "use_basal" in morphs_df.columns: _morphs_df = _morphs_df.loc[morphs_df.use_basal] - print(_morphs_df) + morphology_paths = _morphs_df[morphology_path].to_list() config["neurite_types"] = neurite_types[mtype] diff --git a/src/synthesis_workflow/tasks/synthesis.py b/src/synthesis_workflow/tasks/synthesis.py index adc2a78..6fe9f57 100644 --- a/src/synthesis_workflow/tasks/synthesis.py +++ b/src/synthesis_workflow/tasks/synthesis.py @@ -168,6 +168,7 @@ def run(self): if SynthesisConfig().axon_method != "no_axon": for neurite_type in neurite_types.values(): neurite_type.append("axon") + region = CircuitConfig().region tmd_parameters = {region: {}} for mtype in tqdm(mtypes): @@ -571,6 +572,7 @@ def run(self): "nb_processes": self.nb_jobs, "region_structure": self.input()["synthesis_input"].pathlib_path / CircuitConfig().region_structure_path, + "synthesize_axons": SynthesisConfig().axon_method == "synthesis", } if SynthesisConfig().axon_method == "reconstructed" and self.apply_jitter: kwargs["scaling_jitter_std"] = self.scaling_jitter_std diff --git a/src/synthesis_workflow/tasks/workflows.py b/src/synthesis_workflow/tasks/workflows.py index c5c7d13..86ff531 100644 --- a/src/synthesis_workflow/tasks/workflows.py +++ b/src/synthesis_workflow/tasks/workflows.py @@ -143,6 +143,7 @@ def requires(self): if self.with_path_distance_fits: tasks.append(PlotPathDistanceFits()) if self.with_scale_statistics: + Synthesize().debug_region_grower_scales = True tasks.append(PlotScales()) if self.with_morphology_validation_reports: tasks.append(MorphologyValidationReports()) diff --git a/src/synthesis_workflow/validation.py b/src/synthesis_workflow/validation.py index 9a96c4f..a05f31d 100644 --- a/src/synthesis_workflow/validation.py +++ b/src/synthesis_workflow/validation.py @@ -33,11 +33,11 @@ from 
neurom.apps.morph_stats import extract_dataframe from neurom.check.morphology_checks import has_apical_dendrite from neurom.core.dataformat import COLS +from morph_tool.resampling import resample_linear_density from neurots import NeuronGrower from neurots.extract_input.from_neurom import trunk_vectors from neurots.generate.orientations import get_probability_function from region_grower.modify import scale_target_barcode -from scipy.ndimage import correlate from tmd.io.io import load_population from voxcell import CellCollection @@ -313,46 +313,6 @@ def plot_morphometrics( ) -def iter_positions(morph, sample_distance, neurite_filter=None): - """Iterator for positions in space of points every um. - - Assumption about segment linearity is that curvature between the start and end of segments - is negligible. - - Args: - morph (neurom.FstNeuron): morphology - sample_distance (int): points sampling distance (in um) - neurite_filter: filter neurites, see ``neurite_filter`` of ``neurom.iter_sections()`` - - Yields: - sampled points for the neurites (each point is a (3,) numpy array). 
- """ - section_offsets = {} - - for section in neurom.iter_sections(morph, neurite_filter=neurite_filter): - if section.parent is None: - parent_section_offset = 0 - else: - parent_section_offset = section_offsets[section.parent.id] - segment_offset = parent_section_offset - for segment in neurom.iter_segments(section): - segment_len = neurom.morphmath.segment_length(segment) - if segment_offset + segment_len < sample_distance: - segment_offset += segment_len - elif segment_offset + segment_len == sample_distance: - yield segment[1][COLS.XYZ] - segment_offset = 0 - else: - offsets = np.arange(sample_distance - segment_offset, segment_len, sample_distance) - for offset in offsets: - yield neurom.morphmath.linear_interpolate(*segment, offset / segment_len) - segment_offset = segment_len - offsets[-1] - if segment_offset == sample_distance: - segment_offset = 0 - yield segment[1][COLS.XYZ] - section_offsets[section.id] = segment_offset - - def sample_morph_voxel_values( morphology, sample_distance, @@ -380,15 +340,15 @@ def sample_morph_voxel_values( output = {} for neurite_type in neurite_types: - points = list( - iter_positions( - morphology, - sample_distance=sample_distance, - neurite_filter=lambda n, nt=neurite_type: n.type == nt, - ) - ) + morph = resample_linear_density(morphology, 1 / sample_distance) + points = [] + for section in morph.iter(): + if section.type == neurite_type: + points += section.points.tolist() + indices = voxel_data.positions_to_indices(points, False) - out_of_bounds = np.any(indices == -1, axis=1) + points = np.array(points) + out_of_bounds = np.any(indices == voxel_data.OUT_OF_BOUNDS, axis=1) within_bounds = ~out_of_bounds values = np.zeros(len(points), dtype=voxel_data.raw.dtype) values[within_bounds] = voxel_data.raw[tuple(indices[within_bounds].transpose())] @@ -463,101 +423,25 @@ def _plot_density_profile( """Plot density profile of an mtype.""" fig = plt.figure() ax = plt.gca() - try: - if isinstance(circuit, Circuit): - 
_plot_layers(x_pos, circuit.atlas, ax) - plot_df = _get_depths_df(circuit, mtype, sample, voxeldata, sample_distance) - ax.legend(loc="best") - elif isinstance(circuit, VacuumCircuit): - plot_df = _get_vacuum_depths_df(circuit, mtype) - else: - plot_df = None + if isinstance(circuit, Circuit): + _plot_layers(x_pos, circuit.atlas, ax) + plot_df = _get_depths_df(circuit, mtype, sample, voxeldata, sample_distance) + ax.legend(loc="best") + elif isinstance(circuit, VacuumCircuit): + plot_df = _get_vacuum_depths_df(circuit, mtype) - with DisableLogger(): - sns.violinplot(x="neurite_type", y="y", data=plot_df, ax=ax, bw=0.1) - except Exception: # pylint: disable=broad-except - ax.text( - 0.5, - 0.5, - "ERROR WHEN GETTING POINT DEPTHS", - horizontalalignment="center", - verticalalignment="center", - transform=ax.transAxes, - ) + with DisableLogger(): + sns.violinplot(x="neurite_type", y="y", data=plot_df, ax=ax, bw=0.1) fig.suptitle(mtype) return fig -def _spherical_filter(radius): - filt_size = radius * 2 + 1 - sphere = np.zeros((filt_size, filt_size, filt_size)) - center = np.array([radius, radius, radius]) - posns = np.transpose(np.nonzero(sphere == 0)) - in_sphere = posns[np.linalg.norm(posns - center, axis=-1) <= radius] - sphere[tuple(in_sphere.transpose())] = 1 - return sphere - - -def relative_depth_volume( - atlas, - top_layer="1", - bottom_layer="6", # pylint: disable=too-many-locals - in_region="Isocortex", - relative=True, -): - """Return volumetric data of relative cortical depth at voxel centers. - - The relative cortical depth is equal to / . - Outside of the region `in_region` relative depth will be estimated, i.e. extrapolated from the - internal relative depth. - The `in_region` is the region within which to use the relative depth-values outside this - region are estimated. 
- """ - y = atlas.load_data("[PH]y") - top = atlas.load_data(f"[PH]{top_layer}").raw[..., 1] - bottom = atlas.load_data(f"[PH]{bottom_layer}").raw[..., 0] - thickness = top - bottom - if relative: - reldepth = (top - y.raw) / thickness - else: - reldepth = y.raw - voxel_size = y.voxel_dimensions[0] - if in_region is None: - raise ValueError("in_region should not be None") - - region = atlas.get_region_mask(in_region).raw - to_filter = np.zeros(region.shape) - to_filter[np.logical_and(region, reldepth < 0.5)] = -1 - to_filter[np.logical_and(region, reldepth >= 0.5)] = 1 - max_dist = 5 # voxels - for voxels_distance in range(max_dist, 0, -1): - filt = _spherical_filter(voxels_distance) - - num_voxels_in_range = correlate(to_filter, filt) - # we get the estimated thickness by normalizing the filtered thickness - # by the number of voxels that contributed - filtered_thickness = correlate(region * np.nan_to_num(thickness), filt) / np.abs( - num_voxels_in_range - ) - in_range_below = np.logical_and(num_voxels_in_range > 0.5, ~region) - in_range_above = np.logical_and(num_voxels_in_range < -0.5, ~region) - max_distance_from_region = voxels_distance * voxel_size - reldepth[in_range_below] = 1 + ( - max_distance_from_region / filtered_thickness[in_range_below] - ) - reldepth[in_range_above] = -(max_distance_from_region / filtered_thickness[in_range_above]) - return y.with_data(reldepth) - - def plot_density_profiles(circuit, sample, region, sample_distance, output_path, nb_jobs=-1): - """Plot density profiles for all mtypes. - - WIP function, waiting on complete atlas to update. 
- """ + """Plot density profiles for all mtypes.""" if not region or region == "in_vacuum": voxeldata = None else: - voxeldata = relative_depth_volume(circuit.atlas, in_region=region, relative=False) + voxeldata = circuit.atlas.load_data("[PH]y") x_pos = 0 ensure_dir(output_path) @@ -622,7 +506,7 @@ def _get_fit_population( # Load biological neurons return_error = (mtype, None, None, None, None, None, None) if len(files) > 0: - input_population = load_population(files) + input_population = load_population(files, use_morphio=True) else: return return_error + (f"No file to load for mtype='{mtype}'",) if ( @@ -833,17 +717,17 @@ def plot_scale_statistics(mtypes, scale_data, cols, output_dir="scales", dpi=100 mtypes = sorted(scale_data["mtype"].unique()) for col in cols: - fig = plt.figure() - ax = plt.gca() - scale_data.loc[scale_data["mtype"].isin(mtypes), ["mtype", col]].boxplot( - by="mtype", vert=False, ax=ax - ) - - ax.grid(True) - fig.suptitle("") - with DisableLogger(): - pdf.savefig(fig, bbox_inches="tight", dpi=dpi) - plt.close(fig) + data = scale_data.loc[scale_data["mtype"].isin(mtypes), ["mtype", col]] + if all(data[col]): + fig = plt.figure() + ax = plt.gca() + data.boxplot(by="mtype", vert=False, ax=ax) + + ax.grid(True) + fig.suptitle("") + with DisableLogger(): + pdf.savefig(fig, bbox_inches="tight", dpi=dpi) + plt.close(fig) def mvs_score(data1, data2, percentile=10): From 026dbd1156a3572e9f2483aa4a84221ab6e7678c Mon Sep 17 00:00:00 2001 From: arnaudon Date: Fri, 17 Nov 2023 13:39:43 +0100 Subject: [PATCH 14/37] o1 --- examples/O1_example/luigi.cfg | 8 ++++---- examples/O1_example/run.sh | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index d9f1503..44489a7 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -11,8 +11,7 @@ nb_jobs = 5 axon_method = synthesis # L1 IN -mtypes = ["L1_DAC", "L5_TPC:A", "L6_TPC:C"] -#, "L1_HAC", "L1_NGC-SA", 
"L1_NGC-DA", "L1_LAC", "L1_SAC"] +#mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] # L23 IN # mtypes = ["L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP"] @@ -33,7 +32,7 @@ mtypes = ["L1_DAC", "L5_TPC:A", "L6_TPC:C"] # mtypes = ["L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC"] # L6 PC -# mtypes = ["L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC"] +#mtypes = ["L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC"] # L6 IN #mtypes = ["L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] @@ -41,6 +40,7 @@ mtypes = ["L1_DAC", "L5_TPC:A", "L6_TPC:C"] # all #mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", "L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] +mtypes = ["L4_MC"] [CircuitConfig] atlas_path = atlas region = O0 @@ -62,7 +62,7 @@ slice_thickness = 50 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_O1 -branch = main +branch = O1_with_axons [BuildMorphsDF] neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml diff --git a/examples/O1_example/run.sh b/examples/O1_example/run.sh index b94957a..5e6612a 100755 --- a/examples/O1_example/run.sh +++ b/examples/O1_example/run.sh @@ -2,8 +2,8 @@ export OMP_NUM_THREADS=1 -#rm -rf atlas -#brainbuilder atlases -n 
6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 +rm -rf atlas +brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 -w 1 python -m luigi --module synthesis_workflow.tasks.workflows ValidateSynthesis \ --local-scheduler \ From 4ab5af6bc4f4732c1421fce2a252e036aa72945e Mon Sep 17 00:00:00 2001 From: arnaudon Date: Fri, 24 Nov 2023 16:20:08 +0100 Subject: [PATCH 15/37] more doc --- docs/source/synthesis_methodology.rst | 323 ++++++++++++++++------ src/synthesis_workflow/synthesis.py | 1 - src/synthesis_workflow/tasks/synthesis.py | 3 +- src/synthesis_workflow/validation.py | 2 +- 4 files changed, 238 insertions(+), 91 deletions(-) diff --git a/docs/source/synthesis_methodology.rst b/docs/source/synthesis_methodology.rst index eec8960..dbc73e9 100644 --- a/docs/source/synthesis_methodology.rst +++ b/docs/source/synthesis_methodology.rst @@ -3,113 +3,260 @@ Synthesis methodology This page presents the scientific procedure to follow in order to synthesize morphologies. -Overview --------- -Production workflow -~~~~~~~~~~~~~~~~~~~ -For specific mtypes and brain regions, the standard procedure to synthesize morphologiesis the following: +How to use synthesis from scratch ? +----------------------------------- -* collect biological data via an at least curated morphology release with the code - available at https://github.com/BlueBrain/morphology-workflows. -* run this workflow and change default parameters via files in the git repository SynthDB - available at https://bbpgitlab.epfl.ch/neuromath/synthdb. -* once the parametrisation is satisfactory, save the parameter and distributions json - files on the SynthDB database. -* these files are then available in SynthDB to run production workflows, such as circuit-build +For specific mtypes and brain regions, the standard procedure to synthesize morphologies is as follow: +1. 
Collect biological data via a curated morphology release (see https://github.com/BlueBrain/morphology-workflows) that consists of a folder of morphologies and a neurondb.xml file (see https://github.com/BlueBrain/morph-tool/blob/master/morph_tool/morphdb.py). +2. Create a Pull Request on https://bbpgitlab.epfl.ch/neuromath/synthdb with a new folder under 'insitu_synthesis_inputs' to store specific configuration files. +3. Run the workflow :py:class:`tasks.workflows.ValidateVaccumSynthesis` by pointing the `GetSynthesisInputs` task to this repository/branch/folder. Assess if synthesis works as expected with default values. If not, the synthesis parameters of `tmd_parameters.json` can be modified via a `custom_parameters.csv` file (see below). Additional files include `substitution_rules.yaml` to create or complete mtypes and `scaling_rules.yaml` to control barcode scalings insitu. +4. (Optional, to run insitu synthesis) Run the workflow :py:class:`tasks.workflows.ValidateSynthesis` after having added more files to the config repository, including `cell_composition.yaml`, `mtype_taxonomy.tsv` and `region_structure.yaml`. The latter corresponds to the atlas/region under consideration. +5. Once the parametrisation is satisfactory, save a reduced version of the luigi.cfg in SynthDB under `synthesis_inputs/luigi_configs` and run the cli `synthdb synthesis-inputs build` to create and save the parameter and distributions json files on the SynthDB database. +6. Use `synthdb pull` cli to get the necessary synthesis input files. Also copy the other configuration files and run either region-grower cli (see https://bbpgitlab.epfl.ch/neuromath/region-grower) or circuit-build (see https://bbpgitlab.epfl.ch/nse/circuit-build) to synthesize an entire region. -Internal workflows -~~~~~~~~~~~~~~~~~~ -The main workflow that should be used is: :py:class:`tasks.workflows.ValidateSynthesis`. +Two synthesis workflows +----------------------- + +1. 
When no information on the atlas is available, one can run the vacuum +synthesis workflow :py:class:`tasks.workflows.ValidateVacuumSynthesis`. This workflow runs all the required tasks to calibrate and validate the synthesis models. It is divided in several subtasks (more details on these tasks are given in :doc:`autoapi/tasks/index`): .. graphviz:: autoapi/tasks/workflows/ValidateSynthesis.dot - -If no information on the atlas is available, one can instead run the simpler vacuum -synthesis workflow: :py:class:`tasks.workflows.ValidateVaccumSynthesis`. It is also -divided in several subtasks (more details on these tasks are given in +2. For insitu synthesis the workflow is :py:class:`tasks.workflows.ValidateSynthesis`. +This workflow runs all the required tasks to calibrate and validate the synthesis models. +It is divided in several subtasks (more details on these tasks are given in :doc:`autoapi/tasks/index`): .. graphviz:: autoapi/tasks/workflows/ValidateSynthesis.dot -Calibration parameters ----------------------- -!!! Discuss custom_parameters.csv file here. +How to calibrating synthesis parameters ? +----------------------------------------- + +The workflow will create the parameter file ``tmd_parameters.json`` from the default values in https://github.com/BlueBrain/NeuroTS/blob/main/neurots/extract_input/input_parameters.py. +It will then modify a few entryes. One time with scaling rules, if a `scaling_rules.yaml` file is present and another time with trunk angle data obtained from fits. + +All parameters can be modified via the file ``custom_parameters.csv`` in SynthDB as follow. +Each line corresponds to a parameter modification. It should have three columns ``mtype``, ``entry``, ``value``. The ``mtype`` column should match the ``mtype`` under consideration, ``entry`` column points to the parameter entry, with ``.`` referencing nested dictionaries. ``value`` is the parameter value to assign. 
If the value is a list, then ``entry`` can be of the form ``parameter.data[1]`` to access the second element.
+These rules are applied to ``tmd_parameters.json`` after the scaling rules, and before the trunk angles (as trunk angle is not enabled by default, see below).
+
+Setting non-default trunk angle algorithm
+------------------------------------------
+
+There are two algorithms to assign trunk angles, the default one, which is not configurable and another one that takes into account relative angles between neurite types.
+Notice that if one neurite_type uses this non-default algorithm, all other neurite_types must use it as well.
+To enable the second one, one can set specific ``orientation`` parameters in ``custom_parameters.csv`` file as follows.
+
+The orientations are always assigned with respect to a direction, the pia or apical.
+For the pia, use for example:
+
+.. code-block::
+
+    L1_DAC,basal_dendrite.orientation.mode,pia_constraint
+
+and for apical
+
+.. code-block::
+
+    L2_IPC,basal_dendrite.orientation.mode,apical_constraint
+
+with these modes, the algorithm will try to fit a probability distribution to the data, save the fit values in the ``tmd_parameters.json`` to later be used during synthesis.
+By default, the fit function is a single step, corresponding to a unimodal angle distribution, but if it is bimodal, one can use another function ``double_step`` as
+
+.. code-block::
+
+    L23_LBC,basal_dendrite.orientation.values.form,double_step
+
+
+Another mode exists, that does not perform any fit. It is used to set particular directions for trunks, such as apical trunks, for example:
+
+.. code-block::
+
+    L5_TPC:A,apical_dendrite.orientation.mode,normal_pia_constraint
+    L5_TPC:A,apical_dendrite.orientation.values.direction.mean,0.0
+    L5_TPC:A,apical_dendrite.orientation.values.direction.std,0.0
+
+will assign apical trunks to exactly the pia direction. Std will allow some randomness. 
If ``mean`` is not ``0`` of ``pi`` (and ``std=0``, the angles are on a cone). + +Another example is for inverted PC cell + +.. code-block:: + + L2_IPC,apical_dendrite.orientation.mode,normal_pia_constraint + L2_IPC,apical_dendrite.orientation.values.direction.mean,3.1415 + L2_IPC,apical_dendrite.orientation.values.direction.std,0.3 + L6_IPC,basal_dendrite.orientation.mode,apical_constraint + L6_IPC,grow_types[0],apical_dendrite + L6_IPC,grow_types[1],basal_dendrite + +which has basal dendrite trunk relative to the apical trunk, but as by default the basal are generated first (basal, apical then axon if available), one must revert the ordering in ``grow_types``. + +Finally, for multiple apical trunks, one can use lists for ``mean`` and ``std`` parameters, as for BPC cells: -The calibration step should create the two parameter files used by -:py:class:`placement_algorithm.app.synthesize_morphologies.Master`: +.. code-block:: -* ``tmd_parameters.json`` which contains the model parameters for each ``mtype``. -* ``tmd_distributions.json`` which contains the distribution properties of each ``mtype``. + L6_BPC,apical_dendrite.orientation.mode,normal_pia_constraint + L6_BPC,apical_dendrite.orientation.values.direction.mean[0],0.0 + L6_BPC,apical_dendrite.orientation.values.direction.std[0],0.2 + L6_BPC,apical_dendrite.orientation.values.direction.mean[1],2.5 + L6_BPC,apical_dendrite.orientation.values.direction.std[1],0.3 -Details on the content of these files can be found here: (does not exist yet) -Synthesis in atlas ------------------- +Scaling rules for basic insitu synthesis +---------------------------------------- When cells are synthesized inside an atlas, their shapes must be adapted according to their -positions in order to fit in this atlas. Currently, the shapes are just rescaled in order -to fit in a defined interval. This interval depends on the ``mtype`` and on the cell position -because the depths of the atlas layers also depend on this position. 
The information on -how each ``mtype`` can be rescaled are inserted in the ``tmd_parameters.json`` file by the task -:py:class:`tasks.synthesis.AddScalingRulesToParameters`. - -For a given ``mtype``, the parameters specific to the scaling process are contained in a -``context_constraints`` key. The keys in this object should be neurity types (one of -``[apical, basal, axon]``) and should contain the interval in which it must fit. This interval -is defined relatively to the atlas layers, so its two boundaries should contain a ``layer`` -entry, which should be an integer in ``[1, 6]``, and an optional ``fraction`` entry, which -should be a float in ``[0, 1]`` (0 for the bottom boundary of the layer and 1 for the top -boundary). - -For apical rescaling, another optional ``extent_to_target`` entry should be added. This -entry contains a target ``layer`` entry and an optional ``fraction`` entry as described -before. But it also contains the fit parameters of the path distance of the cell as a -function of its extent along its principal direction. This is needed because the stopping -criterion used in the TNS model only takes the path distance into account. This fit is -linear and is thus described by a ``slope`` and a ``intercept`` entries. - -Here is an example of such ``context_constraints`` entry: - -.. code-block:: json - - { - "": { - "context_constraints": { - "apical": { - "extent_to_target": { - "fraction": 0.8, - "intercept": 0, - "layer": 1, - "slope": 1.5 - }, - "hard_limit_max": { - "fraction": 1, - "layer": 1 - }, - "hard_limit_min": { - "fraction": 0.1, - "layer": 1 - } - }, - "basal": { - "hard_limit_max": { - "fraction": 0.5, - "layer": 1 - } - } - } - } - } - -More details on the models can be found here: - -* `TNS `_ -* (does not exist yet) -* (does not exist yet) +positions in order to fit in this atlas. 
These rules are the synthesis version of placement hint algorithm of https://bbpteam.epfl.ch/documentation/projects/placement-algorithm/latest/methodology.html. +With scaling rules, only the barcodes are rescaled in order to fit the atlas. +To define the scaling rules, one must have a file ``scaling_rules.yaml`` in synthdb folder with the following form. + +.. code-block:: yaml + + default: # this will be applied on all mtypes if corresponding key is not present below + apical_dendrite: + hard_limit_max: + layer: L1 + fraction: 0.99 + basal_dendrite: + hard_limit_max: + layer: L1 + fraction: 0.99 + + L2_TPC:A: + apical_dendrite: + hard_limit_min: + layer: L1 + fraction: 0.1 + extent_to_target: + layer: L1 + fraction: 0.8 + + +For each ``mtype`` and ``neurite_type`` there can be ``hard_limit_min``, ``hard_limit_max`` +or ``extent_to_target`` rules. For each the entry the ``layer`` argument of the form ``L[1-6]`` +(not the names of the layer, just number from top to bottom) specifies the layer where the rule applies, +and ``fraction`` refines it to a fraction of it (0 for bottom, 1 for top). +For the ``hard_limit`` rules, the synthesized cells will be rescaled to so that there maximum/minimum extent +fit the rule. + +The other mode ``extent_to_target``, mostly used for apical dendrites uses a fit of expected extent from barcodes, +and rescales the barcodes so that it will fit the rule. This rescaling requires expected thicknesses of synthesis, that can be provided in ``region_structure.yaml``, see below. + + +Minimal region structure information +------------------------------------- + +In addition to a working atlas folder (with at least ``hierarchy.json``, ``[PH][layers].nrrd``, ``brain_region.nrrd``, ``orientations.nrrd``, if no cell densities file are present, we will add uniform ones in appropriate layers), +one needs additional information to run synthesis in this atlas, which we encode in ``region_structure.yaml`` file. 
+An example of this file for an single column, considered are a region names ``O0`` is: + +.. code-block:: yaml + + O0: + layers: + - 1 + - 2 + - 3 + - 4 + - 5 + - 6 + names: + 1: layer 1 + 2: layer 2 + 3: layer 3 + 4: layer 4 + 5: layer 5 + 6: layer 6 + region_queries: + 1: '@.*1$' + 2: '@.*2[a|b]?$' + 3: '@.*3$' + 4: '@.*4$' + 5: '@.*5$' + 6: '@.*6[a|b]?$' + thicknesses: + 1: 165 + 2: 149 + 3: 353 + 4: 190 + 5: 525 + 6: 700 + +The entry ``layers`` contains the name of layers (not int values, but str in general) that corresponds to ``[PH][layers].nrrd``, ordered by depth, from top to bottom. The next entries are dictionaries where keys are the layers in ``layers`` entry. +The ``names`` entry contains human readable names that can be used for plotting, it is optional, mostly used for legend of collage plots. +The entry ``region_queries`` contains regexes for querying the atlas ``hierarchy.json`` to find ids or layers present in ``brain_region.nrrd``. +Finally, the entry ``thicknesses`` contains expected thicknesses of synthesis in vacuum which will be used to apply the rescaling algorithm. If the ``thicknesses`` entry is absent, no scaling rule ``extent_to_target`` will be applied, even if the rule is present. + +For more subtle insitu synthesis, see the next two sections which describe two algorithms based on accept-reject mechanisms during growth. + +Insitu synthesis with directions +-------------------------------- + +Under a region block (such as ``O0`` above) of ``region_structure.yaml``, one can add a ``directions`` block to control the growing directions of sections during synthesis via atlas orientation field. + +.. code-block:: yaml + + directions: + - mtypes: + - L1_HAC + - L1_SAC + neurite_types: + - axon + processes: + - major + params: + direction: [0, 1, 0] + power : 2.0 + mode: perpendicular + layers: [1, 2] + +This block contains a list of rules, with the following entries. 
``mtypes`` is the list of mtypes to apply this rule, ``neurite_types`` is the list of neurite_types to apply this rule. ``processes`` is optional and is the list of type of sections in NeuroTS (``major`` or ``secondary``) to differentiate between trunk (``major``) and obliques or collaterals (``secondary``). + +The entry ``params`` is a dictionary to parametrize the rule. First, we specify the ``direction`` with a 3-vector, where ``[0, 1, 0]`` is the pia direction and ``[0, -1, 0]`` is opposite to the pia. For non-cortical regions, pia generalises to ``y`` coordinate of the orientation vector in ``orientation.nrrd``. +Then, the ``mode`` selects between ``parallel`` (default if omitted) to follow the direction, and ``perpendicular`` to follow the perpendicular directions, hence a plane. +The optional ``power`` value is to set how strong the direction constraint is. The underlying algorithm converts the angle between the next point to grow and the direction into a probability function. If ``power=1`` (default) the relation is linear, otherwise it is a power of it (see ``get_directions`` in ``region-grower/region_grower/context.py``). +Finally, this rule can be applied into only specific layers, via the list in ``layers`` entry (default to all layers). + +Insitu synthesis with boundaries +-------------------------------- + +Under a region block (such as ``O0`` above) of ``region_structure.yaml``, one can add a ``boundaries`` block to control the growing directions of trunks and sections during synthesis via atlas based meshes. + +.. code-block:: yaml + + boundaries: + - mtypes: + - L2_TPC:A + neurite_types: + - apical_dendrite + - basal_dendrite + - axon + params_section: + d_min: 5 + d_max: 50 + params_trunk: + d_min: 5.0 + d_max: 1000 + power: 3.0 + mode: repulsive + path: pia_mesh.obj + +This block contains a list of rules for boundary constraints, similar to the direction for ``mtypes`` and ``neurite_types`` entries. 

Each rule must have a ``path`` entry to a mesh (readable by https://github.com/mikedh/trimesh) in either voxel id or coordinates. To select between the two, the ``mesh_type`` entry can be used with value ``voxel`` (default) for voxel ids or ``spatial`` for coordinates.
+If the path is relative, it will be interpreted as relative to the location of the ``region_structure.yaml`` file.
+If the ``path`` is a folder, then it must contain mesh files which will be used for this rule. The way the meshes are selected to act as boundary depends on the rule parametrized by ``multimesh_mode``, which can be set to ``closest`` (default) for selecting the closest mesh to the soma as the unique mesh, or ``inside`` to select the mesh surrounding the soma (used for barrel cortex for example).
+
+There are two main modes for these rules, parametrized by ``modes``. Either ``repulsive`` (default) where the mesh will act as a repulsive wall/boundary, or ``attractive`` where the mesh will attract the growing sections (more experimental, used for glomeruli spherical meshes for example).
+
+This rule can then be applied to either the section growing with ``params_section`` or trunk placements with ``params_trunk`` (only if the non-default trunk angle method is selected, see above).
+In both cases, the algorithm uses ray tracing to compute the distance to the mesh in the direction of the growth, and converts it to a probability function. The probability will be ``0`` below a distance of ``d_min``, and ``1`` above the distance of ``d_max``. This distance is from the previous point (soma for trunk), and the direction is to the next point (first neurite point for trunk). The ``power`` argument is as above, to have a nonlinear function of distance.
+If ``d_min`` is negative, there will be a probability of going through the mesh, hence making it leaky. 
+The mesh are considered as non-oriented, hence there is no notion of side, so is a branch passes through, it will have no effect, unless the growing turns back and hit the mesh again from the other side. + +Meshes can be generated with trimesh package directly (or any other means), or via the atlas based helper here: https://bbpgitlab.epfl.ch/neuromath/neurocollage/-/blob/main/neurocollage/mesh_helper.py. diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py index b72ec42..1c70514 100644 --- a/src/synthesis_workflow/synthesis.py +++ b/src/synthesis_workflow/synthesis.py @@ -13,7 +13,6 @@ from joblib import Parallel from joblib import delayed from morphio.mut import Morphology -from morphio import SectionType from neuroc.scale import ScaleParameters from neuroc.scale import scale_section from neurom import load_morphology diff --git a/src/synthesis_workflow/tasks/synthesis.py b/src/synthesis_workflow/tasks/synthesis.py index 6fe9f57..3820286 100644 --- a/src/synthesis_workflow/tasks/synthesis.py +++ b/src/synthesis_workflow/tasks/synthesis.py @@ -66,7 +66,8 @@ class BuildMorphsDF(WorkflowTask): ) neurite_selection_path = luigi.OptionalPathParameter( default=None, - description=":path: Path to a .csv file with use_* and (morph_name) columns to select only subsect of neurites.", + description=""":path: Path to a .csv file with use_* and (morph_name) columns to select + only subsect of neurites.""", ) def requires(self): diff --git a/src/synthesis_workflow/validation.py b/src/synthesis_workflow/validation.py index a05f31d..7730dc0 100644 --- a/src/synthesis_workflow/validation.py +++ b/src/synthesis_workflow/validation.py @@ -26,6 +26,7 @@ from joblib import delayed from matplotlib import cm from matplotlib.backends.backend_pdf import PdfPages +from morph_tool.resampling import resample_linear_density from morphio.mut import Morphology from neurom import load_morphologies from neurom import load_morphology @@ -33,7 +34,6 @@ from 
neurom.apps.morph_stats import extract_dataframe from neurom.check.morphology_checks import has_apical_dendrite from neurom.core.dataformat import COLS -from morph_tool.resampling import resample_linear_density from neurots import NeuronGrower from neurots.extract_input.from_neurom import trunk_vectors from neurots.generate.orientations import get_probability_function From d95e6815ebcd104c7c73fe68a9ad80138ccd9546 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Mon, 27 Nov 2023 11:09:11 +0100 Subject: [PATCH 16/37] more --- docs/source/synthesis_methodology.rst | 2 +- .../tasks/vacuum_synthesis.py | 8 ++-- src/synthesis_workflow/vacuum_synthesis.py | 45 +++++++------------ 3 files changed, 22 insertions(+), 33 deletions(-) diff --git a/docs/source/synthesis_methodology.rst b/docs/source/synthesis_methodology.rst index dbc73e9..389b05a 100644 --- a/docs/source/synthesis_methodology.rst +++ b/docs/source/synthesis_methodology.rst @@ -11,7 +11,7 @@ For specific mtypes and brain regions, the standard procedure to synthesize morp 1. Collect biological data via an curated morphology release (see https://github.com/BlueBrain/morphology-workflows) that consists of a folder of morphologies and a neurondb.xml file (see https://github.com/BlueBrain/morph-tool/blob/master/morph_tool/morphdb.py). 2. Create a Pull Request on https://bbpgitlab.epfl.ch/neuromath/synthdb with a new folder under 'insitu_synthesis_inputs' to store specific configuration files. -3. Run the workflow :py:class:`tasks.workflows.ValidateVaccumSynthesis` to by pointing the `GetSynthesisInputs` task to this repository/branch/folder. Assess if synthesis works as expected with default values. If not, the synthesis parameters of `tmd_parameters.json` can be modified via a `custom_parameters.csv` file (see below). Additional files include `substitution_rules.yaml` to create or complete mtypes and `scaling_rules.yaml` to control barcode scalings insitu. +3. 
Run the workflow :py:class:`tasks.workflows.ValidateVacuumSynthesis` to by pointing the `GetSynthesisInputs` task to this repository/branch/folder. Assess if synthesis works as expected with default values. If not, the synthesis parameters of `tmd_parameters.json` can be modified via a `custom_parameters.csv` file (see below). Additional files include `substitution_rules.yaml` to create or complete mtypes and `scaling_rules.yaml` to control barcode scalings insitu. 4. (Optional, to run insitu synthesis) Run the workflow :py:class:`tasks.workflows.ValidateSynthesis` after having added more files to the config repository, including `cell_composition.yaml`, `mtype_taxonomy.tsv` and `region_structure.yaml`. The latter corresponds to the atlas/region under consideration. 5. Once the parametrisation is satisfactory, save a reduced version of the luigi.cfg in SynthDB under `synthesis_inputs/luigi_configs` and run the cli `synthb synthesis-inputs build` to create and save the parameter and distributions json files on the SynthDB database. 6. Use `synthdb pull` cli to get the necessary synthesis input files. Also copy the other configuration files and run either region-grower cli (see https://bbpgitlab.epfl.ch/neuromath/region-grower) or circuit-build (see https://bbpgitlab.epfl.ch/nse/circuit-build) to synthesize an entire region. diff --git a/src/synthesis_workflow/tasks/vacuum_synthesis.py b/src/synthesis_workflow/tasks/vacuum_synthesis.py index 7bd7816..ffcedea 100644 --- a/src/synthesis_workflow/tasks/vacuum_synthesis.py +++ b/src/synthesis_workflow/tasks/vacuum_synthesis.py @@ -108,8 +108,8 @@ class PlotVacuumMorphologies(WorkflowTask): vacuum_synth_morphology_path (str): Column name to use from the morphology DataFrame. """ - pdf_filename = PathParameter( - default="vacuum_morphologies.pdf", description=":str: Path to the output file." + pdf_folder = PathParameter( + default="vacuum_morphologies", description=":str: Path to the output file." 
) def requires(self): @@ -121,10 +121,10 @@ def run(self): vacuum_synth_morphs_df = pd.read_csv(self.input()["out_morphs_df"].path) plot_vacuum_morphologies( vacuum_synth_morphs_df, - self.output().path, + self.output().pathlib_path, self.vacuum_synth_morphology_path, ) def output(self): """Outputs of the task.""" - return ValidationLocalTarget(self.pdf_filename) + return ValidationLocalTarget(self.pdf_folder) diff --git a/src/synthesis_workflow/vacuum_synthesis.py b/src/synthesis_workflow/vacuum_synthesis.py index 57e5acb..5664469 100644 --- a/src/synthesis_workflow/vacuum_synthesis.py +++ b/src/synthesis_workflow/vacuum_synthesis.py @@ -1,7 +1,4 @@ """Functions for synthesis in vacuum to be used by luigi tasks.""" - -import itertools - import matplotlib.pyplot as plt import numpy as np import pandas as pd @@ -103,31 +100,23 @@ def grow_vacuum_morphologies( return vacuum_synth_morphs_df -def plot_vacuum_morphologies(vacuum_synth_morphs_df, pdf_filename, morphology_path): - """Plot synthesized morphologies on top of each others.""" +def plot_vacuum_morphologies(vacuum_synth_morphs_df, pdf_folder, morphology_path): + """Plot synthesized morphologies.""" # pylint: disable=cell-var-from-loop - with PdfPages(pdf_filename) as pdf: - for mtype in tqdm(sorted(vacuum_synth_morphs_df.mtype.unique())): + pdf_folder.mkdir(exist_ok=True, parents=True) + for mtype in tqdm(sorted(vacuum_synth_morphs_df.mtype.unique())): + with PdfPages(pdf_folder / f"morphologies_{mtype}.pdf") as pdf: ids = vacuum_synth_morphs_df[vacuum_synth_morphs_df.mtype == mtype].index - if len(ids) <= 5: - sqrt_n = len(ids) - sqrt_m = 1 - else: - sqrt_n = int(np.sqrt(len(ids)) + 1) - sqrt_m = sqrt_n - plt.figure() - ax = plt.gca() - for gid, (n, m) in zip(ids, itertools.product(range(sqrt_n), range(sqrt_m))): + for gid in ids: morphology = load_morphology(vacuum_synth_morphs_df.loc[gid, morphology_path]) - matplotlib_impl.plot_morph( - morphology.transform(lambda p: p + np.array([500 * n, 500 * m, 0])), - 
ax, - realistic_diameters=True, - soma_outline=False, - ) - ax.set_title(mtype) - plt.axis("equal") - plt.tight_layout() - with DisableLogger(): - pdf.savefig() - plt.close() + fig, axes = plt.subplots(1, 3, figsize=(15, 5)) + for ax, plane in zip(axes, ["xy", "xz", "yz"]): + matplotlib_impl.plot_morph( + morphology, ax, plane=plane, realistic_diameters=True, soma_outline=False + ) + ax.set_title(plane) + ax.axis("equal") + plt.tight_layout() + with DisableLogger(): + pdf.savefig() + plt.close() From 9661a4e1feff2f694a45ecc99dba18178a9bcdb2 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Thu, 21 Dec 2023 14:55:29 +0100 Subject: [PATCH 17/37] more --- examples/O1_example/luigi.cfg | 8 ++++---- examples/O1_example/run.sh | 6 +++--- src/synthesis_workflow/__init__.py | 5 ++--- src/synthesis_workflow/synthesis.py | 9 ++++----- src/synthesis_workflow/vacuum_synthesis.py | 2 +- 5 files changed, 14 insertions(+), 16 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index 44489a7..e21a4b3 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -38,9 +38,9 @@ axon_method = synthesis #mtypes = ["L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] # all -#mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", "L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] +mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", 
"L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", "L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] -mtypes = ["L4_MC"] +#mtypes = ["L4_MC"] [CircuitConfig] atlas_path = atlas region = O0 @@ -61,8 +61,8 @@ slice_thickness = 50 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git -git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_O1 -branch = O1_with_axons +git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_O0 +branch = o0o1 [BuildMorphsDF] neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml diff --git a/examples/O1_example/run.sh b/examples/O1_example/run.sh index 5e6612a..755165e 100755 --- a/examples/O1_example/run.sh +++ b/examples/O1_example/run.sh @@ -1,9 +1,9 @@ #!/bin/bash -l -export OMP_NUM_THREADS=1 +#export OMP_NUM_THREADS=1 -rm -rf atlas -brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 -w 1 +#rm -rf atlas +#brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 python -m luigi --module synthesis_workflow.tasks.workflows ValidateSynthesis \ --local-scheduler \ diff --git a/src/synthesis_workflow/__init__.py b/src/synthesis_workflow/__init__.py index 54192fd..32a0966 100644 --- a/src/synthesis_workflow/__init__.py +++ b/src/synthesis_workflow/__init__.py @@ -1,6 +1,5 @@ """Workflow for neuronal synthesis validation.""" - -import pkg_resources +import importlib.metadata from morphio import SectionType # noqa -__version__ = 
pkg_resources.get_distribution("synthesis_workflow").version +__version__ = importlib.metadata.version("synthesis-workflow") diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py index 1c70514..a33c6e5 100644 --- a/src/synthesis_workflow/synthesis.py +++ b/src/synthesis_workflow/synthesis.py @@ -44,12 +44,11 @@ def get_neurite_types(morphs_df): morph_class = list(set(_df["morph_class"])) if len(morph_class) > 1: - raise ValueError(f"{mtype} has not consistent morph_class, we stop here, see {_df}") + L.warning(f"{mtype} has not consistent morph_class, we take PC if apical, see {_df}") - if morph_class[0] == "IN": - neurite_types[mtype] = ["basal_dendrite"] - if morph_class[0] == "PC": - neurite_types[mtype] = ["basal_dendrite", "apical_dendrite"] + neurite_types[mtype] = ["basal_dendrite"] + if "PC" in morph_class: + neurite_types[mtype] += ["apical_dendrite"] return neurite_types diff --git a/src/synthesis_workflow/vacuum_synthesis.py b/src/synthesis_workflow/vacuum_synthesis.py index 5664469..e7c66ba 100644 --- a/src/synthesis_workflow/vacuum_synthesis.py +++ b/src/synthesis_workflow/vacuum_synthesis.py @@ -110,7 +110,7 @@ def plot_vacuum_morphologies(vacuum_synth_morphs_df, pdf_folder, morphology_path for gid in ids: morphology = load_morphology(vacuum_synth_morphs_df.loc[gid, morphology_path]) fig, axes = plt.subplots(1, 3, figsize=(15, 5)) - for ax, plane in zip(axes, ["xy", "xz", "yz"]): + for ax, plane in zip(axes, ["xy", "xz", "zy"]): matplotlib_impl.plot_morph( morphology, ax, plane=plane, realistic_diameters=True, soma_outline=False ) From f83416f3f4a6e81ee2338acfb9cf4665f1b853c6 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Tue, 23 Jan 2024 10:36:15 +0100 Subject: [PATCH 18/37] more --- src/synthesis_workflow/synthesis.py | 12 ++++++++---- src/synthesis_workflow/validation.py | 4 ++-- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py 
index a33c6e5..e1f44a2 100644 --- a/src/synthesis_workflow/synthesis.py +++ b/src/synthesis_workflow/synthesis.py @@ -102,11 +102,15 @@ def _build_distributions_single_mtype( } if config["models"][0] != "simpler": config["diameter_model"] = partial(diameter_model_function, config=config) - _data = extract_input.distributions(morphology_paths, **kwargs) + try: + _data = extract_input.distributions(morphology_paths, **kwargs) + + data[neurite_type] = _data[neurite_type] + data["diameter"] = _data["diameter"] + data["soma"] = _data["soma"] + except Exception as exc: + print(mtype, exc) - data[neurite_type] = _data[neurite_type] - data["diameter"] = _data["diameter"] - data["soma"] = _data["soma"] return mtype, data diff --git a/src/synthesis_workflow/validation.py b/src/synthesis_workflow/validation.py index 7730dc0..addbe0d 100644 --- a/src/synthesis_workflow/validation.py +++ b/src/synthesis_workflow/validation.py @@ -240,7 +240,7 @@ def plot_violin_features( hue="label", data=data, split=True, - bw=bw, + bw_method=bw, ax=ax, inner="quartile", ) @@ -431,7 +431,7 @@ def _plot_density_profile( plot_df = _get_vacuum_depths_df(circuit, mtype) with DisableLogger(): - sns.violinplot(x="neurite_type", y="y", data=plot_df, ax=ax, bw=0.1) + sns.violinplot(x="neurite_type", y="y", data=plot_df, ax=ax, bw_method=0.1) fig.suptitle(mtype) return fig From 73fe32eda6f96a8b185d5754c2cbde2162180454 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Wed, 27 Mar 2024 17:40:00 +0100 Subject: [PATCH 19/37] use 2 distribution threshold for single branch basal --- src/synthesis_workflow/synthesis.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py index e1f44a2..4fbcca0 100644 --- a/src/synthesis_workflow/synthesis.py +++ b/src/synthesis_workflow/synthesis.py @@ -99,6 +99,7 @@ def _build_distributions_single_mtype( kwargs = { "neurite_types": neurite_types[mtype], "diameter_input_morph": 
morphology_paths, + "threshold_sec": 0, } if config["models"][0] != "simpler": config["diameter_model"] = partial(diameter_model_function, config=config) @@ -109,7 +110,7 @@ def _build_distributions_single_mtype( data["diameter"] = _data["diameter"] data["soma"] = _data["soma"] except Exception as exc: - print(mtype, exc) + print(mtype, neurite_type, exc) return mtype, data From b93d77903c25e2a9a9bd28c0cf90c12250ab3bec Mon Sep 17 00:00:00 2001 From: arnaudon Date: Thu, 28 Mar 2024 09:55:50 +0100 Subject: [PATCH 20/37] Add fit_orders parameter --- src/synthesis_workflow/synthesis.py | 3 +-- src/synthesis_workflow/tasks/config.py | 5 +++++ src/synthesis_workflow/tasks/synthesis.py | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py index 4fbcca0..f9b2424 100644 --- a/src/synthesis_workflow/synthesis.py +++ b/src/synthesis_workflow/synthesis.py @@ -100,9 +100,8 @@ def _build_distributions_single_mtype( "neurite_types": neurite_types[mtype], "diameter_input_morph": morphology_paths, "threshold_sec": 0, + "diameter_model": partial(diameter_model_function, config=config), } - if config["models"][0] != "simpler": - config["diameter_model"] = partial(diameter_model_function, config=config) try: _data = extract_input.distributions(morphology_paths, **kwargs) diff --git a/src/synthesis_workflow/tasks/config.py b/src/synthesis_workflow/tasks/config.py index 52fb16e..54d2bba 100644 --- a/src/synthesis_workflow/tasks/config.py +++ b/src/synthesis_workflow/tasks/config.py @@ -108,11 +108,16 @@ class DiametrizerConfig(luigi.Config): trunk_max_tries = luigi.IntParameter(default=100) n_samples = luigi.IntParameter(default=2) + fit_orders = luigi.DictParameter(default={"apical_dendrite": 2, "basal_dendrite": 1, "axon": 1}) def __init__(self, *args, **kwargs): """Init.""" super().__init__(*args, **kwargs) self.config_model = {"models": [self.model], "neurite_types": self.neurite_types} + + if 
self.model == "simpler": + self.config_model.update({"fit_orders": self.fit_orders}) + if self.model == "generic": self.config_model.update( { diff --git a/src/synthesis_workflow/tasks/synthesis.py b/src/synthesis_workflow/tasks/synthesis.py index 3820286..71e9068 100644 --- a/src/synthesis_workflow/tasks/synthesis.py +++ b/src/synthesis_workflow/tasks/synthesis.py @@ -176,7 +176,7 @@ def run(self): kwargs = {"neurite_types": neurite_types[mtype]} config = DiametrizerConfig().config_diametrizer if config["models"][0] == "simpler": - config = {"neurite_types": neurite_types[mtype]} + config = {"neurite_types": neurite_types[mtype], "models": ["simpler"]} else: config["neurite_types"] = neurite_types[mtype] kwargs["diameter_parameters"] = config From 993f2e5f4fcc8d7925f110f3ebf5336a361f83b3 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Fri, 2 Aug 2024 11:28:38 +0200 Subject: [PATCH 21/37] fix --- src/synthesis_workflow/tasks/synthesis.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/synthesis_workflow/tasks/synthesis.py b/src/synthesis_workflow/tasks/synthesis.py index 71e9068..4dcfa95 100644 --- a/src/synthesis_workflow/tasks/synthesis.py +++ b/src/synthesis_workflow/tasks/synthesis.py @@ -581,7 +581,6 @@ def run(self): if debug_scales_path is not None: kwargs["out_debug_data"] = debug_scales_path - synthesizer = SynthesizeMorphologies(**kwargs) synthesizer.synthesize() From 627511354f24f57bacd40b41d6802a2c109a0035 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Tue, 2 May 2023 15:30:44 +0200 Subject: [PATCH 22/37] wip --- .../run_boundary_new/create_mesh.py | 39 ++++++++++++ .../run_boundary_new/luigi.cfg | 60 ++++++++++++++++++ .../run_boundary_new/plot_debug.py | 62 +++++++++++++++++++ .../boundary_example/run_boundary_new/run.sh | 12 ++++ 4 files changed, 173 insertions(+) create mode 100644 examples/boundary_example/run_boundary_new/create_mesh.py create mode 100644 examples/boundary_example/run_boundary_new/luigi.cfg create mode 100644 
examples/boundary_example/run_boundary_new/plot_debug.py create mode 100755 examples/boundary_example/run_boundary_new/run.sh diff --git a/examples/boundary_example/run_boundary_new/create_mesh.py b/examples/boundary_example/run_boundary_new/create_mesh.py new file mode 100644 index 0000000..1282f44 --- /dev/null +++ b/examples/boundary_example/run_boundary_new/create_mesh.py @@ -0,0 +1,39 @@ +from neurocollage.mesh_helper import MeshHelper +from voxcell.cell_collection import CellCollection +import numpy as np +import trimesh + + +def get_distances_to_mesh(mesh_helper, mesh, ray_origins, ray_directions): + """Compute distances from point/directions to a mesh.""" + vox_ray_origins = mesh_helper.positions_to_indices(ray_origins) + locations, index_ray, index_tri = mesh.ray.intersects_location( + ray_origins=vox_ray_origins, ray_directions=ray_directions + ) + return np.linalg.norm( + mesh_helper.indices_to_positions(locations[index_ray]) - ray_origins, axis=1 + ) + + +if __name__ == "__main__": + atlas = {"atlas": "atlas", "structure": "out/synthesis_input/region_structure.yaml"} + mesh_helper = MeshHelper(atlas, None) + pia_mesh = mesh_helper.get_pia_mesh() + pia_mesh = mesh_helper.get_boundary_mesh() + #pia_mesh = mesh_helper.get_layer_meshes()[1] + print(pia_mesh) + pia_mesh.export("mesh.obj") + + morph_df = CellCollection().load("out/synthesis/circuit.h5").as_dataframe() + + ray_origins = morph_df[["x", "y", "z"]].to_numpy() + ray_directions = np.array(len(morph_df) * [[0.0, 1, 0.0]]) + dists = get_distances_to_mesh(mesh_helper, pia_mesh, ray_origins, ray_directions) + print(dists) + ray_visualize = trimesh.load_path( + np.hstack((ray_origins, ray_origins + ray_directions * 500.0)).reshape(-1, 2, 3) + ) + + scene = trimesh.Scene([pia_mesh, ray_visualize]) + scene.show() + mesh_helper.show() diff --git a/examples/boundary_example/run_boundary_new/luigi.cfg b/examples/boundary_example/run_boundary_new/luigi.cfg new file mode 100644 index 0000000..7477846 --- 
/dev/null +++ b/examples/boundary_example/run_boundary_new/luigi.cfg @@ -0,0 +1,60 @@ +[core] +workers = 1 +logging_conf_file = logging.conf + +[RunnerConfig] +nb_jobs = 10 +joblib_verbose = 10 + +[SynthesisConfig] +mtypes = ["L5_TPC:A"] +axon_method = no_axon + +[CircuitConfig] +circuit_somata_path = circuit.mvd3 +atlas_path = atlas + +[PathConfig] +result_path = out +morphology_path = repaired_morphology_path_h5 + +[BuildCircuit] +density_factor = 1.0 + +[SliceCircuit] +n_cells = 20 + +[CreateAtlasPlanes] +plane_type = aligned +plane_count = 1 +centerline_axis = 2 +slice_thickness = 200 + +[GetSynthesisInputs] +url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git +branch = boundary +git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_sscx + +[BuildMorphsDF] +neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml +morphology_dirs = {"repaired_morphology_path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc","repaired_morphology_path_h5": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-h5"} + +#[BuildAxonMorphologies] +#axon_cells_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-h5 + +[Synthesize] +rerun = False +context_config_path = context_config.yaml +debug_region_grower_scales = True + +#################################### +# ########## Validation ########## # +#################################### +[ValidateSynthesis] +with_collage = True +with_path_distance_fits = False +with_morphometrics = False +with_density_profiles = False +with_scale_statistics = False +with_score_matrix_reports=False +with_morphology_validation_reports = False diff --git a/examples/boundary_example/run_boundary_new/plot_debug.py 
b/examples/boundary_example/run_boundary_new/plot_debug.py new file mode 100644 index 0000000..021b880 --- /dev/null +++ b/examples/boundary_example/run_boundary_new/plot_debug.py @@ -0,0 +1,62 @@ +import pandas as pd +from tqdm import tqdm +import matplotlib.pyplot as plt + +if __name__ == "__main__": + df = pd.read_csv("data.csv", header=None) + + d = df[df[6] < 0] + dd = df[df[6] > 0] + + plt.figure() + plt.scatter(d[0], d[1], c="r", s=0.02, marker=".", rasterized=True) + plt.scatter(dd[0], dd[1], c=dd[6], s=0.02, marker=".", rasterized=True) + plt.axis("equal") + plt.colorbar() + plt.savefig("dist_xy.pdf") + + plt.figure() + plt.scatter(d[2], d[1], c="r", s=0.02, marker=".", rasterized=True) + plt.scatter(dd[2], dd[1], c=dd[6], s=0.02, marker=".", rasterized=True) + plt.axis("equal") + plt.colorbar() + plt.savefig("dist_zy.pdf") + + plt.figure() + plt.scatter(d[0], d[2], c="r", s=0.02, marker=".", rasterized=True) + plt.scatter(dd[0], dd[2], c=dd[6], s=0.02, marker=".", rasterized=True) + plt.axis("equal") + plt.colorbar() + plt.savefig("dist_xz.pdf") + + n_steps = 200 + n = int(len(df) / n_steps) + for step in tqdm(range(n_steps + 1)): + d = df.loc[: step * (n + 1)] + + _d = d[d[6] < 0] + _dd = d[d[6] > 0] + plt.figure() + plt.scatter(_d[0], _d[1], c="r", s=0.01, marker=".", rasterized=True) + plt.scatter( + _dd[0], _dd[1], c=_dd[6], s=0.01, marker=".", rasterized=True, vmin=0, vmax=1000 + ) + plt.suptitle(f"computational time [a.u]: {step}") + plt.colorbar(shrink=0.5, label="distance to boundary [microns]") + plt.axis([-1000, 750, 500, 2200]) + plt.tight_layout() + + plt.savefig(f"steps/side_step_{step:04d}.png") + plt.close() + + plt.figure() + plt.scatter(_d[0], _d[2], c="r", s=0.01, marker=".", rasterized=True) + plt.scatter( + _dd[0], _dd[2], c=_dd[6], s=0.01, marker=".", rasterized=True, vmin=0, vmax=1000 + ) + plt.colorbar(shrink=0.5, label="distance to boundary [microns]") + plt.axis([-800, 700, -1400, 0]) + plt.suptitle(f"computational time [a.u]: 
{step}") + plt.tight_layout() + plt.savefig(f"steps/top_step_{step:04d}.png") + plt.close() diff --git a/examples/boundary_example/run_boundary_new/run.sh b/examples/boundary_example/run_boundary_new/run.sh new file mode 100755 index 0000000..d2e6f8c --- /dev/null +++ b/examples/boundary_example/run_boundary_new/run.sh @@ -0,0 +1,12 @@ +#!/usr/bin/bash + +export NUMEXPR_MAX_THREADS=1 +export OMP_NUM_THREADS=1 +export OPENBLAS_NUM_THREADS=1 + +#rm -rf atlas +#brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 + +python -m luigi --module synthesis_workflow.tasks.workflows ValidateSynthesis \ + --local-scheduler \ + --log-level INFO \ From e42f16f03123eaa5702e79bd8bebc27681ee5741 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Thu, 22 Jun 2023 21:22:58 +0200 Subject: [PATCH 23/37] more examples --- .../run_boundary_new/create_mesh.py | 39 ------------ .../run_boundary_new/luigi.cfg | 60 ------------------ .../run_boundary_new/plot_debug.py | 62 ------------------- .../boundary_example/run_boundary_new/run.sh | 12 ---- 4 files changed, 173 deletions(-) delete mode 100644 examples/boundary_example/run_boundary_new/create_mesh.py delete mode 100644 examples/boundary_example/run_boundary_new/luigi.cfg delete mode 100644 examples/boundary_example/run_boundary_new/plot_debug.py delete mode 100755 examples/boundary_example/run_boundary_new/run.sh diff --git a/examples/boundary_example/run_boundary_new/create_mesh.py b/examples/boundary_example/run_boundary_new/create_mesh.py deleted file mode 100644 index 1282f44..0000000 --- a/examples/boundary_example/run_boundary_new/create_mesh.py +++ /dev/null @@ -1,39 +0,0 @@ -from neurocollage.mesh_helper import MeshHelper -from voxcell.cell_collection import CellCollection -import numpy as np -import trimesh - - -def get_distances_to_mesh(mesh_helper, mesh, ray_origins, ray_directions): - """Compute distances from point/directions to a mesh.""" - vox_ray_origins = 
mesh_helper.positions_to_indices(ray_origins) - locations, index_ray, index_tri = mesh.ray.intersects_location( - ray_origins=vox_ray_origins, ray_directions=ray_directions - ) - return np.linalg.norm( - mesh_helper.indices_to_positions(locations[index_ray]) - ray_origins, axis=1 - ) - - -if __name__ == "__main__": - atlas = {"atlas": "atlas", "structure": "out/synthesis_input/region_structure.yaml"} - mesh_helper = MeshHelper(atlas, None) - pia_mesh = mesh_helper.get_pia_mesh() - pia_mesh = mesh_helper.get_boundary_mesh() - #pia_mesh = mesh_helper.get_layer_meshes()[1] - print(pia_mesh) - pia_mesh.export("mesh.obj") - - morph_df = CellCollection().load("out/synthesis/circuit.h5").as_dataframe() - - ray_origins = morph_df[["x", "y", "z"]].to_numpy() - ray_directions = np.array(len(morph_df) * [[0.0, 1, 0.0]]) - dists = get_distances_to_mesh(mesh_helper, pia_mesh, ray_origins, ray_directions) - print(dists) - ray_visualize = trimesh.load_path( - np.hstack((ray_origins, ray_origins + ray_directions * 500.0)).reshape(-1, 2, 3) - ) - - scene = trimesh.Scene([pia_mesh, ray_visualize]) - scene.show() - mesh_helper.show() diff --git a/examples/boundary_example/run_boundary_new/luigi.cfg b/examples/boundary_example/run_boundary_new/luigi.cfg deleted file mode 100644 index 7477846..0000000 --- a/examples/boundary_example/run_boundary_new/luigi.cfg +++ /dev/null @@ -1,60 +0,0 @@ -[core] -workers = 1 -logging_conf_file = logging.conf - -[RunnerConfig] -nb_jobs = 10 -joblib_verbose = 10 - -[SynthesisConfig] -mtypes = ["L5_TPC:A"] -axon_method = no_axon - -[CircuitConfig] -circuit_somata_path = circuit.mvd3 -atlas_path = atlas - -[PathConfig] -result_path = out -morphology_path = repaired_morphology_path_h5 - -[BuildCircuit] -density_factor = 1.0 - -[SliceCircuit] -n_cells = 20 - -[CreateAtlasPlanes] -plane_type = aligned -plane_count = 1 -centerline_axis = 2 -slice_thickness = 200 - -[GetSynthesisInputs] -url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git -branch = boundary 
-git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_sscx - -[BuildMorphsDF] -neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml -morphology_dirs = {"repaired_morphology_path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc","repaired_morphology_path_h5": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-h5"} - -#[BuildAxonMorphologies] -#axon_cells_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-h5 - -[Synthesize] -rerun = False -context_config_path = context_config.yaml -debug_region_grower_scales = True - -#################################### -# ########## Validation ########## # -#################################### -[ValidateSynthesis] -with_collage = True -with_path_distance_fits = False -with_morphometrics = False -with_density_profiles = False -with_scale_statistics = False -with_score_matrix_reports=False -with_morphology_validation_reports = False diff --git a/examples/boundary_example/run_boundary_new/plot_debug.py b/examples/boundary_example/run_boundary_new/plot_debug.py deleted file mode 100644 index 021b880..0000000 --- a/examples/boundary_example/run_boundary_new/plot_debug.py +++ /dev/null @@ -1,62 +0,0 @@ -import pandas as pd -from tqdm import tqdm -import matplotlib.pyplot as plt - -if __name__ == "__main__": - df = pd.read_csv("data.csv", header=None) - - d = df[df[6] < 0] - dd = df[df[6] > 0] - - plt.figure() - plt.scatter(d[0], d[1], c="r", s=0.02, marker=".", rasterized=True) - plt.scatter(dd[0], dd[1], c=dd[6], s=0.02, marker=".", rasterized=True) - plt.axis("equal") - plt.colorbar() - plt.savefig("dist_xy.pdf") - - plt.figure() - plt.scatter(d[2], d[1], c="r", s=0.02, marker=".", rasterized=True) - 
plt.scatter(dd[2], dd[1], c=dd[6], s=0.02, marker=".", rasterized=True) - plt.axis("equal") - plt.colorbar() - plt.savefig("dist_zy.pdf") - - plt.figure() - plt.scatter(d[0], d[2], c="r", s=0.02, marker=".", rasterized=True) - plt.scatter(dd[0], dd[2], c=dd[6], s=0.02, marker=".", rasterized=True) - plt.axis("equal") - plt.colorbar() - plt.savefig("dist_xz.pdf") - - n_steps = 200 - n = int(len(df) / n_steps) - for step in tqdm(range(n_steps + 1)): - d = df.loc[: step * (n + 1)] - - _d = d[d[6] < 0] - _dd = d[d[6] > 0] - plt.figure() - plt.scatter(_d[0], _d[1], c="r", s=0.01, marker=".", rasterized=True) - plt.scatter( - _dd[0], _dd[1], c=_dd[6], s=0.01, marker=".", rasterized=True, vmin=0, vmax=1000 - ) - plt.suptitle(f"computational time [a.u]: {step}") - plt.colorbar(shrink=0.5, label="distance to boundary [microns]") - plt.axis([-1000, 750, 500, 2200]) - plt.tight_layout() - - plt.savefig(f"steps/side_step_{step:04d}.png") - plt.close() - - plt.figure() - plt.scatter(_d[0], _d[2], c="r", s=0.01, marker=".", rasterized=True) - plt.scatter( - _dd[0], _dd[2], c=_dd[6], s=0.01, marker=".", rasterized=True, vmin=0, vmax=1000 - ) - plt.colorbar(shrink=0.5, label="distance to boundary [microns]") - plt.axis([-800, 700, -1400, 0]) - plt.suptitle(f"computational time [a.u]: {step}") - plt.tight_layout() - plt.savefig(f"steps/top_step_{step:04d}.png") - plt.close() diff --git a/examples/boundary_example/run_boundary_new/run.sh b/examples/boundary_example/run_boundary_new/run.sh deleted file mode 100755 index d2e6f8c..0000000 --- a/examples/boundary_example/run_boundary_new/run.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/bash - -export NUMEXPR_MAX_THREADS=1 -export OMP_NUM_THREADS=1 -export OPENBLAS_NUM_THREADS=1 - -#rm -rf atlas -#brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 - -python -m luigi --module synthesis_workflow.tasks.workflows ValidateSynthesis \ - --local-scheduler \ - --log-level INFO \ From 
ef29f5e55bab736e77b050de5df255edaeb2a2e8 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Fri, 22 Sep 2023 16:19:50 +0200 Subject: [PATCH 24/37] Fix: small --- examples/O1_example/luigi.cfg | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index e21a4b3..07b4d6a 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -51,13 +51,20 @@ density_factor = 1.0 [CreateBoundaryMask] boundary_thickness = 2 +[CreateBoundaryMask] +boundary_thickness = 2 + [SliceCircuit] +<<<<<<< HEAD n_cells = 10 +======= +n_cells = 20 +>>>>>>> cf77182 (Fix: small) [CreateAtlasPlanes] plane_type = centerline_straight -plane_count = 5 -slice_thickness = 50 +plane_count = 3 +slice_thickness = 200 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git @@ -71,9 +78,18 @@ morphology_dirs = {"path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph- [ValidateSynthesis] with_collage = True with_trunk_validation = True +<<<<<<< HEAD with_path_distance_fits = True with_morphometrics = True with_density_profiles = True with_scale_statistics = True with_score_matrix_reports = True with_morphology_validation_reports = True +======= +with_path_distance_fits = False +with_morphometrics = False +with_density_profiles = False +with_scale_statistics = False +with_score_matrix_reports=False +with_morphology_validation_reports = False +>>>>>>> cf77182 (Fix: small) From 145771f3f30841e0f38790a640aa4bb767f4ea96 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Wed, 27 Sep 2023 17:44:14 +0200 Subject: [PATCH 25/37] more --- examples/O1_example/luigi.cfg | 19 +++---------------- src/synthesis_workflow/tasks/circuit.py | 1 + 2 files changed, 4 insertions(+), 16 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index 07b4d6a..1888f6e 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -1,7 +1,7 @@ # run syntheis in 
an O1 atlas [core] -workers = 10 +workers = 5 logging_conf_file = logging.conf [RunnerConfig] @@ -55,11 +55,7 @@ boundary_thickness = 2 boundary_thickness = 2 [SliceCircuit] -<<<<<<< HEAD n_cells = 10 -======= -n_cells = 20 ->>>>>>> cf77182 (Fix: small) [CreateAtlasPlanes] plane_type = centerline_straight @@ -68,8 +64,8 @@ slice_thickness = 200 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git -git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_O0 -branch = o0o1 +git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_O1 +branch = axon_rat_O1 [BuildMorphsDF] neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml @@ -78,18 +74,9 @@ morphology_dirs = {"path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph- [ValidateSynthesis] with_collage = True with_trunk_validation = True -<<<<<<< HEAD with_path_distance_fits = True with_morphometrics = True with_density_profiles = True with_scale_statistics = True with_score_matrix_reports = True with_morphology_validation_reports = True -======= -with_path_distance_fits = False -with_morphometrics = False -with_density_profiles = False -with_scale_statistics = False -with_score_matrix_reports=False -with_morphology_validation_reports = False ->>>>>>> cf77182 (Fix: small) diff --git a/src/synthesis_workflow/tasks/circuit.py b/src/synthesis_workflow/tasks/circuit.py index 5317804..16fab21 100644 --- a/src/synthesis_workflow/tasks/circuit.py +++ b/src/synthesis_workflow/tasks/circuit.py @@ -4,6 +4,7 @@ from copy import deepcopy from functools import partial from pathlib import Path +import numpy as np import luigi import numpy as np From 13c67e8e7a4617bc387de4359d957fa01bc7be0c Mon Sep 17 00:00:00 2001 From: arnaudon Date: Mon, 9 Oct 2023 15:44:56 +0200 Subject: [PATCH 26/37] example luigi --- examples/O1_example/luigi.cfg | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff 
--git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index 1888f6e..f841fcc 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -1,7 +1,7 @@ # run syntheis in an O1 atlas [core] -workers = 5 +workers = 10 logging_conf_file = logging.conf [RunnerConfig] @@ -11,7 +11,7 @@ nb_jobs = 5 axon_method = synthesis # L1 IN -#mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] +# mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] # L23 IN # mtypes = ["L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP"] @@ -32,7 +32,7 @@ axon_method = synthesis # mtypes = ["L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC"] # L6 PC -#mtypes = ["L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC"] +# mtypes = ["L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC"] # L6 IN #mtypes = ["L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] @@ -40,7 +40,6 @@ axon_method = synthesis # all mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", "L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] -#mtypes = ["L4_MC"] [CircuitConfig] atlas_path = atlas region = O0 @@ -59,8 +58,8 @@ n_cells = 10 [CreateAtlasPlanes] plane_type = centerline_straight -plane_count = 3 -slice_thickness = 200 +plane_count = 5 +slice_thickness = 50 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git From 
500c88ea716ed0ddfd3c52782e526703cfc84256 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Mon, 9 Oct 2023 16:20:09 +0200 Subject: [PATCH 27/37] more --- src/synthesis_workflow/tasks/circuit.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/synthesis_workflow/tasks/circuit.py b/src/synthesis_workflow/tasks/circuit.py index 16fab21..5317804 100644 --- a/src/synthesis_workflow/tasks/circuit.py +++ b/src/synthesis_workflow/tasks/circuit.py @@ -4,7 +4,6 @@ from copy import deepcopy from functools import partial from pathlib import Path -import numpy as np import luigi import numpy as np From e62652c8afd5e7cf010ec7ad57f2508bbee7a014 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Tue, 31 Oct 2023 16:15:02 +0100 Subject: [PATCH 28/37] more --- examples/O1_example/luigi.cfg | 12 ++++++------ examples/boundary_example/luigi.cfg | 3 +-- src/synthesis_workflow/synthesis.py | 8 +++++++- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index f841fcc..fbec94a 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -71,11 +71,11 @@ neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morp morphology_dirs = {"path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc"} [ValidateSynthesis] -with_collage = True -with_trunk_validation = True -with_path_distance_fits = True +with_collage = False +with_trunk_validation = False +with_path_distance_fits = False with_morphometrics = True with_density_profiles = True -with_scale_statistics = True -with_score_matrix_reports = True -with_morphology_validation_reports = True +with_scale_statistics = False +with_score_matrix_reports=False +with_morphology_validation_reports = False diff --git a/examples/boundary_example/luigi.cfg b/examples/boundary_example/luigi.cfg index d21573e..1ce7721 100644 --- a/examples/boundary_example/luigi.cfg +++ 
b/examples/boundary_example/luigi.cfg @@ -10,8 +10,7 @@ nb_jobs = 5 [SynthesisConfig] axon_method = synthesis -#mtypes = ["L1_open", "L1_hard", "L1_leaky", "L1_hard_trunk", "L1_attractive"] -mtypes = ["L1_open", "L1_left", "L1_down"] +mtypes = ["L1_open", "L1_hard", "L1_leaky", "L1_hard_trunk", "L1_attractive"] [CircuitConfig] atlas_path = atlas diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py index f9b2424..9d0c333 100644 --- a/src/synthesis_workflow/synthesis.py +++ b/src/synthesis_workflow/synthesis.py @@ -13,6 +13,7 @@ from joblib import Parallel from joblib import delayed from morphio.mut import Morphology +from morphio import SectionType from neuroc.scale import ScaleParameters from neuroc.scale import scale_section from neurom import load_morphology @@ -33,7 +34,12 @@ def _get_morph_class(path): - return "PC" if has_apical_dendrite(load_morphology(path)) else "IN" + m = load_morphology(path) + + if len(["" for neurite in m.root_sections if neurite.type == SectionType.basal_dendrite]) == 1: + raise ValueError(f"{path} bas a single basal, it will break synthesis, we stop here") + + return "PC" if has_apical_dendrite(m) else "IN" def get_neurite_types(morphs_df): From d7521b446f41e41fc22476cac1527959e06162c0 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Mon, 13 Nov 2023 16:36:02 +0100 Subject: [PATCH 29/37] use_* --- examples/O1_example/luigi.cfg | 4 ++-- examples/boundary_example/luigi.cfg | 3 ++- src/synthesis_workflow/synthesis.py | 8 +------- 3 files changed, 5 insertions(+), 10 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index fbec94a..251b265 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -71,8 +71,8 @@ neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morp morphology_dirs = {"path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc"} 
[ValidateSynthesis] -with_collage = False -with_trunk_validation = False +with_collage = True +with_trunk_validation = True with_path_distance_fits = False with_morphometrics = True with_density_profiles = True diff --git a/examples/boundary_example/luigi.cfg b/examples/boundary_example/luigi.cfg index 1ce7721..d21573e 100644 --- a/examples/boundary_example/luigi.cfg +++ b/examples/boundary_example/luigi.cfg @@ -10,7 +10,8 @@ nb_jobs = 5 [SynthesisConfig] axon_method = synthesis -mtypes = ["L1_open", "L1_hard", "L1_leaky", "L1_hard_trunk", "L1_attractive"] +#mtypes = ["L1_open", "L1_hard", "L1_leaky", "L1_hard_trunk", "L1_attractive"] +mtypes = ["L1_open", "L1_left", "L1_down"] [CircuitConfig] atlas_path = atlas diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py index 9d0c333..a51c4ed 100644 --- a/src/synthesis_workflow/synthesis.py +++ b/src/synthesis_workflow/synthesis.py @@ -34,12 +34,7 @@ def _get_morph_class(path): - m = load_morphology(path) - - if len(["" for neurite in m.root_sections if neurite.type == SectionType.basal_dendrite]) == 1: - raise ValueError(f"{path} bas a single basal, it will break synthesis, we stop here") - - return "PC" if has_apical_dendrite(m) else "IN" + return "PC" if has_apical_dendrite(load_morphology(path)) else "IN" def get_neurite_types(morphs_df): @@ -98,7 +93,6 @@ def _build_distributions_single_mtype( _morphs_df = _morphs_df.loc[morphs_df.use_apical] if neurite_type == "basal_dendrite" and "use_basal" in morphs_df.columns: _morphs_df = _morphs_df.loc[morphs_df.use_basal] - morphology_paths = _morphs_df[morphology_path].to_list() config["neurite_types"] = neurite_types[mtype] From 3a9932fe827d318df6d41de2ae5e9413dcfe7ad5 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Wed, 15 Nov 2023 14:51:20 +0100 Subject: [PATCH 30/37] fix some validation --- examples/O1_example/luigi.cfg | 15 ++++++++------- src/synthesis_workflow/synthesis.py | 1 + src/synthesis_workflow/validation.py | 1 + 3 files 
changed, 10 insertions(+), 7 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index 251b265..1180e4d 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -11,7 +11,8 @@ nb_jobs = 5 axon_method = synthesis # L1 IN -# mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] +mtypes = ["L1_DAC", "L5_TPC:A", "L6_TPC:C"] +#, "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] # L23 IN # mtypes = ["L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP"] @@ -38,7 +39,7 @@ axon_method = synthesis #mtypes = ["L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] # all -mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", "L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] +#mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", "L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] [CircuitConfig] atlas_path = 
atlas @@ -64,7 +65,7 @@ slice_thickness = 50 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_O1 -branch = axon_rat_O1 +branch = main [BuildMorphsDF] neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml @@ -73,9 +74,9 @@ morphology_dirs = {"path": "/gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph- [ValidateSynthesis] with_collage = True with_trunk_validation = True -with_path_distance_fits = False +with_path_distance_fits = True with_morphometrics = True with_density_profiles = True -with_scale_statistics = False -with_score_matrix_reports=False -with_morphology_validation_reports = False +with_scale_statistics = True +with_score_matrix_reports = True +with_morphology_validation_reports = True diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py index a51c4ed..5fc1166 100644 --- a/src/synthesis_workflow/synthesis.py +++ b/src/synthesis_workflow/synthesis.py @@ -93,6 +93,7 @@ def _build_distributions_single_mtype( _morphs_df = _morphs_df.loc[morphs_df.use_apical] if neurite_type == "basal_dendrite" and "use_basal" in morphs_df.columns: _morphs_df = _morphs_df.loc[morphs_df.use_basal] + morphology_paths = _morphs_df[morphology_path].to_list() config["neurite_types"] = neurite_types[mtype] diff --git a/src/synthesis_workflow/validation.py b/src/synthesis_workflow/validation.py index addbe0d..bba8da6 100644 --- a/src/synthesis_workflow/validation.py +++ b/src/synthesis_workflow/validation.py @@ -34,6 +34,7 @@ from neurom.apps.morph_stats import extract_dataframe from neurom.check.morphology_checks import has_apical_dendrite from neurom.core.dataformat import COLS +from morph_tool.resampling import resample_linear_density from neurots import NeuronGrower from neurots.extract_input.from_neurom import trunk_vectors from neurots.generate.orientations import 
get_probability_function From 9313498650ecc47348c6252892834cb72ac4424c Mon Sep 17 00:00:00 2001 From: arnaudon Date: Fri, 17 Nov 2023 13:39:43 +0100 Subject: [PATCH 31/37] o1 --- examples/O1_example/luigi.cfg | 8 ++++---- examples/O1_example/run.sh | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index 1180e4d..e35f358 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -11,8 +11,7 @@ nb_jobs = 5 axon_method = synthesis # L1 IN -mtypes = ["L1_DAC", "L5_TPC:A", "L6_TPC:C"] -#, "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] +#mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC"] # L23 IN # mtypes = ["L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP"] @@ -33,7 +32,7 @@ mtypes = ["L1_DAC", "L5_TPC:A", "L6_TPC:C"] # mtypes = ["L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC"] # L6 PC -# mtypes = ["L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC"] +#mtypes = ["L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC"] # L6 IN #mtypes = ["L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] @@ -41,6 +40,7 @@ mtypes = ["L1_DAC", "L5_TPC:A", "L6_TPC:C"] # all #mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", "L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] +mtypes = ["L4_MC"] [CircuitConfig] atlas_path = atlas region = O0 @@ -65,7 +65,7 @@ 
slice_thickness = 50 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_O1 -branch = main +branch = O1_with_axons [BuildMorphsDF] neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml diff --git a/examples/O1_example/run.sh b/examples/O1_example/run.sh index 755165e..f6245d0 100755 --- a/examples/O1_example/run.sh +++ b/examples/O1_example/run.sh @@ -2,8 +2,8 @@ #export OMP_NUM_THREADS=1 -#rm -rf atlas -#brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 +rm -rf atlas +brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 -w 1 python -m luigi --module synthesis_workflow.tasks.workflows ValidateSynthesis \ --local-scheduler \ From 3dcd5bacdb8522eb99c57a905ff48422f49be07e Mon Sep 17 00:00:00 2001 From: arnaudon Date: Fri, 24 Nov 2023 16:20:08 +0100 Subject: [PATCH 32/37] more doc --- src/synthesis_workflow/synthesis.py | 1 - src/synthesis_workflow/validation.py | 1 - 2 files changed, 2 deletions(-) diff --git a/src/synthesis_workflow/synthesis.py b/src/synthesis_workflow/synthesis.py index 5fc1166..f9b2424 100644 --- a/src/synthesis_workflow/synthesis.py +++ b/src/synthesis_workflow/synthesis.py @@ -13,7 +13,6 @@ from joblib import Parallel from joblib import delayed from morphio.mut import Morphology -from morphio import SectionType from neuroc.scale import ScaleParameters from neuroc.scale import scale_section from neurom import load_morphology diff --git a/src/synthesis_workflow/validation.py b/src/synthesis_workflow/validation.py index bba8da6..addbe0d 100644 --- a/src/synthesis_workflow/validation.py +++ b/src/synthesis_workflow/validation.py @@ -34,7 +34,6 @@ from neurom.apps.morph_stats import extract_dataframe from neurom.check.morphology_checks import has_apical_dendrite from neurom.core.dataformat 
import COLS -from morph_tool.resampling import resample_linear_density from neurots import NeuronGrower from neurots.extract_input.from_neurom import trunk_vectors from neurots.generate.orientations import get_probability_function From 23d82219f2d29fd51c965f23efb82df2fcec4203 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Mon, 27 Nov 2023 11:09:11 +0100 Subject: [PATCH 33/37] more --- src/synthesis_workflow/vacuum_synthesis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/synthesis_workflow/vacuum_synthesis.py b/src/synthesis_workflow/vacuum_synthesis.py index e7c66ba..5664469 100644 --- a/src/synthesis_workflow/vacuum_synthesis.py +++ b/src/synthesis_workflow/vacuum_synthesis.py @@ -110,7 +110,7 @@ def plot_vacuum_morphologies(vacuum_synth_morphs_df, pdf_folder, morphology_path for gid in ids: morphology = load_morphology(vacuum_synth_morphs_df.loc[gid, morphology_path]) fig, axes = plt.subplots(1, 3, figsize=(15, 5)) - for ax, plane in zip(axes, ["xy", "xz", "zy"]): + for ax, plane in zip(axes, ["xy", "xz", "yz"]): matplotlib_impl.plot_morph( morphology, ax, plane=plane, realistic_diameters=True, soma_outline=False ) From 344f978bee6cdcb93fc235db5f76aca109c99ee6 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Thu, 21 Dec 2023 14:55:29 +0100 Subject: [PATCH 34/37] more --- examples/O1_example/luigi.cfg | 8 ++++---- examples/O1_example/run.sh | 4 ++-- src/synthesis_workflow/vacuum_synthesis.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/examples/O1_example/luigi.cfg b/examples/O1_example/luigi.cfg index e35f358..f9e27f9 100644 --- a/examples/O1_example/luigi.cfg +++ b/examples/O1_example/luigi.cfg @@ -38,9 +38,9 @@ axon_method = synthesis #mtypes = ["L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] # all -#mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", 
"L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", "L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] +mtypes = ["L1_DAC", "L1_HAC", "L1_NGC-SA", "L1_NGC-DA", "L1_LAC", "L1_SAC", "L23_MC", "L23_ChC", "L23_NBC", "L23_DBC", "L23_LBC", "L23_BTC", "L23_NGC", "L23_BP", "L2_TPC:A", "L2_TPC:B", "L2_IPC", "L3_TPC:A", "L3_TPC:C", "L4_SSC", "L4_ChC", "L4_BP", "L4_BTC", "L4_NBC", "L4_NGC","L4_SBC", "L4_DBC", "L4_LBC", "L4_MC", "L4_TPC", "L4_UPC", "L5_ChC", "L5_BP", "L5_BTC", "L5_NBC", "L5_NGC","L5_SBC", "L5_DBC", "L5_LBC", "L5_MC", "L5_TPC:A", "L5_TPC:B", "L5_TPC:C", "L5_UPC", "L6_TPC:A", "L6_TPC:C", "L6_HPC", "L6_IPC", "L6_BPC", "L6_UPC", "L6_ChC", "L6_BP", "L6_BTC", "L6_NBC", "L6_NGC", "L6_SBC", "L6_DBC", "L6_LBC", "L6_MC"] -mtypes = ["L4_MC"] +#mtypes = ["L4_MC"] [CircuitConfig] atlas_path = atlas region = O0 @@ -64,8 +64,8 @@ slice_thickness = 50 [GetSynthesisInputs] url = git@bbpgitlab.epfl.ch:neuromath/synthdb.git -git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_O1 -branch = O1_with_axons +git_synthesis_input_path = synthdb/insitu_synthesis_inputs/rat_O0 +branch = o0o1 [BuildMorphsDF] neurondb_path = /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/06_RepairUnravel-asc/neuronDB.xml diff --git a/examples/O1_example/run.sh b/examples/O1_example/run.sh index f6245d0..755165e 100755 --- a/examples/O1_example/run.sh +++ b/examples/O1_example/run.sh @@ -2,8 +2,8 @@ #export OMP_NUM_THREADS=1 -rm -rf atlas -brainbuilder atlases -n 6,5,4,3,2,1 -t 700,525,190,353,149,165 -d 10 -o atlas column -a 1000 -w 1 +#rm -rf atlas +#brainbuilder atlases -n 6,5,4,3,2,1 -t 
700,525,190,353,149,165 -d 10 -o atlas column -a 1000 python -m luigi --module synthesis_workflow.tasks.workflows ValidateSynthesis \ --local-scheduler \ diff --git a/src/synthesis_workflow/vacuum_synthesis.py b/src/synthesis_workflow/vacuum_synthesis.py index 5664469..e7c66ba 100644 --- a/src/synthesis_workflow/vacuum_synthesis.py +++ b/src/synthesis_workflow/vacuum_synthesis.py @@ -110,7 +110,7 @@ def plot_vacuum_morphologies(vacuum_synth_morphs_df, pdf_folder, morphology_path for gid in ids: morphology = load_morphology(vacuum_synth_morphs_df.loc[gid, morphology_path]) fig, axes = plt.subplots(1, 3, figsize=(15, 5)) - for ax, plane in zip(axes, ["xy", "xz", "yz"]): + for ax, plane in zip(axes, ["xy", "xz", "zy"]): matplotlib_impl.plot_morph( morphology, ax, plane=plane, realistic_diameters=True, soma_outline=False ) From 2f0640f504e941dd9a6256ec4e8db517d96f22f8 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Thu, 3 Oct 2024 09:41:35 +0200 Subject: [PATCH 35/37] more --- src/synthesis_workflow/tasks/circuit.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/synthesis_workflow/tasks/circuit.py b/src/synthesis_workflow/tasks/circuit.py index 5317804..3899511 100644 --- a/src/synthesis_workflow/tasks/circuit.py +++ b/src/synthesis_workflow/tasks/circuit.py @@ -281,8 +281,9 @@ class SliceCircuit(WorkflowTask): default="sliced_circuit_somata.h5", description=":str: Path to save sliced circuit.", ) - n_cells = luigi.IntParameter( - default=10, description=":int: Number of cells per mtype to consider." 
+ n_cells = luigi.FloatParameter( + default=10, + description=":float: Number of cells per mtype to consider if >1, else fraction.", ) def requires(self): From a704d36e0c39fb40861cba132ca8d0525a093511 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Thu, 3 Oct 2024 11:05:43 +0200 Subject: [PATCH 36/37] add mclass task --- src/synthesis_workflow/circuit.py | 41 +++++++++++++++++++++++ src/synthesis_workflow/tasks/circuit.py | 36 +++++++++++++++++++- src/synthesis_workflow/tasks/synthesis.py | 3 +- 3 files changed, 78 insertions(+), 2 deletions(-) diff --git a/src/synthesis_workflow/circuit.py b/src/synthesis_workflow/circuit.py index 5b509a0..fe3e843 100644 --- a/src/synthesis_workflow/circuit.py +++ b/src/synthesis_workflow/circuit.py @@ -197,3 +197,44 @@ def get_layer_tags(atlas_dir, region_structure_path, region=None): L.warning("No voxel found for layer %s.", layer) brain_regions.raw = layers_data return brain_regions, layer_mapping + + +def add_mclass(cells_path, inh_probmap): + """Add mclass column to circuit for barrel cortex.""" + cells = CellCollection.load(cells_path) + cells_df = cells.as_dataframe() + cells_df["metype"] = [ + f"{mtype}_{etype}" for mtype, etype in cells_df[["mtype", "etype"]].values + ] + + exc_df = cells_df[cells_df["synapse_class"] == "EXC"] + + def generate_exc_marker_labels(mtype): + layer = mtype.split("_")[0] + if layer == "L2" or layer == "L3": + layer = "L23" + return f"{layer}_EXC" + + exc_df["marker"] = exc_df.apply(lambda x: generate_exc_marker_labels(x["mtype"]), axis=1) + + group_by_marker = exc_df.groupby("metype") + metype_to_mclass = group_by_marker["marker"].value_counts() / group_by_marker["metype"].count() + metype_to_mclass.name = "probability" + + exc_probmap = ( + metype_to_mclass.reset_index() + .pivot_table(index="marker", columns="metype", values="probability") + .T + ) + prob_map = pd.concat([inh_probmap, exc_probmap]).fillna(0) + + for metype, mapping in prob_map.iterrows(): + cells_metype = 
cells_df[cells_df["metype"] == metype] + subtype_mapping = mapping[mapping > 0] + cells_df.loc[cells_metype.index, "mclass"] = np.random.choice( + subtype_mapping.index.values, size=len(cells_metype), p=subtype_mapping.values + ) + + cells.add_properties({"mclass": cells_df.mclass.values}) + print(cells_df) + return cells diff --git a/src/synthesis_workflow/tasks/circuit.py b/src/synthesis_workflow/tasks/circuit.py index 3899511..3dc5397 100644 --- a/src/synthesis_workflow/tasks/circuit.py +++ b/src/synthesis_workflow/tasks/circuit.py @@ -4,12 +4,14 @@ from copy import deepcopy from functools import partial from pathlib import Path +import shutil import luigi import numpy as np import pandas as pd import yaml from luigi.parameter import PathParameter +from luigi.parameter import OptionalPathParameter from luigi_tools.parameter import RatioParameter from luigi_tools.task import ParamRef from luigi_tools.task import WorkflowTask @@ -22,6 +24,7 @@ from synthesis_workflow.circuit import circuit_slicer from synthesis_workflow.circuit import create_boundary_mask from synthesis_workflow.circuit import slice_circuit +from synthesis_workflow.circuit import add_mclass from synthesis_workflow.tasks.config import AtlasLocalTarget from synthesis_workflow.tasks.config import CircuitConfig from synthesis_workflow.tasks.config import CircuitLocalTarget @@ -300,7 +303,7 @@ def run(self): _slicer = partial( circuit_slicer, - n_cells=self.n_cells, + n_cells=int(self.n_cells) if self.n_cells > 1 else self.n_cells, mtypes=self.mtypes, planes=planes, hemisphere=CircuitConfig().hemisphere, @@ -314,3 +317,34 @@ def run(self): def output(self): """Outputs of the task.""" return CircuitLocalTarget(self.sliced_circuit_path) + + +class AddMClass(WorkflowTask): + """Add mclass column if needed (for Barrel Cortex).""" + + mclass_circuit_path = PathParameter( + default="mclass_circuit_somata.h5", + description=":str: Path to save sliced circuit.", + ) + inh_probability_map_path = 
OptionalPathParameter( + default=None, + description=":str: Path to parquet file with probability map of inh.", + ) + + def requires(self): + """Required input tasks.""" + return SliceCircuit() + + def run(self): + """Actual process of the task.""" + print(self.inh_probability_map_path.exists()) + if self.inh_probability_map_path is not None and self.inh_probability_map_path.exists(): + inh_probmap = pd.read_parquet(self.inh_probability_map_path).T + cells = add_mclass(self.input().path, inh_probmap) + cells.save(self.output().path) + else: + shutil.copy(self.input().path, self.output().path) + + def output(self): + """Outputs of the task.""" + return CircuitLocalTarget(self.mclass_circuit_path) diff --git a/src/synthesis_workflow/tasks/synthesis.py b/src/synthesis_workflow/tasks/synthesis.py index 4dcfa95..b63786a 100644 --- a/src/synthesis_workflow/tasks/synthesis.py +++ b/src/synthesis_workflow/tasks/synthesis.py @@ -34,6 +34,7 @@ from synthesis_workflow.synthesis import get_neurite_types from synthesis_workflow.synthesis import rescale_morphologies from synthesis_workflow.tasks.circuit import SliceCircuit +from synthesis_workflow.tasks.circuit import AddMClass from synthesis_workflow.tasks.config import CircuitConfig from synthesis_workflow.tasks.config import DiametrizerConfig from synthesis_workflow.tasks.config import GetCellComposition @@ -519,7 +520,7 @@ def requires(self): "substituted_cells": ApplySubstitutionRules(), "tmd_parameters": BuildSynthesisParameters(), "tmd_distributions": BuildSynthesisDistributions(), - "circuit": SliceCircuit(), + "circuit": AddMClass(), "composition": GetCellComposition(), } if SynthesisConfig().axon_method == "reconstructed": From a514ea9da3e98fbe5715176ed022f83911229e95 Mon Sep 17 00:00:00 2001 From: arnaudon Date: Thu, 3 Oct 2024 11:07:31 +0200 Subject: [PATCH 37/37] remove print --- src/synthesis_workflow/circuit.py | 1 - src/synthesis_workflow/tasks/circuit.py | 1 - 2 files changed, 2 deletions(-) diff --git 
a/src/synthesis_workflow/circuit.py b/src/synthesis_workflow/circuit.py index fe3e843..43a816e 100644 --- a/src/synthesis_workflow/circuit.py +++ b/src/synthesis_workflow/circuit.py @@ -236,5 +236,4 @@ def generate_exc_marker_labels(mtype): ) cells.add_properties({"mclass": cells_df.mclass.values}) - print(cells_df) return cells diff --git a/src/synthesis_workflow/tasks/circuit.py b/src/synthesis_workflow/tasks/circuit.py index 3dc5397..2f232bc 100644 --- a/src/synthesis_workflow/tasks/circuit.py +++ b/src/synthesis_workflow/tasks/circuit.py @@ -337,7 +337,6 @@ def requires(self): def run(self): """Actual process of the task.""" - print(self.inh_probability_map_path.exists()) if self.inh_probability_map_path is not None and self.inh_probability_map_path.exists(): inh_probmap = pd.read_parquet(self.inh_probability_map_path).T cells = add_mclass(self.input().path, inh_probmap)