Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[REF] Refactor and clean utils.input_files module #1311

Open
wants to merge 17 commits into
base: dev
Choose a base branch
from
18 changes: 8 additions & 10 deletions clinica/pipelines/anatomical/freesurfer/atlas/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,12 @@ def get_to_process_with_atlases(
get_processed_images,
)
from clinica.utils.filemanip import extract_image_ids
from clinica.utils.input_files import T1_FS_DESTRIEUX
from clinica.utils.image import HemiSphere
from clinica.utils.input_files import (
Parcellation,
get_t1_freesurfer_segmentation,
get_t1_freesurfer_statistics,
)
from clinica.utils.inputs import clinica_file_reader

part_ids, sess_ids, list_long_id = grab_image_ids_from_caps_directory(
Expand All @@ -66,13 +71,6 @@ def get_to_process_with_atlases(

if caps_directory.is_dir():
for atlas in atlas_list:
atlas_info = dict(
{
"pattern": f"t1/freesurfer_cross_sectional/sub-*_ses-*/stats/rh.{atlas}.stats",
"description": f"{atlas}-based segmentation",
"needed_pipeline": "t1-freesurfer",
}
)
t1_freesurfer_longitudinal_output = get_processed_images(
caps_directory, part_ids, sess_ids, list_long_id
)
Expand All @@ -87,13 +85,13 @@ def get_to_process_with_atlases(
subjects,
sessions,
caps_directory,
T1_FS_DESTRIEUX,
get_t1_freesurfer_segmentation(Parcellation.DESTRIEUX),
)
t1_freesurfer_files, _ = clinica_file_reader(
subjects,
sessions,
caps_directory,
atlas_info,
get_t1_freesurfer_statistics(atlas, HemiSphere.RIGHT),
)
image_ids = extract_image_ids(t1_freesurfer_files)
image_ids_2 = extract_image_ids(t1_freesurfer_output)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,11 @@ def _build_input_node(self):
save_part_sess_long_ids_to_tsv,
)
from clinica.utils.exceptions import ClinicaException
from clinica.utils.input_files import T1_FS_DESTRIEUX, T1_FS_T_DESTRIEUX
from clinica.utils.input_files import (
Parcellation,
get_t1_freesurfer_segmentation,
get_t1_freesurfer_template,
)
from clinica.utils.inputs import (
clinica_file_reader,
format_clinica_file_reader_errors,
Expand Down Expand Up @@ -119,19 +123,22 @@ def _build_input_node(self):
) = extract_subject_session_longitudinal_ids_from_filename(
to_process_ids
)

pattern_segmentation = get_t1_freesurfer_segmentation(Parcellation.DESTRIEUX)
_, errors_destrieux = clinica_file_reader(
self.subjects, self.sessions, self.caps_directory, T1_FS_DESTRIEUX
self.subjects, self.sessions, self.caps_directory, pattern_segmentation
)
pattern_template = get_t1_freesurfer_template(Parcellation.DESTRIEUX)
_, errors_t_destrieux = clinica_file_reader(
self.subjects, list_long_id, self.caps_directory, T1_FS_T_DESTRIEUX
self.subjects, list_long_id, self.caps_directory, pattern_template
)
all_errors = [errors_destrieux, errors_t_destrieux]

if any(all_errors):
message = "Clinica faced errors while trying to read files in your CAPS directory.\n"
for error, info in zip(all_errors, [T1_FS_DESTRIEUX, T1_FS_T_DESTRIEUX]):
message += format_clinica_file_reader_errors(error, info)
for error, pattern in zip(
all_errors, [pattern_segmentation, pattern_template]
):
message += format_clinica_file_reader_errors(error, pattern)
raise ClinicaException(message)

save_part_sess_long_ids_to_tsv(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@
from typing import List

from clinica.pipelines.engine import Pipeline
from clinica.utils.input_files import (
Parcellation,
get_t1_freesurfer_segmentation,
get_t1_freesurfer_template,
)


class T1FreeSurferTemplate(Pipeline):
Expand All @@ -17,7 +22,6 @@ def get_processed_images(
) -> List[str]:
import re

from clinica.utils.input_files import T1_FS_T_DESTRIEUX
from clinica.utils.inputs import clinica_file_reader
from clinica.utils.longitudinal import get_long_id
from clinica.utils.participant import get_unique_subjects
Expand All @@ -28,11 +32,13 @@ def get_processed_images(
list_long_id = [
get_long_id(list_session_ids) for list_session_ids in list_list_session_ids
]

image_ids: List[str] = []
if caps_directory.is_dir():
t1_freesurfer_files, _ = clinica_file_reader(
list_participant_id, list_long_id, caps_directory, T1_FS_T_DESTRIEUX
list_participant_id,
list_long_id,
caps_directory,
get_t1_freesurfer_template(Parcellation.DESTRIEUX),
)
image_ids = [
re.search(r"(sub-[a-zA-Z0-9]+)_(long-[a-zA-Z0-9]+)", file).group()
Expand Down Expand Up @@ -88,9 +94,7 @@ def _build_input_node(self):
from clinica.pipelines.anatomical.freesurfer.longitudinal.utils import (
save_part_sess_long_ids_to_tsv,
)
from clinica.utils.exceptions import ClinicaCAPSError, ClinicaException
from clinica.utils.filemanip import extract_subjects_sessions_from_filename
from clinica.utils.input_files import T1_FS_DESTRIEUX
from clinica.utils.inputs import clinica_file_filter
from clinica.utils.longitudinal import (
get_long_id,
Expand Down Expand Up @@ -149,11 +153,12 @@ def _build_input_node(self):
self.subjects, self.sessions = extract_subjects_sessions_from_filename(
to_process_ids
)

_, self.subjects, self.sessions = clinica_file_filter(
self.subjects, self.sessions, self.caps_directory, T1_FS_DESTRIEUX
self.subjects,
self.sessions,
self.caps_directory,
get_t1_freesurfer_segmentation(Parcellation.DESTRIEUX),
)

long_ids = get_participants_long_id(self.subjects, self.sessions)
save_part_sess_long_ids_to_tsv(
self.subjects, self.sessions, long_ids, self.base_dir / self.name
Expand Down
15 changes: 10 additions & 5 deletions clinica/pipelines/anatomical/freesurfer/t1/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,19 @@ def get_processed_images(
caps_directory: Path, subjects: List[str], sessions: List[str]
) -> List[str]:
from clinica.utils.filemanip import extract_image_ids
from clinica.utils.input_files import T1_FS_DESTRIEUX
from clinica.utils.input_files import (
Parcellation,
get_t1_freesurfer_segmentation,
)
from clinica.utils.inputs import clinica_file_reader

image_ids: List[str] = []
if caps_directory.is_dir():
t1_freesurfer_files, _ = clinica_file_reader(
subjects, sessions, caps_directory, T1_FS_DESTRIEUX
subjects,
sessions,
caps_directory,
get_t1_freesurfer_segmentation(Parcellation.DESTRIEUX),
)
image_ids = extract_image_ids(t1_freesurfer_files)
return image_ids
Expand Down Expand Up @@ -96,7 +102,7 @@ def _build_input_node(self):
extract_subjects_sessions_from_filename,
save_participants_sessions,
)
from clinica.utils.input_files import T1W_NII
from clinica.utils.input_files import get_t1w_mri
from clinica.utils.inputs import clinica_file_filter
from clinica.utils.stream import cprint
from clinica.utils.ux import print_images_to_process
Expand Down Expand Up @@ -131,9 +137,8 @@ def _build_input_node(self):
)

t1w_files, self.subjects, self.sessions = clinica_file_filter(
self.subjects, self.sessions, self.bids_directory, T1W_NII
self.subjects, self.sessions, self.bids_directory, get_t1w_mri()
)

if not t1w_files:
raise ClinicaException("Empty dataset or already processed")

Expand Down
53 changes: 34 additions & 19 deletions clinica/pipelines/dwi/connectome/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from nipype import config

from clinica.pipelines.engine import Pipeline
from clinica.utils.input_files import QueryPattern

cfg = dict(execution={"parameterize_dirs": False})
config.update_config(cfg)
Expand Down Expand Up @@ -54,40 +55,54 @@ def get_output_fields(self) -> List[str]:
"""
return ["response", "fod", "tracts", "nodes", "connectomes"]

@staticmethod
def _get_input_patterns() -> list[QueryPattern]:
    """Build the list of CAPS query patterns read by this pipeline.

    Returns
    -------
    list of QueryPattern :
        The input file patterns, in the order the input node expects:
        white-matter segmentation, Desikan parcellation, Destrieux
        parcellation, extracted brain (from t1-freesurfer), then the
        preprocessed DWI image, bvec, bval, and brain mask (from
        dwi-preprocessing).
    """
    from clinica.utils.input_files import (
        DWIFileType,
        Parcellation,
        get_dwi_preprocessed_brainmask,
        get_dwi_preprocessed_file,
        get_t1_freesurfer_extracted_brain,
        get_t1_freesurfer_segmentation,
        get_t1_freesurfer_segmentation_white_matter,
    )

    # Inputs from the t1-freesurfer pipeline.
    anat_patterns = [
        get_t1_freesurfer_segmentation_white_matter(),
        get_t1_freesurfer_segmentation(Parcellation.DESIKAN),
        get_t1_freesurfer_segmentation(Parcellation.DESTRIEUX),
        get_t1_freesurfer_extracted_brain(),
    ]
    # Inputs from the dwi-preprocessing pipeline; keep NII/BVEC/BVAL
    # adjacent so downstream indexing can pair bvec/bval together.
    dwi_patterns = [
        get_dwi_preprocessed_file(file_type)
        for file_type in (DWIFileType.NII, DWIFileType.BVEC, DWIFileType.BVAL)
    ]
    dwi_patterns.append(get_dwi_preprocessed_brainmask())
    return anat_patterns + dwi_patterns

def _build_input_node(self):
"""Build and connect an input node to the pipeline."""
import re

import nipype.interfaces.utility as nutil
import nipype.pipeline.engine as npe

import clinica.utils.input_files as input_files
from clinica.utils.exceptions import ClinicaCAPSError
from clinica.utils.filemanip import save_participants_sessions
from clinica.utils.inputs import clinica_list_of_files_reader
from clinica.utils.stream import cprint
from clinica.utils.ux import print_images_to_process

# Read CAPS files
list_caps_files = clinica_list_of_files_reader(
self.subjects,
self.sessions,
self.caps_directory,
[
# Inputs from t1-freesurfer pipeline
input_files.T1_FS_WM, # list_caps_files[0]
input_files.T1_FS_DESIKAN, # list_caps_files[1]
input_files.T1_FS_DESTRIEUX, # list_caps_files[2]
input_files.T1_FS_BRAIN, # list_caps_files[3]
# Inputs from dwi-preprocessing pipeline
input_files.DWI_PREPROC_NII, # list_caps_files[4]
input_files.DWI_PREPROC_BRAINMASK, # list_caps_files[5]
input_files.DWI_PREPROC_BVEC, # list_caps_files[6]
input_files.DWI_PREPROC_BVAL, # list_caps_files[7]
],
raise_exception=True,
self._get_input_patterns(),
)

# Check space of DWI dataset
dwi_file_spaces = [
re.search(
Expand All @@ -110,7 +125,7 @@ def _build_input_node(self):
]

list_grad_fsl = [
(bvec, bval) for bvec, bval in zip(list_caps_files[6], list_caps_files[7])
(bvec, bval) for bvec, bval in zip(list_caps_files[5], list_caps_files[6])
]

# Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
Expand All @@ -133,7 +148,7 @@ def _build_input_node(self):
("wm_mask_file", list_caps_files[0]),
("t1_brain_file", list_caps_files[3]),
("dwi_file", list_caps_files[4]),
("dwi_brainmask_file", list_caps_files[5]),
("dwi_brainmask_file", list_caps_files[7]),
("grad_fsl", list_grad_fsl),
("atlas_files", list_atlas_files),
],
Expand Down Expand Up @@ -161,7 +176,7 @@ def _build_input_node(self):
iterables=[
("wm_mask_file", list_caps_files[0]),
("dwi_file", list_caps_files[4]),
("dwi_brainmask_file", list_caps_files[5]),
("dwi_brainmask_file", list_caps_files[7]),
("grad_fsl", list_grad_fsl),
("atlas_files", list_atlas_files),
],
Expand Down
18 changes: 11 additions & 7 deletions clinica/pipelines/dwi/dti/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,22 +67,26 @@ def _build_input_node(self):
import nipype.interfaces.utility as nutil
import nipype.pipeline.engine as npe

import clinica.utils.input_files as input_files
from clinica.utils.filemanip import save_participants_sessions
from clinica.utils.input_files import (
DWIFileType,
get_dwi_preprocessed_brainmask,
get_dwi_preprocessed_file,
)
from clinica.utils.inputs import clinica_list_of_files_reader
from clinica.utils.stream import cprint
from clinica.utils.ux import print_images_to_process

patterns = [
get_dwi_preprocessed_file(file_type)
for file_type in (DWIFileType.NII, DWIFileType.BVEC, DWIFileType.BVAL)
]
patterns.append(get_dwi_preprocessed_brainmask())
list_caps_files = clinica_list_of_files_reader(
self.subjects,
self.sessions,
self.caps_directory,
[
input_files.DWI_PREPROC_NII,
input_files.DWI_PREPROC_BVEC,
input_files.DWI_PREPROC_BVAL,
input_files.DWI_PREPROC_BRAINMASK,
],
patterns,
raise_exception=True,
)

Expand Down
Loading
Loading