From b7c21d47d6e5e2438022a50a31bce66637d78611 Mon Sep 17 00:00:00 2001
From: Arshitha
Date: Mon, 26 Sep 2022 19:38:34 -0400
Subject: [PATCH] adding mri_defacing_scripts as a directory, not a submodule

---
 imaging_data_prep_scripts/defacing_scripts  |   1 -
 .../mri_defacing_scripts/.gitignore         |   6 +
 .../01_defacing_t1s_prep.py                 | 188 ++++++++++++++++++
 .../02_restructure_defaced_tree.py          |  59 +++++
 .../03_other_scans_registration.py          | 182 +++++++++++++++++
 .../04_segregate_sourcedata_from_main.py    | 117 ++++++++++
 .../mri_defacing_scripts/README.md          |  53 +++++
 .../vqc_generate_renders.py                 |  56 +++++
 8 files changed, 661 insertions(+), 1 deletion(-)
 delete mode 160000 imaging_data_prep_scripts/defacing_scripts
 create mode 100755 imaging_data_prep_scripts/mri_defacing_scripts/.gitignore
 create mode 100644 imaging_data_prep_scripts/mri_defacing_scripts/01_defacing_t1s_prep.py
 create mode 100644 imaging_data_prep_scripts/mri_defacing_scripts/02_restructure_defaced_tree.py
 create mode 100644 imaging_data_prep_scripts/mri_defacing_scripts/03_other_scans_registration.py
 create mode 100644 imaging_data_prep_scripts/mri_defacing_scripts/04_segregate_sourcedata_from_main.py
 create mode 100644 imaging_data_prep_scripts/mri_defacing_scripts/README.md
 create mode 100755 imaging_data_prep_scripts/mri_defacing_scripts/vqc_generate_renders.py

diff --git a/imaging_data_prep_scripts/defacing_scripts b/imaging_data_prep_scripts/defacing_scripts
deleted file mode 160000
index 1c1a43a..0000000
--- a/imaging_data_prep_scripts/defacing_scripts
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 1c1a43a61f9ffcaa78643cfecd85e40b6a16d72c
diff --git a/imaging_data_prep_scripts/mri_defacing_scripts/.gitignore b/imaging_data_prep_scripts/mri_defacing_scripts/.gitignore
new file mode 100755
index 0000000..a850a30
--- /dev/null
+++ b/imaging_data_prep_scripts/mri_defacing_scripts/.gitignore
@@ -0,0 +1,6 @@
+scripts_outp*
+test.ipynb
+swarm_files/log
+lookup_jsons/
+quantitative_metrics
+swarm_files/
diff --git a/imaging_data_prep_scripts/mri_defacing_scripts/01_defacing_t1s_prep.py b/imaging_data_prep_scripts/mri_defacing_scripts/01_defacing_t1s_prep.py
new file mode 100644
index 0000000..b6e039a
--- /dev/null
+++ b/imaging_data_prep_scripts/mri_defacing_scripts/01_defacing_t1s_prep.py
@@ -0,0 +1,188 @@
+import argparse
+import json
+import subprocess
+from collections import defaultdict
+from os import fspath
+from pathlib import Path
+
+
+def get_args():
+    parser = argparse.ArgumentParser(
+        description='Generate a swarm command file to deface T1w scans for a given BIDS dataset.')
+
+    parser.add_argument('-in', action='store', dest='input',
+                        help='Path to input BIDS dataset.')
+
+    parser.add_argument('-out', action='store', dest='output',
+                        help='Path to output dataset.')
+
+    parser.add_argument('-logdir', action='store', dest='logdir',
+                        help='Directory path where log files and other metadata are stored.')
+
+    args = parser.parse_args()
+    return Path(args.input), Path(args.output), Path(args.logdir)
+
+
+def run_command(cmdstr):
+    p = subprocess.Popen(cmdstr, bufsize=1, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True,
+                         encoding='utf8', shell=True)
+    # wait for the command to finish and return its combined stdout/stderr
+    out, _ = p.communicate()
+    return out.strip()
+
+
+def write_cmds_to_file(cmds_list, filepath):
+    with open(filepath, 'w') as f:
+        for c in cmds_list:
+            f.write(c)
+
+
+def deface(output_dir, modality, scans_list):
+    cmds_list = []
+    for scan in scans_list:
+        entities = scan.name.split('_')
+        acq = [i.split('-')[1] for i in entities if i.startswith('acq-')]
+        if acq:
+            subj_outdir = output_dir.joinpath(entities[0], entities[1], modality, acq[0])
+        else:
+            subj_outdir = output_dir.joinpath(entities[0], entities[1], modality)
+
+        # filename without the extensions
+        prefix = scan.name.split('.')[0]
+
+        # make output directory within the subject directory for afni
+        mkdir_cmd = f"mkdir -p {subj_outdir}"
+
+        # afni refacer command
+        refacer = f"@afni_refacer_run -input {scan} -mode_deface -no_clean -prefix {fspath(subj_outdir.joinpath(prefix))}"
+
+        # each swarm line times its own run and echoes the runtime in seconds
+        full_cmd = ' ; '.join(
+            ["START=`date +%s`", mkdir_cmd, refacer, "STOP=`date +%s`", "RUNTIME=$((STOP-START))",
+             "echo ${RUNTIME}"]) + '\n'
+
+        cmds_list.append(full_cmd)
+
+    return cmds_list
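+
+
+# For reference, one generated swarm line looks roughly like this
+# (illustrative; the sub-01 paths are hypothetical):
+#   START=`date +%s` ; mkdir -p /out/sub-01/ses-01/anat ; \
+#   @afni_refacer_run -input /in/sub-01/ses-01/anat/sub-01_ses-01_T1w.nii.gz \
+#   -mode_deface -no_clean -prefix /out/sub-01/ses-01/anat/sub-01_ses-01_T1w ; \
+#   STOP=`date +%s` ; RUNTIME=$((STOP-START)) ; echo ${RUNTIME}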
+
+
+def preprocess_facemask(fmask_path):
+    # NOTE: not called in this script; the registration step
+    # (03_other_scans_registration.py) uses its own copy of this helper.
+    prefix = fmask_path.parent.joinpath('afni_facemask')
+    defacemask = fmask_path.parent.joinpath('afni_defacemask.nii.gz')
+
+    # split the 4D volume
+    c1 = f"fslroi {fmask_path} {prefix} 1 1"
+
+    # arithmetic on the result from above
+    c2 = f"fslmaths {prefix}.nii.gz -abs -binv {defacemask}"
+    print(f"Splitting the facemask volume at {fmask_path} and binarizing the resulting volume... \n"
+          f"{run_command('; '.join([c1, c2]))}")
+    if not defacemask.exists():
+        raise FileNotFoundError("Cannot find the binarized facemask. Please check your commands.")
+    return defacemask
+
+
+def find_primary_t1(t1s_list, criterion_list):
+    # NOTE: not called in this script; find_scans() below simply picks the
+    # first non-sourcedata T1w scan as the primary.
+    for t1 in t1s_list:
+        if any(entity in criterion_list for entity in t1.name.split('_')):
+            return t1
+    return None
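+
+
+# t1s_to_others_mapping.json, written by main() below, has this shape
+# (subject/session IDs and paths are hypothetical):
+# {
+#     "sub-01": {
+#         "ses-01": {
+#             "primary_t1": "/in/sub-01/ses-01/anat/sub-01_ses-01_T1w.nii.gz",
+#             "other_scans": ["/in/sub-01/ses-01/anat/sub-01_ses-01_T2w.nii.gz"]
+#         }
+#     }
+# }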
+
+
+def find_scans(subj_sess_paths_dict):
+    """
+    Find all the T1w and corresponding non-T1w scans for each
+    subject and session.
+
+    :param subj_sess_paths_dict: Dictionary mapping subject IDs to lists
+    of paths to their anatomical scans.
+
+    :return paths_dict: Nested default dictionary with T1s and
+    their associated non-T1w scans' info.
+    :return t1_not_found: List of subject IDs with no usable T1w scan.
+    """
+    paths_dict = defaultdict(lambda: defaultdict(dict))
+    t1_not_found = []
+    for subjid, scans_list in subj_sess_paths_dict.items():
+        sess = scans_list[0].name.split('_')[1]
+        t1_suffix = ('T1w.nii.gz', 'T1w.nii')
+        t1s = [s for s in scans_list if s.name.endswith(t1_suffix)]
+        possible_primaries = sorted([t1 for t1 in t1s if 'sourcedata' not in t1.parts])
+        if not possible_primaries:
+            t1_not_found.append(subjid)
+            primary_t1 = "n/a"
+        else:
+            primary_t1 = possible_primaries[0]
+        paths_dict[subjid][sess][primary_t1] = [s for s in scans_list if s != primary_t1]
+    return paths_dict, t1_not_found
+
+
+def main():
+    # get command line arguments
+    input, output, logdir = get_args()
+
+    # track missing files and directories
+    missing = dict()
+
+    # generate a t1 to other scans mapping
+    subj_sess_paths = list(input.glob('sub-*/ses-*')) + list(input.glob('sourcedata/sub-*/ses-*'))
+    subj_sess_paths_dict = defaultdict(list)
+    for subj_sess_path in subj_sess_paths:
+        subjid = subj_sess_path.parent.name
+        subj_sess_paths_dict[subjid].extend(list(subj_sess_path.glob('anat/*nii*')))
+
+    mapping_dict, missing_t1s = find_scans(subj_sess_paths_dict)
+
+    # flat list of primary T1s across all subjects and sessions
+    t1_list = [k2 for i in mapping_dict.keys() for k1, v1 in mapping_dict[i].items()
+               for k2, v2 in mapping_dict[i][k1].items() if k2 != "n/a"]
+    print(f"Found {len(t1_list)} primary T1w scans.")
+
+    # write defacing commands to a swarm file
+    defacing_cmds = deface(output, 'anat', t1_list)
+    write_cmds_to_file(defacing_cmds, logdir.joinpath(f'defacing_commands_{input.name}.swarm'))
+
+    # writing missing info to file
+    missing["T1w scans"] = missing_t1s
+    with open(logdir.joinpath('subj_sess_missing_t1s.json'), 'w') as f:
+        json.dump(missing, f, indent=4)
+
+    # writing mapping_dict to file
+    human_readable_mapping_dict = defaultdict(lambda: defaultdict(dict))
+
+    for subjid, _ in mapping_dict.items():
+        for sess, scans in mapping_dict[subjid].items():
+            for t1, others in mapping_dict[subjid][sess].items():
+                if t1 != "n/a":
+                    human_readable_mapping_dict[subjid][sess]["primary_t1"] = str(t1)
+                    human_readable_mapping_dict[subjid][sess]["other_scans"] = [str(other) for other in others]
+    with open(logdir.joinpath('t1s_to_others_mapping.json'), 'w') as map_f:
+        json.dump(human_readable_mapping_dict, map_f, indent=4)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/imaging_data_prep_scripts/mri_defacing_scripts/02_restructure_defaced_tree.py b/imaging_data_prep_scripts/mri_defacing_scripts/02_restructure_defaced_tree.py
new file mode 100644
index 0000000..af6b0eb
--- /dev/null
+++ b/imaging_data_prep_scripts/mri_defacing_scripts/02_restructure_defaced_tree.py
@@ -0,0 +1,59 @@
+import argparse
+import subprocess
+from os import fspath
+from pathlib import Path
+
+
+def get_args():
+    parser = argparse.ArgumentParser(
+        description='Remove unnecessary AFNI refacer outputs and rename its work directories.')
+
+    parser.add_argument('-defaced-root', action='store', dest='output',
+                        help='Path to defaced scans root directory.')
+
+    args = parser.parse_args()
+    return Path(args.output)
+
+
+def run_command(cmdstr):
+    p = subprocess.Popen(cmdstr, bufsize=1, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True,
+                         encoding='utf8', shell=True)
+
+    stdout_capture = []
+    # read until EOF so output produced after the final poll isn't lost
+    for line in p.stdout:
+        line = line.strip()
+        if line:
+            stdout_capture.append(line)
+    p.wait()
+
+    return stdout_capture
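+
+
+# @afni_refacer_run leaves a work directory next to its outputs; this script
+# assumes names like __work.<prefix> (the exact prefix is dataset-specific,
+# e.g. __work.sub-01_ses-01_T1w). main() below deletes the refacer's sibling
+# outputs, which are no longer needed, and renames each work directory to
+# workdir_<prefix> for the registration step that follows.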
+
+
+def main():
+    # get command line arguments
+    output = get_args()
+
+    find_afni_workdirs = run_command(f"find {output} -name '__work*'")
+    for workdir in find_afni_workdirs:
+        workdir = Path(workdir)
+        prefix = workdir.name.split('.')[1]
+        to_be_deleted_files = [fspath(f) for f in list(workdir.parent.glob('*')) if not f.name.startswith('__work')]
+        remove_files = f"rm -rf {' '.join(to_be_deleted_files)}"
+        rename = f"mv {workdir} {workdir.parent.joinpath('workdir_' + prefix)}"
+        cmd = '; '.join([remove_files, rename])
+        print("Removing unwanted files and renaming AFNI workdirs...")
+        cmd_out = run_command(cmd)
+        for line in cmd_out:
+            print(line)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/imaging_data_prep_scripts/mri_defacing_scripts/03_other_scans_registration.py b/imaging_data_prep_scripts/mri_defacing_scripts/03_other_scans_registration.py
new file mode 100644
index 0000000..fb602d2
--- /dev/null
+++ b/imaging_data_prep_scripts/mri_defacing_scripts/03_other_scans_registration.py
@@ -0,0 +1,182 @@
+import argparse
+import json
+import subprocess
+from collections import defaultdict
+from os import fspath
+from pathlib import Path
+
+
+def get_args():
+    parser = argparse.ArgumentParser(
+        description='Generate a swarm command file to register non-T1w scans to their defaced T1w scan '
+                    'and mask them with the T1w defacemask.')
+
+    parser.add_argument('-in', action='store', dest='input',
+                        help='Path to input BIDS dataset.')
+
+    parser.add_argument('-out', action='store', dest='output',
+                        help='Path to output dataset.')
+
+    parser.add_argument('-m', '--mapping-file', action='store', dest='map',
+                        help='JSON file with mapping of primary to other scans.')
+
+    parser.add_argument('-s', '--script-output-files', action='store', dest='logdir',
+                        help='Directory path where log files and other metadata are stored.')
+
+    args = parser.parse_args()
+    return Path(args.input), Path(args.output), Path(args.map), Path(args.logdir)
+
+
+def run_command(cmdstr):
+    p = subprocess.Popen(cmdstr, bufsize=1, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True,
+                         encoding='utf8', shell=True)
+    # wait for the command to finish and return its combined stdout/stderr
+    out, _ = p.communicate()
+    return out.strip()
+
+
+def write_cmds_to_file(cmds_list, filepath):
+    with open(filepath, 'w') as f:
+        for c in cmds_list:
+            f.write(c)
+
+
+def preprocess_facemask(fmask_path):
+    prefix = fmask_path.parent.joinpath('afni_facemask')
+    defacemask = fmask_path.parent.joinpath('afni_defacemask.nii.gz')
+
+    # load fsl module
+    c0 = "module load fsl"
+
+    # split the 4D volume
+    c1 = f"fslroi {fmask_path} {prefix} 1 1"
+
+    # arithmetic on the result from above
+    c2 = f"fslmaths {prefix}.nii.gz -abs -binv {defacemask}"
+    print(f"Splitting the facemask volume at {fmask_path} and binarizing the resulting volume... \n"
+          f"{run_command('; '.join([c0, c1, c2]))}")
+    if not defacemask.exists():
+        raise FileNotFoundError(f"Cannot find the binarized facemask {defacemask}. "
+                                f"Please check that the fsl module is loaded before running the script again.")
+    return defacemask
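+
+
+# The helper above effectively runs, inside each renamed work directory
+# (paths hypothetical):
+#   fslroi <workdir>/tmp.05.sh_t2a_thr.nii <workdir>/afni_facemask 1 1
+#   fslmaths <workdir>/afni_facemask.nii.gz -abs -binv <workdir>/afni_defacemask.nii.gz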
+
+
+def registration(input_dir, output_dir, t1_list, scans_dict):
+    sourcedata_maindata_mapping_dict = defaultdict(lambda: defaultdict(list))
+    afni_workdir_not_found = []
+    cmds = []
+    modality = 'anat'
+    for t1 in t1_list:
+        entities = t1.name.split('_')
+        subjid = entities[0]
+        sess = entities[1]
+        others = scans_dict[subjid][sess]["other_scans"]
+        acq = [i.split('-')[1] for i in entities if i.startswith('acq-')]
+        if acq:
+            subj_outdir = output_dir.joinpath(entities[0], entities[1], modality, acq[0])
+        else:
+            subj_outdir = output_dir.joinpath(entities[0], entities[1], modality)
+
+        # find the renamed afni work directory for this t1
+        afni_workdir = list(subj_outdir.glob('work*'))
+
+        if not afni_workdir:
+            afni_workdir_not_found.append(t1.name)
+            continue
+        afni_workdir = afni_workdir[0]
+
+        # separate out sourcedata primary t1s from main data t1s for the mapping file
+        afni_workdir_suffix = afni_workdir.name.split('workdir_')[1]
+        location = 'sourcedata' if 'sourcedata' in t1.parts else 'main'
+        # converting Path to str because the Path type is not JSON serializable
+        if t1.name.split('.')[0] == afni_workdir_suffix:
+            sourcedata_maindata_mapping_dict[location][str(afni_workdir)].append(
+                str(t1.parent.joinpath('tmp.99.result.deface.nii')))
+        else:
+            sourcedata_maindata_mapping_dict[location][str(afni_workdir)].append(str(t1))
+
+        # preprocess facemask
+        raw_facemask_volumes = afni_workdir.joinpath('tmp.05.sh_t2a_thr.nii')
+        t1_mask = preprocess_facemask(raw_facemask_volumes)
+        for other in others:
+            entities = other.split('_')
+
+            # separate out sourcedata "other" scans from main data "other" scans
+            if "sourcedata" in Path(other).parts:
+                sourcedata_maindata_mapping_dict['sourcedata'][str(afni_workdir)].append(str(other))
+            else:
+                sourcedata_maindata_mapping_dict['main'][str(afni_workdir)].append(str(other))
+
+            # resolve the other scan to a full path (a no-op if the mapping file already stores absolute paths)
+            other = input_dir.joinpath(entities[0], entities[1], modality, other)
+            other_prefix = other.name.split('.')[0]
+            other_outdir = afni_workdir.joinpath(other_prefix)
+
+            matrix = f"{other_outdir.joinpath(other_prefix)}_reg.mat"
+            out = f"{other_outdir.joinpath('registered.nii.gz')}"
+            other_mask = f"{other_outdir.joinpath(other_prefix)}_mask.nii.gz"
+            other_defaced = f"{other_outdir.joinpath(other_prefix)}_defaced.nii.gz"
+
+            mkdir_cmd = f"mkdir -p {other_outdir}; cp {other} {other_outdir.joinpath('original.nii.gz')}"
+
+            flirt_cmd = f"flirt -dof 6 -cost mutualinfo -searchcost mutualinfo -in {t1} " \
+                        f"-ref {other} -omat {matrix} -out {out}"
+
+            # t1 mask can be found in the afni work directory
+            applyxfm_cmd = f"flirt -interp nearestneighbour -applyxfm -init {matrix} " \
+                           f"-in {t1_mask} -ref {other} -out {other_mask}"
+
+            mask_cmd = f"fslmaths {other} -mas {other_mask} {other_defaced}"
+            full_cmd = " ; ".join([mkdir_cmd, flirt_cmd, applyxfm_cmd, mask_cmd]) + '\n'
+            cmds.append(full_cmd)
+
+    return cmds, afni_workdir_not_found, sourcedata_maindata_mapping_dict
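+
+
+# Each generated swarm line chains four commands, roughly (paths hypothetical):
+#   mkdir -p <workdir>/<other_prefix>; cp <other> <workdir>/<other_prefix>/original.nii.gz ;
+#   flirt -dof 6 -cost mutualinfo -searchcost mutualinfo -in <t1> -ref <other> \
+#         -omat <other_prefix>_reg.mat -out registered.nii.gz ;
+#   flirt -interp nearestneighbour -applyxfm -init <other_prefix>_reg.mat \
+#         -in afni_defacemask.nii.gz -ref <other> -out <other_prefix>_mask.nii.gz ;
+#   fslmaths <other> -mas <other_prefix>_mask.nii.gz <other_prefix>_defaced.nii.gz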
+
+
+def main():
+    # get command line arguments
+    input, output, map, logdir = get_args()
+
+    # load the primary-to-others mapping generated by 01_defacing_t1s_prep.py
+    with open(fspath(map), 'r') as f:
+        mapping_dict = json.load(f)
+
+    # list of primary t1s
+    primary_list = [input.joinpath(subjid, sess, 'anat', v2) for subjid in mapping_dict.keys() for sess, v1 in
+                    mapping_dict[subjid].items() for k2, v2 in mapping_dict[subjid][sess].items() if
+                    k2 == "primary_t1"]
+
+    # write registration commands to a swarm file
+    registration_cmds, missing_afni_wrkdirs, sourcedata_maindata_mapping_dict = registration(input, output,
+                                                                                             primary_list,
+                                                                                             mapping_dict)
+    write_cmds_to_file(registration_cmds, logdir.joinpath(f'registration_commands_{input.name}.swarm'))
+
+    # write sourcedata files mapping to json file
+    with open(logdir.joinpath('sourcedata_maindata_files_mapping.json'), 'w') as m:
+        json.dump(sourcedata_maindata_mapping_dict, m, indent=4)
+
+    # writing missing info to file
+    missing = dict()
+    missing["afni workdirs"] = missing_afni_wrkdirs
+    with open(logdir.joinpath('missing_afni_workdirs.json'), 'w') as f:
+        json.dump(missing, f, indent=4)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/imaging_data_prep_scripts/mri_defacing_scripts/04_segregate_sourcedata_from_main.py b/imaging_data_prep_scripts/mri_defacing_scripts/04_segregate_sourcedata_from_main.py
new file mode 100644
index 0000000..cc8f67f
--- /dev/null
+++ b/imaging_data_prep_scripts/mri_defacing_scripts/04_segregate_sourcedata_from_main.py
@@ -0,0 +1,117 @@
+import argparse
+import json
+import subprocess
+from pathlib import Path
+
+
+def parse_arguments():
+    parser = argparse.ArgumentParser(
+        description="Segregate defaced sourcedata scans from the main BIDS data.")
+
+    parser.add_argument("-i", "--input-dir", type=Path, action='store', dest='inputdir',
+                        help="Path to BIDS-like directory with defaced scans.")
+    parser.add_argument("-o", "--output-dir", type=Path, action='store', dest='outdir',
+                        help="Path to sourcedata directory with defaced scans.")
+    parser.add_argument("-m", "--mapping-file", type=Path, action='store', dest='mapping_file',
+                        help="Path to the AFNI work directory to sourcedata scans mapping file.")
+
+    args = parser.parse_args()
+    return args.inputdir, args.outdir, args.mapping_file
+
+
+def run_shell_cmd(cmdstr):
+    pipe = subprocess.Popen(cmdstr, bufsize=1, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                            universal_newlines=True, encoding='utf8', shell=True)
+    # wait for the command to finish and print its combined stdout/stderr
+    out, _ = pipe.communicate()
+    print(out.strip())
+
+
+def copy_over_sourcedata_files(mapping_data, outdir):
+    # NOTE: not called in this script; main() below handles both the
+    # sourcedata and main-data cases.
+    for afni_workdir_path, srcdata_scans in mapping_data.items():
+        for scan in srcdata_scans:
+            # converting from str type to Path type variable
+            scan = Path(scan)
+            afni_workdir_path = Path(afni_workdir_path)
+
+            # constructing src and dest file paths for the copy command
+            if not scan.name.startswith('tmp'):
+                defaced_dir = scan.name.split('.')[0]
+                entities = scan.name.split('_')
+                subj_sess_outdir = outdir.joinpath(entities[0], entities[1], 'anat')
+                src = afni_workdir_path.joinpath(defaced_dir, defaced_dir + '_defaced.nii.gz')  # path to defaced image
+                dest = subj_sess_outdir.joinpath(defaced_dir + '.nii.gz')
+            else:
+                scan_name_entities = afni_workdir_path.name.split('_')  # first element of this list will be "workdir"
+                subj_sess_outdir = outdir.joinpath(scan_name_entities[1], scan_name_entities[2], 'anat')
+                src = afni_workdir_path.joinpath(scan.name)  # path to defaced image
+                dest = subj_sess_outdir.joinpath('_'.join(scan_name_entities[1:]) + '.nii.gz')
+
+            if not dest.parent.exists():
+                dest.parent.mkdir(parents=True)  # make destination directory if it doesn't already exist
+            cp_cmd = f"cp {src} {dest}"
+            run_shell_cmd(cp_cmd)
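+
+
+# sourcedata_maindata_files_mapping.json, produced by the registration step
+# (03_other_scans_registration.py), has this shape (paths hypothetical):
+# {
+#     "main": {
+#         "/out/sub-01/ses-01/anat/workdir_sub-01_ses-01_T1w": [
+#             "/out/sub-01/ses-01/anat/workdir_sub-01_ses-01_T1w/tmp.99.result.deface.nii",
+#             "/in/sub-01/ses-01/anat/sub-01_ses-01_T2w.nii.gz"
+#         ]
+#     },
+#     "sourcedata": {}
+# }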
+
+
+def main():
+    inputdir, outdir, mapping_file = parse_arguments()
+
+    # load sourcedata mapping file as a dictionary
+    with open(mapping_file, 'r') as f:
+        mapping_data = json.load(f)
+
+    for scan_location, _ in mapping_data.items():
+        for afni_workdir_path, scans_list in mapping_data[scan_location].items():
+            for scan in scans_list:
+                # converting from str type to Path type variable
+                scan = Path(scan)
+                afni_workdir_path = Path(afni_workdir_path)
+
+                # constructing src and dest file paths for the copy command
+                if not scan.name.startswith('tmp'):
+                    defaced_dir = scan.name.split('.')[0]
+                    entities = scan.name.split('_')
+                    if scan_location == "sourcedata":
+                        subj_sess_outdir = outdir.joinpath('sourcedata', 'auxiliary_scans', entities[0], entities[1],
+                                                           'anat')
+                    else:
+                        subj_sess_outdir = outdir.joinpath(entities[0], entities[1], 'anat')
+
+                    src = afni_workdir_path.joinpath(defaced_dir,
+                                                     defaced_dir + '_defaced.nii.gz')  # path to defaced image
+                    dest = subj_sess_outdir.joinpath(defaced_dir + '.nii.gz')  # path in bids tree with defaced images
+                else:
+                    # first element of this list will be "workdir"
+                    scan_name_entities = afni_workdir_path.name.split('_')
+                    if scan_location == "sourcedata":
+                        subj_sess_outdir = outdir.joinpath('sourcedata', 'auxiliary_scans', scan_name_entities[1],
+                                                           scan_name_entities[2],
+                                                           'anat')
+                    else:
+                        subj_sess_outdir = outdir.joinpath(scan_name_entities[1], scan_name_entities[2], 'anat')
+                    src = afni_workdir_path.joinpath(scan.name)  # path to defaced image
+                    dest = subj_sess_outdir.joinpath('_'.join(scan_name_entities[1:]) + '.nii')
+
+                if not dest.parent.exists():
+                    dest.parent.mkdir(parents=True)  # make destination directory if it doesn't already exist
+                cp_cmd = f"cp {src} {dest}"
+                run_shell_cmd(cp_cmd)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/imaging_data_prep_scripts/mri_defacing_scripts/README.md b/imaging_data_prep_scripts/mri_defacing_scripts/README.md
new file mode 100644
index 0000000..f37b220
--- /dev/null
+++ b/imaging_data_prep_scripts/mri_defacing_scripts/README.md
@@ -0,0 +1,53 @@
+# Protocol 17M0181 Defacing Notes
+
+1. Run [@afni_refacer_run](https://afni.nimh.nih.gov/pub/dist/doc/htmldoc/tutorials/refacer/refacer_run.html) on **one
+   T1w scan per subject per session** for a given BIDS dataset. Script - `01_defacing_t1s_prep.py`. For Protocol
+   17M0181, as of 2022-09-26, there is one session per subject and every subject has at least one T1w image, so you
+   shouldn't see missing T1s after Step 1 finishes.
+2. Start a swarm job with the `defacing_commands_<input dataset name>.swarm` file that's one of the outputs from
+   Step 1 above. An example command:
+
+   ```bash
+   swarm -f defacing_commands_dcm2bids_2022_05_12.swarm --module afni --job-name afni-refacer-17M0181 --merge-output --logdir swarm_log_dcm2bids/
+   ```
+
+3. Remove unnecessary files and rename AFNI work directories with the `02_restructure_defaced_tree.py` script.
+4. Register "other" (non-T1w) scans to their session's T1w scan and then mask the registered version with the T1w
+   defacemask. Script - `03_other_scans_registration.py`
+5. Start a swarm job with `registration_commands_<input dataset name>.swarm` to do Step 4. Example command -
+
+   ```bash
+   swarm -f registration_commands_dcm2bids_2022_05_12.swarm --module fsl,afni --job-name non-t1-reg --merge-output --logdir swarm_log_dcm2bids/
+   ```
+
+6. Use [VisualQC's](https://raamana.github.io/visualqc/gallery_defacing.html) `vqcdeface` and `vqcalign` to visually
+   inspect the defaced images and correct/flag any that fail the QC criteria.
+7. Run `04_segregate_sourcedata_from_main.py` to rearrange the directory of defaced scans into a BIDS-valid tree
+   (see the example invocation below).
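+
+   An example invocation for Step 7 (paths are hypothetical; the flags are as defined in the script):
+
+   ```bash
+   python 04_segregate_sourcedata_from_main.py \
+       -i /data/<project>/defacing_outputs \
+       -o /data/<project>/defaced_bids \
+       -m sourcedata_maindata_files_mapping.json
+   ```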
+
+## VisualQC Deface Prep Commands
+
+To prepare to QC the defaced primary scans, we need to generate 3D renders of the defaced scans. The
+script `vqc_generate_renders.py` can help with that.
+
+- **Launching VisualQC**
+
+  An example command to set up VisualQC deface for the autism subtypes dataset:
+  ```bash
+  vqcdeface -u /data/NIMH_scratch/defacing_comparisons/autism_subtypes/defacing_outputs \
+  -m tmp.00.INPUT_iso_1mm.nii.gz -d tmp.99.result.deface_iso_1mm.nii.gz \
+  -r tmp.99.result.deface_iso_1mm_render \
+  -o visualqc -i as_visualqc_arsh.txt
+  ```
+
+The Data Science and Sharing Team's defacing workflow is currently under development. For the most recent
+documentation, please visit the [DSST Defacing Workflow Repository](https://github.com/nih-fmrif/dsst-defacing-sop).
diff --git a/imaging_data_prep_scripts/mri_defacing_scripts/vqc_generate_renders.py b/imaging_data_prep_scripts/mri_defacing_scripts/vqc_generate_renders.py
new file mode 100755
index 0000000..5e55f28
--- /dev/null
+++ b/imaging_data_prep_scripts/mri_defacing_scripts/vqc_generate_renders.py
@@ -0,0 +1,56 @@
+import argparse
+import subprocess
+from pathlib import Path
+
+
+def get_args():
+    parser = argparse.ArgumentParser(description='Generate 3D renders of defaced scans for VisualQC review.')
+    parser.add_argument('-in', action='store', type=Path,
+                        dest='inputdir', help='Path to input directory.')
+    parser.add_argument('-rotations', action='store', type=str,
+                        dest='rots', help='Path to text file with rotations.')
+    parser.add_argument('-vqc_ids', action='store', type=Path,
+                        dest='idlist', help='Path to visualqc id_list file.')
+
+    return parser.parse_args()
+
+
+def run_command(cmdstr):
+    p = subprocess.Popen(cmdstr, bufsize=1, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                         universal_newlines=True, encoding='utf8', shell=True)
+    # wait for the command to finish and return its combined stdout/stderr
+    out, _ = p.communicate()
+    return out.strip()
+
+
+def main():
+    args = get_args()
+
+    # processing rotations list: one comma-separated yaw,pitch,roll triple per line
+    with open(args.rots, 'r') as f1:
+        rotations = [list(line.strip('\n').split(',')) for line in f1.readlines()]
+
+    # processing idlist: one work directory name per line
+    with open(args.idlist, 'r') as f2:
+        workdirs = [line.rstrip() for line in f2.readlines()]
+
+    for workdir in workdirs:
+        scan = args.inputdir.joinpath(workdir, 'tmp.99.result.deface_iso_1mm.nii.gz')
+        subj_cmds = []
+        print(scan.parent.name)  # show which scan directory is being rendered
+        for idx, rot in enumerate(rotations):
+            yaw, pitch, roll = rot[0], rot[1], rot[2]
+            outfile = scan.parent.joinpath(scan.name.split('.nii.gz')[0] + '_render_' + str(idx) + '.png')
+            cmd = f"fsleyes render --scene 3d -rot {yaw} {pitch} {roll} --outfile {outfile} {scan} -dr 20 250 -in spline -bf 0.3 -r 100 -ns 500"
+            subj_cmds.append(cmd)
+        print(run_command('; '.join(subj_cmds)))
+        print('********************************************************')
+
+
+if __name__ == "__main__":
+    main()
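+
+# Example invocation (hypothetical paths; flags as defined in get_args above):
+#   python vqc_generate_renders.py -in /data/<project>/defacing_outputs \
+#       -rotations rotations.txt -vqc_ids visualqc/id_list.txt
+# where rotations.txt holds one yaw,pitch,roll triple per line, e.g. "90,0,0".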