diff --git a/CHANGELOG.md b/CHANGELOG.md index c5434d31d7..a4c260a8d1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,36 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [1.8.3] - 2022-02-11 + +### Added +- Added XCP-style quality control file +- Added RBC-options pipeline preconfiguration +- Added `engine.log` (when verbose debugging is on) +- Added ability to fix random seed for + - `antsAI` + - `antsRegistration` + - `Atropos` (fixed but not specified) + - `fslmaths` + - `mri_vol2vol` + - `recon-all` +- Added ability to use lateral ventricles mask in place of cerebrospinal fluid mask when when segmentation is Off, specifically for the rodent pipeline, but works on any dataset when segmentation is off + +### Changed +- In a given pipeline configuration, segmentation probability maps and binary tissue masks are warped to template space, and those warped masks are included in the output directory + - if `registration_workflows['functional_registration']['EPI_registration']['run segmentation']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `EPI_Template` + + and/or + - if `registration_workflows['anatomical_registration']['run']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `T1_Template` +- Renamed connectivity matrices from `*_connectome.tsv` to `*_correlations.tsv` +- Moved some ephemeral logging statements into `pypeline.log` + +### Fixed +- Fixed [bug](https://github.com/FCP-INDI/C-PAC/issues/1638) in which working connectivity matrix filepaths were generated incorrectly, preventing generating matrices depending on container bindings +- Fixed broken links in README +- Fixed [bug](https://github.com/FCP-INDI/C-PAC/issues/1575) in which 
anatomical-only configurations required functional data directories +- Fixed [bug](https://github.com/FCP-INDI/C-PAC/issues/1532) in which nuisance regressors would crash when segmentation is off and no CSF mask is provided + ## [1.8.2] - 2021-12-02 ### Added @@ -41,5 +71,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 See [Version 1.8.1 Beta](https://fcp-indi.github.io/docs/user/release_notes/v1.8.1) for release notes for v1.8.1 and [Release Notes](https://fcp-indi.github.io/docs/user/release_notes) for all release notes back to v0.1.1. -[unreleased]: https://github.com/FCP-INDI/C-PAC/compare/v1.8.1...develop +[1.8.3]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.3 +[1.8.2]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.2 [1.8.1]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.1 diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 19c011a01b..84c0d861fe 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -687,22 +687,19 @@ def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def niworkflows_ants_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # Skull-stripping using niworkflows-ants - anat_skullstrip_ants = init_brain_extraction_wf(tpl_target_path= - cfg.anatomical_preproc[ - 'brain_extraction'][ - 'niworkflows-ants'][ - 'template_path'], - tpl_mask_path= - cfg.anatomical_preproc[ - 'brain_extraction'][ - 'niworkflows-ants'][ - 'mask_path'], - tpl_regmask_path= - cfg.anatomical_preproc[ - 'brain_extraction'][ - 'niworkflows-ants'][ - 'regmask_path'], - name='anat_skullstrip_ants') + anat_skullstrip_ants = init_brain_extraction_wf( + tpl_target_path=cfg.anatomical_preproc['brain_extraction'][ + 'niworkflows-ants'][ + 'template_path'], + tpl_mask_path=cfg.anatomical_preproc['brain_extraction'][ + 'niworkflows-ants'][ + 'mask_path'], + tpl_regmask_path=cfg.anatomical_preproc['brain_extraction'][ + 'niworkflows-ants'][ + 
'regmask_path'], + name='anat_skullstrip_ants', + atropos_use_random_seed=cfg.pipeline_setup['system_config'][ + 'random_seed'] is None) if strat_pool.check_rpool('desc-preproc_T1w') or \ strat_pool.check_rpool('desc-reorient_T1w') or \ @@ -3191,7 +3188,7 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(node, out, merge_t1_acpc_to_list, 'in3') merge_t1_acpc = pe.Node(interface=fslMerge(), - name='merge_t1_acpc') + name=f'merge_t1_acpc_{pipe_num}') merge_t1_acpc.inputs.dimension = 't' diff --git a/CPAC/anat_preproc/ants.py b/CPAC/anat_preproc/ants.py index 3cf2f6dffb..368d4b847d 100644 --- a/CPAC/anat_preproc/ants.py +++ b/CPAC/anat_preproc/ants.py @@ -12,7 +12,7 @@ # general purpose from collections import OrderedDict -from multiprocessing import cpu_count +from logging import getLogger from pkg_resources import resource_filename as pkgr_fn from packaging.version import parse as parseversion, Version @@ -440,6 +440,9 @@ def init_atropos_wf(name='atropos_wf', use_random_seed=use_random_seed), name='01_atropos', n_procs=omp_nthreads, mem_gb=mem_gb) + if not use_random_seed: + getLogger('random').info('%s # (Atropos constant)', atropos.name) + # massage outputs pad_segm = pe.Node(ImageMath(operation='PadImage', op2='%d' % padding), name='02_pad_segm') diff --git a/CPAC/connectome/connectivity_matrix.py b/CPAC/connectome/connectivity_matrix.py index dec92cab1f..115e139069 100644 --- a/CPAC/connectome/connectivity_matrix.py +++ b/CPAC/connectome/connectivity_matrix.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- """Functions for creating connectome connectivity matrices.""" +import os from warnings import warn import numpy as np from nilearn.connectome import ConnectivityMeasure @@ -20,13 +21,11 @@ } -def connectome_name(timeseries, atlas_name, tool, method): +def connectome_name(atlas_name, tool, method): """Helper function to create connectome file filename Parameters ---------- - timeseries : str - path 
to input timeseries atlas_name : str atlas name @@ -41,19 +40,9 @@ def connectome_name(timeseries, atlas_name, tool, method): ------- str """ - method = ''.join(word.capitalize() for word in [tool, method]) - new_filename_parts = [part for part in timeseries.split('_')[:-1][::-1] if - not part.startswith('space-')] - atlas_index = len(new_filename_parts) - 1 - if any(filename_part.startswith('desc-') for filename_part in - new_filename_parts): - for i, filename_part in enumerate(new_filename_parts): - if filename_part.startswith('desc-'): - new_filename_parts[-i] = f'desc-{method}' - atlas_index = -(i - 1) - break - new_filename_parts.insert(atlas_index, f'atlas-{atlas_name}') - return '_'.join([*new_filename_parts[::-1], 'connectome.tsv']) + return os.path.join(os.getcwd(), '_'.join([ + f'atlas-{atlas_name}', f'desc-{tool}{method}', 'connectome.tsv' + ])) def get_connectome_method(method, tool): @@ -111,7 +100,7 @@ def compute_connectome_nilearn(in_rois, in_file, method, atlas_name): numpy.ndarray or NotImplemented """ tool = 'Nilearn' - output = connectome_name(in_file, atlas_name, tool, method) + output = connectome_name(atlas_name, tool, method) method = get_connectome_method(method, tool) if method is NotImplemented: return NotImplemented @@ -156,21 +145,21 @@ def create_connectome_afni(name, method, pipe_num): name='netcorrStripHeader' f'{method}_{pipe_num}') - name_output_node = pe.Node(Function(input_names=['timeseries', - 'atlas_name', + name_output_node = pe.Node(Function(input_names=['atlas_name', 'tool', 'method'], output_names=['filename'], + imports=['import os'], function=connectome_name), - name=f'connectomeName{method}_{pipe_num}') + name=f'connectomeName{method}_{pipe_num}', + as_module=True) name_output_node.inputs.tool = 'Afni' wf.connect([ (inputspec, timeseries_correlation, [('in_rois', 'in_rois'), ('in_file', 'in_file'), ('mask', 'mask')]), - (inputspec, name_output_node, [('in_file', 'timeseries'), - ('atlas_name', 'atlas_name'), + 
(inputspec, name_output_node, [('atlas_name', 'atlas_name'), ('method', 'method')]), (timeseries_correlation, strip_header_node, [ ('out_corr_matrix', 'in_file')]), diff --git a/CPAC/cwas/tests/test_cwas.py b/CPAC/cwas/tests/test_cwas.py index 8d3ba59d4a..2f1176b663 100755 --- a/CPAC/cwas/tests/test_cwas.py +++ b/CPAC/cwas/tests/test_cwas.py @@ -78,7 +78,8 @@ def run_cwas(self): # Read in list of subject functionals subjects_list = [ - l.strip().strip('"') for l in open(sfile).readlines() # noqa E741 + l.strip().strip('"') for # noqa: E741 + l in open(sfile).readlines() # pylint: disable=consider-using-with ] # Read in design/regressor file @@ -93,7 +94,7 @@ def run_cwas(self): c.inputs.inputspec.f_samples = nperms c.inputs.inputspec.parallel_nodes = 4 # c.base_dir = op.join(obase, 'results_fs%i_pn%i' % \ - # (c.inputs.inputspec.f_samples, c.inputs.inputspec.parallel_nodes)) # noqa E501 + # (c.inputs.inputspec.f_samples, c.inputs.inputspec.parallel_nodes)) # noqa: E501 # pylint: disable=line-too-long c.base_dir = op.join(self.base, "results_%s.py" % self.name) # export MKL_NUM_THREADS=X # in command line diff --git a/CPAC/func_preproc/func_ingress.py b/CPAC/func_preproc/func_ingress.py index ba8ccd92ef..f6f9ccee6d 100644 --- a/CPAC/func_preproc/func_ingress.py +++ b/CPAC/func_preproc/func_ingress.py @@ -1,5 +1,5 @@ from nipype import logging -logger = logging.getLogger('workflow') +logger = logging.getLogger('nipype.workflow') from CPAC.pipeline import nipype_pipeline_engine as pe diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index c578592f6d..ea24c04798 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1,7 +1,7 @@ from nipype import logging from nipype.interfaces import ants -logger = logging.getLogger('workflow') +logger = logging.getLogger('nipype.workflow') from CPAC.pipeline import nipype_pipeline_engine as pe import nipype.interfaces.fsl as fsl diff --git a/CPAC/info.py 
b/CPAC/info.py index 85ab4a670e..be909bbd0e 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -10,7 +10,7 @@ # version _version_major = 1 _version_minor = 8 -_version_micro = 2 +_version_micro = 3 _version_extra = '' @@ -87,15 +87,15 @@ def get_cpac_gitversion(): Website ------- -CPAC website is located here: http://fcp-indi.github.com/ +CPAC website is located here: https://fcp-indi.github.io/ Documentation ------------- -User documentation can be found here: http://fcp-indi.github.com/docs/user/index.html +User documentation can be found here: https://fcp-indi.github.io/docs/user/index.html -Developer documention can be found here: http://fcp-indi.github.com/docs/developer/index.html +Developer documention can be found here: https://fcp-indi.github.io/docs/developer/index.html Documentation pertaining to this latest release can be found here: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.3.0 @@ -161,6 +161,5 @@ def get_cpac_gitversion(): "simplejson==3.15.0", "traits==4.6.0", "PyBASC==0.4.5", - "pathlib==1.0.1", "voluptuous>=0.12.0", ] diff --git a/CPAC/network_centrality/pipeline.py b/CPAC/network_centrality/pipeline.py index 6146ab27d2..87b456d8ff 100644 --- a/CPAC/network_centrality/pipeline.py +++ b/CPAC/network_centrality/pipeline.py @@ -9,7 +9,7 @@ from CPAC.network_centrality.utils import merge_lists, check_centrality_params from CPAC.pipeline.schema import valid_options -logger = logging.getLogger('workflow') +logger = logging.getLogger('nipype.workflow') def connect_centrality_workflow(workflow, c, resample_functional_to_template, diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 16dd0ac6d3..c80e1b8c3b 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -33,8 +33,42 @@ from CPAC.seg_preproc.utils import erosion, mask_erosion from CPAC.utils.datasource import check_for_s3 -from .bandpass import (bandpass_voxels, afni_1dBandpass) from CPAC.utils.utils import check_prov_for_regtool +from .bandpass import 
(bandpass_voxels, afni_1dBandpass) + + +def choose_nuisance_blocks(cfg, generate_only=False): + ''' + Function to handle selecting appropriate blocks based on + existing config and resource pool + + Parameters + ---------- + cfg : CPAC.utils.configuration.Configuration + + generate_only : boolean + generate but don't run + + Returns + ------- + nuisance : list + ''' + nuisance = [] + to_template_cfg = cfg.registration_workflows['functional_registration'][ + 'func_registration_to_template'] + out = {'default': ("desc-preproc_bold", ["desc-preproc_bold", "bold"]), + 'single_step_resampling': ("desc-preproc_bold", "desc-stc_bold"), + 'abcd': ("desc-preproc_bold", "bold") + }[to_template_cfg['apply_transform']['using']] + if 'T1_template' in to_template_cfg['target_template']['using']: + nuisance.append((nuisance_regressors_generation, out)) + if 'EPI_template' in to_template_cfg['target_template']['using']: + nuisance.append((nuisance_regressors_generation_EPItemplate, out)) + + if not generate_only: + nuisance.append((nuisance_regression, out)) + + return nuisance def erode_mask(name, segmentmap=True): @@ -109,29 +143,37 @@ def gather_nuisance(functional_file_path, custom_file_paths=None, censor_file_path=None): """ - Gathers the various nuisance regressors together into a single tab separated values file that is an appropriate for - input into 3dTproject - - :param functional_file_path: path to file that the regressors are being calculated for, is used to calculate - the length of the regressors for error checking and in particular for calculating spike regressors - :param output_file_path: path to output TSV that will contain the various nuisance regressors as columns - :param grey_matter_summary_file_path: path to TSV that includes summary of grey matter time courses, e.g. 
output of + Gathers the various nuisance regressors together into a single tab- + separated values file that is an appropriate for input into + 3dTproject + + :param functional_file_path: path to file that the regressors are + being calculated for, is used to calculate the length of the + regressors for error checking and in particular for calculating + spike regressors + :param output_file_path: path to output TSV that will contain the + various nuisance regressors as columns + :param grey_matter_summary_file_path: path to TSV that includes + summary of grey matter time courses, e.g. output of + mask_summarize_time_course + :param white_matter_summary_file_path: path to TSV that includes + summary of white matter time courses, e.g. output of mask_summarize_time_course - :param white_matter_summary_file_path: path to TSV that includes summary of white matter time courses, e.g. output - of mask_summarize_time_course - :param csf_summary_file_path: path to TSV that includes summary of csf time courses, e.g. output - of mask_summarize_time_course - :param acompcor_file_path: path to TSV that includes acompcor time courses, e.g. output - of mask_summarize_time_course - :param tcompcor_file_path: path to TSV that includes tcompcor time courses, e.g. output - of mask_summarize_time_course - :param global_summary_file_path: path to TSV that includes summary of global time courses, e.g. output - of mask_summarize_time_course - :param motion_parameters_file_path: path to TSV that includes motion parameters + :param csf_summary_file_path: path to TSV that includes summary of + csf time courses, e.g. output of mask_summarize_time_course + :param acompcor_file_path: path to TSV that includes acompcor time + courses, e.g. output of mask_summarize_time_course + :param tcompcor_file_path: path to TSV that includes tcompcor time + courses, e.g. output of mask_summarize_time_course + :param global_summary_file_path: path to TSV that includes summary + of global time courses, e.g. 
output of mask_summarize_time_course + :param motion_parameters_file_path: path to TSV that includes + motion parameters :param custom_file_paths: path to CSV/TSV files to use as regressors - :param censor_file_path: path to TSV with a single column with 1's for indices that should be retained and 0's - for indices that should be censored - :return: + :param censor_file_path: path to TSV with a single column with '1's + for indices that should be retained and '0's for indices that + should be censored + :return: out_file (str), censor_indices (list) """ # Basic checks for the functional image @@ -312,6 +354,7 @@ def gather_nuisance(functional_file_path, column_names.append(custom_file_path) nuisance_regressors.append(custom_regressor) + censor_indices = [] # Add spike regressors if selector.get('Censor', {}).get('method') == 'SpikeRegression': @@ -412,12 +455,13 @@ def gather_nuisance(functional_file_path, nuisance_regressors = np.array(nuisance_regressors) np.savetxt(ofd, nuisance_regressors.T, fmt='%.18f', delimiter='\t') - return output_file_path + return output_file_path, censor_indices def create_regressor_workflow(nuisance_selectors, use_ants, ventricle_mask_exist, + csf_mask_exist, all_bold=False, name='nuisance_regressors'): """ @@ -634,6 +678,8 @@ def create_regressor_workflow(nuisance_selectors, Path of residual file in nifti format outputspec.regressors_file_path : string (TSV file) Path of TSV file of regressors used. Column name indicates the regressors included . 
+ outputspec.censor_indices : list + Indices of censored volumes Nuisance Procedure: @@ -697,11 +743,11 @@ def create_regressor_workflow(nuisance_selectors, 'tr', ]), name='inputspec') - outputspec = pe.Node(util.IdentityInterface(fields=['regressors_file_path']), - name='outputspec') + outputspec = pe.Node(util.IdentityInterface( + fields=['regressors_file_path', 'censor_indices']), name='outputspec') functional_mean = pe.Node(interface=afni_utils.TStat(), - name='functional_mean') + name='functional_mean') functional_mean.inputs.options = '-mean' functional_mean.inputs.outputtype = 'NIFTI_GZ' @@ -1084,6 +1130,7 @@ def create_regressor_workflow(nuisance_selectors, pipeline_resource_pool, tissue_regressor_descriptor, regressor_selector, + csf_mask_exist, use_ants=use_ants, ventricle_mask_exist=ventricle_mask_exist, all_bold=all_bold @@ -1356,7 +1403,7 @@ def create_regressor_workflow(nuisance_selectors, 'motion_parameters_file_path', 'custom_file_paths', 'censor_file_path'], - output_names=['out_file'], + output_names=['out_file', 'censor_indices'], function=gather_nuisance, as_module=True ), name="build_nuisance_regressors") @@ -1427,8 +1474,9 @@ def create_regressor_workflow(nuisance_selectors, voxel_nuisance_regressors_merge, "in{}".format(i + 1) ) - nuisance_wf.connect(build_nuisance_regressors, 'out_file', - outputspec, 'regressors_file_path') + nuisance_wf.connect([(build_nuisance_regressors, outputspec, [ + ('out_file', 'regressors_file_path'), + ('censor_indices', 'censor_indices')])]) return nuisance_wf @@ -1712,7 +1760,7 @@ def ICA_AROMA_FSLreg(wf, cfg, strat_pool, pipe_num, opt=None): "inputs": [["desc-preproc_bold", "bold"], "from-bold_to-T1w_mode-image_desc-linear_xfm", "from-T1w_to-template_mode-image_xfm"], - "outputs": ["desc-preproc_bold", + "outputs": ["desc-preproc_bold", "desc-cleaned_bold"]} ''' @@ -1862,8 +1910,8 @@ def ICA_AROMA_FSLEPIreg(wf, cfg, strat_pool, pipe_num, opt=None): } return (wf, outputs) - - + + def ICA_AROMA_ANTsEPIreg(wf, 
cfg, strat_pool, pipe_num, opt=None): ''' Node Block: @@ -1889,7 +1937,7 @@ def ICA_AROMA_ANTsEPIreg(wf, cfg, strat_pool, pipe_num, opt=None): if reg_tool != 'ants': return (wf, None) - + num_cpus = cfg.pipeline_setup['system_config'][ 'max_cores_per_participant'] @@ -1926,7 +1974,7 @@ def ICA_AROMA_ANTsEPIreg(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data('from-EPItemplate_to-bold_mode-image_xfm') wf.connect(node, out, apply_xfm, 'inputspec.transform') - + outputs = { 'desc-preproc_bold': (apply_xfm, 'outputspec.output_image'), 'desc-cleaned_bold': (apply_xfm, 'outputspec.output_image') @@ -1960,7 +2008,7 @@ def erode_mask_T1w(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data('space-T1w_desc-brain_mask') wf.connect(node, out, erode, 'inputspec.brain_mask') - + node, out = strat_pool.get_data(['label-CSF_desc-preproc_mask', 'label-CSF_mask']) wf.connect(node, out, erode, 'inputspec.mask') @@ -2075,7 +2123,7 @@ def erode_mask_WM(wf, cfg, strat_pool, pipe_num, opt=None): erode.inputs.inputspec.erode_prop = cfg.nuisance_corrections[ '2-nuisance_regression']['regressor_masks']['erode_wm'][ 'wm_erosion_prop'] - + erode.inputs.inputspec.mask_erode_mm = cfg.nuisance_corrections[ '2-nuisance_regression']['regressor_masks']['erode_wm'][ 'wm_mask_erosion_mm'] @@ -2083,7 +2131,7 @@ def erode_mask_WM(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data(['label-WM_desc-preproc_mask', 'label-WM_mask']) wf.connect(node, out, erode, 'inputspec.mask') - + node, out = strat_pool.get_data('space-T1w_desc-brain_mask') wf.connect(node, out, erode, 'inputspec.brain_mask') @@ -2092,8 +2140,8 @@ def erode_mask_WM(wf, cfg, strat_pool, pipe_num, opt=None): } return (wf, outputs) - - + + def nuisance_regressors_generation(wf, cfg, strat_pool, pipe_num, opt=None): ''' Node Block: @@ -2118,7 +2166,7 @@ def nuisance_regressors_generation(wf, cfg, strat_pool, pipe_num, opt=None): 
"from-T1w_to-template_mode-image_desc-linear_xfm"), "lateral-ventricles-mask", "TR"], - "outputs": ["regressors"]} + "outputs": ["regressors", "censor-indices"]} ''' use_ants = None @@ -2128,11 +2176,15 @@ def nuisance_regressors_generation(wf, cfg, strat_pool, pipe_num, opt=None): 'from-template_to-T1w_mode-image_desc-linear_xfm') reg_tool = check_prov_for_regtool(xfm_prov) use_ants = reg_tool == 'ants' - + ventricle = strat_pool.check_rpool('lateral-ventricles-mask') + csf_mask = strat_pool.check_rpool(["label-CSF_desc-eroded_mask", + "label-CSF_desc-preproc_mask", + "label-CSF_mask"]) regressors = create_regressor_workflow(opt, use_ants, ventricle_mask_exist=ventricle, + csf_mask_exist = csf_mask, name='nuisance_regressors_' f'{opt["Name"]}_{pipe_num}') @@ -2157,7 +2209,7 @@ def nuisance_regressors_generation(wf, cfg, strat_pool, pipe_num, opt=None): "label-CSF_desc-preproc_mask", "label-CSF_mask"]): node, out = strat_pool.get_data(["label-CSF_desc-eroded_mask", - "label-CSF_desc-preproc_mask", + "label-CSF_desc-preproc_mask", "label-CSF_mask"]) wf.connect(node, out, regressors, 'inputspec.csf_mask_file_path') @@ -2170,10 +2222,10 @@ def nuisance_regressors_generation(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, regressors, 'inputspec.wm_mask_file_path') if strat_pool.check_rpool(["label-GM_desc-eroded_mask", - "label-GM_desc-preproc_mask", + "label-GM_desc-preproc_mask", "label-GM_mask"]): node, out = strat_pool.get_data(["label-GM_desc-eroded_mask", - "label-GM_desc-preproc_mask", + "label-GM_desc-preproc_mask", "label-GM_mask"]) wf.connect(node, out, regressors, 'inputspec.gm_mask_file_path') @@ -2224,7 +2276,8 @@ def nuisance_regressors_generation(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, regressors, 'inputspec.tr') outputs = { - 'regressors': (regressors, 'outputspec.regressors_file_path') + 'regressors': (regressors, 'outputspec.regressors_file_path'), + 'censor-indices': (regressors, 'outputspec.censor_indices') } 
return (wf, outputs) @@ -2249,12 +2302,12 @@ def nuisance_regression(wf, cfg, strat_pool, pipe_num, opt=None): "desc-cleaned_bold", "regressors"]} ''' - + regressor_prov = strat_pool.get_cpac_provenance('regressors') regressor_strat_name = regressor_prov[-1].split('_')[-1] - + for regressor_dct in cfg['nuisance_corrections']['2-nuisance_regression'][ - 'Regressors']: + 'Regressors']: if regressor_dct['Name'] == regressor_strat_name: opt = regressor_dct break @@ -2312,7 +2365,7 @@ def nuisance_regression(wf, cfg, strat_pool, pipe_num, opt=None): elif cfg.nuisance_corrections['2-nuisance_regression'][ 'bandpass_filtering_order'] == 'Before': - + node, out = strat_pool.get_data("desc-preproc_bold") wf.connect(node, out, filt, 'inputspec.functional_file_path') @@ -2328,10 +2381,10 @@ def nuisance_regression(wf, cfg, strat_pool, pipe_num, opt=None): else: node, out = strat_pool.get_data("desc-preproc_bold") wf.connect(node, out, nuis, 'inputspec.functional_file_path') - + outputs = { 'desc-preproc_bold': (nuis, 'outputspec.residual_file_path'), - 'desc-cleaned_bold': (nuis, 'outputspec.residual_file_path') + 'desc-cleaned_bold': (nuis, 'outputspec.residual_file_path'), } return (wf, outputs) @@ -2362,7 +2415,7 @@ def erode_mask_bold(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data('space-bold_desc-brain_mask') wf.connect(node, out, erode, 'inputspec.brain_mask') - + node, out = strat_pool.get_data(['space-bold_label-CSF_desc-preproc_mask', 'space-bold_label-CSF_mask']) wf.connect(node, out, erode, 'inputspec.mask') @@ -2372,8 +2425,8 @@ def erode_mask_bold(wf, cfg, strat_pool, pipe_num, opt=None): } return (wf, outputs) - - + + def erode_mask_boldCSF(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "erode_mask_boldCSF", @@ -2477,7 +2530,7 @@ def erode_mask_boldWM(wf, cfg, strat_pool, pipe_num, opt=None): erode.inputs.inputspec.erode_prop = cfg.nuisance_corrections[ '2-nuisance_regression']['regressor_masks']['erode_wm'][ 'wm_erosion_prop'] 
- + erode.inputs.inputspec.mask_erode_mm = cfg.nuisance_corrections[ '2-nuisance_regression']['regressor_masks']['erode_wm'][ 'wm_mask_erosion_mm'] @@ -2485,7 +2538,7 @@ def erode_mask_boldWM(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data(['space-bold_label-WM_desc-preproc_mask', 'space-bold_label-WM_mask']) wf.connect(node, out, erode, 'inputspec.mask') - + node, out = strat_pool.get_data('space-bold_desc-brain_mask') wf.connect(node, out, erode, 'inputspec.brain_mask') @@ -2512,7 +2565,7 @@ def nuisance_regressors_generation_EPItemplate(wf, cfg, strat_pool, pipe_num, op "framewise-displacement-power", "dvars", ["space-bold_desc-eroded_mask", "space-bold_desc-brain_mask"], - ["space-bold_label-CSF_desc-eroded_mask", "space-bold_label-CSF_desc-preproc_mask", + ["space-bold_label-CSF_desc-eroded_mask", "space-bold_label-CSF_desc-preproc_mask", "space-bold_label-CSF_mask"], ["space-bold_label-WM_desc-eroded_mask", "space-bold_label-WM_desc-preproc_mask", "space-bold_label-WM_mask"], @@ -2522,7 +2575,7 @@ def nuisance_regressors_generation_EPItemplate(wf, cfg, strat_pool, pipe_num, op "from-bold_to-EPItemplate_mode-image_desc-linear_xfm"), "lateral-ventricles-mask", "TR"], - "outputs": ["regressors"]} + "outputs": ["regressors", "censor-indices"]} ''' xfm_prov = strat_pool.get_cpac_provenance( @@ -2531,10 +2584,14 @@ def nuisance_regressors_generation_EPItemplate(wf, cfg, strat_pool, pipe_num, op use_ants = reg_tool == 'ants' ventricle = strat_pool.check_rpool('lateral-ventricles-mask') + csf_mask = strat_pool.check_rpool(["space-bold_label-CSF_desc-eroded_mask", + "space-bold_label-CSF_desc-preproc_mask", + "space-bold_label-CSF_mask"]) regressors = create_regressor_workflow(opt, use_ants, ventricle_mask_exist=ventricle, all_bold=True, + csf_mask_exist = csf_mask, name='nuisance_regressors_' f'{opt["Name"]}_{pipe_num}') @@ -2580,29 +2637,29 @@ def nuisance_regressors_generation_EPItemplate(wf, cfg, strat_pool, pipe_num, op wf.connect(node, 
out, regressors, 'inputspec.lat_ventricles_mask_file_path') - if strat_pool.check_rpool('from-EPItemplate_to-bold_mode-image_desc-linear_xfm'): + if strat_pool.check_rpool('from-EPItemplate_to-bold_mode-image_desc-linear_xfm'): node, out = strat_pool.get_data('from-EPItemplate_to-bold_mode-image_desc-linear_xfm') wf.connect(node, out, regressors, 'inputspec.mni_to_anat_linear_xfm_file_path') wf.connect(node, out, regressors, 'inputspec.anat_to_func_linear_xfm_file_path') - if strat_pool.check_rpool('from-bold_to-EPItemplate_mode-image_desc-linear_xfm'): + if strat_pool.check_rpool('from-bold_to-EPItemplate_mode-image_desc-linear_xfm'): node, out = strat_pool.get_data('from-bold_to-EPItemplate_mode-image_desc-linear_xfm') wf.connect(node, out, regressors, 'inputspec.anat_to_mni_linear_xfm_file_path') wf.connect(node, out, regressors, 'inputspec.func_to_anat_linear_xfm_file_path') - if strat_pool.check_rpool('movement-parameters'): + if strat_pool.check_rpool('movement-parameters'): node, out = strat_pool.get_data('movement-parameters') wf.connect(node, out, regressors, 'inputspec.motion_parameters_file_path') - if strat_pool.check_rpool('framewise-displacement-jenkinson'): + if strat_pool.check_rpool('framewise-displacement-jenkinson'): node, out = strat_pool.get_data('framewise-displacement-jenkinson') wf.connect(node, out, regressors, 'inputspec.fd_j_file_path') - if strat_pool.check_rpool('framewise-displacement-power'): + if strat_pool.check_rpool('framewise-displacement-power'): node, out = strat_pool.get_data('framewise-displacement-power') wf.connect(node, out, regressors, 'inputspec.fd_p_file_path') - if strat_pool.check_rpool('dvars'): + if strat_pool.check_rpool('dvars'): node, out = strat_pool.get_data('dvars') wf.connect(node, out, regressors, 'inputspec.dvars_file_path') @@ -2610,7 +2667,8 @@ def nuisance_regressors_generation_EPItemplate(wf, cfg, strat_pool, pipe_num, op wf.connect(node, out, regressors, 'inputspec.tr') outputs = { - 'regressors': 
(regressors, 'outputspec.regressors_file_path') + 'regressors': (regressors, 'outputspec.regressors_file_path'), + 'censor-indices': (regressors, 'outputspec.censor_indices') } return (wf, outputs) diff --git a/CPAC/nuisance/utils/__init__.py b/CPAC/nuisance/utils/__init__.py index 89712f6547..5caff4110e 100644 --- a/CPAC/nuisance/utils/__init__.py +++ b/CPAC/nuisance/utils/__init__.py @@ -7,6 +7,7 @@ import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe from nipype.interfaces import afni +from nipype import logging from CPAC.nuisance.utils.compcor import calc_compcor_components from CPAC.nuisance.utils.crc import encode as crc_encode @@ -14,6 +15,8 @@ from CPAC.utils.interfaces.function import Function from CPAC.registration.utils import check_transforms, generate_inverse_transform_flags +logger = logging.getLogger('nipype.workflow') + def find_offending_time_points(fd_j_file_path=None, fd_p_file_path=None, dvars_file_path=None, fd_j_threshold=None, fd_p_threshold=None, dvars_threshold=None, @@ -179,7 +182,7 @@ def temporal_variance_mask(threshold, by_slice=False, erosion=False, degree=1): raise ValueError("Threshold value should be positive, instead of {0}." .format(threshold_value)) - if threshold_method is "PCT" and threshold_value >= 100.0: + if threshold_method == "PCT" and threshold_value >= 100.0: raise ValueError("Percentile should be less than 100, received {0}." 
.format(threshold_value)) @@ -223,7 +226,7 @@ def temporal_variance_mask(threshold, by_slice=False, erosion=False, degree=1): wf.connect(mapper_list, 'out', mapper, 'out_file') wf.connect(mask_mapper_list, 'out', mapper, 'mask_file') - if threshold_method is "PCT": + if threshold_method == "PCT": threshold_node = pe.MapNode(Function(input_names=['in_file', 'mask', 'threshold_pct'], output_names=['threshold'], function=compute_pct_threshold, as_module=True), @@ -232,7 +235,7 @@ def temporal_variance_mask(threshold, by_slice=False, erosion=False, degree=1): wf.connect(mapper, 'out_file', threshold_node, 'in_file') wf.connect(mapper, 'mask_file', threshold_node, 'mask') - elif threshold_method is "SD": + elif threshold_method == "SD": threshold_node = pe.MapNode(Function(input_names=['in_file', 'mask', 'threshold_sd'], output_names=['threshold'], function=compute_sd_threshold, as_module=True), @@ -271,6 +274,7 @@ def generate_summarize_tissue_mask(nuisance_wf, pipeline_resource_pool, regressor_descriptor, regressor_selector, + csf_mask_exist, use_ants=True, ventricle_mask_exist=True, all_bold=False): @@ -322,46 +326,46 @@ def generate_summarize_tissue_mask(nuisance_wf, if all_bold: pass - - mask_to_epi = pe.Node(interface=fsl.FLIRT(), - name='{}_flirt' - .format(node_mask_key), - mem_gb=3.63, - mem_x=(3767129957844731 / 1208925819614629174706176, - 'in_file')) - - mask_to_epi.inputs.interp = 'nearestneighbour' - - if regressor_selector['extraction_resolution'] == "Functional": - # apply anat2func matrix - mask_to_epi.inputs.apply_xfm = True - mask_to_epi.inputs.output_type = 'NIFTI_GZ' - nuisance_wf.connect(*( - pipeline_resource_pool['Functional_mean'] + - (mask_to_epi, 'reference') - )) - nuisance_wf.connect(*( - pipeline_resource_pool['Transformations']['anat_to_func_linear_xfm'] + - (mask_to_epi, 'in_matrix_file') - )) - else: - resolution = regressor_selector['extraction_resolution'] - mask_to_epi.inputs.apply_isoxfm = resolution + if csf_mask_exist: + 
mask_to_epi = pe.Node(interface=fsl.FLIRT(), + name='{}_flirt'.format(node_mask_key), + mem_gb=3.63, + mem_x=(3767129957844731 / 1208925819614629174706176, + 'in_file')) + + mask_to_epi.inputs.interp = 'nearestneighbour' + + if regressor_selector['extraction_resolution'] == "Functional": + # apply anat2func matrix + mask_to_epi.inputs.apply_xfm = True + mask_to_epi.inputs.output_type = 'NIFTI_GZ' + nuisance_wf.connect(*( + pipeline_resource_pool['Functional_mean'] + + (mask_to_epi, 'reference') + )) + nuisance_wf.connect(*( + pipeline_resource_pool['Transformations']['anat_to_func_linear_xfm'] + + (mask_to_epi, 'in_matrix_file') + )) - nuisance_wf.connect(*( - pipeline_resource_pool['Anatomical_{}mm' + else: + resolution = regressor_selector['extraction_resolution'] + mask_to_epi.inputs.apply_isoxfm = resolution + + nuisance_wf.connect(*( + pipeline_resource_pool['Anatomical_{}mm' .format(resolution)] + - (mask_to_epi, 'reference') - )) + (mask_to_epi, 'reference') + )) - nuisance_wf.connect(*( - pipeline_resource_pool[prev_mask_key] + - (mask_to_epi, 'in_file') - )) + nuisance_wf.connect(*( + pipeline_resource_pool[prev_mask_key] + + (mask_to_epi, 'in_file') + )) - pipeline_resource_pool[mask_key] = \ - (mask_to_epi, 'out_file') + pipeline_resource_pool[mask_key] = \ + (mask_to_epi, 'out_file') if full_mask_key.startswith('CerebrospinalFluid'): pipeline_resource_pool = generate_summarize_tissue_mask_ventricles_masking( @@ -370,6 +374,7 @@ def generate_summarize_tissue_mask(nuisance_wf, regressor_descriptor, regressor_selector, node_mask_key, + csf_mask_exist, use_ants, ventricle_mask_exist ) @@ -389,7 +394,7 @@ def generate_summarize_tissue_mask(nuisance_wf, pipeline_resource_pool[mask_key] = \ (erode_mask_node, 'out_file') - return pipeline_resource_pool, full_mask_key + return pipeline_resource_pool, full_mask_key def generate_summarize_tissue_mask_ventricles_masking(nuisance_wf, @@ -397,19 +402,20 @@ def 
generate_summarize_tissue_mask_ventricles_masking(nuisance_wf, regressor_descriptor, regressor_selector, mask_key, + csf_mask_exist, use_ants=True, ventricle_mask_exist=True): + if csf_mask_exist == False: + logger.warning('Segmentation is Off, - therefore will be using ' + 'lateral_ventricle_mask as CerebrospinalFluid_mask.') + # Mask CSF with Ventricles if '{}_Unmasked'.format(mask_key) not in pipeline_resource_pool: - # reduce CSF mask to the lateral ventricles - mask_csf_with_lat_ven = pe.Node(interface=afni.Calc(outputtype='NIFTI_GZ'), name='{}_Ventricles'.format(mask_key)) - mask_csf_with_lat_ven.inputs.expr = 'a*b' - mask_csf_with_lat_ven.inputs.out_file = 'csf_lat_ven_mask.nii.gz' - - if ventricle_mask_exist : + if ventricle_mask_exist: ventricles_key = 'VentriclesToAnat' + if 'resolution' in regressor_descriptor: ventricles_key += '_{}'.format(regressor_descriptor['resolution']) @@ -444,7 +450,20 @@ def generate_summarize_tissue_mask_ventricles_masking(nuisance_wf, nuisance_wf.connect(collect_linear_transforms, 'out', lat_ven_mni_to_anat, 'transforms') nuisance_wf.connect(*(pipeline_resource_pool['Ventricles'] + (lat_ven_mni_to_anat, 'input_image'))) - nuisance_wf.connect(*(pipeline_resource_pool[mask_key] + (lat_ven_mni_to_anat, 'reference_image'))) + resolution = regressor_selector['extraction_resolution'] + + if csf_mask_exist: + nuisance_wf.connect(*( + pipeline_resource_pool[mask_key] + + (lat_ven_mni_to_anat, 'reference_image'))) + elif resolution == 'Functional': + nuisance_wf.connect(*( + pipeline_resource_pool['Functional_mean'] + + (lat_ven_mni_to_anat, 'reference_image'))) + else: + nuisance_wf.connect(*( + pipeline_resource_pool['Anatomical_{}mm'.format(resolution)] + + (lat_ven_mni_to_anat, 'reference_image'))) pipeline_resource_pool[ventricles_key] = (lat_ven_mni_to_anat, 'output_image') @@ -460,17 +479,26 @@ def generate_summarize_tissue_mask_ventricles_masking(nuisance_wf, pipeline_resource_pool[ventricles_key] = (lat_ven_mni_to_anat, 
'out_file') - nuisance_wf.connect(*(pipeline_resource_pool[ventricles_key] + (mask_csf_with_lat_ven, 'in_file_a'))) - nuisance_wf.connect(*(pipeline_resource_pool[mask_key] + (mask_csf_with_lat_ven, 'in_file_b'))) + if csf_mask_exist: + # reduce CSF mask to the lateral ventricles + mask_csf_with_lat_ven = pe.Node(interface=afni.Calc(outputtype='NIFTI_GZ'), + name='{}_Ventricles'.format(mask_key)) + mask_csf_with_lat_ven.inputs.expr = 'a*b' + mask_csf_with_lat_ven.inputs.out_file = 'csf_lat_ven_mask.nii.gz' + + nuisance_wf.connect(*(pipeline_resource_pool[ventricles_key] + (mask_csf_with_lat_ven, 'in_file_a'))) + nuisance_wf.connect(*(pipeline_resource_pool[mask_key] + (mask_csf_with_lat_ven, 'in_file_b'))) - pipeline_resource_pool['{}_Unmasked'.format(mask_key)] = pipeline_resource_pool[mask_key] - pipeline_resource_pool[mask_key] = (mask_csf_with_lat_ven, 'out_file') - else : - pipeline_resource_pool['{}_Unmasked'.format(mask_key)] = pipeline_resource_pool[mask_key] + pipeline_resource_pool['{}_Unmasked'.format(mask_key)] = pipeline_resource_pool[mask_key] + pipeline_resource_pool[mask_key] = (mask_csf_with_lat_ven, 'out_file') + else: + pipeline_resource_pool[mask_key] = pipeline_resource_pool[ventricles_key] + return pipeline_resource_pool + class NuisanceRegressor(object): def __init__(self, selector): @@ -579,7 +607,7 @@ def encode(selector): res = "%.2gmm" % s['extraction_resolution'] if s.get('erode_mask'): res += 'E' - pieces += [res] + pieces += [res] pieces += [NuisanceRegressor._summary_params(s)] pieces += [NuisanceRegressor._derivative_params(s)] diff --git a/CPAC/nuisance/utils/compcor.py b/CPAC/nuisance/utils/compcor.py index bb93f0eb3c..9c4ede05e8 100644 --- a/CPAC/nuisance/utils/compcor.py +++ b/CPAC/nuisance/utils/compcor.py @@ -177,13 +177,13 @@ def fallback_svd(a, full_matrices=True, compute_uv=True): except np.linalg.LinAlgError: pass - return svd(a, full_matrices=full_matrices, compute_uv=compute_uv, lapack_driver='gesvd') + def 
TR_string_to_float(tr): if 'ms' in tr: - tr = float(tr.replace('ms',''))/1000 + tr = float(tr.replace('ms', '')) / 1000 else: - tr = float(tr.replace('s','')) + tr = float(tr.replace('s', '')) return tr diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 9d2af1e59f..cfdd64c5b8 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -6,12 +6,13 @@ import pickle import copy import faulthandler -import yaml -import logging as cb_logging +from logging import getLogger from time import strftime import nipype +import yaml + from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nipype_pipeline_engine.plugins import \ LegacyMultiProcPlugin, MultiProcPlugin @@ -22,7 +23,7 @@ import CPAC -from CPAC.pipeline.engine import NodeBlock, initiate_rpool, wrap_block +from CPAC.pipeline.engine import NodeBlock, initiate_rpool, wrap_block from CPAC.anat_preproc.anat_preproc import ( freesurfer_preproc, freesurfer_abcd_preproc, @@ -93,7 +94,11 @@ warp_deriv_mask_to_EPItemplate, warp_timeseries_to_T1template_abcd, single_step_resample_timeseries_to_T1template, + warp_timeseries_to_T1template_dcan_nhp, + warp_Tissuemask_to_T1template, + warp_Tissuemask_to_EPItemplate, warp_timeseries_to_T1template_dcan_nhp + ) from CPAC.seg_preproc.seg_preproc import ( @@ -136,17 +141,15 @@ ) from CPAC.nuisance.nuisance import ( + choose_nuisance_blocks, ICA_AROMA_ANTsreg, ICA_AROMA_FSLreg, ICA_AROMA_ANTsEPIreg, ICA_AROMA_FSLEPIreg, - nuisance_regressors_generation, - nuisance_regression, erode_mask_T1w, erode_mask_CSF, erode_mask_GM, erode_mask_WM, - nuisance_regressors_generation_EPItemplate, erode_mask_bold, erode_mask_boldCSF, erode_mask_boldGM, @@ -180,6 +183,7 @@ network_centrality ) +from CPAC.pipeline.random_state import set_up_random_state_logger from CPAC.utils.datasource import ( gather_extraction_maps ) @@ -188,16 +192,15 @@ from CPAC.utils import Configuration from CPAC.qc.pipeline import create_qc_workflow -from 
CPAC.qc.utils import generate_qc_pages +from CPAC.qc.xcp import qc_xcp_native, qc_xcp_skullstripped, qc_xcp_template +from CPAC.utils.monitoring import log_nodes_cb, log_nodes_initial, set_up_logger +from CPAC.utils.monitoring.draw_gantt_chart import resource_report from CPAC.utils.utils import ( check_config_resources, check_system_deps, ) -from CPAC.utils.monitoring import log_nodes_cb, log_nodes_initial -from CPAC.utils.monitoring.draw_gantt_chart import resource_report - logger = logging.getLogger('nipype.workflow') faulthandler.enable() @@ -256,8 +259,9 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None, if not os.path.exists(log_dir): os.makedirs(os.path.join(log_dir)) - # TODO ASH Enforce c.run_logging to be boolean - # TODO ASH Schema validation + if c.pipeline_setup['Debugging']['verbose']: + set_up_logger('engine', level='debug', log_dir=log_dir) + config.update_config({ 'logging': { 'log_directory': log_dir, @@ -347,8 +351,8 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None, Setting MKL_NUM_THREADS to 1 Setting ANTS/ITK thread usage to {ants_threads} Maximum potential number of cores that might be used during this run: {max_cores} - -""" +{random_seed} +""" # noqa: E501 execution_info = """ @@ -363,9 +367,9 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None, System time of start: {run_start} System time of completion: {run_finish} -""" +""" # noqa: E501 - logger.info(information.format( + logger.info('%s', information.format( run_command=' '.join(['run', *sys.argv[1:]]), cpac_version=CPAC.__version__, cores=c.pipeline_setup['system_config']['max_cores_per_participant'], @@ -373,7 +377,11 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None, 'num_participants_at_once'], omp_threads=c.pipeline_setup['system_config']['num_OMP_threads'], ants_threads=c.pipeline_setup['system_config']['num_ants_threads'], - max_cores=max_core_usage + 
max_cores=max_core_usage, + random_seed=( + ' Random seed: %s' % + c.pipeline_setup['system_config']['random_seed']) if + c.pipeline_setup['system_config']['random_seed'] is not None else '' )) subject_info = {} @@ -409,6 +417,9 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None, c.pipeline_setup['output_directory']['path'] = os.path.abspath( c.pipeline_setup['output_directory']['path']) + if c.pipeline_setup['system_config']['random_seed'] is not None: + set_up_random_state_logger(log_dir) + workflow = build_workflow( subject_id, sub_dict, c, p_name, num_ants_cores ) @@ -480,10 +491,7 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None, pass # Add handler to callback log file - cb_logger = cb_logging.getLogger('callback') - cb_logger.setLevel(cb_logging.DEBUG) - handler = cb_logging.FileHandler(cb_log_filename) - cb_logger.addHandler(handler) + set_up_logger('callback', cb_log_filename, 'debug', log_dir) # Log initial information from all the nodes log_nodes_initial(workflow) @@ -836,7 +844,7 @@ def build_anat_preproc_stack(rpool, cfg, pipeline_blocks=None): ] anat_preproc_blocks = [ - (non_local_means, ('T1w', ['desc-preproc_T1w', + (non_local_means, ('T1w', ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])), n4_bias_correction @@ -847,7 +855,7 @@ def build_anat_preproc_stack(rpool, cfg, pipeline_blocks=None): anat_blocks = anat_preproc_blocks + acpc_blocks pipeline_blocks += anat_blocks - + if not rpool.check_rpool('freesurfer-subject-dir'): pipeline_blocks += [freesurfer_abcd_preproc] @@ -866,14 +874,14 @@ def build_anat_preproc_stack(rpool, cfg, pipeline_blocks=None): pipeline_blocks += anat_brain_mask_blocks # T2w Anatomical Preprocessing - if rpool.check_rpool('T2w'): + if rpool.check_rpool('T2w'): if not rpool.check_rpool('desc-reorient_T2w'): anat_init_blocks_T2 = [ - anatomical_init_T2 + anatomical_init_T2 ] pipeline_blocks += anat_init_blocks_T2 - - # TODO: T2 freesurfer_preproc? 
+ + # TODO: T2 freesurfer_preproc? # pipeline_blocks += [freesurfer_preproc] if not rpool.check_rpool('desc-preproc_T2w'): @@ -906,7 +914,7 @@ def build_anat_preproc_stack(rpool, cfg, pipeline_blocks=None): ] anat_preproc_blocks_T2 = [ - registration_T2w_to_T1w, + registration_T2w_to_T1w, non_local_means_T2, n4_bias_correction_T2, t1t2_bias_correction @@ -917,7 +925,7 @@ def build_anat_preproc_stack(rpool, cfg, pipeline_blocks=None): anat_blocks_T2 = anat_preproc_blocks_T2 + acpc_blocks_T2 pipeline_blocks += anat_blocks_T2 - + # Anatomical T1 brain extraction if not rpool.check_rpool('desc-brain_T1w'): anat_brain_blocks = [ @@ -950,9 +958,11 @@ def build_T1w_registration_stack(rpool, cfg, pipeline_blocks=None): if not rpool.check_rpool('from-T1w_to-template_mode-image_xfm'): reg_blocks = [ [register_ANTs_anat_to_template, register_FSL_anat_to_template], - overwrite_transform_anat_to_template + overwrite_transform_anat_to_template, + ] - + + if not rpool.check_rpool('desc-restore-brain_T1w'): reg_blocks.append(correct_restore_brain_intensity_abcd) @@ -982,9 +992,14 @@ def build_segmentation_stack(rpool, cfg, pipeline_blocks=None): if 'EPI_Template' in cfg.segmentation['tissue_segmentation'][ 'Template_Based']['template_for_segmentation']: seg_blocks.append(tissue_seg_EPI_template_based) - + pipeline_blocks += seg_blocks + if cfg.registration_workflows['anatomical_registration']['run'] and 'T1_Template' in cfg.segmentation[ + 'tissue_segmentation']['Template_Based']['template_for_segmentation']: + pipeline_blocks.append(warp_Tissuemask_to_T1template) + + return pipeline_blocks @@ -1006,11 +1021,11 @@ def list_blocks(pipeline_blocks, indent=None): getattr(block, '__name__', getattr(block, 'name', yaml.safe_load( list_blocks(list(block))) if isinstance(block, (tuple, list, set)) else str(block)) - ) for block in pipeline_blocks - ]) + ) for block in pipeline_blocks]) if isinstance(indent, int): blockstring = '\n'.join([ - '\t' + ' ' * indent + line for line in 
blockstring.split('\n')]) + '\t' + ' ' * indent + line.replace('- - ', '- ') for + line in blockstring.split('\n')]) return blockstring @@ -1019,6 +1034,7 @@ def connect_pipeline(wf, cfg, rpool, pipeline_blocks): 'Connecting pipeline blocks:', list_blocks(pipeline_blocks, indent=1)])) + previous_nb = None for block in pipeline_blocks: try: nb = NodeBlock(block) @@ -1033,6 +1049,10 @@ def connect_pipeline(wf, cfg, rpool, pipeline_blocks): f"'{NodeBlock(block).get_name()}' " f"to workflow '{wf}' " + previous_nb_str + e.args[0], ) + if cfg.pipeline_setup['Debugging']['verbose']: + verbose_logger = getLogger('engine') + verbose_logger.debug(e.args[0]) + verbose_logger.debug(rpool) raise previous_nb = nb @@ -1089,10 +1109,10 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None, func_slice_time, func_reorient ] - + if not rpool.check_rpool('desc-mean_bold'): func_preproc_blocks.append(func_mean) - + func_mask_blocks = [] if not rpool.check_rpool('space-bold_desc-brain_mask'): func_mask_blocks = [ @@ -1100,7 +1120,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None, bold_mask_anatomical_refined, bold_mask_anatomical_based, bold_mask_anatomical_resampled, bold_mask_ccs], bold_masking] - + func_prep_blocks = [ calc_motion_stats, func_normalize @@ -1112,7 +1132,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None, distcor_blocks.append(distcor_phasediff_fsl_fugue) if rpool.check_rpool('epi_1'): - distcor_blocks.append(distcor_blip_afni_qwarp) + distcor_blocks.append(distcor_blip_afni_qwarp) distcor_blocks.append(distcor_blip_fsl_topup) if distcor_blocks: @@ -1168,6 +1188,12 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None, ] pipeline_blocks += EPI_reg_blocks + if cfg.registration_workflows['functional_registration']['EPI_registration']['run' + ] and 'EPI_Template' in cfg.segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']: + 
pipeline_blocks.append(warp_Tissuemask_to_EPItemplate) + + + # Generate the composite transform for BOLD-to-template for the T1 # anatomical template (the BOLD-to- EPI template is already created above) if cfg.registration_workflows['functional_registration'][ @@ -1193,40 +1219,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None, erode_mask_boldCSF, erode_mask_boldGM, erode_mask_boldWM] - nuisance += nuisance_masks - - if 'T1_template' in \ - cfg.registration_workflows['functional_registration'][ - 'func_registration_to_template']['target_template']['using']: - if cfg.registration_workflows['functional_registration'][ - 'func_registration_to_template']['apply_transform']['using'] == 'default': - nuisance.append((nuisance_regressors_generation, ("desc-preproc_bold", ["desc-preproc_bold", "bold"]))) - nuisance.append((nuisance_regression, ("desc-preproc_bold", ["desc-preproc_bold", "bold"]))) - elif cfg.registration_workflows['functional_registration'][ - 'func_registration_to_template']['apply_transform']['using'] == 'single_step_resampling': - nuisance.append((nuisance_regressors_generation, ("desc-preproc_bold", "desc-stc_bold"))) - nuisance.append((nuisance_regression, ("desc-preproc_bold", "desc-stc_bold"))) - elif cfg.registration_workflows['functional_registration'][ - 'func_registration_to_template']['apply_transform']['using'] == 'abcd': - nuisance.append((nuisance_regressors_generation, ("desc-preproc_bold", "bold"))) - nuisance.append((nuisance_regression, ("desc-preproc_bold", "bold"))) - - if 'EPI_template' in \ - cfg.registration_workflows['functional_registration'][ - 'func_registration_to_template']['target_template'][ - 'using']: - if cfg.registration_workflows['functional_registration'][ - 'func_registration_to_template']['apply_transform']['using'] == 'default': - nuisance.append((nuisance_regressors_generation_EPItemplate, ("desc-preproc_bold", ["desc-preproc_bold", "bold"]))) - nuisance.append((nuisance_regression, 
("desc-preproc_bold", ["desc-preproc_bold", "bold"]))) - elif cfg.registration_workflows['functional_registration'][ - 'func_registration_to_template']['apply_transform']['using'] == 'single_step_resampling': - nuisance.append((nuisance_regressors_generation_EPItemplate, ("desc-preproc_bold", "desc-stc_bold"))) - nuisance.append((nuisance_regression, ("desc-preproc_bold", "desc-stc_bold"))) - elif cfg.registration_workflows['functional_registration'][ - 'func_registration_to_template']['apply_transform']['using'] == 'abcd': - nuisance.append((nuisance_regressors_generation_EPItemplate, ("desc-preproc_bold", "bold"))) - nuisance.append((nuisance_regression, ("desc-preproc_bold", "bold"))) + nuisance += nuisance_masks + choose_nuisance_blocks(cfg) pipeline_blocks += nuisance @@ -1281,7 +1274,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None, if apply_func_warp: pipeline_blocks += [warp_timeseries_to_EPItemplate, warp_bold_mean_to_EPItemplate] - + if not rpool.check_rpool('space-EPItemplate_desc-bold_mask'): pipeline_blocks += [warp_bold_mask_to_EPItemplate, warp_deriv_mask_to_EPItemplate] @@ -1331,17 +1324,46 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None, vmhc] if not rpool.check_rpool('centrality') and \ - any([cfg.network_centrality[option]['weight_options'] for option in valid_options['centrality']['method_options']]): + any(cfg.network_centrality[option]['weight_options'] for + option in valid_options['centrality']['method_options']): pipeline_blocks += [network_centrality] - if cfg.pipeline_setup['output_directory'][ - 'generate_quality_control_images']: + if cfg.pipeline_setup['output_directory']['quality_control'][ + 'generate_xcpqc_files' + ]: + if all(rpool.check_rpool(motion) for motion in [ + 'censor-indices', 'coordinate-transformation', 'dvars', + 'framewise-displacement-jenkinson', 'max-displacement', + 'movement-parameters', 'rels-displacement' + ]): + pipeline_blocks += [qc_xcp_native] + if 
rpool.check_rpool('space-template_desc-preproc_bold'): + pipeline_blocks += [qc_xcp_template] + pipeline_blocks += [qc_xcp_skullstripped] + + if cfg.pipeline_setup['output_directory']['quality_control'][ + 'generate_quality_control_images' + ]: qc_stack, qc_montage_id_a, qc_montage_id_s, qc_hist_id, qc_plot_id = \ create_qc_workflow(cfg) pipeline_blocks += qc_stack # Connect the entire pipeline! - wf = connect_pipeline(wf, cfg, rpool, pipeline_blocks) + try: + wf = connect_pipeline(wf, cfg, rpool, pipeline_blocks) + except LookupError as lookup_error: + errorstrings = lookup_error.args[0].split('\n') + missing_key = errorstrings[ + errorstrings.index('[!] C-PAC says: The listed resource is not ' + 'in the resource pool:') + 1] + if missing_key.endswith('_bold') and 'func' not in sub_dict: + raise FileNotFoundError( + 'The provided pipeline configuration requires functional ' + 'data but no functional data were found for ' + + '/'.join([sub_dict[key] for key in ['site', 'subject_id', + 'unique_id'] if key in sub_dict]) + '. Please check ' + 'your data and pipeline configurations.') from lookup_error + raise lookup_error # Write out the data # TODO enforce value with schema validation diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index 1dd9f92f13..625109297e 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -30,9 +30,9 @@ from CPAC.image_utils.spatial_smoothing import spatial_smoothing from CPAC.image_utils.statistical_transforms import z_score_standardize, \ fisher_z_score_standardize -from CPAC.pipeline.nipype_pipeline_engine import get_data_size -logger = logging.getLogger('workflow') +logger = logging.getLogger('nipype.workflow') +verbose_logger = logging.getLogger('engine') class ResourcePool(object): @@ -232,21 +232,26 @@ def get(self, resource, pipe_idx=None, report_fetched=False, if report_fetched: return (None, None) return None - raise Exception("\n[!] 
C-PAC says: None of the listed " - "resources are in the resource pool:\n" - f"{resource}\n") + lookup_message = ("\n[!] C-PAC says: None of the listed " + "resources are in the resource pool:\n" + f"{resource}\n") + verbose_logger.debug(lookup_message) + raise Exception(lookup_message) else: if resource not in self.rpool.keys(): if optional: if report_fetched: return (None, None) return None - raise LookupError("\n\n[!] C-PAC says: The listed resource is " + lookup_message = ("\n\n[!] C-PAC says: The listed resource is " f"not in the resource pool:\n{resource}\n\n" "Developer Note: This may be due to a mis" "match between the node block's docstring " "'input' field and a strat_pool.get_data() " - "call within the block function.\n") + "call within the block function.\n(keys in " + f"resource pool are {self.rpool.keys()})\n") + verbose_logger.debug(lookup_message) + raise LookupError(lookup_message) if report_fetched: if pipe_idx: return (self.rpool[resource][pipe_idx], resource) @@ -355,15 +360,17 @@ def flatten_prov(self, prov): flat_prov.append(entry) return flat_prov - def get_strats(self, resources): + def get_strats(self, resources, debug=False): # TODO: NOTE: NOT COMPATIBLE WITH SUB-RPOOL/STRAT_POOLS # TODO: (and it doesn't have to be) import itertools - + linked_resources = [] resource_list = [] + if debug: + verbose_logger.debug('\nresources: %s', resources) for resource in resources: # grab the linked-input tuples if isinstance(resource, tuple): @@ -385,6 +392,10 @@ def get_strats(self, resources): total_pool = [] variant_pool = {} len_inputs = len(resource_list) + if debug: + verbose_logger.debug('linked_resources: %s', + linked_resources) + verbose_logger.debug('resource_list: %s', resource_list) for resource in resource_list: rp_dct, fetched_resource = self.get(resource, report_fetched=True, # <---- rp_dct has the strats/pipe_idxs as the keys on first level, then 'data' and 'json' on each strat level underneath @@ -393,7 +404,8 @@ def get_strats(self, 
resources): len_inputs -= 1 continue sub_pool = [] - + if debug: + verbose_logger.debug('len(rp_dct): %s\n', len(rp_dct)) for strat in rp_dct.keys(): json_info = self.get_json(fetched_resource, strat) cpac_prov = json_info['CpacProvenance'] @@ -404,8 +416,11 @@ def get_strats(self, resources): for key, val in json_info['CpacVariant'].items(): if val not in variant_pool[fetched_resource]: variant_pool[fetched_resource] += val - variant_pool[fetched_resource].append(f'NO-{val[0]}') + variant_pool[fetched_resource].append( + f'NO-{val[0]}') + if debug: + verbose_logger.debug('%s sub_pool: %s\n', resource, sub_pool) total_pool.append(sub_pool) # TODO: right now total_pool is: @@ -433,6 +448,9 @@ def get_strats(self, resources): strat_str_list.append(strat_str) strat_list_list.append(strat_list) + if debug: + verbose_logger.debug('len(strat_list_list): %s\n', + len(strat_list_list)) for strat_list in strat_list_list: json_dct = {} @@ -510,7 +528,7 @@ def get_strats(self, resources): if in_current_strat: drop = True break - + if in_other_strat: if in_other_spread: if not in_current_strat: @@ -811,7 +829,8 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): # TODO: have to link the pipe_idx's here. and call up 'desc-preproc_T1w' from a Sources in a json and replace. here. # TODO: can do the pipeline_description.json variants here too! 
- + #print(Outputs.any) + #print(self.rpool.keys()) for resource in self.rpool.keys(): if resource not in Outputs.any: @@ -938,7 +957,11 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): nii_name, 'format_string') node, out = self.rpool[resource][pipe_idx]['data'] - wf.connect(node, out, nii_name, 'in_file') + try: + wf.connect(node, out, nii_name, 'in_file') + except OSError as os_error: + logger.warning(os_error) + continue write_json_imports = ['import os', 'import json'] write_json = pe.Node(Function(input_names=['json_data', @@ -969,6 +992,19 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): wf.connect(write_json, 'json_file', ds, f'{out_dct["subdir"]}.@json') + def node_data(self, resource, **kwargs): + '''Factory function to create NodeData objects + + Parameters + ---------- + resource : str + + Returns + ------- + NodeData + ''' + return NodeData(self, resource, **kwargs) + class NodeBlock(object): def __init__(self, node_block_functions): @@ -1059,6 +1095,7 @@ def grab_tiered_dct(self, cfg, key_list): return cfg_dct def connect_block(self, wf, cfg, rpool): + debug = cfg.pipeline_setup['Debugging']['verbose'] all_opts = [] for name, block_dct in self.node_blocks.items(): opts = [] @@ -1183,8 +1220,9 @@ def connect_block(self, wf, cfg, rpool): switch = [switch] if True in switch: - print(f"Connecting {name}...\n") - for pipe_idx, strat_pool in rpool.get_strats(inputs).items(): # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.} + logger.info('Connecting %s...', name) + for pipe_idx, strat_pool in rpool.get_strats( + inputs, debug).items(): # strat_pool is a ResourcePool like {'desc-preproc_T1w': { 'json': info, 'data': (node, out) }, 'desc-brain_mask': etc.} fork = False in switch # keep in mind rpool.get_strats(inputs) = {pipe_idx1: {'desc-preproc_T1w': etc.}, pipe_idx2: {..} } for opt in opts: # it's a dictionary of ResourcePools 
called strat_pools, except those sub-ResourcePools only have one level! no pipe_idx strat keys. # remember, you can get 'data' or 'json' from strat_pool with member functions @@ -1220,22 +1258,28 @@ def connect_block(self, wf, cfg, rpool): elif opt and 'USER-DEFINED' in option_val: node_name = f'{node_name}_{opt["Name"]}' - if cfg.pipeline_setup['Debugging']['verbose']: - print('\n=======================') - print(f'Node name: {node_name}') + if debug: + verbose_logger.debug('\n=======================') + verbose_logger.debug('Node name: %s', node_name) prov_dct = \ - rpool.get_resource_strats_from_prov(ast.literal_eval(pipe_idx)) + rpool.get_resource_strats_from_prov( + ast.literal_eval(pipe_idx)) for key, val in prov_dct.items(): - print('-------------------') - print(f'Input - {key}:') + verbose_logger.debug('-------------------') + verbose_logger.debug('Input - %s:', key) sub_prov_dct = \ rpool.get_resource_strats_from_prov(val) for sub_key, sub_val in sub_prov_dct.items(): sub_sub_dct = \ - rpool.get_resource_strats_from_prov(sub_val) - print(f' sub-input - {sub_key}:') - print(f' prov = {sub_val}') - print(f' sub_sub_inputs = {sub_sub_dct.keys()}') + rpool.get_resource_strats_from_prov( + sub_val) + verbose_logger.debug(' sub-input - %s:', + sub_key) + verbose_logger.debug(' prov = %s', + sub_val) + verbose_logger.debug( + ' sub_sub_inputs = %s', + sub_sub_dct.keys()) for label, connection in outs.items(): self.check_output(outputs, label, name) @@ -1384,11 +1428,11 @@ def wrap_block(node_blocks, interface, wf, cfg, strat_pool, pipe_num, opt): def ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id): - + if 'anat' not in data_paths: print('No anatomical data present.') return rpool - + if 'creds_path' not in data_paths: data_paths['creds_path'] = None @@ -1466,12 +1510,6 @@ def ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, part_id, def ingress_output_dir(cfg, rpool, unique_id, creds_path=None): out_dir = 
cfg.pipeline_setup['output_directory']['path'] - - if not os.path.isdir(out_dir): - print(f"\nOutput directory {out_dir} does not exist yet, " - "initializing.") - os.makedirs(out_dir) - source = False if cfg.pipeline_setup['output_directory']['pull_source_once']: @@ -1526,7 +1564,7 @@ def ingress_output_dir(cfg, rpool, unique_id, creds_path=None): cpac_dir_anat = os.path.join(cpac_dir, 'anat') cpac_dir_func = os.path.join(cpac_dir, 'func') - exts = ['.nii', '.gz', '.mat', '.1D', '.txt', '.csv', '.rms', '.mgz'] + exts = ['.nii', '.gz', '.mat', '.1D', '.txt', '.csv', '.rms'] all_output_dir = [] if os.path.isdir(cpac_dir_anat): @@ -1568,6 +1606,10 @@ def ingress_output_dir(cfg, rpool, unique_id, creds_path=None): unique_data_label = str(data_label) + #if 'sub-' in data_label or 'ses-' in data_label: + # raise Exception('\n\n[!] Possibly wrong participant or ' + # 'session in this directory?\n\nDirectory: ' + # f'{cpac_dir_anat}\nFilepath: {filepath}\n\n') suffix = data_label.split('_')[-1] desc_val = None for tag in data_label.split('_'): @@ -1580,21 +1622,16 @@ def ingress_output_dir(cfg, rpool, unique_id, creds_path=None): jsonpath = f"{jsonpath}.json" if not os.path.exists(jsonpath): - print(f'\n\n[!] No JSON found for file {filepath}.') - if not source: - print(f'Creating {jsonpath}..\n\n') - else: - print('Creating meta-data for the data..\n\n') + print(f'\n\n[!] No JSON found for file {filepath}.\nCreating ' + f'{jsonpath}..\n\n') json_info = { - 'CpacProvenance': [f'{data_label}:Non-C-PAC Origin'], 'Description': 'This data was generated elsewhere and ' 'supplied by the user into this C-PAC run\'s ' 'output directory. This JSON file was ' 'automatically generated by C-PAC because a ' 'JSON file was not supplied with the data.' 
} - if not source: - write_output_json(json_info, jsonpath) + write_output_json(json_info, jsonpath) else: json_info = read_json(jsonpath) @@ -1671,7 +1708,7 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): res_keys = [x.lstrip() for x in resolution.split(',')] tag = res_keys[-1] - json_info = {} + json_info = {} if '$FSLDIR' in val: val = val.replace('$FSLDIR', cfg.pipeline_setup[ @@ -1729,20 +1766,7 @@ def ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path=None): ) rpool.set_data(key, config_ingress, 'outputspec.data', json_info, "", f"{key}_config_ingress") - - # Freesurfer directory, not a template, so not in cpac_templates.tsv - if cfg.surface_analysis['freesurfer']['freesurfer_dir']: - fs_ingress = create_general_datasource(f'gather_freesurfer_dir') - fs_ingress.inputs.inputnode.set( - unique_id=unique_id, - data=cfg.surface_analysis['freesurfer']['freesurfer_dir'], - creds_path=creds_path, - dl_dir=cfg.pipeline_setup['working_directory']['path'] - ) - rpool.set_data("freesurfer-subject-dir", fs_ingress, 'outputspec.data', - json_info, "", f"freesurfer_config_ingress") - - + # templates, resampling from config ''' template_keys = [ @@ -1874,10 +1898,10 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): if data_paths: rpool = ingress_raw_anat_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id) - - wf, rpool, diff, blip, fmap_rp_list = \ - ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, - part_id, ses_id) + if 'func' in data_paths: + wf, rpool, diff, blip, fmap_rp_list = \ + ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, + part_id, ses_id) # grab already-processed data from the output directory rpool = ingress_output_dir(cfg, rpool, unique_id, creds_path) @@ -1930,3 +1954,53 @@ def run_node_blocks(blocks, data_paths, cfg=None): rpool.gather_pipes(wf, cfg) wf.run() + + +class NodeData: + r"""Class to hold outputs of + CPAC.pipeline.engine.ResourcePool().get_data(), so one can do + + 
``node_data = strat_pool.node_data(resource)`` and have + ``node_data.node`` and ``node_data.out`` instead of doing + ``node, out = strat_pool.get_data(resource)`` and needing two + variables (``node`` and ``out``) to store that information. + + Also includes ``variant`` attribute providing the resource's self- + keyed value within its ``CpacVariant`` dictionary. + + Examples + -------- + >>> rp = ResourcePool() + >>> rp.node_data(None) + NotImplemented (NotImplemented) + + >>> rp.set_data('test', + ... pe.Node(Function(input_names=[]), 'test'), + ... 'b', [], 0, 'test') + >>> rp.node_data('test') + test (b) + >>> rp.node_data('test').out + 'b' + + >>> try: + ... rp.node_data('b') + ... except LookupError as lookup_error: + ... print(' '.join(str(lookup_error).strip().split('\n')[0:2])) + [!] C-PAC says: The listed resource is not in the resource pool: b + """ + # pylint: disable=too-few-public-methods + def __init__(self, strat_pool=None, resource=None, **kwargs): + self.node = NotImplemented + self.out = NotImplemented + self.variant = None + if strat_pool is not None and resource is not None: + self.node, self.out = strat_pool.get_data(resource, **kwargs) + if ( + hasattr(strat_pool, 'rpool') and + isinstance(strat_pool.rpool, dict) + ): + self.variant = strat_pool.rpool.get(resource, {}).get( + 'json', {}).get('CpacVariant', {}).get(resource) + + def __repr__(self): + return f'{getattr(self.node, "name", str(self.node))} ({self.out})' diff --git a/CPAC/pipeline/nipype_pipeline_engine/__init__.py b/CPAC/pipeline/nipype_pipeline_engine/__init__.py index fc49733d79..a8390957b0 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/__init__.py +++ b/CPAC/pipeline/nipype_pipeline_engine/__init__.py @@ -1,11 +1,11 @@ '''Module to import Nipype Pipeline engine and override some Classes. -See https://fcp-indi.github.com/docs/developer/nodes +See https://fcp-indi.github.io/docs/developer/nodes for C-PAC-specific documentation. 
See https://nipype.readthedocs.io/en/latest/api/generated/nipype.pipeline.engine.html -for Nipype's documentation.''' # noqa E501 +for Nipype's documentation.''' # noqa: E501 from nipype.pipeline import engine as pe # import everything in nipype.pipeline.engine.__all__ -from nipype.pipeline.engine import * # noqa F401 +from nipype.pipeline.engine import * # noqa: F401,F403 # import our DEFAULT_MEM_GB and override Node, MapNode from .engine import DEFAULT_MEM_GB, get_data_size, Node, MapNode, \ UNDEFINED_SIZE, Workflow diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 17d4b937c3..176db2f40e 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -1,17 +1,20 @@ '''Module to import Nipype Pipeline engine and override some Classes. -See https://fcp-indi.github.com/docs/developer/nodes +See https://fcp-indi.github.io/docs/developer/nodes for C-PAC-specific documentation. See https://nipype.readthedocs.io/en/latest/api/generated/nipype.pipeline.engine.html for Nipype's documentation. 
-''' # noqa E501 +''' # noqa: E501 import os import re import warnings +from logging import getLogger from inspect import Parameter, Signature, signature from nibabel import load from nipype import logging +from nipype.interfaces.utility import Function from nipype.pipeline import engine as pe from nipype.pipeline.engine.utils import load_resultfile as _load_resultfile +from nipype.utils.functions import getsource from numpy import prod from traits.trait_base import Undefined from traits.trait_handlers import TraitListObject @@ -20,6 +23,8 @@ DEFAULT_MEM_GB = 2.0 UNDEFINED_SIZE = (42, 42, 42, 1200) +random_state_logger = getLogger('random') + def _doctest_skiplines(docstring, lines_to_skip): ''' @@ -61,8 +66,11 @@ class Node(pe.Node): ) def __init__(self, *args, mem_gb=DEFAULT_MEM_GB, **kwargs): + from CPAC.pipeline.random_state import random_seed super().__init__(*args, mem_gb=mem_gb, **kwargs) self.logger = logging.getLogger("nipype.workflow") + self.seed = random_seed() + self.seed_applied = False if 'mem_x' in kwargs and isinstance( kwargs['mem_x'], (tuple, list) @@ -132,7 +140,62 @@ def __init__(self, *args, mem_gb=DEFAULT_MEM_GB, **kwargs): ``mode`` can be any one of * 'xyzt' (spatial * temporal) (default if not specified) * 'xyz' (spatial) - * 't' (temporal)'''])) # noqa E501 + * 't' (temporal)'''])) # noqa: E501 + + def _add_flags(self, flags): + r''' + Parameters + ---------- + flags : list or tuple + If a list, add ``flags`` to ``self.inputs.flags`` or + ``self.inputs.args`` + + If a tuple, remove ``flags[1]`` from and add ``flags[0]`` + to ``self.inputs.flags`` or ``self.inputs.args`` + ''' + def prep_flags(attr): + to_remove = [] + if isinstance(flags, tuple): + to_remove += flags[1] + new_flags = flags[0] + else: + new_flags = flags + old_flags = getattr(self.inputs, attr) + if isinstance(old_flags, str): + to_remove.sort(key=lambda x: -x.count(' ')) + for flag in to_remove: + if f' {flag} ' in old_flags: + old_flags = old_flags.replace(f' {flag}', '') 
+ old_flags = [old_flags] + if isinstance(old_flags, list): + new_flags = [flag for flag in old_flags if + flag not in to_remove] + new_flags + if attr == 'args': + new_flags = ' '.join(new_flags) + while ' ' in new_flags: + new_flags = new_flags.replace(' ', ' ') + return new_flags + if hasattr(self.inputs, 'flags'): + self.inputs.flags = prep_flags('flags') + else: + self.inputs.args = prep_flags('args') + + def _apply_random_seed(self): + '''Apply flags for the first matched interface''' + # pylint: disable=import-outside-toplevel + from CPAC.pipeline.random_state import random_seed_flags + if isinstance(self.interface, Function): + for rsf, flags in random_seed_flags()['functions'].items(): + if self.interface.inputs.function_str == getsource(rsf): + self.interface.inputs.function_str = flags( + self.interface.inputs.function_str) + self.seed_applied = True + return + for rsf, flags in random_seed_flags()['interfaces'].items(): + if isinstance(self.interface, rsf): + self._add_flags(flags) + self.seed_applied = True + return @property def mem_gb(self): @@ -275,12 +338,21 @@ def mem_x(self): """Get dict of 'multiplier' (memory multiplier), 'file' (input file) and multiplier mode (spatial * temporal, spatial only or temporal only). Returns ``None`` if already consumed or not set.""" - if hasattr(self, '_mem_x'): - return self._mem_x - return None + return getattr(self, '_mem_x', None) + + def run(self, updatehash=False): + if self.seed is not None: + self._apply_random_seed() + if self.seed_applied: + random_state_logger.info('%s', + '%s # (Atropos constant)' % + self.name if 'atropos' in + self.name else self.name) + return super().run(updatehash) class MapNode(Node, pe.MapNode): + # pylint: disable=empty-docstring __doc__ = _doctest_skiplines( pe.MapNode.__doc__, {" ... 
'functional3.nii']"} @@ -343,7 +415,7 @@ def _handle_just_in_time_exception(self, node): self._local_func_scans) # pylint: disable=no-member else: # TODO: handle S3 files - node._apply_mem_x(UNDEFINED_SIZE) # noqa W0212 + node._apply_mem_x(UNDEFINED_SIZE) # noqa: W0212 def get_data_size(filepath, mode='xyzt'): diff --git a/CPAC/pipeline/nipype_pipeline_engine/plugins/__init__.py b/CPAC/pipeline/nipype_pipeline_engine/plugins/__init__.py index 05f79ec74d..d5b86167b4 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/plugins/__init__.py +++ b/CPAC/pipeline/nipype_pipeline_engine/plugins/__init__.py @@ -1,6 +1,6 @@ """Import Nipype's pipeline plugins and selectively override""" -from nipype.pipeline.plugins import * # noqa F401,F403 +from nipype.pipeline.plugins import * # noqa: F401,F403 # Override LegacyMultiProc -from .legacymultiproc import LegacyMultiProcPlugin # noqa F401 +from .legacymultiproc import LegacyMultiProcPlugin # noqa: F401 # Override MultiProc -from .multiproc import MultiProcPlugin # noqa F401 +from .multiproc import MultiProcPlugin # noqa: F401 diff --git a/CPAC/pipeline/nipype_pipeline_engine/plugins/cpac_nipype_custom.py b/CPAC/pipeline/nipype_pipeline_engine/plugins/cpac_nipype_custom.py index 4710aa0693..6af2020936 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/plugins/cpac_nipype_custom.py +++ b/CPAC/pipeline/nipype_pipeline_engine/plugins/cpac_nipype_custom.py @@ -130,7 +130,7 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): Sends jobs to workers when system resources are available. 
Customized from https://github.com/nipy/nipype/commit/79e2fdfc38759bc0853e4051b99ba4c37587d65f to catch overhead deadlocks - """ # noqa E501 # pylint: disable=line-too-long + """ # noqa: E501 # pylint: disable=line-too-long # pylint: disable=too-many-branches, too-many-statements # Check to see if a job is available (jobs with all dependencies run) # See https://github.com/nipy/nipype/pull/2200#discussion_r141605722 diff --git a/CPAC/pipeline/random_state/__init__.py b/CPAC/pipeline/random_state/__init__.py new file mode 100644 index 0000000000..5956f33416 --- /dev/null +++ b/CPAC/pipeline/random_state/__init__.py @@ -0,0 +1,6 @@ +'''Random state for C-PAC''' +from .seed import random_seed, random_seed_flags, set_up_random_state, \ + set_up_random_state_logger + +__all__ = ['random_seed', 'random_seed_flags', 'set_up_random_state', + 'set_up_random_state_logger'] diff --git a/CPAC/pipeline/random_state/seed.py b/CPAC/pipeline/random_state/seed.py new file mode 100644 index 0000000000..370a9427dd --- /dev/null +++ b/CPAC/pipeline/random_state/seed.py @@ -0,0 +1,190 @@ +'''Functions to set, check, and log random seed''' +import os +import random +from logging import getLogger + +import numpy as np +from nipype.interfaces.ants.registration import Registration +from nipype.interfaces.ants.segmentation import Atropos +from nipype.interfaces.freesurfer.preprocess import ApplyVolTransform, ReconAll +from nipype.interfaces.fsl.maths import MathsCommand +from nipype.interfaces.fsl.utils import ImageMaths + +from CPAC.registration.utils import hardcoded_reg +from CPAC.utils.interfaces.ants import AI +from CPAC.utils.monitoring.custom_logging import set_up_logger + +_seed = {'seed': None} + + +def random_random_seed(): + '''Returns a random postive integer up to 2147483647 + + Parameters + ---------- + None + + Returns + ------- + random_seed : int + + Examples + -------- + >>> 0 < random_random_seed() <= np.iinfo(np.int32).max + True + ''' + return random.randint(1, 
np.iinfo(np.int32).max) + + +def random_seed(): + '''Function to access current random seed + + Parameters + ---------- + None + + Returns + ------- + seed : int or None + ''' + if _seed['seed'] == 'random': + _seed['seed'] = random_random_seed() + return _seed['seed'] + + +def random_seed_flags(): + '''Function to return dictionary of flags with current random seed. + + Parameters + ---------- + None + + Returns + ------- + dict of {'functions': {function: function}, 'interfaces': {list or tuple}} + Developer note: sequence matters here! Only the first match + will be applied! + + In the 'functions' sub-dictionary, each key should be a function + used in a :py:class:`CPAC.utils.interfaces.function.Function`, + and each value should be a function that takes a single + parameter (a function) to apply to the key-function such that, + once the value-function is applied, the random seed is applied + when the key-function is run. + + In the 'interfaces' sub-dictionary, each key should be an + :py:class:`nipype.interfaces.base.core.Interface`, and each + value should be either a list of strings to add to the + interface's flags/args or a tuple of (list of strings to add to, + list of strings to remove from) the interface's flags/args. + + Examples + -------- + >>> list(random_seed_flags().keys()) + ['functions', 'interfaces'] + >>> all([isinstance(random_seed_flags()[key], dict) for key in [ + ... 'functions', 'interfaces']]) + True + >>> rs = set_up_random_state('random') + >>> list(random_seed_flags().keys()) + ['functions', 'interfaces'] + >>> all([isinstance(random_seed_flags()[key], dict) for key in [ + ... 
'functions', 'interfaces']]) + True + ''' + seed = random_seed() + if seed is None: + return {'functions': {}, 'interfaces': {}} + return { + 'functions': { + # function: lambda function to apply to function source + hardcoded_reg: lambda fn_string: fn_string.replace( + 'regcmd = ["antsRegistration"]', + f'regcmd = ["antsRegistration", "--random-seed", \"{seed}\"]' + ) + }, + 'interfaces': { + # interface: [flags to apply] + # OR + # interface: ([flags to apply], [flags to remove]) + # + # ANTs + # NOTE: Atropos gives the option "Initialize internal random number + # generator with a random seed. Otherwise, initialize with a + # constant seed number," so for Atropos nodes, the built-in + # Atropos constant seed is used if a seed is specified for + # C-PAC + AI: _reusable_flags()['ANTs'], + Registration: _reusable_flags()['ANTs'], + Atropos: (['--use-random-seed 0'], + [flag for one in ['', ' 1'] for flag in + [f'--use-random-seed{one}', f'-r{one}']]), + # FreeSurfer + ReconAll: ['-norandomness', f'-rng-seed {seed}'], + ApplyVolTransform: _reusable_flags()['FSL'], + # FSL + ImageMaths: _reusable_flags()['FSL'], + MathsCommand: _reusable_flags()['FSL'] + } + } + + +def _reusable_flags(): + seed = random_seed() + return { + 'ANTs': [f'--random-seed {seed}'], + 'FSL': [f'-seed {seed}'] + } + + +def set_up_random_state(seed): + '''Set global random seed + + Parameters + ---------- + seed : int, 'random', or None + + Returns + ------- + seed: int or None + + Examples + -------- + >>> isinstance(set_up_random_state('random'), int) + True + >>> set_up_random_state('rando') + Traceback (most recent call last): + ValueError: Valid random seeds are positive integers up to 2147483647, "random", or None, not rando + >>> set_up_random_state(100) + 100 + >>> set_up_random_state(0) + Traceback (most recent call last): + ValueError: Valid random seeds are positive integers up to 2147483647, "random", or None, not 0 + >>> set_up_random_state(None) + + ''' # noqa: E501 # pylint: 
disable=line-too-long + if seed is not None: + if seed == 'random': + seed = random_random_seed() + if (seed != 'random' and not ( + isinstance(seed, int) and + (0 < int(seed) <= np.iinfo(np.int32).max) + )): + raise ValueError('Valid random seeds are positive integers up to ' + f'2147483647, "random", or None, not {seed}') + try: + _seed['seed'] = int(seed) + except (TypeError, ValueError): + _seed['seed'] = seed + return random_seed() + + +def set_up_random_state_logger(log_dir): + '''Prepare C-PAC for logging random seed use. + + Parameters + ---------- + log_dir : str + ''' + set_up_logger('random', level='info', log_dir=log_dir) + getLogger('random').info('seed: %s', random_seed()) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 517d022516..5cbfced274 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1,7 +1,12 @@ +'''Validation schema for C-PAC pipeline configurations''' +# pylint: disable=too-many-lines from itertools import chain, permutations + +import numpy as np from voluptuous import All, ALLOW_EXTRA, Any, In, Length, Match, Optional, \ Range, Required, Schema from voluptuous.validators import ExactSequence, Maybe + from CPAC import __version__ from CPAC.utils.utils import delete_nested_value, lookup_nested_value, \ set_nested_value @@ -168,7 +173,7 @@ def permutation_message(key, options): Returns ------- - msg: str''' # noqa E501 + msg: str''' # noqa: E501 return f''' \'{key}\' takes a dictionary with paths to region-of-interest (ROI) @@ -354,7 +359,10 @@ def _changes_1_8_0_to_1_8_1(config_dict): 'write_func_outputs': bool, 'write_debugging_outputs': bool, 'output_tree': str, - 'generate_quality_control_images': bool, + 'quality_control': { + 'generate_quality_control_images': bool, + 'generate_xcpqc_files': bool, + }, }, 'working_directory': { 'path': str, @@ -381,7 +389,10 @@ def _changes_1_8_0_to_1_8_1(config_dict): 'max_cores_per_participant': int, 'num_ants_threads': int, 'num_OMP_threads': int, - 
'num_participants_at_once': int + 'num_participants_at_once': int, + 'random_seed': Maybe(Any( + 'random', + All(int, Range(min=1, max=np.iinfo(np.int32).max)))) }, 'Amazon-AWS': { 'aws_output_bucket_credentials': Maybe(str), diff --git a/CPAC/pipeline/test/test_engine.py b/CPAC/pipeline/test/test_engine.py index 78ca8f6623..2f13bd215e 100644 --- a/CPAC/pipeline/test/test_engine.py +++ b/CPAC/pipeline/test/test_engine.py @@ -28,10 +28,10 @@ def test_ingress_func_raw_data(pipe_config, bids_dir, test_dir): rpool = ResourcePool(name=unique_id, cfg=cfg) - wf, rpool, diff, blip, fmap_rp_list = ingress_raw_func_data(wf, rpool, cfg, - sub_data_dct, - unique_id, - part_id, ses_id) + if 'func' in sub_data_dct: + wf, rpool, diff, blip, fmap_rp_list = \ + ingress_raw_func_data(wf, rpool, cfg, sub_data_dct, unique_id, + part_id, ses_id) rpool.gather_pipes(wf, cfg, all=True) diff --git a/CPAC/qc/pipeline.py b/CPAC/qc/pipeline.py index 15cc2a2e78..ad8c87cb93 100644 --- a/CPAC/qc/pipeline.py +++ b/CPAC/qc/pipeline.py @@ -34,7 +34,7 @@ def qc_snr_plot(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "qc_snr_plot", - "config": ["pipeline_setup", "output_directory"], + "config": ["pipeline_setup", "output_directory", "quality_control"], "switch": ["generate_quality_control_images"], "option_key": "None", "option_val": "None", @@ -86,7 +86,7 @@ def qc_snr_plot(wf, cfg, strat_pool, pipe_num, opt=None): def qc_motion_plot(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "qc_motion_plot", - "config": ["pipeline_setup", "output_directory"], + "config": ["pipeline_setup", "output_directory", "quality_control"], "switch": ["generate_quality_control_images"], "option_key": "None", "option_val": "None", @@ -114,7 +114,7 @@ def qc_motion_plot(wf, cfg, strat_pool, pipe_num, opt=None): def qc_fd_plot(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "qc_fd_plot", - "config": ["pipeline_setup", "output_directory"], + "config": ["pipeline_setup", "output_directory", 
"quality_control"], "switch": ["generate_quality_control_images"], "option_key": "None", "option_val": "None", @@ -138,7 +138,7 @@ def qc_fd_plot(wf, cfg, strat_pool, pipe_num, opt=None): def qc_brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "qc_brain_extraction", - "config": ["pipeline_setup", "output_directory"], + "config": ["pipeline_setup", "output_directory", "quality_control"], "switch": ["generate_quality_control_images"], "option_key": "None", "option_val": "None", @@ -171,7 +171,7 @@ def qc_brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None): def qc_T1w_standard(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "qc_brain_extraction", - "config": ["pipeline_setup", "output_directory"], + "config": ["pipeline_setup", "output_directory", "quality_control"], "switch": ["generate_quality_control_images"], "option_key": "None", "option_val": "None", @@ -214,7 +214,7 @@ def qc_T1w_standard(wf, cfg, strat_pool, pipe_num, opt=None): def qc_segmentation(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "qc_segmentation", - "config": ["pipeline_setup", "output_directory"], + "config": ["pipeline_setup", "output_directory", "quality_control"], "switch": ["generate_quality_control_images"], "option_key": "None", "option_val": "None", @@ -259,7 +259,7 @@ def qc_segmentation(wf, cfg, strat_pool, pipe_num, opt=None): def qc_epi_segmentation(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "qc_epi_segmentation", - "config": ["pipeline_setup", "output_directory"], + "config": ["pipeline_setup", "output_directory", "quality_control"], "switch": ["generate_quality_control_images"], "option_key": "None", "option_val": "None", @@ -304,7 +304,7 @@ def qc_epi_segmentation(wf, cfg, strat_pool, pipe_num, opt=None): def qc_carpet_plot(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "qc_carpet_plot", - "config": ["pipeline_setup", "output_directory"], + "config": ["pipeline_setup", "output_directory", "quality_control"], 
"switch": ["generate_quality_control_images"], "option_key": "None", "option_val": "None", @@ -356,7 +356,7 @@ def qc_carpet_plot(wf, cfg, strat_pool, pipe_num, opt=None): def qc_coregistration(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "qc_coregistration", - "config": ["pipeline_setup", "output_directory"], + "config": ["pipeline_setup", "output_directory", "quality_control"], "switch": ["generate_quality_control_images"], "option_key": "None", "option_val": "None", @@ -399,7 +399,7 @@ def qc_bold_registration(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "qc_bold_registration", "config": "None", - "switch": [["pipeline_setup", "output_directory", + "switch": [["pipeline_setup", "output_directory", "quality_control", "generate_quality_control_images"], ["registration_workflows", "anatomical_registration", "run"]], "option_key": "None", @@ -444,7 +444,7 @@ def qc_bold_EPI_registration(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "qc_bold_EPI_registration", "config": "None", - "switch": [["pipeline_setup", "output_directory", + "switch": [["pipeline_setup", "output_directory", "quality_control", "generate_quality_control_images"], ["registration_workflows", "functional_registration", "func_registration_to_template", "run_EPI"]], diff --git a/CPAC/qc/xcp.py b/CPAC/qc/xcp.py new file mode 100644 index 0000000000..cda06cd2c5 --- /dev/null +++ b/CPAC/qc/xcp.py @@ -0,0 +1,559 @@ +"""`Generate eXtensible Connectivity Pipeline-style quality control files `_ + +Columns +------- +sub : str + subject label :cite:`cite-BIDS21` +ses : str + session label :cite:`cite-BIDS21` +task : str + task label :cite:`cite-BIDS21` +run : int + run index :cite:`cite-BIDS21` +desc : str + description :cite:`cite-BIDS21` +space : str + space label :cite:`cite-BIDS21` +meanFD : float + mean Jenkinson framewise displacement :cite:`cite-Jenk02` :func:`CPAC.generate_motion_statistics.calculate_FD_J` +relMeansRMSMotion : float + "mean value of RMS motion" 
:cite:`cite-Ciri19` +relMaxRMSMotion : float + "maximum vaue of RMS motion" :cite:`cite-Ciri19` +meanDVInit : float + "mean DVARS" :cite:`cite-Ciri19` +meanDVFinal : float + "mean DVARS" :cite:`cite-Ciri19` +nVolCensored : int + "total number of volume(s) censored :cite:`cite-Ciri19` +nVolsRemoved : int + number of volumes in derivative minus number of volumes in original + functional scan +motionDVCorrInit : float + "correlation of RMS and DVARS before regresion" :cite:`cite-Ciri19` +motionDVCorrFinal : float + "correlation of RMS and DVARS after regresion" :cite:`cite-Ciri19` +coregDice : float + "Coregsitration of Functional and T1w:[…] Dice index" :cite:`cite-Ciri19` :cite:`cite-Penn19` +coregJaccard : float + "Coregsitration of Functional and T1w:[…] Jaccard index" :cite:`cite-Ciri19` :cite:`cite-Penn19` +coregCrossCorr : float + "Coregsitration of Functional and T1w:[…] cross correlation" :cite:`cite-Ciri19` :cite:`cite-Penn19` +coregCoverag : float + "Coregsitration of Functional and T1w:[…] Coverage index" :cite:`cite-Ciri19` :cite:`cite-Penn19` +normDice : float + "Normalization of T1w/Functional to Template:[…] Dice index" :cite:`cite-Ciri19` :cite:`cite-Penn19` +normJaccard : float + "Normalization of T1w/Functional to Template:[…] Jaccard index" :cite:`cite-Ciri19` :cite:`cite-Penn19` +normCrossCorr : float + "Normalization of T1w/Functional to Template:[…] cross correlation" :cite:`cite-Ciri19` :cite:`cite-Penn19` +normCoverage : float + "Normalization of T1w/Functional to Template:[…] Coverage index" :cite:`cite-Ciri19` :cite:`cite-Penn19` +""" # noqa: E501 # pylint: disable=line-too-long +import os +import re +from io import BufferedReader + +import nibabel as nb +import numpy as np +import pandas as pd +from CPAC.generate_motion_statistics.generate_motion_statistics import \ + motion_power_statistics +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces.function import Function +from CPAC.utils.utils import 
check_prov_for_motion_tool + +motion_params = ['movement-parameters', 'dvars', + 'framewise-displacement-jenkinson'] + + +def calculate_overlap(image_pair): + ''' + Function to calculate Dice, Jaccard, CrossCorr and Coverage:cite:`cite-Penn19` from a + pair of arrays + + Parameters + ---------- + image_pair : 2-tuple + array of which to calculate overlaps metrics + + Returns + ------- + coefficents : dict + coeffiecients['dice'] : float + Dice index + + coeffiecients['jaccard'] : float + Jaccard index + + coeffiecients['cross_corr'] : float + cross-correlation + + coeffiecients['coverage'] : float + coverage index + + Examples + -------- + >>> import numpy as np + >>> a1 = np.array([0, 0, 0, 1, 1, 1]) + >>> a2 = np.array([0, 0, 1, 1, 0, 1]) + >>> tuple(calculate_overlap((a1, a2)).values()) + (0.6666666666666666, 0.5, 0.33333333333333326, 0.6666666666666666) + >>> tuple(calculate_overlap((a1, a1)).values()) + (1.0, 1.0, 0.9999999999999998, 1.0) + >>> tuple(calculate_overlap((a2, a2)).values()) + (1.0, 1.0, 0.9999999999999998, 1.0) + ''' # noqa: E501 # pylint: disable=line-too-long + if len(image_pair) != 2: + raise IndexError('`calculate_overlap` requires 2 images, but ' + f'{len(image_pair)} were provided') + if len(image_pair[0]) != len(image_pair[1]): + image_pair = _repeat_shorter(image_pair) + image_pair = tuple(image.astype(bool) for image in image_pair) + intersect = image_pair[0] * image_pair[1] + vols = [np.sum(image) for image in image_pair] + vol_intersect = np.sum(intersect) + vol_sum = sum(vols) + vol_union = vol_sum - vol_intersect + coefficients = { + 'dice': 2 * vol_intersect / vol_sum, + 'jaccard': vol_intersect / vol_union, + 'cross_corr': np.corrcoef(image_pair)[0, 1], + 'coverage': vol_intersect / min(vols) + } + for name, coefficient in coefficients.items(): + if not 1 >= abs(coefficient) >= 0 and not np.isnan(coefficient): + raise ValueError(f'Valid range for {name} is [0, 1] but value ' + f'{coefficient} was calculated.') + return coefficients 
+ + +def _connect_motion(wf, strat_pool, qc_file, brain_mask_key, final, pipe_num): + # pylint: disable=invalid-name, too-many-arguments + try: + nodes = {'censor-indices': strat_pool.node_data('censor-indices')} + wf.connect(nodes['censor-indices'].node, nodes['censor-indices'].out, + qc_file, 'censor_indices') + except LookupError: + nodes = {} + qc_file.inputs.censor_indices = [] + motion_prov = strat_pool.get_cpac_provenance('movement-parameters') + motion_correct_tool = check_prov_for_motion_tool(motion_prov) + gen_motion_stats = motion_power_statistics('motion_stats-after_' + f'{pipe_num}', + motion_correct_tool) + nodes = { + **nodes, + **{node_data: strat_pool.node_data(node_data) for node_data in [ + 'subject', 'scan', brain_mask_key, 'max-displacement', + *motion_params]}} + if motion_correct_tool == '3dvolreg' and strat_pool.check_rpool( + 'coordinate-transformation'): + nodes['coordinate-transformation'] = strat_pool.node_data( + 'coordinate-transformation') + wf.connect(nodes['coordinate-transformation'].node, + nodes['coordinate-transformation'].out, + gen_motion_stats, 'inputspec.transformations') + elif motion_correct_tool == 'mcflirt' and strat_pool.check_rpool( + 'rels-displacement'): + nodes['rels-displacement'] = strat_pool.node_data('rels-displacement') + wf.connect(nodes['rels-displacement'].node, + nodes['rels-displacement'].out, + gen_motion_stats, 'inputspec.rels_displacement') + wf.connect([ + (final['func'].node, gen_motion_stats, [ + (final['func'].out, 'inputspec.motion_correct')]), + (nodes['subject'].node, gen_motion_stats, [ + (nodes['subject'].out, 'inputspec.subject_id')]), + (nodes['scan'].node, gen_motion_stats, [ + (nodes['scan'].out, 'inputspec.scan_id')]), + (nodes['movement-parameters'].node, gen_motion_stats, [ + (nodes['movement-parameters'].out, + 'inputspec.movement_parameters')]), + (nodes['max-displacement'].node, gen_motion_stats, [ + (nodes['max-displacement'].out, + 'inputspec.max_displacement')]), + 
(nodes[brain_mask_key].node, gen_motion_stats, [ + (nodes[brain_mask_key].out, 'inputspec.mask')]), + (gen_motion_stats, qc_file, [ + ('outputspec.DVARS_1D', 'dvars_after'), + ('outputspec.FDJ_1D', 'fdj_after')]), + *[(nodes[node].node, qc_file, [ + (nodes[node].out, node.replace('-', '_')) + ]) for node in motion_params]]) + return wf + + +def _connect_xcp(wf, strat_pool, qc_file, original, final, t1w_bold, + brain_mask_key, output_key, pipe_num): + # pylint: disable=invalid-name, too-many-arguments + if ( + strat_pool.check_rpool('movement-parameters') and + strat_pool.check_rpool(brain_mask_key) + ): + wf = _connect_motion(wf, strat_pool, qc_file, brain_mask_key, final, + pipe_num) + else: + qc_file.inputs.censor_indices = [] + for key in [*motion_params, 'movement_parameters', + 'framewise_displacement_jenkinson', 'dvars_after', + 'fdj_after']: + setattr(qc_file.inputs, key, None) + wf.connect([ + (original['anat'].node, qc_file, [ + (original['anat'].out, 'original_anat')]), + (original['func'].node, qc_file, [ + (original['func'].out, 'original_func')]), + (final['anat'].node, qc_file, [(final['anat'].out, 'final_anat')]), + (final['func'].node, qc_file, [(final['func'].out, 'final_func')]), + (t1w_bold.node, qc_file, [(t1w_bold.out, 'space_T1w_bold')])]) + outputs = {output_key: (qc_file, 'qc_file')} + return wf, outputs + + +def dvcorr(dvars, fdj): + """Function to correlate DVARS and FD-J""" + dvars = np.loadtxt(dvars) + fdj = np.loadtxt(fdj) + if len(dvars) != len(fdj) - 1: + raise ValueError( + 'len(DVARS) should be 1 less than len(FDJ), but their respective ' + f'lengths are {len(dvars)} and {len(fdj)}.' 
+ ) + return np.corrcoef(dvars, fdj[1:])[0, 1] + + +def generate_xcp_qc(space, desc, original_anat, + final_anat, original_func, final_func, space_T1w_bold, + movement_parameters, dvars, censor_indices, + framewise_displacement_jenkinson, dvars_after, fdj_after, + template=None): + # pylint: disable=too-many-arguments, too-many-locals, invalid-name + """Function to generate an RBC-style QC CSV + + Parameters + ---------- + space : str + 'native' or 'template' + + desc : str + description string + + original_anat : str + path to original 'T1w' image + + final_anat : str + path to 'desc-preproc_T1w' image + + original_func : str + path to original 'bold' image + + final_bold : str + path to 'desc-preproc_bold' image + + space_T1w_bold : str + path to 'space-T1w_desc-mean_bold' image + + movement_parameters: str + path to movement parameters + + dvars : str + path to DVARS before motion correction + + censor_indices : list + list of indices of censored volumes + + framewise_displacement_jenkinson : str + path to framewise displacement (Jenkinson) before motion correction + + dvars_after : str + path to DVARS on final 'bold' image + + fdj_after : str + path to framewise displacement (Jenkinson) on final 'bold' image + + template : str + path to template + + Returns + ------- + str + path to desc-xcp_quality TSV + """ + columns = ( + 'sub,ses,task,run,desc,space,meanFD,relMeansRMSMotion,' + 'relMaxRMSMotion,meanDVInit,meanDVFinal,nVolCensored,nVolsRemoved,' + 'motionDVCorrInit,motionDVCorrFinal,coregDice,coregJaccard,' + 'coregCrossCorr,coregCoverage,normDice,normJaccard,normCrossCorr,' + 'normCoverage'.split(',') + ) + + images = { + 'original_anat': nb.load(original_anat), + 'original_func': nb.load(original_func), + 'final_anat': nb.load(final_anat), + 'final_func': nb.load(final_func), + 'space-T1w_bold': nb.load(space_T1w_bold) + } + if template is not None: + images['template'] = nb.load(template) + + # `sub` through `desc` + from_bids = { + 
**strings_from_bids(original_func), + 'space': space, + 'desc': desc + } + + # `nVolCensored` & `nVolsRemoved` + n_vols_censored = len( + censor_indices) if censor_indices is not None else 'unknown' + shape_params = {'nVolCensored': n_vols_censored, + 'nVolsRemoved': images['final_func'].shape[3] - + images['original_func'].shape[3]} + + if isinstance(final_func, BufferedReader): + final_func = final_func.name + qc_filepath = os.path.join(os.getcwd(), 'xcpqc.tsv') + + desc_span = re.search(r'_desc-.*_', final_func) + if desc_span: + desc_span = desc_span.span() + final_func = '_'.join([ + final_func[:desc_span[0]], + final_func[desc_span[1]:] + ]) + del desc_span + + if dvars: + # `meanFD (Jenkinson)` + power_params = {'meanFD': np.mean(np.loadtxt( + framewise_displacement_jenkinson))} + + # `relMeansRMSMotion` & `relMaxRMSMotion` + mot = np.genfromtxt(movement_parameters).T + # Relative RMS of translation + rms = np.sqrt(mot[3] ** 2 + mot[4] ** 2 + mot[5] ** 2) + rms_params = { + 'relMeansRMSMotion': [np.mean(rms)], + 'relMaxRMSMotion': [np.max(rms)] + } + + # `meanDVInit` & `meanDVFinal` + meanDV = {'meanDVInit': np.mean(np.loadtxt(dvars))} + try: + meanDV['motionDVCorrInit'] = dvcorr( + dvars, framewise_displacement_jenkinson) + except ValueError as value_error: + meanDV['motionDVCorrInit'] = f'ValueError({str(value_error)})' + if dvars_after: + if not fdj_after: + fdj_after = framewise_displacement_jenkinson + meanDV['meanDVFinal'] = np.mean(np.loadtxt(dvars_after)) + try: + meanDV['motionDVCorrFinal'] = dvcorr(dvars_after, fdj_after) + except ValueError as value_error: + meanDV['motionDVCorrFinal'] = f'ValueError({str(value_error)})' + else: + meanDV = _na_dict([f'{dv}DV{"Corr" + ts if dv == "motion" else ts}' + for dv in ['mean', 'motion'] + for ts in ['Init', 'Final']]) + power_params = {'meanFD': 'n/a'} + rms_params = _na_dict(['relMeansRMSMotion', 'relMaxRMSMotion']) + + # Overlap + overlap_images = {variable: image.get_fdata().ravel() for + variable, 
image in images.items() if + variable in ['space-T1w_bold', + 'original_anat', 'template']} + overlap_params = {} + (overlap_params['coregDice'], overlap_params['coregJaccard'], + overlap_params['coregCrossCorr'], overlap_params['coregCoverage'] + ) = [[item] for item in calculate_overlap( + (overlap_images['space-T1w_bold'], overlap_images['original_anat']) + ).values()] + if space == 'native': + for key in ['normDice', 'normJaccard', 'normCrossCorr', + 'normCoverage']: + overlap_params[key] = ['N/A: native space'] + elif template is not None: + (overlap_params['normDice'], overlap_params['normJaccard'], + overlap_params['normCrossCorr'], overlap_params['normCoverage'] + ) = [[item] for item in calculate_overlap( + (images['final_func'].get_fdata().ravel(), + overlap_images['template'])).values()] + else: + overlap_params = _na_dict(['normDice', 'normJaccard', 'normCrossCorr', + 'normCoverage']) + + qc_dict = { + **from_bids, + **power_params, + **rms_params, + **shape_params, + **overlap_params, + **meanDV + } + df = pd.DataFrame(qc_dict, columns=columns) + df.to_csv(qc_filepath, sep='\t', index=False) + return qc_filepath + + +def _na_dict(keys): + return {key: 'n/a' for key in keys} + + +def _prep_qc_xcp(strat_pool, pipe_num, space): + qc_file = pe.Node(Function(input_names=['subject', 'scan', + 'space', 'desc', 'template', + 'original_func', 'final_func', + 'original_anat', 'final_anat', + 'space_T1w_bold', + 'movement_parameters', + 'censor_indices', 'dvars', + 'framewise_displacement_jenkinson', + 'dvars_after', 'fdj_after'], + output_names=['qc_file'], + function=generate_xcp_qc, + as_module=True), + name=f'xcpqc-{space}_{pipe_num}') + qc_file.inputs.desc = 'preproc' + qc_file.inputs.space = space + original = {} + final = {} + original['anat'] = strat_pool.node_data('T1w') + original['func'] = strat_pool.node_data('bold') + final['anat'] = strat_pool.node_data('desc-preproc_T1w') + t1w_bold = strat_pool.node_data('space-T1w_desc-mean_bold') + return 
qc_file, original, final, t1w_bold + + +def qc_xcp_native(wf, cfg, strat_pool, pipe_num, opt=None): + # pylint: disable=invalid-name, unused-argument + """ + {'name': 'qc_xcp_native', + 'config': ['pipeline_setup', 'output_directory', 'quality_control'], + 'switch': ['generate_xcpqc_files'], + 'option_key': 'None', + 'option_val': 'None', + 'inputs': [('bold', 'subject', 'scan', 'T1w', 'max-displacement', 'dvars', + 'censor-indices', 'desc-preproc_bold', + 'desc-preproc_T1w', 'space-T1w_desc-mean_bold', + 'space-bold_desc-brain_mask', 'movement-parameters', + 'framewise-displacement-jenkinson', 'rels-displacement', + 'coordinate-transformation')], + 'outputs': ['desc-xcp_quality']} + """ + space = 'native' + qc_file, original, final, t1w_bold = _prep_qc_xcp(strat_pool, pipe_num, + space) + final['func'] = strat_pool.node_data('desc-preproc_bold') + return _connect_xcp(wf, strat_pool, qc_file, original, final, t1w_bold, + 'space-bold_desc-brain_mask', 'desc-xcp_quality', + pipe_num) + + +def qc_xcp_skullstripped(wf, cfg, strat_pool, pipe_num, opt=None): + # pylint: disable=invalid-name, unused-argument + r""" + Same as ``qc_xcp_native`` except no motion inputs. 
+ Node Block: + {'name': 'qc_xcp_skullstripped', + 'config': ['pipeline_setup', 'output_directory', 'quality_control'], + 'switch': ['generate_xcpqc_files'], + 'option_key': 'None', + 'option_val': 'None', + 'inputs': [('bold', 'subject', 'scan', 'T1w', 'desc-preproc_bold', + 'desc-preproc_T1w', 'space-T1w_desc-mean_bold', + 'space-bold_desc-brain_mask')], + 'outputs': ['desc-xcp_quality']} + """ + # If strat has dvars, it should be captured by 'qc_xcp_native' + if 'dvars' in strat_pool.get('desc-preproc_bold').get( + 'json', {}).get('Sources', {}): + return wf, {} + return qc_xcp_native(wf, cfg, strat_pool, pipe_num, opt) + + +def qc_xcp_template(wf, cfg, strat_pool, pipe_num, opt=None): + # pylint: disable=invalid-name, unused-argument + """ + {'name': 'qc_xcp_template', + 'config': ['pipeline_setup', 'output_directory', 'quality_control'], + 'switch': ['generate_xcpqc_files'], + 'option_key': 'None', + 'option_val': 'None', + 'inputs': [('bold', 'subject', 'scan', 'T1w', + 'T1w-brain-template-funcreg', 'space-T1w_desc-mean_bold', + 'space-template_desc-preproc_bold', 'desc-preproc_T1w', + 'space-template_desc-bold_mask')], + 'outputs': ['space-template_desc-xcp_quality']} + """ + space = 'template' + qc_file, original, final, t1w_bold = _prep_qc_xcp(strat_pool, pipe_num, + space) + final['func'] = strat_pool.node_data('space-template_desc-preproc_bold') + template = strat_pool.node_data('T1w-brain-template-funcreg') + wf.connect(template.node, template.out, qc_file, 'template') + return _connect_xcp(wf, strat_pool, qc_file, original, final, t1w_bold, + 'space-template_desc-bold_mask', + 'space-template_desc-xcp_quality', pipe_num) + + +def _repeat_shorter(images): + ''' + Parameters + ---------- + images : 2-tuple + + Returns + ------- + images : 2-tuple + + Examples + -------- + >>> _repeat_shorter((np.array([1, 2, 3]), np.array([1]))) + (array([1, 2, 3]), array([1, 1, 1])) + >>> _repeat_shorter((np.array([0, 0]), (np.array([1, 1, 2, 2, 3, 4])))) + (array([0, 
0, 0, 0, 0, 0]), array([1, 1, 2, 2, 3, 4])) + ''' + lens = (len(images[0]), len(images[1])) + if lens[1] > lens[0] and lens[1] % lens[0] == 0: + return (np.tile(images[0], lens[1] // lens[0]), images[1]) + if lens[0] > lens[1] and lens[0] % lens[1] == 0: + return (images[0], np.tile(images[1], lens[0] // lens[1])) + raise ValueError('operands could not be broadcast together with shapes ' + f'({lens[0]},) ({lens[1]},)') + + +def strings_from_bids(final_func): + """ + Function to gather BIDS entities into a dictionary + + Parameters + ---------- + final_func : str + + Returns + ------- + dict + + Examples + -------- + >>> fake_path = ( + ... '/path/to/sub-fakeSubject_ses-fakeSession_task-peer_run-3_' + ... 'atlas-Schaefer400_space-MNI152NLin6_res-1x1x1_' + ... 'desc-NilearnPearson_connectome.tsv') + >>> strings_from_bids(fake_path)['desc'] + 'NilearnPearson' + >>> strings_from_bids(fake_path)['space'] + 'MNI152NLin6' + """ + from_bids = dict( + tuple(entity.split('-', 1)) if '-' in entity else + ('suffix', entity) for entity in final_func.split('/')[-1].split('_') + ) + from_bids = {k: from_bids[k] for k in from_bids} + if 'space' not in from_bids: + from_bids['space'] = 'native' + return from_bids diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 1f7cb37661..e24a1bc211 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -119,10 +119,10 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, mem_gb=2.5) #chunk.inputs.n_chunks = int(num_cpus) - + # 10-TR sized chunks chunk.inputs.chunk_size = 10 - + wf.connect(inputNode, 'input_image', chunk, 'func_file') split_imports = ['import os', 'import subprocess'] @@ -198,7 +198,7 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, mem_gb=2.5) #chunk.inputs.n_chunks = int(num_cpus) - + # 10-TR sized chunks chunk.inputs.chunk_size = 10 @@ -278,6 +278,25 @@ def transform_derivative(wf_name, label, 
reg_tool, num_cpus, num_ants_cores, return wf +def convert_pedir(pedir): + '''FSL Flirt requires pedir input encoded as an int''' + conv_dct = {'x': 1, 'y': 2, 'z': 3, 'x-': -1, 'y-': -2, 'z-': -3, + 'i': 1, 'j': 2, 'k': 3, 'i-': -1, 'j-': -2, 'k-': -3, + '-x': -1, '-i': -1, '-y': -2, + '-j': -2, '-z': -3, '-k': -3} + + if isinstance(pedir, bytes): + pedir = pedir.decode() + if not isinstance(pedir, str): + raise Exception("\n\nPhase-encoding direction must be a " + "string value.\n\nValue: {0}" + "\n\n".format(pedir)) + if pedir not in conv_dct.keys(): + raise Exception("\n\nInvalid phase-encoding direction " + "entered: {0}\n\n".format(pedir)) + return conv_dct[pedir] + + def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): linear_register = pe.Workflow(name=name) @@ -538,7 +557,7 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): brain_warp = pe.Node(interface=fsl.ApplyWarp(), name='brain_warp') brain_warp.inputs.interp = 'nn' - brain_warp.inputs.relwarp = True + brain_warp.inputs.relwarp = True nonlinear_register.connect(inputspec, 'input_brain', brain_warp, 'in_file') @@ -552,7 +571,7 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): head_warp = pe.Node(interface=fsl.ApplyWarp(), name='head_warp') head_warp.inputs.interp = 'spline' - head_warp.inputs.relwarp = True + head_warp.inputs.relwarp = True nonlinear_register.connect(inputspec, 'input_brain', head_warp, 'in_file') @@ -566,7 +585,7 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): mask_warp = pe.Node(interface=fsl.ApplyWarp(), name='mask_warp') mask_warp.inputs.interp = 'nn' - mask_warp.inputs.relwarp = True + mask_warp.inputs.relwarp = True nonlinear_register.connect(inputspec, 'input_brain', mask_warp, 'in_file') @@ -580,7 +599,7 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): biasfield_warp = pe.Node(interface=fsl.ApplyWarp(), name='biasfield_warp') 
biasfield_warp.inputs.interp = 'spline' - biasfield_warp.inputs.relwarp = True + biasfield_warp.inputs.relwarp = True nonlinear_register.connect(inputspec, 'input_brain', biasfield_warp, 'in_file') @@ -683,26 +702,6 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, if config.registration_workflows['functional_registration']['coregistration']['arguments'] is not None: linear_reg.inputs.args = config.registration_workflows['functional_registration']['coregistration']['arguments'] - # if fieldmap_distortion: - - def convert_pedir(pedir): - # FSL Flirt requires pedir input encoded as an int - conv_dct = {'x': 1, 'y': 2, 'z': 3, 'x-': -1, 'y-': -2, 'z-': -3, - 'i': 1, 'j': 2, 'k': 3, 'i-': -1, 'j-': -2, 'k-': -3, - '-x': -1, '-i': -1, '-y': -2, - '-j': -2, '-z': -3, '-k': -3} - - if isinstance(pedir, bytes): - pedir = pedir.decode() - if not isinstance(pedir, str): - raise Exception("\n\nPhase-encoding direction must be a " - "string value.\n\nValue: {0}" - "\n\n".format(pedir)) - if pedir not in conv_dct.keys(): - raise Exception("\n\nInvalid phase-encoding direction " - "entered: {0}\n\n".format(pedir)) - return conv_dct[pedir] - if phase_diff_distcor: register_func_to_anat.connect( inputNode_pedir, ('pedir', convert_pedir), @@ -779,7 +778,7 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ 'T2_brain']), name='inputspec') - outputspec = pe.Node(util.IdentityInterface(fields=['func_to_anat_linear_xfm_nobbreg', + outputspec = pe.Node(util.IdentityInterface(fields=['func_to_anat_linear_xfm_nobbreg', 'func_to_anat_linear_warp_nobbreg', 'anat_func_nobbreg']), name='outputspec') @@ -797,11 +796,11 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ register_func_to_anat_use_T2.connect(inputspec, 'func', linear_reg_func_to_t2, 'in_file') register_func_to_anat_use_T2.connect(inputspec, 'T2_head', linear_reg_func_to_t2, 'reference') - + # ${FSLDIR}/bin/convert_xfm -omat 
"$fMRIFolder"/T2w2Scout.mat -inverse "$fMRIFolder"/Scout2T2w.mat invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm') invt.inputs.invert_xfm = True - + register_func_to_anat_use_T2.connect(linear_reg_func_to_t2, 'out_matrix_file', invt, 'in_file') # ${FSLDIR}/bin/applywarp --interp=nn -i ${T1wFolder}/${T2wRestoreImageBrain} -r ${fMRIFolder}/${ScoutName}_gdc --premat="$fMRIFolder"/T2w2Scout.mat -o ${fMRIFolder}/Scout_brain_mask.nii.gz @@ -812,14 +811,14 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ register_func_to_anat_use_T2.connect(inputspec, 'T2_brain', anat_to_func, 'in_file') register_func_to_anat_use_T2.connect(inputspec, 'func', anat_to_func, 'ref_file') register_func_to_anat_use_T2.connect(invt, 'out_file', anat_to_func, 'premat') - + # ${FSLDIR}/bin/fslmaths ${fMRIFolder}/Scout_brain_mask.nii.gz -bin ${fMRIFolder}/Scout_brain_mask.nii.gz func_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), name=f'func_brain_mask') func_brain_mask.inputs.args = '-bin' register_func_to_anat_use_T2.connect(anat_to_func, 'out_file', func_brain_mask, 'in_file') - + # ${FSLDIR}/bin/fslmaths ${fMRIFolder}/${ScoutName}_gdc -mas ${fMRIFolder}/Scout_brain_mask.nii.gz ${fMRIFolder}/Scout_brain_dc.nii.gz func_brain = pe.Node(interface=fsl.MultiImageMaths(), name='func_brain') @@ -828,7 +827,7 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ register_func_to_anat_use_T2.connect(inputspec, 'func', func_brain, 'in_file') register_func_to_anat_use_T2.connect(func_brain_mask, 'out_file', func_brain, 'operand_files') - # ## re-registering the maked brain to the T1 brain: + # ## re-registering the maked brain to the T1 brain: # ${FSLDIR}/bin/flirt -interp spline -dof 6 -in ${fMRIFolder}/Scout_brain_dc.nii.gz -ref ${T1wFolder}/${T1wRestoreImageBrain} -omat "$fMRIFolder"/${ScoutName}_gdc2T1w_init.mat -out ${fMRIFolder}/${ScoutName}_gdc2T1w_brain_init -searchrx -30 30 -searchry -30 30 -searchrz -30 30 
-cost mutualinfo linear_reg_func_to_t1 = pe.Node(interface=fsl.FLIRT(), name='linear_reg_func_to_t1') @@ -842,16 +841,16 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ register_func_to_anat_use_T2.connect(func_brain, 'out_file', linear_reg_func_to_t1, 'in_file') register_func_to_anat_use_T2.connect(inputspec, 'T1_brain', linear_reg_func_to_t1, 'reference') - - # #taking out warpfield as it is not being made without a fieldmap. + + # #taking out warpfield as it is not being made without a fieldmap. # ${FSLDIR}/bin/convertwarp --relout --rel -r ${T1wFolder}/${T2wRestoreImage} --postmat=${fMRIFolder}/${ScoutName}_gdc2T1w_init.mat -o ${fMRIFolder}/${ScoutName}_gdc2T1w_init_warp convert_warp = pe.Node(interface=fsl.ConvertWarp(), name='convert_warp') - + convert_warp.inputs.out_relwarp = True convert_warp.inputs.relwarp = True register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_matrix_file', convert_warp, 'postmat') - + register_func_to_anat_use_T2.connect(inputspec, 'T2_head', convert_warp, 'reference') @@ -973,24 +972,6 @@ def bbreg_args(bbreg_target): inputspec, 'linear_reg_matrix', bbreg_func_to_anat, 'in_matrix_file') - def convert_pedir(pedir): - # FSL Flirt requires pedir input encoded as an int - conv_dct = {'x': 1, 'y': 2, 'z': 3, 'x-': -1, 'y-': -2, 'z-': -3, - 'i': 1, 'j': 2, 'k': 3, 'i-': -1, 'j-': -2, 'k-': -3, - '-x': -1, '-i': -1, '-y': -2, - '-j': -2, '-z': -3, '-k': -3} - - if isinstance(pedir, bytes): - pedir = pedir.decode() - if not isinstance(pedir, str): - raise Exception("\n\nPhase-encoding direction must be a " - "string value.\n\nValue: {0}" - "\n\n".format(pedir)) - if pedir not in conv_dct.keys(): - raise Exception("\n\nInvalid phase-encoding direction " - "entered: {0}\n\n".format(pedir)) - return conv_dct[pedir] - if phase_diff_distcor: register_bbregister_func_to_anat.connect( inputNode_pedir, ('pedir', convert_pedir), @@ -1355,7 +1336,7 @@ def FSL_registration_connector(wf_name, cfg, 
orig="T1w", opt=None, if symmetric: sym = 'sym' symm = '_symmetric' - + tmpl = '' if template == 'EPI': tmpl = 'EPI' @@ -1493,7 +1474,7 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", if symmetric: sym = 'sym' symm = '_symmetric' - + tmpl = '' if template == 'EPI': tmpl = 'EPI' @@ -1959,7 +1940,7 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "outputs": ["space-template_desc-brain_T1w", "space-template_desc-head_T1w", "space-template_desc-T1w_mask", - "space-template_desc-T1wT2w_biasfield", + "space-template_desc-T1wT2w_biasfield", "from-T1w_to-template_mode-image_desc-linear_xfm", "from-template_to-T1w_mode-image_desc-linear_xfm", "from-T1w_to-template_mode-image_xfm", @@ -2387,7 +2368,7 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ants, outputs = ANTs_registration_connector('ANTS_bold_to_EPI-template' f'_{pipe_num}', cfg, params, - orig='bold', template='EPI') + orig='bold', template='EPI') ants.inputs.inputspec.interpolation = cfg.registration_workflows[ 'functional_registration']['EPI_registration']['ANTs'][ @@ -2413,18 +2394,18 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, ants, 'inputspec.reference_mask') return (wf, outputs) - - + + def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): ''' {"name": "overwrite_transform_anat_to_template", "config": "None", "switch": [["registration_workflows", "anatomical_registration", "run"], ["registration_workflows", "anatomical_registration", "overwrite_transform", "run"]], - "option_key": ["registration_workflows", "anatomical_registration", + "option_key": ["registration_workflows", "anatomical_registration", "overwrite_transform", "using"], "option_val": "FSL", - "inputs": [("desc-restore-brain_T1w", + "inputs": [("desc-restore-brain_T1w", ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], ["desc-restore_T1w", "desc-preproc_T1w", 
"desc-reorient_T1w", "T1w"], ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"], @@ -2451,7 +2432,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # Convert ANTs warps to FSL warps to be consistent with the functional registration # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/AtlasRegistrationToMNI152_ANTsbased.sh#L134-L172 - # antsApplyTransforms -d 3 -i ${T1wRestore}.nii.gz -r ${Reference} \ + # antsApplyTransforms -d 3 -i ${T1wRestore}.nii.gz -r ${Reference} \ # -t ${WD}/xfms/T1w_to_MNI_3Warp.nii.gz \ # -t ${WD}/xfms/T1w_to_MNI_2Affine.mat \ # -t ${WD}/xfms/T1w_to_MNI_1Rigid.mat \ @@ -2506,7 +2487,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None name=f'split_combined_warp_{pipe_num}') split_combined_warp.inputs.output_name = 'e' - wf.connect(ants_apply_warp_t1_to_template, 'output_image', + wf.connect(ants_apply_warp_t1_to_template, 'output_image', split_combined_warp, 'input') # c4d -mcs ${WD}/xfms/ANTs_CombinedInvWarp.nii.gz -oo ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e2inv.nii.gz ${WD}/xfms/e3inv.nii.gz @@ -2519,7 +2500,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None name=f'split_combined_inv_warp_{pipe_num}') split_combined_inv_warp.inputs.output_name = 'einv' - wf.connect(ants_apply_warp_template_to_t1, 'output_image', + wf.connect(ants_apply_warp_template_to_t1, 'output_image', split_combined_inv_warp, 'input') # fslmaths ${WD}/xfms/e2.nii.gz -mul -1 ${WD}/xfms/e-2.nii.gz @@ -2539,7 +2520,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None change_e2inv_sign, 'in_file') # fslmerge -t ${OutputTransform} ${WD}/xfms/e1.nii.gz ${WD}/xfms/e-2.nii.gz ${WD}/xfms/e3.nii.gz - merge_xfms_to_list = pe.Node(util.Merge(3), + merge_xfms_to_list = pe.Node(util.Merge(3), name=f'merge_t1_to_template_xfms_to_list_{pipe_num}') wf.connect(split_combined_warp, 'output1', @@ -2557,21 +2538,21 @@ def 
overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None merge_xfms, 'in_files') # fslmerge -t ${OutputInvTransform} ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e-2inv.nii.gz ${WD}/xfms/e3inv.nii.gz - merge_inv_xfms_to_list = pe.Node(util.Merge(3), + merge_inv_xfms_to_list = pe.Node(util.Merge(3), name=f'merge_template_to_t1_xfms_to_list_{pipe_num}') - wf.connect(split_combined_inv_warp, 'output1', + wf.connect(split_combined_inv_warp, 'output1', merge_inv_xfms_to_list, 'in1') - wf.connect(change_e2inv_sign, 'out_file', + wf.connect(change_e2inv_sign, 'out_file', merge_inv_xfms_to_list, 'in2') - wf.connect(split_combined_inv_warp, 'output3', + wf.connect(split_combined_inv_warp, 'output3', merge_inv_xfms_to_list, 'in3') merge_inv_xfms = pe.Node(interface=fslMerge(), name=f'merge_template_to_t1_xfms_{pipe_num}') merge_inv_xfms.inputs.dimension = 't' - wf.connect(merge_inv_xfms_to_list, 'out', + wf.connect(merge_inv_xfms_to_list, 'out', merge_inv_xfms, 'in_files') # applywarp --rel --interp=spline -i ${T1wRestore} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestore} @@ -2586,7 +2567,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('T1w-template') wf.connect(node, out, fsl_apply_warp_t1_to_template, 'ref_file') - wf.connect(merge_xfms, 'merged_file', + wf.connect(merge_xfms, 'merged_file', fsl_apply_warp_t1_to_template, 'field_file') # applywarp --rel --interp=nn -i ${T1wRestoreBrain} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestoreBrain} @@ -2602,7 +2583,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('T1w-template') wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, 'ref_file') - wf.connect(merge_xfms, 'merged_file', + wf.connect(merge_xfms, 'merged_file', fsl_apply_warp_t1_brain_to_template, 'field_file') fsl_apply_warp_t1_brain_mask_to_template = pe.Node(interface=fsl.ApplyWarp(), 
@@ -2616,7 +2597,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None node, out = strat_pool.get_data('T1w-template') wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, 'ref_file') - wf.connect(merge_xfms, 'merged_file', + wf.connect(merge_xfms, 'merged_file', fsl_apply_warp_t1_brain_mask_to_template, 'field_file') # fslmaths ${OutputT1wImageRestore} -mas ${OutputT1wImageRestoreBrain} ${OutputT1wImageRestoreBrain} @@ -2775,13 +2756,13 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): if strat_pool.check_rpool("despiked_fieldmap") and \ strat_pool.check_rpool("fieldmap_mask"): diff_complete = True - + if strat_pool.check_rpool('T2w') and cfg.anatomical_preproc['run_t2']: # monkey data - func_to_anat = create_register_func_to_anat_use_T2(cfg, + func_to_anat = create_register_func_to_anat_use_T2(cfg, f'func_to_anat_FLIRT_' f'{pipe_num}') - + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L177 # fslmaths "$fMRIFolder"/"$NameOffMRI"_mc -Tmean "$fMRIFolder"/"$ScoutName"_gdc func_mc_mean = pe.Node(interface=afni_utils.TStat(), @@ -2800,7 +2781,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data('desc-preproc_T2w') wf.connect(node, out, func_to_anat, 'inputspec.T2_head') - + node, out = strat_pool.get_data('desc-brain_T2w') wf.connect(node, out, func_to_anat, 'inputspec.T2_brain') @@ -2810,7 +2791,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): func_to_anat = create_register_func_to_anat(cfg, diff_complete, f'func_to_anat_FLIRT_' f'{pipe_num}') - + func_to_anat.inputs.inputspec.dof = cfg.registration_workflows[ 'functional_registration']['coregistration']['dof'] @@ -2901,11 +2882,11 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): else: if cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration']['bbr_wm_map'] == 
'probability_map': - node, out = strat_pool.get_data(["label-WM_probseg", + node, out = strat_pool.get_data(["label-WM_probseg", "label-WM_mask"]) elif cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration']['bbr_wm_map'] == 'partial_volume_map': - node, out = strat_pool.get_data(["label-WM_pveseg", + node, out = strat_pool.get_data(["label-WM_pveseg", "label-WM_mask"]) wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat_wm_segmentation') @@ -3142,10 +3123,10 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): # convertwarp --relout --rel -m ${WD}/fMRI2str.mat --ref=${T1wImage} --out=${WD}/fMRI2str.nii.gz convert_func_to_anat_linear_warp = pe.Node(interface=fsl.ConvertWarp(), name=f'convert_func_to_anat_linear_warp_{pipe_num}') - + convert_func_to_anat_linear_warp.inputs.out_relwarp = True convert_func_to_anat_linear_warp.inputs.relwarp = True - + if strat_pool.check_rpool('blip-warp'): node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_xfm') wf.connect(node, out, convert_func_to_anat_linear_warp, 'postmat') @@ -3155,7 +3136,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): else: node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_xfm') wf.connect(node, out, convert_func_to_anat_linear_warp, 'premat') - + node, out = strat_pool.get_data('desc-preproc_T1w') wf.connect(node, out, convert_func_to_anat_linear_warp, 'reference') @@ -3167,7 +3148,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): convert_func_to_standard_warp.inputs.out_relwarp = True convert_func_to_standard_warp.inputs.relwarp = True - wf.connect(convert_func_to_anat_linear_warp, 'out_file', + wf.connect(convert_func_to_anat_linear_warp, 'out_file', convert_func_to_standard_warp, 'warp1') node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') @@ -3181,7 +3162,7 @@ def 
warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): # fslroi "$fMRIFolder"/"$NameOffMRI"_gdc "$fMRIFolder"/"$NameOffMRI"_gdc_warp 0 3 extract_func_roi = pe.Node(interface=fsl.ExtractROI(), name=f'extract_func_roi_{pipe_num}') - + extract_func_roi.inputs.t_min = 0 extract_func_roi.inputs.t_size = 3 @@ -3194,7 +3175,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): multiply_func_roi_by_zero.inputs.args = '-mul 0' - wf.connect(extract_func_roi, 'roi_file', + wf.connect(extract_func_roi, 'roi_file', multiply_func_roi_by_zero, 'in_file') # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L168-L193 @@ -3216,15 +3197,15 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): convert_motion_distortion_warp.inputs.out_relwarp = True convert_motion_distortion_warp.inputs.relwarp = True - wf.connect(multiply_func_roi_by_zero, 'out_file', + wf.connect(multiply_func_roi_by_zero, 'out_file', convert_motion_distortion_warp, 'warp1') - wf.connect(split_func, 'out_files', + wf.connect(split_func, 'out_files', convert_motion_distortion_warp, 'reference') node, out = strat_pool.get_data('coordinate-transformation') wf.connect(node, out, convert_motion_distortion_warp, 'postmat') - + # convertwarp --relout --rel --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --warp1=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz --warp2=${OutputTransform} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz convert_registration_warp = pe.MapNode(interface=fsl.ConvertWarp(), name=f'convert_registration_warp_{pipe_num}', @@ -3236,7 +3217,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data('space-template_res-bold_desc-brain_T1w') wf.connect(node, out, convert_registration_warp, 'reference') - wf.connect(convert_motion_distortion_warp, 'out_file', + 
wf.connect(convert_motion_distortion_warp, 'out_file', convert_registration_warp, 'warp1') wf.connect(convert_func_to_standard_warp, 'out_file', @@ -3262,7 +3243,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(split_func, 'out_files', applywarp_func_to_standard, 'in_file') - + wf.connect(convert_registration_warp, 'out_file', applywarp_func_to_standard, 'field_file') @@ -3280,7 +3261,7 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(generate_vol_mask, 'out_file', applywarp_func_mask_to_standard, 'in_file') - + wf.connect(convert_registration_warp, 'out_file', applywarp_func_mask_to_standard, 'field_file') @@ -3413,9 +3394,9 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No "space-template_desc-bold_mask"]} """ - # Apply motion correction, coreg, anat-to-template transforms on raw functional timeseries - # Ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh - + # Apply motion correction, coreg, anat-to-template transforms on raw functional timeseries + # Ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh + # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L131 # ${FSLDIR}/bin/flirt -interp spline -in ${T1wImage} -ref ${T1wImage} -applyisoxfm $FinalfMRIResolution -out ${WD}/${T1wImageFile}.${FinalfMRIResolution} anat_resample = pe.Node(interface=fsl.FLIRT(), @@ -3436,7 +3417,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No applywarp_anat_res.inputs.interp = 'spline' applywarp_anat_res.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] - node, out = strat_pool.get_data('space-template_desc-head_T1w') + node, out = strat_pool.get_data('space-template_desc-head_T1w') 
wf.connect(node, out, applywarp_anat_res, 'in_file') wf.connect(anat_resample, 'out_file', applywarp_anat_res, 'ref_file') @@ -3452,7 +3433,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No node, out = strat_pool.get_data('space-template_desc-T1w_mask') wf.connect(node, out, applywarp_anat_mask_res, 'in_file') wf.connect(applywarp_anat_res, 'out_file', applywarp_anat_mask_res, 'ref_file') - + # ${FSLDIR}/bin/fslmaths ${WD}/${T1wImageFile}.${FinalfMRIResolution} -mas ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz T1_brain_res = pe.Node(interface=fsl.MultiImageMaths(), name=f't1_brain_func_res_{pipe_num}') @@ -3472,7 +3453,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No node, out = strat_pool.get_data('space-template_desc-T1wT2w_biasfield') wf.connect(node, out, applywarp_bias_field_res, 'in_file') wf.connect(T1_brain_res, 'out_file', applywarp_bias_field_res, 'ref_file') - + # ${FSLDIR}/bin/fslmaths ${WD}/${BiasFieldFile}.${FinalfMRIResolution} -thr 0.1 ${WD}/${BiasFieldFile}.${FinalfMRIResolution} biasfield_thr = pe.Node(interface=fsl.MultiImageMaths(), name=f'biasfiedl_thr_{pipe_num}') @@ -3487,7 +3468,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No convert_func_to_standard_warp.inputs.out_relwarp = True convert_func_to_standard_warp.inputs.relwarp = True - + node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_warp') wf.connect(node, out, convert_func_to_standard_warp, 'warp1') @@ -3500,7 +3481,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No # fslroi "$fMRIFolder"/"$NameOffMRI"_gdc "$fMRIFolder"/"$NameOffMRI"_gdc_warp 0 3 extract_func_roi = pe.Node(interface=fsl.ExtractROI(), name=f'extract_func_roi_{pipe_num}') - + extract_func_roi.inputs.t_min = 0 extract_func_roi.inputs.t_size = 3 @@ -3513,7 +3494,7 @@ def 
warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No multiply_func_roi_by_zero.inputs.args = '-mul 0' - wf.connect(extract_func_roi, 'roi_file', + wf.connect(extract_func_roi, 'roi_file', multiply_func_roi_by_zero, 'in_file') # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L173 @@ -3535,15 +3516,15 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No convert_motion_distortion_warp.inputs.out_relwarp = True convert_motion_distortion_warp.inputs.relwarp = True - wf.connect(multiply_func_roi_by_zero, 'out_file', + wf.connect(multiply_func_roi_by_zero, 'out_file', convert_motion_distortion_warp, 'warp1') - wf.connect(split_func, 'out_files', + wf.connect(split_func, 'out_files', convert_motion_distortion_warp, 'reference') node, out = strat_pool.get_data('coordinate-transformation') wf.connect(node, out, convert_motion_distortion_warp, 'postmat') - + # convertwarp --relout --rel --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --warp1=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_gdc_warp.nii.gz --warp2=${OutputTransform} --out=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz convert_registration_warp = pe.MapNode(interface=fsl.ConvertWarp(), name=f'convert_registration_warp_{pipe_num}', @@ -3554,7 +3535,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(applywarp_anat_res, 'out_file', convert_registration_warp, 'reference') - wf.connect(convert_motion_distortion_warp, 'out_file', + wf.connect(convert_motion_distortion_warp, 'out_file', convert_registration_warp, 'warp1') wf.connect(convert_func_to_standard_warp, 'out_file', @@ -3580,11 +3561,11 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(split_func, 'out_files', applywarp_func_to_standard, 'in_file') - + wf.connect(convert_registration_warp, 'out_file', applywarp_func_to_standard, 
'field_file') - wf.connect(applywarp_anat_res, 'out_file', + wf.connect(applywarp_anat_res, 'out_file', applywarp_func_to_standard, 'ref_file') # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz @@ -3597,7 +3578,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(generate_vol_mask, 'out_file', applywarp_func_mask_to_standard, 'in_file') - + wf.connect(convert_registration_warp, 'out_file', applywarp_func_mask_to_standard, 'field_file') @@ -3637,7 +3618,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/IntensityNormalization.sh#L113-L119 # fslmaths ${InputfMRI} -div ${BiasField} $jacobiancom -mas ${BrainMask} -mas ${InputfMRI}_mask -ing 10000 ${OutputfMRI} -odt float - merge_func_mask = pe.Node(util.Merge(3), + merge_func_mask = pe.Node(util.Merge(3), name=f'merge_operand_files_{pipe_num}') wf.connect(biasfield_thr, 'out_file', merge_func_mask, 'in1') @@ -3645,7 +3626,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(applywarp_anat_mask_res, 'out_file', merge_func_mask, 'in2') wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in3') - + extract_func_brain = pe.Node(interface=fsl.MultiImageMaths(), name=f'extract_func_brain_{pipe_num}') @@ -3821,7 +3802,7 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, wf.connect(merge_func_to_standard, 'merged_file', apply_mask, 'in_file') - wf.connect(applyxfm_func_mask_to_standard, 'output_image', + wf.connect(applyxfm_func_mask_to_standard, 'output_image', apply_mask, 'mask_file') outputs = { @@ -3955,7 +3936,7 @@ def warp_deriv_mask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): 
"func_registration_to_template", "run"], ["registration_workflows", "anatomical_registration", "run"]], "option_key": ["registration_workflows", "functional_registration", - "func_registration_to_template", "apply_transform", + "func_registration_to_template", "apply_transform", "using"], "option_val": "default", "inputs": [("space-bold_desc-brain_mask", @@ -4054,8 +4035,8 @@ def warp_timeseries_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): } return (wf, outputs) - - + + def warp_bold_mean_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): ''' Node Block: @@ -4175,7 +4156,6 @@ def warp_deriv_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): "EPI-template"], "outputs": ["space-EPItemplate_res-derivative_desc-bold_mask"]} ''' - xfm_prov = strat_pool.get_cpac_provenance( 'from-bold_to-EPItemplate_mode-image_xfm') reg_tool = check_prov_for_regtool(xfm_prov) @@ -4207,3 +4187,197 @@ def warp_deriv_mask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): } return (wf, outputs) + + + + +def warp_Tissuemask_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): + ''' + Node Block: + {"name": "warp_Tissuemask_to_T1template", + "config": "None", + "switch": ["registration_workflows", "anatomical_registration", "run"], + "option_key": "None", + "option_val": "None", + "inputs": [("label-CSF_mask", + "label-WM_mask", + "label-GM_mask", + "from-T1w_to-template_mode-image_xfm"), + "T1w-template"], + "outputs": ["space-template_label-CSF_mask", + "space-template_label-WM_mask", + "space-template_label-GM_mask"]} + ''' + + xfm_prov = strat_pool.get_cpac_provenance( + 'from-T1w_to-template_mode-image_xfm') + reg_tool = check_prov_for_regtool(xfm_prov) + + num_cpus = cfg.pipeline_setup['system_config'][ + 'max_cores_per_participant'] + + num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + + apply_xfm_CSF = apply_transform(f'warp_Tissuemask_to_T1template_CSF{pipe_num}', + reg_tool, time_series=False, + num_cpus=num_cpus, + 
num_ants_cores=num_ants_cores) + + apply_xfm_WM = apply_transform(f'warp_Tissuemask_to_T1template_WM{pipe_num}', + reg_tool, time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores) + + apply_xfm_GM = apply_transform(f'warp_Tissuemask_to_T1template_GM{pipe_num}', + reg_tool, time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores) + + if reg_tool == 'ants': + apply_xfm_CSF.inputs.inputspec.interpolation = 'NearestNeighbor' + apply_xfm_WM.inputs.inputspec.interpolation = 'NearestNeighbor' + apply_xfm_GM.inputs.inputspec.interpolation = 'NearestNeighbor' + elif reg_tool == 'fsl': + apply_xfm_CSF.inputs.inputspec.interpolation = 'nn' + apply_xfm_WM.inputs.inputspec.interpolation = 'nn' + apply_xfm_GM.inputs.inputspec.interpolation = 'nn' + + outputs = {} + if strat_pool.check_rpool('label-CSF_mask'): + node, out = strat_pool.get_data("label-CSF_mask") + wf.connect(node, out, apply_xfm_CSF, 'inputspec.input_image') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, apply_xfm_CSF, 'inputspec.reference') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, apply_xfm_CSF, 'inputspec.transform') + outputs.update({ + f'space-template_label-CSF_mask': + + (apply_xfm_CSF, 'outputspec.output_image')}) + + + + if strat_pool.check_rpool('label-WM_mask'): + node, out = strat_pool.get_data("label-WM_mask") + wf.connect(node, out, apply_xfm_WM, 'inputspec.input_image') + node, out = strat_pool.get_data("T1w-template") + wf.connect(node, out, apply_xfm_WM, 'inputspec.reference') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, apply_xfm_WM, 'inputspec.transform') + + outputs.update({ + f'space-template_label-WM_mask': + (apply_xfm_WM, 'outputspec.output_image')}) + + + if strat_pool.check_rpool('label-GM_mask'): + node, out = strat_pool.get_data("label-GM_mask") + wf.connect(node, out, apply_xfm_GM, 'inputspec.input_image') + node, out = 
strat_pool.get_data("T1w-template") + wf.connect(node, out, apply_xfm_GM, 'inputspec.reference') + node, out = strat_pool.get_data("from-T1w_to-template_mode-image_xfm") + wf.connect(node, out, apply_xfm_GM, 'inputspec.transform') + + outputs.update({ + f'space-template_label-GM_mask': + (apply_xfm_GM, 'outputspec.output_image')}) + + + return (wf, outputs) + + +def warp_Tissuemask_to_EPItemplate(wf, cfg, strat_pool, pipe_num, opt=None): + ''' + Node Block: + {"name": "warp_Tissuemask_to_EPItemplate", + "config": "None", + "switch": ["registration_workflows", "functional_registration", "EPI_registration", "run"], + "option_key": "None", + "option_val": "None", + "inputs": [("label-CSF_mask", + "label-WM_mask", + "label-GM_mask", + "from-bold_to-EPItemplate_mode-image_xfm"), + "EPI-template"], + "outputs": ["space-EPItemplate_label-CSF_mask", + "space-EPItemplate_label-WM_mask", + "space-EPItemplate_label-GM_mask"]} + ''' + + xfm_prov = strat_pool.get_cpac_provenance( + 'from-bold_to-EPItemplate_mode-image_xfm') + reg_tool = check_prov_for_regtool(xfm_prov) + + num_cpus = cfg.pipeline_setup['system_config'][ + 'max_cores_per_participant'] + + num_ants_cores = cfg.pipeline_setup['system_config']['num_ants_threads'] + + apply_xfm_CSF = apply_transform(f'warp_Tissuemask_to_EPItemplate_CSF{pipe_num}', + reg_tool, time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores) + + apply_xfm_WM = apply_transform(f'warp_Tissuemask_to_EPItemplate_WM{pipe_num}', + reg_tool, time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores) + + apply_xfm_GM = apply_transform(f'warp_Tissuemask_to_EPItemplate_GM{pipe_num}', + reg_tool, time_series=False, + num_cpus=num_cpus, + num_ants_cores=num_ants_cores) + + if reg_tool == 'ants': + apply_xfm_CSF.inputs.inputspec.interpolation = 'NearestNeighbor' + apply_xfm_WM.inputs.inputspec.interpolation = 'NearestNeighbor' + apply_xfm_GM.inputs.inputspec.interpolation = 'NearestNeighbor' + elif reg_tool == 'fsl': + 
apply_xfm_CSF.inputs.inputspec.interpolation = 'nn' + apply_xfm_WM.inputs.inputspec.interpolation = 'nn' + apply_xfm_GM.inputs.inputspec.interpolation = 'nn' + + outputs = {} + if strat_pool.check_rpool('label-CSF_mask'): + node, out = strat_pool.get_data("label-CSF_mask") + wf.connect(node, out, apply_xfm_CSF, 'inputspec.input_image') + node, out = strat_pool.get_data("EPI-template") + wf.connect(node, out, apply_xfm_CSF, 'inputspec.reference') + node, out = strat_pool.get_data("from-bold_to-EPItemplate_mode-image_xfm") + wf.connect(node, out, apply_xfm_CSF, 'inputspec.transform') + outputs.update({ + f'space-EPItemplate_label-CSF_mask': + + (apply_xfm_CSF, 'outputspec.output_image')}) + + + + if strat_pool.check_rpool('label-WM_mask'): + node, out = strat_pool.get_data("label-WM_mask") + wf.connect(node, out, apply_xfm_WM, 'inputspec.input_image') + node, out = strat_pool.get_data("EPI-template") + wf.connect(node, out, apply_xfm_WM, 'inputspec.reference') + node, out = strat_pool.get_data("from-bold_to-EPItemplate_mode-image_xfm") + wf.connect(node, out, apply_xfm_WM, 'inputspec.transform') + + outputs.update({ + f'space-EPItemplate_label-WM_mask': + (apply_xfm_WM, 'outputspec.output_image')}) + + + if strat_pool.check_rpool('label-GM_mask'): + node, out = strat_pool.get_data("label-GM_mask") + wf.connect(node, out, apply_xfm_GM, 'inputspec.input_image') + node, out = strat_pool.get_data("EPI-template") + wf.connect(node, out, apply_xfm_GM, 'inputspec.reference') + node, out = strat_pool.get_data("from-bold_to-EPItemplate_mode-image_xfm") + wf.connect(node, out, apply_xfm_GM, 'inputspec.transform') + + outputs.update({ + f'space-EPItemplate_label-GM_mask': + (apply_xfm_GM, 'outputspec.output_image')}) + + + return (wf, outputs) + + diff --git a/CPAC/registration/utils.py b/CPAC/registration/utils.py index 77bfed3fbc..ca8fd3ec50 100644 --- a/CPAC/registration/utils.py +++ b/CPAC/registration/utils.py @@ -57,9 +57,11 @@ def 
generate_inverse_transform_flags(transform_list): return inverse_transform_flags -def hardcoded_reg(moving_brain, reference_brain, moving_skull, reference_skull, - ants_para, moving_mask=None, reference_mask=None, - fixed_image_mask=None, interp=None, reg_with_skull=0): +def hardcoded_reg(moving_brain, reference_brain, moving_skull, + reference_skull, ants_para, moving_mask=None, + reference_mask=None, fixed_image_mask=None, interp=None, + reg_with_skull=0): + # TODO: expand transforms to cover all in ANTs para regcmd = ["antsRegistration"] @@ -104,14 +106,14 @@ def hardcoded_reg(moving_brain, reference_brain, moving_skull, reference_skull, else: regcmd.append("--collapse-output-transforms") regcmd.append(str(ants_para[para_index][para_type])) - - elif para_type == 'winsorize-image-intensities': + + elif para_type == 'winsorize-image-intensities': if ants_para[para_index][para_type]['lowerQuantile'] is None or ants_para[para_index][para_type]['upperQuantile'] is None: err_msg = 'Please specifiy lowerQuantile and upperQuantile of ANTs parameters --winsorize-image-intensities in pipeline config. 
' raise Exception(err_msg) else: regcmd.append("--winsorize-image-intensities") - regcmd.append("[{0},{1}]".format(ants_para[para_index][para_type]['lowerQuantile'], + regcmd.append("[{0},{1}]".format(ants_para[para_index][para_type]['lowerQuantile'], ants_para[para_index][para_type]['upperQuantile'])) elif para_type == 'initial-moving-transform': @@ -123,7 +125,7 @@ def hardcoded_reg(moving_brain, reference_brain, moving_skull, reference_skull, regcmd.append("--initial-moving-transform") if reg_with_skull == 1: regcmd.append("[{0},{1},{2}]".format( - reference_skull, moving_skull, + reference_skull, moving_skull, ants_para[para_index][para_type][ 'initializationFeature'])) else: @@ -395,7 +397,7 @@ def hardcoded_reg(moving_brain, reference_brain, moving_skull, reference_skull, regcmd.append("[NULL,NULL]") elif para_type == 'masks': - # lesion preproc has + # lesion preproc has if fixed_image_mask is not None: regcmd.append("--masks") regcmd.append(str(fixed_image_mask)) @@ -588,7 +590,7 @@ def run_c3d(reference_file, source_file, transform_file): def run_c4d(input, output_name): - + import os output1 = os.path.join(os.getcwd(), output_name+'1.nii.gz') diff --git a/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml b/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml index df193d6a7f..543314e782 100644 --- a/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml +++ b/CPAC/resources/configs/1.7-1.8-nesting-mappings.yml @@ -82,6 +82,7 @@ write_debugging_outputs: generateQualityControlImages: - pipeline_setup - output_directory + - quality_control - generate_quality_control_images removeWorkingDir: - pipeline_setup diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml index ceea891481..dfa594cdfb 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ABIDE.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.2 +# Version 1.8.3 # # 
http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml index e1ed78fe1e..306dc9e88a 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml index b0b06e25cd..b922f3564a 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-ADHD200_only2.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml index f287647cf2..8d0a749e2d 100644 --- a/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml +++ b/CPAC/resources/configs/data_config_S3-BIDS-NKI-RocklandSample.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/data_config_cpac_benchmark.yml b/CPAC/resources/configs/data_config_cpac_benchmark.yml index e275da4522..949b5a0a26 100644 --- a/CPAC/resources/configs/data_config_cpac_benchmark.yml +++ b/CPAC/resources/configs/data_config_cpac_benchmark.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/data_settings_template.yml b/CPAC/resources/configs/data_settings_template.yml index 4f20d1c283..f2fc46845d 100644 --- a/CPAC/resources/configs/data_settings_template.yml +++ b/CPAC/resources/configs/data_settings_template.yml @@ -1,5 +1,5 @@ # CPAC Data Settings File -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/group_config_template.yml b/CPAC/resources/configs/group_config_template.yml index e195c1a281..835426808f 100644 --- a/CPAC/resources/configs/group_config_template.yml +++ b/CPAC/resources/configs/group_config_template.yml @@ -1,5 +1,5 @@ # CPAC Group-Level Analysis Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_abcd-options.yml b/CPAC/resources/configs/pipeline_config_abcd-options.yml index 8204a6603d..ff2d0126f5 100644 --- a/CPAC/resources/configs/pipeline_config_abcd-options.yml +++ b/CPAC/resources/configs/pipeline_config_abcd-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # @@ -23,8 +23,9 @@ pipeline_setup: # - If running outside a container, this should be a full path to a directory. path: /outputs/output - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: Off + quality_control: + # Generate quality control pages containing preprocessing and derivative outputs. 
+ generate_quality_control_images: Off working_directory: diff --git a/CPAC/resources/configs/pipeline_config_anat-only.yml b/CPAC/resources/configs/pipeline_config_anat-only.yml index c1fa7be64f..c69662bd22 100644 --- a/CPAC/resources/configs/pipeline_config_anat-only.yml +++ b/CPAC/resources/configs/pipeline_config_anat-only.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml index b043202633..412e5b3d61 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-ANTS.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml index 9bc28ed2ae..1c2df2bb77 100644 --- a/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml +++ b/CPAC/resources/configs/pipeline_config_benchmark-FNIRT.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index fc3bef8fcf..127cbfa857 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # @@ -47,8 +47,9 @@ pipeline_setup: # Options: default, ndmg output_tree: "default" - # Generate quality control pages containing preprocessing and derivative outputs. 
- generate_quality_control_images: True + quality_control: + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: True working_directory: diff --git a/CPAC/resources/configs/pipeline_config_ccs-options.yml b/CPAC/resources/configs/pipeline_config_ccs-options.yml index f913496397..276b2339ea 100644 --- a/CPAC/resources/configs/pipeline_config_ccs-options.yml +++ b/CPAC/resources/configs/pipeline_config_ccs-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml index 8a5f3092c0..91b667872b 100644 --- a/CPAC/resources/configs/pipeline_config_fmriprep-options.yml +++ b/CPAC/resources/configs/pipeline_config_fmriprep-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # @@ -23,8 +23,9 @@ pipeline_setup: # - If running outside a container, this should be a full path to a directory. path: /outputs/output - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: False + quality_control: + # Generate quality control pages containing preprocessing and derivative outputs. + generate_quality_control_images: False working_directory: diff --git a/CPAC/resources/configs/pipeline_config_fx-options.yml b/CPAC/resources/configs/pipeline_config_fx-options.yml index b14c45ec69..171ed65f58 100644 --- a/CPAC/resources/configs/pipeline_config_fx-options.yml +++ b/CPAC/resources/configs/pipeline_config_fx-options.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. 
# @@ -15,6 +15,14 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. pipeline_name: cpac_fx-options + output_directory: + + # Quality control outputs + quality_control: + + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + nuisance_corrections: 2-nuisance_regression: diff --git a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml index 3d938fc906..b6695be724 100644 --- a/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml +++ b/CPAC/resources/configs/pipeline_config_monkey-ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_monkey.yml b/CPAC/resources/configs/pipeline_config_monkey.yml index 3efdeb0233..473e6ca4c3 100644 --- a/CPAC/resources/configs/pipeline_config_monkey.yml +++ b/CPAC/resources/configs/pipeline_config_monkey.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_ndmg.yml b/CPAC/resources/configs/pipeline_config_ndmg.yml index 5cac042e02..fb6f6f5b92 100644 --- a/CPAC/resources/configs/pipeline_config_ndmg.yml +++ b/CPAC/resources/configs/pipeline_config_ndmg.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml index e31adc3b83..74f53f6720 100644 --- a/CPAC/resources/configs/pipeline_config_nhp-macaque.yml +++ b/CPAC/resources/configs/pipeline_config_nhp-macaque.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_preproc.yml b/CPAC/resources/configs/pipeline_config_preproc.yml index ae66ce149b..497391a566 100644 --- a/CPAC/resources/configs/pipeline_config_preproc.yml +++ b/CPAC/resources/configs/pipeline_config_preproc.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml new file mode 100644 index 0000000000..b3962feb78 --- /dev/null +++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml @@ -0,0 +1,137 @@ +%YAML 1.1 +--- +# CPAC Pipeline Configuration YAML file for RBC options +# Version 1.8.3 +# +# http://fcp-indi.github.io for more info. +# +# Tip: This file can be edited manually with a text editor for quick modifications. + +FROM: fx-options + +pipeline_setup: + # Name for this pipeline configuration - useful for identification. + pipeline_name: RBC.options + + system_config: + + # Random seed used to fix the state of execution. + # If unset, each process uses its own default. + # If set, a `random.log` file will be generated logging the random seed and each node to which that seed was applied. + # If set to a positive integer (up to 2147483647), that integer will be used to seed each process that accepts a random seed. 
+ # If set to 'random', a random positive integer (up to 2147483647) will be generated and that seed will be used to seed each process that accepts a random seed. + random_seed: 77742777 + + output_directory: + # Quality control outputs + quality_control: + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: On + +functional_preproc: + despiking: + run: [On] + +nuisance_corrections: + 2-nuisance_regression: + Regressors: + - Name: Regressor-with-GSR + Bandpass: + bottom_frequency: 0.01 + top_frequency: 0.1 + CerebrospinalFluid: + erode_mask: false + extraction_resolution: 2 + include_delayed: true + include_delayed_squared: true + include_squared: true + summary: Mean + GlobalSignal: + include_delayed: true + include_delayed_squared: true + include_squared: true + summary: Mean + Motion: + include_delayed: true + include_delayed_squared: true + include_squared: true + WhiteMatter: + erode_mask: false + extraction_resolution: 2 + include_delayed: true + include_delayed_squared: true + include_squared: true + summary: Mean + PolyOrt: + degree: 2 + + - Name: Regressor-with-aCompCor + Bandpass: + bottom_frequency: 0.01 + top_frequency: 0.1 + CerebrospinalFluid: + erode_mask: false + extraction_resolution: 2 + include_delayed: true + include_delayed_squared: true + include_squared: true + summary: Mean + aCompCor: + summary: + method: DetrendPC + components: 5 + tissues: + - WhiteMatter + - CerebrospinalFluid + extraction_resolution: 2 + Motion: + include_delayed: true + include_delayed_squared: true + include_squared: true + WhiteMatter: + erode_mask: false + extraction_resolution: 2 + include_delayed: true + include_delayed_squared: true + include_squared: true + summary: Mean + PolyOrt: + degree: 2 + +timeseries_extraction: + run: On + connectivity_matrix: + using: + - Nilearn + measure: + - Pearson + - Partial + tse_roi_paths: + # AAL + /ndmg_atlases/label/Human/AAL_space-MNI152NLin6_res-1x1x1.nii.gz: Avg + # Atlases + 
/ndmg_atlases/label/Human/Brodmann_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      /ndmg_atlases/label/Human/Glasser_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      # Slab +      /ndmg_atlases/label/Human/Slab907_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      # HO: Thresholded +      /ndmg_atlases/label/Human/HarvardOxfordcort-maxprob-thr25_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      /ndmg_atlases/label/Human/HarvardOxfordsub-maxprob-thr25_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      # Juelich +      /ndmg_atlases/label/Human/Juelich_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      # CC +      /cpac_templates/CC200.nii.gz: Avg +      /cpac_templates/CC400.nii.gz: Avg +      # Schaefer +      /ndmg_atlases/label/Human/Schaefer1000_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      /ndmg_atlases/label/Human/Schaefer200_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      /ndmg_atlases/label/Human/Schaefer300_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      /ndmg_atlases/label/Human/Schaefer400_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      # Networks +      # Yeo +      /ndmg_atlases/label/Human/Yeo-17-liberal_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      /ndmg_atlases/label/Human/Yeo-17_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      /ndmg_atlases/label/Human/Yeo-7-liberal_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      /ndmg_atlases/label/Human/Yeo-7_space-MNI152NLin6_res-1x1x1.nii.gz: Avg +      # Smith 2009 (to be included after https://github.com/FCP-INDI/C-PAC/issues/1640 is resolved) +      # /cpac_templates/Smith_space-MNI152NLin6_res-3x3x3_desc-thresh3_mask.nii.gz: Avg diff --git a/CPAC/resources/configs/pipeline_config_regtest-1.yml b/CPAC/resources/configs/pipeline_config_regtest-1.yml index 638a1a531a..a06e9f8a52 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-1.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-1.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/pipeline_config_regtest-2.yml b/CPAC/resources/configs/pipeline_config_regtest-2.yml index a70035a7df..a0d8724f4a 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-2.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-2.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-3.yml b/CPAC/resources/configs/pipeline_config_regtest-3.yml index 1d4750c557..c110967246 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-3.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-3.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_regtest-4.yml b/CPAC/resources/configs/pipeline_config_regtest-4.yml index 72a8e56f7c..4e65999bd5 100644 --- a/CPAC/resources/configs/pipeline_config_regtest-4.yml +++ b/CPAC/resources/configs/pipeline_config_regtest-4.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/pipeline_config_rodent.yml b/CPAC/resources/configs/pipeline_config_rodent.yml index 252c338bd3..2332c8ec5b 100644 --- a/CPAC/resources/configs/pipeline_config_rodent.yml +++ b/CPAC/resources/configs/pipeline_config_rodent.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/system_config.yml b/CPAC/resources/configs/system_config.yml index 48e95dc822..e103632196 100644 --- a/CPAC/resources/configs/system_config.yml +++ b/CPAC/resources/configs/system_config.yml @@ -1,5 +1,5 @@ # C-PAC System Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml index 00ed604429..a079f2699b 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_1.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml index 69aa38c56b..5cc12e735f 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-ADHD200_no-params.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml index 1c309b89a1..f897e160b3 100644 --- a/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml +++ b/CPAC/resources/configs/test_configs/data-test_S3-NKI-RS_fmap.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml index 506762877a..bc78fa12f8 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-scan.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml index df8d0a4f9e..d92590791a 100644 --- a/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml +++ b/CPAC/resources/configs/test_configs/data_config_S3_CoRR_5only_mult-sess.yml @@ -1,5 +1,5 @@ # CPAC Data Configuration File -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml index 310847eedd..28eb439150 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ABCD.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml index af21185375..c83d6b93ff 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml index 353024ecdd..795a2403e2 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorr3dSk.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml index 34f7689fe3..8d431edcc6 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-3dSk-DistCorrBET.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml index cf8e88461c..000d52d05e 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_ANTs-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml index 04159ae78c..6a49bbcd0f 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-3dSk-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml index 0d2f146341..31c9607f69 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-BASC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml index 0d2f146341..31c9607f69 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC-voxel.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml index 0d2f146341..31c9607f69 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-ISC.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml index 0d2f146341..31c9607f69 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis-MDMR.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml index 0d2f146341..31c9607f69 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_FNIRT-BET-AllNuis.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # diff --git a/CPAC/resources/configs/test_configs/pipe-test_all.yml b/CPAC/resources/configs/test_configs/pipe-test_all.yml index b4ee772503..11ac1e29c4 100644 --- a/CPAC/resources/configs/test_configs/pipe-test_all.yml +++ b/CPAC/resources/configs/test_configs/pipe-test_all.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. 
# diff --git a/CPAC/resources/cpac_outputs.tsv b/CPAC/resources/cpac_outputs.tsv index c7d47d0b3e..09d9681e14 100644 --- a/CPAC/resources/cpac_outputs.tsv +++ b/CPAC/resources/cpac_outputs.tsv @@ -1,205 +1,203 @@ -Resource Type Space Sub-Directory File To Smooth To z-std 4D Time Series Optional: Debugging Multi-File -alff alff functional func NIfTI Yes Yes -desc-sm_alff alff functional func NIfTI Yes -desc-sm-zstd_alff alff functional func NIfTI -desc-zstd_alff alff functional func NIfTI -space-template_alff alff template func NIfTI Yes Yes -space-template_desc-sm_alff alff template func NIfTI Yes -space-template_desc-sm-zstd_alff alff template func NIfTI -space-template_desc-zstd_alff alff template func NIfTI -desc-brain_bold bold functional func NIfTI Yes Yes -desc-cleaned_bold bold functional func NifTI Yes Yes -desc-mean_bold bold functional func NIfTI -desc-motion_bold bold functional func NIfTI Yes Yes -desc-preproc_bold bold functional func NIfTI Yes -desc-sm_bold bold functional func NIfTI Yes Yes -space-EPItemplate_bold bold EPI template func NIfTI Yes -space-EPItemplate_desc-brain_bold bold EPI template func NIfTI Yes -space-EPItemplate_desc-cleaned_bold bold EPI template func NIfTI Yes -space-EPItemplate_desc-mean_bold bold EPI template func NIfTI -space-EPItemplate_desc-preproc_bold bold EPI template func NIfTI Yes -space-symtemplate_desc-sm_bold bold symmetric template func NIfTI Yes Yes -space-T1w_desc-mean_bold bold T1w func NIfTI -space-template_bold bold template func NIfTI Yes -space-template_desc-brain_bold bold template func NIfTI Yes -space-template_desc-cleaned_bold bold template func NIfTI Yes -space-template_desc-mean_bold bold template func NIfTI -space-template_desc-preproc_bold bold template func NIfTI Yes -desc-DualReg_correlations correlation template func NIfTI -desc-MeanSCA_correlations correlation template func NIfTI -desc-MultReg_correlations correlation template func NIfTI -desc-ndmg_correlations correlation template func NIfTI 
-space-template_desc-binarized_degree-centrality degree-centrality template func NIfTI Yes Yes -space-template_desc-binarized-sm_degree-centrality degree-centrality template func NIfTI Yes -space-template_desc-binarized-sm-zstd_degree-centrality degree-centrality template func NIfTI -space-template_desc-binarized-zstd_degree-centrality degree-centrality template func NIfTI -space-template_desc-weighted_degree-centrality degree-centrality template func NIfTI Yes Yes -space-template_desc-weighted-sm_degree-centrality degree-centrality template func NIfTI Yes -space-template_desc-weighted-sm-zstd_degree-centrality degree-centrality template func NIfTI -space-template_desc-weighted-zstd_degree-centrality degree-centrality template func NIfTI -space-template_desc-binarized_eigen-centrality eigen-centrality template func NIfTI Yes Yes -space-template_desc-binarized-sm_eigen-centrality eigen-centrality template func NIfTI Yes -space-template_desc-binarized-sm-zstd_eigen-centrality eigen-centrality template func NIfTI -space-template_desc-binarized-zstd_eigen-centrality eigen-centrality template func NIfTI -space-template_desc-weighted_eigen-centrality eigen-centrality template func NIfTI Yes Yes -space-template_desc-weighted-sm_eigen-centrality eigen-centrality template func NIfTI Yes -space-template_desc-weighted-sm-zstd_eigen-centrality eigen-centrality template func NIfTI -space-template_desc-weighted-zstd_eigen-centrality eigen-centrality template func NIfTI -desc-sm_falff falff functional func NIfTI Yes -desc-sm-zstd_falff falff functional func NIfTI -desc-zstd_falff falff functional func NIfTI -falff falff functional func NIfTI Yes Yes -space-template_desc-sm_falff falff template func NIfTI Yes -space-template_desc-sm-zstd_falff falff template func NIfTI -space-template_desc-zstd_falff falff template func NIfTI -space-template_falff falff template func NIfTI Yes Yes -space-template_desc-binarized_lfcd lfcd template func NIfTI Yes Yes 
-space-template_desc-binarized-sm_lfcd lfcd template func NIfTI Yes -space-template_desc-binarized-sm-zstd_lfcd lfcd template func NIfTI -space-template_desc-binarized-zstd_lfcd lfcd template func NIfTI -space-template_desc-weighted_lfcd lfcd template func NIfTI Yes Yes -space-template_desc-weighted-sm_lfcd lfcd template func NIfTI Yes -space-template_desc-weighted-sm-zstd_lfcd lfcd template func NIfTI -space-template_desc-weighted-zstd_lfcd lfcd template func NIfTI -space-EPItemplate_desc-bold_mask mask EPI template func NIfTI -space-EPItemplate_res-derivative_desc-bold_mask mask EPI template func NIfTI -space-bold_desc-brain_mask mask functional func NIfTI -space-bold_desc-eroded_mask mask functional func NIfTI -space-bold_label-CSF_desc-eroded_mask mask functional func NIfTI -space-bold_label-CSF_mask mask functional func NIfTI -space-bold_label-GM_desc-eroded_mask mask functional func NIfTI -space-bold_label-GM_mask mask functional func NIfTI -space-bold_label-WM_desc-eroded_mask mask functional func NIfTI -space-bold_label-WM_mask mask functional func NIfTI -space-longitudinal_desc-brain_mask mask longitudinal T1w anat NIfTI -space-longitudinal_label-CSF_desc-preproc_mask mask longitudinal T1w anat NIfTI -space-longitudinal_label-CSF_mask mask longitudinal T1w anat NIfTI -space-longitudinal_label-GM_desc-preproc_mask mask longitudinal T1w anat NIfTI -space-longitudinal_label-GM_mask mask longitudinal T1w anat NIfTI -space-longitudinal_label-WM_desc-preproc_mask mask longitudinal T1w anat NIfTI -space-longitudinal_label-WM_mask mask longitudinal T1w anat NIfTI -label-CSF_desc-eroded_mask mask T1w anat NIfTI -label-CSF_desc-preproc_mask mask T1w anat NIfTI -label-CSF_mask mask T1w anat NIfTI -label-GM_desc-eroded_mask mask T1w anat NIfTI -label-GM_desc-preproc_mask mask T1w anat NIfTI -label-GM_mask mask T1w anat NIfTI -label-WM_desc-eroded_mask mask T1w anat NIfTI -label-WM_desc-preproc_mask mask T1w anat NIfTI -label-WM_mask mask T1w anat NIfTI 
-space-T1w_desc-acpcbrain_mask mask T1w anat NIfTI -space-T1w_desc-brain_mask mask T1w anat NIfTI -space-T1w_desc-eroded_mask mask T1w anat NIfTI -space-template_desc-bold_mask mask template func NIfTI -space-template_res-derivative_desc-bold_mask mask template func NIfTI -dvars motion func text -framewise-displacement-jenkinson motion func 1D -framewise-displacement-power motion func 1D -max-displacement motion func 1D -motion-filter-info motion func text -motion-filter-plot motion func png -motion-params motion func text -movement-parameters motion func 1D -power-params motion func text -rels-displacement motion func 1D -label-CSF_probseg probseg T1w anat NIfTI -label-GM_probseg probseg T1w anat NIfTI -label-WM_probseg probseg T1w anat NIfTI -T1w-axial-qc qc anat png -T1w-sagittal-qc qc anat png -dseg-axial-qc qc anat png -desg-sagittal-qc qc anat png -bold-axial-qc qc func png -bold-sagittal-qc qc func png -bold-carpet-qc qc func png -framewise-displacement-jenkinson-plot-qc qc func png -movement-parameters-trans-qc qc func png -movement-parameters-rot-qc qc func png -bold-snr-axial-qc qc func png -bold-snr-sagittal-qc qc func png -bold-snr-hist-qc qc func png -bold-snr-qc qc func png -regressors regressors func 1D -desc-sm_reho reho functional func NIfTI Yes -desc-sm-zstd_reho reho functional func NIfTI -desc-zstd_reho reho functional func NIfTI -reho reho functional func NIfTI Yes Yes -space-template_desc-sm_reho reho template func NIfTI Yes -space-template_desc-sm-zstd_reho reho template func NIfTI -space-template_desc-zstd_reho reho template func NIfTI -space-template_reho reho template func NIfTI Yes Yes -desc-DualReg_statmap statistic template func NIfTI -desc-MultReg_statmap statistic template func NIfTI -lh-cortical-thickness-surface-map surface-derived anat Yes -rh-cortical-thickness-surface-map surface-derived anat Yes -lh-cortical-volume-surface-map surface-derived anat Yes -rh-cortical-volume-surface-map surface-derived anat Yes -lh-pial-surface-mesh 
surface-derived anat -rh-pial-surface-mesh surface-derived anat -raw-average surface-derived anat -lh-smoothed-surface-mesh surface-derived anat -rh-smoothed-surface-mesh surface-derived anat -space-fsLR_den-32k_bold-dtseries surface-derived anat -lh-spherical-surface-mesh surface-derived anat Yes -rh-spherical-surface-mesh surface-derived anat Yes -lh-sulcal-depth-surface-maps surface-derived anat Yes -rh-sulcal-depth-surface-maps surface-derived anat Yes -lh-surface-curvature surface-derived anat -rh-surface-curvature surface-derived anat -lh-white-matter-surface-mesh surface-derived anat Yes -rh-white-matter-surface-mesh surface-derived anat Yes -wmparc surface-derived anat Yes -space-symtemplate_desc-brain_T1w T1w symmetric template anat NIfTI -desc-brain_T1w T1w T1w anat NIfTI -desc-preproc_T1w T1w T1w anat NIfTI -desc-reorient_T1w T1w T1w anat NIfTI -desc-restore_T1w T1w T1w anat NIfTI -desc-restore-brain_T1w T1w T1w anat NIfTI -space-template_desc-brain_T1w T1w template anat NIfTI -desc-Mean_timeseries timeseries func 1D -desc-MeanSCA_timeseries timeseries func 1D -desc-SpatReg_timeseries timeseries func 1D -desc-Voxel_timeseries timeseries func 1D -desc-PearsonAfni_connectome matrix func tsv -desc-PartialAfni_connectome matrix func tsv -desc-PearsonNilearn_connectome matrix func tsv -desc-PartialNilearn_connectome matrix func tsv -space-longitudinal_label-CSF_probseg tissue probability longitudinal T1w anat NIfTI -space-longitudinal_label-GM_probseg tissue probability longitudinal T1w anat NIfTI -space-longitudinal_label-WM_probseg tissue probability longitudinal T1w anat NIfTI -vmhc vmhc symmetric template func NIfTI -blip-warp xfm func NIfTI -from-bold_to-EPItemplate_mode-image_desc-linear_xfm xfm func NIfTI -from-bold_to-EPItemplate_mode-image_desc-nonlinear_xfm xfm func NIfTI -from-bold_to-EPItemplate_mode-image_xfm xfm func NIfTI -from-bold_to-symtemplate_mode-image_xfm xfm func NIfTI -from-bold_to-T1w_mode-image_desc-linear_xfm xfm func NIfTI 
-from-bold_to-template_mode-image_xfm xfm func NIfTI -from-EPItemplate_to-bold_mode-image_desc-linear_xfm xfm func NIfTI -from-EPItemplate_to-bold_mode-image_desc-nonlinear_xfm xfm func NIfTI -from-longitudinal_to-symtemplate_mode-image_desc-linear_xfm xfm anat NIfTI -from-longitudinal_to-symtemplate_mode-image_desc-nonlinear_xfm xfm anat NIfTI -from-longitudinal_to-symtemplate_mode-image_xfm xfm anat NIfTI -from-longitudinal_to-template_mode-image_desc-linear_xfm xfm anat NIfTI -from-longitudinal_to-template_mode-image_desc-nonlinear_xfm xfm anat NIfTI -from-longitudinal_to-template_mode-image_xfm xfm anat NIfTI -from-symtemplate_to-bold_mode-image_xfm xfm func NIfTI -from-symtemplate_to-longitudinal_mode-image_desc-linear_xfm xfm anat NIfTI -from-symtemplate_to-longitudinal_mode-image_desc-nonlinear_xfm xfm anat NIfTI -from-symtemplate_to-longitudinal_mode-image_xfm xfm anat NIfTI -from-symtemplate_to-T1w_mode-image_desc-linear_xfm xfm anat NIfTI -from-symtemplate_to-T1w_mode-image_desc-nonlinear_xfm xfm anat NIfTI -from-symtemplate_to-T1w_mode-image_xfm xfm anat NIfTI -from-T1w_to-symtemplate_mode-image_desc-linear_xfm xfm anat NIfTI -from-T1w_to-symtemplate_mode-image_desc-nonlinear_xfm xfm anat NIfTI -from-T1w_to-symtemplate_mode-image_xfm xfm anat NIfTI -from-T1w_to-template_mode-image_desc-linear_xfm xfm anat NIfTI -from-T1w_to-template_mode-image_desc-nonlinear_xfm xfm anat NIfTI -from-T1w_to-template_mode-image_xfm xfm anat NIfTI -from-template_to-bold_mode-image_xfm xfm func NIfTI -from-template_to-longitudinal_mode-image_desc-linear_xfm xfm anat NIfTI -from-template_to-longitudinal_mode-image_desc-nonlinear_xfm xfm anat NIfTI -from-template_to-longitudinal_mode-image_xfm xfm anat NIfTI -from-template_to-T1w_mode-image_desc-linear_xfm xfm anat NIfTI -from-template_to-T1w_mode-image_desc-nonlinear_xfm xfm anat NIfTI -from-template_to-T1w_mode-image_xfm xfm anat NIfTI +Resource Type Space Sub-Directory File To Smooth To z-std 4D Time Series Optional: 
Debugging Multi-File +alff alff functional func NIfTI Yes Yes +desc-sm_alff alff functional func NIfTI Yes +desc-sm-zstd_alff alff functional func NIfTI +desc-zstd_alff alff functional func NIfTI +space-template_alff alff template func NIfTI Yes Yes +space-template_desc-sm_alff alff template func NIfTI Yes +space-template_desc-sm-zstd_alff alff template func NIfTI +space-template_desc-zstd_alff alff template func NIfTI +desc-brain_bold bold functional func NIfTI Yes Yes +desc-cleaned_bold bold functional func NifTI Yes Yes +desc-mean_bold bold functional func NIfTI +desc-motion_bold bold functional func NIfTI Yes Yes +desc-preproc_bold bold functional func NIfTI Yes +desc-sm_bold bold functional func NIfTI Yes Yes +space-EPItemplate_bold bold EPI template func NIfTI Yes +space-EPItemplate_desc-brain_bold bold EPI template func NIfTI Yes +space-EPItemplate_desc-cleaned_bold bold EPI template func NIfTI Yes +space-EPItemplate_desc-mean_bold bold EPI template func NIfTI +space-EPItemplate_desc-preproc_bold bold EPI template func NIfTI Yes +space-symtemplate_desc-sm_bold bold symmetric template func NIfTI Yes Yes +space-T1w_desc-mean_bold bold T1w func NIfTI +space-template_bold bold template func NIfTI Yes +space-template_desc-brain_bold bold template func NIfTI Yes +space-template_desc-cleaned_bold bold template func NIfTI Yes +space-template_desc-mean_bold bold template func NIfTI +space-template_desc-preproc_bold bold template func NIfTI Yes +desc-DualReg_correlations correlation template func NIfTI +desc-MeanSCA_correlations correlation template func NIfTI +desc-MultReg_correlations correlation template func NIfTI +desc-ndmg_correlations correlation template func NIfTI +space-template_desc-binarized_degree-centrality degree-centrality template func NIfTI Yes Yes +space-template_desc-binarized-sm_degree-centrality degree-centrality template func NIfTI Yes +space-template_desc-binarized-sm-zstd_degree-centrality degree-centrality template func NIfTI 
+space-template_desc-binarized-zstd_degree-centrality degree-centrality template func NIfTI +space-template_desc-weighted_degree-centrality degree-centrality template func NIfTI Yes Yes +space-template_desc-weighted-sm_degree-centrality degree-centrality template func NIfTI Yes +space-template_desc-weighted-sm-zstd_degree-centrality degree-centrality template func NIfTI +space-template_desc-weighted-zstd_degree-centrality degree-centrality template func NIfTI +space-template_desc-binarized_eigen-centrality eigen-centrality template func NIfTI Yes Yes +space-template_desc-binarized-sm_eigen-centrality eigen-centrality template func NIfTI Yes +space-template_desc-binarized-sm-zstd_eigen-centrality eigen-centrality template func NIfTI +space-template_desc-binarized-zstd_eigen-centrality eigen-centrality template func NIfTI +space-template_desc-weighted_eigen-centrality eigen-centrality template func NIfTI Yes Yes +space-template_desc-weighted-sm_eigen-centrality eigen-centrality template func NIfTI Yes +space-template_desc-weighted-sm-zstd_eigen-centrality eigen-centrality template func NIfTI +space-template_desc-weighted-zstd_eigen-centrality eigen-centrality template func NIfTI +desc-sm_falff falff functional func NIfTI Yes +desc-sm-zstd_falff falff functional func NIfTI +desc-zstd_falff falff functional func NIfTI +falff falff functional func NIfTI Yes Yes +space-template_desc-sm_falff falff template func NIfTI Yes +space-template_desc-sm-zstd_falff falff template func NIfTI +space-template_desc-zstd_falff falff template func NIfTI +space-template_falff falff template func NIfTI Yes Yes +space-template_desc-binarized_lfcd lfcd template func NIfTI Yes Yes +space-template_desc-binarized-sm_lfcd lfcd template func NIfTI Yes +space-template_desc-binarized-sm-zstd_lfcd lfcd template func NIfTI +space-template_desc-binarized-zstd_lfcd lfcd template func NIfTI +space-template_desc-weighted_lfcd lfcd template func NIfTI Yes Yes +space-template_desc-weighted-sm_lfcd lfcd 
template func NIfTI Yes +space-template_desc-weighted-sm-zstd_lfcd lfcd template func NIfTI +space-template_desc-weighted-zstd_lfcd lfcd template func NIfTI +space-EPItemplate_desc-bold_mask mask EPI template func NIfTI +space-EPItemplate_res-derivative_desc-bold_mask mask EPI template func NIfTI +space-bold_desc-brain_mask mask functional func NIfTI +space-bold_desc-eroded_mask mask functional func NIfTI +space-bold_label-CSF_desc-eroded_mask mask functional func NIfTI +space-bold_label-CSF_mask mask functional func NIfTI +space-bold_label-GM_desc-eroded_mask mask functional func NIfTI +space-bold_label-GM_mask mask functional func NIfTI +space-bold_label-WM_desc-eroded_mask mask functional func NIfTI +space-bold_label-WM_mask mask functional func NIfTI +space-longitudinal_desc-brain_mask mask longitudinal T1w anat NIfTI +space-longitudinal_label-CSF_desc-preproc_mask mask longitudinal T1w anat NIfTI +space-longitudinal_label-CSF_mask mask longitudinal T1w anat NIfTI +space-longitudinal_label-GM_desc-preproc_mask mask longitudinal T1w anat NIfTI +space-longitudinal_label-GM_mask mask longitudinal T1w anat NIfTI +space-longitudinal_label-WM_desc-preproc_mask mask longitudinal T1w anat NIfTI +space-longitudinal_label-WM_mask mask longitudinal T1w anat NIfTI +label-CSF_desc-eroded_mask mask T1w anat NIfTI +label-CSF_desc-preproc_mask mask T1w anat NIfTI +label-CSF_mask mask T1w anat NIfTI +label-GM_desc-eroded_mask mask T1w anat NIfTI +label-GM_desc-preproc_mask mask T1w anat NIfTI +label-GM_mask mask T1w anat NIfTI +label-WM_desc-eroded_mask mask T1w anat NIfTI +label-WM_desc-preproc_mask mask T1w anat NIfTI +label-WM_mask mask T1w anat NIfTI +space-T1w_desc-acpcbrain_mask mask T1w anat NIfTI +space-T1w_desc-brain_mask mask T1w anat NIfTI +space-T1w_desc-eroded_mask mask T1w anat NIfTI +space-template_desc-bold_mask mask template func NIfTI +space-template_res-derivative_desc-bold_mask mask template func NIfTI +dvars motion func text 
+framewise-displacement-jenkinson motion func 1D +framewise-displacement-power motion func 1D +max-displacement motion func 1D +motion-filter-info motion func text +motion-filter-plot motion func png +motion-params motion func text +movement-parameters motion func 1D +power-params motion func text +rels-displacement motion func 1D +label-CSF_probseg probseg T1w anat NIfTI +label-GM_probseg probseg T1w anat NIfTI +label-WM_probseg probseg T1w anat NIfTI +T1w-axial-qc qc anat png +T1w-sagittal-qc qc anat png +dseg-axial-qc qc anat png +desg-sagittal-qc qc anat png +bold-axial-qc qc func png +bold-sagittal-qc qc func png +bold-carpet-qc qc func png +framewise-displacement-jenkinson-plot-qc qc func png +movement-parameters-trans-qc qc func png +movement-parameters-rot-qc qc func png +bold-snr-axial-qc qc func png +bold-snr-sagittal-qc qc func png +bold-snr-hist-qc qc func png +bold-snr-qc qc func png +regressors regressors func 1D +desc-sm_reho reho functional func NIfTI Yes +desc-sm-zstd_reho reho functional func NIfTI +desc-zstd_reho reho functional func NIfTI +reho reho functional func NIfTI Yes Yes +space-template_desc-sm_reho reho template func NIfTI Yes +space-template_desc-sm-zstd_reho reho template func NIfTI +space-template_desc-zstd_reho reho template func NIfTI +space-template_reho reho template func NIfTI Yes Yes +desc-DualReg_statmap statistic template func NIfTI +desc-MultReg_statmap statistic template func NIfTI +lh-cortical-thickness-surface-map surface-derived anat Yes +rh-cortical-thickness-surface-map surface-derived anat Yes +lh-cortical-volume-surface-map surface-derived anat Yes +rh-cortical-volume-surface-map surface-derived anat Yes +lh-pial-surface-mesh surface-derived anat +rh-pial-surface-mesh surface-derived anat +raw-average surface-derived anat +lh-smoothed-surface-mesh surface-derived anat +rh-smoothed-surface-mesh surface-derived anat +lh-spherical-surface-mesh surface-derived anat Yes +rh-spherical-surface-mesh surface-derived anat Yes 
+lh-sulcal-depth-surface-maps surface-derived anat Yes +rh-sulcal-depth-surface-maps surface-derived anat Yes +lh-surface-curvature surface-derived anat +rh-surface-curvature surface-derived anat +lh-white-matter-surface-mesh surface-derived anat Yes +rh-white-matter-surface-mesh surface-derived anat Yes +space-symtemplate_desc-brain_T1w T1w symmetric template anat NIfTI +desc-brain_T1w T1w T1w anat NIfTI +desc-preproc_T1w T1w T1w anat NIfTI +desc-reorient_T1w T1w T1w anat NIfTI +space-template_desc-brain_T1w T1w template anat NIfTI +desc-Mean_timeseries timeseries func 1D +desc-MeanSCA_timeseries timeseries func 1D +desc-SpatReg_timeseries timeseries func 1D +desc-Voxel_timeseries timeseries func 1D +space-longitudinal_label-CSF_probseg tissue probability longitudinal T1w anat NIfTI +space-longitudinal_label-GM_probseg tissue probability longitudinal T1w anat NIfTI +space-longitudinal_label-WM_probseg tissue probability longitudinal T1w anat NIfTI +vmhc vmhc symmetric template func NIfTI +blip-warp xfm func NIfTI +from-bold_to-EPItemplate_mode-image_desc-linear_xfm xfm func NIfTI +from-bold_to-EPItemplate_mode-image_desc-nonlinear_xfm xfm func NIfTI +from-bold_to-EPItemplate_mode-image_xfm xfm func NIfTI +from-bold_to-symtemplate_mode-image_xfm xfm func NIfTI +from-bold_to-T1w_mode-image_desc-linear_xfm xfm func NIfTI +from-bold_to-template_mode-image_xfm xfm func NIfTI +from-EPItemplate_to-bold_mode-image_desc-linear_xfm xfm func NIfTI +from-EPItemplate_to-bold_mode-image_desc-nonlinear_xfm xfm func NIfTI +from-longitudinal_to-symtemplate_mode-image_desc-linear_xfm xfm anat NIfTI +from-longitudinal_to-symtemplate_mode-image_desc-nonlinear_xfm xfm anat NIfTI +from-longitudinal_to-symtemplate_mode-image_xfm xfm anat NIfTI +from-longitudinal_to-template_mode-image_desc-linear_xfm xfm anat NIfTI +from-longitudinal_to-template_mode-image_desc-nonlinear_xfm xfm anat NIfTI +from-longitudinal_to-template_mode-image_xfm xfm anat NIfTI 
+from-symtemplate_to-bold_mode-image_xfm xfm func NIfTI +from-symtemplate_to-longitudinal_mode-image_desc-linear_xfm xfm anat NIfTI +from-symtemplate_to-longitudinal_mode-image_desc-nonlinear_xfm xfm anat NIfTI +from-symtemplate_to-longitudinal_mode-image_xfm xfm anat NIfTI +from-symtemplate_to-T1w_mode-image_desc-linear_xfm xfm anat NIfTI +from-symtemplate_to-T1w_mode-image_desc-nonlinear_xfm xfm anat NIfTI +from-symtemplate_to-T1w_mode-image_xfm xfm anat NIfTI +from-T1w_to-symtemplate_mode-image_desc-linear_xfm xfm anat NIfTI +from-T1w_to-symtemplate_mode-image_desc-nonlinear_xfm xfm anat NIfTI +from-T1w_to-symtemplate_mode-image_xfm xfm anat NIfTI +from-T1w_to-template_mode-image_desc-linear_xfm xfm anat NIfTI +from-T1w_to-template_mode-image_desc-nonlinear_xfm xfm anat NIfTI +from-T1w_to-template_mode-image_xfm xfm anat NIfTI +from-template_to-bold_mode-image_xfm xfm func NIfTI +from-template_to-longitudinal_mode-image_desc-linear_xfm xfm anat NIfTI +from-template_to-longitudinal_mode-image_desc-nonlinear_xfm xfm anat NIfTI +from-template_to-longitudinal_mode-image_xfm xfm anat NIfTI +from-template_to-T1w_mode-image_desc-linear_xfm xfm anat NIfTI +from-template_to-T1w_mode-image_desc-nonlinear_xfm xfm anat NIfTI +from-template_to-T1w_mode-image_xfm xfm anat NIfTI +space-template_label-CSF_mask mask template anat NIfTI +space-template_label-WM_mask mask template anat NIfTI +space-template_label-GM_mask mask template anat NIfTI +space-EPItemplate_label-CSF_mask mask template func NIfTI +space-EPItemplate_label-WM_mask mask template func NIfTI +space-EPItemplate_label-GM_mask mask template func NIfTI diff --git a/CPAC/seg_preproc/seg_preproc.py b/CPAC/seg_preproc/seg_preproc.py index 922f8f3b69..302eedf125 100644 --- a/CPAC/seg_preproc/seg_preproc.py +++ b/CPAC/seg_preproc/seg_preproc.py @@ -136,7 +136,8 @@ def process_segment_map(wf_name, use_priors, use_custom_threshold, reg_tool): :width: 1100 :height: 480 - """ # noqa + """ + # pylint: 
disable=import-outside-toplevel,redefined-outer-name,reimported import nipype.interfaces.utility as util preproc = pe.Workflow(name=wf_name) @@ -457,7 +458,7 @@ def create_seg_preproc_freesurfer(config=None, outputspec.wm_mask : string (nifti file) outputs White Matter mask - """ # noqa + """ preproc = pe.Workflow(name=wf_name) inputnode = pe.Node(util.IdentityInterface(fields=['subject_dir']), diff --git a/CPAC/seg_preproc/utils.py b/CPAC/seg_preproc/utils.py index e4c23b713e..7666b2a8c3 100644 --- a/CPAC/seg_preproc/utils.py +++ b/CPAC/seg_preproc/utils.py @@ -46,7 +46,7 @@ def pick_wm_prob_0(probability_maps): file : string Path to segment_prob_0.nii.gz is returned - """ # noqa + """ if isinstance(probability_maps, list): if len(probability_maps) == 1: probability_maps = probability_maps[0] @@ -70,7 +70,7 @@ def pick_wm_prob_1(probability_maps): file : string Path to segment_prob_1.nii.gz is returned - """ # noqa + """ if isinstance(probability_maps, list): if len(probability_maps) == 1: probability_maps = probability_maps[0] @@ -94,7 +94,7 @@ def pick_wm_prob_2(probability_maps): file : string Path to segment_prob_2.nii.gz is returned - """ # noqa + """ if isinstance(probability_maps, list): if len(probability_maps) == 1: probability_maps = probability_maps[0] @@ -118,7 +118,7 @@ def pick_wm_class_0(tissue_class_files): file : string Path to segment_seg_0.nii.gz is returned - """ # noqa + """ if isinstance(tissue_class_files, list): if len(tissue_class_files) == 1: tissue_class_files = tissue_class_files[0] @@ -142,7 +142,7 @@ def pick_wm_class_1(tissue_class_files): file : string Path to segment_seg_1.nii.gz is returned - """ # noqa + """ if isinstance(tissue_class_files, list): if len(tissue_class_files) == 1: tissue_class_files = tissue_class_files[0] @@ -166,7 +166,7 @@ def pick_wm_class_2(tissue_class_files): file : string Path to segment_seg_2.nii.gz is returned - """ # noqa + """ if isinstance(tissue_class_files, list): if len(tissue_class_files) == 1: 
tissue_class_files = tissue_class_files[0] @@ -363,11 +363,15 @@ def hardcoded_antsJointLabelFusion(anatomical_brain, anatomical_brain_mask, bash_cmd = str.join(cmd) try: - retcode = subprocess.check_output(bash_cmd, shell=True) # noqa F841 - except Exception as e: + retcode = subprocess.check_output(bash_cmd, shell=True) \ + # noqa: F841 # pylint: disable=unused-variable + except Exception as e: # pylint: disable=broad-except,invalid-name + # pylint: disable=raise-missing-from raise Exception('[!] antsJointLabel segmentation method did not ' 'complete successfully.\n\nError ' - 'details:\n{0}\n{1}\n'.format(e, e.output)) + 'details:\n{0}\n{1}\n'.format( + e, + getattr(e, 'output', ''))) multiatlas_Intensity = None multiatlas_Labels = None @@ -414,7 +418,8 @@ def pick_tissue_from_labels_file(multiatlas_Labels, csf_label=[4,14,15,24,43], gm_mask : string (nifti file) wm_mask : string (nifti file) - """ # noqa + """ + # pylint: disable=import-outside-toplevel,redefined-outer-name,reimported import os import nibabel as nb import numpy as np diff --git a/CPAC/surface/tests/test_config.py b/CPAC/surface/tests/test_config.py new file mode 100644 index 0000000000..a397cd32c5 --- /dev/null +++ b/CPAC/surface/tests/test_config.py @@ -0,0 +1,26 @@ +""" +Tests for surface configuration +""" +import os +import pkg_resources as p +import pytest +import yaml + +from CPAC.pipeline.cpac_pipeline import run_workflow +from CPAC.utils.configuration import Configuration + + +@pytest.mark.timeout(60) +def test_duplicate_freesurfer(): + """The pipeline should build fast if freesurfer is not self-duplicating""" + c = Configuration(yaml.safe_load('FROM: abcd-options')) + sub_dict = yaml.safe_load(open(p.resource_filename( + "CPAC", + os.path.join( + "resources", + "configs", + "data_config_S3-BIDS-ABIDE.yml" + ) + ), 'r'))[0] + + run_workflow(sub_dict, c, False, test_config=True) diff --git a/CPAC/timeseries/timeseries_analysis.py b/CPAC/timeseries/timeseries_analysis.py index 
e8b2cd80b4..78a14c4fce 100644 --- a/CPAC/timeseries/timeseries_analysis.py +++ b/CPAC/timeseries/timeseries_analysis.py @@ -774,10 +774,10 @@ def timeseries_extraction_AVG(wf, cfg, strat_pool, pipe_num, opt=None): "outputs": ["desc-Mean_timeseries", "desc-ndmg_correlations", "atlas_name", - "desc-PearsonAfni_connectome", - "desc-PartialAfni_connectome", - "desc-PearsonNilearn_connectome", - "desc-PartialNilearn_connectome"]} + "desc-PearsonAfni_correlations", + "desc-PartialAfni_correlations", + "desc-PearsonNilearn_correlations", + "desc-PartialNilearn_correlations"]} ''' resample_functional_roi = pe.Node(Function(input_names=['in_func', 'in_roi', @@ -864,7 +864,7 @@ def timeseries_extraction_AVG(wf, cfg, strat_pool, pipe_num, opt=None): output_desc = ''.join(term.lower().capitalize() for term in [ cm_measure, cm_tool]) - matrix_outputs[f'desc-{output_desc}_connectome'] = ( + matrix_outputs[f'desc-{output_desc}_correlations'] = ( timeseries_correlation, 'outputspec.out_file') # - NDMG diff --git a/CPAC/utils/configuration.py b/CPAC/utils/configuration.py index 9c1dc83390..bb99c82ee3 100644 --- a/CPAC/utils/configuration.py +++ b/CPAC/utils/configuration.py @@ -117,6 +117,15 @@ def __init__(self, config_map=None): regressor['Name'] = f'Regressor-{str(i + 1)}' # replace spaces with hyphens in Regressor 'Name's regressor['Name'] = regressor['Name'].replace(' ', '-') + + # Don't double-run FreeSurfer + try: + if 'FreeSurfer-ABCD' in config_map['anatomical_preproc'][ + 'brain_extraction']['using']: + config_map['surface_analysis']['freesurfer']['run'] = False + except TypeError: + pass + config_map = schema(config_map) # remove 'FROM' before setting attributes now that it's imported diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index b8e04ef3de..e1c17f813a 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -5,7 +5,7 @@ from CPAC.pipeline import nipype_pipeline_engine as pe import nipype.interfaces.afni as afni -logger = 
logging.getLogger('workflow') +logger = logging.getLogger('nipype.workflow') from CPAC.utils import function from CPAC.utils.interfaces.function import Function @@ -716,13 +716,13 @@ def check_for_s3(file_path, creds_path=None, dl_dir=None, img_type='other', if img_type == 'anat': if len(img_nii.shape) != 3: - raise IOError('File: %s must be an anatomical image with 3 ' \ + raise IOError('File: %s must be an anatomical image with 3 ' 'dimensions but %d dimensions found!' % (local_path, len(img_nii.shape))) elif img_type == 'func': - if len(img_nii.shape) != 4: - raise IOError('File: %s must be a functional image with 4 ' \ - 'dimensions but %d dimensions found!' + if len(img_nii.shape) not in [3, 4]: + raise IOError('File: %s must be a functional image with 3 or ' + '4 dimensions but %d dimensions found!' % (local_path, len(img_nii.shape))) return local_path diff --git a/CPAC/utils/interfaces/netcorr.py b/CPAC/utils/interfaces/netcorr.py index c1e79365ee..72fd89c6b6 100644 --- a/CPAC/utils/interfaces/netcorr.py +++ b/CPAC/utils/interfaces/netcorr.py @@ -189,7 +189,7 @@ class NetCorr(AFNICommand): '3dNetCorr -prefix sub0.tp1.ncorr -fish_z -inset functional.nii -in_rois maps.nii -mask mask.nii -ts_wb_Z -ts_wb_corr' >>> res = ncorr.run() # doctest: +SKIP - """ # noqa E501 # pylint: disable=line-too-long + """ # noqa: E501 # pylint: disable=line-too-long _cmd = "3dNetCorr" input_spec = NetCorrInputSpec output_spec = NetCorrOutputSpec diff --git a/CPAC/utils/monitoring/__init__.py b/CPAC/utils/monitoring/__init__.py index c4c5162de3..aaf7f5148a 100644 --- a/CPAC/utils/monitoring/__init__.py +++ b/CPAC/utils/monitoring/__init__.py @@ -1,10 +1,12 @@ '''Module to customize Nipype's process monitoring for use in C-PAC -See https://fcp-indi.github.com/docs/developer/nodes for C-PAC-specific documentation. 
-See https://nipype.readthedocs.io/en/latest/api/generated/nipype.utils.profiler.html for Nipype's documentation.''' # noqa E501 +See https://fcp-indi.github.io/docs/developer/nodes for C-PAC-specific documentation. +See https://nipype.readthedocs.io/en/latest/api/generated/nipype.utils.profiler.html for Nipype's documentation.''' # noqa: E501 # pylint: disable=line-too-long +from .custom_logging import set_up_logger from .monitoring import LoggingHTTPServer, LoggingRequestHandler, \ log_nodes_cb, log_nodes_initial, monitor_server, \ recurse_nodes __all__ = ['LoggingHTTPServer', 'LoggingRequestHandler', 'log_nodes_cb', - 'log_nodes_initial', 'monitor_server', 'recurse_nodes'] + 'log_nodes_initial', 'monitor_server', 'recurse_nodes', + 'set_up_logger'] diff --git a/CPAC/utils/monitoring/custom_logging.py b/CPAC/utils/monitoring/custom_logging.py new file mode 100644 index 0000000000..cd7ec2ae09 --- /dev/null +++ b/CPAC/utils/monitoring/custom_logging.py @@ -0,0 +1,55 @@ +'''Functions for logging.''' +import logging +import os + + +def set_up_logger(name, filename=None, level=None, log_dir=None): + r'''Function to initialize a logger + + Parameters + ---------- + name : str + logger name (for subsequent calls to ``logging.getLogger``) to + write to the same log file + + filename : str, optional + filename to write log to.
If not specified, filename will be + the same as ``name`` with the extension ``log`` + + level : str, optional + one of ``{critical, error, warning, info, debug, notset}``, + case-insensitive + + log_dir : str, optional + + Returns + ------- + logger : logging.Handler + initialized logging Handler + + Examples + -------- + >>> lg = set_up_logger('test') + >>> lg.handlers[0].baseFilename.split('/')[-1] + 'test.log' + >>> lg.level + 0 + >>> lg = set_up_logger('second_test', 'specific_filename.custom', 'debug') + >>> lg.handlers[0].baseFilename.split('/')[-1] + 'specific_filename.custom' + >>> lg.level + 10 + ''' + if filename is None: + filename = f'{name}.log' + try: + level = getattr(logging, level.upper()) + except AttributeError: + level = logging.NOTSET + if log_dir is None: + log_dir = os.getcwd() + logger = logging.getLogger(name) + logger.setLevel(level) + handler = logging.FileHandler(os.path.join(log_dir, filename)) + logger.addHandler(handler) + return logger diff --git a/CPAC/utils/strategy.py b/CPAC/utils/strategy.py index 6b68865bc4..8eab5caf19 100644 --- a/CPAC/utils/strategy.py +++ b/CPAC/utils/strategy.py @@ -3,7 +3,7 @@ import warnings import logging -logger = logging.getLogger('workflow') +logger = logging.getLogger('nipype.workflow') class Strategy(object): diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 196e6fccd0..60c2c7454a 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -162,7 +162,7 @@ def create_id_string(unique_id, resource, scan_id=None, atlas_id=None, if atlas_id: if '_' in atlas_id: - atlas_id = atlas_id.replace("_", "") + atlas_id = atlas_id.replace('_', '') resource = f'atlas-{atlas_id}_{resource}' if 'sub-' not in unique_id: @@ -251,8 +251,8 @@ def get_zscore(map_node=False, wf_name='z_score'): >>> wf.inputs.inputspec.input_file = '/home/data/graph_working_dir/calculate_centrality/degree_centrality_binarize.nii.gz' >>> wf.inputs.inputspec.mask_file = '/home/data/graphs/GraphGeneration/new_mask_3m.nii.gz' >>> 
wf.run() # doctest: +SKIP - """ # noqa - + """ # noqa: E501 # pylint: disable=line-too-long + # pylint: disable=import-outside-toplevel,redefined-outer-name,reimported from CPAC.pipeline import nipype_pipeline_engine as pe import nipype.interfaces.utility as util import nipype.interfaces.fsl as fsl @@ -1666,7 +1666,7 @@ def dct_diff(dct1, dct2): ... 'pipeline_config_fmriprep-options.yml') >>> dct_diff(pipeline, pipeline2)['pipeline_setup']['pipeline_name'] ('cpac-default-pipeline', 'cpac_fmriprep-options') - ''' # noqa + ''' diff = {} for key in dct1: if isinstance(dct1[key], dict): @@ -1697,7 +1697,8 @@ def dct_diff(dct1, dct2): return {} -def list_item_replace(l, old, new): # noqa E741 +def list_item_replace(l, # noqa: E741 # pylint: disable=invalid-name + old, new): '''Function to replace an item in a list Parameters @@ -1725,7 +1726,7 @@ def list_item_replace(l, old, new): # noqa E741 if isinstance(l, list) and old in l: l[l.index(old)] = new elif isinstance(l, str): - l = l.replace(old, new) # noqa E741 + l = l.replace(old, new) # noqa: E741 return l @@ -1917,14 +1918,15 @@ def update_config_dict(old_dict): Examples -------- - >>> a, b, c = update_config_dict({'pipelineName': 'example-pipeline', '2': None}) + >>> a, b, c = update_config_dict({ + ... 'pipelineName': 'example-pipeline', '2': None}) >>> a {'pipeline_setup': {'pipeline_name': 'example-pipeline'}} >>> b {'2': None} >>> c {'pipeline_setup': {'pipeline_name': 'example-pipeline'}, '2': None} - ''' # noqa + ''' def _append_to_list(current_value, new_value): '''Helper function to add new_value to the current_value list or create a list if one does not exist. Skips falsy elements @@ -2217,7 +2219,9 @@ def update_nested_dict(d_base, d_update, fully_specified=False): ... 'write_func_outputs': False, ... 'write_debugging_outputs': False, ... 'output_tree': 'default', - ... 'generate_quality_control_images': True}, + ... 'quality_control': { + ... 'generate_quality_control_images': True, + ... 
'generate_xcpqc_files': True}}, ... 'working_directory': {'path': '/tmp', 'remove_working_dir': True}, ... 'log_directory': {'run_logging': True, 'path': '/logs'}, ... 'system_config': {'maximum_memory_per_participant': 1, @@ -2233,7 +2237,9 @@ def update_nested_dict(d_base, d_update, fully_specified=False): ... 'pipeline_name': 'cpac_fmriprep-options', 'output_directory': { ... 'path': '/output', 'write_func_outputs': False, ... 'write_debugging_outputs': False, 'output_tree': 'default', - ... 'generate_quality_control_images': True + ... 'quality_control': { + ... 'generate_quality_control_images': True, + ... 'generate_xcpqc_files': True} ... }, 'working_directory': { ... 'path': '/tmp', 'remove_working_dir': True ... }, 'log_directory': {'run_logging': True, 'path': '/logs'}, @@ -2284,7 +2290,7 @@ def update_nested_dict(d_base, d_update, fully_specified=False): ... '/cpac_templates/aal_mask_pad.nii.gz': 'Voxel' ... }, 'realignment': 'ROI_to_func'}}) True - """ # noqa + """ # noqa: E501 # pylint: disable=line-too-long # short-circuit if d_update has `*_roi_paths` and # `roi_paths_fully_specified` children @@ -2326,8 +2332,9 @@ def update_pipeline_values_1_8(d_old): >>> update_pipeline_values_1_8({'segmentation': {'tissue_segmentation': { ... 'using': ['FSL-FAST Thresholding']}}}) {'segmentation': {'tissue_segmentation': {'using': ['FSL-FAST'], 'FSL-FAST': {'thresholding': {'use': 'Auto'}}}}} - ''' # noqa - from CPAC.pipeline.schema import valid_options + ''' # noqa: E501 # pylint: disable=line-too-long + from CPAC.pipeline.schema import valid_options \ + # pylint: disable=import-outside-toplevel d = replace_in_strings(d_old.copy()) diff --git a/CPAC/utils/yaml_template.py b/CPAC/utils/yaml_template.py index d15882dcce..819fc2638a 100644 --- a/CPAC/utils/yaml_template.py +++ b/CPAC/utils/yaml_template.py @@ -76,7 +76,7 @@ def _create_import_dict(diff): ... 'run': ([True], False), ... 
'using': (['3dSkullStrip'], ['niworkflows-ants'])}}}}) {'anatomical_preproc': {'brain_extraction': {'extraction': {'run': False, 'using': ['niworkflows-ants']}}}} - ''' # noqa + ''' # noqa: E501 # pylint: disable=line-too-long if isinstance(diff, tuple) and len(diff) == 2: return diff[1] if isinstance(diff, dict): @@ -112,7 +112,8 @@ def _format_key(key, level): ''' return f'\n{" " * level * 2}{key}: ' - def _format_list_items(l, line_level): # noqa E741 + def _format_list_items(l, # noqa: E741 # pylint:disable=invalid-name + line_level): '''Helper method to handle lists in the YAML Parameters @@ -131,11 +132,11 @@ def _format_list_items(l, line_level): # noqa E741 ' - 1\n - 2\n - nested: 3' >>> _format_list_items([1, 2, {'nested': [3, {'deep': [4]}]}], 1) ' - 1\n - 2\n - nested:\n - 3\n - deep:\n - 4' - ''' # noqa + ''' # noqa: E501 # pylint: disable=line-too-long # keep short, simple lists in square brackets - if all([any([isinstance(item, item_type) for item_type in { + if all(any(isinstance(item, item_type) for item_type in { str, bool, int, float - }]) for item in l]): + }) for item in l): if len(str(l)) < 50: return str(l).replace("'", '').replace('"', '') # list long or complex lists on lines with indented '-' lead-ins diff --git a/README.md b/README.md index afc3be6937..24bcab380e 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ C-PAC: Configurable Pipeline for the Analysis of Connectomes ============================================================ -[![OpenNeuro](https://raw.githubusercontent.com/bids-standard/bids-website/gh-pages/old_website/openneuro_badge.svg?sanitize=true)](https://openneuro.org) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.164638.svg)](https://doi.org/10.5281/zenodo.164638) +[![DOI for "Moving Beyond Processing and Analysis-Related Variation in Neuroscience"](https://zenodo.org/badge/DOI/10.1101/2021.12.01.470790.svg)](https://doi.org/10.1101/2021.12.01.470790) [![DOI for "FCP-INDI/C-PAC: CPAC Version 1.0.0 
Beta"](https://zenodo.org/badge/DOI/10.5281/zenodo.164638.svg)](https://doi.org/10.5281/zenodo.164638) A configurable, open-source, Nipype-based, automated processing pipeline for resting state fMRI data. @@ -11,7 +11,7 @@ of Nipype to users in a plug-and-play fashion; no programming required. Website ------- -The C-PAC website is located here: http://fcp-indi.github.com/ +The C-PAC website is located here: https://fcp-indi.github.io/ How to Run ---------- @@ -38,4 +38,4 @@ Issue Tracker and Bugs This is a beta version of C-PAC, which means that it is still under active development. As such, although we have done our best to ensure a stable pipeline, there will likely still be a few bugs that we did not catch. If you find a bug or would like to suggest a new feature, please open an issue on the the C-PAC Github issue tracker: https://github.com/FCP-INDI/C-PAC/issues?state=open -If you would like to suggest revisions to the user documentation, please open an issue on the C-PAC website's GitHub issue tracker: https://github.com/FCP-INDI/fcp-indi.github.com/issues +If you would like to suggest revisions to the user documentation, please open an issue on the C-PAC website's GitHub issue tracker: https://github.com/FCP-INDI/fcp-indi.github.io/issues diff --git a/cpac_install.sh b/cpac_install.sh index 97c732db9a..2cd4688dc3 100755 --- a/cpac_install.sh +++ b/cpac_install.sh @@ -108,7 +108,6 @@ pip_packages=( "simplejson==3.15.0" "python-dateutil==2.7.3" "PyBASC==0.4.5" - "pathlib==1.0.1" ) ##### Helper functions for installing system dependencies. 
diff --git a/dev/circleci_data/requirements.txt b/dev/circleci_data/requirements.txt index 6e3f8dc218..a0fe606792 100644 --- a/dev/circleci_data/requirements.txt +++ b/dev/circleci_data/requirements.txt @@ -4,5 +4,6 @@ spython >= 0.0.81 pytest pytest_bdd pytest_click +pytest-timeout pyyaml yamlordereddictloader \ No newline at end of file diff --git a/dev/circleci_data/test_external_utils.py b/dev/circleci_data/test_external_utils.py index 094e29c091..36a9ac5c8c 100644 --- a/dev/circleci_data/test_external_utils.py +++ b/dev/circleci_data/test_external_utils.py @@ -7,7 +7,8 @@ sys.path.append(CPAC_DIR) DATA_DIR = os.path.join(CPAC_DIR, 'dev', 'circleci_data') -from CPAC.__main__ import utils as CPAC_main_utils # noqa E402 +from CPAC.__main__ import utils as CPAC_main_utils \ + # noqa: E402 # pylint: disable=wrong-import-position def test_build_data_config(cli_runner): @@ -103,8 +104,8 @@ def _delete_test_yaml(test_yaml): def _test_repickle(pickle_path, gzipped=False): + # pylint: disable=import-outside-toplevel,unused-import backup = _Backup(pickle_path) if gzipped: - import gzip # noqa F401 - + import gzip # noqa: F401 backup.restore() diff --git a/dev/docker_data/default_pipeline.yml b/dev/docker_data/default_pipeline.yml index 6b2590024d..d6ef310118 100644 --- a/dev/docker_data/default_pipeline.yml +++ b/dev/docker_data/default_pipeline.yml @@ -1,7 +1,7 @@ %YAML 1.1 --- # CPAC Pipeline Configuration YAML file -# Version 1.8.2 +# Version 1.8.3 # # http://fcp-indi.github.io for more info. # @@ -47,8 +47,13 @@ pipeline_setup: # Options: default, ndmg output_tree: "default" - # Generate quality control pages containing preprocessing and derivative outputs. - generate_quality_control_images: True + # Quality control outputs + quality_control: + # Generate quality control pages containing preprocessing and derivative outputs. 
+ generate_quality_control_images: True + + # Generate eXtensible Connectivity Pipeline-style quality control files + generate_xcpqc_files: False working_directory: @@ -80,6 +85,13 @@ pipeline_setup: system_config: + # Random seed used to fix the state of execution. + # If unset, each process uses its own default. + # If set, a `random.log` file will be generated logging the random seed and each node to which that seed was applied. + # If set to a positive integer (up to 2147483647), that integer will be used to seed each process that accepts a random seed. + # If set to 'random', a random positive integer (up to 2147483647) will be generated and that seed will be used to seed each process that accepts a random seed. + random_seed: + # Select Off if you intend to run CPAC on a single machine. # If set to On, CPAC will attempt to submit jobs through the job scheduler / resource manager selected below. on_grid: @@ -158,6 +170,7 @@ surface_analysis: # select those 'Freesurfer-' labeled options further below in anatomical_preproc. freesurfer: + # If anatomical_preproc['brain_extraction']['using'] includes FreeSurfer-ABCD and this switch is On, C-PAC will automatically turn this switch Off to avoid running FreeSurfer twice unnecessarily run: Off # Add extra arguments to recon-all command diff --git a/dev/docker_data/run.py b/dev/docker_data/run.py index df611cd0b9..bfb29b3131 100755 --- a/dev/docker_data/run.py +++ b/dev/docker_data/run.py @@ -12,6 +12,7 @@ import yaml from CPAC import __version__ +from CPAC.pipeline.random_state import set_up_random_state from CPAC.utils.bids_utils import create_cpac_data_config, \ load_cpac_data_config, \ load_yaml_config, \ @@ -204,7 +205,16 @@ def run_main(): 'speed up this preprocessing step. This ' 'number cannot be greater than the number of ' 'cores per participant.') - + parser.add_argument('--random_seed', type=str, + help='Random seed used to fix the state of execution. ' + 'If unset, each process uses its own default. 
If ' + 'set, a `random.log` file will be generated ' + 'logging the random state used by each process. ' + 'If set to a positive integer (up to 2147483647' + '), that integer will be used to seed each ' + 'process. If set to \'random\', a random seed ' + 'will be generated and recorded for each ' + 'process.') parser.add_argument('--save_working_dir', nargs='?', help='Save the contents of the working directory.', default=False) @@ -552,6 +562,15 @@ def run_main(): int(c['pipeline_setup']['system_config']['num_ants_threads']) ) + if args.random_seed: + c['pipeline_setup']['system_config']['random_seed'] = \ + args.random_seed + + if c['pipeline_setup']['system_config']['random_seed'] is not None: + c['pipeline_setup']['system_config']['random_seed'] = \ + set_up_random_state(c['pipeline_setup']['system_config'][ + 'random_seed']) + c['disable_log'] = args.disable_file_logging if args.save_working_dir is not False: diff --git a/requirements.txt b/requirements.txt index 0cb6d1b5b8..2f18473131 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,5 +24,4 @@ simplejson==3.15.0 scikit-learn==0.22.1 traits==4.6.0 PyBASC==0.4.5 -pathlib==1.0.1 voluptuous>=0.12.0 diff --git a/version b/version index 57f52717b0..1182b2ff77 100644 --- a/version +++ b/version @@ -1 +1 @@ -v1.8.2 +v1.8.3