Skip to content

Commit

Permalink
🔀 Merge v1.8.3 to main (#1658)
Browse files Browse the repository at this point in the history
  • Loading branch information
shnizzedy authored Feb 11, 2022
2 parents a33dfe8 + dd31ca5 commit 8042446
Show file tree
Hide file tree
Showing 92 changed files with 2,283 additions and 782 deletions.
33 changes: 32 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,36 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [1.8.3] - 2022-02-11

### Added
- Added XCP-style quality control file
- Added RBC-options pipeline preconfiguration
- Added `engine.log` (when verbose debugging is on)
- Added ability to fix random seed for
- `antsAI`
- `antsRegistration`
- `Atropos` (fixed but not specified)
- `fslmaths`
- `mri_vol2vol`
- `recon-all`
- Added ability to use lateral ventricles mask in place of cerebrospinal fluid mask when segmentation is Off, specifically for the rodent pipeline, but works on any dataset when segmentation is off

### Changed
- In a given pipeline configuration, segmentation probability maps and binary tissue masks are warped to template space, and those warped masks are included in the output directory
- if `registration_workflows['functional_registration']['EPI_registration']['run segmentation']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `EPI_Template`

and/or
- if `registration_workflows['anatomical_registration']['run']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `T1_Template`
- Renamed connectivity matrices from `*_connectome.tsv` to `*_correlations.tsv`
- Moved some ephemeral logging statements into `pypeline.log`

### Fixed
- Fixed [bug](https://github.com/FCP-INDI/C-PAC/issues/1638) in which working connectivity matrix filepaths were generated incorrectly, preventing generating matrices depending on container bindings
- Fixed broken links in README
- Fixed [bug](https://github.com/FCP-INDI/C-PAC/issues/1575) in which anatomical-only configurations required functional data directories
- Fixed [bug](https://github.com/FCP-INDI/C-PAC/issues/1532) in which nuisance regressors would crash when segmentation is off and no CSF mask is provided

## [1.8.2] - 2021-12-02

### Added
Expand Down Expand Up @@ -41,5 +71,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

See [Version 1.8.1 Beta](https://fcp-indi.github.io/docs/user/release_notes/v1.8.1) for release notes for v1.8.1 and [Release Notes](https://fcp-indi.github.io/docs/user/release_notes) for all release notes back to v0.1.1.

[unreleased]: https://github.com/FCP-INDI/C-PAC/compare/v1.8.3...develop
[1.8.3]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.3
[1.8.2]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.2
[1.8.1]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.1
31 changes: 14 additions & 17 deletions CPAC/anat_preproc/anat_preproc.py
Original file line number Diff line number Diff line change
Expand Up @@ -687,22 +687,19 @@ def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt):

def niworkflows_ants_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
# Skull-stripping using niworkflows-ants
anat_skullstrip_ants = init_brain_extraction_wf(tpl_target_path=
cfg.anatomical_preproc[
'brain_extraction'][
'niworkflows-ants'][
'template_path'],
tpl_mask_path=
cfg.anatomical_preproc[
'brain_extraction'][
'niworkflows-ants'][
'mask_path'],
tpl_regmask_path=
cfg.anatomical_preproc[
'brain_extraction'][
'niworkflows-ants'][
'regmask_path'],
name='anat_skullstrip_ants')
anat_skullstrip_ants = init_brain_extraction_wf(
tpl_target_path=cfg.anatomical_preproc['brain_extraction'][
'niworkflows-ants'][
'template_path'],
tpl_mask_path=cfg.anatomical_preproc['brain_extraction'][
'niworkflows-ants'][
'mask_path'],
tpl_regmask_path=cfg.anatomical_preproc['brain_extraction'][
'niworkflows-ants'][
'regmask_path'],
name='anat_skullstrip_ants',
atropos_use_random_seed=cfg.pipeline_setup['system_config'][
'random_seed'] is None)

if strat_pool.check_rpool('desc-preproc_T1w') or \
strat_pool.check_rpool('desc-reorient_T1w') or \
Expand Down Expand Up @@ -3191,7 +3188,7 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None
wf.connect(node, out, merge_t1_acpc_to_list, 'in3')

merge_t1_acpc = pe.Node(interface=fslMerge(),
name='merge_t1_acpc')
name=f'merge_t1_acpc_{pipe_num}')

merge_t1_acpc.inputs.dimension = 't'

Expand Down
5 changes: 4 additions & 1 deletion CPAC/anat_preproc/ants.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@

# general purpose
from collections import OrderedDict
from multiprocessing import cpu_count
from logging import getLogger
from pkg_resources import resource_filename as pkgr_fn
from packaging.version import parse as parseversion, Version

Expand Down Expand Up @@ -440,6 +440,9 @@ def init_atropos_wf(name='atropos_wf',
use_random_seed=use_random_seed),
name='01_atropos', n_procs=omp_nthreads, mem_gb=mem_gb)

if not use_random_seed:
getLogger('random').info('%s # (Atropos constant)', atropos.name)

# massage outputs
pad_segm = pe.Node(ImageMath(operation='PadImage', op2='%d' % padding),
name='02_pad_segm')
Expand Down
33 changes: 11 additions & 22 deletions CPAC/connectome/connectivity_matrix.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for creating connectome connectivity matrices."""
import os
from warnings import warn
import numpy as np
from nilearn.connectome import ConnectivityMeasure
Expand All @@ -20,13 +21,11 @@
}


def connectome_name(timeseries, atlas_name, tool, method):
def connectome_name(atlas_name, tool, method):
"""Helper function to create connectome file filename
Parameters
----------
timeseries : str
path to input timeseries
atlas_name : str
atlas name
Expand All @@ -41,19 +40,9 @@ def connectome_name(timeseries, atlas_name, tool, method):
-------
str
"""
method = ''.join(word.capitalize() for word in [tool, method])
new_filename_parts = [part for part in timeseries.split('_')[:-1][::-1] if
not part.startswith('space-')]
atlas_index = len(new_filename_parts) - 1
if any(filename_part.startswith('desc-') for filename_part in
new_filename_parts):
for i, filename_part in enumerate(new_filename_parts):
if filename_part.startswith('desc-'):
new_filename_parts[-i] = f'desc-{method}'
atlas_index = -(i - 1)
break
new_filename_parts.insert(atlas_index, f'atlas-{atlas_name}')
return '_'.join([*new_filename_parts[::-1], 'connectome.tsv'])
return os.path.join(os.getcwd(), '_'.join([
f'atlas-{atlas_name}', f'desc-{tool}{method}', 'connectome.tsv'
]))


def get_connectome_method(method, tool):
Expand Down Expand Up @@ -111,7 +100,7 @@ def compute_connectome_nilearn(in_rois, in_file, method, atlas_name):
numpy.ndarray or NotImplemented
"""
tool = 'Nilearn'
output = connectome_name(in_file, atlas_name, tool, method)
output = connectome_name(atlas_name, tool, method)
method = get_connectome_method(method, tool)
if method is NotImplemented:
return NotImplemented
Expand Down Expand Up @@ -156,21 +145,21 @@ def create_connectome_afni(name, method, pipe_num):
name='netcorrStripHeader'
f'{method}_{pipe_num}')

name_output_node = pe.Node(Function(input_names=['timeseries',
'atlas_name',
name_output_node = pe.Node(Function(input_names=['atlas_name',
'tool',
'method'],
output_names=['filename'],
imports=['import os'],
function=connectome_name),
name=f'connectomeName{method}_{pipe_num}')
name=f'connectomeName{method}_{pipe_num}',
as_module=True)
name_output_node.inputs.tool = 'Afni'

wf.connect([
(inputspec, timeseries_correlation, [('in_rois', 'in_rois'),
('in_file', 'in_file'),
('mask', 'mask')]),
(inputspec, name_output_node, [('in_file', 'timeseries'),
('atlas_name', 'atlas_name'),
(inputspec, name_output_node, [('atlas_name', 'atlas_name'),
('method', 'method')]),
(timeseries_correlation, strip_header_node, [
('out_corr_matrix', 'in_file')]),
Expand Down
5 changes: 3 additions & 2 deletions CPAC/cwas/tests/test_cwas.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,8 @@ def run_cwas(self):

# Read in list of subject functionals
subjects_list = [
l.strip().strip('"') for l in open(sfile).readlines() # noqa E741
l.strip().strip('"') for # noqa: E741
l in open(sfile).readlines() # pylint: disable=consider-using-with
]

# Read in design/regressor file
Expand All @@ -93,7 +94,7 @@ def run_cwas(self):
c.inputs.inputspec.f_samples = nperms
c.inputs.inputspec.parallel_nodes = 4
# c.base_dir = op.join(obase, 'results_fs%i_pn%i' % \
# (c.inputs.inputspec.f_samples, c.inputs.inputspec.parallel_nodes)) # noqa E501
# (c.inputs.inputspec.f_samples, c.inputs.inputspec.parallel_nodes)) # noqa: E501 # pylint: disable=line-too-long
c.base_dir = op.join(self.base, "results_%s.py" % self.name)

# export MKL_NUM_THREADS=X # in command line
Expand Down
2 changes: 1 addition & 1 deletion CPAC/func_preproc/func_ingress.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from nipype import logging
logger = logging.getLogger('workflow')
logger = logging.getLogger('nipype.workflow')

from CPAC.pipeline import nipype_pipeline_engine as pe

Expand Down
2 changes: 1 addition & 1 deletion CPAC/func_preproc/func_preproc.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from nipype import logging
from nipype.interfaces import ants

logger = logging.getLogger('workflow')
logger = logging.getLogger('nipype.workflow')

from CPAC.pipeline import nipype_pipeline_engine as pe
import nipype.interfaces.fsl as fsl
Expand Down
9 changes: 4 additions & 5 deletions CPAC/info.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
# version
_version_major = 1
_version_minor = 8
_version_micro = 2
_version_micro = 3
_version_extra = ''


Expand Down Expand Up @@ -87,15 +87,15 @@ def get_cpac_gitversion():
Website
-------
CPAC website is located here: http://fcp-indi.github.com/
CPAC website is located here: https://fcp-indi.github.io/
Documentation
-------------
User documentation can be found here: http://fcp-indi.github.com/docs/user/index.html
User documentation can be found here: https://fcp-indi.github.io/docs/user/index.html
Developer documentation can be found here: http://fcp-indi.github.com/docs/developer/index.html
Developer documentation can be found here: https://fcp-indi.github.io/docs/developer/index.html
Documentation pertaining to this latest release can be found here: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.3.0
Expand Down Expand Up @@ -161,6 +161,5 @@ def get_cpac_gitversion():
"simplejson==3.15.0",
"traits==4.6.0",
"PyBASC==0.4.5",
"pathlib==1.0.1",
"voluptuous>=0.12.0",
]
2 changes: 1 addition & 1 deletion CPAC/network_centrality/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from CPAC.network_centrality.utils import merge_lists, check_centrality_params
from CPAC.pipeline.schema import valid_options

logger = logging.getLogger('workflow')
logger = logging.getLogger('nipype.workflow')


def connect_centrality_workflow(workflow, c, resample_functional_to_template,
Expand Down
Loading

0 comments on commit 8042446

Please sign in to comment.