From 5d96d44d0fb9e643f95c924500420ce03ebe75ac Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Tue, 8 Nov 2016 17:45:36 -0500 Subject: [PATCH 01/37] Removed deprecated CPAC.AWS import calls, and included new input checks for the participant list builder. --- CPAC/AWS/aws_utils.py | 2 +- CPAC/GUI/interface/pages/settings.py | 6 +- .../interface/windows/dataconfig_window.py | 78 +++++++++++++++++-- CPAC/__init__.py | 2 +- CPAC/info.py | 35 +++++---- CPAC/utils/build_sublist.py | 4 +- LICENSE | 2 +- README.md | 8 +- setup.py | 5 +- 9 files changed, 108 insertions(+), 34 deletions(-) diff --git a/CPAC/AWS/aws_utils.py b/CPAC/AWS/aws_utils.py index a3d2c63597..0cfb43afed 100644 --- a/CPAC/AWS/aws_utils.py +++ b/CPAC/AWS/aws_utils.py @@ -405,7 +405,7 @@ def test_bucket_access(creds_path, output_directory, subject_id): # Import packages import os import botocore.exceptions as bexc - from CPAC.AWS import fetch_creds + from indi_aws import fetch_creds # Init variables s3_str = 's3://' diff --git a/CPAC/GUI/interface/pages/settings.py b/CPAC/GUI/interface/pages/settings.py index 0847ecc8a4..fb2a85b0f2 100644 --- a/CPAC/GUI/interface/pages/settings.py +++ b/CPAC/GUI/interface/pages/settings.py @@ -85,7 +85,7 @@ def __init__(self, parent, counter=0): self.page.add(label="Maximum Memory Per Participant (GB) ", control=control.INT_CTRL, - name='memoryAllocatedPerSubject', + name='maximumMemoryPerParticipant', type=dtype.NUM, comment="The maximum amount of memory each " \ "participant's workflow can allocate. 
Use " \ @@ -104,7 +104,7 @@ def __init__(self, parent, counter=0): self.page.add(label="Maximum Number of Cores Per Participant ", control=control.INT_CTRL, - name='numCoresPerSubject', + name='maxCoresPerParticipant', type=dtype.NUM, comment="The maximum amount of cores (on a single " \ "machine) or slots on a node (on a " \ @@ -125,7 +125,7 @@ def __init__(self, parent, counter=0): self.page.add(label="Number of Participants to Run Simultaneously ", control=control.INT_CTRL, - name='numSubjectsAtOnce', + name='numParticipantsAtOnce', type=dtype.NUM, comment="The number of participant workflows to run " \ "at the same time. The maximum number of " \ diff --git a/CPAC/GUI/interface/windows/dataconfig_window.py b/CPAC/GUI/interface/windows/dataconfig_window.py index 169665374f..4661ba69f8 100644 --- a/CPAC/GUI/interface/windows/dataconfig_window.py +++ b/CPAC/GUI/interface/windows/dataconfig_window.py @@ -304,6 +304,7 @@ def run(self, config): def save(self, event, flag): config_list =[] + config_dict = {} def display(win, msg): wx.MessageBox(msg, "Error") win.SetBackgroundColour("pink") @@ -338,6 +339,67 @@ def display(win, msg): display(win,"%s field contains incorrect path. 
Please update the path!"%ctrl.get_name()) config_list.append((name, value, dtype)) + config_dict[name] = (value, dtype) + + # some final checks + if "BIDS" in config_dict["dataFormat"][0]: + if len(config_dict["anatomicalTemplate"][0]) > 0 or \ + len(config_dict["functionalTemplate"][0]) > 0: + err = wx.MessageDialog(self, "Custom filepath template " \ + "provided, but data format "\ + "is set to BIDS instead of "\ + "Custom.", + 'Error!', + wx.OK | wx.ICON_ERROR) + err.ShowModal() + err.Destroy() + return + + elif not os.path.exists(config_dict["bidsBaseDir"][0]): + err = wx.MessageDialog(self, "Data format is set to " \ + "BIDS, but no BIDS base " \ + "directory is set, or the " \ + "BIDS directory does not " \ + "exist.", + 'Error!', + wx.OK | wx.ICON_ERROR) + err.ShowModal() + err.Destroy() + return + + elif "Custom" in config_dict["dataFormat"][0]: + if len(config_dict["bidsBaseDir"][0]) > 0: + err = wx.MessageDialog(self, "BIDS base directory " \ + "provided, but data format "\ + "is set to Custom instead " \ + "of BIDS.", + 'Error!', + wx.OK | wx.ICON_ERROR) + err.ShowModal() + err.Destroy() + return + + if len(config_dict["anatomicalTemplate"][0]) == 0: + err = wx.MessageDialog(self, "Custom data format " \ + "selected, but no custom " \ + "anatomical filepath " \ + "template provided.", + 'Error!', + wx.OK | wx.ICON_ERROR) + err.ShowModal() + err.Destroy() + return + + if len(config_dict["functionalTemplate"][0]) == 0: + err = wx.MessageDialog(self, "Custom data format " \ + "selected, but no custom " \ + "functional filepath " \ + "template provided.", + 'Error!', + wx.OK | wx.ICON_ERROR) + err.ShowModal() + err.Destroy() + return except Exception, e: @@ -352,7 +414,7 @@ def display(win, msg): return else: - + dlg = wx.FileDialog( self, message="Save file as ...", defaultDir=os.getcwd(), @@ -364,15 +426,17 @@ def display(win, msg): path = dlg.GetPath() dlg.Destroy() f = open(path, 'w') - for ctrl in config_list: + for ctrl_name in config_dict.keys(): - if 
"/" in ctrl[1] or "%s" in ctrl[1] \ - or 'None' in ctrl[1] or ctrl[0] =='subjectListName': - value = ctrl[1] + val = config_dict[ctrl_name][0] + + if "/" in val or "%s" in val or 'None' in val or \ + ctrl_name =='subjectListName': + value = val else: - value =[val.strip() for val in ctrl[1].split(',')] + value =[item.strip() for item in val.split(',')] - print >>f, ctrl[0], " : ", value, "\n" + print >>f, ctrl_name, " : ", value, "\n" f.close() print "saving %s"%path diff --git a/CPAC/__init__.py b/CPAC/__init__.py index d69df0cd64..925f336ea5 100644 --- a/CPAC/__init__.py +++ b/CPAC/__init__.py @@ -49,7 +49,7 @@ def test(self, label='fast', verbose=1, extra_argv=['--exe'], doctests = False, #__version__ = '0.1-git' try: - version = '1.0.0' + version = '1.0.1' # gitproc = Popen(['git', 'log', '--oneline'], stdout = PIPE) diff --git a/CPAC/info.py b/CPAC/info.py index df50fcf07a..e389108851 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -10,7 +10,7 @@ # version _version_major = 1 _version_minor = 0 -_version_micro = 0 +_version_micro = 1 _version_extra = '' def get_cpac_gitversion(): @@ -19,7 +19,7 @@ def get_cpac_gitversion(): Returns ------- None or str - Version of NiPype according to git. + Version of Nipype according to git. """ import os import subprocess @@ -58,7 +58,7 @@ def get_cpac_gitversion(): _version_micro, _version_extra) -CLASSIFIERS = ["Development Status :: 3 - Alpha", # 3 - Alpha +CLASSIFIERS = ["Development Status :: 4 - Beta", "Environment :: Console", "Intended Audience :: Science/Research", "License :: OSI Approved :: BSD License", # TODO: check if this is true @@ -66,7 +66,7 @@ def get_cpac_gitversion(): "Programming Language :: Python", "Topic :: Scientific/Engineering"] -description = 'Configural Pipeline for the Analysis of Connectomes' +description = 'Configurable Pipeline for the Analysis of Connectomes' # Note: this long_description is actually a copy/paste from the top-level # README.md, so that it shows up nicely on PyPI. 
So please remember to edit
@@ -92,28 +92,30 @@ def get_cpac_gitversion():
 
 User documentation can be found here: http://fcp-indi.github.com/docs/user/index.html
 
-Developer documention can ne found here: http://fcp-indi.github.com/docs/developer/index.html
+Developer documentation can be found here: http://fcp-indi.github.com/docs/developer/index.html
+
+Documentation pertaining to this latest release can be found here: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.0.0
 
 Dicussion Forum
 ---------------
 
-CPAC Discussion forum is located here: http://www.nitrc.org/forum/forum.php?forum_id=3567
+CPAC Discussion forum is located here: https://groups.google.com/forum/#!forum/cpax_forum
 
 Troubleshooting and Help
 ------------------------
 
-This is an alpha version of CPAC, which means that it is still under active development. As such, although we have done our best to ensure a stable pipeline, 
+This is a beta version of CPAC, which means that it is still under active development. As such, although we have done our best to ensure a stable pipeline, 
 there will likely still be a few bugs that we did not catch. 
If you find a bug, have a question that is not answered in the User Guide, or would like to suggest a new feature, please create an issue on CPAC github issue page: https://github.com/FCP-INDI/C-PAC/issues?state=open """ # versions CYTHON_MIN_VERSION = '0.12.1' -MATPLOTLIB_MIN_VERSION = '1.1' -JINJA_MIN_VERSION = '2.6' +MATPLOTLIB_MIN_VERSION = '1.2' +JINJA_MIN_VERSION = '2.6' PYLOCKFILE_MIN_VERSION = '0.9' -PYYAML_MIN_VERSION = '2.0' +PYYAML_MIN_VERSION = '3.0' NAME = 'CPAC' MAINTAINER = "cpac developers" @@ -124,7 +126,7 @@ def get_cpac_gitversion(): DOWNLOAD_URL = "https://github.com/FCP-INDI/C-PAC" LICENSE = "BSD license" # TODO: figure out if this is right CLASSIFIERS = CLASSIFIERS -AUTHOR = "cpac developmers" +AUTHOR = "cpac developers" AUTHOR_EMAIL = "XXX" PLATFORMS = "OS Independent" MAJOR = _version_major @@ -132,6 +134,13 @@ def get_cpac_gitversion(): MICRO = _version_micro ISRELEASE = _version_extra == '' VERSION = __version__ -REQUIRES = ["matplotlib (>=1.2)", "Jinja2 (>=2.6)", "pylockfile (>=0.9)", - "pyyaml (>=3.0)"] +REQUIRES = ["matplotlib (>=1.2)", "pylockfile (>=0.9)", + "pyyaml (>=3.0)", "pygraphviz (>=1.3)", + "nibabel (>=2.0.1)", "nipype (>=0.12.1)", + "patsy (>=0.3)", "psutil (>=2.1)", "boto3 (>=1.2)", + "future (==0.15.2)", "prov (>=1.4.0)", + "simplejson (>=3.8.0)", "cython (>=0.12.1)", + "Jinja2 (>=2.6)", "pandas (>=0.15)", + "INDI_Tools (>=0.0.6)", "memory_profiler (>=0.41)", + "ipython (>=5.1)"] STATUS = 'stable' diff --git a/CPAC/utils/build_sublist.py b/CPAC/utils/build_sublist.py index db00f675e9..c7b2a12d0c 100644 --- a/CPAC/utils/build_sublist.py +++ b/CPAC/utils/build_sublist.py @@ -555,7 +555,7 @@ def return_s3_filepaths(path_template, creds_path=None, bids_flag=False): import os import re - from CPAC.AWS import fetch_creds + from indi_aws import fetch_creds # Check for errors if not bids_flag: @@ -648,7 +648,7 @@ def return_bids_template(base_dir, scan_type, creds_path=None): # Import packages import os - from CPAC.AWS import 
fetch_creds + from indi_aws import fetch_creds # Init variables s3_str = 's3://' diff --git a/LICENSE b/LICENSE index 3290c3c372..ad022b1250 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2014, Child Mind Institute, Inc. and C-PAC developers +Copyright (c) 2016, Child Mind Institute, Inc. and C-PAC developers All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/README.md b/README.md index c4b526eb9d..6d48968f38 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ ============================================================ C-PAC: Configurable Pipeline for the Analysis of Connectomes ============================================================ -[![DOI](https://zenodo.org/badge/9342/FCP-INDI/C-PAC.svg)](http://dx.doi.org/10.5281/zenodo.16557) +[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.164638.svg)](https://doi.org/10.5281/zenodo.164638) A configurable, open-source, Nipype-based, automated processing pipeline for resting state fMRI data. Designed for use by both novice users and experts, C-PAC brings the power, flexibility and elegance @@ -24,6 +24,8 @@ User documentation can be found here: http://fcp-indi.github.com/docs/user/index Developer documentation can be found here: http://fcp-indi.github.com/docs/developer/index.html +Documentation pertaining to this latest release can be found here: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.0.0 + Discussion Forum --------------- @@ -33,9 +35,7 @@ If you are stuck and need help or have any other questions or comments about C-P Issue Tracker and Bugs ---------------------- -This is an alpha version of C-PAC, which means that it is still under active development. As such, although we have done our best to ensure a stable pipeline, -there will likely still be a few bugs that we did not catch. 
If you find a bug or would like to suggest a new feature, -please open an issue on the the C-PAC Github issue tracker: https://github.com/FCP-INDI/C-PAC/issues?state=open +This is a beta version of C-PAC, which means that it is still under active development. As such, although we have done our best to ensure a stable pipeline, there will likely still be a few bugs that we did not catch. If you find a bug or would like to suggest a new feature, please open an issue on the the C-PAC Github issue tracker: https://github.com/FCP-INDI/C-PAC/issues?state=open If you would like to suggest revisions to the user documentation, please open an issue on the C-PAC website's Github issue tracker: https://github.com/FCP-INDI/fcp-indi.github.com/issues diff --git a/setup.py b/setup.py index 3175350172..ecf818568c 100755 --- a/setup.py +++ b/setup.py @@ -29,7 +29,8 @@ from build_helpers import INFO_VARS def configuration(parent_package='', top_path=None): - from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs + from numpy.distutils.misc_util import Configuration, \ + get_numpy_include_dirs config = Configuration(None, parent_package, top_path) config.set_options(ignore_setup_xxx_py=True, @@ -101,7 +102,7 @@ def main(**extra_args): author_email=INFO_VARS['AUTHOR_EMAIL'], platforms=INFO_VARS['PLATFORMS'], version=INFO_VARS['VERSION'], - requires=INFO_VARS['REQUIRES'], + requires = INFO_VARS['REQUIRES'], configuration = configuration, cmdclass = cmdclass, scripts = glob('scripts/*'), From 04e1e3217595fda743fe23a2cf2b75b29182d562 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Tue, 8 Nov 2016 17:47:12 -0500 Subject: [PATCH 02/37] Added updated MANIFEST file so that the build_helper.py will get included in the tarball, so that the pip install can work. 
--- MANIFEST | 566 ++++++++++++++++++++++++++++++++++++++++++++++++++++ MANIFEST.in | 1 + 2 files changed, 567 insertions(+) create mode 100644 MANIFEST create mode 100644 MANIFEST.in diff --git a/MANIFEST b/MANIFEST new file mode 100644 index 0000000000..b8363e53f6 --- /dev/null +++ b/MANIFEST @@ -0,0 +1,566 @@ +# file GENERATED by distutils, do NOT edit +build_helpers.py +setup.py +CPAC/__init__.py +CPAC/_build.py +CPAC/info.py +CPAC/setup.py +CPAC/GUI/__init__.py +CPAC/GUI/mainUI.py +CPAC/GUI/interface/__init__.py +CPAC/GUI/interface/pages/__init__.py +CPAC/GUI/interface/pages/alff.py +CPAC/GUI/interface/pages/anatomical.py +CPAC/GUI/interface/pages/centrality.py +CPAC/GUI/interface/pages/dualreg.py +CPAC/GUI/interface/pages/filtering.py +CPAC/GUI/interface/pages/functional_tab.py +CPAC/GUI/interface/pages/group_analysis.py +CPAC/GUI/interface/pages/motion.py +CPAC/GUI/interface/pages/nuisance.py +CPAC/GUI/interface/pages/reho.py +CPAC/GUI/interface/pages/sca.py +CPAC/GUI/interface/pages/settings.py +CPAC/GUI/interface/pages/smoothing.py +CPAC/GUI/interface/pages/timeseries.py +CPAC/GUI/interface/pages/vmhc.py +CPAC/GUI/interface/utils/__init__.py +CPAC/GUI/interface/utils/constants.py +CPAC/GUI/interface/utils/custom_control.py +CPAC/GUI/interface/utils/generic_class.py +CPAC/GUI/interface/utils/modelDesign_window.py +CPAC/GUI/interface/utils/modelconfig_window.py +CPAC/GUI/interface/utils/validator.py +CPAC/GUI/interface/windows/__init__.py +CPAC/GUI/interface/windows/config_window.py +CPAC/GUI/interface/windows/dataconfig_window.py +CPAC/GUI/interface/windows/main_window.py +CPAC/GUI/resources/config_parameters.txt +CPAC/GUI/resources/pipeline_names.py +CPAC/GUI/resources/html/after_warp.html +CPAC/GUI/resources/html/alff.html +CPAC/GUI/resources/html/anat.html +CPAC/GUI/resources/html/benchmark.html +CPAC/GUI/resources/html/centrality.html +CPAC/GUI/resources/html/cloud.html +CPAC/GUI/resources/html/compute_config.html 
+CPAC/GUI/resources/html/derivatives.html +CPAC/GUI/resources/html/docker.html +CPAC/GUI/resources/html/files.html +CPAC/GUI/resources/html/func.html +CPAC/GUI/resources/html/genindex.html +CPAC/GUI/resources/html/group_analysis.html +CPAC/GUI/resources/html/help.html +CPAC/GUI/resources/html/index.html +CPAC/GUI/resources/html/install.html +CPAC/GUI/resources/html/nuisance.html +CPAC/GUI/resources/html/objects.inv +CPAC/GUI/resources/html/output_config.html +CPAC/GUI/resources/html/pipeline_config.html +CPAC/GUI/resources/html/reho.html +CPAC/GUI/resources/html/rnotes.html +CPAC/GUI/resources/html/running.html +CPAC/GUI/resources/html/sca.html +CPAC/GUI/resources/html/search.html +CPAC/GUI/resources/html/searchindex.js +CPAC/GUI/resources/html/subject_list_config.html +CPAC/GUI/resources/html/tse.html +CPAC/GUI/resources/html/vmhc.html +CPAC/GUI/resources/html/_images/abide_adhd_structure.png +CPAC/GUI/resources/html/_images/after_warp_gui.png +CPAC/GUI/resources/html/_images/alff_gui.png +CPAC/GUI/resources/html/_images/alff_zuo_difference.png +CPAC/GUI/resources/html/_images/alff_zuo_trt.png +CPAC/GUI/resources/html/_images/anat_prepro_schematic.png +CPAC/GUI/resources/html/_images/anat_reg_gui.png +CPAC/GUI/resources/html/_images/basc_manu_schematic.png +CPAC/GUI/resources/html/_images/centrality_gui.png +CPAC/GUI/resources/html/_images/centrality_zuo_cover.png +CPAC/GUI/resources/html/_images/centrality_zuo_map.png +CPAC/GUI/resources/html/_images/cloud_gui_sge.png +CPAC/GUI/resources/html/_images/cloud_x2go.png +CPAC/GUI/resources/html/_images/compute_gui.png +CPAC/GUI/resources/html/_images/cwas.png +CPAC/GUI/resources/html/_images/cwas_shehzad_brains.png +CPAC/GUI/resources/html/_images/cwas_shehzad_schematic.png +CPAC/GUI/resources/html/_images/cyberduck.png +CPAC/GUI/resources/html/_images/derivs_outputs.png +CPAC/GUI/resources/html/_images/fcon_structure.png +CPAC/GUI/resources/html/_images/func_to_anat_reg.png 
+CPAC/GUI/resources/html/_images/func_to_mni_reg.png +CPAC/GUI/resources/html/_images/functional_preprocessing.png +CPAC/GUI/resources/html/_images/ga_contrast_csv.png +CPAC/GUI/resources/html/_images/ga_contrast_ftest.png +CPAC/GUI/resources/html/_images/ga_contrasts.png +CPAC/GUI/resources/html/_images/ga_contrasts_populated.png +CPAC/GUI/resources/html/_images/ga_main.png +CPAC/GUI/resources/html/_images/ga_model_setup.png +CPAC/GUI/resources/html/_images/ga_model_setup_populated.png +CPAC/GUI/resources/html/_images/main_gui.png +CPAC/GUI/resources/html/_images/main_gui_checks.png +CPAC/GUI/resources/html/_images/median_angle.png +CPAC/GUI/resources/html/_images/nki-rs_template.png +CPAC/GUI/resources/html/_images/nuisance.png +CPAC/GUI/resources/html/_images/output_gui.png +CPAC/GUI/resources/html/_images/processing_workflow.png +CPAC/GUI/resources/html/_images/registration.png +CPAC/GUI/resources/html/_images/reho_gui.png +CPAC/GUI/resources/html/_images/reho_voxel_schematic.png +CPAC/GUI/resources/html/_images/reho_yan_dmn.png +CPAC/GUI/resources/html/_images/roi_timeseries.png +CPAC/GUI/resources/html/_images/sca_gui.png +CPAC/GUI/resources/html/_images/sca_map.png +CPAC/GUI/resources/html/_images/scrubbing_gui.png +CPAC/GUI/resources/html/_images/seg_gui.png +CPAC/GUI/resources/html/_images/segmentation.png +CPAC/GUI/resources/html/_images/smoothing.png +CPAC/GUI/resources/html/_images/subject_list_gui.png +CPAC/GUI/resources/html/_images/symlink_structure.png +CPAC/GUI/resources/html/_images/tf_gui.png +CPAC/GUI/resources/html/_images/ts_options.png +CPAC/GUI/resources/html/_images/tse_gui.png +CPAC/GUI/resources/html/_images/vmhc_gee_schematic.png +CPAC/GUI/resources/html/_images/vmhc_gui.png +CPAC/GUI/resources/html/_images/vmhc_stark_regions.png +CPAC/GUI/resources/html/_images/vmhc_zuo_smoothing.png +CPAC/GUI/resources/html/_images/voxel_timeseries.png +CPAC/GUI/resources/html/_sources/after_warp.txt +CPAC/GUI/resources/html/_sources/alff.txt 
+CPAC/GUI/resources/html/_sources/anat.txt +CPAC/GUI/resources/html/_sources/benchmark.txt +CPAC/GUI/resources/html/_sources/centrality.txt +CPAC/GUI/resources/html/_sources/cloud.txt +CPAC/GUI/resources/html/_sources/compute_config.txt +CPAC/GUI/resources/html/_sources/conf.py +CPAC/GUI/resources/html/_sources/derivatives.txt +CPAC/GUI/resources/html/_sources/docker.txt +CPAC/GUI/resources/html/_sources/files.txt +CPAC/GUI/resources/html/_sources/func.txt +CPAC/GUI/resources/html/_sources/group_analysis.txt +CPAC/GUI/resources/html/_sources/help.txt +CPAC/GUI/resources/html/_sources/index.txt +CPAC/GUI/resources/html/_sources/install.txt +CPAC/GUI/resources/html/_sources/nuisance.txt +CPAC/GUI/resources/html/_sources/output_config.txt +CPAC/GUI/resources/html/_sources/pipeline_config.txt +CPAC/GUI/resources/html/_sources/reho.txt +CPAC/GUI/resources/html/_sources/remove_links.sh +CPAC/GUI/resources/html/_sources/rnotes.txt +CPAC/GUI/resources/html/_sources/running.txt +CPAC/GUI/resources/html/_sources/sca.txt +CPAC/GUI/resources/html/_sources/subject_list_config.txt +CPAC/GUI/resources/html/_sources/tse.txt +CPAC/GUI/resources/html/_sources/vmhc.txt +CPAC/GUI/resources/html/_sources/.doctrees/alff.doctree +CPAC/GUI/resources/html/_sources/.doctrees/anat.doctree +CPAC/GUI/resources/html/_sources/.doctrees/basc.doctree +CPAC/GUI/resources/html/_sources/.doctrees/benchmark.doctree +CPAC/GUI/resources/html/_sources/.doctrees/centrality.doctree +CPAC/GUI/resources/html/_sources/.doctrees/compute_config.doctree +CPAC/GUI/resources/html/_sources/.doctrees/cwas.doctree +CPAC/GUI/resources/html/_sources/.doctrees/data_config.doctree +CPAC/GUI/resources/html/_sources/.doctrees/derivs.doctree +CPAC/GUI/resources/html/_sources/.doctrees/dual_reg.doctree +CPAC/GUI/resources/html/_sources/.doctrees/files.doctree +CPAC/GUI/resources/html/_sources/.doctrees/fsl_ga.doctree +CPAC/GUI/resources/html/_sources/.doctrees/func.doctree 
+CPAC/GUI/resources/html/_sources/.doctrees/help.doctree +CPAC/GUI/resources/html/_sources/.doctrees/index.doctree +CPAC/GUI/resources/html/_sources/.doctrees/install.doctree +CPAC/GUI/resources/html/_sources/.doctrees/motion.doctree +CPAC/GUI/resources/html/_sources/.doctrees/nuisance.doctree +CPAC/GUI/resources/html/_sources/.doctrees/outputs.doctree +CPAC/GUI/resources/html/_sources/.doctrees/preproc.doctree +CPAC/GUI/resources/html/_sources/.doctrees/reho.doctree +CPAC/GUI/resources/html/_sources/.doctrees/rnotes.doctree +CPAC/GUI/resources/html/_sources/.doctrees/sca.doctree +CPAC/GUI/resources/html/_sources/.doctrees/slice.doctree +CPAC/GUI/resources/html/_sources/.doctrees/smoothing.doctree +CPAC/GUI/resources/html/_sources/.doctrees/temporal.doctree +CPAC/GUI/resources/html/_sources/.doctrees/tse.doctree +CPAC/GUI/resources/html/_sources/.doctrees/use.doctree +CPAC/GUI/resources/html/_sources/.doctrees/vmhc.doctree +CPAC/GUI/resources/html/_sources/.doctrees/_static/1400.doctree +CPAC/GUI/resources/html/_sources/.doctrees/_static/2500.doctree +CPAC/GUI/resources/html/_sources/.doctrees/_static/645.doctree +CPAC/GUI/resources/html/_sources/.doctrees/_static/dti.doctree +CPAC/GUI/resources/html/_sources/_images/abide_adhd_structure.png +CPAC/GUI/resources/html/_sources/_images/after_warp_gui.png +CPAC/GUI/resources/html/_sources/_images/alff_gui.png +CPAC/GUI/resources/html/_sources/_images/alff_zuo_difference.png +CPAC/GUI/resources/html/_sources/_images/alff_zuo_trt.png +CPAC/GUI/resources/html/_sources/_images/anat_prepro_schematic.png +CPAC/GUI/resources/html/_sources/_images/anat_reg_gui.png +CPAC/GUI/resources/html/_sources/_images/basc_manu_schematic.png +CPAC/GUI/resources/html/_sources/_images/centrality_gui.png +CPAC/GUI/resources/html/_sources/_images/centrality_zuo_cover.png +CPAC/GUI/resources/html/_sources/_images/centrality_zuo_map.png +CPAC/GUI/resources/html/_sources/_images/cloud_gui_sge.png 
+CPAC/GUI/resources/html/_sources/_images/cloud_x2go.png +CPAC/GUI/resources/html/_sources/_images/compute_gui.png +CPAC/GUI/resources/html/_sources/_images/cwas.png +CPAC/GUI/resources/html/_sources/_images/cwas_shehzad_brains.png +CPAC/GUI/resources/html/_sources/_images/cwas_shehzad_schematic.png +CPAC/GUI/resources/html/_sources/_images/cyberduck.png +CPAC/GUI/resources/html/_sources/_images/derivs_outputs.png +CPAC/GUI/resources/html/_sources/_images/fcon_structure.png +CPAC/GUI/resources/html/_sources/_images/func_to_anat_reg.png +CPAC/GUI/resources/html/_sources/_images/func_to_mni_reg.png +CPAC/GUI/resources/html/_sources/_images/functional_preprocessing.png +CPAC/GUI/resources/html/_sources/_images/ga_contrast_csv.png +CPAC/GUI/resources/html/_sources/_images/ga_contrast_ftest.png +CPAC/GUI/resources/html/_sources/_images/ga_contrasts.png +CPAC/GUI/resources/html/_sources/_images/ga_contrasts_populated.png +CPAC/GUI/resources/html/_sources/_images/ga_main.png +CPAC/GUI/resources/html/_sources/_images/ga_model_setup.png +CPAC/GUI/resources/html/_sources/_images/ga_model_setup_populated.png +CPAC/GUI/resources/html/_sources/_images/main_gui.png +CPAC/GUI/resources/html/_sources/_images/main_gui_checks.png +CPAC/GUI/resources/html/_sources/_images/median_angle.png +CPAC/GUI/resources/html/_sources/_images/nki-rs_template.png +CPAC/GUI/resources/html/_sources/_images/nuisance.png +CPAC/GUI/resources/html/_sources/_images/output_gui.png +CPAC/GUI/resources/html/_sources/_images/processing_workflow.png +CPAC/GUI/resources/html/_sources/_images/registration.png +CPAC/GUI/resources/html/_sources/_images/reho_gui.png +CPAC/GUI/resources/html/_sources/_images/reho_voxel_schematic.png +CPAC/GUI/resources/html/_sources/_images/reho_yan_dmn.png +CPAC/GUI/resources/html/_sources/_images/roi_timeseries.png +CPAC/GUI/resources/html/_sources/_images/sca_gui.png +CPAC/GUI/resources/html/_sources/_images/sca_map.png +CPAC/GUI/resources/html/_sources/_images/scrubbing_gui.png 
+CPAC/GUI/resources/html/_sources/_images/seg_gui.png +CPAC/GUI/resources/html/_sources/_images/segmentation.png +CPAC/GUI/resources/html/_sources/_images/slice_time.png +CPAC/GUI/resources/html/_sources/_images/smoothing.png +CPAC/GUI/resources/html/_sources/_images/subject_list_gui.png +CPAC/GUI/resources/html/_sources/_images/symlink_structure.png +CPAC/GUI/resources/html/_sources/_images/tf_gui.png +CPAC/GUI/resources/html/_sources/_images/ts_options.png +CPAC/GUI/resources/html/_sources/_images/tse_gui.png +CPAC/GUI/resources/html/_sources/_images/vmhc_gee_schematic.png +CPAC/GUI/resources/html/_sources/_images/vmhc_gui.png +CPAC/GUI/resources/html/_sources/_images/vmhc_stark_regions.png +CPAC/GUI/resources/html/_sources/_images/vmhc_zuo_smoothing.png +CPAC/GUI/resources/html/_sources/_images/voxel_timeseries.png +CPAC/GUI/resources/html/_sources/_static/1400.html +CPAC/GUI/resources/html/_sources/_static/1400.txt +CPAC/GUI/resources/html/_sources/_static/2500.html +CPAC/GUI/resources/html/_sources/_static/2500.txt +CPAC/GUI/resources/html/_sources/_static/645.html +CPAC/GUI/resources/html/_sources/_static/645.txt +CPAC/GUI/resources/html/_sources/_static/ajax-loader.gif +CPAC/GUI/resources/html/_sources/_static/basic.css +CPAC/GUI/resources/html/_sources/_static/cmi_logo.jpg +CPAC/GUI/resources/html/_sources/_static/comment-bright.png +CPAC/GUI/resources/html/_sources/_static/comment-close.png +CPAC/GUI/resources/html/_sources/_static/comment.png +CPAC/GUI/resources/html/_sources/_static/cpac_logo.jpg +CPAC/GUI/resources/html/_sources/_static/data_config_abide.yaml +CPAC/GUI/resources/html/_sources/_static/data_config_abide_s3.yaml +CPAC/GUI/resources/html/_sources/_static/data_config_adhd.yaml +CPAC/GUI/resources/html/_sources/_static/data_config_adhd_s3.yaml +CPAC/GUI/resources/html/_sources/_static/data_config_fcon.yaml +CPAC/GUI/resources/html/_sources/_static/data_config_nki.yaml +CPAC/GUI/resources/html/_sources/_static/data_config_nki_s3.yaml 
+CPAC/GUI/resources/html/_sources/_static/doctools.js +CPAC/GUI/resources/html/_sources/_static/down-pressed.png +CPAC/GUI/resources/html/_sources/_static/down.png +CPAC/GUI/resources/html/_sources/_static/dti.txt +CPAC/GUI/resources/html/_sources/_static/example_phenotypic_file.csv +CPAC/GUI/resources/html/_sources/_static/file.png +CPAC/GUI/resources/html/_sources/_static/indi_logo.jpg +CPAC/GUI/resources/html/_sources/_static/jquery.js +CPAC/GUI/resources/html/_sources/_static/minus.png +CPAC/GUI/resources/html/_sources/_static/multiscan_parameters_nki.csv +CPAC/GUI/resources/html/_sources/_static/nature.css +CPAC/GUI/resources/html/_sources/_static/nki_cmi.png +CPAC/GUI/resources/html/_sources/_static/nki_logo.jpg +CPAC/GUI/resources/html/_sources/_static/nyu_logo.jpg +CPAC/GUI/resources/html/_sources/_static/plus.png +CPAC/GUI/resources/html/_sources/_static/pygments.css +CPAC/GUI/resources/html/_sources/_static/scan_parameters_abide.csv +CPAC/GUI/resources/html/_sources/_static/scan_parameters_adhd.csv +CPAC/GUI/resources/html/_sources/_static/scan_parameters_fcon.csv +CPAC/GUI/resources/html/_sources/_static/searchtools.js +CPAC/GUI/resources/html/_sources/_static/subject_list_group_analysis.txt +CPAC/GUI/resources/html/_sources/_static/underscore.js +CPAC/GUI/resources/html/_sources/_static/up-pressed.png +CPAC/GUI/resources/html/_sources/_static/up.png +CPAC/GUI/resources/html/_sources/_static/websupport.js +CPAC/GUI/resources/html/_sources/_static/outputs/derivatives.csv +CPAC/GUI/resources/html/_sources/_static/outputs/outputs.csv +CPAC/GUI/resources/html/_sources/_static/outputs/anatomical/registration.csv +CPAC/GUI/resources/html/_sources/_static/outputs/anatomical/segmentation.csv +CPAC/GUI/resources/html/_sources/_static/params/afterwarp_config.csv +CPAC/GUI/resources/html/_sources/_static/params/alff_config.csv +CPAC/GUI/resources/html/_sources/_static/params/anat_config.csv +CPAC/GUI/resources/html/_sources/_static/params/centrality_config.csv 
+CPAC/GUI/resources/html/_sources/_static/params/compute_config.csv +CPAC/GUI/resources/html/_sources/_static/params/data_config.csv +CPAC/GUI/resources/html/_sources/_static/params/fta_config.csv +CPAC/GUI/resources/html/_sources/_static/params/ftm_config.csv +CPAC/GUI/resources/html/_sources/_static/params/group_config.csv +CPAC/GUI/resources/html/_sources/_static/params/medianangle_config.csv +CPAC/GUI/resources/html/_sources/_static/params/nuisance_config.csv +CPAC/GUI/resources/html/_sources/_static/params/output_config.csv +CPAC/GUI/resources/html/_sources/_static/params/reho_config.csv +CPAC/GUI/resources/html/_sources/_static/params/sca_config.csv +CPAC/GUI/resources/html/_sources/_static/params/scrubbing_config.csv +CPAC/GUI/resources/html/_sources/_static/params/seg_config.csv +CPAC/GUI/resources/html/_sources/_static/params/spatialregression_config.csv +CPAC/GUI/resources/html/_sources/_static/params/tf_config.csv +CPAC/GUI/resources/html/_sources/_static/params/ts_config.csv +CPAC/GUI/resources/html/_sources/_static/params/tse_roiavg_config.csv +CPAC/GUI/resources/html/_sources/_static/params/tse_seeds_config.csv +CPAC/GUI/resources/html/_sources/_static/params/tse_voxelwiseavg_config.csv +CPAC/GUI/resources/html/_sources/_static/params/vmhc_config.csv +CPAC/GUI/resources/html/_sources/futuredocs/basc.txt +CPAC/GUI/resources/html/_sources/futuredocs/cwas.txt +CPAC/GUI/resources/html/_sources/futuredocs/tse.txt +CPAC/GUI/resources/html/_static/1400.html +CPAC/GUI/resources/html/_static/1400.txt +CPAC/GUI/resources/html/_static/2500.html +CPAC/GUI/resources/html/_static/2500.txt +CPAC/GUI/resources/html/_static/645.html +CPAC/GUI/resources/html/_static/645.txt +CPAC/GUI/resources/html/_static/ajax-loader.gif +CPAC/GUI/resources/html/_static/alert_info_32.png +CPAC/GUI/resources/html/_static/alert_warning_32.png +CPAC/GUI/resources/html/_static/basic.css +CPAC/GUI/resources/html/_static/bg-page.png +CPAC/GUI/resources/html/_static/bullet_orange.png 
+CPAC/GUI/resources/html/_static/cmi_logo.jpg +CPAC/GUI/resources/html/_static/comment-bright.png +CPAC/GUI/resources/html/_static/comment-close.png +CPAC/GUI/resources/html/_static/comment.png +CPAC/GUI/resources/html/_static/cpac_logo.jpg +CPAC/GUI/resources/html/_static/data_config_abide.yaml +CPAC/GUI/resources/html/_static/data_config_abide_s3.yaml +CPAC/GUI/resources/html/_static/data_config_adhd.yaml +CPAC/GUI/resources/html/_static/data_config_adhd_s3.yaml +CPAC/GUI/resources/html/_static/data_config_fcon.yaml +CPAC/GUI/resources/html/_static/data_config_nki.yaml +CPAC/GUI/resources/html/_static/data_config_nki_s3.yaml +CPAC/GUI/resources/html/_static/doctools.js +CPAC/GUI/resources/html/_static/down-pressed.png +CPAC/GUI/resources/html/_static/down.png +CPAC/GUI/resources/html/_static/dti.html +CPAC/GUI/resources/html/_static/dti.txt +CPAC/GUI/resources/html/_static/example_phenotypic_file.csv +CPAC/GUI/resources/html/_static/file.png +CPAC/GUI/resources/html/_static/haiku.css +CPAC/GUI/resources/html/_static/indi_logo.jpg +CPAC/GUI/resources/html/_static/jquery.js +CPAC/GUI/resources/html/_static/minus.png +CPAC/GUI/resources/html/_static/multiscan_parameters_nki.csv +CPAC/GUI/resources/html/_static/nature.css +CPAC/GUI/resources/html/_static/nki_cmi.png +CPAC/GUI/resources/html/_static/nki_logo.jpg +CPAC/GUI/resources/html/_static/nyu_logo.jpg +CPAC/GUI/resources/html/_static/plus.png +CPAC/GUI/resources/html/_static/pygments.css +CPAC/GUI/resources/html/_static/scan_parameters_abide.csv +CPAC/GUI/resources/html/_static/scan_parameters_adhd.csv +CPAC/GUI/resources/html/_static/scan_parameters_fcon.csv +CPAC/GUI/resources/html/_static/searchtools.js +CPAC/GUI/resources/html/_static/subject_list_group_analysis.html +CPAC/GUI/resources/html/_static/subject_list_group_analysis.txt +CPAC/GUI/resources/html/_static/underscore.js +CPAC/GUI/resources/html/_static/up-pressed.png +CPAC/GUI/resources/html/_static/up.png 
+CPAC/GUI/resources/html/_static/websupport.js +CPAC/GUI/resources/html/_static/outputs/derivatives.csv +CPAC/GUI/resources/html/_static/outputs/outputs.csv +CPAC/GUI/resources/html/_static/outputs/anatomical/registration.csv +CPAC/GUI/resources/html/_static/outputs/anatomical/segmentation.csv +CPAC/GUI/resources/html/_static/params/afterwarp_config.csv +CPAC/GUI/resources/html/_static/params/alff_config.csv +CPAC/GUI/resources/html/_static/params/anat_config.csv +CPAC/GUI/resources/html/_static/params/centrality_config.csv +CPAC/GUI/resources/html/_static/params/compute_config.csv +CPAC/GUI/resources/html/_static/params/data_config.csv +CPAC/GUI/resources/html/_static/params/fta_config.csv +CPAC/GUI/resources/html/_static/params/ftm_config.csv +CPAC/GUI/resources/html/_static/params/group_config.csv +CPAC/GUI/resources/html/_static/params/medianangle_config.csv +CPAC/GUI/resources/html/_static/params/nuisance_config.csv +CPAC/GUI/resources/html/_static/params/output_config.csv +CPAC/GUI/resources/html/_static/params/reho_config.csv +CPAC/GUI/resources/html/_static/params/sca_config.csv +CPAC/GUI/resources/html/_static/params/scrubbing_config.csv +CPAC/GUI/resources/html/_static/params/seg_config.csv +CPAC/GUI/resources/html/_static/params/spatialregression_config.csv +CPAC/GUI/resources/html/_static/params/tf_config.csv +CPAC/GUI/resources/html/_static/params/ts_config.csv +CPAC/GUI/resources/html/_static/params/tse_roiavg_config.csv +CPAC/GUI/resources/html/_static/params/tse_seeds_config.csv +CPAC/GUI/resources/html/_static/params/tse_voxelwiseavg_config.csv +CPAC/GUI/resources/html/_static/params/vmhc_config.csv +CPAC/GUI/resources/images/add1.jpeg +CPAC/GUI/resources/images/add1.png +CPAC/GUI/resources/images/add2.png +CPAC/GUI/resources/images/add3.jpg +CPAC/GUI/resources/images/aquachecked.ico +CPAC/GUI/resources/images/aquaflagged.ico +CPAC/GUI/resources/images/aquanotchecked.ico +CPAC/GUI/resources/images/aquanotflagged.ico 
+CPAC/GUI/resources/images/checked.ico +CPAC/GUI/resources/images/cpac_logo.jpg +CPAC/GUI/resources/images/cpac_logo2.jpg +CPAC/GUI/resources/images/cpac_logo_2.jpg +CPAC/GUI/resources/images/cpac_new_logo.png +CPAC/GUI/resources/images/cpac_rgb.png +CPAC/GUI/resources/images/flagged.ico +CPAC/GUI/resources/images/foder10.gif +CPAC/GUI/resources/images/folder.bmp +CPAC/GUI/resources/images/folder.png +CPAC/GUI/resources/images/folder10.png +CPAC/GUI/resources/images/folder2.bmp +CPAC/GUI/resources/images/folder3.gif +CPAC/GUI/resources/images/folder4.gif +CPAC/GUI/resources/images/folder5.gif +CPAC/GUI/resources/images/folder6.jpeg +CPAC/GUI/resources/images/folder7.gif +CPAC/GUI/resources/images/folder8.jpeg +CPAC/GUI/resources/images/folder9.gif +CPAC/GUI/resources/images/help.png +CPAC/GUI/resources/images/minus1.ico +CPAC/GUI/resources/images/minus2.ico +CPAC/GUI/resources/images/minus3.ico +CPAC/GUI/resources/images/minus4.ico +CPAC/GUI/resources/images/minus5.ico +CPAC/GUI/resources/images/minus9.jpg +CPAC/GUI/resources/images/notchecked.ico +CPAC/GUI/resources/images/notflagged.ico +CPAC/GUI/resources/images/plus1.ico +CPAC/GUI/resources/images/plus10.jpg +CPAC/GUI/resources/images/plus11.jpg +CPAC/GUI/resources/images/plus12.jpg +CPAC/GUI/resources/images/plus13.jpeg +CPAC/GUI/resources/images/plus2.ico +CPAC/GUI/resources/images/plus3.ico +CPAC/GUI/resources/images/plus4.ico +CPAC/GUI/resources/images/plus5.ico +CPAC/GUI/resources/images/plus6.jpeg +CPAC/GUI/resources/images/plus7.png +CPAC/GUI/resources/images/plus8.jpeg +CPAC/GUI/resources/images/plus9.jpeg +CPAC/alff/__init__.py +CPAC/alff/alff.py +CPAC/alff/utils.py +CPAC/anat_preproc/__init__.py +CPAC/anat_preproc/anat_preproc.py +CPAC/basc/__init__.py +CPAC/basc/basc.py +CPAC/basc/python_ncut_lib.py +CPAC/basc/utils.py +CPAC/cwas/__init__.py +CPAC/cwas/cwas.py +CPAC/cwas/hats.py +CPAC/cwas/mdmr.py +CPAC/cwas/subdist.py +CPAC/cwas/utils.py +CPAC/easy_thresh/__init__.py +CPAC/easy_thresh/easy_thresh.py 
+CPAC/func_preproc/__init__.py +CPAC/func_preproc/func_preproc.py +CPAC/generate_motion_statistics/__init__.py +CPAC/generate_motion_statistics/generate_motion_statistics.py +CPAC/group_analysis/__init__.py +CPAC/group_analysis/group_analysis.py +CPAC/median_angle/__init__.py +CPAC/median_angle/median_angle.py +CPAC/network_centrality/__init__.py +CPAC/network_centrality/afni_centrality_interfaces.py +CPAC/network_centrality/afni_network_centrality.py +CPAC/network_centrality/core.py +CPAC/network_centrality/resting_state_centrality.py +CPAC/network_centrality/thresh_and_sum.pyx +CPAC/network_centrality/utils.py +CPAC/network_centrality/z_score.py +CPAC/nuisance/__init__.py +CPAC/nuisance/nuisance.py +CPAC/nuisance/utils.py +CPAC/pipeline/__init__.py +CPAC/pipeline/cpac_basc_pipeline.py +CPAC/pipeline/cpac_cwas_pipeline.py +CPAC/pipeline/cpac_ga_model_generator.py +CPAC/pipeline/cpac_group_analysis_pipeline.py +CPAC/pipeline/cpac_group_runner.py +CPAC/pipeline/cpac_pipeline.py +CPAC/pipeline/cpac_runner.py +CPAC/pipeline/new_cpac_ga_model_generator.py +CPAC/pipeline/new_cpac_group_runner.py +CPAC/qc/__init__.py +CPAC/qc/blue.py +CPAC/qc/cyan_to_yellow.py +CPAC/qc/green.py +CPAC/qc/qc.py +CPAC/qc/red.py +CPAC/qc/red_to_blue.py +CPAC/qc/utils.py +CPAC/registration/__init__.py +CPAC/registration/registration.py +CPAC/registration/utils.py +CPAC/reho/__init__.py +CPAC/reho/reho.py +CPAC/reho/utils.py +CPAC/resources/bootstrap/css/.Rhistory +CPAC/resources/bootstrap/css/bootstrap-responsive.css +CPAC/resources/bootstrap/css/bootstrap-responsive.min.css +CPAC/resources/bootstrap/css/bootstrap.css +CPAC/resources/bootstrap/css/bootstrap.min.css +CPAC/resources/bootstrap/img/glyphicons-halflings-white.png +CPAC/resources/bootstrap/img/glyphicons-halflings.png +CPAC/resources/bootstrap/js/bootstrap.js +CPAC/resources/bootstrap/js/bootstrap.min.js +CPAC/resources/bootstrap/js/jquery.js +CPAC/resources/bootstrap/js/jquery.min.js +CPAC/resources/bootstrap/js/kul-substitute.js 
+CPAC/resources/bootstrap/js/kul-substitute.min.js +CPAC/resources/templates/MNI152_Edge_AllTissues.nii.gz +CPAC/resources/templates/cpac_runner.html +CPAC/resources/templates/logger_group_index.html +CPAC/resources/templates/logger_subject_index.html +CPAC/sca/__init__.py +CPAC/sca/sca.py +CPAC/sca/utils.py +CPAC/scrubbing/__init__.py +CPAC/scrubbing/scrubbing.py +CPAC/seg_preproc/__init__.py +CPAC/seg_preproc/seg_preproc.py +CPAC/seg_preproc/utils.py +CPAC/timeseries/__init__.py +CPAC/timeseries/timeseries_analysis.py +CPAC/utils/__init__.py +CPAC/utils/bids_metadata.py +CPAC/utils/build_sublist.py +CPAC/utils/configuration.py +CPAC/utils/correlations_utils.py +CPAC/utils/create_all_qc.py +CPAC/utils/create_flame_model_files.py +CPAC/utils/create_fsl_model.py +CPAC/utils/create_group_analysis_info_files.py +CPAC/utils/datasource.py +CPAC/utils/extract_data.py +CPAC/utils/extract_data_multiscan.py +CPAC/utils/extract_parameters.py +CPAC/utils/pipeline_names.py +CPAC/utils/test_init.py +CPAC/utils/utils.py +CPAC/vmhc/__init__.py +CPAC/vmhc/utils.py +CPAC/vmhc/vmhc.py +scripts/cpac_centrality.py +scripts/cpac_correlations_workflow.py +scripts/cpac_correlations_workflow.pyc +scripts/cpac_correlations_workflow_two.py +scripts/cpac_correlations_workflow_two.pyc +scripts/cpac_extract_parameters.py +scripts/cpac_gui +scripts/cpac_install.sh +scripts/cpac_run.py +scripts/cpac_setup.py +test/test_all.py +test/test_trivial.py diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000000..6b4b600bb6 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1 @@ +include build_helpers.py From 038265280ccfeec55610c173b034e4ba0c10774f Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Wed, 9 Nov 2016 16:56:53 -0500 Subject: [PATCH 03/37] Set a default session label for when BIDS-format datasets do not have sessions. 
--- CPAC/utils/build_sublist.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/CPAC/utils/build_sublist.py b/CPAC/utils/build_sublist.py index c7b2a12d0c..6de1525093 100644 --- a/CPAC/utils/build_sublist.py +++ b/CPAC/utils/build_sublist.py @@ -425,7 +425,7 @@ def filter_sub_paths(sub_paths, include_sites, include_subs, exclude_subs, # Get site, ppant, session-level directory indicies def return_dir_indices(path_template): ''' - Function to return the site, particpant, and session-level + Function to return the site, participant, and session-level directory indicies based on splitting the path template by directory seperation '/' Parameters @@ -858,7 +858,10 @@ def build_sublist(data_config_yml): for anat in anat_paths: anat_sp = anat.split('/') subj = anat_sp[anat_ppant_idx] - sess = anat_sp[anat_sess_idx] + try: + sess = anat_sp[anat_sess_idx] + except TypeError: + sess = "ses-1" if bids_flag: site = '' else: @@ -874,7 +877,10 @@ def build_sublist(data_config_yml): # Extract info from filepath func_sp = func.split('/') subj = func_sp[func_ppant_idx] - sess = func_sp[func_sess_idx] + try: + sess = func_sp[func_sess_idx] + except TypeError: + sess = "ses-1" if bids_flag: site = '' scan_params = bids_metadata.get_metadata_for_nifti(bids_base_dir, From bb732ec592891551d67fc07f84555e8ed2603cfd Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Wed, 9 Nov 2016 16:58:50 -0500 Subject: [PATCH 04/37] Removed the deprecated AWS module. 
--- CPAC/AWS/aws_utils.py | 448 ------------------------------------------ 1 file changed, 448 deletions(-) delete mode 100644 CPAC/AWS/aws_utils.py diff --git a/CPAC/AWS/aws_utils.py b/CPAC/AWS/aws_utils.py deleted file mode 100644 index 0cfb43afed..0000000000 --- a/CPAC/AWS/aws_utils.py +++ /dev/null @@ -1,448 +0,0 @@ -# CPAC/AWS/aws_utils.py -# -# Contributing authors: -# Daniel Clark - -''' -This module contains functions which assist in interacting with AWS -services, including uploading/downloading data and file checking. -''' - -# Build and download a subject list -def build_download_sublist(bucket, bucket_prefix, local_prefix, sub_list): - ''' - Function to build and download a subject list from S3 - - Parameters - ---------- - bucket : boto.s3.bucket.Bucket instance - an instance of the boto S3 bucket class to download from - bucket_prefix : string - the bucket prefix where all of the file keys are located - local_prefix : string - the local disk prefix where all of the files should be downloaded to - sub_list : list (dict) - the C-PAC subject list that has inputs in local_prefix - - Returns - ------- - None - this function does not return a value, it downloads the subjects from - the C-PAC subject list to disk from S3 - ''' - - # Import packages - import os - - # Init variables - local_list = [] - for sub_dict in sub_list: - local_list.append(sub_dict['anat']) - local_list.extend([v for v in sub_dict['rest'].values()]) - - # Substitute the prefixes to build S3 list to download from - s3_list = [l.replace(local_prefix, bucket_prefix) for l in local_list] - - # Check already-existing files and remove from download lists - local_rm = [] - s3_rm = [] - # Build remove-lists - for i in range(len(local_list)): - l = local_list[i] - s = s3_list[i] - if os.path.exists(l): - local_rm.append(l) - s3_rm.append(s) - # Go through remove lists and remove files - for l, s in zip(local_rm, s3_rm): - local_list.remove(l) - s3_list.remove(s) - - # Download the data to the 
local prefix - s3_download(bucket, s3_list, local_prefix, bucket_prefix=bucket_prefix) - - # Check to see they all downloaded successfully - for l in local_list: - l_path = os.path.abspath(l) - if not os.path.exists(l_path): - raise IOError('S3 files were not all downloaded.\n'\ - 'Could not find: %s' % l_path) - - -# Collect all files in directory as the source list -def collect_subject_files(prefix_star, sub_id): - ''' - Function to collect all of the files in a directory into a list of - full paths - - Parameters - ---------- - prefix_star : string - filepath to the folder, in which, all of the sub-files are - collected; this filepath should have a wildcard character of - '*' so that glob can collect the files via the pattern given - sub_id : string - the subject id to look for in the output folder - - Returns - ------- - src_list : list [str] - a list of filepaths (as strings) - ''' - - # Import packages - import glob - import os - - # Init variables - bases = glob.glob(prefix_star) - src_list = [] - - # For each pipeline - for base in bases: - # Iterate through directory - for root, dirs, files in os.walk(base): - # If it's in the subject's folder and there are files - if sub_id in root and files: - src_list.extend([os.path.join(root, f) for f in files]) - - # Return the list - return src_list - - -# Get the MD5 sums of files on S3 -def md5_sum(bucket, prefix='', filt_str=''): - ''' - Function to get the filenames and MD5 checksums of files stored in - an S3 bucket and return this as a dictionary. - - Parameters - ---------- - bucket : boto.s3.bucket.Bucket instance - an instance of the boto S3 bucket class to download from - prefix : string (optional), default='' - the bucket prefix where all of the file keys are located - filt_str : string (optional), defualt='' - a string to filter the filekeys of interest; - e.g. 
'matrix_data' will only return filekeys with the string - 'matrix_data' in their filepath name - - Returns - ------- - md5_dict : dictionary {str : str} - a dictionary where the keys are the S3 filename and the values - are the MD5 checksum values - ''' - - # Init variables - blist = bucket.list(prefix) - md5_dict = {} - - # And iterate over keys to copy over new ones - for fkey in blist: - filename = str(fkey.key) - if filt_str in filename: - md5_sum = str(fkey.etag).strip('"') - md5_dict[filename] = md5_sum - print 'filename: %s' % filename - print 'md5_sum: %s' % md5_sum - - # Return the dictionary - return md5_dict - - -# Rename s3 keys from src_list to dst_list -def s3_rename(bucket, src_list, dst_list, - keep_old=False, overwrite=False, make_public=False): - ''' - Function to rename files from an AWS S3 bucket via a copy and delete - process. Uses all keys in src_list as the original names and renames - the them to the corresponding keys in the dst_list. - (e.g. src_list[9] --> dst_list[9]) - - Parameters - ---------- - bucket : boto.s3.bucket.Bucket instance - an instance of the boto S3 bucket class to download from - src_list : list (str) - a list of relative paths of the files to delete from the bucket - dst_list : list (str) - a list of relative paths of the files to delete from the bucket - keep_old : boolean (optional), default=False - flag indicating whether to keep the src_list files - overwrite : boolean (optional), default=False - flag indicated whether to overwrite the files in dst_list - make_public : boolean (optional), default=False - set to True if files should be publically available on S3 - Returns - ------- - None - The function doesn't return any value, it deletes data from - S3 and prints its progress and a 'done' message upon completion - ''' - - # Check list lengths are equal - if len(src_list) != len(dst_list): - raise ValueError('src_list and dst_list are different lengths!') - - # Init variables - i = 0 - no_files = len(src_list) - - 
# And iterate over keys to copy over new ones - for f in src_list: - src_key = bucket.get_key(f) - if not src_key: - print 'source file %s doesnt exist, skipping...' % f - continue - dst_key = dst_list[i] - dst_exists = bucket.get_key(dst_key) - if not dst_exists or overwrite: - print 'copying source: ', str(src_key.key) - print 'to destination: ', dst_key - src_key.copy(bucket.name, dst_key) - if make_public: - print 'making public...' - dk = bucket.get_key(dst_key) - dk.make_public() - if not keep_old: - src_key.delete() - else: - print '%s exists and not overwriting' % dst_key - i += 1 - per = 100*(float(i)/no_files) - print 'Done renaming %d/%d\n%f%% complete' % (i, no_files, per) - - -# Delete s3 keys based on input list -def s3_delete(bucket, in_list): - ''' - Method to delete files from an AWS S3 bucket that have the same - names as those of an input list to a local directory. - - Parameters - ---------- - bucket : boto.s3.bucket.Bucket instance - an instance of the boto S3 bucket class to download from - in_list : list (str) - a list of relative paths of the files to delete from the bucket - - Returns - ------- - None - The function doesn't return any value, it deletes data from - S3 and prints its progress and a 'done' message upon completion - ''' - - # Init variables - no_files = len(in_list) - i = 0 - # Iterate over list and delete S3 items - for f in in_list: - i += 1 - try: - print 'attempting to delete %s from %s...' % (f, bucket.name) - k = bucket.get_key(f) - k.delete() - per = 100*(float(i)/no_files) - print 'Done deleting %d/%d\n%f%% complete' % (i, no_files, per) - except AttributeError: - print 'No key found for %s on bucket %s' % (f, bucket.name) - - # Done iterating through list - print 'done!' - - -# Download files from AWS S3 to local machine -def s3_download(bucket, in_list, local_prefix, bucket_prefix=''): - ''' - Method to download files from an AWS S3 bucket that have the same - names as those of an input list to a local directory. 
- - Parameters - ---------- - bucket : boto.s3.bucket.Bucket instance - an instance of the boto S3 bucket class to download from - in_list : list (str) - a list of relative paths of the files to download from the bucket - local_prefix : string - local directory prefix to store the downloaded data - bucket_prefix : string (optional) - bucket_prefix, if specified, will be substituted with - local_prefix; otherwise, the local_prefix will only prepend the - downloaded files - - Returns - ------- - None - The function doesn't return any value, it downloads data from - S3 and prints its progress and a 'done' message upon completion - ''' - - # Impor packages - import os - - # Init variables - no_files = len(in_list) - i = 0 - # Check for trailing '/' - if not local_prefix.endswith('/'): - local_prefix = local_prefix + '/' - if bucket_prefix and not bucket_prefix.endswith('/'): - bucket_prefix = bucket_prefix + '/' - # For each item in the list, try to download it - for f in in_list: - i += 1 - remote_filename = bucket.name + ': ' + f - if bucket_prefix: - local_filename = f.replace(bucket_prefix, local_prefix) - else: - local_filename = os.path.join(local_prefix, f) - # Check to see if the local folder setup exists or not - local_folders = os.path.dirname(local_filename) - if not os.path.isdir(local_folders): - print 'creating %s on local machine' % local_folders - os.makedirs(local_folders) - # Attempt to download the file - print 'attempting to download %s to %s...' % (remote_filename, - local_filename) - try: - if not os.path.exists(local_filename): - k = bucket.get_key(f) - k.get_contents_to_filename(local_filename) - per = 100*(float(i)/no_files) - print 'Done downloading %d/%d\n%f%% complete' % (i, no_files, per) - else: - print 'File %s already exists, skipping...' % local_filename - except AttributeError: - print 'No key found for %s on bucket %s' % (f, bucket.name) - - # Done iterating through list - print 'done!' 
- - -# Upload files to AWS S3 -def s3_upload(bucket, src_list, dst_list, make_public=False, overwrite=False): - ''' - Function to upload a list of data to an S3 bucket - - Parameters - ---------- - bucket : boto.s3.bucket.Bucket instance - an instance of the boto S3 bucket class to download from - src_list : list (str) - list of filepaths as strings to upload to S3 - dst_list : list (str) - list of filepaths as strings coinciding with src_list, such - that src_list[1] gets uploaded to S3 with the S3 path given in - dst_list[1] - make_public : boolean (optional), default=False - set to True if files should be publically available on S3 - overwrite : boolean (optional), default=False - set to True if the uploaded files should overwrite what is - already there - - Returns - ------- - None - The function doesn't return any value, it uploads data to S3 - and prints its progress and a 'done' message upon completion - ''' - - # Callback function for upload progress update - def callback(complete, total): - ''' - Method to illustrate file uploading and progress updates - ''' - - # Import packages - import sys - - # Write ...'s to the output for loading progress - sys.stdout.write('.') - sys.stdout.flush() - - # Init variables - no_files = len(src_list) - i = 0 - - # Check if the list lengths match - if no_files != len(dst_list): - raise RuntimeError, 'src_list and dst_list must be the same length!' - - # For each source file, upload - for src_file in src_list: - # Get destination path - dst_file = dst_list[i] - # Print status - print 'Uploading %s to S3 bucket %s as %s' % \ - (src_file, bucket.name, dst_file) - - # Create a new key from the bucket and set its contents - k = bucket.new_key(dst_file) - if k.exists() and not overwrite: - print 'key %s already exists, skipping...' 
% dst_file - else: - k.set_contents_from_filename(src_file, cb=callback, replace=True) - # Make file public if set to True - if make_public: - print 'make public()' - k.make_public() - i += 1 - per = 100*(float(i)/no_files) - print 'finished file %d/%d\n%f%% complete\n' % \ - (i, no_files, per) - - # Print when finished - print 'Done!' - - -# Test write-access to bucket first -def test_bucket_access(creds_path, output_directory, subject_id): - ''' - ''' - - # Import packages - import os - import botocore.exceptions as bexc - from indi_aws import fetch_creds - - # Init variables - s3_str = 's3://' - test_file = '/tmp/test-output.txt' - - # Explicitly lower-case the "s3" - if output_directory.lower().startswith(s3_str): - out_dir_sp = output_directory.split('/') - out_dir_sp[0] = out_dir_sp[0].lower() - output_directory = '/'.join(out_dir_sp) - - # Get bucket name - bucket_name = output_directory.replace(s3_str, '').split('/')[0] - - # Get bucket - bucket = fetch_creds.return_bucket(creds_path, bucket_name) - - # Create local file - with open(test_file, 'w') as f: - f.write('test123') - f.close() - - # Formulate test ouput key in bucket path output directory - rel_key_path = output_directory.replace(\ - os.path.join(s3_str, bucket_name), '').lstrip('/') - write_test_key = os.path.join(rel_key_path, 'test-output_%s.txt' % subject_id) - - # Attempt a write to bucket - try: - bucket.upload_file(test_file, write_test_key) - print 'Confirmed S3 write access for CPAC output!' - test_key = bucket.Object(key=write_test_key) - test_key.delete() - s3_write_access = True - # Otherwise we set the access flag to false - except bexc.ClientError: - s3_write_access = False - - # Return the access flag - return s3_write_access From 813689385836950640686422b1784f932cbfd1a6 Mon Sep 17 00:00:00 2001 From: John Pellman Date: Tue, 15 Nov 2016 11:04:20 -0500 Subject: [PATCH 05/37] Added C-PAC install script from BIDS app. 
--- scripts/cpac_install.sh | 1643 ++++++++++++++++++++++++++------------- 1 file changed, 1094 insertions(+), 549 deletions(-) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index 47eed376d3..cda5c28630 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -1,594 +1,1139 @@ #! /bin/bash -usage=" -cpac_install.sh\n -=================================================================================================\n -Version: 1.0.0\n -Author(s): John Pellman, Daniel Clark\n -Based off of cpac_install.sh by Daniel Clark.\n -Description: Will perform specific operations to install C-PAC dependencies and C-PAC.\n -Checks for user privileges and performs installation either locally or system-wide.\n -Can be customized using flags.\n -=================================================================================================\n -Flags:\n\n --s : System-level dependencies only.\n --p : Python dependencies only\n --n : Install specific neuroimaging packages. Accepts any number of the following as arguments:\n -\tafni, fsl, c3d, ants, cpac\n -\twill issue warnings if dependencies for these neuroimaging packages are not fulfilled.\n -\tIf multiple packages are to be specified, they must be surrounded by quotation marks.\n --a : Install all neuroimaging suites not already installed. Will also tell you if all neuroimaging suites are already installed and on the path.\n --l : Local install. Equivalent to -pa ; will not run FSL installer, but will issue a warning if running on Ubuntu. \n --r : Root install. Equivalent to -spa\n --h : Bring up the help dialog.\n -=================================================================================================\n -Example usage:\n -\tcpac_install.sh -n \"fsl afni\"\n -\tWill install FSL and AFNI. 
The list of neuroimaging suites to install is iterated through sequentially.\n -\tIn this case, FSL would first be installed before AFNI.\n" +# these are packages that are common to centos 5, 6, and 7 +centos_packages=("git" "make" "unzip" "netpbm" "gcc" "python-devel"\ + "gcc-gfortran" "gcc-c++" "libgfortran" "lapack" "lapack-devel" "blas"\ + "libcanberra-gtk2" "libXp.x86_64" "mesa-libGLU-9.0.0-4.el7.x86_64"\ + "gsl-1.15-13.el7.x86_64" "wxBase" "wxGTK" "wxGTK-gl" "wxPython" "graphviz"\ + "graphviz-devel.x86_64") + +# configuration options that are specific to centos 5 +centos5_epel_url="http://dl.fedoraproject.org/pub/epel/5/x86_64/epel-release-5-4.noarch.rpm" +centos5_epel_rpm="epel-release-5-4.noarch.rpm" +centos5_packages=("mesa-libGLU-6.5.1-7.11.el5_9.i386" "gsl-1.13-3.el5.x86_64"\ + "libxml2-devel libpng-1.2.10-17.el5_8.i386") + +# configuration options that are specific to centos 6 +centos6_epel_url="http://dl.fedoraproject.org/pub/epel/6/x86_64/e/epel-release-6-8.noarch.rpm" +centos6_epel_rpm="epel-release-6-8.noarch.rpm" +centos6_packages=("mesa-libGLU-11.0.7-4.el6.x86_64" "gsl-1.13-1.el6.x86_64"\ + "libcanberra-gtk2" "libxml2-devel" "libpng-1.2.49-2.el6_7.i686") + +# configuration options that are specific to centos 7 +centos7_epel_url="http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-5.noarch.rpm" +centos7_epel_rpm="epel-release-7-5.noarch.rpm" +centos7_packages=("mesa-libGLU-9.0.0-4.el7.x86_64" "gsl-1.15-13.el7.x86_64"\ + "libcanberra-gtk2" "libxml-devel" "libpng12.x86_64") + +# are all of the ubuntu packages the same regardless of the version? 
+ubuntu_packages=("cmake" "git" "graphviz" "graphviz-dev" "gsl-bin" "libcanberra-gtk-module" \ + "libexpat1-dev" "libgiftiio-dev" "libglib2.0-dev" "libglu1-mesa" "libglu1-mesa-dev" \ + "libgsl0-dev" "libjpeg-progs" "libmotif-dev" "libxml2" "libxml2-dev" "libxext-dev" \ + "libxft2" "libxft-dev" "libxi-dev" "libxmu-headers" "libxmu-dev" "libxpm-dev" "libxslt1-dev" \ + "libxp6" "libxp-dev" "make" "mesa-common-dev" "mesa-utils" "netpbm" "pkg-config" \ + "build-essential" "xvfb" "xauth" "libgl1-mesa-dri" "tcsh" "unzip" "zlib1g-dev" "m4") + +conda_packages=("pandas" "cython" "numpy" "scipy" "matplotlib" "networkx" "traits" "pyyaml" "jinja2" "nose" "ipython" "pip" "wxpython") + +pip_packages=("future" "prov" "simplejson" "lockfile" "pygraphviz" "nibabel" "nipype" "patsy" "memory_profiler" "psutil" "configparser" "indi_tools") + +function set_system_deps { + system_pkgs='' + epel_rpm='' + epel_url='' + + if [ $DISTRO == 'CENTOS' ] + then + # add in the packages that are common to all + system_pkgs=${centos_packages[@]} + + yum update -y && yum install -y wget + + # add in the packages that are specific to the redhat-release + version=$(rpm -q --queryformat '%{VERSION}' centos-release) + case ${version} in + 5) + epel_url=centos5_epel_url + epel_rpm=centos5_epel_rpm + system_pkgs+=(${centos5_packages}) + ;; + 6) + epel_url=centos6_epel_url + epel_rpm=centos6_epel_rpm + system_pkgs+=(${centos6_packages}) + ;; + 7) + epel_url=centos7_epel_url + epel_rpm=centos7_epel_rpm + system_pkgs+=(${centos7_packages}) + ;; + *) + echo "Unknown version ${version}" + esac + elif [ $DISTRO == 'UBUNTU' ] + then + # take care of initing apt-get and installing wget + echo "!!!!!! 
CC" + apt-get update && apt-get upgrade -y && apt-get install -y wget + system_pkgs=${ubuntu_packages[@]} + else + echo "Unknown distribution ${DISTRO}" + exit 1 + fi +} + +# CC - reformatted this to have better control of the output +function print_usage { + echo "" + echo "Usage: cpac_install.sh -[spnalrh]" + echo "=========================================================================" + echo "Version: 0.4.0" + echo "Author(s): John Pellman, Daniel Clark" + echo "Based off of cpac_install.sh by Daniel Clark." + echo "Description: Will perform specific operations to install C-PAC" + echo " dependencies and C-PAC. Checks for user privileges and performs" + echo " installation either locally or system-wide." + echo "=========================================================================" + echo "One or more command line options are required:" + echo " -s : System-level dependencies only." + echo " -p : Python dependencies only" + echo " -n : Install specific neuroimaging packages. Accepts any number of the" + echo " following as arguments: afni, fsl, c3d, ants, cpac" + echo " will issue warnings if dependencies for these neuroimaging packages" + echo " are not fulfilled. If multiple packages are to be specified, they" + echo " must be surrounded by quotation marks." + echo " -a : Install all neuroimaging suites not already installed. Will also" + echo " tell you if all neuroimaging suites are already installed and on" + echo " the path." + echo " -l : Local install. Equivalent to -pa ; will not run FSL installer, but" + echo " will issue a warning if running on Ubuntu." + echo " -r : Root install. Equivalent to -spa" + echo " -h : Print this help message." + echo "=========================================================================" + echo "Example usage: cpac_install.sh -n \"fsl afni\"" + echo " Will install FSL and AFNI. The list of neuroimaging suites to install" + echo " is iterated through sequentially. 
In this case, FSL would first be"
    echo " installed before AFNI."
    echo ""
}

# Install any system-level packages recorded in missing_system_dependencies
# by get_missing_system_dependencies (which must run first).  Uses yum on
# CentOS and apt-get on Ubuntu; requires root ($LOCAL -eq 0).  Sets
# system_dependencies_installed=0 on any failure and logs to ~/cpac.log.
function install_system_dependencies {

    echo "Installing C-PAC system dependencies... [${missing_system_dependencies[@]}][${#missing_system_dependencies[@]}]"

    if [ ${#missing_system_dependencies[@]} -eq 0 ]
    then
        # fixed: debug echo referenced ${system_packages[@]}, but the rest of
        # this script uses ${system_pkgs[@]}
        echo "sys packages to be installed ${system_pkgs[@]}"
        echo "System dependencies are already installed!"
        echo "Moving on..."
        echo "[ $(date) ] : C-PAC system dependencies are already installed, do" \
             "not need to be re-installed." >> ~/cpac.log
        return
    fi
    if [ $LOCAL -eq 0 ]
    then
        system_dependencies_installed=1
        if [ $DISTRO == 'CENTOS' ]
        then
            version=$(rpm -q --queryformat '%{VERSION}' centos-release)
            # NOTE(review): epel_url / epel_rpm are presumably defined near the
            # top of this script — confirm; the per-version case block that
            # used to set them here was removed.
            cd /tmp && wget ${epel_url} && rpm -Uvh ${epel_rpm}

            yum install -y ${missing_system_dependencies[@]}
            if [ $? -ne 0 ]
            then
                system_dependencies_installed=0
                echo "[ $(date) ] yum failed to install packages: ${missing_system_dependencies[@]}"
                echo "[ $(date) ] yum failed to install packages: ${missing_system_dependencies[@]}" \
                    >> ~/cpac.log
            else
                echo "[ $(date) ] : yum Installed C-PAC system dependency" \
                     "${missing_system_dependencies[@]}"
                # fixed: this echo was missing its line continuation, so the
                # package list was executed as a command instead of logged
                echo "[ $(date) ] : yum Installed C-PAC system dependency" \
                     "${missing_system_dependencies[@]}" \
                     >> ~/cpac.log
            fi
        elif [ $DISTRO == 'UBUNTU' ]
        then
            apt-get install -y ${missing_system_dependencies[@]}
            if [ $? -ne 0 ]
            then
                system_dependencies_installed=0
                echo "[ $(date) ] apt-get failed to install packages: ${missing_system_dependencies[@]}"
                echo "[ $(date) ] apt-get failed to install packages: ${missing_system_dependencies[@]}" \
                    >> ~/cpac.log
            else
                echo "[ $(date) ] : apt-get Installed C-PAC system dependency" \
                     "${missing_system_dependencies[@]}"
                # fixed: missing line continuation (see yum branch above)
                echo "[ $(date) ] : apt-get Installed C-PAC system dependency" \
                     "${missing_system_dependencies[@]}" \
                     >> ~/cpac.log
            fi
            apt-get autoremove -y
        else
            echo "Linux distribution not recognized. System-level dependencies cannot" \
                 "be installed."
            echo "[ $(date) ] : C-PAC system dependencies could not be installed (Linux" \
                 "distribution not recognized)." >> ~/cpac.log
            cd $INIT_DIR
            exit 1
        fi
    elif [ $LOCAL -eq 1 ]
    then
        echo "System-level dependencies cannot be installed since you do not have" \
             "root privileges."
        echo "Re-run this script as root or have your system administrator run it."
        cd $INIT_DIR
        echo "[ $(date) ] : C-PAC system dependencies could not be installed (not root)." \
            >> ~/cpac.log
        exit 1
    else
        echo "Invalid value for variable 'LOCAL'."
        echo "This script is unable to determine whether or not you are running it as root."
        echo "[ $(date) ] : C-PAC system dependencies could not be installed (unable to" \
             "determine if root)." >> ~/cpac.log
        cd $INIT_DIR
        exit 1
    fi
    if [ ${system_dependencies_installed} -eq 0 ]
    then
        echo "[ $(date) ] : C-PAC system dependencies not fully installed." >> ~/cpac.log
    else
        # fixed typo: "succesfully"
        echo "[ $(date) ] : C-PAC system dependencies successfully installed." >> ~/cpac.log
    fi
}

# Populate missing_system_dependencies with every package from system_pkgs
# that is not yet installed (yum on CentOS, dpkg on Ubuntu), and set
# system_dependencies_installed accordingly.
function get_missing_system_dependencies()
{
    missing_system_dependencies=()
    system_dependencies_installed=1

    if [ $DISTRO == 'CENTOS' ]
    then
        for package in ${system_pkgs[@]}
        do
            yum list installed ${package} > /dev/null 2>&1
            if [ $? -ne 0 ]
            then
                system_dependencies_installed=0
                # fixed: originally appended to an unrelated "ARRAY" variable,
                # so missing CentOS packages were never recorded
                missing_system_dependencies+=(${package})
                echo "[ $(date) ] : Missing system dependency ${package}" >> ~/cpac.log
            fi
        done
    elif [ $DISTRO == 'UBUNTU' ]
    then
        for package in ${system_pkgs[@]}
        do
            dpkg -s ${package} > /dev/null 2>&1
            if [ $? -ne 0 ]
            then
                system_dependencies_installed=0
                missing_system_dependencies+=(${package})
                echo "[ $(date) ] : Missing system dependency ${package}" >> ~/cpac.log
            fi
        done
    else
        echo "[ $(date) ] : Do not know how to check for packages installed on ${DISTRO}" >> ~/cpac.log
    fi
    echo "missing ${missing_system_dependencies[@]}"
}

# Install miniconda (if absent) plus the conda and pip packages recorded by
# get_missing_python_dependencies, then install INDI-Tools from GitHub.
# Exits with status 1 if conda or pip installation fails.
function install_python_dependencies {

    if [ ${python_dependencies_installed} -eq 1 ]
    then
        echo "[ $(date) ] C-PAC Python dependencies installed!"
        echo "[ $(date) ] C-PAC Python dependencies installed!" >> ~/cpac.log
        return
    fi

    if [ ${system_dependencies_installed} -ne 1 ]
    then
        echo "Python dependencies cannot be installed unless system-level dependencies are installed first."
        echo "Have your system administrator install system-level dependencies as root."
        echo "Exiting now..."
        # fixed: unbalanced parenthesis in the log message
        echo "[ $(date) ] : Python dependencies could not be installed (system-level" \
             "dependencies not installed)." >> ~/cpac.log
        cd $INIT_DIR
        exit 1
    fi

    # for now always install miniconda; in the future only install if missing
    echo "[ $(date) ] Installing miniconda!"
    echo "[ $(date) ] Installing miniconda!" >> ~/cpac.log

    cd /tmp
    if [ ! -f Miniconda-3.8.3-Linux-x86_64.sh ]
    then
        wget http://repo.continuum.io/miniconda/Miniconda-3.8.3-Linux-x86_64.sh
        if [ $? -ne 0 ]
        then
            echo "[ $(date) ] Could not download miniconda installation script!"
            echo "[ $(date) ] Could not download miniconda installation script!" >> ~/cpac.log
            return
        fi
    fi
    chmod +x Miniconda-3.8.3-Linux-x86_64.sh
    if [ $LOCAL -eq 0 ]
    then
        ./Miniconda-3.8.3-Linux-x86_64.sh -b -p /usr/local/bin/miniconda
        if [ $? -ne 0 ]
        then
            # NOTE(review): deliberately non-fatal here (the original commented
            # out the return) — presumably to tolerate an existing install;
            # confirm this is intended
            echo "[ $(date) ] Miniconda installation failed!"
            echo "[ $(date) ] Miniconda installation failed!" >> ~/cpac.log
        fi
        chmod -R 775 /usr/local/bin/miniconda
        chmod g+s /usr/local/bin/miniconda
        export PATH=/usr/local/bin/miniconda/bin:${PATH}
        echo 'export PATH=/usr/local/bin/miniconda/bin:${PATH}' >> ~/cpac_env.sh
    elif [ $LOCAL -eq 1 ] && [ ! -d ~/miniconda ]
    then
        ./Miniconda-3.8.3-Linux-x86_64.sh -b
        if [ $? -ne 0 ]
        then
            echo "[ $(date) ] Miniconda installation failed!"
            echo "[ $(date) ] Miniconda installation failed!" >> ~/cpac.log
            return
        fi
        export PATH=~/miniconda/bin:${PATH}
        echo 'export PATH=~/miniconda/bin:${PATH}' >> ~/cpac_env.sh
    fi

    # install straight into the miniconda root (no cpac virtualenv for docker)
    conda install -y ${missing_conda_dependencies[@]}
    if [ $? -ne 0 ]
    then
        # fixed: messages referenced ${p}, a loop variable that no longer exists
        echo "[ $(date) ] Conda install ${missing_conda_dependencies[@]} failed!"
        echo "[ $(date) ] Conda install ${missing_conda_dependencies[@]} failed!" >> ~/cpac.log
        exit 1
    fi

    pip install ${missing_pip_dependencies[@]}
    if [ $? -ne 0 ]
    then
        # fixed: log line referenced the undefined ${p}
        echo "[ $(date) ] Pip install ${missing_pip_dependencies[@]} failed!"
        echo "[ $(date) ] Pip install ${missing_pip_dependencies[@]} failed!" >> ~/cpac.log
        exit 1
    fi

    cd /tmp
    git clone https://github.com/FCP-INDI/INDI-Tools.git
    cd INDI-Tools/
    python setup.py install
    cd $INIT_DIR
}

# Determine which packages from conda_packages / pip_packages are missing from
# the miniconda install, filling missing_conda_dependencies and
# missing_pip_dependencies and setting python_dependencies_installed.
function get_missing_python_dependencies {

    python_dependencies_installed=0
    missing_pip_dependencies=()
    missing_conda_dependencies=()

    # all import checks below run against the miniconda interpreter
    if [ ! -f /usr/local/bin/miniconda/bin/python ]
    then
        python_installed=0
    else
        python_installed=1
    fi

    if [ ${python_installed} -eq 0 ]
    then
        echo "[ $(date) ] : Python is not installed, need to install all" \
             "Python dependencies." >> ~/cpac.log
        # fixed: copy as arrays — the originals flattened each package list
        # into a single string element
        missing_pip_dependencies=(${pip_packages[@]})
        missing_conda_dependencies=(${conda_packages[@]})
    else
        # if we find an environment, then enable it
        if [ -d ~/miniconda/envs/cpac ] || [ -d /usr/local/bin/miniconda/envs/cpac ]
        then
            echo "[ $(date) ] : Found C-PAC virtual environment, activating" >> ~/cpac.log
            source activate cpac &> /dev/null
        fi

        python_dependencies_installed=1
        for p in ${pip_packages[@]}
        do
            if [ ${p} == "indi_tools" ]
            then
                # INDI-Tools is not importable under its package name
                /usr/local/bin/miniconda/bin/python -c "import indi_aws" 2> /dev/null
            else
                /usr/local/bin/miniconda/bin/python -c "import ${p}" 2> /dev/null
            fi
            if [ $? -ne 0 ]
            then
                echo "[ $(date) ] : Python package $p not installed" >> ~/cpac.log
                missing_pip_dependencies+=($p)
                python_dependencies_installed=0
            else
                echo "[ $(date) ] : Python package $p installed" >> ~/cpac.log
            fi
        done

        for p in ${conda_packages[@]}
        do
            # some conda package names differ from their import names
            if [ ${p} == "wxpython" ]
            then
                /usr/local/bin/miniconda/bin/python -c "import wx" 2> /dev/null
                retval=$?
            elif [ ${p} == "pyyaml" ]
            then
                /usr/local/bin/miniconda/bin/python -c "import yaml" 2> /dev/null
                retval=$?
            elif [ ${p} == "ipython" ]
            then
                # ipython ships a binary; check for it rather than importing
                if [ -f /usr/local/bin/miniconda/bin/ipython ]
                then
                    retval=0
                else
                    retval=1
                fi
            else
                /usr/local/bin/miniconda/bin/python -c "import ${p}" 2> /dev/null
                retval=$?
            fi
            if [ $retval -ne 0 ]
            then
                echo "[ $(date) ] : Python package $p not installed" >> ~/cpac.log
                missing_conda_dependencies+=($p)
                python_dependencies_installed=0
            else
                echo "[ $(date) ] : Python package $p installed" >> ~/cpac.log
            fi
        done

        # if we find an environment, then disable it
        if [ -d ~/miniconda/envs/cpac ] || [ -d /usr/local/bin/miniconda/envs/cpac ]
        then
            echo "[ $(date) ] : Found C-PAC virtual environment, de-activating" >> ~/cpac.log
            source deactivate &> /dev/null
        fi
    fi
}

# Install FSL 5.0: via fslinstaller.py on CentOS (root or home install) or the
# NeuroDebian repository on Ubuntu (root only).  Exports FSLDIR/PATH and
# persists them to ~/cpac_env.sh.
function install_fsl {
    echo "Installing FSL."
    which fsl &> /dev/null ; if [ $? -eq 0 ]; then
        echo FSL is already installed!
        echo Moving on...
        echo '[ '$(date)' ] : FSL is already installed - does not need to be re-installed.' >> ~/cpac.log
        return
    fi
    if [ $system_dependencies_installed -ne 1 ]
    then
        echo "FSL cannot be installed unless system-level dependencies are installed first."
        echo "Have your system administrator install system-level dependencies as root."
        echo "Exiting now..."
        echo "[ $(date) ] : FSL installation failed - system-level dependencies are not installed." >> ~/cpac.log
        cd $INIT_DIR
        exit 1
    fi
    if [ $DISTRO == 'CENTOS' ]; then
        cd /tmp
        wget fsl.fmrib.ox.ac.uk/fsldownloads/fslinstaller.py
    fi
    if [ $LOCAL -eq 0 ]; then
        if [ $DISTRO == 'CENTOS' ]; then
            python fslinstaller.py -d /usr/share
            if [ $? -ne 0 ]
            then
                echo "FSL Install failed!"
                exit 1
            fi
            # rearrange into the 5.0 layout C-PAC expects
            FSLDIR=/usr/share/fsl/
            mkdir $FSLDIR/5.0
            mv $FSLDIR/bin $FSLDIR/5.0/bin
            ln -s $FSLDIR/data $FSLDIR/5.0/data
            mv $FSLDIR/doc $FSLDIR/5.0/doc
            mv $FSLDIR/etc $FSLDIR/5.0/etc
            mv $FSLDIR/tcl $FSLDIR/5.0/tcl
        # Debian-based distros must use NeuroDebian instead of the installer.
        elif [ $DISTRO == 'UBUNTU' ]; then
            wget -O- http://neuro.debian.net/lists/trusty.us-ca.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list
            apt-key adv --recv-keys --keyserver hkp://pgp.mit.edu:80 0xA5D32F012649A5A9
            apt-get update
            apt-get install -y fsl-5.0-complete
            if [ $? -ne 0 ]
            then
                echo "FSL Install failed!"
                exit 1
            fi
        fi
        FSLDIR=/usr/share/fsl/5.0
        . ${FSLDIR}/etc/fslconf/fsl.sh
        PATH=${FSLDIR}/bin:${PATH}
        export FSLDIR PATH
        echo '# Path to FSL' >> ~/cpac_env.sh
        echo 'FSLDIR=/usr/share/fsl/5.0' >> ~/cpac_env.sh
        echo '. ${FSLDIR}/etc/fslconf/fsl.sh' >> ~/cpac_env.sh
        echo 'PATH=${FSLDIR}/bin:${PATH}' >> ~/cpac_env.sh
        echo 'export FSLDIR PATH' >> ~/cpac_env.sh
    elif [ $LOCAL -eq 1 ]
    then
        if [ $DISTRO == 'CENTOS' ]
        then
            python fslinstaller.py -d ~
            if [ $? -ne 0 ]
            then
                echo "FSL Install failed!"
                exit 1
            fi
            FSLDIR=~/fsl/
            mkdir $FSLDIR/5.0
            mv $FSLDIR/bin $FSLDIR/5.0/bin
            ln -s $FSLDIR/data $FSLDIR/5.0/data
            mv $FSLDIR/doc $FSLDIR/5.0/doc
            mv $FSLDIR/etc $FSLDIR/5.0/etc
            mv $FSLDIR/tcl $FSLDIR/5.0/tcl
            FSLDIR=~/fsl/5.0
            . ${FSLDIR}/etc/fslconf/fsl.sh
            PATH=${FSLDIR}/bin:${PATH}
            export FSLDIR PATH
            echo '# Path to FSL' >> ~/cpac_env.sh
            echo 'FSLDIR=~/fsl/5.0' >> ~/cpac_env.sh
            echo '. ${FSLDIR}/etc/fslconf/fsl.sh' >> ~/cpac_env.sh
            echo 'PATH=${FSLDIR}/bin:${PATH}' >> ~/cpac_env.sh
            echo 'export FSLDIR PATH' >> ~/cpac_env.sh
        elif [ $DISTRO == 'UBUNTU' ]
        then
            echo FSL cannot be installed without root privileges on Ubuntu Linux.
            echo "[ $(date) ] : FSL installation failed - need root privileges" \
                 "on Ubuntu." >> ~/cpac.log
            cd $INIT_DIR
            install_cpac_env
            exit 1
        fi
    else
        echo "Invalid value for variable 'LOCAL'."
        echo "This script is unable to determine whether or not you are running it as root."
        echo "[ $(date) ] : FSL could not be installed (unable to determine " \
             "if root)." >> ~/cpac.log
        cd $INIT_DIR
        exit 1
    fi
}

# Download and unpack the AFNI binary distribution matching this machine's
# architecture, install to /opt/afni (root) or ~/afni (non-root), and persist
# PATH / DYLD_FALLBACK_LIBRARY_PATH to ~/cpac_env.sh.
function install_afni {
    echo "Installing AFNI."
    which afni &> /dev/null ; if [ $? -eq 0 ]; then
        echo AFNI is already installed!
        echo Moving on...
        echo "[ $(date) ] : AFNI is already installed - does not need to be" \
             " re-installed." >> ~/cpac.log
        return
    fi
    if [ $system_dependencies_installed -ne 1 ]
    then
        echo "AFNI cannot be installed unless system-level dependencies are installed first."
        echo "Have your system administrator install system-level dependencies as root."
        echo "Exiting now..."
        echo "[ $(date) ] : AFNI installation failed - system-level dependencies are" \
             "not installed." >> ~/cpac.log
        cd $INIT_DIR
        exit 1
    fi
    cd /tmp
    if [ $(uname -p) == 'x86_64' ]; then
        AFNI_DOWNLOAD=linux_openmp_64
    else
        AFNI_DOWNLOAD=linux_openmp
    fi

    wget http://afni.nimh.nih.gov/pub/dist/tgz/${AFNI_DOWNLOAD}.tgz
    tar xfz ${AFNI_DOWNLOAD}.tgz
    # $? here is the tar extraction status
    if [ $? -ne 0 ]
    then
        echo "AFNI Install failed!"
        exit 1
    fi

    if [ $LOCAL -eq 0 ]
    then
        mv ${AFNI_DOWNLOAD} /opt/afni
        export PATH=/opt/afni:$PATH
        export DYLD_FALLBACK_LIBRARY_PATH=/opt/afni
        echo '# Path to AFNI' >> ~/cpac_env.sh
        echo 'export PATH=/opt/afni:$PATH' >> ~/cpac_env.sh
        echo 'export DYLD_FALLBACK_LIBRARY_PATH=/opt/afni' >> ~/cpac_env.sh
    elif [ $LOCAL -eq 1 ]; then
        mv ${AFNI_DOWNLOAD} ~/afni
        export PATH=~/afni:$PATH
        export DYLD_FALLBACK_LIBRARY_PATH=~/afni
        echo '# Path to AFNI' >> ~/cpac_env.sh
        echo 'export PATH=~/afni:$PATH' >> ~/cpac_env.sh
        echo 'export DYLD_FALLBACK_LIBRARY_PATH=~/afni' >> ~/cpac_env.sh
    else
        echo Invalid value for variable 'LOCAL'.
        echo This script is unable to determine whether or not you are running it as root.
        echo '[ '$(date)' ] : AFNI could not be installed (unable to determine if root).' >> ~/cpac.log
        cd $INIT_DIR
        exit 1
    fi
}

# Build ANTS from source (requires c3d and system dependencies), install the
# binaries plus helper scripts to /opt/ants (root) or ~/ants (non-root), and
# persist ANTSPATH / PATH to ~/cpac_env.sh.
function install_ants {
    echo "Installing ANTS."
    which ANTS &> /dev/null ; if [ $? -eq 0 ]; then
        echo ANTS is already installed!
        echo Moving on...
        echo '[ '$(date)' ] : ANTS is already installed - does not need to be re-installed.' >> ~/cpac.log
        return
    fi
    if [ ${system_dependencies_installed} -ne 1 ]
    then
        echo ANTS cannot be installed unless system-level dependencies are installed first.
        echo Have your system administrator install system-level dependencies as root.
        echo Exiting now...
        echo '[ '$(date)' ] : ANTS installation failed - system-level dependencies are not installed.' >> ~/cpac.log
        cd $INIT_DIR
        exit 1
    fi
    which c3d &> /dev/null ; if [ $? -ne 0 ]; then
        echo "ANTS cannot be installed unless c3d is installed first."
        echo "Install c3d and then try again."
        echo "Exiting now..."
        echo '[ '$(date)' ] : ANTS installation failed - C3D is not installed.' >> ~/cpac.log
        cd $INIT_DIR
        install_cpac_env
        exit 1
    fi
    cd /tmp
    git clone https://github.com/stnava/ANTs.git
    if [ $LOCAL -eq 0 ]; then
        mkdir /opt/ants
        cd /opt/ants
        cmake -c -g /tmp/ANTs
        # go slow, -j 4 causes seg fault w/ building containers
        make
        if [ $? -ne 0 ]
        then
            echo "ANTS compile failed."
            echo "Exiting now..."
            echo "[ $(date) ] : ANTS installation failed - compile failed." >> ~/cpac.log
            cd $INIT_DIR
            exit 1
        fi
        ANTSPATH=/opt/ants/bin
        cp /tmp/ANTs/Scripts/antsIntroduction.sh ${ANTSPATH}
        cp /tmp/ANTs/Scripts/antsAtroposN4.sh ${ANTSPATH}
        cp /tmp/ANTs/Scripts/antsBrainExtraction.sh ${ANTSPATH}
        cp /tmp/ANTs/Scripts/antsCorticalThickness.sh ${ANTSPATH}
        export ANTSPATH
        export PATH=/opt/ants/bin:$PATH
        echo '# Path to ANTS' >> ~/cpac_env.sh
        echo 'export ANTSPATH=/opt/ants/bin/' >> ~/cpac_env.sh
        echo 'export PATH=/opt/ants/bin:$PATH' >> ~/cpac_env.sh
    elif [ $LOCAL -eq 1 ]; then
        mkdir ~/ants
        cd ~/ants
        cmake -c -g /tmp/ANTs
        # go slow, -j 4 causes seg fault w/ building containers
        make
        if [ $? -ne 0 ]
        then
            echo "ANTS compile failed."
            echo "Exiting now..."
            echo "[ $(date) ] : ANTS installation failed - compile failed." >> ~/cpac.log
            cd $INIT_DIR
            exit 1
        fi
        ANTSPATH=~/ants/bin
        cp /tmp/ANTs/Scripts/antsIntroduction.sh ${ANTSPATH}
        cp /tmp/ANTs/Scripts/antsAtroposN4.sh ${ANTSPATH}
        cp /tmp/ANTs/Scripts/antsBrainExtraction.sh ${ANTSPATH}
        cp /tmp/ANTs/Scripts/antsCorticalThickness.sh ${ANTSPATH}
        export ANTSPATH
        # fixed: this branch exported /opt/ants/bin although the non-root
        # install lives in ~/ants (matching the cpac_env.sh lines below)
        export PATH=~/ants/bin:$PATH
        echo '# Path to ANTS' >> ~/cpac_env.sh
        echo 'export ANTSPATH=~/ants/bin/' >> ~/cpac_env.sh
        echo 'export PATH=~/ants/bin:$PATH' >> ~/cpac_env.sh
    else
        echo Invalid value for variable 'LOCAL'.
        echo This script is unable to determine whether or not you are running it as root.
        echo '[ '$(date)' ] : ANTS could not be installed (unable to determine if root).' >> ~/cpac.log
        cd $INIT_DIR
        exit 1
    fi
}

# Download the prebuilt c3d 0.8.2 tarball for this architecture and install it
# to /opt/c3d (root) or ~/c3d (non-root), persisting PATH to ~/cpac_env.sh.
function install_c3d {
    echo "Installing C3D."
    which c3d &> /dev/null ; if [ $? -eq 0 ]; then
        echo c3d is already installed!
        echo Moving on...
        echo '[ '$(date)' ] : C3D is already installed - does not need to be re-installed.' >> ~/cpac.log
        return
    fi
    ARCHITECTURE=$(uname -p)
    case $ARCHITECTURE in
        x86_64 )
            C3D_DOWNLOAD=c3d-0.8.2-Linux-x86_64
            ;;
        i386 )
            C3D_DOWNLOAD=c3d-0.8.2-Linux-i386
            ;;
        i686 )
            C3D_DOWNLOAD=c3d-0.8.2-Linux-i686
            ;;
    esac
    cd /tmp
    wget http://sourceforge.net/projects/c3d/files/c3d/c3d-0.8.2/${C3D_DOWNLOAD}.tar.gz
    tar xfz ${C3D_DOWNLOAD}.tar.gz
    if [ $LOCAL -eq 0 ]; then
        mv $C3D_DOWNLOAD /opt/c3d
        export PATH=/opt/c3d/bin:$PATH
        echo '# Path to C3D' >> ~/cpac_env.sh
        echo 'export PATH=/opt/c3d/bin:$PATH' >> ~/cpac_env.sh
    elif [ $LOCAL -eq 1 ]; then
        mv $C3D_DOWNLOAD ~/c3d
        export PATH=~/c3d/bin:$PATH
        echo '# Path to C3D' >> ~/cpac_env.sh
        echo 'export PATH=~/c3d/bin:$PATH' >> ~/cpac_env.sh
    else
        echo Invalid value for variable 'LOCAL'.
        echo This script is unable to determine whether or not you are running it as root.
        echo '[ '$(date)' ] : C3D could not be installed (unable to determine if root).' >> ~/cpac.log
        cd $INIT_DIR
        exit 1
    fi
}

# Template files and directories C-PAC expects inside $FSLDIR.
# NOTE(review): these expand $FSLDIR at definition time, so FSLDIR must
# already be set when this script section is sourced — confirm.
cpac_resources=("$FSLDIR/data/standard/MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz" \
                "$FSLDIR/data/standard/MNI152_T1_2mm_brain_symmetric.nii.gz" \
                "$FSLDIR/data/standard/MNI152_T1_2mm_symmetric.nii.gz" \
                "$FSLDIR/data/standard/MNI152_T1_3mm_brain_mask_dil.nii.gz" \
                "$FSLDIR/data/standard/MNI152_T1_3mm_brain_mask.nii.gz" \
                "$FSLDIR/data/standard/MNI152_T1_3mm_brain_mask_symmetric_dil.nii.gz" \
                "$FSLDIR/data/standard/MNI152_T1_3mm_brain.nii.gz" \
                "$FSLDIR/data/standard/MNI152_T1_3mm_brain_symmetric.nii.gz" \
                "$FSLDIR/data/standard/MNI152_T1_3mm.nii.gz" \
                "$FSLDIR/data/standard/MNI152_T1_3mm_symmetric.nii.gz" \
                "$FSLDIR/data/atlases/HarvardOxford/HarvardOxford-lateral-ventricles-thr25-2mm.nii.gz")

cpac_resdirs=("$FSLDIR/data/standard/tissuepriors/2mm" \
              "$FSLDIR/data/standard/tissuepriors/3mm")

# Download the C-PAC image resources tarball and copy any templates that are
# not already present into $FSLDIR (requires FSL to be installed first).
function install_cpac_resources {
    echo "Installing C-PAC Image Resources."
    # Determines if C-PAC image resources are all already installed.
    RES_PRES=1
    for res in ${cpac_resources[@]}
    do
        # fixed: entries are already absolute paths; the original prefixed
        # them with $FSLDIR/data/standard again, so the check always failed
        if [ ! -f ${res} ]
        then
            RES_PRES=0
        fi
    done
    for resdir in ${cpac_resdirs[@]}
    do
        if [ ! -d ${resdir} ]
        then
            RES_PRES=0
        fi
    done

    if [ ${RES_PRES} -eq 1 ]
    then
        echo "CPAC Resources are already present!"
        echo "Moving on..."
        echo "[ $(date) ] : C-PAC resources are already installed - do not need to be re-installed." >> ~/cpac.log
        return
    fi

    # FSL's data directory must exist before templates can be copied in
    if [ ! -d "$FSLDIR/data" ]
    then
        echo "CPAC templates cannot be copied unless FSL is installed first."
        echo "Install FSL and then try again."
        echo "Exiting now..."
        echo "[ $(date) ] : C-PAC resources installation failed - FSL is not installed." >> ~/cpac.log
        cd $INIT_DIR
        install_cpac_env
        exit 1
    fi
    cd /tmp
    wget http://fcon_1000.projects.nitrc.org/indi/cpac_resources.tgz
    tar xfz cpac_resources.tgz 2> /dev/null
    cd cpac_image_resources
    # -n: never overwrite files the user already has
    cp -n MNI_3mm/* $FSLDIR/data/standard
    cp -n symmetric/* $FSLDIR/data/standard
    cp -nr tissuepriors/2mm $FSLDIR/data/standard/tissuepriors
    cp -nr tissuepriors/3mm $FSLDIR/data/standard/tissuepriors
    cp -n HarvardOxford-lateral-ventricles-thr25-2mm.nii.gz $FSLDIR/data/atlases/HarvardOxford
}

function install_cpac {
    echo "Installing C-PAC."
    python -c "import CPAC" 2> /dev/null ; if [ $?
-eq 0 ]; then + echo CPAC is already installed! + echo Moving on... + echo '[ '$(date)' ] : C-PAC is already installed - does not need to be re-installed.' >> ~/cpac.log + return + fi + which fsl &> /dev/null ; if [ $? -ne 0 ]; then + echo CPAC cannot be installed unless FSL is installed first. + echo Install FSL and then try again. + echo Exiting now... + echo '[ '$(date)' ] : C-PAC installation failed - FSL is not installed.' >> ~/cpac.log + cd $INIT_DIR + install_cpac_env + exit 1 + fi + which afni &> /dev/null ; if [ $? -ne 0 ]; then + echo CPAC cannot be installed unless AFNI is installed first. + echo Install AFNI and then try again. + echo Exiting now... + echo '[ '$(date)' ] : C-PAC installation failed - AFNI is not installed.' >> ~/cpac.log + cd $INIT_DIR + install_cpac_env + exit 1 + fi + if [ ${python_dependencies_installed} -ne 1 ] + then + echo CPAC cannot be installed unless Python dependencies are installed first. + echo Install Python dependencies and then try again. + echo Exiting now... + echo "missing python dependencies" + echo ${missing_conda_dependencies[@]} + echo ${missing_pip_dependencies[@]} + echo '[ '$(date)' ] : C-PAC installation failed - Python dependencies are not installed.' >> ~/cpac.log + cd $INIT_DIR + install_cpac_env + exit 1 + fi + #source activate cpac + cd /tmp + #wget https://github.com/FCP-INDI/C-PAC/archive/v1.0.0.tar.gz + #tar xzvf v1.0.0.tar.gz + git clone https://github.com/FCP-INDI/C-PAC.git C-PAC-1.0.0 + cd C-PAC-1.0.0 + python setup.py install + rm -rf /tmp/C-PAC-1.0.0 + #source deactivate } function install_cpac_env { - echo "Installing C-PAC environmental variables" - if [ -f ~/cpac_env.sh ]; then - # Append cpac_env.sh to end of bashrc and remove if this is not root. 
Otherwise move cpac_env.sh to /etc/profile.d - if [ $LOCAL -eq 1 ]; then - cat ~/cpac_env.sh >> ~/.bashrc - rm ~/cpac_env.sh - elif [ $LOCAL -eq 0 ]; then - # Since functions will not re-install already installed software, this should only append - # packages that weren't already in cpac_env.sh. - cat ~/cpac_env.sh >> /etc/bash.bashrc - rm ~/cpac_env.sh - fi - fi + echo "Installing C-PAC environmental variables" + if [ -f ~/cpac_env.sh ] + then + # Append cpac_env.sh to end of bashrc and remove if this is not root. + # Otherwise move cpac_env.sh to /etc/profile.d + if [ $LOCAL -eq 1 ] + then + cat ~/cpac_env.sh >> ~/.bashrc + rm ~/cpac_env.sh + elif [ $LOCAL -eq 0 ] + then + if [ -f /etc/profile.d/cpac_env.sh ] + then + # Since functions will not re-install already installed + # software, this should only append + # packages that weren't already in cpac_env.sh. + cat ~/cpac_env.sh >> /etc/profile.d/cpac_env.sh + rm ~/cpac_env.sh + else + mv ~/cpac_env.sh /etc/profile.d/ + fi + fi + fi } + +##### MAIN ENTRY POINT + + + # Check to see if user has root privileges. If not, perform local install. -[ $EUID -eq 0 ] && LOCAL=0 || LOCAL=1 +# CC undid the obfuscation +if [ $EUID -eq 0 ] +then + # user is superuser + LOCAL=0 +else + # user is not superuser + LOCAL=1 +fi # Check to see whether the distribution is CentOS or Ubuntu. -[ -f /etc/redhat-release ] && DISTRO=CENTOS -which lsb_release &> /dev/null && [ $(lsb_release -si) == 'Ubuntu' ] && DISTRO=UBUNTU +# CC: broke this out to make it understandable, and to make it work with +# bare-bone installations that do not have lsb_release installed +if [ -f /etc/redhat-release ] +then + DISTRO=CENTOS +elif [ -f /etc/lsb-release ] +then + source /etc/lsb-release + DISTRO=${DISTRIB_ID^^} +fi INIT_DIR=$(pwd) -: ${LOCAL:? "LOCAL needs to be set and non-empty."} -: ${DISTRO:? 
"DISTRO needs to be set and non-empty."} -while getopts ":spn:alrh" opt; do - case $opt in - s) - install_system_dependencies - install_cpac_env - ;; - p) - install_python_dependencies - install_cpac_env - ;; - n) - suites=($OPTARG) - for suite in ${suites[@]}; do - case $suite in - afni) - install_afni - install_cpac_env - ;; - fsl) - install_fsl - install_cpac_env - ;; - c3d) - install_c3d - install_cpac_env - ;; - ants) - install_ants - install_cpac_env - ;; - cpac) - install_cpac_resources - install_cpac - install_cpac_env - ;; - *) - echo Invalid neuroimaging suite: $suite - echo CPAC provisioning script will continue. - echo '[ '$(date)' ] : Unexpected neuroimaging suite: ' $suite >> ~/cpac.log - ;; - esac - done - ;; - a) - install_afni - if [ $LOCAL -eq 1 ] && [ $DISTRO == 'UBUNTU' ]; then - echo FSL cannot be installed locally on Ubuntu. - echo Contact your system administrator to install FSL. - echo Continuing the installation... - echo '[ '$(date)' ] : FSL installation failed - need root privileges on Ubuntu.' >> ~/cpac.log - else - install_fsl - fi - install_c3d - install_ants - install_cpac_env - ;; - l) - install_python_dependencies - install_afni - if [ $LOCAL -eq 1 ] && [ $DISTRO == 'UBUNTU' ]; then - echo FSL cannot be installed locally on Ubuntu. - echo Contact your system administrator to install FSL. - echo Continuing the installation... - echo '[ '$(date)' ] : FSL installation failed - need root privileges on Ubuntu.' >> ~/cpac.log - else - install_fsl - fi - install_c3d - install_ants - install_cpac_resources - install_cpac - install_cpac_env - ;; - r) - install_system_dependencies - install_python_dependencies - install_afni - install_fsl - install_c3d - install_ants - install_cpac_resources - install_cpac - install_cpac_env - ;; - h) - echo -e ${usage} - ;; - \?) - echo "Invalid option: -$OPTARG" >&2 - cd $INIT_DIR - exit 1 - ;; - :) - echo "Option -$OPTARG requires an argument." 
>&2 - cd $INIT_DIR - exit 1 - ;; - esac + +# again easier to read +if [ -z ${LOCAL} ] +then + echo "LOCAL needs to be set and non-empty." + exit 1 +fi + +if [ -z ${DISTRO} ] +then + echo "DISTRO needs to be set and non-empty. Check that /etc/redhat-release\n" + echo "or /etc/lsb-release exist." + exit 1 +fi + +# tell the user what we are doing +if [ ${LOCAL} -eq 1 ] +then + echo "Installing the C-CPAC ecosystem locally on ${DISTRO} with $@" +else + echo "Installing the C-CPAC ecosystem system-wide on ${DISTRO} with $@" +fi + +set_system_deps + +# get an accounting of the missing dependencies +get_missing_system_dependencies +get_missing_python_dependencies + +echo "missing python dependencies" +echo ${missing_conda_dependencies[@]} +echo ${missing_pip_dependencies[@]} + + +if [ ${system_dependencies_installed} -eq 1 ] +then + echo "All required system dependencies are installed." +elif [ ${LOCAL} -eq 1 ] +then + echo "The following system dependences need to be installed as super"\ + "user before the C-PAC installation can continue:" + for p in ${missing_system_dependencies} + do + echo " $p" + done + exit 1 +fi + +# CC if user doesn't provide any command line arguments, install everything +if [ $# -eq 0 ] +then + if [ ${LOCAL} -eq 0 ] + then + install_system_dependencies + else + echo "Installing system dependencies requires you to be superuser, skipping ..." + fi + install_python_dependencies + install_afni + if [ ${LOCAL} -eq 0 ] || [ ${DISTRO} == "CENTOS" ] + then + install_fsl + else + echo "Installing FSL on Ubuntu requires you to be superuser, skipping ..." 
+ fi + install_c3d + install_ants + install_cpac_resources + install_cpac + install_cpac_env +fi + +while getopts ":spn:alrh" opt +do + case $opt in + s) + install_system_dependencies + install_cpac_env + ;; + p) + install_python_dependencies + install_cpac_env + ;; + n) + suites=($OPTARG) + for suite in ${suites[@]} + do + case $suite in + afni) + install_afni + install_cpac_env + ;; + fsl) + install_fsl + install_cpac_env + ;; + c3d) + install_c3d + install_cpac_env + ;; + ants) + install_ants + install_cpac_env + ;; + cpac) + install_cpac_resources + install_cpac + install_cpac_env + ;; + *) + echo "Invalid neuroimaging suite: $suite" + echo "CPAC provisioning script will continue." + echo "[ $(date) ] : Unexpected neuroimaging suite: $suite" \ + >> ~/cpac.log + ;; + esac + done + ;; + a) + install_afni + if [ $LOCAL -eq 1 ] && [ $DISTRO == 'UBUNTU' ]; then + echo FSL cannot be installed locally on Ubuntu. + echo Contact your system administrator to install FSL. + echo Continuing the installation... + echo "[ $(date) ] : FSL installation failed - need root privileges" \ + "on Ubuntu." >> ~/cpac.log + else + install_fsl + fi + install_c3d + install_ants + install_cpac_resources + install_cpac + install_cpac_env + ;; + l) + install_python_dependencies + install_afni + if [ $LOCAL -eq 1 ] && [ $DISTRO == 'UBUNTU' ] + then + echo "FSL cannot be installed locally on Ubuntu." + echo "Contact your system administrator to install FSL." + echo "Continuing the installation..." + echo "[$(date)] : FSL installation failed - need root" \ + "privileges on Ubuntu." >> ~/cpac.log + else + install_fsl + fi + install_c3d + install_ants + install_cpac_resources + install_cpac + install_cpac_env + ;; + r) + install_system_dependencies + install_python_dependencies + install_afni + install_fsl + install_c3d + install_ants + install_cpac_resources + install_cpac + install_cpac_env + ;; + h) + print_usage + ;; + \?) 
+ echo "Invalid option: -$OPTARG" >&2 + cd $INIT_DIR + exit 1 + ;; + :) + echo "Option -$OPTARG requires an argument." >&2 + cd $INIT_DIR + exit 1 + ;; + esac done + cd $INIT_DIR From e6e1c2dbd191bf32731417559fb5341f0add8d14 Mon Sep 17 00:00:00 2001 From: John Pellman Date: Tue, 15 Nov 2016 16:33:31 -0500 Subject: [PATCH 06/37] Fixed typo and added code for differences in package names between different versions of Ubuntu. --- scripts/cpac_install.sh | 36 ++++++++++++++++++++++++++++++------ 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index cda5c28630..b047b6de96 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -28,11 +28,16 @@ centos7_packages=("mesa-libGLU-9.0.0-4.el7.x86_64" "gsl-1.15-13.el7.x86_64"\ # are all of the ubuntu packages the same regardless of the version? ubuntu_packages=("cmake" "git" "graphviz" "graphviz-dev" "gsl-bin" "libcanberra-gtk-module" \ "libexpat1-dev" "libgiftiio-dev" "libglib2.0-dev" "libglu1-mesa" "libglu1-mesa-dev" \ - "libgsl0-dev" "libjpeg-progs" "libmotif-dev" "libxml2" "libxml2-dev" "libxext-dev" \ + "libgsl0-dev" "libjpeg-progs" "libxml2" "libxml2-dev" "libxext-dev" \ "libxft2" "libxft-dev" "libxi-dev" "libxmu-headers" "libxmu-dev" "libxpm-dev" "libxslt1-dev" \ "libxp6" "libxp-dev" "make" "mesa-common-dev" "mesa-utils" "netpbm" "pkg-config" \ "build-essential" "xvfb" "xauth" "libgl1-mesa-dri" "tcsh" "unzip" "zlib1g-dev" "m4") +ubuntu1204_packages=("lesstif2-dev") +ubuntu1404_packages=("libmotif-dev") +ubuntu1604_packages=("libmotif-dev") +ubuntu1610_packages=("libmotif-dev") + conda_packages=("pandas" "cython" "numpy" "scipy" "matplotlib" "networkx" "traits" "pyyaml" "jinja2" "nose" "ipython" "pip" "wxpython") pip_packages=("future" "prov" "simplejson" "lockfile" "pygraphviz" "nibabel" "nipype" "patsy" "memory_profiler" "psutil" "configparser" "indi_tools") @@ -72,10 +77,29 @@ function set_system_deps { esac elif [ $DISTRO == 'UBUNTU' ] then 
- # take care of initing apt-get and installing wget - echo "!!!!!! CC" - apt-get update && apt-get upgrade -y && apt-get install -y wget + # add in the packages that are common to all system_pkgs=${ubuntu_packages[@]} + # take care of initing apt-get and installing wget + apt-get update && apt-get upgrade -y && apt-get install -y wget + + # add in the packages that are specific to the redhat-release + version=$(lsb_release -r | awk '{print $2}') + case ${version} in + 12.04) + system_pkgs+=(${ubuntu1204_packages}) + ;; + 14.04) + system_pkgs+=(${ubuntu1404_packages}) + ;; + 16.04) + system_pkgs+=(${ubuntu1604_packages}) + ;; + 16.10) + system_pkgs+=(${ubuntu1610_packages}) + ;; + *) + echo "Unknown version ${version}" + esac else echo "Unknown distribution ${DISTRO}" exit 1 @@ -973,9 +997,9 @@ fi # tell the user what we are doing if [ ${LOCAL} -eq 1 ] then - echo "Installing the C-CPAC ecosystem locally on ${DISTRO} with $@" + echo "Installing the C-PAC ecosystem locally on ${DISTRO} with $@" else - echo "Installing the C-CPAC ecosystem system-wide on ${DISTRO} with $@" + echo "Installing the C-PAC ecosystem system-wide on ${DISTRO} with $@" fi set_system_deps From f9a6991013a2acf30adc42f7155ab1dacf36cb0e Mon Sep 17 00:00:00 2001 From: John Pellman Date: Tue, 15 Nov 2016 16:42:10 -0500 Subject: [PATCH 07/37] Switch to using tee append for logging some events. --- scripts/cpac_install.sh | 43 ++++++++++++----------------------------- 1 file changed, 12 insertions(+), 31 deletions(-) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index b047b6de96..46981d5b3b 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -169,15 +169,10 @@ function install_system_dependencies { if [ $? 
-ne 0 ] then system_dependencies_installed=0 - echo "[ $(date) ] yum failed to install packages: ${missing_system_dependencies[@]}" - echo "[ $(date) ] yum failed to install packages: ${missing_system_dependencies[@]}" \ - >> ~/cpac.log + echo "[ $(date) ] yum failed to install packages: ${missing_system_dependencies[@]}" | tee -a ~/cpac.log else echo "[ $(date) ] : yum Installed C-PAC system dependency"\ - "${missing_system_dependencies[@]}" - echo "[ $(date) ] : yum Installed C-PAC system dependency" - "${missing_system_dependencies[@]}" \ - >> ~/cpac.log + "${missing_system_dependencies[@]}" | tee -a ~/cpac.log fi #for p in ${missing_system_dependencies[@]} #do @@ -204,15 +199,10 @@ function install_system_dependencies { if [ $? -ne 0 ] then system_dependencies_installed=0 - echo "[ $(date) ] apt-get failed to install packages: ${missing_system_dependencies[@]}" - echo "[ $(date) ] apt-get failed to install packages: ${missing_system_dependencies[@]}" \ - >> ~/cpac.log + echo "[ $(date) ] apt-get failed to install packages: ${missing_system_dependencies[@]}" | tee -a ~/cpac.log else echo "[ $(date) ] : apt-get Installed C-PAC system dependency"\ - "${missing_system_dependencies[@]}" - echo "[ $(date) ] : apt-get Installed C-PAC system dependency" - "${missing_system_dependencies[@]}" \ - >> ~/cpac.log + "${missing_system_dependencies[@]}" | tee -a ~/cpac.log fi # for p in ${missing_system_dependencies[@]} # do @@ -233,10 +223,8 @@ function install_system_dependencies { # finish up apt-get autoremove -y else - echo "Linux distribution not recognized. System-level dependencies cannot" \ - "be installed." echo "[ $(date) ] : C-PAC system dependencies could not be installed (Linux" \ - "distribution not recognized)." >> ~/cpac.log + "distribution not recognized)." 
| tee -a ~/cpac.log cd $INIT_DIR exit 1 fi @@ -304,8 +292,7 @@ function install_python_dependencies { if [ ${python_dependencies_installed} -eq 1 ] then - echo "[ $(date) ] C-PAC Python dependencies installed!" - echo "[ $(date) ] C-PAC Python dependencies installed!" >> ~/cpac.log + echo "[ $(date) ] C-PAC Python dependencies installed!" | tee -a ~/cpac.log return fi @@ -322,8 +309,7 @@ function install_python_dependencies { # for now always install miniconda, in the future should only install # if not there - echo "[ $(date) ] Installing miniconda!" - echo "[ $(date) ] Installing miniconda!" >> ~/cpac.log + echo "[ $(date) ] Installing miniconda!" | tee -a ~/cpac.log cd /tmp if [ ! -f Miniconda-3.8.3-Linux-x86_64.sh ] @@ -331,8 +317,7 @@ function install_python_dependencies { wget http://repo.continuum.io/miniconda/Miniconda-3.8.3-Linux-x86_64.sh if [ $? -ne 0 ] then - echo "[ $(date) ] Could not download miniconda installation script!" - echo "[ $(date) ] Could not download miniconda installation script!" >> ~/cpac.log + echo "[ $(date) ] Could not download miniconda installation script!" | tee -a ~/cpac.log return fi fi @@ -342,8 +327,7 @@ function install_python_dependencies { ./Miniconda-3.8.3-Linux-x86_64.sh -b -p /usr/local/bin/miniconda if [ $? -ne 0 ] then - echo "[ $(date) ] Miniconda installation failed!" - echo "[ $(date) ] Miniconda installation failed!" >> ~/cpac.log + echo "[ $(date) ] Miniconda installation failed!" | tee -a ~/cpac.log #return fi chmod -R 775 /usr/local/bin/miniconda @@ -355,8 +339,7 @@ function install_python_dependencies { ./Miniconda-3.8.3-Linux-x86_64.sh -b if [ $? -ne 0 ] then - echo "[ $(date) ] Miniconda installation failed!" - echo "[ $(date) ] Miniconda installation failed!" >> ~/cpac.log + echo "[ $(date) ] Miniconda installation failed!" | tee -a ~/cpac.log return fi export PATH=~/miniconda/bin:${PATH} @@ -369,8 +352,7 @@ function install_python_dependencies { conda install -y ${missing_conda_dependencies[@]} if [ $? 
-ne 0 ] then - echo "[ $(date) ] Conda install ${p} failed!" - echo "[ $(date) ] Conda install ${p} failed!" >> ~/cpac.log + echo "[ $(date) ] Conda install ${p} failed!" | tee -a ~/cpac.log exit 1 fi #for p in ${missing_conda_dependencies[@]} @@ -388,8 +370,7 @@ function install_python_dependencies { pip install ${missing_pip_dependencies[@]} if [ $? -ne 0 ] then - echo "[ $(date) ] Pip install ${missing_pip_dependencies[@]} failed!" - echo "[ $(date) ] Pip install ${p} failed!" >> ~/cpac.log + echo "[ $(date) ] Pip install ${missing_pip_dependencies[@]} failed!" | tee -a ~/cpac.log exit 1 fi #for p in ${missing_pip_dependencies[@]} From 58cf649160c42063921c8b032d9110911aaf1570 Mon Sep 17 00:00:00 2001 From: John Pellman Date: Tue, 15 Nov 2016 16:46:24 -0500 Subject: [PATCH 08/37] Uncomment miniconda environment. --- scripts/cpac_install.sh | 27 ++++++++------------------- 1 file changed, 8 insertions(+), 19 deletions(-) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index 46981d5b3b..d8f41d3b5d 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -346,9 +346,8 @@ function install_python_dependencies { echo 'export PATH=~/miniconda/bin:${PATH}' >> ~/cpac_env.sh fi - # for docker dont install virtualenv - #conda create -y -n cpac python - #source activate cpac + conda create -y -n cpac python + source activate cpac conda install -y ${missing_conda_dependencies[@]} if [ $? 
-ne 0 ] then @@ -385,12 +384,12 @@ function install_python_dependencies { #fi #done - #echo 'source activate cpac' >> ~/cpac_env.sh + echo 'source activate cpac' >> ~/cpac_env.sh cd /tmp git clone https://github.com/FCP-INDI/INDI-Tools.git cd INDI-Tools/ python setup.py install - #source deactivate + source deactivate cd $INIT_DIR } @@ -415,7 +414,7 @@ function get_missing_python_dependencies { missing_pip_dependencies=${pip_packages[@]} missing_conda_dependencies=${conda_packages[@]} else - # if we find an enviroment, then enable it + # if we find an environment, then enable it if [ -d ~/miniconda/envs/cpac ] || [ -d /usr/local/bin/miniconda/envs/cpac ] then echo "[ $(date) ] : Found C-PAC virtual environment, activating" >> ~/cpac.log @@ -509,7 +508,7 @@ function install_fsl { fi if [ $DISTRO == 'CENTOS' ]; then cd /tmp - wget fsl.fmrib.ox.ac.uk/fsldownloads/fslinstaller.py + wget fsl.fmrib.ox.ac.uk/fsldownloads/fslinstaller.py fi if [ $LOCAL -eq 0 ]; then if [ $DISTRO == 'CENTOS' ]; then @@ -624,16 +623,6 @@ function install_afni { wget http://afni.nimh.nih.gov/pub/dist/tgz/${AFNI_DOWNLOAD}.tgz tar xfz ${AFNI_DOWNLOAD}.tgz - #wget http://fcp-indi.s3.amazonaws.com/resources/cc_afni_trusty_openmp_64.tar.gz - #tar xfz cc_afni_trusty_openmp_64.tar.gz - #rm cc_afni_trusty_openmp_64.tar.gz - #AFNI_DOWNLOAD=afni - - #git clone https://github.com/ccraddock/afni.git - #cd /tmp/afni/src - #cp Makefile.linux_openmp_64_trusty Makefile - #make vastness - if [ $? -ne 0 ] then echo "AFNI Install failed!" 
@@ -894,7 +883,7 @@ function install_cpac { install_cpac_env exit 1 fi - #source activate cpac + source activate cpac cd /tmp #wget https://github.com/FCP-INDI/C-PAC/archive/v1.0.0.tar.gz #tar xzvf v1.0.0.tar.gz @@ -902,7 +891,7 @@ function install_cpac { cd C-PAC-1.0.0 python setup.py install rm -rf /tmp/C-PAC-1.0.0 - #source deactivate + source deactivate } function install_cpac_env { From 0c66a0dee9b499ce60de9b6debbb928e047bdd8d Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Thu, 17 Nov 2016 15:08:50 -0500 Subject: [PATCH 09/37] Rest-to-func change, pipeline config parameter changes, sublist generator fixes. --- CPAC/GUI/interface/windows/main_window.py | 26 +++++++++++----- CPAC/GUI/resources/config_parameters.txt | 6 ++-- CPAC/pipeline/cpac_pipeline.py | 38 +++++++++++++---------- CPAC/pipeline/cpac_runner.py | 17 ++++++---- CPAC/utils/build_sublist.py | 8 ++--- CPAC/utils/extract_data.py | 21 +++++++++---- CPAC/utils/utils.py | 18 +++++------ 7 files changed, 82 insertions(+), 52 deletions(-) diff --git a/CPAC/GUI/interface/windows/main_window.py b/CPAC/GUI/interface/windows/main_window.py index 7c453c4a69..a83d0b3b55 100644 --- a/CPAC/GUI/interface/windows/main_window.py +++ b/CPAC/GUI/interface/windows/main_window.py @@ -231,8 +231,8 @@ def runAnalysis1(self,pipeline, sublist, p): from CPAC.utils import Configuration from nipype.pipeline.plugins.callback_log import log_nodes_cb c = Configuration(yaml.load(open(os.path.realpath(pipeline), 'r'))) - plugin_args = {'n_procs': c.numCoresPerSubject, - 'memory_gb': c.memoryAllocatedPerSubject, + plugin_args = {'n_procs': c.maxCoresPerParticipant, + 'memory_gb': c.maximumMemoryPerParticipant, 'callback_log' : log_nodes_cb} CPAC.pipeline.cpac_runner.run(pipeline, sublist, p, @@ -676,12 +676,22 @@ def AddConfig(self, event): try: c = Configuration(f_cfg) except Exception as e: - print '\n\nERROR: Configuration file could not be '\ - 'loaded properly - the file might be '\ - 'access-protected or you might have chosen 
the '\ - 'wrong file.\n' - print 'Error name: main_window_0001\n\n' - print 'Exception: %s' % e + if "object has no attribute" in e: + err = "%s\n\nIs this attribute linked (using " \ + "'${}') in any of your configuration " \ + "parameters? (Standard template paths, " \ + "for example). If this is a pipeline " \ + "configuration file from an older version "\ + "of CPAC, this parameter may be obsolete. "\ + "Double-check your selections.\n\n" % e + print err + else: + print '\n\nERROR: Configuration file could not ' \ + 'be loaded properly - the file might be '\ + 'access-protected or you might have ' \ + 'chosen the wrong file.\n' + print 'Error name: main_window_0001\n\n' + print 'Exception: %s' % e # Valid pipeline name if c.pipelineName != None: if self.pipeline_map.get(c.pipelineName) == None: diff --git a/CPAC/GUI/resources/config_parameters.txt b/CPAC/GUI/resources/config_parameters.txt index c36cdea876..4afd04ab67 100644 --- a/CPAC/GUI/resources/config_parameters.txt +++ b/CPAC/GUI/resources/config_parameters.txt @@ -3,10 +3,10 @@ FSLDIR,FSL Path,Computer Settings resourceManager,Job Scheduler / Resource Manager,Computer Settings parallelEnvironment,SGE Parallel Environment,Computer Settings queue,SGE Queue,Computer Settings -numCoresPerSubject,Maximum Number of Cores Per Participant,Computer Settings -numSubjectsAtOnce,Number of Participants to Run Simultaneously,Computer Settings +maxCoresPerParticipant,Maximum Number of Cores Per Participant,Computer Settings +numParticipantsAtOnce,Number of Participants to Run Simultaneously,Computer Settings num_ants_threads,Number of Cores for Anatomical Registration (ANTS only),Computer Settings -memoryAllocatedPerSubject,Maximum Memory Per Participant (GB),Computer Settings +maximumMemoryPerParticipant,Maximum Memory Per Participant (GB),Computer Settings pipelineName,Pipeline Name,Output Settings workingDirectory,Working Directory,Output Settings crashLogDirectory,Crash Log Directory,Output Settings diff --git 
a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 96217c05df..5012ef0c90 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -187,14 +187,14 @@ def prep_workflow(sub_dict, c, strategies, run, pipeline_timing_info=None, # calculate maximum potential use of cores according to current pipeline # configuration - max_core_usage = int(c.numCoresPerSubject) * \ - int(c.numSubjectsAtOnce) * int(numThreads) + max_core_usage = int(c.maxCoresPerParticipant) * \ + int(c.numParticipantsAtOnce) - cores_msg = cores_msg + '\n\nSetting number of cores per subject to %s\n'\ - % c.numCoresPerSubject + cores_msg = cores_msg + "\n\nSetting maximum number of cores per " \ + "participant to %s\n" % c.maxCoresPerParticipant - cores_msg = cores_msg + 'Setting number of subjects at once to %s\n' \ - % c.numSubjectsAtOnce + cores_msg = cores_msg + 'Setting number of participants at once to %s\n' \ + % c.numParticipantsAtOnce cores_msg = cores_msg + 'Setting OMP_NUM_THREADS to %s\n' % numThreads cores_msg = cores_msg + 'Setting MKL_NUM_THREADS to %s\n' % numThreads @@ -206,12 +206,6 @@ def prep_workflow(sub_dict, c, strategies, run, pipeline_timing_info=None, cores_msg = cores_msg + 'Maximum potential number of cores that might ' \ 'be used during this run: %d\n\n' % max_core_usage - cores_msg = cores_msg + 'If that\'s more cores than you have, better ' \ - 'fix that quick! Hint: This can be changed via the settings '\ - '\'Number of Cores Per Subject\', and \'Number of Subjects ' \ - 'to Run Simultaneously\' in the pipeline configuration ' \ - 'editor under the tab \'Computer Settings\'.\n\n' - logger.info(cores_msg) @@ -975,8 +969,15 @@ def getNodeList(strategy): # create a new node, Remember to change its name! 
# Flow = create_func_datasource(sub_dict['rest']) # Flow.inputs.inputnode.subject = subject_id - try: - funcFlow = create_func_datasource(sub_dict['rest'], 'func_gather_%d' % num_strat) + + # keep this in so that older participant lists that still have the + # "rest" flag will still work + try: + func_path = sub_dict['func'] + except KeyError: + func_path = sub_dict['rest'] + try: + funcFlow = create_func_datasource(func_path, 'func_gather_%d' % num_strat) funcFlow.inputs.inputnode.subject = subject_id funcFlow.inputs.inputnode.creds_path = input_creds_path except Exception as xxx: @@ -5059,8 +5060,13 @@ def is_number(s): wf_names = [] scan_ids = ['scan_anat'] - for scanID in sub_dict['rest']: - scan_ids.append('scan_'+ str(scanID)) + + try: + for scanID in sub_dict['func']: + scan_ids.append('scan_'+ str(scanID)) + except KeyError: + for scanID in sub_dict['rest']: + scan_ids.append('scan_'+ str(scanID)) pipes = [] origStrat = 0 diff --git a/CPAC/pipeline/cpac_runner.py b/CPAC/pipeline/cpac_runner.py index afe50777ab..3043ff7612 100644 --- a/CPAC/pipeline/cpac_runner.py +++ b/CPAC/pipeline/cpac_runner.py @@ -445,15 +445,20 @@ def run(config_file, subject_list_file, p_name=None, plugin=None, plugin_args=No try: for sub in sublist: if sub['unique_id']: - s = sub['subject_id']+"_" + sub["unique_id"] + s = sub['subject_id'] + "_" + sub["unique_id"] else: s = sub['subject_id'] scan_ids = ['scan_anat'] - for id in sub['rest']: - scan_ids.append('scan_'+ str(id)) + try: + for id in sub['func']: + scan_ids.append('scan_'+ str(id)) + except KeyError: + for id in sub['rest']: + scan_ids.append('scan_'+ str(id)) sub_scan_map[s] = scan_ids except: - print "\n\n" + "ERROR: Subject list file not in proper format - check if you loaded the correct file?" + "\n" + \ + print "\n\n" + "ERROR: Subject list file not in proper format - " \ + "check if you loaded the correct file?" 
+ "\n" + \ "Error name: cpac_runner_0001" + "\n\n" raise Exception @@ -506,7 +511,7 @@ def run(config_file, subject_list_file, p_name=None, plugin=None, plugin_args=No jobQueue = [] # If we're allocating more processes than are subjects, run them all - if len(sublist) <= c.numSubjectsAtOnce: + if len(sublist) <= c.numParticipantsAtOnce: for p in procss: p.start() print >>pid,p.pid @@ -519,7 +524,7 @@ def run(config_file, subject_list_file, p_name=None, plugin=None, plugin_args=No # Init subject process index idc = idx # Launch processes (one for each subject) - for p in procss[idc : idc+c.numSubjectsAtOnce]: + for p in procss[idc : idc+c.numParticipantsAtOnce]: p.start() print >>pid, p.pid jobQueue.append(p) diff --git a/CPAC/utils/build_sublist.py b/CPAC/utils/build_sublist.py index 6de1525093..b73b91d3a2 100644 --- a/CPAC/utils/build_sublist.py +++ b/CPAC/utils/build_sublist.py @@ -452,7 +452,7 @@ def return_dir_indices(path_template): try: sess_idx = fp_split.index('{session}') except ValueError as exc: - sess_idx = ppant_idx+1 + sess_idx = None #ppant_idx+1 # Return indices return site_idx, ppant_idx, sess_idx @@ -866,7 +866,7 @@ def build_sublist(data_config_yml): site = '' else: site = anat_sp[anat_site_idx] - subj_d = {'anat' : anat, 'creds_path' : creds_path, 'rest' : {}, + subj_d = {'anat' : anat, 'creds_path' : creds_path, 'func' : {}, 'subject_id' : subj, 'unique_id' : sess, 'scan_parameters': site_scan_params} tmp_key = '_'.join([subj, site, sess]) @@ -915,14 +915,14 @@ def build_sublist(data_config_yml): continue # Set the rest dictionary with the scan - subj_d['rest'][scan] = func + subj_d['func'][scan] = func # And replace it back in the dictionary tmp_dict[tmp_key] = subj_d # Build a subject list from dictionary values sublist = [] for data_bundle in tmp_dict.values(): - if data_bundle['anat'] != '' and data_bundle['rest'] != {}: + if data_bundle['anat'] != '' and data_bundle['func'] != {}: sublist.append(data_bundle) # Check to make sure subject 
list has at least one valid data bundle if len(sublist) == 0: diff --git a/CPAC/utils/extract_data.py b/CPAC/utils/extract_data.py index 65310409f0..cc46b820e2 100644 --- a/CPAC/utils/extract_data.py +++ b/CPAC/utils/extract_data.py @@ -422,12 +422,21 @@ def generate_supplementary_files(output_path, subject_list_name): else: subject_id = sub['subject_id'] - for scan in sub['rest'].keys(): - subject_scan_set.add((subject_id, scan)) - subID_set.add(sub['subject_id']) - session_set.add(sub['unique_id']) - subject_set.add(subject_id) - scan_set.add(scan) + try: + for scan in sub['func'].keys(): + subject_scan_set.add((subject_id, scan)) + subID_set.add(sub['subject_id']) + session_set.add(sub['unique_id']) + subject_set.add(subject_id) + scan_set.add(scan) + except KeyError: + for scan in sub['rest'].keys(): + subject_scan_set.add((subject_id, scan)) + subID_set.add(sub['subject_id']) + session_set.add(sub['unique_id']) + subject_set.add(subject_id) + scan_set.add(scan) + except TypeError as e: print 'Subject list could not be populated!' print 'This is most likely due to a mis-formatting in your '\ diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index e3c1e53a39..0e7a834f92 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2164,41 +2164,41 @@ def check_config_resources(c): num_cores = cpu_count() # Check for pipeline memory for subject - if c.memoryAllocatedPerSubject is None: + if c.maximumMemoryPerParticipant is None: # Get system memory and numSubsAtOnce sys_mem_gb = sys_virt_mem.total/(1024.0**3) - sub_mem_gb = sys_mem_gb/c.numSubjectsAtOnce + sub_mem_gb = sys_mem_gb/c.numParticipantsAtOnce else: - sub_mem_gb = c.memoryAllocatedPerSubject + sub_mem_gb = c.maximumMemoryPerParticipant # If centrality is enabled, check to mem_sub >= mem_centrality if c.runNetworkCentrality[0]: if sub_mem_gb < c.memoryAllocatedForDegreeCentrality: err_msg = 'Memory allocated for subject: %d needs to be greater '\ 'than the memory allocated for centrality: %d. 
Fix and '\ - 'try again.' % (c.memoryAllocatedPerSubject, + 'try again.' % (c.maximumMemoryPerParticipant, c.memoryAllocatedForDegreeCentrality) raise Exception(err_msg) # Check for pipeline threads # Check if user specified cores - if c.numCoresPerSubject: - total_user_cores = c.numSubjectsAtOnce*c.numCoresPerSubject + if c.maxCoresPerParticipant: + total_user_cores = c.numParticipantsAtOnce*c.maxCoresPerParticipant if total_user_cores > num_cores: err_msg = 'Config file specifies more subjects running in '\ 'parallel than number of threads available. Change '\ 'this and try again' raise Exception(err_msg) else: - num_cores_per_sub = c.numCoresPerSubject + num_cores_per_sub = c.maxCoresPerParticipant else: - num_cores_per_sub = num_cores/c.numCoresPerSubject + num_cores_per_sub = num_cores/c.maxCoresPerParticipant # Now check ANTS if 'ANTS' in c.regOption: if c.num_ants_threads is None: num_ants_cores = num_cores_per_sub - elif c.num_ants_threads > c.numCoresPerSubject: + elif c.num_ants_threads > c.maxCoresPerParticipant: err_msg = 'Number of threads for ANTS: %d is greater than the '\ 'number of threads per subject: %d. Change this and '\ 'try again.' 
From f918b21f42ea78427fab0a131d99b3823a04c114 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Thu, 17 Nov 2016 15:54:41 -0500 Subject: [PATCH 10/37] Install_requires update --- CPAC/info.py | 7 +++++++ setup.py | 1 + 2 files changed, 8 insertions(+) diff --git a/CPAC/info.py b/CPAC/info.py index e389108851..595ee4486b 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -143,4 +143,11 @@ def get_cpac_gitversion(): "Jinja2 (>=2.6)", "pandas (>=0.15)", "INDI_Tools (>=0.0.6)", "memory_profiler (>=0.41)", "ipython (>=5.1)"] +INSTALL_REQUIRES = ["matplotlib >=1.2", "pylockfile >=0.9", "pyyaml >=3.0", + "pygraphviz >=1.3", "nibabel >=2.0.1", + "nipype >=0.12.1", "patsy >=0.3", "psutil >=2.1", + "boto3 >=1.2", "future ==0.15.2", "prov >=1.4.0", + "simplejson >=3.8.0", "cython >=0.12.1", + "Jinja2 >=2.6", "pandas >=0.15", "INDI-Tools >=0.0.6", + "memory_profiler >=0.41", "ipython >=5.1"] STATUS = 'stable' diff --git a/setup.py b/setup.py index ecf818568c..d02429efc0 100755 --- a/setup.py +++ b/setup.py @@ -103,6 +103,7 @@ def main(**extra_args): platforms=INFO_VARS['PLATFORMS'], version=INFO_VARS['VERSION'], requires = INFO_VARS['REQUIRES'], + install_requires = INFO_VARS['INSTALL_REQUIRES'] configuration = configuration, cmdclass = cmdclass, scripts = glob('scripts/*'), From 6638fea1b3b12aa43112eccf880b7ee64766062a Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Thu, 17 Nov 2016 15:56:27 -0500 Subject: [PATCH 11/37] Always test before pushing --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d02429efc0..75b7c8ce93 100755 --- a/setup.py +++ b/setup.py @@ -103,7 +103,7 @@ def main(**extra_args): platforms=INFO_VARS['PLATFORMS'], version=INFO_VARS['VERSION'], requires = INFO_VARS['REQUIRES'], - install_requires = INFO_VARS['INSTALL_REQUIRES'] + install_requires = INFO_VARS['INSTALL_REQUIRES'], configuration = configuration, cmdclass = cmdclass, scripts = glob('scripts/*'), From 5087e5d0159b92e7964dd376df7c836c3b4063fa Mon Sep 
17 00:00:00 2001 From: sgiavasis Date: Thu, 17 Nov 2016 15:58:56 -0500 Subject: [PATCH 12/37] Removed hard dependency checks and moved them to install_requires (more seamless). --- setup.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/setup.py b/setup.py index 75b7c8ce93..a15db67252 100755 --- a/setup.py +++ b/setup.py @@ -20,11 +20,11 @@ import os, sys # Import build helpers -try: - from nisext.sexts import package_check, get_comrec_build -except ImportError: - raise RuntimeError('Need nisext package from nibabel installation' - ' - please install nibabel first') +#try: +# from nisext.sexts import package_check, get_comrec_build +#except ImportError: +# raise RuntimeError('Need nisext package from nibabel installation' +# ' - please install nibabel first') from build_helpers import INFO_VARS @@ -67,10 +67,10 @@ def configuration(parent_package='', top_path=None): extra_setuptools_args = dict() # Hard and soft dependency checking -package_check('matplotlib', INFO_VARS['MATPLOTLIB_MIN_VERSION']) -package_check('jinja2', INFO_VARS['JINJA_MIN_VERSION']) +#package_check('matplotlib', INFO_VARS['MATPLOTLIB_MIN_VERSION']) +#package_check('jinja2', INFO_VARS['JINJA_MIN_VERSION']) #package_check('lockfile', INFO_VARS['PYLOCKFILE_MIN_VERSION']) # checking doesn't really work -package_check('yaml', INFO_VARS['PYYAML_MIN_VERSION']) +#package_check('yaml', INFO_VARS['PYYAML_MIN_VERSION']) ################################################################################ From 6db06ef43917efead7fac9e903c47b02b3084b91 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Thu, 17 Nov 2016 17:36:54 -0500 Subject: [PATCH 13/37] Updated pipeline config parameter called in Network Centrality workflow builder. 
--- CPAC/pipeline/cpac_pipeline.py | 2 +- setup.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 5012ef0c90..e73276f740 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -3367,7 +3367,7 @@ def connect_afni_centrality_wf(method_option, threshold_option, threshold): # Init workflow name and resource limits wf_name = 'afni_centrality_%d_%s' % (num_strat, method_option) - num_threads = c.numCoresPerSubject + num_threads = c.maxCoresPerParticipant memory = c.memoryAllocatedForDegreeCentrality # Format method and threshold options properly and check for errors diff --git a/setup.py b/setup.py index a15db67252..a4108a19ac 100755 --- a/setup.py +++ b/setup.py @@ -20,11 +20,11 @@ import os, sys # Import build helpers -#try: -# from nisext.sexts import package_check, get_comrec_build -#except ImportError: -# raise RuntimeError('Need nisext package from nibabel installation' -# ' - please install nibabel first') +try: + from nisext.sexts import package_check, get_comrec_build +except ImportError: + raise RuntimeError('Need nisext package from nibabel installation' + ' - please install nibabel first') from build_helpers import INFO_VARS From 2a88c16ebc14c8e4325afaddf195516c5c1fc6ff Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Fri, 18 Nov 2016 14:23:13 -0500 Subject: [PATCH 14/37] cpac_pipeline config parameter updates --- CPAC/pipeline/cpac_pipeline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index e73276f740..07aecff115 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -5436,8 +5436,8 @@ def is_number(s): pipelineTimeDict = {} pipelineTimeDict['Pipeline'] = c.pipelineName - pipelineTimeDict['Cores_Per_Subject'] = c.numCoresPerSubject - pipelineTimeDict['Simultaneous_Subjects'] = c.numSubjectsAtOnce + 
pipelineTimeDict['Cores_Per_Subject'] = c.maxCoresPerParticipant + pipelineTimeDict['Simultaneous_Subjects'] = c.numParticipantsAtOnce pipelineTimeDict['Number_of_Subjects'] = num_subjects pipelineTimeDict['Start_Time'] = pipeline_start_stamp pipelineTimeDict['End_Time'] = strftime("%Y-%m-%d_%H:%M:%S") From c9929e67e66a71c66d542928b29b0f2fffa76a94 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Mon, 28 Nov 2016 22:11:38 -0500 Subject: [PATCH 15/37] Removed deprecated "other_resolutions" outputs, updated the func-to-standard warp applications/naming, and put in a check to not delete input files during Test Configuration (would happen when S3 paths and local paths were mixed). --- CPAC/GUI/interface/windows/config_window.py | 13 +- CPAC/nuisance/nuisance.py | 5 +- CPAC/pipeline/cpac_pipeline.py | 312 +++++++------------- CPAC/registration/registration.py | 29 -- CPAC/utils/utils.py | 21 +- 5 files changed, 124 insertions(+), 256 deletions(-) diff --git a/CPAC/GUI/interface/windows/config_window.py b/CPAC/GUI/interface/windows/config_window.py index 519054074c..6b497eee35 100644 --- a/CPAC/GUI/interface/windows/config_window.py +++ b/CPAC/GUI/interface/windows/config_window.py @@ -407,12 +407,12 @@ def test_sublist(self, sublist): for sub in sublist: anat_file = sub['anat'] func_files = sub['rest'] + checked_anat_s3 = False if anat_file.lower().startswith(s3_str): - if checked_s3: - break dl_dir = tempfile.mkdtemp() creds_path = sub['creds_path'] anat_file = check_for_s3(anat_file, creds_path, dl_dir=dl_dir) + checked_anat_s3 = True # Check if anatomical file exists if os.path.exists(anat_file): img = nb.load(anat_file) @@ -429,8 +429,16 @@ def test_sublist(self, sublist): not_found_flg = True err_str_suffix = 'File not found: %s\n' % anat_file err_str = err_str + err_str_suffix + # If we're just checking s3 files, remove the temporarily downloaded + if checked_anat_s3: + try: + os.remove(anat_file) + except: + pass + break # For each functional file for func_file in 
func_files.values(): + checked_s3 = False if func_file.lower().startswith(s3_str): dl_dir = tempfile.mkdtemp() creds_path = sub['creds_path'] @@ -455,7 +463,6 @@ def test_sublist(self, sublist): # If we're just checking s3 files, remove the temporarily downloaded if checked_s3: try: - os.remove(anat_file) os.remove(func_file) except: pass diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 332b80b2ef..dfa7de637a 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -260,11 +260,8 @@ def calc_residuals(subject, scipy.io.savemat(regressors_file, regressor_map) ### for scipy v0.7.0 else: scipy.io.savemat(regressors_file, regressor_map, oned_as='column') ### for scipy v0.12: OK - - - return residual_file, csv_filename - + return residual_file, regressors_file def extract_tissue_data(data_file, diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 07aecff115..eba39f509b 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -1743,6 +1743,7 @@ def pick_wm(seg_prob_list): strat.set_leaf_properties(nuisance, 'outputspec.subject') strat.update_resource_pool({'functional_nuisance_residuals':(nuisance, 'outputspec.subject')}) + strat.update_resource_pool({'functional_nuisance_regressors':(nuisance, 'outputspec.regressors')}) create_log_node(nuisance, 'outputspec.subject', num_strat) @@ -1815,7 +1816,6 @@ def pick_wm(seg_prob_list): alff.get_node('lp_input').iterables = ('lp', c.lowPassFreqALFF) - try: node, out_file = strat.get_leaf_properties() workflow.connect(node, out_file, @@ -1888,7 +1888,6 @@ def pick_wm(seg_prob_list): strat_list += new_strat_list - ''' Inserting Scrubbing Workflow ''' @@ -1898,7 +1897,6 @@ def pick_wm(seg_prob_list): workflow_counter += 1 - if 1 in c.runScrubbing: workflow_bit_id['scrubbing'] = workflow_counter @@ -1952,7 +1950,6 @@ def pick_wm(seg_prob_list): strat_list += new_strat_list - ''' Func -> Template, uses antsApplyTransforms (ANTS) or 
ApplyWarp (FSL) to apply the warp; also includes mean functional warp @@ -1986,37 +1983,6 @@ def pick_wm(seg_prob_list): name="motion_correct_fsl_warp_%d" % num_strat) motion_correct_warp.inputs.ref_file = c.template_brain_only_for_func - # resample the templates to new resolution(s) so that we - # can write out the timeseries to these resolutions also - resample_brain_template = pe.Node(interface=fsl.FLIRT(), - name='resample_brain_template_%d' % num_strat) - resample_brain_template.inputs.in_file = c.template_brain_only_for_func - resample_brain_template.inputs.reference = c.template_brain_only_for_func - resample_brain_template.inputs.apply_isoxfm = float(c.resolution_for_func_derivative[0]) - - resample_head_template = pe.Node(interface=fsl.FLIRT(), - name='resample_head_template_%d' % num_strat) - resample_head_template.inputs.in_file = c.template_skull_for_func - resample_head_template.inputs.reference = c.template_skull_for_func - resample_head_template.inputs.apply_isoxfm = float(c.resolution_for_func_derivative[0]) - - alt_func_mni_warp = pe.Node(interface=fsl.ApplyWarp(), - name='alt_func_mni_fsl_warp_%d' % num_strat) - - alt_motion_correct_warp = pe.Node(interface=fsl.ApplyWarp(), - name="alt_motion_correct_fsl_warp_%d" % num_strat) - - alt_functional_brain_mask_to_standard = pe.Node(interface=fsl.ApplyWarp(), - name='alt_func_mni_fsl_warp_mask_%d' % num_strat) - alt_functional_brain_mask_to_standard.inputs.interp = 'nn' - - # have to do the smoothing here to keep the iterable - # flow together - alt_func_smooth = pe.Node(interface=fsl.MultiImageMaths(), - name='alt_func_smooth_%d' % num_strat) - alt_motion_smooth = pe.Node(interface=fsl.MultiImageMaths(), - name='alt_motion_smooth_%d' % num_strat) - try: node, out_file = strat.get_node_from_resource_pool('anatomical_to_mni_nonlinear_xfm') @@ -2060,76 +2026,15 @@ def pick_wm(seg_prob_list): node, out_file = strat.get_node_from_resource_pool('motion_correct') workflow.connect(node, out_file, 
motion_correct_warp, 'in_file') - - # input files - node, out_file = strat.get_leaf_properties() - workflow.connect(node, out_file, - alt_func_mni_warp, 'in_file') - - node, out_file = strat.get_node_from_resource_pool('motion_correct') - workflow.connect(node, out_file, - alt_motion_correct_warp, 'in_file') - - node, out_file = strat.get_node_from_resource_pool('functional_brain_mask') - workflow.connect(node, out_file, - alt_functional_brain_mask_to_standard, 'in_file') - - # reference files - workflow.connect(resample_brain_template, 'out_file', - alt_func_mni_warp, 'ref_file') - workflow.connect(resample_brain_template, 'out_file', - alt_motion_correct_warp, 'ref_file') - workflow.connect(resample_head_template, 'out_file', - alt_functional_brain_mask_to_standard, 'ref_file') - - # functional to anatomical linear warp - node, out_file = strat.get_node_from_resource_pool('functional_to_anat_linear_xfm') - workflow.connect(node, out_file, - alt_func_mni_warp, 'premat') - workflow.connect(node, out_file, - alt_motion_correct_warp, 'premat') - workflow.connect(node, out_file, - alt_functional_brain_mask_to_standard, 'premat') - - # anatomical to template nonlinear warp - node, out_file = strat.get_node_from_resource_pool('anatomical_to_mni_nonlinear_xfm') - workflow.connect(node, out_file, - alt_func_mni_warp, 'field_file') - workflow.connect(node, out_file, - alt_motion_correct_warp, 'field_file') - workflow.connect(node, out_file, - alt_functional_brain_mask_to_standard, 'field_file') - - # smoothing (have to do it here to keep the iterable - # flowing smoothly) - workflow.connect(alt_func_mni_warp, 'out_file', - alt_func_smooth, 'in_file') - workflow.connect(alt_motion_correct_warp, 'out_file', - alt_motion_smooth, 'in_file') - - workflow.connect(inputnode_fwhm, ('fwhm', set_gauss), - alt_func_smooth, 'op_string') - workflow.connect(inputnode_fwhm, ('fwhm', set_gauss), - alt_motion_smooth, 'op_string') - - workflow.connect(alt_functional_brain_mask_to_standard, 
'out_file', - alt_func_smooth, 'operand_files') - workflow.connect(alt_functional_brain_mask_to_standard, 'out_file', - alt_motion_smooth, 'operand_files') - except: logConnectionError('Functional Timeseries Registration to MNI space (FSL)', num_strat, strat.get_resource_pool(), '0015') raise - strat.update_resource_pool({'functional_mni':(func_mni_warp, 'out_file'), + strat.update_resource_pool({'functional_to_standard':(func_mni_warp, 'out_file'), 'functional_brain_mask_to_standard':(functional_brain_mask_to_standard, 'out_file'), - 'mean_functional_in_mni':(mean_functional_warp, 'out_file'), + 'mean_functional_to_standard':(mean_functional_warp, 'out_file'), 'motion_correct_to_standard':(motion_correct_warp, 'out_file')}) - strat.update_resource_pool({'functional_mni_other_resolutions':(alt_func_mni_warp, 'out_file'), - 'motion_correct_to_standard_other_resolutions':(alt_motion_correct_warp, 'out_file'), - 'functional_mni_other_resolutions_smooth':(alt_func_smooth, 'out_file'), - 'motion_correct_to_standard_other_resolutions_smooth':(alt_motion_smooth, 'out_file')}) strat.append_name(func_mni_warp.name) create_log_node(func_mni_warp, 'out_file', num_strat) @@ -2188,7 +2093,6 @@ def fsl_to_itk_conversion(source_file, reference, func_name): 'outputspec.itk_transform', num_strat) - def collect_transforms_func_mni(func_name): # collects series of warps to be applied @@ -2250,10 +2154,21 @@ def collect_transforms_func_mni(func_name): 'outputspec.transformation_series', num_strat) + def ants_apply_warps_func_mni(input_node, input_outfile, \ + ref_node, ref_outfile, func_name, interp, \ + input_image_type): + # converts FSL-format .mat affine xfm into ANTS-format + # .txt; .mat affine comes from Func->Anat registration + fsl_to_itk_func_mni = create_wf_c3d_fsl_to_itk(0, name=\ + 'fsl_to_itk_%s_%d' % (func_name, \ + num_strat)) - def ants_apply_warps_func_mni(input_node, input_outfile, \ - reference, interp, input_image_type, func_name): + # collects series of warps to 
be applied + collect_transforms_func_mni = \ + create_wf_collect_transforms(0, name=\ + 'collect_transforms_%s_%d' % \ + (func_name, num_strat)) # apply ants warps apply_ants_warp_func_mni = \ @@ -2262,12 +2177,8 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ (func_name, num_strat), ants_threads=int(num_ants_cores)) - if len(reference) == 2: - node, out_file = reference - workflow.connect(node, out_file, - apply_ants_warp_func_mni, 'inputspec.reference_image') - else: - apply_ants_warp_func_mni.inputs.inputspec.reference_image = reference + workflow.connect(ref_node, ref_outfile, + apply_ants_warp_func_mni, 'inputspec.reference_image') apply_ants_warp_func_mni.inputs.inputspec.dimension = 3 @@ -2283,14 +2194,59 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ try: + # convert the .mat from linear Func->Anat to + # ANTS format + node, out_file = strat.get_node_from_resource_pool(\ + 'functional_to_anat_linear_xfm') + workflow.connect(node, out_file, fsl_to_itk_func_mni, + 'inputspec.affine_file') + + node, out_file = strat.get_node_from_resource_pool("anatomical_brain") + workflow.connect(node, out_file, fsl_to_itk_func_mni, + 'inputspec.reference_file') + + workflow.connect(ref_node, ref_outfile, fsl_to_itk_func_mni, + 'inputspec.source_file') + + # Field file from anatomical nonlinear registration + node, out_file = strat.get_node_from_resource_pool(\ + 'anatomical_to_mni_nonlinear_xfm') + workflow.connect(node, out_file, + collect_transforms_func_mni, + 'inputspec.warp_file') + + # initial transformation from anatomical registration + node, out_file = strat.get_node_from_resource_pool(\ + 'ants_initial_xfm') + workflow.connect(node, out_file, + collect_transforms_func_mni, + 'inputspec.linear_initial') + + # affine transformation from anatomical registration + node, out_file = strat.get_node_from_resource_pool(\ + 'ants_affine_xfm') + workflow.connect(node, out_file, + collect_transforms_func_mni, + 'inputspec.linear_affine') + + # 
rigid transformation from anatomical registration + node, out_file = strat.get_node_from_resource_pool(\ + 'ants_rigid_xfm') + workflow.connect(node, out_file, + collect_transforms_func_mni, + 'inputspec.linear_rigid') + + # Premat from Func->Anat linear reg and bbreg + # (if bbreg is enabled) + workflow.connect(fsl_to_itk_func_mni, 'outputspec.itk_transform', + collect_transforms_func_mni, 'inputspec.fsl_to_itk_affine') + # this pulls in directly because # it pulls in the leaf in some instances workflow.connect(input_node, input_outfile, apply_ants_warp_func_mni, 'inputspec.input_image') - node, out_file = strat.get_node_from_resource_pool(\ - 'itk_collected_warps_%s' % func_name) - workflow.connect(node, out_file, + workflow.connect(collect_transforms_func_mni, 'outputspec.transformation_series', apply_ants_warp_func_mni, 'inputspec.transforms') @@ -2309,7 +2265,43 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ create_log_node(apply_ants_warp_func_mni, \ 'outputspec.output_image', num_strat) + # 4D FUNCTIONAL apply warp + node, out_file = strat.get_leaf_properties() + node2, out_file2 = \ + strat.get_node_from_resource_pool("mean_functional") + ants_apply_warps_func_mni(node, out_file, + node2, out_file2, + "functional_to_standard", + "Linear", 3) + + # 4D FUNCTIONAL MOTION-CORRECTED apply warp + node, out_file = \ + strat.get_node_from_resource_pool('motion_correct') + node2, out_file2 = \ + strat.get_node_from_resource_pool("mean_functional") + ants_apply_warps_func_mni(node, out_file, + node2, out_file2, + "motion_correct_to_standard", + "Linear", 3) + + # FUNCTIONAL BRAIN MASK (binary, no timeseries) apply warp + node, out_file = \ + strat.get_node_from_resource_pool("functional_brain_mask") + ants_apply_warps_func_mni(node, out_file, + node, out_file, + "functional_brain_mask_to_standard", + "NearestNeighbor", 0) + + # FUNCTIONAL MEAN (no timeseries) apply warp + node, out_file = \ + strat.get_node_from_resource_pool("mean_functional") + 
ants_apply_warps_func_mni(node, out_file, + node, out_file, + "mean_functional_to_standard", + "Linear", 0) + """ THEN GET RID OF THESE """ + """ # 4D FUNCTIONAL apply warp fsl_to_itk_conversion('mean_functional', 'anatomical_brain', @@ -2359,95 +2351,13 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ c.template_brain_only_for_func, 'Linear', 0, 'mean_functional_in_mni') - - - # resample the templates to new resolution(s) so that we - # can write out the timeseries to these resolutions also - resample_brain_template_for_ants = pe.Node(interface=fsl.FLIRT(), - name='resample_brain_template_for_ants_%d' % num_strat) - resample_brain_template_for_ants.inputs.in_file = c.template_brain_only_for_func - resample_brain_template_for_ants.inputs.reference = c.template_brain_only_for_func - resample_brain_template_for_ants.inputs.apply_isoxfm = float(c.resolution_for_func_derivative[0]) - - node2, out_file2 = (resample_brain_template_for_ants, 'out_file') - - # 4D FUNCTIONAL apply warp - fsl_to_itk_conversion('mean_functional', - 'anatomical_brain', - 'functional_mni_other_resolutions') - collect_transforms_func_mni('functional_mni_other_resolutions') - - node, out_file = strat.get_leaf_properties() - ants_apply_warps_func_mni(node, out_file, - (node2, out_file2), - 'Linear', 3, - 'functional_mni_other_resolutions') - - # 4D FUNCTIONAL MOTION-CORRECTED apply warp - fsl_to_itk_conversion('mean_functional', - 'anatomical_brain', - 'motion_correct_to_standard_other_resolutions') - collect_transforms_func_mni('motion_correct_to_standard_other_resolutions') - - node, out_file = strat.get_node_from_resource_pool('motion_correct') - ants_apply_warps_func_mni(node, out_file, - (node2, out_file2), - 'Linear', 3, - 'motion_correct_to_standard_other_resolutions') - - # FUNCTIONAL MASK apply warp - fsl_to_itk_conversion('functional_brain_mask', - 'anatomical_brain', - 'functional_brain_mask_to_standard_other_resolutions') - 
collect_transforms_func_mni('functional_brain_mask_to_standard_other_resolutions') - - node, out_file = strat.get_node_from_resource_pool('functional_brain_mask') - ants_apply_warps_func_mni(node, out_file, - (node2, out_file2), - 'NearestNeighbor', 0, - 'functional_brain_mask_to_standard_other_resolutions') - - # have to do the smoothing here to keep the iterable - # flow together - alt_func_smooth_for_ants = pe.Node(interface=fsl.MultiImageMaths(), - name='alt_func_smooth_for_ants_%d' % num_strat) - alt_motion_smooth_for_ants = pe.Node(interface=fsl.MultiImageMaths(), - name='alt_motion_smooth_for_ants_%d' % num_strat) - - try: - - node, out_file = strat.get_node_from_resource_pool("functional_mni_other_resolutions") - workflow.connect(node, out_file, alt_func_smooth_for_ants, 'in_file') - - node, out_file = strat.get_node_from_resource_pool("motion_correct_to_standard_other_resolutions") - workflow.connect(node, out_file, alt_motion_smooth_for_ants, 'in_file') - - workflow.connect(inputnode_fwhm, ('fwhm', set_gauss), - alt_func_smooth_for_ants, 'op_string') - - workflow.connect(inputnode_fwhm, ('fwhm', set_gauss), - alt_motion_smooth_for_ants, 'op_string') - - node, out_file = strat.get_node_from_resource_pool("functional_brain_mask_to_standard_other_resolutions") - workflow.connect(node, out_file, alt_func_smooth_for_ants, 'operand_files') - workflow.connect(node, out_file, alt_motion_smooth_for_ants, 'operand_files') - - except: - logConnectionError('Functional Timeseries ' \ - 'in Standard Space Resampling (ANTS)', num_strat, \ - strat.get_resource_pool(), '0058') - raise - - strat.update_resource_pool({"functional_mni_other_resolutions_smooth": (alt_func_smooth_for_ants, 'out_file')}) - strat.update_resource_pool({"motion_correct_to_standard_other_resolutions_smooth": (alt_motion_smooth_for_ants, 'out_file')}) + """ num_strat += 1 - strat_list += new_strat_list - """"""""""""""""""""""""""""""""""""""""""""""""""" OUTPUTS 
""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -2697,7 +2607,7 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ if "SpatialReg" in ts_analysis_dict.keys(): - node, out_file = strat.get_node_from_resource_pool('functional_mni') + node, out_file = strat.get_node_from_resource_pool('functional_to_standard') node2, out_file2 = strat.get_node_from_resource_pool('functional_brain_mask_to_standard') # resample the input functional file and functional mask to spatial map @@ -2717,7 +2627,7 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ if "DualReg" in sca_analysis_dict.keys(): - node, out_file = strat.get_node_from_resource_pool('functional_mni') + node, out_file = strat.get_node_from_resource_pool('functional_to_standard') node2, out_file2 = strat.get_node_from_resource_pool('functional_brain_mask_to_standard') # resample the input functional file and functional mask to spatial map @@ -2817,7 +2727,7 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ if "Avg" in ts_analysis_dict.keys(): - node, out_file = strat.get_node_from_resource_pool('functional_mni') + node, out_file = strat.get_node_from_resource_pool('functional_to_standard') # resample the input functional file to roi workflow.connect(node, out_file, @@ -2834,7 +2744,7 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ if ("Avg" in sca_analysis_dict.keys()): - node, out_file = strat.get_node_from_resource_pool('functional_mni') + node, out_file = strat.get_node_from_resource_pool('functional_to_standard') # resample the input functional file to roi workflow.connect(node, out_file, @@ -2851,7 +2761,7 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ if ("MultReg" in sca_analysis_dict.keys()): - node, out_file = strat.get_node_from_resource_pool('functional_mni') + node, out_file = strat.get_node_from_resource_pool('functional_to_standard') # resample the input functional file to roi workflow.connect(node, out_file, @@ -2921,7 +2831,7 @@ def 
ants_apply_warps_func_mni(input_node, input_outfile, \ try: - node, out_file = strat.get_node_from_resource_pool('functional_mni') + node, out_file = strat.get_node_from_resource_pool('functional_to_standard') # resample the input functional file to mask workflow.connect(node, out_file, @@ -3103,7 +3013,7 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ sc_temp_reg.inputs.inputspec.demean = True #c.mrsDemean try: - node, out_file = strat.get_node_from_resource_pool('functional_mni') + node, out_file = strat.get_node_from_resource_pool('functional_to_standard') node2, out_file2 = strat.get_node_from_resource_pool('roi_timeseries_for_SCA_multreg') node3, out_file3 = strat.get_node_from_resource_pool('functional_brain_mask_to_standard') @@ -3285,7 +3195,7 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ resample_functional_to_template.inputs.apply_xfm = True # Get nipype node and out file of the func mni img - node, out_file = strat.get_node_from_resource_pool('functional_mni') + node, out_file = strat.get_node_from_resource_pool('functional_to_standard') # Resample the input functional file to template(roi/mask) workflow.connect(node, out_file, diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 1cc5ba823d..24b4290b8a 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1058,35 +1058,6 @@ def create_wf_collect_transforms(map_node, name='create_wf_collect_transforms'): """ DOCSTRINGS - - Parameters - ---------- - name : string, optional - Name of the workflow. 
- - Returns - ------- - collect_transforms_wf : nipype.pipeline.engine.Workflow - - Notes - ----- - - Workflow Inputs:: - - inputspec.transform_file : string (nifti file) - Output matrix of FSL-based functional to anatomical registration - inputspec.reference_file : string (nifti file) - File of skull-stripped anatomical brain to be used in affine - conversion - inputspec.source_file : string (nifti file) - Should match the input of the apply warp (in_file) unless you are - applying the warp to a 4-d file, in which case this file should - be a mean_functional file - - Workflow Outputs:: - - outputspec.itk_transform : string (nifti file) - Converted affine transform in ITK format usable with ANTS """ diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 0e7a834f92..7a9381ce61 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -32,10 +32,6 @@ 'functional_brain_mask':'func', 'motion_correct':'func', 'motion_correct_smooth':'func', - 'itk_func_anat_affine_motion_correct_to_standard':'func', - 'itk_func_anat_affine_functional_mni_other_resolutions':'func', - 'itk_collected_warps_motion_correct_to_standard':'func', - 'itk_collected_warps_motion_correct_to_standard_other_resolutions':'func', 'motion_correct_to_standard':'func', 'motion_correct_to_standard_other_resolutions':'func', 'motion_correct_to_standard_other_resolutions_smooth':'func', @@ -61,22 +57,9 @@ 'motion_params':'parameters', 'power_params':'parameters', 'scrubbed_preprocessed':'func', - 'itk_func_anat_affine_functional_mni':'func', - 'itk_func_anat_affine_functional_brain_mask_to_standard':'func', - 'itk_func_anat_affine_functional_brain_mask_to_standard_other_resolutions':'func', - 'itk_func_anat_affine_motion_correct_to_standard_other_resolutions':'func', - 'itk_func_anat_affine_mean_functional_in_mni' : 'func', - 'itk_collected_warps_functional_mni':'func', - 'itk_collected_warps_functional_mni_other_resolutions':'func', - 'itk_collected_warps_functional_brain_mask_to_standard':'func', 
- 'itk_collected_warps_functional_brain_mask_to_standard_other_resolutions':'func', - 'itk_collected_warps_mean_functional_in_mni' : 'func', - 'functional_mni':'func', - 'functional_mni_other_resolutions':'func', - 'functional_mni_other_resolutions_smooth':'func', + 'functional_to_standard':'func', 'functional_brain_mask_to_standard':'func', - 'functional_brain_mask_to_standard_other_resolutions':'func', - 'mean_functional_in_mni' : 'func', + 'mean_functional_to_standard' : 'func', 'functional_to_anat_linear_xfm':'registration', 'functional_to_mni_linear_xfm':'registration', 'mni_to_functional_linear_xfm':'registration', From da8f1c1788a1c3161bc2fdc5232eb6266be3310f Mon Sep 17 00:00:00 2001 From: John Pellman Date: Tue, 29 Nov 2016 13:50:54 -0500 Subject: [PATCH 16/37] Different Neurodebian keys for different distros --- scripts/cpac_install.sh | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index d8f41d3b5d..03ebf0c957 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -83,7 +83,7 @@ function set_system_deps { apt-get update && apt-get upgrade -y && apt-get install -y wget # add in the packages that are specific to the redhat-release - version=$(lsb_release -r | awk '{print $2}') + version=$(lsb_release -r | awk '{print $2}') case ${version} in 12.04) system_pkgs+=(${ubuntu1204_packages}) @@ -99,7 +99,7 @@ function set_system_deps { ;; *) echo "Unknown version ${version}" - esac + esac else echo "Unknown distribution ${DISTRO}" exit 1 @@ -528,10 +528,25 @@ function install_fsl { mv $FSLDIR/tcl $FSLDIR/5.0/tcl # Debian-based distros must use NeuroDebian instead of the installer. 
elif [ $DISTRO == 'UBUNTU' ]; then - wget -O- http://neuro.debian.net/lists/trusty.us-ca.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list + case ${version} in + 12.04) + wget -O- http://neuro.debian.net/lists/precise.au.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list + ;; + 14.04) + wget -O- http://neuro.debian.net/lists/trusty.us-ca.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list + ;; + 16.04) + wget -O- http://neuro.debian.net/lists/xenial.au.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list + ;; + 16.10) + wget -O- http://neuro.debian.net/lists/yakkety.au.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list + ;; + *) + echo "Unknown version ${version}" + esac apt-key adv --recv-keys --keyserver hkp://pgp.mit.edu:80 0xA5D32F012649A5A9 apt-get update - apt-get install -y fsl-5.0-complete + apt-get install -y fsl-5.0-core if [ $? -ne 0 ] then echo "FSL Install failed!" From 5e1d9499f9b1165a5fe4b66d005bed75feb49d6f Mon Sep 17 00:00:00 2001 From: John Pellman Date: Tue, 29 Nov 2016 14:10:41 -0500 Subject: [PATCH 17/37] Make version number of distro a global variable. --- scripts/cpac_install.sh | 40 +++++++++++++++++++++++++--------------- 1 file changed, 25 insertions(+), 15 deletions(-) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index 03ebf0c957..961b187c7d 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -25,7 +25,7 @@ centos7_epel_rpm="epel-release-7-5.noarch.rpm" centos7_packages=("mesa-libGLU-9.0.0-4.el7.x86_64" "gsl-1.15-13.el7.x86_64"\ "libcanberra-gtk2" "libxml-devel" "libpng12.x86_64") -# are all of the ubuntu packages the same regardless of the version? 
+# are all of the ubuntu packages that are common across different versions of Ubuntu ubuntu_packages=("cmake" "git" "graphviz" "graphviz-dev" "gsl-bin" "libcanberra-gtk-module" \ "libexpat1-dev" "libgiftiio-dev" "libglib2.0-dev" "libglu1-mesa" "libglu1-mesa-dev" \ "libgsl0-dev" "libjpeg-progs" "libxml2" "libxml2-dev" "libxext-dev" \ @@ -33,9 +33,13 @@ ubuntu_packages=("cmake" "git" "graphviz" "graphviz-dev" "gsl-bin" "libcanberra- "libxp6" "libxp-dev" "make" "mesa-common-dev" "mesa-utils" "netpbm" "pkg-config" \ "build-essential" "xvfb" "xauth" "libgl1-mesa-dri" "tcsh" "unzip" "zlib1g-dev" "m4") +# configuration options that are specific to Ubuntu 12.04 ubuntu1204_packages=("lesstif2-dev") +# configuration options that are specific to Ubuntu 14.04 ubuntu1404_packages=("libmotif-dev") +# configuration options that are specific to Ubuntu 16.04 ubuntu1604_packages=("libmotif-dev") +# configuration options that are specific to Ubuntu 16.10 ubuntu1610_packages=("libmotif-dev") conda_packages=("pandas" "cython" "numpy" "scipy" "matplotlib" "networkx" "traits" "pyyaml" "jinja2" "nose" "ipython" "pip" "wxpython") @@ -55,8 +59,7 @@ function set_system_deps { yum update -y && yum install -y wget # add in the packages that are specific to the redhat-release - version=$(rpm -q --queryformat '%{VERSION}' centos-release) - case ${version} in + case ${VERSION} in 5) epel_url=centos5_epel_url epel_rpm=centos5_epel_rpm @@ -73,7 +76,7 @@ function set_system_deps { system_pkgs+=(${centos7_packages}) ;; *) - echo "Unknown version ${version}" + echo "Unknown version ${VERSION}" esac elif [ $DISTRO == 'UBUNTU' ] then @@ -83,8 +86,7 @@ function set_system_deps { apt-get update && apt-get upgrade -y && apt-get install -y wget # add in the packages that are specific to the redhat-release - version=$(lsb_release -r | awk '{print $2}') - case ${version} in + case ${VERSION} in 12.04) system_pkgs+=(${ubuntu1204_packages}) ;; @@ -98,7 +100,7 @@ function set_system_deps { 
system_pkgs+=(${ubuntu1610_packages}) ;; *) - echo "Unknown version ${version}" + echo "Unknown version ${VERSION}" esac else echo "Unknown distribution ${DISTRO}" @@ -122,7 +124,7 @@ function print_usage { echo " -s : System-level dependencies only." echo " -p : Python dependencies only" echo " -n : Install specific neuroimaging packages. Accepts any number of the" - echo " following as arguments: afni, fsl, c3d, ants, cpac" + echo " following as arguments: afni, fsl, c3d, ants, cpac_resources, cpac" echo " will issue warnings if dependencies for these neuroimaging packages" echo " are not fulfilled. If multiple packages are to be specified, they" echo " must be surrounded by quotation marks." @@ -159,7 +161,6 @@ function install_system_dependencies { system_dependencies_installed=1 if [ $DISTRO == 'CENTOS' ] then - version=$(rpm -q --queryformat '%{VERSION}' centos-release) # update the repositories #yum update -y @@ -528,7 +529,7 @@ function install_fsl { mv $FSLDIR/tcl $FSLDIR/5.0/tcl # Debian-based distros must use NeuroDebian instead of the installer. elif [ $DISTRO == 'UBUNTU' ]; then - case ${version} in + case ${VERSION} in 12.04) wget -O- http://neuro.debian.net/lists/precise.au.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list ;; @@ -542,7 +543,7 @@ function install_fsl { wget -O- http://neuro.debian.net/lists/yakkety.au.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list ;; *) - echo "Unknown version ${version}" + echo "Unknown version ${VERSION}" esac apt-key adv --recv-keys --keyserver hkp://pgp.mit.edu:80 0xA5D32F012649A5A9 apt-get update @@ -938,8 +939,6 @@ function install_cpac_env { ##### MAIN ENTRY POINT - - # Check to see if user has root privileges. If not, perform local install. 
# CC undid the obfuscation if [ $EUID -eq 0 ] @@ -957,10 +956,11 @@ fi if [ -f /etc/redhat-release ] then DISTRO=CENTOS -elif [ -f /etc/lsb-release ] -then + VERSION=$(rpm -q --queryformat '%{VERSION}' centos-release) +elif [ -f /etc/lsb-release ] then source /etc/lsb-release DISTRO=${DISTRIB_ID^^} + VERSION=${DISTRIB_RELEASE^^} fi INIT_DIR=$(pwd) @@ -979,6 +979,13 @@ then exit 1 fi +if [ -z ${VERSION} ] +then + echo "VERSION needs to be set and non-empty. Check that /etc/redhat-release\n" + echo "or /etc/lsb-release exist." + exit 1 +fi + # tell the user what we are doing if [ ${LOCAL} -eq 1 ] then @@ -1073,6 +1080,9 @@ do install_cpac install_cpac_env ;; + cpac_resources) + install_cpac_resources + ;; *) echo "Invalid neuroimaging suite: $suite" echo "CPAC provisioning script will continue." From 768e02a5e0498014ce8a21e2757fe6b3db2a885d Mon Sep 17 00:00:00 2001 From: John Pellman Date: Tue, 29 Nov 2016 14:44:20 -0500 Subject: [PATCH 18/37] Added libxp compilation for newer versions of Ubuntu. 
--- scripts/cpac_install.sh | 121 +++++++++++++++++++++++++++++----------- 1 file changed, 87 insertions(+), 34 deletions(-) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index 961b187c7d..b7399d3b49 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -30,17 +30,17 @@ ubuntu_packages=("cmake" "git" "graphviz" "graphviz-dev" "gsl-bin" "libcanberra- "libexpat1-dev" "libgiftiio-dev" "libglib2.0-dev" "libglu1-mesa" "libglu1-mesa-dev" \ "libgsl0-dev" "libjpeg-progs" "libxml2" "libxml2-dev" "libxext-dev" \ "libxft2" "libxft-dev" "libxi-dev" "libxmu-headers" "libxmu-dev" "libxpm-dev" "libxslt1-dev" \ - "libxp6" "libxp-dev" "make" "mesa-common-dev" "mesa-utils" "netpbm" "pkg-config" \ + "make" "mesa-common-dev" "mesa-utils" "netpbm" "pkg-config" \ "build-essential" "xvfb" "xauth" "libgl1-mesa-dri" "tcsh" "unzip" "zlib1g-dev" "m4") # configuration options that are specific to Ubuntu 12.04 -ubuntu1204_packages=("lesstif2-dev") +ubuntu1204_packages=("lesstif2-dev" "libxp6" "libxp-dev" ) # configuration options that are specific to Ubuntu 14.04 -ubuntu1404_packages=("libmotif-dev") +ubuntu1404_packages=("libmotif-dev" "libxp6" "libxp-dev" ) # configuration options that are specific to Ubuntu 16.04 -ubuntu1604_packages=("libmotif-dev") +ubuntu1604_packages=("libmotif-dev" "xutils-dev" "libtool" "libx11-dev" "x11proto-xext-dev" "x11proto-print-dev" "dh-autoreconf" "libxext-dev") # configuration options that are specific to Ubuntu 16.10 -ubuntu1610_packages=("libmotif-dev") +ubuntu1610_packages=("libmotif-dev" "xutils-dev" "libtool" "libx11-dev" "x11proto-xext-dev" "x11proto-print-dev" "dh-autoreconf" "libxext-dev") conda_packages=("pandas" "cython" "numpy" "scipy" "matplotlib" "networkx" "traits" "pyyaml" "jinja2" "nose" "ipython" "pip" "wxpython") @@ -108,12 +108,28 @@ function set_system_deps { fi } +compile_libxp { + git clone https://cgit.freedesktop.org/xorg/lib/libXp/ + cd libXp + ./autogen.sh + ./configure + make + make install + if [ $? 
-ne 0 ] + then + system_dependencies_installed=0 + echo "[ $(date) ] libxp failed to compile" | tee -a ~/cpac.log + else + echo "[ $(date) ] Compiled and innstalled libxp" | tee -a ~/cpac.log + fi +} + # CC - reformatted this to have better control of the output function print_usage { echo "" echo "Usage: cpac_install.sh -[spnalrh]" echo "=========================================================================" - echo "Version: 0.4.0" + echo "Version: 1.0.1" echo "Author(s): John Pellman, Daniel Clark" echo "Based off of cpac_install.sh by Daniel Clark." echo "Description: Will perform specific operations to install C-PAC" @@ -197,7 +213,16 @@ function install_system_dependencies { #apt-get upgrade -y apt-get install -y ${missing_system_dependencies[@]} - if [ $? -ne 0 ] + aptgetfail=$? + # >= Ubuntu 16.04 no longer has libxp in the repos so it must be compiled + case ${VERSION} in + 16.04) + compile_libxp + ;; + 16.10) + compile_libxp + esac + if [ $aptgetfail -ne 0 ] then system_dependencies_installed=0 echo "[ $(date) ] apt-get failed to install packages: ${missing_system_dependencies[@]}" | tee -a ~/cpac.log @@ -669,32 +694,7 @@ function install_afni { fi } -function install_ants { - echo "Installing ANTS." - which ANTS &> /dev/null ; if [ $? -eq 0 ]; then - echo ANTS is already installed! - echo Moving on... - echo '[ '$(date)' ] : ANTS is already installed - does not need to be re-installed.' >> ~/cpac.log - return - fi - if [ ${system_dependencies_installed} -ne 1 ] - then - echo ANTS cannot be installed unless system-level dependencies are installed first. - echo Have your system administrator install system-level dependencies as root. - echo Exiting now... - echo '[ '$(date)' ] : ANTS installation failed - system-level dependencies are not installed.' >> ~/cpac.log - cd $INIT_DIR - exit 1 - fi - which c3d &> /dev/null ; if [ $? -ne 0 ]; then - echo "ANTS cannot be installed unless c3d is installed first." - echo "Install c3d and then try again." 
- echo "Exiting now..." - echo '[ '$(date)' ] : ANTS installation failed - C3D is not installed.' >> ~/cpac.log - cd $INIT_DIR - install_cpac_env - exit 1 - fi +function compile_ants { cd /tmp git clone https://github.com/stnava/ANTs.git if [ $LOCAL -eq 0 ]; then @@ -741,7 +741,7 @@ function install_ants { cp /tmp/ANTs/Scripts/antsBrainExtraction.sh ${ANTSPATH} cp /tmp/ANTs/Scripts/antsCorticalThickness.sh ${ANTSPATH} export ANTSPATH - export PATH=/opt/ants/bin:$PATH + export PATH=/opt/ants/bin:$PATH echo '# Path to ANTS' >> ~/cpac_env.sh echo 'export ANTSPATH=~/ants/bin/' >> ~/cpac_env.sh echo 'export PATH=~/ants/bin:$PATH' >> ~/cpac_env.sh @@ -754,6 +754,59 @@ function install_ants { fi } +function install_ants { + echo "Installing ANTS." + which ANTS &> /dev/null ; if [ $? -eq 0 ]; then + echo ANTS is already installed! + echo Moving on... + echo '[ '$(date)' ] : ANTS is already installed - does not need to be re-installed.' >> ~/cpac.log + return + fi + if [ ${system_dependencies_installed} -ne 1 ] + then + echo ANTS cannot be installed unless system-level dependencies are installed first. + echo Have your system administrator install system-level dependencies as root. + echo Exiting now... + echo '[ '$(date)' ] : ANTS installation failed - system-level dependencies are not installed.' >> ~/cpac.log + cd $INIT_DIR + exit 1 + fi + which c3d &> /dev/null ; if [ $? -ne 0 ]; then + echo "ANTS cannot be installed unless c3d is installed first." + echo "Install c3d and then try again." + echo "Exiting now..." + echo '[ '$(date)' ] : ANTS installation failed - C3D is not installed.' 
>> ~/cpac.log + cd $INIT_DIR + install_cpac_env + exit 1 + fi + if [ $DISTRO == 'CENTOS' ]; then + compile_ants + elif [ $DISTRO == 'UBUNTU' ]; then + if [ $LOCAL -eq 0 ]; then + # ANTS is supported in Neurodebian for every version of Ubuntu except 16.04 + case ${VERSION} in + 12.04) + apt-get install ants + ;; + 14.04) + apt-get install ants + ;; + 16.04) + compile_ants + ;; + 16.10) + apt-get install ants + ;; + *) + echo "Unknown version ${VERSION}" + esac + elif [ $LOCAL -eq 1 ]; then + compile_ants + fi + fi +} + function install_c3d { echo "Installing C3D." which c3d &> /dev/null ; if [ $? -eq 0 ]; then From 9b98de3877cee008350121dcfc9e4f3861d014b8 Mon Sep 17 00:00:00 2001 From: John Pellman Date: Tue, 29 Nov 2016 14:50:16 -0500 Subject: [PATCH 19/37] install script: Remove sudo from commands to add neurodebian key --- scripts/cpac_install.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index b7399d3b49..43a9185908 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -556,16 +556,16 @@ function install_fsl { elif [ $DISTRO == 'UBUNTU' ]; then case ${VERSION} in 12.04) - wget -O- http://neuro.debian.net/lists/precise.au.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list + wget -O- http://neuro.debian.net/lists/precise.au.full | tee /etc/apt/sources.list.d/neurodebian.sources.list ;; 14.04) - wget -O- http://neuro.debian.net/lists/trusty.us-ca.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list + wget -O- http://neuro.debian.net/lists/trusty.us-ca.full | tee /etc/apt/sources.list.d/neurodebian.sources.list ;; 16.04) - wget -O- http://neuro.debian.net/lists/xenial.au.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list + wget -O- http://neuro.debian.net/lists/xenial.au.full | tee /etc/apt/sources.list.d/neurodebian.sources.list ;; 16.10) - wget -O- http://neuro.debian.net/lists/yakkety.au.full | sudo tee 
/etc/apt/sources.list.d/neurodebian.sources.list + wget -O- http://neuro.debian.net/lists/yakkety.au.full | tee /etc/apt/sources.list.d/neurodebian.sources.list ;; *) echo "Unknown version ${VERSION}" From f40454c50d1c6851a6e927e350ecd1a88c14c425 Mon Sep 17 00:00:00 2001 From: John Pellman Date: Tue, 29 Nov 2016 15:27:52 -0500 Subject: [PATCH 20/37] install script: Small fix to switch, added in 4mm templates. --- scripts/cpac_install.sh | 423 +++++++++++++++++++++------------------- 1 file changed, 221 insertions(+), 202 deletions(-) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index 43a9185908..b5cac25ae0 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -1,5 +1,42 @@ #! /bin/bash +# CC - reformatted this to have better control of the output +function print_usage { + echo "" + echo "Usage: cpac_install.sh -[spnalrh]" + echo "=========================================================================" + echo "Version: 1.0.1" + echo "Author(s): John Pellman, Daniel Clark" + echo "Based off of cpac_install.sh by Daniel Clark." + echo "Description: Will perform specific operations to install C-PAC" + echo " dependencies and C-PAC. Checks for user privileges and performs" + echo " installation either locally or system-wide." + echo "=========================================================================" + echo "One or more command line options are required:" + echo " -s : System-level dependencies only." + echo " -p : Python dependencies only" + echo " -n : Install specific neuroimaging packages. Accepts any number of the" + echo " following as arguments: afni, fsl, c3d, ants, cpac_resources, cpac" + echo " will issue warnings if dependencies for these neuroimaging packages" + echo " are not fulfilled. If multiple packages are to be specified, they" + echo " must be surrounded by quotation marks." + echo " -a : Install all neuroimaging suites not already installed. 
Will also" + echo " tell you if all neuroimaging suites are already installed and on" + echo " the path." + echo " -l : Local install. Equivalent to -pa ; will not run FSL installer, but" + echo " will issue a warning if running on Ubuntu." + echo " -r : Root install. Equivalent to -spa" + echo " -h : Print this help message." + echo "=========================================================================" + echo "Example usage: cpac_install.sh -n \"fsl afni\"" + echo " Will install FSL and AFNI. The list of neuroimaging suites to install" + echo " is iterated through sequentially. In this case, FSL would first be" + echo " installed before AFNI." + echo "" +} + +##### Define system and Python packages. + # these are packages that are common to centos 5, 6, and 7 centos_packages=("git" "make" "unzip" "netpbm" "gcc" "python-devel"\ "gcc-gfortran" "gcc-c++" "libgfortran" "lapack" "lapack-devel" "blas"\ @@ -46,6 +83,8 @@ conda_packages=("pandas" "cython" "numpy" "scipy" "matplotlib" "networkx" "trait pip_packages=("future" "prov" "simplejson" "lockfile" "pygraphviz" "nibabel" "nipype" "patsy" "memory_profiler" "psutil" "configparser" "indi_tools") +##### Helper functions for installing system dependencies. + function set_system_deps { system_pkgs='' epel_rpm='' @@ -108,7 +147,44 @@ function set_system_deps { fi } +function get_missing_system_dependencies() +{ + missing_system_dependencies=() + system_dependencies_installed=1 + + if [ $DISTRO == 'CENTOS' ] + then + for package in ${system_pkgs[@]} + do + yum list installed ${package} > /dev/null 2>&1 + if [ $? -ne 0 ] + then + system_dependencies_installed=0 + ARRAY+=(${package}) + echo "[ $(date) ] : Missing system dependency ${package}" >> ~/cpac.log + fi + done + elif [ $DISTRO == 'UBUNTU' ] + then + for package in ${system_pkgs[@]} + do + dpkg -s ${package} > /dev/null 2>&1 + if [ $? 
-ne 0 ] + then + system_dependencies_installed=0 + missing_system_dependencies+=(${package}) + echo "[ $(date) ] : Missing system dependency ${package}" >> ~/cpac.log + fi + done + else + echo "[ $(date) ] : Do not know how to check for packages installed on ${DISTRO}" >> ~/cpac.log + fi + echo "missing ${missing_system_dependencies[@]}" +} + compile_libxp { + # Compiles libxp- this is necessary for some newer versions of Ubuntu + # where the is no Debian package available. git clone https://cgit.freedesktop.org/xorg/lib/libXp/ cd libXp ./autogen.sh @@ -120,47 +196,13 @@ compile_libxp { system_dependencies_installed=0 echo "[ $(date) ] libxp failed to compile" | tee -a ~/cpac.log else - echo "[ $(date) ] Compiled and innstalled libxp" | tee -a ~/cpac.log + echo "[ $(date) ] Compiled and installed libxp" | tee -a ~/cpac.log fi } -# CC - reformatted this to have better control of the output -function print_usage { - echo "" - echo "Usage: cpac_install.sh -[spnalrh]" - echo "=========================================================================" - echo "Version: 1.0.1" - echo "Author(s): John Pellman, Daniel Clark" - echo "Based off of cpac_install.sh by Daniel Clark." - echo "Description: Will perform specific operations to install C-PAC" - echo " dependencies and C-PAC. Checks for user privileges and performs" - echo " installation either locally or system-wide." - echo "=========================================================================" - echo "One or more command line options are required:" - echo " -s : System-level dependencies only." - echo " -p : Python dependencies only" - echo " -n : Install specific neuroimaging packages. Accepts any number of the" - echo " following as arguments: afni, fsl, c3d, ants, cpac_resources, cpac" - echo " will issue warnings if dependencies for these neuroimaging packages" - echo " are not fulfilled. If multiple packages are to be specified, they" - echo " must be surrounded by quotation marks." 
- echo " -a : Install all neuroimaging suites not already installed. Will also" - echo " tell you if all neuroimaging suites are already installed and on" - echo " the path." - echo " -l : Local install. Equivalent to -pa ; will not run FSL installer, but" - echo " will issue a warning if running on Ubuntu." - echo " -r : Root install. Equivalent to -spa" - echo " -h : Print this help message." - echo "=========================================================================" - echo "Example usage: cpac_install.sh -n \"fsl afni\"" - echo " Will install FSL and AFNI. The list of neuroimaging suites to install" - echo " is iterated through sequentially. In this case, FSL would first be" - echo " installed before AFNI." - echo "" -} +##### Function for installing system dependencies. function install_system_dependencies { - echo "Installing C-PAC system dependencies... [${missing_system_dependencies[@]}][${#missing_system_dependencies[@]}]" if [ ${#missing_system_dependencies[@]} -eq 0 ] @@ -221,6 +263,7 @@ function install_system_dependencies { ;; 16.10) compile_libxp + ;; esac if [ $aptgetfail -ne 0 ] then @@ -279,41 +322,106 @@ function install_system_dependencies { fi } -function get_missing_system_dependencies() -{ - missing_system_dependencies=() - system_dependencies_installed=1 +##### Helper functions for installing Python dependencies. - if [ $DISTRO == 'CENTOS' ] +function get_missing_python_dependencies { + + python_dependencies_installed=0 + missing_pip_dependencies=() + missing_conda_dependencies=() + + # first we check to make sure that we have python + if [ ! -f /usr/local/bin/miniconda/bin/python ] then - for package in ${system_pkgs[@]} + python_installed=0 + else + python_installed=1 + fi + + if [ ${python_installed} -eq 0 ] + then + echo "[ $(date) ] : Python is not installed, need to install all"\ + "Python dependencies." 
>> ~/cpac.log + missing_pip_dependencies=${pip_packages[@]} + missing_conda_dependencies=${conda_packages[@]} + else + # if we find an environment, then enable it + if [ -d ~/miniconda/envs/cpac ] || [ -d /usr/local/bin/miniconda/envs/cpac ] + then + echo "[ $(date) ] : Found C-PAC virtual environment, activating" >> ~/cpac.log + source activate cpac &> /dev/null + fi + + python_dependencies_installed=1 + for p in ${pip_packages[@]} do - yum list installed ${package} > /dev/null 2>&1 - if [ $? -ne 0 ] + if [ ${p} == "indi_tools" ] then - system_dependencies_installed=0 - ARRAY+=(${package}) - echo "[ $(date) ] : Missing system dependency ${package}" >> ~/cpac.log + /usr/local/bin/miniconda/bin/python -c "import indi_aws" 2> /dev/null + if [ $? -ne 0 ] + then + echo "[ $(date) ] : Python package $p not installed" >> ~/cpac.log + missing_pip_dependencies+=($p) + python_dependencies_installed=0 + else + echo "[ $(date) ] : Python package $p installed" >> ~/cpac.log + fi + else + /usr/local/bin/miniconda/bin/python -c "import ${p}" 2> /dev/null + if [ $? -ne 0 ] + then + echo "[ $(date) ] : Python package $p not installed" >> ~/cpac.log + missing_pip_dependencies+=($p) + python_dependencies_installed=0 + else + echo "[ $(date) ] : Python package $p installed" >> ~/cpac.log + fi fi done - elif [ $DISTRO == 'UBUNTU' ] - then - for package in ${system_pkgs[@]} + + for p in ${conda_packages[@]} do - dpkg -s ${package} > /dev/null 2>&1 - if [ $? -ne 0 ] + if [ ${p} == "wxpython" ] then - system_dependencies_installed=0 - missing_system_dependencies+=(${package}) - echo "[ $(date) ] : Missing system dependency ${package}" >> ~/cpac.log + /usr/local/bin/miniconda/bin/python -c "import wx" 2> /dev/null + retval=$? + elif [ ${p} == "pyyaml" ] + then + /usr/local/bin/miniconda/bin/python -c "import yaml" 2> /dev/null + retval=$? 
+ elif [ ${p} == "ipython" ] + then + if [ -f /usr/local/bin/miniconda/bin/ipython ] + then + retval=0 + else + retval=1 + fi + else + /usr/local/bin/miniconda/bin/python -c "import ${p}" 2> /dev/null + retval=$? + fi + if [ $retval -ne 0 ] + then + echo "[ $(date) ] : Python package $p not installed" >> ~/cpac.log + missing_conda_dependencies+=($p) + python_dependencies_installed=0 + else + echo "[ $(date) ] : Python package $p installed" >> ~/cpac.log fi done - else - echo "[ $(date) ] : Do not know how to check for packages installed on ${DISTRO}" >> ~/cpac.log + + # if we find an enviroment, then disable it + if [ -d ~/miniconda/envs/cpac ] || [ -d /usr/local/bin/miniconda/envs/cpac ] + then + echo "[ $(date) ] : Found C-PAC virtual environment, de-activating" >> ~/cpac.log + source deactivate &> /dev/null + fi fi - echo "missing ${missing_system_dependencies[@]}" } +##### Function for installing Python dependencies. + function install_python_dependencies { if [ ${python_dependencies_installed} -eq 1 ] @@ -419,101 +527,6 @@ function install_python_dependencies { cd $INIT_DIR } -function get_missing_python_dependencies { - - python_dependencies_installed=0 - missing_pip_dependencies=() - missing_conda_dependencies=() - - # first we check to make sure that we have python - if [ ! -f /usr/local/bin/miniconda/bin/python ] - then - python_installed=0 - else - python_installed=1 - fi - - if [ ${python_installed} -eq 0 ] - then - echo "[ $(date) ] : Python is not installed, need to install all"\ - "Python dependencies." 
>> ~/cpac.log - missing_pip_dependencies=${pip_packages[@]} - missing_conda_dependencies=${conda_packages[@]} - else - # if we find an environment, then enable it - if [ -d ~/miniconda/envs/cpac ] || [ -d /usr/local/bin/miniconda/envs/cpac ] - then - echo "[ $(date) ] : Found C-PAC virtual environment, activating" >> ~/cpac.log - source activate cpac &> /dev/null - fi - - python_dependencies_installed=1 - for p in ${pip_packages[@]} - do - if [ ${p} == "indi_tools" ] - then - /usr/local/bin/miniconda/bin/python -c "import indi_aws" 2> /dev/null - if [ $? -ne 0 ] - then - echo "[ $(date) ] : Python package $p not installed" >> ~/cpac.log - missing_pip_dependencies+=($p) - python_dependencies_installed=0 - else - echo "[ $(date) ] : Python package $p installed" >> ~/cpac.log - fi - else - /usr/local/bin/miniconda/bin/python -c "import ${p}" 2> /dev/null - if [ $? -ne 0 ] - then - echo "[ $(date) ] : Python package $p not installed" >> ~/cpac.log - missing_pip_dependencies+=($p) - python_dependencies_installed=0 - else - echo "[ $(date) ] : Python package $p installed" >> ~/cpac.log - fi - fi - done - - for p in ${conda_packages[@]} - do - if [ ${p} == "wxpython" ] - then - /usr/local/bin/miniconda/bin/python -c "import wx" 2> /dev/null - retval=$? - elif [ ${p} == "pyyaml" ] - then - /usr/local/bin/miniconda/bin/python -c "import yaml" 2> /dev/null - retval=$? - elif [ ${p} == "ipython" ] - then - if [ -f /usr/local/bin/miniconda/bin/ipython ] - then - retval=0 - else - retval=1 - fi - else - /usr/local/bin/miniconda/bin/python -c "import ${p}" 2> /dev/null - retval=$? 
- fi - if [ $retval -ne 0 ] - then - echo "[ $(date) ] : Python package $p not installed" >> ~/cpac.log - missing_conda_dependencies+=($p) - python_dependencies_installed=0 - else - echo "[ $(date) ] : Python package $p installed" >> ~/cpac.log - fi - done - - # if we find an enviroment, then disable it - if [ -d ~/miniconda/envs/cpac ] || [ -d /usr/local/bin/miniconda/envs/cpac ] - then - echo "[ $(date) ] : Found C-PAC virtual environment, de-activating" >> ~/cpac.log - source deactivate &> /dev/null - fi - fi -} function install_fsl { echo "Installing FSL." @@ -694,6 +707,48 @@ function install_afni { fi } +function install_c3d { + echo "Installing C3D." + which c3d &> /dev/null ; if [ $? -eq 0 ]; then + echo c3d is already installed! + echo Moving on... + echo '[ '$(date)' ] : C3D is already installed - does not need to be re-installed.' >> ~/cpac.log + return + fi + ARCHITECTURE=$(uname -p) + case $ARCHITECTURE in + x86_64 ) + C3D_DOWNLOAD=c3d-0.8.2-Linux-x86_64 + ;; + i386 ) + C3D_DOWNLOAD=c3d-0.8.2-Linux-i386 + ;; + i686 ) + C3D_DOWNLOAD=c3d-0.8.2-Linux-i686 + ;; + esac + cd /tmp + wget http://sourceforge.net/projects/c3d/files/c3d/c3d-0.8.2/${C3D_DOWNLOAD}.tar.gz + tar xfz ${C3D_DOWNLOAD}.tar.gz + if [ $LOCAL -eq 0 ]; then + mv $C3D_DOWNLOAD /opt/c3d + export PATH=/opt/c3d/bin:$PATH + echo '# Path to C3D' >> ~/cpac_env.sh + echo 'export PATH=/opt/c3d/bin:$PATH' >> ~/cpac_env.sh + elif [ $LOCAL -eq 1 ]; then + mv $C3D_DOWNLOAD ~/c3d + export PATH=~/c3d/bin:$PATH + echo '# Path to C3D' >> ~/cpac_env.sh + echo 'export PATH=~/c3d/bin:$PATH' >> ~/cpac_env.sh + else + echo Invalid value for variable 'LOCAL'. + echo This script is unable to determine whether or not you are running it as root. + echo '[ '$(date)' ] : C3D could not be installed (unable to determine if root).' 
>> ~/cpac.log + cd $INIT_DIR + exit 1 + fi +} + function compile_ants { cd /tmp git clone https://github.com/stnava/ANTs.git @@ -807,48 +862,6 @@ function install_ants { fi } -function install_c3d { - echo "Installing C3D." - which c3d &> /dev/null ; if [ $? -eq 0 ]; then - echo c3d is already installed! - echo Moving on... - echo '[ '$(date)' ] : C3D is already installed - does not need to be re-installed.' >> ~/cpac.log - return - fi - ARCHITECTURE=$(uname -p) - case $ARCHITECTURE in - x86_64 ) - C3D_DOWNLOAD=c3d-0.8.2-Linux-x86_64 - ;; - i386 ) - C3D_DOWNLOAD=c3d-0.8.2-Linux-i386 - ;; - i686 ) - C3D_DOWNLOAD=c3d-0.8.2-Linux-i686 - ;; - esac - cd /tmp - wget http://sourceforge.net/projects/c3d/files/c3d/c3d-0.8.2/${C3D_DOWNLOAD}.tar.gz - tar xfz ${C3D_DOWNLOAD}.tar.gz - if [ $LOCAL -eq 0 ]; then - mv $C3D_DOWNLOAD /opt/c3d - export PATH=/opt/c3d/bin:$PATH - echo '# Path to C3D' >> ~/cpac_env.sh - echo 'export PATH=/opt/c3d/bin:$PATH' >> ~/cpac_env.sh - elif [ $LOCAL -eq 1 ]; then - mv $C3D_DOWNLOAD ~/c3d - export PATH=~/c3d/bin:$PATH - echo '# Path to C3D' >> ~/cpac_env.sh - echo 'export PATH=~/c3d/bin:$PATH' >> ~/cpac_env.sh - else - echo Invalid value for variable 'LOCAL'. - echo This script is unable to determine whether or not you are running it as root. - echo '[ '$(date)' ] : C3D could not be installed (unable to determine if root).' 
>> ~/cpac.log - cd $INIT_DIR - exit 1 - fi -} - cpac_resources=("$FSLDIR/data/standard/MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz" \ "$FSLDIR/data/standard/MNI152_T1_2mm_brain_symmetric.nii.gz" \ "$FSLDIR/data/standard/MNI152_T1_2mm_symmetric.nii.gz" \ @@ -859,6 +872,13 @@ cpac_resources=("$FSLDIR/data/standard/MNI152_T1_2mm_brain_mask_symmetric_dil.ni "$FSLDIR/data/standard/MNI152_T1_3mm_brain_symmetric.nii.gz" \ "$FSLDIR/data/standard/MNI152_T1_3mm.nii.gz" \ "$FSLDIR/data/standard/MNI152_T1_3mm_symmetric.nii.gz" \ + "$FSLDIR/data/standard/MNI152_T1_4mm_brain_mask_dil.nii.gz" \ + "$FSLDIR/data/standard/MNI152_T1_4mm_brain_mask.nii.gz" \ + "$FSLDIR/data/standard/MNI152_T1_4mm_brain_mask_symmetric_dil.nii.gz" \ + "$FSLDIR/data/standard/MNI152_T1_4mm_brain.nii.gz" \ + "$FSLDIR/data/standard/MNI152_T1_4mm_brain_symmetric.nii.gz" \ + "$FSLDIR/data/standard/MNI152_T1_4mm.nii.gz" \ + "$FSLDIR/data/standard/MNI152_T1_4mm_symmetric.nii.gz" \ "$FSLDIR/data/atlases/HarvardOxford/HarvardOxford-lateral-ventricles-thr25-2mm.nii.gz") cpac_resdirs=("$FSLDIR/data/standard/tissuepriors/2mm" \ @@ -903,10 +923,11 @@ function install_cpac_resources { exit 1 fi cd /tmp - wget http://fcon_1000.projects.nitrc.org/indi/cpac_resources.tgz - tar xfz cpac_resources.tgz 2> /dev/null + wget http://fcon_1000.projects.nitrc.org/indi/cpac_resources.tar.gz + tar xfz cpac_resources.tar.gz cd cpac_image_resources cp -n MNI_3mm/* $FSLDIR/data/standard + cp -n MNI_4mm/* $FSLDIR/data/standard cp -n symmetric/* $FSLDIR/data/standard cp -nr tissuepriors/2mm $FSLDIR/data/standard/tissuepriors cp -nr tissuepriors/3mm $FSLDIR/data/standard/tissuepriors @@ -954,12 +975,10 @@ function install_cpac { fi source activate cpac cd /tmp - #wget https://github.com/FCP-INDI/C-PAC/archive/v1.0.0.tar.gz - #tar xzvf v1.0.0.tar.gz - git clone https://github.com/FCP-INDI/C-PAC.git C-PAC-1.0.0 - cd C-PAC-1.0.0 + git clone https://github.com/FCP-INDI/C-PAC.git + cd C-PAC python setup.py install - rm -rf /tmp/C-PAC-1.0.0 
+ rm -rf /tmp/C-PAC source deactivate } From 0200dff59232758c11695e68a3379c1778576355 Mon Sep 17 00:00:00 2001 From: John Pellman Date: Tue, 29 Nov 2016 15:34:59 -0500 Subject: [PATCH 21/37] Fix switch --- scripts/cpac_install.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index b5cac25ae0..231ae9c5df 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -264,6 +264,8 @@ function install_system_dependencies { 16.10) compile_libxp ;; + *) + echo "libxp is installed via apt for Ubuntu ${VERSION}" esac if [ $aptgetfail -ne 0 ] then From 28ecd1d9c06d466ef88f78554a25b62b5961cd80 Mon Sep 17 00:00:00 2001 From: root Date: Tue, 29 Nov 2016 20:45:41 +0000 Subject: [PATCH 22/37] Fixes to system dependency installation --- scripts/cpac_install.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index 231ae9c5df..64d4f8df6a 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -182,7 +182,7 @@ function get_missing_system_dependencies() echo "missing ${missing_system_dependencies[@]}" } -compile_libxp { +function compile_libxp { # Compiles libxp- this is necessary for some newer versions of Ubuntu # where the is no Debian package available. git clone https://cgit.freedesktop.org/xorg/lib/libXp/ @@ -1031,7 +1031,8 @@ if [ -f /etc/redhat-release ] then DISTRO=CENTOS VERSION=$(rpm -q --queryformat '%{VERSION}' centos-release) -elif [ -f /etc/lsb-release ] then +elif [ -f /etc/lsb-release ] +then source /etc/lsb-release DISTRO=${DISTRIB_ID^^} VERSION=${DISTRIB_RELEASE^^} From 9ed2b4827ede243e786f52d3c422dc374f721a63 Mon Sep 17 00:00:00 2001 From: John Pellman Date: Tue, 29 Nov 2016 21:43:44 +0000 Subject: [PATCH 23/37] Make sure that FSLDIR is set before install resources. 
--- scripts/cpac_install.sh | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index 64d4f8df6a..9f28a57aef 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -888,6 +888,11 @@ cpac_resdirs=("$FSLDIR/data/standard/tissuepriors/2mm" \ function install_cpac_resources { echo "Installing C-PAC Image Resources." + # Make sure FSLDIR is set. + if [ ! -z $FSLDIR ] + then + echo "[ $(date) ] : FSLDIR must be defined for C-PAC image resources to install." >> ~/cpac.log + fi # Determines if C-PAC image resources are all already installed. RES_PRES=1 for res in ${cpac_resources[@]} @@ -994,6 +999,7 @@ function install_cpac_env { then cat ~/cpac_env.sh >> ~/.bashrc rm ~/cpac_env.sh + source /etc/bash.bashrc elif [ $LOCAL -eq 0 ] then if [ -f /etc/profile.d/cpac_env.sh ] @@ -1003,8 +1009,10 @@ function install_cpac_env { # packages that weren't already in cpac_env.sh. cat ~/cpac_env.sh >> /etc/profile.d/cpac_env.sh rm ~/cpac_env.sh + source /etc/profile.d/cpac_env.sh else mv ~/cpac_env.sh /etc/profile.d/ + source /etc/profile.d/cpac_env.sh fi fi fi From 6770a8da1a65ba3d407b2775e98a4f6905999e8c Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Tue, 29 Nov 2016 18:13:09 -0500 Subject: [PATCH 24/37] Added a few more needed catches in the test_sublist function. --- CPAC/GUI/interface/windows/config_window.py | 35 ++++++++++++++++++--- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/CPAC/GUI/interface/windows/config_window.py b/CPAC/GUI/interface/windows/config_window.py index 6b497eee35..78c67b56e8 100644 --- a/CPAC/GUI/interface/windows/config_window.py +++ b/CPAC/GUI/interface/windows/config_window.py @@ -408,9 +408,28 @@ def test_sublist(self, sublist): anat_file = sub['anat'] func_files = sub['rest'] checked_anat_s3 = False + + if not anat_file: + err = "\n\n[!] Could not read in at least one of your anatom"\ + "ical input files. 
Please double-check the formatting "\ + "of your participant list YAML file.\n\n" + raise Exception(err) + + if not func_files: + err = "\n\n[!] Could not read in at least one of your functi"\ + "onal input files. Please double-check the formatting "\ + "of your participant list YAML file.\n\n" + raise Exception(err) + if anat_file.lower().startswith(s3_str): dl_dir = tempfile.mkdtemp() - creds_path = sub['creds_path'] + try: + creds_path = sub['creds_path'] + except KeyError: + # if no creds path is provided, it could be that the user + # is downloading public data - leave it to downstream to + # handle creds issues + creds_path = None anat_file = check_for_s3(anat_file, creds_path, dl_dir=dl_dir) checked_anat_s3 = True # Check if anatomical file exists @@ -429,7 +448,8 @@ def test_sublist(self, sublist): not_found_flg = True err_str_suffix = 'File not found: %s\n' % anat_file err_str = err_str + err_str_suffix - # If we're just checking s3 files, remove the temporarily downloaded + # If we're just checking s3 files, remove the temporarily + # downloaded if checked_anat_s3: try: os.remove(anat_file) @@ -441,7 +461,13 @@ def test_sublist(self, sublist): checked_s3 = False if func_file.lower().startswith(s3_str): dl_dir = tempfile.mkdtemp() - creds_path = sub['creds_path'] + try: + creds_path = sub['creds_path'] + except KeyError: + # if no creds path is provided, it could be that the + # user is downloading public data - leave it to down- + # stream to handle creds issues + creds_path = None func_file = check_for_s3(func_file, creds_path, dl_dir=dl_dir,img_type='func') checked_s3 = True # Check if functional file exists @@ -460,7 +486,8 @@ def test_sublist(self, sublist): not_found_flg = True err_str_suffix = 'File not found: %s\n' % func_file err_str = err_str + err_str_suffix - # If we're just checking s3 files, remove the temporarily downloaded + # If we're just checking s3 files, remove the temporarily + # downloaded if checked_s3: try: os.remove(func_file) 
From cab448e771292a485e514ee9a696d913a03bf6e6 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Wed, 30 Nov 2016 12:24:50 -0500 Subject: [PATCH 25/37] Repaired some of the config file and sublist checks. --- CPAC/GUI/interface/windows/config_window.py | 2 -- CPAC/utils/utils.py | 17 +++++++++-------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/CPAC/GUI/interface/windows/config_window.py b/CPAC/GUI/interface/windows/config_window.py index 78c67b56e8..a71c50e11d 100644 --- a/CPAC/GUI/interface/windows/config_window.py +++ b/CPAC/GUI/interface/windows/config_window.py @@ -385,7 +385,6 @@ def test_sublist(self, sublist): wx.OK | wx.ICON_ERROR) errDlg4.ShowModal() errDlg4.Destroy() - # Raise Exception raise Exception @@ -399,7 +398,6 @@ def test_sublist(self, sublist): wx.OK | wx.ICON_ERROR) errDlg3.ShowModal() errDlg3.Destroy() - # Raise Exception raise Exception diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 7a9381ce61..327f1fc79f 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -2158,9 +2158,9 @@ def check_config_resources(c): if c.runNetworkCentrality[0]: if sub_mem_gb < c.memoryAllocatedForDegreeCentrality: err_msg = 'Memory allocated for subject: %d needs to be greater '\ - 'than the memory allocated for centrality: %d. Fix and '\ - 'try again.' % (c.maximumMemoryPerParticipant, - c.memoryAllocatedForDegreeCentrality) + 'than the memory allocated for centrality: %d. Fix '\ + 'and try again.' % (c.maximumMemoryPerParticipant, + c.memoryAllocatedForDegreeCentrality) raise Exception(err_msg) # Check for pipeline threads @@ -2168,8 +2168,8 @@ def check_config_resources(c): if c.maxCoresPerParticipant: total_user_cores = c.numParticipantsAtOnce*c.maxCoresPerParticipant if total_user_cores > num_cores: - err_msg = 'Config file specifies more subjects running in '\ - 'parallel than number of threads available. Change '\ + err_msg = 'Config file specifies more subjects running in ' \ + 'parallel than number of threads available. 
Change ' \ 'this and try again' raise Exception(err_msg) else: @@ -2182,9 +2182,10 @@ def check_config_resources(c): if c.num_ants_threads is None: num_ants_cores = num_cores_per_sub elif c.num_ants_threads > c.maxCoresPerParticipant: - err_msg = 'Number of threads for ANTS: %d is greater than the '\ - 'number of threads per subject: %d. Change this and '\ - 'try again.' + err_msg = 'Number of threads for ANTS: %d is greater than the ' \ + 'number of threads per subject: %d. Change this and ' \ + 'try again.' % (c.num_ants_threads, + c.maxCoresPerParticipant) raise Exception(err_msg) else: num_ants_cores = c.num_ants_threads From ee39ce92c09aff2a6e0ae8f6c82dc5278db8e4a5 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Wed, 30 Nov 2016 17:17:35 -0500 Subject: [PATCH 26/37] Updated the sublist builder. Included log message regarding missing participant IDs listed in Subjects to Include, and missing site IDs listed in Sites to Include. --- CPAC/GUI/interface/windows/config_window.py | 5 ++++- CPAC/utils/build_sublist.py | 20 ++++++++++++++++++++ test/unit/utils/build_sublist_test.py | 5 ++++- 3 files changed, 28 insertions(+), 2 deletions(-) diff --git a/CPAC/GUI/interface/windows/config_window.py b/CPAC/GUI/interface/windows/config_window.py index a71c50e11d..2520759892 100644 --- a/CPAC/GUI/interface/windows/config_window.py +++ b/CPAC/GUI/interface/windows/config_window.py @@ -404,7 +404,10 @@ def test_sublist(self, sublist): # Iterate and test each subject's files for sub in sublist: anat_file = sub['anat'] - func_files = sub['rest'] + try: + func_files = sub['func'] + except KeyError: + func_files = sub['rest'] checked_anat_s3 = False if not anat_file: diff --git a/CPAC/utils/build_sublist.py b/CPAC/utils/build_sublist.py index b73b91d3a2..1a2ba39098 100644 --- a/CPAC/utils/build_sublist.py +++ b/CPAC/utils/build_sublist.py @@ -359,6 +359,16 @@ def filter_sub_paths(sub_paths, include_sites, include_subs, exclude_subs, site_kw, path_template) in \ include_sites, 
sub_paths) + # note which site IDs in "subjects to include" are missing + missing = list(include_sites) + for site_id in site_matches: + for include in include_sites: + if include in site_id: + if include in missing: + missing.remove(include) + if len(missing) > 0: + logger.info("Site IDs marked in 'Sites to Include' not found:" \ + "\n%s" % str(missing)) keep_site_paths.extend(site_matches) else: logger.info('Not filtering out any potential sites...') @@ -389,6 +399,16 @@ def filter_sub_paths(sub_paths, include_sites, include_subs, exclude_subs, ppant_kw, path_template) in \ include_subs, sub_paths) + # note which participant IDs in "subjects to include" are missing + missing = list(include_subs) + for subj_path in subj_matches: + for include in include_subs: + if include in subj_path: + if include in missing: + missing.remove(include) + if len(missing) > 0: + logger.info("Participant IDs marked in 'Subjects to Include' "\ + "not found:\n%s" % str(missing)) keep_subj_paths.extend(subj_matches) # Or exclude only elif exclude_subs is not None: diff --git a/test/unit/utils/build_sublist_test.py b/test/unit/utils/build_sublist_test.py index 4d1db12e2d..6db47b1a26 100644 --- a/test/unit/utils/build_sublist_test.py +++ b/test/unit/utils/build_sublist_test.py @@ -102,7 +102,10 @@ def _return_filepaths(self, sublist): # Iterate through the list and extract paths for sub_dict in sublist: anat = sub_dict['anat'] - funcs = [rest for rest in sub_dict['rest'].values()] + try: + funcs = [rest for rest in sub_dict['func'].values()] + except KeyError: + funcs = [rest for rest in sub_dict['rest'].values()] file_paths.append(anat) file_paths.extend(funcs) From 61c697e8dccba3bb89f12c657bd9e2fc2be11049 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Thu, 1 Dec 2016 14:42:58 -0500 Subject: [PATCH 27/37] Removed potentially problematic break in sublist tester. 
--- CPAC/GUI/interface/windows/config_window.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/CPAC/GUI/interface/windows/config_window.py b/CPAC/GUI/interface/windows/config_window.py index 2520759892..8364488e3c 100644 --- a/CPAC/GUI/interface/windows/config_window.py +++ b/CPAC/GUI/interface/windows/config_window.py @@ -456,7 +456,6 @@ def test_sublist(self, sublist): os.remove(anat_file) except: pass - break # For each functional file for func_file in func_files.values(): checked_s3 = False @@ -494,7 +493,6 @@ def test_sublist(self, sublist): os.remove(func_file) except: pass - break # Check flags for error message if not_found_flg: err_msg = 'One or more of your input files are missing.\n' From 2e8e917cf6452625df88c3651c4a374e7adaab3d Mon Sep 17 00:00:00 2001 From: John Pellman Date: Thu, 1 Dec 2016 16:46:01 -0500 Subject: [PATCH 28/37] Install: new package name for GSL libraries in Ubuntu >= 16.04 --- scripts/cpac_install.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/cpac_install.sh b/scripts/cpac_install.sh index 9f28a57aef..55f5270ef2 100755 --- a/scripts/cpac_install.sh +++ b/scripts/cpac_install.sh @@ -65,19 +65,19 @@ centos7_packages=("mesa-libGLU-9.0.0-4.el7.x86_64" "gsl-1.15-13.el7.x86_64"\ # are all of the ubuntu packages that are common across different versions of Ubuntu ubuntu_packages=("cmake" "git" "graphviz" "graphviz-dev" "gsl-bin" "libcanberra-gtk-module" \ "libexpat1-dev" "libgiftiio-dev" "libglib2.0-dev" "libglu1-mesa" "libglu1-mesa-dev" \ - "libgsl0-dev" "libjpeg-progs" "libxml2" "libxml2-dev" "libxext-dev" \ + "libjpeg-progs" "libxml2" "libxml2-dev" "libxext-dev" \ "libxft2" "libxft-dev" "libxi-dev" "libxmu-headers" "libxmu-dev" "libxpm-dev" "libxslt1-dev" \ "make" "mesa-common-dev" "mesa-utils" "netpbm" "pkg-config" \ "build-essential" "xvfb" "xauth" "libgl1-mesa-dri" "tcsh" "unzip" "zlib1g-dev" "m4") # configuration options that are specific to Ubuntu 12.04 
-ubuntu1204_packages=("lesstif2-dev" "libxp6" "libxp-dev" ) +ubuntu1204_packages=("lesstif2-dev" "libxp6" "libxp-dev" "libgsl0-dev" ) # configuration options that are specific to Ubuntu 14.04 -ubuntu1404_packages=("libmotif-dev" "libxp6" "libxp-dev" ) +ubuntu1404_packages=("libmotif-dev" "libxp6" "libxp-dev" "libgsl0-dev" ) # configuration options that are specific to Ubuntu 16.04 -ubuntu1604_packages=("libmotif-dev" "xutils-dev" "libtool" "libx11-dev" "x11proto-xext-dev" "x11proto-print-dev" "dh-autoreconf" "libxext-dev") +ubuntu1604_packages=("libmotif-dev" "xutils-dev" "libtool" "libx11-dev" "x11proto-xext-dev" "x11proto-print-dev" "dh-autoreconf" "libxext-dev" "libgsl-dev" ) # configuration options that are specific to Ubuntu 16.10 -ubuntu1610_packages=("libmotif-dev" "xutils-dev" "libtool" "libx11-dev" "x11proto-xext-dev" "x11proto-print-dev" "dh-autoreconf" "libxext-dev") +ubuntu1610_packages=("libmotif-dev" "xutils-dev" "libtool" "libx11-dev" "x11proto-xext-dev" "x11proto-print-dev" "dh-autoreconf" "libxext-dev" "libgsl-dev") conda_packages=("pandas" "cython" "numpy" "scipy" "matplotlib" "networkx" "traits" "pyyaml" "jinja2" "nose" "ipython" "pip" "wxpython") From cb070318ac4bdb795b1d92c6323025f35d920890 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Thu, 1 Dec 2016 17:20:19 -0500 Subject: [PATCH 29/37] Sites to Include and Subjects to Include now both work together simultaneously. 
--- CPAC/GUI/interface/windows/config_window.py | 3 ++- CPAC/utils/build_sublist.py | 9 ++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/CPAC/GUI/interface/windows/config_window.py b/CPAC/GUI/interface/windows/config_window.py index 8364488e3c..3546266c57 100644 --- a/CPAC/GUI/interface/windows/config_window.py +++ b/CPAC/GUI/interface/windows/config_window.py @@ -795,7 +795,8 @@ def testFile(filepath, paramName, switch): self, 'There are issues with the current configuration ' \ 'which need to be resolved - please check to make ' \ 'sure the options you are running have the proper ' \ - 'pre-requisites selected.\n\nIssue Info:\n%s' % xxx, + 'pre-requisites selected.\n\nIssue Info:\n%s' \ + % str(xxx), 'Pipeline Not Ready', wx.OK | wx.ICON_ERROR) errDlg1.ShowModal() diff --git a/CPAC/utils/build_sublist.py b/CPAC/utils/build_sublist.py index 1a2ba39098..a612f31364 100644 --- a/CPAC/utils/build_sublist.py +++ b/CPAC/utils/build_sublist.py @@ -348,9 +348,9 @@ def filter_sub_paths(sub_paths, include_sites, include_subs, exclude_subs, logger = logging.getLogger('sublist_builder') # Check if {site} was specified + keep_site_paths = [] if site_kw in path_template and include_sites is not None: # Filter out sites that are not included - keep_site_paths = [] if type(include_sites) is not list: include_sites = [include_sites] logger.info('Only including sites: %s' % include_sites) @@ -410,6 +410,10 @@ def filter_sub_paths(sub_paths, include_sites, include_subs, exclude_subs, logger.info("Participant IDs marked in 'Subjects to Include' "\ "not found:\n%s" % str(missing)) keep_subj_paths.extend(subj_matches) + + if keep_site_paths: + keep_subj_paths = list(set(keep_subj_paths) & set(keep_site_paths)) + # Or exclude only elif exclude_subs is not None: keep_subj_paths = [] @@ -431,6 +435,9 @@ def filter_sub_paths(sub_paths, include_sites, include_subs, exclude_subs, exclude_subs, sub_paths) keep_subj_paths.extend(subj_matches) + if keep_site_paths: + 
keep_subj_paths = list(set(keep_subj_paths) & set(keep_site_paths)) + else: keep_subj_paths = keep_site_paths From 63bbf20d2f3ca3d5b66e82ca0109477b3ed091e5 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Fri, 2 Dec 2016 16:32:55 -0500 Subject: [PATCH 30/37] Reference fix. --- CPAC/GUI/interface/windows/config_window.py | 2 +- CPAC/pipeline/cpac_pipeline.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/CPAC/GUI/interface/windows/config_window.py b/CPAC/GUI/interface/windows/config_window.py index 3546266c57..31d39008be 100644 --- a/CPAC/GUI/interface/windows/config_window.py +++ b/CPAC/GUI/interface/windows/config_window.py @@ -787,7 +787,7 @@ def testFile(filepath, paramName, switch): except Exception as xxx: print xxx - print "an exception occured" + print "an exception occurred" testDlg1.Destroy() diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index eba39f509b..f45ebdbf2b 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -2155,7 +2155,7 @@ def collect_transforms_func_mni(func_name): def ants_apply_warps_func_mni(input_node, input_outfile, \ - ref_node, ref_outfile, func_name, interp, \ + ref_node, ref_outfile, standard, func_name, interp, \ input_image_type): # converts FSL-format .mat affine xfm into ANTS-format @@ -2177,14 +2177,10 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ (func_name, num_strat), ants_threads=int(num_ants_cores)) - workflow.connect(ref_node, ref_outfile, - apply_ants_warp_func_mni, 'inputspec.reference_image') - + apply_ants_warp_func_mni.inputs.inputspec.reference_image = standard apply_ants_warp_func_mni.inputs.inputspec.dimension = 3 - apply_ants_warp_func_mni.inputs.inputspec. 
\ interpolation = interp - # input_image_type: # (0 or 1 or 2 or 3) # Option specifying the input image type of scalar @@ -2271,6 +2267,7 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ strat.get_node_from_resource_pool("mean_functional") ants_apply_warps_func_mni(node, out_file, node2, out_file2, + c.template_brain_only_for_func, "functional_to_standard", "Linear", 3) @@ -2281,6 +2278,7 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ strat.get_node_from_resource_pool("mean_functional") ants_apply_warps_func_mni(node, out_file, node2, out_file2, + c.template_brain_only_for_func, "motion_correct_to_standard", "Linear", 3) @@ -2289,6 +2287,7 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ strat.get_node_from_resource_pool("functional_brain_mask") ants_apply_warps_func_mni(node, out_file, node, out_file, + c.template_brain_only_for_func, "functional_brain_mask_to_standard", "NearestNeighbor", 0) @@ -2297,6 +2296,7 @@ def ants_apply_warps_func_mni(input_node, input_outfile, \ strat.get_node_from_resource_pool("mean_functional") ants_apply_warps_func_mni(node, out_file, node, out_file, + c.template_brain_only_for_func, "mean_functional_to_standard", "Linear", 0) From d33e39ee3058b06ecb862c433a7529a1e03f348f Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Mon, 5 Dec 2016 16:10:09 -0500 Subject: [PATCH 31/37] Added new functional_nuisance_regressors outputs to utils --- CPAC/qc/utils.py | 3 --- CPAC/utils/utils.py | 1 + 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/CPAC/qc/utils.py b/CPAC/qc/utils.py index 511ee62a52..2f693bc384 100644 --- a/CPAC/qc/utils.py +++ b/CPAC/qc/utils.py @@ -1368,9 +1368,6 @@ def cal_snr_val(measure_file): return avg_snr_file - - - def gen_std_dev(mask_, func_): """ diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 327f1fc79f..202ac1c7a5 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -48,6 +48,7 @@ 'functional_csf_mask':'segmentation', 
'frame_wise_displacement':'parameters', 'functional_nuisance_residuals':'func', + 'functional_nuisance_regressors':'func', 'functional_median_angle_corrected':'func', 'power_spectrum_distribution':'alff', 'functional_freq_filtered':'func', From 44e97acc24a36f002c6d68bb5e254885f844650f Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Tue, 6 Dec 2016 13:03:18 -0500 Subject: [PATCH 32/37] "Sublist name" save window would not close on "Cancel". --- CPAC/GUI/interface/windows/dataconfig_window.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/CPAC/GUI/interface/windows/dataconfig_window.py b/CPAC/GUI/interface/windows/dataconfig_window.py index 4661ba69f8..e39b9cd1de 100644 --- a/CPAC/GUI/interface/windows/dataconfig_window.py +++ b/CPAC/GUI/interface/windows/dataconfig_window.py @@ -261,9 +261,9 @@ def run(self, config): CPAC.utils.extract_data.generate_supplementary_files(sublist_outdir, sublist_name) # Prompt user with naming subject list for main GUI + dlg2 = wx.TextEntryDialog(self, 'Please enter a name for the Subject List', + 'Sublist Name', '%s' % sublist_name) while True: - dlg2 = wx.TextEntryDialog(self, 'Please enter a name for the Subject List', - 'Sublist Name', '%s' % sublist_name) if dlg2.ShowModal() == wx.ID_OK: if len(dlg2.GetValue()) >0: parent = self.Parent @@ -272,14 +272,19 @@ def run(self, config): map[dlg2.GetValue()]= out_location parent.listbox2.Append(dlg2.GetValue()) dlg2.Destroy() + ret = 1 break else: dlg3 = wx.MessageDialog(self, 'Subject List with this name already exist','Error!', wx.OK | wx.ICON_ERROR) dlg3.ShowModal() dlg3.Destroy() + elif dlg2.ShowModal() == wx.ID_CANCEL: + dlg2.Destroy() + ret = -1 + break # Return value - return 1 + return ret # Import error if CPAC not available except ImportError as exc: From 6f9e66520efc2ba83654e3de8ac47eabe343e106 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Tue, 6 Dec 2016 19:11:27 -0500 Subject: [PATCH 33/37] Made the subject list builder more robust to 
keyword/template layouts, multiple keywords, different keywords in one directory level, etc. --- CPAC/utils/build_sublist.py | 230 +++++++++++++++++++++++------------- 1 file changed, 150 insertions(+), 80 deletions(-) diff --git a/CPAC/utils/build_sublist.py b/CPAC/utils/build_sublist.py index a612f31364..45e0429cbe 100644 --- a/CPAC/utils/build_sublist.py +++ b/CPAC/utils/build_sublist.py @@ -165,21 +165,25 @@ def extract_keyword_from_path(filepath, keyword, template): import logging # Init variables + # creates a list of directory levels of the filepath, and of the template temp_split = template.split('/') fp_split = filepath.split('/') # Get logger - logger = logging.getLogger('sublist_builder') + #logger = logging.getLogger('sublist_builder') - # Extract directory name of the + # Extract directory name of the keyword, from the template kw_dirname = [dir for dir in temp_split if keyword in dir] # If the keyword is in the template, extract string from filepath if len(kw_dirname) > 0: # Get the directory fullname from template, as well as any surrounding kw_dirname = kw_dirname[0] + # kw_idx is the index of where in the template path the keyword is kw_idx = temp_split.index(kw_dirname) - # Extract directory with key string in it from filepath + # Extract directory with key string in it from filepath, i.e. 
if this + # is for {participant}, key_str will be the participant ID string from + # the filepath key_str = fp_split[kw_idx] # Get the prefix and suffix surrounding keyword kw_prefix = kw_dirname.split(keyword)[0] @@ -199,6 +203,8 @@ def extract_keyword_from_path(filepath, keyword, template): kw_prefix = kw_prefix[:-1] # Make sure what is left is more than '' + # This will not run if the keyword was the only thing in the directory + # level if kw_prefix != '': # Find the previous '*' from the right prev_star_in_prefix = kw_prefix.rfind('*') @@ -228,6 +234,8 @@ def extract_keyword_from_path(filepath, keyword, template): kw_suffix = kw_suffix[1:] # Make sure what is left is more than '' + # This will not run if the keyword was the only thing in the directory + # level if kw_suffix != '': # Find the next '*' from the left next_star_in_suffix = kw_suffix.find('*') @@ -473,16 +481,22 @@ def return_dir_indices(path_template): # Get folder level indices of site and subject - anat fp_split = path_template.split('/') - site_idx = fp_split.index('{site}') - ppant_idx = fp_split.index('{participant}') # Session level isn't required, but recommended - try: - sess_idx = fp_split.index('{session}') - except ValueError as exc: - sess_idx = None #ppant_idx+1 - - # Return indices - return site_idx, ppant_idx, sess_idx + sess_idx = None + sess_extra = None + for dir_level in fp_split: + if "{site}" in dir_level: + site_idx = fp_split.index(dir_level) + site_extra = filter(bool,dir_level.split("{site}")) + if "{participant}" in dir_level: + ppant_idx = fp_split.index(dir_level) + ppant_extra = filter(bool,dir_level.split("{participant}")) + if "{session}" in dir_level: + sess_idx = fp_split.index(dir_level) + sess_extra = filter(bool,dir_level.split("{session}")) + + # Return extra characters + return site_idx, ppant_idx, sess_idx, site_extra, ppant_extra, sess_extra # Return matching filepaths @@ -849,12 +863,6 @@ def build_sublist(data_config_yml): # If session index is ppant 
index, then no session dir if anat_ppant_idx == anat_sess_idx: anat_sess_idx = func_sess_idx = None - # Get indices from {site} {ppant} {session} identifiers - else: - anat_site_idx, anat_ppant_idx, anat_sess_idx = \ - return_dir_indices(anat_template) - func_site_idx, func_ppant_idx, func_sess_idx = \ - return_dir_indices(func_template) # Filter out unwanted anat and func filepaths logger.info('Filtering anatomical files...') @@ -882,69 +890,131 @@ def build_sublist(data_config_yml): site_scan_params = {} # Iterate through file paths and build subject list - for anat in anat_paths: - anat_sp = anat.split('/') - subj = anat_sp[anat_ppant_idx] - try: - sess = anat_sp[anat_sess_idx] - except TypeError: - sess = "ses-1" - if bids_flag: - site = '' - else: - site = anat_sp[anat_site_idx] - subj_d = {'anat' : anat, 'creds_path' : creds_path, 'func' : {}, - 'subject_id' : subj, 'unique_id' : sess, - 'scan_parameters': site_scan_params} - tmp_key = '_'.join([subj, site, sess]) - tmp_dict[tmp_key] = subj_d - - # Now go through and populate functional scans dictionaries - for func in func_paths: - # Extract info from filepath - func_sp = func.split('/') - subj = func_sp[func_ppant_idx] - try: - sess = func_sp[func_sess_idx] - except TypeError: - sess = "ses-1" - if bids_flag: - site = '' - scan_params = bids_metadata.get_metadata_for_nifti(bids_base_dir, - func) - else: - site = func_sp[func_site_idx] - if scan_params_csv is not None: - try: - scan_params = site_scan_params[site] - except KeyError as exc: - print 'Site %s missing from scan parameters csv, skipping...'\ - % site - scan_params = None - - # If there is no scan sub-folder under session, make scan - # the name of the image itself without extension - if func_sess_idx == len(func_sp)-2: - scan = func_sp[-1].split('.nii')[0] - # Othwerwise, there use scan sub folder - else: - scan = func_sp[-2] - - # Build tmp key and get subject dictionary from tmp dictionary - tmp_key = '_'.join([subj, site, sess]) - - # Try and 
find the associated anat scan - try: - subj_d = tmp_dict[tmp_key] - except KeyError as exc: - logger.info('Unable to find anatomical image for %s. Skipping...'\ - % tmp_key) - continue + if bids_flag: + for anat in anat_paths: + anat_sp = anat.split('/') + subj = anat_sp[anat_ppant_idx] + try: + sess = anat_sp[anat_sess_idx] + except TypeError: + sess = "ses-1" + if bids_flag: + site = '' + else: + site = anat_sp[anat_site_idx] + subj_d = {'anat' : anat, 'creds_path' : creds_path, 'func' : {}, + 'subject_id' : subj, 'unique_id' : sess, + 'scan_parameters': site_scan_params} + tmp_key = '_'.join([subj, site, sess]) + tmp_dict[tmp_key] = subj_d + + # Now go through and populate functional scans dictionaries + for func in func_paths: + # Extract info from filepath + func_sp = func.split('/') + subj = func_sp[func_ppant_idx] + try: + sess = func_sp[func_sess_idx] + except TypeError: + sess = "ses-1" + if bids_flag: + site = '' + scan_params = bids_metadata.get_metadata_for_nifti(bids_base_dir, + func) + else: + site = func_sp[func_site_idx] + if scan_params_csv is not None: + try: + scan_params = site_scan_params[site] + except KeyError as exc: + print 'Site %s missing from scan parameters csv, skipping...'\ + % site + scan_params = None + + # If there is no scan sub-folder under session, make scan + # the name of the image itself without extension + if func_sess_idx == len(func_sp)-2: + scan = func_sp[-1].split('.nii')[0] + # Othwerwise, there use scan sub folder + else: + scan = func_sp[-2] + + # Build tmp key and get subject dictionary from tmp dictionary + tmp_key = '_'.join([subj, site, sess]) + + # Try and find the associated anat scan + try: + subj_d = tmp_dict[tmp_key] + except KeyError as exc: + logger.info('Unable to find anatomical image for %s. 
Skipping...'\ + % tmp_key) + continue + + # Set the rest dictionary with the scan + subj_d['func'][scan] = func + # And replace it back in the dictionary + tmp_dict[tmp_key] = subj_d + else: + for anat in anat_paths: + subj = extract_keyword_from_path(anat, "{participant}", anat_template) + try: + sess = extract_keyword_from_path(anat, "{session}", anat_template) + except TypeError: + sess = "ses-1" + if bids_flag: + site = "" + else: + site = extract_keyword_from_path(anat, "{site}", anat_template) + subj_d = {'anat' : anat, 'creds_path' : creds_path, 'func' : {}, + 'subject_id' : subj, 'unique_id' : sess, + 'scan_parameters': site_scan_params} + tmp_key = '_'.join([subj, site, sess]) + tmp_dict[tmp_key] = subj_d + + # Now go through and populate functional scans dictionaries + for func in func_paths: + func_sp = func.split('/') + + subj = extract_keyword_from_path(func, "{participant}", func_template) + try: + sess = extract_keyword_from_path(func, "{session}", func_template) + except TypeError: + sess = "ses-1" + if bids_flag: + site = "" + scan_params = bids_metadata.get_metadata_for_nifti(bids_base_dir, + func) + else: + site = extract_keyword_from_path(func, "{site}", func_template) + if scan_params_csv is not None: + try: + scan_params = site_scan_params[site] + except KeyError as exc: + print 'Site %s missing from scan parameters csv, skipping...'\ + % site + scan_params = None + + # Build tmp key and get subject dictionary from tmp dictionary + tmp_key = '_'.join([subj, site, sess]) + # Try and find the associated anat scan + try: + subj_d = tmp_dict[tmp_key] + except KeyError as exc: + logger.info('Unable to find anatomical image for %s. 
Skipping...'\ + % tmp_key) + continue + + # If there is no scan sub-folder, make scan + # the name of the image itself without extension + if "{series}" in func_template: + scan = extract_keyword_from_path(func, "{series}", func_template) + else: + scan = func_sp[-1].split('.nii')[0] - # Set the rest dictionary with the scan - subj_d['func'][scan] = func - # And replace it back in the dictionary - tmp_dict[tmp_key] = subj_d + # Set the rest dictionary with the scan + subj_d['func'][scan] = func + # And replace it back in the dictionary + tmp_dict[tmp_key] = subj_d # Build a subject list from dictionary values sublist = [] From 0548a677162808d1601c73557adaa0cdb31b20e3 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Wed, 7 Dec 2016 11:59:18 -0500 Subject: [PATCH 34/37] Fixed potential error in sublist builder that would include all site scan params under every participant in the subject list. --- CPAC/utils/build_sublist.py | 56 ++++++++++++------------------------- 1 file changed, 18 insertions(+), 38 deletions(-) diff --git a/CPAC/utils/build_sublist.py b/CPAC/utils/build_sublist.py index 45e0429cbe..cc1629ee3a 100644 --- a/CPAC/utils/build_sublist.py +++ b/CPAC/utils/build_sublist.py @@ -898,13 +898,9 @@ def build_sublist(data_config_yml): sess = anat_sp[anat_sess_idx] except TypeError: sess = "ses-1" - if bids_flag: - site = '' - else: - site = anat_sp[anat_site_idx] + site = '' subj_d = {'anat' : anat, 'creds_path' : creds_path, 'func' : {}, - 'subject_id' : subj, 'unique_id' : sess, - 'scan_parameters': site_scan_params} + 'subject_id' : subj, 'unique_id' : sess} tmp_key = '_'.join([subj, site, sess]) tmp_dict[tmp_key] = subj_d @@ -917,19 +913,11 @@ def build_sublist(data_config_yml): sess = func_sp[func_sess_idx] except TypeError: sess = "ses-1" - if bids_flag: - site = '' - scan_params = bids_metadata.get_metadata_for_nifti(bids_base_dir, - func) - else: - site = func_sp[func_site_idx] - if scan_params_csv is not None: - try: - scan_params = 
site_scan_params[site] - except KeyError as exc: - print 'Site %s missing from scan parameters csv, skipping...'\ - % site - scan_params = None + site = '' + scan_params = None + scan_params = bids_metadata.get_metadata_for_nifti(bids_base_dir, + func) + subj_d["scan_parameters"] = scan_params # If there is no scan sub-folder under session, make scan # the name of the image itself without extension @@ -961,13 +949,9 @@ def build_sublist(data_config_yml): sess = extract_keyword_from_path(anat, "{session}", anat_template) except TypeError: sess = "ses-1" - if bids_flag: - site = "" - else: - site = extract_keyword_from_path(anat, "{site}", anat_template) + site = extract_keyword_from_path(anat, "{site}", anat_template) subj_d = {'anat' : anat, 'creds_path' : creds_path, 'func' : {}, - 'subject_id' : subj, 'unique_id' : sess, - 'scan_parameters': site_scan_params} + 'subject_id' : subj, 'unique_id' : sess} tmp_key = '_'.join([subj, site, sess]) tmp_dict[tmp_key] = subj_d @@ -980,19 +964,15 @@ def build_sublist(data_config_yml): sess = extract_keyword_from_path(func, "{session}", func_template) except TypeError: sess = "ses-1" - if bids_flag: - site = "" - scan_params = bids_metadata.get_metadata_for_nifti(bids_base_dir, - func) - else: - site = extract_keyword_from_path(func, "{site}", func_template) - if scan_params_csv is not None: - try: - scan_params = site_scan_params[site] - except KeyError as exc: - print 'Site %s missing from scan parameters csv, skipping...'\ - % site - scan_params = None + site = extract_keyword_from_path(func, "{site}", func_template) + scan_params = None + if scan_params_csv is not None: + try: + scan_params = site_scan_params[site] + except KeyError as exc: + print 'Site %s missing from scan parameters csv, skipping...'\ + % site + subj_d["scan_parameters"] = scan_params # Build tmp key and get subject dictionary from tmp dictionary tmp_key = '_'.join([subj, site, sess]) From 30a081137eb921a1ef6cd8ba7f7c319dbb779334 Mon Sep 17 00:00:00 
2001 From: sgiavasis Date: Wed, 7 Dec 2016 12:06:36 -0500 Subject: [PATCH 35/37] Nested scan params under each functional scan. --- CPAC/utils/build_sublist.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/utils/build_sublist.py b/CPAC/utils/build_sublist.py index cc1629ee3a..0521a45084 100644 --- a/CPAC/utils/build_sublist.py +++ b/CPAC/utils/build_sublist.py @@ -917,7 +917,6 @@ def build_sublist(data_config_yml): scan_params = None scan_params = bids_metadata.get_metadata_for_nifti(bids_base_dir, func) - subj_d["scan_parameters"] = scan_params # If there is no scan sub-folder under session, make scan # the name of the image itself without extension @@ -940,6 +939,7 @@ def build_sublist(data_config_yml): # Set the rest dictionary with the scan subj_d['func'][scan] = func + subj_d['func'][scan]["scan_parameters"] = scan_params # And replace it back in the dictionary tmp_dict[tmp_key] = subj_d else: @@ -972,7 +972,6 @@ def build_sublist(data_config_yml): except KeyError as exc: print 'Site %s missing from scan parameters csv, skipping...'\ % site - subj_d["scan_parameters"] = scan_params # Build tmp key and get subject dictionary from tmp dictionary tmp_key = '_'.join([subj, site, sess]) @@ -993,6 +992,7 @@ def build_sublist(data_config_yml): # Set the rest dictionary with the scan subj_d['func'][scan] = func + subj_d['func'][scan]["scan_parameters"] = scan_params # And replace it back in the dictionary tmp_dict[tmp_key] = subj_d From 4aeff90808becb66ed1f32d2f51be019adc9eb90 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Wed, 7 Dec 2016 12:42:31 -0500 Subject: [PATCH 36/37] Added versioning to pipeline configuration files, and fixed an error that was sending derivatives to be warped to standard using FSL tools when it was an ANTS pipeline. 
--- CPAC/GUI/interface/windows/config_window.py | 4 ++++ CPAC/pipeline/cpac_pipeline.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CPAC/GUI/interface/windows/config_window.py b/CPAC/GUI/interface/windows/config_window.py index 31d39008be..6262a6fb0e 100644 --- a/CPAC/GUI/interface/windows/config_window.py +++ b/CPAC/GUI/interface/windows/config_window.py @@ -1024,11 +1024,15 @@ def update_listbox(self, value): def write(self, path, config_list): import ast + import CPAC try: f = open(path, 'w') + print >>f, "# CPAC Pipeline Configuration YAML file" + print >>f, "# version %s\n" % str(CPAC.__version__) + for item in config_list: label = item.get_name() diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index f45ebdbf2b..7e00e9cd48 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -3432,7 +3432,7 @@ def output_to_standard(output_name, output_resource, strat, num_strat, nodes = getNodeList(strat) - if 'apply_ants_warp_functional_mni' in nodes: + if 'apply_ants_warp_functional_to_standard' in nodes: # ANTS WARP APPLICATION From 10343605b569fcb1075049ed684fbb71a767f661 Mon Sep 17 00:00:00 2001 From: sgiavasis Date: Wed, 7 Dec 2016 16:31:12 -0500 Subject: [PATCH 37/37] Repaired scan params dict assignment. 
--- CPAC/utils/build_sublist.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/utils/build_sublist.py b/CPAC/utils/build_sublist.py index 0521a45084..468d1ded25 100644 --- a/CPAC/utils/build_sublist.py +++ b/CPAC/utils/build_sublist.py @@ -939,7 +939,7 @@ def build_sublist(data_config_yml): # Set the rest dictionary with the scan subj_d['func'][scan] = func - subj_d['func'][scan]["scan_parameters"] = scan_params + subj_d['func']["scan_parameters"] = scan_params # And replace it back in the dictionary tmp_dict[tmp_key] = subj_d else: @@ -992,7 +992,7 @@ def build_sublist(data_config_yml): # Set the rest dictionary with the scan subj_d['func'][scan] = func - subj_d['func'][scan]["scan_parameters"] = scan_params + subj_d['func']["scan_parameters"] = scan_params # And replace it back in the dictionary tmp_dict[tmp_key] = subj_d