diff --git a/.circleci/config.yml b/.circleci/config.yml index 4f1558d6d7..5930e43ab3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -64,14 +64,21 @@ _download_test_data: &_download_test_data environment: OSF_NIPYPE_URL: "https://files.osf.io/v1/resources/nefdp/providers/osfstorage" command: | + set -x export DATA_NIPYPE_TUTORIAL_URL="${OSF_NIPYPE_URL}/57f4739cb83f6901ed94bf21" - curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_TUTORIAL_URL" | tar xj + if [[ ! -d nipype-tutorial ]]; then + curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_TUTORIAL_URL" | tar xjv + fi export DATA_NIPYPE_FSL_COURSE="${OSF_NIPYPE_URL}/57f472cf9ad5a101f977ecfe" - curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_COURSE" | tar xz + if [[ ! -d nipype-fsl_course_data ]]; then + curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_COURSE" | tar xzv + fi export DATA_NIPYPE_FSL_FEEDS="${OSF_NIPYPE_URL}/57f473066c613b01f113e7af" - curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_FEEDS" | tar xz + if [[ ! -d feeds ]]; then + curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_FEEDS" | tar xzv + fi _prepare_working_directory: &prepare_working_directory name: Prepare working directory @@ -102,7 +109,7 @@ version: 2 jobs: compare_base_dockerfiles: docker: - - image: docker:17.10.0-ce-git + - image: cimg/base:2022.04 steps: - checkout: path: /home/circleci/nipype @@ -112,10 +119,10 @@ jobs: working_directory: /home/circleci/nipype/docker command: | mkdir -p /tmp/docker - ash ./generate_dockerfiles.sh -b + bash ./generate_dockerfiles.sh -b # Use the sha256 sum of the pruned Dockerfile as the cache key. 
- ash prune_dockerfile.sh Dockerfile.base > /tmp/docker/Dockerfile.base-pruned + bash prune_dockerfile.sh Dockerfile.base > /tmp/docker/Dockerfile.base-pruned - restore_cache: key: dockerfile-cache-v1-master-{{ checksum "/tmp/docker/Dockerfile.base-pruned" }} - run: @@ -134,6 +141,23 @@ jobs: - docker/Dockerfile.base-pruned - docker/get_base_image.sh + get_test_data: + machine: *machine_kwds + working_directory: /home/circleci/nipype + steps: + - restore_cache: + keys: + - data-v0-{{ .Branch }}-{{ .Revision }} + - data-v0--{{ .Revision }} + - data-v0-{{ .Branch }}- + - data-v0-master- + - data-v0- + - run: *_download_test_data + - save_cache: + key: data-v0-{{ .Branch }}-{{ .Revision }} + paths: + - /home/circleci/examples + test_pytest: machine: *machine_kwds working_directory: /home/circleci/nipype @@ -142,13 +166,15 @@ jobs: path: /home/circleci/nipype - attach_workspace: at: /tmp + - restore_cache: + keys: + - data-v0-{{ .Branch }}-{{ .Revision }} - run: *set_pr_number - run: *generate_dockerfiles - run: *modify_nipype_version - run: *get_base_image - run: *build_main_image_py38 - run: *_get_codecov - - run: *_download_test_data - run: *prepare_working_directory - run: name: Run pytests @@ -248,6 +274,13 @@ workflows: - /docs?\/.*/ tags: only: /.*/ + - get_test_data: + filters: + branches: + ignore: + - /docs?\/.*/ + tags: + only: /.*/ - test_pytest: filters: branches: @@ -257,6 +290,7 @@ workflows: only: /.*/ requires: - compare_base_dockerfiles + - get_test_data - deploy_dockerhub: filters: branches: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 714244e94a..8e60701109 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -27,14 +27,14 @@ jobs: strategy: matrix: os: ['ubuntu-latest'] - python-version: [3.6, 3.7, 3.8, 3.9, "3.10"] + python-version: [3.7, 3.8, 3.9, "3.10"] check: ['test'] pip-flags: [''] depends: ['REQUIREMENTS'] deb-depends: [false] nipype-extras: ['doc,tests,profiler'] include: - - os: 
ubuntu-18.04 + - os: ubuntu-latest python-version: 3.8 check: test pip-flags: '' diff --git a/.mailmap b/.mailmap index e512a6f025..71b3ebe14b 100644 --- a/.mailmap +++ b/.mailmap @@ -22,6 +22,8 @@ Anna Doll <45283972+AnnaD15@users.noreply.github.com> Ariel Rokem Ariel Rokem Arman Eshaghi +Avneet Kaur +Avneet Kaur Ashely Gillman Basille Pinsard Basille Pinsard @@ -84,7 +86,8 @@ Hrvoje Stojic Isaac Schwabacher Jakub Kaczmarzyk James Kent -James Kent Fred Mertz +James Kent +James Kent Janosch Linkersdörfer Jason Wong Jason Wong @@ -114,6 +117,7 @@ Sin Kim Sin Kim Koen Helwegen Kornelius Podranski +Kristofer Montazeri Krzysztof J. Gorgolewski Krzysztof J. Gorgolewski Krzysztof J. Gorgolewski @@ -128,6 +132,7 @@ Marcel Falkiewicz Maria de Fatima Dias Maria de Fatima Dias Martin Perez-Guevara +Martin Norgaard Mathias Goncalves Mathias Goncalves Mathieu Dubois @@ -189,6 +194,7 @@ Steven Giavasis Steven Tilley Sulantha Mathotaarachchi +Sunjae Shim <85246533+sjshim@users.noreply.github.com> Tim Robert-Fitzgerald Tom Close Tom Close diff --git a/.zenodo.json b/.zenodo.json index b04e08cbfb..d7cacccedd 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -84,6 +84,11 @@ { "name": "Loney, Fred" }, + { + "affiliation": "Department of Psychology, Stanford University", + "name": "Norgaard, Martin", + "orcid": "0000-0003-2131-5688" + }, { "affiliation": "Florida International University", "name": "Salo, Taylor", @@ -171,6 +176,11 @@ "affiliation": "National Institutes of Health", "name": "Clark, Michael G. 
" }, + { + "affiliation": "Neuroscience Program, University of Iowa", + "name": "Kent, James D.", + "orcid": "0000-0002-4892-2659" + }, { "affiliation": "Concordia University", "name": "Benderoff, Erin" @@ -183,11 +193,6 @@ "name": "Dias, Maria de Fatima", "orcid": "0000-0001-8878-1750" }, - { - "affiliation": "Neuroscience Program, University of Iowa", - "name": "Kent, James D.", - "orcid": "0000-0002-4892-2659" - }, { "affiliation": "Otto-von-Guericke-University Magdeburg, Germany", "name": "Hanke, Michael", @@ -694,6 +699,11 @@ "name": "Lerma-Usabiaga, Garikoitz", "orcid": "0000-0001-9800-4816" }, + { + "affiliation": "ARAMIS Lab, Paris Brain Institute", + "name": "Vaillant, Ghislain", + "orcid": "0000-0003-0267-3033" + }, { "name": "Schwabacher, Isaac" }, @@ -774,6 +784,11 @@ { "name": "Park, Anne" }, + { + "affiliation": "Consolidated Department of Psychiatry, Harvard Medical School", + "name": "Frederick, Blaise", + "orcid": "0000-0001-5832-5279" + }, { "name": "Cheung, Brian" }, @@ -839,6 +854,11 @@ { "name": "Urchs, Sebastian" }, + { + "affiliation": "Department of Psychology, Stanford University", + "name": "Shim, Sunjae", + "orcid": "0000-0003-2773-0807" + }, { "name": "Nickson, Thomas" }, diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst index 4e656441b0..03caebaa27 100644 --- a/doc/changelog/1.X.X-changelog.rst +++ b/doc/changelog/1.X.X-changelog.rst @@ -1,3 +1,26 @@ +1.8.0 (May 10, 2022) +==================== + +New feature release in the 1.8.x series. + +The primary new features are a batch of PETSurfer interfaces. + +This release drops support for Python < 3.7 and Numpy < 1.17, triggering a minor version bump. +Additionally, matplotlib < 2.1 will stop working with some interfaces, but more recent versions +will start working. 
+ +(`Full changelog `__) + + * FIX: Change plt.hist() argument from deprecated 'normed' to 'density' (https://github.com/nipy/nipype/pull/3455) + * ENH: Add random seed option to ANTs registration (https://github.com/nipy/nipype/pull/3463) + * ENH: Add PETsurfer interfaces (https://github.com/nipy/nipype/pull/3437) + * ENH: Add Text2Vest and Vest2Text interfaces (https://github.com/nipy/nipype/pull/3447) + * REF: Optimize ICC_rep_anova with a memoized helper function (https://github.com/nipy/nipype/pull/3454) + * REF: Rearranging matmul order and using hermitian flag in ICC_rep_anova (https://github.com/nipy/nipype/pull/3453) + * MNT: Drop distutils (https://github.com/nipy/nipype/pull/3458) + * CI: Cache test data (https://github.com/nipy/nipype/pull/3459) + + 1.7.1 (April 05, 2022) ====================== diff --git a/doc/interfaces.rst b/doc/interfaces.rst index 2ee0bf5762..7f8bbf1e35 100644 --- a/doc/interfaces.rst +++ b/doc/interfaces.rst @@ -8,7 +8,7 @@ Interfaces and Workflows :Release: |version| :Date: |today| -Previous versions: `1.7.0 `_ `1.6.1 `_ +Previous versions: `1.7.1 `_ `1.7.0 `_ Workflows --------- diff --git a/nipype/__init__.py b/nipype/__init__.py index 72b7241020..bfc1e16a5b 100644 --- a/nipype/__init__.py +++ b/nipype/__init__.py @@ -12,7 +12,9 @@ """ import os -from distutils.version import LooseVersion + +# XXX Deprecate this import +from .external.version import LooseVersion from .info import URL as __url__, STATUS as __status__, __version__ from .utils.config import NipypeConfig diff --git a/nipype/algorithms/icc.py b/nipype/algorithms/icc.py index 42cad2a80c..38f56d6541 100644 --- a/nipype/algorithms/icc.py +++ b/nipype/algorithms/icc.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- import os +from functools import lru_cache import numpy as np -from numpy import ones, kron, mean, eye, hstack, dot, tile +from numpy import ones, kron, mean, eye, hstack, tile from numpy.linalg import pinv import nibabel as nb from ..interfaces.base import ( @@ 
-86,7 +87,17 @@ def _list_outputs(self): return outputs -def ICC_rep_anova(Y): +@lru_cache(maxsize=1) +def ICC_projection_matrix(shape): + nb_subjects, nb_conditions = shape + + x = kron(eye(nb_conditions), ones((nb_subjects, 1))) # sessions + x0 = tile(eye(nb_subjects), (nb_conditions, 1)) # subjects + X = hstack([x, x0]) + return X @ pinv(X.T @ X, hermitian=True) @ X.T + + +def ICC_rep_anova(Y, projection_matrix=None): """ the data Y are entered as a 'table' ie subjects are in rows and repeated measures in columns @@ -94,36 +105,43 @@ def ICC_rep_anova(Y): One Sample Repeated measure ANOVA Y = XB + E with X = [FaTor / Subjects] - """ + ``ICC_rep_anova`` involves an expensive operation to compute a projection + matrix, which depends only on the shape of ``Y``, which is computed by + calling ``ICC_projection_matrix(Y.shape)``. If arrays of multiple shapes are + expected, it may be worth pre-computing and passing directly as an + argument to ``ICC_rep_anova``. + + If only one ``Y.shape`` will occur, you do not need to explicitly handle + these, as the most recently calculated matrix is cached automatically. + For example, if you are running the same computation on every voxel of + an image, you will see significant speedups. + + If a ``Y`` is passed with a new shape, a new matrix will be calculated + automatically. 
+ """ [nb_subjects, nb_conditions] = Y.shape dfc = nb_conditions - 1 - dfe = (nb_subjects - 1) * dfc dfr = nb_subjects - 1 + dfe = dfr * dfc # Compute the repeated measure effect # ------------------------------------ # Sum Square Total - mean_Y = mean(Y) - SST = ((Y - mean_Y) ** 2).sum() - - # create the design matrix for the different levels - x = kron(eye(nb_conditions), ones((nb_subjects, 1))) # sessions - x0 = tile(eye(nb_subjects), (nb_conditions, 1)) # subjects - X = hstack([x, x0]) + demeaned_Y = Y - mean(Y) + SST = np.sum(demeaned_Y**2) # Sum Square Error - predicted_Y = dot(dot(dot(X, pinv(dot(X.T, X))), X.T), Y.flatten("F")) - residuals = Y.flatten("F") - predicted_Y - SSE = (residuals**2).sum() - - residuals.shape = Y.shape + if projection_matrix is None: + projection_matrix = ICC_projection_matrix(Y.shape) + residuals = Y.flatten("F") - (projection_matrix @ Y.flatten("F")) + SSE = np.sum(residuals**2) MSE = SSE / dfe - # Sum square session effect - between colums/sessions - SSC = ((mean(Y, 0) - mean_Y) ** 2).sum() * nb_subjects + # Sum square session effect - between columns/sessions + SSC = np.sum(mean(demeaned_Y, 0) ** 2) * nb_subjects MSC = SSC / dfc / nb_subjects session_effect_F = MSC / MSE diff --git a/nipype/algorithms/metrics.py b/nipype/algorithms/metrics.py index fc209a9d27..b58e7fc59b 100644 --- a/nipype/algorithms/metrics.py +++ b/nipype/algorithms/metrics.py @@ -150,7 +150,7 @@ def _eucl_mean(self, nii1, nii2, weighted=False): import matplotlib.pyplot as plt plt.figure() - plt.hist(min_dist_matrix, 50, normed=1, facecolor="green") + plt.hist(min_dist_matrix, 50, density=True, facecolor="green") plt.savefig(self._hist_filename) plt.clf() plt.close() diff --git a/nipype/external/version.py b/nipype/external/version.py new file mode 100644 index 0000000000..0a2fbf167e --- /dev/null +++ b/nipype/external/version.py @@ -0,0 +1,224 @@ +# This module has been vendored from CPython distutils/version.py +# last updated in 
662db125cddbca1db68116c547c290eb3943d98e +# +# It is licensed according to the Python Software Foundation License Version 2 +# which may be found in full in the following (hopefully persistent) locations: +# +# https://github.com/python/cpython/blob/main/LICENSE +# https://spdx.org/licenses/Python-2.0.html +# +# The following changes have been made: +# +# 2022.04.27 - Minor changes are made to the comments, +# - The StrictVersion class was removed +# - Black styling was applied +# + +# distutils/version.py +# +# Implements multiple version numbering conventions for the +# Python Module Distribution Utilities. + +"""Provides classes to represent module version numbers (one class for +each style of version numbering). There are currently two such classes +implemented: StrictVersion and LooseVersion. + +Every version number class implements the following interface: + * the 'parse' method takes a string and parses it to some internal + representation; if the string is an invalid version number, + 'parse' raises a ValueError exception + * the class constructor takes an optional string argument which, + if supplied, is passed to 'parse' + * __str__ reconstructs the string that was passed to 'parse' (or + an equivalent string -- ie. one that will generate an equivalent + version number instance) + * __repr__ generates Python code to recreate the version number instance + * _cmp compares the current instance with either another instance + of the same class or a string (which will be parsed to an instance + of the same class, thus must follow the same rules) +""" + +import re + + +class Version: + """Abstract base class for version numbering classes. Just provides + constructor (__init__) and reproducer (__repr__), because those + seem to be the same for all version numbering classes; and route + rich comparisons to _cmp. 
+ """ + + def __init__(self, vstring=None): + if vstring: + self.parse(vstring) + + def __repr__(self): + return "%s ('%s')" % (self.__class__.__name__, str(self)) + + def __eq__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c == 0 + + def __lt__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c < 0 + + def __le__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c <= 0 + + def __gt__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c > 0 + + def __ge__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c >= 0 + + +# The rules according to Greg Stein: +# 1) a version number has 1 or more numbers separated by a period or by +# sequences of letters. If only periods, then these are compared +# left-to-right to determine an ordering. +# 2) sequences of letters are part of the tuple for comparison and are +# compared lexicographically +# 3) recognize the numeric components may have leading zeroes +# +# The LooseVersion class below implements these rules: a version number +# string is split up into a tuple of integer and string components, and +# comparison is a simple tuple comparison. This means that version +# numbers behave in a predictable and obvious way, but a way that might +# not necessarily be how people *want* version numbers to behave. There +# wouldn't be a problem if people could stick to purely numeric version +# numbers: just split on period and compare the numbers as tuples. +# However, people insist on putting letters into their version numbers; +# the most common purpose seems to be: +# - indicating a "pre-release" version +# ('alpha', 'beta', 'a', 'b', 'pre', 'p') +# - indicating a post-release patch ('p', 'pl', 'patch') +# but of course this can't cover all version number schemes, and there's +# no way to know what a programmer means without asking him. 
+# +# The problem is what to do with letters (and other non-numeric +# characters) in a version number. The current implementation does the +# obvious and predictable thing: keep them as strings and compare +# lexically within a tuple comparison. This has the desired effect if +# an appended letter sequence implies something "post-release": +# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002". +# +# However, if letters in a version number imply a pre-release version, +# the "obvious" thing isn't correct. Eg. you would expect that +# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison +# implemented here, this just isn't so. +# +# Two possible solutions come to mind. The first is to tie the +# comparison algorithm to a particular set of semantic rules, as has +# been done in the StrictVersion class above. This works great as long +# as everyone can go along with bondage and discipline. Hopefully a +# (large) subset of Python module programmers will agree that the +# particular flavour of bondage and discipline provided by StrictVersion +# provides enough benefit to be worth using, and will submit their +# version numbering scheme to its domination. The free-thinking +# anarchists in the lot will never give in, though, and something needs +# to be done to accommodate them. +# +# Perhaps a "moderately strict" version class could be implemented that +# lets almost anything slide (syntactically), and makes some heuristic +# assumptions about non-digits in version number strings. This could +# sink into special-case-hell, though; if I was as talented and +# idiosyncratic as Larry Wall, I'd go ahead and implement a class that +# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is +# just as happy dealing with things like "2g6" and "1.13++". I don't +# think I'm smart enough to do it right though. 
+# +# In any case, I've coded the test suite for this module (see +# ../test/test_version.py) specifically to fail on things like comparing +# "1.2a2" and "1.2". That's not because the *code* is doing anything +# wrong, it's because the simple, obvious design doesn't match my +# complicated, hairy expectations for real-world version numbers. It +# would be a snap to fix the test suite to say, "Yep, LooseVersion does +# the Right Thing" (ie. the code matches the conception). But I'd rather +# have a conception that matches common notions about version numbers. + + +class LooseVersion(Version): + + """Version numbering for anarchists and software realists. + Implements the standard interface for version number classes as + described above. A version number consists of a series of numbers, + separated by either periods or strings of letters. When comparing + version numbers, the numeric components will be compared + numerically, and the alphabetic components lexically. The following + are all valid version numbers, in no particular order: + + 1.5.1 + 1.5.2b2 + 161 + 3.10a + 8.02 + 3.4j + 1996.07.12 + 3.2.pl0 + 3.1.1.6 + 2g6 + 11g + 0.960923 + 2.2beta29 + 1.13++ + 5.5.kw + 2.0b1pl0 + + In fact, there is no such thing as an invalid version number under + this scheme; the rules for comparison are simple and predictable, + but may not always give the results you want (for some definition + of "want"). 
+ """ + + component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE) + + def __init__(self, vstring=None): + if vstring: + self.parse(vstring) + + def parse(self, vstring): + # I've given up on thinking I can reconstruct the version string + # from the parsed tuple -- so I just store the string here for + # use by __str__ + self.vstring = vstring + components = [x for x in self.component_re.split(vstring) if x and x != '.'] + for i, obj in enumerate(components): + try: + components[i] = int(obj) + except ValueError: + pass + + self.version = components + + def __str__(self): + return self.vstring + + def __repr__(self): + return "LooseVersion ('%s')" % str(self) + + def _cmp(self, other): + if isinstance(other, str): + other = LooseVersion(other) + elif not isinstance(other, LooseVersion): + return NotImplemented + + if self.version == other.version: + return 0 + if self.version < other.version: + return -1 + if self.version > other.version: + return 1 diff --git a/nipype/info.py b/nipype/info.py index d3df0a3c6f..4fe067b9b6 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -5,7 +5,7 @@ # nipype version information # Remove -dev for release -__version__ = "1.7.1" +__version__ = "1.8.0" def get_nipype_gitversion(): @@ -54,14 +54,13 @@ def get_nipype_gitversion(): "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Topic :: Scientific/Engineering", ] -PYTHON_REQUIRES = ">= 3.6" +PYTHON_REQUIRES = ">= 3.7" description = "Neuroimaging in Python: Pipelines and Interfaces" @@ -102,15 +101,12 @@ def get_nipype_gitversion(): # versions NIBABEL_MIN_VERSION = "2.1.0" NETWORKX_MIN_VERSION = "2.0" -# Numpy bug in python 3.7: -# 
https://www.opensourceanswers.com/blog/you-shouldnt-use-python-37-for-data-science-right-now.html -NUMPY_MIN_VERSION = "1.15.3" +NUMPY_MIN_VERSION = "1.17" SCIPY_MIN_VERSION = "0.14" TRAITS_MIN_VERSION = "4.6" DATEUTIL_MIN_VERSION = "2.2" -FUTURE_MIN_VERSION = "0.16.0" SIMPLEJSON_MIN_VERSION = "3.8.0" -PROV_VERSION = "1.5.2" +PROV_MIN_VERSION = "1.5.2" RDFLIB_MIN_VERSION = "5.0.0" CLICK_MIN_VERSION = "6.6.0" PYDOT_MIN_VERSION = "1.2.3" @@ -141,7 +137,7 @@ def get_nipype_gitversion(): "nibabel>=%s" % NIBABEL_MIN_VERSION, "numpy>=%s" % NUMPY_MIN_VERSION, "packaging", - "prov>=%s" % PROV_VERSION, + "prov>=%s" % PROV_MIN_VERSION, "pydot>=%s" % PYDOT_MIN_VERSION, "python-dateutil>=%s" % DATEUTIL_MIN_VERSION, "rdflib>=%s" % RDFLIB_MIN_VERSION, diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py index 3f338eb0ce..660e913dc3 100644 --- a/nipype/interfaces/afni/base.py +++ b/nipype/interfaces/afni/base.py @@ -4,7 +4,7 @@ """Provide a base interface to AFNI commands.""" import os from sys import platform -from distutils import spawn +import shutil from ... 
import logging, LooseVersion from ...utils.filemanip import split_filename, fname_presuffix @@ -317,7 +317,7 @@ class AFNIPythonCommand(AFNICommand): def cmd(self): """Revise the command path.""" orig_cmd = super(AFNIPythonCommand, self).cmd - found = spawn.find_executable(orig_cmd) + found = shutil.which(orig_cmd) return found if found is not None else orig_cmd @property diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 7ab6b30c30..478b26dc36 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -585,7 +585,11 @@ class RegistrationInputSpec(ANTSCommandInputSpec): usedefault=True, desc="The Lower quantile to clip image ranges", ) - + random_seed = traits.Int( + argstr="--random-seed %d", + desc="Fixed seed for random number generation", + min_ver="2.3.0", + ) verbose = traits.Bool( argstr="-v", default_value=False, usedefault=True, nohash=True ) @@ -1708,6 +1712,11 @@ class RegistrationSynQuickInputSpec(ANTSCommandInputSpec): desc="precision type (default = double)", usedefault=True, ) + random_seed = traits.Int( + argstr="-e %d", + desc="fixed random seed", + min_ver="2.3.0", + ) class RegistrationSynQuickOutputSpec(TraitedSpec): diff --git a/nipype/interfaces/ants/tests/test_auto_Registration.py b/nipype/interfaces/ants/tests/test_auto_Registration.py index 507e0effe2..814948835c 100644 --- a/nipype/interfaces/ants/tests/test_auto_Registration.py +++ b/nipype/interfaces/ants/tests/test_auto_Registration.py @@ -115,6 +115,10 @@ def test_Registration_inputs(): requires=["metric_weight"], usedefault=True, ), + random_seed=dict( + argstr="--random-seed %d", + min_ver="2.3.0", + ), restore_state=dict( argstr="--restore-state %s", extensions=None, diff --git a/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py b/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py index c09f08d17a..c799f47299 100644 --- 
a/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py +++ b/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py @@ -39,6 +39,10 @@ def test_RegistrationSynQuick_inputs(): argstr="-p %s", usedefault=True, ), + random_seed=dict( + argstr="-e %d", + min_ver="2.3.0", + ), spline_distance=dict( argstr="-s %d", usedefault=True, diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index 7e3c67b977..d4271b6159 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -3,9 +3,8 @@ import os.path as op import nibabel as nb import numpy as np -from distutils.version import LooseVersion - +from nipype.external.version import LooseVersion from ... import logging from ..base import traits, TraitedSpec, File, isdefined from .base import ( diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index 27a9e632ad..cef7579772 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -7,7 +7,7 @@ import numpy as np import nibabel as nb -from distutils.version import LooseVersion +from nipype.external.version import LooseVersion from ... import logging from ..base import TraitedSpec, File, traits, isdefined diff --git a/nipype/interfaces/dipy/registration.py b/nipype/interfaces/dipy/registration.py index f70c566194..e07859560d 100644 --- a/nipype/interfaces/dipy/registration.py +++ b/nipype/interfaces/dipy/registration.py @@ -1,4 +1,4 @@ -from distutils.version import LooseVersion +from nipype.external.version import LooseVersion from ... 
import logging from .base import HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows diff --git a/nipype/interfaces/dipy/stats.py b/nipype/interfaces/dipy/stats.py index fff0184a56..971857b64e 100644 --- a/nipype/interfaces/dipy/stats.py +++ b/nipype/interfaces/dipy/stats.py @@ -1,4 +1,4 @@ -from distutils.version import LooseVersion +from nipype.external.version import LooseVersion from ... import logging from .base import HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index 9ac9e0b59c..6b1da93a95 100644 --- a/nipype/interfaces/dipy/tracks.py +++ b/nipype/interfaces/dipy/tracks.py @@ -3,7 +3,7 @@ import os.path as op import numpy as np import nibabel as nb -from distutils.version import LooseVersion +from nipype.external.version import LooseVersion from ... import logging from ..base import TraitedSpec, BaseInterfaceInputSpec, File, isdefined, traits diff --git a/nipype/interfaces/freesurfer/__init__.py b/nipype/interfaces/freesurfer/__init__.py index 705cf895e4..4efa90039a 100644 --- a/nipype/interfaces/freesurfer/__init__.py +++ b/nipype/interfaces/freesurfer/__init__.py @@ -93,3 +93,7 @@ Paint, MRICoreg, ) +from .petsurfer import ( + GTMSeg, + GTMPVC, +) diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index 81758b6ac3..9ab1ac96a3 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -401,6 +401,26 @@ class GLMFitInputSpec(FSTraitedSpec): synth = traits.Bool(argstr="--synth", desc="replace input with gaussian") resynth_test = traits.Int(argstr="--resynthtest %d", desc="test GLM by resynthsis") profile = traits.Int(argstr="--profile %d", desc="niters : test speed") + mrtm1 = traits.Tuple( + File(exists=True), + File(exists=True), + argstr="--mrtm1 %s %s", + desc="RefTac TimeSec : perform MRTM1 kinetic modeling", + ) + mrtm2 = traits.Tuple( + 
File(exists=True), + File(exists=True), + traits.Float, + argstr="--mrtm2 %s %s %f", + desc="RefTac TimeSec k2prime : perform MRTM2 kinetic modeling", + ) + logan = traits.Tuple( + File(exists=True), + File(exists=True), + traits.Float, + argstr="--logan %s %s %f", + desc="RefTac TimeSec tstar : perform Logan kinetic modeling", + ) force_perm = traits.Bool( argstr="--perm-force", desc="force perumtation test, even when design matrix is not orthog", @@ -423,6 +443,9 @@ class GLMFitInputSpec(FSTraitedSpec): sim_done_file = File( argstr="--sim-done %s", desc="create file when simulation finished" ) + _ext_xor = ['nii', 'nii_gz'] + nii = traits.Bool(argstr='--nii', desc='save outputs as nii', xor=_ext_xor) + nii_gz = traits.Bool(argstr='--nii.gz', desc='save outputs as nii.gz', xor=_ext_xor) class GLMFitOutputSpec(TraitedSpec): @@ -444,6 +467,8 @@ class GLMFitOutputSpec(TraitedSpec): frame_eigenvectors = File(desc="matrix of frame eigenvectors from residual PCA") singular_values = File(desc="matrix singular values from residual PCA") svd_stats_file = File(desc="text file summarizing the residual PCA") + k2p_file = File(desc="estimate of k2p parameter") + bp_file = File(desc="Binding potential estimates") class GLMFit(FSCommand): @@ -478,22 +503,33 @@ def _list_outputs(self): glmdir = os.path.abspath(self.inputs.glm_dir) outputs["glm_dir"] = glmdir + if isdefined(self.inputs.nii_gz): + ext = 'nii.gz' + elif isdefined(self.inputs.nii): + ext = 'nii' + else: + ext = 'mgh' + # Assign the output files that always get created - outputs["beta_file"] = os.path.join(glmdir, "beta.mgh") - outputs["error_var_file"] = os.path.join(glmdir, "rvar.mgh") - outputs["error_stddev_file"] = os.path.join(glmdir, "rstd.mgh") - outputs["mask_file"] = os.path.join(glmdir, "mask.mgh") + outputs["beta_file"] = os.path.join(glmdir, f"beta.{ext}") + outputs["error_var_file"] = os.path.join(glmdir, f"rvar.{ext}") + outputs["error_stddev_file"] = os.path.join(glmdir, f"rstd.{ext}") + 
outputs["mask_file"] = os.path.join(glmdir, f"mask.{ext}") outputs["fwhm_file"] = os.path.join(glmdir, "fwhm.dat") outputs["dof_file"] = os.path.join(glmdir, "dof.dat") # Assign the conditional outputs - if isdefined(self.inputs.save_residual) and self.inputs.save_residual: - outputs["error_file"] = os.path.join(glmdir, "eres.mgh") - if isdefined(self.inputs.save_estimate) and self.inputs.save_estimate: - outputs["estimate_file"] = os.path.join(glmdir, "yhat.mgh") + if self.inputs.save_residual: + outputs["error_file"] = os.path.join(glmdir, f"eres.{ext}") + if self.inputs.save_estimate: + outputs["estimate_file"] = os.path.join(glmdir, f"yhat.{ext}") + if any((self.inputs.mrtm1, self.inputs.mrtm2, self.inputs.logan)): + outputs["bp_file"] = os.path.join(glmdir, f"bp.{ext}") + if self.inputs.mrtm1: + outputs["k2p_file"] = os.path.join(glmdir, "k2prime.dat") # Get the contrast directory name(s) + contrasts = [] if isdefined(self.inputs.contrast): - contrasts = [] for c in self.inputs.contrast: if split_filename(c)[2] in [".mat", ".dat", ".mtx", ".con"]: contrasts.append(split_filename(c)[1]) @@ -503,19 +539,19 @@ def _list_outputs(self): contrasts = ["osgm"] # Add in the contrast images - outputs["sig_file"] = [os.path.join(glmdir, c, "sig.mgh") for c in contrasts] - outputs["ftest_file"] = [os.path.join(glmdir, c, "F.mgh") for c in contrasts] + outputs["sig_file"] = [os.path.join(glmdir, c, f"sig.{ext}") for c in contrasts] + outputs["ftest_file"] = [os.path.join(glmdir, c, f"F.{ext}") for c in contrasts] outputs["gamma_file"] = [ - os.path.join(glmdir, c, "gamma.mgh") for c in contrasts + os.path.join(glmdir, c, f"gamma.{ext}") for c in contrasts ] outputs["gamma_var_file"] = [ - os.path.join(glmdir, c, "gammavar.mgh") for c in contrasts + os.path.join(glmdir, c, f"gammavar.{ext}") for c in contrasts ] # Add in the PCA results, if relevant if isdefined(self.inputs.pca) and self.inputs.pca: pcadir = os.path.join(glmdir, "pca-eres") - outputs["spatial_eigenvectors"] 
= os.path.join(pcadir, "v.mgh") + outputs["spatial_eigenvectors"] = os.path.join(pcadir, f"v.{ext}") outputs["frame_eigenvectors"] = os.path.join(pcadir, "u.mtx") outputs["singluar_values"] = os.path.join(pcadir, "sdiag.mat") outputs["svd_stats_file"] = os.path.join(pcadir, "stats.dat") diff --git a/nipype/interfaces/freesurfer/petsurfer.py b/nipype/interfaces/freesurfer/petsurfer.py new file mode 100644 index 0000000000..d04409ad20 --- /dev/null +++ b/nipype/interfaces/freesurfer/petsurfer.py @@ -0,0 +1,644 @@ +# -*- coding: utf-8 -*- +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Provides interfaces to various commands for running PET analyses provided by FreeSurfer +""" + +import os + +from ... import logging +from ..base import ( + TraitedSpec, + File, + traits, + Directory, + InputMultiPath, + isdefined, +) +from .base import FSCommand, FSTraitedSpec + +from .model import GLMFitInputSpec, GLMFit + +__docformat__ = "restructuredtext" +iflogger = logging.getLogger("nipype.interface") + + +class GTMSegInputSpec(FSTraitedSpec): + + subject_id = traits.String(argstr="--s %s", desc="subject id", mandatory=True) + + xcerseg = traits.Bool( + argstr="--xcerseg", + desc="run xcerebralseg on this subject to create apas+head.mgz", + ) + + out_file = File( + "gtmseg.mgz", + argstr="--o %s", + desc="output volume relative to subject/mri", + usedefault=True, + ) + + upsampling_factor = traits.Int( + argstr="--usf %i", desc="upsampling factor (default is 2)" + ) + + subsegwm = traits.Bool( + argstr="--subsegwm", default=True, desc="subsegment WM into lobes (default)" + ) + + keep_hypo = traits.Bool( + argstr="--keep-hypo", + desc="do not relabel hypointensities as WM when subsegmenting WM", + ) + + keep_cc = traits.Bool( + argstr="--keep-cc", desc="do not relabel corpus callosum as WM" + ) + + dmax = traits.Float( + argstr="--dmax %f", + desc="distance threshold to use when subsegmenting WM (default is 
5)", + ) + + ctx_annot = traits.Tuple( + traits.String, + traits.Int, + traits.Int, + argstr="--ctx-annot %s %i %i", + desc="annot lhbase rhbase : annotation to use for cortical segmentation (default is aparc 1000 2000)", + ) + + wm_annot = traits.Tuple( + traits.String, + traits.Int, + traits.Int, + argstr="--wm-annot %s %i %i", + desc="annot lhbase rhbase : annotation to use for WM segmentation (with --subsegwm, default is lobes 3200 4200)", + ) + + output_upsampling_factor = traits.Int( + argstr="--output-usf %i", + desc="set output USF different than USF, mostly for debugging", + ) + + head = traits.String( + argstr="--head %s", desc="use headseg instead of apas+head.mgz" + ) + + subseg_cblum_wm = traits.Bool( + argstr="--subseg-cblum-wm", desc="subsegment cerebellum WM into core and gyri" + ) + + no_pons = traits.Bool( + argstr="--no-pons", desc="do not add pons segmentation when doing ---xcerseg" + ) + + no_vermis = traits.Bool( + argstr="--no-vermis", + desc="do not add vermis segmentation when doing ---xcerseg", + ) + + colortable = File(exists=True, argstr="--ctab %s", desc="colortable") + no_seg_stats = traits.Bool( + argstr="--no-seg-stats", desc="do not compute segmentation stats" + ) + + +class GTMSegOutputSpec(TraitedSpec): + out_file = File(desc="GTM segmentation") + + +class GTMSeg(FSCommand): + """create an anatomical segmentation for the geometric transfer matrix (GTM). 
+ + Examples + -------- + >>> gtmseg = GTMSeg() + >>> gtmseg.inputs.subject_id = 'subject_id' + >>> gtmseg.cmdline + 'gtmseg --o gtmseg.mgz --s subject_id' + """ + + _cmd = "gtmseg" + input_spec = GTMSegInputSpec + output_spec = GTMSegOutputSpec + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs['out_file'] = os.path.join( + self.inputs.subjects_dir, + self.inputs.subject_id, + 'mri', + self.inputs.out_file, + ) + return outputs + + +class GTMPVCInputSpec(FSTraitedSpec): + + in_file = File( + exists=True, + argstr="--i %s", + mandatory=True, + copyfile=False, + desc="input volume - source data to pvc", + ) + + frame = traits.Int( + argstr="--frame %i", desc="only process 0-based frame F from inputvol" + ) + + psf = traits.Float(argstr="--psf %f", desc="scanner PSF FWHM in mm") + + segmentation = File( + argstr="--seg %s", + exists=True, + mandatory=True, + desc="segfile : anatomical segmentation to define regions for GTM", + ) + + _reg_xor = ["reg_file", "regheader", "reg_identity"] + reg_file = File( + exists=True, + argstr="--reg %s", + mandatory=True, + desc="LTA registration file that maps PET to anatomical", + xor=_reg_xor, + ) + + regheader = traits.Bool( + argstr="--regheader", + mandatory=True, + desc="assume input and seg share scanner space", + xor=_reg_xor, + ) + + reg_identity = traits.Bool( + argstr="--reg-identity", + mandatory=True, + desc="assume that input is in anatomical space", + xor=_reg_xor, + ) + + pvc_dir = traits.Str(argstr="--o %s", desc="save outputs to dir", genfile=True) + + mask_file = File( + exists=True, + argstr="--mask %s", + desc="ignore areas outside of the mask (in input vol space)", + ) + + auto_mask = traits.Tuple( + traits.Float, + traits.Float, + argstr="--auto-mask %f %f", + desc="FWHM thresh : automatically compute mask", + ) + + no_reduce_fov = traits.Bool( + argstr="--no-reduce-fov", desc="do not reduce FoV to encompass mask" + ) + + reduce_fox_eqodd = traits.Bool( + argstr="--reduce-fox-eqodd", 
+ desc="reduce FoV to encompass mask but force nc=nr and ns to be odd", + ) + + contrast = InputMultiPath( + File(exists=True), argstr="--C %s...", desc="contrast file" + ) + + default_seg_merge = traits.Bool( + argstr="--default-seg-merge", desc="default schema for merging ROIs" + ) + + merge_hypos = traits.Bool( + argstr="--merge-hypos", desc="merge left and right hypointensites into to ROI" + ) + + merge_cblum_wm_gyri = traits.Bool( + argstr="--merge-cblum-wm-gyri", + desc="cerebellum WM gyri back into cerebellum WM", + ) + + tt_reduce = traits.Bool( + argstr="--tt-reduce", desc="reduce segmentation to that of a tissue type" + ) + + replace = traits.Tuple( + traits.Int, + traits.Int, + argstr="--replace %i %i", + desc="Id1 Id2 : replace seg Id1 with seg Id2", + ) + + rescale = traits.List( + argstr="--rescale %s...", + desc="Id1 : specify reference region(s) used to rescale (default is pons)", + ) + + no_rescale = traits.Bool( + argstr="--no-rescale", + desc="do not global rescale such that mean of reference region is scaleref", + ) + + scale_refval = traits.Float( + argstr="--scale-refval %f", + desc="refval : scale such that mean in reference region is refval", + ) + + _ctab_inputs = ("color_table_file", "default_color_table") + color_table_file = File( + exists=True, + argstr="--ctab %s", + xor=_ctab_inputs, + desc="color table file with seg id names", + ) + + default_color_table = traits.Bool( + argstr="--ctab-default", + xor=_ctab_inputs, + desc="use $FREESURFER_HOME/FreeSurferColorLUT.txt", + ) + + tt_update = traits.Bool( + argstr="--tt-update", + desc="changes tissue type of VentralDC, BrainStem, and Pons to be SubcortGM", + ) + + lat = traits.Bool(argstr="--lat", desc="lateralize tissue types") + + no_tfe = traits.Bool( + argstr="--no-tfe", + desc="do not correct for tissue fraction effect (with --psf 0 turns off PVC entirely)", + ) + + no_pvc = traits.Bool( + argstr="--no-pvc", + desc="turns off PVC entirely (both PSF and TFE)", + ) + + 
tissue_fraction_resolution = traits.Float( + argstr="--segpvfres %f", + desc="set the tissue fraction resolution parameter (def is 0.5)", + ) + + rbv = traits.Bool( + argstr="--rbv", + requires=["subjects_dir"], + desc="perform Region-based Voxelwise (RBV) PVC", + ) + + rbv_res = traits.Float( + argstr="--rbv-res %f", + desc="voxsize : set RBV voxel resolution (good for when standard res takes too much memory)", + ) + + mg = traits.Tuple( + traits.Float, + traits.List(traits.String), + argstr="--mg %g %s", + desc="gmthresh RefId1 RefId2 ...: perform Mueller-Gaertner PVC, gmthresh is min gm pvf bet 0 and 1", + ) + + mg_ref_cerebral_wm = traits.Bool( + argstr="--mg-ref-cerebral-wm", desc=" set MG RefIds to 2 and 41" + ) + + mg_ref_lobes_wm = traits.Bool( + argstr="--mg-ref-lobes-wm", + desc="set MG RefIds to those for lobes when using wm subseg", + ) + + mgx = traits.Float( + argstr="--mgx %f", + desc="gmxthresh : GLM-based Mueller-Gaertner PVC, gmxthresh is min gm pvf bet 0 and 1", + ) + + km_ref = traits.List( + argstr="--km-ref %s...", + desc="RefId1 RefId2 ... : compute reference TAC for KM as mean of given RefIds", + ) + + km_hb = traits.List( + argstr="--km-hb %s...", + desc="RefId1 RefId2 ... : compute HiBinding TAC for KM as mean of given RefIds", + ) + + steady_state_params = traits.Tuple( + traits.Float, + traits.Float, + traits.Float, + argstr="--ss %f %f %f", + desc="bpc scale dcf : steady-state analysis spec blood plasma concentration, unit scale and decay correction factor. You must also spec --km-ref. 
Turns off rescaling", + ) + + X = traits.Bool( + argstr="--X", desc="save X matrix in matlab4 format as X.mat (it will be big)" + ) + + y = traits.Bool(argstr="--y", desc="save y matrix in matlab4 format as y.mat") + + beta = traits.Bool( + argstr="--beta", desc="save beta matrix in matlab4 format as beta.mat" + ) + + X0 = traits.Bool( + argstr="--X0", + desc="save X0 matrix in matlab4 format as X0.mat (it will be big)", + ) + + save_input = traits.Bool( + argstr="--save-input", desc="saves rescaled input as input.rescaled.nii.gz" + ) + + save_eres = traits.Bool(argstr="--save-eres", desc="saves residual error") + + save_yhat = traits.Bool( + argstr="--save-yhat", + xor=["save_yhat_with_noise"], + desc="save signal estimate (yhat) smoothed with the PSF", + ) + + save_yhat_with_noise = traits.Tuple( + traits.Int, + traits.Int, + argstr="--save-yhat-with-noise %i %i", + xor=["save_yhat"], + desc="seed nreps : save signal estimate (yhat) with noise", + ) + + save_yhat_full_fov = traits.Bool( + argstr="--save-yhat-full-fov", desc="save signal estimate (yhat)" + ) + + save_yhat0 = traits.Bool(argstr="--save-yhat0", desc="save signal estimate (yhat)") + + optimization_schema = traits.Enum( + "3D", + "2D", + "1D", + "3D_MB", + "2D_MB", + "1D_MB", + "MBZ", + "MB3", + argstr="--opt %s", + desc="opt : optimization schema for applying adaptive GTM", + ) + + opt_tol = traits.Tuple( + traits.Int, + traits.Float, + traits.Float, + argstr="--opt-tol %i %f %f", + desc="n_iters_max ftol lin_min_tol : optimization parameters for adaptive gtm using fminsearch", + ) + + opt_brain = traits.Bool(argstr="--opt-brain", desc="apply adaptive GTM") + + opt_seg_merge = traits.Bool( + argstr="--opt-seg-merge", + desc="optimal schema for merging ROIs when applying adaptive GTM", + ) + + num_threads = traits.Int( + argstr="--threads %i", desc="threads : number of threads to use" + ) + + psf_col = traits.Float( + argstr="--psf-col %f", desc="xFWHM : full-width-half-maximum in the x-direction" + ) 
+ + psf_row = traits.Float( + argstr="--psf-row %f", desc="yFWHM : full-width-half-maximum in the y-direction" + ) + + psf_slice = traits.Float( + argstr="--psf-slice %f", + desc="zFWHM : full-width-half-maximum in the z-direction", + ) + + +class GTMPVCOutputSpec(TraitedSpec): + + pvc_dir = Directory(desc="output directory") + ref_file = File(desc="Reference TAC in .dat") + hb_nifti = File(desc="High-binding TAC in nifti") + hb_dat = File(desc="High-binding TAC in .dat") + nopvc_file = File(desc="TACs for all regions with no PVC") + gtm_file = File(desc="TACs for all regions with GTM PVC") + gtm_stats = File(desc="Statistics for the GTM PVC") + input_file = File(desc="4D PET file in native volume space") + reg_pet2anat = File(desc="Registration file to go from PET to anat") + reg_anat2pet = File(desc="Registration file to go from anat to PET") + reg_rbvpet2anat = File( + desc="Registration file to go from RBV corrected PET to anat" + ) + reg_anat2rbvpet = File( + desc="Registration file to go from anat to RBV corrected PET" + ) + mgx_ctxgm = File( + desc="Cortical GM voxel-wise values corrected using the extended Muller-Gartner method", + ) + mgx_subctxgm = File( + desc="Subcortical GM voxel-wise values corrected using the extended Muller-Gartner method", + ) + mgx_gm = File( + desc="All GM voxel-wise values corrected using the extended Muller-Gartner method", + ) + rbv = File(desc="All GM voxel-wise values corrected using the RBV method") + opt_params = File( + desc="Optimal parameter estimates for the FWHM using adaptive GTM" + ) + yhat0 = File(desc="4D PET file of signal estimate (yhat) after PVC (unsmoothed)") + yhat = File( + desc="4D PET file of signal estimate (yhat) after PVC (smoothed with PSF)", + ) + yhat_full_fov = File( + desc="4D PET file with full FOV of signal estimate (yhat) after PVC (smoothed with PSF)", + ) + yhat_with_noise = File( + desc="4D PET file with full FOV of signal estimate (yhat) with noise after PVC (smoothed with PSF)", + ) + + 
class GTMPVC(FSCommand):
    """Perform Partial Volume Correction (PVC) on PET data.

    Wraps the FreeSurfer ``mri_gtmpvc`` command, which corrects PET data
    for partial volume effects using the geometric transfer matrix (GTM)
    segmentation produced by ``GTMSeg``.

    Examples
    --------
    >>> gtmpvc = GTMPVC()
    >>> gtmpvc.inputs.in_file = 'sub-01_ses-baseline_pet.nii.gz'
    >>> gtmpvc.inputs.segmentation = 'gtmseg.mgz'
    >>> gtmpvc.inputs.reg_file = 'sub-01_ses-baseline_pet_mean_reg.lta'
    >>> gtmpvc.inputs.pvc_dir = 'pvc'
    >>> gtmpvc.inputs.psf = 4
    >>> gtmpvc.inputs.default_seg_merge = True
    >>> gtmpvc.inputs.auto_mask = (1, 0.1)
    >>> gtmpvc.inputs.km_ref = ['8 47']
    >>> gtmpvc.inputs.km_hb = ['11 12 50 51']
    >>> gtmpvc.inputs.no_rescale = True
    >>> gtmpvc.inputs.save_input = True
    >>> gtmpvc.cmdline  # doctest: +NORMALIZE_WHITESPACE
    'mri_gtmpvc --auto-mask 1.000000 0.100000 --default-seg-merge \
    --i sub-01_ses-baseline_pet.nii.gz --km-hb 11 12 50 51 --km-ref 8 47 --no-rescale \
    --psf 4.000000 --o pvc --reg sub-01_ses-baseline_pet_mean_reg.lta --save-input \
    --seg gtmseg.mgz'

    >>> gtmpvc = GTMPVC()
    >>> gtmpvc.inputs.in_file = 'sub-01_ses-baseline_pet.nii.gz'
    >>> gtmpvc.inputs.segmentation = 'gtmseg.mgz'
    >>> gtmpvc.inputs.regheader = True
    >>> gtmpvc.inputs.pvc_dir = 'pvc'
    >>> gtmpvc.inputs.mg = (0.5, ["ROI1", "ROI2"])
    >>> gtmpvc.cmdline  # doctest: +NORMALIZE_WHITESPACE
    'mri_gtmpvc --i sub-01_ses-baseline_pet.nii.gz --mg 0.5 ROI1 ROI2 --o pvc --regheader --seg gtmseg.mgz'
    """

    _cmd = "mri_gtmpvc"
    input_spec = GTMPVCInputSpec
    output_spec = GTMPVCOutputSpec

    def _format_arg(self, name, spec, val):
        # Map the symbolic optimization schema onto mri_gtmpvc's integer codes.
        # Values taken from
        # https://github.com/freesurfer/freesurfer/blob/fs-7.2/mri_gtmpvc/mri_gtmpvc.cpp#L115-L122
        if name == 'optimization_schema':
            return (
                spec.argstr
                % {
                    "3D": 1,
                    "2D": 2,
                    "1D": 3,
                    "3D_MB": 4,
                    "2D_MB": 5,
                    "1D_MB": 6,
                    "MBZ": 7,
                    "MB3": 8,
                }[val]
            )
        # --mg takes a threshold followed by a space-separated list of RefIds.
        if name == 'mg':
            return spec.argstr % (val[0], ' '.join(val[1]))
        return super(GTMPVC, self)._format_arg(name, spec, val)

    def _list_outputs(self):
        """Predict output file paths from the inputs.

        Paths mirror the layout mri_gtmpvc writes under the output
        directory (``--o``); conditional outputs are only filled in when
        the corresponding input was set.
        """
        outputs = self.output_spec().get()
        # Get the top-level output directory
        if not isdefined(self.inputs.pvc_dir):
            pvcdir = os.getcwd()
        else:
            pvcdir = os.path.abspath(self.inputs.pvc_dir)
        outputs["pvc_dir"] = pvcdir

        # Assign the output files that always get created
        outputs["ref_file"] = os.path.join(pvcdir, "km.ref.tac.dat")
        outputs["hb_nifti"] = os.path.join(pvcdir, "km.hb.tac.nii.gz")
        outputs["hb_dat"] = os.path.join(pvcdir, "km.hb.tac.dat")
        outputs["nopvc_file"] = os.path.join(pvcdir, "nopvc.nii.gz")
        outputs["gtm_file"] = os.path.join(pvcdir, "gtm.nii.gz")
        outputs["gtm_stats"] = os.path.join(pvcdir, "gtm.stats.dat")
        outputs["reg_pet2anat"] = os.path.join(pvcdir, "aux", "bbpet2anat.lta")
        outputs["reg_anat2pet"] = os.path.join(pvcdir, "aux", "anat2bbpet.lta")

        # Assign the conditional outputs
        if self.inputs.save_input:
            outputs["input_file"] = os.path.join(pvcdir, "input.nii.gz")
        if self.inputs.save_yhat0:
            outputs["yhat0"] = os.path.join(pvcdir, "yhat0.nii.gz")
        if self.inputs.save_yhat:
            outputs["yhat"] = os.path.join(pvcdir, "yhat.nii.gz")
        if self.inputs.save_yhat_full_fov:
            outputs["yhat_full_fov"] = os.path.join(pvcdir, "yhat.fullfov.nii.gz")
        if self.inputs.save_yhat_with_noise:
            outputs["yhat_with_noise"] = os.path.join(pvcdir, "yhat.nii.gz")
        # NOTE(review): an explicit mgx threshold of 0.0 would be falsy and
        # skip these outputs — confirm whether 0.0 is a meaningful value.
        if self.inputs.mgx:
            outputs["mgx_ctxgm"] = os.path.join(pvcdir, "mgx.ctxgm.nii.gz")
            outputs["mgx_subctxgm"] = os.path.join(pvcdir, "mgx.subctxgm.nii.gz")
            outputs["mgx_gm"] = os.path.join(pvcdir, "mgx.gm.nii.gz")
        if self.inputs.rbv:
            outputs["rbv"] = os.path.join(pvcdir, "rbv.nii.gz")
            outputs["reg_rbvpet2anat"] = os.path.join(pvcdir, "aux", "rbv2anat.lta")
            outputs["reg_anat2rbvpet"] = os.path.join(pvcdir, "aux", "anat2rbv.lta")
        # opt.params.dat is written when adaptive GTM optimization (--opt) is
        # requested.  The input spec declares no `opt` trait, so the previous
        # `self.inputs.opt` raised AttributeError; `optimization_schema` is
        # the trait that emits --opt.
        if isdefined(self.inputs.optimization_schema):
            outputs["opt_params"] = os.path.join(pvcdir, "aux", "opt.params.dat")

        return outputs


class MRTMInputSpec(GLMFitInputSpec):
    # Redeclare mrtm1 from GLMFitInputSpec as mandatory for this interface.
    mrtm1 = traits.Tuple(
        File(exists=True),
        File(exists=True),
        mandatory=True,
        argstr="--mrtm1 %s %s",
        desc="RefTac TimeSec : perform MRTM1 kinetic modeling",
    )


class MRTM(GLMFit):
    """Perform MRTM1 kinetic modeling.

    Examples
    --------
    >>> mrtm = MRTM()
    >>> mrtm.inputs.in_file = 'tac.nii'
    >>> mrtm.inputs.mrtm1 = ('ref_tac.dat', 'timing.dat')
    >>> mrtm.inputs.glm_dir = 'mrtm'
    >>> mrtm.cmdline
    'mri_glmfit --glmdir mrtm --y tac.nii --mrtm1 ref_tac.dat timing.dat'
    """

    input_spec = MRTMInputSpec


class MRTM2InputSpec(GLMFitInputSpec):
    # Redeclare mrtm2 from GLMFitInputSpec as mandatory for this interface.
    mrtm2 = traits.Tuple(
        File(exists=True),
        File(exists=True),
        traits.Float,
        mandatory=True,
        argstr="--mrtm2 %s %s %f",
        desc="RefTac TimeSec k2prime : perform MRTM2 kinetic modeling",
    )


class MRTM2(GLMFit):
    """Perform MRTM2 kinetic modeling.

    Examples
    --------
    >>> mrtm2 = MRTM2()
    >>> mrtm2.inputs.in_file = 'tac.nii'
    >>> mrtm2.inputs.mrtm2 = ('ref_tac.dat', 'timing.dat', 0.07872)
    >>> mrtm2.inputs.glm_dir = 'mrtm2'
    >>> mrtm2.cmdline
    'mri_glmfit --glmdir mrtm2 --y tac.nii --mrtm2 ref_tac.dat timing.dat 0.078720'
    """

    input_spec = MRTM2InputSpec


class LoganRefInputSpec(GLMFitInputSpec):
    # Redeclare logan from GLMFitInputSpec as mandatory for this interface.
    logan = traits.Tuple(
        File(exists=True),
        File(exists=True),
        traits.Float,
        mandatory=True,
        argstr="--logan %s %s %g",
        desc="RefTac TimeSec tstar : perform Logan kinetic modeling",
    )


class LoganRef(GLMFit):
    """Perform Logan reference kinetic modeling.

    Examples
    --------
    >>> logan = LoganRef()
    >>> logan.inputs.in_file = 'tac.nii'
    >>> logan.inputs.logan = ('ref_tac.dat', 'timing.dat', 2600)
    >>> logan.inputs.glm_dir = 'logan'
    >>> logan.cmdline
    'mri_glmfit --glmdir logan --y tac.nii --logan ref_tac.dat timing.dat 2600'
    """

    input_spec = LoganRefInputSpec
+ Examples + -------- + >>> logan = LoganRef() + >>> logan.inputs.in_file = 'tac.nii' + >>> logan.inputs.logan = ('ref_tac.dat', 'timing.dat', 2600) + >>> logan.inputs.glm_dir = 'logan' + >>> logan.cmdline + 'mri_glmfit --glmdir logan --y tac.nii --logan ref_tac.dat timing.dat 2600' + """ + + input_spec = LoganRefInputSpec diff --git a/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py b/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py index 21c41eb691..a950caa7af 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py @@ -89,10 +89,27 @@ def test_GLMFit_inputs(): extensions=None, xor=["cortex"], ), + logan=dict( + argstr="--logan %s %s %f", + ), mask_file=dict( argstr="--mask %s", extensions=None, ), + mrtm1=dict( + argstr="--mrtm1 %s %s", + ), + mrtm2=dict( + argstr="--mrtm2 %s %s %f", + ), + nii=dict( + argstr="--nii", + xor=["nii", "nii_gz"], + ), + nii_gz=dict( + argstr="--nii.gz", + xor=["nii", "nii_gz"], + ), no_contrast_ok=dict( argstr="--no-contrasts-ok", ), @@ -208,6 +225,9 @@ def test_GLMFit_outputs(): beta_file=dict( extensions=None, ), + bp_file=dict( + extensions=None, + ), dof_file=dict( extensions=None, ), @@ -233,6 +253,9 @@ def test_GLMFit_outputs(): gamma_file=dict(), gamma_var_file=dict(), glm_dir=dict(), + k2p_file=dict( + extensions=None, + ), mask_file=dict( extensions=None, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_GTMPVC.py b/nipype/interfaces/freesurfer/tests/test_auto_GTMPVC.py new file mode 100644 index 0000000000..7f7af1cdb4 --- /dev/null +++ b/nipype/interfaces/freesurfer/tests/test_auto_GTMPVC.py @@ -0,0 +1,276 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..petsurfer import GTMPVC + + +def test_GTMPVC_inputs(): + input_map = dict( + X=dict( + argstr="--X", + ), + X0=dict( + argstr="--X0", + ), + args=dict( + argstr="%s", + ), + auto_mask=dict( + argstr="--auto-mask %f %f", + ), + beta=dict( + argstr="--beta", + ), + 
color_table_file=dict( + argstr="--ctab %s", + extensions=None, + xor=("color_table_file", "default_color_table"), + ), + contrast=dict( + argstr="--C %s...", + ), + default_color_table=dict( + argstr="--ctab-default", + xor=("color_table_file", "default_color_table"), + ), + default_seg_merge=dict( + argstr="--default-seg-merge", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + frame=dict( + argstr="--frame %i", + ), + in_file=dict( + argstr="--i %s", + copyfile=False, + extensions=None, + mandatory=True, + ), + km_hb=dict( + argstr="--km-hb %s...", + ), + km_ref=dict( + argstr="--km-ref %s...", + ), + lat=dict( + argstr="--lat", + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + ), + merge_cblum_wm_gyri=dict( + argstr="--merge-cblum-wm-gyri", + ), + merge_hypos=dict( + argstr="--merge-hypos", + ), + mg=dict( + argstr="--mg %g %s", + ), + mg_ref_cerebral_wm=dict( + argstr="--mg-ref-cerebral-wm", + ), + mg_ref_lobes_wm=dict( + argstr="--mg-ref-lobes-wm", + ), + mgx=dict( + argstr="--mgx %f", + ), + no_pvc=dict( + argstr="--no-pvc", + ), + no_reduce_fov=dict( + argstr="--no-reduce-fov", + ), + no_rescale=dict( + argstr="--no-rescale", + ), + no_tfe=dict( + argstr="--no-tfe", + ), + num_threads=dict( + argstr="--threads %i", + ), + opt_brain=dict( + argstr="--opt-brain", + ), + opt_seg_merge=dict( + argstr="--opt-seg-merge", + ), + opt_tol=dict( + argstr="--opt-tol %i %f %f", + ), + optimization_schema=dict( + argstr="--opt %s", + ), + psf=dict( + argstr="--psf %f", + ), + psf_col=dict( + argstr="--psf-col %f", + ), + psf_row=dict( + argstr="--psf-row %f", + ), + psf_slice=dict( + argstr="--psf-slice %f", + ), + pvc_dir=dict( + argstr="--o %s", + genfile=True, + ), + rbv=dict( + argstr="--rbv", + requires=["subjects_dir"], + ), + rbv_res=dict( + argstr="--rbv-res %f", + ), + reduce_fox_eqodd=dict( + argstr="--reduce-fox-eqodd", + ), + reg_file=dict( + argstr="--reg %s", + extensions=None, + mandatory=True, + xor=["reg_file", "regheader", 
"reg_identity"], + ), + reg_identity=dict( + argstr="--reg-identity", + mandatory=True, + xor=["reg_file", "regheader", "reg_identity"], + ), + regheader=dict( + argstr="--regheader", + mandatory=True, + xor=["reg_file", "regheader", "reg_identity"], + ), + replace=dict( + argstr="--replace %i %i", + ), + rescale=dict( + argstr="--rescale %s...", + ), + save_eres=dict( + argstr="--save-eres", + ), + save_input=dict( + argstr="--save-input", + ), + save_yhat=dict( + argstr="--save-yhat", + xor=["save_yhat_with_noise"], + ), + save_yhat0=dict( + argstr="--save-yhat0", + ), + save_yhat_full_fov=dict( + argstr="--save-yhat-full-fov", + ), + save_yhat_with_noise=dict( + argstr="--save-yhat-with-noise %i %i", + xor=["save_yhat"], + ), + scale_refval=dict( + argstr="--scale-refval %f", + ), + segmentation=dict( + argstr="--seg %s", + extensions=None, + mandatory=True, + ), + steady_state_params=dict( + argstr="--ss %f %f %f", + ), + subjects_dir=dict(), + tissue_fraction_resolution=dict( + argstr="--segpvfres %f", + ), + tt_reduce=dict( + argstr="--tt-reduce", + ), + tt_update=dict( + argstr="--tt-update", + ), + y=dict( + argstr="--y", + ), + ) + inputs = GTMPVC.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_GTMPVC_outputs(): + output_map = dict( + gtm_file=dict( + extensions=None, + ), + gtm_stats=dict( + extensions=None, + ), + hb_dat=dict( + extensions=None, + ), + hb_nifti=dict( + extensions=None, + ), + input_file=dict( + extensions=None, + ), + mgx_ctxgm=dict( + extensions=None, + ), + mgx_gm=dict( + extensions=None, + ), + mgx_subctxgm=dict( + extensions=None, + ), + nopvc_file=dict( + extensions=None, + ), + opt_params=dict( + extensions=None, + ), + pvc_dir=dict(), + rbv=dict( + extensions=None, + ), + ref_file=dict( + extensions=None, + ), + reg_anat2pet=dict( + extensions=None, + ), + reg_anat2rbvpet=dict( + extensions=None, + 
), + reg_pet2anat=dict( + extensions=None, + ), + reg_rbvpet2anat=dict( + extensions=None, + ), + yhat=dict( + extensions=None, + ), + yhat0=dict( + extensions=None, + ), + yhat_full_fov=dict( + extensions=None, + ), + yhat_with_noise=dict( + extensions=None, + ), + ) + outputs = GTMPVC.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/freesurfer/tests/test_auto_GTMSeg.py b/nipype/interfaces/freesurfer/tests/test_auto_GTMSeg.py new file mode 100644 index 0000000000..026cc33b77 --- /dev/null +++ b/nipype/interfaces/freesurfer/tests/test_auto_GTMSeg.py @@ -0,0 +1,88 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..petsurfer import GTMSeg + + +def test_GTMSeg_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + colortable=dict( + argstr="--ctab %s", + extensions=None, + ), + ctx_annot=dict( + argstr="--ctx-annot %s %i %i", + ), + dmax=dict( + argstr="--dmax %f", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + head=dict( + argstr="--head %s", + ), + keep_cc=dict( + argstr="--keep-cc", + ), + keep_hypo=dict( + argstr="--keep-hypo", + ), + no_pons=dict( + argstr="--no-pons", + ), + no_seg_stats=dict( + argstr="--no-seg-stats", + ), + no_vermis=dict( + argstr="--no-vermis", + ), + out_file=dict( + argstr="--o %s", + extensions=None, + usedefault=True, + ), + output_upsampling_factor=dict( + argstr="--output-usf %i", + ), + subject_id=dict( + argstr="--s %s", + mandatory=True, + ), + subjects_dir=dict(), + subseg_cblum_wm=dict( + argstr="--subseg-cblum-wm", + ), + subsegwm=dict( + argstr="--subsegwm", + ), + upsampling_factor=dict( + argstr="--usf %i", + ), + wm_annot=dict( + argstr="--wm-annot %s %i %i", + ), + xcerseg=dict( + argstr="--xcerseg", + ), + ) + inputs = GTMSeg.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in 
list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_GTMSeg_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = GTMSeg.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/freesurfer/tests/test_auto_LoganRef.py b/nipype/interfaces/freesurfer/tests/test_auto_LoganRef.py new file mode 100644 index 0000000000..c66f460533 --- /dev/null +++ b/nipype/interfaces/freesurfer/tests/test_auto_LoganRef.py @@ -0,0 +1,278 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..petsurfer import LoganRef + + +def test_LoganRef_inputs(): + input_map = dict( + allow_ill_cond=dict( + argstr="--illcond", + ), + allow_repeated_subjects=dict( + argstr="--allowsubjrep", + ), + args=dict( + argstr="%s", + ), + calc_AR1=dict( + argstr="--tar1", + ), + check_opts=dict( + argstr="--checkopts", + ), + compute_log_y=dict( + argstr="--logy", + ), + contrast=dict( + argstr="--C %s...", + ), + cortex=dict( + argstr="--cortex", + xor=["label_file"], + ), + debug=dict( + argstr="--debug", + ), + design=dict( + argstr="--X %s", + extensions=None, + xor=("fsgd", "design", "one_sample"), + ), + diag=dict( + argstr="--diag %d", + ), + diag_cluster=dict( + argstr="--diag-cluster", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + fixed_fx_dof=dict( + argstr="--ffxdof %d", + xor=["fixed_fx_dof_file"], + ), + fixed_fx_dof_file=dict( + argstr="--ffxdofdat %d", + extensions=None, + xor=["fixed_fx_dof"], + ), + fixed_fx_var=dict( + argstr="--yffxvar %s", + extensions=None, + ), + force_perm=dict( + argstr="--perm-force", + ), + fsgd=dict( + argstr="--fsgd %s %s", + xor=("fsgd", "design", "one_sample"), + ), + fwhm=dict( + argstr="--fwhm %f", + ), + glm_dir=dict( + argstr="--glmdir %s", + genfile=True, + ), + hemi=dict(), + in_file=dict( + argstr="--y %s", + 
copyfile=False, + extensions=None, + mandatory=True, + ), + invert_mask=dict( + argstr="--mask-inv", + ), + label_file=dict( + argstr="--label %s", + extensions=None, + xor=["cortex"], + ), + logan=dict( + argstr="--logan %s %s %g", + mandatory=True, + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + ), + mrtm1=dict( + argstr="--mrtm1 %s %s", + ), + mrtm2=dict( + argstr="--mrtm2 %s %s %f", + ), + nii=dict( + argstr="--nii", + xor=["nii", "nii_gz"], + ), + nii_gz=dict( + argstr="--nii.gz", + xor=["nii", "nii_gz"], + ), + no_contrast_ok=dict( + argstr="--no-contrasts-ok", + ), + no_est_fwhm=dict( + argstr="--no-est-fwhm", + ), + no_mask_smooth=dict( + argstr="--no-mask-smooth", + ), + no_prune=dict( + argstr="--no-prune", + xor=["prunethresh"], + ), + one_sample=dict( + argstr="--osgm", + xor=("one_sample", "fsgd", "design", "contrast"), + ), + pca=dict( + argstr="--pca", + ), + per_voxel_reg=dict( + argstr="--pvr %s...", + ), + profile=dict( + argstr="--profile %d", + ), + prune=dict( + argstr="--prune", + ), + prune_thresh=dict( + argstr="--prune_thr %f", + xor=["noprune"], + ), + resynth_test=dict( + argstr="--resynthtest %d", + ), + save_cond=dict( + argstr="--save-cond", + ), + save_estimate=dict( + argstr="--yhat-save", + ), + save_res_corr_mtx=dict( + argstr="--eres-scm", + ), + save_residual=dict( + argstr="--eres-save", + ), + seed=dict( + argstr="--seed %d", + ), + self_reg=dict( + argstr="--selfreg %d %d %d", + ), + sim_done_file=dict( + argstr="--sim-done %s", + extensions=None, + ), + sim_sign=dict( + argstr="--sim-sign %s", + ), + simulation=dict( + argstr="--sim %s %d %f %s", + ), + subject_id=dict(), + subjects_dir=dict(), + surf=dict( + argstr="--surf %s %s %s", + requires=["subject_id", "hemi"], + ), + surf_geo=dict( + usedefault=True, + ), + synth=dict( + argstr="--synth", + ), + uniform=dict( + argstr="--uniform %f %f", + ), + var_fwhm=dict( + argstr="--var-fwhm %f", + ), + vox_dump=dict( + argstr="--voxdump %d %d %d", + ), + 
weight_file=dict( + extensions=None, + xor=["weighted_ls"], + ), + weight_inv=dict( + argstr="--w-inv", + xor=["weighted_ls"], + ), + weight_sqrt=dict( + argstr="--w-sqrt", + xor=["weighted_ls"], + ), + weighted_ls=dict( + argstr="--wls %s", + extensions=None, + xor=("weight_file", "weight_inv", "weight_sqrt"), + ), + ) + inputs = LoganRef.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_LoganRef_outputs(): + output_map = dict( + beta_file=dict( + extensions=None, + ), + bp_file=dict( + extensions=None, + ), + dof_file=dict( + extensions=None, + ), + error_file=dict( + extensions=None, + ), + error_stddev_file=dict( + extensions=None, + ), + error_var_file=dict( + extensions=None, + ), + estimate_file=dict( + extensions=None, + ), + frame_eigenvectors=dict( + extensions=None, + ), + ftest_file=dict(), + fwhm_file=dict( + extensions=None, + ), + gamma_file=dict(), + gamma_var_file=dict(), + glm_dir=dict(), + k2p_file=dict( + extensions=None, + ), + mask_file=dict( + extensions=None, + ), + sig_file=dict(), + singular_values=dict( + extensions=None, + ), + spatial_eigenvectors=dict( + extensions=None, + ), + svd_stats_file=dict( + extensions=None, + ), + ) + outputs = LoganRef.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRTM.py b/nipype/interfaces/freesurfer/tests/test_auto_MRTM.py new file mode 100644 index 0000000000..18e1dd6961 --- /dev/null +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRTM.py @@ -0,0 +1,278 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..petsurfer import MRTM + + +def test_MRTM_inputs(): + input_map = dict( + allow_ill_cond=dict( + argstr="--illcond", + ), + allow_repeated_subjects=dict( + 
argstr="--allowsubjrep", + ), + args=dict( + argstr="%s", + ), + calc_AR1=dict( + argstr="--tar1", + ), + check_opts=dict( + argstr="--checkopts", + ), + compute_log_y=dict( + argstr="--logy", + ), + contrast=dict( + argstr="--C %s...", + ), + cortex=dict( + argstr="--cortex", + xor=["label_file"], + ), + debug=dict( + argstr="--debug", + ), + design=dict( + argstr="--X %s", + extensions=None, + xor=("fsgd", "design", "one_sample"), + ), + diag=dict( + argstr="--diag %d", + ), + diag_cluster=dict( + argstr="--diag-cluster", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + fixed_fx_dof=dict( + argstr="--ffxdof %d", + xor=["fixed_fx_dof_file"], + ), + fixed_fx_dof_file=dict( + argstr="--ffxdofdat %d", + extensions=None, + xor=["fixed_fx_dof"], + ), + fixed_fx_var=dict( + argstr="--yffxvar %s", + extensions=None, + ), + force_perm=dict( + argstr="--perm-force", + ), + fsgd=dict( + argstr="--fsgd %s %s", + xor=("fsgd", "design", "one_sample"), + ), + fwhm=dict( + argstr="--fwhm %f", + ), + glm_dir=dict( + argstr="--glmdir %s", + genfile=True, + ), + hemi=dict(), + in_file=dict( + argstr="--y %s", + copyfile=False, + extensions=None, + mandatory=True, + ), + invert_mask=dict( + argstr="--mask-inv", + ), + label_file=dict( + argstr="--label %s", + extensions=None, + xor=["cortex"], + ), + logan=dict( + argstr="--logan %s %s %f", + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + ), + mrtm1=dict( + argstr="--mrtm1 %s %s", + mandatory=True, + ), + mrtm2=dict( + argstr="--mrtm2 %s %s %f", + ), + nii=dict( + argstr="--nii", + xor=["nii", "nii_gz"], + ), + nii_gz=dict( + argstr="--nii.gz", + xor=["nii", "nii_gz"], + ), + no_contrast_ok=dict( + argstr="--no-contrasts-ok", + ), + no_est_fwhm=dict( + argstr="--no-est-fwhm", + ), + no_mask_smooth=dict( + argstr="--no-mask-smooth", + ), + no_prune=dict( + argstr="--no-prune", + xor=["prunethresh"], + ), + one_sample=dict( + argstr="--osgm", + xor=("one_sample", "fsgd", "design", "contrast"), + ), + 
pca=dict( + argstr="--pca", + ), + per_voxel_reg=dict( + argstr="--pvr %s...", + ), + profile=dict( + argstr="--profile %d", + ), + prune=dict( + argstr="--prune", + ), + prune_thresh=dict( + argstr="--prune_thr %f", + xor=["noprune"], + ), + resynth_test=dict( + argstr="--resynthtest %d", + ), + save_cond=dict( + argstr="--save-cond", + ), + save_estimate=dict( + argstr="--yhat-save", + ), + save_res_corr_mtx=dict( + argstr="--eres-scm", + ), + save_residual=dict( + argstr="--eres-save", + ), + seed=dict( + argstr="--seed %d", + ), + self_reg=dict( + argstr="--selfreg %d %d %d", + ), + sim_done_file=dict( + argstr="--sim-done %s", + extensions=None, + ), + sim_sign=dict( + argstr="--sim-sign %s", + ), + simulation=dict( + argstr="--sim %s %d %f %s", + ), + subject_id=dict(), + subjects_dir=dict(), + surf=dict( + argstr="--surf %s %s %s", + requires=["subject_id", "hemi"], + ), + surf_geo=dict( + usedefault=True, + ), + synth=dict( + argstr="--synth", + ), + uniform=dict( + argstr="--uniform %f %f", + ), + var_fwhm=dict( + argstr="--var-fwhm %f", + ), + vox_dump=dict( + argstr="--voxdump %d %d %d", + ), + weight_file=dict( + extensions=None, + xor=["weighted_ls"], + ), + weight_inv=dict( + argstr="--w-inv", + xor=["weighted_ls"], + ), + weight_sqrt=dict( + argstr="--w-sqrt", + xor=["weighted_ls"], + ), + weighted_ls=dict( + argstr="--wls %s", + extensions=None, + xor=("weight_file", "weight_inv", "weight_sqrt"), + ), + ) + inputs = MRTM.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_MRTM_outputs(): + output_map = dict( + beta_file=dict( + extensions=None, + ), + bp_file=dict( + extensions=None, + ), + dof_file=dict( + extensions=None, + ), + error_file=dict( + extensions=None, + ), + error_stddev_file=dict( + extensions=None, + ), + error_var_file=dict( + extensions=None, + ), + estimate_file=dict( + extensions=None, + ), + 
frame_eigenvectors=dict( + extensions=None, + ), + ftest_file=dict(), + fwhm_file=dict( + extensions=None, + ), + gamma_file=dict(), + gamma_var_file=dict(), + glm_dir=dict(), + k2p_file=dict( + extensions=None, + ), + mask_file=dict( + extensions=None, + ), + sig_file=dict(), + singular_values=dict( + extensions=None, + ), + spatial_eigenvectors=dict( + extensions=None, + ), + svd_stats_file=dict( + extensions=None, + ), + ) + outputs = MRTM.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRTM2.py b/nipype/interfaces/freesurfer/tests/test_auto_MRTM2.py new file mode 100644 index 0000000000..71b200a600 --- /dev/null +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRTM2.py @@ -0,0 +1,278 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..petsurfer import MRTM2 + + +def test_MRTM2_inputs(): + input_map = dict( + allow_ill_cond=dict( + argstr="--illcond", + ), + allow_repeated_subjects=dict( + argstr="--allowsubjrep", + ), + args=dict( + argstr="%s", + ), + calc_AR1=dict( + argstr="--tar1", + ), + check_opts=dict( + argstr="--checkopts", + ), + compute_log_y=dict( + argstr="--logy", + ), + contrast=dict( + argstr="--C %s...", + ), + cortex=dict( + argstr="--cortex", + xor=["label_file"], + ), + debug=dict( + argstr="--debug", + ), + design=dict( + argstr="--X %s", + extensions=None, + xor=("fsgd", "design", "one_sample"), + ), + diag=dict( + argstr="--diag %d", + ), + diag_cluster=dict( + argstr="--diag-cluster", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + fixed_fx_dof=dict( + argstr="--ffxdof %d", + xor=["fixed_fx_dof_file"], + ), + fixed_fx_dof_file=dict( + argstr="--ffxdofdat %d", + extensions=None, + xor=["fixed_fx_dof"], + ), + fixed_fx_var=dict( + argstr="--yffxvar %s", + extensions=None, + ), + force_perm=dict( + argstr="--perm-force", + ), + 
fsgd=dict( + argstr="--fsgd %s %s", + xor=("fsgd", "design", "one_sample"), + ), + fwhm=dict( + argstr="--fwhm %f", + ), + glm_dir=dict( + argstr="--glmdir %s", + genfile=True, + ), + hemi=dict(), + in_file=dict( + argstr="--y %s", + copyfile=False, + extensions=None, + mandatory=True, + ), + invert_mask=dict( + argstr="--mask-inv", + ), + label_file=dict( + argstr="--label %s", + extensions=None, + xor=["cortex"], + ), + logan=dict( + argstr="--logan %s %s %f", + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + ), + mrtm1=dict( + argstr="--mrtm1 %s %s", + ), + mrtm2=dict( + argstr="--mrtm2 %s %s %f", + mandatory=True, + ), + nii=dict( + argstr="--nii", + xor=["nii", "nii_gz"], + ), + nii_gz=dict( + argstr="--nii.gz", + xor=["nii", "nii_gz"], + ), + no_contrast_ok=dict( + argstr="--no-contrasts-ok", + ), + no_est_fwhm=dict( + argstr="--no-est-fwhm", + ), + no_mask_smooth=dict( + argstr="--no-mask-smooth", + ), + no_prune=dict( + argstr="--no-prune", + xor=["prunethresh"], + ), + one_sample=dict( + argstr="--osgm", + xor=("one_sample", "fsgd", "design", "contrast"), + ), + pca=dict( + argstr="--pca", + ), + per_voxel_reg=dict( + argstr="--pvr %s...", + ), + profile=dict( + argstr="--profile %d", + ), + prune=dict( + argstr="--prune", + ), + prune_thresh=dict( + argstr="--prune_thr %f", + xor=["noprune"], + ), + resynth_test=dict( + argstr="--resynthtest %d", + ), + save_cond=dict( + argstr="--save-cond", + ), + save_estimate=dict( + argstr="--yhat-save", + ), + save_res_corr_mtx=dict( + argstr="--eres-scm", + ), + save_residual=dict( + argstr="--eres-save", + ), + seed=dict( + argstr="--seed %d", + ), + self_reg=dict( + argstr="--selfreg %d %d %d", + ), + sim_done_file=dict( + argstr="--sim-done %s", + extensions=None, + ), + sim_sign=dict( + argstr="--sim-sign %s", + ), + simulation=dict( + argstr="--sim %s %d %f %s", + ), + subject_id=dict(), + subjects_dir=dict(), + surf=dict( + argstr="--surf %s %s %s", + requires=["subject_id", "hemi"], + ), + 
surf_geo=dict( + usedefault=True, + ), + synth=dict( + argstr="--synth", + ), + uniform=dict( + argstr="--uniform %f %f", + ), + var_fwhm=dict( + argstr="--var-fwhm %f", + ), + vox_dump=dict( + argstr="--voxdump %d %d %d", + ), + weight_file=dict( + extensions=None, + xor=["weighted_ls"], + ), + weight_inv=dict( + argstr="--w-inv", + xor=["weighted_ls"], + ), + weight_sqrt=dict( + argstr="--w-sqrt", + xor=["weighted_ls"], + ), + weighted_ls=dict( + argstr="--wls %s", + extensions=None, + xor=("weight_file", "weight_inv", "weight_sqrt"), + ), + ) + inputs = MRTM2.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_MRTM2_outputs(): + output_map = dict( + beta_file=dict( + extensions=None, + ), + bp_file=dict( + extensions=None, + ), + dof_file=dict( + extensions=None, + ), + error_file=dict( + extensions=None, + ), + error_stddev_file=dict( + extensions=None, + ), + error_var_file=dict( + extensions=None, + ), + estimate_file=dict( + extensions=None, + ), + frame_eigenvectors=dict( + extensions=None, + ), + ftest_file=dict(), + fwhm_file=dict( + extensions=None, + ), + gamma_file=dict(), + gamma_var_file=dict(), + glm_dir=dict(), + k2p_file=dict( + extensions=None, + ), + mask_file=dict( + extensions=None, + ), + sig_file=dict(), + singular_values=dict( + extensions=None, + ), + spatial_eigenvectors=dict( + extensions=None, + ), + svd_stats_file=dict( + extensions=None, + ), + ) + outputs = MRTM2.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py index 533c0a17a9..eb199ddc50 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py +++ 
b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py @@ -89,10 +89,27 @@ def test_OneSampleTTest_inputs(): extensions=None, xor=["cortex"], ), + logan=dict( + argstr="--logan %s %s %f", + ), mask_file=dict( argstr="--mask %s", extensions=None, ), + mrtm1=dict( + argstr="--mrtm1 %s %s", + ), + mrtm2=dict( + argstr="--mrtm2 %s %s %f", + ), + nii=dict( + argstr="--nii", + xor=["nii", "nii_gz"], + ), + nii_gz=dict( + argstr="--nii.gz", + xor=["nii", "nii_gz"], + ), no_contrast_ok=dict( argstr="--no-contrasts-ok", ), @@ -208,6 +225,9 @@ def test_OneSampleTTest_outputs(): beta_file=dict( extensions=None, ), + bp_file=dict( + extensions=None, + ), dof_file=dict( extensions=None, ), @@ -233,6 +253,9 @@ def test_OneSampleTTest_outputs(): gamma_file=dict(), gamma_var_file=dict(), glm_dir=dict(), + k2p_file=dict( + extensions=None, + ), mask_file=dict( extensions=None, ), diff --git a/nipype/interfaces/fsl/__init__.py b/nipype/interfaces/fsl/__init__.py index 1bf8e0ada7..0d2e9664d2 100644 --- a/nipype/interfaces/fsl/__init__.py +++ b/nipype/interfaces/fsl/__init__.py @@ -69,6 +69,8 @@ RobustFOV, CopyGeom, MotionOutliers, + Text2Vest, + Vest2Text, ) from .epi import ( diff --git a/nipype/interfaces/fsl/tests/test_auto_Text2Vest.py b/nipype/interfaces/fsl/tests/test_auto_Text2Vest.py new file mode 100644 index 0000000000..fdc39356a9 --- /dev/null +++ b/nipype/interfaces/fsl/tests/test_auto_Text2Vest.py @@ -0,0 +1,45 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import Text2Vest + + +def test_Text2Vest_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=0, + ), + out_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=1, + ), + output_type=dict(), + ) + inputs = Text2Vest.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in 
list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_Text2Vest_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = Text2Vest.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/fsl/tests/test_auto_Vest2Text.py b/nipype/interfaces/fsl/tests/test_auto_Vest2Text.py new file mode 100644 index 0000000000..2732e95d12 --- /dev/null +++ b/nipype/interfaces/fsl/tests/test_auto_Vest2Text.py @@ -0,0 +1,45 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import Vest2Text + + +def test_Vest2Text_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=0, + ), + out_file=dict( + argstr="%s", + extensions=None, + position=1, + usedefault=True, + ), + output_type=dict(), + ) + inputs = Vest2Text.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_Vest2Text_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = Vest2Text.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index cf9e4c68f0..24ebeec040 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -2834,3 +2834,92 @@ class MotionOutliers(FSLCommand): input_spec = MotionOutliersInputSpec output_spec = MotionOutliersOutputSpec _cmd = "fsl_motion_outliers" + + +class Text2VestInputSpec(FSLCommandInputSpec): + in_file = File( + exists=True, + mandatory=True, + 
desc="plain text file representing your design, contrast, or f-test matrix", + argstr="%s", + position=0, + ) + + out_file = File( + mandatory=True, + desc=( + "file name to store matrix data in the format used by FSL tools" + " (e.g., design.mat, design.con design.fts)" + ), + argstr="%s", + position=1, + ) + + +class Text2VestOutputSpec(TraitedSpec): + out_file = File(desc="matrix data in the format used by FSL tools") + + +class Text2Vest(FSLCommand): + """ + Use FSL `Text2Vest <https://web.mit.edu/fsl_v5.0.10/fsl/doc/wiki/GLM(2f)CreatingDesignMatricesByHand.html>`_ + to convert your plain text design matrix data into the format used by the FSL tools. + + Examples + -------- + >>> from nipype.interfaces.fsl import Text2Vest + >>> t2v = Text2Vest() + >>> t2v.inputs.in_file = "design.txt" + >>> t2v.inputs.out_file = "design.mat" + >>> t2v.cmdline + 'Text2Vest design.txt design.mat' + >>> res = t2v.run() # doctest: +SKIP + """ + + input_spec = Text2VestInputSpec + output_spec = Text2VestOutputSpec + + _cmd = "Text2Vest" + + +class Vest2TextInputSpec(FSLCommandInputSpec): + in_file = File( + exists=True, + mandatory=True, + desc="matrix data stored in the format used by FSL tools", + argstr="%s", + position=0, + ) + + out_file = File( + "design.txt", + usedefault=True, + desc="file name to store text output from matrix", + argstr="%s", + position=1, + ) + + +class Vest2TextOutputSpec(TraitedSpec): + out_file = File(desc="plain text representation of FSL matrix") + + +class Vest2Text(FSLCommand): + """ + Use FSL `Vest2Text <https://web.mit.edu/fsl_v5.0.10/fsl/doc/wiki/GLM(2f)CreatingDesignMatricesByHand.html>`_ + to convert your design.mat, design.con, and design.fts files into plain text. 
+ + Examples + -------- + >>> from nipype.interfaces.fsl import Vest2Text + >>> v2t = Vest2Text() + >>> v2t.inputs.in_file = "design.mat" + >>> v2t.cmdline + 'Vest2Text design.mat design.txt' + >>> res = v2t.run() # doctest: +SKIP + """ + + input_spec = Vest2TextInputSpec + output_spec = Vest2TextOutputSpec + + _cmd = "Vest2Text" diff --git a/nipype/interfaces/mrtrix3/connectivity.py b/nipype/interfaces/mrtrix3/connectivity.py index 308eccd45f..95e3546266 100644 --- a/nipype/interfaces/mrtrix3/connectivity.py +++ b/nipype/interfaces/mrtrix3/connectivity.py @@ -208,9 +208,9 @@ def _parse_inputs(self, skip=None): skip = [] if not isdefined(self.inputs.in_config): - from distutils.spawn import find_executable + from shutil import which - path = find_executable(self._cmd) + path = which(self._cmd) if path is None: path = os.getenv(MRTRIX3_HOME, "/opt/mrtrix3") else: diff --git a/nipype/interfaces/niftyreg/base.py b/nipype/interfaces/niftyreg/base.py index 375a3ada29..f62a92b84a 100644 --- a/nipype/interfaces/niftyreg/base.py +++ b/nipype/interfaces/niftyreg/base.py @@ -15,8 +15,8 @@ See the docstrings of the individual classes for examples. """ -from distutils.version import StrictVersion import os +from packaging.version import Version from ... 
import logging from ..base import CommandLine, CommandLineInputSpec, traits, Undefined, PackageInfo @@ -65,13 +65,13 @@ def __init__(self, required_version=None, **inputs): self.required_version = required_version _version = self.version if _version: - if self._min_version is not None and StrictVersion( - _version - ) < StrictVersion(self._min_version): + if self._min_version is not None and Version(_version) < Version( + self._min_version + ): msg = "A later version of Niftyreg is required (%s < %s)" iflogger.warning(msg, _version, self._min_version) if required_version is not None: - if StrictVersion(_version) != StrictVersion(required_version): + if Version(_version) != Version(required_version): msg = "The version of NiftyReg differs from the required" msg += "(%s != %s)" iflogger.warning(msg, _version, self.required_version) @@ -101,11 +101,11 @@ def check_version(self): _version = self.version if not _version: raise Exception("Niftyreg not found") - if StrictVersion(_version) < StrictVersion(self._min_version): + if Version(_version) < Version(self._min_version): err = "A later version of Niftyreg is required (%s < %s)" raise ValueError(err % (_version, self._min_version)) if self.required_version: - if StrictVersion(_version) != StrictVersion(self.required_version): + if Version(_version) != Version(self.required_version): err = "The version of NiftyReg differs from the required" err += "(%s != %s)" raise ValueError(err % (_version, self.required_version)) diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py index a7ba7f5f34..32aa21bcc6 100644 --- a/nipype/pipeline/engine/utils.py +++ b/nipype/pipeline/engine/utils.py @@ -18,7 +18,7 @@ import numpy as np -from ... import logging, config, LooseVersion +from ... 
import logging, config from ...utils.filemanip import ( indirectory, relpath, @@ -1076,11 +1076,7 @@ def make_field_func(*pair): inode._id += ".%sI" % iterable_prefix # merge the iterated subgraphs - # dj: the behaviour of .copy changes in version 2 - if LooseVersion(nx.__version__) < LooseVersion("2"): - subgraph = graph_in.subgraph(subnodes) - else: - subgraph = graph_in.subgraph(subnodes).copy() + subgraph = graph_in.subgraph(subnodes).copy() graph_in = _merge_graphs( graph_in, subnodes, diff --git a/nipype/testing/data/design.mat b/nipype/testing/data/design.mat index e69de29bb2..5f27af3198 100644 --- a/nipype/testing/data/design.mat +++ b/nipype/testing/data/design.mat @@ -0,0 +1,6 @@ +/NumWaves 3 +/NumPoints 3 +/Matrix +0 0 0 +0 0 0 +0 0 0 diff --git a/nipype/testing/data/design.txt b/nipype/testing/data/design.txt new file mode 100644 index 0000000000..d5de7d6a40 --- /dev/null +++ b/nipype/testing/data/design.txt @@ -0,0 +1,3 @@ +0 0 0 +0 0 0 +0 0 0 diff --git a/nipype/testing/data/gtmseg.mgz b/nipype/testing/data/gtmseg.mgz new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/gtmseg.nii b/nipype/testing/data/gtmseg.nii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/ref_tac.dat b/nipype/testing/data/ref_tac.dat new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/sub-01_ses-baseline_pet.nii.gz b/nipype/testing/data/sub-01_ses-baseline_pet.nii.gz new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/sub-01_ses-baseline_pet_mean_reg.lta b/nipype/testing/data/sub-01_ses-baseline_pet_mean_reg.lta new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/tac.nii b/nipype/testing/data/tac.nii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/timing.dat b/nipype/testing/data/timing.dat new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/utils/config.py 
b/nipype/utils/config.py index e7020eb30d..3106bd4c8c 100644 --- a/nipype/utils/config.py +++ b/nipype/utils/config.py @@ -14,7 +14,7 @@ import errno import atexit from warnings import warn -from distutils.version import LooseVersion +from nipype.external.version import LooseVersion import configparser import numpy as np diff --git a/nipype/utils/misc.py b/nipype/utils/misc.py index 6b106da952..ba8687110c 100644 --- a/nipype/utils/misc.py +++ b/nipype/utils/misc.py @@ -9,7 +9,7 @@ from collections.abc import Iterator from warnings import warn -from distutils.version import LooseVersion +from nipype.external.version import LooseVersion import numpy as np @@ -145,7 +145,7 @@ def package_check( packages. Default is *Nipype*. checker : object, optional The class that will perform the version checking. Default is - distutils.version.LooseVersion. + nipype.external.version.LooseVersion. exc_failed_import : Exception, optional Class of the exception to be thrown if import failed. exc_failed_check : Exception, optional diff --git a/tools/toollib.py b/tools/toollib.py deleted file mode 100644 index 77d864f142..0000000000 --- a/tools/toollib.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -"""Various utilities common to IPython release and maintenance tools. 
-""" - -from builtins import map - -# Library imports -import os -import sys - -from subprocess import Popen, PIPE, CalledProcessError, check_call - -from distutils.dir_util import remove_tree - -# Useful shorthands -pjoin = os.path.join -cd = os.chdir - -# Utility functions - -# ----------------------------------------------------------------------------- -# Functions -# ----------------------------------------------------------------------------- - - -def sh(cmd): - """Execute command in a subshell, return status code.""" - return check_call(cmd, shell=True) - - -def compile_tree(): - """Compile all Python files below current directory.""" - vstr = ".".join(map(str, sys.version_info[:2])) - stat = os.system("%s %s/lib/python%s/compileall.py ." % (sys.executable, sys.prefix, vstr)) - if stat: - msg = "*** ERROR: Some Python files in tree do NOT compile! ***\n" - msg += "See messages above for the actual file that produced it.\n" - raise SystemExit(msg)