diff --git a/.github/workflows/contrib.yml b/.github/workflows/contrib.yml deleted file mode 100644 index 6b3ef96f0c..0000000000 --- a/.github/workflows/contrib.yml +++ /dev/null @@ -1,84 +0,0 @@ -name: Contribution checks - -# This checks validate contributions meet baseline checks -# -# * specs - Ensure make - -on: - push: - branches: - - master - - maint/* - pull_request: - branches: - - master - - maint/* - -defaults: - run: - shell: bash - -concurrency: - group: contrib-${{ github.ref }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - stable: - # Check each OS, all supported Python, minimum versions and latest releases - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: ["ubuntu-latest"] - python-version: ["3.12"] - nipype-extras: ["dev"] - check: ["specs", "style"] - env: - DEPENDS: "" - CHECK_TYPE: ${{ matrix.check }} - NIPYPE_EXTRAS: ${{ matrix.nipype-extras }} - EXTRA_PIP_FLAGS: "" - INSTALL_DEB_DEPENDENCIES: false - INSTALL_TYPE: pip - CI_SKIP_TEST: 1 - - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - fetch-depth: 0 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Display Python version - run: python -c "import sys; print(sys.version)" - - name: Create virtual environment - run: tools/ci/create_venv.sh - - name: Build archive - run: | - source tools/ci/build_archive.sh - echo "ARCHIVE=$ARCHIVE" >> $GITHUB_ENV - - name: Install Debian dependencies - run: tools/ci/install_deb_dependencies.sh - if: ${{ matrix.os == 'ubuntu-18.04' }} - - name: Install dependencies - run: tools/ci/install_dependencies.sh - - name: Install Nipype - run: tools/ci/install.sh - - name: Run tests - run: tools/ci/check.sh - if: ${{ matrix.check != 'skiptests' }} - - uses: codecov/codecov-action@v4 - with: - file: coverage.xml - token: ${{ secrets.CODECOV_TOKEN }} - if: ${{ always() }} - - name: Upload pytest test results - uses: actions/upload-artifact@v4 - with: - name: pytest-results-${{ matrix.os }}-${{ matrix.python-version }} - path: test-results.xml - if: ${{ always() && matrix.check == 'test' }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 41776bc188..ee5786af8f 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,22 +1,11 @@ -name: Stable tests - -# This file tests the claimed support range of nipype including -# -# * Operating systems: Linux, OSX -# * Dependencies: minimum requirements, optional requirements -# * Installation methods: setup.py, sdist, wheel, archive +name: Tox on: push: - branches: - - master - - maint/* - tags: - - "*" + branches: [ master, main, 'maint/*' ] + tags: [ '*' ] pull_request: - branches: - - master - - maint/* + branches: [ master, main, 'maint/*' ] schedule: # 8am EST / 9am EDT Mondays - cron: "0 13 * * 1" @@ -26,27 +15,28 @@ defaults: shell: bash concurrency: - group: tests-${{ github.ref }} + group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true -permissions: {} +permissions: + contents: read + +env: + # Force tox and pytest to use color + FORCE_COLOR: true + + jobs: build: - permissions: - contents: read # to fetch code (actions/checkout) - runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-python@v5 - with: - python-version: 3 - - run: pip install --upgrade build twine - - name: Build sdist and wheel - run: python -m build - - run: twine check dist/* + 
- name: Install the latest version of uv + uses: astral-sh/setup-uv@v3 + - run: uv build + - run: uvx twine check dist/* - uses: actions/upload-artifact@v4 with: name: dist @@ -82,72 +72,52 @@ jobs: - name: Run tests run: pytest --doctest-modules -v --pyargs nipype - stable: + test: # Check each OS, all supported Python, minimum versions and latest releases - permissions: - contents: read # to fetch code (actions/checkout) - runs-on: ${{ matrix.os }} strategy: matrix: - os: ["ubuntu-22.04"] - python-version: ["3.9", "3.10", "3.11", "3.12"] - check: ["test"] - pip-flags: [""] - depends: ["REQUIREMENTS"] - deb-depends: [false] - nipype-extras: ["doc,tests,profiler"] + os: ["ubuntu-latest"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + dependencies: [none, full, pre] include: - - os: ubuntu-22.04 + - os: ubuntu-latest python-version: "3.9" - check: test - pip-flags: "" - depends: REQUIREMENTS - deb-depends: true - nipype-extras: doc,tests,profiler,duecredit,ssh - - os: ubuntu-20.04 - python-version: "3.9" - check: test - pip-flags: "" - depends: REQUIREMENTS - deb-depends: true - nipype-extras: doc,tests,nipy,profiler,duecredit,ssh + dependencies: min + exclude: + # Skip some intermediate versions for full tests + - python-version: "3.10" + dependencies: full + - python-version: "3.11" + dependencies: full + # Do not test pre-releases for versions out of SPEC0 + - python-version: "3.9" + dependencies: pre + - python-version: "3.10" + dependencies: pre + env: - DEPENDS: ${{ matrix.depends }} - CHECK_TYPE: ${{ matrix.check }} - EXTRA_PIP_FLAGS: ${{ matrix.pip-flags }} - INSTALL_DEB_DEPENDENCIES: ${{ matrix.deb-depends }} - NIPYPE_EXTRAS: ${{ matrix.nipype-extras }} - INSTALL_TYPE: pip - CI_SKIP_TEST: 1 + DEPENDS: ${{ matrix.dependencies }} steps: - uses: actions/checkout@v4 + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Display Python version run: python -c "import sys; print(sys.version)" - - name: Create virtual environment - run: tools/ci/create_venv.sh - - name: Build archive + - name: Install tox run: | - source tools/ci/build_archive.sh - echo "ARCHIVE=$ARCHIVE" >> $GITHUB_ENV - - name: Install Debian dependencies - run: tools/ci/install_deb_dependencies.sh - if: ${{ matrix.os == 'ubuntu-latest' }} - - name: Install dependencies - run: tools/ci/install_dependencies.sh - - name: Install Nipype - run: tools/ci/install.sh - - name: Run tests - run: tools/ci/check.sh - if: ${{ matrix.check != 'skiptests' }} + uv tool install tox --with=tox-uv --with=tox-gh-actions + - name: Show tox config + run: tox c + - name: Run tox + run: tox -v --exit-and-dump-after 1200 - uses: codecov/codecov-action@v4 with: - file: coverage.xml token: ${{ secrets.CODECOV_TOKEN }} if: ${{ always() }} - name: Upload pytest test results @@ -160,7 +130,7 @@ jobs: publish: runs-on: ubuntu-latest environment: "Package deployment" - needs: [stable, test-package] + needs: [test, test-package] if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') steps: - uses: actions/download-artifact@v4 @@ -171,3 +141,21 @@ jobs: with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} + + checks: + runs-on: 'ubuntu-latest' + continue-on-error: true + strategy: + matrix: + check: ['specs', 'style'] + + steps: + - uses: actions/checkout@v4 + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v3 + - name: Show tox config + 
run: uvx tox c + - name: Show tox config (this call) + run: uvx tox c -e ${{ matrix.check }} + - name: Run check + run: uvx tox -e ${{ matrix.check }} diff --git a/.mailmap b/.mailmap index 4c5bd5de4d..35ccbf89d2 100644 --- a/.mailmap +++ b/.mailmap @@ -107,6 +107,7 @@ Joerg Stadler Joerg Stadler John A. Lee John A. Lee +Jon Cluce Joke Durnez Jordi Huguet Josh Warner diff --git a/.zenodo.json b/.zenodo.json index 5ddb9a71dd..3e2c2be6f6 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -285,6 +285,11 @@ { "name": "Mordom, David" }, + { + "affiliation": "Child Mind Institute", + "name": "Cluce, Jon", + "orcid": "0000-0001-7590-5806" + }, { "affiliation": "ARAMIS LAB, Brain and Spine Institute (ICM), Paris, France.", "name": "Guillon, Jérémy", diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst index b150eece35..79b583c871 100644 --- a/doc/changelog/1.X.X-changelog.rst +++ b/doc/changelog/1.X.X-changelog.rst @@ -1,8 +1,21 @@ +1.9.1 (November 19, 2024) +========================= + +Bug fix release in the 1.9.x series. + +This release adds support for Numpy 2 and Python 3.13. + + * FIX: Restore generate_gantt_chart functionality (https://github.com/nipy/nipype/pull/3290) + * FIX: Address numpy and traits deprecations (https://github.com/nipy/nipype/pull/3699) + * FIX: `ts_Z_corr` → `ts_wb_Z` (https://github.com/nipy/nipype/pull/3697) + * ENH: Remove unused and recently unsupported antsRegistration flag (https://github.com/nipy/nipype/pull/3695) + * MAINT: Bump codecov/codecov-action from 4 to 5 (https://github.com/nipy/nipype/pull/3698) + 1.9.0 (October 31, 2024) ======================== -New feature release in the 1.9.0 series. +New feature release in the 1.9.x series. * FIX: Remove exists=True from fsl.MotionOutliers output that might not exist (https://github.com/nipy/nipype/pull/1428) * FIX: Improve evaluate_connect_function errors across Python versions (https://github.com/nipy/nipype/pull/3655) diff --git a/doc/interfaces.rst b/doc/interfaces.rst index e7b9dca2f4..4a8714e630 100644 --- a/doc/interfaces.rst +++ b/doc/interfaces.rst @@ -8,7 +8,7 @@ Interfaces and Workflows :Release: |version| :Date: |today| -Previous versions: `1.8.6 `_ `1.8.5 `_ +Previous versions: `1.9.0 `_ `1.8.6 `_ Workflows --------- diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 157d1e48d7..5e3588f4fc 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -188,7 +188,7 @@ def _run_interface(self, runtime): if self.inputs.save_std: out_file = self._gen_fname("dvars_std", ext="tsv") - np.savetxt(out_file, dvars[0], fmt=b"%0.6f") + np.savetxt(out_file, dvars[0], fmt="%0.6f") self._results["out_std"] = out_file if self.inputs.save_plot: @@ -228,7 +228,7 @@ def _run_interface(self, runtime): if self.inputs.save_vxstd: out_file = self._gen_fname("dvars_vxstd", ext="tsv") - np.savetxt(out_file, dvars[2], fmt=b"%0.6f") + np.savetxt(out_file, dvars[2], fmt="%0.6f") self._results["out_vxstd"] = out_file if self.inputs.save_plot: @@ -251,8 +251,8 @@ def _run_interface(self, runtime): np.savetxt( out_file, np.vstack(dvars).T, - fmt=b"%0.8f", - delimiter=b"\t", + fmt="%0.8f", + delimiter="\t", header="std DVARS\tnon-std DVARS\tvx-wise std DVARS", comments="", ) @@ -689,7 +689,7 @@ def _run_interface(self, runtime): np.savetxt( components_file, components, - fmt=b"%.10f", + fmt="%.10f", delimiter="\t", header="\t".join(components_header), comments="", @@ -729,7 +729,7 @@ def _run_interface(self, runtime): np.savetxt( 
self._results["pre_filter_file"], filter_basis, - fmt=b"%.10f", + fmt="%.10f", delimiter="\t", header="\t".join(header), comments="", diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index ff867ae26c..65aae2ef1c 100644 --- a/nipype/algorithms/rapidart.py +++ b/nipype/algorithms/rapidart.py @@ -189,7 +189,8 @@ class ArtifactDetectInputSpec(BaseInterfaceInputSpec): desc="Source of movement parameters", mandatory=True, ) - use_differences = traits.ListBool( + use_differences = traits.List( + traits.Bool, [True, False], minlen=2, maxlen=2, @@ -600,10 +601,10 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): outliers = np.unique(np.union1d(iidx, np.union1d(tidx, ridx))) # write output to outputfile - np.savetxt(artifactfile, outliers, fmt=b"%d", delimiter=" ") - np.savetxt(intensityfile, g, fmt=b"%.2f", delimiter=" ") + np.savetxt(artifactfile, outliers, fmt="%d", delimiter=" ") + np.savetxt(intensityfile, g, fmt="%.2f", delimiter=" ") if self.inputs.use_norm: - np.savetxt(normfile, normval, fmt=b"%.4f", delimiter=" ") + np.savetxt(normfile, normval, fmt="%.4f", delimiter=" ") if isdefined(self.inputs.save_plot) and self.inputs.save_plot: import matplotlib diff --git a/nipype/algorithms/tests/test_auto_ArtifactDetect.py b/nipype/algorithms/tests/test_auto_ArtifactDetect.py index 51010aea3a..4d5a7ca53b 100644 --- a/nipype/algorithms/tests/test_auto_ArtifactDetect.py +++ b/nipype/algorithms/tests/test_auto_ArtifactDetect.py @@ -48,8 +48,6 @@ def test_ArtifactDetect_inputs(): xor=["norm_threshold"], ), use_differences=dict( - maxlen=2, - minlen=2, usedefault=True, ), use_norm=dict( diff --git a/nipype/conftest.py b/nipype/conftest.py index 18b8a1ca6d..151906678f 100644 --- a/nipype/conftest.py +++ b/nipype/conftest.py @@ -2,7 +2,7 @@ import shutil from tempfile import mkdtemp import pytest -import numpy +import numpy as np import py.path as pp NIPYPE_DATADIR = os.path.realpath( @@ -15,12 +15,17 @@ @pytest.fixture(autouse=True) def add_np(doctest_namespace): - doctest_namespace["np"] = numpy + doctest_namespace["np"] = np doctest_namespace["os"] = os doctest_namespace["pytest"] = pytest doctest_namespace["datadir"] = data_dir +@pytest.fixture(scope='session', autouse=True) +def legacy_printoptions(): + np.set_printoptions(legacy='1.21') + + @pytest.fixture(autouse=True) def _docdir(request): """Grabbed from https://stackoverflow.com/a/46991331""" diff --git a/nipype/info.py b/nipype/info.py index 3b006ae161..d5ddd6bfb8 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -5,7 +5,7 @@ # nipype version information # Remove .dev0 for release -__version__ = "1.9.0" +__version__ = "1.9.1" def get_nipype_gitversion(): @@ -99,12 +99,11 @@ def get_nipype_gitversion(): """ # versions -NIBABEL_MIN_VERSION = "2.1.0" -NETWORKX_MIN_VERSION = "2.0" -NUMPY_MIN_VERSION = "1.17" -NUMPY_MAX_VERSION = "2.0" -SCIPY_MIN_VERSION = "0.14" -TRAITS_MIN_VERSION = "4.6" +NIBABEL_MIN_VERSION = "3.0" +NETWORKX_MIN_VERSION = "2.5" +NUMPY_MIN_VERSION = "1.21" +SCIPY_MIN_VERSION = "1.8" +TRAITS_MIN_VERSION = "6.2" DATEUTIL_MIN_VERSION = "2.2" SIMPLEJSON_MIN_VERSION = "3.8.0" PROV_MIN_VERSION = "1.5.2" @@ -136,7 +135,7 @@ def get_nipype_gitversion(): "click>=%s" % CLICK_MIN_VERSION, "networkx>=%s" % NETWORKX_MIN_VERSION, "nibabel>=%s" % NIBABEL_MIN_VERSION, - "numpy>=%s,<%s" % (NUMPY_MIN_VERSION, NUMPY_MAX_VERSION), + "numpy>=%s" % NUMPY_MIN_VERSION, "packaging", "prov>=%s" % PROV_MIN_VERSION, "pydot>=%s" % PYDOT_MIN_VERSION, @@ -144,22 +143,24 @@ def get_nipype_gitversion(): 
"rdflib>=%s" % RDFLIB_MIN_VERSION, "scipy>=%s" % SCIPY_MIN_VERSION, "simplejson>=%s" % SIMPLEJSON_MIN_VERSION, - "traits>=%s,!=5.0" % TRAITS_MIN_VERSION, + "traits>=%s" % TRAITS_MIN_VERSION, "filelock>=3.0.0", - "etelemetry>=0.2.0", + "acres", + "etelemetry>=0.3.1", "looseversion!=1.2", "puremagic", ] TESTS_REQUIRES = [ - "codecov", - "coverage", - "pytest", - "pytest-cov", + "coverage >= 5.2.1", + "pandas >= 1.5.0", + "pytest >= 6", + "pytest-cov >=2.11", "pytest-env", - "pytest-timeout", + "pytest-timeout >=1.4", "pytest-doctestplus", - "sphinx", + "pytest-xdist >= 2.5", + "sphinx >=7", ] EXTRA_REQUIRES = { diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index d3daebcf4c..b5e27ea53a 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -2741,7 +2741,7 @@ def _list_outputs(self): odir = os.path.dirname(os.path.abspath(prefix)) outputs["out_corr_matrix"] = glob.glob(os.path.join(odir, "*.netcc"))[0] - if isdefined(self.inputs.ts_wb_corr) or isdefined(self.inputs.ts_Z_corr): + if self.inputs.ts_wb_corr or self.inputs.ts_wb_Z: corrdir = os.path.join(odir, prefix + "_000_INDIV") outputs["out_corr_maps"] = glob.glob(os.path.join(corrdir, "*.nii.gz")) diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 41037ffc5f..91b131bbf3 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -710,9 +710,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' >>> reg.run() # doctest: +SKIP @@ -726,9 +726,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 1.0 ] --write-composite-transform 1' >>> reg1.run() # doctest: +SKIP @@ -742,9 +742,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] 
--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 0.975 ] --write-composite-transform 1' Clip extremely low intensity data points using winsorize_lower_quantile. All data points @@ -759,9 +759,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 0.975 ] --write-composite-transform 1' Use float instead of double for computations (saves memory usage) @@ -773,10 +773,10 @@ class Registration(ANTSCommand): --initial-moving-transform [ trans.mat, 1 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' Force to use double instead of float for computations (more precision and memory usage). 
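Every docstring hunk in this file makes the same edit: `--use-estimate-learning-rate-once` drops out of the expected `antsRegistration` command lines. This matches the `_format_registration` change further down in this diff, which keeps accepting the input but no longer emits the flag (ANTs removed it upstream in commit e1e47994b, 2022-08-09). A minimal sketch of that retire-a-flag pattern, with hypothetical names and a plain dict standing in for the traits-based input spec (the real logic lives in `Registration._format_registration`):

```python
# Sketch only: accept a deprecated option on the Python side but never
# emit it, so existing pipelines keep running against newer ANTs binaries.
def format_stage_flags(inputs, stage_index):
    retval = []
    shrink = "x".join(str(v) for v in inputs["shrink_factors"][stage_index])
    retval.append("--shrink-factors %s" % shrink)
    if inputs.get("use_estimate_learning_rate_once") is not None:
        pass  # flag removed from the ANTs CLI; accepted but ignored
    if inputs.get("use_histogram_matching") is not None:
        retval.append(
            "--use-histogram-matching %d" % inputs["use_histogram_matching"]
        )
    return retval


print(
    format_stage_flags(
        {
            "shrink_factors": [[2, 1]],
            "use_estimate_learning_rate_once": True,
            "use_histogram_matching": 1,
        },
        0,
    )
)  # ['--shrink-factors 2x1', '--use-histogram-matching 1']
```

Keeping the trait while dropping the flag means old workflow scripts still validate; only the generated command line changes.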
@@ -788,10 +788,10 @@ class Registration(ANTSCommand): --initial-moving-transform [ trans.mat, 1 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' 'collapse_output_transforms' can be used to put all transformation in a single 'composite_transform'- @@ -823,10 +823,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 1 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --restore-state trans.mat --save-state trans.mat --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' @@ -857,10 +857,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 1 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --restore-state trans.mat --save-state trans.mat --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 0' One can use multiple similarity metrics in a single registration stage.The Node below first @@ -885,10 +885,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, 
moving1.nii, 0.5, 32, None, 0.05 ] \ --metric CC[ fixed1.nii, moving1.nii, 0.5, 4, None, 0.1 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' ANTS Registration can also use multiple modalities to perform the registration. Here it is assumed @@ -906,10 +906,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 0.5, 32, None, 0.05 ] \ --metric CC[ fixed2.nii, moving2.nii, 0.5, 4, None, 0.1 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Different methods can be used for the interpolation when applying transformations. @@ -923,9 +923,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation BSpline[ 3 ] --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' >>> # Test Interpolation Parameters (MultiLabel/Gaussian) @@ -937,10 +937,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Gaussian[ 1.0, 1.0 ] \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' BSplineSyN non-linear registration with custom parameters. 
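The multi-metric examples above supply several metrics for a single stage, and each entry expands into its own `--metric` flag on the command line. A rough illustration of that expansion, using a hypothetical `format_metrics` helper and a simplified five-field tuple (the real `Registration._format_metric` also handles the sampling strategy and sampling percentage shown in the expected outputs):

```python
# Simplified sketch: one stage, two similarity metrics, two --metric flags.
# `param` is the number of histogram bins for Mattes and the neighborhood
# radius for CC.
def format_metrics(stage_metrics):
    return [
        f"--metric {name}[ {fixed}, {moving}, {weight}, {param} ]"
        for name, fixed, moving, weight, param in stage_metrics
    ]


for flag in format_metrics(
    [
        ("Mattes", "fixed1.nii", "moving1.nii", 0.5, 32),
        ("CC", "fixed1.nii", "moving1.nii", 0.5, 4),
    ]
):
    print(flag)
# --metric Mattes[ fixed1.nii, moving1.nii, 0.5, 32 ]
# --metric CC[ fixed1.nii, moving1.nii, 0.5, 4 ]
```

Weights within a stage (0.5 and 0.5 here) control each metric's contribution to the combined cost, which is why the docstring examples split the weight across the two `--metric` entries.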
@@ -954,9 +954,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform BSplineSyN[ 0.25, 26, 0, 3 ] \ +--use-histogram-matching 1 --transform BSplineSyN[ 0.25, 26, 0, 3 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Mask the fixed image in the second stage of the registration (but not the first). @@ -969,10 +969,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --masks [ NULL, NULL ] \ +--use-histogram-matching 1 --masks [ NULL, NULL ] \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --masks [ fixed1.nii, NULL ] \ +--use-histogram-matching 1 --masks [ fixed1.nii, NULL ] \ --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Here we use both a warpfield and a linear transformation, before registration commences. 
Note that @@ -988,10 +988,10 @@ class Registration(ANTSCommand): [ func_to_struct.mat, 0 ] [ ants_Warp.nii.gz, 0 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' """ @@ -1155,10 +1155,9 @@ def _format_registration(self): % self._format_xarray(self.inputs.shrink_factors[ii]) ) if isdefined(self.inputs.use_estimate_learning_rate_once): - retval.append( - "--use-estimate-learning-rate-once %d" - % self.inputs.use_estimate_learning_rate_once[ii] - ) + # this flag was removed because it was never used in the ants codebase + # removed from Ants in commit e1e47994b on 2022-08-09 + pass if isdefined(self.inputs.use_histogram_matching): # use_histogram_matching is either a common flag for all transforms # or a list of transform-specific flags diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index 3c87b71975..47592d70b5 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -1328,7 +1328,8 @@ class JointFusionInputSpec(ANTSCommandInputSpec): usedefault=True, desc=("Constrain solution to non-negative weights."), ) - patch_radius = traits.ListInt( + patch_radius = traits.List( + traits.Int, minlen=3, maxlen=3, argstr="-p %s", diff --git a/nipype/interfaces/ants/tests/test_auto_JointFusion.py b/nipype/interfaces/ants/tests/test_auto_JointFusion.py index f234ceea7c..98d8d696a1 100644 --- a/nipype/interfaces/ants/tests/test_auto_JointFusion.py +++ b/nipype/interfaces/ants/tests/test_auto_JointFusion.py @@ -70,8 +70,6 @@ def test_JointFusion_inputs(): ), patch_radius=dict( argstr="-p %s", - maxlen=3, - minlen=3, ), retain_atlas_voting_images=dict( argstr="-f", diff --git a/nipype/interfaces/base/__init__.py b/nipype/interfaces/base/__init__.py index 2e54847958..2af425d284 100644 --- a/nipype/interfaces/base/__init__.py +++ b/nipype/interfaces/base/__init__.py @@ -7,7 +7,8 @@ This module defines the API of all nipype interfaces. 
""" -from traits.trait_handlers import TraitDictObject, TraitListObject +from traits.trait_dict_object import TraitDictObject +from traits.trait_list_object import TraitListObject from traits.trait_errors import TraitError from .core import ( diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py index a7f61e6889..defbca7f43 100644 --- a/nipype/interfaces/base/specs.py +++ b/nipype/interfaces/base/specs.py @@ -15,7 +15,8 @@ from packaging.version import Version from traits.trait_errors import TraitError -from traits.trait_handlers import TraitDictObject, TraitListObject +from traits.trait_dict_object import TraitDictObject +from traits.trait_list_object import TraitListObject from ...utils.filemanip import md5, hash_infile, hash_timestamp from .traits_extension import ( traits, diff --git a/nipype/interfaces/base/tests/test_support.py b/nipype/interfaces/base/tests/test_support.py index 52770e476c..406e6e9358 100644 --- a/nipype/interfaces/base/tests/test_support.py +++ b/nipype/interfaces/base/tests/test_support.py @@ -3,7 +3,7 @@ import os import pytest -from pkg_resources import resource_filename as pkgrf +import acres from ....utils.filemanip import md5 from ... import base as nib @@ -42,14 +42,13 @@ def test_bunch_methods(): def test_bunch_hash(): # NOTE: Since the path to the json file is included in the Bunch, # the hash will be unique to each machine. - json_pth = pkgrf("nipype", os.path.join("testing", "data", "realign_json.json")) + json_pth = acres.Loader('nipype.testing').cached('data', 'realign_json.json') - b = nib.Bunch(infile=json_pth, otherthing="blue", yat=True) + b = nib.Bunch(infile=str(json_pth), otherthing="blue", yat=True) newbdict, bhash = b._get_bunch_hash() assert bhash == "d1f46750044c3de102efc847720fc35f" # Make sure the hash stored in the json file for `infile` is correct. 
jshash = md5() - with open(json_pth) as fp: - jshash.update(fp.read().encode("utf-8")) + jshash.update(json_pth.read_bytes()) assert newbdict["infile"][0][1] == jshash.hexdigest() assert newbdict["yat"] is True diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index ec19d1fe7b..1b9bdea6d5 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -2,6 +2,7 @@ import os.path as op import inspect +from functools import partial import numpy as np from ..base import ( traits, @@ -109,15 +110,15 @@ def convert_to_traits_type(dipy_type, is_file=False): dipy_type = dipy_type.lower() is_mandatory = bool("optional" not in dipy_type) if "variable" in dipy_type and "str" in dipy_type: - return traits.ListStr, is_mandatory + return partial(traits.List, traits.Str), is_mandatory elif "variable" in dipy_type and "int" in dipy_type: - return traits.ListInt, is_mandatory + return partial(traits.List, traits.Int), is_mandatory elif "variable" in dipy_type and "float" in dipy_type: - return traits.ListFloat, is_mandatory + return partial(traits.List, traits.Float), is_mandatory elif "variable" in dipy_type and "bool" in dipy_type: - return traits.ListBool, is_mandatory + return partial(traits.List, traits.Bool), is_mandatory elif "variable" in dipy_type and "complex" in dipy_type: - return traits.ListComplex, is_mandatory + return partial(traits.List, traits.Complex), is_mandatory elif "str" in dipy_type and not is_file: return traits.Str, is_mandatory elif "str" in dipy_type and is_file: diff --git a/nipype/interfaces/dipy/tests/test_base.py b/nipype/interfaces/dipy/tests/test_base.py index d2d81ec005..015215054d 100644 --- a/nipype/interfaces/dipy/tests/test_base.py +++ b/nipype/interfaces/dipy/tests/test_base.py @@ -16,7 +16,7 @@ def test_convert_to_traits_type(): Params = namedtuple("Params", "traits_type is_file") - Res = namedtuple("Res", "traits_type is_mandatory") + Res = namedtuple("Res", "traits_type subtype is_mandatory") l_entries = [ Params("variable string", False), Params("variable int", False), @@ -42,35 +42,38 @@ def test_convert_to_traits_type(): Params("complex, optional", False), ] l_expected = [ - Res(traits.ListStr, True), - Res(traits.ListInt, True), - Res(traits.ListFloat, True), - Res(traits.ListBool, True), - Res(traits.ListComplex, True), - Res(traits.ListInt, False), - Res(traits.ListStr, False), - Res(traits.ListFloat, False), - Res(traits.ListBool, False), - Res(traits.ListComplex, False), - Res(traits.Str, True), - Res(traits.Int, True), - Res(File, True), - Res(traits.Float, True), - Res(traits.Bool, True), - Res(traits.Complex, True), - Res(traits.Str, False), - Res(traits.Int, False), - Res(File, False), - Res(traits.Float, False), - Res(traits.Bool, False), - Res(traits.Complex, False), + Res(traits.List, traits.Str, True), + Res(traits.List, traits.Int, True), + Res(traits.List, traits.Float, True), + Res(traits.List, traits.Bool, True), + Res(traits.List, traits.Complex, True), + Res(traits.List, traits.Int, False), + Res(traits.List, traits.Str, False), + Res(traits.List, traits.Float, False), + Res(traits.List, traits.Bool, False), + Res(traits.List, traits.Complex, False), + Res(traits.Str, None, True), + Res(traits.Int, None, True), + Res(File, None, True), + Res(traits.Float, None, True), + Res(traits.Bool, None, True), + Res(traits.Complex, None, True), + Res(traits.Str, None, False), + Res(traits.Int, None, False), + Res(File, None, False), + Res(traits.Float, None, False), + Res(traits.Bool, None, False), + 
Res(traits.Complex, None, False), ] for entry, res in zip(l_entries, l_expected): traits_type, is_mandatory = convert_to_traits_type( entry.traits_type, entry.is_file ) - assert traits_type == res.traits_type + trait_instance = traits_type() + assert isinstance(trait_instance, res.traits_type) + if res.subtype: + assert isinstance(trait_instance.inner_traits()[0].trait_type, res.subtype) assert is_mandatory == res.is_mandatory with pytest.raises(IOError): diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py index 09daacb17f..7dda9a49d7 100644 --- a/nipype/interfaces/fsl/epi.py +++ b/nipype/interfaces/fsl/epi.py @@ -417,7 +417,7 @@ def _generate_encfile(self): float(val[0] == encdir[0]) * direction for val in ["x", "y", "z"] ] + [durations[idx]] lines.append(line) - np.savetxt(out_file, np.array(lines), fmt=b"%d %d %d %.8f") + np.savetxt(out_file, np.array(lines), fmt="%d %d %d %.8f") return out_file def _overload_extension(self, value, name=None): diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py index 2a148025f5..2ada4ab969 100644 --- a/nipype/interfaces/fsl/model.py +++ b/nipype/interfaces/fsl/model.py @@ -9,6 +9,7 @@ from shutil import rmtree from string import Template +import acres import numpy as np from looseversion import LooseVersion from nibabel import load @@ -2547,12 +2548,5 @@ def load_template(name): template : string.Template """ - from pkg_resources import resource_filename as pkgrf - - full_fname = pkgrf( - "nipype", os.path.join("interfaces", "fsl", "model_templates", name) - ) - with open(full_fname) as template_file: - template = Template(template_file.read()) - - return template + loader = acres.Loader('nipype.interfaces.fsl') + return Template(loader.readable('model_templates', name).read_text()) diff --git a/nipype/interfaces/mrtrix3/preprocess.py b/nipype/interfaces/mrtrix3/preprocess.py index 57cdad0168..0165087376 100644 --- a/nipype/interfaces/mrtrix3/preprocess.py +++ b/nipype/interfaces/mrtrix3/preprocess.py @@ -99,8 +99,9 @@ class MRDeGibbsInputSpec(MRTrix3BaseInputSpec): mandatory=True, desc="input DWI image", ) - axes = traits.ListInt( - default_value=[0, 1], + axes = traits.List( + traits.Int, + [0, 1], usedefault=True, sep=",", minlen=2, diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py index cd15f36ac6..83f5bfef4b 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py @@ -9,8 +9,6 @@ def test_MRDeGibbs_inputs(): ), axes=dict( argstr="-axes %s", - maxlen=2, - minlen=2, sep=",", usedefault=True, ), diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py index 9d78517f79..df6413320e 100644 --- a/nipype/interfaces/nilearn.py +++ b/nipype/interfaces/nilearn.py @@ -105,7 +105,7 @@ def _run_interface(self, runtime): # save output self._results["out_file"] = os.path.join(runtime.cwd, self.inputs.out_file) - np.savetxt(self._results["out_file"], output, fmt=b"%s", delimiter="\t") + np.savetxt(self._results["out_file"], output, fmt="%s", delimiter="\t") return runtime def _process_inputs(self): diff --git a/nipype/interfaces/nitime/tests/test_nitime.py b/nipype/interfaces/nitime/tests/test_nitime.py index 64bb8366a0..8351a3c38a 100644 --- a/nipype/interfaces/nitime/tests/test_nitime.py +++ b/nipype/interfaces/nitime/tests/test_nitime.py @@ -51,7 +51,9 @@ def test_coherence_analysis(tmpdir): # This is the nitime analysis: TR = 1.89 - data_rec = 
np.recfromcsv(example_data("fmri_timeseries.csv")) + data_rec = np.genfromtxt( + example_data("fmri_timeseries.csv"), delimiter=',', names=True + ) roi_names = np.array(data_rec.dtype.names) n_samples = data_rec.shape[0] data = np.zeros((len(roi_names), n_samples)) diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index 8a3a479705..c7f69785ff 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -273,7 +273,8 @@ class ApplyVDMInputSpec(SPMCommandInputSpec): desc="phase encode direction input data have been acquired with", usedefault=True, ) - write_which = traits.ListInt( + write_which = traits.List( + traits.Int, [2, 1], field="roptions.which", minlen=2, @@ -524,7 +525,8 @@ class RealignInputSpec(SPMCommandInputSpec): field="eoptions.wrap", desc="Check if interpolation should wrap in [x,y,z]", ) - write_which = traits.ListInt( + write_which = traits.List( + traits.Int, [2, 1], field="roptions.which", minlen=2, @@ -731,7 +733,8 @@ class RealignUnwarpInputSpec(SPMCommandInputSpec): "maximization and smoothness maximization of the estimated field." ), ) - est_reg_factor = traits.ListInt( + est_reg_factor = traits.List( + traits.Int, [100000], field="uweoptions.lambda", minlen=1, @@ -769,7 +772,8 @@ class RealignUnwarpInputSpec(SPMCommandInputSpec): field="uweoptions.rem", desc="Re-estimate movement parameters at each unwarping iteration.", ) - est_num_of_iterations = traits.ListInt( + est_num_of_iterations = traits.List( + traits.Int, [5], field="uweoptions.noi", minlen=1, @@ -783,7 +787,8 @@ class RealignUnwarpInputSpec(SPMCommandInputSpec): usedefault=True, desc="Point in position space to perform Taylor-expansion around.", ) - reslice_which = traits.ListInt( + reslice_which = traits.List( + traits.Int, [2, 1], field="uwroptions.uwwhich", minlen=2, diff --git a/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py b/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py index 2f56b49ef2..6d3b3c360d 100644 --- a/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py +++ b/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py @@ -41,8 +41,6 @@ def test_ApplyVDM_inputs(): ), write_which=dict( field="roptions.which", - maxlen=2, - minlen=2, usedefault=True, ), write_wrap=dict( diff --git a/nipype/interfaces/spm/tests/test_auto_Realign.py b/nipype/interfaces/spm/tests/test_auto_Realign.py index 5165d6f33e..8262243a61 100644 --- a/nipype/interfaces/spm/tests/test_auto_Realign.py +++ b/nipype/interfaces/spm/tests/test_auto_Realign.py @@ -56,8 +56,6 @@ def test_Realign_inputs(): ), write_which=dict( field="roptions.which", - maxlen=2, - minlen=2, usedefault=True, ), write_wrap=dict( diff --git a/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py b/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py index bb27419547..dc996c130e 100644 --- a/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py +++ b/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py @@ -15,8 +15,6 @@ def test_RealignUnwarp_inputs(): ), est_num_of_iterations=dict( field="uweoptions.noi", - maxlen=1, - minlen=1, usedefault=True, ), est_re_est_mov_par=dict( @@ -24,8 +22,6 @@ def test_RealignUnwarp_inputs(): ), est_reg_factor=dict( field="uweoptions.lambda", - maxlen=1, - minlen=1, usedefault=True, ), est_reg_order=dict( @@ -80,8 +76,6 @@ def test_RealignUnwarp_inputs(): ), reslice_which=dict( field="uwroptions.uwwhich", - maxlen=2, - minlen=2, usedefault=True, ), reslice_wrap=dict( diff --git a/nipype/pipeline/engine/tests/test_engine.py 
b/nipype/pipeline/engine/tests/test_engine.py
index abf9426d43..f1b6817e74 100644
--- a/nipype/pipeline/engine/tests/test_engine.py
+++ b/nipype/pipeline/engine/tests/test_engine.py
@@ -541,7 +541,9 @@ def test_write_graph_dotfile(tmpdir, graph_type, simple):
     pipe.write_graph(graph2use=graph_type, simple_form=simple, format="dot")
 
     with open("graph.dot") as f:
-        graph_str = f.read()
+        # Replace handles change in networkx behavior when graph is missing a name
+        # Probably around 3, but I haven't tracked it down.
+        graph_str = f.read().replace('  {', ' {')
 
     if simple:
         for line in dotfiles[graph_type]:
@@ -635,7 +637,9 @@ def test_write_graph_dotfile_iterables(tmpdir, graph_type, simple):
     pipe.write_graph(graph2use=graph_type, simple_form=simple, format="dot")
 
     with open("graph.dot") as f:
-        graph_str = f.read()
+        # Replace handles change in networkx behavior when graph is missing a name
+        # Probably around 3, but I haven't tracked it down.
+        graph_str = f.read().replace('  {', ' {')
 
     if simple:
         for line in dotfiles_iter[graph_type]:
diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py
index f7606708c7..b10238ec4a 100644
--- a/nipype/pipeline/plugins/tests/test_callback.py
+++ b/nipype/pipeline/plugins/tests/test_callback.py
@@ -1,8 +1,9 @@
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-"""Tests for workflow callbacks
-"""
+"""Tests for workflow callbacks."""
+from pathlib import Path
 from time import sleep
+import json
 import pytest
 import nipype.interfaces.utility as niu
 import nipype.pipeline.engine as pe
@@ -60,3 +61,51 @@ def test_callback_exception(tmpdir, plugin, stop_on_first_crash):
     sleep(0.5)  # Wait for callback to be called (python 2.7)
 
     assert so.statuses == [("f_node", "start"), ("f_node", "exception")]
+
+
+@pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"])
+def test_callback_gantt(tmp_path: Path, plugin: str) -> None:
+    import logging
+
+    from os import path
+
+    from nipype.utils.profiler import log_nodes_cb
+    from nipype.utils.draw_gantt_chart import generate_gantt_chart
+
+    log_filename = tmp_path / "callback.log"
+    logger = logging.getLogger("callback")
+    logger.setLevel(logging.DEBUG)
+    handler = logging.FileHandler(log_filename)
+    logger.addHandler(handler)
+
+    # create workflow
+    wf = pe.Workflow(name="test", base_dir=str(tmp_path))
+    f_node = pe.Node(
+        niu.Function(function=func, input_names=[], output_names=[]), name="f_node"
+    )
+    wf.add_nodes([f_node])
+    wf.config["execution"] = {"crashdump_dir": wf.base_dir, "poll_sleep_duration": 2}
+
+    plugin_args = {"status_callback": log_nodes_cb}
+    if plugin != "Linear":
+        plugin_args["n_procs"] = 8
+    wf.run(plugin=plugin, plugin_args=plugin_args)
+
+    with open(log_filename, "r") as _f:
+        loglines = _f.readlines()
+
+    # test missing duration
+    first_line = json.loads(loglines[0])
+    if "duration" in first_line:
+        del first_line["duration"]
+    loglines[0] = f"{json.dumps(first_line)}\n"
+
+    # test duplicate timestamp warning
+    loglines.append(loglines[-1])
+
+    with open(log_filename, "w") as _f:
+        _f.write("".join(loglines))
+
+    with pytest.warns(Warning):
+        generate_gantt_chart(str(log_filename), 1 if plugin == "Linear" else 8)
+    assert (tmp_path / "callback.log.html").exists()
diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py
index 3ae4b77246..64a0d793db 100644
--- a/nipype/utils/draw_gantt_chart.py
+++ b/nipype/utils/draw_gantt_chart.py
@@ -8,8 +8,10 @@
 import random
 import datetime
 import simplejson as json
+from typing import Union
 
 from collections import OrderedDict
+from warnings import warn
 
 # Pandas
 try:
@@ -66,9 +68,9 @@ def create_event_dict(start_time, nodes_list):
         finish_delta = (node["finish"] - start_time).total_seconds()
 
         # Populate dictionary
-        if events.get(start_delta) or events.get(finish_delta):
+        if events.get(start_delta):
             err_msg = "Event logged twice or events started at exact same time!"
-            raise KeyError(err_msg)
+            warn(err_msg, category=Warning)
         events[start_delta] = start_node
         events[finish_delta] = finish_node
 
@@ -101,15 +103,25 @@ def log_to_dict(logfile):
 
     nodes_list = [json.loads(l) for l in lines]
 
-    def _convert_string_to_datetime(datestring):
-        try:
+    def _convert_string_to_datetime(
+        datestring: Union[str, datetime.datetime],
+    ) -> datetime.datetime:
+        """Convert a date string to a datetime object."""
+        if isinstance(datestring, datetime.datetime):
+            datetime_object = datestring
+        elif isinstance(datestring, str):
+            date_format = (
+                "%Y-%m-%dT%H:%M:%S.%f%z"
+                if "+" in datestring
+                else "%Y-%m-%dT%H:%M:%S.%f"
+            )
             datetime_object: datetime.datetime = datetime.datetime.strptime(
-                datestring, "%Y-%m-%dT%H:%M:%S.%f"
+                datestring, date_format
             )
-            return datetime_object
-        except Exception as _:
-            pass
-        return datestring
+        else:
+            msg = f"{datestring} is not a string or datetime object."
+            raise TypeError(msg)
+        return datetime_object
 
     date_object_node_list: list = list()
     for n in nodes_list:
@@ -154,12 +166,18 @@ def calculate_resource_timeseries(events, resource):
     # Iterate through the events
     for _, event in sorted(events.items()):
         if event["event"] == "start":
-            if resource in event and event[resource] != "Unknown":
-                all_res += float(event[resource])
+            if resource in event:
+                try:
+                    all_res += float(event[resource])
+                except ValueError:
+                    continue
            current_time = event["start"]
         elif event["event"] == "finish":
-            if resource in event and event[resource] != "Unknown":
-                all_res -= float(event[resource])
+            if resource in event:
+                try:
+                    all_res -= float(event[resource])
+                except ValueError:
+                    continue
             current_time = event["finish"]
         res[current_time] = all_res
 
@@ -284,7 +302,14 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, co
         # Left
         left = 60
         for core in range(len(end_times)):
-            if end_times[core] < node_start:
+            try:
+                end_time_condition = end_times[core] < node_start
+            except TypeError:
+                # if one has a timezone and one does not
+                end_time_condition = end_times[core].replace(
+                    tzinfo=None
+                ) < node_start.replace(tzinfo=None)
+            if end_time_condition:
                 left += core * 30
                 end_times[core] = datetime.datetime(
                     node_finish.year,
@@ -307,7 +332,7 @@
             "offset": offset,
             "scale_duration": scale_duration,
             "color": color,
-            "node_name": node["name"],
+            "node_name": node.get("name", node.get("id", "")),
             "node_dur": node["duration"] / 60.0,
             "node_start": node_start.strftime("%Y-%m-%d %H:%M:%S"),
             "node_finish": node_finish.strftime("%Y-%m-%d %H:%M:%S"),
@@ -527,6 +552,25 @@ def generate_gantt_chart(
     # Read in json-log to get list of node dicts
     nodes_list = log_to_dict(logfile)
 
+    # Only include nodes with timing information, and convert timestamps
+    # from strings to datetimes
+    nodes_list = [
+        {
+            k: (
+                datetime.datetime.strptime(i[k], "%Y-%m-%dT%H:%M:%S.%f")
+                if k in {"start", "finish"} and isinstance(i[k], str)
+                else i[k]
+            )
+            for k in i
+        }
+        for i in nodes_list
+        if "start" in i and "finish" in i
+    ]
+
+    for node in nodes_list:
+        if "duration" not in node:
+            node["duration"] = (node["finish"] - node["start"]).total_seconds()
+
     # Create the header of the report with useful information
     start_node = nodes_list[0]
     last_node = nodes_list[-1]
diff --git a/pyproject.toml b/pyproject.toml
index 06f4d798c7..2b1282eb74 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,3 +4,24 @@ build-backend = "setuptools.build_meta"
 
 [tool.black]
 skip-string-normalization = true
+
+[tool.pytest.ini_options]
+minversion = "6"
+testpaths = ["nipype"]
+log_cli_level = "INFO"
+xfail_strict = true
+norecursedirs = [".git"]
+addopts = [
+  "-svx",
+  "-ra",
+  "--strict-config",
+  "--strict-markers",
+  "--doctest-modules",
+  "--cov=nipype",
+  "--cov-report=xml",
+  "--cov-config=pyproject.toml",
+]
+doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS"
+env = "PYTHONHASHSEED=0"
+filterwarnings = ["ignore::DeprecationWarning"]
+junit_family = "xunit2"
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000000..e8bc7e8f04
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,111 @@
+[tox]
+requires =
+    tox>=4
+envlist =
+    py3{9,10,11,12,13}-none  # Test nipype functionality on all versions
+    py3{9,12,13}-full  # Test with extra dependencies on oldest and two newest
+    py39-min  # Test with minimal dependencies
+    py3{11,12,13}-pre  # Test with pre-release on SPEC0-supported Python
+skip_missing_interpreters = true
+
+# Configuration that allows us to split tests across GitHub runners effectively
+[gh-actions]
+python =
+    3.9: py39
+    3.10: py310
+    3.11: py311
+    3.12: py312
+    3.13: py313
+
+[gh-actions:env]
+DEPENDS =
+    min: min
+    none: none
+    full: full
+    pre: pre
+
+[testenv]
+description = Pytest with coverage
+labels = test
+pip_pre =
+    pre: true
+pass_env =
+    # Parsed from `git grep getenv` and `git grep os.environ`
+    # May not all be needed
+    NIPYPE_NO_ET
+    NO_ET
+    ANTSPATH
+    CI_SKIP_TEST
+    FREESURFER_HOME
+    USER
+    FSLDIR
+    FSLOUTPUTTYPE
+    FSL_COURSE_DATA
+    NIPYPE_NO_MATLAB
+    OMP_NUM_THREADS
+    NIPYPE_NO_R
+    SPMMCRCMD
+    FORCE_SPMMCR
+    LOGNAME
+    AWS_ACCESS_KEY_ID
+    AWS_SECRET_ACCESS_KEY
+    MATLABCMD
+    MRTRIX3_HOME
+    RCMD
+    ETS_TOOLKIT
+    NIPYPE_CONFIG_DIR
+    DISPLAY
+    PATHEXT
+    # getpass.getuser() sources for Windows:
+    LOGNAME
+    USER
+    LNAME
+    USERNAME
+    # Pass user color preferences through
+    PY_COLORS
+    FORCE_COLOR
+    NO_COLOR
+    CLICOLOR
+    CLICOLOR_FORCE
+    PYTHON_GIL
deps =
+    py313: traits @ git+https://github.com/enthought/traits.git@10954eb
+    full: dipy @ git+https://github.com/dipy/dipy@master
+extras =
+    tests
+    full: doc
+    full: profiler
+    full: duecredit
+    full: ssh
+    full: nipy
+setenv =
+    FSLOUTPUTTYPE=NIFTI_GZ
+    pre: PIP_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
+    pre: UV_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
+uv_resolution =
+    min: lowest-direct
+
+commands =
+    python -c "import nipype; print(nipype.__version__)"
+    pytest --durations=20 --durations-min=1.0 --cov-report term-missing {posargs:-n auto}
+
+[testenv:specs]
+description = Rebuild spec tests
+deps =
+    black
+    # Rebuild dipy specs
+    dipy
+    # Faster to install old numpy than unreleased Dipy
+    # This can be dropped once a Dipy release supports numpy 2
+    numpy<2
+commands =
+    python tools/checkspecs.py
+
+[testenv:style]
+description = Check our style guide
+labels = check
+deps =
+    black
+skip_install = true
+commands =
+    black --check --diff nipype setup.py
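For readers unfamiliar with tox-gh-actions: the `[gh-actions]` and `[gh-actions:env]` tables above map each runner's Python version and its `DEPENDS` matrix value onto tox factors, and only the environments in `envlist` matching both factors run on that runner. A rough model of that selection, under the simplifying assumption that it reduces to factor intersection (the real plugin does more):

```python
# Hedged model of tox-gh-actions environment selection, mirroring the
# envlist and factor mappings in the tox.ini above.
ENVLIST = (
    [f"py3{m}-none" for m in (9, 10, 11, 12, 13)]
    + [f"py3{m}-full" for m in (9, 12, 13)]
    + ["py39-min"]
    + [f"py3{m}-pre" for m in (11, 12, 13)]
)

PYFACTOR = {
    "3.9": "py39", "3.10": "py310", "3.11": "py311",
    "3.12": "py312", "3.13": "py313",
}


def select_envs(python_version: str, depends: str) -> list[str]:
    """Pick the envs whose factors match this runner's matrix entry."""
    prefix = PYFACTOR[python_version] + "-"
    return [e for e in ENVLIST if e.startswith(prefix) and e.endswith("-" + depends)]


print(select_envs("3.13", "pre"))   # ['py313-pre']
print(select_envs("3.10", "full"))  # [] -- excluded combination, nothing runs
```

This is why the workflow's `exclude` entries (e.g. `python-version: "3.10"` with `dependencies: full`) and the envs absent from `envlist` have to stay in sync: a matrix entry with no matching tox environment would simply do nothing.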