diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 4c410e9b0d..8c1e119249 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -1,19 +1,80 @@
-# Commits with messages like "STY: black" or "run black"
+# Wed Apr 5 14:01:05 2023 -0400 - effigies@gmail.com - STY: black [ignore-rev]
+a9ce9b78a402ebacf7726ad6454bb75b1447f52f
+# Wed Sep 14 14:12:07 2022 -0400 - mathiasg@stanford.edu - STY: Black
+f4a779223c6b0dffa47138d24ec9ef378c7164a9
+# Tue Apr 19 14:09:31 2022 -0400 - markiewicz@stanford.edu - STY: black [ignore-rev]
+b9716ebd46541f7fb1b96a94cc35b5e2ea6c3bba
+# Fri Apr 15 06:59:48 2022 -0700 - markiewicz@stanford.edu - STY: black [ignore-rev]
+d223fbccda6dee0ef39e00084296a3292f2ccf87
+# Fri Apr 8 21:34:43 2022 -0400 - markiewicz@stanford.edu - STY: black [ignore-rev]
+e3f56da124fd58041018c2e70d16a130ef189a66
+# Sun Apr 3 10:27:07 2022 -0400 - markiewicz@stanford.edu - STY: black [ignore-rev]
+988c382ebfc7df964874b6287b9d9e27e274a4a4
+# Sat Apr 2 21:32:56 2022 -0400 - markiewicz@stanford.edu - STY: black [ignore-rev]
+230fac6149d4dfbd5da669a983332a1fe318ef57
+# Sat Apr 2 12:49:15 2022 -0400 - markiewicz@stanford.edu - STY/TEST: Make specs and run black [ignore-rev]
+2ba8dacb8cc1f6f9c5b15b1cfb7b0395d45dcfb3
+# Sun Mar 20 21:19:39 2022 -0400 - markiewicz@stanford.edu - STY: black [ignore-rev]
+87cc1b54981113024ec3fd594881f72cf67513fb
+# Wed Dec 8 17:02:09 2021 -0500 - markiewicz@stanford.edu - STY: black [ignore-rev]
+5ac2f18ac116832d81a13f0c83e0a6aeda1457cf
+# Thu Dec 2 09:30:42 2021 -0500 - markiewicz@stanford.edu - STY: black
+ee50279ebf061a70ff502e7d2e51385b285cfda4
+# Tue Oct 26 11:18:02 2021 +0200 - code@oscaresteban.es - sty: run black
+5d9adbbb77b7047b9b47cd2fa079dee0094cfc91
+# Wed Oct 20 12:07:15 2021 +0200 - fabio.bernardoni@uniklinikum-dresden.de - run black on the preprocess.py file to improve formatting
+674e9b0eeca082efb5322b61fea57ee89a3e4a24
+# Wed Oct 13 16:08:23 2021 -0400 - markiewicz@stanford.edu - ENH: Add expected steps for FreeSurfer 7 recon-all (#3389)
+8f7c0bf2ec9c819844a2736a9ae2f6eef19a8e7f
+# Wed Oct 13 14:26:48 2021 -0400 - markiewicz@stanford.edu - STY: black
+d8dbc6f7b6a5385535e2fa53b7c6af7aa1370f46
+# Wed Sep 29 16:53:54 2021 +0200 - code@oscaresteban.es - sty: run black on affected files
+5f280da629bb7b5dce908633d2deea85b55dd67b
+# Thu Jun 24 17:43:22 2021 +0200 - code@oscaresteban.es - sty: run black
+135ce497a18adbe0811441c2b720910ec549aa6f
+# Thu Sep 23 08:56:28 2021 -0400 - markiewicz@stanford.edu - STY: black
+f69b3fb09560616822737764bb07272cd587e4a0
+# Fri Apr 30 17:19:55 2021 -0400 - markiewicz@stanford.edu - STY: black
+04718ac71436b6f283af7575dda0f6998b64f893
+# Fri Apr 30 16:50:00 2021 -0400 - markiewicz@stanford.edu - STY: black
12deb959cccc431fb8222cc5854f1c92a0080021
+# Thu Apr 1 12:26:08 2021 -0400 - markiewicz@stanford.edu - STY: black
f64bf338f630a9ee5cbe7a3ec98c68292897e720
+# Thu Dec 3 09:24:05 2020 +1100 - tom.g.close@gmail.com - run black over touched files
+c81259bc3b28baa1f18f95f6b056c228c6bfd115
+# Fri Aug 14 17:15:15 2020 -0400 - markiewicz@stanford.edu - STY: Black
83358d7f17aac07cb90d0330f11ea2322e2974d8
+# Sat Mar 14 12:44:20 2020 -0400 - markiewicz@stanford.edu - STY: black
faef7d0f93013a700c882f709e98fb3cd36ebb03
+# Sun Mar 8 15:05:28 2020 +0100 - 3453485+daniel-ge@users.noreply.github.com - FIX: get length of generator + STY: Black
+02991da67458b879d7c6360aa6457eb3c1bd5a07
+# Wed Mar 4 16:30:39 2020 -0500 - markiewicz@stanford.edu - STY: black
d50c1858564c0b3073fb23c54886a0454cb66afa
+# Thu Feb 27 15:08:42 2020 -0800 - code@oscaresteban.es - sty: black
417b8897a116fcded5000e21e2b6ccbe29452a52
+# Thu Jan 2 11:29:05 2020 -0800 - code@oscaresteban.es - sty: black
aaf677a87f64c485f3e305799e4a5dc73b69e5fb
+# Sun Dec 29 17:47:51 2019 -0800 - code@oscaresteban.es - sty: black
f763008442d88d8ce00ec266698268389415f8d6
+# Thu Jan 2 11:29:05 2020 -0800 - code@oscaresteban.es - sty: black
b1eccafd4edc8503b02d715f5b5f6f783520fdf9
+# Sun Dec 29 17:47:51 2019 -0800 - code@oscaresteban.es - sty: black
70db90349598cc7f26a4a513779529fba7d0a797
+# Thu Dec 19 09:22:22 2019 -0500 - markiewicz@stanford.edu - STY: Black
6c1d91d71f6f0db0e985bd2adc34206442b0653d
+# Thu Dec 19 15:51:11 2019 -0500 - markiewicz@stanford.edu - STY: Black
97bdbd5f48ab242de5288ba4715192a27619a803
+# Fri Nov 15 14:38:10 2019 -0500 - steve@steventilley.com - run black
78fa360f5b785224349b8b85b07e510d2233bb63
+# Fri Nov 15 14:34:03 2019 -0500 - steve@steventilley.com - run black
7f85f43a34de8bff8e634232c939b17cee8e8fc5
+# Thu Nov 14 11:14:51 2019 -0500 - markiewicz@stanford.edu - Merge pull request #3096 from effigies/sty/black
+1a869991adc024577536689d557fc748c764f15d
+# Thu Nov 14 09:15:20 2019 -0500 - markiewicz@stanford.edu - STY: Black setup.py
9c50b5daa797def5672dd057155b0e2c658853e2
+# Thu Nov 14 09:14:38 2019 -0500 - markiewicz@stanford.edu - STY: Black for tools/
47194993ae14aceeec436cfb3769def667196668
+# Wed Nov 13 23:41:15 2019 -0500 - markiewicz@stanford.edu - STY: Black
75653feadc6667d5313d83e9c62a5d5819771a9c
+# Tue Nov 12 09:43:34 2019 -0500 - markiewicz@stanford.edu - STY: Black files pre-merge
497b44d680eee0892fa59c6aaaae22a17d70a536
diff --git a/.github/workflows/contrib.yml b/.github/workflows/contrib.yml
index d723e6aa48..a4ec6d7d75 100644
--- a/.github/workflows/contrib.yml
+++ b/.github/workflows/contrib.yml
@@ -22,6 +22,9 @@ concurrency:
group: contrib-${{ github.ref }}
cancel-in-progress: true
+permissions:
+ contents: read # to fetch code (actions/checkout)
+
jobs:
stable:
# Check each OS, all supported Python, minimum versions and latest releases
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 07bb90dc9e..fee8740987 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -29,8 +29,12 @@ concurrency:
group: tests-${{ github.ref }}
cancel-in-progress: true
+permissions: {}
jobs:
build:
+ permissions:
+ contents: read # to fetch code (actions/checkout)
+
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@@ -80,10 +84,13 @@ jobs:
stable:
# Check each OS, all supported Python, minimum versions and latest releases
+ permissions:
+ contents: read # to fetch code (actions/checkout)
+
runs-on: ${{ matrix.os }}
strategy:
matrix:
- os: ['ubuntu-latest']
+ os: ['ubuntu-20.04']
python-version: [3.7, 3.8, 3.9, "3.10"]
check: ['test']
pip-flags: ['']
@@ -91,13 +98,20 @@ jobs:
deb-depends: [false]
nipype-extras: ['doc,tests,profiler']
include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
python-version: 3.8
check: test
pip-flags: ''
depends: REQUIREMENTS
deb-depends: true
- nipype-extras: doc,tests,nipy,profiler,duecredit,ssh
+ nipype-extras: doc,tests,profiler,duecredit,ssh
+ # - os: ubuntu-20.04
+ # python-version: 3.8
+ # check: test
+ # pip-flags: ''
+ # depends: NUMPY123
+ # deb-depends: true
+ # nipype-extras: doc,tests,nipy,profiler,duecredit,ssh
env:
DEPENDS: ${{ matrix.depends }}
CHECK_TYPE: ${{ matrix.check }}
diff --git a/.github/workflows/tutorials.yml b/.github/workflows/tutorials.yml
index 055e247fe0..2e6093fde5 100644
--- a/.github/workflows/tutorials.yml
+++ b/.github/workflows/tutorials.yml
@@ -9,6 +9,7 @@ concurrency:
group: tutorials-${{ github.ref }}
cancel-in-progress: true
+permissions: {}
jobs:
tutorial:
runs-on: ubuntu-latest
@@ -30,9 +31,8 @@ jobs:
- name: Check Action was successfully dispatched
id: dispatched
run: |
- set -x
START=${{ steps.start.outputs.start_time }}
- RUN_ID=$(curl -H "Accept: application/vnd.github+json" \
+ RUN_ID=$(curl -s -H "Accept: application/vnd.github+json" \
'https://api.github.com/repos/miykael/nipype_tutorial/actions/runs?created=>'${START}'&per_page=1' \
| jq -r '.workflow_runs[0].id')
@@ -42,13 +42,12 @@ jobs:
- name: Check if action completed
timeout-minutes: 120
run: |
- set -x
RUN_ID=${{ steps.dispatched.outputs.run_id }}
while :
do
TIMESTAMP=$(date +'%Y-%m-%dT%H:%M:%S%z')
# check status every 5 minutes
- STATUS=$(curl -H "Accept: application/vnd.github+json" \
+ STATUS=$(curl -s -H "Accept: application/vnd.github+json" \
https://api.github.com/repos/miykael/nipype_tutorial/actions/runs/${RUN_ID} \
| jq -r '.conclusion')
case $STATUS in
@@ -65,3 +64,13 @@ jobs:
sleep 300
esac
done
+ - name: Cancel ongoing run if cancelled or failed
+ if: ${{ failure() || cancelled() }}
+ run: |
+ set -x
+ RUN_ID=${{ steps.dispatched.outputs.run_id }}
+ echo "Something went wrong, cancelling dispatched run"
+ curl -s -X POST \
+ -H "Accept: application/vnd.github+json" \
+ -H "Authorization: Bearer ${{ secrets.TUTORIAL_ACCESS_TOKEN }}" \
+ https://api.github.com/repos/miykael/nipype_tutorial/actions/runs/${RUN_ID}/cancel
diff --git a/.zenodo.json b/.zenodo.json
index fd9e5e9658..3bee5d5790 100644
--- a/.zenodo.json
+++ b/.zenodo.json
@@ -116,15 +116,15 @@
{
"name": "Yvernault, Benjamin"
},
- {
- "name": "Hamalainen, Carlo",
- "orcid": "0000-0001-7655-3830"
- },
{
"affiliation": "Institute for Biomedical Engineering, ETH and University of Zurich",
"name": "Christian, Horea",
"orcid": "0000-0001-7037-2449"
},
+ {
+ "name": "Hamalainen, Carlo",
+ "orcid": "0000-0001-7655-3830"
+ },
{
"affiliation": "Stanford University",
"name": "Ćirić , Rastko",
@@ -193,6 +193,9 @@
"name": "Dias, Maria de Fatima",
"orcid": "0000-0001-8878-1750"
},
+ {
+ "name": "Moloney, Brendan"
+ },
{
"affiliation": "Otto-von-Guericke-University Magdeburg, Germany",
"name": "Hanke, Michael",
@@ -202,9 +205,6 @@
"affiliation": "Child Mind Institute",
"name": "Giavasis, Steven"
},
- {
- "name": "Moloney, Brendan"
- },
{
"affiliation": "SRI International",
"name": "Nichols, B. Nolan",
@@ -363,6 +363,11 @@
"name": "Schaefer, Alexander",
"orcid": "0000-0001-6488-4739"
},
+ {
+ "affiliation": "CEA",
+ "name": "Papadopoulos Orfanos, Dimitri",
+ "orcid": "0000-0002-1242-8990"
+ },
{
"affiliation": "UniversityHospital Heidelberg, Germany",
"name": "Kleesiek, Jens"
@@ -414,11 +419,6 @@
{
"name": "Haselgrove, Christian"
},
- {
- "affiliation": "CEA",
- "name": "Papadopoulos Orfanos, Dimitri",
- "orcid": "0000-0002-1242-8990"
- },
{
"affiliation": "Department of Psychology, Stanford University; Parietal, INRIA",
"name": "Durnez, Joke",
diff --git a/Makefile b/Makefile
index 03c1152053..568d53379c 100644
--- a/Makefile
+++ b/Makefile
@@ -9,6 +9,10 @@ PYTHON ?= python
zipdoc: html
zip documentation.zip doc/_build/html
+.git-blame-ignore-revs: .git/HEAD
+ git log --grep "\[ignore-rev\]\|STY: black\|run black" -i --pretty=format:"# %ad - %ae - %s%n%H" > .git-blame-ignore-revs
+ echo >> .git-blame-ignore-revs
+
sdist: zipdoc
@echo "Building source distribution..."
$(PYTHON) setup.py sdist
diff --git a/THANKS.rst b/THANKS.rst
index 4d8cdd47e7..71c4d9eeac 100644
--- a/THANKS.rst
+++ b/THANKS.rst
@@ -19,4 +19,3 @@ and `UL1 TR000442 University of Iowa Clinical and Translational Science Program
We would also like to thank `JetBrains `__ for providing `Pycharm `__ licenses.
-
diff --git a/doc/_templates/indexsidebar.html b/doc/_templates/indexsidebar.html
index ce03fb63f9..f771858968 100644
--- a/doc/_templates/indexsidebar.html
+++ b/doc/_templates/indexsidebar.html
@@ -11,4 +11,4 @@
{{ _('Links') }}
-{% endblock %}
+{% endblock %}
diff --git a/doc/_templates/navbar.html b/doc/_templates/navbar.html
index 9afb8ff8d5..6b2d457a1f 100644
--- a/doc/_templates/navbar.html
+++ b/doc/_templates/navbar.html
@@ -13,4 +13,3 @@
Developers ·
About ·
Nipy
-
diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst
index f6a9233f5e..34ff8392ea 100644
--- a/doc/changelog/1.X.X-changelog.rst
+++ b/doc/changelog/1.X.X-changelog.rst
@@ -1,3 +1,20 @@
+1.8.6 (April 05, 2023)
+======================
+
+Bug-fix release in the 1.8.x series.
+
+ * FIX: Update dcmstack interface for Py3 / newer pydicom (https://github.com/nipy/nipype/pull/3541)
+ * FIX: NiBabel 5, and NetworkX 3 and DIPY 1.6 compatibility (https://github.com/nipy/nipype/pull/3538)
+ * FIX: Check for non-mandatory output in DWIBiasCorrect (https://github.com/nipy/nipype/pull/3523)
+ * FIX: Removed leftover debug print statement in FEAT class (https://github.com/nipy/nipype/pull/3521)
+ * DOC: Fix a few more typos (https://github.com/nipy/nipype/pull/3516)
+ * DOC: Fix typos found by codespell (https://github.com/nipy/nipype/pull/3512)
+ * CI: Drop nipy tests until a fixed nipy is released (https://github.com/nipy/nipype/pull/3559)
+ * CI: Disable nipy tests generally, re-add with max numpy (https://github.com/nipy/nipype/pull/3532)
+ * CI: GitHub Workflows security hardening (https://github.com/nipy/nipype/pull/3519)
+ * CI: Allow tutorial test cancellation (https://github.com/nipy/nipype/pull/3514)
+
+
1.8.5 (September 21, 2022)
==========================
diff --git a/doc/devel/filename_generation.rst b/doc/devel/filename_generation.rst
index 5772ab33c1..dbf264c5dd 100644
--- a/doc/devel/filename_generation.rst
+++ b/doc/devel/filename_generation.rst
@@ -169,4 +169,3 @@ absolute path is generated and used for the ``cmdline`` when run, but
In [80]: res.interface.inputs.outfile
Out[80]: 'bar.nii'
-
diff --git a/doc/devel/gitwash/forking_hell.rst b/doc/devel/gitwash/forking_hell.rst
index 1a3a163508..ac764c1c14 100644
--- a/doc/devel/gitwash/forking_hell.rst
+++ b/doc/devel/gitwash/forking_hell.rst
@@ -30,4 +30,3 @@ Create your own forked copy of nipype_
should find yourself at the home page for your own forked copy of nipype_.
.. include:: links.inc
-
diff --git a/doc/devel/gitwash/git_development.rst b/doc/devel/gitwash/git_development.rst
index 7b6e021752..3808ca9fa1 100644
--- a/doc/devel/gitwash/git_development.rst
+++ b/doc/devel/gitwash/git_development.rst
@@ -13,4 +13,3 @@ Contents:
set_up_fork
configure_git
development_workflow
-
diff --git a/doc/devel/gitwash/index.rst b/doc/devel/gitwash/index.rst
index 8cc6750192..aaf1fff1b3 100644
--- a/doc/devel/gitwash/index.rst
+++ b/doc/devel/gitwash/index.rst
@@ -14,5 +14,3 @@ Contents:
patching
git_development
git_resources
-
-
diff --git a/doc/devel/gitwash/set_up_fork.rst b/doc/devel/gitwash/set_up_fork.rst
index 2349d5852b..3105135155 100644
--- a/doc/devel/gitwash/set_up_fork.rst
+++ b/doc/devel/gitwash/set_up_fork.rst
@@ -65,4 +65,3 @@ Just for your own satisfaction, show yourself that you now have a new
origin git@github.com:your-user-name/nipype.git (push)
.. include:: links.inc
-
diff --git a/doc/devel/interface_specs.rst b/doc/devel/interface_specs.rst
index fa74aca753..5b4e3678f2 100644
--- a/doc/devel/interface_specs.rst
+++ b/doc/devel/interface_specs.rst
@@ -148,7 +148,7 @@ base class down to subclasses).:
``SlicerCommandLineInputSpec``: Defines inputs common to all Slicer classes (``module``)
-Most developers will only need to code at the the interface-level (i.e. implementing custom class inheriting from one of the above classes).
+Most developers will only need to code at the interface-level (i.e. implementing custom class inheriting from one of the above classes).
Output Specs
^^^^^^^^^^^^
@@ -519,7 +519,7 @@ SPM
For SPM-mediated interfaces:
-* ``_jobtype`` and ``_jobname``: special names used used by the SPM job manager. You can find them by saving your batch job as an .m file and looking up the code.
+* ``_jobtype`` and ``_jobname``: special names used by the SPM job manager. You can find them by saving your batch job as an .m file and looking up the code.
And optionally:
diff --git a/doc/devel/provenance.rst b/doc/devel/provenance.rst
index 5e1694a3ae..b76f57ccf1 100644
--- a/doc/devel/provenance.rst
+++ b/doc/devel/provenance.rst
@@ -5,7 +5,7 @@ W3C PROV support
Overview
--------
-We're using the the `W3C PROV data model `_ to
+We're using the `W3C PROV data model `_ to
capture and represent provenance in Nipype.
For an overview see:
diff --git a/doc/interfaces.rst b/doc/interfaces.rst
index c0e4602284..e9fe73f562 100644
--- a/doc/interfaces.rst
+++ b/doc/interfaces.rst
@@ -8,7 +8,7 @@ Interfaces and Workflows
:Release: |version|
:Date: |today|
-Previous versions: `1.8.4 `_ `1.8.3 `_
+Previous versions: `1.8.5 `_ `1.8.4 `_
Workflows
---------
diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py
index 23006aad8b..839696144e 100644
--- a/nipype/algorithms/misc.py
+++ b/nipype/algorithms/misc.py
@@ -968,7 +968,6 @@ class CalculateNormalizedMoments(BaseInterface):
output_spec = CalculateNormalizedMomentsOutputSpec
def _run_interface(self, runtime):
-
self._moments = calc_moments(self.inputs.timeseries_file, self.inputs.moment)
return runtime
diff --git a/nipype/algorithms/tests/test_ErrorMap.py b/nipype/algorithms/tests/test_ErrorMap.py
index 98f05d8e17..faae860a5a 100644
--- a/nipype/algorithms/tests/test_ErrorMap.py
+++ b/nipype/algorithms/tests/test_ErrorMap.py
@@ -10,14 +10,13 @@
def test_errormap(tmpdir):
-
# Single-Spectual
# Make two fake 2*2*2 voxel volumes
# John von Neumann's birthday
volume1 = np.array([[[2.0, 8.0], [1.0, 2.0]], [[1.0, 9.0], [0.0, 3.0]]])
# Alan Turing's birthday
volume2 = np.array([[[0.0, 7.0], [2.0, 3.0]], [[1.0, 9.0], [1.0, 2.0]]])
- mask = np.array([[[1, 0], [0, 1]], [[1, 0], [0, 1]]])
+ mask = np.array([[[1, 0], [0, 1]], [[1, 0], [0, 1]]], dtype=np.uint8)
img1 = nb.Nifti1Image(volume1, np.eye(4))
img2 = nb.Nifti1Image(volume2, np.eye(4))
diff --git a/nipype/algorithms/tests/test_TSNR.py b/nipype/algorithms/tests/test_TSNR.py
index 26c1019b63..320bec8ab2 100644
--- a/nipype/algorithms/tests/test_TSNR.py
+++ b/nipype/algorithms/tests/test_TSNR.py
@@ -131,5 +131,6 @@ def assert_unchanged(self, expected_ranges):
[
[[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]], [[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]],
[[[9, 7, 5, 5, 7], [7, 8, 4, 8, 4]], [[0, 4, 7, 1, 7], [6, 8, 8, 8, 7]]],
- ]
+ ],
+ dtype=np.int16,
)
diff --git a/nipype/algorithms/tests/test_metrics.py b/nipype/algorithms/tests/test_metrics.py
index ad7502992e..3652fc2ce5 100644
--- a/nipype/algorithms/tests/test_metrics.py
+++ b/nipype/algorithms/tests/test_metrics.py
@@ -45,7 +45,7 @@ def test_fuzzy_overlap(tmpdir):
# Just considering the mask, the central pixel
# that raised the index now is left aside.
- data = np.zeros((3, 3, 3), dtype=int)
+ data = np.zeros((3, 3, 3), dtype=np.uint8)
data[0, 0, 0] = 1
data[2, 2, 2] = 1
nb.Nifti1Image(data, np.eye(4)).to_filename("mask.nii.gz")
diff --git a/nipype/algorithms/tests/test_misc.py b/nipype/algorithms/tests/test_misc.py
index 755527da49..3303344ef6 100644
--- a/nipype/algorithms/tests/test_misc.py
+++ b/nipype/algorithms/tests/test_misc.py
@@ -13,7 +13,6 @@
def test_CreateNifti(create_analyze_pair_file_in_directory):
-
filelist, outdir = create_analyze_pair_file_in_directory
create_nifti = misc.CreateNifti()
@@ -35,7 +34,6 @@ def test_CreateNifti(create_analyze_pair_file_in_directory):
def test_CalculateMedian(create_analyze_pair_file_in_directory):
-
mean = misc.CalculateMedian()
with pytest.raises(TypeError):
diff --git a/nipype/algorithms/tests/test_normalize_tpms.py b/nipype/algorithms/tests/test_normalize_tpms.py
index 9541d5d882..99aa5950ae 100644
--- a/nipype/algorithms/tests/test_normalize_tpms.py
+++ b/nipype/algorithms/tests/test_normalize_tpms.py
@@ -16,7 +16,6 @@
def test_normalize_tpms(tmpdir):
-
in_mask = example_data("tpms_msk.nii.gz")
mskdata = np.asanyarray(nb.load(in_mask).dataobj)
mskdata[mskdata > 0.0] = 1.0
diff --git a/nipype/caching/memory.py b/nipype/caching/memory.py
index 34d5ac1927..d2946710f1 100644
--- a/nipype/caching/memory.py
+++ b/nipype/caching/memory.py
@@ -287,7 +287,7 @@ def clear_runs_since(self, day=None, month=None, year=None, warn=True):
os.remove(log_name)
def _clear_all_but(self, runs, warn=True):
- """Remove all the runs appart from those given to the function
+ """Remove all the runs apart from those given to the function
input.
"""
rm_all_but(self.base_dir, set(runs.keys()), warn=warn)
diff --git a/nipype/conftest.py b/nipype/conftest.py
index 7323e72846..18b8a1ca6d 100644
--- a/nipype/conftest.py
+++ b/nipype/conftest.py
@@ -27,7 +27,6 @@ def _docdir(request):
# Trigger ONLY for the doctests.
doctest_plugin = request.config.pluginmanager.getplugin("doctest")
if isinstance(request.node, doctest_plugin.DoctestItem):
-
# Get the fixture dynamically by its name.
tmpdir = pp.local(data_dir)
diff --git a/nipype/external/cloghandler.py b/nipype/external/cloghandler.py
index 08db7a3a0a..c861654990 100644
--- a/nipype/external/cloghandler.py
+++ b/nipype/external/cloghandler.py
@@ -151,7 +151,9 @@ def __init__(
)
try:
BaseRotatingHandler.__init__(self, filename, mode, encoding)
- except TypeError: # Due to a different logging release without encoding support (Python 2.4.1 and earlier?)
+ except (
+ TypeError
+ ): # Due to a different logging release without encoding support (Python 2.4.1 and earlier?)
BaseRotatingHandler.__init__(self, filename, mode)
self.encoding = encoding
diff --git a/nipype/info.py b/nipype/info.py
index dca8ce4b59..3c8e8ee596 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -5,7 +5,7 @@
# nipype version information
# Remove .dev0 for release
-__version__ = "1.8.5"
+__version__ = "1.8.6"
def get_nipype_gitversion():
diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py
index 660e913dc3..28dc1dc410 100644
--- a/nipype/interfaces/afni/base.py
+++ b/nipype/interfaces/afni/base.py
@@ -262,8 +262,8 @@ def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None)
Generate a filename based on the given parameters.
The filename will take the form: cwd/basename.
- If change_ext is True, it will use the extentions specified in
- intputs.output_type.
+ If change_ext is True, it will use the extensions specified in
+ inputs.output_type.
Parameters
----------
diff --git a/nipype/interfaces/afni/model.py b/nipype/interfaces/afni/model.py
index 2e6d2fc15a..1235f8afff 100644
--- a/nipype/interfaces/afni/model.py
+++ b/nipype/interfaces/afni/model.py
@@ -265,7 +265,7 @@ class DeconvolveInputSpec(AFNICommandInputSpec):
class DeconvolveOutputSpec(TraitedSpec):
out_file = File(desc="output statistics file", exists=True)
reml_script = File(
- desc="automatical generated script to run 3dREMLfit", exists=True
+ desc="automatically generated script to run 3dREMLfit", exists=True
)
x1D = File(desc="save out X matrix", exists=True)
cbucket = File(desc="output regression coefficients file (if generated)")
@@ -528,7 +528,7 @@ class RemlfitInputSpec(AFNICommandInputSpec):
"be included.",
argstr="-Rglt %s",
)
- fitts_file = File(desc="ouput dataset for REML fitted model", argstr="-Rfitts %s")
+ fitts_file = File(desc="output dataset for REML fitted model", argstr="-Rfitts %s")
errts_file = File(
desc="output dataset for REML residuals = data - fitted model",
argstr="-Rerrts %s",
@@ -584,7 +584,7 @@ class RemlfitOutputSpec(AFNICommandOutputSpec):
"but ONLY for the GLTs added on the REMLfit command "
"line itself via 'gltsym' (if generated)"
)
- fitts_file = File(desc="ouput dataset for REML fitted model (if generated)")
+ fitts_file = File(desc="output dataset for REML fitted model (if generated)")
errts_file = File(
desc="output dataset for REML residuals = data - fitted model (if " "generated"
)
diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py
index fad5cbdf2f..0eec60cd62 100644
--- a/nipype/interfaces/afni/preprocess.py
+++ b/nipype/interfaces/afni/preprocess.py
@@ -821,7 +821,7 @@ class AutoTLRCInputSpec(CommandLineInputSpec):
class AutoTLRC(AFNICommand):
- """A minmal wrapper for the AutoTLRC script
+ """A minimal wrapper for the AutoTLRC script
The only option currently supported is no_ss.
For complete details, see the `3dQwarp Documentation.
`_
@@ -1133,7 +1133,6 @@ class ClipLevel(AFNICommandBase):
output_spec = ClipLevelOutputSpec
def aggregate_outputs(self, runtime=None, needed_outputs=None):
-
outputs = self._outputs()
outfile = os.path.join(os.getcwd(), "stat_result.json")
@@ -1327,7 +1326,7 @@ class ECMInputSpec(CentralityInputSpec):
)
fecm = traits.Bool(
desc="Fast centrality method; substantial speed increase but cannot "
- "accomodate thresholding; automatically selected if -thresh or "
+ "accommodate thresholding; automatically selected if -thresh or "
"-sparsity are not set",
argstr="-fecm",
)
@@ -2295,7 +2294,6 @@ class Seg(AFNICommandBase):
output_spec = AFNICommandOutputSpec
def aggregate_outputs(self, runtime=None, needed_outputs=None):
-
import glob
outputs = self._outputs()
@@ -2933,7 +2931,7 @@ class TProjectInputSpec(AFNICommandInputSpec):
the output dataset:
* mode = ZERO -- put zero values in their place;
- output datset is same length as input
+ output dataset is same length as input
* mode = KILL -- remove those time points;
output dataset is shorter than input
* mode = NTRP -- censored values are replaced by interpolated
@@ -3073,7 +3071,7 @@ class TProject(AFNICommand):
as ``-passband``. In this way, you can bandpass time-censored data, and at
the same time, remove other time series of no interest
(e.g., physiological estimates, motion parameters).
- Shifts voxel time series from input so that seperate slices are aligned to
+ Shifts voxel time series from input so that separate slices are aligned to
the same temporal origin.
Examples
@@ -3188,7 +3186,7 @@ class TShiftOutputSpec(AFNICommandOutputSpec):
class TShift(AFNICommand):
- """Shifts voxel time series from input so that seperate slices are aligned
+ """Shifts voxel time series from input so that separate slices are aligned
to the same temporal origin.
For complete details, see the `3dTshift Documentation.
@@ -3658,7 +3656,7 @@ class QwarpInputSpec(AFNICommandInputSpec):
with 3dNwarpApply and 3dNwarpCat, for example.
* To be clear, this is the warp from source dataset
coordinates to base dataset coordinates, where the
- values at each base grid point are the xyz displacments
+ values at each base grid point are the xyz displacements
needed to move that grid point's xyz values to the
corresponding xyz values in the source dataset:
base( (x,y,z) + WARP(x,y,z) ) matches source(x,y,z)
@@ -4034,7 +4032,7 @@ class QwarpInputSpec(AFNICommandInputSpec):
The goal is greater speed, and it seems to help this"
positively piggish program to be more expeditious."
* However, accuracy is somewhat lower with '-duplo',"
- for reasons that currenly elude Zhark; for this reason,"
+ for reasons that currently elude Zhark; for this reason,"
the Emperor does not usually use '-duplo'.
""",
@@ -4225,21 +4223,21 @@ class QwarpInputSpec(AFNICommandInputSpec):
)
hel = traits.Bool(
desc="Hellinger distance: a matching function for the adventurous"
- "This option has NOT be extensively tested for usefullness"
+ "This option has NOT be extensively tested for usefulness"
"and should be considered experimental at this infundibulum.",
argstr="-hel",
xor=["nmi", "mi", "lpc", "lpa", "pear"],
)
mi = traits.Bool(
desc="Mutual Information: a matching function for the adventurous"
- "This option has NOT be extensively tested for usefullness"
+ "This option has NOT be extensively tested for usefulness"
"and should be considered experimental at this infundibulum.",
argstr="-mi",
xor=["mi", "hel", "lpc", "lpa", "pear"],
)
nmi = traits.Bool(
desc="Normalized Mutual Information: a matching function for the adventurous"
- "This option has NOT been extensively tested for usefullness"
+ "This option has NOT been extensively tested for usefulness"
"and should be considered experimental at this infundibulum.",
argstr="-nmi",
xor=["nmi", "hel", "lpc", "lpa", "pear"],
diff --git a/nipype/interfaces/afni/utils.py b/nipype/interfaces/afni/utils.py
index 9c44a40fd8..3075cffce3 100644
--- a/nipype/interfaces/afni/utils.py
+++ b/nipype/interfaces/afni/utils.py
@@ -302,7 +302,6 @@ class BrickStat(AFNICommandBase):
output_spec = BrickStatOutputSpec
def aggregate_outputs(self, runtime=None, needed_outputs=None):
-
outputs = self._outputs()
outfile = os.path.join(os.getcwd(), "stat_result.json")
@@ -539,27 +538,27 @@ class CatInputSpec(AFNICommandInputSpec):
argstr="-sel %s",
)
out_int = traits.Bool(
- desc="specifiy int data type for output",
+ desc="specify int data type for output",
argstr="-i",
xor=["out_format", "out_nice", "out_double", "out_fint", "out_cint"],
)
out_nice = traits.Bool(
- desc="specifiy nice data type for output",
+ desc="specify nice data type for output",
argstr="-n",
xor=["out_format", "out_int", "out_double", "out_fint", "out_cint"],
)
out_double = traits.Bool(
- desc="specifiy double data type for output",
+ desc="specify double data type for output",
argstr="-d",
xor=["out_format", "out_nice", "out_int", "out_fint", "out_cint"],
)
out_fint = traits.Bool(
- desc="specifiy int, rounded down, data type for output",
+ desc="specify int, rounded down, data type for output",
argstr="-f",
xor=["out_format", "out_nice", "out_double", "out_int", "out_cint"],
)
out_cint = traits.Bool(
- desc="specifiy int, rounded up, data type for output",
+ desc="specify int, rounded up, data type for output",
xor=["out_format", "out_nice", "out_double", "out_fint", "out_int"],
)
@@ -693,7 +692,7 @@ class CenterMassInputSpec(CommandLineInputSpec):
argstr="-set %f %f %f",
)
local_ijk = traits.Bool(
- desc="Output values as (i,j,k) in local orienation", argstr="-local_ijk"
+ desc="Output values as (i,j,k) in local orientation", argstr="-local_ijk"
)
roi_vals = traits.List(
traits.Int,
@@ -2151,7 +2150,7 @@ class NwarpCatInputSpec(AFNICommandInputSpec):
usedefault=True,
)
expad = traits.Int(
- desc="Pad the nonlinear warps by the given number of voxels voxels in "
+ desc="Pad the nonlinear warps by the given number of voxels in "
"all directions. The warp displacements are extended by linear "
"extrapolation from the faces of the input grid..",
argstr="-expad %d",
@@ -2554,11 +2553,11 @@ class ReHoInputSpec(CommandLineInputSpec):
class ReHoOutputSpec(TraitedSpec):
out_file = File(exists=True, desc="Voxelwise regional homogeneity map")
- out_vals = File(desc="Table of labelwise regional homogenity values")
+ out_vals = File(desc="Table of labelwise regional homogeneity values")
class ReHo(AFNICommandBase):
- """Compute regional homogenity for a given neighbourhood.l,
+ """Compute regional homogeneity for a given neighbourhood.l,
based on a local neighborhood of that voxel.
For complete details, see the `3dReHo Documentation.
@@ -2595,7 +2594,6 @@ def _format_arg(self, name, spec, value):
class ResampleInputSpec(AFNICommandInputSpec):
-
in_file = File(
desc="input file to 3dresample",
argstr="-inset %s",
@@ -2740,7 +2738,7 @@ class TCatSBInputSpec(AFNICommandInputSpec):
class TCatSubBrick(AFNICommand):
"""Hopefully a temporary function to allow sub-brick selection until
- afni file managment is improved.
+ afni file management is improved.
For complete details, see the `3dTcat Documentation.
`_
diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py
index 8a20da3fb1..81da68be00 100644
--- a/nipype/interfaces/base/core.py
+++ b/nipype/interfaces/base/core.py
@@ -389,7 +389,6 @@ def run(self, cwd=None, ignore_exception=None, **inputs):
self._check_version_requirements(self.inputs)
with rtc(self, cwd=cwd, redirect_x=self._redirect_x) as runtime:
-
# Grab inputs now, as they should not change during execution
inputs = self.inputs.get_traitsfree()
outputs = None
diff --git a/nipype/interfaces/base/traits_extension.py b/nipype/interfaces/base/traits_extension.py
index 9ac4aa1839..cadce596fb 100644
--- a/nipype/interfaces/base/traits_extension.py
+++ b/nipype/interfaces/base/traits_extension.py
@@ -405,7 +405,6 @@ class MultiObject(traits.List):
"""Abstract class - shared functionality of input and output MultiObject"""
def validate(self, objekt, name, value):
-
# want to treat range and other sequences (except str) as list
if not isinstance(value, (str, bytes)) and isinstance(value, Sequence):
value = list(value)
diff --git a/nipype/interfaces/brainsuite/brainsuite.py b/nipype/interfaces/brainsuite/brainsuite.py
index f353017e58..91f762f47d 100644
--- a/nipype/interfaces/brainsuite/brainsuite.py
+++ b/nipype/interfaces/brainsuite/brainsuite.py
@@ -19,7 +19,6 @@
class BseInputSpec(CommandLineInputSpec):
-
inputMRIFile = File(mandatory=True, argstr="-i %s", desc="input MRI volume")
outputMRIVolume = File(
desc="output brain-masked MRI volume. If unspecified, output file name will be auto generated.",
diff --git a/nipype/interfaces/c3.py b/nipype/interfaces/c3.py
index c91c02569c..e9a3aed5eb 100644
--- a/nipype/interfaces/c3.py
+++ b/nipype/interfaces/c3.py
@@ -84,7 +84,7 @@ class C3dInputSpec(CommandLineInputSpec):
desc=(
"Write all images on the convert3d stack as multiple files."
" Supports both list of output files or a pattern for the output"
- " filenames (using %d substituion)."
+ " filenames (using %d substitution)."
),
)
pix_type = traits.Enum(
diff --git a/nipype/interfaces/cat12/preprocess.py b/nipype/interfaces/cat12/preprocess.py
index eec0ea936d..1d962bc08b 100644
--- a/nipype/interfaces/cat12/preprocess.py
+++ b/nipype/interfaces/cat12/preprocess.py
@@ -537,7 +537,6 @@ def _list_outputs(self):
]
for tidx, tissue in enumerate(["gm", "wm", "csf"]):
-
for image, prefix in [("modulated", "mw"), ("dartel", "r"), ("native", "")]:
outtype = f"{tissue}_output_{image}"
if isdefined(getattr(self.inputs, outtype)) and getattr(
@@ -594,7 +593,6 @@ def _list_outputs(self):
class CAT12SANLMDenoisingInputSpec(SPMCommandInputSpec):
-
in_files = InputMultiPath(
ImageFileSPM(exists=True),
field="data",
@@ -679,7 +677,6 @@ class CAT12SANLMDenoisingInputSpec(SPMCommandInputSpec):
class CAT12SANLMDenoisingOutputSpec(TraitedSpec):
-
out_file = File(desc="out file")
diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py
index 8775a8517e..bc5f2de2a5 100644
--- a/nipype/interfaces/cmtk/cmtk.py
+++ b/nipype/interfaces/cmtk/cmtk.py
@@ -226,7 +226,8 @@ def cmat(
# Add node information from specified parcellation scheme
path, name, ext = split_filename(resolution_network_file)
if ext == ".pck":
- gp = nx.read_gpickle(resolution_network_file)
+ with open(resolution_network_file, 'rb') as f:
+ gp = pickle.load(f)
elif ext == ".graphml":
gp = nx.read_graphml(resolution_network_file)
else:
@@ -263,7 +264,7 @@ def cmat(
)
intersection_matrix = np.matrix(intersection_matrix)
I = G.copy()
- H = nx.from_numpy_matrix(np.matrix(intersection_matrix))
+ H = nx.from_numpy_array(np.matrix(intersection_matrix))
H = nx.relabel_nodes(H, lambda x: x + 1) # relabel nodes so they start at 1
I.add_weighted_edges_from(
((u, v, d["weight"]) for u, v, d in H.edges(data=True))
@@ -271,7 +272,6 @@ def cmat(
dis = 0
for i in range(endpoints.shape[0]):
-
# ROI start => ROI end
try:
startROI = int(
@@ -379,22 +379,24 @@ def cmat(
fibdev.add_edge(u, v, weight=di["fiber_length_std"])
iflogger.info("Writing network as %s", matrix_name)
- nx.write_gpickle(G, op.abspath(matrix_name))
+ with open(op.abspath(matrix_name), 'wb') as f:
+ pickle.dump(G, f, pickle.HIGHEST_PROTOCOL)
- numfib_mlab = nx.to_numpy_matrix(numfib, dtype=int)
+ numfib_mlab = nx.to_numpy_array(numfib, dtype=int)
numfib_dict = {"number_of_fibers": numfib_mlab}
- fibmean_mlab = nx.to_numpy_matrix(fibmean, dtype=np.float64)
+ fibmean_mlab = nx.to_numpy_array(fibmean, dtype=np.float64)
fibmean_dict = {"mean_fiber_length": fibmean_mlab}
- fibmedian_mlab = nx.to_numpy_matrix(fibmedian, dtype=np.float64)
+ fibmedian_mlab = nx.to_numpy_array(fibmedian, dtype=np.float64)
fibmedian_dict = {"median_fiber_length": fibmedian_mlab}
- fibdev_mlab = nx.to_numpy_matrix(fibdev, dtype=np.float64)
+ fibdev_mlab = nx.to_numpy_array(fibdev, dtype=np.float64)
fibdev_dict = {"fiber_length_std": fibdev_mlab}
if intersections:
path, name, ext = split_filename(matrix_name)
intersection_matrix_name = op.abspath(name + "_intersections") + ext
iflogger.info("Writing intersection network as %s", intersection_matrix_name)
- nx.write_gpickle(I, intersection_matrix_name)
+ with open(intersection_matrix_name, 'wb') as f:
+ pickle.dump(I, f, pickle.HIGHEST_PROTOCOL)
path, name, ext = split_filename(matrix_mat_name)
if not ext == ".mat":
@@ -1070,7 +1072,8 @@ def create_nodes(roi_file, resolution_network_file, out_filename):
)
)
G.nodes[int(u)]["dn_position"] = tuple([xyz[0], xyz[2], -xyz[1]])
- nx.write_gpickle(G, out_filename)
+ with open(out_filename, 'wb') as f:
+ pickle.dump(G, f, pickle.HIGHEST_PROTOCOL)
return out_filename
diff --git a/nipype/interfaces/cmtk/convert.py b/nipype/interfaces/cmtk/convert.py
index 321a40fbba..a45daddcd6 100644
--- a/nipype/interfaces/cmtk/convert.py
+++ b/nipype/interfaces/cmtk/convert.py
@@ -18,6 +18,13 @@
from .base import CFFBaseInterface, have_cfflib
+def _read_pickle(fname):
+ import pickle
+
+ with open(fname, 'rb') as f:
+ return pickle.load(f)
+
+
class CFFConverterInputSpec(BaseInterfaceInputSpec):
graphml_networks = InputMultiPath(
File(exists=True), desc="list of graphML networks"
@@ -135,7 +142,7 @@ def _run_interface(self, runtime):
unpickled = []
for ntwk in self.inputs.gpickled_networks:
_, ntwk_name, _ = split_filename(ntwk)
- unpickled = nx.read_gpickle(ntwk)
+ unpickled = _read_pickle(ntwk)
cnet = cf.CNetwork(name=ntwk_name)
cnet.set_with_nxgraph(unpickled)
a.add_connectome_network(cnet)
diff --git a/nipype/interfaces/cmtk/nbs.py b/nipype/interfaces/cmtk/nbs.py
index 4e1db9ffb7..a2bd42abee 100644
--- a/nipype/interfaces/cmtk/nbs.py
+++ b/nipype/interfaces/cmtk/nbs.py
@@ -6,6 +6,7 @@
import numpy as np
import networkx as nx
+import pickle
from ... import logging
from ..base import (
@@ -23,13 +24,18 @@
iflogger = logging.getLogger("nipype.interface")
+def _read_pickle(fname):
+ with open(fname, 'rb') as f:
+ return pickle.load(f)
+
+
def ntwks_to_matrices(in_files, edge_key):
- first = nx.read_gpickle(in_files[0])
+ first = _read_pickle(in_files[0])
files = len(in_files)
nodes = len(first.nodes())
matrix = np.zeros((nodes, nodes, files))
for idx, name in enumerate(in_files):
- graph = nx.read_gpickle(name)
+ graph = _read_pickle(name)
for u, v, d in graph.edges(data=True):
try:
graph[u][v]["weight"] = d[
@@ -39,7 +45,7 @@ def ntwks_to_matrices(in_files, edge_key):
raise KeyError(
"the graph edges do not have {} attribute".format(edge_key)
)
- matrix[:, :, idx] = nx.to_numpy_matrix(graph) # Retrieve the matrix
+ matrix[:, :, idx] = nx.to_numpy_array(graph) # Retrieve the matrix
return matrix
@@ -149,8 +155,8 @@ def _run_interface(self, runtime):
pADJ[x, y] = PVAL[idx]
# Create networkx graphs from the adjacency matrix
- nbsgraph = nx.from_numpy_matrix(ADJ)
- nbs_pval_graph = nx.from_numpy_matrix(pADJ)
+ nbsgraph = nx.from_numpy_array(ADJ)
+ nbs_pval_graph = nx.from_numpy_array(pADJ)
# Relabel nodes because they should not start at zero for our convention
nbsgraph = nx.relabel_nodes(nbsgraph, lambda x: x + 1)
@@ -161,7 +167,7 @@ def _run_interface(self, runtime):
else:
node_ntwk_name = self.inputs.in_group1[0]
- node_network = nx.read_gpickle(node_ntwk_name)
+ node_network = _read_pickle(node_ntwk_name)
iflogger.info(
"Populating node dictionaries with attributes from %s", node_ntwk_name
)
@@ -172,12 +178,14 @@ def _run_interface(self, runtime):
path = op.abspath("NBS_Result_" + details)
iflogger.info(path)
- nx.write_gpickle(nbsgraph, path)
+ with open(path, 'wb') as f:
+ pickle.dump(nbsgraph, f, pickle.HIGHEST_PROTOCOL)
iflogger.info("Saving output NBS edge network as %s", path)
pval_path = op.abspath("NBS_P_vals_" + details)
iflogger.info(pval_path)
- nx.write_gpickle(nbs_pval_graph, pval_path)
+ with open(pval_path, 'wb') as f:
+ pickle.dump(nbs_pval_graph, f, pickle.HIGHEST_PROTOCOL)
iflogger.info("Saving output p-value network as %s", pval_path)
return runtime
diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py
index aaf4bece39..991ca89dcf 100644
--- a/nipype/interfaces/cmtk/nx.py
+++ b/nipype/interfaces/cmtk/nx.py
@@ -24,11 +24,16 @@
iflogger = logging.getLogger("nipype.interface")
+def _read_pickle(fname):
+ with open(fname, 'rb') as f:
+ return pickle.load(f)
+
+
def read_unknown_ntwk(ntwk):
if not isinstance(ntwk, nx.classes.graph.Graph):
_, _, ext = split_filename(ntwk)
if ext == ".pck":
- ntwk = nx.read_gpickle(ntwk)
+ ntwk = _read_pickle(ntwk)
elif ext == ".graphml":
ntwk = nx.read_graphml(ntwk)
return ntwk
@@ -121,7 +126,7 @@ def average_networks(in_files, ntwk_res_file, group_id):
counting_ntwk = ntwk.copy()
# Sums all the relevant variables
for index, subject in enumerate(in_files):
- tmp = nx.read_gpickle(subject)
+ tmp = _read_pickle(subject)
iflogger.info("File %s has %i edges", subject, tmp.number_of_edges())
edges = list(tmp.edges())
for edge in edges:
@@ -200,7 +205,8 @@ def average_networks(in_files, ntwk_res_file, group_id):
# Writes the networks and returns the name
network_name = group_id + "_average.pck"
- nx.write_gpickle(avg_ntwk, op.abspath(network_name))
+ with open(op.abspath(network_name), 'wb') as f:
+ pickle.dump(avg_ntwk, f, pickle.HIGHEST_PROTOCOL)
iflogger.info("Saving average network as %s", op.abspath(network_name))
avg_ntwk = fix_keys_for_gexf(avg_ntwk)
network_name = group_id + "_average.gexf"
@@ -460,7 +466,7 @@ def _run_interface(self, runtime):
edgentwks = list()
kntwks = list()
matlab = list()
- ntwk = nx.read_gpickle(self.inputs.in_file)
+ ntwk = _read_pickle(self.inputs.in_file)
# Each block computes, writes, and saves a measure
# The names are then added to the output .pck file list
@@ -483,7 +489,8 @@ def _run_interface(self, runtime):
for key in list(node_measures.keys()):
newntwk = add_node_data(node_measures[key], ntwk)
out_file = op.abspath(self._gen_outfilename(key, "pck"))
- nx.write_gpickle(newntwk, out_file)
+ with open(out_file, 'wb') as f:
+ pickle.dump(newntwk, f, pickle.HIGHEST_PROTOCOL)
nodentwks.append(out_file)
if isdefined(self.inputs.out_node_metrics_matlab):
node_out_file = op.abspath(self.inputs.out_node_metrics_matlab)
@@ -497,7 +504,8 @@ def _run_interface(self, runtime):
for key in list(edge_measures.keys()):
newntwk = add_edge_data(edge_measures[key], ntwk)
out_file = op.abspath(self._gen_outfilename(key, "pck"))
- nx.write_gpickle(newntwk, out_file)
+ with open(out_file, 'wb') as f:
+ pickle.dump(newntwk, f, pickle.HIGHEST_PROTOCOL)
edgentwks.append(out_file)
if isdefined(self.inputs.out_edge_metrics_matlab):
edge_out_file = op.abspath(self.inputs.out_edge_metrics_matlab)
@@ -521,7 +529,8 @@ def _run_interface(self, runtime):
out_file = op.abspath(
self._gen_outfilename(self.inputs.out_k_crust, "pck")
)
- nx.write_gpickle(ntwk_measures[key], out_file)
+ with open(out_file, 'wb') as f:
+ pickle.dump(ntwk_measures[key], f, pickle.HIGHEST_PROTOCOL)
kntwks.append(out_file)
gpickled.extend(kntwks)
diff --git a/nipype/interfaces/cmtk/tests/test_nbs.py b/nipype/interfaces/cmtk/tests/test_nbs.py
index 46da939f1a..6323546c1e 100644
--- a/nipype/interfaces/cmtk/tests/test_nbs.py
+++ b/nipype/interfaces/cmtk/tests/test_nbs.py
@@ -2,6 +2,7 @@
from ....utils.misc import package_check
import numpy as np
import networkx as nx
+import pickle
import pytest
have_cv = True
@@ -17,10 +18,11 @@ def creating_graphs(tmpdir):
graphnames = ["name" + str(i) for i in range(6)]
for idx, name in enumerate(graphnames):
graph = np.random.rand(10, 10)
- G = nx.from_numpy_matrix(graph)
+ G = nx.from_numpy_array(graph)
out_file = tmpdir.strpath + graphnames[idx] + ".pck"
# Save as pck file
- nx.write_gpickle(G, out_file)
+ with open(out_file, 'wb') as f:
+ pickle.dump(G, f, pickle.HIGHEST_PROTOCOL)
graphlist.append(out_file)
return graphlist
diff --git a/nipype/interfaces/dcm2nii.py b/nipype/interfaces/dcm2nii.py
index 1ba6f24eb9..4732e50e4e 100644
--- a/nipype/interfaces/dcm2nii.py
+++ b/nipype/interfaces/dcm2nii.py
@@ -119,7 +119,8 @@ class Dcm2nii(CommandLine):
>>> converter.inputs.gzip_output = True
>>> converter.inputs.output_dir = '.'
>>> converter.cmdline # doctest: +ELLIPSIS
- 'dcm2nii -a y -c y -b config.ini -v y -d y -e y -g y -i n -n y -o . -p y -x n -f n functional_1.dcm'"""
+ 'dcm2nii -a y -c y -b config.ini -v y -d y -e y -g y -i n -n y -o . -p y -x n -f n functional_1.dcm'
+ """
input_spec = Dcm2niiInputSpec
output_spec = Dcm2niiOutputSpec
diff --git a/nipype/interfaces/dcmstack.py b/nipype/interfaces/dcmstack.py
index bc18659c93..49acf9f509 100644
--- a/nipype/interfaces/dcmstack.py
+++ b/nipype/interfaces/dcmstack.py
@@ -24,7 +24,7 @@
have_dcmstack = True
try:
- import dicom
+ import pydicom
import dcmstack
from dcmstack.dcmmeta import NiftiWrapper
except ImportError:
@@ -34,7 +34,7 @@
def sanitize_path_comp(path_comp):
result = []
for char in path_comp:
- if char not in string.letters + string.digits + "-_.":
+ if char not in string.ascii_letters + string.digits + "-_.":
result.append("_")
else:
result.append(char)
@@ -55,7 +55,7 @@ class NiftiGeneratorBase(BaseInterface):
embedded meta data."""
def _get_out_path(self, meta, idx=None):
- """Return the output path for the gernerated Nifti."""
+ """Return the output path for the generated Nifti."""
if self.inputs.out_format:
out_fmt = self.inputs.out_format
else:
@@ -154,7 +154,7 @@ def _run_interface(self, runtime):
stack = dcmstack.DicomStack(meta_filter=meta_filter)
for src_path in src_paths:
if not imghdr.what(src_path) == "gif":
- src_dcm = dicom.read_file(src_path, force=self.inputs.force_read)
+ src_dcm = pydicom.dcmread(src_path, force=self.inputs.force_read)
stack.add_dcm(src_dcm)
nii = stack.to_nifti(embed_meta=True)
nw = NiftiWrapper(nii)
@@ -262,7 +262,7 @@ def _outputs(self):
return outputs
def _run_interface(self, runtime):
- # If the 'meta_keys' input is a list, covert it to a dict
+ # If the 'meta_keys' input is a list, convert it to a dict
self._make_name_map()
nw = NiftiWrapper.from_filename(self.inputs.in_file)
self.result = {}
@@ -342,7 +342,7 @@ class MergeNiftiInputSpec(NiftiGeneratorBaseInputSpec):
merge_dim = traits.Int(
desc="Dimension to merge along. If not "
"specified, the last singular or "
- "non-existant dimension is used."
+ "non-existent dimension is used."
)
diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py
index 03488712ed..c42db76d5b 100644
--- a/nipype/interfaces/diffusion_toolkit/dti.py
+++ b/nipype/interfaces/diffusion_toolkit/dti.py
@@ -208,7 +208,7 @@ class DTITrackerInputSpec(CommandLineInputSpec):
desc="set angle threshold. default value is 35 degree", argstr="-at %f"
)
angle_threshold_weight = traits.Float(
- desc="set angle threshold weighting factor. weighting will be be applied "
+ desc="set angle threshold weighting factor. weighting will be applied "
"on top of the angle_threshold",
argstr="-atw %f",
)
diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py
index d8a1c0fbf5..161ed33227 100644
--- a/nipype/interfaces/dipy/base.py
+++ b/nipype/interfaces/dipy/base.py
@@ -110,7 +110,7 @@ def convert_to_traits_type(dipy_type, is_file=False):
"""Convert DIPY type to Traits type."""
dipy_type = dipy_type.lower()
is_mandatory = bool("optional" not in dipy_type)
- if "variable" in dipy_type and "string" in dipy_type:
+ if "variable" in dipy_type and "str" in dipy_type:
return traits.ListStr, is_mandatory
elif "variable" in dipy_type and "int" in dipy_type:
return traits.ListInt, is_mandatory
@@ -120,9 +120,9 @@ def convert_to_traits_type(dipy_type, is_file=False):
return traits.ListBool, is_mandatory
elif "variable" in dipy_type and "complex" in dipy_type:
return traits.ListComplex, is_mandatory
- elif "string" in dipy_type and not is_file:
+ elif "str" in dipy_type and not is_file:
return traits.Str, is_mandatory
- elif "string" in dipy_type and is_file:
+ elif "str" in dipy_type and is_file:
return File, is_mandatory
elif "int" in dipy_type:
return traits.Int, is_mandatory
diff --git a/nipype/interfaces/dtitk/utils.py b/nipype/interfaces/dtitk/utils.py
index c5850450a6..da030439de 100644
--- a/nipype/interfaces/dtitk/utils.py
+++ b/nipype/interfaces/dtitk/utils.py
@@ -161,7 +161,7 @@ class TVResampleInputSpec(CommandLineInputSpec):
desc="how to align output volume to input volume",
)
interpolation = traits.Enum(
- "LEI", "EI", argstr="-interp %s", desc="Log Euclidean Euclidean Interpolation"
+ "LEI", "EI", argstr="-interp %s", desc="Log Euclidean Interpolation"
)
array_size = traits.Tuple(
(traits.Int(), traits.Int(), traits.Int()),
diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py
index f1b7fafe53..5c06a09238 100644
--- a/nipype/interfaces/freesurfer/model.py
+++ b/nipype/interfaces/freesurfer/model.py
@@ -449,7 +449,6 @@ class GLMFitInputSpec(FSTraitedSpec):
class GLMFitOutputSpec(TraitedSpec):
-
glm_dir = Directory(exists=True, desc="output directory")
beta_file = File(exists=True, desc="map of regression coefficients")
error_file = File(desc="map of residual error")
diff --git a/nipype/interfaces/freesurfer/petsurfer.py b/nipype/interfaces/freesurfer/petsurfer.py
index d04409ad20..33602d2711 100644
--- a/nipype/interfaces/freesurfer/petsurfer.py
+++ b/nipype/interfaces/freesurfer/petsurfer.py
@@ -24,7 +24,6 @@
class GTMSegInputSpec(FSTraitedSpec):
-
subject_id = traits.String(argstr="--s %s", desc="subject id", mandatory=True)
xcerseg = traits.Bool(
@@ -136,7 +135,6 @@ def _list_outputs(self):
class GTMPVCInputSpec(FSTraitedSpec):
-
in_file = File(
exists=True,
argstr="--i %s",
@@ -422,7 +420,6 @@ class GTMPVCInputSpec(FSTraitedSpec):
class GTMPVCOutputSpec(TraitedSpec):
-
pvc_dir = Directory(desc="output directory")
ref_file = File(desc="Reference TAC in .dat")
hb_nifti = File(desc="High-binding TAC in nifti")
diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py
index a30749cf56..ccf783a1e3 100644
--- a/nipype/interfaces/freesurfer/preprocess.py
+++ b/nipype/interfaces/freesurfer/preprocess.py
@@ -1833,7 +1833,6 @@ class BBRegister(FSCommand):
output_spec = BBRegisterOutputSpec
def _list_outputs(self):
-
outputs = self.output_spec().get()
_in = self.inputs
@@ -1893,7 +1892,6 @@ def _format_arg(self, name, spec, value):
return super(BBRegister, self)._format_arg(name, spec, value)
def _gen_filename(self, name):
-
if name == "out_reg_file":
return self._list_outputs()[name]
return None
@@ -2180,7 +2178,6 @@ def _gen_filename(self, name):
class RobustRegisterInputSpec(FSTraitedSpec):
-
source_file = File(
exists=True, mandatory=True, argstr="--mov %s", desc="volume to be registered"
)
@@ -2302,7 +2299,6 @@ class RobustRegisterInputSpec(FSTraitedSpec):
class RobustRegisterOutputSpec(TraitedSpec):
-
out_reg_file = File(exists=True, desc="output registration file")
registered_file = File(exists=True, desc="output image with registration applied")
weights_file = File(exists=True, desc="image of weights used")
@@ -2361,8 +2357,8 @@ def _format_arg(self, name, spec, value):
def _list_outputs(self):
outputs = self.output_spec().get()
cwd = os.getcwd()
- prefices = dict(src=self.inputs.source_file, trg=self.inputs.target_file)
- suffices = dict(
+ prefixes = dict(src=self.inputs.source_file, trg=self.inputs.target_file)
+ suffixes = dict(
out_reg_file=("src", "_robustreg.lta", False),
registered_file=("src", "_robustreg", True),
weights_file=("src", "_robustweights", True),
@@ -2372,12 +2368,12 @@ def _list_outputs(self):
half_source_xfm=("src", "_robustxfm.lta", False),
half_targ_xfm=("trg", "_robustxfm.lta", False),
)
- for name, sufftup in list(suffices.items()):
+ for name, sufftup in list(suffixes.items()):
value = getattr(self.inputs, name)
if value:
if value is True:
outputs[name] = fname_presuffix(
- prefices[sufftup[0]],
+ prefixes[sufftup[0]],
suffix=sufftup[1],
newpath=cwd,
use_ext=sufftup[2],
@@ -2388,7 +2384,6 @@ def _list_outputs(self):
class FitMSParamsInputSpec(FSTraitedSpec):
-
in_files = traits.List(
File(exists=True),
argstr="%s",
@@ -2408,7 +2403,6 @@ class FitMSParamsInputSpec(FSTraitedSpec):
class FitMSParamsOutputSpec(TraitedSpec):
-
t1_image = File(exists=True, desc="image of estimated T1 relaxation values")
pd_image = File(exists=True, desc="image of estimated proton density values")
t2star_image = File(exists=True, desc="image of estimated T2* values")
@@ -2466,7 +2460,6 @@ def _gen_filename(self, name):
class SynthesizeFLASHInputSpec(FSTraitedSpec):
-
fixed_weighting = traits.Bool(
position=1,
argstr="-w",
@@ -2495,7 +2488,6 @@ class SynthesizeFLASHInputSpec(FSTraitedSpec):
class SynthesizeFLASHOutputSpec(TraitedSpec):
-
out_file = File(exists=True, desc="synthesized FLASH acquisition")
diff --git a/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py b/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py
index 6c377c9579..cdc2cc3131 100644
--- a/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py
+++ b/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py
@@ -35,7 +35,6 @@ def test_associated_file(tmpdir):
("./lh.white", "./lh.pial"),
(fsavginfo["white"], fsavginfo["pial"]),
]:
-
# Unspecified paths, possibly with missing hemisphere information,
# are equivalent to using the same directory and hemisphere
for name in ("pial", "lh.pial", pial):
diff --git a/nipype/interfaces/freesurfer/tests/test_utils.py b/nipype/interfaces/freesurfer/tests/test_utils.py
index 0ee9dd3751..09584a404e 100644
--- a/nipype/interfaces/freesurfer/tests/test_utils.py
+++ b/nipype/interfaces/freesurfer/tests/test_utils.py
@@ -17,7 +17,6 @@
@pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed")
def test_sample2surf(create_files_in_directory_plus_dummy_file):
-
s2s = fs.SampleToSurface()
# Test underlying command
assert s2s.cmd == "mri_vol2surf"
@@ -65,7 +64,6 @@ def set_illegal_range():
@pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed")
def test_surfsmooth(create_surf_file_in_directory):
-
smooth = fs.SurfaceSmooth()
# Test underlying command
@@ -104,7 +102,6 @@ def test_surfsmooth(create_surf_file_in_directory):
@pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed")
def test_surfxfm(create_surf_file_in_directory):
-
xfm = fs.SurfaceTransform()
# Test underlying command
@@ -141,7 +138,6 @@ def test_surfxfm(create_surf_file_in_directory):
@pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed")
def test_surfshots(create_files_in_directory_plus_dummy_file):
-
fotos = fs.SurfaceSnapshots()
# Test underlying command
diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py
index 9ee65366d2..921bb7488f 100644
--- a/nipype/interfaces/freesurfer/utils.py
+++ b/nipype/interfaces/freesurfer/utils.py
@@ -116,7 +116,6 @@ def createoutputdirs(outputs):
class SampleToSurfaceInputSpec(FSTraitedSpec):
-
source_file = File(
exists=True,
mandatory=True,
@@ -289,7 +288,6 @@ class SampleToSurfaceInputSpec(FSTraitedSpec):
class SampleToSurfaceOutputSpec(TraitedSpec):
-
out_file = File(exists=True, desc="surface file")
hits_file = File(exists=True, desc="image with number of hits at each voxel")
vox_file = File(
@@ -426,7 +424,6 @@ def _gen_filename(self, name):
class SurfaceSmoothInputSpec(FSTraitedSpec):
-
in_file = File(mandatory=True, argstr="--sval %s", desc="source surface file")
subject_id = traits.String(
mandatory=True, argstr="--s %s", desc="subject id of surface file"
@@ -455,7 +452,6 @@ class SurfaceSmoothInputSpec(FSTraitedSpec):
class SurfaceSmoothOutputSpec(TraitedSpec):
-
out_file = File(exists=True, desc="smoothed surface file")
@@ -753,7 +749,6 @@ class Surface2VolTransform(FSCommand):
class ApplyMaskInputSpec(FSTraitedSpec):
-
in_file = File(
exists=True,
mandatory=True,
@@ -803,7 +798,6 @@ class ApplyMaskInputSpec(FSTraitedSpec):
class ApplyMaskOutputSpec(TraitedSpec):
-
out_file = File(exists=True, desc="masked image")
@@ -822,7 +816,6 @@ class ApplyMask(FSCommand):
class SurfaceSnapshotsInputSpec(FSTraitedSpec):
-
subject_id = traits.String(
position=1, argstr="%s", mandatory=True, desc="subject to visualize"
)
@@ -956,7 +949,6 @@ class SurfaceSnapshotsInputSpec(FSTraitedSpec):
class SurfaceSnapshotsOutputSpec(TraitedSpec):
-
snapshots = OutputMultiPath(
File(exists=True), desc="tiff images of the surface from different perspectives"
)
@@ -1118,12 +1110,10 @@ def _gen_filename(self, name):
class ImageInfoInputSpec(FSTraitedSpec):
-
in_file = File(exists=True, position=1, argstr="%s", desc="image to query")
class ImageInfoOutputSpec(TraitedSpec):
-
info = traits.Any(desc="output of mri_info")
out_file = File(exists=True, desc="text file with image information")
data_type = traits.String(desc="image data type")
@@ -1138,7 +1128,6 @@ class ImageInfoOutputSpec(TraitedSpec):
class ImageInfo(FSCommand):
-
_cmd = "mri_info"
input_spec = ImageInfoInputSpec
output_spec = ImageInfoOutputSpec
@@ -2015,7 +2004,6 @@ def _gen_outfilename(self):
class AddXFormToHeaderInputSpec(FSTraitedSpec):
-
# required
in_file = File(
exists=True, mandatory=True, position=-2, argstr="%s", desc="input volume"
@@ -2035,7 +2023,6 @@ class AddXFormToHeaderInputSpec(FSTraitedSpec):
class AddXFormToHeaderOutputSpec(TraitedSpec):
-
out_file = File(exists=True, desc="output volume")
diff --git a/nipype/interfaces/fsl/dti.py b/nipype/interfaces/fsl/dti.py
index a49c508c64..60f0d8128d 100644
--- a/nipype/interfaces/fsl/dti.py
+++ b/nipype/interfaces/fsl/dti.py
@@ -462,7 +462,6 @@ def _cuda_update(self):
self._cmd = self._default_cmd
def _run_interface(self, runtime):
-
subjectdir = os.path.abspath(self.inputs.out_dir)
if not os.path.exists(subjectdir):
os.makedirs(subjectdir)
@@ -1347,7 +1346,6 @@ def _gen_filename(self, name):
class TractSkeletonInputSpec(FSLCommandInputSpec):
-
in_file = File(
exists=True,
mandatory=True,
@@ -1385,7 +1383,6 @@ class TractSkeletonInputSpec(FSLCommandInputSpec):
class TractSkeletonOutputSpec(TraitedSpec):
-
projected_data = File(desc="input data projected onto skeleton")
skeleton_file = File(desc="tract skeleton image")
@@ -1468,7 +1465,6 @@ def _list_outputs(self):
class DistanceMapInputSpec(FSLCommandInputSpec):
-
in_file = File(
exists=True,
mandatory=True,
@@ -1492,7 +1488,6 @@ class DistanceMapInputSpec(FSLCommandInputSpec):
class DistanceMapOutputSpec(TraitedSpec):
-
distance_map = File(exists=True, desc="value is distance to nearest nonzero voxels")
local_max_file = File(desc="image of local maxima")
diff --git a/nipype/interfaces/fsl/fix.py b/nipype/interfaces/fsl/fix.py
index 5c978f0a97..1b6d62a5bc 100644
--- a/nipype/interfaces/fsl/fix.py
+++ b/nipype/interfaces/fsl/fix.py
@@ -302,7 +302,6 @@ class Classifier(CommandLine):
cmd = "fix -c"
def _gen_artifacts_list_file(self, mel_ica, thresh):
-
_, trained_wts_file = os.path.split(self.inputs.trained_wts_file)
trained_wts_filestem = trained_wts_file.split(".")[0]
filestem = "fix4melview_" + trained_wts_filestem + "_thr"
diff --git a/nipype/interfaces/fsl/maths.py b/nipype/interfaces/fsl/maths.py
index 6e7baf74ce..d8669c4422 100644
--- a/nipype/interfaces/fsl/maths.py
+++ b/nipype/interfaces/fsl/maths.py
@@ -13,7 +13,6 @@
class MathsInput(FSLCommandInputSpec):
-
in_file = File(
position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on"
)
@@ -40,12 +39,10 @@ class MathsInput(FSLCommandInputSpec):
class MathsOutput(TraitedSpec):
-
out_file = File(desc="image written after calculations")
class MathsCommand(FSLCommand):
-
_cmd = "fslmaths"
input_spec = MathsInput
output_spec = MathsOutput
@@ -68,7 +65,6 @@ def _gen_filename(self, name):
class ChangeDataTypeInput(MathsInput):
-
_dtypes = ["float", "char", "int", "short", "double", "input"]
output_datatype = traits.Enum(
*_dtypes, position=-1, argstr="-odt %s", mandatory=True, desc="output data type"
@@ -83,7 +79,6 @@ class ChangeDataType(MathsCommand):
class ThresholdInputSpec(MathsInput):
-
thresh = traits.Float(
mandatory=True, position=4, argstr="%s", desc="threshold value"
)
@@ -126,7 +121,6 @@ def _format_arg(self, name, spec, value):
class StdImageInput(MathsInput):
-
dimension = traits.Enum(
"T",
"X",
@@ -149,7 +143,6 @@ class StdImage(MathsCommand):
class MeanImageInput(MathsInput):
-
dimension = traits.Enum(
"T",
"X",
@@ -170,7 +163,6 @@ class MeanImage(MathsCommand):
class MaxImageInput(MathsInput):
-
dimension = traits.Enum(
"T",
"X",
@@ -202,7 +194,6 @@ class MaxImage(MathsCommand):
class PercentileImageInput(MathsInput):
-
dimension = traits.Enum(
"T",
"X",
@@ -242,7 +233,6 @@ class PercentileImage(MathsCommand):
class MaxnImageInput(MathsInput):
-
dimension = traits.Enum(
"T",
"X",
@@ -266,7 +256,6 @@ class MaxnImage(MathsCommand):
class MinImageInput(MathsInput):
-
dimension = traits.Enum(
"T",
"X",
@@ -287,7 +276,6 @@ class MinImage(MathsCommand):
class MedianImageInput(MathsInput):
-
dimension = traits.Enum(
"T",
"X",
@@ -308,7 +296,6 @@ class MedianImage(MathsCommand):
class AR1ImageInput(MathsInput):
-
dimension = traits.Enum(
"T",
"X",
@@ -332,7 +319,6 @@ class AR1Image(MathsCommand):
class IsotropicSmoothInput(MathsInput):
-
fwhm = traits.Float(
mandatory=True,
xor=["sigma"],
@@ -363,7 +349,6 @@ def _format_arg(self, name, spec, value):
class ApplyMaskInput(MathsInput):
-
mask_file = File(
exists=True,
mandatory=True,
@@ -381,7 +366,6 @@ class ApplyMask(MathsCommand):
class KernelInput(MathsInput):
-
kernel_shape = traits.Enum(
"3D",
"2D",
@@ -412,7 +396,6 @@ class KernelInput(MathsInput):
class DilateInput(KernelInput):
-
operation = traits.Enum(
"mean",
"modal",
@@ -437,7 +420,6 @@ def _format_arg(self, name, spec, value):
class ErodeInput(KernelInput):
-
minimum_filter = traits.Bool(
argstr="%s",
position=6,
@@ -462,7 +444,6 @@ def _format_arg(self, name, spec, value):
class SpatialFilterInput(KernelInput):
-
operation = traits.Enum(
"mean",
"median",
@@ -482,7 +463,6 @@ class SpatialFilter(MathsCommand):
class UnaryMathsInput(MathsInput):
-
operation = traits.Enum(
"exp",
"log",
@@ -525,7 +505,6 @@ def _list_outputs(self):
class BinaryMathsInput(MathsInput):
-
operation = traits.Enum(
"add",
"sub",
@@ -566,7 +545,6 @@ class BinaryMaths(MathsCommand):
class MultiImageMathsInput(MathsInput):
-
op_string = traits.String(
position=4,
argstr="%s",
@@ -605,7 +583,6 @@ def _format_arg(self, name, spec, value):
class TemporalFilterInput(MathsInput):
-
lowpass_sigma = traits.Float(
-1,
argstr="%.6f",
diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py
index 059c597ce6..50485bac2e 100644
--- a/nipype/interfaces/fsl/model.py
+++ b/nipype/interfaces/fsl/model.py
@@ -484,7 +484,6 @@ def _list_outputs(self):
outputs["feat_dir"] = glob(os.path.join(os.getcwd(), "*ica"))[0]
else:
outputs["feat_dir"] = glob(os.path.join(os.getcwd(), "*feat"))[0]
- print("Outputs from FEATmodel:", outputs)
return outputs
diff --git a/nipype/interfaces/fsl/tests/test_dti.py b/nipype/interfaces/fsl/tests/test_dti.py
index 8f5abfc662..0a6a2e8d63 100644
--- a/nipype/interfaces/fsl/tests/test_dti.py
+++ b/nipype/interfaces/fsl/tests/test_dti.py
@@ -41,7 +41,6 @@ def test_dtifit2(create_files_in_directory):
@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code")
def test_randomise2():
-
rand = fsl.Randomise()
# make sure command gets called
@@ -233,7 +232,6 @@ def test_Proj_thresh():
# test vec_reg
@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code")
def test_Vec_reg():
-
vrg = fsl.VecReg()
# make sure command gets called
diff --git a/nipype/interfaces/fsl/tests/test_preprocess.py b/nipype/interfaces/fsl/tests/test_preprocess.py
index 23ae7a6824..a5752d8d49 100644
--- a/nipype/interfaces/fsl/tests/test_preprocess.py
+++ b/nipype/interfaces/fsl/tests/test_preprocess.py
@@ -402,7 +402,6 @@ def test_mcflirt_noinput():
@pytest.mark.skipif(no_fsl(), reason="fsl is not installed")
def test_fnirt(setup_flirt):
-
tmpdir, infile, reffile = setup_flirt
tmpdir.chdir()
fnirt = fsl.FNIRT()
@@ -497,7 +496,7 @@ def test_fnirt(setup_flirt):
("log_file", "--logout=%s" % infile, infile),
]
- for (name, settings, arg) in opt_map:
+ for name, settings, arg in opt_map:
fnirt = fsl.FNIRT(in_file=infile, ref_file=reffile, **{name: arg})
if name in ("config_file", "affine_file", "field_file", "fieldcoeff_file"):
diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py
index 4bb44d00b3..734eccc74d 100644
--- a/nipype/interfaces/fsl/utils.py
+++ b/nipype/interfaces/fsl/utils.py
@@ -482,7 +482,6 @@ class ExtractROI(FSLCommand):
output_spec = ExtractROIOutputSpec
def _format_arg(self, name, spec, value):
-
if name == "crop_list":
return " ".join(map(str, sum(list(map(list, value)), [])))
return super(ExtractROI, self)._format_arg(name, spec, value)
@@ -1251,7 +1250,6 @@ def _gen_filename(self, name):
class PlotTimeSeriesInputSpec(FSLCommandInputSpec):
-
in_file = traits.Either(
File(exists=True),
traits.List(File(exists=True)),
@@ -1308,7 +1306,6 @@ class PlotTimeSeriesInputSpec(FSLCommandInputSpec):
class PlotTimeSeriesOutputSpec(TraitedSpec):
-
out_file = File(exists=True, desc="image to write")
@@ -1374,7 +1371,6 @@ def _gen_filename(self, name):
class PlotMotionParamsInputSpec(FSLCommandInputSpec):
-
in_file = traits.Either(
File(exists=True),
traits.List(File(exists=True)),
@@ -1406,7 +1402,6 @@ class PlotMotionParamsInputSpec(FSLCommandInputSpec):
class PlotMotionParamsOutputSpec(TraitedSpec):
-
out_file = File(exists=True, desc="image to write")
@@ -1443,7 +1438,6 @@ class PlotMotionParams(FSLCommand):
output_spec = PlotMotionParamsOutputSpec
def _format_arg(self, name, spec, value):
-
if name == "plot_type":
source = self.inputs.in_source
@@ -1602,7 +1596,6 @@ def _gen_filename(self, name):
class SwapDimensionsInputSpec(FSLCommandInputSpec):
-
in_file = File(
exists=True, mandatory=True, argstr="%s", position="1", desc="input image"
)
@@ -1619,7 +1612,6 @@ class SwapDimensionsInputSpec(FSLCommandInputSpec):
class SwapDimensionsOutputSpec(TraitedSpec):
-
out_file = File(exists=True, desc="image with new dimensions")
diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py
index 78d8efc797..09919e8607 100644
--- a/nipype/interfaces/io.py
+++ b/nipype/interfaces/io.py
@@ -254,7 +254,6 @@ class DataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
# Set call-able inputs attributes
def __setattr__(self, key, value):
-
if key not in self.copyable_trait_names():
if not isdefined(value):
super(DataSinkInputSpec, self).__setattr__(key, value)
@@ -267,7 +266,6 @@ def __setattr__(self, key, value):
# DataSink outputs
class DataSinkOutputSpec(TraitedSpec):
-
# Init out file
out_file = traits.Any(desc="datasink output")
@@ -575,7 +573,6 @@ def _fetch_bucket(self, bucket_name):
try:
_get_head_bucket(s3_resource, bucket_name)
except Exception as exc:
-
# Try to connect anonymously
s3_resource.meta.client.meta.events.register(
"choose-signer.s3.*", botocore.handlers.disable_signing
@@ -764,7 +761,7 @@ def _list_outputs(self):
out_files.append(s3dst)
# Otherwise, copy locally src -> dst
if not s3_flag or isdefined(self.inputs.local_copy):
- # Create output directory if it doesnt exist
+ # Create output directory if it doesn't exist
if not os.path.exists(path):
try:
os.makedirs(path)
@@ -1313,12 +1310,11 @@ def _list_outputs(self):
class SelectFilesInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
-
base_directory = Directory(exists=True, desc="Root path common to templates.")
sort_filelist = traits.Bool(
True,
usedefault=True,
- desc="When matching mutliple files, return them" " in sorted order.",
+ desc="When matching multiple files, return them" " in sorted order.",
)
raise_on_empty = traits.Bool(
True,
@@ -1346,7 +1342,7 @@ class SelectFiles(IOBase):
This interface uses Python's {}-based string formatting syntax to plug
values (possibly known only at workflow execution time) into string
- templates and collect files from persistant storage. These templates can
+ templates and collect files from persistent storage. These templates can
also be combined with glob wildcards (``*``, ``?``) and character ranges (``[...]``).
The field names in the formatting template (i.e. the terms in braces) will
become inputs fields on the interface, and the keys in the templates
@@ -1447,7 +1443,6 @@ def _list_outputs(self):
raise ValueError(msg)
for field, template in list(self._templates.items()):
-
find_dirs = template[-1] == os.sep
# Build the full template path
@@ -1513,7 +1508,7 @@ class DataFinder(IOBase):
Will recursively search any subdirectories by default. This can be limited
with the min/max depth options.
Matched paths are available in the output 'out_paths'. Any named groups of
- captured text from the regular expression are also available as ouputs of
+ captured text from the regular expression are also available as outputs of
the same name.
Examples
@@ -1583,7 +1578,7 @@ def _run_interface(self, runtime):
]
self.result = None
for root_path in self.inputs.root_paths:
- # Handle tilda/env variables and remove extra seperators
+ # Handle tilda/env variables and remove extra separators
root_path = os.path.normpath(
os.path.expandvars(os.path.expanduser(root_path))
)
@@ -1612,7 +1607,7 @@ def _run_interface(self, runtime):
for key, vals in list(self.result.items()):
self.result[key] = vals[0]
else:
- # sort all keys acording to out_paths
+ # sort all keys according to out_paths
for key in list(self.result.keys()):
if key == "out_paths":
continue
@@ -1853,7 +1848,6 @@ def _list_outputs(self):
class XNATSourceInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
-
query_template = Str(
mandatory=True,
desc=("Layout used to get files. Relative to base " "directory if defined"),
@@ -2065,7 +2059,6 @@ def _list_outputs(self):
class XNATSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
-
_outputs = traits.Dict(Str, value={}, usedefault=True)
server = Str(mandatory=True, requires=["user", "pwd"], xor=["config"])
@@ -2083,7 +2076,7 @@ class XNATSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
assessor_id = Str(
desc=(
- "Option to customize ouputs representation in XNAT - "
+ "Option to customize outputs representation in XNAT - "
"assessor level will be used with specified id"
),
xor=["reconstruction_id"],
@@ -2091,7 +2084,7 @@ class XNATSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
reconstruction_id = Str(
desc=(
- "Option to customize ouputs representation in XNAT - "
+ "Option to customize outputs representation in XNAT - "
"reconstruction level will be used with specified id"
),
xor=["assessor_id"],
@@ -2154,7 +2147,6 @@ def _list_outputs(self):
)
if not shared.exists(): # subject not in share project
-
share_project = xnat.select("/project/%s" % self.inputs.project_id)
if not share_project.exists(): # check project exists
@@ -2185,9 +2177,7 @@ def _list_outputs(self):
# gather outputs and upload them
for key, files in list(self.inputs._outputs.items()):
-
for name in ensure_list(files):
-
if isinstance(name, list):
for i, file_name in enumerate(name):
push_file(
@@ -2206,7 +2196,6 @@ def unquote_id(string):
def push_file(self, xnat, file_name, out_key, uri_template_args):
-
# grab info from output file names
val_list = [
unquote_id(val)
@@ -2269,7 +2258,6 @@ def push_file(self, xnat, file_name, out_key, uri_template_args):
# shares the experiment back to the original project if relevant
if "original_project" in uri_template_args:
-
experiment_template = (
"/project/%(original_project)s"
"/subject/%(subject_id)s/experiment/%(experiment_id)s"
@@ -2318,7 +2306,6 @@ class SQLiteSink(LibraryBaseInterface, IOBase):
_pkg = "sqlite3"
def __init__(self, input_names, **inputs):
-
super(SQLiteSink, self).__init__(**inputs)
self._input_names = ensure_list(input_names)
@@ -2381,7 +2368,6 @@ class MySQLSink(IOBase):
input_spec = MySQLSinkInputSpec
def __init__(self, input_names, **inputs):
-
super(MySQLSink, self).__init__(**inputs)
self._input_names = ensure_list(input_names)
diff --git a/nipype/interfaces/matlab.py b/nipype/interfaces/matlab.py
index 294abdf3ef..f68c5ea43d 100644
--- a/nipype/interfaces/matlab.py
+++ b/nipype/interfaces/matlab.py
@@ -191,7 +191,7 @@ def _gen_matlab_command(self, argstr, script_lines):
else:
prescript.insert(0, "fprintf(1,'Executing code at %s:\\n',datestr(now));")
for path in paths:
- # addpath() is not available after compliation
+ # addpath() is not available after compilation
# https://www.mathworks.com/help/compiler/ismcc.html
# https://www.mathworks.com/help/compiler/isdeployed.html
prescript.append("if ~(ismcc || isdeployed), addpath('%s'); end;\n" % path)
diff --git a/nipype/interfaces/minc/base.py b/nipype/interfaces/minc/base.py
index 3de0112614..7fe817dcaa 100644
--- a/nipype/interfaces/minc/base.py
+++ b/nipype/interfaces/minc/base.py
@@ -80,7 +80,7 @@ def read_hdf5_version(s):
versions = {"minc": None, "libminc": None, "netcdf": None, "hdf5": None}
for l in out.split("\n"):
- for (name, f) in [
+ for name, f in [
("minc", read_program_version),
("libminc", read_libminc_version),
("netcdf", read_netcdf_version),
diff --git a/nipype/interfaces/minc/minc.py b/nipype/interfaces/minc/minc.py
index 0d4c302f94..4e740afab4 100644
--- a/nipype/interfaces/minc/minc.py
+++ b/nipype/interfaces/minc/minc.py
@@ -350,7 +350,7 @@ class ToRawOutputSpec(TraitedSpec):
class ToRaw(StdOutCommandLine):
"""Dump a chunk of MINC file data. This program is largely
- superceded by mincextract (see Extract).
+ superseded by mincextract (see Extract).
Examples
--------
@@ -518,32 +518,32 @@ class ToEcatInputSpec(CommandLineInputSpec):
)
ignore_patient_variable = traits.Bool(
- desc="Ignore informations from the minc patient variable.",
+ desc="Ignore information from the minc patient variable.",
argstr="-ignore_patient_variable",
)
ignore_study_variable = traits.Bool(
- desc="Ignore informations from the minc study variable.",
+ desc="Ignore information from the minc study variable.",
argstr="-ignore_study_variable",
)
ignore_acquisition_variable = traits.Bool(
- desc="Ignore informations from the minc acquisition variable.",
+ desc="Ignore information from the minc acquisition variable.",
argstr="-ignore_acquisition_variable",
)
ignore_ecat_acquisition_variable = traits.Bool(
- desc="Ignore informations from the minc ecat_acquisition variable.",
+ desc="Ignore information from the minc ecat_acquisition variable.",
argstr="-ignore_ecat_acquisition_variable",
)
ignore_ecat_main = traits.Bool(
- desc="Ignore informations from the minc ecat-main variable.",
+ desc="Ignore information from the minc ecat-main variable.",
argstr="-ignore_ecat_main",
)
ignore_ecat_subheader_variable = traits.Bool(
- desc="Ignore informations from the minc ecat-subhdr variable.",
+ desc="Ignore information from the minc ecat-subhdr variable.",
argstr="-ignore_ecat_subheader_variable",
)
@@ -1285,7 +1285,7 @@ class BeastInputSpec(CommandLineInputSpec):
-positive: Specify mask of positive segmentation (inside mask) instead of the default mask.
-output_selection: Specify file to output selected files.
-count: Specify file to output the patch count.
- -mask: Specify a segmentation mask instead of the the default mask.
+ -mask: Specify a segmentation mask instead of the default mask.
-no_mask: Do not apply a segmentation mask. Perform the segmentation over the entire image.
-no_positive: Do not apply a positive mask.
Generic options for all commands:
@@ -1553,7 +1553,7 @@ class PikInputSpec(CommandLineInputSpec):
)
start = traits.Int(
- desc="Slice number to get. (note this is in voxel co-ordinates).",
+ desc="Slice number to get. (note this is in voxel coordinates).",
argstr="--slice %s",
) # FIXME Int is correct?
@@ -1565,7 +1565,7 @@ class PikInputSpec(CommandLineInputSpec):
slice_y = traits.Bool(desc="Get a coronal (y) slice.", argstr="-y", xor=_xor_slice)
slice_x = traits.Bool(
desc="Get a sagittal (x) slice.", argstr="-x", xor=_xor_slice
- ) # FIXME typo in man page? sagital?
+ ) # FIXME typo in man page? sagittal?
triplanar = traits.Bool(
desc="Create a triplanar view of the input file.", argstr="--triplanar"
@@ -2759,7 +2759,7 @@ class NormInputSpec(CommandLineInputSpec):
exists=True,
)
clamp = traits.Bool(
- desc="Force the ouput range between limits [default].",
+ desc="Force the output range between limits [default].",
argstr="-clamp",
usedefault=True,
default_value=True,
@@ -3031,7 +3031,6 @@ class Volpad(CommandLine):
class VolisoInputSpec(CommandLineInputSpec):
-
input_file = File(
desc="input file to convert to isotropic sampling",
exists=True,
diff --git a/nipype/interfaces/mipav/developer.py b/nipype/interfaces/mipav/developer.py
index af9bf5977b..03069fcf98 100644
--- a/nipype/interfaces/mipav/developer.py
+++ b/nipype/interfaces/mipav/developer.py
@@ -1253,7 +1253,7 @@ class MedicAlgorithmSPECTRE2010InputSpec(CommandLineInputSpec):
traits.Bool,
File(),
hash_files=False,
- desc="Tissue classification of of the whole input volume.",
+ desc="Tissue classification of the whole input volume.",
argstr="--outFANTASM %s",
)
outd0 = traits.Either(
@@ -1310,7 +1310,7 @@ class MedicAlgorithmSPECTRE2010OutputSpec(TraitedSpec):
)
outPrior = File(desc="Probability prior from the atlas registrations", exists=True)
outFANTASM = File(
- desc="Tissue classification of of the whole input volume.", exists=True
+ desc="Tissue classification of the whole input volume.", exists=True
)
outd0 = File(desc="Initial Brainmask", exists=True)
outMidsagittal = File(desc="Plane dividing the brain hemispheres", exists=True)
diff --git a/nipype/interfaces/mrtrix3/preprocess.py b/nipype/interfaces/mrtrix3/preprocess.py
index be87930a38..a097295eea 100644
--- a/nipype/interfaces/mrtrix3/preprocess.py
+++ b/nipype/interfaces/mrtrix3/preprocess.py
@@ -257,7 +257,8 @@ def _format_arg(self, name, trait_spec, value):
def _list_outputs(self):
outputs = self.output_spec().get()
- outputs["out_file"] = op.abspath(self.inputs.out_file)
+ if self.inputs.out_file:
+ outputs["out_file"] = op.abspath(self.inputs.out_file)
if self.inputs.bias:
outputs["bias"] = op.abspath(self.inputs.bias)
return outputs
diff --git a/nipype/interfaces/niftyseg/tests/test_em_interfaces.py b/nipype/interfaces/niftyseg/tests/test_em_interfaces.py
index 5615f3e61c..c90d93a6ba 100644
--- a/nipype/interfaces/niftyseg/tests/test_em_interfaces.py
+++ b/nipype/interfaces/niftyseg/tests/test_em_interfaces.py
@@ -11,7 +11,6 @@
@pytest.mark.skipif(no_nifty_tool(cmd="seg_EM"), reason="niftyseg is not installed")
def test_seg_em():
-
# Create a node object
seg_em = EM()
diff --git a/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py b/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py
index 18156e37f1..53d5bd4170 100644
--- a/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py
+++ b/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py
@@ -13,7 +13,6 @@
no_nifty_tool(cmd="seg_PatchMatch"), reason="niftyseg is not installed"
)
def test_seg_patchmatch():
-
# Create a node object
seg_patchmatch = PatchMatch()
diff --git a/nipype/interfaces/niftyseg/tests/test_lesions.py b/nipype/interfaces/niftyseg/tests/test_lesions.py
index d46b380cc2..2daece08cb 100644
--- a/nipype/interfaces/niftyseg/tests/test_lesions.py
+++ b/nipype/interfaces/niftyseg/tests/test_lesions.py
@@ -13,7 +13,6 @@
no_nifty_tool(cmd="seg_FillLesions"), reason="niftyseg is not installed"
)
def test_seg_filllesions():
-
# Create a node object
seg_fill = FillLesions()
diff --git a/nipype/interfaces/niftyseg/tests/test_maths.py b/nipype/interfaces/niftyseg/tests/test_maths.py
index 84740b7447..6c0251d7f5 100644
--- a/nipype/interfaces/niftyseg/tests/test_maths.py
+++ b/nipype/interfaces/niftyseg/tests/test_maths.py
@@ -11,7 +11,6 @@
@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed")
def test_unary_maths():
-
# Create a node object
unarym = UnaryMaths()
@@ -38,7 +37,6 @@ def test_unary_maths():
@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed")
def test_binary_maths():
-
# Create a node object
binarym = BinaryMaths()
@@ -65,7 +63,6 @@ def test_binary_maths():
@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed")
def test_int_binary_maths():
-
# Create a node object
ibinarym = BinaryMathsInteger()
@@ -93,7 +90,6 @@ def test_int_binary_maths():
@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed")
def test_tuple_maths():
-
# Create a node object
tuplem = TupleMaths()
@@ -124,7 +120,6 @@ def test_tuple_maths():
@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed")
def test_merge():
-
# Create a node object
merge = Merge()
diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py
index 053902e2bd..95494e7f5f 100644
--- a/nipype/interfaces/nilearn.py
+++ b/nipype/interfaces/nilearn.py
@@ -155,7 +155,7 @@ def _process_inputs(self):
if self.inputs.include_global:
global_label_data = label_data.dataobj.sum(axis=3) # sum across all regions
global_label_data = (
- np.rint(global_label_data).astype(int).clip(0, 1)
+ np.rint(global_label_data).clip(0, 1).astype('u1')
) # binarize
global_label_data = self._4d(global_label_data, label_data.affine)
global_masker = nl.NiftiLabelsMasker(
diff --git a/nipype/interfaces/nipy/base.py b/nipype/interfaces/nipy/base.py
index 0991730e81..2d742d3e90 100644
--- a/nipype/interfaces/nipy/base.py
+++ b/nipype/interfaces/nipy/base.py
@@ -12,7 +12,7 @@
have_nipy = True
try:
package_check("nipy")
-except ImportError:
+except:
have_nipy = False
diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py
index f6c7aa1f61..d5b6c3a3de 100644
--- a/nipype/interfaces/nitime/analysis.py
+++ b/nipype/interfaces/nitime/analysis.py
@@ -32,7 +32,6 @@
class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec):
-
# Input either csv file, or time-series object and use _xor_inputs to
# discriminate
_xor_inputs = ("in_file", "in_TS")
diff --git a/nipype/interfaces/petpvc.py b/nipype/interfaces/petpvc.py
index f315e9fc7c..283677c59a 100644
--- a/nipype/interfaces/petpvc.py
+++ b/nipype/interfaces/petpvc.py
@@ -195,8 +195,8 @@ def _gen_fname(
"""Generate a filename based on the given parameters.
The filename will take the form: cwd/basename.
- If change_ext is True, it will use the extentions specified in
- intputs.output_type.
+ If change_ext is True, it will use the extensions specified in
+ inputs.output_type.
Parameters
----------
diff --git a/nipype/interfaces/semtools/brains/segmentation.py b/nipype/interfaces/semtools/brains/segmentation.py
index 2c97b86842..79e25c2bda 100644
--- a/nipype/interfaces/semtools/brains/segmentation.py
+++ b/nipype/interfaces/semtools/brains/segmentation.py
@@ -114,7 +114,7 @@ class BRAINSTalairach(SEMLikeCommandLine):
category: BRAINS.Segmentation
- description: This program creates a VTK structured grid defining the Talairach coordinate system based on four points: AC, PC, IRP, and SLA. The resulting structred grid can be written as either a classic VTK file or the new VTK XML file format. Two representations of the resulting grid can be written. The first is a bounding box representation that also contains the location of the AC and PC points. The second representation is the full Talairach grid representation that includes the additional rows of boxes added to the inferior allowing full coverage of the cerebellum.
+ description: This program creates a VTK structured grid defining the Talairach coordinate system based on four points: AC, PC, IRP, and SLA. The resulting structured grid can be written as either a classic VTK file or the new VTK XML file format. Two representations of the resulting grid can be written. The first is a bounding box representation that also contains the location of the AC and PC points. The second representation is the full Talairach grid representation that includes the additional rows of boxes added to the inferior allowing full coverage of the cerebellum.
version: 0.1
diff --git a/nipype/interfaces/semtools/brains/utilities.py b/nipype/interfaces/semtools/brains/utilities.py
index bed7438271..78ee3c25c5 100644
--- a/nipype/interfaces/semtools/brains/utilities.py
+++ b/nipype/interfaces/semtools/brains/utilities.py
@@ -173,7 +173,7 @@ class GeneratePurePlugMaskInputSpec(CommandLineInputSpec):
)
numberOfSubSamples = InputMultiPath(
traits.Int,
- desc="Number of continous index samples taken at each direction of lattice space for each plug volume",
+ desc="Number of continuous index samples taken at each direction of lattice space for each plug volume",
sep=",",
argstr="--numberOfSubSamples %s",
)
diff --git a/nipype/interfaces/semtools/diffusion/diffusion.py b/nipype/interfaces/semtools/diffusion/diffusion.py
index 8cc5a320e6..d352adf276 100644
--- a/nipype/interfaces/semtools/diffusion/diffusion.py
+++ b/nipype/interfaces/semtools/diffusion/diffusion.py
@@ -49,7 +49,7 @@ class dtiaverage(SEMLikeCommandLine):
category: Diffusion.Diffusion Tensor Images.CommandLineOnly
description: dtiaverage is a program that allows to compute the average of an arbitrary number of tensor fields (listed after the --inputs option) This program is used in our pipeline as the last step of the atlas building processing. When all the tensor fields have been deformed in the same space, to create the average tensor field (--tensor_output) we use dtiaverage.
- Several average method can be used (specified by the --method option): euclidian, log-euclidian and pga. The default being euclidian.
+ Several average methods can be used (specified by the --method option): euclidean, log-euclidean and pga. The default being euclidean.
version: 1.0.0
@@ -118,7 +118,7 @@ class dtiestimInputSpec(CommandLineInputSpec):
"wls",
"nls",
"ml",
- desc="Esitmation method (lls:linear least squares, wls:weighted least squares, nls:non-linear least squares, ml:maximum likelihood)",
+ desc="Estimation method (lls:linear least squares, wls:weighted least squares, nls:non-linear least squares, ml:maximum likelihood)",
argstr="--method %s",
)
correction = traits.Enum(
@@ -214,7 +214,7 @@ class dtiestim(SEMLikeCommandLine):
contributor: Casey Goodlett, Francois Budin
- acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler.
+ acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependencies on boost and a fortran compiler.
"""
input_spec = dtiestimInputSpec
@@ -553,7 +553,7 @@ class DWIConvertInputSpec(CommandLineInputSpec):
argstr="--useIdentityMeaseurementFrame ",
)
useBMatrixGradientDirections = traits.Bool(
- desc="Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. Only changes behavior for Siemens data. In some cases the standard public gradients are not properly computed. The gradients can emperically computed from the private BMatrix fields. In some cases the private BMatrix is consistent with the public grandients, but not in all cases, when it exists BMatrix is usually most robust.",
+ desc="Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. Only changes behavior for Siemens data. In some cases the standard public gradients are not properly computed. The gradients can be empirically computed from the private BMatrix fields. In some cases the private BMatrix is consistent with the public gradients, but not in all cases, when it exists BMatrix is usually most robust.",
argstr="--useBMatrixGradientDirections ",
)
outputDirectory = traits.Either(
diff --git a/nipype/interfaces/semtools/diffusion/gtract.py b/nipype/interfaces/semtools/diffusion/gtract.py
index eb8e05f4f5..2e5a5816c5 100644
--- a/nipype/interfaces/semtools/diffusion/gtract.py
+++ b/nipype/interfaces/semtools/diffusion/gtract.py
@@ -275,7 +275,7 @@ class gtractCoregBvaluesInputSpec(CommandLineInputSpec):
argstr="--outputTransform %s",
)
eddyCurrentCorrection = traits.Bool(
- desc="Flag to perform eddy current corection in addition to motion correction (recommended)",
+ desc="Flag to perform eddy current correction in addition to motion correction (recommended)",
argstr="--eddyCurrentCorrection ",
)
numberOfIterations = traits.Int(
@@ -501,7 +501,7 @@ class gtractCopyImageOrientationInputSpec(CommandLineInputSpec):
argstr="--inputVolume %s",
)
inputReferenceVolume = File(
- desc="Required: input file containing orietation that will be cloned.",
+ desc="Required: input file containing orientation that will be cloned.",
exists=True,
argstr="--inputReferenceVolume %s",
)
@@ -1119,7 +1119,7 @@ class gtractCoRegAnatomyInputSpec(CommandLineInputSpec):
argstr="--inputVolume %s",
)
inputAnatomicalVolume = File(
- desc="Required: input anatomical image file name. It is recommended that that the input anatomical image has been skull stripped and has the same orientation as the DWI scan.",
+ desc="Required: input anatomical image file name. It is recommended that the input anatomical image has been skull stripped and has the same orientation as the DWI scan.",
exists=True,
argstr="--inputAnatomicalVolume %s",
)
@@ -1224,7 +1224,7 @@ class gtractCoRegAnatomy(SEMLikeCommandLine):
category: Diffusion.GTRACT
- description: This program will register a Nrrd diffusion weighted 4D vector image to a fixed anatomical image. Two registration methods are supported for alignment with anatomical images: Rigid and B-Spline. The rigid registration performs a rigid body registration with the anatomical images and should be done as well to initialize the B-Spline transform. The B-SPline transform is the deformable transform, where the user can control the amount of deformation based on the number of control points as well as the maximum distance that these points can move. The B-Spline registration places a low dimensional grid in the image, which is deformed. This allows for some susceptibility related distortions to be removed from the diffusion weighted images. In general the amount of motion in the slice selection and read-out directions direction should be kept low. The distortion is in the phase encoding direction in the images. It is recommended that skull stripped (i.e. image containing only brain with skull removed) images shoud be used for image co-registration with the B-Spline transform.
+ description: This program will register a Nrrd diffusion weighted 4D vector image to a fixed anatomical image. Two registration methods are supported for alignment with anatomical images: Rigid and B-Spline. The rigid registration performs a rigid body registration with the anatomical images and should be done as well to initialize the B-Spline transform. The B-Spline transform is the deformable transform, where the user can control the amount of deformation based on the number of control points as well as the maximum distance that these points can move. The B-Spline registration places a low dimensional grid in the image, which is deformed. This allows for some susceptibility related distortions to be removed from the diffusion weighted images. In general the amount of motion in the slice selection and read-out directions should be kept low. The distortion is in the phase encoding direction in the images. It is recommended that skull stripped (i.e. image containing only brain with skull removed) images should be used for image co-registration with the B-Spline transform.
version: 4.0.0
@@ -1542,7 +1542,7 @@ class gtractFiberTracking(SEMLikeCommandLine):
category: Diffusion.GTRACT
- description: This program implements four fiber tracking methods (Free, Streamline, GraphSearch, Guided). The output of the fiber tracking is vtkPolyData (i.e. Polylines) that can be loaded into Slicer3 for visualization. The poly data can be saved in either old VTK format files (.vtk) or in the new VTK XML format (.xml). The polylines contain point data that defines ther Tensor at each point along the fiber tract. This can then be used to rendered as glyphs in Slicer3 and can be used to define severeal scalar measures without referencing back to the anisotropy images. (1) Free tracking is a basic streamlines algorithm. This is a direct implementation of the method original proposed by Basser et al. The tracking follows the primarty eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either as a result of maximum fiber length, low ansiotropy, or large curvature. This is a great way to explore your data. (2) The streamlines algorithm is a direct implementation of the method originally proposed by Basser et al. The tracking follows the primary eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either by reaching the ending region or reaching some stopping criteria. Stopping criteria are specified using the following parameters: tracking threshold, curvature threshold, and max length. Only paths terminating in the ending region are kept in this method. The TEND algorithm proposed by Lazar et al. (Human Brain Mapping 18:306-321, 2003) has been instrumented. This can be enabled using the --useTend option while performing Streamlines tracking. This utilizes the entire diffusion tensor to deflect the incoming vector instead of simply following the primary eigenvector. 
The TEND parameters are set using the --tendF and --tendG options. (3) Graph Search tracking is the first step in the full GTRACT algorithm developed by Cheng et al. (NeuroImage 31(3): 1075-1085, 2006) for finding the tracks in a tensor image. This method was developed to generate fibers in a Tensor representation where crossing fibers occur. The graph search algorithm follows the primary eigenvector in non-ambigous regions and utilizes branching and a graph search algorithm in ambigous regions. Ambiguous tracking regions are defined based on two criteria: Branching Al Threshold (anisotropy values below this value and above the traching threshold) and Curvature Major Eigen (angles of the primary eigenvector direction and the current tracking direction). In regions that meet this criteria, two or three tracking paths are considered. The first is the standard primary eigenvector direction. The second is the seconadary eigenvector direction. This is based on the assumption that these regions may be prolate regions. If the Random Walk option is selected then a third direction is also considered. This direction is defined by a cone pointing from the current position to the centroid of the ending region. The interior angle of the cone is specified by the user with the Branch/Guide Angle parameter. A vector contained inside of the cone is selected at random and used as the third direction. This method can also utilize the TEND option where the primary tracking direction is that specified by the TEND method instead of the primary eigenvector. The parameter '--maximumBranchPoints' allows the tracking to have this number of branches being considered at a time. If this number of branch points is exceeded at any time, then the algorithm will revert back to a streamline alogrithm until the number of branches is reduced. This allows the user to constrain the computational complexity of the algorithm. (4) The second phase of the GTRACT algorithm is Guided Tracking. 
This method incorporates anatomical information about the track orientation using an initial guess of the fiber track. In the originally proposed GTRACT method, this would be created from the fibers resulting from the Graph Search tracking. However, in practice this can be created using any method and could be defined manually. To create the guide fiber the program gtractCreateGuideFiber can be used. This program will load a fiber tract that has been generated and create a centerline representation of the fiber tract (i.e. a single fiber). In this method, the fiber tracking follows the primary eigenvector direction unless it deviates from the guide fiber track by a angle greater than that specified by the '--guidedCurvatureThreshold' parameter. The user must specify the guide fiber when running this program.
+ description: This program implements four fiber tracking methods (Free, Streamline, GraphSearch, Guided). The output of the fiber tracking is vtkPolyData (i.e. Polylines) that can be loaded into Slicer3 for visualization. The poly data can be saved in either old VTK format files (.vtk) or in the new VTK XML format (.xml). The polylines contain point data that defines the Tensor at each point along the fiber tract. This can then be used to rendered as glyphs in Slicer3 and can be used to define several scalar measures without referencing back to the anisotropy images. (1) Free tracking is a basic streamlines algorithm. This is a direct implementation of the method original proposed by Basser et al. The tracking follows the primarty eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either as a result of maximum fiber length, low ansiotropy, or large curvature. This is a great way to explore your data. (2) The streamlines algorithm is a direct implementation of the method originally proposed by Basser et al. The tracking follows the primary eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either by reaching the ending region or reaching some stopping criteria. Stopping criteria are specified using the following parameters: tracking threshold, curvature threshold, and max length. Only paths terminating in the ending region are kept in this method. The TEND algorithm proposed by Lazar et al. (Human Brain Mapping 18:306-321, 2003) has been instrumented. This can be enabled using the --useTend option while performing Streamlines tracking. This utilizes the entire diffusion tensor to deflect the incoming vector instead of simply following the primary eigenvector. 
The TEND parameters are set using the --tendF and --tendG options. (3) Graph Search tracking is the first step in the full GTRACT algorithm developed by Cheng et al. (NeuroImage 31(3): 1075-1085, 2006) for finding the tracks in a tensor image. This method was developed to generate fibers in a Tensor representation where crossing fibers occur. The graph search algorithm follows the primary eigenvector in non-ambiguous regions and utilizes branching and a graph search algorithm in ambiguous regions. Ambiguous tracking regions are defined based on two criteria: Branching Al Threshold (anisotropy values below this value and above the traching threshold) and Curvature Major Eigen (angles of the primary eigenvector direction and the current tracking direction). In regions that meet this criteria, two or three tracking paths are considered. The first is the standard primary eigenvector direction. The second is the seconadary eigenvector direction. This is based on the assumption that these regions may be prolate regions. If the Random Walk option is selected then a third direction is also considered. This direction is defined by a cone pointing from the current position to the centroid of the ending region. The interior angle of the cone is specified by the user with the Branch/Guide Angle parameter. A vector contained inside of the cone is selected at random and used as the third direction. This method can also utilize the TEND option where the primary tracking direction is that specified by the TEND method instead of the primary eigenvector. The parameter '--maximumBranchPoints' allows the tracking to have this number of branches being considered at a time. If this number of branch points is exceeded at any time, then the algorithm will revert back to a streamline algorithm until the number of branches is reduced. This allows the user to constrain the computational complexity of the algorithm. (4) The second phase of the GTRACT algorithm is Guided Tracking. 
This method incorporates anatomical information about the track orientation using an initial guess of the fiber track. In the originally proposed GTRACT method, this would be created from the fibers resulting from the Graph Search tracking. However, in practice this can be created using any method and could be defined manually. To create the guide fiber the program gtractCreateGuideFiber can be used. This program will load a fiber tract that has been generated and create a centerline representation of the fiber tract (i.e. a single fiber). In this method, the fiber tracking follows the primary eigenvector direction unless it deviates from the guide fiber track by a angle greater than that specified by the '--guidedCurvatureThreshold' parameter. The user must specify the guide fiber when running this program.
version: 4.0.0
@@ -1712,7 +1712,7 @@ class gtractTensorInputSpec(CommandLineInputSpec):
"NOMASK",
"ROIAUTO",
"ROI",
- desc="ROIAUTO: mask is implicitly defined using a otsu forground and hole filling algorithm. ROI: Uses the masks to define what parts of the image should be used for computing the transform. NOMASK: no mask used",
+ desc="ROIAUTO: mask is implicitly defined using a otsu foreground and hole filling algorithm. ROI: Uses the masks to define what parts of the image should be used for computing the transform. NOMASK: no mask used",
argstr="--maskProcessingMode %s",
)
maskVolume = File(
diff --git a/nipype/interfaces/semtools/diffusion/maxcurvature.py b/nipype/interfaces/semtools/diffusion/maxcurvature.py
index c4f170e9cb..be6bfd10e6 100644
--- a/nipype/interfaces/semtools/diffusion/maxcurvature.py
+++ b/nipype/interfaces/semtools/diffusion/maxcurvature.py
@@ -51,7 +51,7 @@ class maxcurvature(SEMLikeCommandLine):
contributor: Casey Goodlett
- acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler.
+ acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependencies on boost and a fortran compiler.
"""
input_spec = maxcurvatureInputSpec
diff --git a/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py b/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py
index cbf58623dc..6544282a00 100644
--- a/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py
+++ b/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py
@@ -47,7 +47,7 @@ class fiberstats(SEMLikeCommandLine):
contributor: Casey Goodlett
- acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler.
+ acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependencies on boost and a fortran compiler.
"""
input_spec = fiberstatsInputSpec
diff --git a/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py b/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py
index caddd16e22..cd8f1a5cd3 100644
--- a/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py
+++ b/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py
@@ -93,7 +93,7 @@ class fibertrack(SEMLikeCommandLine):
contributor: Casey Goodlett
- acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler.
+ acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependencies on boost and a fortran compiler.
"""
input_spec = fibertrackInputSpec
diff --git a/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py b/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py
index 67026cb890..5cd092caa6 100644
--- a/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py
+++ b/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py
@@ -126,7 +126,7 @@ class UKFTractographyInputSpec(CommandLineInputSpec):
)
Rs = traits.Float(desc="Measurement noise", argstr="--Rs %f")
maxBranchingAngle = traits.Float(
- desc="Maximum branching angle, in degrees. When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle). Branching is supressed when this maxBranchingAngle is set to 0.0",
+ desc="Maximum branching angle, in degrees. When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle). Branching is suppressed when this maxBranchingAngle is set to 0.0",
argstr="--maxBranchingAngle %f",
)
minBranchingAngle = traits.Float(
@@ -159,7 +159,7 @@ class UKFTractography(SEMLikeCommandLine):
category: Diffusion.Tractography
- description: This module traces fibers in a DWI Volume using the multiple tensor unscented Kalman Filter methology. For more informations check the documentation.
+ description: This module traces fibers in a DWI Volume using the multiple tensor unscented Kalman Filter methology. For more information check the documentation.
version: 1.0
diff --git a/nipype/interfaces/semtools/filtering/featuredetection.py b/nipype/interfaces/semtools/filtering/featuredetection.py
index 37a44ae4d5..df22f88810 100644
--- a/nipype/interfaces/semtools/filtering/featuredetection.py
+++ b/nipype/interfaces/semtools/filtering/featuredetection.py
@@ -21,10 +21,10 @@
class GenerateSummedGradientImageInputSpec(CommandLineInputSpec):
inputVolume1 = File(
- desc="input volume 1, usally t1 image", exists=True, argstr="--inputVolume1 %s"
+ desc="input volume 1, usually t1 image", exists=True, argstr="--inputVolume1 %s"
)
inputVolume2 = File(
- desc="input volume 2, usally t2 image", exists=True, argstr="--inputVolume2 %s"
+ desc="input volume 2, usually t2 image", exists=True, argstr="--inputVolume2 %s"
)
outputFileName = traits.Either(
traits.Bool,
@@ -309,10 +309,10 @@ class ErodeImage(SEMLikeCommandLine):
class GenerateBrainClippedImageInputSpec(CommandLineInputSpec):
inputImg = File(
- desc="input volume 1, usally t1 image", exists=True, argstr="--inputImg %s"
+ desc="input volume 1, usually t1 image", exists=True, argstr="--inputImg %s"
)
inputMsk = File(
- desc="input volume 2, usally t2 image", exists=True, argstr="--inputMsk %s"
+ desc="input volume 2, usually t2 image", exists=True, argstr="--inputMsk %s"
)
outputFileName = traits.Either(
traits.Bool,
@@ -402,7 +402,7 @@ class NeighborhoodMedian(SEMLikeCommandLine):
class GenerateTestImageInputSpec(CommandLineInputSpec):
inputVolume = File(
- desc="input volume 1, usally t1 image", exists=True, argstr="--inputVolume %s"
+ desc="input volume 1, usually t1 image", exists=True, argstr="--inputVolume %s"
)
outputVolume = traits.Either(
traits.Bool,
diff --git a/nipype/interfaces/semtools/legacy/registration.py b/nipype/interfaces/semtools/legacy/registration.py
index cb65aa12f5..959a1b1dc0 100644
--- a/nipype/interfaces/semtools/legacy/registration.py
+++ b/nipype/interfaces/semtools/legacy/registration.py
@@ -20,7 +20,9 @@
class scalartransformInputSpec(CommandLineInputSpec):
- input_image = File(desc="Image to tranform", exists=True, argstr="--input_image %s")
+ input_image = File(
+ desc="Image to transform", exists=True, argstr="--input_image %s"
+ )
output_image = traits.Either(
traits.Bool,
File(),
@@ -35,7 +37,7 @@ class scalartransformInputSpec(CommandLineInputSpec):
desc="Output file for transformation parameters",
argstr="--transformation %s",
)
- invert = traits.Bool(desc="Invert tranform before applying.", argstr="--invert ")
+ invert = traits.Bool(desc="Invert transform before applying.", argstr="--invert ")
deformation = File(
desc="Deformation field.", exists=True, argstr="--deformation %s"
)
diff --git a/nipype/interfaces/semtools/registration/brainsfit.py b/nipype/interfaces/semtools/registration/brainsfit.py
index b319ce1c86..56c9da54f2 100644
--- a/nipype/interfaces/semtools/registration/brainsfit.py
+++ b/nipype/interfaces/semtools/registration/brainsfit.py
@@ -291,7 +291,7 @@ class BRAINSFitInputSpec(CommandLineInputSpec):
argstr="--outputTransform %s",
)
initializeRegistrationByCurrentGenericTransform = traits.Bool(
- desc="If this flag is ON, the current generic composite transform, resulted from the linear registration stages, is set to initialize the follow nonlinear registration process. However, by the default behaviour, the moving image is first warped based on the existant transform before it is passed to the BSpline registration filter. It is done to speed up the BSpline registration by reducing the computations of composite transform Jacobian.",
+ desc="If this flag is ON, the current generic composite transform, resulted from the linear registration stages, is set to initialize the follow nonlinear registration process. However, by the default behaviour, the moving image is first warped based on the existent transform before it is passed to the BSpline registration filter. It is done to speed up the BSpline registration by reducing the computations of composite transform Jacobian.",
argstr="--initializeRegistrationByCurrentGenericTransform ",
)
failureExitCode = traits.Int(
@@ -327,7 +327,7 @@ class BRAINSFitInputSpec(CommandLineInputSpec):
argstr="--maximumNumberOfCorrections %d",
)
gui = traits.Bool(
- desc="Display intermediate image volumes for debugging. NOTE: This is not part of the standard build sytem, and probably does nothing on your installation.",
+ desc="Display intermediate image volumes for debugging. NOTE: This is not part of the standard build system, and probably does nothing on your installation.",
argstr="--gui ",
)
promptUser = traits.Bool(
@@ -392,7 +392,7 @@ class BRAINSFit(SEMLikeCommandLine):
category: Registration
- description: Register a three-dimensional volume to a reference volume (Mattes Mutual Information by default). Full documentation avalable here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSFit. Method described in BRAINSFit: Mutual Information Registrations of Whole-Brain 3D Images, Using the Insight Toolkit, Johnson H.J., Harris G., Williams K., The Insight Journal, 2007. http://hdl.handle.net/1926/1291
+ description: Register a three-dimensional volume to a reference volume (Mattes Mutual Information by default). Full documentation available here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSFit. Method described in BRAINSFit: Mutual Information Registrations of Whole-Brain 3D Images, Using the Insight Toolkit, Johnson H.J., Harris G., Williams K., The Insight Journal, 2007. http://hdl.handle.net/1926/1291
version: 3.0.0
diff --git a/nipype/interfaces/semtools/registration/brainsresample.py b/nipype/interfaces/semtools/registration/brainsresample.py
index e8ac045936..a77a52dca3 100644
--- a/nipype/interfaces/semtools/registration/brainsresample.py
+++ b/nipype/interfaces/semtools/registration/brainsresample.py
@@ -75,7 +75,7 @@ class BRAINSResampleInputSpec(CommandLineInputSpec):
defaultValue = traits.Float(desc="Default voxel value", argstr="--defaultValue %f")
gridSpacing = InputMultiPath(
traits.Int,
- desc="Add warped grid to output image to help show the deformation that occured with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). This is useful for makeing a 2D image of grid lines from the 3D space",
+ desc="Add warped grid to output image to help show the deformation that occurred with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). This is useful for making a 2D image of grid lines from the 3D space",
sep=",",
argstr="--gridSpacing %s",
)
diff --git a/nipype/interfaces/semtools/registration/specialized.py b/nipype/interfaces/semtools/registration/specialized.py
index 0726ab807c..85f8509a5b 100644
--- a/nipype/interfaces/semtools/registration/specialized.py
+++ b/nipype/interfaces/semtools/registration/specialized.py
@@ -86,7 +86,7 @@ class VBRAINSDemonWarpInputSpec(CommandLineInputSpec):
argstr="--registrationFilterType %s",
)
smoothDisplacementFieldSigma = traits.Float(
- desc="A gaussian smoothing value to be applied to the deformation feild at each iteration.",
+ desc="A gaussian smoothing value to be applied to the deformation field at each iteration.",
argstr="--smoothDisplacementFieldSigma %f",
)
numberOfPyramidLevels = traits.Int(
@@ -346,7 +346,7 @@ class BRAINSDemonWarpInputSpec(CommandLineInputSpec):
argstr="--registrationFilterType %s",
)
smoothDisplacementFieldSigma = traits.Float(
- desc="A gaussian smoothing value to be applied to the deformation feild at each iteration.",
+ desc="A gaussian smoothing value to be applied to the deformation field at each iteration.",
argstr="--smoothDisplacementFieldSigma %f",
)
numberOfPyramidLevels = traits.Int(
@@ -403,7 +403,7 @@ class BRAINSDemonWarpInputSpec(CommandLineInputSpec):
"ROIAUTO",
"ROI",
"BOBF",
- desc="What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is choosen, then the mask is implicitly defined using a otsu forground and hole filling algorithm. Where the Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.",
+ desc="What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is chosen, then the mask is implicitly defined using a otsu foreground and hole filling algorithm. Where the Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.",
argstr="--maskProcessingMode %s",
)
fixedBinaryVolume = File(
diff --git a/nipype/interfaces/semtools/segmentation/specialized.py b/nipype/interfaces/semtools/segmentation/specialized.py
index 0b1f46f420..a7744775c4 100644
--- a/nipype/interfaces/semtools/segmentation/specialized.py
+++ b/nipype/interfaces/semtools/segmentation/specialized.py
@@ -37,11 +37,11 @@ class BRAINSCutInputSpec(CommandLineInputSpec):
desc="print out some debugging information", argstr="--verbose %d"
)
multiStructureThreshold = traits.Bool(
- desc="multiStructureThreshold module to deal with overlaping area",
+ desc="multiStructureThreshold module to deal with overlapping area",
argstr="--multiStructureThreshold ",
)
histogramEqualization = traits.Bool(
- desc="A Histogram Equalization process could be added to the creating/applying process from Subject To Atlas. Default is false, which genreate input vectors without Histogram Equalization. ",
+ desc="A Histogram Equalization process could be added to the creating/applying process from Subject To Atlas. Default is false, which generate input vectors without Histogram Equalization. ",
argstr="--histogramEqualization ",
)
computeSSEOn = traits.Bool(
@@ -144,7 +144,7 @@ class BRAINSROIAutoInputSpec(CommandLineInputSpec):
argstr="--closingSize %f",
)
ROIAutoDilateSize = traits.Float(
- desc="This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.",
+ desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.",
argstr="--ROIAutoDilateSize %f",
)
outputVolumePixelType = traits.Enum(
@@ -178,7 +178,7 @@ class BRAINSROIAuto(SEMLikeCommandLine):
category: Segmentation.Specialized
- description: This program is used to create a mask over the most prominant forground region in an image. This is accomplished via a combination of otsu thresholding and a closing operation. More documentation is available here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ForegroundMasking.
+ description: This program is used to create a mask over the most prominent foreground region in an image. This is accomplished via a combination of otsu thresholding and a closing operation. More documentation is available here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ForegroundMasking.
version: 2.4.1
@@ -269,7 +269,7 @@ class BRAINSConstellationDetectorInputSpec(CommandLineInputSpec):
argstr="--outputVerificationScript %s",
)
mspQualityLevel = traits.Int(
- desc=", Flag cotrols how agressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds), NOTE: -1= Prealigned so no estimate!., ",
+ desc=", Flag controls how aggressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds), NOTE: -1= Prealigned so no estimate!., ",
argstr="--mspQualityLevel %d",
)
otsuPercentileThreshold = traits.Float(
@@ -391,7 +391,7 @@ class BRAINSConstellationDetectorInputSpec(CommandLineInputSpec):
traits.Bool,
Directory(),
hash_files=False,
- desc=", The directory for the debuging images to be written., ",
+ desc=", The directory for the debugging images to be written., ",
argstr="--resultsDir %s",
)
writedebuggingImagesLevel = traits.Int(
@@ -457,7 +457,7 @@ class BRAINSConstellationDetectorOutputSpec(TraitedSpec):
exists=True,
)
resultsDir = Directory(
- desc=", The directory for the debuging images to be written., ",
+ desc=", The directory for the debugging images to be written., ",
exists=True,
)
@@ -467,7 +467,7 @@ class BRAINSConstellationDetector(SEMLikeCommandLine):
category: Segmentation.Specialized
- description: This program will find the mid-sagittal plane, a constellation of landmarks in a volume, and create an AC/PC aligned data set with the AC point at the center of the voxel lattice (labeled at the origin of the image physical space.) Part of this work is an extention of the algorithms originally described by Dr. Babak A. Ardekani, Alvin H. Bachman, Model-based automatic detection of the anterior and posterior commissures on MRI scans, NeuroImage, Volume 46, Issue 3, 1 July 2009, Pages 677-682, ISSN 1053-8119, DOI: 10.1016/j.neuroimage.2009.02.030. (http://www.sciencedirect.com/science/article/B6WNP-4VRP25C-4/2/8207b962a38aa83c822c6379bc43fe4c)
+ description: This program will find the mid-sagittal plane, a constellation of landmarks in a volume, and create an AC/PC aligned data set with the AC point at the center of the voxel lattice (labeled at the origin of the image physical space.) Part of this work is an extension of the algorithms originally described by Dr. Babak A. Ardekani, Alvin H. Bachman, Model-based automatic detection of the anterior and posterior commissures on MRI scans, NeuroImage, Volume 46, Issue 3, 1 July 2009, Pages 677-682, ISSN 1053-8119, DOI: 10.1016/j.neuroimage.2009.02.030. (http://www.sciencedirect.com/science/article/B6WNP-4VRP25C-4/2/8207b962a38aa83c822c6379bc43fe4c)
version: 1.0
@@ -626,7 +626,7 @@ class BinaryMaskEditorBasedOnLandmarks(SEMLikeCommandLine):
class BRAINSMultiSTAPLEInputSpec(CommandLineInputSpec):
inputCompositeT1Volume = File(
- desc="Composite T1, all label maps transofrmed into the space for this image.",
+ desc="Composite T1, all label maps transformed into the space for this image.",
exists=True,
argstr="--inputCompositeT1Volume %s",
)
@@ -724,7 +724,7 @@ class BRAINSABCInputSpec(CommandLineInputSpec):
traits.Bool,
Directory(),
hash_files=False,
- desc="Ouput directory",
+ desc="Output directory",
argstr="--outputDir %s",
)
atlasToSubjectTransformType = traits.Enum(
@@ -832,7 +832,7 @@ class BRAINSABCInputSpec(CommandLineInputSpec):
)
numberOfSubSamplesInEachPlugArea = InputMultiPath(
traits.Int,
- desc="Number of continous index samples taken at each direction of lattice space for each plug volume.",
+ desc="Number of continuous index samples taken at each direction of lattice space for each plug volume.",
sep=",",
argstr="--numberOfSubSamplesInEachPlugArea %s",
)
@@ -872,7 +872,7 @@ class BRAINSABCOutputSpec(TraitedSpec):
desc="(optional) Filename to which save the final state of the registration",
exists=True,
)
- outputDir = Directory(desc="Ouput directory", exists=True)
+ outputDir = Directory(desc="Output directory", exists=True)
atlasToSubjectTransform = File(
desc="The transform from atlas to the subject", exists=True
)
diff --git a/nipype/interfaces/semtools/testing/generateaveragelmkfile.py b/nipype/interfaces/semtools/testing/generateaveragelmkfile.py
index 7138dc37d3..fe3aa71521 100644
--- a/nipype/interfaces/semtools/testing/generateaveragelmkfile.py
+++ b/nipype/interfaces/semtools/testing/generateaveragelmkfile.py
@@ -29,14 +29,14 @@ class GenerateAverageLmkFileInputSpec(CommandLineInputSpec):
traits.Bool,
File(),
hash_files=False,
- desc="Ouput landmark file name that includes average values for landmarks (.fcsv or .wts)",
+ desc="Output landmark file name that includes average values for landmarks (.fcsv or .wts)",
argstr="--outputLandmarkFile %s",
)
class GenerateAverageLmkFileOutputSpec(TraitedSpec):
outputLandmarkFile = File(
- desc="Ouput landmark file name that includes average values for landmarks (.fcsv or .wts)",
+ desc="Output landmark file name that includes average values for landmarks (.fcsv or .wts)",
exists=True,
)
diff --git a/nipype/interfaces/semtools/utilities/brains.py b/nipype/interfaces/semtools/utilities/brains.py
index 5ff0f9aa35..b5964e3555 100644
--- a/nipype/interfaces/semtools/utilities/brains.py
+++ b/nipype/interfaces/semtools/utilities/brains.py
@@ -52,7 +52,7 @@ class BRAINSConstellationModelerInputSpec(CommandLineInputSpec):
argstr="--resultsDir %s",
)
mspQualityLevel = traits.Int(
- desc=", Flag cotrols how agressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ",
+ desc=", Flag controls how aggressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ",
argstr="--mspQualityLevel %d",
)
rescaleIntensities = traits.Bool(
@@ -736,7 +736,7 @@ class BRAINSClipInferior(SEMLikeCommandLine):
class GenerateLabelMapFromProbabilityMapInputSpec(CommandLineInputSpec):
inputVolumes = InputMultiPath(
File(exists=True),
- desc="The Input probaiblity images to be computed for lable maps",
+ desc="The Input probaiblity images to be computed for label maps",
argstr="--inputVolumes %s...",
)
outputLabelVolume = traits.Either(
@@ -805,7 +805,7 @@ class BRAINSAlignMSPInputSpec(CommandLineInputSpec):
argstr="--writedebuggingImagesLevel %d",
)
mspQualityLevel = traits.Int(
- desc=", Flag cotrols how agressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ",
+ desc=", Flag controls how aggressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ",
argstr="--mspQualityLevel %d",
)
rescaleIntensities = traits.Bool(
@@ -857,11 +857,11 @@ class BRAINSAlignMSPOutputSpec(TraitedSpec):
class BRAINSAlignMSP(SEMLikeCommandLine):
- """title: Align Mid Saggital Brain (BRAINS)
+ """title: Align Mid Sagittal Brain (BRAINS)
category: Utilities.BRAINS
- description: Resample an image into ACPC alignement ACPCDetect
+ description: Resample an image into ACPC alignment ACPCDetect
"""
input_spec = BRAINSAlignMSPInputSpec
@@ -886,7 +886,7 @@ class BRAINSLandmarkInitializerInputSpec(CommandLineInputSpec):
argstr="--inputMovingLandmarkFilename %s",
)
inputWeightFilename = File(
- desc="Input weight file name for landmarks. Higher weighted landmark will be considered more heavily. Weights are propotional, that is the magnitude of weights will be normalized by its minimum and maximum value. ",
+ desc="Input weight file name for landmarks. Higher weighted landmark will be considered more heavily. Weights are proportional, that is the magnitude of weights will be normalized by its minimum and maximum value. ",
exists=True,
argstr="--inputWeightFilename %s",
)
@@ -991,7 +991,7 @@ class BRAINSSnapShotWriterInputSpec(CommandLineInputSpec):
)
inputPlaneDirection = InputMultiPath(
traits.Int,
- desc="Plane to display. In general, 0=saggital, 1=coronal, and 2=axial plane.",
+ desc="Plane to display. In general, 0=sagittal, 1=coronal, and 2=axial plane.",
sep=",",
argstr="--inputPlaneDirection %s",
)
diff --git a/nipype/interfaces/slicer/generate_classes.py b/nipype/interfaces/slicer/generate_classes.py
index f71d963142..89125a963f 100644
--- a/nipype/interfaces/slicer/generate_classes.py
+++ b/nipype/interfaces/slicer/generate_classes.py
@@ -2,7 +2,7 @@
"""This script generates Slicer Interfaces based on the CLI modules XML. CLI
modules are selected from the hardcoded list below and generated code is placed
in the cli_modules.py file (and imported in __init__.py). For this to work
-correctly you must have your CLI executabes in $PATH"""
+correctly you must have your CLI executables in $PATH"""
import xml.dom.minidom
import subprocess
import os
@@ -121,7 +121,7 @@ def generate_all_classes(
modules_list=[], launcher=[], redirect_x=False, mipav_hacks=False
):
"""modules_list contains all the SEM compliant tools that should have wrappers created for them.
- launcher containtains the command line prefix wrapper arugments needed to prepare
+ launcher containtains the command line prefix wrapper arguments needed to prepare
a proper environment for each of the modules.
"""
all_code = {}
@@ -196,7 +196,7 @@ def generate_class(
if longFlagNode:
# Prefer to use longFlag as name if it is given, rather than the parameter name
longFlagName = longFlagNode[0].firstChild.nodeValue
- # SEM automatically strips prefixed "--" or "-" from from xml before processing
+ # SEM automatically strips prefixed "--" or "-" from xml before processing
# we need to replicate that behavior here The following
# two nodes in xml have the same behavior in the program
# --test
diff --git a/nipype/interfaces/slicer/registration/brainsfit.py b/nipype/interfaces/slicer/registration/brainsfit.py
index e26c7036a2..5780d29096 100644
--- a/nipype/interfaces/slicer/registration/brainsfit.py
+++ b/nipype/interfaces/slicer/registration/brainsfit.py
@@ -104,7 +104,7 @@ class BRAINSFitInputSpec(CommandLineInputSpec):
"NOMASK",
"ROIAUTO",
"ROI",
- desc="What mode to use for using the masks. If ROIAUTO is choosen, then the mask is implicitly defined using a otsu forground and hole filling algorithm. The Region Of Interest mode (choose ROI) uses the masks to define what parts of the image should be used for computing the transform.",
+ desc="What mode to use for using the masks. If ROIAUTO is chosen, then the mask is implicitly defined using a otsu foreground and hole filling algorithm. The Region Of Interest mode (choose ROI) uses the masks to define what parts of the image should be used for computing the transform.",
argstr="--maskProcessingMode %s",
)
fixedBinaryVolume = File(
@@ -208,7 +208,7 @@ class BRAINSFitInputSpec(CommandLineInputSpec):
)
transformType = InputMultiPath(
traits.Str,
- desc="Specifies a list of registration types to be used. The valid types are, Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, and BSpline. Specifiying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.",
+ desc="Specifies a list of registration types to be used. The valid types are, Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, and BSpline. Specifying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.",
sep=",",
argstr="--transformType %s",
)
@@ -234,7 +234,7 @@ class BRAINSFitInputSpec(CommandLineInputSpec):
argstr="--medianFilterSize %s",
)
removeIntensityOutliers = traits.Float(
- desc="The half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. If the value of 0.005 is given, the moduel will throw away 0.005 % of both tails, so 0.01% of intensities in total would be ignored in its statistic calculation. ",
+ desc="The half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. If the value of 0.005 is given, the module will throw away 0.005 % of both tails, so 0.01% of intensities in total would be ignored in its statistic calculation. ",
argstr="--removeIntensityOutliers %f",
)
useCachingOfBSplineWeightsMode = traits.Enum(
@@ -251,11 +251,11 @@ class BRAINSFitInputSpec(CommandLineInputSpec):
argstr="--useExplicitPDFDerivativesMode %s",
)
ROIAutoDilateSize = traits.Float(
- desc="This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.",
+ desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.",
argstr="--ROIAutoDilateSize %f",
)
ROIAutoClosingSize = traits.Float(
- desc="This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the hole closing size in mm. It is rounded up to the nearest whole pixel size in each direction. The default is to use a closing size of 9mm. For mouse data this value may need to be reset to 0.9 or smaller.",
+ desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the hole closing size in mm. It is rounded up to the nearest whole pixel size in each direction. The default is to use a closing size of 9mm. For mouse data this value may need to be reset to 0.9 or smaller.",
argstr="--ROIAutoClosingSize %f",
)
relaxationFactor = traits.Float(
@@ -279,7 +279,7 @@ class BRAINSFitInputSpec(CommandLineInputSpec):
argstr="--numberOfThreads %d",
)
forceMINumberOfThreads = traits.Int(
- desc="Force the the maximum number of threads to use for non thread safe MI metric. CAUTION: Inconsistent results my arise!",
+ desc="Force the maximum number of threads to use for non thread safe MI metric. CAUTION: Inconsistent results my arise!",
argstr="--forceMINumberOfThreads %d",
)
debugLevel = traits.Int(
@@ -295,7 +295,7 @@ class BRAINSFitInputSpec(CommandLineInputSpec):
argstr="--projectedGradientTolerance %f",
)
gui = traits.Bool(
- desc="Display intermediate image volumes for debugging. NOTE: This is not part of the standard build sytem, and probably does nothing on your installation.",
+ desc="Display intermediate image volumes for debugging. NOTE: This is not part of the standard build system, and probably does nothing on your installation.",
argstr="--gui ",
)
promptUser = traits.Bool(
diff --git a/nipype/interfaces/slicer/registration/brainsresample.py b/nipype/interfaces/slicer/registration/brainsresample.py
index 653be8e916..9512a05195 100644
--- a/nipype/interfaces/slicer/registration/brainsresample.py
+++ b/nipype/interfaces/slicer/registration/brainsresample.py
@@ -94,7 +94,7 @@ class BRAINSResample(SEMLikeCommandLine):
category: Registration
description:
- This program resamples an image image using a deformation field or a transform (BSpline, Affine, Rigid, etc.).
+ This program resamples an image using a deformation field or a transform (BSpline, Affine, Rigid, etc.).
version: 3.0.0
diff --git a/nipype/interfaces/slicer/segmentation/specialized.py b/nipype/interfaces/slicer/segmentation/specialized.py
index 3abab602dc..da0bff4dd1 100644
--- a/nipype/interfaces/slicer/segmentation/specialized.py
+++ b/nipype/interfaces/slicer/segmentation/specialized.py
@@ -28,7 +28,7 @@ class RobustStatisticsSegmenterInputSpec(CommandLineInputSpec):
argstr="--intensityHomogeneity %f",
)
curvatureWeight = traits.Float(
- desc="Given sphere 1.0 score and extreme rough bounday/surface 0 score, what is the expected smoothness of the object?",
+ desc="Given sphere 1.0 score and extreme rough boundary/surface 0 score, what is the expected smoothness of the object?",
argstr="--curvatureWeight %f",
)
labelValue = traits.Int(
@@ -255,7 +255,7 @@ class BRAINSROIAutoInputSpec(CommandLineInputSpec):
argstr="--closingSize %f",
)
ROIAutoDilateSize = traits.Float(
- desc="This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.",
+ desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.",
argstr="--ROIAutoDilateSize %f",
)
outputVolumePixelType = traits.Enum(
@@ -288,7 +288,7 @@ class BRAINSROIAuto(SEMLikeCommandLine):
category: Segmentation.Specialized
- description: This tool uses a combination of otsu thresholding and a closing operations to identify the most prominant foreground region in an image.
+ description: This tool uses a combination of otsu thresholding and a closing operations to identify the most prominent foreground region in an image.
version: 2.4.1
diff --git a/nipype/interfaces/slicer/surface.py b/nipype/interfaces/slicer/surface.py
index d2ebe4d15f..6d7a7b2382 100644
--- a/nipype/interfaces/slicer/surface.py
+++ b/nipype/interfaces/slicer/surface.py
@@ -351,7 +351,7 @@ class ModelMaker(SEMLikeCommandLine):
category: Surface Models
- description: Create 3D surface models from segmented data.Models are imported into Slicer under a model hierarchy node in a MRML scene. The model colors are set by the color table associated with the input volume (these colours will only be visible if you load the model scene file).
Create Multiple:
If you specify a list of Labels, it will over ride any start/end label settings.
If you clickGenerate Allit will over ride the list of lables and any start/end label settings.
Model Maker Settings:
You can set the number of smoothing iterations, target reduction in number of polygons (decimal percentage). Use 0 and 1 if you wish no smoothing nor decimation.
You can set the flags to split normals or generate point normals in this pane as well.
You can save a copy of the models after intermediate steps (marching cubes, smoothing, and decimation if not joint smoothing, otherwise just after decimation); these models are not saved in the mrml file, turn off deleting temporary files first in the python window:
slicer.modules.modelmaker.cliModuleLogic().DeleteTemporaryFilesOff()
+ description: Create 3D surface models from segmented data.Models are imported into Slicer under a model hierarchy node in a MRML scene. The model colors are set by the color table associated with the input volume (these colours will only be visible if you load the model scene file).
Create Multiple:
If you specify a list of Labels, it will over ride any start/end label settings.
If you clickGenerate Allit will over ride the list of labels and any start/end label settings.
Model Maker Settings:
You can set the number of smoothing iterations, target reduction in number of polygons (decimal percentage). Use 0 and 1 if you wish no smoothing nor decimation.
You can set the flags to split normals or generate point normals in this pane as well.
You can save a copy of the models after intermediate steps (marching cubes, smoothing, and decimation if not joint smoothing, otherwise just after decimation); these models are not saved in the mrml file, turn off deleting temporary files first in the python window:
slicer.modules.modelmaker.cliModuleLogic().DeleteTemporaryFilesOff()
version: 4.1
diff --git a/nipype/interfaces/spm/base.py b/nipype/interfaces/spm/base.py
index 2347d718ae..4e9b78e1f1 100644
--- a/nipype/interfaces/spm/base.py
+++ b/nipype/interfaces/spm/base.py
@@ -201,7 +201,6 @@ def getinfo(klass, matlab_cmd=None, paths=None, use_mcr=None):
and klass._command == matlab_cmd
and klass._paths == paths
):
-
return {"name": klass._name, "path": klass._path, "release": klass._version}
logger.debug("matlab command or path has changed. recomputing version.")
mlab = MatlabCommand(matlab_cmd=matlab_cmd, resource_monitor=False)
diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py
index 260742f5b0..0c2eece0d5 100644
--- a/nipype/interfaces/spm/model.py
+++ b/nipype/interfaces/spm/model.py
@@ -616,7 +616,7 @@ class ThresholdInputSpec(SPMCommandInputSpec):
desc=(
"In case no clusters survive the "
"topological inference step this "
- "will pick a culster with the highes "
+ "will pick a culster with the highest "
"sum of t-values. Use with care."
),
)
diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py
index dbafdcd18d..a612865039 100644
--- a/nipype/interfaces/spm/preprocess.py
+++ b/nipype/interfaces/spm/preprocess.py
@@ -39,7 +39,6 @@
class FieldMapInputSpec(SPMCommandInputSpec):
-
jobtype = traits.Enum(
"calculatevdm",
usedefault=True,
@@ -235,7 +234,6 @@ def _format_arg(self, opt, spec, val):
"""Convert input to appropriate format for spm"""
if opt in ["phase_file", "magnitude_file", "anat_file", "epi_file"]:
-
return scans_for_fname(ensure_list(val))
return super(FieldMap, self)._format_arg(opt, spec, val)
@@ -256,7 +254,6 @@ def _list_outputs(self):
class ApplyVDMInputSpec(SPMCommandInputSpec):
-
in_files = InputMultiObject(
ImageFileSPM(exists=True),
field="data.scans",
@@ -674,7 +671,6 @@ def _list_outputs(self):
class RealignUnwarpInputSpec(SPMCommandInputSpec):
-
in_files = InputMultiObject(
traits.Either(
ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True))
@@ -879,7 +875,6 @@ def _format_arg(self, opt, spec, val):
return super(RealignUnwarp, self)._format_arg(opt, spec, val)
def _parse_inputs(self, skip=()):
-
spmdict = super(RealignUnwarp, self)._parse_inputs(skip=())[0]
if isdefined(self.inputs.phase_map):
@@ -2681,7 +2676,6 @@ def _list_outputs(self):
class VBMSegmentInputSpec(SPMCommandInputSpec):
-
in_files = InputMultiPath(
ImageFileSPM(exists=True),
desc="A list of files to be segmented",
@@ -2851,7 +2845,6 @@ class VBMSegmentInputSpec(SPMCommandInputSpec):
class VBMSegmentOuputSpec(TraitedSpec):
-
native_class_images = traits.List(
traits.List(File(exists=True)), desc="native space probability maps"
)
diff --git a/nipype/interfaces/spm/utils.py b/nipype/interfaces/spm/utils.py
index 543a0d3024..1c9e506d57 100644
--- a/nipype/interfaces/spm/utils.py
+++ b/nipype/interfaces/spm/utils.py
@@ -24,7 +24,6 @@ class Analyze2niiOutputSpec(SPMCommandInputSpec):
class Analyze2nii(SPMCommand):
-
input_spec = Analyze2niiInputSpec
output_spec = Analyze2niiOutputSpec
diff --git a/nipype/interfaces/tests/test_io.py b/nipype/interfaces/tests/test_io.py
index 45bd53e32c..2df5c9dad8 100644
--- a/nipype/interfaces/tests/test_io.py
+++ b/nipype/interfaces/tests/test_io.py
@@ -571,7 +571,6 @@ def test_freesurfersource_incorrectdir():
def test_jsonsink_input():
-
ds = nio.JSONFileSink()
assert ds.inputs._outputs == {}
diff --git a/nipype/interfaces/tests/test_nilearn.py b/nipype/interfaces/tests/test_nilearn.py
index 2066c00768..29a068552c 100644
--- a/nipype/interfaces/tests/test_nilearn.py
+++ b/nipype/interfaces/tests/test_nilearn.py
@@ -22,7 +22,6 @@
@pytest.mark.skipif(no_nilearn, reason="the nilearn library is not available")
class TestSignalExtraction:
-
filenames = {
"in_file": "fmri.nii",
"label_files": "labels.nii",
@@ -184,10 +183,11 @@ def assert_expected_output(self, labels, wanted):
[[2, -2, -1, -2, -5], [3, 0, 3, -5, -2]],
[[-4, -2, -2, 1, -2], [3, 1, 4, -3, -2]],
],
- ]
+ ],
+ np.int16,
)
- fake_label_data = np.array([[[1, 0], [3, 1]], [[2, 0], [1, 3]]])
+ fake_label_data = np.array([[[1, 0], [3, 1]], [[2, 0], [1, 3]]], np.uint8)
fake_equiv_4d_label_data = np.array(
[
diff --git a/nipype/interfaces/utility/tests/test_wrappers.py b/nipype/interfaces/utility/tests/test_wrappers.py
index fda81b2f5b..1e2ce8a953 100644
--- a/nipype/interfaces/utility/tests/test_wrappers.py
+++ b/nipype/interfaces/utility/tests/test_wrappers.py
@@ -95,7 +95,7 @@ def test_function_with_imports(tmpdir):
def test_aux_connect_function(tmpdir):
- """This tests excution nodes with multiple inputs and auxiliary
+ """This tests execution nodes with multiple inputs and auxiliary
function inside the Workflow connect function.
"""
tmpdir.chdir()
diff --git a/nipype/pipeline/engine/report_template.html b/nipype/pipeline/engine/report_template.html
index 3fb66b4a02..86b2745122 100644
--- a/nipype/pipeline/engine/report_template.html
+++ b/nipype/pipeline/engine/report_template.html
@@ -261,4 +261,3 @@