diff --git a/.circle/tests.sh b/.circle/tests.sh
index 6f1fd5459c..e47dabebfa 100644
--- a/.circle/tests.sh
+++ b/.circle/tests.sh
@@ -17,24 +17,24 @@ fi
# They may need to be rebalanced in the future.
case ${CIRCLE_NODE_INDEX} in
0)
- docker run --rm=false -it -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_pytests.sh && \
+ docker run --rm=false -it -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_pytests.sh && \
docker run --rm=false -it -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_pytests.sh && \
- docker run --rm=false -it -v $WORKDIR:/work -w /src/nipype/doc nipype/nipype:py35 /usr/bin/run_builddocs.sh && \
- docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow3d && \
- docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow4d
+ docker run --rm=false -it -v $WORKDIR:/work -w /src/nipype/doc nipype/nipype:py36 /usr/bin/run_builddocs.sh && \
+ docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow3d && \
+ docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow4d
;;
1)
- docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ level1 && \
- docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ l2pipeline
+ docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ level1 && \
+ docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ l2pipeline
;;
2)
docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \
- docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ l2pipeline
+ docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ l2pipeline
;;
3)
- docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \
- docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_fsl_feeds Linear /data/examples/ l1pipeline && \
- docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_fsl_reuse Linear /data/examples/ level1_workflow
+ docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \
+ docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_fsl_feeds Linear /data/examples/ l1pipeline && \
+ docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_fsl_reuse Linear /data/examples/ level1_workflow
;;
esac
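The four CircleCI node blocks above differ only in the image tag, the environment variables, and the arguments handed to the test script. As a minimal sketch (not part of the repository; helper name and argument layout are assumed), the same `docker run` invocation could be assembled in Python as follows::

    # Illustrative helper only -- mirrors the flags used in .circle/tests.sh.
    def docker_test_command(image_tag, script, *script_args, env=None):
        """Assemble the docker run argument list for one test invocation."""
        cmd = ['docker', 'run', '--rm=false', '-it']
        for key, value in (env or {}).items():
            cmd += ['-e', '{}={}'.format(key, value)]
        cmd += ['-v', '$HOME/examples:/data/examples:ro',
                '-v', '$WORKDIR:/work', '-w', '/work',
                'nipype/nipype:' + image_tag, script]
        return cmd + list(script_args)

    print(' '.join(docker_test_command(
        'py36', '/usr/bin/run_examples.sh',
        'fmri_fsl_feeds', 'Linear', '/data/examples/', 'l1pipeline')))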
diff --git a/.travis.yml b/.travis.yml
index bef97b9b59..f97f48dddb 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,6 +6,7 @@ python:
- 2.7
- 3.4
- 3.5
+- 3.6
env:
- INSTALL_DEB_DEPENDECIES=true NIPYPE_EXTRAS="doc,tests,fmri,profiler"
- INSTALL_DEB_DEPENDECIES=false NIPYPE_EXTRAS="doc,tests,fmri,profiler"
diff --git a/Dockerfile b/Dockerfile
index 963171b4b0..502216cf8d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -50,7 +50,7 @@ ENV PATH=/usr/local/miniconda/bin:$PATH \
# only use one thread - nipype will handle parallelization
# Installing precomputed python packages
-ARG PYTHON_VERSION_MINOR=5
+ARG PYTHON_VERSION_MINOR=6
RUN conda config --add channels conda-forge; sync && \
conda config --set always_yes yes --set changeps1 no; sync && \
conda install -y python=${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR} \
diff --git a/circle.yml b/circle.yml
index 6486e8248a..32d3ed16f6 100644
--- a/circle.yml
+++ b/circle.yml
@@ -38,11 +38,11 @@ dependencies:
- if [ "$CIRCLE_TAG" != "" ]; then sed -i -E "s/(__version__ = )'[A-Za-z0-9.-]+'/\1'$CIRCLE_TAG'/" nipype/info.py; fi
# - e=1 && for i in {1..5}; do docker build -f docker/base.Dockerfile --rm=false -t nipype/base:latest . && e=0 && break || sleep 15; done && [ "$e" -eq "0" ] :
# timeout: 21600
- - e=1 && for i in {1..5}; do docker build --rm=false -t nipype/nipype:latest -t nipype/nipype:py35 --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` --build-arg VCS_REF=`git rev-parse --short HEAD` --build-arg VERSION=$CIRCLE_TAG . && e=0 && break || sleep 15; done && [ "$e" -eq "0" ] :
+ - e=1 && for i in {1..5}; do docker build --rm=false -t nipype/nipype:latest -t nipype/nipype:py36 --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` --build-arg VCS_REF=`git rev-parse --short HEAD` --build-arg VERSION=$CIRCLE_TAG . && e=0 && break || sleep 15; done && [ "$e" -eq "0" ] :
timeout: 6000
- e=1 && for i in {1..5}; do docker build --rm=false -t nipype/nipype:py27 --build-arg PYTHON_VERSION_MAJOR=2 --build-arg PYTHON_VERSION_MINOR=7 --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` --build-arg VCS_REF=`git rev-parse --short HEAD` --build-arg VERSION=$CIRCLE_TAG-py27 . && e=0 && break || sleep 15; done && [ "$e" -eq "0" ] :
timeout: 6000
- - docker save -o $HOME/docker/cache.tar nipype/base:latest nipype/nipype:py35 nipype/nipype:py27 :
+ - docker save -o $HOME/docker/cache.tar nipype/base:latest nipype/nipype:py36 nipype/nipype:py27 :
timeout: 6000
test:
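The ``e=1 && for i in {1..5}; do ... && e=0 && break || sleep 15; done`` one-liners above retry each docker build up to five times with a fixed pause between attempts. The same pattern, sketched in Python purely for illustration (function name assumed)::

    import subprocess
    import time

    def run_with_retries(cmd, attempts=5, delay=15):
        """Retry a shell command a fixed number of times, pausing between tries."""
        for _ in range(attempts):
            if subprocess.call(cmd, shell=True) == 0:
                return True
            time.sleep(delay)
        return False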
diff --git a/doc/devel/testing_nipype.rst b/doc/devel/testing_nipype.rst
index 2a081dbeda..cc2debe53c 100644
--- a/doc/devel/testing_nipype.rst
+++ b/doc/devel/testing_nipype.rst
@@ -109,16 +109,17 @@ Testing Nipype using Docker
As of :code:`nipype-0.13`, Nipype is tested inside Docker containers. First, install the
`Docker Engine `_.
-Nipype has one base docker image called nipype/nipype, and several additional test images
+Nipype has one base docker image called nipype/base:latest, and several additional test images
for various Python versions.
-The base nipype/nipype image is built as follows::
+The base nipype image is built as follows::
cd path/to/nipype/
- docker build -t nipype/nipype .
+ docker build -t nipype/base:latest -f docker/base.Dockerfile .
+
+This base image contains several useful tools (FreeSurfer, AFNI, FSL, ANTs, etc.),
+but not nipype.
-This base image contains several useful tools (FreeSurfer, AFNI, FSL, ANTs, etc.) and
-a nipype installation, all in Python 3.5.
It is possible to fetch a built image from the latest master branch of nipype
using::
@@ -128,26 +129,15 @@ using::
The docker run command will then open the container and offer a bash shell for the
developer.
-The additional test images have several test scripts installed. For instance,
-to build and run all tests on Python 2.7::
-
- cd path/to/nipype/
- docker build -f docker/Dockerfile_py27 -t nipype/nipype_test:py27 .
- docker run -it --rm -e FSL_COURSE_DATA="/root/examples/nipype-fsl_course_data" \
- -v ~/examples:/root/examples:ro \
- -v ~/scratch:/scratch \
- -w /root/src/nipype \
- nipype/nipype_test:py27 /usr/bin/run_pytests.sh
-
-For running nipype in Python 3.5::
+To build a container for running nipype in Python 3.6::
cd path/to/nipype/
- docker build -f docker/Dockerfile_py35 -t nipype/nipype_test:py35 .
+ docker build -f Dockerfile -t nipype/nipype_test:py36 .
docker run -it --rm -e FSL_COURSE_DATA="/root/examples/nipype-fsl_course_data" \
-v ~/examples:/root/examples:ro \
-v ~/scratch:/scratch \
-w /root/src/nipype \
- nipype/nipype_test:py35 /usr/bin/run_pytests.sh
+ nipype/nipype_test:py36 /usr/bin/run_pytests.sh
-The last two examples assume that the example data is downladed into ~/examples and
+The last example assumes that the example data is downloaded into ~/examples and
the ~/scratch folder will be created if it does not exist previously.
diff --git a/doc/documentation.rst b/doc/documentation.rst
index 3468525492..39e3cadb08 100644
--- a/doc/documentation.rst
+++ b/doc/documentation.rst
@@ -11,8 +11,13 @@ Documentation
Previous versions: `0.12.0 `_ `0.11.0 `_
+
.. container:: doc2
+ .. admonition:: Michael Notter's Nipype guide
+
+ Be sure to read `Michael's excellent tutorials <https://miykael.github.io/nipype_tutorial/>`_.
+
.. admonition:: Guides
.. hlist::
diff --git a/doc/links_names.txt b/doc/links_names.txt
index 0021edb4d5..608ddd37ed 100644
--- a/doc/links_names.txt
+++ b/doc/links_names.txt
@@ -74,8 +74,7 @@
.. _pythonxy: https://python-xy.github.io/
.. _EPD: http://www.enthought.com/products/epd.php
.. _Traits: http://code.enthought.com/projects/traits/
-.. _Anaconda: https://www.continuum.io/downloads
-.. _Canopy: https://www.enthought.com/products/canopy/
+.. _Miniconda: https://conda.io/miniconda.html
.. Python imaging projects
.. _PyMVPA: http://www.pymvpa.org
diff --git a/doc/quickstart.rst b/doc/quickstart.rst
index 2a68e78143..8952e18872 100644
--- a/doc/quickstart.rst
+++ b/doc/quickstart.rst
@@ -11,34 +11,30 @@ Downloading and installing
:maxdepth: 1
users/install
- users/vagrant
Beginner's guide
================
-Beginner's tutorials (IPython Notebooks). `Available here`__
+Michael Notter's Nipype tutorial. `Available here`__
-Michael Notter's Nipype guide. `Available here`__
+__ https://miykael.github.io/nipype_tutorial/
-Dissecting Nipype Workflows. `Available here`__
+Nipype workshop materials
+=========================
-Introductory slides [older]. `Available here`__
+Self-assessment questionnaire with links to learning resources for each topic. `Available here`__
-__ https://github.com/mwaskom/nipype_concepts
-__ http://miykael.github.com/nipype-beginner-s-guide/index.html
-__ http://slideviewer.herokuapp.com/url/raw.github.com/nipy/nipype/master/examples/nipype_tutorial.ipynb?theme=sky
-__ http://satra.github.com/intro2nipype
+Lecture slides. `Available here`__
-User guides
-===========
+__ http://nipy.org/workshops/2017-03-boston/review.html
+__ http://nipy.org/workshops/2017-03-boston/index.html
-.. toctree::
- :maxdepth: 1
+`Docker containers `_
+
+`Github project for lectures `_
+`Github project for dockerfiles + notebooks `_
- users/interface_tutorial
- users/pipeline_tutorial
- users/plugins
- users/debug
+`All notebooks visualized `_
Developer guides
================
@@ -47,16 +43,6 @@ Developer guides
:maxdepth: 1
devel/writing_custom_interfaces
- devel/gitwash/index
.. include:: links_names.txt
-Useful links for beginners
-===========================
-
-Getting started with Python - Tutorials. `Available here`__
-
-Python for Beginners `Available here`__
-
- __ http://www.codecademy.com/en/tracks/python
- __ https://www.python.org/about/gettingstarted/
diff --git a/doc/users/index.rst b/doc/users/index.rst
index 3c39ce08b2..5cdcba4282 100644
--- a/doc/users/index.rst
+++ b/doc/users/index.rst
@@ -11,15 +11,12 @@
:maxdepth: 2
install
- vagrant
- interface_tutorial
caching_tutorial
.. toctree::
:maxdepth: 1
- pipeline_tutorial
plugins
config_file
debug
diff --git a/doc/users/install.rst b/doc/users/install.rst
index 5caaf56160..c065e4c96c 100644
--- a/doc/users/install.rst
+++ b/doc/users/install.rst
@@ -9,18 +9,17 @@ This page covers the necessary steps to install Nipype.
Nipype for users
----------------
-Using conda
-~~~~~~~~~~~
-
-Installing nipype from the conda-forge channel can be achieved by adding conda-forge to your channels with::
-
- conda config --add channels conda-forge
+Using docker
+~~~~~~~~~~~~
+You can follow the `Nipype tutorial <https://miykael.github.io/nipype_tutorial/>`_
-Once the conda-forge channel has been enabled, nipype can be installed with::
+Using conda
+~~~~~~~~~~~
- conda install nipype
+Installing nipype from the conda-forge channel can be achieved by::
+ conda install --channel conda-forge nipype
It is possible to list all of the versions of nipype available on your platform with::
@@ -36,18 +35,23 @@ The installation process is similar to other Python packages.
If you already have a Python environment set up, you can do::
- easy_install nipype
-
-or::
-
pip install nipype
-
If you want to install all the optional features of ``nipype``,
-use the following command (only for ``nipype>=0.13``)::
+use the following command::
pip install nipype[all]
+While `all` installs everything, one can also install select components as
+listed below::
+
+ 'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus'],
+ 'tests': ['pytest-cov', 'codecov'],
+ 'nipy': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'],
+ 'profiler': ['psutil'],
+ 'duecredit': ['duecredit'],
+ 'xvfbwrapper': ['xvfbwrapper'],
+
Debian and Ubuntu
~~~~~~~~~~~~~~~~~
@@ -59,11 +63,14 @@ manager.
Mac OS X
~~~~~~~~
-The easiest way to get nipype running on Mac OS X is to install Anaconda_ or
-Canopy_ and then add nipype by executing::
+The easiest way to get nipype running on Mac OS X is to install Miniconda_ and
+follow the instructions above. If you have a non-conda environment you can
+install nipype by typing::
- easy_install nipype
+ pip install nipype
+Note that the above procedure may require gcc to be available on your system
+path in order to compile the traits package.
From source
~~~~~~~~~~~
@@ -79,7 +86,7 @@ If you downloaded the source distribution named something
like ``nipype-x.y.tar.gz``, then unpack the tarball, change into the
``nipype-x.y`` directory and install nipype using::
- python setup.py install
+ pip install .
**Note:** Depending on permissions you may need to use ``sudo``.
@@ -87,39 +94,34 @@ like ``nipype-x.y.tar.gz``, then unpack the tarball, change into the
Testing the install
-------------------
-The best way to test the install is checking nipype's version ::
+The best way to test the install is to check nipype's version and then run
+the tests::
python -c "import nipype; print(nipype.__version__)"
-
+ python -c "import nipype; nipype.test()"
Installation for developers
---------------------------
Developers should start `here <../devel/testing_nipype.html>`_.
-
Recommended Software
-------------
+--------------------
Strong Recommendations
~~~~~~~~~~~~~~~~~~~~~~
-IPython_ 0.10.2 - 1.0.0
- Interactive python environment. This is necessary for some parallel
- components of the pipeline engine.
+IPython_
+ Interactive python environment.
-Matplotlib_ 1.0 - 1.2
+Matplotlib_
Plotting library
-`RDFLib `_ 4.1
- RDFLibrary required for provenance export as RDF
-
Sphinx_ 1.1
Required for building the documentation
`Graphviz `_
- Required for building the documentation. The python wrapper package (``graphviz``)
- and the program itself both need to be installed.
+ Required for building the documentation.
Interface Dependencies
~~~~~~~~~~~~~~~~~~~~~~
@@ -148,7 +150,7 @@ Slicer_
3.6 or later
Nipy_
- 0.1.2+20110404 or later
+ 0.4 or later
Nitime_
(optional)
diff --git a/doc/users/resource_sched_profiler.rst b/doc/users/resource_sched_profiler.rst
index f911429a69..37404b27da 100644
--- a/doc/users/resource_sched_profiler.rst
+++ b/doc/users/resource_sched_profiler.rst
@@ -1,8 +1,8 @@
.. _resource_sched_profiler:
-============================================
+=============================================
Resource Scheduling and Profiling with Nipype
-============================================
+=============================================
The latest version of Nipype supports system resource scheduling and profiling.
These features allows users to ensure high throughput of their data processing
while also controlling the amount of computing resources a given workflow will
diff --git a/docker/base.Dockerfile b/docker/base.Dockerfile
index e21385e44c..224028fe3b 100644
--- a/docker/base.Dockerfile
+++ b/docker/base.Dockerfile
@@ -100,6 +100,8 @@ RUN apt-get update && \
apt-utils \
fusefat \
make \
+ # Added g++ to compile dipy in py3.6
+ g++=4:5.3.1-1ubuntu1 \
ruby=1:2.3.0+1 && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
diff --git a/nipype/external/due.py b/nipype/external/due.py
index b053765183..658ffa0aba 100644
--- a/nipype/external/due.py
+++ b/nipype/external/due.py
@@ -54,11 +54,7 @@ def _donothing_func(*args, **kwargs):
if 'due' in locals() and not hasattr(due, 'cite'):
raise RuntimeError(
"Imported due lacks .cite. DueCredit is now disabled")
-except Exception as e:
- if type(e).__name__ != 'ImportError':
- import logging
- logging.getLogger("duecredit").error(
- "Failed to import duecredit due to %s" % str(e))
+except ImportError:
# Initiate due stub
due = InactiveDueCreditCollector()
BibTeX = Doi = Url = _donothing_func
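Catching only ``ImportError`` preserves the intended fallback: when duecredit is not installed, citations become silent no-ops, while any other failure now propagates instead of being logged and swallowed. A standalone sketch of the stub pattern (assuming duecredit's public names)::

    try:
        from duecredit import due, BibTeX, Doi, Url  # real collector, if installed
    except ImportError:
        # Fall back to inert stand-ins so decorated code keeps working.
        class InactiveDueCreditCollector(object):
            def _donothing(self, *args, **kwargs):
                pass
            cite = dcite = add = load = _donothing

        def _donothing_func(*args, **kwargs):
            pass

        due = InactiveDueCreditCollector()
        BibTeX = Doi = Url = _donothing_func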
diff --git a/nipype/info.py b/nipype/info.py
index c70b555ee3..e437d02149 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -55,6 +55,7 @@ def get_nipype_gitversion():
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering']
description = 'Neuroimaging in Python: Pipelines and Interfaces'
@@ -94,16 +95,16 @@ def get_nipype_gitversion():
"""
# versions
-NIBABEL_MIN_VERSION = '2.0.1'
+NIBABEL_MIN_VERSION = '2.1.0'
NETWORKX_MIN_VERSION = '1.7'
NUMPY_MIN_VERSION = '1.6.2'
SCIPY_MIN_VERSION = '0.11'
TRAITS_MIN_VERSION = '4.6'
DATEUTIL_MIN_VERSION = '1.5'
PYTEST_MIN_VERSION = '3.0'
-FUTURE_MIN_VERSION = '0.15.2'
+FUTURE_MIN_VERSION = '0.16.0'
SIMPLEJSON_MIN_VERSION = '3.8.0'
-PROV_MIN_VERSION = '1.4.0'
+PROV_MIN_VERSION = '1.5.0'
CLICK_MIN_VERSION = '6.6.0'
NAME = 'nipype'
@@ -138,23 +139,20 @@ def get_nipype_gitversion():
'click>=%s' % CLICK_MIN_VERSION,
'funcsigs',
'configparser',
- 'pytest>=%s' % PYTEST_MIN_VERSION
+ 'pytest>=%s' % PYTEST_MIN_VERSION,
+ 'mock',
+ 'pydotplus'
]
TESTS_REQUIRES = [
- 'pytest>=%s' % PYTEST_MIN_VERSION,
'pytest-cov',
- 'mock',
- 'codecov',
- 'dipy',
- 'nipy',
- 'matplotlib'
+ 'codecov'
]
EXTRA_REQUIRES = {
'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus'],
'tests': TESTS_REQUIRES,
- 'fmri': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'],
+ 'nipy': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'],
'profiler': ['psutil'],
'duecredit': ['duecredit'],
'xvfbwrapper': ['xvfbwrapper'],
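With the ``fmri`` extra renamed to ``nipy`` and the test-only packages trimmed, optional features are pulled in per extra, e.g. ``pip install nipype[nipy]`` or ``pip install nipype[profiler]``. A self-contained sketch (the ``all`` derivation below is an assumption, not a quote of info.py) of how an aggregate extra could be built from this mapping::

    EXTRA_REQUIRES = {
        'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus'],
        'tests': ['pytest-cov', 'codecov'],
        'nipy': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'],
        'profiler': ['psutil'],
        'duecredit': ['duecredit'],
        'xvfbwrapper': ['xvfbwrapper'],
    }
    # One way to back `pip install nipype[all]`: the union of every extra.
    EXTRA_REQUIRES['all'] = sorted({pkg for deps in EXTRA_REQUIRES.values()
                                    for pkg in deps})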
diff --git a/nipype/pipeline/engine/tests/test_engine.py b/nipype/pipeline/engine/tests/test_engine.py
index 43345bb8c6..5cd107bc69 100644
--- a/nipype/pipeline/engine/tests/test_engine.py
+++ b/nipype/pipeline/engine/tests/test_engine.py
@@ -677,7 +677,8 @@ def test_write_graph_runs(tmpdir):
mod2 = pe.Node(interface=EngineTestInterface(), name='mod2')
pipe.connect([(mod1, mod2, [('output1', 'input1')])])
try:
- pipe.write_graph(graph2use=graph, simple_form=simple)
+ pipe.write_graph(graph2use=graph, simple_form=simple,
+ format='dot')
except Exception:
assert False, \
'Failed to plot {} {} graph'.format(
@@ -708,7 +709,8 @@ def test_deep_nested_write_graph_runs(tmpdir):
mod1 = pe.Node(interface=EngineTestInterface(), name='mod1')
parent.add_nodes([mod1])
try:
- pipe.write_graph(graph2use=graph, simple_form=simple)
+ pipe.write_graph(graph2use=graph, simple_form=simple,
+ format='dot')
except Exception as e:
assert False, \
'Failed to plot {} {} deep graph: {!s}'.format(
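Passing ``format='dot'`` keeps these graph-writing tests from shelling out to Graphviz, so they still pass on machines without the ``dot`` binary. A hedged usage sketch of the same call on a throwaway two-node workflow (interface choice and paths are assumptions)::

    import tempfile
    import nipype.pipeline.engine as pe
    from nipype.interfaces.utility import IdentityInterface

    wf = pe.Workflow(name='demo', base_dir=tempfile.mkdtemp())
    mod1 = pe.Node(IdentityInterface(fields=['x']), name='mod1')
    mod2 = pe.Node(IdentityInterface(fields=['x']), name='mod2')
    wf.connect([(mod1, mod2, [('x', 'x')])])
    # Only the .dot sources are written; Graphviz is never invoked.
    wf.write_graph(graph2use='exec', simple_form=True, format='dot')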
diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py
index b290b01f0d..0efd35534f 100644
--- a/nipype/pipeline/engine/utils.py
+++ b/nipype/pipeline/engine/utils.py
@@ -1019,20 +1019,22 @@ def export_graph(graph_in, base_dir=None, show=False, use_execgraph=False,
use_ext=False,
newpath=base_dir)
_write_detailed_dot(graph, outfname)
- cmd = 'dot -T%s -O %s' % (format, outfname)
- res = CommandLine(cmd, terminal_output='allatonce').run()
- if res.runtime.returncode:
- logger.warn('dot2png: %s', res.runtime.stderr)
+ if format != 'dot':
+ cmd = 'dot -T%s -O %s' % (format, outfname)
+ res = CommandLine(cmd, terminal_output='allatonce').run()
+ if res.runtime.returncode:
+ logger.warn('dot2png: %s', res.runtime.stderr)
pklgraph = _create_dot_graph(graph, show_connectinfo, simple_form)
simplefname = fname_presuffix(dotfilename,
suffix='.dot',
use_ext=False,
newpath=base_dir)
nx.drawing.nx_pydot.write_dot(pklgraph, simplefname)
- cmd = 'dot -T%s -O %s' % (format, simplefname)
- res = CommandLine(cmd, terminal_output='allatonce').run()
- if res.runtime.returncode:
- logger.warn('dot2png: %s', res.runtime.stderr)
+ if format != 'dot':
+ cmd = 'dot -T%s -O %s' % (format, simplefname)
+ res = CommandLine(cmd, terminal_output='allatonce').run()
+ if res.runtime.returncode:
+ logger.warn('dot2png: %s', res.runtime.stderr)
if show:
pos = nx.graphviz_layout(pklgraph, prog='dot')
nx.draw(pklgraph, pos)
@@ -1045,18 +1047,17 @@ def export_graph(graph_in, base_dir=None, show=False, use_execgraph=False,
return simplefname if simple_form else outfname
-def format_dot(dotfilename, format=None):
+def format_dot(dotfilename, format='png'):
"""Dump a directed graph (Linux only; install via `brew` on OSX)"""
- cmd = 'dot -T%s -O \'%s\'' % (format, dotfilename)
- try:
- CommandLine(cmd).run()
- except IOError as ioe:
- if "could not be found" in str(ioe):
- raise IOError("Cannot draw directed graph; executable 'dot' is unavailable")
- else:
- raise ioe
-
if format != 'dot':
+ cmd = 'dot -T%s -O \'%s\'' % (format, dotfilename)
+ try:
+ CommandLine(cmd).run()
+ except IOError as ioe:
+ if "could not be found" in str(ioe):
+ raise IOError("Cannot draw directed graph; executable 'dot' is unavailable")
+ else:
+ raise ioe
dotfilename += '.%s' % format
return dotfilename
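Both ``export_graph`` and ``format_dot`` now treat ``format='dot'`` as 'emit the .dot source and stop', which is exactly what the tests above rely on. A small illustration of the resulting behaviour (path is arbitrary)::

    from nipype.pipeline.engine.utils import format_dot

    # With format='dot' the filename is returned unchanged and the Graphviz
    # executable is never run; any other format still shells out to
    # `dot -T<fmt> -O <file>` and appends the extension.
    assert format_dot('/tmp/graph.dot', format='dot') == '/tmp/graph.dot'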
diff --git a/nipype/sphinxext/plot_workflow.py b/nipype/sphinxext/plot_workflow.py
index 1ed10aef42..8876a6878d 100644
--- a/nipype/sphinxext/plot_workflow.py
+++ b/nipype/sphinxext/plot_workflow.py
@@ -113,22 +113,30 @@
from errno import EEXIST
import traceback
-from docutils.parsers.rst import directives
-from docutils.parsers.rst.directives.images import Image
-
+missing_imports = []
+try:
+ from docutils.parsers.rst import directives
+ from docutils.parsers.rst.directives.images import Image
+ align = Image.align
+except ImportError as e:
+ missing_imports = [str(e)]
try:
# Sphinx depends on either Jinja or Jinja2
import jinja2
def format_template(template, **kw):
return jinja2.Template(template).render(**kw)
-except ImportError:
- import jinja
- def format_template(template, **kw):
- return jinja.from_string(template, **kw)
+except ImportError as e:
+ missing_imports.append(str(e))
+ try:
+ import jinja
+ def format_template(template, **kw):
+ return jinja.from_string(template, **kw)
+ missing_imports.pop()
+ except ImportError as e:
+ missing_imports.append(str(e))
from builtins import str, bytes
-align = Image.align
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
@@ -152,7 +160,10 @@ def _mkdirp(folder):
def wf_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
- return run(arguments, content, options, state_machine, state, lineno)
+ if len(missing_imports) == 0:
+ return run(arguments, content, options, state_machine, state, lineno)
+ else:
+ raise ImportError('\n'.join(missing_imports))
wf_directive.__doc__ = __doc__
def _option_boolean(arg):
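Rather than failing as soon as Sphinx imports the extension, the module now records which optional dependencies are missing and raises only when the workflow directive is actually used. The deferred-import pattern in isolation, as a minimal sketch (names assumed)::

    missing_imports = []
    try:
        import docutils  # stands in for any optional dependency
    except ImportError as e:
        missing_imports.append(str(e))

    def directive_entry_point(*args, **kwargs):
        """Fail only when the feature is exercised, not at import time."""
        if missing_imports:
            raise ImportError('\n'.join(missing_imports))
        return []  # real directive work would go here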
diff --git a/requirements.txt b/requirements.txt
index 6953af5507..65b9beb7ab 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,13 +3,13 @@ scipy>=0.11
networkx>=1.7
traits>=4.6
python-dateutil>=1.5
-nibabel>=2.0.1
-future>=0.15.2
+nibabel>=2.1.0
+future>=0.16.0
simplejson>=3.8.0
-prov>=1.4.0
+prov>=1.5.0
click>=6.6.0
-psutil
funcsigs
configparser
pytest>=3.0
-pytest-cov
+mock
+pydotplus
diff --git a/rtd_requirements.txt b/rtd_requirements.txt
index 1f60cd351f..71e2e3d4bc 100644
--- a/rtd_requirements.txt
+++ b/rtd_requirements.txt
@@ -3,13 +3,14 @@ scipy>=0.11
networkx>=1.7
traits>=4.6
python-dateutil>=1.5
-nibabel>=2.0.1
-pytest>=3.0
-pytest-cov
-future>=0.15.2
+nibabel>=2.1.0
+future>=0.16.0
simplejson>=3.8.0
-prov>=1.4.0
-psutil
+prov>=1.5.0
funcsigs
configparser
+pytest>=3.0
+mock
+pydotplus
+psutil
matplotlib
diff --git a/tools/interfacedocgen.py b/tools/interfacedocgen.py
index 03ef31eaf8..3eb7467c4b 100644
--- a/tools/interfacedocgen.py
+++ b/tools/interfacedocgen.py
@@ -440,6 +440,30 @@ def write_modules_api(self, modules, outdir):
if not api_str:
continue
# write out to file
+ mvalues = m.split('.')
+ if len(mvalues) > 3:
+ index_prefix = '.'.join(mvalues[1:3])
+ index_dir = os.path.join(outdir,
+ index_prefix)
+ index_file = index_dir + self.rst_extension
+ if not os.path.exists(index_dir):
+ os.makedirs(index_dir)
+ header = """.. AUTO-GENERATED FILE -- DO NOT EDIT!
+
+{name}
+{underline}
+
+.. toctree::
+ :maxdepth: 1
+ :glob:
+
+ {name}/*
+ """.format(name=index_prefix,
+ underline='='*len(index_prefix))
+ with open(index_file, 'wt') as fp:
+ fp.write(header)
+ m = os.path.join(index_prefix,
+ '.'.join(mvalues[3:]))
outfile = os.path.join(outdir,
m + self.rst_extension)
fileobj = open(outfile, 'wt')
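For a module such as ``nipype.interfaces.fsl.utils`` the new branch writes an ``interfaces.fsl.rst`` index containing a glob toctree and then nests the module page under ``interfaces.fsl/``. The same path arithmetic as a standalone sketch (helper name assumed)::

    import os

    def split_module(m, rst_extension='.rst'):
        """Mimic the grouping above: 'nipype.interfaces.fsl.utils' ->
        ('interfaces.fsl.rst', 'interfaces.fsl/utils.rst')."""
        mvalues = m.split('.')
        if len(mvalues) <= 3:
            return None, m + rst_extension
        index_prefix = '.'.join(mvalues[1:3])
        page = os.path.join(index_prefix, '.'.join(mvalues[3:]))
        return index_prefix + rst_extension, page + rst_extension

    print(split_module('nipype.interfaces.fsl.utils'))
    # -> ('interfaces.fsl.rst', 'interfaces.fsl/utils.rst')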