diff --git a/.circleci/config.yml b/.circleci/config.yml index c2356cebdf..7e826eb547 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,5 +1,5 @@ _machine_kwds: &machine_kwds - image: circleci/classic:201808-01 + image: ubuntu-2004:202107-02 _store_artifacts_kwds: &store_artifacts_kwds path: /home/circleci/work/tests @@ -43,33 +43,20 @@ _get_base_image: &get_base_image exit 1 fi -_build_main_image_py36: &build_main_image_py36 - name: Build main image (py36) +_build_main_image_py38: &build_main_image_py38 + name: Build main image (py38) no_output_timeout: 60m command: | tools/retry_cmd.sh -n 5 -s 15 \ docker build \ --rm=false \ --tag nipype/nipype:latest \ - --tag nipype/nipype:py36 \ + --tag nipype/nipype:py38 \ + $(test -z "${CIRCLE_TAG}" || echo --tag nipype/nipype:"${CIRCLE_TAG}") \ --build-arg BUILD_DATE="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \ --build-arg VCS_REF="$(git rev-parse --short HEAD)" \ --build-arg VERSION="${CIRCLE_TAG}" /home/circleci/nipype -_build_main_image_py27: &build_main_image_py27 - name: Build main image (py27) - no_output_timeout: 60m - command: | - tools/retry_cmd.sh -n 5 -s 15 \ - docker build \ - --rm=false \ - --tag nipype/nipype:py27 \ - --build-arg PYTHON_VERSION_MAJOR=2 \ - --build-arg PYTHON_VERSION_MINOR=7 \ - --build-arg BUILD_DATE="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \ - --build-arg VCS_REF="$(git rev-parse --short HEAD)" \ - --build-arg VERSION="${CIRCLE_TAG}-py27" /home/circleci/nipype - _download_test_data: &_download_test_data name: Download test data no_output_timeout: 20m @@ -77,14 +64,21 @@ _download_test_data: &_download_test_data environment: OSF_NIPYPE_URL: "https://files.osf.io/v1/resources/nefdp/providers/osfstorage" command: | + set -x export DATA_NIPYPE_TUTORIAL_URL="${OSF_NIPYPE_URL}/57f4739cb83f6901ed94bf21" - curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_TUTORIAL_URL" | tar xj + if [[ ! -d nipype-tutorial ]]; then + curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_TUTORIAL_URL" | tar xjv + fi export DATA_NIPYPE_FSL_COURSE="${OSF_NIPYPE_URL}/57f472cf9ad5a101f977ecfe" - curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_COURSE" | tar xz + if [[ ! -d nipype-fsl_course_data ]]; then + curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_COURSE" | tar xzv + fi export DATA_NIPYPE_FSL_FEEDS="${OSF_NIPYPE_URL}/57f473066c613b01f113e7af" - curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_FEEDS" | tar xz + if [[ ! -d feeds ]]; then + curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_FEEDS" | tar xzv + fi _prepare_working_directory: &prepare_working_directory name: Prepare working directory @@ -113,10 +107,9 @@ _run_codecov_smoke: &_run_codecov_smoke version: 2 jobs: - compare_base_dockerfiles: docker: - - image: docker:17.10.0-ce-git + - image: cimg/base:2022.04 steps: - checkout: path: /home/circleci/nipype @@ -126,10 +119,10 @@ jobs: working_directory: /home/circleci/nipype/docker command: | mkdir -p /tmp/docker - ash ./generate_dockerfiles.sh -b + bash ./generate_dockerfiles.sh -b # Use the sha256 sum of the pruned Dockerfile as the cache key. 
- ash prune_dockerfile.sh Dockerfile.base > /tmp/docker/Dockerfile.base-pruned + bash prune_dockerfile.sh Dockerfile.base > /tmp/docker/Dockerfile.base-pruned - restore_cache: key: dockerfile-cache-v1-master-{{ checksum "/tmp/docker/Dockerfile.base-pruned" }} - run: @@ -148,6 +141,23 @@ jobs: - docker/Dockerfile.base-pruned - docker/get_base_image.sh + get_test_data: + machine: *machine_kwds + working_directory: /home/circleci/nipype + steps: + - restore_cache: + keys: + - data-v0-{{ .Branch }}-{{ .Revision }} + - data-v0--{{ .Revision }} + - data-v0-{{ .Branch }}- + - data-v0-master- + - data-v0- + - run: *_download_test_data + - save_cache: + key: data-v0-{{ .Branch }}-{{ .Revision }} + paths: + - /home/circleci/examples + test_pytest: machine: *machine_kwds working_directory: /home/circleci/nipype @@ -156,35 +166,24 @@ jobs: path: /home/circleci/nipype - attach_workspace: at: /tmp + - restore_cache: + keys: + - data-v0-{{ .Branch }}-{{ .Revision }} - run: *set_pr_number - run: *generate_dockerfiles - run: *modify_nipype_version - run: *get_base_image - - run: *build_main_image_py36 - - run: *build_main_image_py27 + - run: *build_main_image_py38 - run: *_get_codecov - - run: *_download_test_data - run: *prepare_working_directory - run: - name: Run pytests (py36) + name: Run pytests no_output_timeout: 30m environment: *test_environment - command: bash -ux /home/circleci/nipype/.circleci/test_py3_pytest.sh - - run: - name: Run pytests (py27) - no_output_timeout: 30m - environment: *test_environment - command: bash -ux /home/circleci/nipype/.circleci/test_py2_pytest.sh + command: bash -ux /home/circleci/nipype/.circleci/test_pytest.sh - run: *_run_codecov_coverage - store_artifacts: *store_artifacts_kwds - store_test_results: *store_artifacts_kwds - - run: - name: Build docs (py36) - no_output_timeout: 30m - environment: *test_environment - command: bash -ux /home/circleci/nipype/.circleci/test_py3_docs.sh - - store_artifacts: - path: /home/circleci/work/docs - run: name: Save Docker images to workspace if on master no_output_timeout: 60m @@ -192,113 +191,14 @@ jobs: if [ "$CIRCLE_BRANCH" = "master" -a -z "$CIRCLE_PULL_REQUEST" ]; then docker save nipype/nipype:base \ nipype/nipype:latest \ - nipype/nipype:py27 \ - nipype/nipype:py36 | gzip -1 > /tmp/docker/nipype-base-latest-py36-py27.tar.gz \ - && du -h /tmp/docker/nipype-base-latest-py36-py27.tar.gz + nipype/nipype:py38 | gzip -1 > /tmp/docker/nipype-base-latest-py38.tar.gz \ + && du -h /tmp/docker/nipype-base-latest-py38.tar.gz fi - persist_to_workspace: root: /tmp paths: - docker - test_py3_fmri_fsl_spm: - machine: *machine_kwds - working_directory: /home/circleci/nipype - steps: - - checkout: - path: /home/circleci/nipype - - attach_workspace: - at: /tmp - - run: *set_pr_number - - run: *generate_dockerfiles - - run: *modify_nipype_version - - run: *get_base_image - - run: *build_main_image_py36 - - run: *_get_codecov - - run: *_download_test_data - - run: *prepare_working_directory - - run: - name: Run FSL reuse pipeline (py36) - no_output_timeout: 40m - environment: *test_environment - command: bash -ux /home/circleci/nipype/.circleci/test_py3_fmri_fsl_reuse_linear_l1.sh - - run: - name: Run SPM test workflow - 3D inputs (py36) - no_output_timeout: 40m - environment: *test_environment - command: bash -ux /home/circleci/nipype/.circleci/test_py3_fmri_spm_linear_3d.sh - - run: - name: Run SPM test workflow - 4D inputs (py36) - no_output_timeout: 40m - environment: *test_environment - command: bash -ux 
/home/circleci/nipype/.circleci/test_py3_fmri_spm_linear_4d.sh - - run: *_run_codecov_smoke - - store_artifacts: *store_artifacts_kwds - - test_py3_fmri_spm_dartel_multiproc: - machine: *machine_kwds - working_directory: /home/circleci/nipype - steps: - - checkout: - path: /home/circleci/nipype - - attach_workspace: - at: /tmp - - run: *set_pr_number - - run: *generate_dockerfiles - - run: *modify_nipype_version - - run: *get_base_image - - run: *build_main_image_py36 - - run: *_get_codecov - - run: *_download_test_data - - run: *prepare_working_directory - - run: - name: Run SPM DARTEL Level 1 pipeline (py36) - no_output_timeout: 1h - environment: *test_environment - command: bash -ux /home/circleci/nipype/.circleci/test_py3_fmri_spm_dartel_multiproc_l1.sh - - run: - name: Run SPM DARTEL Level 2 pipeline (py36) - no_output_timeout: 30m - environment: *test_environment - command: bash -ux /home/circleci/nipype/.circleci/test_py3_fmri_spm_dartel_multiproc_l2.sh - - run: *_run_codecov_smoke - - store_artifacts: *store_artifacts_kwds - - test_fmri_spm_nested_fsl_feeds: - machine: *machine_kwds - working_directory: /home/circleci/nipype - steps: - - checkout: - path: /home/circleci/nipype - - attach_workspace: - at: /tmp - - run: *set_pr_number - - run: *generate_dockerfiles - - run: *modify_nipype_version - - run: *get_base_image - - run: *build_main_image_py36 - - run: *build_main_image_py27 - - run: *_get_codecov - - run: *_download_test_data - - run: *prepare_working_directory - - run: - name: Run SPM Nested Level 1 pipeline (py36) - no_output_timeout: 1h - environment: *test_environment - command: bash -ux /home/circleci/nipype/.circleci/test_py3_fmri_spm_nested_multiproc_l1.sh - - run: - name: Run SPM Nested Level 2 pipeline (py27) - no_output_timeout: 30m - environment: *test_environment - command: bash -ux /home/circleci/nipype/.circleci/test_py2_fmri_spm_nested_multiproc_l2.sh - - run: - name: Run FSL FEEDS pipeline (py36) - no_output_timeout: 40m - environment: *test_environment - command: bash -ux /home/circleci/nipype/.circleci/test_py3_fmri_fsl_feeds_linear_l1.sh - - run: *_run_codecov_smoke - - store_artifacts: *store_artifacts_kwds - deploy_dockerhub: docker: - image: docker:17.10.0-ce-git @@ -310,7 +210,7 @@ jobs: name: Load saved Docker images. 
no_output_timeout: 60m command: | - docker load < /tmp/docker/nipype-base-latest-py36-py27.tar.gz + docker load < /tmp/docker/nipype-base-latest-py38.tar.gz - run: name: Push to DockerHub no_output_timeout: 120m @@ -318,8 +218,8 @@ jobs: echo "$DOCKER_PASS" | docker login -u "$DOCKER_USER" --password-stdin docker push nipype/nipype:base docker push nipype/nipype:latest - docker push nipype/nipype:py36 - docker push nipype/nipype:py27 + docker push nipype/nipype:py38 + test -z "${CIRCLE_TAG}" || docker push nipype/nipype:"${CIRCLE_TAG}" - run: name: Move pruned Dockerfile to /tmp/docker/cache directory command: | @@ -330,59 +230,6 @@ jobs: - /tmp/docker/cache/Dockerfile.base-pruned key: dockerfile-cache-v1-{{ .Branch }}-{{ checksum "/tmp/docker/cache/Dockerfile.base-pruned" }} - pypi_precheck: - machine: *machine_kwds - working_directory: /home/circleci/nipype - steps: - - checkout: - path: /home/circleci/nipype - - run: - name: Check pypi preconditions - command: | - pip install --upgrade twine future wheel readme_renderer setuptools - python setup.py check -r -s - python setup.py sdist bdist_wheel - - run: - name: Validate Python 2 installation - command: | - pyenv local 2.7.12 - pip install dist/nipype-*-py2.py3-none-any.whl - # Futures should install in Python 2 - pip show futures 2>/dev/null | grep "Name: futures" - - run: - name: Validate Python 3 installation - command: | - pyenv local 3.5.2 - pip install dist/nipype-*-py2.py3-none-any.whl - # Futures should not install in Python 3 - test $(pip show futures 2>/dev/null | wc -l) = "0" - - run: - name: Validate Python 3.7 installation - command: | - pyenv local 3.7.0 - pip install --upgrade pip - # Pre-install a version of numpy that will not pass - pip install numpy==1.15.0 - pip install dist/nipype-*-py2.py3-none-any.whl - # Numpy should be upgraded to >= 1.15.3 - test "$(pip show numpy | grep Version)" \> "Version: 1.15.2" - - store_artifacts: - path: /home/circleci/nipype/dist - - deploy_pypi: - machine: *machine_kwds - working_directory: /home/circleci/nipype - steps: - - checkout: - path: /home/circleci/nipype - - run: - name: Deploy to PyPI - command: | - pip install --upgrade twine future wheel readme_renderer setuptools - python setup.py check -r -s - python setup.py sdist bdist_wheel - twine upload dist/* - update_feedstock: machine: *machine_kwds working_directory: /home/circleci/nipype @@ -392,9 +239,9 @@ jobs: - run: name: Install hub command: | - curl -sSL https://github.com/github/hub/releases/download/v2.2.9/hub-linux-amd64-2.2.9.tgz | \ + curl -sSL https://github.com/github/hub/releases/download/v2.14.2/hub-linux-amd64-2.14.2.tgz | \ tar zxv -C /tmp - sudo /tmp/hub-linux-amd64-2.2.9/install + sudo /tmp/hub-linux-amd64-2.14.2/install - run: name: Expand SSH Key command: | @@ -404,10 +251,11 @@ jobs: chmod go-rwx ~/.ssh/id_ed25519 ssh-keygen -y -f ~/.ssh/id_ed25519 > ~/.ssh/id_ed25519.pub - run: - name: Set git identity + name: Set git/hub configuration command: | git config --global user.name "nipybot" git config --global user.email "nipybot@gmail.com" + git config --global hub.protocol ssh - run: name: Update feedstock command: | @@ -418,54 +266,4 @@ jobs: workflows: version: 2 build_test_deploy: - jobs: - - pypi_precheck: - filters: - branches: - only: /rel\/.*/ - tags: - only: /.*/ - - compare_base_dockerfiles: - filters: - tags: - only: /.*/ - - test_pytest: - filters: - tags: - only: /.*/ - requires: - - compare_base_dockerfiles - - test_py3_fmri_fsl_spm: - requires: - - compare_base_dockerfiles - - 
test_py3_fmri_spm_dartel_multiproc: - requires: - - compare_base_dockerfiles - - test_fmri_spm_nested_fsl_feeds: - requires: - - compare_base_dockerfiles - - deploy_dockerhub: - filters: - branches: - only: master - requires: - - test_pytest - - test_fmri_spm_nested_fsl_feeds - - test_py3_fmri_fsl_spm - - test_py3_fmri_spm_dartel_multiproc - - deploy_pypi: - filters: - branches: - ignore: /.*/ - tags: - only: /.*/ - requires: - - pypi_precheck - - test_pytest - - update_feedstock: - context: nipybot - filters: - branches: - only: /rel\/.*/ - tags: - only: /.*/ + jobs: [] diff --git a/.circleci/test_py2_fmri_spm_nested_multiproc_l2.sh b/.circleci/test_py2_fmri_spm_nested_multiproc_l2.sh deleted file mode 100644 index 1aa2a4f8c5..0000000000 --- a/.circleci/test_py2_fmri_spm_nested_multiproc_l2.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work -e NIPYPE_NUMBER_OF_CPUS=4 -e NIPYPE_RESOURCE_MONITOR=1 "${DOCKER_IMAGE}:py27" /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ l2pipeline diff --git a/.circleci/test_py2_pytest.sh b/.circleci/test_py2_pytest.sh deleted file mode 100644 index df1489d240..0000000000 --- a/.circleci/test_py2_pytest.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work -e CI_SKIP_TEST=1 -e NIPYPE_RESOURCE_MONITOR=1 "${DOCKER_IMAGE}:py27" /usr/bin/run_pytests.sh diff --git a/.circleci/test_py3_docs.sh b/.circleci/test_py3_docs.sh deleted file mode 100644 index a050caf66c..0000000000 --- a/.circleci/test_py3_docs.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /src/nipype/doc "${DOCKER_IMAGE}:py36" /usr/bin/run_builddocs.sh diff --git a/.circleci/test_py3_fmri_fsl_feeds_linear_l1.sh b/.circleci/test_py3_fmri_fsl_feeds_linear_l1.sh deleted file mode 100644 index 9666829b74..0000000000 --- a/.circleci/test_py3_fmri_fsl_feeds_linear_l1.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh fmri_fsl_feeds Linear /data/examples/ l1pipeline diff --git a/.circleci/test_py3_fmri_fsl_reuse_linear_l1.sh b/.circleci/test_py3_fmri_fsl_reuse_linear_l1.sh deleted file mode 100644 index 48be49d80d..0000000000 --- a/.circleci/test_py3_fmri_fsl_reuse_linear_l1.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh fmri_fsl_reuse Linear /data/examples/ level1_workflow diff --git a/.circleci/test_py3_fmri_spm_dartel_multiproc_l1.sh b/.circleci/test_py3_fmri_spm_dartel_multiproc_l1.sh deleted file mode 100644 index 4208eed506..0000000000 --- a/.circleci/test_py3_fmri_spm_dartel_multiproc_l1.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh fmri_spm_dartel MultiProc /data/examples/ level1 diff --git a/.circleci/test_py3_fmri_spm_dartel_multiproc_l2.sh b/.circleci/test_py3_fmri_spm_dartel_multiproc_l2.sh deleted file mode 100644 index 86119e7654..0000000000 --- a/.circleci/test_py3_fmri_spm_dartel_multiproc_l2.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w 
/work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh fmri_spm_dartel MultiProc /data/examples/ l2pipeline diff --git a/.circleci/test_py3_fmri_spm_linear_3d.sh b/.circleci/test_py3_fmri_spm_linear_3d.sh deleted file mode 100644 index 27c2c92a1a..0000000000 --- a/.circleci/test_py3_fmri_spm_linear_3d.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow3d diff --git a/.circleci/test_py3_fmri_spm_linear_4d.sh b/.circleci/test_py3_fmri_spm_linear_4d.sh deleted file mode 100644 index cd255d60ca..0000000000 --- a/.circleci/test_py3_fmri_spm_linear_4d.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow4d diff --git a/.circleci/test_py3_fmri_spm_nested_multiproc_l1.sh b/.circleci/test_py3_fmri_spm_nested_multiproc_l1.sh deleted file mode 100644 index a6d2133a42..0000000000 --- a/.circleci/test_py3_fmri_spm_nested_multiproc_l1.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work -e NIPYPE_NUMBER_OF_CPUS=4 "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 diff --git a/.circleci/test_py3_pytest.sh b/.circleci/test_pytest.sh similarity index 72% rename from .circleci/test_py3_pytest.sh rename to .circleci/test_pytest.sh index b93208f395..832a52a02b 100644 --- a/.circleci/test_py3_pytest.sh +++ b/.circleci/test_pytest.sh @@ -1,3 +1,3 @@ #!/bin/bash -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work -e CI_SKIP_TEST=1 -e NIPYPE_RESOURCE_MONITOR=1 "${DOCKER_IMAGE}:py36" /usr/bin/run_pytests.sh +docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work -e CI_SKIP_TEST=1 -e NIPYPE_RESOURCE_MONITOR=1 "${DOCKER_IMAGE}:py38" /usr/bin/run_pytests.sh diff --git a/.codespellrc b/.codespellrc new file mode 100644 index 0000000000..6b83f1227e --- /dev/null +++ b/.codespellrc @@ -0,0 +1,13 @@ +[codespell] +skip = .git,*.pdf,*.svg,external +# nd,nam - import module short +# fith - oddness coming from AFNI +# whos - smth used in matlab things +# SMAL - Stanford CNI MRS Library +# Suh - name +# noo,crasher,afile - field/var name used +# Reson - short journal name +# ALS, FWE - neuroimaging specific abbrevs +# Comision - foreign word used +# expad - AFNI flag +ignore-words-list = te,inport,objekt,jist,nd,hel,inout,fith,whos,fot,ue,shs,smal,nam,filetest,suh,noo,reson,als,fwe,crasher,comision,afile,expad,burnin diff --git a/.dockerignore b/.dockerignore index fb4be03ec9..d6aeda95ff 100644 --- a/.dockerignore +++ b/.dockerignore @@ -22,13 +22,12 @@ src/ .git # other -docs/**/* -docs/ +doc/**/* +doc/ .cache/ .circle/**/* .circle/ circle.yml -rtd_requirements.txt Vagrantfile .travis.yml .mailmap diff --git a/.et b/.et new file mode 100644 index 0000000000..d271a1d060 --- /dev/null +++ b/.et @@ -0,0 +1,4 @@ +{ "bad_versions" : [ "1.2.1", + "1.2.3", + "1.3.0"] +} diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000000..253c1c6919 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,90 @@ +# Tue Feb 27 14:28:18 2024 -0500 - effigies@gmail.com - STY: black 2024.2 [ignore-rev] +f1cbc1c941f8d02659f8ef245aac0862efb80ccf +# Mon Sep 11 13:36:40 2023 
+0200 - 37933899+servoz@users.noreply.github.com - run black for nipype/interfaces/spm/preprocess.py +b9cac5e993143febb01ade42e56b41009427a4b6 +# Wed Jul 5 16:31:45 2023 -0400 - effigies@gmail.com - STY: Run black and fix typo +34a4ac6eeff8d4924b40875c45df5d84a97da90b +# Wed Jul 5 11:30:42 2023 -0400 - effigies@gmail.com - STY: black +3b89ca85faf2428ecf7844de9c0db4aa7c329c93 +# Wed Jul 5 09:49:31 2023 -0400 - effigies@gmail.com - STY: black +4a6a7d9d25d5d1e1f0eb55828dede58f8b9c9f80 +# Wed Apr 5 14:01:05 2023 -0400 - effigies@gmail.com - STY: black [ignore-rev] +a9ce9b78a402ebacf7726ad6454bb75b1447f52f +# Wed Sep 14 14:12:07 2022 -0400 - mathiasg@stanford.edu - STY: Black +f4a779223c6b0dffa47138d24ec9ef378c7164a9 +# Tue Apr 19 14:09:31 2022 -0400 - markiewicz@stanford.edu - STY: black [ignore-rev] +b9716ebd46541f7fb1b96a94cc35b5e2ea6c3bba +# Fri Apr 15 06:59:48 2022 -0700 - markiewicz@stanford.edu - STY: black [ignore-rev] +d223fbccda6dee0ef39e00084296a3292f2ccf87 +# Fri Apr 8 21:34:43 2022 -0400 - markiewicz@stanford.edu - STY: black [ignore-rev] +e3f56da124fd58041018c2e70d16a130ef189a66 +# Sun Apr 3 10:27:07 2022 -0400 - markiewicz@stanford.edu - STY: black [ignore-rev] +988c382ebfc7df964874b6287b9d9e27e274a4a4 +# Sat Apr 2 21:32:56 2022 -0400 - markiewicz@stanford.edu - STY: black [ignore-rev] +230fac6149d4dfbd5da669a983332a1fe318ef57 +# Sat Apr 2 12:49:15 2022 -0400 - markiewicz@stanford.edu - STY/TEST: Make specs and run black [ignore-rev] +2ba8dacb8cc1f6f9c5b15b1cfb7b0395d45dcfb3 +# Sun Mar 20 21:19:39 2022 -0400 - markiewicz@stanford.edu - STY: black [ignore-rev] +87cc1b54981113024ec3fd594881f72cf67513fb +# Wed Dec 8 17:02:09 2021 -0500 - markiewicz@stanford.edu - STY: black [ignore-rev] +5ac2f18ac116832d81a13f0c83e0a6aeda1457cf +# Thu Dec 2 09:30:42 2021 -0500 - markiewicz@stanford.edu - STY: black +ee50279ebf061a70ff502e7d2e51385b285cfda4 +# Tue Oct 26 11:18:02 2021 +0200 - code@oscaresteban.es - sty: run black +5d9adbbb77b7047b9b47cd2fa079dee0094cfc91 +# Wed Oct 20 12:07:15 2021 +0200 - fabio.bernardoni@uniklinikum-dresden.de - run black on the preprocess.py file to improve formatting +674e9b0eeca082efb5322b61fea57ee89a3e4a24 +# Wed Oct 13 16:08:23 2021 -0400 - markiewicz@stanford.edu - ENH: Add expected steps for FreeSurfer 7 recon-all (#3389) +8f7c0bf2ec9c819844a2736a9ae2f6eef19a8e7f +# Wed Oct 13 14:26:48 2021 -0400 - markiewicz@stanford.edu - STY: black +d8dbc6f7b6a5385535e2fa53b7c6af7aa1370f46 +# Wed Sep 29 16:53:54 2021 +0200 - code@oscaresteban.es - sty: run black on affected files +5f280da629bb7b5dce908633d2deea85b55dd67b +# Thu Jun 24 17:43:22 2021 +0200 - code@oscaresteban.es - sty: run black +135ce497a18adbe0811441c2b720910ec549aa6f +# Thu Sep 23 08:56:28 2021 -0400 - markiewicz@stanford.edu - STY: black +f69b3fb09560616822737764bb07272cd587e4a0 +# Fri Apr 30 17:19:55 2021 -0400 - markiewicz@stanford.edu - STY: black +04718ac71436b6f283af7575dda0f6998b64f893 +# Fri Apr 30 16:50:00 2021 -0400 - markiewicz@stanford.edu - STY: black +12deb959cccc431fb8222cc5854f1c92a0080021 +# Thu Apr 1 12:26:08 2021 -0400 - markiewicz@stanford.edu - STY: black +f64bf338f630a9ee5cbe7a3ec98c68292897e720 +# Thu Dec 3 09:24:05 2020 +1100 - tom.g.close@gmail.com - run black over touched files +c81259bc3b28baa1f18f95f6b056c228c6bfd115 +# Fri Aug 14 17:15:15 2020 -0400 - markiewicz@stanford.edu - STY: Black +83358d7f17aac07cb90d0330f11ea2322e2974d8 +# Sat Mar 14 12:44:20 2020 -0400 - markiewicz@stanford.edu - STY: black +faef7d0f93013a700c882f709e98fb3cd36ebb03 +# Sun Mar 8 15:05:28 2020 +0100 - 
3453485+daniel-ge@users.noreply.github.com - FIX: get length of generator + STY: Black +02991da67458b879d7c6360aa6457eb3c1bd5a07 +# Wed Mar 4 16:30:39 2020 -0500 - markiewicz@stanford.edu - STY: black +d50c1858564c0b3073fb23c54886a0454cb66afa +# Thu Feb 27 15:08:42 2020 -0800 - code@oscaresteban.es - sty: black +417b8897a116fcded5000e21e2b6ccbe29452a52 +# Thu Jan 2 11:29:05 2020 -0800 - code@oscaresteban.es - sty: black +aaf677a87f64c485f3e305799e4a5dc73b69e5fb +# Sun Dec 29 17:47:51 2019 -0800 - code@oscaresteban.es - sty: black +f763008442d88d8ce00ec266698268389415f8d6 +# Thu Jan 2 11:29:05 2020 -0800 - code@oscaresteban.es - sty: black +b1eccafd4edc8503b02d715f5b5f6f783520fdf9 +# Sun Dec 29 17:47:51 2019 -0800 - code@oscaresteban.es - sty: black +70db90349598cc7f26a4a513779529fba7d0a797 +# Thu Dec 19 09:22:22 2019 -0500 - markiewicz@stanford.edu - STY: Black +6c1d91d71f6f0db0e985bd2adc34206442b0653d +# Thu Dec 19 15:51:11 2019 -0500 - markiewicz@stanford.edu - STY: Black +97bdbd5f48ab242de5288ba4715192a27619a803 +# Fri Nov 15 14:38:10 2019 -0500 - steve@steventilley.com - run black +78fa360f5b785224349b8b85b07e510d2233bb63 +# Fri Nov 15 14:34:03 2019 -0500 - steve@steventilley.com - run black +7f85f43a34de8bff8e634232c939b17cee8e8fc5 +# Thu Nov 14 11:14:51 2019 -0500 - markiewicz@stanford.edu - Merge pull request #3096 from effigies/sty/black +1a869991adc024577536689d557fc748c764f15d +# Thu Nov 14 09:15:20 2019 -0500 - markiewicz@stanford.edu - STY: Black setup.py +9c50b5daa797def5672dd057155b0e2c658853e2 +# Thu Nov 14 09:14:38 2019 -0500 - markiewicz@stanford.edu - STY: Black for tools/ +47194993ae14aceeec436cfb3769def667196668 +# Wed Nov 13 23:41:15 2019 -0500 - markiewicz@stanford.edu - STY: Black +75653feadc6667d5313d83e9c62a5d5819771a9c +# Tue Nov 12 09:43:34 2019 -0500 - markiewicz@stanford.edu - STY: Black files pre-merge +497b44d680eee0892fa59c6aaaae22a17d70a536 diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index cfc5007b49..a765cabb95 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -17,7 +17,3 @@ Fixes # . ## List of changes proposed in this PR (pull-request) - -## Acknowledgment - -- [ ] \(Mandatory\) I acknowledge that this contribution will be available under the Apache 2 license. 
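
The .git-blame-ignore-revs file introduced above lists bulk restyling commits (mostly black runs) so that git blame can skip over them when attributing lines. Git does not pick this file up automatically: GitHub's blame view honors it by name at the repository root, but a local clone needs it enabled explicitly. A minimal way to do that with git 2.23 or newer:

    # persist for this clone; blame will then skip the listed commits
    git config blame.ignoreRevsFile .git-blame-ignore-revs

    # or apply it to a single invocation
    git blame --ignore-revs-file .git-blame-ignore-revs nipype/interfaces/spm/preprocess.py
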
diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..61d6e0c09e --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +# Dependabot configuration file +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + groups: + actions-infrastructure: + patterns: + - "actions/*" diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml new file mode 100644 index 0000000000..6f32efeaf1 --- /dev/null +++ b/.github/workflows/codespell.yml @@ -0,0 +1,19 @@ +--- +name: Codespell + +on: + push: + branches: [master] + pull_request: + branches: [master] + +jobs: + codespell: + name: Check for spelling errors + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Codespell + uses: codespell-project/actions-codespell@v2 diff --git a/.github/workflows/contrib.yml b/.github/workflows/contrib.yml new file mode 100644 index 0000000000..6b3ef96f0c --- /dev/null +++ b/.github/workflows/contrib.yml @@ -0,0 +1,84 @@ +name: Contribution checks + +# This checks validate contributions meet baseline checks +# +# * specs - Ensure make + +on: + push: + branches: + - master + - maint/* + pull_request: + branches: + - master + - maint/* + +defaults: + run: + shell: bash + +concurrency: + group: contrib-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + stable: + # Check each OS, all supported Python, minimum versions and latest releases + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: ["ubuntu-latest"] + python-version: ["3.12"] + nipype-extras: ["dev"] + check: ["specs", "style"] + env: + DEPENDS: "" + CHECK_TYPE: ${{ matrix.check }} + NIPYPE_EXTRAS: ${{ matrix.nipype-extras }} + EXTRA_PIP_FLAGS: "" + INSTALL_DEB_DEPENDENCIES: false + INSTALL_TYPE: pip + CI_SKIP_TEST: 1 + + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Create virtual environment + run: tools/ci/create_venv.sh + - name: Build archive + run: | + source tools/ci/build_archive.sh + echo "ARCHIVE=$ARCHIVE" >> $GITHUB_ENV + - name: Install Debian dependencies + run: tools/ci/install_deb_dependencies.sh + if: ${{ matrix.os == 'ubuntu-18.04' }} + - name: Install dependencies + run: tools/ci/install_dependencies.sh + - name: Install Nipype + run: tools/ci/install.sh + - name: Run tests + run: tools/ci/check.sh + if: ${{ matrix.check != 'skiptests' }} + - uses: codecov/codecov-action@v4 + with: + file: coverage.xml + token: ${{ secrets.CODECOV_TOKEN }} + if: ${{ always() }} + - name: Upload pytest test results + uses: actions/upload-artifact@v4 + with: + name: pytest-results-${{ matrix.os }}-${{ matrix.python-version }} + path: test-results.xml + if: ${{ always() && matrix.check == 'test' }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000000..41776bc188 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,173 @@ +name: Stable tests + +# This file tests the claimed support range of nipype including +# +# * Operating systems: Linux, OSX +# * Dependencies: minimum requirements, optional requirements +# * Installation methods: setup.py, sdist, wheel, archive + +on: + push: + branches: + - master 
+ - maint/* + tags: + - "*" + pull_request: + branches: + - master + - maint/* + schedule: + # 8am EST / 9am EDT Mondays + - cron: "0 13 * * 1" + +defaults: + run: + shell: bash + +concurrency: + group: tests-${{ github.ref }} + cancel-in-progress: true + +permissions: {} +jobs: + build: + permissions: + contents: read # to fetch code (actions/checkout) + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: actions/setup-python@v5 + with: + python-version: 3 + - run: pip install --upgrade build twine + - name: Build sdist and wheel + run: python -m build + - run: twine check dist/* + - uses: actions/upload-artifact@v4 + with: + name: dist + path: dist/ + + test-package: + runs-on: ubuntu-latest + needs: [build] + strategy: + matrix: + package: ["wheel", "sdist"] + steps: + - uses: actions/download-artifact@v4 + with: + name: dist + path: dist/ + - uses: actions/setup-python@v5 + with: + python-version: 3 + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Update pip + run: pip install --upgrade pip + - name: Install wheel + run: pip install dist/nipype-*.whl + if: matrix.package == 'wheel' + - name: Install sdist + run: pip install dist/nipype-*.tar.gz + if: matrix.package == 'sdist' + - run: python -c 'import nipype; print(nipype.__version__)' + - name: Install test extras + run: pip install nipype[tests] + - name: Run tests + run: pytest --doctest-modules -v --pyargs nipype + + stable: + # Check each OS, all supported Python, minimum versions and latest releases + permissions: + contents: read # to fetch code (actions/checkout) + + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: ["ubuntu-22.04"] + python-version: ["3.9", "3.10", "3.11", "3.12"] + check: ["test"] + pip-flags: [""] + depends: ["REQUIREMENTS"] + deb-depends: [false] + nipype-extras: ["doc,tests,profiler"] + include: + - os: ubuntu-22.04 + python-version: "3.9" + check: test + pip-flags: "" + depends: REQUIREMENTS + deb-depends: true + nipype-extras: doc,tests,profiler,duecredit,ssh + - os: ubuntu-20.04 + python-version: "3.9" + check: test + pip-flags: "" + depends: REQUIREMENTS + deb-depends: true + nipype-extras: doc,tests,nipy,profiler,duecredit,ssh + env: + DEPENDS: ${{ matrix.depends }} + CHECK_TYPE: ${{ matrix.check }} + EXTRA_PIP_FLAGS: ${{ matrix.pip-flags }} + INSTALL_DEB_DEPENDENCIES: ${{ matrix.deb-depends }} + NIPYPE_EXTRAS: ${{ matrix.nipype-extras }} + INSTALL_TYPE: pip + CI_SKIP_TEST: 1 + + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Create virtual environment + run: tools/ci/create_venv.sh + - name: Build archive + run: | + source tools/ci/build_archive.sh + echo "ARCHIVE=$ARCHIVE" >> $GITHUB_ENV + - name: Install Debian dependencies + run: tools/ci/install_deb_dependencies.sh + if: ${{ matrix.os == 'ubuntu-latest' }} + - name: Install dependencies + run: tools/ci/install_dependencies.sh + - name: Install Nipype + run: tools/ci/install.sh + - name: Run tests + run: tools/ci/check.sh + if: ${{ matrix.check != 'skiptests' }} + - uses: codecov/codecov-action@v4 + with: + file: coverage.xml + token: ${{ secrets.CODECOV_TOKEN }} + if: ${{ always() }} + - name: Upload pytest test results + uses: actions/upload-artifact@v4 + with: + name: pytest-results-${{ matrix.os }}-${{ matrix.python-version }}-${{ 
matrix.deb-depends }} + path: test-results.xml + if: ${{ always() && matrix.check == 'test' }} + + publish: + runs-on: ubuntu-latest + environment: "Package deployment" + needs: [stable, test-package] + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') + steps: + - uses: actions/download-artifact@v4 + with: + name: dist + path: dist/ + - uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/tutorials.yml b/.github/workflows/tutorials.yml new file mode 100644 index 0000000000..46aa42d25c --- /dev/null +++ b/.github/workflows/tutorials.yml @@ -0,0 +1,76 @@ +name: Test tutorials + +on: + push: + branches: + - "rel/*" + +concurrency: + group: tutorials-${{ github.ref }} + cancel-in-progress: true + +permissions: {} +jobs: + tutorial: + runs-on: ubuntu-latest + env: + BRANCH_NAME: ${{ github.ref_name }} + steps: + - name: Start time + id: start + run: echo "::set-output name=start_time::$(date +'%Y-%m-%dT%H:%M:%S%z')" + - name: Trigger Nipype tutorial Github Action + run: | + set -x + curl -X POST \ + -H "Authorization: Bearer ${{ secrets.TUTORIAL_ACCESS_TOKEN }}" \ + -H "Accept: application/vnd.github+json" \ + https://api.github.com/repos/miykael/nipype_tutorial/actions/workflows/testing.yml/dispatches \ + -d '{"ref": "master", "inputs": {"nipype_branch": "'${BRANCH_NAME}'"}}' + sleep 10 + - name: Check Action was successfully dispatched + id: dispatched + run: | + START=${{ steps.start.outputs.start_time }} + RUN_ID=$(curl -s -H "Accept: application/vnd.github+json" \ + 'https://api.github.com/repos/miykael/nipype_tutorial/actions/runs?created=>'${START}'&per_page=1' \ + | jq -r '.workflow_runs[0].id') + + # fail if not extracted + [[ -n $RUN_ID ]] || exit 1 + echo "::set-output name=run_id::$RUN_ID" + - name: Check if action completed + timeout-minutes: 120 + run: | + RUN_ID=${{ steps.dispatched.outputs.run_id }} + while : + do + TIMESTAMP=$(date +'%Y-%m-%dT%H:%M:%S%z') + # check status every 5 minutes + STATUS=$(curl -s -H "Accept: application/vnd.github+json" \ + https://api.github.com/repos/miykael/nipype_tutorial/actions/runs/${RUN_ID} \ + | jq -r '.conclusion') + case $STATUS in + success) + echo "[$TIMESTAMP] Tutorial run $RUN_ID completed successfully." + exit 0 + ;; + failure) + echo "[$TIMESTAMP] Tutorial run $RUN_ID failed." + exit 1 + ;; + *) + echo "[$TIMESTAMP] Conclusion ($STATUS) is not yet complete" + sleep 300 + esac + done + - name: Cancel ongoing run if cancelled or failed + if: ${{ failure() || cancelled() }} + run: | + set -x + RUN_ID=${{ steps.dispatched.outputs.run_id }} + echo "Something went wrong, cancelling dispatched run" + curl -s -X POST \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{ secrets.TUTORIAL_ACCESS_TOKEN }}" \ + https://api.github.com/repos/miykael/nipype_tutorial/actions/runs/${RUN_ID}/cancel diff --git a/.gitignore b/.gitignore index 4213d07a68..b2556cb084 100644 --- a/.gitignore +++ b/.gitignore @@ -29,3 +29,5 @@ __pycache__/ .ipynb_checkpoints/ .ruby-version .pytest_cache +.vscode/ +venv/ diff --git a/.mailmap b/.mailmap index 6a13f46ee1..1f6bdee6fc 100644 --- a/.mailmap +++ b/.mailmap @@ -1,156 +1,214 @@ +Abel A. González Orozco +Aimi Watanabe Aimi Watanabe stymy -Aimi Watanabe stymy -Alejandro Tabas qTabs -Alejandro Tabas qtabs -Alejandro de la Vega adelavega -Alexander Schaefer Alexander Schaefer -Alexander Schaefer alexschaefer83 -Alexander Schaefer aschaefer -Alexandre M. Savio Alexandre M. 
S -Alexandre M. Savio Alexandre M. S -Alexandre M. Savio Alexandre Manhaes Savio -Andrey Chetverikov Andrey Chetverikov -Andrey Chetverikov achetverikov -Anisha Keshavan Anisha Keshavan -Anisha Keshavan akeshavan -Ariel Rokem arokem -Ariel Rokem arokem -Arman Eshaghi armaneshaghi -Ashely Gillman Ashley Gillman -Ashely Gillman ashgillman -Basille Pinsard bpinsard -Basille Pinsard bpinsard -Ben Cipollini Ben Cipollini -Benjamin Yvernault Benjamin Yvernault -Benjamin Yvernault byvernault -Blake Dewey Blake Dewey -Blake Dewey blakedewey -Blake Dewey blakedewey -Brendan Moloney moloney -Caroline Froehlich carolFrohlich -Chris Filo Gorgolewski Chris Filo Gorgolewski -Chris Filo Gorgolewski Chris Filo Gorgolewski -Chris Filo Gorgolewski Chris Filo Gorgolewski -Chris Filo Gorgolewski Chris Gorgolewski -Chris Filo Gorgolewski Krzysztof Gorgolewski -Chris Filo Gorgolewski filo -Chris Filo Gorgolewski filo -Chris Filo Gorgolewski filo -Christopher J. Markiewicz Chris Markiewicz -Christopher J. Markiewicz Christopher J. Johnson -Christopher J. Markiewicz Christopher J. Markiewicz -Christopher J. Markiewicz Christopher J. Markiewicz -Cindee Madison cindeem -Cindee Madison cindeem <> -Colin Buchanan Colin Buchanan -Colin Buchanan colinbuchanan -Daniel Brenner brennerd -Daniel Brenner brennerd11 -Daniel Clark dclark87 -Daniel Geisler daniel-ge -Daniel Ginsburg danginsburg -Daniel McNamee danmc -David Ellis David Ellis -David Ellis David Ellis -David Welch David Welch -Dimitri Papadopoulos Orfanos Dimitri Papadopoulos -Dmytro belevtsoff -Dylan Nielson Dylan -Elizabeth DuPre emdupre -Erik Ziegler Erik -Erik Ziegler Erik Ziegler -Erik Ziegler erik -Erik Ziegler erikz -Erik Ziegler swederik -Fernando Pérez-García Fernando -Franz Liem fliem -Franz Liem fliem -Fred Loney FredLoney -Gael Varoquaux GaelVaroquaux -Gael Varoquaux GaelVaroquaux -Gavin Cooper gjcooper -Gilles de Hollander Gilles86 -Hans Johnson Hans Johnson -Hans Johnson hjmjohnson -Horea Christian Horea Christian -Isaac Schwabacher ischwabacher -Jakub Kaczmarzyk jakubk -Jakub Kaczmarzyk kaczmarj -James Kent jdkent -Janosch Linkersdörfer Januzz -Jason Wong Jason -Jason Wong Jason W -Jason Wong Jason W -Jason Wong jason -Jason Wong jason-wg -Jens Kleesiek JensNRAD -Jessica Forbes jessicaforbes -Jérémy Guillon GUILLON Jeremy -Joerg Stadler Joerg Stadler -Joerg Stadler Jörg Stadler -Joke Durnez jokedurnez -Josh Warner JDWarner -Josh Warner Josh Warner (Mac) -Kai Schlamp medihack Jessica Forbes jessicaforbes -Katie Bottenhorn 62442katieb -Kesshi Jordan Kesshi Jordan -Kesshi Jordan Kesshi Jordan -Kesshi Jordan Kesshi Jordan -Kesshi Jordan Kesshi Jordan -Kesshi Jordan Kesshi Jordan -Kesshi Jordan Kesshi jordan -Kesshi Jordan kesshijordan -Kevin Sitek sitek -Kevin Sitek sitek -Leonie Lampe Leonie Lmape -Lukas Snoek Lukas Snoek -Marcel Falkiewicz Marcel Falkiewicz -Mathias Goncalves Mathias Goncalves -Mathias Goncalves mathiasg -Mathieu Dubois Mathieu Dubois -Mathieu Dubois duboism -Matteo Mancini matteomancini -Matteo Visconti dOC Matteo Visconti dOC -Matteo Visconti dOC mvdoc -Michael Clark Clark -Michael Dayan Michael -Michael Dayan Michael +Alejandro Tabas +Alejandro Tabas +Alejandro de la Vega +Alejandro de la Vega +Alexander Schaefer +Alexander Schaefer +Alexander Schaefer +Alexandre M. Savio +Alexandre M. 
Savio +Andrea Dell'Orco +Andrea Dell'Orco +Andrew Floren +Andrey Chetverikov +Andrey Chetverikov +Anibal Sólon Heinsfeld +Anisha Keshavan +Anisha Keshavan +Anna Doll <45283972+AnnaD15@users.noreply.github.com> +Ariel Rokem +Ariel Rokem +Arman Eshaghi +Ashely Gillman +Avneet Kaur +Avneet Kaur +Basille Pinsard +Basille Pinsard +Ben Cipollini +Benjamin Acland +Benjamin Meyers <34044274+BenjaminMey@users.noreply.github.com> +Benjamin Meyers <34044274+BenjaminMey@users.noreply.github.com> BenjaminMey +Benjamin Yvernault +Benjamin Yvernault +Blake Dewey +Blake Dewey +Blake Dewey +Brendan Moloney +Caroline Froehlich +Christopher J. Markiewicz +Christopher J. Markiewicz +Christopher J. Markiewicz +Christopher John Steele +Cindee Madison +Colin Buchanan +Colin Buchanan +Colin Buchanan +Daniel Brenner +Daniel Clark +Daniel Geisler +Daniel Geisler <3453485+daniel-ge@users.noreply.github.com> +Daniel Geisler +Daniel Ginsburg +Daniel McNamee +David Ellis +David Ellis +David Mordom +David Welch +Dimitri Papadopoulos Orfanos +Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> +Dmytro Belevtsoff +Dorian Vogel +Dylan M. Nielson +Dylan M. Nielson +Eduard Ort +Elizabeth DuPre +Erik Ziegler +Erik Ziegler +Erik Ziegler +Fabio Bernardoni +Fabio Bernardoni +Feilong Ma +Fernando Pérez-García +Franz Liem +Franz Liem +Fred Loney +Gael Varoquaux +Gal Ben-Zvi +Gavin Cooper +Ghislain Vaillant +Ghislain Vaillant +Gilles de Hollander +Gio Piantoni +Guillaume Flandin +Hans Johnson +Henry Jones +Horea Christian +Hrvoje Stojic +Isaac Schwabacher +Jakub Kaczmarzyk +James Kent +James Kent +James Kent +Janosch Linkersdörfer +Jason Wong +Jason Wong +Jens Kleesiek +Jessica Forbes +Jérémy Guillon +Joerg Stadler +Joerg Stadler +Joerg Stadler +John A. Lee +John A. Lee +Joke Durnez +Jordi Huguet +Josh Warner +Junhao WEN +Kai Schlamp +Katherine Bottenhorn +Kesshi Jordan +Kesshi Jordan +Kesshi Jordan +Kesshi Jordan +Kesshi Jordan +Kesshi Jordan +Kevin Sitek +Kevin Sitek +Koen Helwegen +Kornelius Podranski +Kristofer Montazeri +Krzysztof J. Gorgolewski +Krzysztof J. Gorgolewski +Krzysztof J. Gorgolewski +Krzysztof J. Gorgolewski +Krzysztof J. Gorgolewski +Krzysztof J. Gorgolewski +Krzysztof J. 
Gorgolewski +Kshitij Chawla +Leonie Lampe +Lukas Snoek +Marcel Falkiewicz +Maria de Fatima Dias +Maria de Fatima Dias +Martin Norgaard +Martin Perez-Guevara +Mathias Goncalves +Mathias Goncalves +Mathieu Dubois +Mathieu Dubois +Matteo Mancini +Matteo Visconti di Oleggio Castello +Matteo Visconti di Oleggio Castello +Matthew Cieslak +Michael Clark +Michael Dayan +Michael Dayan +Michael Dayan Michael Dayan mick-d -Michael Notter miykael -Michael Waskom Michael Waskom -Michael Waskom Michael Waskom -Michael Waskom Michael Waskom -Michael Waskom mwaskom -Michael Waskom mwaskom -Michael Waskom mwaskom -Oliver Contier oliver-contier -Oscar Esteban Oscar Esteban -Oscar Esteban oesteban -Pablo Polosecki pipolose -Pablo Polosecki pipolose -Ranjit Khanuja RanjitK -Ross Markello Ross Markello -Russell Poldrack Russ Poldrack -Russell Poldrack poldrack -Salma Bougacha Salma BOUGACHA -Salma Bougacha salma -Salma Bougacha salma1601 -Satrajit Ghosh Satrajit Ghosh -Sebastian Urchs sebastian -Sharad Sikka ssikka -Shariq Iqbal shariqiqbal2810 -Shariq Iqbal shariqiqbal2810 +Michael Joseph +Michael Joseph +Michael Philipp Notter +Michael Philipp Notter +Michael Waskom +Michael Waskom +Michael Waskom +Miguel Molina-Romero +Murat Bilgel +Nat Lee +Ole Numssen +Oliver Contier +Olivia Stanley +Oscar Esteban +Oscar Esteban +Pablo Polosecki +Pablo Polosecki +Paul Kuntke +Paul Kuntke +Paul Sharp +Ranjit Khanuja +Rastko Ćirić +Rastko Ćirić +Rastko Ćirić +Raunak Jalan +Raunak Jalan <41023976+RaunakJalan@users.noreply.github.com> +Ross Markello +Russell Poldrack +Russell Poldrack +Salma Bougacha +Sami Kristian Andberg +Satrajit Ghosh +Sebastian Urchs +Serge Koudoro +Sharad Sikka +Shariq Iqbal +Shariq Iqbal +Shoshana Berleant Shoshana Berleant Shoshana Berleant -Shoshana Berleant Shoshana Berleant -Simon R Simon Rothmeier -Siqi Liu siqi liu -Siqi Liu sql -Steven Giavasis Steven Giavasis -Steven Giavasis sgiavasis -Steven Giavasis sgiavasis -Tristan Glatard Tristan Glatard -Victor Saase vsaase -William Triplett William Triplett -Wolfgang Pauli Wolfgang Pauli -Xiangzhen Kong bnucon -Yaroslav Halchenko Yaroslav Halchenko +Shoshana Berleant Ubuntu +Simon Rothmei +Simon Rothmei +Sin Kim +Sin Kim +Siqi Liu +Steven Giavasis +Steven Giavasis +Steven Giavasis +Steven Tilley +Sulantha Mathotaarachchi +Sunjae Shim <85246533+sjshim@users.noreply.github.com> +Tim Robert-Fitzgerald +Tom Close +Tom Close +Tristan Glatard +Victor Férat +Victor Férat +Victor Férat +Victor Saase +Weijie Huang +William Triplett +Wolfgang Pauli +Xiangzhen Kong +Yaroslav Halchenko diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..c1bda308da --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,18 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + - repo: https://github.com/psf/black + rev: 24.2.0 + hooks: + - id: black + - repo: https://github.com/codespell-project/codespell + rev: v2.2.6 + hooks: + - id: codespell diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 0000000000..33b5e91a58 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,30 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the OS, Python version and other tools you might 
need +build: + os: ubuntu-22.04 + tools: + python: "3.10" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: doc/conf.py + +# Optionally build your docs in additional formats such as PDF and ePub +formats: + - htmlzip + +# Optional but recommended, declare the Python requirements required +# to build your documentation +# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html +python: + install: + - requirements: doc/requirements.txt + - method: pip + path: . + extra_requirements: + - doc diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 871e176ff0..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,102 +0,0 @@ -dist: xenial -sudo: true - -language: python -# our build matrix -python: -- 2.7 -- 3.5 -- 3.6 -- 3.7 - -# NOTE: Any changes to the matrix section should be duplicated below for -# Python 3.4 -env: - global: - - EXTRA_WHEELS="https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" - - PRE_WHEELS="https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com" - - EXTRA_PIP_FLAGS="--find-links=$EXTRA_WHEELS" - matrix: - - INSTALL_DEB_DEPENDECIES=true - NIPYPE_EXTRAS="doc,tests,nipy,profiler" - CI_SKIP_TEST=1 - - INSTALL_DEB_DEPENDECIES=false - NIPYPE_EXTRAS="doc,tests,profiler" - CI_SKIP_TEST=1 - - INSTALL_DEB_DEPENDECIES=true - NIPYPE_EXTRAS="doc,tests,nipy,profiler,duecredit,ssh" - CI_SKIP_TEST=1 - - INSTALL_DEB_DEPENDECIES=true - NIPYPE_EXTRAS="doc,tests,nipy,profiler" - EXTRA_PIP_FLAGS="--pre $EXTRA_PIP_FLAGS --find-links $PRE_WHEELS" - CI_SKIP_TEST=1 - -# Python 3.4 is only available on Trusty, so we need to duplicate the -# env matrix specifically for it. -matrix: - include: - - python: 3.4 - dist: trusty - env: - - INSTALL_DEB_DEPENDECIES=true - NIPYPE_EXTRAS="doc,tests,nipy,profiler" - CI_SKIP_TEST=1 - - python: 3.4 - dist: trusty - env: - - INSTALL_DEB_DEPENDECIES=false - NIPYPE_EXTRAS="doc,tests,profiler" - CI_SKIP_TEST=1 - - python: 3.4 - dist: trusty - env: - - INSTALL_DEB_DEPENDECIES=true - NIPYPE_EXTRAS="doc,tests,nipy,profiler,duecredit,ssh" - CI_SKIP_TEST=1 - - python: 3.4 - dist: trusty - env: - - INSTALL_DEB_DEPENDECIES=true - NIPYPE_EXTRAS="doc,tests,nipy,profiler" - EXTRA_PIP_FLAGS="--pre $EXTRA_PIP_FLAGS --find-links $PRE_WHEELS" - CI_SKIP_TEST=1 - allow_failures: - - python: 2.7 - env: INSTALL_DEB_DEPENDECIES=true NIPYPE_EXTRAS="doc,tests,nipy,profiler" EXTRA_PIP_FLAGS="--pre $EXTRA_PIP_FLAGS --find-links $PRE_WHEELS" CI_SKIP_TEST=1 - -addons: - apt: - packages: - - xvfb - - fusefat - - graphviz - -cache: - directories: - - ${HOME}/.cache - -before_install: -- if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; sudo ln -s /run/shm /dev/shm; fi -- travis_retry bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh); -- if $INSTALL_DEB_DEPENDECIES; then - travis_retry sudo apt-get -y update && - travis_retry sudo apt-get install -y -qq fsl afni elastix fsl-atlases; - fi; -- if $INSTALL_DEB_DEPENDECIES; then - source /etc/fsl/fsl.sh; - source /etc/afni/afni.sh; - export FSLOUTPUTTYPE=NIFTI_GZ; - fi; - -- travis_retry pip install -r requirements.txt -- travis_retry pip install grabbit==0.1.2 -- travis_retry git clone -b 0.6.5 https://github.com/INCF/pybids.git ${HOME}/pybids && pip install -e ${HOME}/pybids - -install: -- travis_retry pip install $EXTRA_PIP_FLAGS -e .[$NIPYPE_EXTRAS] - -script: -- py.test -v --cov nipype --cov-config .coveragerc --cov-report xml:cov.xml -c nipype/pytest.ini --doctest-modules nipype - 
-after_script: -- codecov --file cov.xml --flags unittests -e TRAVIS_JOB_NUMBER diff --git a/.zenodo.json b/.zenodo.json index 6b0a6843fa..25b5aaa911 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -1,10 +1,5 @@ { "creators": [ - { - "affiliation": "Department of Psychology, Stanford University", - "name": "Gorgolewski, Krzysztof J.", - "orcid": "0000-0003-3321-7583" - }, { "affiliation": "Department of Psychology, Stanford University", "name": "Esteban, Oscar", @@ -16,54 +11,60 @@ "orcid": "0000-0002-6533-164X" }, { - "affiliation": "Independent", - "name": "Ziegler, Erik", - "orcid": "0000-0003-1857-8129" + "name": "Burns, Christopher" }, { - "affiliation": "The University of Iowa", - "name": "Ellis, David Gage", - "orcid": "0000-0002-3718-6836" + "affiliation": "MIT", + "name": "Goncalves, Mathias", + "orcid": "0000-0002-7252-7771" }, { "affiliation": "MIT", "name": "Jarecka, Dorota", + "orcid": "0000-0001-8282-2988" + }, + { + "affiliation": "Independent", + "name": "Ziegler, Erik", "orcid": "0000-0003-1857-8129" }, { - "affiliation": "The Laboratory for Investigative Neurophysiology (The LINE), Department of Radiology and Department of Clinical Neurosciences, Lausanne, Switzerland; Center for Biomedical Imaging (CIBM), Lausanne, Switzerland", - "name": "Notter, Michael Philipp", - "orcid": "0000-0002-5866-047X" + "name": "Berleant, Shoshana" }, { - "affiliation": "University of Iowa", - "name": "Johnson, Hans", - "orcid": "0000-0001-9513-2660" + "affiliation": "The University of Iowa", + "name": "Ellis, David Gage", + "orcid": "0000-0002-3718-6836" }, { - "name": "Burns, Christopher" + "name": "Pinsard, Basile" }, { - "affiliation": "Klinikum rechts der Isar, TUM. ACPySS", - "name": "Manh\u00e3es-Savio, Alexandre", - "orcid": "0000-0002-6608-6885" + "name": "Madison, Cindee" }, { - "name": "Hamalainen, Carlo", - "orcid": "0000-0001-7655-3830" + "affiliation": "Department of Psychology, Stanford University", + "name": "Waskom, Michael" }, { - "name": "Yvernault, Benjamin" + "affiliation": "The Laboratory for Investigative Neurophysiology (The LINE), Department of Radiology and Department of Clinical Neurosciences, Lausanne, Switzerland; Center for Biomedical Imaging (CIBM), Lausanne, Switzerland", + "name": "Notter, Michael Philipp", + "orcid": "0000-0002-5866-047X" }, { - "affiliation": "Florida International University", - "name": "Salo, Taylor", - "orcid": "0000-0001-9813-3167" + "affiliation": "Developer", + "name": "Clark, Daniel", + "orcid": "0000-0002-8121-8954" }, { - "affiliation": "MIT", - "name": "Goncalves, Mathias", - "orcid": "0000-0002-7252-7771" + "affiliation": "Klinikum rechts der Isar, TUM. 
ACPySS", + "name": "Manhães-Savio, Alexandre", + "orcid": "0000-0002-6608-6885" + }, + { + "affiliation": "UC Berkeley", + "name": "Clark, Dav", + "orcid": "0000-0002-3982-4416" }, { "affiliation": "University of California, San Francisco", @@ -71,75 +72,71 @@ "orcid": "0000-0001-6313-0580" }, { - "affiliation": "Department of Psychology, Stanford University", - "name": "Waskom, Michael" + "affiliation": "Mayo Clinic, Neurology, Rochester, MN, USA", + "name": "Dayan, Michael", + "orcid": "0000-0002-2666-0969" }, { - "affiliation": "Shattuck Lab, UCLA Brain Mapping Center", - "name": "Wong, Jason" + "affiliation": "Dartmouth College: Hanover, NH, United States", + "name": "Halchenko, Yaroslav O.", + "orcid": "0000-0003-3456-2493" }, { - "name": "Modat, Marc" + "name": "Loney, Fred" }, { - "name": "Loney, Fred" + "affiliation": "Department of Psychology, Stanford University", + "name": "Norgaard, Martin", + "orcid": "0000-0003-2131-5688" }, { - "affiliation": "Developer", - "name": "Clark, Daniel", - "orcid": "0000-0002-8121-8954" + "affiliation": "Florida International University", + "name": "Salo, Taylor", + "orcid": "0000-0001-9813-3167" }, { - "affiliation": "National Institute of Mental Health", - "name": "Nielson, Dylan M.", - "orcid": "0000-0003-4613-6643" + "affiliation": "University of Iowa", + "name": "Johnson, Hans", + "orcid": "0000-0001-9513-2660" }, { "affiliation": "Department of Electrical and Computer Engineering, Johns Hopkins University", "name": "Dewey, Blake E", "orcid": "0000-0003-4554-5058" }, - { - "name": "Madison, Cindee" - }, { "affiliation": "Molecular Imaging Research Center, CEA, France", "name": "Bougacha, Salma" }, { - "affiliation": "National Institutes of Health", - "name": "Clark, Michael G. " - }, - { - "affiliation": "Dartmouth College", - "name": "Visconti di Oleggio Castello, Matteo", - "orcid": "0000-0001-7931-5272" + "affiliation": "UC Berkeley - UCSF Graduate Program in Bioengineering", + "name": "Keshavan, Anisha", + "orcid": "0000-0003-3554-043X" }, { - "affiliation": "Mayo Clinic, Neurology, Rochester, MN, USA", - "name": "Dayan, Michael", - "orcid": "0000-0002-2666-0969" + "name": "Yvernault, Benjamin" }, { - "affiliation": "UC Berkeley", - "name": "Clark, Dav", - "orcid": "0000-0002-3982-4416" + "affiliation": "Institute for Biomedical Engineering, ETH and University of Zurich", + "name": "Christian, Horea", + "orcid": "0000-0001-7037-2449" }, { - "affiliation": "UC Berkeley - UCSF Graduate Program in Bioengineering", - "name": "Keshavan, Anisha", - "orcid": "0000-0003-3554-043X" + "name": "Hamalainen, Carlo", + "orcid": "0000-0001-7655-3830" }, { - "name": "Pinsard, Basile" + "affiliation": "Stanford University", + "name": "Ćirić , Rastko", + "orcid": "0000-0001-6347-7939" }, { - "affiliation": "CNRS LTCI, Telecom ParisTech, Universit\u00e9 Paris-Saclay", - "name": "Gramfort, Alexandre", - "orcid": "0000-0001-9791-4404" + "name": "Dubois, Mathieu" }, { - "name": "Berleant, Shoshana" + "affiliation": "The Centre for Addiction and Mental Health", + "name": "Joseph, Michael", + "orcid": "0000-0002-0068-230X" }, { "affiliation": "UC San Diego", @@ -147,24 +144,23 @@ "orcid": "0000-0002-7782-0790" }, { - "affiliation": "Dartmouth College: Hanover, NH, United States", - "name": "Halchenko, Yaroslav O.", - "orcid": "0000-0003-3456-2493" + "affiliation": "Holland Bloorview Kids Rehabilitation Hospital", + "name": "Tilley II, Steven", + "orcid": "0000-0003-4853-5082" }, { - "affiliation": "Montreal Neurological Institute and Hospital", - "name": 
"Markello, Ross", - "orcid": "0000-0003-1057-1336" + "affiliation": "Dartmouth College", + "name": "Visconti di Oleggio Castello, Matteo", + "orcid": "0000-0001-7931-5272" }, { - "affiliation": "The University of Washington eScience Institute", - "name": "Rokem, Ariel", - "orcid": "0000-0003-0679-1985" + "affiliation": "University of Texas at Austin", + "name": "De La Vega, Alejandro", + "orcid": "0000-0001-9062-3778" }, { - "affiliation": "Montreal Neurological Institute and Hospital", - "name": "DuPre, Elizabeth", - "orcid": "0000-0003-1358-196X" + "affiliation": "Shattuck Lab, UCLA Brain Mapping Center", + "name": "Wong, Jason" }, { "affiliation": "MIT", @@ -172,418 +168,757 @@ "orcid": "0000-0002-5544-7577" }, { - "name": "Moloney, Brendan" + "affiliation": "Research Group Neuroanatomy and Connectivity, Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig, Germany", + "name": "Huntenburg, Julia M.", + "orcid": "0000-0003-0579-9811" }, { - "affiliation": "INRIA", - "name": "Varoquaux, Gael", - "orcid": "0000-0003-1076-5122" + "affiliation": "National Institutes of Health", + "name": "Clark, Michael G. " }, { - "affiliation": "Institute for Biomedical Engineering, ETH and University of Zurich", - "name": "Horea, Christian", - "orcid": "0000-0001-7037-2449" + "affiliation": "Neuroscience Program, University of Iowa", + "name": "Kent, James D.", + "orcid": "0000-0002-4892-2659" }, { - "affiliation": "Athena EPI, Inria Sophia-Antipolis", - "name": "Wassermann , Demian", - "orcid": "0000-0001-5194-6056" + "affiliation": "Concordia University", + "name": "Benderoff, Erin" + }, + { + "name": "Erickson, Drew" + }, + { + "affiliation": "CIBIT, UC", + "name": "Dias, Maria de Fatima", + "orcid": "0000-0001-8878-1750" + }, + { + "name": "Moloney, Brendan" }, { "affiliation": "Otto-von-Guericke-University Magdeburg, Germany", "name": "Hanke, Michael", "orcid": "0000-0001-6398-6370" }, + { + "affiliation": "Child Mind Institute", + "name": "Giavasis, Steven" + }, + { + "affiliation": "SRI International", + "name": "Nichols, B. 
Nolan", + "orcid": "0000-0003-1099-3328" + }, + { + "name": "Tungaraza, Rosalia" + }, + { + "affiliation": "Charitè Universitätsmedizin Berlin, Germany", + "name": "Dell'Orco, Andrea", + "orcid": "0000-0002-3964-8360" + }, + { + "affiliation": "Child Mind Institute", + "name": "Frohlich, Caroline" + }, + { + "affiliation": "Athena EPI, Inria Sophia-Antipolis", + "name": "Wassermann, Demian", + "orcid": "0000-0001-5194-6056" + }, { "affiliation": "Vrije Universiteit, Amsterdam", - "name": "Gilles de Hollander", + "name": "de Hollander, Gilles", "orcid": "0000-0003-1988-5091" }, + { + "affiliation": "Indiana University, IN, USA", + "name": "Koudoro, Serge" + }, + { + "affiliation": "University College London", + "name": "Eshaghi, Arman", + "orcid": "0000-0002-6652-3512" + }, + { + "name": "Millman, Jarrod" + }, + { + "affiliation": "University College London", + "name": "Mancini, Matteo", + "orcid": "0000-0001-7194-4568" + }, + { + "affiliation": "University of Sydney", + "name": "Close, Thomas", + "orcid": "0000-0002-4160-2134" + }, + { + "affiliation": "National Institute of Mental Health", + "name": "Nielson, Dylan M.", + "orcid": "0000-0003-4613-6643" + }, + { + "affiliation": "INRIA", + "name": "Varoquaux, Gael", + "orcid": "0000-0003-1076-5122" + }, + { + "affiliation": "Charite Universitatsmedizin Berlin, Germany", + "name": "Waller, Lea", + "orcid": "0000-0002-3239-6957" + }, + { + "name": "Watanabe, Aimi" + }, { "name": "Mordom, David" }, + { + "affiliation": "ARAMIS LAB, Brain and Spine Institute (ICM), Paris, France.", + "name": "Guillon, Jérémy", + "orcid": "0000-0002-2672-7510" + }, + { + "affiliation": "Penn Statistics in Imaging and Visualization Endeavor, University of Pennsylvania", + "name": "Robert-Fitzgerald, Timothy", + "orcid": "0000-0001-8303-8001" + }, + { + "affiliation": "Donders Institute for Brain, Cognition and Behavior, Center for Cognitive Neuroimaging", + "name": "Chetverikov, Andrey", + "orcid": "0000-0003-2767-6310" + }, + { + "affiliation": "The University of Washington eScience Institute", + "name": "Rokem, Ariel", + "orcid": "0000-0003-0679-1985" + }, + { + "affiliation": "Washington University in St Louis", + "name": "Acland, Benjamin", + "orcid": "0000-0001-6392-6634" + }, + { + "affiliation": "Division of Psychological and Social Medicine and Developmental Neuroscience, Faculty of Medicine, Technische Universität Dresden, Dresden, Germany", + "name": "Bernardoni, Fabio", + "orcid": "0000-0002-5112-405X" + }, + { + "name": "Forbes, Jessica" + }, + { + "affiliation": "Montreal Neurological Institute and Hospital", + "name": "Markello, Ross", + "orcid": "0000-0003-1057-1336" + }, { "affiliation": "Australian eHealth Research Centre, Commonwealth Scientific and Industrial Research Organisation; University of Queensland", "name": "Gillman, Ashley", "orcid": "0000-0001-9130-1092" }, + { + "affiliation": "Sagol School of Neuroscience, Tel Aviv University", + "name": "Kepler, Gal", + "orcid": "0000-0002-5655-9423" + }, + { + "affiliation": "State Key Laboratory of Cognitive Neuroscience and Learning & IDG/McGovern Institute for Brain Research, Beijing Normal University, Beijing, China; Max Planck Institute for Psycholinguistics, Nijmegen, the Netherlands", + "name": "Kong, Xiang-Zhen", + "orcid": "0000-0002-0805-1350" + }, + { + "affiliation": "Division of Psychological and Social Medicine and Developmental Neuroscience, Faculty of Medicine, Technische Universität Dresden, Dresden, Germany", + "name": "Geisler, Daniel", + "orcid": "0000-0003-2076-5329" + }, + 
{ + "name": "Salvatore, John" + }, + { + "affiliation": "CNRS LTCI, Telecom ParisTech, Université Paris-Saclay", + "name": "Gramfort, Alexandre", + "orcid": "0000-0001-9791-4404" + }, + { + "affiliation": "Department of Psychology, University of Bielefeld, Bielefeld, Germany.", + "name": "Doll, Anna", + "orcid": "0000-0002-0799-0831" + }, { "name": "Buchanan, Colin" }, { - "name": "Tungaraza, Rosalia" + "affiliation": "Montreal Neurological Institute and Hospital", + "name": "DuPre, Elizabeth", + "orcid": "0000-0003-1358-196X" }, { - "affiliation": "California Institute of Technology", - "name": "Pauli, Wolfgang M.", - "orcid": "0000-0002-0966-0254" + "affiliation": "The University of Sydney", + "name": "Liu, Siqi" + }, + { + "affiliation": "National University Singapore", + "name": "Schaefer, Alexander", + "orcid": "0000-0001-6488-4739" + }, + { + "affiliation": "CEA", + "name": "Papadopoulos Orfanos, Dimitri", + "orcid": "0000-0002-1242-8990" + }, + { + "affiliation": "University Hospital Heidelberg, Germany", + "name": "Kleesiek, Jens" }, { "affiliation": "Nathan s Kline institute for psychiatric research", "name": "Sikka, Sharad" }, + { + "name": "Schwartz, Yannick" + }, + { + "affiliation": "Medical College of Wisconsin", + "name": "Espana, Lezlie", + "orcid": "0000-0002-6466-4653" + }, + { + "affiliation": "The University of Iowa", + "name": "Ghayoor, Ali", + "orcid": "0000-0002-8858-1254" + }, + { + "affiliation": "NIMH IRP", + "name": "Lee, John A.", + "orcid": "0000-0001-5884-4247" + }, + { + "name": "Mattfeld, Aaron" + }, + { + "affiliation": "University of Washington", + "name": "Richie-Halford, Adam", + "orcid": "0000-0001-9276-9084" + }, + { + "affiliation": "University of Zurich", + "name": "Liem, Franz", + "orcid": "0000-0003-0646-4810" + }, + { + "affiliation": "ARAMIS Lab, Paris Brain Institute", + "name": "Vaillant, Ghislain", + "orcid": "0000-0003-0267-3033" + }, + { + "affiliation": "Neurospin/Unicog/Inserm/CEA", + "name": "Perez-Guevara, Martin Felipe", + "orcid": "0000-0003-4497-861X" + }, + { + "name": "Heinsfeld, Anibal Sólon", + "orcid": "0000-0002-2050-0614" + }, + { + "name": "Haselgrove, Christian" + }, + { + "affiliation": "Department of Psychology, Stanford University; Parietal, INRIA", + "name": "Durnez, Joke", + "orcid": "0000-0001-9030-2202" + }, + { + "affiliation": "MPI CBS Leipzig, Germany", + "name": "Lampe, Leonie" + }, + { + "name": "Poldrack, Russell" + }, + { + "affiliation": "1 McGill Centre for Integrative Neuroscience (MCIN), Ludmer Centre for Neuroinformatics and Mental Health, Montreal Neurological Institute (MNI), McGill University, Montréal, 3801 University Street, WB-208, H3A 2B4, Québec, Canada. 
2 University of Lyon, CNRS, INSERM, CREATIS., Villeurbanne, 7, avenue Jean Capelle, 69621, France.", + "name": "Glatard, Tristan", + "orcid": "0000-0003-2620-5883" + }, + { + "affiliation": "Sagol School of Neuroscience, Tel Aviv University", + "name": "Baratz, Zvi", + "orcid": "0000-0001-7159-1387" + }, + { + "affiliation": "Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig, Germany.", + "name": "Tabas, Alejandro", + "orcid": "0000-0002-8643-1543" + }, + { + "name": "Cumba, Chad" + }, { "affiliation": "University College London", - "name": "Mancini, Matteo", - "orcid": "0000-0001-7194-4568" + "name": "Pérez-García, Fernando", + "orcid": "0000-0001-9090-3024" + }, + { + "name": "Blair, Ross" + }, + { + "affiliation": "Duke University", + "name": "Iqbal, Shariq", + "orcid": "0000-0003-2766-8425" + }, + { + "affiliation": "University of Iowa", + "name": "Welch, David" + }, + { + "affiliation": "Max Planck Institute for Human Cognitive and Brain Sciences", + "name": "Contier, Oliver", + "orcid": "0000-0002-2983-4709" + }, + { + "affiliation": "Department of Psychology, Stanford University", + "name": "Triplett, William", + "orcid": "0000-0002-9546-1306" + }, + { + "affiliation": "Child Mind Institute", + "name": "Craddock, R. Cameron", + "orcid": "0000-0002-4950-1303" + }, + { + "name": "Correa, Carlos" + }, + { + "affiliation": "Leibniz Institute for Neurobiology", + "name": "Stadler, Jörg", + "orcid": "0000-0003-4313-129X" + }, + { + "affiliation": "Mayo Clinic", + "name": "Warner, Joshua", + "orcid": "0000-0003-3579-4835" + }, + { + "affiliation": "Yale University; New Haven, CT, United States", + "name": "Sisk, Lucinda M.", + "orcid": "0000-0003-4900-9770" + }, + { + "name": "Falkiewicz, Marcel" + }, + { + "affiliation": "University of Illinois Urbana Champaign", + "name": "Sharp, Paul" + }, + { + "name": "Rothmei, Simon" + }, + { + "affiliation": "Korea Advanced Institute of Science and Technology", + "name": "Kim, Sin", + "orcid": "0000-0003-4652-3758" + }, + { + "name": "Weinstein, Alejandro" }, { - "name": "Forbes, Jessica" + "affiliation": "University of Pennsylvania", + "name": "Kahn, Ari E.", + "orcid": "0000-0002-2127-0507" }, { - "affiliation": "Duke University", - "name": "Iqbal, Shariq", - "orcid": "0000-0003-2766-8425" + "affiliation": "Harvard University - Psychology", + "name": "Kastman, Erik", + "orcid": "0000-0001-7221-9042" }, { - "name": "Schwartz, Yannick" + "affiliation": "Florida International University", + "name": "Bottenhorn, Katherine", + "orcid": "0000-0002-7796-8795" }, { - "affiliation": "University College London", - "name": "Malone, Ian B.", - "orcid": "0000-0001-7512-7856" + "affiliation": "GIGA Institute", + "name": "Grignard, Martin", + "orcid": "0000-0001-5549-1861" }, { - "name": "Dubois, Mathieu" + "affiliation": "Boston University", + "name": "Perkins, L. 
Nathan" }, { - "affiliation": "Child Mind Institute", - "name": "Frohlich, Caroline" + "name": "Zhou, Dale" }, { - "affiliation": "University of Iowa", - "name": "Welch, David" + "name": "Bielievtsov, Dmytro", + "orcid": "0000-0003-3846-7696" }, { - "name": "Kent, James" + "affiliation": "University of Newcastle, Australia", + "name": "Cooper, Gavin", + "orcid": "0000-0002-7186-5293" }, { - "name": "Watanabe, Aimi" + "affiliation": "Max Planck UCL Centre for Computational Psychiatry and Ageing Research, University College London", + "name": "Stojic, Hrvoje", + "orcid": "0000-0002-9699-9052" }, { - "affiliation": "Research Group Neuroanatomy and Connectivity, Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig, Germany", - "name": "Huntenburg, Julia M.", - "orcid": "0000-0003-0579-9811" + "name": "Hui Qian, Tan" }, { - "name": "Cumba, Chad" + "affiliation": "German Institute for International Educational Research", + "name": "Linkersdörfer, Janosch", + "orcid": "0000-0002-1577-1233" }, { - "affiliation": "SRI International", - "name": "Nichols, B. Nolan", - "orcid": "0000-0003-1099-3328" + "name": "Renfro, Mandy" }, { - "affiliation": "University College London", - "name": "Eshaghi, Arman", - "orcid": "0000-0002-6652-3512" + "name": "Hinds, Oliver" }, { - "name": "Ginsburg, Daniel" + "affiliation": "Dept of Medical Biophysics, Univeristy of Western Ontario", + "name": "Stanley, Olivia" }, { - "affiliation": "National University Singapore", - "name": "Schaefer, Alexander", - "orcid": "0000-0001-6488-4739" + "name": "Küttner, René" }, { - "affiliation": "Harvard University - Psychology", - "name": "Kastman, Erik", - "orcid": "0000-0001-7221-9042" + "affiliation": "California Institute of Technology", + "name": "Pauli, Wolfgang M.", + "orcid": "0000-0002-0966-0254" }, { - "affiliation": "Washington University in St Louis", - "name": "Acland, Benjamin", - "orcid": "0000-0001-6392-6634" + "affiliation": "Weill Cornell Medicine", + "name": "Xie, Xihe", + "orcid": "0000-0001-6595-2473" }, { - "affiliation": "University of Zurich", - "name": "Liem, Franz", - "orcid": "0000-0003-0646-4810" + "affiliation": "NIMH, Scientific and Statistical Computing Core", + "name": "Glen, Daniel", + "orcid": "0000-0001-8456-5647" }, { - "affiliation": "UniversityHospital Heidelberg, Germany", - "name": "Kleesiek, Jens" + "affiliation": "Florida International University", + "name": "Kimbler, Adam", + "orcid": "0000-0001-5885-9596" }, { - "name": "Erickson, Drew" + "affiliation": "University of Pittsburgh", + "name": "Meyers, Benjamin", + "orcid": "0000-0001-9137-4363" }, { - "affiliation": "Child Mind Institute", - "name": "Giavasis, Steven" + "name": "Tarbert, Claire" }, { - "name": "Correa, Carlos" + "name": "Ginsburg, Daniel" }, { - "name": "Ghayoor, Ali" + "name": "Haehn, Daniel" }, { - "name": "K\u00fcttner, Ren\u00e9" + "affiliation": "Max Planck Research Group for Neuroanatomy & Connectivity, Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig, Germany", + "name": "Margulies, Daniel S.", + "orcid": "0000-0002-8880-9204" }, { - "name": "Millman, Jarrod" + "affiliation": "CNRS, UMS3552 IRMaGe", + "name": "Condamine, Eric", + "orcid": "0000-0002-9533-3769" }, { - "name": "Lai, Jeff" + "affiliation": "Dartmouth College", + "name": "Ma, Feilong", + "orcid": "0000-0002-6838-3971" }, { - "name": "Zhou, Dale" + "affiliation": "University College London", + "name": "Malone, Ian B.", + "orcid": "0000-0001-7512-7856" }, { - "name": "Blair, Ross" + "affiliation": "University of 
Amsterdam", + "name": "Snoek, Lukas", + "orcid": "0000-0001-8972-204X" }, { - "name": "Haselgrove, Christian" + "name": "Brett, Matthew" }, { - "affiliation": "1 McGill Centre for Integrative Neuroscience (MCIN), Ludmer Centre for Neuroinformatics and Mental Health, Montreal Neurological Institute (MNI), McGill University, Montr\u00e9al, 3801 University Street, WB-208, H3A 2B4, Qu\u00e9bec, Canada. 2 University of Lyon, CNRS, INSERM, CREATIS., Villeurbanne, 7, avenue Jean Capelle, 69621, France.", - "name": "Glatard, Tristan", - "orcid": "0000-0003-2620-5883" + "affiliation": "Department of Neuropsychiatry, University of Pennsylvania", + "name": "Cieslak, Matthew", + "orcid": "0000-0002-1931-4734" }, { - "name": "Renfro, Mandy" + "name": "Hallquist, Michael" }, { - "affiliation": "The University of Sydney", - "name": "Liu, Siqi" + "affiliation": "Technical University Munich", + "name": "Molina-Romero, Miguel", + "orcid": "0000-0001-8054-0426" }, { - "affiliation": "University of Pennsylvania", - "name": "Kahn, Ari E.", - "orcid": "0000-0002-2127-0507" + "affiliation": "National Institute on Aging, Baltimore, MD, USA", + "name": "Bilgel, Murat", + "orcid": "0000-0001-5042-7422" }, { - "affiliation": "University College London", - "name": "P\u00e9rez-Garc\u00eda, Fernando", - "orcid": "0000-0001-9090-3024" + "name": "Lee, Nat", + "orcid": "0000-0001-9308-9988" }, { - "affiliation": "Department of Psychology, Stanford University", - "name": "Triplett, William", - "orcid": "0000-0002-9546-1306" + "affiliation": "Insitiute and Polyclinc for Diagnostic and Interventional Neuroradiology, University Hospital Carl Gustav Carus, Dresden, Germany", + "name": "Kuntke, Paul", + "orcid": "0000-0003-1838-2230" }, { - "affiliation": "MPI CBS Leipzig, Germany", - "name": "Lampe, Leonie" + "name": "Jalan, Raunak" }, { - "affiliation": "Leibniz Institute for Neurobiology", - "name": "Stadler, J\u00f6rg", - "orcid": "0000-0003-4313-129X" + "name": "Inati, Souheil" }, { - "affiliation": "State Key Laboratory of Cognitive Neuroscience and Learning & IDG/McGovern Institute for Brain Research, Beijing Normal University, Beijing, China; Max Planck Institute for Psycholinguistics, Nijmegen, the Netherlands", - "name": "Kong, Xiang-Zhen", - "orcid": "0000-0002-0805-1350" + "affiliation": "Institute of Neuroinformatics, ETH/University of Zurich", + "name": "Gerhard, Stephan", + "orcid": "0000-0003-4454-6171" }, { - "name": "Hallquist, Michael" + "affiliation": "Enigma Biomedical Group", + "name": "Mathotaarachchi, Sulantha" }, { - "affiliation": "Donders Institute for Brain, Cognition and Behavior, Center for Cognitive Neuroimaging", - "name": "Chetverikov, Andrey", - "orcid": "0000-0003-2767-6310" + "name": "Saase, Victor" }, { - "affiliation": "GIGA Institute", - "name": "Grignard, Martin", - "orcid": "0000-0001-5549-1861" + "affiliation": "Washington University in St Louis", + "name": "Van, Andrew", + "orcid": "0000-0002-8787-0943" }, { - "name": "Salvatore, John" + "affiliation": "MPI-CBS; McGill University", + "name": "Steele, Christopher John", + "orcid": "0000-0003-1656-7928" }, { - "name": "Park, Anne" + "affiliation": "Vrije Universiteit Amsterdam", + "name": "Ort, Eduard" }, { - "name": "Poldrack, Russell" + "affiliation": "Stanford University", + "name": "Lerma-Usabiaga, Garikoitz", + "orcid": "0000-0001-9800-4816" }, { - "affiliation": "Child Mind Institute", - "name": "Craddock, R. 
Cameron", - "orcid": "0000-0002-4950-1303" + "name": "Schwabacher, Isaac" }, { - "name": "Hinds, Oliver" + "name": "Arias, Jaime" }, { - "affiliation": "University of Newcastle, Australia", - "name": "Cooper, Gavin", - "orcid": "0000-0002-7186-5293" + "name": "Lai, Jeff" }, { - "affiliation": "Institute of Imaging & Computer Vision, RWTH Aachen University, Germany", - "name": "Weninger, Leon" + "affiliation": "Child Mind Institute / Nathan Kline Institute", + "name": "Pellman, John", + "orcid": "0000-0001-6810-4461" }, { - "name": "Inati, Souheil" + "affiliation": "BarcelonaBeta Brain Research Center", + "name": "Huguet, Jordi", + "orcid": "0000-0001-8420-4833" }, { - "affiliation": "Boston University", - "name": "Perkins, L. Nathan" + "affiliation": "University of Pennsylvania", + "name": "Junhao WEN", + "orcid": "0000-0003-2077-3070" }, { - "affiliation": "University of Amsterdam", - "name": "Lukas Snoek", - "orcid": "0000-0001-8972-204X" + "affiliation": "TIB – Leibniz Information Centre for Science and Technology and University Library, Hannover, Germany", + "name": "Leinweber, Katrin", + "orcid": "0000-0001-5135-5758" }, { - "name": "Marina, Ana" + "affiliation": "INRIA-Saclay, Team Parietal", + "name": "Chawla, Kshitij", + "orcid": "0000-0002-7517-6321" }, { - "name": "Mattfeld, Aaron" + "affiliation": "Institute of Imaging & Computer Vision, RWTH Aachen University, Germany", + "name": "Weninger, Leon" }, { - "name": "Matsubara, K" + "name": "Modat, Marc" }, { - "name": "Noel, Maxime" + "affiliation": "University of Waterloo", + "name": "Mukhometzianov, Rinat", + "orcid": "0000-0003-1274-4827" }, { - "name": "Cheung, Brian" + "name": "Harms, Robbert" }, { - "affiliation": "The University of Texas at Austin", - "name": "Floren, Andrew", - "orcid": "0000-0003-3618-2056" + "affiliation": "University of Helsinki", + "name": "Andberg, Sami Kristian", + "orcid": "0000-0002-5650-3964" }, { - "name": "Urchs, Sebastian" + "name": "Matsubara, K" }, { - "affiliation": "Department of Psychology, Stanford University; Parietal, INRIA", - "name": "Durnez, Joke", - "orcid": "0000-0001-9030-2202" + "affiliation": "Universidad de Guadalajara", + "name": "González Orozco, Abel A." 
}, { - "affiliation": "Technische Universit\u00e4t Dresden, Faculty of Medicine, Department of Child and Adolescent Psychiatry", - "name": "Geisler, Daniel", - "orcid": "0000-0003-2076-5329" + "affiliation": "ARAMIS Lab", + "name": "Routier, Alexandre", + "orcid": "0000-0003-1603-8049" }, { - "affiliation": "University of Texas at Austin", - "name": "De La Vega, Alejandro", - "orcid": "0000-0001-9062-3778" + "name": "Marina, Ana" }, { - "affiliation": "University of illinois urbana champaign", - "name": "Sharp, Paul" + "name": "Davison, Andrew" }, { - "affiliation": "Institute of Neuroinformatics, ETH/University of Zurich", - "name": "Gerhard, Stephan", - "orcid": "0000-0003-4454-6171" + "affiliation": "The University of Texas at Austin", + "name": "Floren, Andrew", + "orcid": "0000-0003-3618-2056" }, { - "affiliation": "Technical University Munich", - "name": "Molina-Romero, Miguel", - "orcid": "0000-0001-8054-0426" + "name": "Park, Anne" }, { - "name": "Haehn, Daniel" + "affiliation": "Consolidated Department of Psychiatry, Harvard Medical School", + "name": "Frederick, Blaise", + "orcid": "0000-0001-5832-5279" }, { - "name": "Weinstein, Alejandro" + "name": "Cheung, Brian" }, { - "name": "Tambini, Arielle" + "name": "McDermottroe, Conor" }, { - "affiliation": "Duke University", - "name": "Broderick, William", - "orcid": "0000-0002-8999-9003" + "affiliation": "University of Cambridge", + "name": "McNamee, Daniel", + "orcid": "0000-0001-9928-4960" }, { - "name": "Saase, Victor" + "name": "Shachnev, Dmitry" }, { - "name": "Rothmei, Simon" + "affiliation": "University of Applied Sciences and Arts Northwestern Switzerland", + "name": "Vogel, Dorian", + "orcid": "0000-0003-3445-576X" }, { - "affiliation": "University of Helsinki", - "name": "Andberg, Sami Kristian", - "orcid": "0000-0002-5650-3964" + "name": "Flandin, Guillaume" }, { - "name": "Harms, Robbert" + "affiliation": "Stanford University and the University of Chicago", + "name": "Jones, Henry", + "orcid": "0000-0001-7719-3646" }, { - "name": "Khanuja, Ranjeet" + "affiliation": "Athinoula A. 
Martinos Center for Biomedical Imaging, Department of Radiology, Massachusetts General Hospital, Charlestown, MA, USA", + "name": "Gonzalez, Ivan", + "orcid": "0000-0002-6451-6909" + }, + { + "name": "Varada, Jan" }, { "name": "Schlamp, Kai" }, { - "name": "Arias, Jaime" + "name": "Podranski, Kornelius" }, { - "affiliation": "CEA", - "name": "Papadopoulos Orfanos, Dimitri", - "orcid": "0000-0002-1242-8990" + "affiliation": "State Key Laboratory of Cognitive Neuroscience and Learning & IDG/McGovern Institute for Brain Research, Beijing Normal University, Beijing, China", + "name": "Huang, Lijie", + "orcid": "0000-0002-9910-5069" }, { - "name": "Tarbert, Claire" + "name": "Noel, Maxime" }, { - "affiliation": "Vrije Universiteit Amsterdam", - "name": "Ort, Eduard" + "affiliation": "Medical Imaging & Biomarkers, Bioclinica, Newark, CA, USA.", + "name": "Pannetier, Nicolas", + "orcid": "0000-0002-0744-5155" }, { - "name": "Nickson, Thomas" + "affiliation": "Flywheel.io, Minneapolis, MN, USA.", + "name": "Velasco, Pablo", + "orcid": "0000-0002-5749-6049" }, { - "name": "Brett, Matthew" + "affiliation": "Max Planck Institute for Human Cognitive and Brain Sciences", + "name": "Numssen, Ole", + "orcid": "0000-0001-7164-2682" }, { - "name": "Falkiewicz, Marcel" + "name": "Khanuja, Ranjeet" }, { - "name": "Podranski, Kornelius" + "name": "Urchs, Sebastian" }, { - "affiliation": "German Institute for International Educational Research", - "name": "Linkersd\u00f6rfer, Janosch", - "orcid": "0000-0002-1577-1233" + "affiliation": "Department of Psychology, Stanford University", + "name": "Shim, Sunjae", + "orcid": "0000-0003-2773-0807" }, { - "name": "Flandin, Guillaume" + "name": "Nickson, Thomas" }, - { - "name": "Shachnev, Dmitry" - }, { - "affiliation": "University of Cambridge", - "name": "McNamee, Daniel", - "orcid": "0000-0001-9928-4960" + "affiliation": "Duke University", + "name": "Broderick, William", + "orcid": "0000-0002-8999-9003" }, { - "name": "Davison, Andrew" + "name": "Tambini, Arielle" }, { - "name": "Varada, Jan" + "affiliation": "Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig, Germany.", + "name": "Mihai, Paul Glad", + "orcid": "0000-0001-5715-6442" }, { - "name": "Schwabacher, Isaac" + "affiliation": "University of Tübingen and MPI for Biological Cybernetics", + "name": "Bannert, Michael M.", + "orcid": "0000-0003-1010-7517" }, { - "affiliation": "Child Mind Institute / Nathan Kline Institute", - "name": "Pellman, John", - "orcid": "0000-0001-6810-4461" + "affiliation": "Research Centre Juelich", + "name": "Wu, Jianxiao", + "orcid": "0000-0002-4866-272X" }, { - "name": "Perez-Guevara, Martin" + "affiliation": "Department of Neurology, BG-University Hospital Bergmannsheil Bochum, Germany", + "name": "Butry, Lionel" }, { - "name": "Khanuja, Ranjeet" + "affiliation": "Lund University", + "name": "Anijärv, Toomas Erik", + "orcid": "0000-0002-3650-4230" }, { - "affiliation": "Medical Imaging & Biomarkers, Bioclinica, Newark, CA, USA.", - "name": "Pannetier, Nicolas", - "orcid": "0000-0002-0744-5155" + "affiliation": "Azienda Ospedaliero-Universitaria di Modena", + "name": "Genovese, Maurilio", + "orcid": "0000-0002-8154-8224" }, { - "name": "McDermottroe, Conor" + "affiliation": "Department of Psychology, Stanford University", + "name": 
"Gorgolewski, Krzysztof J.", + "orcid": "0000-0003-3321-7583" }, { "affiliation": "MIT, HMS", @@ -594,6 +929,11 @@ "affiliation": "University College London", "name": "Stojic, Hrvoje", "orcid": "0000-0002-9699-9052" + }, + { + "affiliation": "Department of Psychological and Brain Sciences, Dartmouth College", + "name": "Petre, Bogdan", + "orcid": "0000-0002-8437-168X" } ], "keywords": [ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f55c09a41c..302a32d626 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,13 +1,13 @@ # Contributing to Nipype -Welcome to the Nipype repository! We're excited you're here and want to contribute. +Welcome to the Nipype repository! We're excited you're here and want to contribute. These guidelines are designed to make it as easy as possible to get involved. If you have any questions that aren't discussed below, please let us know by opening an [issue][link_issues]! Before you start you'll need to set up a free [GitHub][link_github] account and sign in. Here are some [instructions][link_signupinstructions]. If you are not familiar with version control systems such as git, - [introductions and tutorials](http://www.reproducibleimaging.org/module-reproducible-basics/02-vcs/) - may be found on [ReproducibleImaging.org](https://www.reproducibleimaging.org/). +we recommend the [VCS module](http://www.reproducibleimaging.org/module-reproducible-basics/02-vcs/) +available from [ReproNim](http://www.reproducibleimaging.org/). Already know what you're looking for in this guide? Jump to the following sections: * [Understanding issue labels](#issue-labels) @@ -22,7 +22,7 @@ The current list of issue labels are [here][link_labels] and include: * [![Bugs](https://img.shields.io/badge/-bugs-fc2929.svg)][link_bugs] *These issues point to problems in the project.* - If you find new a bug, please provide as much information as possible to recreate the error. + If you find a new bug, please provide as much information as possible to recreate the error. The [issue template][link_issue_template] will automatically populate any new issue you open, and contains information we've found to be helpful in addressing bug reports. Please fill it out to the best of your ability! @@ -41,7 +41,7 @@ The current list of issue labels are [here][link_labels] and include: * [![Orphaned](https://img.shields.io/badge/-orphaned-9baddd.svg)][link_orphaned] *These pull requests have been closed for inactivity.* Before proposing a new pull request, browse through the "orphaned" pull requests. - You may find that someone has already made significant progress toward your goal, and you can re-use their + You may find that someone has already made significant progress toward your goal, and you can reuse their unfinished work. An adopted PR should be updated to merge or rebase the current master, and a new PR should be created (see below) that references the original PR. @@ -72,7 +72,7 @@ One way to do this is to [configure a new remote named "upstream"](https://help. **3. Make the changes you've discussed.** -If you're adding a new tool from an existing neuroimaging toolkit (e.g., 3dDeconvolve from AFNI), +If you're adding a new tool from an existing neuroimaging toolkit (e.g., 3dDeconvolve from AFNI), check out the [guide for adding new interfaces to Nipype][link_new_interfaces]. When you are working on your changes, test frequently to ensure you are not breaking the existing code. @@ -82,27 +82,35 @@ Before pushing your changes to GitHub, run `make check-before-commit`. 
This will test the entire package, and build the documentation. If you get no errors, you're ready to submit your changes! -It's a good practice to create [a new branch](https://help.github.com/articles/about-branches/) +It's a good practice to create [a new branch](https://help.github.com/articles/about-branches/) of the repository for a new set of changes. +For Python 2.7-compatible fixes, the branch should start from the `maint/1.3.x` branch on the +upstream repository. **4. Submit a [pull request][link_pullrequest].** A new pull request for your changes should be created from your fork of the repository. -When opening a pull request, please use one of the following prefixes: +When opening a pull request, please use one of the following prefixes: -* **[ENH]** for enhancements -* **[FIX]** for bug fixes -* **[TST]** for new or updated tests -* **[DOC]** for new or updated documentation -* **[STY]** for stylistic changes -* **[REF]** for refactoring existing code +* **[ENH]** for enhancements +* **[FIX]** for bug fixes +* **[TST]** for new or updated tests +* **[DOC]** for new or updated documentation +* **[STY]** for stylistic changes +* **[REF]** for refactoring existing code + +**5. Install pre-commit.** + +[pre-commit](https://pre-commit.com/) is a framework for managing git hooks that run checks at commit time. To use it in +your environment, run `pip install pre-commit` followed by `pre-commit install` +inside your source directory.
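+
+For reference, a typical first-time setup might look like this (hook versions
+and output will vary; the final `run --all-files` pass is optional):
+
+```shell
+pip install pre-commit
+pre-commit install          # registers the hook in .git/hooks/pre-commit
+pre-commit run --all-files  # optionally check every file once up front
+```
+
+After that, the configured checks run automatically on each `git commit`.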
Pull requests should be submitted early and often (please don't mix too many unrelated changes within one PR)! -If your pull request is not yet ready to be merged, please also include the **[WIP]** prefix (you can remove it once your PR is ready to be merged). +If your pull request is not yet ready to be merged, please also include the **[WIP]** prefix (you can remove it once your PR is ready to be merged). This tells the development team that your pull request is a "work-in-progress", and that you plan to continue working on it. Review and discussion on new code can begin well before the work is complete, and the more discussion the better! @@ -116,7 +124,7 @@ Once your PR is ready a member of the development team will review your changes t In general, do not catch exceptions without good reason. For non-fatal exceptions, log the exception as a warning and add more information about what may have caused the error. -If you do need to catch an exception, raise a new exception using ``raise_from(NewException("message"), oldException)`` from ``future``. +If you do need to catch an exception, raise a new exception using ``raise NewException("message") from oldException``. Do not log this, as it creates redundant/confusing logs.
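+
+A minimal sketch of the pattern (the function and message are illustrative,
+not part of the Nipype API):
+
+```python
+def parse_measurement(text):
+    try:
+        return float(text)
+    except ValueError as err:
+        # Chaining with "from" keeps the original traceback attached,
+        # so there is no need to log the caught exception as well.
+        raise RuntimeError("could not parse measurement %r" % text) from err
+```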
-name "*[.py|.rst]" -type f | xargs perl -pi -e 's/[ \t]*$$//' @@ -56,10 +60,10 @@ inplace: $(PYTHON) setup.py build_ext -i test-code: in - py.test --doctest-module nipype + $(PYTHON) -m pytest --doctest-modules nipype test-coverage: clean-tests in - py.test --doctest-modules --cov-config .coveragerc --cov=nipype nipype + $(PYTHON) -m pytest --doctest-modules --cov-config .coveragerc --cov=nipype nipype test: tests # just another name tests: clean test-code @@ -70,7 +74,7 @@ html: specs: @echo "Checking specs and autogenerating spec tests" - env PYTHONPATH=".:$(PYTHONPATH)" python tools/checkspecs.py + env PYTHONPATH=".:$(PYTHONPATH)" $(PYTHON) tools/checkspecs.py check: check-before-commit # just a shortcut check-before-commit: specs trailing-spaces html test diff --git a/README.rst b/README.rst index b0d0e7a0be..b51c2bf401 100644 --- a/README.rst +++ b/README.rst @@ -77,6 +77,24 @@ Information specific to Nipype is located here:: http://nipy.org/nipype +Python 2 Statement +------------------ + +Python 2.7 reaches its end-of-life in January 2020, which means it will +*no longer be maintained* by Python developers. `Many projects +`__ are removing support in advance of this +deadline, which will make it increasingly untenable to try to support +Python 2, even if we wanted to. + +The final series with 2.7 support is 1.3.x. If you have a package using +Python 2 and are unable or unwilling to upgrade to Python 3, then you +should use the following `dependency +`__ for +Nipype:: + + nipype<1.4 + +Bug fixes will be accepted against the ``maint/1.3.x`` branch. Support and Communication ------------------------- @@ -91,7 +109,7 @@ To participate in the Nipype development related discussions please use the foll Please add *[nipype]* to the subject line when posting on the mailing list. -You can even hangout with the Nipype developers in their +You can even hangout with the Nipype developers in their `Gitter `_ channel or in the BrainHack `Slack `_ channel. (Click `here `_ to join the Slack workspace.) diff --git a/THANKS.rst b/THANKS.rst index 4d8cdd47e7..71c4d9eeac 100644 --- a/THANKS.rst +++ b/THANKS.rst @@ -19,4 +19,3 @@ and `UL1 TR000442 University of Iowa Clinical and Translational Science Program We would also like to thank `JetBrains `__ for providing `Pycharm `__ licenses. - diff --git a/build_docs.py b/build_docs.py deleted file mode 100644 index c39a8da473..0000000000 --- a/build_docs.py +++ /dev/null @@ -1,197 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -""" -Code to build the documentation in the setup.py - -To use this code, run:: - - python setup.py build_sphinx -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open, str - -# Standard library imports -import sys -import os -from os.path import join as pjoin -import zipfile -import warnings -import shutil -from distutils.cmd import Command -from distutils.command.clean import clean - -_info_fname = pjoin(os.path.dirname(__file__), 'nipype', 'info.py') -INFO_VARS = {} -exec(str(open(_info_fname, 'rt').read()), {}, INFO_VARS) - -DOC_BUILD_DIR = os.path.join('doc', '_build', 'html') -DOC_DOCTREES_DIR = os.path.join('doc', '_build', 'doctrees') - -############################################################################### -# Distutils Command class for installing nipype to a temporary location. 
- - -class TempInstall(Command): - temp_install_dir = os.path.join('build', 'install') - - def run(self): - """ build and install nipype in a temporary location. """ - install = self.distribution.get_command_obj('install') - install.install_scripts = self.temp_install_dir - install.install_base = self.temp_install_dir - install.install_platlib = self.temp_install_dir - install.install_purelib = self.temp_install_dir - install.install_data = self.temp_install_dir - install.install_lib = self.temp_install_dir - install.install_headers = self.temp_install_dir - install.run() - - # Horrible trick to reload nipype with our temporary instal - for key in list(sys.modules.keys()): - if key.startswith('nipype'): - sys.modules.pop(key, None) - sys.path.append(os.path.abspath(self.temp_install_dir)) - # Pop the cwd - sys.path.pop(0) - import nipype - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - -############################################################################### -# Distutils Command class for API generation -class APIDocs(TempInstall): - description = \ - """generate API docs """ - - user_options = [ - ('None', None, 'this command has no options'), - ] - - def run(self): - # First build the project and install it to a temporary location. - TempInstall.run(self) - os.chdir('doc') - try: - # We are running the API-building script via an - # system call, but overriding the import path. - toolsdir = os.path.abspath(pjoin('..', 'tools')) - for docbuilder in ['build_interface_docs.py']: - build_templates = pjoin(toolsdir, docbuilder) - cmd = """%s -c 'import sys; sys.path.append("%s"); sys.path.append("%s"); execfile("%s", dict(__name__="__main__"))'""" \ - % (sys.executable, - toolsdir, - self.temp_install_dir, - build_templates) - os.system(cmd) - finally: - os.chdir('..') - - -############################################################################### -# Code to copy the sphinx-generated html docs in the distribution. -def relative_path(filename): - """ Return the relative path to the file, assuming the file is - in the DOC_BUILD_DIR directory. - """ - length = len(os.path.abspath(DOC_BUILD_DIR)) + 1 - return os.path.abspath(filename)[length:] - - -############################################################################### -# Distutils Command class build the docs -# Sphinx import. -try: - from sphinx.setup_command import BuildDoc -except: - MyBuildDoc = None -else: - class MyBuildDoc(BuildDoc): - """ Sub-class the standard sphinx documentation building system, to - add logics for API generation and matplotlib's plot directive. - """ - - def run(self): - self.run_command('api_docs') - # We need to be in the doc directory for to plot_directive - # and API generation to work - """ - os.chdir('doc') - try: - BuildDoc.run(self) - finally: - os.chdir('..') - """ - # It put's the build in a doc/doc/_build directory with the - # above?!?! I'm leaving the code above here but commented out - # in case I'm missing something? - BuildDoc.run(self) - self.zip_docs() - - def zip_docs(self): - if not os.path.exists(DOC_BUILD_DIR): - raise OSError('Doc directory does not exist.') - target_file = os.path.join('doc', 'documentation.zip') - # ZIP_DEFLATED actually compresses the archive. However, there - # will be a RuntimeError if zlib is not installed, so we check - # for it. ZIP_STORED produces an uncompressed zip, but does not - # require zlib. 
- try: - zf = zipfile.ZipFile(target_file, 'w', - compression=zipfile.ZIP_DEFLATED) - except RuntimeError: - warnings.warn('zlib not installed, storing the docs ' - 'without compression') - zf = zipfile.ZipFile(target_file, 'w', - compression=zipfile.ZIP_STORED) - - for root, dirs, files in os.walk(DOC_BUILD_DIR): - relative = relative_path(root) - if not relative.startswith('.doctrees'): - for f in files: - zf.write(os.path.join(root, f), - os.path.join(relative, 'html_docs', f)) - zf.close() - - def finalize_options(self): - """ Override the default for the documentation build - directory. - """ - self.build_dir = os.path.join(*DOC_BUILD_DIR.split(os.sep)[:-1]) - BuildDoc.finalize_options(self) - -############################################################################### -# Distutils Command class to clean - - -class Clean(clean): - - def run(self): - clean.run(self) - api_path = os.path.join('doc', 'api', 'generated') - if os.path.exists(api_path): - print("Removing %s" % api_path) - shutil.rmtree(api_path) - interface_path = os.path.join('doc', 'interfaces', 'generated') - if os.path.exists(interface_path): - print("Removing %s" % interface_path) - shutil.rmtree(interface_path) - if os.path.exists(DOC_BUILD_DIR): - print("Removing %s" % DOC_BUILD_DIR) - shutil.rmtree(DOC_BUILD_DIR) - if os.path.exists(DOC_DOCTREES_DIR): - print("Removing %s" % DOC_DOCTREES_DIR) - shutil.rmtree(DOC_DOCTREES_DIR) - - -# The command classes for distutils, used by the setup.py -cmdclass = {'build_sphinx': MyBuildDoc, - 'api_docs': APIDocs, - 'clean': Clean, - } diff --git a/doc/.gitignore b/doc/.gitignore index 1f812bd420..d396f26e2d 100644 --- a/doc/.gitignore +++ b/doc/.gitignore @@ -1 +1,2 @@ /documentation.zip +_static/python diff --git a/doc/Makefile b/doc/Makefile index abe329a57a..9b3e3783ef 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -11,44 +11,30 @@ PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d _build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -.PHONY: help clean html api htmlonly latex changes linkcheck doctest +.PHONY: help clean html htmlonly latex changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html make the HTML documentation" - @echo " api make API documents only" @echo " latex make the LaTeX, you can set PAPER=a4 or PAPER=letter" @echo " pdf make and run the PDF generation" @echo " changes make an overview of all changed/added/deprecated" \ "items (ChangeLog)" @echo " linkcheck check all external links for integrity" @echo " doctest run all doctests embedded in the documentation" - @echo " sf_satra copy html files to sourceforge (satra only)" clean: - -rm -rf _build/* *~ api/generated interfaces/generated users/examples documentation.zip + -rm -rf _build/* *~ api/generated interfaces/generated users/examples documentation.zip _static/python htmlonly: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) _build/html @echo @echo "Build finished. The HTML pages are in _build/html." -api: - rm -rf api/generated - python -u ../tools/build_modref_templates.py - rm -rf interfaces/generated - python -u ../tools/build_interface_docs.py - @echo "Build API docs finished." - -html: clean examples2rst api htmlonly +html: clean htmlonly @echo "Build HTML and API finished." -examples2rst: - mkdir -p users/examples - ../tools/make_examples.py --no-exec - @echo "examples2rst finished." 
- -latex: api +latex: clean $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex @echo @echo "Build finished; the LaTeX files are in _build/latex." @@ -82,18 +68,3 @@ gitwash-update: --project-url=http://nipy.org/nipype \ --project-ml-url=http://mail.scipy.org/mailman/listinfo/nipy-devel @echo "gitwash updated" - -# Sourceforge doesn't appear to have a way of copying the files -# without specifying a username. So we'll probably have one target -# for each project admin -sf_satra_nightly: html - @echo "Copying html files to sourceforge..." - scp -r _build/html/* satra,nipy@web.sourceforge.net:htdocs/nipype-nightly/ - -sf_satra: html - @echo "Copying html files to sourceforge..." - rsync -auv _build/html/. satra,nipy@web.sourceforge.net:htdocs/nipype/. - -sf_filo: html - @echo "Copying html files to sourceforge..." - rsync -auv _build/html/. gorgolewski,nipy@web.sourceforge.net:htdocs/nipype/. diff --git a/doc/README.txt b/doc/README.txt deleted file mode 100644 index e55d4936cf..0000000000 --- a/doc/README.txt +++ /dev/null @@ -1,28 +0,0 @@ -====================== - Nipype Documentation -====================== - -This directory contains the documentation for the Nipype_ project. -The documentation is written in reST_ (reStructuredText) and uses -Sphinx_ to render html documentation from the rst source files. - -A relatively recent version of Sphinx_ is required to build the -documentation, at least 0.6.x. - -Use the ``Makefile`` to build the documentation. Common commands: - -Discover available make targets:: - - make help - -Clean up previous build:: - - make clean - -Build html documentation:: - - make html - - - - diff --git a/doc/_static/nipype.css b/doc/_static/nipype.css index cec080b3d6..5aceb4c42a 100644 --- a/doc/_static/nipype.css +++ b/doc/_static/nipype.css @@ -5,6 +5,10 @@ body { min-width: 70em; } +div.sphinxsidebar { + position: relative; +} + div.sphinxsidebar h4, div.sphinxsidebar h3 { background-color: #2F83C8; } diff --git a/doc/_static/snakebrain-nipype.svg b/doc/_static/snakebrain-nipype.svg new file mode 100644 index 0000000000..04bfc05362 --- /dev/null +++ b/doc/_static/snakebrain-nipype.svg @@ -0,0 +1,108 @@ +[108 lines of SVG markup not reproduced: the file's XML tags were lost in extraction. Recoverable metadata: MIME type "image/svg+xml"; original artwork by Arno Klein <arno@binarybottle.com>, converted to SVG from a pixel-based artwork by Michael Hanke <michael.hanke@gmail.com>.] diff --git a/doc/_templates/indexsidebar.html b/doc/_templates/indexsidebar.html index ce03fb63f9..f771858968 100644 --- a/doc/_templates/indexsidebar.html +++ b/doc/_templates/indexsidebar.html @@ -11,4 +11,4 @@
[indexsidebar.html context lines garbled in extraction: the template's HTML markup was stripped, leaving only the text fragments "{{ _('Links') }}" and "Python Versions"; the change itself rewrites only the final line:] -{% endblock %} +{% endblock %} diff --git a/doc/_templates/layout.html b/doc/_templates/layout.html index a8de8d176b..2228087cec 100644 --- a/doc/_templates/layout.html +++ b/doc/_templates/layout.html @@ -2,6 +2,9 @@ {% set title = 'Neuroimaging in Python - Pipelines and Interfaces' %} {% set short_title = 'Nipype' %} +{% block sidebar1 %}{{ sidebar() }}{% endblock %} +{% block sidebar2 %}{% endblock %} + {% block extrahead %} {{ super() }} @@ -42,8 +45,6 @@ {% block relbar1 %}{% endblock %} {% block relbar2 %}{% endblock %} -{% block sidebar1 %}{{ sidebar() }}{% endblock %} -{% block sidebar2 %}{% endblock %} {% block footer %} {{ super() }} diff --git a/doc/_templates/navbar.html b/doc/_templates/navbar.html index 883bedc56b..6b2d457a1f 100644 --- a/doc/_templates/navbar.html +++ b/doc/_templates/navbar.html @@ -1,7 +1,6 @@ [navbar.html hunk body lost in extraction (HTML markup stripped); the text resumes inside removed lines of a deleted IPython notebook tutorial whose diff header was also lost. An HTML table of supported packages in one of its markdown cells was reduced to empty fragments; only the cell's closing remarks survive:]
\n", - "\n", - "Most used/contributed policy!\n", - "\n", - "Not all components of these packages are available." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "skip" - } - }, - "source": [ - "# Workflows\n", - "\n", - "- Properties:\n", - "\n", - " - processing pipeline is a directed acyclic graph (DAG)\n", - " - nodes are processes\n", - " - edges represent data flow\n", - " - compact represenation for any process\n", - " - code and data separation" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "#Execution Plugins\n", - "\n", - "Allows seamless execution across many architectures\n", - "\n", - " - Local\n", - "\n", - " - Serial\n", - " - Multicore\n", - "\n", - " - Clusters\n", - "\n", - " - HTCondor\n", - " - PBS/Torque/SGE/LSF (native and via IPython)\n", - " - SSH (via IPython)\n", - " - Soma Workflow" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "slide" - } - }, - "source": [ - "# Learn Nipype concepts in 10 easy steps\n", - "\n", - "\n", - "1. Installing and testing the installation \n", - "2. Working with interfaces\n", - "3. Using Nipype caching\n", - "4. Creating Nodes, MapNodes and Workflows\n", - "5. Getting and saving data\n", - "6. Using Iterables\n", - "7. Function nodes\n", - "8. Distributed computation\n", - "9. Connecting to databases\n", - "10. Execution configuration options" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "slide" - } - }, - "source": [ - "# Step 1. Installing Nipype\n", - "\n", - "## Scientific Python:\n", - "\n", - "* Debian/Ubuntu/Scientific Fedora\n", - "* [Canopy from Enthought](https://www.enthought.com/products/canopy/)\n", - "* [Anaconda from Contnuum Analytics](https://store.continuum.io/cshop/anaconda/)\n", - "\n", - "## Installing Nipype:\n", - "\n", - "* Available from [@NeuroDebian](http://neuro.debian.net/pkgs/python-nipype.html),\n", - " [@PyPI](http://pypi.python.org/pypi/nipype/), and\n", - " [@GitHub](http://github.com/nipy/nipype)\n", - " \n", - " - pip install nipype\n", - " - easy_install nipype\n", - " - sudo apt-get install python-nipype\n", - "\n", - "* Dependencies: networkx, nibabel, numpy, scipy, traits\n", - "\n", - "## Running Nipype ([Quickstart](http://nipy.org/nipype/quickstart.html)):\n", - "\n", - "* Ensure underlying tools are installed and accessible\n", - "* Nipype **is a wrapper, not a substitute** for AFNI, ANTS, FreeSurfer, FSL, SPM,\n", - " NiPy, etc.,." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "# Step 1. 
Testing nipype" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```\n", - "$ ipython notebook\n", - "```" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "import nipype\n", - "\n", - "# Comment the following section to increase verbosity of output\n", - "nipype.config.set('logging', 'workflow_level', 'CRITICAL')\n", - "nipype.config.set('logging', 'interface_level', 'CRITICAL')\n", - "nipype.logging.update_logging(nipype.config)\n", - "\n", - "nipype.test(verbose=0) # Increase verbosity parameter for more info" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "If all goes well you will see an OK:\n", - "\n", - " ----------------------------------------------------------------------\n", - " Ran 2497 tests in 68.486s\n", - "\n", - " OK (SKIP=13)\n", - "\n", - "The number of tests and time will vary depending on which interfaces you have installed on your system." - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "nipype.get_info()" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "# Environment and data setup\n", - "\n", - "Setting up your Ipython notebook environment and download some data to play with" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "%pylab inline" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "# Some preliminaries\n", - "import os\n", - "cwd = os.getcwd()\n", - "tutorial_dir = '/software/temp/nipype-tutorial/ohbm/'\n", - "if not os.path.exists(tutorial_dir):\n", - " os.mkdir(tutorial_dir)\n", - "os.chdir(tutorial_dir)" - ], - "language": "python", - "metadata": { - "slideshow": { - "slide_type": "-" - } - }, - "outputs": [] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "import urllib\n", - "required_files = ['ds107/sub001/BOLD/task001_run001/bold.nii.gz',\n", - " 'ds107/sub001/BOLD/task001_run002/bold.nii.gz',\n", - " 'ds107/sub001/anatomy/highres001.nii.gz',\n", - " 'ds107/sub044/BOLD/task001_run001/bold.nii.gz',\n", - " 'ds107/sub044/BOLD/task001_run002/bold.nii.gz',\n", - " 'ds107/sub044/anatomy/highres001.nii.gz'\n", - " ]\n", - "base_url = 'http://openfmri.aws.amazon.com.s3.amazonaws.com/'\n", - "for filepath in required_files:\n", - " file_location = os.path.join(tutorial_dir, filepath)\n", - " if not os.path.exists(file_location):\n", - " print('Retrieving: ' + file_location)\n", - " os.makedirs(os.path.dirname(file_location))\n", - " urllib.urlretrieve(base_url + filepath, file_location)" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "slide" - } - }, - "source": [ - "# Step 2. 
Working with interfaces" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "import nipype.algorithms" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "from nipype.interfaces.fsl import DTIFit\n", - "from nipype.interfaces.spm import Realign" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "### Finding interface inputs and outputs and examples" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "DTIFit.help()" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "Realign.help()" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "### Creating a directory for running interfaces" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "import os\n", - "from shutil import copyfile\n", - "library_dir = os.path.join(tutorial_dir, 'as_a_library')\n", - "if not os.path.exists(library_dir):\n", - " os.mkdir(library_dir)\n", - "os.chdir(library_dir)" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "## Executing interfaces" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "from nipype.interfaces.freesurfer import MRIConvert\n", - "convert = MRIConvert(in_file='../ds107/sub001/BOLD/task001_run001/bold.nii.gz',\n", - " out_file='ds107.nii')\n", - "print(convert.cmdline)\n", - "results = convert.run(terminal_output='none') # allatonce, stream (default), file" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "results.outputs" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "## Other ways" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "convert = MRIConvert()\n", - "convert.inputs.in_file='../ds107/sub001/BOLD/task001_run001/bold.nii.gz'\n", - "convert.inputs.out_file='ds107.nii'\n", - "convert.run()" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "convert = MRIConvert()\n", - "convert.run(in_file='../ds107/sub001/BOLD/task001_run001/bold.nii.gz',\n", - " out_file='ds107.nii')" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "code", - "collapsed": true, - "input": [ - "convert.inputs" - ], - "language": "python", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "#### Look at only the defined inputs" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "results.inputs" - ], - "language": "python", - "metadata": { - "slideshow": { - "slide_type": "-" - } - }, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "### Experiment with other 
interfaces\n",
- "\n",
- "For example, run realignment with SPM"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "from nipype.interfaces.spm import Realign\n",
- "results1 = Realign(in_files='ds107.nii',\n",
- "                   register_to_mean=False).run()"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "And now use FSL"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "from nipype.interfaces.fsl import MCFLIRT\n",
- "results2 = MCFLIRT(in_file='ds107.nii', ref_vol=0,\n",
- "                   save_plots=True).run()"
- ],
- "language": "python",
- "metadata": {
- "slideshow": {
- "slide_type": "-"
- }
- },
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "### Now we can look at some results"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "print(results1.runtime.duration, results2.runtime.duration)\n",
- "subplot(211);plot(genfromtxt('ds107_mcf.nii.gz.par')[:, 3:]);title('FSL')\n",
- "subplot(212);plot(genfromtxt('rp_ds107.txt')[:,:3]);title('SPM')"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### If I execute the MCFLIRT line again, well, it runs again!"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "# Step 3: Nipype caching"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "from nipype.caching import Memory\n",
- "mem = Memory('.')"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Create `cacheable` objects"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "spm_realign = mem.cache(Realign)\n",
- "fsl_realign = mem.cache(MCFLIRT)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "### Execute interfaces"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "spm_results = spm_realign(in_files='ds107.nii', register_to_mean=False)\n",
- "fsl_results = fsl_realign(in_file='ds107.nii', ref_vol=0, save_plots=True)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "subplot(211);plot(genfromtxt(fsl_results.outputs.par_file)[:, 3:])\n",
- "subplot(212);plot(genfromtxt(spm_results.outputs.realignment_parameters)[:,:3])"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "spm_results = spm_realign(in_files='ds107.nii', register_to_mean=False)\n",
- "fsl_results = fsl_realign(in_file='ds107.nii', ref_vol=0, save_plots=True)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "# More caching"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "from os.path import abspath as opap\n",
- "from nipype.interfaces.freesurfer import MRIConvert\n",
- "files = [opap('../ds107/sub001/BOLD/task001_run001/bold.nii.gz'),\n",
- "         opap('../ds107/sub001/BOLD/task001_run002/bold.nii.gz')]\n",
- "converter = mem.cache(MRIConvert)\n",
- "newfiles = []\n",
- "for idx, fname in enumerate(files):\n",
- "    newfiles.append(converter(in_file=fname,\n",
- "                              out_type='nii').outputs.out_file)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "os.chdir(tutorial_dir)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "# Step 4: Nodes, MapNodes and Workflows\n",
- "\n",
- "**Where:**"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "from nipype.pipeline.engine import Node, MapNode, Workflow"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "**Node**:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "realign_spm = Node(Realign(), name='motion_correct')"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "**MapNode**:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "convert2nii = MapNode(MRIConvert(out_type='nii'),\n",
- "                      iterfield=['in_file'],\n",
- "                      name='convert2nii')"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "# \"Hello World\" of Nipype workflows"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "### Connect them up:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "realignflow = Workflow(name='realign_with_spm')\n",
- "realignflow.connect(convert2nii, 'out_file',\n",
- "                    realign_spm, 'in_files')"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "convert2nii.inputs.in_file = files\n",
- "realign_spm.inputs.register_to_mean = False\n",
- "\n",
- "realignflow.base_dir = opap('.')\n",
- "realignflow.run()"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "# Visualize the workflow"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "realignflow.write_graph()"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "from IPython.core.display import Image\n",
- "Image('realign_with_spm/graph.png')"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "realignflow.write_graph(graph2use='orig')\n",
- "Image('realign_with_spm/graph_detailed.png')"
- ],
- "language": "python",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "# Step 5: Getting and saving data\n",
- "\n",
- "### Instead of assigning data ourselves, let's *glob* it"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "os.chdir(tutorial_dir)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "from nipype.interfaces.io import DataGrabber, DataFinder\n",
- "ds = Node(DataGrabber(infields=['subject_id'], outfields=['func']),\n",
- "          name='datasource')\n",
- "ds.inputs.base_directory = opap('ds107')\n",
- "ds.inputs.template = '%s/BOLD/task001*/bold.nii.gz'\n",
- "ds.inputs.sort_filelist = True\n",
- "\n",
- "ds.inputs.subject_id = 'sub001'\n",
- "print(ds.run().outputs)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "ds.inputs.subject_id = 'sub044'\n",
- "print(ds.run().outputs)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "# Multiple files\n",
- "\n",
- "### A little more practical usage"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "ds = Node(DataGrabber(infields=['subject_id', 'task_id'],\n",
- "                      outfields=['func', 'anat']),\n",
- "          name='datasource')\n",
- "ds.inputs.base_directory = opap('ds107')\n",
- "ds.inputs.template = '*'\n",
- "ds.inputs.template_args = {'func': [['subject_id', 'task_id']],\n",
- "                           'anat': [['subject_id']]}\n",
- "ds.inputs.field_template = {'func': '%s/BOLD/task%03d*/bold.nii.gz',\n",
- "                            'anat': '%s/anatomy/highres001.nii.gz'}\n",
- "ds.inputs.sort_filelist = True\n",
- "ds.inputs.subject_id = 'sub001'\n",
- "ds.inputs.task_id = 1\n",
- "print(ds.run().outputs)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "# Connecting to computation"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "convert2nii = MapNode(MRIConvert(out_type='nii'),\n",
- "                      iterfield=['in_file'],\n",
- "                      name='convert2nii')\n",
- "\n",
- "realign_spm = Node(Realign(), name='motion_correct')\n",
- "realign_spm.inputs.register_to_mean = False\n",
- "\n",
- "connectedworkflow = Workflow(name='connectedtogether')\n",
- "connectedworkflow.base_dir = opap('working_dir')\n",
- "connectedworkflow.connect(ds, 'func', convert2nii, 'in_file')\n",
- "connectedworkflow.connect(convert2nii, 'out_file', realign_spm, 'in_files')"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "# Data sinking\n",
- "\n",
- "### Take output computed in a workflow out of it."
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "from nipype.interfaces.io import DataSink\n",
- "sinker = Node(DataSink(), name='sinker')\n",
- "sinker.inputs.base_directory = opap('output')\n",
- "connectedworkflow.connect(realign_spm, 'realigned_files',\n",
- "                          sinker, 'realigned')\n",
- "connectedworkflow.connect(realign_spm, 'realignment_parameters',\n",
- "                          sinker, 'realigned.@parameters')\n",
- "connectedworkflow.run()"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### How to determine output location\n",
- "\n",
- "    'base_directory/container/parameterization/destloc/filename'\n",
- "\n",
- "    destloc = [@]string[[.[@]]string[[.[@]]string]...] and\n",
- "    destloc = realigned.@parameters --> 'realigned'\n",
- "    destloc = realigned.parameters.@1 --> 'realigned/parameters'\n",
- "    destloc = realigned.parameters.@2 --> 'realigned/parameters'\n",
- "    filename comes from the input to the connect statement."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "# Step 6: *iterables* - parametric execution\n",
- "\n",
- "**Workflow + iterables**: runs the subgraph several times; iterables are set as a node attribute, not an input"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "ds.iterables = ('subject_id', ['sub001', 'sub044'])\n",
- "connectedworkflow.run()"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "# Putting it all together\n",
- "\n",
- "### iterables + MapNode + Node + Workflow + DataGrabber + DataSink"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "connectedworkflow.write_graph()\n",
- "Image('working_dir/connectedtogether/graph.png')"
- ],
- "language": "python",
- "metadata": {
- "slideshow": {
- "slide_type": "-"
- }
- },
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "# Step 7: The Function interface\n",
- "\n",
- "### The do-anything-you-want card"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "from nipype.interfaces.utility import Function\n",
- "\n",
- "def myfunc(input1, input2):\n",
- "    \"\"\"Add and subtract two inputs\n",
- "    \"\"\"\n",
- "    return input1 + input2, input1 - input2\n",
- "\n",
- "calcfunc = Node(Function(input_names=['input1', 'input2'],\n",
- "                         output_names=['sum', 'difference'],\n",
- "                         function=myfunc),\n",
- "                name='mycalc')\n",
- "calcfunc.inputs.input1 = 1\n",
- "calcfunc.inputs.input2 = 2\n",
- "res = calcfunc.run()\n",
- "print(res.outputs)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "# Step 8: Distributed computing\n",
- "\n",
- "### Normally calling run executes the workflow in series"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "connectedworkflow.run()"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### But you can scale very easily\n",
- "\n",
- "For example, to use multiple cores on your local machine"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "connectedworkflow.run('MultiProc', plugin_args={'n_procs': 4})"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "### Or to other job managers\n",
- "\n",
- "- connectedworkflow.run('PBS', plugin_args={'qsub_args': '-q many'})\n",
- "- connectedworkflow.run('SGE', plugin_args={'qsub_args': '-q many'})\n",
- "- connectedworkflow.run('LSF', plugin_args={'qsub_args': '-q many'})\n",
- "- connectedworkflow.run('Condor')\n",
- "- connectedworkflow.run('IPython')\n",
- "\n",
- "### Or submit graphs as a whole\n",
- "\n",
- "- connectedworkflow.run('PBSGraph', plugin_args={'qsub_args': '-q many'})\n",
- "- connectedworkflow.run('SGEGraph', plugin_args={'qsub_args': '-q many'})\n",
- "- connectedworkflow.run('CondorDAGMan')\n",
- "\n",
- "### Current Requirement: **SHARED FILESYSTEM**"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "source": [
- "### You can also set node-specific plugin arguments"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "- node.plugin_args = {'qsub_args': '-l nodes=1:ppn=3', 'overwrite': True}"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "# Step 9: Connecting to Databases"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "from os.path import abspath as opap\n",
- "\n",
- "from nipype.interfaces.io import XNATSource\n",
- "from nipype.pipeline.engine import Node, Workflow\n",
- "from nipype.interfaces.fsl import BET\n",
- "\n",
- "subject_id = 'xnat_S00001'\n",
- "\n",
- "dg = Node(XNATSource(infields=['subject_id'],\n",
- "                     outfields=['struct'],\n",
- "                     config='/Users/satra/xnat_configs/nitrc_ir_config'),\n",
- "          name='xnatsource')\n",
- "dg.inputs.query_template = ('/projects/fcon_1000/subjects/%s/experiments/xnat_E00001'\n",
- "                            '/scans/%s/resources/NIfTI/files')\n",
- "dg.inputs.query_template_args['struct'] = [['subject_id', 'anat_mprage_anonymized']]\n",
- "dg.inputs.subject_id = subject_id\n",
- "\n",
- "bet = Node(BET(), name='skull_stripper')\n",
- "\n",
- "wf = Workflow(name='testxnat')\n",
- "wf.base_dir = opap('xnattest')\n",
- "wf.connect(dg, 'struct', bet, 'in_file')"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "from nipype.interfaces.io import XNATSink\n",
- "\n",
- "ds = Node(XNATSink(config='/Users/satra/xnat_configs/central_config'),\n",
- "          name='xnatsink')\n",
- "ds.inputs.project_id = 'NPTEST'\n",
- "ds.inputs.subject_id = 'NPTEST_xnat_S00001'\n",
- "ds.inputs.experiment_id = 'test_xnat'\n",
- "ds.inputs.reconstruction_id = 'bet'\n",
- "ds.inputs.share = True\n",
- "wf.connect(bet, 'out_file', ds, 'brain')"
- ],
- "language": "python",
- "metadata": {
- "slideshow": {
- "slide_type": "skip"
- }
- },
- "outputs": []
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "wf.run()"
- ],
- "language": "python",
- "metadata": {
- "slideshow": {
- "slide_type": "subslide"
- }
- },
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "# Step 10: Configuration options\n",
- "\n",
- "[Configurable options](http://nipy.org/nipype/users/config_file.html) control workflow and node execution\n",
- "\n",
- "At the global level:"
- ]
- },
-
{ - "cell_type": "code", - "collapsed": false, - "input": [ - "from nipype import config, logging\n", - "\n", - "config.enable_debug_mode()\n", - "logging.update_logging(config)\n", - "\n", - "config.set('execution', 'stop_on_first_crash', 'true')" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "At the workflow level:" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "wf.config['execution']['hash_method'] = 'content'" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "Configurations can also be set at the node level." - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "bet.config = {'execution': {'keep_unnecessary_outputs': 'true'}}" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "wf.run()" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "slide" - } - }, - "source": [ - "# Reusable workflows" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "config.set_default_config()\n", - "logging.update_logging(config)" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth\n", - "\n", - "smooth = create_susan_smooth()\n", - "smooth.inputs.inputnode.in_files = opap('output/realigned/_subject_id_sub044/rbold_out.nii')\n", - "smooth.inputs.inputnode.fwhm = 5\n", - "smooth.inputs.inputnode.mask_file = 'mask.nii'\n", - "\n", - "smooth.run() # Will error because mask.nii does not exist" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "from nipype.interfaces.fsl import BET, MeanImage, ImageMaths\n", - "from nipype.pipeline.engine import Node\n", - "\n", - "\n", - "remove_nan = Node(ImageMaths(op_string= '-nan'), name='nanremove')\n", - "remove_nan.inputs.in_file = opap('output/realigned/_subject_id_sub044/rbold_out.nii')\n", - "\n", - "mi = Node(MeanImage(), name='mean')\n", - "\n", - "mask = Node(BET(mask=True), name='mask')\n", - "\n", - "wf = Workflow('reuse')\n", - "wf.base_dir = opap('.')\n", - "wf.connect(remove_nan, 'out_file', mi, 'in_file')\n", - "wf.connect(mi, 'out_file', mask, 'in_file')\n", - "wf.connect(mask, 'out_file', smooth, 'inputnode.mask_file')\n", - "wf.connect(remove_nan, 'out_file', smooth, 'inputnode.in_files')\n", - "\n", - "wf.run()" - ], - "language": "python", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "slideshow": { - "slide_type": "subslide" - } - }, - "source": [ - "## Setting internal parameters of workflows" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "print(smooth.list_node_names())\n", - "\n", - "median = smooth.get_node('median')\n", - "median.inputs.op_string = '-k %s -p 60'" - ], - "language": "python", - "metadata": { - "slideshow": { - "slide_type": "-" - } - }, - "outputs": [] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "wf.run()" - ], - "language": "python", - "metadata": {}, - "outputs": [] - }, - { - "cell_type": "markdown", 
- "metadata": { - "slideshow": { - "slide_type": "slide" - } - }, - "source": [ - "# Summary\n", - "\n", - "\n", - "- This tutorial covers the concepts of Nipype\n", - "\n", - " 1. Installing and testing the installation \n", - " 2. Working with interfaces\n", - " 3. Using Nipype caching\n", - " 4. Creating Nodes, MapNodes and Workflows\n", - " 5. Getting and saving data\n", - " 6. Using Iterables\n", - " 7. Function nodes\n", - " 8. Distributed computation\n", - " 9. Connecting to databases\n", - " 10. Execution configuration options\n", - "\n", - "- It will allow you to reuse and debug the various workflows available in Nipype, BIPS and CPAC\n", - "- Please contribute new interfaces and workflows!" - ] - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "import os\n", - "basedir = '/Users/satra/Dropbox/WORK/notebooks/'\n", - "if os.path.exists(basedir):\n", - " os.chdir(basedir)" - ], - "language": "python", - "metadata": { - "slideshow": { - "slide_type": "skip" - } - }, - "outputs": [] - } - ], - "metadata": {} - } - ] -} diff --git a/examples/rsfmri_vol_surface_preprocessing.py b/examples/rsfmri_vol_surface_preprocessing.py deleted file mode 100644 index 20b150b149..0000000000 --- a/examples/rsfmri_vol_surface_preprocessing.py +++ /dev/null @@ -1,1087 +0,0 @@ -#!/usr/bin/env python -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -""" -==================================== -rsfMRI: ANTS, FS, FSL, SPM, aCompCor -==================================== - - -A preprocessing workflow for Siemens resting state data. - -This workflow makes use of: - -- ANTS -- FreeSurfer -- FSL -- SPM -- CompCor - -For example:: - - python rsfmri_preprocessing.py -d /data/12345-34-1.dcm -f /data/Resting.nii - -s subj001 -o output -p PBS --plugin_args "dict(qsub_args='-q many')" - - or - - python rsfmri_vol_surface_preprocessing.py -f SUB_1024011/E?/func/rest.nii - -t OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz --TR 2 -s SUB_1024011 - --subjects_dir fsdata --slice_times 0 17 1 18 2 19 3 20 4 21 5 22 6 23 - 7 24 8 25 9 26 10 27 11 28 12 29 13 30 14 31 15 32 16 -o . - -This workflow takes resting timeseries and a Siemens dicom file corresponding -to it and preprocesses it to produce timeseries coordinates or grayordinates. 
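-
-For programmatic use, the same pipeline can be built directly with the
-``create_workflow`` function defined below. A minimal sketch (the paths,
-subject id, TR, and slice times shown here are hypothetical placeholders)::
-
-    import os
-
-    wf = create_workflow(
-        files=[os.path.abspath('rest_run001.nii')],
-        target_file=os.path.abspath(
-            'OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz'),
-        subject_id='subj001',
-        TR=2.0,
-        slice_times=[0.0, 1.0, 0.5, 1.5],
-        vol_fwhm=6.,
-        surf_fwhm=15.,
-        subjects_dir=os.path.abspath('fsdata'),
-        sink_directory=os.path.abspath('output'))
-    wf.base_dir = os.getcwd()
-    wf.run('MultiProc', plugin_args={'n_procs': 4})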
-
-This workflow also requires 2mm subcortical atlas and templates that are
-available from:
-
-http://mindboggle.info/data.html
-
-specifically the 2mm versions of:
-
-- `Joint Fusion Atlas `_
-- `MNI template `_
-"""
-
-from __future__ import division, unicode_literals
-from builtins import open, range, str
-
-import os
-
-from nipype.interfaces.base import CommandLine
-CommandLine.set_default_terminal_output('allatonce')
-
-from dicom import read_file
-
-from nipype.interfaces import (spm, fsl, Function, ants, freesurfer)
-from nipype.interfaces.c3 import C3dAffineTool
-
-fsl.FSLCommand.set_default_output_type('NIFTI')
-
-from nipype import Workflow, Node, MapNode
-from nipype.interfaces import matlab as mlab
-
-mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodisplay")
-# If SPM is not in your MATLAB path you should add it here
-# mlab.MatlabCommand.set_default_paths('/software/matlab/spm12')
-
-from nipype.algorithms.rapidart import ArtifactDetect
-from nipype.algorithms.misc import TSNR, CalculateMedian
-from nipype.interfaces.utility import Rename, Merge, IdentityInterface
-from nipype.utils.filemanip import filename_to_list
-from nipype.interfaces.io import DataSink, FreeSurferSource
-
-import numpy as np
-import scipy as sp
-import nibabel as nb
-
-imports = [
-    'import os', 'import nibabel as nb', 'import numpy as np',
-    'import scipy as sp',
-    'from nipype.utils.filemanip import filename_to_list, list_to_filename, split_filename',
-    'from scipy.special import legendre'
-]
-
-
-def get_info(dicom_files):
-    """Given a Siemens dicom file return metadata
-
-    Returns
-    -------
-    RepetitionTime
-    Slice Acquisition Times
-    Spacing between slices
-    """
-    from dcmstack.extract import default_extractor
-    meta = default_extractor(
-        read_file(
-            filename_to_list(dicom_files)[0],
-            stop_before_pixels=True,
-            force=True))
-    return (meta['RepetitionTime'] / 1000., meta['CsaImage.MosaicRefAcqTimes'],
-            meta['SpacingBetweenSlices'])
-
-
-def median(in_files):
-    """Computes an average of the median of each realigned timeseries
-
-    Parameters
-    ----------
-
-    in_files: one or more realigned Nifti 4D time series
-
-    Returns
-    -------
-
-    out_file: a 3D Nifti file
-    """
-    import numpy as np
-    import nibabel as nb
-    from nipype.utils import NUMPY_MMAP
-    average = None
-    for idx, filename in enumerate(filename_to_list(in_files)):
-        img = nb.load(filename, mmap=NUMPY_MMAP)
-        data = np.median(img.get_data(), axis=3)
-        if average is None:
-            average = data
-        else:
-            average = average + data
-    median_img = nb.Nifti1Image(average / float(idx + 1), img.affine,
-                                img.header)
-    filename = os.path.join(os.getcwd(), 'median.nii.gz')
-    median_img.to_filename(filename)
-    return filename
-
-
-def bandpass_filter(files, lowpass_freq, highpass_freq, fs):
-    """Bandpass filter the input files
-
-    Parameters
-    ----------
-    files: list of 4d nifti files
-    lowpass_freq: cutoff frequency for the low pass filter (in Hz)
-    highpass_freq: cutoff frequency for the high pass filter (in Hz)
-    fs: sampling rate (in Hz)
-    """
-    from nipype.utils.filemanip import split_filename, list_to_filename
-    import numpy as np
-    import nibabel as nb
-    from nipype.utils import NUMPY_MMAP
-    out_files = []
-    for filename in filename_to_list(files):
-        path, name, ext = split_filename(filename)
-        out_file = os.path.join(os.getcwd(), name + '_bp' + ext)
-        img = nb.load(filename, mmap=NUMPY_MMAP)
-        timepoints = img.shape[-1]
-        F = np.zeros((timepoints))
-        lowidx = int(timepoints / 2) + 1
-        if lowpass_freq > 0:
-            lowidx = int(np.round(lowpass_freq / fs * timepoints))
-        highidx = 0
-        if highpass_freq > 0:
-            highidx = int(np.round(highpass_freq / fs * timepoints))
-        F[highidx:lowidx] = 1
-        F = ((F + F[::-1]) > 0).astype(int)
-        data = img.get_data()
-        if np.all(F == 1):
-            filtered_data = data
-        else:
-            filtered_data = np.real(np.fft.ifftn(np.fft.fftn(data) * F))
-        img_out = nb.Nifti1Image(filtered_data, img.affine, img.header)
-        img_out.to_filename(out_file)
-        out_files.append(out_file)
-    return list_to_filename(out_files)
-
-
-def motion_regressors(motion_params, order=0, derivatives=1):
-    """Compute motion regressors up to the given order and derivative
-
-    motion + d(motion)/dt + d2(motion)/dt2 (linear + quadratic)
-    """
-    import numpy as np
-    out_files = []
-    for idx, filename in enumerate(filename_to_list(motion_params)):
-        params = np.genfromtxt(filename)
-        out_params = params
-        for d in range(1, derivatives + 1):
-            cparams = np.vstack((np.repeat(params[0, :][None, :], d, axis=0),
-                                 params))
-            out_params = np.hstack((out_params, np.diff(cparams, d, axis=0)))
-        out_params2 = out_params
-        for i in range(2, order + 1):
-            out_params2 = np.hstack((out_params2, np.power(out_params, i)))
-        filename = os.path.join(os.getcwd(), "motion_regressor%02d.txt" % idx)
-        np.savetxt(filename, out_params2, fmt=b"%.10f")
-        out_files.append(filename)
-    return out_files
-
-
-def build_filter1(motion_params, comp_norm, outliers, detrend_poly=None):
-    """Builds a regressor set comprising motion parameters, composite norm and
-    outliers
-
-    The outliers are added as a single time point column for each outlier
-
-
-    Parameters
-    ----------
-
-    motion_params: a text file containing motion parameters and their derivatives
-    comp_norm: a text file containing the composite norm
-    outliers: a text file containing 0-based outlier indices
-    detrend_poly: number of polynomials to add to detrend
-
-    Returns
-    -------
-    components_file: a text file containing all the regressors
-    """
-    import numpy as np
-    import nibabel as nb
-    from scipy.special import legendre
-    out_files = []
-    for idx, filename in enumerate(filename_to_list(motion_params)):
-        params = np.genfromtxt(filename)
-        norm_val = np.genfromtxt(filename_to_list(comp_norm)[idx])
-        out_params = np.hstack((params, norm_val[:, None]))
-        try:
-            outlier_val = np.genfromtxt(filename_to_list(outliers)[idx])
-        except IOError:
-            outlier_val = np.empty((0))
-        for index in np.atleast_1d(outlier_val):
-            outlier_vector = np.zeros((out_params.shape[0], 1))
-            outlier_vector[int(index)] = 1
-            out_params = np.hstack((out_params, outlier_vector))
-        if detrend_poly:
-            timepoints = out_params.shape[0]
-            X = np.empty((timepoints, 0))
-            for i in range(detrend_poly):
-                X = np.hstack((X, legendre(i + 1)(np.linspace(
-                    -1, 1, timepoints))[:, None]))
-            out_params = np.hstack((out_params, X))
-        filename = os.path.join(os.getcwd(), "filter_regressor%02d.txt" % idx)
-        np.savetxt(filename, out_params, fmt=b"%.10f")
-        out_files.append(filename)
-    return out_files
-
-
-def extract_noise_components(realigned_file,
-                             mask_file,
-                             num_components=5,
-                             extra_regressors=None):
-    """Derive components most reflective of physiological noise
-
-    Parameters
-    ----------
-    realigned_file: a 4D Nifti file containing realigned volumes
-    mask_file: a 3D Nifti file containing white matter + ventricular masks
-    num_components: number of components to use for noise decomposition
-    extra_regressors: additional regressors to add
-
-    Returns
-    -------
-    components_file: a text file containing the noise components
-    """
-    from scipy.linalg.decomp_svd import svd
-    import numpy
as np - import nibabel as nb - from nipype.utils import NUMPY_MMAP - import os - imgseries = nb.load(realigned_file, mmap=NUMPY_MMAP) - components = None - for filename in filename_to_list(mask_file): - mask = nb.load(filename, mmap=NUMPY_MMAP).get_data() - if len(np.nonzero(mask > 0)[0]) == 0: - continue - voxel_timecourses = imgseries.get_data()[mask > 0] - voxel_timecourses[np.isnan(np.sum(voxel_timecourses, axis=1)), :] = 0 - # remove mean and normalize by variance - # voxel_timecourses.shape == [nvoxels, time] - X = voxel_timecourses.T - stdX = np.std(X, axis=0) - stdX[stdX == 0] = 1. - stdX[np.isnan(stdX)] = 1. - stdX[np.isinf(stdX)] = 1. - X = (X - np.mean(X, axis=0)) / stdX - u, _, _ = svd(X, full_matrices=False) - if components is None: - components = u[:, :num_components] - else: - components = np.hstack((components, u[:, :num_components])) - if extra_regressors: - regressors = np.genfromtxt(extra_regressors) - components = np.hstack((components, regressors)) - components_file = os.path.join(os.getcwd(), 'noise_components.txt') - np.savetxt(components_file, components, fmt=b"%.10f") - return components_file - - -def rename(in_files, suffix=None): - from nipype.utils.filemanip import (filename_to_list, split_filename, - list_to_filename) - out_files = [] - for idx, filename in enumerate(filename_to_list(in_files)): - _, name, ext = split_filename(filename) - if suffix is None: - out_files.append(name + ('_%03d' % idx) + ext) - else: - out_files.append(name + suffix + ext) - return list_to_filename(out_files) - - -def get_aparc_aseg(files): - """Return the aparc+aseg.mgz file""" - for name in files: - if 'aparc+aseg.mgz' in name: - return name - raise ValueError('aparc+aseg.mgz not found') - - -def extract_subrois(timeseries_file, label_file, indices): - """Extract voxel time courses for each subcortical roi index - - Parameters - ---------- - - timeseries_file: a 4D Nifti file - label_file: a 3D file containing rois in the same space/size of the 4D file - indices: a list of indices for ROIs to extract. 
- - Returns - ------- - out_file: a text file containing time courses for each voxel of each roi - The first four columns are: freesurfer index, i, j, k positions in the - label file - """ - from nipype.utils.filemanip import split_filename - import nibabel as nb - from nipype.utils import NUMPY_MMAP - import os - img = nb.load(timeseries_file, mmap=NUMPY_MMAP) - data = img.get_data() - roiimg = nb.load(label_file, mmap=NUMPY_MMAP) - rois = roiimg.get_data() - prefix = split_filename(timeseries_file)[1] - out_ts_file = os.path.join(os.getcwd(), '%s_subcortical_ts.txt' % prefix) - with open(out_ts_file, 'wt') as fp: - for fsindex in indices: - ijk = np.nonzero(rois == fsindex) - ts = data[ijk] - for i0, row in enumerate(ts): - fp.write('%d,%d,%d,%d,' % ( - fsindex, ijk[0][i0], ijk[1][i0], - ijk[2][i0]) + ','.join(['%.10f' % val - for val in row]) + '\n') - return out_ts_file - - -def combine_hemi(left, right): - """Combine left and right hemisphere time series into a single text file - """ - import os - import numpy as np - from nipype.utils import NUMPY_MMAP - lh_data = nb.load(left, mmap=NUMPY_MMAP).get_data() - rh_data = nb.load(right, mmap=NUMPY_MMAP).get_data() - - indices = np.vstack((1000000 + np.arange(0, lh_data.shape[0])[:, None], - 2000000 + np.arange(0, rh_data.shape[0])[:, None])) - all_data = np.hstack((indices, - np.vstack((lh_data.squeeze(), rh_data.squeeze())))) - filename = left.split('.')[1] + '_combined.txt' - np.savetxt( - filename, - all_data, - fmt=','.join(['%d'] + ['%.10f'] * (all_data.shape[1] - 1))) - return os.path.abspath(filename) - - -def create_reg_workflow(name='registration'): - """Create a FEAT preprocessing workflow together with freesurfer - - Parameters - ---------- - - name : name of workflow (default: 'registration') - - Inputs:: - - inputspec.source_files : files (filename or list of filenames to register) - inputspec.mean_image : reference image to use - inputspec.anatomical_image : anatomical image to coregister to - inputspec.target_image : registration target - - Outputs:: - - outputspec.func2anat_transform : FLIRT transform - outputspec.anat2target_transform : FLIRT+FNIRT transform - outputspec.transformed_files : transformed files in target space - outputspec.transformed_mean : mean image in target space - """ - - register = Workflow(name=name) - - inputnode = Node( - interface=IdentityInterface(fields=[ - 'source_files', 'mean_image', 'subject_id', 'subjects_dir', - 'target_image' - ]), - name='inputspec') - - outputnode = Node( - interface=IdentityInterface(fields=[ - 'func2anat_transform', 'out_reg_file', 'anat2target_transform', - 'transforms', 'transformed_mean', 'segmentation_files', - 'anat2target', 'aparc' - ]), - name='outputspec') - - # Get the subject's freesurfer source directory - fssource = Node(FreeSurferSource(), name='fssource') - fssource.run_without_submitting = True - register.connect(inputnode, 'subject_id', fssource, 'subject_id') - register.connect(inputnode, 'subjects_dir', fssource, 'subjects_dir') - - convert = Node(freesurfer.MRIConvert(out_type='nii'), name="convert") - register.connect(fssource, 'T1', convert, 'in_file') - - # Coregister the median to the surface - bbregister = Node(freesurfer.BBRegister(), name='bbregister') - bbregister.inputs.init = 'fsl' - bbregister.inputs.contrast_type = 't2' - bbregister.inputs.out_fsl_file = True - bbregister.inputs.epi_mask = True - register.connect(inputnode, 'subject_id', bbregister, 'subject_id') - register.connect(inputnode, 'mean_image', bbregister, 'source_file') - 
register.connect(inputnode, 'subjects_dir', bbregister, 'subjects_dir')
-    """
-    Estimate the tissue classes from the anatomical image, here using FSL's
-    FAST on the skull-stripped (BET) brain.
-    """
-
-    stripper = Node(fsl.BET(), name='stripper')
-    register.connect(convert, 'out_file', stripper, 'in_file')
-    fast = Node(fsl.FAST(), name='fast')
-    register.connect(stripper, 'out_file', fast, 'in_files')
-    """
-    Binarize the segmentation
-    """
-
-    binarize = MapNode(
-        fsl.ImageMaths(op_string='-nan -thr 0.9 -ero -bin'),
-        iterfield=['in_file'],
-        name='binarize')
-    register.connect(fast, 'partial_volume_files', binarize, 'in_file')
-    """
-    Apply inverse transform to take segmentations to functional space
-    """
-
-    applyxfm = MapNode(
-        freesurfer.ApplyVolTransform(inverse=True, interp='nearest'),
-        iterfield=['target_file'],
-        name='inverse_transform')
-    register.connect(inputnode, 'subjects_dir', applyxfm, 'subjects_dir')
-    register.connect(bbregister, 'out_reg_file', applyxfm, 'reg_file')
-    register.connect(binarize, 'out_file', applyxfm, 'target_file')
-    register.connect(inputnode, 'mean_image', applyxfm, 'source_file')
-    """
-    Apply inverse transform to aparc file
-    """
-
-    aparcxfm = Node(
-        freesurfer.ApplyVolTransform(inverse=True, interp='nearest'),
-        name='aparc_inverse_transform')
-    register.connect(inputnode, 'subjects_dir', aparcxfm, 'subjects_dir')
-    register.connect(bbregister, 'out_reg_file', aparcxfm, 'reg_file')
-    register.connect(fssource, ('aparc_aseg', get_aparc_aseg), aparcxfm,
-                     'target_file')
-    register.connect(inputnode, 'mean_image', aparcxfm, 'source_file')
-    """
-    Convert the BBRegister transformation to ANTS ITK format
-    """
-
-    convert2itk = Node(C3dAffineTool(), name='convert2itk')
-    convert2itk.inputs.fsl2ras = True
-    convert2itk.inputs.itk_transform = True
-    register.connect(bbregister, 'out_fsl_file', convert2itk, 'transform_file')
-    register.connect(inputnode, 'mean_image', convert2itk, 'source_file')
-    register.connect(stripper, 'out_file', convert2itk, 'reference_file')
-    """
-    Compute registration between the subject's structural and MNI template
-    This is currently set to perform a very quick registration.
However, the - registration can be made significantly more accurate for cortical - structures by increasing the number of iterations - All parameters are set using the example from: - #https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh - """ - - reg = Node(ants.Registration(), name='antsRegister') - reg.inputs.output_transform_prefix = "output_" - reg.inputs.transforms = ['Rigid', 'Affine', 'SyN'] - reg.inputs.transform_parameters = [(0.1, ), (0.1, ), (0.2, 3.0, 0.0)] - reg.inputs.number_of_iterations = [[10000, 11110, 11110]] * 2 + [[ - 100, 30, 20 - ]] - reg.inputs.dimension = 3 - reg.inputs.write_composite_transform = True - reg.inputs.collapse_output_transforms = True - reg.inputs.initial_moving_transform_com = True - reg.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']] - reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]] - reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]] - reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]] - reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]] - reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01] - reg.inputs.convergence_window_size = [20] * 2 + [5] - reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]] - reg.inputs.sigma_units = ['vox'] * 3 - reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]] - reg.inputs.use_estimate_learning_rate_once = [True] * 3 - reg.inputs.use_histogram_matching = [False] * 2 + [True] - reg.inputs.winsorize_lower_quantile = 0.005 - reg.inputs.winsorize_upper_quantile = 0.995 - reg.inputs.float = True - reg.inputs.output_warped_image = 'output_warped_image.nii.gz' - reg.inputs.num_threads = 4 - reg.plugin_args = {'qsub_args': '-l nodes=1:ppn=4'} - register.connect(stripper, 'out_file', reg, 'moving_image') - register.connect(inputnode, 'target_image', reg, 'fixed_image') - """ - Concatenate the affine and ants transforms into a list - """ - - merge = Node(Merge(2), iterfield=['in2'], name='mergexfm') - register.connect(convert2itk, 'itk_transform', merge, 'in2') - register.connect(reg, 'composite_transform', merge, 'in1') - """ - Transform the mean image. 
First to anatomical and then to target
-    """
-
-    warpmean = Node(ants.ApplyTransforms(), name='warpmean')
-    warpmean.inputs.input_image_type = 3
-    warpmean.inputs.interpolation = 'Linear'
-    warpmean.inputs.invert_transform_flags = [False, False]
-    warpmean.terminal_output = 'file'
-    warpmean.inputs.args = '--float'
-    warpmean.inputs.num_threads = 4
-
-    register.connect(inputnode, 'target_image', warpmean, 'reference_image')
-    register.connect(inputnode, 'mean_image', warpmean, 'input_image')
-    register.connect(merge, 'out', warpmean, 'transforms')
-    """
-    Assign all the output files
-    """
-
-    register.connect(reg, 'warped_image', outputnode, 'anat2target')
-    register.connect(warpmean, 'output_image', outputnode, 'transformed_mean')
-    register.connect(applyxfm, 'transformed_file', outputnode,
-                     'segmentation_files')
-    register.connect(aparcxfm, 'transformed_file', outputnode, 'aparc')
-    register.connect(bbregister, 'out_fsl_file', outputnode,
-                     'func2anat_transform')
-    register.connect(bbregister, 'out_reg_file', outputnode, 'out_reg_file')
-    register.connect(reg, 'composite_transform', outputnode,
-                     'anat2target_transform')
-    register.connect(merge, 'out', outputnode, 'transforms')
-
-    return register
-
-
-"""
-Creates the main preprocessing workflow
-"""
-
-
-def create_workflow(files,
-                    target_file,
-                    subject_id,
-                    TR,
-                    slice_times,
-                    norm_threshold=1,
-                    num_components=5,
-                    vol_fwhm=None,
-                    surf_fwhm=None,
-                    lowpass_freq=-1,
-                    highpass_freq=-1,
-                    subjects_dir=None,
-                    sink_directory=os.getcwd(),
-                    target_subject=['fsaverage3', 'fsaverage4'],
-                    name='resting'):
-
-    wf = Workflow(name=name)
-
-    # Rename files in case they are named identically
-    name_unique = MapNode(
-        Rename(format_string='rest_%(run)02d'),
-        iterfield=['in_file', 'run'],
-        name='rename')
-    name_unique.inputs.keep_ext = True
-    name_unique.inputs.run = list(range(1, len(files) + 1))
-    name_unique.inputs.in_file = files
-
-    realign = Node(interface=spm.Realign(), name="realign")
-    realign.inputs.jobtype = 'estwrite'
-
-    num_slices = len(slice_times)
-    slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
-    slice_timing.inputs.num_slices = num_slices
-    slice_timing.inputs.time_repetition = TR
-    slice_timing.inputs.time_acquisition = TR - TR / float(num_slices)
-    slice_timing.inputs.slice_order = (np.argsort(slice_times) + 1).tolist()
-    slice_timing.inputs.ref_slice = int(num_slices / 2)
-
-    # Compute TSNR on realigned data, regressing polynomials up to order 2
-    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
-    wf.connect(slice_timing, 'timecorrected_files', tsnr, 'in_file')
-
-    # Compute the median image across runs
-    calc_median = Node(CalculateMedian(), name='median')
-    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
-    """Segment and Register
-    """
-
-    registration = create_reg_workflow(name='registration')
-    wf.connect(calc_median, 'median_file', registration,
-               'inputspec.mean_image')
-    registration.inputs.inputspec.subject_id = subject_id
-    registration.inputs.inputspec.subjects_dir = subjects_dir
-    registration.inputs.inputspec.target_image = target_file
-    """Use :class:`nipype.algorithms.rapidart` to determine which of the
-    images in the functional series are outliers based on deviations in
-    intensity or movement.
-    """
-
-    art = Node(interface=ArtifactDetect(), name="art")
-    art.inputs.use_differences = [True, True]
-    art.inputs.use_norm = True
-    art.inputs.norm_threshold = norm_threshold
-    art.inputs.zintensity_threshold = 9
-    art.inputs.mask_type = 'spm_global'
-    art.inputs.parameter_source = 'SPM'
-    """Connect the preprocessing stages: give each run a unique name, realign,
-    slice-time correct, and feed the slice-timed series together with the
-    realignment parameters into artifact detection.
-    """
-
-    wf.connect([
-        (name_unique, realign, [('out_file', 'in_files')]),
-        (realign, slice_timing, [('realigned_files', 'in_files')]),
-        (slice_timing, art, [('timecorrected_files', 'realigned_files')]),
-        (realign, art, [('realignment_parameters', 'realignment_parameters')]),
-    ])
-
-    def selectindex(files, idx):
-        import numpy as np
-        from nipype.utils.filemanip import filename_to_list, list_to_filename
-        return list_to_filename(
-            np.array(filename_to_list(files))[idx].tolist())
-
-    mask = Node(fsl.BET(), name='getmask')
-    mask.inputs.mask = True
-    wf.connect(calc_median, 'median_file', mask, 'in_file')
-
-    # get segmentation in normalized functional space
-
-    def merge_files(in1, in2):
-        out_files = filename_to_list(in1)
-        out_files.extend(filename_to_list(in2))
-        return out_files
-
-    # filter some noise
-
-    # Compute motion regressors
-    motreg = Node(
-        Function(
-            input_names=['motion_params', 'order', 'derivatives'],
-            output_names=['out_files'],
-            function=motion_regressors,
-            imports=imports),
-        name='getmotionregress')
-    wf.connect(realign, 'realignment_parameters', motreg, 'motion_params')
-
-    # Create a filter to remove motion and art confounds
-    createfilter1 = Node(
-        Function(
-            input_names=[
-                'motion_params', 'comp_norm', 'outliers', 'detrend_poly'
-            ],
-            output_names=['out_files'],
-            function=build_filter1,
-            imports=imports),
-        name='makemotionbasedfilter')
-    createfilter1.inputs.detrend_poly = 2
-    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
-    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
-    wf.connect(art, 'outlier_files', createfilter1, 'outliers')
-
-    filter1 = MapNode(
-        fsl.GLM(
-            out_f_name='F_mcart.nii', out_pf_name='pF_mcart.nii', demean=True),
-        iterfield=['in_file', 'design', 'out_res_name'],
-        name='filtermotion')
-
-    wf.connect(slice_timing, 'timecorrected_files', filter1, 'in_file')
-    wf.connect(slice_timing, ('timecorrected_files', rename, '_filtermotart'),
-               filter1, 'out_res_name')
-    wf.connect(createfilter1, 'out_files', filter1, 'design')
-
-    createfilter2 = MapNode(
-        Function(
-            input_names=[
-                'realigned_file', 'mask_file', 'num_components',
-                'extra_regressors'
-            ],
-            output_names=['out_files'],
-            function=extract_noise_components,
-            imports=imports),
-        iterfield=['realigned_file', 'extra_regressors'],
-        name='makecompcorrfilter')
-    createfilter2.inputs.num_components = num_components
-
-    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
-    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
-    wf.connect(registration,
-               ('outputspec.segmentation_files', selectindex, [0, 2]),
-               createfilter2, 'mask_file')
-
-    filter2 = MapNode(
-        fsl.GLM(out_f_name='F.nii', out_pf_name='pF.nii', demean=True),
-        iterfield=['in_file', 'design', 'out_res_name'],
-        name='filter_noise_nosmooth')
-    wf.connect(filter1, 'out_res', filter2, 'in_file')
-    wf.connect(filter1, ('out_res', rename, '_cleaned'), filter2,
-               'out_res_name')
-    wf.connect(createfilter2, 'out_files', filter2, 'design')
-
wf.connect(mask, 'mask_file', filter2, 'mask') - - bandpass = Node( - Function( - input_names=['files', 'lowpass_freq', 'highpass_freq', 'fs'], - output_names=['out_files'], - function=bandpass_filter, - imports=imports), - name='bandpass_unsmooth') - bandpass.inputs.fs = 1. / TR - bandpass.inputs.highpass_freq = highpass_freq - bandpass.inputs.lowpass_freq = lowpass_freq - wf.connect(filter2, 'out_res', bandpass, 'files') - """Smooth the functional data using - :class:`nipype.interfaces.spm.Smooth`. - """ - - smooth = Node(interface=spm.Smooth(), name="smooth") - smooth.inputs.fwhm = vol_fwhm - - wf.connect(bandpass, 'out_files', smooth, 'in_files') - - collector = Node(Merge(2), name='collect_streams') - wf.connect(smooth, 'smoothed_files', collector, 'in1') - wf.connect(bandpass, 'out_files', collector, 'in2') - """ - Transform the remaining images. First to anatomical and then to target - """ - - warpall = MapNode( - ants.ApplyTransforms(), iterfield=['input_image'], name='warpall') - warpall.inputs.input_image_type = 3 - warpall.inputs.interpolation = 'Linear' - warpall.inputs.invert_transform_flags = [False, False] - warpall.terminal_output = 'file' - warpall.inputs.reference_image = target_file - warpall.inputs.args = '--float' - warpall.inputs.num_threads = 1 - - # transform to target - wf.connect(collector, 'out', warpall, 'input_image') - wf.connect(registration, 'outputspec.transforms', warpall, 'transforms') - - mask_target = Node(fsl.ImageMaths(op_string='-bin'), name='target_mask') - - wf.connect(registration, 'outputspec.anat2target', mask_target, 'in_file') - - maskts = MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='ts_masker') - wf.connect(warpall, 'output_image', maskts, 'in_file') - wf.connect(mask_target, 'out_file', maskts, 'mask_file') - - # map to surface - # extract aparc+aseg ROIs - # extract subcortical ROIs - # extract target space ROIs - # combine subcortical and cortical rois into a single cifti file - - ####### - # Convert aparc to subject functional space - - # Sample the average time series in aparc ROIs - sampleaparc = MapNode( - freesurfer.SegStats(default_color_table=True), - iterfield=['in_file', 'summary_file', 'avgwf_txt_file'], - name='aparc_ts') - sampleaparc.inputs.segment_id = ( - [8] + list(range(10, 14)) + [17, 18, 26, 47] + list(range(49, 55)) + - [58] + list(range(1001, 1036)) + list(range(2001, 2036))) - - wf.connect(registration, 'outputspec.aparc', sampleaparc, - 'segmentation_file') - wf.connect(collector, 'out', sampleaparc, 'in_file') - - def get_names(files, suffix): - """Generate appropriate names for output files - """ - from nipype.utils.filemanip import (split_filename, filename_to_list, - list_to_filename) - out_names = [] - for filename in files: - _, name, _ = split_filename(filename) - out_names.append(name + suffix) - return list_to_filename(out_names) - - wf.connect(collector, ('out', get_names, '_avgwf.txt'), sampleaparc, - 'avgwf_txt_file') - wf.connect(collector, ('out', get_names, '_summary.stats'), sampleaparc, - 'summary_file') - - # Sample the time series onto the surface of the target surface. 
Performs - # sampling into left and right hemisphere - target = Node(IdentityInterface(fields=['target_subject']), name='target') - target.iterables = ('target_subject', filename_to_list(target_subject)) - - samplerlh = MapNode( - freesurfer.SampleToSurface(), - iterfield=['source_file'], - name='sampler_lh') - samplerlh.inputs.sampling_method = "average" - samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1) - samplerlh.inputs.sampling_units = "frac" - samplerlh.inputs.interp_method = "trilinear" - samplerlh.inputs.smooth_surf = surf_fwhm - # samplerlh.inputs.cortex_mask = True - samplerlh.inputs.out_type = 'niigz' - samplerlh.inputs.subjects_dir = subjects_dir - - samplerrh = samplerlh.clone('sampler_rh') - - samplerlh.inputs.hemi = 'lh' - wf.connect(collector, 'out', samplerlh, 'source_file') - wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file') - wf.connect(target, 'target_subject', samplerlh, 'target_subject') - - samplerrh.set_input('hemi', 'rh') - wf.connect(collector, 'out', samplerrh, 'source_file') - wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file') - wf.connect(target, 'target_subject', samplerrh, 'target_subject') - - # Combine left and right hemisphere to text file - combiner = MapNode( - Function( - input_names=['left', 'right'], - output_names=['out_file'], - function=combine_hemi, - imports=imports), - iterfield=['left', 'right'], - name="combiner") - wf.connect(samplerlh, 'out_file', combiner, 'left') - wf.connect(samplerrh, 'out_file', combiner, 'right') - - # Sample the time series file for each subcortical roi - ts2txt = MapNode( - Function( - input_names=['timeseries_file', 'label_file', 'indices'], - output_names=['out_file'], - function=extract_subrois, - imports=imports), - iterfield=['timeseries_file'], - name='getsubcortts') - ts2txt.inputs.indices = [8] + list(range(10, 14)) + [17, 18, 26, 47] +\ - list(range(49, 55)) + [58] - ts2txt.inputs.label_file = \ - os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_' - '2mm_v2.nii.gz')) - wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file') - - ###### - - substitutions = [('_target_subject_', - ''), ('_filtermotart_cleaned_bp_trans_masked', ''), - ('_filtermotart_cleaned_bp', '')] - regex_subs = [ - ('_ts_masker.*/sar', '/smooth/'), - ('_ts_masker.*/ar', '/unsmooth/'), - ('_combiner.*/sar', '/smooth/'), - ('_combiner.*/ar', '/unsmooth/'), - ('_aparc_ts.*/sar', '/smooth/'), - ('_aparc_ts.*/ar', '/unsmooth/'), - ('_getsubcortts.*/sar', '/smooth/'), - ('_getsubcortts.*/ar', '/unsmooth/'), - ('series/sar', 'series/smooth/'), - ('series/ar', 'series/unsmooth/'), - ('_inverse_transform./', ''), - ] - # Save the relevant data into an output directory - datasink = Node(interface=DataSink(), name="datasink") - datasink.inputs.base_directory = sink_directory - datasink.inputs.container = subject_id - datasink.inputs.substitutions = substitutions - datasink.inputs.regexp_substitutions = regex_subs # (r'(/_.*(\d+/))', r'/run\2') - wf.connect(realign, 'realignment_parameters', datasink, - 'resting.qa.motion') - wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm') - wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity') - wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files') - wf.connect(registration, 'outputspec.segmentation_files', datasink, - 'resting.mask_files') - wf.connect(registration, 'outputspec.anat2target', datasink, - 'resting.qa.ants') - wf.connect(mask, 'mask_file', datasink, 
'resting.mask_files.@brainmask') - wf.connect(mask_target, 'out_file', datasink, 'resting.mask_files.target') - wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F') - wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF') - wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps') - wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p') - wf.connect(bandpass, 'out_files', datasink, - 'resting.timeseries.@bandpassed') - wf.connect(smooth, 'smoothed_files', datasink, - 'resting.timeseries.@smoothed') - wf.connect(createfilter1, 'out_files', datasink, - 'resting.regress.@regressors') - wf.connect(createfilter2, 'out_files', datasink, - 'resting.regress.@compcorr') - wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target') - wf.connect(sampleaparc, 'summary_file', datasink, - 'resting.parcellations.aparc') - wf.connect(sampleaparc, 'avgwf_txt_file', datasink, - 'resting.parcellations.aparc.@avgwf') - wf.connect(ts2txt, 'out_file', datasink, - 'resting.parcellations.grayo.@subcortical') - - datasink2 = Node(interface=DataSink(), name="datasink2") - datasink2.inputs.base_directory = sink_directory - datasink2.inputs.container = subject_id - datasink2.inputs.substitutions = substitutions - datasink2.inputs.regexp_substitutions = regex_subs # (r'(/_.*(\d+/))', r'/run\2') - wf.connect(combiner, 'out_file', datasink2, - 'resting.parcellations.grayo.@surface') - return wf - - -""" -Creates the full workflow including getting information from dicom files -""" - - -def create_resting_workflow(args, name=None): - TR = args.TR - slice_times = args.slice_times - if args.dicom_file: - TR, slice_times, slice_thickness = get_info(args.dicom_file) - slice_times = (np.array(slice_times) / 1000.).tolist() - if name is None: - name = 'resting_' + args.subject_id - kwargs = dict( - files=[os.path.abspath(filename) for filename in args.files], - target_file=os.path.abspath(args.target_file), - subject_id=args.subject_id, - TR=TR, - slice_times=slice_times, - vol_fwhm=args.vol_fwhm, - surf_fwhm=args.surf_fwhm, - norm_threshold=2., - subjects_dir=os.path.abspath(args.fsdir), - target_subject=args.target_surfs, - lowpass_freq=args.lowpass_freq, - highpass_freq=args.highpass_freq, - sink_directory=os.path.abspath(args.sink), - name=name) - wf = create_workflow(**kwargs) - return wf - - -if __name__ == "__main__": - from argparse import ArgumentParser, RawTextHelpFormatter - defstr = ' (default %(default)s)' - parser = ArgumentParser( - description=__doc__, formatter_class=RawTextHelpFormatter) - parser.add_argument( - "-d", - "--dicom_file", - dest="dicom_file", - help="an example dicom file from the resting series") - parser.add_argument( - "-f", - "--files", - dest="files", - nargs="+", - help="4d nifti files for resting state", - required=True) - parser.add_argument( - "-t", - "--target", - dest="target_file", - help=("Target in MNI space. 
Best to use the MindBoggle " - "template - " - "OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz"), - required=True) - parser.add_argument( - "-s", - "--subject_id", - dest="subject_id", - help="FreeSurfer subject id", - required=True) - parser.add_argument( - "--subjects_dir", - dest="fsdir", - help="FreeSurfer subject directory", - required=True) - parser.add_argument( - "--target_surfaces", - dest="target_surfs", - nargs="+", - default=['fsaverage5'], - help="FreeSurfer target surfaces" + defstr) - parser.add_argument( - "--TR", - dest="TR", - default=None, - type=float, - help="TR if dicom not provided in seconds") - parser.add_argument( - "--slice_times", - dest="slice_times", - nargs="+", - type=float, - help="Slice onset times in seconds") - parser.add_argument( - '--vol_fwhm', - default=6., - dest='vol_fwhm', - type=float, - help="Spatial FWHM" + defstr) - parser.add_argument( - '--surf_fwhm', - default=15., - dest='surf_fwhm', - type=float, - help="Spatial FWHM" + defstr) - parser.add_argument( - "-l", - "--lowpass_freq", - dest="lowpass_freq", - default=0.1, - type=float, - help="Low pass frequency (Hz)" + defstr) - parser.add_argument( - "-u", - "--highpass_freq", - dest="highpass_freq", - default=0.01, - type=float, - help="High pass frequency (Hz)" + defstr) - parser.add_argument( - "-o", - "--output_dir", - dest="sink", - help="Output directory base", - required=True) - parser.add_argument( - "-w", "--work_dir", dest="work_dir", help="Output directory base") - parser.add_argument( - "-p", - "--plugin", - dest="plugin", - default='Linear', - help="Plugin to use") - parser.add_argument( - "--plugin_args", dest="plugin_args", help="Plugin arguments") - args = parser.parse_args() - - wf = create_resting_workflow(args) - - if args.work_dir: - work_dir = os.path.abspath(args.work_dir) - else: - work_dir = os.getcwd() - - wf.base_dir = work_dir - if args.plugin_args: - wf.run(args.plugin, plugin_args=eval(args.plugin_args)) - else: - wf.run(args.plugin) diff --git a/examples/rsfmri_vol_surface_preprocessing_nipy.py b/examples/rsfmri_vol_surface_preprocessing_nipy.py deleted file mode 100644 index d3d9887cc6..0000000000 --- a/examples/rsfmri_vol_surface_preprocessing_nipy.py +++ /dev/null @@ -1,1084 +0,0 @@ -#!/usr/bin/env python -""" -===================================== -rsfMRI: ANTS, FS, FSL, NiPy, aCompCor -===================================== - - -A preprocessing workflow for Siemens resting state data. - -This workflow makes use of: - -- ANTS -- FreeSurfer -- FSL -- NiPy -- CompCor - -For example:: - - python rsfmri_preprocessing.py -d /data/12345-34-1.dcm -f /data/Resting.nii - -s subj001 -o output -p PBS --plugin_args "dict(qsub_args='-q many')" - -or:: - - python rsfmri_vol_surface_preprocessing.py -f SUB_1024011/E?/func/rest.nii - -t OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz --TR 2 -s SUB_1024011 - --subjects_dir fsdata --slice_times 0 17 1 18 2 19 3 20 4 21 5 22 6 23 - 7 24 8 25 9 26 10 27 11 28 12 29 13 30 14 31 15 32 16 -o . - -This workflow takes resting timeseries and a Siemens dicom file corresponding -to it and preprocesses it to produce timeseries coordinates or grayordinates. - -For non-Siemens dicoms, provide slice times instead, since the dicom extractor is not guaranteed to work. - -This workflow also requires 2mm subcortical atlas and templates that are -available from: - -http://mindboggle.info/data.html - -specifically the 2mm versions of: - - * `Joint Fusion Atlas `_ - * `MNI template `_ - -Import necessary modules from nipype. 
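-
-As a quick sanity check of the band-pass index arithmetic used in
-``bandpass_filter`` below (the TR and run length here are hypothetical): with
-TR = 2 s, fs = 1 / TR = 0.5 Hz and 200 volumes, a 0.01-0.1 Hz band keeps FFT
-bins round(0.01 / 0.5 * 200) = 4 through round(0.1 / 0.5 * 200) = 40, mirrored
-onto the negative frequencies::
-
-    import numpy as np
-
-    fs, timepoints = 0.5, 200
-    highidx = int(np.round(0.01 / fs * timepoints))  # -> 4
-    lowidx = int(np.round(0.1 / fs * timepoints))    # -> 40
-    F = np.zeros(timepoints)
-    F[highidx:lowidx] = 1                # positive-frequency passband
-    F = ((F + F[::-1]) > 0).astype(int)  # mirror onto negative frequencies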
-"""
-
-from __future__ import division, unicode_literals
-from builtins import open, range, str
-
-import os
-
-from nipype.interfaces.base import CommandLine
-CommandLine.set_default_terminal_output('allatonce')
-
-# https://github.com/moloney/dcmstack
-from dcmstack.extract import default_extractor
-# pip install pydicom
-from dicom import read_file
-
-from nipype.interfaces import (fsl, Function, ants, freesurfer, nipy)
-from nipype.interfaces.c3 import C3dAffineTool
-
-fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
-
-from nipype import Workflow, Node, MapNode
-
-from nipype.algorithms.rapidart import ArtifactDetect
-from nipype.algorithms.misc import TSNR, CalculateMedian
-from nipype.algorithms.confounds import ACompCor
-from nipype.interfaces.utility import Rename, Merge, IdentityInterface
-from nipype.utils.filemanip import filename_to_list
-from nipype.interfaces.io import DataSink, FreeSurferSource
-import nipype.interfaces.freesurfer as fs
-
-import numpy as np
-import scipy as sp
-import nibabel as nb
-from nipype.utils.config import NUMPY_MMAP
-
-"""
-A list of modules and functions to import inside of nodes
-"""
-
-imports = [
-    'import os',
-    'import nibabel as nb',
-    'import numpy as np',
-    'import scipy as sp',
-    'from nipype.utils.filemanip import filename_to_list, list_to_filename, split_filename',
-    'from scipy.special import legendre'
-]
-
-"""
-Define utility functions for use in workflow nodes
-"""
-
-
-def get_info(dicom_files):
-    """Given a Siemens dicom file return metadata
-
-    Returns
-    -------
-    RepetitionTime
-    Slice Acquisition Times
-    Spacing between slices
-    """
-    meta = default_extractor(
-        read_file(
-            filename_to_list(dicom_files)[0],
-            stop_before_pixels=True,
-            force=True))
-    return (meta['RepetitionTime'] / 1000., meta['CsaImage.MosaicRefAcqTimes'],
-            meta['SpacingBetweenSlices'])
-
-
-def median(in_files):
-    """Computes an average of the median of each realigned timeseries
-
-    Parameters
-    ----------
-
-    in_files: one or more realigned Nifti 4D time series
-
-    Returns
-    -------
-
-    out_file: a 3D Nifti file
-    """
-    average = None
-    for idx, filename in enumerate(filename_to_list(in_files)):
-        img = nb.load(filename, mmap=NUMPY_MMAP)
-        data = np.median(img.get_data(), axis=3)
-        if average is None:
-            average = data
-        else:
-            average = average + data
-    median_img = nb.Nifti1Image(average / float(idx + 1), img.affine,
-                                img.header)
-    filename = os.path.join(os.getcwd(), 'median.nii.gz')
-    median_img.to_filename(filename)
-    return filename
-
-
-def bandpass_filter(files, lowpass_freq, highpass_freq, fs):
-    """Bandpass filter the input files
-
-    Parameters
-    ----------
-    files: list of 4d nifti files
-    lowpass_freq: cutoff frequency for the low pass filter (in Hz)
-    highpass_freq: cutoff frequency for the high pass filter (in Hz)
-    fs: sampling rate (in Hz)
-    """
-    out_files = []
-    for filename in filename_to_list(files):
-        path, name, ext = split_filename(filename)
-        out_file = os.path.join(os.getcwd(), name + '_bp' + ext)
-        img = nb.load(filename, mmap=NUMPY_MMAP)
-        timepoints = img.shape[-1]
-        F = np.zeros((timepoints))
-        lowidx = int(timepoints / 2) + 1
-        if lowpass_freq > 0:
-            lowidx = int(np.round(float(lowpass_freq) / fs * timepoints))
-        highidx = 0
-        if highpass_freq > 0:
-            highidx = int(np.round(float(highpass_freq) / fs * timepoints))
-        F[highidx:lowidx] = 1
-        F = ((F + F[::-1]) > 0).astype(int)
-        data = img.get_data()
-        if np.all(F == 1):
-            filtered_data = data
-        else:
-            filtered_data = np.real(np.fft.ifftn(np.fft.fftn(data) * F))
-        img_out = nb.Nifti1Image(filtered_data, img.affine, img.header)
-        img_out.to_filename(out_file)
-        out_files.append(out_file)
-    return list_to_filename(out_files)
-
-
-def motion_regressors(motion_params, order=0, derivatives=1):
-    """Compute motion regressors up to the given order and derivative
-
-    motion + d(motion)/dt + d2(motion)/dt2 (linear + quadratic)
-    """
-    out_files = []
-    for idx, filename in enumerate(filename_to_list(motion_params)):
-        params = np.genfromtxt(filename)
-        out_params = params
-        for d in range(1, derivatives + 1):
-            cparams = np.vstack((np.repeat(params[0, :][None, :], d, axis=0),
-                                 params))
-            out_params = np.hstack((out_params, np.diff(cparams, d, axis=0)))
-        out_params2 = out_params
-        for i in range(2, order + 1):
-            out_params2 = np.hstack((out_params2, np.power(out_params, i)))
-        filename = os.path.join(os.getcwd(), "motion_regressor%02d.txt" % idx)
-        np.savetxt(filename, out_params2, fmt=b"%.10f")
-        out_files.append(filename)
-    return out_files
-
-
-def build_filter1(motion_params, comp_norm, outliers, detrend_poly=None):
-    """Builds a regressor set comprising motion parameters, composite norm and
-    outliers
-
-    The outliers are added as a single time point column for each outlier
-
-
-    Parameters
-    ----------
-
-    motion_params: a text file containing motion parameters and their derivatives
-    comp_norm: a text file containing the composite norm
-    outliers: a text file containing 0-based outlier indices
-    detrend_poly: number of polynomials to add to detrend
-
-    Returns
-    -------
-    components_file: a text file containing all the regressors
-    """
-    out_files = []
-    for idx, filename in enumerate(filename_to_list(motion_params)):
-        params = np.genfromtxt(filename)
-        norm_val = np.genfromtxt(filename_to_list(comp_norm)[idx])
-        out_params = np.hstack((params, norm_val[:, None]))
-        try:
-            outlier_val = np.genfromtxt(filename_to_list(outliers)[idx])
-        except IOError:
-            outlier_val = np.empty((0))
-        for index in np.atleast_1d(outlier_val):
-            outlier_vector = np.zeros((out_params.shape[0], 1))
-            outlier_vector[int(index)] = 1
-            out_params = np.hstack((out_params, outlier_vector))
-        if detrend_poly:
-            timepoints = out_params.shape[0]
-            X = np.empty((timepoints, 0))
-            for i in range(detrend_poly):
-                X = np.hstack((X, legendre(i + 1)(np.linspace(
-                    -1, 1, timepoints))[:, None]))
-            out_params = np.hstack((out_params, X))
-        filename = os.path.join(os.getcwd(), "filter_regressor%02d.txt" % idx)
-        np.savetxt(filename, out_params, fmt=b"%.10f")
-        out_files.append(filename)
-    return out_files
-
-
-def rename(in_files, suffix=None):
-    from nipype.utils.filemanip import (filename_to_list, split_filename,
-                                        list_to_filename)
-    out_files = []
-    for idx, filename in enumerate(filename_to_list(in_files)):
-        _, name, ext = split_filename(filename)
-        if suffix is None:
-            out_files.append(name + ('_%03d' % idx) + ext)
-        else:
-            out_files.append(name + suffix + ext)
-    return list_to_filename(out_files)
-
-
-def get_aparc_aseg(files):
-    """Return the aparc+aseg.mgz file"""
-    for name in files:
-        if 'aparc+aseg.mgz' in name:
-            return name
-    raise ValueError('aparc+aseg.mgz not found')
-
-
-def extract_subrois(timeseries_file, label_file, indices):
-    """Extract voxel time courses for each subcortical roi index
-
-    Parameters
-    ----------
-
-    timeseries_file: a 4D Nifti file
-    label_file: a 3D file containing rois in the same space/size of the 4D file
-    indices: a list of indices for ROIs to extract.
- - Returns - ------- - out_file: a text file containing time courses for each voxel of each roi - The first four columns are: freesurfer index, i, j, k positions in the - label file - """ - img = nb.load(timeseries_file, mmap=NUMPY_MMAP) - data = img.get_data() - roiimg = nb.load(label_file, mmap=NUMPY_MMAP) - rois = roiimg.get_data() - prefix = split_filename(timeseries_file)[1] - out_ts_file = os.path.join(os.getcwd(), '%s_subcortical_ts.txt' % prefix) - with open(out_ts_file, 'wt') as fp: - for fsindex in indices: - ijk = np.nonzero(rois == fsindex) - ts = data[ijk] - for i0, row in enumerate(ts): - fp.write('%d,%d,%d,%d,' % ( - fsindex, ijk[0][i0], ijk[1][i0], - ijk[2][i0]) + ','.join(['%.10f' % val - for val in row]) + '\n') - return out_ts_file - - -def combine_hemi(left, right): - """Combine left and right hemisphere time series into a single text file - """ - lh_data = nb.load(left, mmap=NUMPY_MMAP).get_data() - rh_data = nb.load(right, mmap=NUMPY_MMAP).get_data() - - indices = np.vstack((1000000 + np.arange(0, lh_data.shape[0])[:, None], - 2000000 + np.arange(0, rh_data.shape[0])[:, None])) - all_data = np.hstack((indices, - np.vstack((lh_data.squeeze(), rh_data.squeeze())))) - filename = left.split('.')[1] + '_combined.txt' - np.savetxt( - filename, - all_data, - fmt=','.join(['%d'] + ['%.10f'] * (all_data.shape[1] - 1))) - return os.path.abspath(filename) - -""" -Create a Registration Workflow -""" - - -def create_reg_workflow(name='registration'): - """Create a FEAT preprocessing workflow together with freesurfer - - Parameters - ---------- - name : name of workflow (default: 'registration') - - Inputs: - - inputspec.source_files : files (filename or list of filenames to register) - inputspec.mean_image : reference image to use - inputspec.anatomical_image : anatomical image to coregister to - inputspec.target_image : registration target - - Outputs: - - outputspec.func2anat_transform : FLIRT transform - outputspec.anat2target_transform : FLIRT+FNIRT transform - outputspec.transformed_files : transformed files in target space - outputspec.transformed_mean : mean image in target space - - Example - ------- - See code below - """ - - register = Workflow(name=name) - - inputnode = Node( - interface=IdentityInterface(fields=[ - 'source_files', 'mean_image', 'subject_id', 'subjects_dir', - 'target_image' - ]), - name='inputspec') - - outputnode = Node( - interface=IdentityInterface(fields=[ - 'func2anat_transform', 'out_reg_file', 'anat2target_transform', - 'transforms', 'transformed_mean', 'segmentation_files', - 'anat2target', 'aparc', 'min_cost_file' - ]), - name='outputspec') - - # Get the subject's freesurfer source directory - fssource = Node(FreeSurferSource(), name='fssource') - fssource.run_without_submitting = True - register.connect(inputnode, 'subject_id', fssource, 'subject_id') - register.connect(inputnode, 'subjects_dir', fssource, 'subjects_dir') - - convert = Node(freesurfer.MRIConvert(out_type='nii'), name="convert") - register.connect(fssource, 'T1', convert, 'in_file') - - # Coregister the median to the surface - bbregister = Node(freesurfer.BBRegister(), name='bbregister') - bbregister.inputs.init = 'fsl' - bbregister.inputs.contrast_type = 't2' - bbregister.inputs.out_fsl_file = True - bbregister.inputs.epi_mask = True - register.connect(inputnode, 'subject_id', bbregister, 'subject_id') - register.connect(inputnode, 'mean_image', bbregister, 'source_file') - register.connect(inputnode, 'subjects_dir', bbregister, 'subjects_dir') - """ - Estimate the tissue 
classes from the anatomical image. But use aparc+aseg's brain mask - """ - - binarize = Node( - fs.Binarize(min=0.5, out_type="nii.gz", dilate=1), - name="binarize_aparc") - register.connect(fssource, ("aparc_aseg", get_aparc_aseg), binarize, - "in_file") - stripper = Node(fsl.ApplyMask(), name='stripper') - register.connect(binarize, "binary_file", stripper, "mask_file") - register.connect(convert, 'out_file', stripper, 'in_file') - - fast = Node(fsl.FAST(), name='fast') - register.connect(stripper, 'out_file', fast, 'in_files') - """ - Binarize the segmentation - """ - - binarize = MapNode( - fsl.ImageMaths(op_string='-nan -thr 0.9 -ero -bin'), - iterfield=['in_file'], - name='binarize') - register.connect(fast, 'partial_volume_files', binarize, 'in_file') - """ - Apply inverse transform to take segmentations to functional space - """ - - applyxfm = MapNode( - freesurfer.ApplyVolTransform(inverse=True, interp='nearest'), - iterfield=['target_file'], - name='inverse_transform') - register.connect(inputnode, 'subjects_dir', applyxfm, 'subjects_dir') - register.connect(bbregister, 'out_reg_file', applyxfm, 'reg_file') - register.connect(binarize, 'out_file', applyxfm, 'target_file') - register.connect(inputnode, 'mean_image', applyxfm, 'source_file') - """ - Apply inverse transform to aparc file - """ - - aparcxfm = Node( - freesurfer.ApplyVolTransform(inverse=True, interp='nearest'), - name='aparc_inverse_transform') - register.connect(inputnode, 'subjects_dir', aparcxfm, 'subjects_dir') - register.connect(bbregister, 'out_reg_file', aparcxfm, 'reg_file') - register.connect(fssource, ('aparc_aseg', get_aparc_aseg), aparcxfm, - 'target_file') - register.connect(inputnode, 'mean_image', aparcxfm, 'source_file') - """ - Convert the BBRegister transformation to ANTS ITK format - """ - - convert2itk = Node(C3dAffineTool(), name='convert2itk') - convert2itk.inputs.fsl2ras = True - convert2itk.inputs.itk_transform = True - register.connect(bbregister, 'out_fsl_file', convert2itk, 'transform_file') - register.connect(inputnode, 'mean_image', convert2itk, 'source_file') - register.connect(stripper, 'out_file', convert2itk, 'reference_file') - """ - Compute registration between the subject's structural and MNI template - - * All parameters are set using the example from: - #https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh - * This is currently set to perform a very quick registration. However, - the registration can be made significantly more accurate for cortical - structures by increasing the number of iterations. 
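The quick-versus-accurate trade-off mentioned here is governed mainly by
``number_of_iterations`` and ``shrink_factors`` on the ``antsRegister`` node
defined just below. A denser, slower schedule could be dropped in like this
(illustrative values only, not the settings this workflow ships with)::

    # Longer iteration schedules, especially for the nonlinear (SyN)
    # stage, improve cortical alignment at the cost of runtime.
    reg.inputs.number_of_iterations = [[1000, 500, 250]] * 2 + \
        [[100, 70, 50]]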
- """ - - reg = Node(ants.Registration(), name='antsRegister') - reg.inputs.output_transform_prefix = "output_" - reg.inputs.transforms = ['Rigid', 'Affine', 'SyN'] - reg.inputs.transform_parameters = [(0.1, ), (0.1, ), (0.2, 3.0, 0.0)] - reg.inputs.number_of_iterations = [[10000, 11110, 11110]] * 2 + [[ - 100, 30, 20 - ]] - reg.inputs.dimension = 3 - reg.inputs.write_composite_transform = True - reg.inputs.collapse_output_transforms = True - reg.inputs.initial_moving_transform_com = True - reg.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']] - reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]] - reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]] - reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]] - reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]] - reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01] - reg.inputs.convergence_window_size = [20] * 2 + [5] - reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]] - reg.inputs.sigma_units = ['vox'] * 3 - reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]] - reg.inputs.use_estimate_learning_rate_once = [True] * 3 - reg.inputs.use_histogram_matching = [False] * 2 + [True] - reg.inputs.winsorize_lower_quantile = 0.005 - reg.inputs.winsorize_upper_quantile = 0.995 - reg.inputs.float = True - reg.inputs.output_warped_image = 'output_warped_image.nii.gz' - reg.inputs.num_threads = 4 - reg.plugin_args = {'sbatch_args': '-c%d' % 4} - register.connect(stripper, 'out_file', reg, 'moving_image') - register.connect(inputnode, 'target_image', reg, 'fixed_image') - - """ - Concatenate the affine and ants transforms into a list - """ - - merge = Node(Merge(2), iterfield=['in2'], name='mergexfm') - register.connect(convert2itk, 'itk_transform', merge, 'in2') - register.connect(reg, ('composite_transform', pickfirst), merge, 'in1') - - """ - Transform the mean image. 
First to anatomical and then to target - """ - - warpmean = Node(ants.ApplyTransforms(), name='warpmean') - warpmean.inputs.input_image_type = 3 - warpmean.inputs.interpolation = 'Linear' - warpmean.inputs.invert_transform_flags = [False, False] - warpmean.terminal_output = 'file' - warpmean.inputs.args = '--float' - warpmean.inputs.num_threads = 4 - warpmean.plugin_args = {'sbatch_args': '-c%d' % 4} - - register.connect(inputnode, 'target_image', warpmean, 'reference_image') - register.connect(inputnode, 'mean_image', warpmean, 'input_image') - register.connect(merge, 'out', warpmean, 'transforms') - - """ - Assign all the output files - """ - - register.connect(reg, 'warped_image', outputnode, 'anat2target') - register.connect(warpmean, 'output_image', outputnode, 'transformed_mean') - register.connect(applyxfm, 'transformed_file', outputnode, - 'segmentation_files') - register.connect(aparcxfm, 'transformed_file', outputnode, 'aparc') - register.connect(bbregister, 'out_fsl_file', outputnode, - 'func2anat_transform') - register.connect(bbregister, 'out_reg_file', outputnode, 'out_reg_file') - register.connect(reg, 'composite_transform', outputnode, - 'anat2target_transform') - register.connect(merge, 'out', outputnode, 'transforms') - register.connect(bbregister, 'min_cost_file', outputnode, 'min_cost_file') - - return register - -""" -Creates the main preprocessing workflow -""" - - -def create_workflow(files, - target_file, - subject_id, - TR, - slice_times, - norm_threshold=1, - num_components=5, - vol_fwhm=None, - surf_fwhm=None, - lowpass_freq=-1, - highpass_freq=-1, - subjects_dir=None, - sink_directory=os.getcwd(), - target_subject=['fsaverage3', 'fsaverage4'], - name='resting'): - - wf = Workflow(name=name) - - # Rename files in case they are named identically - name_unique = MapNode( - Rename(format_string='rest_%(run)02d'), - iterfield=['in_file', 'run'], - name='rename') - name_unique.inputs.keep_ext = True - name_unique.inputs.run = list(range(1, len(files) + 1)) - name_unique.inputs.in_file = files - - realign = Node(nipy.SpaceTimeRealigner(), name="spacetime_realign") - realign.inputs.slice_times = slice_times - realign.inputs.tr = TR - realign.inputs.slice_info = 2 - realign.plugin_args = {'sbatch_args': '-c%d' % 4} - - # Compute TSNR on realigned data regressing polynomials up to order 2 - tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr') - wf.connect(realign, "out_file", tsnr, "in_file") - - # Compute the median image across runs - calc_median = Node(CalculateMedian(), name='median') - wf.connect(tsnr, 'detrended_file', calc_median, 'in_files') - - """ - Segment and Register - """ - - registration = create_reg_workflow(name='registration') - wf.connect(calc_median, 'median_file', registration, - 'inputspec.mean_image') - registration.inputs.inputspec.subject_id = subject_id - registration.inputs.inputspec.subjects_dir = subjects_dir - registration.inputs.inputspec.target_image = target_file - - """Quantify TSNR in each freesurfer ROI - """ - - get_roi_tsnr = MapNode( - fs.SegStats(default_color_table=True), - iterfield=['in_file'], - name='get_aparc_tsnr') - get_roi_tsnr.inputs.avgwf_txt_file = True - wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file') - wf.connect(registration, 'outputspec.aparc', get_roi_tsnr, - 'segmentation_file') - - """Use :class:`nipype.algorithms.rapidart` to determine which of the - images in the functional series are outliers based on deviations in - intensity or movement. 
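The intuition behind the norm-based outlier rule fits in a few lines of
NumPy. This is a simplification: rapidart's composite norm combines
translations and rotations through a rigid-body model rather than a plain
Euclidean difference, and the parameter file name here is hypothetical::

    import numpy as np

    params = np.loadtxt('rest_01.par')  # one row per volume, 6 columns
    # Frame-to-frame displacement, thresholded as in the node below
    # (norm_threshold=1).
    frame_diff = np.sqrt((np.diff(params, axis=0) ** 2).sum(axis=1))
    outliers = np.nonzero(frame_diff > 1.0)[0]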
- """ - - art = Node(interface=ArtifactDetect(), name="art") - art.inputs.use_differences = [True, True] - art.inputs.use_norm = True - art.inputs.norm_threshold = norm_threshold - art.inputs.zintensity_threshold = 9 - art.inputs.mask_type = 'spm_global' - art.inputs.parameter_source = 'NiPy' - - """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose - to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal - voxel sizes. - """ - - wf.connect([ - (name_unique, realign, [('out_file', 'in_file')]), - (realign, art, [('out_file', 'realigned_files')]), - (realign, art, [('par_file', 'realignment_parameters')]), - ]) - - def selectindex(files, idx): - import numpy as np - from nipype.utils.filemanip import filename_to_list, list_to_filename - return list_to_filename( - np.array(filename_to_list(files))[idx].tolist()) - - mask = Node(fsl.BET(), name='getmask') - mask.inputs.mask = True - wf.connect(calc_median, 'median_file', mask, 'in_file') - - # get segmentation in normalized functional space - - def merge_files(in1, in2): - out_files = filename_to_list(in1) - out_files.extend(filename_to_list(in2)) - return out_files - - # filter some noise - - # Compute motion regressors - motreg = Node( - Function( - input_names=['motion_params', 'order', 'derivatives'], - output_names=['out_files'], - function=motion_regressors, - imports=imports), - name='getmotionregress') - wf.connect(realign, 'par_file', motreg, 'motion_params') - - # Create a filter to remove motion and art confounds - createfilter1 = Node( - Function( - input_names=[ - 'motion_params', 'comp_norm', 'outliers', 'detrend_poly' - ], - output_names=['out_files'], - function=build_filter1, - imports=imports), - name='makemotionbasedfilter') - createfilter1.inputs.detrend_poly = 2 - wf.connect(motreg, 'out_files', createfilter1, 'motion_params') - wf.connect(art, 'norm_files', createfilter1, 'comp_norm') - wf.connect(art, 'outlier_files', createfilter1, 'outliers') - - filter1 = MapNode( - fsl.GLM( - out_f_name='F_mcart.nii.gz', - out_pf_name='pF_mcart.nii.gz', - demean=True), - iterfield=['in_file', 'design', 'out_res_name'], - name='filtermotion') - - wf.connect(realign, 'out_file', filter1, 'in_file') - wf.connect(realign, ('out_file', rename, '_filtermotart'), filter1, - 'out_res_name') - wf.connect(createfilter1, 'out_files', filter1, 'design') - - createfilter2 = MapNode( - ACompCor(), - iterfield=['realigned_file', 'extra_regressors'], - name='makecompcorrfilter') - createfilter2.inputs.components_file = 'noise_components.txt' - createfilter2.inputs.num_components = num_components - - wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors') - wf.connect(filter1, 'out_res', createfilter2, 'realigned_file') - wf.connect(registration, - ('outputspec.segmentation_files', selectindex, [0, 2]), - createfilter2, 'mask_file') - - filter2 = MapNode( - fsl.GLM(out_f_name='F.nii.gz', out_pf_name='pF.nii.gz', demean=True), - iterfield=['in_file', 'design', 'out_res_name'], - name='filter_noise_nosmooth') - wf.connect(filter1, 'out_res', filter2, 'in_file') - wf.connect(filter1, ('out_res', rename, '_cleaned'), filter2, - 'out_res_name') - wf.connect(createfilter2, 'components_file', filter2, 'design') - wf.connect(mask, 'mask_file', filter2, 'mask') - - bandpass = Node( - Function( - input_names=['files', 'lowpass_freq', 'highpass_freq', 'fs'], - output_names=['out_files'], - function=bandpass_filter, - imports=imports), - 
name='bandpass_unsmooth') - bandpass.inputs.fs = 1. / TR - bandpass.inputs.highpass_freq = highpass_freq - bandpass.inputs.lowpass_freq = lowpass_freq - wf.connect(filter2, 'out_res', bandpass, 'files') - """Smooth the functional data using - :class:`nipype.interfaces.fsl.IsotropicSmooth`. - """ - - smooth = MapNode( - interface=fsl.IsotropicSmooth(), name="smooth", iterfield=["in_file"]) - smooth.inputs.fwhm = vol_fwhm - - wf.connect(bandpass, 'out_files', smooth, 'in_file') - - collector = Node(Merge(2), name='collect_streams') - wf.connect(smooth, 'out_file', collector, 'in1') - wf.connect(bandpass, 'out_files', collector, 'in2') - """ - Transform the remaining images. First to anatomical and then to target - """ - - warpall = MapNode( - ants.ApplyTransforms(), iterfield=['input_image'], name='warpall') - warpall.inputs.input_image_type = 3 - warpall.inputs.interpolation = 'Linear' - warpall.inputs.invert_transform_flags = [False, False] - warpall.terminal_output = 'file' - warpall.inputs.reference_image = target_file - warpall.inputs.args = '--float' - warpall.inputs.num_threads = 2 - warpall.plugin_args = {'sbatch_args': '-c%d' % 2} - - # transform to target - wf.connect(collector, 'out', warpall, 'input_image') - wf.connect(registration, 'outputspec.transforms', warpall, 'transforms') - - mask_target = Node(fsl.ImageMaths(op_string='-bin'), name='target_mask') - - wf.connect(registration, 'outputspec.anat2target', mask_target, 'in_file') - - maskts = MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='ts_masker') - wf.connect(warpall, 'output_image', maskts, 'in_file') - wf.connect(mask_target, 'out_file', maskts, 'mask_file') - - # map to surface - # extract aparc+aseg ROIs - # extract subcortical ROIs - # extract target space ROIs - # combine subcortical and cortical rois into a single cifti file - - ####### - # Convert aparc to subject functional space - - # Sample the average time series in aparc ROIs - sampleaparc = MapNode( - freesurfer.SegStats(default_color_table=True), - iterfield=['in_file', 'summary_file', 'avgwf_txt_file'], - name='aparc_ts') - sampleaparc.inputs.segment_id = ( - [8] + list(range(10, 14)) + [17, 18, 26, 47] + list(range(49, 55)) + - [58] + list(range(1001, 1036)) + list(range(2001, 2036))) - - wf.connect(registration, 'outputspec.aparc', sampleaparc, - 'segmentation_file') - wf.connect(collector, 'out', sampleaparc, 'in_file') - - def get_names(files, suffix): - """Generate appropriate names for output files - """ - from nipype.utils.filemanip import (split_filename, filename_to_list, - list_to_filename) - import os - out_names = [] - for filename in files: - path, name, _ = split_filename(filename) - out_names.append(os.path.join(path, name + suffix)) - return list_to_filename(out_names) - - wf.connect(collector, ('out', get_names, '_avgwf.txt'), sampleaparc, - 'avgwf_txt_file') - wf.connect(collector, ('out', get_names, '_summary.stats'), sampleaparc, - 'summary_file') - - # Sample the time series onto the surface of the target surface. 
Performs - # sampling into left and right hemisphere - target = Node(IdentityInterface(fields=['target_subject']), name='target') - target.iterables = ('target_subject', filename_to_list(target_subject)) - - samplerlh = MapNode( - freesurfer.SampleToSurface(), - iterfield=['source_file'], - name='sampler_lh') - samplerlh.inputs.sampling_method = "average" - samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1) - samplerlh.inputs.sampling_units = "frac" - samplerlh.inputs.interp_method = "trilinear" - samplerlh.inputs.smooth_surf = surf_fwhm - # samplerlh.inputs.cortex_mask = True - samplerlh.inputs.out_type = 'niigz' - samplerlh.inputs.subjects_dir = subjects_dir - - samplerrh = samplerlh.clone('sampler_rh') - - samplerlh.inputs.hemi = 'lh' - wf.connect(collector, 'out', samplerlh, 'source_file') - wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file') - wf.connect(target, 'target_subject', samplerlh, 'target_subject') - - samplerrh.set_input('hemi', 'rh') - wf.connect(collector, 'out', samplerrh, 'source_file') - wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file') - wf.connect(target, 'target_subject', samplerrh, 'target_subject') - - # Combine left and right hemisphere to text file - combiner = MapNode( - Function( - input_names=['left', 'right'], - output_names=['out_file'], - function=combine_hemi, - imports=imports), - iterfield=['left', 'right'], - name="combiner") - wf.connect(samplerlh, 'out_file', combiner, 'left') - wf.connect(samplerrh, 'out_file', combiner, 'right') - - # Sample the time series file for each subcortical roi - ts2txt = MapNode( - Function( - input_names=['timeseries_file', 'label_file', 'indices'], - output_names=['out_file'], - function=extract_subrois, - imports=imports), - iterfield=['timeseries_file'], - name='getsubcortts') - ts2txt.inputs.indices = [8] + list(range(10, 14)) + [17, 18, 26, 47] +\ - list(range(49, 55)) + [58] - ts2txt.inputs.label_file = \ - os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_' - '2mm_v2.nii.gz')) - wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file') - - ###### - - substitutions = [ - ('_target_subject_', ''), - ('_filtermotart_cleaned_bp_trans_masked', ''), - ('_filtermotart_cleaned_bp', ''), - ] - substitutions += [("_smooth%d" % i, "") for i in range(11)[::-1]] - substitutions += [("_ts_masker%d" % i, "") for i in range(11)[::-1]] - substitutions += [("_getsubcortts%d" % i, "") for i in range(11)[::-1]] - substitutions += [("_combiner%d" % i, "") for i in range(11)[::-1]] - substitutions += [("_filtermotion%d" % i, "") for i in range(11)[::-1]] - substitutions += [("_filter_noise_nosmooth%d" % i, "") - for i in range(11)[::-1]] - substitutions += [("_makecompcorfilter%d" % i, "") - for i in range(11)[::-1]] - substitutions += [("_get_aparc_tsnr%d/" % i, "run%d_" % (i + 1)) - for i in range(11)[::-1]] - - substitutions += [("T1_out_brain_pve_0_maths_warped", "compcor_csf"), - ("T1_out_brain_pve_1_maths_warped", - "compcor_gm"), ("T1_out_brain_pve_2_maths_warped", - "compcor_wm"), - ("output_warped_image_maths", - "target_brain_mask"), ("median_brain_mask", - "native_brain_mask"), ("corr_", - "")] - - regex_subs = [ - ('_combiner.*/sar', '/smooth/'), - ('_combiner.*/ar', '/unsmooth/'), - ('_aparc_ts.*/sar', '/smooth/'), - ('_aparc_ts.*/ar', '/unsmooth/'), - ('_getsubcortts.*/sar', '/smooth/'), - ('_getsubcortts.*/ar', '/unsmooth/'), - ('series/sar', 'series/smooth/'), - ('series/ar', 'series/unsmooth/'), - ('_inverse_transform./', ''), - ] - # Save the relevant 
data into an output directory - datasink = Node(interface=DataSink(), name="datasink") - datasink.inputs.base_directory = sink_directory - datasink.inputs.container = subject_id - datasink.inputs.substitutions = substitutions - datasink.inputs.regexp_substitutions = regex_subs # (r'(/_.*(\d+/))', r'/run\2') - wf.connect(realign, 'par_file', datasink, 'resting.qa.motion') - wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm') - wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity') - wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files') - wf.connect(registration, 'outputspec.segmentation_files', datasink, - 'resting.mask_files') - wf.connect(registration, 'outputspec.anat2target', datasink, - 'resting.qa.ants') - wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask') - wf.connect(mask_target, 'out_file', datasink, 'resting.mask_files.target') - wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F') - wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF') - wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps') - wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p') - wf.connect(registration, 'outputspec.min_cost_file', datasink, - 'resting.qa.mincost') - wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr.@map') - wf.connect([(get_roi_tsnr, datasink, - [('avgwf_txt_file', 'resting.qa.tsnr'), - ('summary_file', 'resting.qa.tsnr.@summary')])]) - - wf.connect(bandpass, 'out_files', datasink, - 'resting.timeseries.@bandpassed') - wf.connect(smooth, 'out_file', datasink, 'resting.timeseries.@smoothed') - wf.connect(createfilter1, 'out_files', datasink, - 'resting.regress.@regressors') - wf.connect(createfilter2, 'components_file', datasink, - 'resting.regress.@compcorr') - wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target') - wf.connect(sampleaparc, 'summary_file', datasink, - 'resting.parcellations.aparc') - wf.connect(sampleaparc, 'avgwf_txt_file', datasink, - 'resting.parcellations.aparc.@avgwf') - wf.connect(ts2txt, 'out_file', datasink, - 'resting.parcellations.grayo.@subcortical') - - datasink2 = Node(interface=DataSink(), name="datasink2") - datasink2.inputs.base_directory = sink_directory - datasink2.inputs.container = subject_id - datasink2.inputs.substitutions = substitutions - datasink2.inputs.regexp_substitutions = regex_subs # (r'(/_.*(\d+/))', r'/run\2') - wf.connect(combiner, 'out_file', datasink2, - 'resting.parcellations.grayo.@surface') - return wf - - -""" -Creates the full workflow including getting information from dicom files -""" - - -def create_resting_workflow(args, name=None): - TR = args.TR - slice_times = args.slice_times - if args.dicom_file: - TR, slice_times, slice_thickness = get_info(args.dicom_file) - slice_times = (np.array(slice_times) / 1000.).tolist() - - if name is None: - name = 'resting_' + args.subject_id - kwargs = dict( - files=[os.path.abspath(filename) for filename in args.files], - target_file=os.path.abspath(args.target_file), - subject_id=args.subject_id, - TR=TR, - slice_times=slice_times, - vol_fwhm=args.vol_fwhm, - surf_fwhm=args.surf_fwhm, - norm_threshold=2., - subjects_dir=os.path.abspath(args.fsdir), - target_subject=args.target_surfs, - lowpass_freq=args.lowpass_freq, - highpass_freq=args.highpass_freq, - sink_directory=os.path.abspath(args.sink), - name=name) - wf = create_workflow(**kwargs) - return wf - - -if __name__ == "__main__": - from argparse import ArgumentParser, RawTextHelpFormatter - 
defstr = ' (default %(default)s)' - parser = ArgumentParser( - description=__doc__, formatter_class=RawTextHelpFormatter) - parser.add_argument( - "-d", - "--dicom_file", - dest="dicom_file", - help="a SIEMENS example dicom file from the resting series") - parser.add_argument( - "-f", - "--files", - dest="files", - nargs="+", - help="4d nifti files for resting state", - required=True) - parser.add_argument( - "-t", - "--target", - dest="target_file", - help=("Target in MNI space. Best to use the MindBoggle " - "template - " - "OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz"), - required=True) - parser.add_argument( - "-s", - "--subject_id", - dest="subject_id", - help="FreeSurfer subject id", - required=True) - parser.add_argument( - "--subjects_dir", - dest="fsdir", - help="FreeSurfer subject directory", - required=True) - parser.add_argument( - "--target_surfaces", - dest="target_surfs", - nargs="+", - default=['fsaverage5'], - help="FreeSurfer target surfaces" + defstr) - parser.add_argument( - "--TR", - dest="TR", - default=None, - type=float, - help="TR if dicom not provided in seconds") - parser.add_argument( - "--slice_times", - dest="slice_times", - nargs="+", - type=float, - help="Slice onset times in seconds") - parser.add_argument( - '--vol_fwhm', - default=6., - dest='vol_fwhm', - type=float, - help="Spatial FWHM" + defstr) - parser.add_argument( - '--surf_fwhm', - default=15., - dest='surf_fwhm', - type=float, - help="Spatial FWHM" + defstr) - parser.add_argument( - "-l", - "--lowpass_freq", - dest="lowpass_freq", - default=0.1, - type=float, - help="Low pass frequency (Hz)" + defstr) - parser.add_argument( - "-u", - "--highpass_freq", - dest="highpass_freq", - default=0.01, - type=float, - help="High pass frequency (Hz)" + defstr) - parser.add_argument( - "-o", - "--output_dir", - dest="sink", - help="Output directory base", - required=True) - parser.add_argument( - "-w", "--work_dir", dest="work_dir", help="Output directory base") - parser.add_argument( - "-p", - "--plugin", - dest="plugin", - default='Linear', - help="Plugin to use") - parser.add_argument( - "--plugin_args", dest="plugin_args", help="Plugin arguments") - args = parser.parse_args() - - wf = create_resting_workflow(args) - - if args.work_dir: - work_dir = os.path.abspath(args.work_dir) - else: - work_dir = os.getcwd() - - wf.base_dir = work_dir - if args.plugin_args: - wf.run(args.plugin, plugin_args=eval(args.plugin_args)) - else: - wf.run(args.plugin) diff --git a/examples/smri_ants_build_template.py b/examples/smri_ants_build_template.py deleted file mode 100644 index a75c0f6783..0000000000 --- a/examples/smri_ants_build_template.py +++ /dev/null @@ -1,152 +0,0 @@ -#!/usr/bin/env python -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -""" -=============================================== -sMRI: Using new ANTS for creating a T1 template -=============================================== - -In this tutorial we will use ANTS (old version aka "ANTS") based workflow to -create a template out of multiple T1 volumes. - -1. Tell python where to find the appropriate functions. 
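Step 2 below repeats a download-and-cache loop that several of these
examples share. Factored into a helper it reduces to a few lines
(``fetch`` is a hypothetical name, not part of the original script)::

    import os
    import urllib.request

    def fetch(url, dest):
        """Download ``url`` to ``dest`` unless a local copy exists."""
        if not os.path.exists(dest):
            with urllib.request.urlopen(url) as remote:
                with open(dest, 'wb') as local:
                    local.write(remote.read())
        return dest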
-""" - -from __future__ import print_function, unicode_literals -from builtins import open -from future import standard_library -standard_library.install_aliases() - -import os -import nipype.interfaces.utility as util -import nipype.interfaces.ants as ants -import nipype.interfaces.io as io -import nipype.pipeline.engine as pe # pypeline engine - -from nipype.workflows.smri.ants import ANTSTemplateBuildSingleIterationWF -""" -2. Download T1 volumes into home directory -""" - -import urllib.request -import urllib.error -import urllib.parse -homeDir = os.getenv("HOME") -requestedPath = os.path.join(homeDir, 'nipypeTestPath') -mydatadir = os.path.realpath(requestedPath) -if not os.path.exists(mydatadir): - os.makedirs(mydatadir) -print(mydatadir) - -MyFileURLs = [ - ('http://slicer.kitware.com/midas3/download?bitstream=13121', - '01_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13122', - '02_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13124', - '03_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13128', - '01_T1_inv_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13123', - '02_T1_inv_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13125', - '03_T1_inv_half.nii.gz'), -] -for tt in MyFileURLs: - myURL = tt[0] - localFilename = os.path.join(mydatadir, tt[1]) - if not os.path.exists(localFilename): - remotefile = urllib.request.urlopen(myURL) - - localFile = open(localFilename, 'wb') - localFile.write(remotefile.read()) - localFile.close() - print("Downloaded file: {0}".format(localFilename)) - else: - print("File previously downloaded {0}".format(localFilename)) - -input_images = [ - os.path.join(mydatadir, '01_T1_half.nii.gz'), - os.path.join(mydatadir, '02_T1_half.nii.gz'), - os.path.join(mydatadir, '03_T1_half.nii.gz') -] -input_passive_images = [{ - 'INV_T1': - os.path.join(mydatadir, '01_T1_inv_half.nii.gz') -}, { - 'INV_T1': - os.path.join(mydatadir, '02_T1_inv_half.nii.gz') -}, { - 'INV_T1': - os.path.join(mydatadir, '03_T1_inv_half.nii.gz') -}] -""" -3. Define the workflow and its working directory -""" - -tbuilder = pe.Workflow(name="ANTSTemplateBuilder") -tbuilder.base_dir = requestedPath -""" -4. Define data sources. In real life these would be replace by DataGrabbers -""" - -datasource = pe.Node( - interface=util.IdentityInterface( - fields=['imageList', 'passiveImagesDictionariesList']), - run_without_submitting=True, - name='InputImages') -datasource.inputs.imageList = input_images -datasource.inputs.passiveImagesDictionariesList = input_passive_images -datasource.inputs.sort_filelist = True -""" -5. Template is initialized by a simple average -""" - -initAvg = pe.Node(interface=ants.AverageImages(), name='initAvg') -initAvg.inputs.dimension = 3 -initAvg.inputs.normalize = True - -tbuilder.connect(datasource, "imageList", initAvg, "images") -""" -6. Define the first iteration of template building -""" - -buildTemplateIteration1 = ANTSTemplateBuildSingleIterationWF('iteration01') -tbuilder.connect(initAvg, 'output_average_image', buildTemplateIteration1, - 'inputspec.fixed_image') -tbuilder.connect(datasource, 'imageList', buildTemplateIteration1, - 'inputspec.images') -tbuilder.connect(datasource, 'passiveImagesDictionariesList', - buildTemplateIteration1, - 'inputspec.ListOfPassiveImagesDictionaries') -""" -7. 
Define the second iteration of template building -""" - -buildTemplateIteration2 = ANTSTemplateBuildSingleIterationWF('iteration02') -tbuilder.connect(buildTemplateIteration1, 'outputspec.template', - buildTemplateIteration2, 'inputspec.fixed_image') -tbuilder.connect(datasource, 'imageList', buildTemplateIteration2, - 'inputspec.images') -tbuilder.connect(datasource, 'passiveImagesDictionariesList', - buildTemplateIteration2, - 'inputspec.ListOfPassiveImagesDictionaries') -""" -8. Move selected files to a designated results folder -""" - -datasink = pe.Node(io.DataSink(), name="datasink") -datasink.inputs.base_directory = os.path.join(requestedPath, "results") - -tbuilder.connect(buildTemplateIteration2, 'outputspec.template', datasink, - 'PrimaryTemplate') -tbuilder.connect(buildTemplateIteration2, - 'outputspec.passive_deformed_templates', datasink, - 'PassiveTemplate') -tbuilder.connect(initAvg, 'output_average_image', datasink, - 'PreRegisterAverage') -""" -8. Run the workflow -""" - -tbuilder.run() diff --git a/examples/smri_ants_registration.py b/examples/smri_ants_registration.py deleted file mode 100644 index e7050b05b7..0000000000 --- a/examples/smri_ants_registration.py +++ /dev/null @@ -1,109 +0,0 @@ -#!/usr/bin/env python -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -""" -================================== -sMRI: Using ANTS for registration -================================== - -In this simple tutorial we will use the Registration interface from ANTS to -coregister two T1 volumes. - -1. Tell python where to find the appropriate functions. - -""" - -from __future__ import print_function, unicode_literals -from builtins import open - -from future import standard_library -standard_library.install_aliases() - -import os -import urllib.request -import urllib.error -import urllib.parse -from nipype.interfaces.ants import Registration -from nipype.testing import example_data -""" -2. Download T1 volumes into home directory - -""" - -homeDir = os.getenv("HOME") -requestedPath = os.path.join(homeDir, 'nipypeTestPath') -mydatadir = os.path.realpath(requestedPath) -if not os.path.exists(mydatadir): - os.makedirs(mydatadir) -print(mydatadir) - -MyFileURLs = [ - ('http://slicer.kitware.com/midas3/download?bitstream=13121', - '01_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13122', - '02_T1_half.nii.gz'), -] -for tt in MyFileURLs: - myURL = tt[0] - localFilename = os.path.join(mydatadir, tt[1]) - if not os.path.exists(localFilename): - remotefile = urllib.request.urlopen(myURL) - - localFile = open(localFilename, 'wb') - localFile.write(remotefile.read()) - localFile.close() - print("Downloaded file: {0}".format(localFilename)) - else: - print("File previously downloaded {0}".format(localFilename)) - -input_images = [ - os.path.join(mydatadir, '01_T1_half.nii.gz'), - os.path.join(mydatadir, '02_T1_half.nii.gz'), -] -""" -3. Define the parameters of the registration. Settings are -found in the file ``smri_ants_registration_settings.json`` -distributed with the ``example_data`` of `nipype`. 
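``from_file`` loads a JSON dictionary mapping interface input names to
values. A quick way to inspect which inputs the shipped settings file
defines (a sketch, assuming the file resolves through ``example_data``)::

    import json
    from nipype.testing import example_data

    with open(example_data('smri_ants_registration_settings.json')) as fp:
        settings = json.load(fp)
    print(sorted(settings))  # input names such as 'metric', 'transforms'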
- -""" - -reg = Registration( - from_file=example_data('smri_ants_registration_settings.json')) -reg.inputs.fixed_image = input_images[0] -reg.inputs.moving_image = input_images[1] -""" -Alternatively to the use of the ``from_file`` feature to load ANTs settings, -the user can manually set all those inputs instead:: - - reg.inputs.output_transform_prefix = 'thisTransform' - reg.inputs.output_warped_image = 'INTERNAL_WARPED.nii.gz' - reg.inputs.output_transform_prefix = "output_" - reg.inputs.transforms = ['Translation', 'Rigid', 'Affine', 'SyN'] - reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.1,), (0.2, 3.0, 0.0)] - reg.inputs.number_of_iterations = ([[10000, 111110, 11110]] * 3 + - [[100, 50, 30]]) - reg.inputs.dimension = 3 - reg.inputs.write_composite_transform = True - reg.inputs.collapse_output_transforms = False - reg.inputs.metric = ['Mattes'] * 3 + [['Mattes', 'CC']] - reg.inputs.metric_weight = [1] * 3 + [[0.5, 0.5]] - reg.inputs.radius_or_number_of_bins = [32] * 3 + [[32, 4]] - reg.inputs.sampling_strategy = ['Regular'] * 3 + [[None, None]] - reg.inputs.sampling_percentage = [0.3] * 3 + [[None, None]] - reg.inputs.convergence_threshold = [1.e-8] * 3 + [-0.01] - reg.inputs.convergence_window_size = [20] * 3 + [5] - reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 3 + [[1, 0.5, 0]] - reg.inputs.sigma_units = ['vox'] * 4 - reg.inputs.shrink_factors = [[6, 4, 2]] + [[3, 2, 1]] * 2 + [[4, 2, 1]] - reg.inputs.use_estimate_learning_rate_once = [True] * 4 - reg.inputs.use_histogram_matching = [False] * 3 + [True] - reg.inputs.initial_moving_transform_com = True - -""" - -print(reg.cmdline) -""" -3. Run the registration -""" - -reg.run() diff --git a/examples/smri_antsregistration_build_template.py b/examples/smri_antsregistration_build_template.py deleted file mode 100644 index ecc214265c..0000000000 --- a/examples/smri_antsregistration_build_template.py +++ /dev/null @@ -1,222 +0,0 @@ -#!/usr/bin/env python -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -""" -====================================================== -sMRI: Using new ANTS for creating a T1 template (ITK4) -====================================================== - -In this tutorial we will use ANTS (new ITK4 version aka "antsRegistration") based workflow to -create a template out of multiple T1 volumes. We will also showcase how to fine tune SGE jobs requirements. - -1. Tell python where to find the appropriate functions. -""" - -from __future__ import print_function -from future import standard_library -standard_library.install_aliases() - -import os -import nipype.interfaces.utility as util -import nipype.interfaces.ants as ants -import nipype.interfaces.io as io -import nipype.pipeline.engine as pe # pypeline engine - -from nipype.workflows.smri.ants import antsRegistrationTemplateBuildSingleIterationWF -""" -2. 
Download T1 volumes into home directory -""" - -import urllib.request -import urllib.error -import urllib.parse -homeDir = os.getenv("HOME") -requestedPath = os.path.join(homeDir, 'nipypeTestPath') -mydatadir = os.path.realpath(requestedPath) -if not os.path.exists(mydatadir): - os.makedirs(mydatadir) -print(mydatadir) - -MyFileURLs = [ - ('http://slicer.kitware.com/midas3/download?bitstream=13121', - '01_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13122', - '02_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13124', - '03_T1_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13128', - '01_T1_inv_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13123', - '02_T1_inv_half.nii.gz'), - ('http://slicer.kitware.com/midas3/download?bitstream=13125', - '03_T1_inv_half.nii.gz'), -] -for tt in MyFileURLs: - myURL = tt[0] - localFilename = os.path.join(mydatadir, tt[1]) - if not os.path.exists(localFilename): - remotefile = urllib.request.urlopen(myURL) - - localFile = open(localFilename, 'wb') - localFile.write(remotefile.read()) - localFile.close() - print("Downloaded file: {0}".format(localFilename)) - else: - print("File previously downloaded {0}".format(localFilename)) -""" -ListOfImagesDictionaries - a list of dictionaries where each dictionary is -for one scan session, and the mappings in the dictionary are for all the -co-aligned images for that one scan session -""" - -ListOfImagesDictionaries = [{ - 'T1': - os.path.join(mydatadir, '01_T1_half.nii.gz'), - 'INV_T1': - os.path.join(mydatadir, '01_T1_inv_half.nii.gz'), - 'LABEL_MAP': - os.path.join(mydatadir, '01_T1_inv_half.nii.gz') -}, { - 'T1': - os.path.join(mydatadir, '02_T1_half.nii.gz'), - 'INV_T1': - os.path.join(mydatadir, '02_T1_inv_half.nii.gz'), - 'LABEL_MAP': - os.path.join(mydatadir, '02_T1_inv_half.nii.gz') -}, { - 'T1': - os.path.join(mydatadir, '03_T1_half.nii.gz'), - 'INV_T1': - os.path.join(mydatadir, '03_T1_inv_half.nii.gz'), - 'LABEL_MAP': - os.path.join(mydatadir, '03_T1_inv_half.nii.gz') -}] -input_passive_images = [{ - 'INV_T1': - os.path.join(mydatadir, '01_T1_inv_half.nii.gz') -}, { - 'INV_T1': - os.path.join(mydatadir, '02_T1_inv_half.nii.gz') -}, { - 'INV_T1': - os.path.join(mydatadir, '03_T1_inv_half.nii.gz') -}] -""" -registrationImageTypes - A list of the image types to be used actively during -the estimation process of registration, any image type not in this list -will be passively resampled with the estimated transforms. -['T1','T2'] -""" - -registrationImageTypes = ['T1'] -""" -interpolationMap - A map of image types to interpolation modes. If an -image type is not listed, it will be linearly interpolated. -{ 'labelmap':'NearestNeighbor', 'FLAIR':'WindowedSinc' } -""" - -interpolationMapping = { - 'INV_T1': 'LanczosWindowedSinc', - 'LABEL_MAP': 'NearestNeighbor', - 'T1': 'Linear' -} -""" -3. Define the workflow and its working directory -""" - -tbuilder = pe.Workflow(name="antsRegistrationTemplateBuilder") -tbuilder.base_dir = requestedPath -""" -4. Define data sources. 
In real life these would be replace by DataGrabbers -""" - -InitialTemplateInputs = [mdict['T1'] for mdict in ListOfImagesDictionaries] - -datasource = pe.Node( - interface=util.IdentityInterface(fields=[ - 'InitialTemplateInputs', 'ListOfImagesDictionaries', - 'registrationImageTypes', 'interpolationMapping' - ]), - run_without_submitting=True, - name='InputImages') -datasource.inputs.InitialTemplateInputs = InitialTemplateInputs -datasource.inputs.ListOfImagesDictionaries = ListOfImagesDictionaries -datasource.inputs.registrationImageTypes = registrationImageTypes -datasource.inputs.interpolationMapping = interpolationMapping -datasource.inputs.sort_filelist = True -""" -5. Template is initialized by a simple average in this simple example, - any reference image could be used (i.e. a previously created template) -""" - -initAvg = pe.Node(interface=ants.AverageImages(), name='initAvg') -initAvg.inputs.dimension = 3 -initAvg.inputs.normalize = True - -tbuilder.connect(datasource, "InitialTemplateInputs", initAvg, "images") -""" -6. Define the first iteration of template building -""" - -buildTemplateIteration1 = antsRegistrationTemplateBuildSingleIterationWF( - 'iteration01') -""" -Here we are fine tuning parameters of the SGE job (memory limit, numebr of cores etc.) -""" - -BeginANTS = buildTemplateIteration1.get_node("BeginANTS") -BeginANTS.plugin_args = { - 'qsub_args': - '-S /bin/bash -pe smp1 8-12 -l mem_free=6000M -o /dev/null -e /dev/null queue_name', - 'overwrite': - True -} - -tbuilder.connect(initAvg, 'output_average_image', buildTemplateIteration1, - 'inputspec.fixed_image') -tbuilder.connect(datasource, 'ListOfImagesDictionaries', - buildTemplateIteration1, 'inputspec.ListOfImagesDictionaries') -tbuilder.connect(datasource, 'registrationImageTypes', buildTemplateIteration1, - 'inputspec.registrationImageTypes') -tbuilder.connect(datasource, 'interpolationMapping', buildTemplateIteration1, - 'inputspec.interpolationMapping') -""" -7. Define the second iteration of template building -""" - -buildTemplateIteration2 = antsRegistrationTemplateBuildSingleIterationWF( - 'iteration02') -BeginANTS = buildTemplateIteration2.get_node("BeginANTS") -BeginANTS.plugin_args = { - 'qsub_args': - '-S /bin/bash -pe smp1 8-12 -l mem_free=6000M -o /dev/null -e /dev/null queue_name', - 'overwrite': - True -} -tbuilder.connect(buildTemplateIteration1, 'outputspec.template', - buildTemplateIteration2, 'inputspec.fixed_image') -tbuilder.connect(datasource, 'ListOfImagesDictionaries', - buildTemplateIteration2, 'inputspec.ListOfImagesDictionaries') -tbuilder.connect(datasource, 'registrationImageTypes', buildTemplateIteration2, - 'inputspec.registrationImageTypes') -tbuilder.connect(datasource, 'interpolationMapping', buildTemplateIteration2, - 'inputspec.interpolationMapping') -""" -8. Move selected files to a designated results folder -""" - -datasink = pe.Node(io.DataSink(), name="datasink") -datasink.inputs.base_directory = os.path.join(requestedPath, "results") - -tbuilder.connect(buildTemplateIteration2, 'outputspec.template', datasink, - 'PrimaryTemplate') -tbuilder.connect(buildTemplateIteration2, - 'outputspec.passive_deformed_templates', datasink, - 'PassiveTemplate') -tbuilder.connect(initAvg, 'output_average_image', datasink, - 'PreRegisterAverage') -""" -9. 
Run the workflow -""" - -tbuilder.run(plugin="SGE") diff --git a/examples/smri_cbs_skullstripping.py b/examples/smri_cbs_skullstripping.py deleted file mode 100644 index 1471496576..0000000000 --- a/examples/smri_cbs_skullstripping.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -""" -======================================== -sMRI: USing CBS Tools for skullstripping -======================================== - -This simple workflow uses SPECTRE2010 algorithm to skullstrip an MP2RAGE -anatomical scan. -""" - -import nipype.pipeline.engine as pe -from nipype.interfaces.mipav.developer import (JistIntensityMp2rageMasking, - MedicAlgorithmSPECTRE2010) - -wf = pe.Workflow("skullstripping") - -mask = pe.Node(JistIntensityMp2rageMasking(), name="masking") -folder_path = '/Users/filo/7t_trt/niftis/sub001/session_1/' -mask.inputs.inSecond = folder_path + "MP2RAGE_INV2.nii.gz" -mask.inputs.inQuantitative = folder_path + "MP2RAGE_UNI.nii.gz" -mask.inputs.inT1weighted = folder_path + "MP2RAGE_T1.nii.gz" -mask.inputs.outMasked = True -mask.inputs.outMasked2 = True -mask.inputs.outSignal = True -mask.inputs.outSignal2 = True - -skullstrip = pe.Node(MedicAlgorithmSPECTRE2010(), name="skullstrip") -skullstrip.inputs.outStripped = True -skullstrip.inputs.xDefaultMem = 6000 - -wf.connect(mask, 'outMasked', skullstrip, 'inInput') -wf.run() diff --git a/examples/smri_freesurfer.py b/examples/smri_freesurfer.py deleted file mode 100644 index d365b44dd5..0000000000 --- a/examples/smri_freesurfer.py +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env python -""" -================ -sMRI: FreeSurfer -================ - -This script, smri_freesurfer.py, demonstrates the ability to call reconall on -a set of subjects and then make an average subject:: - - python smri_freesurfer.py - -Import necessary modules from nipype. 
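For orientation, a single ``ReconAll`` call outside any workflow looks like
this sketch; the command line shown in the comment is approximate::

    from nipype.interfaces.freesurfer import ReconAll

    recon = ReconAll()
    recon.inputs.subject_id = 's1'
    recon.inputs.directive = 'all'
    recon.inputs.subjects_dir = 'subjects_dir'
    recon.inputs.T1_files = 'data/s1/struct.nii'
    # Roughly: recon-all -all -i data/s1/struct.nii -subjid s1 -sd subjects_dir
    print(recon.cmdline)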
-""" - -import os - -import nipype.pipeline.engine as pe -import nipype.interfaces.io as nio -from nipype.interfaces.freesurfer.preprocess import ReconAll -from nipype.interfaces.freesurfer.utils import MakeAverageSubject - -subject_list = ['s1', 's3'] -data_dir = os.path.abspath('data') -subjects_dir = os.path.abspath('amri_freesurfer_tutorial/subjects_dir') - -wf = pe.Workflow(name="l1workflow") -wf.base_dir = os.path.abspath('amri_freesurfer_tutorial/workdir') -""" -Grab data -""" - -datasource = pe.MapNode( - interface=nio.DataGrabber(infields=['subject_id'], outfields=['struct']), - name='datasource', - iterfield=['subject_id']) -datasource.inputs.base_directory = data_dir -datasource.inputs.template = '%s/%s.nii' -datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']]) -datasource.inputs.subject_id = subject_list -datasource.inputs.sort_filelist = True -""" -Run recon-all -""" - -recon_all = pe.MapNode( - interface=ReconAll(), - name='recon_all', - iterfield=['subject_id', 'T1_files']) -recon_all.inputs.subject_id = subject_list -if not os.path.exists(subjects_dir): - os.mkdir(subjects_dir) -recon_all.inputs.subjects_dir = subjects_dir - -wf.connect(datasource, 'struct', recon_all, 'T1_files') -""" -Make average subject -""" - -average = pe.Node(interface=MakeAverageSubject(), name="average") -average.inputs.subjects_dir = subjects_dir - -wf.connect(recon_all, 'subject_id', average, 'subjects_ids') - -wf.run("MultiProc", plugin_args={'n_procs': 4}) diff --git a/examples/smri_fsreconall.py b/examples/smri_fsreconall.py deleted file mode 100644 index 6a9fc5446b..0000000000 --- a/examples/smri_fsreconall.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python -""" -================ -sMRI: FSReconAll -================ - -This script, smri_fsreconall.py, demonstrates the ability to use the -create_reconall_workflow function to create a workflow and then run it on a -set of subjects and then make an average subject:: - - python smri_fsreconall.py - -For an example on how to call FreeSurfer's reconall script in Nipype -see smri_freesurfer.py. - -Import necessary modules from nipype. 
-""" - -import os - -import nipype.pipeline.engine as pe -import nipype.interfaces.io as nio -from nipype.workflows.smri.freesurfer import create_reconall_workflow -from nipype.interfaces.freesurfer.utils import MakeAverageSubject -from nipype.interfaces.utility import IdentityInterface -""" -Assign the tutorial directory -""" - -tutorial_dir = os.path.abspath('smri_fsreconall_tutorial') -if not os.path.isdir(tutorial_dir): - os.mkdir(tutorial_dir) -""" -Define the workflow directories -""" - -subject_list = ['s1', 's3'] -data_dir = os.path.abspath('data') -subjects_dir = os.path.join(tutorial_dir, 'subjects_dir') -if not os.path.exists(subjects_dir): - os.mkdir(subjects_dir) - -wf = pe.Workflow(name="l1workflow") -wf.base_dir = os.path.join(tutorial_dir, 'workdir') -""" -Create inputspec -""" - -inputspec = pe.Node( - interface=IdentityInterface(['subject_id']), name="inputspec") -inputspec.iterables = ("subject_id", subject_list) -""" -Grab data -""" - -datasource = pe.Node( - interface=nio.DataGrabber(infields=['subject_id'], outfields=['struct']), - name='datasource') -datasource.inputs.base_directory = data_dir -datasource.inputs.template = '%s/%s.nii' -datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']]) -datasource.inputs.subject_id = subject_list -datasource.inputs.sort_filelist = True - -wf.connect(inputspec, 'subject_id', datasource, 'subject_id') -""" -Run recon-all -""" - -recon_all = create_reconall_workflow() -recon_all.inputs.inputspec.subjects_dir = subjects_dir - -wf.connect(datasource, 'struct', recon_all, 'inputspec.T1_files') -wf.connect(inputspec, 'subject_id', recon_all, 'inputspec.subject_id') -""" -Make average subject -""" - -average = pe.JoinNode( - interface=MakeAverageSubject(), - joinsource="inputspec", - joinfield="subjects_ids", - name="average") -average.inputs.subjects_dir = subjects_dir - -wf.connect(recon_all, 'postdatasink_outputspec.subject_id', average, - 'subjects_ids') - -wf.run("MultiProc", plugin_args={'n_procs': 4}) diff --git a/examples/tessellation_tutorial.py b/examples/tessellation_tutorial.py deleted file mode 100644 index 832ad9cad2..0000000000 --- a/examples/tessellation_tutorial.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python -""" -================================================= -sMRI: Regional Tessellation and Surface Smoothing -================================================= - -Introduction -============ - -This script, tessellation_tutorial.py, demonstrates the use of create_tessellation_flow from nipype.workflows.smri.freesurfer, and it can be run with:: - - python tessellation_tutorial.py - -This example requires that the user has Freesurfer installed, and that the Freesurfer directory for 'fsaverage' is present. - -.. seealso:: - - ConnectomeViewer - The Connectome Viewer connects Multi-Modal Multi-Scale Neuroimaging and Network Datasets For Analysis and Visualization in Python. - - http://www.geuz.org/gmsh/ - Gmsh: a three-dimensional finite element mesh generator with built-in pre- and post-processing facilities - - http://www.blender.org/ - Blender is the free open source 3D content creation suite, available for all major operating systems under the GNU General Public License. - -.. warning:: - - This workflow will take several hours to finish entirely, since smoothing the larger cortical surfaces is very time consuming. - -Packages and Data Setup -======================= - -Import the necessary modules and workflow from nipype. 
-""" - -import nipype.pipeline.engine as pe # pypeline engine -import nipype.interfaces.cmtk as cmtk -import nipype.interfaces.io as nio # Data i/o -import os -import os.path as op -from nipype.workflows.smri.freesurfer import create_tessellation_flow -""" -Directories -=========== - -Set the default directory and lookup table (LUT) paths -""" - -fs_dir = os.environ['FREESURFER_HOME'] -lookup_file = op.join(fs_dir, 'FreeSurferColorLUT.txt') -subjects_dir = op.join(fs_dir, 'subjects/') -output_dir = './tessellate_tutorial' -""" -Inputs -====== - -Create the tessellation workflow and set inputs -Here we will choose Gifti (gii) as the output format, because -we want to able to view the surface in ConnectomeViewer. - -In you intend to view the meshes in gmsh or Blender, you should change -the workflow creation to use stereolithographic (stl) format. -""" - -tessflow = create_tessellation_flow(name='tessflow', out_format='gii') -tessflow.inputs.inputspec.subject_id = 'fsaverage' -tessflow.inputs.inputspec.subjects_dir = subjects_dir -tessflow.inputs.inputspec.lookup_file = lookup_file -""" -We also create a conditional node to package the surfaces for ConnectomeViewer. -Simply set cff to "False" to ignore this step. -""" - -cff = True -if cff: - cff = pe.Node(interface=cmtk.CFFConverter(), name='cff') - cff.inputs.out_file = 'Meshes.cff' -""" -Outputs -======= - -Create a datasink to organize the smoothed meshes -Using regular-expression substitutions we can remove the extraneous folders generated by the mapnode. -""" - -datasink = pe.Node(interface=nio.DataSink(), name="datasink") -datasink.inputs.base_directory = 'meshes' -datasink.inputs.regexp_substitutions = [('_smoother[\d]*/', '')] -""" -Execution -========= - -Finally, create and run another pipeline that connects the workflow and datasink -""" - -tesspipe = pe.Workflow(name='tessellate_tutorial') -tesspipe.base_dir = output_dir -tesspipe.connect([(tessflow, datasink, [('outputspec.meshes', - '@meshes.all')])]) -""" -If the surfaces are to be packaged, this will connect the CFFConverter -node to the tessellation and smoothing workflow, as well as to the datasink. -""" - -if cff: - tesspipe.connect([(tessflow, cff, [('outputspec.meshes', - 'gifti_surfaces')])]) - tesspipe.connect([(cff, datasink, [('connectome_file', '@cff')])]) - -tesspipe.run() diff --git a/examples/test_spm.py b/examples/test_spm.py deleted file mode 100644 index 4c31f144ed..0000000000 --- a/examples/test_spm.py +++ /dev/null @@ -1,77 +0,0 @@ -from __future__ import division -from builtins import range -import nipype.pipeline.engine as pe -from nipype.interfaces import spm -from nipype.interfaces import fsl -from nipype.interfaces import utility as niu -from nipype.interfaces import io as nio -from nipype.algorithms.misc import Gunzip - - -def _get_first(inlist): - if isinstance(inlist, (list, tuple)): - return inlist[0] - return inlist - - -def test_spm(name='test_spm_3d'): - """ - A simple workflow to test SPM's installation. By default will split the 4D volume in - time-steps. - """ - workflow = pe.Workflow(name=name) - - inputnode = pe.Node( - niu.IdentityInterface(fields=['in_data']), name='inputnode') - dgr = pe.Node( - nio.DataGrabber( - template="feeds/data/fmri.nii.gz", - outfields=['out_file'], - sort_filelist=False), - name='datasource') - - stc = pe.Node( - spm.SliceTiming( - num_slices=21, - time_repetition=1.0, - time_acquisition=2. - 2. 
/ 32,
-            slice_order=list(range(21, 0, -1)),
-            ref_slice=10),
-        name='stc')
-    realign_estimate = pe.Node(
-        spm.Realign(jobtype='estimate'), name='realign_estimate')
-    realign_write = pe.Node(spm.Realign(jobtype='write'), name='realign_write')
-    realign_estwrite = pe.Node(
-        spm.Realign(jobtype='estwrite'), name='realign_estwrite')
-    smooth = pe.Node(spm.Smooth(fwhm=[6, 6, 6]), name='smooth')
-
-    if name == 'test_spm_3d':
-        split = pe.Node(
-            fsl.Split(dimension="t", output_type="NIFTI"), name="split")
-        workflow.connect([(dgr, split, [(('out_file', _get_first),
-                                         'in_file')]),
-                          (split, stc, [("out_files", "in_files")])])
-    elif name == 'test_spm_4d':
-        gunzip = pe.Node(Gunzip(), name="gunzip")
-        workflow.connect([(dgr, gunzip, [(('out_file', _get_first),
-                                          'in_file')]),
-                          (gunzip, stc, [("out_file", "in_files")])])
-    else:
-        raise NotImplementedError(
-            'No implementation of the test workflow \'{}\' was found'.format(
-                name))
-
-    workflow.connect([(inputnode, dgr, [('in_data', 'base_directory')]),
-                      (stc, realign_estimate,
-                       [('timecorrected_files',
-                         'in_files')]), (realign_estimate, realign_write,
-                                         [('modified_in_files', 'in_files')]),
-                      (stc, realign_estwrite,
-                       [('timecorrected_files',
-                         'in_files')]), (realign_write, smooth,
-                                         [('realigned_files', 'in_files')])])
-    return workflow
-
-
-workflow3d = test_spm()
-workflow4d = test_spm(name='test_spm_4d')
diff --git a/examples/workshop_dartmouth_2010.py b/examples/workshop_dartmouth_2010.py
deleted file mode 100644
index 931a633c52..0000000000
--- a/examples/workshop_dartmouth_2010.py
+++ /dev/null
@@ -1,288 +0,0 @@
-"""
-================================
-Workshop: Dartmouth College 2010
-================================
-
-First, let's go to the directory with the data we'll be working on and start the interactive python interpreter
-(with some nipype-specific configuration). Note that nipype does not need to be run through ipython - it is
-just much nicer to do interactive work in it.
-
-.. sourcecode:: bash
-
-    cd $TDPATH
-    ipython -p nipype
-
-For every neuroimaging procedure supported by nipype there exists a wrapper - a small piece of code managing
-the underlying software (FSL, SPM, AFNI etc.). We call those interfaces. They are standardised so we can hook them up
-together. Let's have a look at some of them.
-
-..
sourcecode:: ipython
-
-    In [1]: import nipype.interfaces.fsl as fsl
-
-    In [2]: fsl.BET.help()
-    Inputs
-    ------
-
-    Mandatory:
-     in_file: input file to skull strip
-
-    Optional:
-     args: Additional parameters to the command
-     center: center of gravity in voxels
-     environ: Environment variables (default={})
-     frac: fractional intensity threshold
-     functional: apply to 4D fMRI data
-      mutually exclusive: functional, reduce_bias
-     mask: create binary mask image
-     mesh: generate a vtk mesh brain surface
-     no_output: Don't generate segmented output
-     out_file: name of output skull stripped image
-     outline: create surface outline image
-     output_type: FSL output type
-     radius: head radius
-     reduce_bias: bias field and neck cleanup
-      mutually exclusive: functional, reduce_bias
-     skull: create skull image
-     threshold: apply thresholding to segmented brain image and mask
-     vertical_gradient: vertical gradient in fractional intensity threshold (-1, 1)
-
-    Outputs
-    -------
-    mask_file: path/name of binary brain mask (if generated)
-    meshfile: path/name of vtk mesh file (if generated)
-    out_file: path/name of skullstripped file
-    outline_file: path/name of outline file (if generated)
-
-    In [3]: import nipype.interfaces.freesurfer as fs
-
-    In [4]: fs.Smooth.help()
-    Inputs
-    ------
-
-    Mandatory:
-     in_file: source volume
-     num_iters: number of iterations instead of fwhm
-      mutually exclusive: surface_fwhm
-     reg_file: registers volume to surface anatomical
-     surface_fwhm: surface FWHM in mm
-      mutually exclusive: num_iters
-      requires: reg_file
-
-    Optional:
-     args: Additional parameters to the command
-     environ: Environment variables (default={})
-     proj_frac: project frac of thickness a long surface normal
-      mutually exclusive: proj_frac_avg
-     proj_frac_avg: average a long normal min max delta
-      mutually exclusive: proj_frac
-     smoothed_file: output volume
-     subjects_dir: subjects directory
-     vol_fwhm: volumesmoothing outside of surface
-
-    Outputs
-    -------
-    args: Additional parameters to the command
-    environ: Environment variables
-    smoothed_file: smoothed input volume
-    subjects_dir: subjects directory
-
-You can read about all of the interfaces implemented in nipype at our online documentation at http://nipy.sourceforge.net/nipype/documentation.html#documentation .
-Check it out now.
-
-Using interfaces
-----------------
-
-Having interfaces allows us to use third-party software (like FSL BET) as a function. Look how simple it is.
-"""
-
-from __future__ import print_function
-from builtins import str
-
-import nipype.interfaces.fsl as fsl
-result = fsl.BET(in_file='data/s1/struct.nii').run()
-print(result)
-"""
-Running a single program is not much of a breakthrough. Let's run motion correction followed by smoothing
-(isotropic - in other words not using SUSAN). Notice that in the first line we are setting the output data type
-for all FSL interfaces.
-"""
-
-fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
-result1 = fsl.MCFLIRT(in_file='data/s1/f3.nii').run()
-result2 = fsl.Smooth(in_file='f3_mcf.nii.gz', fwhm=6).run()
-"""
-Simple workflow
----------------
-
-In the previous example we knew that fsl.MCFLIRT would produce a file called f3_mcf.nii.gz and we had hard-coded
-this as an input to fsl.Smooth. This is quite limited, but luckily nipype supports joining interfaces in pipelines.
-This way the output of one interface will be used as an input of another without having to hard-code anything. Before
-connecting Interfaces we need to put them into (separate) Nodes and give them unique names.
This way every interface will
-process data in a separate folder.
-"""
-
-import nipype.pipeline.engine as pe
-import os
-
-motion_correct = pe.Node(
-    interface=fsl.MCFLIRT(in_file=os.path.abspath('data/s1/f3.nii')),
-    name="motion_correct")
-smooth = pe.Node(interface=fsl.Smooth(fwhm=6), name="smooth")
-
-motion_correct_and_smooth = pe.Workflow(name="motion_correct_and_smooth")
-motion_correct_and_smooth.base_dir = os.path.abspath(
-    '.')  # define where the root folder for the workflow will be
-motion_correct_and_smooth.connect([(motion_correct, smooth, [('out_file',
-                                                              'in_file')])])
-# we are connecting the 'out_file' output of motion_correct to the 'in_file' input of smooth
-motion_correct_and_smooth.run()
-"""
-Another workflow
-----------------
-
-Another example of a simple workflow (calculate the mean of the fMRI signal and subtract it).
-This time we'll be assigning inputs after defining the workflow.
-"""
-
-calc_mean = pe.Node(interface=fsl.ImageMaths(), name="calc_mean")
-calc_mean.inputs.op_string = "-Tmean"
-subtract = pe.Node(interface=fsl.ImageMaths(), name="subtract")
-subtract.inputs.op_string = "-sub"
-
-demean = pe.Workflow(name="demean")
-demean.base_dir = os.path.abspath('.')
-demean.connect([(calc_mean, subtract, [('out_file', 'in_file2')])])
-
-demean.inputs.calc_mean.in_file = os.path.abspath('data/s1/f3.nii')
-demean.inputs.subtract.in_file = os.path.abspath('data/s1/f3.nii')
-demean.run()
-"""
-Reusing workflows
------------------
-
-The beauty of workflows is that they are reusable. We can just import a workflow made by someone
-else and feed it with our data.
-"""
-
-from fmri_fsl import preproc
-preproc.base_dir = os.path.abspath('.')
-preproc.inputs.inputspec.func = os.path.abspath('data/s1/f3.nii')
-preproc.inputs.inputspec.struct = os.path.abspath('data/s1/struct.nii')
-preproc.run()
-"""
-... and we can run it again and it won't actually rerun anything because none of
-the parameters have changed.
-"""
-
-preproc.run()
-"""
-... and we can change a parameter and run it again. Only the dependent nodes
-are rerun, and that too only if the input state has changed.
-"""
-
-preproc.inputs.meanfuncmask.frac = 0.5
-preproc.run()
-"""
-Visualizing workflows 1
------------------------
-
-So what did we run in this precanned workflow?
-"""
-
-preproc.write_graph()
-"""
-Datasink
---------
-
-Datasink is a special interface for copying and arranging results.
-"""
-
-import nipype.interfaces.io as nio
-
-preproc.inputs.inputspec.func = os.path.abspath('data/s1/f3.nii')
-preproc.inputs.inputspec.struct = os.path.abspath('data/s1/struct.nii')
-datasink = pe.Node(interface=nio.DataSink(), name='sinker')
-preprocess = pe.Workflow(name='preprocout')
-preprocess.base_dir = os.path.abspath('.')
-preprocess.connect([(preproc, datasink, [('meanfunc2.out_file', 'meanfunc'),
-                                         ('maskfunc3.out_file', 'funcruns')])])
-preprocess.run()
-"""
-Datagrabber
------------
-
-Datagrabber is (surprise, surprise) an interface for collecting files from the hard drive. It is very flexible and
-supports almost any file organisation of your data that you can imagine.
-""" - -datasource1 = nio.DataGrabber() -datasource1.inputs.template = 'data/s1/f3.nii' -datasource1.inputs.sort_filelist = True -results = datasource1.run() -print(results.outputs) - -datasource2 = nio.DataGrabber() -datasource2.inputs.template = 'data/s*/f*.nii' -datasource2.inputs.sort_filelist = True -results = datasource2.run() -print(results.outputs) - -datasource3 = nio.DataGrabber(infields=['run']) -datasource3.inputs.template = 'data/s1/f%d.nii' -datasource3.inputs.sort_filelist = True -datasource3.inputs.run = [3, 7] -results = datasource3.run() -print(results.outputs) - -datasource4 = nio.DataGrabber(infields=['subject_id', 'run']) -datasource4.inputs.template = 'data/%s/f%d.nii' -datasource4.inputs.sort_filelist = True -datasource4.inputs.run = [3, 7] -datasource4.inputs.subject_id = ['s1', 's3'] -results = datasource4.run() -print(results.outputs) -""" -Iterables ---------- - -Iterables is a special field of the Node class that enables to iterate all workfloes/nodes connected to it over -some parameters. Here we'll use it to iterate over two subjects. -""" - -import nipype.interfaces.utility as util -infosource = pe.Node( - interface=util.IdentityInterface(fields=['subject_id']), name="infosource") -infosource.iterables = ('subject_id', ['s1', 's3']) - -datasource = pe.Node( - nio.DataGrabber(infields=['subject_id'], outfields=['func', 'struct']), - name="datasource") -datasource.inputs.template = '%s/%s.nii' -datasource.inputs.base_directory = os.path.abspath('data') -datasource.inputs.template_args = dict( - func=[['subject_id', 'f3']], struct=[['subject_id', 'struct']]) -datasource.inputs.sort_filelist = True - -my_workflow = pe.Workflow(name="my_workflow") -my_workflow.base_dir = os.path.abspath('.') - -my_workflow.connect([(infosource, datasource, [('subject_id', 'subject_id')]), - (datasource, preproc, [('func', 'inputspec.func'), - ('struct', 'inputspec.struct')])]) -my_workflow.run() -""" -and we can change a node attribute and run it again - -""" - -smoothnode = my_workflow.get_node('preproc.smooth') -assert (str(smoothnode) == 'preproc.smooth') -smoothnode.iterables = ('fwhm', [5., 10.]) -my_workflow.run() -""" -Visualizing workflows 2 ------------------------ - -In the case of nested workflows, we might want to look at expanded forms of the workflow. -""" diff --git a/nipype/__init__.py b/nipype/__init__.py index ad961e7df4..54872f193e 100644 --- a/nipype/__init__.py +++ b/nipype/__init__.py @@ -1,14 +1,21 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) +""" +Information on specific functions, classes, and methods. +:Release: |version| +:Date: |today| + +Top-level module API +-------------------- + +""" import os -from distutils.version import LooseVersion -from .info import (LONG_DESCRIPTION as __doc__, URL as __url__, STATUS as - __status__, __version__) +# No longer used internally but could be used externally. 
+from looseversion import LooseVersion + +from .info import URL as __url__, STATUS as __status__, __version__ from .utils.config import NipypeConfig from .utils.logger import Logging from .refs import due @@ -16,26 +23,30 @@ try: import faulthandler + faulthandler.enable() -except (ImportError, IOError) as e: +except (ImportError, OSError): pass config = NipypeConfig() logging = Logging(config) -class NipypeTester(object): - def __call__(self, doctests=True, parallel=True): +class NipypeTester: + def __call__(self, doctests=True, parallel=False): try: import pytest - except: - raise RuntimeError( - 'py.test not installed, run: pip install pytest') + except ImportError: + raise RuntimeError("py.test not installed, run: pip install pytest") args = [] if not doctests: - args.extend(['-p', 'no:doctest']) - if not parallel: - args.append('-n0') + args.extend(["-p", "no:doctest"]) + if parallel: + try: + import xdist + except ImportError: + raise RuntimeError("pytest-xdist required for parallel run") + args.append("-n auto") args.append(os.path.dirname(__file__)) pytest.main(args=args) @@ -49,5 +60,41 @@ def get_info(): from .pipeline import Node, MapNode, JoinNode, Workflow -from .interfaces import (DataGrabber, DataSink, SelectFiles, IdentityInterface, - Rename, Function, Select, Merge) +from .interfaces import ( + DataGrabber, + DataSink, + SelectFiles, + IdentityInterface, + Rename, + Function, + Select, + Merge, +) + + +def check_latest_version(raise_exception=False): + """ + Check for the latest version of the library. + + Parameters + ---------- + raise_exception: bool + Raise a RuntimeError if a bad version is being used + """ + import etelemetry + + logger = logging.getLogger("nipype.utils") + return etelemetry.check_available_version( + "nipy/nipype", __version__, logger, raise_exception + ) + + +# Run telemetry on import for interactive sessions, such as IPython, Jupyter notebooks, Python REPL +if config.getboolean("execution", "check_version"): + import __main__ + + if not hasattr(__main__, "__file__") and "NIPYPE_NO_ET" not in os.environ: + from .interfaces.base import BaseInterface + + if BaseInterface._etelemetry_version_data is None: + BaseInterface._etelemetry_version_data = check_latest_version() or "n/a" diff --git a/nipype/algorithms/__init__.py b/nipype/algorithms/__init__.py index a2909a3501..a701f6fe59 100644 --- a/nipype/algorithms/__init__.py +++ b/nipype/algorithms/__init__.py @@ -1,10 +1,9 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Package contains pure python neuroimaging algorithms -Exaples: artifactdetect +Examples: artifactdetect """ -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 79c0b96f4e..157d1e48d7 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -1,15 +1,12 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' +""" Algorithms to compute confounds in :abbr:`fMRI (functional MRI)` -''' -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range - +""" import os import os.path as op +from collections import OrderedDict +from itertools import chain import nibabel as nb import numpy as np @@ -17,13 +14,21 @@ from .. 
import config, logging from ..external.due import BibTeX -from ..interfaces.base import (traits, TraitedSpec, BaseInterface, - BaseInterfaceInputSpec, File, isdefined, - InputMultiPath, OutputMultiPath) -from ..utils import NUMPY_MMAP +from ..interfaces.base import ( + traits, + TraitedSpec, + BaseInterface, + BaseInterfaceInputSpec, + File, + isdefined, + InputMultiPath, + OutputMultiPath, + SimpleInterface, + Tuple, +) from ..utils.misc import normalize_mc_params -IFLOGGER = logging.getLogger('nipype.interface') +IFLOGGER = logging.getLogger("nipype.interface") def fallback_svd(a, full_matrices=True, compute_uv=True): @@ -33,69 +38,81 @@ def fallback_svd(a, full_matrices=True, compute_uv=True): pass from scipy.linalg import svd - return svd(a, full_matrices=full_matrices, compute_uv=compute_uv, lapack_driver='gesvd') + + return svd( + a, full_matrices=full_matrices, compute_uv=compute_uv, lapack_driver="gesvd" + ) class ComputeDVARSInputSpec(BaseInterfaceInputSpec): - in_file = File( - exists=True, mandatory=True, desc='functional data, after HMC') - in_mask = File(exists=True, mandatory=True, desc='a brain mask') + in_file = File(exists=True, mandatory=True, desc="functional data, after HMC") + in_mask = File(exists=True, mandatory=True, desc="a brain mask") remove_zerovariance = traits.Bool( - True, usedefault=True, desc='remove voxels with zero variance') - save_std = traits.Bool( - True, usedefault=True, desc='save standardized DVARS') - save_nstd = traits.Bool( - False, usedefault=True, desc='save non-standardized DVARS') + True, usedefault=True, desc="remove voxels with zero variance" + ) + variance_tol = traits.Float( + 1e-7, + usedefault=True, + desc="maximum variance to consider \"close to\" zero for the purposes of removal", + ) + save_std = traits.Bool(True, usedefault=True, desc="save standardized DVARS") + save_nstd = traits.Bool(False, usedefault=True, desc="save non-standardized DVARS") save_vxstd = traits.Bool( - False, usedefault=True, desc='save voxel-wise standardized DVARS') - save_all = traits.Bool(False, usedefault=True, desc='output all DVARS') - - series_tr = traits.Float(desc='repetition time in sec.') - save_plot = traits.Bool(False, usedefault=True, desc='write DVARS plot') - figdpi = traits.Int(100, usedefault=True, desc='output dpi for the plot') - figsize = traits.Tuple( + False, usedefault=True, desc="save voxel-wise standardized DVARS" + ) + save_all = traits.Bool(False, usedefault=True, desc="output all DVARS") + + series_tr = traits.Float(desc="repetition time in sec.") + save_plot = traits.Bool(False, usedefault=True, desc="write DVARS plot") + figdpi = traits.Int(100, usedefault=True, desc="output dpi for the plot") + figsize = Tuple( traits.Float(11.7), traits.Float(2.3), usedefault=True, - desc='output figure size') + desc="output figure size", + ) figformat = traits.Enum( - 'png', 'pdf', 'svg', usedefault=True, desc='output format for figures') + "png", "pdf", "svg", usedefault=True, desc="output format for figures" + ) intensity_normalization = traits.Float( 1000.0, usedefault=True, - desc='Divide value in each voxel at each timepoint ' - 'by the median calculated across all voxels' - 'and timepoints within the mask (if specified)' - 'and then multiply by the value specified by' - 'this parameter. By using the default (1000)' - 'output DVARS will be expressed in ' - 'x10 % BOLD units compatible with Power et al.' - '2012. 
Set this to 0 to disable intensity'
-        'normalization altogether.')
+        desc="Divide value in each voxel at each timepoint "
+        "by the median calculated across all voxels "
+        "and timepoints within the mask (if specified) "
+        "and then multiply by the value specified by "
+        "this parameter. By using the default (1000) "
+        "output DVARS will be expressed in "
+        "x10 % BOLD units compatible with Power et al. "
+        "2012. Set this to 0 to disable intensity "
+        "normalization altogether.",
+    )
 
 
 class ComputeDVARSOutputSpec(TraitedSpec):
-    out_std = File(exists=True, desc='output text file')
-    out_nstd = File(exists=True, desc='output text file')
-    out_vxstd = File(exists=True, desc='output text file')
-    out_all = File(exists=True, desc='output text file')
+    out_std = File(exists=True, desc="output text file")
+    out_nstd = File(exists=True, desc="output text file")
+    out_vxstd = File(exists=True, desc="output text file")
+    out_all = File(exists=True, desc="output text file")
     avg_std = traits.Float()
     avg_nstd = traits.Float()
     avg_vxstd = traits.Float()
-    fig_std = File(exists=True, desc='output DVARS plot')
-    fig_nstd = File(exists=True, desc='output DVARS plot')
-    fig_vxstd = File(exists=True, desc='output DVARS plot')
+    fig_std = File(exists=True, desc="output DVARS plot")
+    fig_nstd = File(exists=True, desc="output DVARS plot")
+    fig_vxstd = File(exists=True, desc="output DVARS plot")
 
 
 class ComputeDVARS(BaseInterface):
    """
    Computes the DVARS.
    """
+
    input_spec = ComputeDVARSInputSpec
    output_spec = ComputeDVARSOutputSpec
-    references_ = [{
-        'entry':
-        BibTeX("""\
+    _references = [
+        {
+            "entry": BibTeX(
+                """\
 @techreport{nichols_notes_2013,
     address = {Coventry, UK},
     title = {Notes on {Creating} a {Standardized} {Version} of {DVARS}},
@@ -105,11 +122,13 @@ class ComputeDVARS(BaseInterface):
     institution = {University of Warwick},
     author = {Nichols, Thomas},
     year = {2013}
-}"""),
-        'tags': ['method']
-    }, {
-        'entry':
-        BibTeX("""\
+}"""
+            ),
+            "tags": ["method"],
+        },
+        {
+            "entry": BibTeX(
+                """\
 @article{power_spurious_2012,
     title = {Spurious but systematic correlations in functional connectivity {MRI} networks \
 arise from subject motion},
@@ -123,111 +142,121 @@ class ComputeDVARS(BaseInterface):
     year = {2012},
     pages = {2142--2154},
 }
-"""),
-        'tags': ['method']
-    }]
+"""
+            ),
+            "tags": ["method"],
+        },
+    ]
 
     def __init__(self, **inputs):
         self._results = {}
-        super(ComputeDVARS, self).__init__(**inputs)
+        super().__init__(**inputs)
 
     def _gen_fname(self, suffix, ext=None):
         fname, in_ext = op.splitext(op.basename(self.inputs.in_file))
-        if in_ext == '.gz':
+        if in_ext == ".gz":
             fname, in_ext2 = op.splitext(fname)
             in_ext = in_ext2 + in_ext
         if ext is None:
             ext = in_ext
-        if ext.startswith('.'):
+        if ext.startswith("."):
             ext = ext[1:]
-        return op.abspath('{}_{}.{}'.format(fname, suffix, ext))
+        return op.abspath(f"{fname}_{suffix}.{ext}")
 
     def _run_interface(self, runtime):
         dvars = compute_dvars(
             self.inputs.in_file,
             self.inputs.in_mask,
             remove_zerovariance=self.inputs.remove_zerovariance,
-            intensity_normalization=self.inputs.intensity_normalization)
+            variance_tol=self.inputs.variance_tol,
+            intensity_normalization=self.inputs.intensity_normalization,
+        )
 
-        (self._results['avg_std'], self._results['avg_nstd'],
-         self._results['avg_vxstd']) = np.mean(
-             dvars, axis=1).astype(float)
+        (
+            self._results["avg_std"],
+            self._results["avg_nstd"],
+            self._results["avg_vxstd"],
+        ) = np.mean(dvars, axis=1).astype(float)
 
         tr = None
         if isdefined(self.inputs.series_tr):
             tr = self.inputs.series_tr
 
         if self.inputs.save_std:
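+            # dvars[0] is the standardized series: frame-wise DVARS divided by
+            # its predicted mean value, so a scan matching the AR(1) null model
+            # hovers around 1.0.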
- out_file = self._gen_fname('dvars_std', ext='tsv') - np.savetxt(out_file, dvars[0], fmt=b'%0.6f') - self._results['out_std'] = out_file + out_file = self._gen_fname("dvars_std", ext="tsv") + np.savetxt(out_file, dvars[0], fmt=b"%0.6f") + self._results["out_std"] = out_file if self.inputs.save_plot: - self._results['fig_std'] = self._gen_fname( - 'dvars_std', ext=self.inputs.figformat) + self._results["fig_std"] = self._gen_fname( + "dvars_std", ext=self.inputs.figformat + ) fig = plot_confound( - dvars[0], - self.inputs.figsize, - 'Standardized DVARS', - series_tr=tr) + dvars[0], self.inputs.figsize, "Standardized DVARS", series_tr=tr + ) fig.savefig( - self._results['fig_std'], + self._results["fig_std"], dpi=float(self.inputs.figdpi), format=self.inputs.figformat, - bbox_inches='tight') + bbox_inches="tight", + ) fig.clf() if self.inputs.save_nstd: - out_file = self._gen_fname('dvars_nstd', ext='tsv') - np.savetxt(out_file, dvars[1], fmt=b'%0.6f') - self._results['out_nstd'] = out_file + out_file = self._gen_fname("dvars_nstd", ext="tsv") + np.savetxt(out_file, dvars[1], fmt=b"%0.6f") + self._results["out_nstd"] = out_file if self.inputs.save_plot: - self._results['fig_nstd'] = self._gen_fname( - 'dvars_nstd', ext=self.inputs.figformat) + self._results["fig_nstd"] = self._gen_fname( + "dvars_nstd", ext=self.inputs.figformat + ) fig = plot_confound( - dvars[1], self.inputs.figsize, 'DVARS', series_tr=tr) + dvars[1], self.inputs.figsize, "DVARS", series_tr=tr + ) fig.savefig( - self._results['fig_nstd'], + self._results["fig_nstd"], dpi=float(self.inputs.figdpi), format=self.inputs.figformat, - bbox_inches='tight') + bbox_inches="tight", + ) fig.clf() if self.inputs.save_vxstd: - out_file = self._gen_fname('dvars_vxstd', ext='tsv') - np.savetxt(out_file, dvars[2], fmt=b'%0.6f') - self._results['out_vxstd'] = out_file + out_file = self._gen_fname("dvars_vxstd", ext="tsv") + np.savetxt(out_file, dvars[2], fmt=b"%0.6f") + self._results["out_vxstd"] = out_file if self.inputs.save_plot: - self._results['fig_vxstd'] = self._gen_fname( - 'dvars_vxstd', ext=self.inputs.figformat) + self._results["fig_vxstd"] = self._gen_fname( + "dvars_vxstd", ext=self.inputs.figformat + ) fig = plot_confound( - dvars[2], - self.inputs.figsize, - 'Voxelwise std DVARS', - series_tr=tr) + dvars[2], self.inputs.figsize, "Voxelwise std DVARS", series_tr=tr + ) fig.savefig( - self._results['fig_vxstd'], + self._results["fig_vxstd"], dpi=float(self.inputs.figdpi), format=self.inputs.figformat, - bbox_inches='tight') + bbox_inches="tight", + ) fig.clf() if self.inputs.save_all: - out_file = self._gen_fname('dvars', ext='tsv') + out_file = self._gen_fname("dvars", ext="tsv") np.savetxt( out_file, np.vstack(dvars).T, - fmt=b'%0.8f', - delimiter=b'\t', - header='std DVARS\tnon-std DVARS\tvx-wise std DVARS', - comments='') - self._results['out_all'] = out_file + fmt=b"%0.8f", + delimiter=b"\t", + header="std DVARS\tnon-std DVARS\tvx-wise std DVARS", + comments="", + ) + self._results["out_all"] = out_file return runtime @@ -236,7 +265,7 @@ def _list_outputs(self): class FramewiseDisplacementInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc='motion parameters') + in_file = File(exists=True, mandatory=True, desc="motion parameters") parameter_source = traits.Enum( "FSL", "AFNI", @@ -244,33 +273,32 @@ class FramewiseDisplacementInputSpec(BaseInterfaceInputSpec): "FSFAST", "NIPY", desc="Source of movement parameters", - mandatory=True) + mandatory=True, + ) radius = traits.Float( 50, 
usedefault=True, - desc='radius in mm to calculate angular FDs, 50mm is the ' - 'default since it is used in Power et al. 2012') - out_file = File( - 'fd_power_2012.txt', usedefault=True, desc='output file name') - out_figure = File( - 'fd_power_2012.pdf', usedefault=True, desc='output figure name') - series_tr = traits.Float(desc='repetition time in sec.') - save_plot = traits.Bool(False, usedefault=True, desc='write FD plot') - normalize = traits.Bool( - False, usedefault=True, desc='calculate FD in mm/s') - figdpi = traits.Int( - 100, usedefault=True, desc='output dpi for the FD plot') - figsize = traits.Tuple( + desc="radius in mm to calculate angular FDs, 50mm is the " + "default since it is used in Power et al. 2012", + ) + out_file = File("fd_power_2012.txt", usedefault=True, desc="output file name") + out_figure = File("fd_power_2012.pdf", usedefault=True, desc="output figure name") + series_tr = traits.Float(desc="repetition time in sec.") + save_plot = traits.Bool(False, usedefault=True, desc="write FD plot") + normalize = traits.Bool(False, usedefault=True, desc="calculate FD in mm/s") + figdpi = traits.Int(100, usedefault=True, desc="output dpi for the FD plot") + figsize = Tuple( traits.Float(11.7), traits.Float(2.3), usedefault=True, - desc='output figure size') + desc="output figure size", + ) class FramewiseDisplacementOutputSpec(TraitedSpec): - out_file = File(desc='calculated FD per timestep') - out_figure = File(desc='output image file') - fd_average = traits.Float(desc='average FD') + out_file = File(desc="calculated FD per timestep") + out_figure = File(desc="output image file") + fd_average = traits.Float(desc="average FD") class FramewiseDisplacement(BaseInterface): @@ -289,9 +317,10 @@ class FramewiseDisplacement(BaseInterface): input_spec = FramewiseDisplacementInputSpec output_spec = FramewiseDisplacementOutputSpec - references_ = [{ - 'entry': - BibTeX("""\ + _references = [ + { + "entry": BibTeX( + """\ @article{power_spurious_2012, title = {Spurious but systematic correlations in functional connectivity {MRI} networks \ arise from subject motion}, @@ -305,9 +334,11 @@ class FramewiseDisplacement(BaseInterface): year = {2012}, pages = {2142--2154}, } -"""), - 'tags': ['method'] - }] +""" + ), + "tags": ["method"], + } + ] def _run_interface(self, runtime): mpars = np.loadtxt(self.inputs.in_file) # mpars is N_t x 6 @@ -315,20 +346,19 @@ def _run_interface(self, runtime): func1d=normalize_mc_params, axis=1, arr=mpars, - source=self.inputs.parameter_source) + source=self.inputs.parameter_source, + ) diff = mpars[:-1, :6] - mpars[1:, :6] diff[:, 3:6] *= self.inputs.radius fd_res = np.abs(diff).sum(axis=1) self._results = { - 'out_file': op.abspath(self.inputs.out_file), - 'fd_average': float(fd_res.mean()) + "out_file": op.abspath(self.inputs.out_file), + "fd_average": float(fd_res.mean()), } np.savetxt( - self.inputs.out_file, - fd_res, - header='FramewiseDisplacement', - comments='') + self.inputs.out_file, fd_res, header="FramewiseDisplacement", comments="" + ) if self.inputs.save_plot: tr = None @@ -336,21 +366,23 @@ def _run_interface(self, runtime): tr = self.inputs.series_tr if self.inputs.normalize and tr is None: - IFLOGGER.warning('FD plot cannot be normalized if TR is not set') + IFLOGGER.warning("FD plot cannot be normalized if TR is not set") - self._results['out_figure'] = op.abspath(self.inputs.out_figure) + self._results["out_figure"] = op.abspath(self.inputs.out_figure) fig = plot_confound( fd_res, self.inputs.figsize, - 'FD', - units='mm', + "FD", 
+ units="mm", series_tr=tr, - normalize=self.inputs.normalize) + normalize=self.inputs.normalize, + ) fig.savefig( - self._results['out_figure'], + self._results["out_figure"], dpi=float(self.inputs.figdpi), format=self.inputs.out_figure[-3:], - bbox_inches='tight') + bbox_inches="tight", + ) fig.clf() return runtime @@ -361,89 +393,147 @@ def _list_outputs(self): class CompCorInputSpec(BaseInterfaceInputSpec): realigned_file = File( - exists=True, mandatory=True, desc='already realigned brain image (4D)') + exists=True, mandatory=True, desc="already realigned brain image (4D)" + ) mask_files = InputMultiPath( File(exists=True), - desc=('One or more mask files that determines ' - 'ROI (3D). When more that one file is ' - 'provided `merge_method` or ' - '`merge_index` must be provided')) + desc=( + "One or more mask files that determines " + "ROI (3D). When more that one file is " + "provided ``merge_method`` or " + "``merge_index`` must be provided" + ), + ) merge_method = traits.Enum( - 'union', - 'intersect', - 'none', - xor=['mask_index'], - requires=['mask_files'], - desc=('Merge method if multiple masks are ' - 'present - `union` uses voxels included in' - ' at least one input mask, `intersect` ' - 'uses only voxels present in all input ' - 'masks, `none` performs CompCor on ' - 'each mask individually')) + "union", + "intersect", + "none", + xor=["mask_index"], + requires=["mask_files"], + desc=( + "Merge method if multiple masks are " + "present - ``union`` uses voxels included in" + " at least one input mask, ``intersect`` " + "uses only voxels present in all input " + "masks, ``none`` performs CompCor on " + "each mask individually" + ), + ) mask_index = traits.Range( low=0, - xor=['merge_method'], - requires=['mask_files'], - desc=('Position of mask in `mask_files` to use - ' - 'first is the default.')) + xor=["merge_method"], + requires=["mask_files"], + desc="Position of mask in ``mask_files`` to use - first is the default.", + ) + mask_names = traits.List( + traits.Str, + desc="Names for provided masks (for printing into metadata). " + "If provided, it must be as long as the final mask list " + "(after any merge and indexing operations).", + ) components_file = traits.Str( - 'components_file.txt', + "components_file.txt", usedefault=True, - desc='Filename to store physiological components') - num_components = traits.Int(6, usedefault=True) # 6 for BOLD, 4 for ASL + desc="Filename to store physiological components", + ) + num_components = traits.Either( + "all", + traits.Range(low=1), + xor=["variance_threshold"], + desc="Number of components to return from the decomposition. If " + "``num_components`` is ``all``, then all components will be " + "retained.", + ) + # 6 for BOLD, 4 for ASL + # automatically instantiated to 6 in CompCor below if neither + # ``num_components`` nor ``variance_threshold`` is defined (for + # backward compatibility) + variance_threshold = traits.Range( + low=0.0, + high=1.0, + exclude_low=True, + exclude_high=True, + xor=["num_components"], + desc="Select the number of components to be returned automatically " + "based on their ability to explain variance in the dataset. 
" + "``variance_threshold`` is a fractional value between 0 and 1; " + "the number of components retained will be equal to the minimum " + "number of components necessary to explain the provided " + "fraction of variance in the masked time series.", + ) pre_filter = traits.Enum( - 'polynomial', - 'cosine', + "polynomial", + "cosine", False, usedefault=True, - desc='Detrend time series prior to component ' - 'extraction') + desc="Detrend time series prior to component extraction", + ) use_regress_poly = traits.Bool( - deprecated='0.15.0', - new_name='pre_filter', - desc=('use polynomial regression ' - 'pre-component extraction')) + deprecated="0.15.0", + new_name="pre_filter", + desc=("use polynomial regression pre-component extraction"), + ) regress_poly_degree = traits.Range( - low=1, value=1, usedefault=True, desc='the degree polynomial to use') + low=1, value=1, usedefault=True, desc="the degree polynomial to use" + ) header_prefix = traits.Str( - desc=('the desired header for the output tsv ' - 'file (one column). If undefined, will ' - 'default to "CompCor"')) + desc=( + "the desired header for the output tsv " + "file (one column). If undefined, will " + 'default to "CompCor"' + ) + ) high_pass_cutoff = traits.Float( - 128, - usedefault=True, - desc='Cutoff (in seconds) for "cosine" pre-filter') + 128, usedefault=True, desc='Cutoff (in seconds) for "cosine" pre-filter' + ) repetition_time = traits.Float( - desc='Repetition time (TR) of series - derived from image header if ' - 'unspecified') + desc="Repetition time (TR) of series - derived from image header if " + "unspecified" + ) save_pre_filter = traits.Either( - traits.Bool, File, desc='Save pre-filter basis as text file') - ignore_initial_volumes = traits.Range( - low=0, + traits.Bool, + File, + default=False, + usedefault=True, + desc="Save pre-filter basis as text file", + ) + save_metadata = traits.Either( + traits.Bool, + File, + default=False, usedefault=True, - desc='Number of volumes at start of series to ignore') + desc="Save component metadata as text file", + ) + ignore_initial_volumes = traits.Range( + low=0, usedefault=True, desc="Number of volumes at start of series to ignore" + ) failure_mode = traits.Enum( - 'error', 'NaN', + "error", + "NaN", usedefault=True, - desc='When no components are found or convergence fails, raise an error ' - 'or silently return columns of NaNs.') + desc="When no components are found or convergence fails, raise an error " + "or silently return columns of NaNs.", + ) class CompCorOutputSpec(TraitedSpec): components_file = File( - exists=True, desc='text file containing the noise components') - pre_filter_file = File(desc='text file containing high-pass filter basis') + exists=True, desc="text file containing the noise components" + ) + pre_filter_file = File(desc="text file containing high-pass filter basis") + metadata_file = File(desc="text file containing component metadata") -class CompCor(BaseInterface): +class CompCor(SimpleInterface): """ - Interface with core CompCor computation, used in aCompCor and tCompCor + Interface with core CompCor computation, used in aCompCor and tCompCor. 
CompCor provides three pre-filter options, all of which include per-voxel mean removal: - - polynomial: Legendre polynomial basis - - cosine: Discrete cosine basis - - False: mean-removal only + + - ``'polynomial'``: Legendre polynomial basis + - ``'cosine'``: Discrete cosine basis + - ``False``: mean-removal only In the case of ``polynomial`` and ``cosine`` filters, a pre-filter file may be saved with a row for each volume/timepoint, and a column for each @@ -461,7 +551,6 @@ class CompCor(BaseInterface): Example ------- - >>> ccinterface = CompCor() >>> ccinterface.inputs.realigned_file = 'functional.nii' >>> ccinterface.inputs.mask_files = 'mask.nii' @@ -470,156 +559,217 @@ class CompCor(BaseInterface): >>> ccinterface.inputs.regress_poly_degree = 2 """ + input_spec = CompCorInputSpec output_spec = CompCorOutputSpec - references_ = [{ - 'entry': - BibTeX( - "@article{compcor_2007," - "title = {A component based noise correction method (CompCor) for BOLD and perfusion based}," - "volume = {37}," - "number = {1}," - "doi = {10.1016/j.neuroimage.2007.04.042}," - "urldate = {2016-08-13}," - "journal = {NeuroImage}," - "author = {Behzadi, Yashar and Restom, Khaled and Liau, Joy and Liu, Thomas T.}," - "year = {2007}," - "pages = {90-101},}"), - 'tags': ['method', 'implementation'] - }] + _references = [ + { + "tags": ["method", "implementation"], + "entry": BibTeX( + """\ +@article{compcor_2007, + title = {A component based noise correction method (CompCor) for BOLD and perfusion based}, + volume = {37}, + number = {1}, + doi = {10.1016/j.neuroimage.2007.04.042}, + urldate = {2016-08-13}, + journal = {NeuroImage}, + author = {Behzadi, Yashar and Restom, Khaled and Liau, Joy and Liu, Thomas T.}, + year = {2007}, + pages = {90-101} +}""" + ), + } + ] def __init__(self, *args, **kwargs): - ''' exactly the same as compcor except the header ''' - super(CompCor, self).__init__(*args, **kwargs) - self._header = 'CompCor' + """exactly the same as compcor except the header""" + super().__init__(*args, **kwargs) + self._header = "CompCor" def _run_interface(self, runtime): mask_images = [] if isdefined(self.inputs.mask_files): - mask_images = combine_mask_files(self.inputs.mask_files, - self.inputs.merge_method, - self.inputs.mask_index) + mask_images = combine_mask_files( + self.inputs.mask_files, self.inputs.merge_method, self.inputs.mask_index + ) if self.inputs.use_regress_poly: - self.inputs.pre_filter = 'polynomial' + self.inputs.pre_filter = "polynomial" # Degree 0 == remove mean; see compute_noise_components - degree = (self.inputs.regress_poly_degree - if self.inputs.pre_filter == 'polynomial' else 0) + degree = ( + self.inputs.regress_poly_degree + if self.inputs.pre_filter == "polynomial" + else 0 + ) - imgseries = nb.load(self.inputs.realigned_file, mmap=NUMPY_MMAP) + imgseries = nb.load(self.inputs.realigned_file) if len(imgseries.shape) != 4: - raise ValueError('{} expected a 4-D nifti file. Input {} has ' - '{} dimensions (shape {})'.format( - self._header, self.inputs.realigned_file, - len(imgseries.shape), imgseries.shape)) + raise ValueError( + "{} expected a 4-D nifti file. 
Input {} has " + "{} dimensions (shape {})".format( + self._header, + self.inputs.realigned_file, + len(imgseries.shape), + imgseries.shape, + ) + ) if len(mask_images) == 0: img = nb.Nifti1Image( - np.ones(imgseries.shape[:3], dtype=np.bool), + np.ones(imgseries.shape[:3], dtype=bool), affine=imgseries.affine, - header=imgseries.header) + header=imgseries.header, + ) mask_images = [img] skip_vols = self.inputs.ignore_initial_volumes if skip_vols: imgseries = imgseries.__class__( - imgseries.get_data()[..., skip_vols:], imgseries.affine, - imgseries.header) + imgseries.dataobj[..., skip_vols:], imgseries.affine, imgseries.header + ) - mask_images = self._process_masks(mask_images, imgseries.get_data()) + mask_images = self._process_masks(mask_images, imgseries.dataobj) TR = 0 - if self.inputs.pre_filter == 'cosine': + if self.inputs.pre_filter == "cosine": if isdefined(self.inputs.repetition_time): TR = self.inputs.repetition_time else: # Derive TR from NIfTI header, if possible try: TR = imgseries.header.get_zooms()[3] - if imgseries.header.get_xyzt_units()[1] == 'msec': + if imgseries.header.get_xyzt_units()[1] == "msec": TR /= 1000 except (AttributeError, IndexError): TR = 0 if TR == 0: raise ValueError( - '{} cannot detect repetition time from image - ' - 'Set the repetition_time input'.format(self._header)) - - components, filter_basis = compute_noise_components( - imgseries.get_data(), mask_images, self.inputs.num_components, - self.inputs.pre_filter, degree, self.inputs.high_pass_cutoff, TR) + "{} cannot detect repetition time from image - " + "Set the repetition_time input".format(self._header) + ) + + if isdefined(self.inputs.variance_threshold): + components_criterion = self.inputs.variance_threshold + elif isdefined(self.inputs.num_components): + components_criterion = self.inputs.num_components + else: + components_criterion = 6 + IFLOGGER.warning( + "`num_components` and `variance_threshold` are " + "not defined. Setting number of components to 6 " + "for backward compatibility. Please set either " + "`num_components` or `variance_threshold`, as " + "this feature may be deprecated in the future." 
+ ) + + components, filter_basis, metadata = compute_noise_components( + imgseries.get_fdata(dtype=np.float32), + mask_images, + components_criterion, + self.inputs.pre_filter, + degree, + self.inputs.high_pass_cutoff, + TR, + self.inputs.failure_mode, + self.inputs.mask_names, + ) if skip_vols: old_comp = components nrows = skip_vols + components.shape[0] - components = np.zeros( - (nrows, components.shape[1]), dtype=components.dtype) + components = np.zeros((nrows, components.shape[1]), dtype=components.dtype) components[skip_vols:] = old_comp - components_file = os.path.join(os.getcwd(), - self.inputs.components_file) + components_file = os.path.join(os.getcwd(), self.inputs.components_file) + components_header = self._make_headers(components.shape[1]) np.savetxt( components_file, components, fmt=b"%.10f", - delimiter='\t', - header=self._make_headers(components.shape[1]), - comments='') - - if self.inputs.pre_filter and self.inputs.save_pre_filter: - pre_filter_file = self._list_outputs()['pre_filter_file'] - ftype = { - 'polynomial': 'Legendre', - 'cosine': 'Cosine' - }[self.inputs.pre_filter] + delimiter="\t", + header="\t".join(components_header), + comments="", + ) + self._results["components_file"] = os.path.join( + runtime.cwd, self.inputs.components_file + ) + + save_pre_filter = False + if self.inputs.pre_filter in ["polynomial", "cosine"]: + save_pre_filter = self.inputs.save_pre_filter + + if save_pre_filter: + self._results["pre_filter_file"] = save_pre_filter + if save_pre_filter is True: + self._results["pre_filter_file"] = os.path.join( + runtime.cwd, "pre_filter.tsv" + ) + + ftype = {"polynomial": "Legendre", "cosine": "Cosine"}[ + self.inputs.pre_filter + ] ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0 - header = ['{}{:02d}'.format(ftype, i) for i in range(ncols)] + header = [f"{ftype}{i:02d}" for i in range(ncols)] if skip_vols: old_basis = filter_basis # nrows defined above filter_basis = np.zeros( - (nrows, ncols + skip_vols), dtype=filter_basis.dtype) + (nrows, ncols + skip_vols), dtype=filter_basis.dtype + ) if old_basis.size > 0: filter_basis[skip_vols:, :ncols] = old_basis filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols) - header.extend([ - 'NonSteadyStateOutlier{:02d}'.format(i) - for i in range(skip_vols) - ]) + header.extend( + [f"NonSteadyStateOutlier{i:02d}" for i in range(skip_vols)] + ) np.savetxt( - pre_filter_file, + self._results["pre_filter_file"], filter_basis, - fmt=b'%.10f', - delimiter='\t', - header='\t'.join(header), - comments='') + fmt=b"%.10f", + delimiter="\t", + header="\t".join(header), + comments="", + ) + + metadata_file = self.inputs.save_metadata + if metadata_file: + self._results["metadata_file"] = metadata_file + if metadata_file is True: + self._results["metadata_file"] = os.path.join( + runtime.cwd, "component_metadata.tsv" + ) + components_names = np.empty(len(metadata["mask"]), dtype="object_") + retained = np.where(metadata["retained"]) + not_retained = np.where(np.logical_not(metadata["retained"])) + components_names[retained] = components_header + components_names[not_retained] = [ + f"dropped{i}" for i in range(len(not_retained[0])) + ] + with open(self._results["metadata_file"], "w") as f: + f.write("\t".join(["component"] + list(metadata.keys())) + "\n") + for i in zip(components_names, *metadata.values()): + f.write( + "{0[0]}\t{0[1]}\t{0[2]:.10f}\t" + "{0[3]:.10f}\t{0[4]:.10f}\t{0[5]}\n".format(i) + ) return runtime def _process_masks(self, mask_images, timeseries=None): return mask_images - def 
_list_outputs(self): - outputs = self._outputs().get() - outputs['components_file'] = os.path.abspath( - self.inputs.components_file) - - save_pre_filter = self.inputs.save_pre_filter - if save_pre_filter: - if isinstance(save_pre_filter, bool): - save_pre_filter = os.path.abspath('pre_filter.tsv') - outputs['pre_filter_file'] = save_pre_filter - - return outputs - def _make_headers(self, num_col): - header = self.inputs.header_prefix if \ - isdefined(self.inputs.header_prefix) else self._header - headers = ['{}{:02d}'.format(header, i) for i in range(num_col)] - return '\t'.join(headers) + header = ( + self.inputs.header_prefix + if isdefined(self.inputs.header_prefix) + else self._header + ) + headers = [f"{header}{i:02d}" for i in range(num_col)] + return headers class ACompCor(CompCor): @@ -630,35 +780,35 @@ class ACompCor(CompCor): """ def __init__(self, *args, **kwargs): - ''' exactly the same as compcor except the header ''' - super(ACompCor, self).__init__(*args, **kwargs) - self._header = 'aCompCor' + """exactly the same as compcor except the header""" + super().__init__(*args, **kwargs) + self._header = "aCompCor" class TCompCorInputSpec(CompCorInputSpec): # and all the fields in CompCorInputSpec percentile_threshold = traits.Range( - low=0., - high=1., - value=.02, + low=0.0, + high=1.0, + value=0.02, exclude_low=True, exclude_high=True, usedefault=True, - desc='the percentile ' - 'used to select highest-variance ' - 'voxels, represented by a number ' - 'between 0 and 1, exclusive. By ' - 'default, this value is set to .02. ' - 'That is, the 2% of voxels ' - 'with the highest variance are used.') + desc="the percentile " + "used to select highest-variance " + "voxels, represented by a number " + "between 0 and 1, exclusive. By " + "default, this value is set to .02. " + "That is, the 2% of voxels " + "with the highest variance are used.", + ) class TCompCorOutputSpec(CompCorOutputSpec): # and all the fields in CompCorOutputSpec high_variance_masks = OutputMultiPath( - File(exists=True), - desc=(("voxels exceeding the variance" - " threshold"))) + File(exists=True), desc=("voxels exceeding the variance threshold") + ) class TCompCor(CompCor): @@ -667,7 +817,6 @@ class TCompCor(CompCor): Example ------- - >>> ccinterface = TCompCor() >>> ccinterface.inputs.realigned_file = 'functional.nii' >>> ccinterface.inputs.mask_files = 'mask.nii' @@ -682,40 +831,43 @@ class TCompCor(CompCor): output_spec = TCompCorOutputSpec def __init__(self, *args, **kwargs): - ''' exactly the same as compcor except the header ''' - super(TCompCor, self).__init__(*args, **kwargs) - self._header = 'tCompCor' + """exactly the same as compcor except the header""" + super().__init__(*args, **kwargs) + self._header = "tCompCor" self._mask_files = [] def _process_masks(self, mask_images, timeseries=None): out_images = [] self._mask_files = [] + timeseries = np.asanyarray(timeseries) for i, img in enumerate(mask_images): - mask = img.get_data().astype(np.bool) + mask = np.asanyarray(img.dataobj).astype(bool) imgseries = timeseries[mask, :] imgseries = regress_poly(2, imgseries)[0] tSTD = _compute_tSTD(imgseries, 0, axis=-1) threshold_std = np.percentile( tSTD, - np.round(100. * - (1. 
- self.inputs.percentile_threshold)).astype(int))
+                np.round(100.0 * (1.0 - self.inputs.percentile_threshold)).astype(int),
+            )
             mask_data = np.zeros_like(mask)
             mask_data[mask != 0] = tSTD >= threshold_std
-            out_image = nb.Nifti1Image(
-                mask_data, affine=img.affine, header=img.header)
+            out_image = nb.Nifti1Image(mask_data, affine=img.affine, header=img.header)
 
             # save mask
-            mask_file = os.path.abspath('mask_{:03d}.nii.gz'.format(i))
+            mask_file = os.path.abspath(f"mask_{i:03d}.nii.gz")
             out_image.to_filename(mask_file)
-            IFLOGGER.debug('tCompcor computed and saved mask of shape %s to '
-                           'mask_file %s', str(mask.shape), mask_file)
+            IFLOGGER.debug(
+                "tCompcor computed and saved mask of shape %s to mask_file %s",
+                str(mask.shape),
+                mask_file,
+            )
             self._mask_files.append(mask_file)
             out_images.append(out_image)
         return out_images
 
     def _list_outputs(self):
-        outputs = super(TCompCor, self)._list_outputs()
-        outputs['high_variance_masks'] = self._mask_files
+        outputs = super()._list_outputs()
+        outputs["high_variance_masks"] = self._mask_files
         return outputs
@@ -723,35 +875,31 @@ class TSNRInputSpec(BaseInterfaceInputSpec):
     in_file = InputMultiPath(
         File(exists=True),
         mandatory=True,
-        desc='realigned 4D file or a list of 3D files')
-    regress_poly = traits.Range(low=1, desc='Remove polynomials')
+        desc="realigned 4D file or a list of 3D files",
+    )
+    regress_poly = traits.Range(low=1, desc="Remove polynomials")
     tsnr_file = File(
-        'tsnr.nii.gz',
-        usedefault=True,
-        hash_files=False,
-        desc='output tSNR file')
+        "tsnr.nii.gz", usedefault=True, hash_files=False, desc="output tSNR file"
+    )
     mean_file = File(
-        'mean.nii.gz',
-        usedefault=True,
-        hash_files=False,
-        desc='output mean file')
+        "mean.nii.gz", usedefault=True, hash_files=False, desc="output mean file"
+    )
     stddev_file = File(
-        'stdev.nii.gz',
-        usedefault=True,
-        hash_files=False,
-        desc='output tSNR file')
+        "stdev.nii.gz", usedefault=True, hash_files=False, desc="output standard deviation file"
+    )
     detrended_file = File(
-        'detrend.nii.gz',
+        "detrend.nii.gz",
         usedefault=True,
         hash_files=False,
-        desc='input file after detrending')
+        desc="input file after detrending",
+    )
 
 
 class TSNROutputSpec(TraitedSpec):
-    tsnr_file = File(exists=True, desc='tsnr image file')
-    mean_file = File(exists=True, desc='mean image file')
-    stddev_file = File(exists=True, desc='std dev image file')
-    detrended_file = File(desc='detrended input file')
+    tsnr_file = File(exists=True, desc="tsnr image file")
+    mean_file = File(exists=True, desc="mean image file")
+    stddev_file = File(exists=True, desc="std dev image file")
+    detrended_file = File(desc="detrended input file")
 
 
 class TSNR(BaseInterface):
@@ -762,45 +910,44 @@
     Example
     -------
-
     >>> tsnr = TSNR()
     >>> tsnr.inputs.in_file = 'functional.nii'
     >>> res = tsnr.run()  # doctest: +SKIP
     """
+
     input_spec = TSNRInputSpec
     output_spec = TSNROutputSpec
 
     def _run_interface(self, runtime):
-        img = nb.load(self.inputs.in_file[0], mmap=NUMPY_MMAP)
+        img = nb.load(self.inputs.in_file[0])
         header = img.header.copy()
-        vollist = [
-            nb.load(filename, mmap=NUMPY_MMAP)
-            for filename in self.inputs.in_file
-        ]
+        vollist = [nb.load(filename) for filename in self.inputs.in_file]
         data = np.concatenate(
             [
-                vol.get_data().reshape(vol.shape[:3] + (-1, ))
+                vol.get_fdata(dtype=np.float32).reshape(vol.shape[:3] + (-1,))
                 for vol in vollist
             ],
-            axis=3)
+            axis=3,
+        )
 
         data = np.nan_to_num(data)
 
-        if data.dtype.kind == 'i':
+        if data.dtype.kind == "i":
             header.set_data_dtype(np.float32)
             data = data.astype(np.float32)
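+        # The optional detrending below regresses out a Legendre polynomial of
+        # the requested degree but keeps the mean (remove_mean=False), so the
+        # mean/std ratio computed afterwards remains a meaningful tSNR.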
         if isdefined(self.inputs.regress_poly):
-            data = regress_poly(
-                self.inputs.regress_poly, data, remove_mean=False)[0]
+            data = regress_poly(self.inputs.regress_poly, data, remove_mean=False)[0]
             img = nb.Nifti1Image(data, img.affine, header)
             nb.save(img, op.abspath(self.inputs.detrended_file))
 
         meanimg = np.mean(data, axis=3)
         stddevimg = np.std(data, axis=3)
         tsnr = np.zeros_like(meanimg)
-        tsnr[stddevimg > 1.e-3] = meanimg[stddevimg > 1.e-3] / stddevimg[
-            stddevimg > 1.e-3]
+        stddevimg_nonzero = stddevimg > 1.0e-3
+        tsnr[stddevimg_nonzero] = (
+            meanimg[stddevimg_nonzero] / stddevimg[stddevimg_nonzero]
+        )
         img = nb.Nifti1Image(tsnr, img.affine, header)
         nb.save(img, op.abspath(self.inputs.tsnr_file))
         img = nb.Nifti1Image(meanimg, img.affine, header)
@@ -811,21 +958,23 @@ def _run_interface(self, runtime):
 
     def _list_outputs(self):
         outputs = self._outputs().get()
-        for k in ['tsnr_file', 'mean_file', 'stddev_file']:
+        for k in ["tsnr_file", "mean_file", "stddev_file"]:
             outputs[k] = op.abspath(getattr(self.inputs, k))
 
         if isdefined(self.inputs.regress_poly):
-            outputs['detrended_file'] = op.abspath(self.inputs.detrended_file)
+            outputs["detrended_file"] = op.abspath(self.inputs.detrended_file)
         return outputs
 
 
 class NonSteadyStateDetectorInputSpec(BaseInterfaceInputSpec):
-    in_file = File(exists=True, mandatory=True, desc='4D NIFTI EPI file')
+    in_file = File(exists=True, mandatory=True, desc="4D NIFTI EPI file")
 
 
 class NonSteadyStateDetectorOutputSpec(TraitedSpec):
-    n_volumes_to_discard = traits.Int(desc='Number of non-steady state volumes'
-                                      'detected in the beginning of the scan.')
+    n_volumes_to_discard = traits.Int(
+        desc="Number of non-steady state volumes "
+        "detected at the beginning of the scan."
+    )
 
 
 class NonSteadyStateDetector(BaseInterface):
@@ -839,10 +988,11 @@
     def _run_interface(self, runtime):
         in_nii = nb.load(self.inputs.in_file)
-        global_signal = in_nii.get_data()[:, :, :, :50].mean(axis=0).mean(
-            axis=0).mean(axis=0)
+        global_signal = (
+            in_nii.dataobj[:, :, :, :50].mean(axis=0).mean(axis=0).mean(axis=0)
+        )
 
-        self._results = {'n_volumes_to_discard': is_outlier(global_signal)}
+        self._results = {"n_volumes_to_discard": is_outlier(global_signal)}
 
         return runtime
 
@@ -850,10 +1000,20 @@ def _list_outputs(self):
         return self._results
 
 
-def compute_dvars(in_file,
-                  in_mask,
-                  remove_zerovariance=False,
-                  intensity_normalization=1000):
+def _AR_est_YW(x, order, rxx=None):
+    """Retrieve AR coefficients while dropping the sig_sq return value"""
+    from nitime.algorithms import AR_est_YW
+
+    return AR_est_YW(x, order, rxx=rxx)[0]
+
+
+def compute_dvars(
+    in_file,
+    in_mask,
+    remove_zerovariance=False,
+    intensity_normalization=1000,
+    variance_tol=0.0,
+):
     """
     Compute the :abbr:`DVARS (D referring to temporal
     derivative of timecourses, VARS referring to RMS variance over voxels)`
@@ -884,35 +1044,41 @@
     """
     import numpy as np
     import nibabel as nb
-    from nitime.algorithms import AR_est_YW
     import warnings
 
-    func = nb.load(in_file, mmap=NUMPY_MMAP).get_data().astype(np.float32)
-    mask = nb.load(in_mask, mmap=NUMPY_MMAP).get_data().astype(np.uint8)
+    func = np.float32(nb.load(in_file).dataobj)
+    mask = np.bool_(nb.load(in_mask).dataobj)
 
     if len(func.shape) != 4:
         raise RuntimeError("Input fMRI dataset should be 4-dimensional")
 
-    idx = np.where(mask > 0)
-    mfunc = func[idx[0], idx[1], idx[2], :]
+    mfunc = func[mask]
 
     if intensity_normalization != 0:
         mfunc = (mfunc / np.median(mfunc)) * intensity_normalization
 
     # Robust
standard deviation (we are using "lower" interpolation
    # because this is what FSL is doing)
-    func_sd = (np.percentile(mfunc, 75, axis=1, interpolation="lower") -
-               np.percentile(mfunc, 25, axis=1, interpolation="lower")) / 1.349
+    try:
+        func_sd = (
+            np.percentile(mfunc, 75, axis=1, method="lower")
+            - np.percentile(mfunc, 25, axis=1, method="lower")
+        ) / 1.349
+    except TypeError:  # NP < 1.22
+        func_sd = (
+            np.percentile(mfunc, 75, axis=1, interpolation="lower")
+            - np.percentile(mfunc, 25, axis=1, interpolation="lower")
+        ) / 1.349
 
     if remove_zerovariance:
-        mfunc = mfunc[func_sd != 0, :]
-        func_sd = func_sd[func_sd != 0]
+        zero_variance_voxels = func_sd > variance_tol
+        mfunc = mfunc[zero_variance_voxels, :]
+        func_sd = func_sd[zero_variance_voxels]
 
     # Compute (non-robust) estimate of lag-1 autocorrelation
-    ar1 = np.apply_along_axis(AR_est_YW, 1,
-                              regress_poly(0, mfunc,
-                                           remove_mean=True)[0].astype(
-                                               np.float32), 1)[:, 0]
+    ar1 = np.apply_along_axis(
+        _AR_est_YW, 1, regress_poly(0, mfunc, remove_mean=True)[0].astype(np.float32), 1
+    )
 
     # Compute (predicted) standard deviation of temporal difference time series
     diff_sdhat = np.squeeze(np.sqrt(((1 - ar1) * 2).tolist())) * func_sd
@@ -928,28 +1094,25 @@
     dvars_stdz = dvars_nstd / diff_sd_mean
 
     with warnings.catch_warnings():  # catch, e.g., divide by zero errors
-        warnings.filterwarnings('error')
+        warnings.filterwarnings("error")
 
         # voxelwise standardization
         diff_vx_stdz = np.square(
-            func_diff / np.array([diff_sdhat] * func_diff.shape[-1]).T)
+            func_diff / np.array([diff_sdhat] * func_diff.shape[-1]).T
+        )
         dvars_vx_stdz = np.sqrt(diff_vx_stdz.mean(axis=0))
 
     return (dvars_stdz, dvars_nstd, dvars_vx_stdz)
 
 
-def plot_confound(tseries,
-                  figsize,
-                  name,
-                  units=None,
-                  series_tr=None,
-                  normalize=False):
+def plot_confound(tseries, figsize, name, units=None, series_tr=None, normalize=False):
     """
     A helper function to plot :abbr:`fMRI (functional MRI)` confounds.
     """
     import matplotlib
-    matplotlib.use(config.get('execution', 'matplotlib_backend'))
+
+    matplotlib.use(config.get("execution", "matplotlib_backend"))
     import matplotlib.pyplot as plt
     from matplotlib.gridspec import GridSpec
     from matplotlib.backends.backend_pdf import FigureCanvasPdf as FigureCanvas
@@ -968,18 +1131,18 @@
     ax.set_xlim((0, len(tseries)))
     ylabel = name
     if units is not None:
-        ylabel += (' speed [{}/s]' if normalize else ' [{}]').format(units)
+        ylabel += (" speed [{}/s]" if normalize else " [{}]").format(units)
     ax.set_ylabel(ylabel)
 
-    xlabel = 'Frame #'
+    xlabel = "Frame #"
     if series_tr is not None:
-        xlabel = 'Frame # ({} sec TR)'.format(series_tr)
+        xlabel = f"Frame # ({series_tr} sec TR)"
     ax.set_xlabel(xlabel)
 
     ylim = ax.get_ylim()
 
     ax = fig.add_subplot(grid[0, -1])
     sns.distplot(tseries, vertical=True, ax=ax)
-    ax.set_xlabel('Frames')
+    ax.set_xlabel("Frames")
     ax.set_ylim(ylim)
     ax.set_yticklabels([])
     return fig
@@ -995,7 +1158,7 @@ def is_outlier(points, thresh=3.5):
         a modified z-score (based on the median absolute deviation) greater
         than this value will be classified as outliers.
 
-    :return: A bolean mask, of size numobservations-length array.
+    :return: A boolean mask of length numobservations.
 
     ..
note:: References @@ -1007,7 +1170,7 @@ def is_outlier(points, thresh=3.5): if len(points.shape) == 1: points = points[:, None] median = np.median(points, axis=0) - diff = np.sum((points - median)**2, axis=-1) + diff = np.sum((points - median) ** 2, axis=-1) diff = np.sqrt(diff) med_abs_deviation = np.median(diff) @@ -1023,9 +1186,13 @@ def is_outlier(points, thresh=3.5): return timepoints_to_discard -def cosine_filter(data, timestep, period_cut, remove_mean=True, axis=-1): +def cosine_filter( + data, timestep, period_cut, remove_mean=True, axis=-1, failure_mode="error" +): datashape = data.shape timepoints = datashape[axis] + if datashape[0] == 0 and failure_mode != "error": + return data, np.array([]) data = data.reshape((-1, timepoints)) @@ -1044,7 +1211,7 @@ def cosine_filter(data, timestep, period_cut, remove_mean=True, axis=-1): return residuals.reshape(datashape), non_constant_regressors -def regress_poly(degree, data, remove_mean=True, axis=-1): +def regress_poly(degree, data, remove_mean=True, axis=-1, failure_mode="error"): """ Returns data with degree polynomial regressed out. @@ -1052,11 +1219,14 @@ def regress_poly(degree, data, remove_mean=True, axis=-1): :param int axis: numpy array axes along which regression is performed """ - IFLOGGER.debug('Performing polynomial regression on data of shape %s', - str(data.shape)) + IFLOGGER.debug( + "Performing polynomial regression on data of shape %s", str(data.shape) + ) datashape = data.shape timepoints = datashape[axis] + if datashape[0] == 0 and failure_mode != "error": + return data, np.array([]) # Rearrange all voxel-wise time-series in rows data = data.reshape((-1, timepoints)) @@ -1089,6 +1259,8 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): A helper function for CompCor + Parameters + ---------- mask_files: a list one or more binary mask files mask_method: enum ('union', 'intersect', 'none') @@ -1096,7 +1268,10 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): mask_index: an integer determines which file to return (mutually exclusive with mask_method) - returns: a list of nibabel images + Returns + ------- + masks: a list of nibabel images + """ if isdefined(mask_index) or not isdefined(mask_method): @@ -1104,75 +1279,129 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): if len(mask_files) == 1: mask_index = 0 else: - raise ValueError(('When more than one mask file is provided, ' - 'one of merge_method or mask_index must be ' - 'set')) + raise ValueError( + "When more than one mask file is provided, " + "one of merge_method or mask_index must be " + "set" + ) if mask_index < len(mask_files): - mask = nb.load(mask_files[mask_index], mmap=NUMPY_MMAP) + mask = nb.load(mask_files[mask_index]) return [mask] - raise ValueError(('mask_index {0} must be less than number of mask ' - 'files {1}').format(mask_index, len(mask_files))) - masks = [] - if mask_method == 'none': - for filename in mask_files: - masks.append(nb.load(filename, mmap=NUMPY_MMAP)) - return masks + raise ValueError( + f"mask_index {mask_index} must be less than number of mask files {len(mask_files)}" + ) + if mask_method == "none": + return [nb.load(filename) for filename in mask_files] - if mask_method == 'union': + if mask_method == "union": mask = None for filename in mask_files: - img = nb.load(filename, mmap=NUMPY_MMAP) + img = nb.load(filename) + img_as_mask = np.asanyarray(img.dataobj).astype("int32") > 0 if mask is None: - mask = img.get_data() > 0 - np.logical_or(mask, img.get_data() 
> 0, mask)
+                mask = img_as_mask
+            np.logical_or(mask, img_as_mask, mask)
         img = nb.Nifti1Image(mask, img.affine, header=img.header)
         return [img]

-    if mask_method == 'intersect':
+    if mask_method == "intersect":
         mask = None
         for filename in mask_files:
-            img = nb.load(filename, mmap=NUMPY_MMAP)
+            img = nb.load(filename)
+            img_as_mask = np.asanyarray(img.dataobj).astype("int32") > 0
             if mask is None:
-                mask = img.get_data() > 0
-            np.logical_and(mask, img.get_data() > 0, mask)
+                mask = img_as_mask
+            np.logical_and(mask, img_as_mask, mask)
         img = nb.Nifti1Image(mask, img.affine, header=img.header)
         return [img]


-def compute_noise_components(imgseries, mask_images, num_components,
-                             filter_type, degree, period_cut, repetition_time):
-    """Compute the noise components from the imgseries for each mask
-
-    imgseries: a nibabel img
-    mask_images: a list of nibabel images
-    num_components: number of noise components to return
-    filter_type: type off filter to apply to time series before computing
-                 noise components.
-        'polynomial' - Legendre polynomial basis
-        'cosine' - Discrete cosine (DCT) basis
-        False - None (mean-removal only)
-
-    Filter options:
-
-    degree: order of polynomial used to remove trends from the timeseries
-    period_cut: minimum period (in sec) for DCT high-pass filter
-    repetition_time: time (in sec) between volume acquisitions
+def compute_noise_components(
+    imgseries,
+    mask_images,
+    components_criterion=0.5,
+    filter_type=False,
+    degree=0,
+    period_cut=128,
+    repetition_time=None,
+    failure_mode="error",
+    mask_names=None,
+):
+    """
+    Compute the noise components from the image series for each mask.

-    returns:
+    Parameters
+    ----------
+    imgseries: nibabel image
+        Time series data to be decomposed.
+    mask_images: list
+        List of nibabel images. Time series data from ``imgseries`` is subset
+        according to the spatial extent of each mask, and the subset data is
+        then decomposed using principal component analysis. Masks should be
+        coextensive with either anatomical or spatial noise ROIs.
+    components_criterion: float
+        Number of noise components to return. If this is a decimal value
+        between 0 and 1, then ``compute_noise_components`` will instead return
+        the smallest number of components necessary to explain the indicated
+        fraction of variance. If ``components_criterion`` is ``all``, then all
+        components will be returned.
+    filter_type: str
+        Type of filter to apply to time series before computing noise components.
+
+        - 'polynomial' - Legendre polynomial basis
+        - 'cosine' - Discrete cosine (DCT) basis
+        - False - None (mean-removal only)
+
+    failure_mode: str
+        Action to be taken in the event that any decomposition fails to
+        identify any components. ``error`` indicates that the routine should
+        raise an exception and exit, while any other value indicates that the
+        routine should return a matrix of NaN values equal in size to the
+        requested decomposition matrix.
+    mask_names: list or None
+        List of names for each image in ``mask_images``. This should be equal in
+        length to ``mask_images``, with the ith element of ``mask_names`` naming
+        the ith element of ``mask_images``.
+    degree: int
+        Order of polynomial used to remove trends from the timeseries
+    period_cut: float
+        Minimum period (in sec) for DCT high-pass filter
+    repetition_time: float
+        Time (in sec) between volume acquisitions. This must be defined if
+        the ``filter_type`` is ``cosine``.
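For reference, the union/intersect branches of ``combine_mask_files`` above both reduce to one accumulated element-wise logical operation over the binarized mask arrays. A minimal standalone sketch; the ``combine_masks`` helper name and the uint8 cast are ours, not part of the patch:

    import numpy as np
    import nibabel as nb

    def combine_masks(mask_files, method="union"):
        # "union" keeps voxels present in any mask; "intersect" only those in all
        combine = np.logical_or if method == "union" else np.logical_and
        mask = None
        for filename in mask_files:
            img = nb.load(filename)
            img_as_mask = np.asanyarray(img.dataobj).astype("int32") > 0
            mask = img_as_mask if mask is None else combine(mask, img_as_mask)
        # cast the boolean result to uint8 so it serializes portably
        return nb.Nifti1Image(mask.astype("uint8"), img.affine, header=img.header)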
- components: a numpy array - basis: a numpy array containing the (non-constant) filter regressors + Returns + ------- + components: numpy array + Numpy array containing the requested set of noise components + basis: numpy array + Numpy array containing the (non-constant) filter regressors + metadata: OrderedDict{str: numpy array} + Dictionary of eigenvalues, fractional explained variances, and + cumulative explained variances. """ - components = None basis = np.array([]) - for img in mask_images: - mask = img.get_data().astype(np.bool) + if components_criterion == "all": + components_criterion = -1 + mask_names = mask_names or range(len(mask_images)) + + components = [] + md_mask = [] + md_sv = [] + md_var = [] + md_cumvar = [] + md_retained = [] + + for name, img in zip(mask_names, mask_images): + mask = np.asanyarray(nb.squeeze_image(img).dataobj).astype(bool) if imgseries.shape[:3] != mask.shape: raise ValueError( - 'Inputs for CompCor, timeseries and mask, do not have ' - 'matching spatial dimensions ({} and {}, respectively)'.format( - imgseries.shape[:3], mask.shape)) + "Inputs for CompCor, timeseries and mask, do not have " + "matching spatial dimensions ({} and {}, respectively)".format( + imgseries.shape[:3], mask.shape + ) + ) voxel_timecourses = imgseries[mask, :] @@ -1181,14 +1410,22 @@ def compute_noise_components(imgseries, mask_images, num_components, # Currently support Legendre-polynomial or cosine or detrending # With no filter, the mean is nonetheless removed (poly w/ degree 0) - if filter_type == 'cosine': + if filter_type == "cosine": + if repetition_time is None: + raise ValueError("Repetition time must be provided for cosine filter") voxel_timecourses, basis = cosine_filter( - voxel_timecourses, repetition_time, period_cut) - elif filter_type in ('polynomial', False): + voxel_timecourses, + repetition_time, + period_cut, + failure_mode=failure_mode, + ) + elif filter_type in ("polynomial", False): # from paper: # "The constant and linear trends of the columns in the matrix M were # removed [prior to ...]" - voxel_timecourses, basis = regress_poly(degree, voxel_timecourses) + voxel_timecourses, basis = regress_poly( + degree, voxel_timecourses, failure_mode=failure_mode + ) # "Voxel time series from the noise ROI (either anatomical or tSTD) were # placed in a matrix M of size Nxm, with time along the row dimension @@ -1196,25 +1433,63 @@ def compute_noise_components(imgseries, mask_images, num_components, M = voxel_timecourses.T # "[... were removed] prior to column-wise variance normalization." - M = M / _compute_tSTD(M, 1.) + M = M / _compute_tSTD(M, 1.0) # "The covariance matrix C = MMT was constructed and decomposed into its # principal components using a singular value decomposition." 
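Taken together, the steps quoted from the paper are ordinary PCA computed through an SVD of the detrended, variance-normalized timepoints-by-voxels matrix. A rough self-contained sketch on toy data, with plain ``numpy.linalg.svd`` standing in for ``fallback_svd``:

    import numpy as np

    rng = np.random.default_rng(0)
    M = rng.standard_normal((50, 200))  # timepoints x voxels, toy data

    M = M - M.mean(axis=0)              # mean removal (degree-0 detrending)
    sd = M.std(axis=0)
    sd[sd == 0] = 1.0                   # guard flat voxels, as _compute_tSTD does
    M = M / sd                          # column-wise variance normalization

    u, s, _ = np.linalg.svd(M, full_matrices=False)
    components = u[:, :5]               # first five noise regressors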
try: - u, _, _ = fallback_svd(M, full_matrices=False) - except np.linalg.LinAlgError: - if self.inputs.failure_mode == 'error': + u, s, _ = fallback_svd(M, full_matrices=False) + except (np.linalg.LinAlgError, ValueError): + if failure_mode == "error": raise - u = np.ones((M.shape[0], num_components), dtype=np.float32) * np.nan - if components is None: - components = u[:, :num_components] - else: - components = np.hstack((components, u[:, :num_components])) - if components is None and num_components > 0: - if self.inputs.failure_mode == 'error': - raise ValueError('No components found') - components = np.ones((M.shape[0], num_components), dtype=np.float32) * np.nan - return components, basis + s = np.full(M.shape[0], np.nan, dtype=np.float32) + if components_criterion >= 1: + u = np.full( + (M.shape[0], components_criterion), np.nan, dtype=np.float32 + ) + else: + u = np.full((M.shape[0], 1), np.nan, dtype=np.float32) + + variance_explained = (s**2) / np.sum(s**2) + cumulative_variance_explained = np.cumsum(variance_explained) + + num_components = int(components_criterion) + if 0 < components_criterion < 1: + num_components = ( + np.searchsorted(cumulative_variance_explained, components_criterion) + 1 + ) + elif components_criterion == -1: + num_components = len(s) + + num_components = int(num_components) + if num_components == 0: + break + + components.append(u[:, :num_components]) + md_mask.append([name] * len(s)) + md_sv.append(s) + md_var.append(variance_explained) + md_cumvar.append(cumulative_variance_explained) + md_retained.append([i < num_components for i in range(len(s))]) + + if len(components) > 0: + components = np.hstack(components) + else: + if failure_mode == "error": + raise ValueError("No components found") + components = np.full((M.shape[0], num_components), np.nan, dtype=np.float32) + + metadata = OrderedDict( + [ + ("mask", list(chain(*md_mask))), + ("singular_value", np.hstack(md_sv)), + ("variance_explained", np.hstack(md_var)), + ("cumulative_variance_explained", np.hstack(md_cumvar)), + ("retained", list(chain(*md_retained))), + ] + ) + + return components, basis, metadata def _compute_tSTD(M, x, axis=0): @@ -1250,7 +1525,7 @@ def _cosine_drift(period_cut, frametimes): """ len_tim = len(frametimes) n_times = np.arange(len_tim) - hfcut = 1. / period_cut # input parameter is the period + hfcut = 1.0 / period_cut # input parameter is the period # frametimes.max() should be (len_tim-1)*dt dt = frametimes[1] - frametimes[0] @@ -1261,10 +1536,9 @@ def _cosine_drift(period_cut, frametimes): nfct = np.sqrt(2.0 / len_tim) for k in range(1, order): - cdrift[:, k - 1] = nfct * np.cos( - (np.pi / len_tim) * (n_times + .5) * k) + cdrift[:, k - 1] = nfct * np.cos((np.pi / len_tim) * (n_times + 0.5) * k) - cdrift[:, order - 1] = 1. 
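To make the ``components_criterion`` arithmetic above concrete, a toy run with made-up singular values:

    import numpy as np

    s = np.array([4.0, 2.0, 1.0, 0.5])
    variance_explained = s**2 / np.sum(s**2)    # ~[0.753, 0.188, 0.047, 0.012]
    cumulative = np.cumsum(variance_explained)  # ~[0.753, 0.941, 0.988, 1.000]

    # fractional criterion: smallest count whose cumulative share reaches it
    np.searchsorted(cumulative, 0.5) + 1        # -> 1 component
    np.searchsorted(cumulative, 0.9) + 1        # -> 2 components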
# or 1./sqrt(len_tim) to normalize
+    cdrift[:, order - 1] = 1.0  # or 1./sqrt(len_tim) to normalize
     return cdrift

@@ -1288,7 +1562,7 @@ def _full_rank(X, cmax=1e15):
     c = smax / smin
     if c < cmax:
         return X, c
-    IFLOGGER.warning('Matrix is singular at working precision, regularizing...')
+    IFLOGGER.warning("Matrix is singular at working precision, regularizing...")
     lda = (smax - cmax * smin) / (cmax - 1)
     s = s + lda
     X = np.dot(U, np.dot(np.diag(s), V))
diff --git a/nipype/algorithms/icc.py b/nipype/algorithms/icc.py
index a508eb5037..2ea5f43d87 100644
--- a/nipype/algorithms/icc.py
+++ b/nipype/algorithms/icc.py
@@ -1,22 +1,24 @@
-# -*- coding: utf-8 -*-
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)
-from builtins import range
 import os
+from functools import lru_cache

 import numpy as np
-from numpy import ones, kron, mean, eye, hstack, dot, tile
+from numpy import ones, kron, mean, eye, hstack, tile
 from numpy.linalg import pinv
 import nibabel as nb
-from ..interfaces.base import BaseInterfaceInputSpec, TraitedSpec, \
-    BaseInterface, traits, File
-from ..utils import NUMPY_MMAP
+from ..interfaces.base import (
+    BaseInterfaceInputSpec,
+    TraitedSpec,
+    BaseInterface,
+    traits,
+    File,
+)


 class ICCInputSpec(BaseInterfaceInputSpec):
     subjects_sessions = traits.List(
         traits.List(File(exists=True)),
         desc="n subjects m sessions 3D stat files",
-        mandatory=True)
+        mandatory=True,
+    )
     mask = File(exists=True, mandatory=True)

@@ -27,27 +29,25 @@ class ICCOutputSpec(TraitedSpec):

 class ICC(BaseInterface):
-    '''
+    """
     Calculates Intraclass Correlation Coefficient (3,1) as defined in
     P. E. Shrout & Joseph L. Fleiss (1979). "Intraclass Correlations: Uses in
     Assessing Rater Reliability". Psychological Bulletin 86 (2): 420-428. This
     particular implementation is aimed at reliability (test-retest) studies.
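For orientation: ICC(3,1) here is (MSR - MSE) / (MSR + (k - 1) * MSE) for k sessions, so consistent subject orderings across sessions push the value toward 1. A toy check against the module-level ``ICC_rep_anova`` rewritten below (numbers are illustrative):

    import numpy as np
    from nipype.algorithms.icc import ICC_rep_anova

    Y = np.array([[9.0, 10.0],  # 3 subjects x 2 sessions; session 2 is a
                  [5.0, 6.0],   # near-constant shift of session 1
                  [8.0, 8.0]])
    icc, subject_var, session_var, session_F, _, _ = ICC_rep_anova(Y)
    # icc comes out around 0.96: subject variance dominates residual error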
- ''' + """ + input_spec = ICCInputSpec output_spec = ICCOutputSpec def _run_interface(self, runtime): - maskdata = nb.load(self.inputs.mask).get_data() - maskdata = np.logical_not( - np.logical_or(maskdata == 0, np.isnan(maskdata))) - - session_datas = [[ - nb.load(fname, mmap=NUMPY_MMAP).get_data()[maskdata].reshape( - -1, 1) for fname in sessions - ] for sessions in self.inputs.subjects_sessions] - list_of_sessions = [ - np.dstack(session_data) for session_data in session_datas + maskdata = nb.load(self.inputs.mask).get_fdata() + maskdata = np.logical_not(np.logical_or(maskdata == 0, np.isnan(maskdata))) + + session_datas = [ + [nb.load(fname).get_fdata()[maskdata].reshape(-1, 1) for fname in sessions] + for sessions in self.inputs.subjects_sessions ] + list_of_sessions = [np.dstack(session_data) for session_data in session_datas] all_data = np.hstack(list_of_sessions) icc = np.zeros(session_datas[0][0].shape) session_F = np.zeros(session_datas[0][0].shape) @@ -56,73 +56,91 @@ def _run_interface(self, runtime): for x in range(icc.shape[0]): Y = all_data[x, :, :] - icc[x], subject_var[x], session_var[x], session_F[ - x], _, _ = ICC_rep_anova(Y) + icc[x], subject_var[x], session_var[x], session_F[x], _, _ = ICC_rep_anova( + Y + ) nim = nb.load(self.inputs.subjects_sessions[0][0]) new_data = np.zeros(nim.shape) - new_data[maskdata] = icc.reshape(-1, ) + new_data[maskdata] = icc.reshape(-1) new_img = nb.Nifti1Image(new_data, nim.affine, nim.header) - nb.save(new_img, 'icc_map.nii') + nb.save(new_img, "icc_map.nii") new_data = np.zeros(nim.shape) - new_data[maskdata] = session_var.reshape(-1, ) + new_data[maskdata] = session_var.reshape(-1) new_img = nb.Nifti1Image(new_data, nim.affine, nim.header) - nb.save(new_img, 'session_var_map.nii') + nb.save(new_img, "session_var_map.nii") new_data = np.zeros(nim.shape) - new_data[maskdata] = subject_var.reshape(-1, ) + new_data[maskdata] = subject_var.reshape(-1) new_img = nb.Nifti1Image(new_data, nim.affine, nim.header) - nb.save(new_img, 'subject_var_map.nii') + nb.save(new_img, "subject_var_map.nii") return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['icc_map'] = os.path.abspath('icc_map.nii') - outputs['session_var_map'] = os.path.abspath('session_var_map.nii') - outputs['subject_var_map'] = os.path.abspath('subject_var_map.nii') + outputs["icc_map"] = os.path.abspath("icc_map.nii") + outputs["session_var_map"] = os.path.abspath("session_var_map.nii") + outputs["subject_var_map"] = os.path.abspath("subject_var_map.nii") return outputs -def ICC_rep_anova(Y): - ''' +@lru_cache(maxsize=1) +def ICC_projection_matrix(shape): + nb_subjects, nb_conditions = shape + + x = kron(eye(nb_conditions), ones((nb_subjects, 1))) # sessions + x0 = tile(eye(nb_subjects), (nb_conditions, 1)) # subjects + X = hstack([x, x0]) + return X @ pinv(X.T @ X, hermitian=True) @ X.T + + +def ICC_rep_anova(Y, projection_matrix=None): + """ the data Y are entered as a 'table' ie subjects are in rows and repeated measures in columns One Sample Repeated measure ANOVA Y = XB + E with X = [FaTor / Subjects] - ''' + ``ICC_rep_anova`` involves an expensive operation to compute a projection + matrix, which depends only on the shape of ``Y``, which is computed by + calling ``ICC_projection_matrix(Y.shape)``. If arrays of multiple shapes are + expected, it may be worth pre-computing and passing directly as an + argument to ``ICC_rep_anova``. 
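A short sketch of the pre-computation pattern this docstring recommends, assuming the patched module (array shapes are illustrative):

    import numpy as np
    from nipype.algorithms.icc import ICC_rep_anova, ICC_projection_matrix

    all_voxels = np.random.rand(10000, 5, 3)          # voxels x subjects x sessions
    pm = ICC_projection_matrix(all_voxels.shape[1:])  # computed once for (5, 3)
    iccs = [ICC_rep_anova(Y, projection_matrix=pm)[0] for Y in all_voxels]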
+ + If only one ``Y.shape`` will occur, you do not need to explicitly handle + these, as the most recently calculated matrix is cached automatically. + For example, if you are running the same computation on every voxel of + an image, you will see significant speedups. + + If a ``Y`` is passed with a new shape, a new matrix will be calculated + automatically. + """ [nb_subjects, nb_conditions] = Y.shape dfc = nb_conditions - 1 - dfe = (nb_subjects - 1) * dfc dfr = nb_subjects - 1 + dfe = dfr * dfc # Compute the repeated measure effect # ------------------------------------ # Sum Square Total - mean_Y = mean(Y) - SST = ((Y - mean_Y)**2).sum() - - # create the design matrix for the different levels - x = kron(eye(nb_conditions), ones((nb_subjects, 1))) # sessions - x0 = tile(eye(nb_subjects), (nb_conditions, 1)) # subjects - X = hstack([x, x0]) + demeaned_Y = Y - mean(Y) + SST = np.sum(demeaned_Y**2) # Sum Square Error - predicted_Y = dot(dot(dot(X, pinv(dot(X.T, X))), X.T), Y.flatten('F')) - residuals = Y.flatten('F') - predicted_Y - SSE = (residuals**2).sum() - - residuals.shape = Y.shape + if projection_matrix is None: + projection_matrix = ICC_projection_matrix(Y.shape) + residuals = Y.flatten("F") - (projection_matrix @ Y.flatten("F")) + SSE = np.sum(residuals**2) MSE = SSE / dfe - # Sum square session effect - between colums/sessions - SSC = ((mean(Y, 0) - mean_Y)**2).sum() * nb_subjects + # Sum square session effect - between columns/sessions + SSC = np.sum(mean(demeaned_Y, 0) ** 2) * nb_subjects MSC = SSC / dfc / nb_subjects session_effect_F = MSC / MSE @@ -131,8 +149,8 @@ def ICC_rep_anova(Y): SSR = SST - SSC - SSE MSR = SSR / dfr - # ICC(3,1) = (mean square subjeT - mean square error) / - # (mean square subjeT + (k-1)*-mean square error) + # ICC(3,1) = (mean square subject - mean square error) / + # (mean square subject + (k-1)*-mean square error) ICC = (MSR - MSE) / (MSR + dfc * MSE) e_var = MSE # variance of error diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 88ef8fc15f..5ba00d2675 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -1,60 +1,62 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Miscellaneous algorithms for 2D contours and 3D triangularized meshes handling """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import zip, str, bytes - import os.path as op import numpy as np from numpy import linalg as nla from .. 
import logging -from ..interfaces.base import (BaseInterface, traits, TraitedSpec, File, - BaseInterfaceInputSpec) +from ..interfaces.base import ( + BaseInterface, + traits, + TraitedSpec, + File, + BaseInterfaceInputSpec, + Tuple, +) from ..interfaces.vtkbase import tvtk from ..interfaces import vtkbase as VTKInfo -IFLOGGER = logging.getLogger('nipype.interface') + +IFLOGGER = logging.getLogger("nipype.interface") class TVTKBaseInterface(BaseInterface): - """ A base class for interfaces using VTK """ + """A base class for interfaces using VTK""" _redirect_x = True def __init__(self, **inputs): if VTKInfo.no_tvtk(): - raise ImportError('This interface requires tvtk to run.') - super(TVTKBaseInterface, self).__init__(**inputs) + raise ImportError("This interface requires tvtk to run.") + super().__init__(**inputs) class WarpPointsInputSpec(BaseInterfaceInputSpec): - points = File( - exists=True, mandatory=True, desc='file containing the point set') + points = File(exists=True, mandatory=True, desc="file containing the point set") warp = File( - exists=True, - mandatory=True, - desc='dense deformation field to be applied') + exists=True, mandatory=True, desc="dense deformation field to be applied" + ) interp = traits.Enum( - 'cubic', - 'nearest', - 'linear', + "cubic", + "nearest", + "linear", usedefault=True, mandatory=True, - desc='interpolation') + desc="interpolation", + ) out_points = File( - name_source='points', - name_template='%s_warped', - output_name='out_points', + name_source="points", + name_template="%s_warped", + output_name="out_points", keep_extension=True, - desc='the warped point set') + desc="the warped point set", + ) class WarpPointsOutputSpec(TraitedSpec): - out_points = File(desc='the warped point set') + out_points = File(desc="the warped point set") class WarpPoints(TVTKBaseInterface): @@ -74,22 +76,23 @@ class WarpPoints(TVTKBaseInterface): res = wp.run() """ + input_spec = WarpPointsInputSpec output_spec = WarpPointsOutputSpec - def _gen_fname(self, in_file, suffix='generated', ext=None): + def _gen_fname(self, in_file, suffix="generated", ext=None): fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext if ext is None: ext = fext - if ext[0] == '.': + if ext[0] == ".": ext = ext[1:] - return op.abspath('%s_%s.%s' % (fname, suffix, ext)) + return op.abspath(f"{fname}_{suffix}.{ext}") def _run_interface(self, runtime): import nibabel as nb @@ -110,12 +113,11 @@ def _run_interface(self, runtime): warps = [] for axis in warp_dims: - wdata = axis.get_data() + wdata = axis.dataobj # four_to_three ensures this is an array if np.any(wdata != 0): - warp = ndimage.map_coordinates(wdata, voxpoints.transpose()) else: - warp = np.zeros((points.shape[0], )) + warp = np.zeros((points.shape[0],)) warps.append(warp) @@ -124,15 +126,15 @@ def _run_interface(self, runtime): mesh.points = newpoints w = tvtk.PolyDataWriter() VTKInfo.configure_input_data(w, mesh) - w.file_name = self._gen_fname( - self.inputs.points, suffix='warped', ext='.vtk') + w.file_name = self._gen_fname(self.inputs.points, suffix="warped", ext=".vtk") w.write() return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_points'] = self._gen_fname( - self.inputs.points, suffix='warped', ext='.vtk') + outputs["out_points"] = self._gen_fname( + self.inputs.points, suffix="warped", ext=".vtk" + ) return outputs @@ -140,43 +142,46 @@ class ComputeMeshWarpInputSpec(BaseInterfaceInputSpec): surface1 = 
File( exists=True, mandatory=True, - desc=('Reference surface (vtk format) to which compute ' - 'distance.')) + desc=("Reference surface (vtk format) to which compute distance."), + ) surface2 = File( exists=True, mandatory=True, - desc=('Test surface (vtk format) from which compute ' - 'distance.')) + desc=("Test surface (vtk format) from which compute distance."), + ) metric = traits.Enum( - 'euclidean', - 'sqeuclidean', - usedefault=True, - desc='norm used to report distance') + "euclidean", "sqeuclidean", usedefault=True, desc="norm used to report distance" + ) weighting = traits.Enum( - 'none', - 'area', + "none", + "area", usedefault=True, - desc=('"none": no weighting is performed, surface": edge distance is ' - 'weighted by the corresponding surface area')) + desc=( + '"none": no weighting is performed, surface": edge distance is ' + "weighted by the corresponding surface area" + ), + ) out_warp = File( - 'surfwarp.vtk', + "surfwarp.vtk", usedefault=True, - desc='vtk file based on surface1 and warpings mapping it ' - 'to surface2') + desc="vtk file based on surface1 and warpings mapping it to surface2", + ) out_file = File( - 'distance.npy', + "distance.npy", usedefault=True, - desc='numpy file keeping computed distances and weights') + desc="numpy file keeping computed distances and weights", + ) class ComputeMeshWarpOutputSpec(TraitedSpec): distance = traits.Float(desc="computed distance") out_warp = File( exists=True, - desc=('vtk file with the vertex-wise ' - 'mapping of surface1 to surface2')) + desc=("vtk file with the vertex-wise mapping of surface1 to surface2"), + ) out_file = File( - exists=True, desc='numpy file keeping computed distances and weights') + exists=True, desc="numpy file keeping computed distances and weights" + ) class ComputeMeshWarp(TVTKBaseInterface): @@ -220,7 +225,7 @@ def _run_interface(self, runtime): vtk2 = VTKInfo.vtk_output(r2) r1.update() r2.update() - assert (len(vtk1.points) == len(vtk2.points)) + assert len(vtk1.points) == len(vtk2.points) points1 = np.array(vtk1.points) points2 = np.array(vtk2.points) @@ -233,10 +238,10 @@ def _run_interface(self, runtime): except TypeError: # numpy < 1.9 errvector = np.apply_along_axis(nla.norm, 1, diff) - if self.inputs.metric == 'sqeuclidean': + if self.inputs.metric == "sqeuclidean": errvector **= 2 - if self.inputs.weighting == 'area': + if self.inputs.weighting == "area": faces = vtk1.polys.to_array().reshape(-1, 4).astype(int)[:, 1:] for i, p1 in enumerate(points2): @@ -258,9 +263,8 @@ def _run_interface(self, runtime): out_mesh.points = vtk1.points out_mesh.polys = vtk1.polys out_mesh.point_data.vectors = diff - out_mesh.point_data.vectors.name = 'warpings' - writer = tvtk.PolyDataWriter( - file_name=op.abspath(self.inputs.out_warp)) + out_mesh.point_data.vectors.name = "warpings" + writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_warp)) VTKInfo.configure_input_data(writer, out_mesh) writer.write() @@ -269,9 +273,9 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) - outputs['out_warp'] = op.abspath(self.inputs.out_warp) - outputs['distance'] = self._distance + outputs["out_file"] = op.abspath(self.inputs.out_file) + outputs["out_warp"] = op.abspath(self.inputs.out_warp) + outputs["distance"] = self._distance return outputs @@ -279,11 +283,15 @@ class MeshWarpMathsInputSpec(BaseInterfaceInputSpec): in_surf = File( exists=True, mandatory=True, - desc=('Input surface in vtk format, with 
associated warp ' - 'field as point data (ie. from ComputeMeshWarp')) + desc=( + "Input surface in vtk format, with associated warp " + "field as point data (ie. from ComputeMeshWarp" + ), + ) float_trait = traits.Either( traits.Float(1.0), - traits.Tuple(traits.Float(1.0), traits.Float(1.0), traits.Float(1.0))) + Tuple(traits.Float(1.0), traits.Float(1.0), traits.Float(1.0)), + ) operator = traits.Either( float_trait, @@ -291,31 +299,27 @@ class MeshWarpMathsInputSpec(BaseInterfaceInputSpec): default=1.0, usedefault=True, mandatory=True, - desc='image, float or tuple of floats to act as operator') + desc="image, float or tuple of floats to act as operator", + ) operation = traits.Enum( - 'sum', - 'sub', - 'mul', - 'div', - usedefault=True, - desc='operation to be performed') + "sum", "sub", "mul", "div", usedefault=True, desc="operation to be performed" + ) out_warp = File( - 'warp_maths.vtk', + "warp_maths.vtk", usedefault=True, - desc='vtk file based on in_surf and warpings mapping it ' - 'to out_file') - out_file = File( - 'warped_surf.vtk', usedefault=True, desc='vtk with surface warped') + desc="vtk file based on in_surf and warpings mapping it to out_file", + ) + out_file = File("warped_surf.vtk", usedefault=True, desc="vtk with surface warped") class MeshWarpMathsOutputSpec(TraitedSpec): out_warp = File( exists=True, - desc=('vtk file with the vertex-wise ' - 'mapping of surface1 to surface2')) - out_file = File(exists=True, desc='vtk with surface warped') + desc=("vtk file with the vertex-wise mapping of surface1 to surface2"), + ) + out_file = File(exists=True, desc="vtk with surface warped") class MeshWarpMaths(TVTKBaseInterface): @@ -350,7 +354,7 @@ def _run_interface(self, runtime): points1 = np.array(vtk1.points) if vtk1.point_data.vectors is None: - raise RuntimeError('No warping field was found in in_surf') + raise RuntimeError("No warping field was found in in_surf") operator = self.inputs.operator opfield = np.ones_like(points1) @@ -359,7 +363,7 @@ def _run_interface(self, runtime): r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) vtk2 = VTKInfo.vtk_output(r2) r2.update() - assert (len(points1) == len(vtk2.points)) + assert len(points1) == len(vtk2.points) opfield = vtk2.point_data.vectors @@ -367,7 +371,7 @@ def _run_interface(self, runtime): opfield = vtk2.point_data.scalars if opfield is None: - raise RuntimeError('No operator values found in operator file') + raise RuntimeError("No operator values found in operator file") opfield = np.array(opfield) @@ -379,33 +383,31 @@ def _run_interface(self, runtime): warping = np.array(vtk1.point_data.vectors) - if self.inputs.operation == 'sum': + if self.inputs.operation == "sum": warping += opfield - elif self.inputs.operation == 'sub': + elif self.inputs.operation == "sub": warping -= opfield - elif self.inputs.operation == 'mul': + elif self.inputs.operation == "mul": warping *= opfield - elif self.inputs.operation == 'div': + elif self.inputs.operation == "div": warping /= opfield vtk1.point_data.vectors = warping - writer = tvtk.PolyDataWriter( - file_name=op.abspath(self.inputs.out_warp)) + writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_warp)) VTKInfo.configure_input_data(writer, vtk1) writer.write() vtk1.point_data.vectors = None vtk1.points = points1 + warping - writer = tvtk.PolyDataWriter( - file_name=op.abspath(self.inputs.out_file)) + writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_file)) VTKInfo.configure_input_data(writer, vtk1) writer.write() return runtime def 
_list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) - outputs['out_warp'] = op.abspath(self.inputs.out_warp) + outputs["out_file"] = op.abspath(self.inputs.out_file) + outputs["out_warp"] = op.abspath(self.inputs.out_warp) return outputs @@ -421,6 +423,8 @@ class P2PDistance(ComputeMeshWarp): """ def __init__(self, **inputs): - super(P2PDistance, self).__init__(**inputs) - IFLOGGER.warning('This interface has been deprecated since 1.0, please ' - 'use ComputeMeshWarp') + super().__init__(**inputs) + IFLOGGER.warning( + "This interface has been deprecated since 1.0, please " + "use ComputeMeshWarp" + ) diff --git a/nipype/algorithms/metrics.py b/nipype/algorithms/metrics.py index 0c35352f34..1f0ca3a9f2 100644 --- a/nipype/algorithms/metrics.py +++ b/nipype/algorithms/metrics.py @@ -1,14 +1,9 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' +""" Image assessment algorithms. Typical overlap and error computation measures to evaluate results from other processing units. -''' -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import zip, range - +""" import os import os.path as op @@ -18,23 +13,27 @@ from .. import config, logging from ..interfaces.base import ( - SimpleInterface, BaseInterface, traits, TraitedSpec, File, - InputMultiPath, BaseInterfaceInputSpec, - isdefined) + SimpleInterface, + BaseInterface, + traits, + TraitedSpec, + File, + InputMultiPath, + BaseInterfaceInputSpec, + isdefined, +) from ..interfaces.nipy.base import NipyBaseInterface -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class DistanceInputSpec(BaseInterfaceInputSpec): volume1 = File( - exists=True, - mandatory=True, - desc="Has to have the same dimensions as volume2.") + exists=True, mandatory=True, desc="Has to have the same dimensions as volume2." + ) volume2 = File( - exists=True, - mandatory=True, - desc="Has to have the same dimensions as volume1.") + exists=True, mandatory=True, desc="Has to have the same dimensions as volume1." 
+ ) method = traits.Enum( "eucl_min", "eucl_cog", @@ -42,29 +41,29 @@ class DistanceInputSpec(BaseInterfaceInputSpec): "eucl_wmean", "eucl_max", desc='""eucl_min": Euclidean distance between two closest points\ - "eucl_cog": mean Euclidian distance between the Center of Gravity\ + "eucl_cog": mean Euclidean distance between the Center of Gravity\ of volume1 and CoGs of volume2\ - "eucl_mean": mean Euclidian minimum distance of all volume2 voxels\ + "eucl_mean": mean Euclidean minimum distance of all volume2 voxels\ to volume1\ - "eucl_wmean": mean Euclidian minimum distance of all volume2 voxels\ + "eucl_wmean": mean Euclidean minimum distance of all volume2 voxels\ to volume1 weighted by their values\ - "eucl_max": maximum over minimum Euclidian distances of all volume2\ + "eucl_max": maximum over minimum Euclidean distances of all volume2\ voxels to volume1 (also known as the Hausdorff distance)', - usedefault=True) - mask_volume = File( - exists=True, desc="calculate overlap only within this mask.") + usedefault=True, + ) + mask_volume = File(exists=True, desc="calculate overlap only within this mask.") class DistanceOutputSpec(TraitedSpec): distance = traits.Float() - point1 = traits.Array(shape=(3, )) - point2 = traits.Array(shape=(3, )) + point1 = traits.Array(shape=(3,)) + point2 = traits.Array(shape=(3,)) histogram = File() class Distance(BaseInterface): - """Calculates distance between two volumes. - """ + """Calculates distance between two volumes.""" + input_spec = DistanceInputSpec output_spec = DistanceOutputSpec @@ -72,6 +71,7 @@ class Distance(BaseInterface): def _find_border(self, data): from scipy.ndimage.morphology import binary_erosion + eroded = binary_erosion(data) border = np.logical_and(data, np.logical_not(eroded)) return border @@ -86,10 +86,11 @@ def _get_coordinates(self, data, affine): def _eucl_min(self, nii1, nii2): from scipy.spatial.distance import cdist, euclidean - origdata1 = nii1.get_data().astype(np.bool) + + origdata1 = np.asanyarray(nii1.dataobj).astype(bool) border1 = self._find_border(origdata1) - origdata2 = nii2.get_data().astype(np.bool) + origdata2 = np.asanyarray(nii2.dataobj).astype(bool) border2 = self._find_border(origdata2) set1_coordinates = self._get_coordinates(border1, nii1.affine) @@ -97,30 +98,31 @@ def _eucl_min(self, nii1, nii2): set2_coordinates = self._get_coordinates(border2, nii2.affine) dist_matrix = cdist(set1_coordinates.T, set2_coordinates.T) - (point1, point2) = np.unravel_index( - np.argmin(dist_matrix), dist_matrix.shape) - return (euclidean(set1_coordinates.T[point1, :], - set2_coordinates.T[point2, :]), - set1_coordinates.T[point1, :], set2_coordinates.T[point2, :]) + (point1, point2) = np.unravel_index(np.argmin(dist_matrix), dist_matrix.shape) + return ( + euclidean(set1_coordinates.T[point1, :], set2_coordinates.T[point2, :]), + set1_coordinates.T[point1, :], + set2_coordinates.T[point2, :], + ) def _eucl_cog(self, nii1, nii2): from scipy.spatial.distance import cdist from scipy.ndimage.measurements import center_of_mass, label - origdata1 = np.logical_and(nii1.get_data() != 0, - np.logical_not(np.isnan(nii1.get_data()))) - cog_t = np.array(center_of_mass(origdata1.copy())).reshape(-1, 1) + + origdata1 = np.asanyarray(nii1.dataobj) + origdata1 = (np.rint(origdata1) != 0) & ~np.isnan(origdata1) + cog_t = np.array(center_of_mass(origdata1)).reshape(-1, 1) cog_t = np.vstack((cog_t, np.array([1]))) cog_t_coor = np.dot(nii1.affine, cog_t)[:3, :] - origdata2 = np.logical_and(nii2.get_data() != 0, - 
np.logical_not(np.isnan(nii2.get_data()))) + origdata2 = np.asanyarray(nii2.dataobj) + origdata2 = (np.rint(origdata2) != 0) & ~np.isnan(origdata2) (labeled_data, n_labels) = label(origdata2) cogs = np.ones((4, n_labels)) for i in range(n_labels): - cogs[:3, i] = np.array( - center_of_mass(origdata2, labeled_data, i + 1)) + cogs[:3, i] = np.array(center_of_mass(origdata2, labeled_data, i + 1)) cogs_coor = np.dot(nii2.affine, cogs)[:3, :] @@ -130,10 +132,11 @@ def _eucl_cog(self, nii1, nii2): def _eucl_mean(self, nii1, nii2, weighted=False): from scipy.spatial.distance import cdist - origdata1 = nii1.get_data().astype(np.bool) + + origdata1 = np.asanyarray(nii1.dataobj).astype(bool) border1 = self._find_border(origdata1) - origdata2 = nii2.get_data().astype(np.bool) + origdata2 = np.asanyarray(nii2.dataobj).astype(bool) set1_coordinates = self._get_coordinates(border1, nii1.affine) set2_coordinates = self._get_coordinates(origdata2, nii2.affine) @@ -141,38 +144,37 @@ def _eucl_mean(self, nii1, nii2, weighted=False): dist_matrix = cdist(set1_coordinates.T, set2_coordinates.T) min_dist_matrix = np.amin(dist_matrix, axis=0) import matplotlib - matplotlib.use(config.get('execution', 'matplotlib_backend')) + + matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt + plt.figure() - plt.hist(min_dist_matrix, 50, normed=1, facecolor='green') + plt.hist(min_dist_matrix, 50, density=True, facecolor="green") plt.savefig(self._hist_filename) plt.clf() plt.close() if weighted: - return np.average( - min_dist_matrix, weights=nii2.get_data()[origdata2].flat) + return np.average(min_dist_matrix, weights=nii2.dataobj[origdata2].flat) else: return np.mean(min_dist_matrix) def _eucl_max(self, nii1, nii2): from scipy.spatial.distance import cdist - origdata1 = nii1.get_data() - origdata1 = np.logical_not( - np.logical_or(origdata1 == 0, np.isnan(origdata1))) - origdata2 = nii2.get_data() - origdata2 = np.logical_not( - np.logical_or(origdata2 == 0, np.isnan(origdata2))) + + origdata1 = np.asanyarray(nii1.dataobj) + origdata1 = (np.rint(origdata1) != 0) & ~np.isnan(origdata1) + origdata2 = np.asanyarray(nii2.dataobj) + origdata2 = (np.rint(origdata2) != 0) & ~np.isnan(origdata2) if isdefined(self.inputs.mask_volume): - maskdata = nb.load(self.inputs.mask_volume).get_data() - maskdata = np.logical_not( - np.logical_or(maskdata == 0, np.isnan(maskdata))) + maskdata = np.asanyarray(nb.load(self.inputs.mask_volume).dataobj) + maskdata = (np.rint(maskdata) != 0) & ~np.isnan(maskdata) origdata1 = np.logical_and(maskdata, origdata1) origdata2 = np.logical_and(maskdata, origdata2) if origdata1.max() == 0 or origdata2.max() == 0: - return np.NaN + return np.nan border1 = self._find_border(origdata1) border2 = self._find_border(origdata2) @@ -180,8 +182,7 @@ def _eucl_max(self, nii1, nii2): set1_coordinates = self._get_coordinates(border1, nii1.affine) set2_coordinates = self._get_coordinates(border2, nii2.affine) distances = cdist(set1_coordinates.T, set2_coordinates.T) - mins = np.concatenate((np.amin(distances, axis=0), - np.amin(distances, axis=1))) + mins = np.concatenate((np.amin(distances, axis=0), np.amin(distances, axis=1))) return np.max(mins) @@ -191,8 +192,7 @@ def _run_interface(self, runtime): nii2 = nb.load(self.inputs.volume2, mmap=False) if self.inputs.method == "eucl_min": - self._distance, self._point1, self._point2 = self._eucl_min( - nii1, nii2) + self._distance, self._point1, self._point2 = self._eucl_min(nii1, nii2) elif self.inputs.method == "eucl_cog": 
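In effect, ``_eucl_max`` above is the symmetric Hausdorff distance between the two border point sets: directed minimum distances in both directions, then the worst case. A compact sketch on toy coordinates:

    import numpy as np
    from scipy.spatial.distance import cdist

    set1 = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]])  # border coordinates (mm)
    set2 = np.array([[0.0, 0.5, 0.0], [4.0, 0.0, 0.0]])

    distances = cdist(set1, set2)
    mins = np.concatenate((np.amin(distances, axis=0), np.amin(distances, axis=1)))
    hausdorff = np.max(mins)                             # -> 3.0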
self._distance = self._eucl_cog(nii1, nii2) @@ -209,62 +209,55 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['distance'] = self._distance + outputs["distance"] = self._distance if self.inputs.method == "eucl_min": - outputs['point1'] = self._point1 - outputs['point2'] = self._point2 + outputs["point1"] = self._point1 + outputs["point2"] = self._point2 elif self.inputs.method in ["eucl_mean", "eucl_wmean"]: - outputs['histogram'] = os.path.abspath(self._hist_filename) + outputs["histogram"] = os.path.abspath(self._hist_filename) return outputs class OverlapInputSpec(BaseInterfaceInputSpec): volume1 = File( - exists=True, - mandatory=True, - desc='Has to have the same dimensions as volume2.') + exists=True, mandatory=True, desc="Has to have the same dimensions as volume2." + ) volume2 = File( - exists=True, - mandatory=True, - desc='Has to have the same dimensions as volume1.') - mask_volume = File( - exists=True, desc='calculate overlap only within this mask.') + exists=True, mandatory=True, desc="Has to have the same dimensions as volume1." + ) + mask_volume = File(exists=True, desc="calculate overlap only within this mask.") bg_overlap = traits.Bool( - False, - usedefault=True, - mandatory=True, - desc='consider zeros as a label') - out_file = File('diff.nii', usedefault=True) + False, usedefault=True, mandatory=True, desc="consider zeros as a label" + ) + out_file = File("diff.nii", usedefault=True) weighting = traits.Enum( - 'none', - 'volume', - 'squared_vol', + "none", + "volume", + "squared_vol", usedefault=True, - desc=('\'none\': no class-overlap weighting is ' - 'performed. \'volume\': computed class-' - 'overlaps are weighted by class volume ' - '\'squared_vol\': computed class-overlaps ' - 'are weighted by the squared volume of ' - 'the class')) + desc=( + "'none': no class-overlap weighting is " + "performed. 
'volume': computed class-" + "overlaps are weighted by class volume " + "'squared_vol': computed class-overlaps " + "are weighted by the squared volume of " + "the class" + ), + ) vol_units = traits.Enum( - 'voxel', - 'mm', - mandatory=True, - usedefault=True, - desc='units for volumes') + "voxel", "mm", mandatory=True, usedefault=True, desc="units for volumes" + ) class OverlapOutputSpec(TraitedSpec): - jaccard = traits.Float(desc='averaged jaccard index') - dice = traits.Float(desc='averaged dice index') - roi_ji = traits.List( - traits.Float(), desc=('the Jaccard index (JI) per ROI')) - roi_di = traits.List(traits.Float(), desc=('the Dice index (DI) per ROI')) - volume_difference = traits.Float(desc=('averaged volume difference')) - roi_voldiff = traits.List( - traits.Float(), desc=('volume differences of ROIs')) - labels = traits.List(traits.Int(), desc=('detected labels')) - diff_file = File(exists=True, desc='error map of differences') + jaccard = traits.Float(desc="averaged jaccard index") + dice = traits.Float(desc="averaged dice index") + roi_ji = traits.List(traits.Float(), desc=("the Jaccard index (JI) per ROI")) + roi_di = traits.List(traits.Float(), desc=("the Dice index (DI) per ROI")) + volume_difference = traits.Float(desc=("averaged volume difference")) + roi_voldiff = traits.List(traits.Float(), desc=("volume differences of ROIs")) + labels = traits.List(traits.Int(), desc=("detected labels")) + diff_file = File(exists=True, desc="error map of differences") class Overlap(BaseInterface): @@ -286,12 +279,14 @@ class Overlap(BaseInterface): >>> res = overlap.run() # doctest: +SKIP """ + input_spec = OverlapInputSpec output_spec = OverlapOutputSpec def _bool_vec_dissimilarity(self, booldata1, booldata2, method): from scipy.spatial.distance import dice, jaccard - methods = {'dice': dice, 'jaccard': jaccard} + + methods = {"dice": dice, "jaccard": jaccard} if not (np.any(booldata1) or np.any(booldata2)): return 0 return 1 - methods[method](booldata1.flat, booldata2.flat) @@ -302,20 +297,18 @@ def _run_interface(self, runtime): scale = 1.0 - if self.inputs.vol_units == 'mm': - voxvol = nii1.header.get_zooms() - for i in range(nii1.get_data().ndim - 1): - scale = scale * voxvol[i] + if self.inputs.vol_units == "mm": + scale = np.prod(nii1.header.get_zooms()[:3]) - data1 = nii1.get_data() + data1 = np.asanyarray(nii1.dataobj) data1[np.logical_or(data1 < 0, np.isnan(data1))] = 0 max1 = int(data1.max()) data1 = data1.astype(np.min_scalar_type(max1)) - data2 = nii2.get_data().astype(np.min_scalar_type(max1)) + data2 = np.asanyarray(nii2.dataobj).astype(np.min_scalar_type(max1)) data2[np.logical_or(data1 < 0, np.isnan(data1))] = 0 if isdefined(self.inputs.mask_volume): - maskdata = nb.load(self.inputs.mask_volume).get_data() + maskdata = np.asanyarray(nb.load(self.inputs.mask_volume).dataobj) maskdata = ~np.logical_or(maskdata == 0, np.isnan(maskdata)) data1[~maskdata] = 0 data2[~maskdata] = 0 @@ -330,19 +323,19 @@ def _run_interface(self, runtime): for l in labels: res.append( - self._bool_vec_dissimilarity( - data1 == l, data2 == l, method='jaccard')) + self._bool_vec_dissimilarity(data1 == l, data2 == l, method="jaccard") + ) volumes1.append(scale * len(data1[data1 == l])) volumes2.append(scale * len(data2[data2 == l])) results = dict(jaccard=[], dice=[]) - results['jaccard'] = np.array(res) - results['dice'] = 2.0 * results['jaccard'] / (results['jaccard'] + 1.0) + results["jaccard"] = np.array(res) + results["dice"] = 2.0 * results["jaccard"] / (results["jaccard"] + 1.0) - 
weights = np.ones((len(volumes1), ), dtype=np.float32) - if self.inputs.weighting != 'none': + weights = np.ones((len(volumes1),), dtype=np.float32) + if self.inputs.weighting != "none": weights = weights / np.array(volumes1) - if self.inputs.weighting == 'squared_vol': + if self.inputs.weighting == "squared_vol": weights = weights**2 weights = weights / np.sum(weights) @@ -350,31 +343,30 @@ def _run_interface(self, runtime): both_data[(data1 - data2) != 0] = 1 nb.save( - nb.Nifti1Image(both_data, nii1.affine, nii1.header), - self.inputs.out_file) + nb.Nifti1Image(both_data, nii1.affine, nii1.header), self.inputs.out_file + ) self._labels = labels self._ove_rois = results - self._vol_rois = ( - np.array(volumes1) - np.array(volumes2)) / np.array(volumes1) + self._vol_rois = (np.array(volumes1) - np.array(volumes2)) / np.array(volumes1) - self._dice = round(np.sum(weights * results['dice']), 5) - self._jaccard = round(np.sum(weights * results['jaccard']), 5) + self._dice = round(np.sum(weights * results["dice"]), 5) + self._jaccard = round(np.sum(weights * results["jaccard"]), 5) self._volume = np.sum(weights * self._vol_rois) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['labels'] = self._labels - outputs['jaccard'] = self._jaccard - outputs['dice'] = self._dice - outputs['volume_difference'] = self._volume - - outputs['roi_ji'] = self._ove_rois['jaccard'].tolist() - outputs['roi_di'] = self._ove_rois['dice'].tolist() - outputs['roi_voldiff'] = self._vol_rois.tolist() - outputs['diff_file'] = os.path.abspath(self.inputs.out_file) + outputs["labels"] = self._labels + outputs["jaccard"] = self._jaccard + outputs["dice"] = self._dice + outputs["volume_difference"] = self._volume + + outputs["roi_ji"] = self._ove_rois["jaccard"].tolist() + outputs["roi_di"] = self._ove_rois["dice"].tolist() + outputs["roi_voldiff"] = self._vol_rois.tolist() + outputs["diff_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -382,38 +374,44 @@ class FuzzyOverlapInputSpec(BaseInterfaceInputSpec): in_ref = InputMultiPath( File(exists=True), mandatory=True, - desc='Reference image. Requires the same dimensions as in_tst.') + desc="Reference image. Requires the same dimensions as in_tst.", + ) in_tst = InputMultiPath( File(exists=True), mandatory=True, - desc='Test image. Requires the same dimensions as in_ref.') - in_mask = File(exists=True, desc='calculate overlap only within mask') + desc="Test image. Requires the same dimensions as in_ref.", + ) + in_mask = File(exists=True, desc="calculate overlap only within mask") weighting = traits.Enum( - 'none', - 'volume', - 'squared_vol', + "none", + "volume", + "squared_vol", usedefault=True, - desc=('\'none\': no class-overlap weighting is ' - 'performed. \'volume\': computed class-' - 'overlaps are weighted by class volume ' - '\'squared_vol\': computed class-overlaps ' - 'are weighted by the squared volume of ' - 'the class')) + desc=( + "'none': no class-overlap weighting is " + "performed. 
'volume': computed class-" + "overlaps are weighted by class volume " + "'squared_vol': computed class-overlaps " + "are weighted by the squared volume of " + "the class" + ), + ) out_file = File( - 'diff.nii', - desc='alternative name for resulting difference-map', - usedefault=True) + "diff.nii", + desc="alternative name for resulting difference-map", + usedefault=True, + ) class FuzzyOverlapOutputSpec(TraitedSpec): - jaccard = traits.Float(desc='Fuzzy Jaccard Index (fJI), all the classes') - dice = traits.Float(desc='Fuzzy Dice Index (fDI), all the classes') + jaccard = traits.Float(desc="Fuzzy Jaccard Index (fJI), all the classes") + dice = traits.Float(desc="Fuzzy Dice Index (fDI), all the classes") class_fji = traits.List( - traits.Float(), - desc='Array containing the fJIs of each computed class') + traits.Float(), desc="Array containing the fJIs of each computed class" + ) class_fdi = traits.List( - traits.Float(), - desc='Array containing the fDIs of each computed class') + traits.Float(), desc="Array containing the fDIs of each computed class" + ) class FuzzyOverlap(SimpleInterface): @@ -441,22 +439,22 @@ class FuzzyOverlap(SimpleInterface): def _run_interface(self, runtime): # Load data - refdata = nb.concat_images(self.inputs.in_ref).get_data() - tstdata = nb.concat_images(self.inputs.in_tst).get_data() + refdata = nb.concat_images(self.inputs.in_ref).dataobj + tstdata = nb.concat_images(self.inputs.in_tst).dataobj # Data must have same shape if not refdata.shape == tstdata.shape: raise RuntimeError( - 'Size of "in_tst" %s must match that of "in_ref" %s.' % - (tstdata.shape, refdata.shape)) + 'Size of "in_tst" %s must match that of "in_ref" %s.' + % (tstdata.shape, refdata.shape) + ) ncomp = refdata.shape[-1] # Load mask mask = np.ones_like(refdata, dtype=bool) if isdefined(self.inputs.in_mask): - mask = nb.load(self.inputs.in_mask).get_data() - mask = mask > 0 + mask = np.asanyarray(nb.load(self.inputs.in_mask).dataobj) > 0 mask = np.repeat(mask[..., np.newaxis], ncomp, -1) assert mask.shape == refdata.shape @@ -465,29 +463,33 @@ def _run_interface(self, runtime): tstdata = tstdata[mask] if np.any(refdata < 0.0): - iflogger.warning('Negative values encountered in "in_ref" input, ' - 'taking absolute values.') + iflogger.warning( + 'Negative values encountered in "in_ref" input, ' + "taking absolute values." + ) refdata = np.abs(refdata) if np.any(tstdata < 0.0): - iflogger.warning('Negative values encountered in "in_tst" input, ' - 'taking absolute values.') + iflogger.warning( + 'Negative values encountered in "in_tst" input, ' + "taking absolute values." + ) tstdata = np.abs(tstdata) if np.any(refdata > 1.0): - iflogger.warning('Values greater than 1.0 found in "in_ref" input, ' - 'scaling values.') + iflogger.warning( + 'Values greater than 1.0 found in "in_ref" input, scaling values.' + ) refdata /= refdata.max() if np.any(tstdata > 1.0): - iflogger.warning('Values greater than 1.0 found in "in_tst" input, ' - 'scaling values.') + iflogger.warning( + 'Values greater than 1.0 found in "in_tst" input, scaling values.' 
+ ) tstdata /= tstdata.max() - numerators = np.atleast_2d( - np.minimum(refdata, tstdata).reshape((-1, ncomp))) - denominators = np.atleast_2d( - np.maximum(refdata, tstdata).reshape((-1, ncomp))) + numerators = np.atleast_2d(np.minimum(refdata, tstdata).reshape((-1, ncomp))) + denominators = np.atleast_2d(np.maximum(refdata, tstdata).reshape((-1, ncomp))) jaccards = numerators.sum(axis=0) / denominators.sum(axis=0) @@ -503,10 +505,10 @@ def _run_interface(self, runtime): dices = 2.0 * jaccards / (jaccards + 1.0) # Fill-in the results object - self._results['jaccard'] = float(weights.dot(jaccards)) - self._results['dice'] = float(weights.dot(dices)) - self._results['class_fji'] = [float(v) for v in jaccards] - self._results['class_fdi'] = [float(v) for v in dices] + self._results["jaccard"] = float(weights.dot(jaccards)) + self._results["dice"] = float(weights.dot(dices)) + self._results["class_fji"] = [float(v) for v in jaccards] + self._results["class_fdi"] = [float(v) for v in dices] return runtime @@ -514,18 +516,21 @@ class ErrorMapInputSpec(BaseInterfaceInputSpec): in_ref = File( exists=True, mandatory=True, - desc="Reference image. Requires the same dimensions as in_tst.") + desc="Reference image. Requires the same dimensions as in_tst.", + ) in_tst = File( exists=True, mandatory=True, - desc="Test image. Requires the same dimensions as in_ref.") + desc="Test image. Requires the same dimensions as in_ref.", + ) mask = File(exists=True, desc="calculate overlap only within this mask.") metric = traits.Enum( "sqeuclidean", "euclidean", - desc='error map metric (as implemented in scipy cdist)', + desc="error map metric (as implemented in scipy cdist)", usedefault=True, - mandatory=True) + mandatory=True, + ) out_map = File(desc="Name for the output file") @@ -535,7 +540,7 @@ class ErrorMapOutputSpec(TraitedSpec): class ErrorMap(BaseInterface): - """ Calculates the error (distance) map between two input volumes. + """Calculates the error (distance) map between two input volumes. 
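For reference, the fuzzy indices filled into ``_results`` above reduce to sums of element-wise minima and maxima over the membership maps; a toy check with values in [0, 1]:

    import numpy as np

    ref = np.array([0.2, 0.8, 1.0, 0.0])
    tst = np.array([0.1, 0.9, 0.5, 0.0])

    jaccard = np.minimum(ref, tst).sum() / np.maximum(ref, tst).sum()  # 1.4 / 2.1
    dice = 2.0 * jaccard / (jaccard + 1.0)                             # -> 0.8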
Example ------- @@ -545,31 +550,34 @@ class ErrorMap(BaseInterface): >>> errormap.inputs.in_tst = 'cont2.nii' >>> res = errormap.run() # doctest: +SKIP """ + input_spec = ErrorMapInputSpec output_spec = ErrorMapOutputSpec - _out_file = '' + _out_file = "" def _run_interface(self, runtime): # Get two numpy data matrices nii_ref = nb.load(self.inputs.in_ref) - ref_data = np.squeeze(nii_ref.get_data()) - tst_data = np.squeeze(nb.load(self.inputs.in_tst).get_data()) - assert (ref_data.ndim == tst_data.ndim) + ref_data = np.squeeze(nii_ref.dataobj) + tst_data = np.squeeze(nb.load(self.inputs.in_tst).dataobj) + assert ref_data.ndim == tst_data.ndim # Load mask comps = 1 mapshape = ref_data.shape - if (ref_data.ndim == 4): + if ref_data.ndim == 4: comps = ref_data.shape[-1] mapshape = ref_data.shape[:-1] if isdefined(self.inputs.mask): - msk = nb.load(self.inputs.mask).get_data() - if (mapshape != msk.shape): - raise RuntimeError("Mask should match volume shape, \ - mask is %s and volumes are %s" % - (list(msk.shape), list(mapshape))) + msk = np.asanyarray(nb.load(self.inputs.mask).dataobj) + if mapshape != msk.shape: + raise RuntimeError( + "Mask should match volume shape, \ + mask is %s and volumes are %s" + % (list(msk.shape), list(mapshape)) + ) else: msk = np.ones(shape=mapshape) @@ -578,51 +586,52 @@ def _run_interface(self, runtime): msk_idxs = np.where(mskvector == 1) refvector = ref_data.reshape(-1, comps)[msk_idxs].astype(np.float32) tstvector = tst_data.reshape(-1, comps)[msk_idxs].astype(np.float32) - diffvector = (refvector - tstvector) + diffvector = refvector - tstvector # Scale the difference - if self.inputs.metric == 'sqeuclidean': + if self.inputs.metric == "sqeuclidean": errvector = diffvector**2 - if (comps > 1): + if comps > 1: errvector = np.sum(errvector, axis=1) else: errvector = np.squeeze(errvector) - elif self.inputs.metric == 'euclidean': + elif self.inputs.metric == "euclidean": errvector = np.linalg.norm(diffvector, axis=1) errvectorexp = np.zeros_like( - mskvector, dtype=np.float32) # The default type is uint8 + mskvector, dtype=np.float32 + ) # The default type is uint8 errvectorexp[msk_idxs] = errvector # Get averaged error - self._distance = np.average( - errvector) # Only average the masked voxels + self._distance = np.average(errvector) # Only average the masked voxels errmap = errvectorexp.reshape(mapshape) hdr = nii_ref.header.copy() hdr.set_data_dtype(np.float32) - hdr['data_type'] = 16 + hdr["data_type"] = 16 hdr.set_data_shape(mapshape) if not isdefined(self.inputs.out_map): fname, ext = op.splitext(op.basename(self.inputs.in_tst)) - if ext == '.gz': + if ext == ".gz": fname, ext2 = op.splitext(fname) ext = ext2 + ext self._out_file = op.abspath(fname + "_errmap" + ext) else: self._out_file = self.inputs.out_map - nb.Nifti1Image(errmap.astype(np.float32), nii_ref.affine, - hdr).to_filename(self._out_file) + nb.Nifti1Image(errmap.astype(np.float32), nii_ref.affine, hdr).to_filename( + self._out_file + ) return runtime def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_map'] = self._out_file - outputs['distance'] = self._distance + outputs["out_map"] = self._out_file + outputs["distance"] = self._distance return outputs @@ -632,7 +641,7 @@ class SimilarityInputSpec(BaseInterfaceInputSpec): mask1 = File(exists=True, desc="3D volume") mask2 = File(exists=True, desc="3D volume") metric = traits.Either( - traits.Enum('cc', 'cr', 'crl1', 'mi', 'nmi', 'slr'), + traits.Enum("cc", "cr", "crl1", "mi", "nmi", "slr"), traits.Callable(), 
desc="""str or callable Cost-function for assessing image similarity. If a string, @@ -642,12 +651,14 @@ class SimilarityInputSpec(BaseInterfaceInputSpec): supervised log-likelihood ratio. If a callable, it should take a two-dimensional array representing the image joint histogram as an input and return a float.""", - usedefault=True) + usedefault=True, + ) class SimilarityOutputSpec(TraitedSpec): similarity = traits.List( - traits.Float(desc="Similarity between volume 1 and 2, frame by frame")) + traits.Float(desc="Similarity between volume 1 and 2, frame by frame") + ) class Similarity(NipyBaseInterface): @@ -675,13 +686,15 @@ class Similarity(NipyBaseInterface): output_spec = SimilarityOutputSpec def _run_interface(self, runtime): - from nipy.algorithms.registration.histogram_registration import HistogramRegistration + from nipy.algorithms.registration.histogram_registration import ( + HistogramRegistration, + ) from nipy.algorithms.registration.affine import Affine vol1_nii = nb.load(self.inputs.volume1) vol2_nii = nb.load(self.inputs.volume2) - dims = vol1_nii.get_data().ndim + dims = len(vol1_nii.shape) if dims == 3 or dims == 2: vols1 = [vol1_nii] @@ -692,15 +705,16 @@ def _run_interface(self, runtime): if dims < 2 or dims > 4: raise RuntimeError( - 'Image dimensions not supported (detected %dD file)' % dims) + "Image dimensions not supported (detected %dD file)" % dims + ) if isdefined(self.inputs.mask1): - mask1 = nb.load(self.inputs.mask1).get_data() == 1 + mask1 = np.asanyarray(nb.load(self.inputs.mask1).dataobj) == 1 else: mask1 = None if isdefined(self.inputs.mask2): - mask2 = nb.load(self.inputs.mask2).get_data() == 1 + mask2 = np.asanyarray(nb.load(self.inputs.mask2).dataobj) == 1 else: mask2 = None @@ -712,12 +726,13 @@ def _run_interface(self, runtime): to_img=ts2, similarity=self.inputs.metric, from_mask=mask1, - to_mask=mask2) + to_mask=mask2, + ) self._similarity.append(histreg.eval(Affine())) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['similarity'] = self._similarity + outputs["similarity"] = self._similarity return outputs diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index a4ecd3a5e2..e1a67f0b08 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1,14 +1,6 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' -Miscellaneous algorithms -''' -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, zip, range, open -from future.utils import raise_from - +"""Miscellaneous algorithms.""" import os import os.path as op @@ -21,36 +13,49 @@ from .. import logging from . import metrics as nam from ..interfaces.base import ( - BaseInterface, traits, TraitedSpec, File, InputMultiPath, OutputMultiPath, - BaseInterfaceInputSpec, isdefined, DynamicTraitedSpec, Undefined) + BaseInterface, + traits, + TraitedSpec, + File, + InputMultiPath, + OutputMultiPath, + BaseInterfaceInputSpec, + isdefined, + DynamicTraitedSpec, + Undefined, + Tuple, +) from ..utils.filemanip import fname_presuffix, split_filename, ensure_list -from ..utils import NUMPY_MMAP from . 


 class PickAtlasInputSpec(BaseInterfaceInputSpec):
     atlas = File(
-        exists=True,
-        desc="Location of the atlas that will be used.",
-        mandatory=True)
+        exists=True, desc="Location of the atlas that will be used.", mandatory=True
+    )
     labels = traits.Either(
         traits.Int,
         traits.List(traits.Int),
-        desc=("Labels of regions that will be included in the mask. Must be\
-        compatible with the atlas used."),
-        mandatory=True)
+        desc=(
+            "Labels of regions that will be included in the mask. Must be\
+        compatible with the atlas used."
+        ),
+        mandatory=True,
+    )
     hemi = traits.Enum(
-        'both',
-        'left',
-        'right',
+        "both",
+        "left",
+        "right",
         desc="Restrict the mask to only one hemisphere: left or right",
-        usedefault=True)
+        usedefault=True,
+    )
     dilation_size = traits.Int(
         usedefault=True,
-        desc="Defines how much the mask will be dilated (expanded in 3D).")
+        desc="Defines how much the mask will be dilated (expanded in 3D).",
+    )
     output_file = File(desc="Where to store the output mask.")
@@ -78,14 +83,15 @@ def _gen_output_filename(self):
                 fname=self.inputs.atlas,
                 suffix="_mask",
                 newpath=os.getcwd(),
-                use_ext=True)
+                use_ext=True,
+            )
         else:
             output = os.path.realpath(self.inputs.output_file)
         return output

     def _get_brodmann_area(self):
         nii = nb.load(self.inputs.atlas)
-        origdata = nii.get_data()
+        origdata = np.asanyarray(nii.dataobj)
         newdata = np.zeros(origdata.shape)

         if not isinstance(self.inputs.labels, list):
@@ -94,50 +100,56 @@ def _get_brodmann_area(self):
             labels = self.inputs.labels
         for lab in labels:
             newdata[origdata == lab] = 1
-        if self.inputs.hemi == 'right':
-            newdata[int(floor(float(origdata.shape[0]) / 2)):, :, :] = 0
-        elif self.inputs.hemi == 'left':
-            newdata[:int(ceil(float(origdata.shape[0]) / 2)), :, :] = 0
+        if self.inputs.hemi == "right":
+            newdata[int(floor(float(origdata.shape[0]) / 2)) :, :, :] = 0
+        elif self.inputs.hemi == "left":
+            newdata[: int(ceil(float(origdata.shape[0]) / 2)), :, :] = 0

         if self.inputs.dilation_size != 0:
             from scipy.ndimage.morphology import grey_dilation
-            newdata = grey_dilation(newdata,
-                                    (2 * self.inputs.dilation_size + 1,
-                                     2 * self.inputs.dilation_size + 1,
-                                     2 * self.inputs.dilation_size + 1))
+
+            newdata = grey_dilation(
+                newdata,
+                (
+                    2 * self.inputs.dilation_size + 1,
+                    2 * self.inputs.dilation_size + 1,
+                    2 * self.inputs.dilation_size + 1,
+                ),
+            )

         return nb.Nifti1Image(newdata, nii.affine, nii.header)

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['mask_file'] = self._gen_output_filename()
+        outputs["mask_file"] = self._gen_output_filename()
         return outputs


 class SimpleThresholdInputSpec(BaseInterfaceInputSpec):
     volumes = InputMultiPath(
-        File(exists=True), desc='volumes to be thresholded', mandatory=True)
+        File(exists=True), desc="volumes to be thresholded", mandatory=True
+    )
     threshold = traits.Float(
-        desc='volumes to be thresholdedeverything below this value will be set\
-        to zero',
-        mandatory=True)
+        desc="volumes to be thresholded. Everything below this value will be set\
+        to zero",
+        mandatory=True,
+    )


 class SimpleThresholdOutputSpec(TraitedSpec):
-    thresholded_volumes = OutputMultiPath(
-        File(exists=True), desc="thresholded volumes")
+    thresholded_volumes = OutputMultiPath(File(exists=True), desc="thresholded volumes")


 class SimpleThreshold(BaseInterface):
-    """Applies a threshold to input volumes
-    """
+    """Applies a threshold to input volumes"""
+
     input_spec = SimpleThresholdInputSpec
     output_spec = SimpleThresholdOutputSpec

     def _run_interface(self, runtime):
         for fname in self.inputs.volumes:
-            img = nb.load(fname, mmap=NUMPY_MMAP)
-            data = np.array(img.get_data())
+            img = nb.load(fname)
+            data = img.get_fdata()

             active_map = data > self.inputs.threshold
@@ -146,7 +158,7 @@ def _run_interface(self, runtime):
             new_img = nb.Nifti1Image(thresholded_map, img.affine, img.header)
             _, base, _ = split_filename(fname)
-            nb.save(new_img, base + '_thresholded.nii')
+            nb.save(new_img, base + "_thresholded.nii")

         return runtime
@@ -156,21 +168,24 @@ def _list_outputs(self):
         for fname in self.inputs.volumes:
             _, base, _ = split_filename(fname)
             outputs["thresholded_volumes"].append(
-                os.path.abspath(base + '_thresholded.nii'))
+                os.path.abspath(base + "_thresholded.nii")
+            )
         return outputs


 class ModifyAffineInputSpec(BaseInterfaceInputSpec):
     volumes = InputMultiPath(
         File(exists=True),
-        desc='volumes which affine matrices will be modified',
-        mandatory=True)
+        desc="volumes which affine matrices will be modified",
+        mandatory=True,
+    )
     transformation_matrix = traits.Array(
         value=np.eye(4),
         shape=(4, 4),
         desc="transformation matrix that will be left multiplied by the\
         affine matrix",
-        usedefault=True)
+        usedefault=True,
+    )
@@ -181,6 +196,7 @@ class ModifyAffine(BaseInterface):
     """Left multiplies the affine matrix with a specified values.
     Saves the volume as a nifti file.
     """
+
     input_spec = ModifyAffineInputSpec
     output_spec = ModifyAffineOutputSpec
@@ -190,30 +206,31 @@ def _gen_output_filename(self, name):

     def _run_interface(self, runtime):
         for fname in self.inputs.volumes:
-            img = nb.load(fname, mmap=NUMPY_MMAP)
+            img = nb.load(fname)

             affine = img.affine
             affine = np.dot(self.inputs.transformation_matrix, affine)

             nb.save(
-                nb.Nifti1Image(img.get_data(), affine, img.header),
-                self._gen_output_filename(fname))
+                nb.Nifti1Image(img.dataobj, affine, img.header),
+                self._gen_output_filename(fname),
+            )

         return runtime

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['transformed_volumes'] = []
+        outputs["transformed_volumes"] = []
         for fname in self.inputs.volumes:
-            outputs['transformed_volumes'].append(
-                self._gen_output_filename(fname))
+            outputs["transformed_volumes"].append(self._gen_output_filename(fname))
         return outputs


 class CreateNiftiInputSpec(BaseInterfaceInputSpec):
     data_file = File(exists=True, mandatory=True, desc="ANALYZE img file")
     header_file = File(
-        exists=True, mandatory=True, desc="corresponding ANALYZE hdr file")
+        exists=True, mandatory=True, desc="corresponding ANALYZE hdr file"
+    )
     affine = traits.Array(desc="affine transformation array")
@@ -222,8 +239,8 @@ class CreateNiftiOutputSpec(TraitedSpec):


 class CreateNifti(BaseInterface):
-    """Creates a nifti volume
-    """
+    """Creates a nifti volume"""
+
     input_spec = CreateNiftiInputSpec
     output_spec = CreateNiftiOutputSpec
@@ -232,7 +249,7 @@ def _gen_output_file_name(self):
         return os.path.abspath(base + ".nii")

     def _run_interface(self, runtime):
-        with open(self.inputs.header_file, 'rb') as hdr_file:
+        with open(self.inputs.header_file, "rb") as hdr_file:
             hdr = nb.AnalyzeHeader.from_fileobj(hdr_file)

         if isdefined(self.inputs.affine):
@@ -240,7 +257,7 @@ def _run_interface(self, runtime):
         else:
             affine = None

-        with open(self.inputs.data_file, 'rb') as data_file:
+        with open(self.inputs.data_file, "rb") as data_file:
             data = hdr.data_from_fileobj(data_file)

         img = nb.Nifti1Image(data, affine, hdr)
@@ -250,54 +267,92 @@ def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['nifti_file'] = self._gen_output_file_name()
+        outputs["nifti_file"] = self._gen_output_file_name()
         return outputs
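
# Aside: the thresholding interface above reduces to a boolean-mask copy; the
# body elided by the hunk presumably fills a zero array from the active
# voxels, roughly like this minimal sketch (values illustrative):
import numpy as np

data = np.array([0.2, 0.7, 1.5, 0.4])
threshold = 0.5
active_map = data > threshold
thresholded_map = np.zeros(data.shape)
thresholded_map[active_map] = data[active_map]
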


-class GunzipInputSpec(BaseInterfaceInputSpec):
-    in_file = File(exists=True, mandatory=True)
+class GzipInputSpec(TraitedSpec):
+    in_file = File(exists=True, mandatory=True, desc="file to (de)compress")
+    mode = traits.Enum(
+        "compress", "decompress", usedefault=True, desc="compress or decompress"
+    )


-class GunzipOutputSpec(TraitedSpec):
-    out_file = File(exists=True)
+class GzipOutputSpec(TraitedSpec):
+    out_file = File()


-class Gunzip(BaseInterface):
-    """Gunzip wrapper
+class Gzip(BaseInterface):
+    """Gzip wrapper

-    >>> from nipype.algorithms.misc import Gunzip
-    >>> gunzip = Gunzip(in_file='tpms_msk.nii.gz')
-    >>> res = gunzip.run()
+    >>> from nipype.algorithms.misc import Gzip
+    >>> gzip = Gzip(in_file='tpms_msk.nii.gz', mode="decompress")
+    >>> res = gzip.run()
     >>> res.outputs.out_file  # doctest: +ELLIPSIS
     '.../tpms_msk.nii'

+    >>> gzip = Gzip(in_file='tpms_msk.nii')
+    >>> res = gzip.run()
+    >>> res.outputs.out_file  # doctest: +ELLIPSIS
+    '.../tpms_msk.nii.gz'
+
     .. testcleanup::

     >>> os.unlink('tpms_msk.nii')
     """
-    input_spec = GunzipInputSpec
-    output_spec = GunzipOutputSpec
+
+    input_spec = GzipInputSpec
+    output_spec = GzipOutputSpec

     def _gen_output_file_name(self):
         _, base, ext = split_filename(self.inputs.in_file)
-        if ext[-3:].lower() == ".gz":
+        if self.inputs.mode == "decompress" and ext[-3:].lower() == ".gz":
             ext = ext[:-3]
+        elif self.inputs.mode == "compress":
+            ext = f"{ext}.gz"

         return os.path.abspath(base + ext)

     def _run_interface(self, runtime):
         import gzip
         import shutil

-        with gzip.open(self.inputs.in_file, 'rb') as in_file:
-            with open(self._gen_output_file_name(), 'wb') as out_file:
+        if self.inputs.mode == "compress":
+            open_input, open_output = open, gzip.open
+        else:
+            open_input, open_output = gzip.open, open
+
+        with open_input(self.inputs.in_file, "rb") as in_file:
+            with open_output(self._gen_output_file_name(), "wb") as out_file:
                 shutil.copyfileobj(in_file, out_file)
         return runtime

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['out_file'] = self._gen_output_file_name()
+        outputs["out_file"] = self._gen_output_file_name()
         return outputs


+class GunzipInputSpec(GzipInputSpec):
+    mode = traits.Enum("decompress", usedefault=True, desc="decompress or compress")
+
+
+class Gunzip(Gzip):
+    """Gunzip wrapper
+
+    >>> from nipype.algorithms.misc import Gunzip
+    >>> gunzip = Gunzip(in_file='tpms_msk.nii.gz')
+    >>> res = gunzip.run()
+    >>> res.outputs.out_file  # doctest: +ELLIPSIS
+    '.../tpms_msk.nii'
+
+    .. testcleanup::
+
+    >>> os.unlink('tpms_msk.nii')
+    """
+
+    input_spec = GunzipInputSpec
+
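
# Aside: the refactor above folds Gunzip into a bidirectional Gzip interface,
# with Gunzip surviving as a thin subclass that pins mode="decompress". The
# core is just pairing the right file openers; a minimal standalone sketch
# (the helper name _recompress is illustrative, not from the diff):
import gzip
import shutil

def _recompress(src, dst, mode="compress"):
    openers = (open, gzip.open) if mode == "compress" else (gzip.open, open)
    open_input, open_output = openers
    with open_input(src, "rb") as f_in, open_output(dst, "wb") as f_out:
        shutil.copyfileobj(f_in, f_out)  # stream copy, no full read into memory
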
 def replaceext(in_list, ext):
     out_list = list()
     for filename in in_list:
@@ -307,54 +362,60 @@ def replaceext(in_list, ext):
     return out_list


-def matlab2csv(in_array, name, reshape):
+def _matlab2csv(in_array, name, reshape):
     output_array = np.asarray(in_array)
     if reshape:
         if len(np.shape(output_array)) > 1:
             output_array = np.reshape(
-                output_array,
-                (np.shape(output_array)[0] * np.shape(output_array)[1], 1))
+                output_array, (np.shape(output_array)[0] * np.shape(output_array)[1], 1)
+            )
             iflogger.info(np.shape(output_array))
-    output_name = op.abspath(name + '.csv')
-    np.savetxt(output_name, output_array, delimiter=',')
+    output_name = op.abspath(name + ".csv")
+    np.savetxt(output_name, output_array, delimiter=",")
     return output_name


 class Matlab2CSVInputSpec(TraitedSpec):
-    in_file = File(exists=True, mandatory=True, desc='Input MATLAB .mat file')
+    in_file = File(exists=True, mandatory=True, desc="Input MATLAB .mat file")
     reshape_matrix = traits.Bool(
         True,
         usedefault=True,
-        desc='The output of this interface is meant for R, so matrices will be\
-        reshaped to vectors by default.')
+        desc="The output of this interface is meant for R, so matrices will be\
+        reshaped to vectors by default.",
+    )


 class Matlab2CSVOutputSpec(TraitedSpec):
     csv_files = OutputMultiPath(
-        File(desc='Output CSV files for each variable saved in the input .mat\
-        file'))
+        File(
+            desc="Output CSV files for each variable saved in the input .mat\
+        file"
+        )
+    )


 class Matlab2CSV(BaseInterface):
-    """Simple interface to save the components of a MATLAB .mat file as a text
-    file with comma-separated values (CSVs).
+    """
+    Save the components of a MATLAB .mat file as a text file with comma-separated values (CSVs).

     CSV files are easily loaded in R, for use in statistical processing.
     For further information, see cran.r-project.org/doc/manuals/R-data.pdf

     Example
     -------
-
     >>> from nipype.algorithms import misc
     >>> mat2csv = misc.Matlab2CSV()
     >>> mat2csv.inputs.in_file = 'cmatrix.mat'
     >>> mat2csv.run() # doctest: +SKIP
+
     """
+
     input_spec = Matlab2CSVInputSpec
     output_spec = Matlab2CSVOutputSpec

     def _run_interface(self, runtime):
         import scipy.io as sio
+
         in_dict = sio.loadmat(op.abspath(self.inputs.in_file))

         # Check if the file has multiple variables in it. If it does, loop
@@ -364,87 +425,100 @@ def _run_interface(self, runtime):
         saved_variables = list()
         for key in list(in_dict.keys()):
-            if not key.startswith('__'):
+            if not key.startswith("__"):
                 if isinstance(in_dict[key][0], np.ndarray):
                     saved_variables.append(key)
                 else:
-                    iflogger.info('One of the keys in the input file, %s, is '
-                                  'not a Numpy array', key)
+                    iflogger.info(
+                        "One of the keys in the input file, %s, is "
+                        "not a Numpy array",
+                        key,
+                    )

         if len(saved_variables) > 1:
-            iflogger.info('%i variables found:', len(saved_variables))
+            iflogger.info("%i variables found:", len(saved_variables))
             iflogger.info(saved_variables)
             for variable in saved_variables:
-                iflogger.info('...Converting %s - type %s - to CSV', variable,
-                              type(in_dict[variable]))
-                matlab2csv(in_dict[variable], variable,
-                           self.inputs.reshape_matrix)
+                iflogger.info(
+                    "...Converting %s - type %s - to CSV",
+                    variable,
+                    type(in_dict[variable]),
+                )
+                _matlab2csv(in_dict[variable], variable, self.inputs.reshape_matrix)
         elif len(saved_variables) == 1:
             _, name, _ = split_filename(self.inputs.in_file)
             variable = saved_variables[0]
-            iflogger.info('Single variable found %s, type %s:', variable,
-                          type(in_dict[variable]))
-            iflogger.info('...Converting %s to CSV from %s', variable,
-                          self.inputs.in_file)
-            matlab2csv(in_dict[variable], name, self.inputs.reshape_matrix)
+            iflogger.info(
+                "Single variable found %s, type %s:", variable, type(in_dict[variable])
+            )
+            iflogger.info(
+                "...Converting %s to CSV from %s", variable, self.inputs.in_file
+            )
+            _matlab2csv(in_dict[variable], name, self.inputs.reshape_matrix)
         else:
-            iflogger.error('No values in the MATLAB file?!')
+            iflogger.error("No values in the MATLAB file?!")
         return runtime

     def _list_outputs(self):
         import scipy.io as sio
+
         outputs = self.output_spec().get()
         in_dict = sio.loadmat(op.abspath(self.inputs.in_file))
         saved_variables = list()
         for key in list(in_dict.keys()):
-            if not key.startswith('__'):
+            if not key.startswith("__"):
                 if isinstance(in_dict[key][0], np.ndarray):
                     saved_variables.append(key)
                 else:
-                    iflogger.error('One of the keys in the input file, %s, is '
-                                   'not a Numpy array', key)
+                    iflogger.error(
+                        "One of the keys in the input file, %s, is "
+                        "not a Numpy array",
+                        key,
+                    )

         if len(saved_variables) > 1:
-            outputs['csv_files'] = replaceext(saved_variables, '.csv')
+            outputs["csv_files"] = replaceext(saved_variables, ".csv")
         elif len(saved_variables) == 1:
             _, name, ext = split_filename(self.inputs.in_file)
-            outputs['csv_files'] = op.abspath(name + '.csv')
+            outputs["csv_files"] = op.abspath(name + ".csv")
         else:
-            iflogger.error('No values in the MATLAB file?!')
+            iflogger.error("No values in the MATLAB file?!")
         return outputs


 def merge_csvs(in_list):
     for idx, in_file in enumerate(in_list):
         try:
-            in_array = np.loadtxt(in_file, delimiter=',')
+            in_array = np.loadtxt(in_file, delimiter=",")
         except ValueError:
             try:
-                in_array = np.loadtxt(in_file, delimiter=',', skiprows=1)
+                in_array = np.loadtxt(in_file, delimiter=",", skiprows=1)
             except ValueError:
-                with open(in_file, 'r') as first:
+                with open(in_file) as first:
                     header_line = first.readline()

-                header_list = header_line.split(',')
+                header_list = header_line.split(",")
                 n_cols = len(header_list)
                 try:
                     in_array = np.loadtxt(
                         in_file,
-                        delimiter=',',
+                        delimiter=",",
                         skiprows=1,
-                        usecols=list(range(1, n_cols)))
+                        usecols=list(range(1, n_cols)),
+                    )
                 except ValueError:
                     in_array = np.loadtxt(
                         in_file,
-                        delimiter=',',
+                        delimiter=",",
                         skiprows=1,
-                        usecols=list(range(1, n_cols - 1)))
+                        usecols=list(range(1, n_cols - 1)),
+                    )
         if idx == 0:
             out_array = in_array
         else:
             out_array = np.dstack((out_array, in_array))
     out_array = np.squeeze(out_array)
-    iflogger.info('Final output array shape:')
+    iflogger.info("Final output array shape:")
     iflogger.info(np.shape(out_array))
     return out_array
@@ -452,16 +526,17 @@ def merge_csvs(in_list):
 def remove_identical_paths(in_files):
     import os.path as op
     from ..utils.filemanip import split_filename
+
     if len(in_files) > 1:
         out_names = list()
         commonprefix = op.commonprefix(in_files)
-        lastslash = commonprefix.rfind('/')
-        commonpath = commonprefix[0:(lastslash + 1)]
-        for fileidx, in_file in enumerate(in_files):
+        lastslash = commonprefix.rfind("/")
+        commonpath = commonprefix[0 : (lastslash + 1)]
+        for in_file in in_files:
             path, name, ext = split_filename(in_file)
             in_file = op.join(path, name)
-            name = in_file.replace(commonpath, '')
-            name = name.replace('_subject_id_', '')
+            name = in_file.replace(commonpath, "")
+            name = name.replace("_subject_id_", "")
             out_names.append(name)
     else:
         path, name, ext = split_filename(in_files[0])
@@ -472,37 +547,34 @@ def remove_identical_paths(in_files):
 def maketypelist(rowheadings, shape, extraheadingBool, extraheading):
     typelist = []
     if rowheadings:
-        typelist.append(('heading', 'a40'))
+        typelist.append(("heading", "a40"))
     if len(shape) > 1:
-        for idx in range(1, (min(shape) + 1)):
-            typelist.append((str(idx), float))
+        typelist.extend((str(idx), float) for idx in range(1, (min(shape) + 1)))
     else:
-        for idx in range(1, (shape[0] + 1)):
-            typelist.append((str(idx), float))
+        typelist.extend((str(idx), float) for idx in range(1, (shape[0] + 1)))
     if extraheadingBool:
-        typelist.append((extraheading, 'a40'))
+        typelist.append((extraheading, "a40"))
     iflogger.info(typelist)
     return typelist


-def makefmtlist(output_array, typelist, rowheadingsBool, shape,
-                extraheadingBool):
+def makefmtlist(output_array, typelist, rowheadingsBool, shape, extraheadingBool):
     fmtlist = []
     if rowheadingsBool:
-        fmtlist.append('%s')
+        fmtlist.append("%s")
     if len(shape) > 1:
         output = np.zeros(max(shape), typelist)
         for idx in range(1, min(shape) + 1):
             output[str(idx)] = output_array[:, idx - 1]
-            fmtlist.append('%f')
+            fmtlist.append("%f")
     else:
         output = np.zeros(1, typelist)
         for idx in range(1, len(output_array) + 1):
             output[str(idx)] = output_array[idx - 1]
-            fmtlist.append('%f')
+            fmtlist.append("%f")
     if extraheadingBool:
-        fmtlist.append('%s')
-    fmt = ','.join(fmtlist)
+        fmtlist.append("%s")
+    fmt = ",".join(fmtlist)
     return fmt, output
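
# Aside: maketypelist/makefmtlist pair a NumPy structured dtype with a
# matching "%"-format string so np.savetxt can emit a string heading column
# next to float columns. A minimal sketch of the pattern (a unicode "U40"
# field is used here for readable output; the interface itself uses "a40"):
import numpy as np

typelist = [("heading", "U40"), ("1", float), ("2", float)]
output = np.zeros(2, typelist)
output["heading"] = ['"r1"', '"r2"']
output["1"] = [0.1, 0.2]
output["2"] = [0.3, 0.4]
np.savetxt("rows.csv", output, "%s,%f,%f")  # one format specifier per field
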
@@ -510,99 +582,101 @@ class MergeCSVFilesInputSpec(TraitedSpec):
     in_files = InputMultiPath(
         File(exists=True),
         mandatory=True,
-        desc='Input comma-separated value (CSV) files')
+        desc="Input comma-separated value (CSV) files",
+    )
     out_file = File(
-        'merged.csv',
-        usedefault=True,
-        desc='Output filename for merged CSV file')
+        "merged.csv", usedefault=True, desc="Output filename for merged CSV file"
+    )
     column_headings = traits.List(
         traits.Str,
-        desc='List of column headings to save in merged CSV file\
-        (must be equal to number of input files). If left undefined, these\
-        will be pulled from the input filenames.')
+        desc="List of column headings to save in merged CSV file\
+        (must be equal to number of input files). If left undefined, these\
+        will be pulled from the input filenames.",
+    )
     row_headings = traits.List(
         traits.Str,
-        desc='List of row headings to save in merged CSV file\
-        (must be equal to number of rows in the input files).')
+        desc="List of row headings to save in merged CSV file\
+        (must be equal to number of rows in the input files).",
+    )
     row_heading_title = traits.Str(
-        'label',
+        "label",
         usedefault=True,
-        desc='Column heading for the row headings\
-        added')
-    extra_column_heading = traits.Str(
-        desc='New heading to add for the added field.')
+        desc="Column heading for the row headings\
+        added",
+    )
+    extra_column_heading = traits.Str(desc="New heading to add for the added field.")
     extra_field = traits.Str(
-        desc='New field to add to each row. This is useful for saving the\
-        group or subject ID in the file.')
+        desc="New field to add to each row. This is useful for saving the\
+        group or subject ID in the file."
+    )


 class MergeCSVFilesOutputSpec(TraitedSpec):
-    csv_file = File(desc='Output CSV file containing columns ')
+    csv_file = File(desc="Output CSV file containing columns ")


 class MergeCSVFiles(BaseInterface):
-    """This interface is designed to facilitate data loading in the R environment.
-    It takes input CSV files and merges them into a single CSV file.
+    """
+    Merge several CSV files into a single CSV file.
+
+    This interface is designed to facilitate data loading in the R environment.
     If provided, it will also incorporate column heading names into the
     resulting CSV file.
-
     CSV files are easily loaded in R, for use in statistical processing.
     For further information, see cran.r-project.org/doc/manuals/R-data.pdf

     Example
     -------
-
     >>> from nipype.algorithms import misc
     >>> mat2csv = misc.MergeCSVFiles()
     >>> mat2csv.inputs.in_files = ['degree.mat','clustering.mat']
     >>> mat2csv.inputs.column_headings = ['degree','clustering']
     >>> mat2csv.run() # doctest: +SKIP
+
     """
+
     input_spec = MergeCSVFilesInputSpec
     output_spec = MergeCSVFilesOutputSpec

     def _run_interface(self, runtime):
         extraheadingBool = False
-        extraheading = ''
+        extraheading = ""
         rowheadingsBool = False
         """
         This block defines the column headings.
         """
         if isdefined(self.inputs.column_headings):
-            iflogger.info('Column headings have been provided:')
+            iflogger.info("Column headings have been provided:")
             headings = self.inputs.column_headings
         else:
-            iflogger.info(
-                'Column headings not provided! Pulled from input filenames:')
+            iflogger.info("Column headings not provided! Pulled from input filenames:")
             headings = remove_identical_paths(self.inputs.in_files)

         if isdefined(self.inputs.extra_field):
             if isdefined(self.inputs.extra_column_heading):
                 extraheading = self.inputs.extra_column_heading
-                iflogger.info('Extra column heading provided: %s',
-                              extraheading)
+                iflogger.info("Extra column heading provided: %s", extraheading)
             else:
-                extraheading = 'type'
-                iflogger.info(
-                    'Extra column heading was not defined. Using "type"')
+                extraheading = "type"
+                iflogger.info('Extra column heading was not defined. Using "type"')
             headings.append(extraheading)
             extraheadingBool = True

         if len(self.inputs.in_files) == 1:
-            iflogger.warning('Only one file input!')
+            iflogger.warning("Only one file input!")

         if isdefined(self.inputs.row_headings):
-            iflogger.info('Row headings have been provided. Adding "labels"'
-                          'column header.')
-            prefix = '"{p}","'.format(p=self.inputs.row_heading_title)
-            csv_headings = prefix + '","'.join(
-                itertools.chain(headings)) + '"\n'
+            iflogger.info(
+                'Row headings have been provided. Adding "labels" column header.'
+            )
+            prefix = f'"{self.inputs.row_heading_title}","'
+            csv_headings = prefix + '","'.join(itertools.chain(headings)) + '"\n'
             rowheadingsBool = True
         else:
-            iflogger.info('Row headings have not been provided.')
+            iflogger.info("Row headings have not been provided.")
             csv_headings = '"' + '","'.join(itertools.chain(headings)) + '"\n'

-        iflogger.info('Final Headings:')
+        iflogger.info("Final Headings:")
         iflogger.info(csv_headings)
         """
         Next we merge the arrays and define the output text file
@@ -610,18 +684,18 @@ def _run_interface(self, runtime):
         output_array = merge_csvs(self.inputs.in_files)

         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == '.csv':
-            ext = '.csv'
+        if ext != ".csv":
+            ext = ".csv"

         out_file = op.abspath(name + ext)
-        with open(out_file, 'w') as file_handle:
+        with open(out_file, "w") as file_handle:
             file_handle.write(csv_headings)

         shape = np.shape(output_array)
-        typelist = maketypelist(rowheadingsBool, shape, extraheadingBool,
-                                extraheading)
-        fmt, output = makefmtlist(output_array, typelist, rowheadingsBool,
-                                  shape, extraheadingBool)
+        typelist = maketypelist(rowheadingsBool, shape, extraheadingBool, extraheading)
+        fmt, output = makefmtlist(
+            output_array, typelist, rowheadingsBool, shape, extraheadingBool
+        )

         if rowheadingsBool:
             row_heading_list = self.inputs.row_headings
@@ -629,8 +703,8 @@ def _run_interface(self, runtime):
             for row_heading in row_heading_list:
                 row_heading_with_quotes = '"' + row_heading + '"'
                 row_heading_list_with_quotes.append(row_heading_with_quotes)
-            row_headings = np.array(row_heading_list_with_quotes, dtype='|S40')
-            output['heading'] = row_headings
+            row_headings = np.array(row_heading_list_with_quotes, dtype="|S40")
+            output["heading"] = row_headings

         if isdefined(self.inputs.extra_field):
             extrafieldlist = []
@@ -638,79 +712,77 @@ def _run_interface(self, runtime):
                 mx = shape[0]
             else:
                 mx = 1
-            for idx in range(0, mx):
-                extrafieldlist.append(self.inputs.extra_field)
+            extrafieldlist.extend(self.inputs.extra_field for idx in range(mx))
             iflogger.info(len(extrafieldlist))
             output[extraheading] = extrafieldlist
         iflogger.info(output)
         iflogger.info(fmt)
-        with open(out_file, 'a') as file_handle:
-            np.savetxt(file_handle, output, fmt, delimiter=',')
+        with open(out_file, "a") as file_handle:
+            np.savetxt(file_handle, output, fmt, delimiter=",")

         return runtime

     def _list_outputs(self):
         outputs = self.output_spec().get()
         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == '.csv':
-            ext = '.csv'
+        if ext != ".csv":
+            ext = ".csv"
         out_file = op.abspath(name + ext)
-        outputs['csv_file'] = out_file
+        outputs["csv_file"] = out_file
         return outputs


 class AddCSVColumnInputSpec(TraitedSpec):
     in_file = File(
-        exists=True,
-        mandatory=True,
-        desc='Input comma-separated value (CSV) files')
+        exists=True, mandatory=True, desc="Input comma-separated value (CSV) files"
+    )
     out_file = File(
-        'extra_heading.csv',
-        usedefault=True,
-        desc='Output filename for merged CSV file')
-    extra_column_heading = traits.Str(
-        desc='New heading to add for the added field.')
+        "extra_heading.csv", usedefault=True, desc="Output filename for merged CSV file"
+    )
+    extra_column_heading = traits.Str(desc="New heading to add for the added field.")
     extra_field = traits.Str(
-        desc='New field to add to each row. This is useful for saving the\
-        group or subject ID in the file.')
+        desc="New field to add to each row. This is useful for saving the\
+        group or subject ID in the file."
+    )


 class AddCSVColumnOutputSpec(TraitedSpec):
-    csv_file = File(desc='Output CSV file containing columns ')
+    csv_file = File(desc="Output CSV file containing columns ")


 class AddCSVColumn(BaseInterface):
-    """Short interface to add an extra column and field to a text file
+    """
+    Short interface to add an extra column and field to a text file.

     Example
     -------
-
     >>> from nipype.algorithms import misc
     >>> addcol = misc.AddCSVColumn()
     >>> addcol.inputs.in_file = 'degree.csv'
     >>> addcol.inputs.extra_column_heading = 'group'
     >>> addcol.inputs.extra_field = 'male'
     >>> addcol.run() # doctest: +SKIP
+
     """
+
     input_spec = AddCSVColumnInputSpec
     output_spec = AddCSVColumnOutputSpec

     def _run_interface(self, runtime):
-        in_file = open(self.inputs.in_file, 'r')
+        in_file = open(self.inputs.in_file)
         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == '.csv':
-            ext = '.csv'
+        if ext != ".csv":
+            ext = ".csv"
         out_file = op.abspath(name + ext)

-        out_file = open(out_file, 'w')
+        out_file = open(out_file, "w")
         firstline = in_file.readline()
-        firstline = firstline.replace('\n', '')
-        new_firstline = firstline + ',"' + \
-            self.inputs.extra_column_heading + '"\n'
+        firstline = firstline.replace("\n", "")
+        new_firstline = firstline + ',"' + self.inputs.extra_column_heading + '"\n'
         out_file.write(new_firstline)
         for line in in_file:
-            new_line = line.replace('\n', '')
-            new_line = new_line + ',' + self.inputs.extra_field + '\n'
+            new_line = line.replace("\n", "")
+            new_line = new_line + "," + self.inputs.extra_field + "\n"
             out_file.write(new_line)
         in_file.close()
         out_file.close()
@@ -719,35 +791,35 @@ def _run_interface(self, runtime):
     def _list_outputs(self):
         outputs = self.output_spec().get()
         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == '.csv':
-            ext = '.csv'
+        if ext != ".csv":
+            ext = ".csv"
         out_file = op.abspath(name + ext)
-        outputs['csv_file'] = out_file
+        outputs["csv_file"] = out_file
         return outputs


 class AddCSVRowInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
-    in_file = traits.File(
-        mandatory=True, desc='Input comma-separated value (CSV) files')
+    in_file = File(mandatory=True, desc="Input comma-separated value (CSV) files")
     _outputs = traits.Dict(traits.Any, value={}, usedefault=True)

     def __setattr__(self, key, value):
         if key not in self.copyable_trait_names():
             if not isdefined(value):
-                super(AddCSVRowInputSpec, self).__setattr__(key, value)
+                super().__setattr__(key, value)
             self._outputs[key] = value
         else:
             if key in self._outputs:
                 self._outputs[key] = value
-            super(AddCSVRowInputSpec, self).__setattr__(key, value)
+            super().__setattr__(key, value)


 class AddCSVRowOutputSpec(TraitedSpec):
-    csv_file = File(desc='Output CSV file containing rows ')
+    csv_file = File(desc="Output CSV file containing rows ")


 class AddCSVRow(BaseInterface):
-    """Simple interface to add an extra row to a csv file
+    """
+    Simple interface to add an extra row to a CSV file.

     .. note:: Requires `pandas <http://pandas.pydata.org/>`_
@@ -760,7 +832,6 @@ class AddCSVRow(BaseInterface):

     Example
     -------
-
     >>> from nipype.algorithms import misc
     >>> addrow = misc.AddCSVRow()
     >>> addrow.inputs.in_file = 'scores.csv'
@@ -769,12 +840,14 @@ class AddCSVRow(BaseInterface):
     >>> addrow.inputs.subject_id = 'S400'
     >>> addrow.inputs.list_of_values = [ 0.4, 0.7, 0.3 ]
     >>> addrow.run() # doctest: +SKIP
+
     """
+
     input_spec = AddCSVRowInputSpec
     output_spec = AddCSVRowOutputSpec

     def __init__(self, infields=None, force_run=True, **kwargs):
-        super(AddCSVRow, self).__init__(**kwargs)
+        super().__init__(**kwargs)
         undefined_traits = {}
         self._infields = infields
         self._have_lock = False
@@ -794,35 +867,38 @@ def _run_interface(self, runtime):
         try:
             import pandas as pd
         except ImportError as e:
-            raise_from(
-                ImportError('This interface requires pandas '
-                            '(http://pandas.pydata.org/) to run.'), e)
+            raise ImportError(
+                "This interface requires pandas (http://pandas.pydata.org/) to run."
+            ) from e

         try:
-            import lockfile as pl
+            from filelock import SoftFileLock
+
             self._have_lock = True
         except ImportError:
             from warnings import warn
-            warn(('Python module lockfile was not found: AddCSVRow will not be'
-                  ' thread-safe in multi-processor execution'))
+
+            warn(
+                "Python module filelock was not found: AddCSVRow will not be"
+                " thread-safe in multi-processor execution"
+            )

         input_dict = {}
         for key, val in list(self.inputs._outputs.items()):
             # expand lists to several columns
-            if key == 'trait_added' and val in self.inputs.copyable_trait_names(
-            ):
+            if key == "trait_added" and val in self.inputs.copyable_trait_names():
                 continue

             if isinstance(val, list):
                 for i, v in enumerate(val):
-                    input_dict['%s_%d' % (key, i)] = v
+                    input_dict["%s_%d" % (key, i)] = v
             else:
                 input_dict[key] = val

         df = pd.DataFrame([input_dict])

         if self._have_lock:
-            self._lock = pl.FileLock(self.inputs.in_file)
+            self._lock = SoftFileLock("%s.lock" % self.inputs.in_file)

             # Acquire lock
             self._lock.acquire()
@@ -831,28 +907,21 @@ def _run_interface(self, runtime):
             formerdf = pd.read_csv(self.inputs.in_file, index_col=0)
             df = pd.concat([formerdf, df], ignore_index=True)

-        with open(self.inputs.in_file, 'w') as f:
+        with open(self.inputs.in_file, "w") as f:
             df.to_csv(f)

         if self._have_lock:
             self._lock.release()

-        # Using nipype.external.portalocker this might be something like:
-        # with pl.Lock(self.inputs.in_file, timeout=1) as fh:
-        #     if op.exists(fh):
-        #         formerdf = pd.read_csv(fh, index_col=0)
-        #         df = pd.concat([formerdf, df], ignore_index=True)
-        #     df.to_csv(fh)
-
         return runtime

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['csv_file'] = self.inputs.in_file
+        outputs["csv_file"] = self.inputs.in_file
         return outputs

     def _outputs(self):
-        return self._add_output_traits(super(AddCSVRow, self)._outputs())
+        return self._add_output_traits(super()._outputs())

     def _add_output_traits(self, base):
         return base
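
# Aside: the locking dependency above moves from `lockfile` to `filelock`.
# A hedged sketch of the SoftFileLock pattern now used in _run_interface
# (filename illustrative; the lock file sits next to the CSV being appended):
from filelock import SoftFileLock

lock = SoftFileLock("scores.csv.lock")
lock.acquire()
try:
    pass  # read the CSV, append the new row, rewrite the file
finally:
    lock.release()  # always release, even if the write fails
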
@@ -862,42 +931,44 @@ class CalculateNormalizedMomentsInputSpec(TraitedSpec):
     timeseries_file = File(
         exists=True,
         mandatory=True,
-        desc='Text file with timeseries in columns and timepoints in rows,\
-        whitespace separated')
+        desc="Text file with timeseries in columns and timepoints in rows,\
+        whitespace separated",
+    )
     moment = traits.Int(
         mandatory=True,
         desc="Define which moment should be calculated, 3 for skewness, 4 for\
-        kurtosis.")
+        kurtosis.",
+    )


 class CalculateNormalizedMomentsOutputSpec(TraitedSpec):
-    moments = traits.List(traits.Float(), desc='Moments')
+    moments = traits.List(traits.Float(), desc="Moments")


 class CalculateNormalizedMoments(BaseInterface):
-    """Calculates moments of timeseries.
+    """
+    Calculates moments of timeseries.

     Example
     -------
-
     >>> from nipype.algorithms import misc
     >>> skew = misc.CalculateNormalizedMoments()
     >>> skew.inputs.moment = 3
     >>> skew.inputs.timeseries_file = 'timeseries.txt'
     >>> skew.run() # doctest: +SKIP
+
     """
+
     input_spec = CalculateNormalizedMomentsInputSpec
     output_spec = CalculateNormalizedMomentsOutputSpec

     def _run_interface(self, runtime):
-
-        self._moments = calc_moments(self.inputs.timeseries_file,
-                                     self.inputs.moment)
+        self._moments = calc_moments(self.inputs.timeseries_file, self.inputs.moment)
         return runtime

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['skewness'] = self._moments
+        outputs["moments"] = self._moments
         return outputs
@@ -910,47 +981,50 @@ def calc_moments(timeseries_file, moment):
     """
     import scipy.stats as stats
+
     timeseries = np.genfromtxt(timeseries_file)

     m2 = stats.moment(timeseries, 2, axis=0)
     m3 = stats.moment(timeseries, moment, axis=0)
-    zero = (m2 == 0)
-    return np.where(zero, 0, m3 / m2**(moment / 2.0))
+    zero = m2 == 0
+    return np.where(zero, 0, m3 / m2 ** (moment / 2.0))


 class AddNoiseInputSpec(TraitedSpec):
     in_file = File(
         exists=True,
         mandatory=True,
-        desc='input image that will be corrupted with noise')
+        desc="input image that will be corrupted with noise",
+    )
     in_mask = File(
         exists=True,
-        desc=('input mask, voxels outside this mask '
-              'will be considered background'))
-    snr = traits.Float(10.0, desc='desired output SNR in dB', usedefault=True)
+        desc=("input mask, voxels outside this mask will be considered background"),
+    )
+    snr = traits.Float(10.0, desc="desired output SNR in dB", usedefault=True)
     dist = traits.Enum(
-        'normal',
-        'rician',
+        "normal",
+        "rician",
         usedefault=True,
         mandatory=True,
-        desc=('desired noise distribution'))
+        desc=("desired noise distribution"),
+    )
     bg_dist = traits.Enum(
-        'normal',
-        'rayleigh',
+        "normal",
+        "rayleigh",
         usedefault=True,
         mandatory=True,
-        desc=('desired noise distribution, currently '
-              'only normal is implemented'))
-    out_file = File(desc='desired output filename')
+        desc=("desired noise distribution, currently only normal is implemented"),
+    )
+    out_file = File(desc="desired output filename")


 class AddNoiseOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='corrupted image')
+    out_file = File(exists=True, desc="corrupted image")


 class AddNoise(BaseInterface):
     """
-    Corrupts with noise the input image
+    Corrupts with noise the input image.

     Example
@@ -963,16 +1037,17 @@ class AddNoise(BaseInterface):
     >>> noise.run() # doctest: +SKIP
     """
+
     input_spec = AddNoiseInputSpec
     output_spec = AddNoiseOutputSpec

     def _run_interface(self, runtime):
         in_image = nb.load(self.inputs.in_file)
-        in_data = in_image.get_data()
+        in_data = in_image.get_fdata()
         snr = self.inputs.snr

         if isdefined(self.inputs.in_mask):
-            in_mask = nb.load(self.inputs.in_mask).get_data()
+            in_mask = np.asanyarray(nb.load(self.inputs.in_mask).dataobj)
         else:
             in_mask = np.ones_like(in_data)

@@ -981,7 +1056,8 @@ def _run_interface(self, runtime):
             mask=in_mask,
             snr_db=snr,
             dist=self.inputs.dist,
-            bg_dist=self.inputs.bg_dist)
+            bg_dist=self.inputs.bg_dist,
+        )
         res_im = nb.Nifti1Image(result, in_image.affine, in_image.header)
         res_im.to_filename(self._gen_output_filename())
         return runtime
@@ -989,8 +1065,7 @@ def _run_interface(self, runtime):
     def _gen_output_filename(self):
         if not isdefined(self.inputs.out_file):
             _, base, ext = split_filename(self.inputs.in_file)
-            out_file = os.path.abspath('%s_SNR%03.2f%s' %
-                                       (base, self.inputs.snr, ext))
+            out_file = os.path.abspath(f"{base}_SNR{self.inputs.snr:03.2f}{ext}")
         else:
             out_file = self.inputs.out_file

@@ -998,20 +1073,16 @@ def _gen_output_filename(self):
     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_file'] = self._gen_output_filename()
+        outputs["out_file"] = self._gen_output_filename()
         return outputs

-    def gen_noise(self,
-                  image,
-                  mask=None,
-                  snr_db=10.0,
-                  dist='normal',
-                  bg_dist='normal'):
+    def gen_noise(self, image, mask=None, snr_db=10.0, dist="normal", bg_dist="normal"):
         """
         Generates a copy of an image with a certain amount of
         added gaussian noise (rayleigh for background in mask)
         """
         from math import sqrt
+
         snr = sqrt(np.power(10.0, snr_db / 10.0))

         if mask is None:
@@ -1025,36 +1096,37 @@ def gen_noise(self, image, mask=None, snr_db=10.0, dist="normal", bg_dist="normal"):

         signal = image[mask > 0].reshape(-1)

-        if dist == 'normal':
+        if dist == "normal":
             signal = signal - signal.mean()
             sigma_n = sqrt(signal.var() / snr)
             noise = np.random.normal(size=image.shape, scale=sigma_n)

-            if (np.any(mask == 0)) and (bg_dist == 'rayleigh'):
+            if (np.any(mask == 0)) and (bg_dist == "rayleigh"):
                 bg_noise = np.random.rayleigh(size=image.shape, scale=sigma_n)
                 noise[mask == 0] = bg_noise[mask == 0]

             im_noise = image + noise

-        elif dist == 'rician':
+        elif dist == "rician":
             sigma_n = signal.mean() / snr
             n_1 = np.random.normal(size=image.shape, scale=sigma_n)
             n_2 = np.random.normal(size=image.shape, scale=sigma_n)
             stde_1 = n_1 / sqrt(2.0)
             stde_2 = n_2 / sqrt(2.0)
-            im_noise = np.sqrt((image + stde_1)**2 + (stde_2)**2)
+            im_noise = np.sqrt((image + stde_1) ** 2 + (stde_2) ** 2)
         else:
-            raise NotImplementedError(('Only normal and rician distributions '
-                                       'are supported'))
+            raise NotImplementedError(
+                "Only normal and rician distributions are supported"
+            )

         return im_noise
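
# Aside: gen_noise's Rician branch follows the standard magnitude-image
# model: the noisy value is the modulus of the true signal plus two
# independent Gaussian channels. A minimal sketch (sigma chosen arbitrarily):
import numpy as np
from math import sqrt

image = np.ones((8, 8), dtype=np.float32)
sigma_n = 0.1
n_1 = np.random.normal(size=image.shape, scale=sigma_n)
n_2 = np.random.normal(size=image.shape, scale=sigma_n)
im_noise = np.sqrt((image + n_1 / sqrt(2.0)) ** 2 + (n_2 / sqrt(2.0)) ** 2)
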


 class NormalizeProbabilityMapSetInputSpec(TraitedSpec):
     in_files = InputMultiPath(
-        File(exists=True, mandatory=True, desc='The tpms to be normalized'))
-    in_mask = File(
-        exists=True, desc='Masked voxels must sum up 1.0, 0.0 otherwise.')
+        File(exists=True, mandatory=True, desc="The tpms to be normalized")
+    )
+    in_mask = File(exists=True, desc="Masked voxels must sum up 1.0, 0.0 otherwise.")


 class NormalizeProbabilityMapSetOutputSpec(TraitedSpec):
@@ -1062,22 +1134,25 @@ class NormalizeProbabilityMapSetOutputSpec(TraitedSpec):


 class NormalizeProbabilityMapSet(BaseInterface):
-    """ Returns the input tissue probability maps (tpms, aka volume fractions)
-    normalized to sum up 1.0 at each voxel within the mask.
+    """
+    Returns the input tissue probability maps (tpms, aka volume fractions).
+
+    The tissue probability maps are normalized to sum up 1.0 at each voxel within the mask.

     .. note:: Please recall this is not a spatial normalization algorithm

     Example
     -------
-
     >>> from nipype.algorithms import misc
     >>> normalize = misc.NormalizeProbabilityMapSet()
     >>> normalize.inputs.in_files = [ 'tpm_00.nii.gz', 'tpm_01.nii.gz', \
'tpm_02.nii.gz' ]
     >>> normalize.inputs.in_mask = 'tpms_msk.nii.gz'
     >>> normalize.run() # doctest: +SKIP
+
     """
+
     input_spec = NormalizeProbabilityMapSetInputSpec
     output_spec = NormalizeProbabilityMapSetOutputSpec
@@ -1092,33 +1167,34 @@ def _run_interface(self, runtime):

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_files'] = self._out_filenames
+        outputs["out_files"] = self._out_filenames
         return outputs


 class SplitROIsInputSpec(TraitedSpec):
-    in_file = File(exists=True, mandatory=True, desc='file to be splitted')
-    in_mask = File(exists=True, desc='only process files inside mask')
-    roi_size = traits.Tuple(
-        traits.Int, traits.Int, traits.Int, desc='desired ROI size')
+    in_file = File(exists=True, mandatory=True, desc="file to be split")
+    in_mask = File(exists=True, desc="only process files inside mask")
+    roi_size = Tuple(traits.Int, traits.Int, traits.Int, desc="desired ROI size")


 class SplitROIsOutputSpec(TraitedSpec):
-    out_files = OutputMultiPath(File(exists=True), desc='the resulting ROIs')
+    out_files = OutputMultiPath(File(exists=True), desc="the resulting ROIs")
     out_masks = OutputMultiPath(
-        File(exists=True), desc='a mask indicating valid values')
+        File(exists=True), desc="a mask indicating valid values"
+    )
     out_index = OutputMultiPath(
-        File(exists=True), desc='arrays keeping original locations')
+        File(exists=True), desc="arrays keeping original locations"
+    )


 class SplitROIs(BaseInterface):
     """
     Splits a 3D image in small chunks to enable parallel processing.
+
     ROIs keep time series structure in 4D images.

     Example
     -------
-
     >>> from nipype.algorithms import misc
     >>> rois = misc.SplitROIs()
     >>> rois.inputs.in_file = 'diffusion.nii'
@@ -1126,6 +1202,7 @@ class SplitROIs(BaseInterface):
     >>> rois.run() # doctest: +SKIP

     """
+
     input_spec = SplitROIsInputSpec
     output_spec = SplitROIsOutputSpec
@@ -1140,9 +1217,9 @@ def _run_interface(self, runtime):
             roisize = self.inputs.roi_size

         res = split_rois(self.inputs.in_file, mask, roisize)
-        self._outnames['out_files'] = res[0]
-        self._outnames['out_masks'] = res[1]
-        self._outnames['out_index'] = res[2]
+        self._outnames["out_files"] = res[0]
+        self._outnames["out_masks"] = res[1]
+        self._outnames["out_index"] = res[2]
         return runtime

     def _list_outputs(self):
@@ -1154,25 +1231,26 @@ def _list_outputs(self):

 class MergeROIsInputSpec(TraitedSpec):
     in_files = InputMultiPath(
-        File(exists=True, mandatory=True, desc='files to be re-merged'))
+        File(exists=True, mandatory=True, desc="files to be re-merged")
+    )
     in_index = InputMultiPath(
-        File(exists=True, mandatory=True),
-        desc='array keeping original locations')
-    in_reference = File(exists=True, desc='reference file')
+        File(exists=True, mandatory=True), desc="array keeping original locations"
+    )
+    in_reference = File(exists=True, desc="reference file")


 class MergeROIsOutputSpec(TraitedSpec):
-    merged_file = File(exists=True, desc='the recomposed file')
+    merged_file = File(exists=True, desc="the recomposed file")


 class MergeROIs(BaseInterface):
     """
     Splits a 3D image in small chunks to enable parallel processing.
+
     ROIs keep time series structure in 4D images.

     Example
     -------
-
     >>> from nipype.algorithms import misc
     >>> rois = misc.MergeROIs()
     >>> rois.inputs.in_files = ['roi%02d.nii' % i for i in range(1, 6)]
@@ -1181,18 +1259,20 @@ class MergeROIs(BaseInterface):
     >>> rois.run() # doctest: +SKIP

     """
+
     input_spec = MergeROIsInputSpec
     output_spec = MergeROIsOutputSpec

     def _run_interface(self, runtime):
-        res = merge_rois(self.inputs.in_files, self.inputs.in_index,
-                         self.inputs.in_reference)
+        res = merge_rois(
+            self.inputs.in_files, self.inputs.in_index, self.inputs.in_reference
+        )
         self._merged = res
         return runtime

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['merged_file'] = self._merged
+        outputs["merged_file"] = self._merged
         return outputs
@@ -1213,36 +1293,35 @@ def normalize_tpms(in_files, in_mask=None, out_files=None):
     if len(out_files) != len(in_files):
         for i, finname in enumerate(in_files):
             fname, fext = op.splitext(op.basename(finname))
-            if fext == '.gz':
+            if fext == ".gz":
                 fname, fext2 = op.splitext(fname)
                 fext = fext2 + fext

-            out_file = op.abspath('%s_norm_%02d%s' % (fname, i, fext))
+            out_file = op.abspath("%s_norm_%02d%s" % (fname, i, fext))
             out_files += [out_file]

-    imgs = [nb.load(fim, mmap=NUMPY_MMAP) for fim in in_files]
+    imgs = [nb.load(fim) for fim in in_files]

     if len(in_files) == 1:
-        img_data = imgs[0].get_data()
+        img_data = imgs[0].get_fdata(dtype=np.float32)
         img_data[img_data > 0.0] = 1.0
         hdr = imgs[0].header.copy()
-        hdr['data_type'] = 16
         hdr.set_data_dtype(np.float32)
-        nb.save(
-            nb.Nifti1Image(img_data.astype(np.float32), imgs[0].affine, hdr),
-            out_files[0])
+        nb.save(nb.Nifti1Image(img_data, imgs[0].affine, hdr), out_files[0])
         return out_files[0]

-    img_data = np.array([im.get_data() for im in imgs]).astype(np.float32)
+    img_data = np.stack(
+        [im.get_fdata(caching="unchanged", dtype=np.float32) for im in imgs]
+    )
     # img_data[img_data>1.0] = 1.0
     img_data[img_data < 0.0] = 0.0
     weights = np.sum(img_data, axis=0)

-    msk = np.ones_like(imgs[0].get_data())
+    msk = np.ones(imgs[0].shape)
     msk[weights <= 0] = 0

     if in_mask is not None:
-        msk = nb.load(in_mask, mmap=NUMPY_MMAP).get_data()
+        msk = np.asanyarray(nb.load(in_mask).dataobj)
         msk[msk <= 0] = 0
         msk[msk > 0] = 1
@@ -1252,11 +1331,10 @@ def normalize_tpms(in_files, in_mask=None, out_files=None):
         data = np.ma.masked_equal(img_data[i], 0)
         probmap = data / weights
         hdr = imgs[i].header.copy()
-        hdr['data_type'] = 16
-        hdr.set_data_dtype('float32')
+        hdr.set_data_dtype("float32")
         nb.save(
-            nb.Nifti1Image(probmap.astype(np.float32), imgs[i].affine, hdr),
-            out_file)
+            nb.Nifti1Image(probmap.astype(np.float32), imgs[i].affine, hdr), out_file
+        )

     return out_files
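
# Aside: the core of normalize_tpms is a masked divide so the tissue maps sum
# to 1.0 voxelwise; np.ma keeps zero-valued voxels out of the division. A
# minimal 1D sketch (two "maps" of three voxels, middle voxel outside mask):
import numpy as np

img_data = np.array([[0.2, 0.0, 0.3], [0.6, 0.0, 0.3]], dtype=np.float32)
weights = np.sum(img_data, axis=0)
probmaps = [np.ma.masked_equal(m, 0) / weights for m in img_data]
# in-mask columns now sum to 1.0; the masked voxel stays masked (filled as 0)
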
@@ -1267,13 +1345,13 @@ def split_rois(in_file, mask=None, roishape=None):
     """
     import nibabel as nb
     import numpy as np
-    from math import sqrt, ceil
+    from math import ceil
     import os.path as op

     if roishape is None:
         roishape = (10, 10, 1)

-    im = nb.load(in_file, mmap=NUMPY_MMAP)
+    im = nb.load(in_file)
     imshape = im.shape
     dshape = imshape[:3]
     nvols = imshape[-1]
@@ -1281,7 +1359,7 @@ def split_rois(in_file, mask=None, roishape=None):
     droishape = (roishape[0], roishape[1], roishape[2], nvols)

     if mask is not None:
-        mask = nb.load(mask, mmap=NUMPY_MMAP).get_data()
+        mask = np.asanyarray(nb.load(mask).dataobj)
         mask[mask > 0] = 1
         mask[mask < 1] = 0
     else:
@@ -1292,13 +1370,14 @@ def split_rois(in_file, mask=None, roishape=None):
     els = np.sum(mask)
     nrois = int(ceil(els / float(roisize)))

-    data = im.get_data().reshape((mask.size, -1))
+    data = np.asanyarray(im.dataobj).reshape((mask.size, -1))
     data = np.squeeze(data.take(nzels, axis=0))
     nvols = data.shape[-1]

-    roidefname = op.abspath('onesmask.nii.gz')
-    nb.Nifti1Image(np.ones(roishape, dtype=np.uint8), None,
-                   None).to_filename(roidefname)
+    roidefname = op.abspath("onesmask.nii.gz")
+    nb.Nifti1Image(np.ones(roishape, dtype=np.uint8), None, None).to_filename(
+        roidefname
+    )

     out_files = []
     out_mask = []
@@ -1314,24 +1393,25 @@ def split_rois(in_file, mask=None, roishape=None):
             last = els
         droi = data[first:last, ...]
-        iname = op.abspath('roi%010d_idx' % i)
-        out_idxs.append(iname + '.npz')
-        np.savez(iname, (nzels[0][first:last], ))
+        iname = op.abspath("roi%010d_idx" % i)
+        out_idxs.append(iname + ".npz")
+        np.savez(iname, (nzels[0][first:last],))

         if fill > 0:
-            droi = np.vstack((droi,
-                              np.zeros(
-                                  (int(fill), int(nvols)), dtype=np.float32)))
-            partialmsk = np.ones((roisize, ), dtype=np.uint8)
-            partialmsk[-int(fill):] = 0
-            partname = op.abspath('partialmask.nii.gz')
-            nb.Nifti1Image(partialmsk.reshape(roishape), None,
-                           None).to_filename(partname)
+            droi = np.vstack(
+                (droi, np.zeros((int(fill), int(nvols)), dtype=np.float32))
+            )
+            partialmsk = np.ones((roisize,), dtype=np.uint8)
+            partialmsk[-int(fill) :] = 0
+            partname = op.abspath("partialmask.nii.gz")
+            nb.Nifti1Image(partialmsk.reshape(roishape), None, None).to_filename(
+                partname
+            )
             out_mask.append(partname)
         else:
             out_mask.append(roidefname)

-        fname = op.abspath('roi%010d.nii.gz' % i)
+        fname = op.abspath("roi%010d.nii.gz" % i)
         nb.Nifti1Image(droi.reshape(droishape), None, None).to_filename(fname)
         out_files.append(fname)
     return out_files, out_mask, out_idxs
@@ -1347,81 +1427,80 @@ def merge_rois(in_files, in_idxs, in_ref, dtype=None, out_file=None):
     import subprocess as sp

     if out_file is None:
-        out_file = op.abspath('merged.nii.gz')
+        out_file = op.abspath("merged.nii.gz")

     if dtype is None:
         dtype = np.float32

     # if file is compressed, uncompress using os
     # to avoid memory errors
-    if op.splitext(in_ref)[1] == '.gz':
+    if op.splitext(in_ref)[1] == ".gz":
         try:
-            iflogger.info('uncompress %i', in_ref)
-            sp.check_call(['gunzip', in_ref], stdout=sp.PIPE, shell=True)
+            iflogger.info("uncompress %s", in_ref)
+            sp.check_call(["gunzip", in_ref], stdout=sp.PIPE, shell=True)
             in_ref = op.splitext(in_ref)[0]
         except:
             pass

-    ref = nb.load(in_ref, mmap=NUMPY_MMAP)
+    ref = nb.load(in_ref)
    aff = ref.affine
     hdr = ref.header.copy()
     rsh = ref.shape
     del ref
     npix = rsh[0] * rsh[1] * rsh[2]
-    fcdata = nb.load(in_files[0]).get_data()
+    fcimg = nb.load(in_files[0])

-    if fcdata.ndim == 4:
-        ndirs = fcdata.shape[-1]
+    if len(fcimg.shape) == 4:
+        ndirs = fcimg.shape[-1]
     else:
         ndirs = 1
     newshape = (rsh[0], rsh[1], rsh[2], ndirs)
     hdr.set_data_dtype(dtype)
-    hdr.set_xyzt_units('mm', 'sec')
+    hdr.set_xyzt_units("mm", "sec")

     if ndirs < 300:
-        data = np.zeros((npix, ndirs))
+        data = np.zeros((npix, ndirs), dtype=dtype)
         for cname, iname in zip(in_files, in_idxs):
             f = np.load(iname)
-            idxs = np.squeeze(f['arr_0'])
-            cdata = nb.load(
-                cname, mmap=NUMPY_MMAP).get_data().reshape(-1, ndirs)
+            idxs = np.squeeze(f["arr_0"])
+            cdata = np.asanyarray(nb.load(cname).dataobj).reshape(-1, ndirs)
             nels = len(idxs)
-            idata = (idxs, )
+            idata = (idxs,)
             try:
                 data[idata, ...] = cdata[0:nels, ...]
             except:
-                print(('Consistency between indexes and chunks was '
-                       'lost: data=%s, chunk=%s') % (str(data.shape),
-                                                     str(cdata.shape)))
+                print(
+                    (
+                        "Consistency between indexes and chunks was "
+                        "lost: data=%s, chunk=%s"
+                    )
+                    % (str(data.shape), str(cdata.shape))
+                )
                 raise

-        hdr.set_data_shape(newshape)
-        nb.Nifti1Image(data.reshape(newshape).astype(dtype), aff,
-                       hdr).to_filename(out_file)
+        nb.Nifti1Image(data.reshape(newshape), aff, hdr).to_filename(out_file)

     else:
         hdr.set_data_shape(rsh[:3])
         nii = []
         for d in range(ndirs):
-            fname = op.abspath('vol%06d.nii' % d)
+            fname = op.abspath("vol%06d.nii" % d)
             nb.Nifti1Image(np.zeros(rsh[:3]), aff, hdr).to_filename(fname)
             nii.append(fname)

         for cname, iname in zip(in_files, in_idxs):
             f = np.load(iname)
-            idxs = np.squeeze(f['arr_0'])
+            idxs = np.squeeze(f["arr_0"])

             for d, fname in enumerate(nii):
-                data = nb.load(fname, mmap=NUMPY_MMAP).get_data().reshape(-1)
-                cdata = nb.load(
-                    cname, mmap=NUMPY_MMAP).get_data().reshape(-1, ndirs)[:, d]
+                data = np.asanyarray(nb.load(fname).dataobj).reshape(-1)
+                cdata = nb.load(cname).dataobj[..., d].reshape(-1)
                 nels = len(idxs)
-                idata = (idxs, )
+                idata = (idxs,)
                 data[idata] = cdata[0:nels]
-                nb.Nifti1Image(data.reshape(rsh[:3]), aff,
-                               hdr).to_filename(fname)
+                nb.Nifti1Image(data.reshape(rsh[:3]), aff, hdr).to_filename(fname)

-        imgs = [nb.load(im, mmap=NUMPY_MMAP) for im in nii]
+        imgs = [nb.load(im) for im in nii]
         allim = nb.concat_images(imgs)
         allim.to_filename(out_file)
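
# Aside: split_rois records, for each chunk, the flat voxel indices it came
# from (np.savez stores them under the default key "arr_0"); merge_rois reads
# them back to scatter chunk rows into a full-size array. The round trip,
# minimally (filenames illustrative):
import numpy as np

idxs = np.array([3, 5, 9])
np.savez("roi0000000000_idx", (idxs,))             # .npz suffix added by numpy
loaded = np.squeeze(np.load("roi0000000000_idx.npz")["arr_0"])
data = np.zeros(12)
data[(loaded,)] = [1.0, 2.0, 3.0]                  # same fancy indexing as merge_rois
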
@@ -1433,15 +1512,17 @@ class CalculateMedianInputSpec(BaseInterfaceInputSpec):
         File(
             exists=True,
             mandatory=True,
-            desc="One or more realigned Nifti 4D timeseries"))
+            desc="One or more realigned Nifti 4D timeseries",
+        )
+    )
     median_file = traits.Str(desc="Filename prefix to store median images")
     median_per_file = traits.Bool(
-        False, usedefault=True, desc="Calculate a median file for each Nifti")
+        False, usedefault=True, desc="Calculate a median file for each Nifti"
+    )


 class CalculateMedianOutputSpec(TraitedSpec):
-    median_files = OutputMultiPath(
-        File(exists=True), desc="One or more median images")
+    median_files = OutputMultiPath(File(exists=True), desc="One or more median images")


 class CalculateMedian(BaseInterface):
@@ -1450,18 +1531,18 @@ class CalculateMedian(BaseInterface):

     Example
     -------
-
     >>> from nipype.algorithms.misc import CalculateMedian
     >>> mean = CalculateMedian()
     >>> mean.inputs.in_files = 'functional.nii'
     >>> mean.run() # doctest: +SKIP

     """
+
     input_spec = CalculateMedianInputSpec
     output_spec = CalculateMedianOutputSpec

     def __init__(self, *args, **kwargs):
-        super(CalculateMedian, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._median_files = []

     def _gen_fname(self, suffix, idx=None, ext=None):
@@ -1473,27 +1554,27 @@ def _gen_fname(self, suffix, idx=None, ext=None):
         else:
             in_file = self.inputs.in_files
         fname, in_ext = op.splitext(op.basename(in_file))
-        if in_ext == '.gz':
+        if in_ext == ".gz":
             fname, in_ext2 = op.splitext(fname)
             in_ext = in_ext2 + in_ext
         if ext is None:
             ext = in_ext
-        if ext.startswith('.'):
+        if ext.startswith("."):
             ext = ext[1:]
         if self.inputs.median_file:
             outname = self.inputs.median_file
         else:
-            outname = '{}_{}'.format(fname, suffix)
+            outname = f"{fname}_{suffix}"
             if idx:
                 outname += str(idx)
-        return op.abspath('{}.{}'.format(outname, ext))
+        return op.abspath(f"{outname}.{ext}")

     def _run_interface(self, runtime):
         total = None
         self._median_files = []
         for idx, fname in enumerate(ensure_list(self.inputs.in_files)):
-            img = nb.load(fname, mmap=NUMPY_MMAP)
-            data = np.median(img.get_data(), axis=3)
+            img = nb.load(fname)
+            data = np.median(img.get_fdata(), axis=3)
             if self.inputs.median_per_file:
                 self._median_files.append(self._write_nifti(img, data, idx))
             else:
@@ -1507,16 +1588,15 @@ def _run_interface(self, runtime):

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['median_files'] = self._median_files
+        outputs["median_files"] = self._median_files
         return outputs

-    def _write_nifti(self, img, data, idx, suffix='median'):
+    def _write_nifti(self, img, data, idx, suffix="median"):
         if self.inputs.median_per_file:
             median_img = nb.Nifti1Image(data, img.affine, img.header)
             filename = self._gen_fname(suffix, idx=idx)
         else:
-            median_img = nb.Nifti1Image(data / (idx + 1), img.affine,
-                                        img.header)
+            median_img = nb.Nifti1Image(data / (idx + 1), img.affine, img.header)
             filename = self._gen_fname(suffix)
         median_img.to_filename(filename)
         return filename
@@ -1534,9 +1614,13 @@ class Distance(nam.Distance):

     def __init__(self, **inputs):
         super(nam.Distance, self).__init__(**inputs)
-        warnings.warn(("This interface has been deprecated since 0.10.0,"
-                       " please use nipype.algorithms.metrics.Distance"),
-                      DeprecationWarning)
+        warnings.warn(
+            (
+                "This interface has been deprecated since 0.10.0,"
+                " please use nipype.algorithms.metrics.Distance"
+            ),
+            DeprecationWarning,
+        )


 class Overlap(nam.Overlap):
@@ -1548,9 +1632,13 @@ class Overlap(nam.Overlap):

     def __init__(self, **inputs):
         super(nam.Overlap, self).__init__(**inputs)
-        warnings.warn(("This interface has been deprecated since 0.10.0,"
-                       " please use nipype.algorithms.metrics.Overlap"),
-                      DeprecationWarning)
+        warnings.warn(
+            (
+                "This interface has been deprecated since 0.10.0,"
+                " please use nipype.algorithms.metrics.Overlap"
+            ),
+            DeprecationWarning,
+        )


 class FuzzyOverlap(nam.FuzzyOverlap):
@@ -1563,9 +1651,13 @@ class FuzzyOverlap(nam.FuzzyOverlap):

     def __init__(self, **inputs):
         super(nam.FuzzyOverlap, self).__init__(**inputs)
-        warnings.warn(("This interface has been deprecated since 0.10.0,"
-                       " please use nipype.algorithms.metrics.FuzzyOverlap"),
-                      DeprecationWarning)
+        warnings.warn(
+            (
+                "This interface has been deprecated since 0.10.0,"
+                " please use nipype.algorithms.metrics.FuzzyOverlap"
+            ),
+            DeprecationWarning,
+        )


 class TSNR(confounds.TSNR):
@@ -1576,6 +1668,10 @@ class TSNR(confounds.TSNR):

     def __init__(self, **inputs):
         super(confounds.TSNR, self).__init__(**inputs)
-        warnings.warn(("This interface has been moved since 0.12.0,"
-                       " please use nipype.algorithms.confounds.TSNR"),
-                      UserWarning)
+        warnings.warn(
+            (
+                "This interface has been moved since 0.12.0,"
+                " please use nipype.algorithms.confounds.TSNR"
+            ),
+            UserWarning,
+        )
diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py
index d6af1c0f17..ec1c123c71 100644
--- a/nipype/algorithms/modelgen.py
+++ b/nipype/algorithms/modelgen.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """
@@ -6,75 +5,61 @@
 subject analysis of task-based fMRI experiments. In particular it also
 includes algorithms for generating regressors for sparse and sparse-clustered
 acquisition experiments.
-
-These functions include:
-
-  * SpecifyModel: allows specification of sparse and non-sparse models
 """
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)
-from builtins import range, str, bytes, int
-
 from copy import deepcopy
+import csv
+import math
 import os

 from nibabel import load
 import numpy as np

-from ..utils import NUMPY_MMAP
-from ..interfaces.base import (BaseInterface, TraitedSpec, InputMultiPath,
-                               traits, File, Bunch, BaseInterfaceInputSpec,
-                               isdefined)
+from ..interfaces.base import (
+    BaseInterface,
+    TraitedSpec,
+    InputMultiPath,
+    traits,
+    File,
+    Bunch,
+    BaseInterfaceInputSpec,
+    isdefined,
+)
 from ..utils.filemanip import ensure_list
 from ..utils.misc import normalize_mc_params
 from .. import config, logging

-iflogger = logging.getLogger('nipype.interface')
-
-
-def gcd(a, b):
-    """Returns the greatest common divisor of two integers
-
-    uses Euclid's algorithm
-
-    >>> gcd(4, 5)
-    1
-    >>> gcd(4, 8)
-    4
-    >>> gcd(22, 55)
-    11
-
-    """
-    while b > 0:
-        a, b = b, a % b
-    return a
+iflogger = logging.getLogger("nipype.interface")
np.errstate(divide="ignore"): # Known division-by-zero + hrf = ( + _spm_Gpdf(u, p[0] / p[2], dt / p[2]) + - _spm_Gpdf(u, p[1] / p[3], dt / p[3]) / p[4] + ) idx = np.arange(0, int((p[6] / RT) + 1)) * fMRI_T hrf = hrf[idx] hrf = hrf / np.sum(hrf) @@ -122,11 +112,11 @@ def orth(x_in, y_in): def scale_timings(timelist, input_units, output_units, time_repetition): - """Scales timings given input and output units (scans/secs) + """ + Scale timings given input and output units (scans/secs). Parameters ---------- - timelist: list of times to scale input_units: 'secs' or 'scans' output_units: Ibid. @@ -134,29 +124,83 @@ def scale_timings(timelist, input_units, output_units, time_repetition): """ if input_units == output_units: - _scalefactor = 1. + _scalefactor = 1.0 - if (input_units == 'scans') and (output_units == 'secs'): + if (input_units == "scans") and (output_units == "secs"): _scalefactor = time_repetition - if (input_units == 'secs') and (output_units == 'scans'): - _scalefactor = 1. / time_repetition - timelist = [np.max([0., _scalefactor * t]) for t in timelist] + if (input_units == "secs") and (output_units == "scans"): + _scalefactor = 1.0 / time_repetition + timelist = [np.max([0.0, _scalefactor * t]) for t in timelist] return timelist -def gen_info(run_event_files): - """Generate subject_info structure from a list of event files +def bids_gen_info( + bids_event_files, condition_column="", amplitude_column=None, time_repetition=False +): + """ + Generate a subject_info structure from a list of BIDS .tsv event files. + + Parameters + ---------- + bids_event_files : list of str + Filenames of BIDS .tsv event files containing columns including: + 'onset', 'duration', and 'trial_type' or the `condition_column` value. + condition_column : str + Column of files in `bids_event_files` based on the values of which + events will be sorted into different regressors + amplitude_column : str + Column of files in `bids_event_files` based on the values of which + to apply amplitudes to events. If unspecified, all events will be + represented with an amplitude of 1. 
+ + Returns + ------- + subject_info: list of Bunch + """ info = [] + for bids_event_file in bids_event_files: + with open(bids_event_file) as f: + f_events = csv.DictReader(f, skipinitialspace=True, delimiter="\t") + events = list(f_events) + if not condition_column: + condition_column = "_trial_type" + for i in events: + i.update({condition_column: "ev0"}) + conditions = sorted({i[condition_column] for i in events}) + runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[]) + for condition in conditions: + selected_events = [i for i in events if i[condition_column] == condition] + onsets = [float(i["onset"]) for i in selected_events] + durations = [float(i["duration"]) for i in selected_events] + if time_repetition: + decimals = math.ceil(-math.log10(time_repetition)) + onsets = [np.round(i, decimals) for i in onsets] + durations = [np.round(i, decimals) for i in durations] + runinfo.conditions.append(condition) + runinfo.onsets.append(onsets) + runinfo.durations.append(durations) + try: + amplitudes = [float(i[amplitude_column]) for i in selected_events] + runinfo.amplitudes.append(amplitudes) + except KeyError: + runinfo.amplitudes.append([1] * len(onsets)) + info.append(runinfo) + return info + + +def gen_info(run_event_files): + """Generate subject_info structure from a list of event files.""" + info = [] for i, event_files in enumerate(run_event_files): runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[]) for event_file in event_files: _, name = os.path.split(event_file) - if '.run' in name: - name, _ = name.split('.run%03d' % (i + 1)) - elif '.txt' in name: - name, _ = name.split('.txt') + if ".run" in name: + name, _ = name.split(".run%03d" % (i + 1)) + elif ".txt" in name: + name, _ = name.split(".txt") runinfo.conditions.append(name) event_info = np.atleast_2d(np.loadtxt(event_file)) @@ -169,7 +213,7 @@ def gen_info(run_event_files): if event_info.shape[1] > 2: runinfo.amplitudes.append(event_info[:, 2].tolist()) else: - delattr(runinfo, 'amplitudes') + delattr(runinfo, "amplitudes") info.append(runinfo) return info @@ -178,23 +222,42 @@ class SpecifyModelInputSpec(BaseInterfaceInputSpec): subject_info = InputMultiPath( Bunch, mandatory=True, - xor=['subject_info', 'event_files'], - desc='Bunch or List(Bunch) subject-specific ' - 'condition information. see ' - ':ref:`SpecifyModel` or ' - 'SpecifyModel.__doc__ for details') + xor=["subject_info", "event_files", "bids_event_file"], + desc="Bunch or List(Bunch) subject-specific " + "condition information. 
see " + ":ref:`nipype.algorithms.modelgen.SpecifyModel` or for details", + ) event_files = InputMultiPath( traits.List(File(exists=True)), mandatory=True, - xor=['subject_info', 'event_files'], - desc='List of event description files 1, 2 or 3 ' - 'column format corresponding to onsets, ' - 'durations and amplitudes') + xor=["subject_info", "event_files", "bids_event_file"], + desc="List of event description files 1, 2 or 3 " + "column format corresponding to onsets, " + "durations and amplitudes", + ) + bids_event_file = InputMultiPath( + File(exists=True), + mandatory=True, + xor=["subject_info", "event_files", "bids_event_file"], + desc="TSV event file containing common BIDS fields: `onset`," + "`duration`, and categorization and amplitude columns", + ) + bids_condition_column = traits.Str( + default_value="trial_type", + usedefault=True, + desc="Column of the file passed to ``bids_event_file`` to the " + "unique values of which events will be assigned" + "to regressors", + ) + bids_amplitude_column = traits.Str( + desc="Column of the file passed to ``bids_event_file`` " + "according to which to assign amplitudes to events" + ) realignment_parameters = InputMultiPath( File(exists=True), - desc='Realignment parameters returned ' - 'by motion correction algorithm', - copyfile=False) + desc="Realignment parameters returned by motion correction algorithm", + copyfile=False, + ) parameter_source = traits.Enum( "SPM", "FSL", @@ -202,72 +265,78 @@ class SpecifyModelInputSpec(BaseInterfaceInputSpec): "FSFAST", "NIPY", usedefault=True, - desc="Source of motion parameters") + desc="Source of motion parameters", + ) outlier_files = InputMultiPath( File(exists=True), - desc='Files containing scan outlier indices ' - 'that should be tossed', - copyfile=False) + desc="Files containing scan outlier indices that should be tossed", + copyfile=False, + ) functional_runs = InputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), mandatory=True, - desc='Data files for model. List of 4D ' - 'files or list of list of 3D ' - 'files per session', - copyfile=False) + desc="Data files for model. List of 4D " + "files or list of list of 3D " + "files per session", + copyfile=False, + ) input_units = traits.Enum( - 'secs', - 'scans', + "secs", + "scans", mandatory=True, - desc='Units of event onsets and durations (secs ' - 'or scans). Output units are always in secs') + desc="Units of event onsets and durations (secs " + "or scans). Output units are always in secs", + ) high_pass_filter_cutoff = traits.Float( - mandatory=True, desc='High-pass filter cutoff in secs') + mandatory=True, desc="High-pass filter cutoff in secs" + ) time_repetition = traits.Float( mandatory=True, - desc='Time between the start of one volume ' - 'to the start of the next image volume.') + desc="Time between the start of one volume " + "to the start of the next image volume.", + ) # Not implemented yet # polynomial_order = traits.Range(0, low=0, # desc ='Number of polynomial functions to model high pass filter.') class SpecifyModelOutputSpec(TraitedSpec): - session_info = traits.Any(desc='Session info for level1designs') + session_info = traits.Any(desc="Session info for level1designs") class SpecifyModel(BaseInterface): - """Makes a model specification compatible with spm/fsl designers. + """ + Makes a model specification compatible with spm/fsl designers. The subject_info field should contain paradigm information in the form of a Bunch or a list of Bunch. 
The Bunch should contain the following information:: - [Mandatory] - - conditions : list of names - - onsets : lists of onsets corresponding to each condition - - durations : lists of durations corresponding to each condition. Should be - left to a single 0 if all events are being modelled as impulses. - - [Optional] - - regressor_names : list of str - list of names corresponding to each column. Should be None if - automatically assigned. - - regressors : list of lists - values for each regressor - must correspond to the number of - volumes in the functional run - - amplitudes : lists of amplitudes for each event. This will be ignored by - SPM's Level1Design. - - The following two (tmod, pmod) will be ignored by any Level1Design class - other than SPM: - - - tmod : lists of conditions that should be temporally modulated. Should - default to None if not being used. - - pmod : list of Bunch corresponding to conditions - - name : name of parametric modulator - - param : values of the modulator - - poly : degree of modulation + [Mandatory] + conditions : list of names + onsets : lists of onsets corresponding to each condition + durations : lists of durations corresponding to each condition. Should be + left to a single 0 if all events are being modelled as impulses. + + [Optional] + regressor_names : list of str + list of names corresponding to each column. Should be None if + automatically assigned. + regressors : list of lists + values for each regressor - must correspond to the number of + volumes in the functional run + amplitudes : lists of amplitudes for each event. This will be ignored by + SPM's Level1Design. + + The following two (tmod, pmod) will be ignored by any Level1Design class + other than SPM: + + tmod : lists of conditions that should be temporally modulated. Should + default to None if not being used. + pmod : list of Bunch corresponding to conditions + - name : name of parametric modulator + - param : values of the modulator + - poly : degree of modulation Alternatively, you can provide information through event files. 
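As a quick orientation for the BIDS route mentioned above, here is a minimal sketch exercising the new bids_gen_info() helper together with a hand-written events.tsv. The file name, column values, and TR below are hypothetical, and the snippet is illustrative rather than part of the patch:

import os
import tempfile

from nipype.algorithms.modelgen import bids_gen_info

# Minimal BIDS-style events table: onset/duration are required by the
# parser above; trial_type groups events into conditions; amplitude is
# optional and falls back to 1 when the column is absent.
events = (
    "onset\tduration\ttrial_type\tamplitude\n"
    "0.0\t2.0\tfaces\t1.0\n"
    "10.0\t2.0\thouses\t0.5\n"
    "20.0\t2.0\tfaces\t1.0\n"
)
with tempfile.TemporaryDirectory() as tmp:
    tsv = os.path.join(tmp, "sub-01_task-demo_events.tsv")  # hypothetical name
    with open(tsv, "w") as fobj:
        fobj.write(events)
    # One Bunch per run; conditions are the sorted unique trial_type values.
    info = bids_gen_info(
        [tsv],
        condition_column="trial_type",
        amplitude_column="amplitude",
        time_repetition=2.0,
    )
    print(info[0].conditions)   # ['faces', 'houses']
    print(info[0].onsets)       # [[0.0, 20.0], [10.0]] (numpy floats)
    print(info[0].amplitudes)   # [[1.0, 1.0], [0.5]]

The resulting list of Bunch objects can be assigned directly to SpecifyModel's subject_info input, or produced implicitly by setting the bids_event_file input instead.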
@@ -278,7 +347,6 @@ class SpecifyModel(BaseInterface): Examples -------- - >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifyModel() @@ -290,8 +358,7 @@ class SpecifyModel(BaseInterface): >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]]) >>> s.inputs.subject_info = [evs_run2, evs_run3] - Using pmod: - + >>> # Using pmod >>> evs_run2 = Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 50], [100, 180]], \ durations=[[0], [0]], pmod=[Bunch(name=['amp'], poly=[2], param=[[1, 2]]), \ None]) @@ -301,127 +368,128 @@ class SpecifyModel(BaseInterface): >>> s.inputs.subject_info = [evs_run2, evs_run3] """ + input_spec = SpecifyModelInputSpec output_spec = SpecifyModelOutputSpec - def _generate_standard_design(self, - infolist, - functional_runs=None, - realignment_parameters=None, - outliers=None): - """ Generates a standard design matrix paradigm given information about - each run - """ + def _generate_standard_design( + self, infolist, functional_runs=None, realignment_parameters=None, outliers=None + ): + """Generate a standard design matrix paradigm given information about each run.""" sessinfo = [] - output_units = 'secs' - if 'output_units' in self.inputs.traits(): + output_units = "secs" + if "output_units" in self.inputs.traits(): output_units = self.inputs.output_units for i, info in enumerate(infolist): sessinfo.insert(i, dict(cond=[])) if isdefined(self.inputs.high_pass_filter_cutoff): - sessinfo[i]['hpf'] = \ - np.float(self.inputs.high_pass_filter_cutoff) + sessinfo[i]["hpf"] = float(self.inputs.high_pass_filter_cutoff) - if hasattr(info, 'conditions') and info.conditions is not None: + if hasattr(info, "conditions") and info.conditions is not None: for cid, cond in enumerate(info.conditions): - sessinfo[i]['cond'].insert(cid, dict()) - sessinfo[i]['cond'][cid]['name'] = info.conditions[cid] + sessinfo[i]["cond"].insert(cid, dict()) + sessinfo[i]["cond"][cid]["name"] = info.conditions[cid] scaled_onset = scale_timings( - info.onsets[cid], self.inputs.input_units, - output_units, self.inputs.time_repetition) - sessinfo[i]['cond'][cid]['onset'] = scaled_onset + info.onsets[cid], + self.inputs.input_units, + output_units, + self.inputs.time_repetition, + ) + sessinfo[i]["cond"][cid]["onset"] = scaled_onset scaled_duration = scale_timings( - info.durations[cid], self.inputs.input_units, - output_units, self.inputs.time_repetition) - sessinfo[i]['cond'][cid]['duration'] = scaled_duration - if hasattr(info, 'amplitudes') and info.amplitudes: - sessinfo[i]['cond'][cid]['amplitudes'] = \ - info.amplitudes[cid] - - if hasattr(info, 'tmod') and info.tmod and \ - len(info.tmod) > cid: - sessinfo[i]['cond'][cid]['tmod'] = info.tmod[cid] - - if hasattr(info, 'pmod') and info.pmod and \ - len(info.pmod) > cid: + info.durations[cid], + self.inputs.input_units, + output_units, + self.inputs.time_repetition, + ) + sessinfo[i]["cond"][cid]["duration"] = scaled_duration + if hasattr(info, "amplitudes") and info.amplitudes: + sessinfo[i]["cond"][cid]["amplitudes"] = info.amplitudes[cid] + + if hasattr(info, "tmod") and info.tmod and len(info.tmod) > cid: + sessinfo[i]["cond"][cid]["tmod"] = info.tmod[cid] + + if hasattr(info, "pmod") and info.pmod and len(info.pmod) > cid: if info.pmod[cid]: - sessinfo[i]['cond'][cid]['pmod'] = [] + sessinfo[i]["cond"][cid]["pmod"] = [] for j, name in enumerate(info.pmod[cid].name): - sessinfo[i]['cond'][cid]['pmod'].insert(j, {}) - 
sessinfo[i]['cond'][cid]['pmod'][j]['name'] = \ - name - sessinfo[i]['cond'][cid]['pmod'][j]['poly'] = \ - info.pmod[cid].poly[j] - sessinfo[i]['cond'][cid]['pmod'][j]['param'] = \ + sessinfo[i]["cond"][cid]["pmod"].insert(j, {}) + sessinfo[i]["cond"][cid]["pmod"][j]["name"] = name + sessinfo[i]["cond"][cid]["pmod"][j]["poly"] = info.pmod[ + cid + ].poly[j] + sessinfo[i]["cond"][cid]["pmod"][j]["param"] = ( info.pmod[cid].param[j] + ) - sessinfo[i]['regress'] = [] - if hasattr(info, 'regressors') and info.regressors is not None: + sessinfo[i]["regress"] = [] + if hasattr(info, "regressors") and info.regressors is not None: for j, r in enumerate(info.regressors): - sessinfo[i]['regress'].insert(j, dict(name='', val=[])) - if hasattr(info, 'regressor_names') and \ - info.regressor_names is not None: - sessinfo[i]['regress'][j]['name'] = \ - info.regressor_names[j] + sessinfo[i]["regress"].insert(j, dict(name="", val=[])) + if ( + hasattr(info, "regressor_names") + and info.regressor_names is not None + ): + sessinfo[i]["regress"][j]["name"] = info.regressor_names[j] else: - sessinfo[i]['regress'][j]['name'] = 'UR%d' % (j + 1) - sessinfo[i]['regress'][j]['val'] = info.regressors[j] - sessinfo[i]['scans'] = functional_runs[i] + sessinfo[i]["regress"][j]["name"] = "UR%d" % (j + 1) + sessinfo[i]["regress"][j]["val"] = info.regressors[j] + sessinfo[i]["scans"] = functional_runs[i] if realignment_parameters is not None: for i, rp in enumerate(realignment_parameters): mc = realignment_parameters[i] for col in range(mc.shape[1]): - colidx = len(sessinfo[i]['regress']) - sessinfo[i]['regress'].insert(colidx, dict( - name='', val=[])) - sessinfo[i]['regress'][colidx]['name'] = 'Realign%d' % ( - col + 1) - sessinfo[i]['regress'][colidx]['val'] = mc[:, col].tolist() + colidx = len(sessinfo[i]["regress"]) + sessinfo[i]["regress"].insert(colidx, dict(name="", val=[])) + sessinfo[i]["regress"][colidx]["name"] = "Realign%d" % (col + 1) + sessinfo[i]["regress"][colidx]["val"] = mc[:, col].tolist() if outliers is not None: for i, out in enumerate(outliers): numscans = 0 - for f in ensure_list(sessinfo[i]['scans']): - shape = load(f, mmap=NUMPY_MMAP).shape + for f in ensure_list(sessinfo[i]["scans"]): + shape = load(f).shape if len(shape) == 3 or shape[3] == 1: - iflogger.warning('You are using 3D instead of 4D ' - 'files. Are you sure this was ' - 'intended?') + iflogger.warning( + "You are using 3D instead of 4D " + "files. Are you sure this was " + "intended?" 
+ ) numscans += 1 else: numscans += shape[3] for j, scanno in enumerate(out): - colidx = len(sessinfo[i]['regress']) - sessinfo[i]['regress'].insert(colidx, dict( - name='', val=[])) - sessinfo[i]['regress'][colidx]['name'] = 'Outlier%d' % ( - j + 1) - sessinfo[i]['regress'][colidx]['val'] = \ - np.zeros((1, numscans))[0].tolist() - sessinfo[i]['regress'][colidx]['val'][int(scanno)] = 1 + colidx = len(sessinfo[i]["regress"]) + sessinfo[i]["regress"].insert(colidx, dict(name="", val=[])) + sessinfo[i]["regress"][colidx]["name"] = "Outlier%d" % (j + 1) + sessinfo[i]["regress"][colidx]["val"] = np.zeros((1, numscans))[ + 0 + ].tolist() + sessinfo[i]["regress"][colidx]["val"][int(scanno)] = 1 return sessinfo def _generate_design(self, infolist=None): - """Generate design specification for a typical fmri paradigm - """ + """Generate design specification for a typical fmri paradigm""" realignment_parameters = [] if isdefined(self.inputs.realignment_parameters): - for parfile in self.inputs.realignment_parameters: - realignment_parameters.append( - np.apply_along_axis( - func1d=normalize_mc_params, - axis=1, - arr=np.loadtxt(parfile), - source=self.inputs.parameter_source)) + realignment_parameters.extend( + np.apply_along_axis( + func1d=normalize_mc_params, + axis=1, + arr=np.loadtxt(parfile), + source=self.inputs.parameter_source, + ) + for parfile in self.inputs.realignment_parameters + ) outliers = [] if isdefined(self.inputs.outlier_files): for filename in self.inputs.outlier_files: try: outindices = np.loadtxt(filename, dtype=int) - except IOError: + except OSError: outliers.append([]) else: if outindices.size == 1: @@ -432,26 +500,33 @@ def _generate_design(self, infolist=None): if infolist is None: if isdefined(self.inputs.subject_info): infolist = self.inputs.subject_info - else: + elif isdefined(self.inputs.event_files): infolist = gen_info(self.inputs.event_files) + elif isdefined(self.inputs.bids_event_file): + infolist = bids_gen_info( + self.inputs.bids_event_file, + self.inputs.bids_condition_column, + self.inputs.bids_amplitude_column, + self.inputs.time_repetition, + ) self._sessinfo = self._generate_standard_design( infolist, functional_runs=self.inputs.functional_runs, realignment_parameters=realignment_parameters, - outliers=outliers) + outliers=outliers, + ) def _run_interface(self, runtime): - """ - """ + """ """ self._sessioninfo = None self._generate_design() return runtime def _list_outputs(self): outputs = self._outputs().get() - if not hasattr(self, '_sessinfo'): + if not hasattr(self, "_sessinfo"): self._generate_design() - outputs['session_info'] = self._sessinfo + outputs["session_info"] = self._sessinfo return outputs @@ -460,26 +535,26 @@ class SpecifySPMModelInputSpec(SpecifyModelInputSpec): concatenate_runs = traits.Bool( False, usedefault=True, - desc='Concatenate all runs to look like a ' - 'single session.') + desc="Concatenate all runs to look like a single session.", + ) output_units = traits.Enum( - 'secs', - 'scans', + "secs", + "scans", usedefault=True, - desc='Units of design event onsets and durations ' - '(secs or scans)') + desc="Units of design event onsets and durations (secs or scans)", + ) class SpecifySPMModel(SpecifyModel): - """Adds SPM specific options to SpecifyModel + """Add SPM specific options to SpecifyModel + + Adds: - adds: - concatenate_runs - output_units Examples -------- - >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifySPMModel() @@ -503,10 +578,10 @@ def 
_concatenate_info(self, infolist): if isinstance(f, list): numscans = len(f) elif isinstance(f, (str, bytes)): - img = load(f, mmap=NUMPY_MMAP) + img = load(f) numscans = img.shape[3] else: - raise Exception('Functional input not specified correctly') + raise Exception("Functional input not specified correctly") nscans.insert(i, numscans) # now combine all fields into 1 @@ -515,67 +590,68 @@ def _concatenate_info(self, infolist): infoout = infolist[0] for j, val in enumerate(infolist[0].durations): if len(infolist[0].onsets[j]) > 1 and len(val) == 1: - infoout.durations[j] = ( - infolist[0].durations[j] * len(infolist[0].onsets[j])) + infoout.durations[j] = infolist[0].durations[j] * len( + infolist[0].onsets[j] + ) for i, info in enumerate(infolist[1:]): # info.[conditions, tmod] remain the same if info.onsets: for j, val in enumerate(info.onsets): - if self.inputs.input_units == 'secs': - onsets = np.array(info.onsets[j]) +\ - self.inputs.time_repetition * \ - sum(nscans[0:(i + 1)]) + if self.inputs.input_units == "secs": + onsets = np.array( + info.onsets[j] + ) + self.inputs.time_repetition * sum(nscans[0 : (i + 1)]) infoout.onsets[j].extend(onsets.tolist()) else: - onsets = np.array(info.onsets[j]) + \ - sum(nscans[0:(i + 1)]) + onsets = np.array(info.onsets[j]) + sum(nscans[0 : (i + 1)]) infoout.onsets[j].extend(onsets.tolist()) for j, val in enumerate(info.durations): if len(info.onsets[j]) > 1 and len(val) == 1: infoout.durations[j].extend( - info.durations[j] * len(info.onsets[j])) + info.durations[j] * len(info.onsets[j]) + ) elif len(info.onsets[j]) == len(val): infoout.durations[j].extend(info.durations[j]) else: - raise ValueError('Mismatch in number of onsets and \ - durations for run {0}, condition \ - {1}'.format(i + 2, j + 1)) + raise ValueError( + f"Mismatch in number of onsets and durations for run {i + 2}, " + f"condition {j + 1}" + ) - if hasattr(info, 'amplitudes') and info.amplitudes: + if hasattr(info, "amplitudes") and info.amplitudes: for j, val in enumerate(info.amplitudes): infoout.amplitudes[j].extend(info.amplitudes[j]) - if hasattr(info, 'pmod') and info.pmod: + if hasattr(info, "pmod") and info.pmod: for j, val in enumerate(info.pmod): if val: for key, data in enumerate(val.param): infoout.pmod[j].param[key].extend(data) - if hasattr(info, 'regressors') and info.regressors: + if hasattr(info, "regressors") and info.regressors: # assumes same ordering of regressors across different # runs and the same names for the regressors for j, v in enumerate(info.regressors): infoout.regressors[j].extend(info.regressors[j]) # insert session regressors - if not hasattr(infoout, 'regressors') or not infoout.regressors: + if not hasattr(infoout, "regressors") or not infoout.regressors: infoout.regressors = [] infoout.regressor_names = [] onelist = np.zeros((1, sum(nscans))) - onelist[0, sum(nscans[0:i]):sum(nscans[0:(i + 1)])] = 1 - infoout.regressors.insert( - len(infoout.regressors), - onelist.tolist()[0]) - # insert session regressor name - infoout.regressor_names.extend(['run' + str(i + 1)]) + onelist[0, sum(nscans[0:i]) : sum(nscans[0 : (i + 1)])] = 1 + infoout.regressors.insert(len(infoout.regressors), onelist.tolist()[0]) + infoout.regressor_names.append(f'run{i + 1}') return [infoout], nscans def _generate_design(self, infolist=None): - if not isdefined(self.inputs.concatenate_runs) or \ - not self.inputs.concatenate_runs: - super(SpecifySPMModel, self)._generate_design(infolist=infolist) + if ( + not isdefined(self.inputs.concatenate_runs) + or not 
self.inputs.concatenate_runs + ): + super()._generate_design(infolist=infolist) return if isdefined(self.inputs.subject_info): @@ -592,25 +668,28 @@ def _generate_design(self, infolist=None): func1d=normalize_mc_params, axis=1, arr=np.loadtxt(parfile), - source=self.inputs.parameter_source) + source=self.inputs.parameter_source, + ) if not realignment_parameters: realignment_parameters.insert(0, mc) else: - realignment_parameters[0] = \ - np.concatenate((realignment_parameters[0], mc)) + realignment_parameters[0] = np.concatenate( + (realignment_parameters[0], mc) + ) outliers = [] if isdefined(self.inputs.outlier_files): outliers = [[]] for i, filename in enumerate(self.inputs.outlier_files): try: out = np.loadtxt(filename) - except IOError: - iflogger.warning('Error reading outliers file %s', filename) + except OSError: + iflogger.warning("Error reading outliers file %s", filename) out = np.array([]) if out.size > 0: - iflogger.debug('fname=%s, out=%s, nscans=%d', filename, - out, sum(nscans[0:i])) + iflogger.debug( + "fname=%s, out=%s, nscans=%d", filename, out, sum(nscans[0:i]) + ) sumscans = out.astype(int) + sum(nscans[0:i]) if out.size == 1: @@ -622,53 +701,46 @@ def _generate_design(self, infolist=None): concatlist, functional_runs=functional_runs, realignment_parameters=realignment_parameters, - outliers=outliers) + outliers=outliers, + ) class SpecifySparseModelInputSpec(SpecifyModelInputSpec): time_acquisition = traits.Float( - 0, - mandatory=True, - desc='Time in seconds to acquire a single ' - 'image volume') + 0, mandatory=True, desc="Time in seconds to acquire a single image volume" + ) volumes_in_cluster = traits.Range( - 1, usedefault=True, desc='Number of scan volumes in a cluster') - model_hrf = traits.Bool(desc='Model sparse events with hrf') + 1, usedefault=True, desc="Number of scan volumes in a cluster" + ) + model_hrf = traits.Bool(desc="Model sparse events with hrf") stimuli_as_impulses = traits.Bool( - True, desc='Treat each stimulus to be impulse-like', usedefault=True) + True, desc="Treat each stimulus to be impulse-like", usedefault=True + ) use_temporal_deriv = traits.Bool( - requires=['model_hrf'], - desc='Create a temporal derivative in ' - 'addition to regular regressor') + requires=["model_hrf"], + desc="Create a temporal derivative in addition to regular regressor", + ) scale_regressors = traits.Bool( - True, desc='Scale regressors by the peak', usedefault=True) + True, desc="Scale regressors by the peak", usedefault=True + ) scan_onset = traits.Float( - 0.0, - desc='Start of scanning relative to onset of run in secs', - usedefault=True) + 0.0, desc="Start of scanning relative to onset of run in secs", usedefault=True + ) save_plot = traits.Bool( - desc=('Save plot of sparse design calculation ' - '(requires matplotlib)')) + desc=("Save plot of sparse design calculation (requires matplotlib)") + ) class SpecifySparseModelOutputSpec(SpecifyModelOutputSpec): - sparse_png_file = File(desc='PNG file showing sparse design') - sparse_svg_file = File(desc='SVG file showing sparse design') + sparse_png_file = File(desc="PNG file showing sparse design") + sparse_svg_file = File(desc="SVG file showing sparse design") class SpecifySparseModel(SpecifyModel): - """ Specify a sparse model that is compatible with spm/fsl designers - - References - ---------- - - .. [1] Perrachione TK and Ghosh SS (2013) Optimized design and analysis of - sparse-sampling fMRI experiments. Front. Neurosci. 
7:55 - http://journal.frontiersin.org/Journal/10.3389/fnins.2013.00055/abstract + """Specify a sparse model that is compatible with SPM/FSL designers [1]_. Examples -------- - >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifySparseModel() @@ -678,29 +750,36 @@ class SpecifySparseModel(SpecifyModel): >>> s.inputs.time_acquisition = 2 >>> s.inputs.high_pass_filter_cutoff = 128. >>> s.inputs.model_hrf = True - >>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], \ -durations=[[1]]) - >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], \ -durations=[[1]]) - >>> s.inputs.subject_info = [evs_run2, evs_run3] + >>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], + ... durations=[[1]]) + >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], + ... durations=[[1]]) + >>> s.inputs.subject_info = [evs_run2, evs_run3] # doctest: +SKIP + + References + ---------- + .. [1] Perrachione TK and Ghosh SS (2013) Optimized design and analysis of + sparse-sampling fMRI experiments. Front. Neurosci. 7:55 + http://journal.frontiersin.org/Journal/10.3389/fnins.2013.00055/abstract """ + input_spec = SpecifySparseModelInputSpec output_spec = SpecifySparseModelOutputSpec def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): - """Generates a regressor for a sparse/clustered-sparse acquisition - """ + """Generates a regressor for a sparse/clustered-sparse acquisition""" bplot = False if isdefined(self.inputs.save_plot) and self.inputs.save_plot: bplot = True import matplotlib - matplotlib.use(config.get('execution', 'matplotlib_backend')) + + matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt - TR = np.round(self.inputs.time_repetition * 1000) # in ms + TR = int(np.round(self.inputs.time_repetition * 1000)) # in ms if self.inputs.time_acquisition: - TA = np.round(self.inputs.time_acquisition * 1000) # in ms + TA = int(np.round(self.inputs.time_acquisition * 1000)) # in ms else: TA = TR # in ms nvol = self.inputs.volumes_in_cluster @@ -710,20 +789,20 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): dt = TA / 10.0 durations = np.round(np.array(i_durations) * 1000) if len(durations) == 1: - durations = durations * np.ones((len(i_onsets))) + durations = durations * np.ones(len(i_onsets)) onsets = np.round(np.array(i_onsets) * 1000) - dttemp = gcd(TA, gcd(SILENCE, TR)) + dttemp = math.gcd(TA, math.gcd(SILENCE, TR)) if dt < dttemp: if dttemp % dt != 0: - dt = float(gcd(dttemp, dt)) + dt = float(math.gcd(dttemp, int(dt))) if dt < 1: - raise Exception('Time multiple less than 1 ms') - iflogger.info('Setting dt = %d ms\n', dt) + raise Exception("Time multiple less than 1 ms") + iflogger.info("Setting dt = %d ms\n", dt) npts = int(np.ceil(total_time / dt)) times = np.arange(0, total_time, dt) * 1e-3 - timeline = np.zeros((npts)) - timeline2 = np.zeros((npts)) + timeline = np.zeros(npts) + timeline2 = np.zeros(npts) if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf: hrf = spm_hrf(dt * 1e-3) reg_scale = 1.0 @@ -733,14 +812,15 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): boxcar[int(1.0 * 1e3 / dt)] = 1.0 reg_scale = float(TA / dt) else: - boxcar[int(1.0 * 1e3 / dt):int(2.0 * 1e3 / dt)] = 1.0 + boxcar[int(1.0 * 1e3 / dt) : int(2.0 * 1e3 / dt)] = 1.0 if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf: response = np.convolve(boxcar, hrf) reg_scale = 1.0 / response.max() - 
iflogger.info('response sum: %.4f max: %.4f', response.sum(),
-                         response.max())
-            iflogger.info('reg_scale: %.4f', reg_scale)
+            iflogger.info(
+                "response sum: %.4f max: %.4f", response.sum(), response.max()
+            )
+            iflogger.info("reg_scale: %.4f", reg_scale)

        for i, t in enumerate(onsets):
            idx = int(np.round(t / dt))
@@ -759,8 +839,8 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans):
            if not self.inputs.stimuli_as_impulses:
                if durations[i] == 0:
                    durations[i] = TA * nvol
-                stimdur = np.ones((int(durations[i] / dt)))
-                timeline2 = np.convolve(timeline2, stimdur)[0:len(timeline2)]
+                stimdur = np.ones(int(durations[i] / dt))
+                timeline2 = np.convolve(timeline2, stimdur)[0 : len(timeline2)]
                timeline += timeline2
                timeline2[:] = 0
@@ -769,20 +849,24 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans):
            plt.plot(times, timeline)
        if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf:
-            timeline = np.convolve(timeline, hrf)[0:len(timeline)]
-            if isdefined(self.inputs.use_temporal_deriv) and \
-                    self.inputs.use_temporal_deriv:
+            timeline = np.convolve(timeline, hrf)[0 : len(timeline)]
+            if (
+                isdefined(self.inputs.use_temporal_deriv)
+                and self.inputs.use_temporal_deriv
+            ):
                # create temporal deriv
                timederiv = np.concatenate(([0], np.diff(timeline)))

        if bplot:
            plt.subplot(4, 1, 3)
            plt.plot(times, timeline)
-            if isdefined(self.inputs.use_temporal_deriv) and \
-                    self.inputs.use_temporal_deriv:
+            if (
+                isdefined(self.inputs.use_temporal_deriv)
+                and self.inputs.use_temporal_deriv
+            ):
                plt.plot(times, timederiv)
        # sample timeline
-        timeline2 = np.zeros((npts))
+        timeline2 = np.zeros(npts)
        reg = []
        regderiv = []
        for i, trial in enumerate(np.arange(nscans) / nvol):
@@ -790,13 +874,14 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans):
            scanidx = scanstart + np.arange(int(TA / dt))
            timeline2[scanidx] = np.max(timeline)
            reg.insert(i, np.mean(timeline[scanidx]) * reg_scale)
-            if isdefined(self.inputs.use_temporal_deriv) and \
-                    self.inputs.use_temporal_deriv:
+            if (
+                isdefined(self.inputs.use_temporal_deriv)
+                and self.inputs.use_temporal_deriv
+            ):
                regderiv.insert(i, np.mean(timederiv[scanidx]) * reg_scale)

-        if isdefined(self.inputs.use_temporal_deriv) and \
-                self.inputs.use_temporal_deriv:
-            iflogger.info('orthoganlizing derivative w.r.t. main regressor')
+        if isdefined(self.inputs.use_temporal_deriv) and self.inputs.use_temporal_deriv:
+            iflogger.info("orthogonalizing derivative w.r.t. 
main regressor") regderiv = orth(reg, regderiv) if bplot: @@ -804,8 +889,8 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): plt.plot(times, timeline2) plt.subplot(4, 1, 4) plt.bar(np.arange(len(reg)), reg, width=0.5) - plt.savefig('sparse.png') - plt.savefig('sparse.svg') + plt.savefig("sparse.png") + plt.savefig("sparse.svg") if regderiv: return [reg, regderiv] @@ -813,28 +898,36 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): return reg def _cond_to_regress(self, info, nscans): - """Converts condition information to full regressors - """ + """Converts condition information to full regressors""" reg = [] regnames = [] for i, cond in enumerate(info.conditions): - if hasattr(info, 'amplitudes') and info.amplitudes: + if hasattr(info, "amplitudes") and info.amplitudes: amplitudes = info.amplitudes[i] else: amplitudes = None regnames.insert(len(regnames), cond) - scaled_onsets = scale_timings(info.onsets[i], - self.inputs.input_units, 'secs', - self.inputs.time_repetition) - scaled_durations = scale_timings(info.durations[i], - self.inputs.input_units, 'secs', - self.inputs.time_repetition) - regressor = self._gen_regress(scaled_onsets, scaled_durations, - amplitudes, nscans) - if isdefined(self.inputs.use_temporal_deriv) and \ - self.inputs.use_temporal_deriv: + scaled_onsets = scale_timings( + info.onsets[i], + self.inputs.input_units, + "secs", + self.inputs.time_repetition, + ) + scaled_durations = scale_timings( + info.durations[i], + self.inputs.input_units, + "secs", + self.inputs.time_repetition, + ) + regressor = self._gen_regress( + scaled_onsets, scaled_durations, amplitudes, nscans + ) + if ( + isdefined(self.inputs.use_temporal_deriv) + and self.inputs.use_temporal_deriv + ): reg.insert(len(reg), regressor[0]) - regnames.insert(len(regnames), cond + '_D') + regnames.insert(len(regnames), cond + "_D") reg.insert(len(reg), regressor[1]) else: reg.insert(len(reg), regressor) @@ -846,7 +939,7 @@ def _cond_to_regress(self, info, nscans): treg = np.zeros((nscans / nvol, nvol)) treg[:, i] = 1 reg.insert(len(reg), treg.ravel().tolist()) - regnames.insert(len(regnames), 'T1effect_%d' % i) + regnames.insert(len(regnames), "T1effect_%d" % i) return reg, regnames def _generate_clustered_design(self, infolist): @@ -860,13 +953,14 @@ def _generate_clustered_design(self, infolist): infoout[i].onsets = None infoout[i].durations = None if info.conditions: - img = load(self.inputs.functional_runs[i], mmap=NUMPY_MMAP) + img = load(self.inputs.functional_runs[i]) nscans = img.shape[3] reg, regnames = self._cond_to_regress(info, nscans) - if hasattr(infoout[i], 'regressors') and infoout[i].regressors: + if hasattr(infoout[i], "regressors") and infoout[i].regressors: if not infoout[i].regressor_names: - infoout[i].regressor_names = \ - ['R%d' % j for j in range(len(infoout[i].regressors))] + infoout[i].regressor_names = [ + "R%d" % j for j in range(len(infoout[i].regressors)) + ] else: infoout[i].regressors = [] infoout[i].regressor_names = [] @@ -883,17 +977,15 @@ def _generate_design(self, infolist=None): else: infolist = gen_info(self.inputs.event_files) sparselist = self._generate_clustered_design(infolist) - super(SpecifySparseModel, self)._generate_design(infolist=sparselist) + super()._generate_design(infolist=sparselist) def _list_outputs(self): outputs = self._outputs().get() - if not hasattr(self, '_sessinfo'): + if not hasattr(self, "_sessinfo"): self._generate_design() - outputs['session_info'] = self._sessinfo + outputs["session_info"] = 
self._sessinfo if isdefined(self.inputs.save_plot) and self.inputs.save_plot: - outputs['sparse_png_file'] = os.path.join(os.getcwd(), - 'sparse.png') - outputs['sparse_svg_file'] = os.path.join(os.getcwd(), - 'sparse.svg') + outputs["sparse_png_file"] = os.path.join(os.getcwd(), "sparse.png") + outputs["sparse_svg_file"] = os.path.join(os.getcwd(), "sparse.svg") return outputs diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index d412493714..ff867ae26c 100644 --- a/nipype/algorithms/rapidart.py +++ b/nipype/algorithms/rapidart.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -12,46 +11,49 @@ * StimulusCorrelation: determines correlation between stimuli schedule and movement/intensity parameters """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open, range, str, bytes - import os from copy import deepcopy from nibabel import load, funcs, Nifti1Image import numpy as np -from ..utils import NUMPY_MMAP -from ..interfaces.base import (BaseInterface, traits, InputMultiPath, - OutputMultiPath, TraitedSpec, File, - BaseInterfaceInputSpec, isdefined) +from ..interfaces.base import ( + BaseInterface, + traits, + InputMultiPath, + OutputMultiPath, + TraitedSpec, + File, + BaseInterfaceInputSpec, + isdefined, +) from ..utils.filemanip import ensure_list, save_json, split_filename from ..utils.misc import find_indices, normalize_mc_params from .. import logging, config -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") def _get_affine_matrix(params, source): """Return affine matrix given a set of translation and rotation parameters - params : np.array (upto 12 long) in native package format + params : np.array (up to 12 long) in native package format source : the package that generated the parameters supports SPM, AFNI, FSFAST, FSL, NIPY """ - if source == 'NIPY': + if source == "NIPY": # nipy does not store typical euler angles, use nipy to convert from nipy.algorithms.registration import to_matrix44 + return to_matrix44(params) params = normalize_mc_params(params, source) # process for FSL, SPM, AFNI and FSFAST - rotfunc = lambda x: np.array([[np.cos(x), np.sin(x)], - [-np.sin(x), np.cos(x)]]) + rotfunc = lambda x: np.array([[np.cos(x), np.sin(x)], [-np.sin(x), np.cos(x)]]) q = np.array([0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0]) if len(params) < 12: - params = np.hstack((params, q[len(params):])) - params.shape = (len(params), ) + params = np.hstack((params, q[len(params) :])) + params.shape = (len(params),) # Translation T = np.eye(4) T[0:3, -1] = params[0:3] @@ -68,7 +70,7 @@ def _get_affine_matrix(params, source): # Shear Sh = np.eye(4) Sh[(0, 0, 1), (1, 2, 2)] = params[9:12] - if source in ('AFNI', 'FSFAST'): + if source in ("AFNI", "FSFAST"): return np.dot(T, np.dot(Ry, np.dot(Rx, np.dot(Rz, np.dot(S, Sh))))) return np.dot(T, np.dot(Rx, np.dot(Ry, np.dot(Rz, np.dot(S, Sh))))) @@ -92,9 +94,7 @@ def _calc_norm(mc, use_differences, source, brain_pts=None): """ - affines = [ - _get_affine_matrix(mc[i, :], source) for i in range(mc.shape[0]) - ] + affines = [_get_affine_matrix(mc[i, :], source) for i in range(mc.shape[0])] return _calc_norm_affine(affines, use_differences, brain_pts) @@ -133,24 +133,34 @@ def _calc_norm_affine(affines, use_differences, brain_pts=None): displacement[i, :] = np.sqrt( np.sum( np.power( - np.reshape(newpos[i, :], - (3, 
all_pts.shape[1])) - all_pts[0:3, :], - 2), - axis=0)) + np.reshape(newpos[i, :], (3, all_pts.shape[1])) + - all_pts[0:3, :], + 2, + ), + axis=0, + ) + ) # np.savez('displacement.npz', newpos=newpos, pts=all_pts) normdata = np.zeros(len(affines)) if use_differences: newpos = np.concatenate( - (np.zeros((1, n_pts)), np.diff(newpos, n=1, axis=0)), axis=0) + (np.zeros((1, n_pts)), np.diff(newpos, n=1, axis=0)), axis=0 + ) for i in range(newpos.shape[0]): - normdata[i] = \ - np.max(np.sqrt(np.sum( - np.reshape(np.power(np.abs(newpos[i, :]), 2), - (3, all_pts.shape[1])), - axis=0))) + normdata[i] = np.max( + np.sqrt( + np.sum( + np.reshape( + np.power(np.abs(newpos[i, :]), 2), (3, all_pts.shape[1]) + ), + axis=0, + ) + ) + ) else: from scipy.signal import detrend - newpos = np.abs(detrend(newpos, axis=0, type='constant')) + + newpos = np.abs(detrend(newpos, axis=0, type="constant")) normdata = np.sqrt(np.mean(np.power(newpos, 2), axis=1)) return normdata, displacement @@ -158,15 +168,18 @@ def _calc_norm_affine(affines, use_differences, brain_pts=None): class ArtifactDetectInputSpec(BaseInterfaceInputSpec): realigned_files = InputMultiPath( File(exists=True), - desc=("Names of realigned functional data " - "files"), - mandatory=True) + desc=("Names of realigned functional data files"), + mandatory=True, + ) realignment_parameters = InputMultiPath( File(exists=True), mandatory=True, - desc=("Names of realignment " - "parameters corresponding to " - "the functional data files")) + desc=( + "Names of realignment " + "parameters corresponding to " + "the functional data files" + ), + ) parameter_source = traits.Enum( "SPM", "FSL", @@ -174,131 +187,167 @@ class ArtifactDetectInputSpec(BaseInterfaceInputSpec): "NiPy", "FSFAST", desc="Source of movement parameters", - mandatory=True) + mandatory=True, + ) use_differences = traits.ListBool( [True, False], minlen=2, maxlen=2, usedefault=True, - desc=("Use differences between successive" - " motion (first element) and " - "intensity parameter (second " - "element) estimates in order to " - "determine outliers. " - "(default is [True, False])")) + desc=( + "Use differences between successive" + " motion (first element) and " + "intensity parameter (second " + "element) estimates in order to " + "determine outliers. " + "(default is [True, False])" + ), + ) use_norm = traits.Bool( True, usedefault=True, - requires=['norm_threshold'], - desc=("Uses a composite of the motion parameters in " - "order to determine outliers.")) + requires=["norm_threshold"], + desc=( + "Uses a composite of the motion parameters in " + "order to determine outliers." 
+        ),
+    )
    norm_threshold = traits.Float(
-        xor=['rotation_threshold', 'translation_threshold'],
+        xor=["rotation_threshold", "translation_threshold"],
        mandatory=True,
-        desc=("Threshold to use to detect motion-rela"
-              "ted outliers when composite motion is "
-              "being used"))
+        desc=(
+            "Threshold to use to detect motion-related "
+            "outliers when composite motion is "
+            "being used"
+        ),
+    )
    rotation_threshold = traits.Float(
        mandatory=True,
-        xor=['norm_threshold'],
-        desc=("Threshold (in radians) to use to "
-              "detect rotation-related outliers"))
+        xor=["norm_threshold"],
+        desc=("Threshold (in radians) to use to detect rotation-related outliers"),
+    )
    translation_threshold = traits.Float(
        mandatory=True,
-        xor=['norm_threshold'],
-        desc=("Threshold (in mm) to use to "
-              "detect translation-related "
-              "outliers"))
+        xor=["norm_threshold"],
+        desc=("Threshold (in mm) to use to detect translation-related outliers"),
+    )
    zintensity_threshold = traits.Float(
        mandatory=True,
-        desc=("Intensity Z-threshold use to "
-              "detection images that deviate "
-              "from the mean"))
+        desc=(
+            "Intensity Z-threshold used to "
+            "detect images that deviate "
+            "from the mean"
+        ),
+    )
    mask_type = traits.Enum(
-        'spm_global',
-        'file',
-        'thresh',
+        "spm_global",
+        "file",
+        "thresh",
        mandatory=True,
-        desc=("Type of mask that should be used to mask the"
-              " functional data. *spm_global* uses an "
-              "spm_global like calculation to determine the"
-              " brain mask. *file* specifies a brain mask "
-              "file (should be an image file consisting of "
-              "0s and 1s). *thresh* specifies a threshold "
-              "to use. By default all voxels are used,"
-              "unless one of these mask types are defined"))
+        desc=(
+            "Type of mask that should be used to mask the"
+            " functional data. *spm_global* uses an "
+            "spm_global like calculation to determine the"
+            " brain mask. *file* specifies a brain mask "
+            "file (should be an image file consisting of "
+            "0s and 1s). *thresh* specifies a threshold "
+            "to use. By default all voxels are used, "
+            "unless one of these mask types is defined"
+        ),
+    )
-    mask_file = File(
-        exists=True, desc="Mask file to be used if mask_type is 'file'.")
+    mask_file = File(exists=True, desc="Mask file to be used if mask_type is 'file'.")
    mask_threshold = traits.Float(
-        desc=("Mask threshold to be used if mask_type"
-              " is 'thresh'."))
+        desc=("Mask threshold to be used if mask_type is 'thresh'.")
+    )
    intersect_mask = traits.Bool(
-        True, usedefault=True,
-        desc=("Intersect the masks when computed from "
-              "spm_global."))
+        True,
+        usedefault=True,
+        desc=("Intersect the masks when computed from spm_global."),
+    )
    save_plot = traits.Bool(
-        True, desc="save plots containing outliers", usedefault=True)
+        True, desc="save plots containing outliers", usedefault=True
+    )
    plot_type = traits.Enum(
-        'png',
-        'svg',
-        'eps',
-        'pdf',
+        "png",
+        "svg",
+        "eps",
+        "pdf",
        desc="file type of the outlier plot",
-        usedefault=True)
+        usedefault=True,
+    )
    bound_by_brainmask = traits.Bool(
        False,
-        desc=("use the brain mask to "
-              "determine bounding box"
-              "for composite norm (works"
-              "for SPM and Nipy - currently"
-              "inaccurate for FSL, AFNI"),
-        usedefault=True)
+        desc=(
+            "use the brain mask to "
+            "determine bounding box "
+            "for composite norm (works "
+            "for SPM and Nipy - currently "
+            "inaccurate for FSL, AFNI)"
+        ),
+        usedefault=True,
+    )
    global_threshold = traits.Float(
        8.0,
-        desc=("use this threshold when mask "
-              "type equal's spm_global"),
-        usedefault=True)
+        desc=("use this threshold when mask type equals spm_global"),
+        usedefault=True,
+    )


class ArtifactDetectOutputSpec(TraitedSpec):
    outlier_files = OutputMultiPath(
        File(exists=True),
-        desc=("One file for each functional run "
-              "containing a list of 0-based indices"
-              " corresponding to outlier volumes"))
+        desc=(
+            "One file for each functional run "
+            "containing a list of 0-based indices"
+            " corresponding to outlier volumes"
+        ),
+    )
    intensity_files = OutputMultiPath(
        File(exists=True),
-        desc=("One file for each functional run "
-              "containing the global intensity "
-              "values determined from the "
-              "brainmask"))
+        desc=(
+            "One file for each functional run "
+            "containing the global intensity "
+            "values determined from the "
+            "brainmask"
+        ),
+    )
    norm_files = OutputMultiPath(
-        File,
-        desc=("One file for each functional run "
-              "containing the composite norm"))
+        File, desc=("One file for each functional run containing the composite norm")
+    )
    statistic_files = OutputMultiPath(
        File(exists=True),
-        desc=("One file for each functional run "
-              "containing information about the "
-              "different types of artifacts and "
-              "if design info is provided then "
-              "details of stimulus correlated "
-              "motion and a listing or artifacts "
-              "by event type."))
+        desc=(
+            "One file for each functional run "
+            "containing information about the "
+            "different types of artifacts and "
+            "if design info is provided then "
+            "details of stimulus correlated "
+            "motion and a listing of artifacts "
+            "by event type."
+ ), + ) plot_files = OutputMultiPath( File, - desc=("One image file for each functional run " - "containing the detected outliers")) + desc=( + "One image file for each functional run containing the detected outliers" + ), + ) mask_files = OutputMultiPath( File, - desc=("One image file for each functional run " - "containing the mask used for global " - "signal calculation")) + desc=( + "One image file for each functional run " + "containing the mask used for global " + "signal calculation" + ), + ) displacement_files = OutputMultiPath( File, - desc=("One image file for each " - "functional run containing the " - "voxel displacement timeseries")) + desc=( + "One image file for each " + "functional run containing the " + "voxel displacement timeseries" + ), + ) class ArtifactDetect(BaseInterface): @@ -328,7 +377,7 @@ class ArtifactDetect(BaseInterface): output_spec = ArtifactDetectOutputSpec def __init__(self, **inputs): - super(ArtifactDetect, self).__init__(**inputs) + super().__init__(**inputs) def _get_output_filenames(self, motionfile, output_dir): """Generate output files based on motion filenames @@ -348,62 +397,73 @@ def _get_output_filenames(self, motionfile, output_dir): else: raise Exception("Unknown type of file") _, filename, ext = split_filename(infile) - artifactfile = os.path.join(output_dir, ''.join(('art.', filename, - '_outliers.txt'))) - intensityfile = os.path.join(output_dir, ''.join(('global_intensity.', - filename, '.txt'))) - statsfile = os.path.join(output_dir, ''.join(('stats.', filename, - '.txt'))) - normfile = os.path.join(output_dir, ''.join(('norm.', filename, - '.txt'))) - plotfile = os.path.join(output_dir, ''.join(('plot.', filename, '.', - self.inputs.plot_type))) - displacementfile = os.path.join(output_dir, ''.join(('disp.', filename, - ext))) - maskfile = os.path.join(output_dir, ''.join(('mask.', filename, ext))) - return (artifactfile, intensityfile, statsfile, normfile, plotfile, - displacementfile, maskfile) + artifactfile = os.path.join(output_dir, f"art.{filename}_outliers.txt") + intensityfile = os.path.join(output_dir, f"global_intensity.{filename}.txt") + statsfile = os.path.join(output_dir, f"stats.{filename}.txt") + normfile = os.path.join(output_dir, f"norm.{filename}.txt") + plotfile = os.path.join(output_dir, f"plot.{filename}.{self.inputs.plot_type}") + displacementfile = os.path.join(output_dir, f"disp.{filename}{ext}") + maskfile = os.path.join(output_dir, f"mask.{filename}{ext}") + return ( + artifactfile, + intensityfile, + statsfile, + normfile, + plotfile, + displacementfile, + maskfile, + ) def _list_outputs(self): outputs = self._outputs().get() - outputs['outlier_files'] = [] - outputs['intensity_files'] = [] - outputs['statistic_files'] = [] - outputs['mask_files'] = [] + outputs["outlier_files"] = [] + outputs["intensity_files"] = [] + outputs["statistic_files"] = [] + outputs["mask_files"] = [] if isdefined(self.inputs.use_norm) and self.inputs.use_norm: - outputs['norm_files'] = [] + outputs["norm_files"] = [] if self.inputs.bound_by_brainmask: - outputs['displacement_files'] = [] + outputs["displacement_files"] = [] if isdefined(self.inputs.save_plot) and self.inputs.save_plot: - outputs['plot_files'] = [] + outputs["plot_files"] = [] for i, f in enumerate(ensure_list(self.inputs.realigned_files)): - (outlierfile, intensityfile, statsfile, normfile, plotfile, - displacementfile, maskfile) = \ - self._get_output_filenames(f, os.getcwd()) - outputs['outlier_files'].insert(i, outlierfile) - 
outputs['intensity_files'].insert(i, intensityfile) - outputs['statistic_files'].insert(i, statsfile) - outputs['mask_files'].insert(i, maskfile) + ( + outlierfile, + intensityfile, + statsfile, + normfile, + plotfile, + displacementfile, + maskfile, + ) = self._get_output_filenames(f, os.getcwd()) + outputs["outlier_files"].insert(i, outlierfile) + outputs["intensity_files"].insert(i, intensityfile) + outputs["statistic_files"].insert(i, statsfile) + outputs["mask_files"].insert(i, maskfile) if isdefined(self.inputs.use_norm) and self.inputs.use_norm: - outputs['norm_files'].insert(i, normfile) + outputs["norm_files"].insert(i, normfile) if self.inputs.bound_by_brainmask: - outputs['displacement_files'].insert(i, displacementfile) + outputs["displacement_files"].insert(i, displacementfile) if isdefined(self.inputs.save_plot) and self.inputs.save_plot: - outputs['plot_files'].insert(i, plotfile) + outputs["plot_files"].insert(i, plotfile) return outputs def _plot_outliers_with_wave(self, wave, outliers, name): import matplotlib + matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt + plt.plot(wave) plt.ylim([wave.min(), wave.max()]) plt.xlim([0, len(wave) - 1]) if len(outliers): plt.plot( np.tile(outliers[:, None], (1, 2)).T, - np.tile([wave.min(), wave.max()], (len(outliers), 1)).T, 'r') - plt.xlabel('Scans - 0-based') + np.tile([wave.min(), wave.max()], (len(outliers), 1)).T, + "r", + ) + plt.xlabel("Scans - 0-based") plt.ylabel(name) def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): @@ -411,36 +471,36 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): Core routine for detecting outliers """ from scipy import signal + if not cwd: cwd = os.getcwd() # read in functional image if isinstance(imgfile, (str, bytes)): - nim = load(imgfile, mmap=NUMPY_MMAP) + nim = load(imgfile) elif isinstance(imgfile, list): if len(imgfile) == 1: - nim = load(imgfile[0], mmap=NUMPY_MMAP) + nim = load(imgfile[0]) else: - images = [load(f, mmap=NUMPY_MMAP) for f in imgfile] + images = [load(f) for f in imgfile] nim = funcs.concat_images(images) # compute global intensity signal (x, y, z, timepoints) = nim.shape - data = nim.get_data() + data = nim.get_fdata(dtype=np.float32) affine = nim.affine g = np.zeros((timepoints, 1)) masktype = self.inputs.mask_type - if masktype == 'spm_global': # spm_global like calculation - iflogger.debug('art: using spm global') + if masktype == "spm_global": # spm_global like calculation + iflogger.debug("art: using spm global") intersect_mask = self.inputs.intersect_mask if intersect_mask: mask = np.ones((x, y, z), dtype=bool) for t0 in range(timepoints): vol = data[:, :, :, t0] # Use an SPM like approach - mask_tmp = vol > \ - (np.nanmean(vol) / self.inputs.global_threshold) + mask_tmp = vol > (np.nanmean(vol) / self.inputs.global_threshold) mask = mask * mask_tmp for t0 in range(timepoints): vol = data[:, :, :, t0] @@ -449,23 +509,22 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): intersect_mask = False g = np.zeros((timepoints, 1)) if not intersect_mask: - iflogger.info('not intersect_mask is True') + iflogger.info("not intersect_mask is True") mask = np.zeros((x, y, z, timepoints)) for t0 in range(timepoints): vol = data[:, :, :, t0] - mask_tmp = vol > \ - (np.nanmean(vol) / self.inputs.global_threshold) + mask_tmp = vol > (np.nanmean(vol) / self.inputs.global_threshold) mask[:, :, :, t0] = mask_tmp g[t0] = np.nansum(vol * mask_tmp) / np.nansum(mask_tmp) - elif 
masktype == 'file': # uses a mask image to determine intensity - maskimg = load(self.inputs.mask_file, mmap=NUMPY_MMAP) - mask = maskimg.get_data() + elif masktype == "file": # uses a mask image to determine intensity + maskimg = load(self.inputs.mask_file) + mask = maskimg.get_fdata(dtype=np.float32) affine = maskimg.affine mask = mask > 0.5 for t0 in range(timepoints): vol = data[:, :, :, t0] g[t0] = np.nanmean(vol[mask]) - elif masktype == 'thresh': # uses a fixed signal threshold + elif masktype == "thresh": # uses a fixed signal threshold for t0 in range(timepoints): vol = data[:, :, :, t0] mask = vol > self.inputs.mask_threshold @@ -477,8 +536,7 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): # compute normalized intensity values gz = signal.detrend(g, axis=0) # detrend the signal if self.inputs.use_differences[1]: - gz = np.concatenate( - (np.zeros((1, 1)), np.diff(gz, n=1, axis=0)), axis=0) + gz = np.concatenate((np.zeros((1, 1)), np.diff(gz, n=1, axis=0)), axis=0) gz = (gz - np.mean(gz)) / np.std(gz) # normalize the detrended signal iidx = find_indices(abs(gz) > self.inputs.zintensity_threshold) @@ -486,9 +544,15 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): mc_in = np.loadtxt(motionfile) mc = deepcopy(mc_in) - (artifactfile, intensityfile, statsfile, normfile, plotfile, - displacementfile, maskfile) = self._get_output_filenames( - imgfile, cwd) + ( + artifactfile, + intensityfile, + statsfile, + normfile, + plotfile, + displacementfile, + maskfile, + ) = self._get_output_filenames(imgfile, cwd) mask_img = Nifti1Image(mask.astype(np.uint8), affine) mask_img.to_filename(maskfile) @@ -496,132 +560,130 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): brain_pts = None if self.inputs.bound_by_brainmask: voxel_coords = np.nonzero(mask) - coords = np.vstack((voxel_coords[0], - np.vstack((voxel_coords[1], - voxel_coords[2])))).T - brain_pts = np.dot(affine, - np.hstack((coords, - np.ones((coords.shape[0], - 1)))).T) + coords = np.vstack( + (voxel_coords[0], np.vstack((voxel_coords[1], voxel_coords[2]))) + ).T + brain_pts = np.dot( + affine, np.hstack((coords, np.ones((coords.shape[0], 1)))).T + ) # calculate the norm of the motion parameters normval, displacement = _calc_norm( mc, self.inputs.use_differences[0], self.inputs.parameter_source, - brain_pts=brain_pts) + brain_pts=brain_pts, + ) tidx = find_indices(normval > self.inputs.norm_threshold) ridx = find_indices(normval < 0) if displacement is not None: - dmap = np.zeros((x, y, z, timepoints), dtype=np.float) + dmap = np.zeros((x, y, z, timepoints), dtype=np.float64) for i in range(timepoints): - dmap[voxel_coords[0], voxel_coords[1], voxel_coords[2], - i] = displacement[i, :] + dmap[voxel_coords[0], voxel_coords[1], voxel_coords[2], i] = ( + displacement[i, :] + ) dimg = Nifti1Image(dmap, affine) dimg.to_filename(displacementfile) else: if self.inputs.use_differences[0]: mc = np.concatenate( - (np.zeros((1, 6)), np.diff(mc_in, n=1, axis=0)), axis=0) + (np.zeros((1, 6)), np.diff(mc_in, n=1, axis=0)), axis=0 + ) traval = mc[:, 0:3] # translation parameters (mm) rotval = mc[:, 3:6] # rotation parameters (rad) tidx = find_indices( - np.sum(abs(traval) > self.inputs.translation_threshold, 1) > 0) + np.sum(abs(traval) > self.inputs.translation_threshold, 1) > 0 + ) ridx = find_indices( - np.sum(abs(rotval) > self.inputs.rotation_threshold, 1) > 0) + np.sum(abs(rotval) > self.inputs.rotation_threshold, 1) > 0 + ) outliers = np.unique(np.union1d(iidx, 
np.union1d(tidx, ridx))) # write output to outputfile - np.savetxt(artifactfile, outliers, fmt=b'%d', delimiter=' ') - np.savetxt(intensityfile, g, fmt=b'%.2f', delimiter=' ') + np.savetxt(artifactfile, outliers, fmt=b"%d", delimiter=" ") + np.savetxt(intensityfile, g, fmt=b"%.2f", delimiter=" ") if self.inputs.use_norm: - np.savetxt(normfile, normval, fmt=b'%.4f', delimiter=' ') + np.savetxt(normfile, normval, fmt=b"%.4f", delimiter=" ") if isdefined(self.inputs.save_plot) and self.inputs.save_plot: import matplotlib + matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt + fig = plt.figure() if isdefined(self.inputs.use_norm) and self.inputs.use_norm: plt.subplot(211) else: plt.subplot(311) - self._plot_outliers_with_wave(gz, iidx, 'Intensity') + self._plot_outliers_with_wave(gz, iidx, "Intensity") if isdefined(self.inputs.use_norm) and self.inputs.use_norm: plt.subplot(212) - self._plot_outliers_with_wave(normval, np.union1d(tidx, ridx), - 'Norm (mm)') + self._plot_outliers_with_wave( + normval, np.union1d(tidx, ridx), "Norm (mm)" + ) else: - diff = '' + diff = "" if self.inputs.use_differences[0]: - diff = 'diff' + diff = "diff" plt.subplot(312) - self._plot_outliers_with_wave(traval, tidx, - 'Translation (mm)' + diff) + self._plot_outliers_with_wave(traval, tidx, "Translation (mm)" + diff) plt.subplot(313) - self._plot_outliers_with_wave(rotval, ridx, - 'Rotation (rad)' + diff) + self._plot_outliers_with_wave(rotval, ridx, "Rotation (rad)" + diff) plt.savefig(plotfile) plt.close(fig) motion_outliers = np.union1d(tidx, ridx) stats = [ + {"motion_file": motionfile, "functional_file": imgfile}, { - 'motion_file': motionfile, - 'functional_file': imgfile + "common_outliers": len(np.intersect1d(iidx, motion_outliers)), + "intensity_outliers": len(np.setdiff1d(iidx, motion_outliers)), + "motion_outliers": len(np.setdiff1d(motion_outliers, iidx)), }, { - 'common_outliers': len(np.intersect1d(iidx, motion_outliers)), - 'intensity_outliers': len(np.setdiff1d(iidx, motion_outliers)), - 'motion_outliers': len(np.setdiff1d(motion_outliers, iidx)), - }, - { - 'motion': [ - { - 'using differences': self.inputs.use_differences[0] - }, + "motion": [ + {"using differences": self.inputs.use_differences[0]}, { - 'mean': np.mean(mc_in, axis=0).tolist(), - 'min': np.min(mc_in, axis=0).tolist(), - 'max': np.max(mc_in, axis=0).tolist(), - 'std': np.std(mc_in, axis=0).tolist() + "mean": np.mean(mc_in, axis=0).tolist(), + "min": np.min(mc_in, axis=0).tolist(), + "max": np.max(mc_in, axis=0).tolist(), + "std": np.std(mc_in, axis=0).tolist(), }, ] }, { - 'intensity': [ + "intensity": [ + {"using differences": self.inputs.use_differences[1]}, { - 'using differences': self.inputs.use_differences[1] - }, - { - 'mean': np.mean(gz, axis=0).tolist(), - 'min': np.min(gz, axis=0).tolist(), - 'max': np.max(gz, axis=0).tolist(), - 'std': np.std(gz, axis=0).tolist() + "mean": np.mean(gz, axis=0).tolist(), + "min": np.min(gz, axis=0).tolist(), + "max": np.max(gz, axis=0).tolist(), + "std": np.std(gz, axis=0).tolist(), }, ] }, ] if self.inputs.use_norm: stats.insert( - 3, { - 'motion_norm': { - 'mean': np.mean(normval, axis=0).tolist(), - 'min': np.min(normval, axis=0).tolist(), - 'max': np.max(normval, axis=0).tolist(), - 'std': np.std(normval, axis=0).tolist(), + 3, + { + "motion_norm": { + "mean": np.mean(normval, axis=0).tolist(), + "min": np.min(normval, axis=0).tolist(), + "max": np.max(normval, axis=0).tolist(), + "std": np.std(normval, axis=0).tolist(), } - }) + }, + ) 
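# --- Editor's illustrative sketch (hedged; not part of the patch). The hunk
# above composes ArtifactDetect's final outlier list as a union of the
# intensity-based indices (iidx) and the motion-based indices (tidx, ridx).
# A minimal, self-contained rendering of that logic; `compose_outliers`,
# `zthresh`, and `normthresh` are hypothetical stand-ins for the interface's
# zintensity_threshold and norm_threshold inputs:
import numpy as np

def compose_outliers(gz, normval, zthresh=3.0, normthresh=1.0):
    """Return sorted, unique 0-based scan indices flagged as outliers."""
    iidx = np.flatnonzero(np.abs(gz) > zthresh)   # intensity spikes (z-scored)
    midx = np.flatnonzero(normval > normthresh)   # composite motion spikes
    return np.unique(np.union1d(iidx, midx))

# gz: detrended, z-scored global intensity; normval: motion parameter norm
assert list(compose_outliers(np.array([0.1, 4.2, -0.3]),
                             np.array([0.2, 0.1, 2.5]))) == [1, 2]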
save_json(statsfile, stats) def _run_interface(self, runtime): - """Execute this module. - """ + """Execute this module.""" funcfilelist = ensure_list(self.inputs.realigned_files) motparamlist = ensure_list(self.inputs.realignment_parameters) for i, imgf in enumerate(funcfilelist): - self._detect_outliers_core( - imgf, motparamlist[i], i, cwd=os.getcwd()) + self._detect_outliers_core(imgf, motparamlist[i], i, cwd=os.getcwd()) return runtime @@ -629,29 +691,29 @@ class StimCorrInputSpec(BaseInterfaceInputSpec): realignment_parameters = InputMultiPath( File(exists=True), mandatory=True, - desc=("Names of realignment " - "parameters corresponding to " - "the functional data files")) + desc=( + "Names of realignment parameters corresponding to " + "the functional data files" + ), + ) intensity_values = InputMultiPath( File(exists=True), mandatory=True, - desc=("Name of file containing intensity " - "values")) + desc=("Name of file containing intensity values"), + ) spm_mat_file = File( - exists=True, - mandatory=True, - desc="SPM mat file (use pre-estimate SPM.mat file)") + exists=True, mandatory=True, desc="SPM mat file (use pre-estimate SPM.mat file)" + ) concatenated_design = traits.Bool( mandatory=True, - desc=("state if the design matrix " - "contains concatenated sessions")) + desc=("state if the design matrix contains concatenated sessions"), + ) class StimCorrOutputSpec(TraitedSpec): stimcorr_files = OutputMultiPath( - File(exists=True), - desc=("List of files containing " - "correlation values")) + File(exists=True), desc=("List of files containing correlation values") + ) class StimulusCorrelation(BaseInterface): @@ -692,12 +754,10 @@ def _get_output_filenames(self, motionfile, output_dir): """ (_, filename) = os.path.split(motionfile) (filename, _) = os.path.splitext(filename) - corrfile = os.path.join(output_dir, ''.join(('qa.', filename, - '_stimcorr.txt'))) + corrfile = os.path.join(output_dir, f"qa.{filename}_stimcorr.txt") return corrfile - def _stimcorr_core(self, motionfile, intensityfile, designmatrix, - cwd=None): + def _stimcorr_core(self, motionfile, intensityfile, designmatrix, cwd=None): """ Core routine for determining stimulus correlation @@ -714,14 +774,14 @@ def _stimcorr_core(self, motionfile, intensityfile, designmatrix, cm = np.corrcoef(concat_matrix, rowvar=0) corrfile = self._get_output_filenames(motionfile, cwd) # write output to outputfile - file = open(corrfile, 'w') + file = open(corrfile, "w") file.write("Stats for:\n") file.write("Stimulus correlated motion:\n%s\n" % motionfile) for i in range(dcol): file.write("SCM.%d:" % i) for v in cm[i, dcol + np.arange(mccol)]: file.write(" %.2f" % v) - file.write('\n') + file.write("\n") file.write("Stimulus correlated intensity:\n%s\n" % intensityfile) for i in range(dcol): file.write("SCI.%d: %.2f\n" % (i, cm[i, -1])) @@ -736,21 +796,18 @@ def _get_spm_submatrix(self, spmmat, sessidx, rows=None): sessidx: int index to session that needs to be extracted. 
""" - designmatrix = spmmat['SPM'][0][0].xX[0][0].X - U = spmmat['SPM'][0][0].Sess[0][sessidx].U[0] + designmatrix = spmmat["SPM"][0][0].xX[0][0].X + U = spmmat["SPM"][0][0].Sess[0][sessidx].U[0] if rows is None: - rows = spmmat['SPM'][0][0].Sess[0][sessidx].row[0] - 1 - cols = (spmmat['SPM'][0][0].Sess[0][sessidx].col[0][list( - range(len(U)))] - 1) - outmatrix = designmatrix.take( - rows.tolist(), axis=0).take( - cols.tolist(), axis=1) + rows = spmmat["SPM"][0][0].Sess[0][sessidx].row[0] - 1 + cols = spmmat["SPM"][0][0].Sess[0][sessidx].col[0][list(range(len(U)))] - 1 + outmatrix = designmatrix.take(rows.tolist(), axis=0).take(cols.tolist(), axis=1) return outmatrix def _run_interface(self, runtime): - """Execute this module. - """ + """Execute this module.""" import scipy.io as sio + motparamlist = self.inputs.realignment_parameters intensityfiles = self.inputs.intensity_values spmmat = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False) @@ -764,8 +821,7 @@ def _run_interface(self, runtime): rows = np.sum(nrows) + np.arange(mc_in.shape[0]) nrows.append(mc_in.shape[0]) matrix = self._get_spm_submatrix(spmmat, sessidx, rows) - self._stimcorr_core(motparamlist[i], intensityfiles[i], matrix, - os.getcwd()) + self._stimcorr_core(motparamlist[i], intensityfiles[i], matrix, os.getcwd()) return runtime def _list_outputs(self): @@ -774,5 +830,5 @@ def _list_outputs(self): for i, f in enumerate(self.inputs.realignment_parameters): files.insert(i, self._get_output_filenames(f, os.getcwd())) if files: - outputs['stimcorr_files'] = files + outputs["stimcorr_files"] = files return outputs diff --git a/nipype/algorithms/stats.py b/nipype/algorithms/stats.py index 51a3bc9088..9fadd6fcf3 100644 --- a/nipype/algorithms/stats.py +++ b/nipype/algorithms/stats.py @@ -1,34 +1,40 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Managing statistical maps """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os import nibabel as nb import numpy as np from ..interfaces.base import ( - BaseInterfaceInputSpec, TraitedSpec, SimpleInterface, - traits, InputMultiPath, File + BaseInterfaceInputSpec, + TraitedSpec, + SimpleInterface, + traits, + InputMultiPath, + File, ) from ..utils.filemanip import split_filename class ActivationCountInputSpec(BaseInterfaceInputSpec): - in_files = InputMultiPath(File(exists=True), mandatory=True, - desc='input file, generally a list of z-stat maps') + in_files = InputMultiPath( + File(exists=True), + mandatory=True, + desc="input file, generally a list of z-stat maps", + ) threshold = traits.Float( - mandatory=True, desc='binarization threshold. E.g. a threshold of 1.65 ' - 'corresponds to a two-sided Z-test of p<.10') + mandatory=True, + desc="binarization threshold. E.g. 
a threshold of 1.65 " + "corresponds to a two-sided Z-test of p<.10", + ) class ActivationCountOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output activation count map') - acm_pos = File(exists=True, desc='positive activation count map') - acm_neg = File(exists=True, desc='negative activation count map') + out_file = File(exists=True, desc="output activation count map") + acm_pos = File(exists=True, desc="positive activation count map") + acm_neg = File(exists=True, desc="negative activation count map") class ActivationCount(SimpleInterface): @@ -38,31 +44,35 @@ class ActivationCount(SimpleInterface): Adapted from: https://github.com/poldracklab/CNP_task_analysis/\ blob/61c27f5992db9d8800884f8ffceb73e6957db8af/CNP_2nd_level_ACM.py """ + input_spec = ActivationCountInputSpec output_spec = ActivationCountOutputSpec def _run_interface(self, runtime): - allmaps = nb.concat_images(self.inputs.in_files).get_data() - acm_pos = np.mean(allmaps > self.inputs.threshold, - axis=3, dtype=np.float32) - acm_neg = np.mean(allmaps < -1.0 * self.inputs.threshold, - axis=3, dtype=np.float32) + allmaps = nb.concat_images(self.inputs.in_files).dataobj + acm_pos = np.mean(allmaps > self.inputs.threshold, axis=3, dtype=np.float32) + acm_neg = np.mean( + allmaps < -1.0 * self.inputs.threshold, axis=3, dtype=np.float32 + ) acm_diff = acm_pos - acm_neg template_fname = self.inputs.in_files[0] ext = split_filename(template_fname)[2] - fname_fmt = os.path.join(runtime.cwd, 'acm_{}' + ext).format + fname_fmt = os.path.join(runtime.cwd, "acm_{}" + ext).format - self._results['out_file'] = fname_fmt('diff') - self._results['acm_pos'] = fname_fmt('pos') - self._results['acm_neg'] = fname_fmt('neg') + self._results["out_file"] = fname_fmt("diff") + self._results["acm_pos"] = fname_fmt("pos") + self._results["acm_neg"] = fname_fmt("neg") img = nb.load(template_fname) img.__class__(acm_diff, img.affine, img.header).to_filename( - self._results['out_file']) + self._results["out_file"] + ) img.__class__(acm_pos, img.affine, img.header).to_filename( - self._results['acm_pos']) + self._results["acm_pos"] + ) img.__class__(acm_neg, img.affine, img.header).to_filename( - self._results['acm_neg']) + self._results["acm_neg"] + ) return runtime diff --git a/nipype/algorithms/tests/__init__.py b/nipype/algorithms/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/algorithms/tests/__init__.py +++ b/nipype/algorithms/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/algorithms/tests/test_CompCor.py b/nipype/algorithms/tests/test_CompCor.py index 488ad3c960..f506ded628 100644 --- a/nipype/algorithms/tests/test_CompCor.py +++ b/nipype/algorithms/tests/test_CompCor.py @@ -10,14 +10,51 @@ from ..confounds import CompCor, TCompCor, ACompCor -class TestCompCor(): - ''' Note: Tests currently do a poor job of testing functionality ''' +def close_up_to_column_sign(a, b, rtol=1e-05, atol=1e-08, equal_nan=False): + """SVD can produce sign flips on a per-column basis.""" + a = np.asanyarray(a) + b = np.asanyarray(b) + kwargs = dict(rtol=rtol, atol=atol, equal_nan=equal_nan) + if np.allclose(a, b, **kwargs): + return True + + ret = True + for acol, bcol in zip(a.T, b.T): + ret &= np.allclose(acol, bcol, **kwargs) or np.allclose(acol, -bcol, **kwargs) + if not ret: + break + + return ret + + +@pytest.mark.parametrize( + "a, b, close", + [ + ([[0.1, 0.2], [0.3, 0.4]], [[-0.1, 
0.2], [-0.3, 0.4]], True), + ([[0.1, 0.2], [0.3, 0.4]], [[-0.1, 0.2], [0.3, -0.4]], False), + ], +) +def test_close_up_to_column_sign(a, b, close): + a = np.asanyarray(a) + b = np.asanyarray(b) + assert close_up_to_column_sign(a, b) == close + # Sign flips of all columns never changes result + assert close_up_to_column_sign(a, -b) == close + assert close_up_to_column_sign(-a, b) == close + assert close_up_to_column_sign(-a, -b) == close + # Trivial case + assert close_up_to_column_sign(a, a) + assert close_up_to_column_sign(b, b) + + +class TestCompCor: + """Note: Tests currently do a poor job of testing functionality""" filenames = { - 'functionalnii': 'compcorfunc.nii', - 'masknii': 'compcormask.nii', - 'masknii2': 'compcormask2.nii', - 'components_file': None + "functionalnii": "compcorfunc.nii", + "masknii": "compcormask.nii", + "masknii2": "compcormask2.nii", + "components_file": None, } @pytest.fixture(autouse=True) @@ -26,77 +63,134 @@ def setup_class(self, tmpdir): tmpdir.chdir() noise = np.fromfunction(self.fake_noise_fun, self.fake_data.shape) self.realigned_file = utils.save_toy_nii( - self.fake_data + noise, self.filenames['functionalnii']) + self.fake_data + noise, self.filenames["functionalnii"] + ) mask = np.ones(self.fake_data.shape[:3]) mask[0, 0, 0] = 0 mask[0, 0, 1] = 0 - mask1 = utils.save_toy_nii(mask, self.filenames['masknii']) + mask1 = utils.save_toy_nii(mask, self.filenames["masknii"]) other_mask = np.ones(self.fake_data.shape[:3]) other_mask[0, 1, 0] = 0 other_mask[1, 1, 0] = 0 - mask2 = utils.save_toy_nii(other_mask, self.filenames['masknii2']) + mask2 = utils.save_toy_nii(other_mask, self.filenames["masknii2"]) self.mask_files = [mask1, mask2] def test_compcor(self): - expected_components = [['-0.1989607212', '-0.5753813646'], [ - '0.5692369697', '0.5674945949' - ], ['-0.6662573243', - '0.4675843432'], ['0.4206466244', '-0.3361270124'], - ['-0.1246655485', '-0.1235705610']] + expected_components = [ + [-0.1989607212, -0.5753813646], + [0.5692369697, 0.5674945949], + [-0.6662573243, 0.4675843432], + [0.4206466244, -0.3361270124], + [-0.1246655485, -0.1235705610], + ] self.run_cc( CompCor( + num_components=6, realigned_file=self.realigned_file, mask_files=self.mask_files, - mask_index=0), expected_components) + mask_index=0, + ), + expected_components, + ) self.run_cc( ACompCor( + num_components=6, realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0, - components_file='acc_components_file'), expected_components, - 'aCompCor') + components_file="acc_components_file", + ), + expected_components, + "aCompCor", + ) + + def test_compcor_variance_threshold_and_metadata(self): + expected_components = [ + [-0.2027150345, -0.4954813834], + [0.2565929051, 0.7866217875], + [-0.3550986008, -0.0089784905], + [0.7512786244, -0.3599828482], + [-0.4500578942, 0.0778209345], + ] + expected_metadata = { + "component": "CompCor00", + "mask": "mask", + "singular_value": "4.0720553036", + "variance_explained": "0.5527211465", + "cumulative_variance_explained": "0.5527211465", + "retained": "True", + } + ccinterface = CompCor( + variance_threshold=0.7, + realigned_file=self.realigned_file, + mask_files=self.mask_files, + mask_names=["mask"], + mask_index=1, + save_metadata=True, + ) + self.run_cc( + ccinterface=ccinterface, + expected_components=expected_components, + expected_n_components=2, + expected_metadata=expected_metadata, + ) def test_tcompcor(self): ccinterface = TCompCor( - realigned_file=self.realigned_file, percentile_threshold=0.75) - 
self.run_cc(ccinterface, [['-0.1114536190', '-0.4632908609'], [ - '0.4566907310', '0.6983205193' - ], ['-0.7132557407', '0.1340170559'], [ - '0.5022537643', '-0.5098322262' - ], ['-0.1342351356', '0.1407855119']], 'tCompCor') + num_components=6, + realigned_file=self.realigned_file, + percentile_threshold=0.75, + ) + self.run_cc( + ccinterface, + [ + [-0.1114536190, -0.4632908609], + [0.4566907310, 0.6983205193], + [-0.7132557407, 0.1340170559], + [0.5022537643, -0.5098322262], + [-0.1342351356, 0.1407855119], + ], + "tCompCor", + ) def test_tcompcor_no_percentile(self): - ccinterface = TCompCor(realigned_file=self.realigned_file) + ccinterface = TCompCor(num_components=6, realigned_file=self.realigned_file) ccinterface.run() - mask = nb.load('mask_000.nii.gz').get_data() + mask = nb.load("mask_000.nii.gz").dataobj num_nonmasked_voxels = np.count_nonzero(mask) assert num_nonmasked_voxels == 1 def test_compcor_no_regress_poly(self): self.run_cc( CompCor( + num_components=6, realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0, - pre_filter=False), [['0.4451946442', '-0.7683311482'], [ - '-0.4285129505', '-0.0926034137' - ], ['0.5721540256', '0.5608764842'], [ - '-0.5367548139', '0.0059943226' - ], ['-0.0520809054', '0.2940637551']]) + pre_filter=False, + ), + [ + [0.4451946442, -0.7683311482], + [-0.4285129505, -0.0926034137], + [0.5721540256, 0.5608764842], + [-0.5367548139, 0.0059943226], + [-0.0520809054, 0.2940637551], + ], + ) def test_tcompcor_asymmetric_dim(self): asymmetric_shape = (2, 3, 4, 5) asymmetric_data = utils.save_toy_nii( - np.zeros(asymmetric_shape), 'asymmetric.nii') + np.zeros(asymmetric_shape), "asymmetric.nii" + ) TCompCor(realigned_file=asymmetric_data).run() - assert nb.load( - 'mask_000.nii.gz').get_data().shape == asymmetric_shape[:3] + assert nb.load("mask_000.nii.gz").shape == asymmetric_shape[:3] def test_compcor_bad_input_shapes(self): # dim 0 is < dim 0 of self.mask_files (2) @@ -105,91 +199,108 @@ def test_compcor_bad_input_shapes(self): shape_more_than = (3, 3, 3, 5) for data_shape in (shape_less_than, shape_more_than): - data_file = utils.save_toy_nii(np.zeros(data_shape), 'temp.nii') - interface = CompCor( - realigned_file=data_file, mask_files=self.mask_files[0]) - with pytest.raises(ValueError, message="Dimension mismatch"): - interface.run() + data_file = utils.save_toy_nii(np.zeros(data_shape), "temp.nii") + interface = CompCor(realigned_file=data_file, mask_files=self.mask_files[0]) + with pytest.raises(ValueError): + interface.run() # Dimension mismatch def test_tcompcor_bad_input_dim(self): bad_dims = (2, 2, 2) - data_file = utils.save_toy_nii(np.zeros(bad_dims), 'temp.nii') + data_file = utils.save_toy_nii(np.zeros(bad_dims), "temp.nii") interface = TCompCor(realigned_file=data_file) - with pytest.raises(ValueError, message='Not a 4D file'): - interface.run() + with pytest.raises(ValueError): + interface.run() # Not a 4D file def test_tcompcor_merge_intersect_masks(self): - for method in ['union', 'intersect']: + for method in ["union", "intersect"]: TCompCor( realigned_file=self.realigned_file, mask_files=self.mask_files, - merge_method=method).run() - if method == 'union': + merge_method=method, + ).run() + if method == "union": assert np.array_equal( - nb.load('mask_000.nii.gz').get_data(), - ([[[0, 0], [0, 0]], [[0, 0], [1, 0]]])) - if method == 'intersect': + nb.load("mask_000.nii.gz").dataobj, + ([[[0, 0], [0, 0]], [[0, 0], [1, 0]]]), + ) + if method == "intersect": assert np.array_equal( - 
nb.load('mask_000.nii.gz').get_data(), - ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]])) + nb.load("mask_000.nii.gz").dataobj, + ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]]), + ) def test_tcompcor_index_mask(self): TCompCor( - realigned_file=self.realigned_file, - mask_files=self.mask_files, - mask_index=1).run() + realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=1 + ).run() assert np.array_equal( - nb.load('mask_000.nii.gz').get_data(), - ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]])) + nb.load("mask_000.nii.gz").dataobj, ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]]) + ) def test_tcompcor_multi_mask_no_index(self): interface = TCompCor( - realigned_file=self.realigned_file, mask_files=self.mask_files) - with pytest.raises(ValueError, message='more than one mask file'): - interface.run() - - def run_cc(self, - ccinterface, - expected_components, - expected_header='CompCor'): + realigned_file=self.realigned_file, mask_files=self.mask_files + ) + with pytest.raises(ValueError): + interface.run() # more than one mask file + + def run_cc( + self, + ccinterface, + expected_components, + expected_header="CompCor", + expected_n_components=None, + expected_metadata=None, + ): # run ccresult = ccinterface.run() # assert - expected_file = ccinterface._list_outputs()['components_file'] + expected_file = ccinterface._list_outputs()["components_file"] assert ccresult.outputs.components_file == expected_file assert os.path.exists(expected_file) assert os.path.getsize(expected_file) > 0 - assert ccinterface.inputs.num_components == 6 - - with open(ccresult.outputs.components_file, 'r') as components_file: - expected_n_components = min(ccinterface.inputs.num_components, - self.fake_data.shape[3]) - - components_data = [line.split('\t') for line in components_file] - - # the first item will be '#', we can throw it out - header = components_data.pop(0) - expected_header = [ - expected_header + '{:02d}'.format(i) - for i in range(expected_n_components) - ] - for i, heading in enumerate(header): - assert expected_header[i] in heading - - num_got_timepoints = len(components_data) - assert num_got_timepoints == self.fake_data.shape[3] - for index, timepoint in enumerate(components_data): - assert (len(timepoint) == ccinterface.inputs.num_components - or len(timepoint) == self.fake_data.shape[3]) - assert timepoint[:2] == expected_components[index] + + with open(ccresult.outputs.components_file) as components_file: + header = components_file.readline().rstrip().split("\t") + components_data = np.loadtxt(components_file, delimiter="\t") + + if expected_n_components is None: + expected_n_components = min( + ccinterface.inputs.num_components, self.fake_data.shape[3] + ) + + assert header == [ + f"{expected_header}{i:02d}" for i in range(expected_n_components) + ] + + assert components_data.shape == (self.fake_data.shape[3], expected_n_components) + assert close_up_to_column_sign(components_data[:, :2], expected_components) + + if ccinterface.inputs.save_metadata: + expected_metadata_file = ccinterface._list_outputs()["metadata_file"] + assert ccresult.outputs.metadata_file == expected_metadata_file + assert os.path.exists(expected_metadata_file) + assert os.path.getsize(expected_metadata_file) > 0 + + with open(ccresult.outputs.metadata_file) as metadata_file: + components_metadata = [ + line.rstrip().split("\t") for line in metadata_file + ] + components_metadata = dict( + zip(components_metadata[0], components_metadata[1]) + ) + assert components_metadata == expected_metadata + return ccresult @staticmethod def 
fake_noise_fun(i, j, l, m): return m * i + l - j - fake_data = np.array([[[[8, 5, 3, 8, 0], [6, 7, 4, 7, 1]], - [[7, 9, 1, 6, 5], [0, 7, 4, 7, 7]]], - [[[2, 4, 5, 7, 0], [1, 7, 0, 5, 4]], - [[7, 3, 9, 0, 4], [9, 4, 1, 5, 0]]]]) + fake_data = np.array( + [ + [[[8, 5, 3, 8, 0], [6, 7, 4, 7, 1]], [[7, 9, 1, 6, 5], [0, 7, 4, 7, 7]]], + [[[2, 4, 5, 7, 0], [1, 7, 0, 5, 4]], [[7, 3, 9, 0, 4], [9, 4, 1, 5, 0]]], + ] + ) diff --git a/nipype/algorithms/tests/test_ErrorMap.py b/nipype/algorithms/tests/test_ErrorMap.py index cfd30b0b74..4cd0c7ce7e 100644 --- a/nipype/algorithms/tests/test_ErrorMap.py +++ b/nipype/algorithms/tests/test_ErrorMap.py @@ -1,52 +1,47 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- -import pytest -from nipype.testing import example_data from nipype.algorithms.metrics import ErrorMap import nibabel as nb import numpy as np -import os def test_errormap(tmpdir): - # Single-Spectual # Make two fake 2*2*2 voxel volumes # John von Neumann's birthday volume1 = np.array([[[2.0, 8.0], [1.0, 2.0]], [[1.0, 9.0], [0.0, 3.0]]]) # Alan Turing's birthday volume2 = np.array([[[0.0, 7.0], [2.0, 3.0]], [[1.0, 9.0], [1.0, 2.0]]]) - mask = np.array([[[1, 0], [0, 1]], [[1, 0], [0, 1]]]) + mask = np.array([[[1, 0], [0, 1]], [[1, 0], [0, 1]]], dtype=np.uint8) img1 = nb.Nifti1Image(volume1, np.eye(4)) img2 = nb.Nifti1Image(volume2, np.eye(4)) maskimg = nb.Nifti1Image(mask, np.eye(4)) - nb.save(img1, tmpdir.join('von.nii.gz').strpath) - nb.save(img2, tmpdir.join('alan.nii.gz').strpath) - nb.save(maskimg, tmpdir.join('mask.nii.gz').strpath) + nb.save(img1, tmpdir.join("von.nii.gz").strpath) + nb.save(img2, tmpdir.join("alan.nii.gz").strpath) + nb.save(maskimg, tmpdir.join("mask.nii.gz").strpath) # Default metric errmap = ErrorMap() - errmap.inputs.in_tst = tmpdir.join('von.nii.gz').strpath - errmap.inputs.in_ref = tmpdir.join('alan.nii.gz').strpath - errmap.out_map = tmpdir.join('out_map.nii.gz').strpath + errmap.inputs.in_tst = tmpdir.join("von.nii.gz").strpath + errmap.inputs.in_ref = tmpdir.join("alan.nii.gz").strpath + errmap.out_map = tmpdir.join("out_map.nii.gz").strpath result = errmap.run() assert result.outputs.distance == 1.125 # Square metric - errmap.inputs.metric = 'sqeuclidean' + errmap.inputs.metric = "sqeuclidean" result = errmap.run() assert result.outputs.distance == 1.125 # Linear metric - errmap.inputs.metric = 'euclidean' + errmap.inputs.metric = "euclidean" result = errmap.run() assert result.outputs.distance == 0.875 # Masked - errmap.inputs.mask = tmpdir.join('mask.nii.gz').strpath + errmap.inputs.mask = tmpdir.join("mask.nii.gz").strpath result = errmap.run() assert result.outputs.distance == 1.0 @@ -64,15 +59,15 @@ def test_errormap(tmpdir): msvolume2[:, :, :, 1] = volume1 msimg2 = nb.Nifti1Image(msvolume2, np.eye(4)) - nb.save(msimg1, tmpdir.join('von-ray.nii.gz').strpath) - nb.save(msimg2, tmpdir.join('alan-ray.nii.gz').strpath) + nb.save(msimg1, tmpdir.join("von-ray.nii.gz").strpath) + nb.save(msimg2, tmpdir.join("alan-ray.nii.gz").strpath) - errmap.inputs.in_tst = tmpdir.join('von-ray.nii.gz').strpath - errmap.inputs.in_ref = tmpdir.join('alan-ray.nii.gz').strpath - errmap.inputs.metric = 'sqeuclidean' + errmap.inputs.in_tst = tmpdir.join("von-ray.nii.gz").strpath + errmap.inputs.in_ref = tmpdir.join("alan-ray.nii.gz").strpath + errmap.inputs.metric = "sqeuclidean" result = errmap.run() assert result.outputs.distance == 5.5 - errmap.inputs.metric = 'euclidean' + errmap.inputs.metric = "euclidean" result = errmap.run() assert result.outputs.distance == np.float32(1.25 * 
(2**0.5)) diff --git a/nipype/algorithms/tests/test_Overlap.py b/nipype/algorithms/tests/test_Overlap.py index 786a7328b8..93a7cbb68f 100644 --- a/nipype/algorithms/tests/test_Overlap.py +++ b/nipype/algorithms/tests/test_Overlap.py @@ -1,11 +1,9 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -import os -from nipype.testing import (example_data) +from nipype.testing import example_data import numpy as np @@ -15,10 +13,11 @@ def test_overlap(tmpdir): def check_close(val1, val2): import numpy.testing as npt + return npt.assert_almost_equal(val1, val2, decimal=3) - in1 = example_data('segmentation0.nii.gz') - in2 = example_data('segmentation1.nii.gz') + in1 = example_data("segmentation0.nii.gz") + in2 = example_data("segmentation1.nii.gz") tmpdir.chdir() overlap = Overlap() @@ -36,8 +35,7 @@ def check_close(val1, val2): overlap = Overlap() overlap.inputs.volume1 = in1 overlap.inputs.volume2 = in2 - overlap.inputs.vol_units = 'mm' + overlap.inputs.vol_units = "mm" res = overlap.run() check_close(res.outputs.jaccard, 0.99705) - check_close(res.outputs.roi_voldiff, np.array([0.0063086, -0.0025506, - 0.0])) + check_close(res.outputs.roi_voldiff, np.array([0.0063086, -0.0025506, 0.0])) diff --git a/nipype/algorithms/tests/test_TSNR.py b/nipype/algorithms/tests/test_TSNR.py index 1d192ec056..320bec8ab2 100644 --- a/nipype/algorithms/tests/test_TSNR.py +++ b/nipype/algorithms/tests/test_TSNR.py @@ -7,24 +7,22 @@ import pytest import numpy.testing as npt -import mock +from unittest import mock import nibabel as nb import numpy as np import os -class TestTSNR(): - ''' Note: Tests currently do a poor job of testing functionality ''' +class TestTSNR: + """Note: Tests currently do a poor job of testing functionality""" - in_filenames = { - 'in_file': 'tsnrinfile.nii', - } + in_filenames = {"in_file": "tsnrinfile.nii"} out_filenames = { # default output file names - 'detrended_file': 'detrend.nii.gz', - 'mean_file': 'mean.nii.gz', - 'stddev_file': 'stdev.nii.gz', - 'tsnr_file': 'tsnr.nii.gz' + "detrended_file": "detrend.nii.gz", + "mean_file": "mean.nii.gz", + "stddev_file": "stdev.nii.gz", + "tsnr_file": "tsnr.nii.gz", } @pytest.fixture(autouse=True) @@ -32,78 +30,84 @@ def setup_class(self, tmpdir): # setup temp folder tmpdir.chdir() - utils.save_toy_nii(self.fake_data, self.in_filenames['in_file']) + utils.save_toy_nii(self.fake_data, self.in_filenames["in_file"]) def test_tsnr(self): # run - tsnrresult = TSNR(in_file=self.in_filenames['in_file']).run() + tsnrresult = TSNR(in_file=self.in_filenames["in_file"]).run() # assert self.assert_expected_outputs( - tsnrresult, { - 'mean_file': (2.8, 7.4), - 'stddev_file': (0.8, 2.9), - 'tsnr_file': (1.3, 9.25) - }) + tsnrresult, + { + "mean_file": (2.8, 7.4), + "stddev_file": (0.8, 2.9), + "tsnr_file": (1.3, 9.25), + }, + ) def test_tsnr_withpoly1(self): # run - tsnrresult = TSNR( - in_file=self.in_filenames['in_file'], regress_poly=1).run() + tsnrresult = TSNR(in_file=self.in_filenames["in_file"], regress_poly=1).run() # assert self.assert_expected_outputs_poly( - tsnrresult, { - 'detrended_file': (-0.1, 8.7), - 'mean_file': (2.8, 7.4), - 'stddev_file': (0.75, 2.75), - 'tsnr_file': (1.4, 9.9) - }) + tsnrresult, + { + "detrended_file": (-0.1, 8.7), + "mean_file": (2.8, 7.4), + "stddev_file": (0.75, 2.75), + "tsnr_file": (1.4, 9.9), + }, + ) def test_tsnr_withpoly2(self): # run - tsnrresult = TSNR( - in_file=self.in_filenames['in_file'], 
regress_poly=2).run() + tsnrresult = TSNR(in_file=self.in_filenames["in_file"], regress_poly=2).run() # assert self.assert_expected_outputs_poly( - tsnrresult, { - 'detrended_file': (-0.22, 8.55), - 'mean_file': (2.8, 7.7), - 'stddev_file': (0.21, 2.4), - 'tsnr_file': (1.7, 35.9) - }) + tsnrresult, + { + "detrended_file": (-0.22, 8.55), + "mean_file": (2.8, 7.7), + "stddev_file": (0.21, 2.4), + "tsnr_file": (1.7, 35.9), + }, + ) def test_tsnr_withpoly3(self): # run - tsnrresult = TSNR( - in_file=self.in_filenames['in_file'], regress_poly=3).run() + tsnrresult = TSNR(in_file=self.in_filenames["in_file"], regress_poly=3).run() # assert self.assert_expected_outputs_poly( - tsnrresult, { - 'detrended_file': (1.8, 7.95), - 'mean_file': (2.8, 7.7), - 'stddev_file': (0.1, 1.7), - 'tsnr_file': (2.6, 57.3) - }) - - @mock.patch('warnings.warn') + tsnrresult, + { + "detrended_file": (1.8, 7.95), + "mean_file": (2.8, 7.7), + "stddev_file": (0.1, 1.7), + "tsnr_file": (2.6, 57.3), + }, + ) + + @mock.patch("warnings.warn") def test_warning(self, mock_warn): - ''' test that usage of misc.TSNR trips a warning to use - confounds.TSNR instead ''' + """test that usage of misc.TSNR trips a warning to use + confounds.TSNR instead""" # run - misc.TSNR(in_file=self.in_filenames['in_file']) + misc.TSNR(in_file=self.in_filenames["in_file"]) # assert assert True in [ - args[0].count('confounds') > 0 - for _, args, _ in mock_warn.mock_calls + args[0].count("confounds") > 0 for _, args, _ in mock_warn.mock_calls ] def assert_expected_outputs_poly(self, tsnrresult, expected_ranges): - assert os.path.basename(tsnrresult.outputs.detrended_file) == \ - self.out_filenames['detrended_file'] + assert ( + os.path.basename(tsnrresult.outputs.detrended_file) + == self.out_filenames["detrended_file"] + ) self.assert_expected_outputs(tsnrresult, expected_ranges) def assert_expected_outputs(self, tsnrresult, expected_ranges): @@ -111,12 +115,11 @@ def assert_expected_outputs(self, tsnrresult, expected_ranges): self.assert_unchanged(expected_ranges) def assert_default_outputs(self, outputs): - assert os.path.basename(outputs.mean_file) == \ - self.out_filenames['mean_file'] - assert os.path.basename(outputs.stddev_file) == \ - self.out_filenames['stddev_file'] - assert os.path.basename(outputs.tsnr_file) == \ - self.out_filenames['tsnr_file'] + assert os.path.basename(outputs.mean_file) == self.out_filenames["mean_file"] + assert ( + os.path.basename(outputs.stddev_file) == self.out_filenames["stddev_file"] + ) + assert os.path.basename(outputs.tsnr_file) == self.out_filenames["tsnr_file"] def assert_unchanged(self, expected_ranges): for key, (min_, max_) in expected_ranges.items(): @@ -124,7 +127,10 @@ def assert_unchanged(self, expected_ranges): npt.assert_almost_equal(np.amin(data), min_, decimal=1) npt.assert_almost_equal(np.amax(data), max_, decimal=1) - fake_data = np.array([[[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]], - [[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]], - [[[9, 7, 5, 5, 7], [7, 8, 4, 8, 4]], - [[0, 4, 7, 1, 7], [6, 8, 8, 8, 7]]]]) + fake_data = np.array( + [ + [[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]], [[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]], + [[[9, 7, 5, 5, 7], [7, 8, 4, 8, 4]], [[0, 4, 7, 1, 7], [6, 8, 8, 8, 7]]], + ], + dtype=np.int16, + ) diff --git a/nipype/algorithms/tests/test_auto_ACompCor.py b/nipype/algorithms/tests/test_auto_ACompCor.py index 235d15da9e..814aa71704 100644 --- a/nipype/algorithms/tests/test_auto_ACompCor.py +++ b/nipype/algorithms/tests/test_auto_ACompCor.py @@ -1,33 +1,58 @@ # AUTO-GENERATED by 
tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..confounds import ACompCor def test_ACompCor_inputs(): input_map = dict( - components_file=dict(usedefault=True, ), - failure_mode=dict(usedefault=True, ), + components_file=dict( + usedefault=True, + ), + failure_mode=dict( + usedefault=True, + ), header_prefix=dict(), - high_pass_cutoff=dict(usedefault=True, ), - ignore_initial_volumes=dict(usedefault=True, ), + high_pass_cutoff=dict( + usedefault=True, + ), + ignore_initial_volumes=dict( + usedefault=True, + ), mask_files=dict(), mask_index=dict( - requires=['mask_files'], - xor=['merge_method'], + requires=["mask_files"], + xor=["merge_method"], ), + mask_names=dict(), merge_method=dict( - requires=['mask_files'], - xor=['mask_index'], + requires=["mask_files"], + xor=["mask_index"], + ), + num_components=dict( + xor=["variance_threshold"], + ), + pre_filter=dict( + usedefault=True, + ), + realigned_file=dict( + extensions=None, + mandatory=True, + ), + regress_poly_degree=dict( + usedefault=True, ), - num_components=dict(usedefault=True, ), - pre_filter=dict(usedefault=True, ), - realigned_file=dict(mandatory=True, ), - regress_poly_degree=dict(usedefault=True, ), repetition_time=dict(), - save_pre_filter=dict(), + save_metadata=dict( + usedefault=True, + ), + save_pre_filter=dict( + usedefault=True, + ), use_regress_poly=dict( - deprecated='0.15.0', - new_name='pre_filter', + deprecated="0.15.0", + new_name="pre_filter", + ), + variance_threshold=dict( + xor=["num_components"], ), ) inputs = ACompCor.input_spec() @@ -35,10 +60,19 @@ def test_ACompCor_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ACompCor_outputs(): output_map = dict( - components_file=dict(), - pre_filter_file=dict(), + components_file=dict( + extensions=None, + ), + metadata_file=dict( + extensions=None, + ), + pre_filter_file=dict( + extensions=None, + ), ) outputs = ACompCor.output_spec() diff --git a/nipype/algorithms/tests/test_auto_ActivationCount.py b/nipype/algorithms/tests/test_auto_ActivationCount.py index d6e3ff7165..7df84ee122 100644 --- a/nipype/algorithms/tests/test_auto_ActivationCount.py +++ b/nipype/algorithms/tests/test_auto_ActivationCount.py @@ -1,23 +1,34 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..stats import ActivationCount def test_ActivationCount_inputs(): input_map = dict( - in_files=dict(mandatory=True, ), - threshold=dict(mandatory=True, ), + in_files=dict( + mandatory=True, + ), + threshold=dict( + mandatory=True, + ), ) inputs = ActivationCount.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ActivationCount_outputs(): output_map = dict( - acm_neg=dict(), - acm_pos=dict(), - out_file=dict(), + acm_neg=dict( + extensions=None, + ), + acm_pos=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), ) outputs = ActivationCount.output_spec() diff --git a/nipype/algorithms/tests/test_auto_AddCSVColumn.py b/nipype/algorithms/tests/test_auto_AddCSVColumn.py index feedcf46e8..b76fd46457 100644 --- a/nipype/algorithms/tests/test_auto_AddCSVColumn.py +++ b/nipype/algorithms/tests/test_auto_AddCSVColumn.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import AddCSVColumn @@ 
-7,16 +6,28 @@ def test_AddCSVColumn_inputs(): input_map = dict( extra_column_heading=dict(), extra_field=dict(), - in_file=dict(mandatory=True, ), - out_file=dict(usedefault=True, ), + in_file=dict( + extensions=None, + mandatory=True, + ), + out_file=dict( + extensions=None, + usedefault=True, + ), ) inputs = AddCSVColumn.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AddCSVColumn_outputs(): - output_map = dict(csv_file=dict(), ) + output_map = dict( + csv_file=dict( + extensions=None, + ), + ) outputs = AddCSVColumn.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_AddCSVRow.py b/nipype/algorithms/tests/test_auto_AddCSVRow.py index 4666a147d2..78976f418d 100644 --- a/nipype/algorithms/tests/test_auto_AddCSVRow.py +++ b/nipype/algorithms/tests/test_auto_AddCSVRow.py @@ -1,20 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import AddCSVRow def test_AddCSVRow_inputs(): input_map = dict( - _outputs=dict(usedefault=True, ), - in_file=dict(mandatory=True, ), + _outputs=dict( + usedefault=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), ) inputs = AddCSVRow.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AddCSVRow_outputs(): - output_map = dict(csv_file=dict(), ) + output_map = dict( + csv_file=dict( + extensions=None, + ), + ) outputs = AddCSVRow.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_AddNoise.py b/nipype/algorithms/tests/test_auto_AddNoise.py index 6e0655a93e..5cf92e33f7 100644 --- a/nipype/algorithms/tests/test_auto_AddNoise.py +++ b/nipype/algorithms/tests/test_auto_AddNoise.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import AddNoise @@ -13,18 +12,33 @@ def test_AddNoise_inputs(): mandatory=True, usedefault=True, ), - in_file=dict(mandatory=True, ), - in_mask=dict(), - out_file=dict(), - snr=dict(usedefault=True, ), + in_file=dict( + extensions=None, + mandatory=True, + ), + in_mask=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + snr=dict( + usedefault=True, + ), ) inputs = AddNoise.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AddNoise_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = AddNoise.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_ArtifactDetect.py b/nipype/algorithms/tests/test_auto_ArtifactDetect.py index 85c57b8823..51010aea3a 100644 --- a/nipype/algorithms/tests/test_auto_ArtifactDetect.py +++ b/nipype/algorithms/tests/test_auto_ArtifactDetect.py @@ -1,32 +1,51 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..rapidart import ArtifactDetect def test_ArtifactDetect_inputs(): input_map = dict( - bound_by_brainmask=dict(usedefault=True, ), - global_threshold=dict(usedefault=True, ), - intersect_mask=dict(usedefault=True, ), - mask_file=dict(), + bound_by_brainmask=dict( + usedefault=True, + ), + 
global_threshold=dict( + usedefault=True, + ), + intersect_mask=dict( + usedefault=True, + ), + mask_file=dict( + extensions=None, + ), mask_threshold=dict(), - mask_type=dict(mandatory=True, ), + mask_type=dict( + mandatory=True, + ), norm_threshold=dict( mandatory=True, - xor=['rotation_threshold', 'translation_threshold'], + xor=["rotation_threshold", "translation_threshold"], + ), + parameter_source=dict( + mandatory=True, + ), + plot_type=dict( + usedefault=True, + ), + realigned_files=dict( + mandatory=True, + ), + realignment_parameters=dict( + mandatory=True, ), - parameter_source=dict(mandatory=True, ), - plot_type=dict(usedefault=True, ), - realigned_files=dict(mandatory=True, ), - realignment_parameters=dict(mandatory=True, ), rotation_threshold=dict( mandatory=True, - xor=['norm_threshold'], + xor=["norm_threshold"], + ), + save_plot=dict( + usedefault=True, ), - save_plot=dict(usedefault=True, ), translation_threshold=dict( mandatory=True, - xor=['norm_threshold'], + xor=["norm_threshold"], ), use_differences=dict( maxlen=2, @@ -34,16 +53,20 @@ def test_ArtifactDetect_inputs(): usedefault=True, ), use_norm=dict( - requires=['norm_threshold'], + requires=["norm_threshold"], usedefault=True, ), - zintensity_threshold=dict(mandatory=True, ), + zintensity_threshold=dict( + mandatory=True, + ), ) inputs = ArtifactDetect.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ArtifactDetect_outputs(): output_map = dict( displacement_files=dict(), diff --git a/nipype/algorithms/tests/test_auto_CalculateMedian.py b/nipype/algorithms/tests/test_auto_CalculateMedian.py index 1f9aa6cd4d..ddc8b9814d 100644 --- a/nipype/algorithms/tests/test_auto_CalculateMedian.py +++ b/nipype/algorithms/tests/test_auto_CalculateMedian.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import CalculateMedian @@ -7,15 +6,21 @@ def test_CalculateMedian_inputs(): input_map = dict( in_files=dict(), median_file=dict(), - median_per_file=dict(usedefault=True, ), + median_per_file=dict( + usedefault=True, + ), ) inputs = CalculateMedian.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CalculateMedian_outputs(): - output_map = dict(median_files=dict(), ) + output_map = dict( + median_files=dict(), + ) outputs = CalculateMedian.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py b/nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py index 3dbbd772c8..a67f959176 100644 --- a/nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py +++ b/nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py @@ -1,20 +1,28 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import CalculateNormalizedMoments def test_CalculateNormalizedMoments_inputs(): input_map = dict( - moment=dict(mandatory=True, ), - timeseries_file=dict(mandatory=True, ), + moment=dict( + mandatory=True, + ), + timeseries_file=dict( + extensions=None, + mandatory=True, + ), ) inputs = CalculateNormalizedMoments.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_CalculateNormalizedMoments_outputs(): - output_map = dict(moments=dict(), ) + output_map = dict( + moments=dict(), + ) outputs = CalculateNormalizedMoments.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_ComputeDVARS.py b/nipype/algorithms/tests/test_auto_ComputeDVARS.py index ca263c77ac..c5e1118341 100644 --- a/nipype/algorithms/tests/test_auto_ComputeDVARS.py +++ b/nipype/algorithms/tests/test_auto_ComputeDVARS.py @@ -1,41 +1,85 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..confounds import ComputeDVARS def test_ComputeDVARS_inputs(): input_map = dict( - figdpi=dict(usedefault=True, ), - figformat=dict(usedefault=True, ), - figsize=dict(usedefault=True, ), - in_file=dict(mandatory=True, ), - in_mask=dict(mandatory=True, ), - intensity_normalization=dict(usedefault=True, ), - remove_zerovariance=dict(usedefault=True, ), - save_all=dict(usedefault=True, ), - save_nstd=dict(usedefault=True, ), - save_plot=dict(usedefault=True, ), - save_std=dict(usedefault=True, ), - save_vxstd=dict(usedefault=True, ), + figdpi=dict( + usedefault=True, + ), + figformat=dict( + usedefault=True, + ), + figsize=dict( + usedefault=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + in_mask=dict( + extensions=None, + mandatory=True, + ), + intensity_normalization=dict( + usedefault=True, + ), + remove_zerovariance=dict( + usedefault=True, + ), + save_all=dict( + usedefault=True, + ), + save_nstd=dict( + usedefault=True, + ), + save_plot=dict( + usedefault=True, + ), + save_std=dict( + usedefault=True, + ), + save_vxstd=dict( + usedefault=True, + ), series_tr=dict(), + variance_tol=dict( + usedefault=True, + ), ) inputs = ComputeDVARS.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeDVARS_outputs(): output_map = dict( avg_nstd=dict(), avg_std=dict(), avg_vxstd=dict(), - fig_nstd=dict(), - fig_std=dict(), - fig_vxstd=dict(), - out_all=dict(), - out_nstd=dict(), - out_std=dict(), - out_vxstd=dict(), + fig_nstd=dict( + extensions=None, + ), + fig_std=dict( + extensions=None, + ), + fig_vxstd=dict( + extensions=None, + ), + out_all=dict( + extensions=None, + ), + out_nstd=dict( + extensions=None, + ), + out_std=dict( + extensions=None, + ), + out_vxstd=dict( + extensions=None, + ), ) outputs = ComputeDVARS.output_spec() diff --git a/nipype/algorithms/tests/test_auto_ComputeMeshWarp.py b/nipype/algorithms/tests/test_auto_ComputeMeshWarp.py index 0308653786..639f03770c 100644 --- a/nipype/algorithms/tests/test_auto_ComputeMeshWarp.py +++ b/nipype/algorithms/tests/test_auto_ComputeMeshWarp.py @@ -1,27 +1,48 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..mesh import ComputeMeshWarp def test_ComputeMeshWarp_inputs(): input_map = dict( - metric=dict(usedefault=True, ), - out_file=dict(usedefault=True, ), - out_warp=dict(usedefault=True, ), - surface1=dict(mandatory=True, ), - surface2=dict(mandatory=True, ), - weighting=dict(usedefault=True, ), + metric=dict( + usedefault=True, + ), + out_file=dict( + extensions=None, + usedefault=True, + ), + out_warp=dict( + extensions=None, + usedefault=True, + ), + surface1=dict( + extensions=None, + mandatory=True, + ), + surface2=dict( + extensions=None, + mandatory=True, + ), + weighting=dict( + usedefault=True, + ), ) inputs = ComputeMeshWarp.input_spec() 
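# --- Editor's illustrative sketch (hedged; not part of the patch). Every
# AUTO-GENERATED checkspecs test in this series follows the same pattern:
# declare the expected trait metadata, then compare it field-by-field against
# the live input spec. An equivalent spot-check for two of the ComputeMeshWarp
# entries shown above:
from nipype.algorithms.mesh import ComputeMeshWarp

spec_traits = ComputeMeshWarp.input_spec().traits()
expected = {
    "surface1": {"extensions": None, "mandatory": True},
    "metric": {"usedefault": True},
}
for key, metadata in expected.items():
    for metakey, value in metadata.items():
        # mirrors the generated `assert getattr(inputs.traits()[key], ...)`
        assert getattr(spec_traits[key], metakey) == value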
for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeMeshWarp_outputs(): output_map = dict( distance=dict(), - out_file=dict(), - out_warp=dict(), + out_file=dict( + extensions=None, + ), + out_warp=dict( + extensions=None, + ), ) outputs = ComputeMeshWarp.output_spec() diff --git a/nipype/algorithms/tests/test_auto_CreateNifti.py b/nipype/algorithms/tests/test_auto_CreateNifti.py index f8bf8a405f..f5c5c4a2f5 100644 --- a/nipype/algorithms/tests/test_auto_CreateNifti.py +++ b/nipype/algorithms/tests/test_auto_CreateNifti.py @@ -1,21 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import CreateNifti def test_CreateNifti_inputs(): input_map = dict( affine=dict(), - data_file=dict(mandatory=True, ), - header_file=dict(mandatory=True, ), + data_file=dict( + extensions=None, + mandatory=True, + ), + header_file=dict( + extensions=None, + mandatory=True, + ), ) inputs = CreateNifti.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CreateNifti_outputs(): - output_map = dict(nifti_file=dict(), ) + output_map = dict( + nifti_file=dict( + extensions=None, + ), + ) outputs = CreateNifti.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_Distance.py b/nipype/algorithms/tests/test_auto_Distance.py index 2c5d098d73..46e48342c4 100644 --- a/nipype/algorithms/tests/test_auto_Distance.py +++ b/nipype/algorithms/tests/test_auto_Distance.py @@ -1,24 +1,37 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import Distance def test_Distance_inputs(): input_map = dict( - mask_volume=dict(), - method=dict(usedefault=True, ), - volume1=dict(mandatory=True, ), - volume2=dict(mandatory=True, ), + mask_volume=dict( + extensions=None, + ), + method=dict( + usedefault=True, + ), + volume1=dict( + extensions=None, + mandatory=True, + ), + volume2=dict( + extensions=None, + mandatory=True, + ), ) inputs = Distance.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Distance_outputs(): output_map = dict( distance=dict(), - histogram=dict(), + histogram=dict( + extensions=None, + ), point1=dict(), point2=dict(), ) diff --git a/nipype/algorithms/tests/test_auto_FramewiseDisplacement.py b/nipype/algorithms/tests/test_auto_FramewiseDisplacement.py index 685dec61e8..1308b4d97d 100644 --- a/nipype/algorithms/tests/test_auto_FramewiseDisplacement.py +++ b/nipype/algorithms/tests/test_auto_FramewiseDisplacement.py @@ -1,19 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..confounds import FramewiseDisplacement def test_FramewiseDisplacement_inputs(): input_map = dict( - figdpi=dict(usedefault=True, ), - figsize=dict(usedefault=True, ), - in_file=dict(mandatory=True, ), - normalize=dict(usedefault=True, ), - out_figure=dict(usedefault=True, ), - out_file=dict(usedefault=True, ), - parameter_source=dict(mandatory=True, ), - radius=dict(usedefault=True, ), - save_plot=dict(usedefault=True, ), + figdpi=dict( + usedefault=True, + ), + figsize=dict( + usedefault=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + 
normalize=dict( + usedefault=True, + ), + out_figure=dict( + extensions=None, + usedefault=True, + ), + out_file=dict( + extensions=None, + usedefault=True, + ), + parameter_source=dict( + mandatory=True, + ), + radius=dict( + usedefault=True, + ), + save_plot=dict( + usedefault=True, + ), series_tr=dict(), ) inputs = FramewiseDisplacement.input_spec() @@ -21,11 +41,17 @@ def test_FramewiseDisplacement_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FramewiseDisplacement_outputs(): output_map = dict( fd_average=dict(), - out_figure=dict(), - out_file=dict(), + out_figure=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), ) outputs = FramewiseDisplacement.output_spec() diff --git a/nipype/algorithms/tests/test_auto_FuzzyOverlap.py b/nipype/algorithms/tests/test_auto_FuzzyOverlap.py index e9e28aaa44..e8a7fe5ef1 100644 --- a/nipype/algorithms/tests/test_auto_FuzzyOverlap.py +++ b/nipype/algorithms/tests/test_auto_FuzzyOverlap.py @@ -1,21 +1,33 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import FuzzyOverlap def test_FuzzyOverlap_inputs(): input_map = dict( - in_mask=dict(), - in_ref=dict(mandatory=True, ), - in_tst=dict(mandatory=True, ), - out_file=dict(usedefault=True, ), - weighting=dict(usedefault=True, ), + in_mask=dict( + extensions=None, + ), + in_ref=dict( + mandatory=True, + ), + in_tst=dict( + mandatory=True, + ), + out_file=dict( + extensions=None, + usedefault=True, + ), + weighting=dict( + usedefault=True, + ), ) inputs = FuzzyOverlap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FuzzyOverlap_outputs(): output_map = dict( class_fdi=dict(), diff --git a/nipype/algorithms/tests/test_auto_Gunzip.py b/nipype/algorithms/tests/test_auto_Gunzip.py index f12e1f9b45..2d1b4a4beb 100644 --- a/nipype/algorithms/tests/test_auto_Gunzip.py +++ b/nipype/algorithms/tests/test_auto_Gunzip.py @@ -1,17 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import Gunzip def test_Gunzip_inputs(): - input_map = dict(in_file=dict(mandatory=True, ), ) + input_map = dict( + in_file=dict( + extensions=None, + mandatory=True, + ), + mode=dict( + usedefault=True, + ), + ) inputs = Gunzip.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Gunzip_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Gunzip.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_Gzip.py b/nipype/algorithms/tests/test_auto_Gzip.py new file mode 100644 index 0000000000..1503f92790 --- /dev/null +++ b/nipype/algorithms/tests/test_auto_Gzip.py @@ -0,0 +1,32 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..misc import Gzip + + +def test_Gzip_inputs(): + input_map = dict( + in_file=dict( + extensions=None, + mandatory=True, + ), + mode=dict( + usedefault=True, + ), + ) + inputs = Gzip.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_Gzip_outputs(): + 
output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = Gzip.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/algorithms/tests/test_auto_ICC.py b/nipype/algorithms/tests/test_auto_ICC.py index 1a4a2b1517..4a2389202c 100644 --- a/nipype/algorithms/tests/test_auto_ICC.py +++ b/nipype/algorithms/tests/test_auto_ICC.py @@ -1,23 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..icc import ICC def test_ICC_inputs(): input_map = dict( - mask=dict(mandatory=True, ), - subjects_sessions=dict(mandatory=True, ), + mask=dict( + extensions=None, + mandatory=True, + ), + subjects_sessions=dict( + mandatory=True, + ), ) inputs = ICC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ICC_outputs(): output_map = dict( - icc_map=dict(), - session_var_map=dict(), - subject_var_map=dict(), + icc_map=dict( + extensions=None, + ), + session_var_map=dict( + extensions=None, + ), + subject_var_map=dict( + extensions=None, + ), ) outputs = ICC.output_spec() diff --git a/nipype/algorithms/tests/test_auto_Matlab2CSV.py b/nipype/algorithms/tests/test_auto_Matlab2CSV.py index fcc1648bf9..42acbd514a 100644 --- a/nipype/algorithms/tests/test_auto_Matlab2CSV.py +++ b/nipype/algorithms/tests/test_auto_Matlab2CSV.py @@ -1,20 +1,28 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import Matlab2CSV def test_Matlab2CSV_inputs(): input_map = dict( - in_file=dict(mandatory=True, ), - reshape_matrix=dict(usedefault=True, ), + in_file=dict( + extensions=None, + mandatory=True, + ), + reshape_matrix=dict( + usedefault=True, + ), ) inputs = Matlab2CSV.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Matlab2CSV_outputs(): - output_map = dict(csv_files=dict(), ) + output_map = dict( + csv_files=dict(), + ) outputs = Matlab2CSV.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_MergeCSVFiles.py b/nipype/algorithms/tests/test_auto_MergeCSVFiles.py index fd882e850a..bb7e9ed65a 100644 --- a/nipype/algorithms/tests/test_auto_MergeCSVFiles.py +++ b/nipype/algorithms/tests/test_auto_MergeCSVFiles.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import MergeCSVFiles @@ -8,9 +7,16 @@ def test_MergeCSVFiles_inputs(): column_headings=dict(), extra_column_heading=dict(), extra_field=dict(), - in_files=dict(mandatory=True, ), - out_file=dict(usedefault=True, ), - row_heading_title=dict(usedefault=True, ), + in_files=dict( + mandatory=True, + ), + out_file=dict( + extensions=None, + usedefault=True, + ), + row_heading_title=dict( + usedefault=True, + ), row_headings=dict(), ) inputs = MergeCSVFiles.input_spec() @@ -18,8 +24,14 @@ def test_MergeCSVFiles_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MergeCSVFiles_outputs(): - output_map = dict(csv_file=dict(), ) + output_map = dict( + csv_file=dict( + extensions=None, + ), + ) outputs = MergeCSVFiles.output_spec() for key, 
metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_MergeROIs.py b/nipype/algorithms/tests/test_auto_MergeROIs.py index 01b2b097a8..c43a33b686 100644 --- a/nipype/algorithms/tests/test_auto_MergeROIs.py +++ b/nipype/algorithms/tests/test_auto_MergeROIs.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import MergeROIs @@ -7,15 +6,23 @@ def test_MergeROIs_inputs(): input_map = dict( in_files=dict(), in_index=dict(), - in_reference=dict(), + in_reference=dict( + extensions=None, + ), ) inputs = MergeROIs.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MergeROIs_outputs(): - output_map = dict(merged_file=dict(), ) + output_map = dict( + merged_file=dict( + extensions=None, + ), + ) outputs = MergeROIs.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_MeshWarpMaths.py b/nipype/algorithms/tests/test_auto_MeshWarpMaths.py index f89b16017b..a4295b8f46 100644 --- a/nipype/algorithms/tests/test_auto_MeshWarpMaths.py +++ b/nipype/algorithms/tests/test_auto_MeshWarpMaths.py @@ -1,29 +1,45 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..mesh import MeshWarpMaths def test_MeshWarpMaths_inputs(): input_map = dict( float_trait=dict(), - in_surf=dict(mandatory=True, ), - operation=dict(usedefault=True, ), + in_surf=dict( + extensions=None, + mandatory=True, + ), + operation=dict( + usedefault=True, + ), operator=dict( mandatory=True, usedefault=True, ), - out_file=dict(usedefault=True, ), - out_warp=dict(usedefault=True, ), + out_file=dict( + extensions=None, + usedefault=True, + ), + out_warp=dict( + extensions=None, + usedefault=True, + ), ) inputs = MeshWarpMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MeshWarpMaths_outputs(): output_map = dict( - out_file=dict(), - out_warp=dict(), + out_file=dict( + extensions=None, + ), + out_warp=dict( + extensions=None, + ), ) outputs = MeshWarpMaths.output_spec() diff --git a/nipype/algorithms/tests/test_auto_ModifyAffine.py b/nipype/algorithms/tests/test_auto_ModifyAffine.py index a0c4150a98..6592e28690 100644 --- a/nipype/algorithms/tests/test_auto_ModifyAffine.py +++ b/nipype/algorithms/tests/test_auto_ModifyAffine.py @@ -1,20 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import ModifyAffine def test_ModifyAffine_inputs(): input_map = dict( - transformation_matrix=dict(usedefault=True, ), - volumes=dict(mandatory=True, ), + transformation_matrix=dict( + usedefault=True, + ), + volumes=dict( + mandatory=True, + ), ) inputs = ModifyAffine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ModifyAffine_outputs(): - output_map = dict(transformed_volumes=dict(), ) + output_map = dict( + transformed_volumes=dict(), + ) outputs = ModifyAffine.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_NonSteadyStateDetector.py b/nipype/algorithms/tests/test_auto_NonSteadyStateDetector.py index b86fe3df03..9e14e00595 100644 --- 
a/nipype/algorithms/tests/test_auto_NonSteadyStateDetector.py +++ b/nipype/algorithms/tests/test_auto_NonSteadyStateDetector.py @@ -1,17 +1,25 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..confounds import NonSteadyStateDetector def test_NonSteadyStateDetector_inputs(): - input_map = dict(in_file=dict(mandatory=True, ), ) + input_map = dict( + in_file=dict( + extensions=None, + mandatory=True, + ), + ) inputs = NonSteadyStateDetector.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NonSteadyStateDetector_outputs(): - output_map = dict(n_volumes_to_discard=dict(), ) + output_map = dict( + n_volumes_to_discard=dict(), + ) outputs = NonSteadyStateDetector.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_NormalizeProbabilityMapSet.py b/nipype/algorithms/tests/test_auto_NormalizeProbabilityMapSet.py index 4c9a5584d0..be18979a85 100644 --- a/nipype/algorithms/tests/test_auto_NormalizeProbabilityMapSet.py +++ b/nipype/algorithms/tests/test_auto_NormalizeProbabilityMapSet.py @@ -1,20 +1,25 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import NormalizeProbabilityMapSet def test_NormalizeProbabilityMapSet_inputs(): input_map = dict( in_files=dict(), - in_mask=dict(), + in_mask=dict( + extensions=None, + ), ) inputs = NormalizeProbabilityMapSet.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NormalizeProbabilityMapSet_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict( + out_files=dict(), + ) outputs = NormalizeProbabilityMapSet.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_P2PDistance.py b/nipype/algorithms/tests/test_auto_P2PDistance.py index 9948e3675e..a5623353ec 100644 --- a/nipype/algorithms/tests/test_auto_P2PDistance.py +++ b/nipype/algorithms/tests/test_auto_P2PDistance.py @@ -1,27 +1,48 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..mesh import P2PDistance def test_P2PDistance_inputs(): input_map = dict( - metric=dict(usedefault=True, ), - out_file=dict(usedefault=True, ), - out_warp=dict(usedefault=True, ), - surface1=dict(mandatory=True, ), - surface2=dict(mandatory=True, ), - weighting=dict(usedefault=True, ), + metric=dict( + usedefault=True, + ), + out_file=dict( + extensions=None, + usedefault=True, + ), + out_warp=dict( + extensions=None, + usedefault=True, + ), + surface1=dict( + extensions=None, + mandatory=True, + ), + surface2=dict( + extensions=None, + mandatory=True, + ), + weighting=dict( + usedefault=True, + ), ) inputs = P2PDistance.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_P2PDistance_outputs(): output_map = dict( distance=dict(), - out_file=dict(), - out_warp=dict(), + out_file=dict( + extensions=None, + ), + out_warp=dict( + extensions=None, + ), ) outputs = P2PDistance.output_spec() diff --git a/nipype/algorithms/tests/test_auto_PickAtlas.py b/nipype/algorithms/tests/test_auto_PickAtlas.py index 3b15c302e5..2a29ca8d23 100644 --- a/nipype/algorithms/tests/test_auto_PickAtlas.py +++ 
b/nipype/algorithms/tests/test_auto_PickAtlas.py @@ -1,23 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import PickAtlas def test_PickAtlas_inputs(): input_map = dict( - atlas=dict(mandatory=True, ), - dilation_size=dict(usedefault=True, ), - hemi=dict(usedefault=True, ), - labels=dict(mandatory=True, ), - output_file=dict(), + atlas=dict( + extensions=None, + mandatory=True, + ), + dilation_size=dict( + usedefault=True, + ), + hemi=dict( + usedefault=True, + ), + labels=dict( + mandatory=True, + ), + output_file=dict( + extensions=None, + ), ) inputs = PickAtlas.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PickAtlas_outputs(): - output_map = dict(mask_file=dict(), ) + output_map = dict( + mask_file=dict( + extensions=None, + ), + ) outputs = PickAtlas.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_Similarity.py b/nipype/algorithms/tests/test_auto_Similarity.py index b2ad79b5eb..a5e5f583d5 100644 --- a/nipype/algorithms/tests/test_auto_Similarity.py +++ b/nipype/algorithms/tests/test_auto_Similarity.py @@ -1,23 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..metrics import Similarity def test_Similarity_inputs(): input_map = dict( - mask1=dict(), - mask2=dict(), - metric=dict(usedefault=True, ), - volume1=dict(mandatory=True, ), - volume2=dict(mandatory=True, ), + mask1=dict( + extensions=None, + ), + mask2=dict( + extensions=None, + ), + metric=dict( + usedefault=True, + ), + volume1=dict( + extensions=None, + mandatory=True, + ), + volume2=dict( + extensions=None, + mandatory=True, + ), ) inputs = Similarity.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Similarity_outputs(): - output_map = dict(similarity=dict(), ) + output_map = dict( + similarity=dict(), + ) outputs = Similarity.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_SimpleThreshold.py b/nipype/algorithms/tests/test_auto_SimpleThreshold.py index 4e34d86799..ab7141f0de 100644 --- a/nipype/algorithms/tests/test_auto_SimpleThreshold.py +++ b/nipype/algorithms/tests/test_auto_SimpleThreshold.py @@ -1,20 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import SimpleThreshold def test_SimpleThreshold_inputs(): input_map = dict( - threshold=dict(mandatory=True, ), - volumes=dict(mandatory=True, ), + threshold=dict( + mandatory=True, + ), + volumes=dict( + mandatory=True, + ), ) inputs = SimpleThreshold.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SimpleThreshold_outputs(): - output_map = dict(thresholded_volumes=dict(), ) + output_map = dict( + thresholded_volumes=dict(), + ) outputs = SimpleThreshold.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_SpecifyModel.py b/nipype/algorithms/tests/test_auto_SpecifyModel.py index 452a048764..15d9e4994e 100644 --- a/nipype/algorithms/tests/test_auto_SpecifyModel.py +++ b/nipype/algorithms/tests/test_auto_SpecifyModel.py @@ -1,36 +1,59 @@ # 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..modelgen import SpecifyModel def test_SpecifyModel_inputs(): input_map = dict( + bids_amplitude_column=dict(), + bids_condition_column=dict( + usedefault=True, + ), + bids_event_file=dict( + mandatory=True, + xor=["subject_info", "event_files", "bids_event_file"], + ), event_files=dict( mandatory=True, - xor=['subject_info', 'event_files'], + xor=["subject_info", "event_files", "bids_event_file"], ), functional_runs=dict( copyfile=False, mandatory=True, ), - high_pass_filter_cutoff=dict(mandatory=True, ), - input_units=dict(mandatory=True, ), - outlier_files=dict(copyfile=False, ), - parameter_source=dict(usedefault=True, ), - realignment_parameters=dict(copyfile=False, ), + high_pass_filter_cutoff=dict( + mandatory=True, + ), + input_units=dict( + mandatory=True, + ), + outlier_files=dict( + copyfile=False, + ), + parameter_source=dict( + usedefault=True, + ), + realignment_parameters=dict( + copyfile=False, + ), subject_info=dict( mandatory=True, - xor=['subject_info', 'event_files'], + xor=["subject_info", "event_files", "bids_event_file"], + ), + time_repetition=dict( + mandatory=True, ), - time_repetition=dict(mandatory=True, ), ) inputs = SpecifyModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SpecifyModel_outputs(): - output_map = dict(session_info=dict(), ) + output_map = dict( + session_info=dict(), + ) outputs = SpecifyModel.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_SpecifySPMModel.py b/nipype/algorithms/tests/test_auto_SpecifySPMModel.py index 1f3ec7058d..64bb206359 100644 --- a/nipype/algorithms/tests/test_auto_SpecifySPMModel.py +++ b/nipype/algorithms/tests/test_auto_SpecifySPMModel.py @@ -1,38 +1,65 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..modelgen import SpecifySPMModel def test_SpecifySPMModel_inputs(): input_map = dict( - concatenate_runs=dict(usedefault=True, ), + bids_amplitude_column=dict(), + bids_condition_column=dict( + usedefault=True, + ), + bids_event_file=dict( + mandatory=True, + xor=["subject_info", "event_files", "bids_event_file"], + ), + concatenate_runs=dict( + usedefault=True, + ), event_files=dict( mandatory=True, - xor=['subject_info', 'event_files'], + xor=["subject_info", "event_files", "bids_event_file"], ), functional_runs=dict( copyfile=False, mandatory=True, ), - high_pass_filter_cutoff=dict(mandatory=True, ), - input_units=dict(mandatory=True, ), - outlier_files=dict(copyfile=False, ), - output_units=dict(usedefault=True, ), - parameter_source=dict(usedefault=True, ), - realignment_parameters=dict(copyfile=False, ), + high_pass_filter_cutoff=dict( + mandatory=True, + ), + input_units=dict( + mandatory=True, + ), + outlier_files=dict( + copyfile=False, + ), + output_units=dict( + usedefault=True, + ), + parameter_source=dict( + usedefault=True, + ), + realignment_parameters=dict( + copyfile=False, + ), subject_info=dict( mandatory=True, - xor=['subject_info', 'event_files'], + xor=["subject_info", "event_files", "bids_event_file"], + ), + time_repetition=dict( + mandatory=True, ), - time_repetition=dict(mandatory=True, ), ) inputs = SpecifySPMModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], 
metakey) == value + + def test_SpecifySPMModel_outputs(): - output_map = dict(session_info=dict(), ) + output_map = dict( + session_info=dict(), + ) outputs = SpecifySPMModel.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_SpecifySparseModel.py b/nipype/algorithms/tests/test_auto_SpecifySparseModel.py index 93fc035fc2..cac4ce5770 100644 --- a/nipype/algorithms/tests/test_auto_SpecifySparseModel.py +++ b/nipype/algorithms/tests/test_auto_SpecifySparseModel.py @@ -1,47 +1,84 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..modelgen import SpecifySparseModel def test_SpecifySparseModel_inputs(): input_map = dict( + bids_amplitude_column=dict(), + bids_condition_column=dict( + usedefault=True, + ), + bids_event_file=dict( + mandatory=True, + xor=["subject_info", "event_files", "bids_event_file"], + ), event_files=dict( mandatory=True, - xor=['subject_info', 'event_files'], + xor=["subject_info", "event_files", "bids_event_file"], ), functional_runs=dict( copyfile=False, mandatory=True, ), - high_pass_filter_cutoff=dict(mandatory=True, ), - input_units=dict(mandatory=True, ), + high_pass_filter_cutoff=dict( + mandatory=True, + ), + input_units=dict( + mandatory=True, + ), model_hrf=dict(), - outlier_files=dict(copyfile=False, ), - parameter_source=dict(usedefault=True, ), - realignment_parameters=dict(copyfile=False, ), + outlier_files=dict( + copyfile=False, + ), + parameter_source=dict( + usedefault=True, + ), + realignment_parameters=dict( + copyfile=False, + ), save_plot=dict(), - scale_regressors=dict(usedefault=True, ), - scan_onset=dict(usedefault=True, ), - stimuli_as_impulses=dict(usedefault=True, ), + scale_regressors=dict( + usedefault=True, + ), + scan_onset=dict( + usedefault=True, + ), + stimuli_as_impulses=dict( + usedefault=True, + ), subject_info=dict( mandatory=True, - xor=['subject_info', 'event_files'], + xor=["subject_info", "event_files", "bids_event_file"], + ), + time_acquisition=dict( + mandatory=True, + ), + time_repetition=dict( + mandatory=True, + ), + use_temporal_deriv=dict( + requires=["model_hrf"], + ), + volumes_in_cluster=dict( + usedefault=True, ), - time_acquisition=dict(mandatory=True, ), - time_repetition=dict(mandatory=True, ), - use_temporal_deriv=dict(requires=['model_hrf'], ), - volumes_in_cluster=dict(usedefault=True, ), ) inputs = SpecifySparseModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SpecifySparseModel_outputs(): output_map = dict( session_info=dict(), - sparse_png_file=dict(), - sparse_svg_file=dict(), + sparse_png_file=dict( + extensions=None, + ), + sparse_svg_file=dict( + extensions=None, + ), ) outputs = SpecifySparseModel.output_spec() diff --git a/nipype/algorithms/tests/test_auto_SplitROIs.py b/nipype/algorithms/tests/test_auto_SplitROIs.py index 963926666d..c9eec86058 100644 --- a/nipype/algorithms/tests/test_auto_SplitROIs.py +++ b/nipype/algorithms/tests/test_auto_SplitROIs.py @@ -1,12 +1,16 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..misc import SplitROIs def test_SplitROIs_inputs(): input_map = dict( - in_file=dict(mandatory=True, ), - in_mask=dict(), + in_file=dict( + extensions=None, + mandatory=True, + ), + in_mask=dict( + extensions=None, + ), roi_size=dict(), ) inputs = SplitROIs.input_spec() @@ -14,6 +18,8 @@ def 
test_SplitROIs_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SplitROIs_outputs(): output_map = dict( out_files=dict(), diff --git a/nipype/algorithms/tests/test_auto_StimulusCorrelation.py b/nipype/algorithms/tests/test_auto_StimulusCorrelation.py index 8c7ef276d9..19cec418c4 100644 --- a/nipype/algorithms/tests/test_auto_StimulusCorrelation.py +++ b/nipype/algorithms/tests/test_auto_StimulusCorrelation.py @@ -1,22 +1,34 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..rapidart import StimulusCorrelation def test_StimulusCorrelation_inputs(): input_map = dict( - concatenated_design=dict(mandatory=True, ), - intensity_values=dict(mandatory=True, ), - realignment_parameters=dict(mandatory=True, ), - spm_mat_file=dict(mandatory=True, ), + concatenated_design=dict( + mandatory=True, + ), + intensity_values=dict( + mandatory=True, + ), + realignment_parameters=dict( + mandatory=True, + ), + spm_mat_file=dict( + extensions=None, + mandatory=True, + ), ) inputs = StimulusCorrelation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_StimulusCorrelation_outputs(): - output_map = dict(stimcorr_files=dict(), ) + output_map = dict( + stimcorr_files=dict(), + ) outputs = StimulusCorrelation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_TCompCor.py b/nipype/algorithms/tests/test_auto_TCompCor.py index 59a5b84f76..0f802cc92e 100644 --- a/nipype/algorithms/tests/test_auto_TCompCor.py +++ b/nipype/algorithms/tests/test_auto_TCompCor.py @@ -1,34 +1,61 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..confounds import TCompCor def test_TCompCor_inputs(): input_map = dict( - components_file=dict(usedefault=True, ), - failure_mode=dict(usedefault=True, ), + components_file=dict( + usedefault=True, + ), + failure_mode=dict( + usedefault=True, + ), header_prefix=dict(), - high_pass_cutoff=dict(usedefault=True, ), - ignore_initial_volumes=dict(usedefault=True, ), + high_pass_cutoff=dict( + usedefault=True, + ), + ignore_initial_volumes=dict( + usedefault=True, + ), mask_files=dict(), mask_index=dict( - requires=['mask_files'], - xor=['merge_method'], + requires=["mask_files"], + xor=["merge_method"], ), + mask_names=dict(), merge_method=dict( - requires=['mask_files'], - xor=['mask_index'], - ), - num_components=dict(usedefault=True, ), - percentile_threshold=dict(usedefault=True, ), - pre_filter=dict(usedefault=True, ), - realigned_file=dict(mandatory=True, ), - regress_poly_degree=dict(usedefault=True, ), + requires=["mask_files"], + xor=["mask_index"], + ), + num_components=dict( + xor=["variance_threshold"], + ), + percentile_threshold=dict( + usedefault=True, + ), + pre_filter=dict( + usedefault=True, + ), + realigned_file=dict( + extensions=None, + mandatory=True, + ), + regress_poly_degree=dict( + usedefault=True, + ), repetition_time=dict(), - save_pre_filter=dict(), + save_metadata=dict( + usedefault=True, + ), + save_pre_filter=dict( + usedefault=True, + ), use_regress_poly=dict( - deprecated='0.15.0', - new_name='pre_filter', + deprecated="0.15.0", + new_name="pre_filter", + ), + variance_threshold=dict( + xor=["num_components"], ), ) inputs = TCompCor.input_spec() @@ -36,11 +63,20 @@ def 
test_TCompCor_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCompCor_outputs(): output_map = dict( - components_file=dict(), + components_file=dict( + extensions=None, + ), high_variance_masks=dict(), - pre_filter_file=dict(), + metadata_file=dict( + extensions=None, + ), + pre_filter_file=dict( + extensions=None, + ), ) outputs = TCompCor.output_spec() diff --git a/nipype/algorithms/tests/test_auto_TVTKBaseInterface.py b/nipype/algorithms/tests/test_auto_TVTKBaseInterface.py index 9fc2d17aba..d9b7309778 100644 --- a/nipype/algorithms/tests/test_auto_TVTKBaseInterface.py +++ b/nipype/algorithms/tests/test_auto_TVTKBaseInterface.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..mesh import TVTKBaseInterface diff --git a/nipype/algorithms/tests/test_auto_WarpPoints.py b/nipype/algorithms/tests/test_auto_WarpPoints.py index b6965065a2..fc72866d83 100644 --- a/nipype/algorithms/tests/test_auto_WarpPoints.py +++ b/nipype/algorithms/tests/test_auto_WarpPoints.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..mesh import WarpPoints @@ -10,21 +9,34 @@ def test_WarpPoints_inputs(): usedefault=True, ), out_points=dict( + extensions=None, keep_extension=True, - name_source='points', - name_template='%s_warped', - output_name='out_points', + name_source="points", + name_template="%s_warped", + output_name="out_points", + ), + points=dict( + extensions=None, + mandatory=True, + ), + warp=dict( + extensions=None, + mandatory=True, ), - points=dict(mandatory=True, ), - warp=dict(mandatory=True, ), ) inputs = WarpPoints.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WarpPoints_outputs(): - output_map = dict(out_points=dict(), ) + output_map = dict( + out_points=dict( + extensions=None, + ), + ) outputs = WarpPoints.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_confounds.py b/nipype/algorithms/tests/test_confounds.py index 2c601374ab..b3fb198707 100644 --- a/nipype/algorithms/tests/test_confounds.py +++ b/nipype/algorithms/tests/test_confounds.py @@ -1,18 +1,14 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- -import os - -from io import open import pytest from nipype.testing import example_data -from nipype.algorithms.confounds import FramewiseDisplacement, ComputeDVARS, \ - is_outlier +from nipype.algorithms.confounds import FramewiseDisplacement, ComputeDVARS, is_outlier import numpy as np nonitime = True try: import nitime + nonitime = False except ImportError: pass @@ -20,31 +16,34 @@ def test_fd(tmpdir): tempdir = tmpdir.strpath - ground_truth = np.loadtxt(example_data('fsl_motion_outliers_fd.txt')) + ground_truth = np.loadtxt(example_data("fsl_motion_outliers_fd.txt")) fdisplacement = FramewiseDisplacement( - in_file=example_data('fsl_mcflirt_movpar.txt'), - out_file=tempdir + '/fd.txt', - parameter_source="FSL") + in_file=example_data("fsl_mcflirt_movpar.txt"), + out_file=tempdir + "/fd.txt", + parameter_source="FSL", + ) res = fdisplacement.run() with open(res.outputs.out_file) as all_lines: for line in all_lines: - assert 'FramewiseDisplacement' in line + assert "FramewiseDisplacement" in line break assert np.allclose( - ground_truth, 
np.loadtxt(res.outputs.out_file, skiprows=1), atol=.16) + ground_truth, np.loadtxt(res.outputs.out_file, skiprows=1), atol=0.16 + ) assert np.abs(ground_truth.mean() - res.outputs.fd_average) < 1e-2 @pytest.mark.skipif(nonitime, reason="nitime is not installed") def test_dvars(tmpdir): - ground_truth = np.loadtxt(example_data('ds003_sub-01_mc.DVARS')) + ground_truth = np.loadtxt(example_data("ds003_sub-01_mc.DVARS")) dvars = ComputeDVARS( - in_file=example_data('ds003_sub-01_mc.nii.gz'), - in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'), + in_file=example_data("ds003_sub-01_mc.nii.gz"), + in_mask=example_data("ds003_sub-01_mc_brainmask.nii.gz"), save_all=True, - intensity_normalization=0) + intensity_normalization=0, + ) tmpdir.chdir() res = dvars.run() @@ -56,9 +55,10 @@ def test_dvars(tmpdir): assert (np.abs(dv1[:, 2] - ground_truth[:, 2]).sum() / len(dv1)) < 0.05 dvars = ComputeDVARS( - in_file=example_data('ds003_sub-01_mc.nii.gz'), - in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'), - save_all=True) + in_file=example_data("ds003_sub-01_mc.nii.gz"), + in_mask=example_data("ds003_sub-01_mc_brainmask.nii.gz"), + save_all=True, + ) res = dvars.run() dv1 = np.loadtxt(res.outputs.out_all, skiprows=1) diff --git a/nipype/algorithms/tests/test_icc_anova.py b/nipype/algorithms/tests/test_icc_anova.py index ef19b7f410..34e8c5c3e9 100644 --- a/nipype/algorithms/tests/test_icc_anova.py +++ b/nipype/algorithms/tests/test_icc_anova.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -from __future__ import division import numpy as np from nipype.algorithms.icc import ICC_rep_anova @@ -8,8 +6,16 @@ def test_ICC_rep_anova(): # see table 2 in P. E. Shrout & Joseph L. Fleiss (1979). "Intraclass # Correlations: Uses in Assessing Rater Reliability". Psychological # Bulletin 86 (2): 420-428 - Y = np.array([[9, 2, 5, 8], [6, 1, 3, 2], [8, 4, 6, 8], [7, 1, 2, 6], - [10, 5, 6, 9], [6, 2, 4, 7]]) + Y = np.array( + [ + [9, 2, 5, 8], + [6, 1, 3, 2], + [8, 4, 6, 8], + [7, 1, 2, 6], + [10, 5, 6, 9], + [6, 2, 4, 7], + ] + ) icc, r_var, e_var, _, dfc, dfe = ICC_rep_anova(Y) # see table 4 diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index a08a5a97c3..f626cc6c0c 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -1,12 +1,8 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -import os import pytest -import nipype.testing as npt from nipype.testing import example_data import numpy as np from nipype.algorithms import mesh as m @@ -17,15 +13,15 @@ def test_ident_distances(tmpdir): tmpdir.chdir() - in_surf = example_data('surf01.vtk') + in_surf = example_data("surf01.vtk") dist_ident = m.ComputeMeshWarp() dist_ident.inputs.surface1 = in_surf dist_ident.inputs.surface2 = in_surf - dist_ident.inputs.out_file = tmpdir.join('distance.npy').strpath + dist_ident.inputs.out_file = tmpdir.join("distance.npy").strpath res = dist_ident.run() assert res.outputs.distance == 0.0 - dist_ident.inputs.weighting = 'area' + dist_ident.inputs.weighting = "area" res = dist_ident.run() assert res.outputs.distance == 0.0 @@ -34,8 +30,8 @@ def test_ident_distances(tmpdir): def test_trans_distances(tmpdir): from ...interfaces.vtkbase import tvtk - in_surf = example_data('surf01.vtk') - warped_surf = tmpdir.join('warped.vtk').strpath + in_surf = example_data("surf01.vtk") + warped_surf = tmpdir.join("warped.vtk").strpath inc = 
np.array([0.7, 0.3, -0.2]) @@ -51,10 +47,10 @@ def test_trans_distances(tmpdir): dist = m.ComputeMeshWarp() dist.inputs.surface1 = in_surf dist.inputs.surface2 = warped_surf - dist.inputs.out_file = tmpdir.join('distance.npy').strpath + dist.inputs.out_file = tmpdir.join("distance.npy").strpath res = dist.run() assert np.allclose(res.outputs.distance, np.linalg.norm(inc), 4) - dist.inputs.weighting = 'area' + dist.inputs.weighting = "area" res = dist.run() assert np.allclose(res.outputs.distance, np.linalg.norm(inc), 4) diff --git a/nipype/algorithms/tests/test_metrics.py b/nipype/algorithms/tests/test_metrics.py index fb876b3c72..3652fc2ce5 100644 --- a/nipype/algorithms/tests/test_metrics.py +++ b/nipype/algorithms/tests/test_metrics.py @@ -11,24 +11,21 @@ def test_fuzzy_overlap(tmpdir): tmpdir.chdir() # Tests with tissue probability maps - in_mask = example_data('tpms_msk.nii.gz') - tpms = [example_data('tpm_%02d.nii.gz' % i) for i in range(3)] + in_mask = example_data("tpms_msk.nii.gz") + tpms = [example_data("tpm_%02d.nii.gz" % i) for i in range(3)] out = FuzzyOverlap(in_ref=tpms[0], in_tst=tpms[0]).run().outputs assert out.dice == 1 - out = FuzzyOverlap( - in_mask=in_mask, in_ref=tpms[0], in_tst=tpms[0]).run().outputs + out = FuzzyOverlap(in_mask=in_mask, in_ref=tpms[0], in_tst=tpms[0]).run().outputs assert out.dice == 1 - out = FuzzyOverlap( - in_mask=in_mask, in_ref=tpms[0], in_tst=tpms[1]).run().outputs + out = FuzzyOverlap(in_mask=in_mask, in_ref=tpms[0], in_tst=tpms[1]).run().outputs assert 0 < out.dice < 1 out = FuzzyOverlap(in_ref=tpms, in_tst=tpms).run().outputs assert out.dice == 1.0 - out = FuzzyOverlap( - in_mask=in_mask, in_ref=tpms, in_tst=tpms).run().outputs + out = FuzzyOverlap(in_mask=in_mask, in_ref=tpms, in_tst=tpms).run().outputs assert out.dice == 1.0 # Tests with synthetic 3x3x3 images @@ -36,23 +33,28 @@ def test_fuzzy_overlap(tmpdir): data[0, 0, 0] = 0.5 data[2, 2, 2] = 0.25 data[1, 1, 1] = 0.3 - nb.Nifti1Image(data, np.eye(4)).to_filename('test1.nii.gz') + nb.Nifti1Image(data, np.eye(4)).to_filename("test1.nii.gz") data = np.zeros((3, 3, 3), dtype=float) data[0, 0, 0] = 0.6 data[1, 1, 1] = 0.3 - nb.Nifti1Image(data, np.eye(4)).to_filename('test2.nii.gz') + nb.Nifti1Image(data, np.eye(4)).to_filename("test2.nii.gz") - out = FuzzyOverlap(in_ref='test1.nii.gz', in_tst='test2.nii.gz').run().outputs + out = FuzzyOverlap(in_ref="test1.nii.gz", in_tst="test2.nii.gz").run().outputs assert np.allclose(out.dice, 0.82051) # Just considering the mask, the central pixel # that raised the index now is left aside. 
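# [Editor's aside, illustrative only - not part of the patch] The asserted
# Dice values can be checked by hand if FuzzyOverlap's fuzzy Dice is taken,
# as these numbers suggest, to be 2 * sum(min(ref, tst)) / (sum(ref) + sum(tst)):
#   unmasked:  2 * (0.5 + 0.3) / (1.05 + 0.90)   # ~= 0.82051
#   masked to voxels (0, 0, 0) and (2, 2, 2):
#              2 * 0.5 / (0.75 + 0.60)           # ~= 0.74074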
- data = np.zeros((3, 3, 3), dtype=int) + data = np.zeros((3, 3, 3), dtype=np.uint8) data[0, 0, 0] = 1 data[2, 2, 2] = 1 - nb.Nifti1Image(data, np.eye(4)).to_filename('mask.nii.gz') - - out = FuzzyOverlap(in_ref='test1.nii.gz', in_tst='test2.nii.gz', - in_mask='mask.nii.gz').run().outputs + nb.Nifti1Image(data, np.eye(4)).to_filename("mask.nii.gz") + + out = ( + FuzzyOverlap( + in_ref="test1.nii.gz", in_tst="test2.nii.gz", in_mask="mask.nii.gz" + ) + .run() + .outputs + ) assert np.allclose(out.dice, 0.74074) diff --git a/nipype/algorithms/tests/test_misc.py b/nipype/algorithms/tests/test_misc.py index e9d5cbdb3c..3303344ef6 100644 --- a/nipype/algorithms/tests/test_misc.py +++ b/nipype/algorithms/tests/test_misc.py @@ -9,12 +9,10 @@ from nipype.algorithms import misc from nipype.utils.filemanip import fname_presuffix from nipype.testing.fixtures import create_analyze_pair_file_in_directory -from nipype.utils import NUMPY_MMAP from nipype.testing import example_data def test_CreateNifti(create_analyze_pair_file_in_directory): - filelist, outdir = create_analyze_pair_file_in_directory create_nifti = misc.CreateNifti() @@ -26,23 +24,23 @@ def test_CreateNifti(create_analyze_pair_file_in_directory): # .inputs based parameters setting create_nifti.inputs.header_file = filelist[0] create_nifti.inputs.data_file = fname_presuffix( - filelist[0], '', '.img', use_ext=False) + filelist[0], "", ".img", use_ext=False + ) result = create_nifti.run() assert os.path.exists(result.outputs.nifti_file) - assert nb.load(result.outputs.nifti_file, mmap=NUMPY_MMAP) + assert nb.load(result.outputs.nifti_file) def test_CalculateMedian(create_analyze_pair_file_in_directory): - mean = misc.CalculateMedian() with pytest.raises(TypeError): mean.run() - mean.inputs.in_files = example_data('ds003_sub-01_mc.nii.gz') + mean.inputs.in_files = example_data("ds003_sub-01_mc.nii.gz") eg = mean.run() assert os.path.exists(eg.outputs.median_files) - assert nb.load(eg.outputs.median_files, mmap=NUMPY_MMAP) + assert nb.load(eg.outputs.median_files) diff --git a/nipype/algorithms/tests/test_modelgen.py b/nipype/algorithms/tests/test_modelgen.py index 824a634354..1b1aacae00 100644 --- a/nipype/algorithms/tests/test_modelgen.py +++ b/nipype/algorithms/tests/test_modelgen.py @@ -1,236 +1,243 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import division from copy import deepcopy -import os from nibabel import Nifti1Image import numpy as np import pytest import numpy.testing as npt +from nipype.testing import example_data from nipype.interfaces.base import Bunch, TraitError -from nipype.algorithms.modelgen import (SpecifyModel, SpecifySparseModel, - SpecifySPMModel) +from nipype.algorithms.modelgen import ( + bids_gen_info, + SpecifyModel, + SpecifySparseModel, + SpecifySPMModel, +) + + +def test_bids_gen_info(): + fname = example_data("events.tsv") + res = bids_gen_info([fname]) + assert res[0].onsets == [ + [183.75, 313.75, 483.75, 633.75, 783.75, 933.75, 1083.75, 1233.75] + ] + assert res[0].durations == [[20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0]] + assert res[0].amplitudes == [[1, 1, 1, 1, 1, 1, 1, 1]] + assert res[0].conditions == ["ev0"] def test_modelgen1(tmpdir): - filename1 = tmpdir.join('test1.nii').strpath - filename2 = tmpdir.join('test2.nii').strpath - Nifti1Image(np.random.rand(10, 10, 10, 200), - np.eye(4)).to_filename(filename1) - Nifti1Image(np.random.rand(10, 10, 10, 200), - 
np.eye(4)).to_filename(filename2) + filename1 = tmpdir.join("test1.nii").strpath + filename2 = tmpdir.join("test2.nii").strpath + Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename1) + Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename2) s = SpecifyModel() - s.inputs.input_units = 'scans' - set_output_units = lambda: setattr(s.inputs, 'output_units', 'scans') + s.inputs.input_units = "scans" + set_output_units = lambda: setattr(s.inputs, "output_units", "scans") with pytest.raises(TraitError): set_output_units() s.inputs.functional_runs = [filename1, filename2] s.inputs.time_repetition = 6 - s.inputs.high_pass_filter_cutoff = 128. + s.inputs.high_pass_filter_cutoff = 128.0 info = [ Bunch( - conditions=['cond1'], + conditions=["cond1"], onsets=[[2, 50, 100, 180]], durations=[[1]], amplitudes=None, pmod=None, regressors=None, regressor_names=None, - tmod=None), + tmod=None, + ), Bunch( - conditions=['cond1'], + conditions=["cond1"], onsets=[[30, 40, 100, 150]], durations=[[1]], amplitudes=None, pmod=None, regressors=None, regressor_names=None, - tmod=None) + tmod=None, + ), ] s.inputs.subject_info = info res = s.run() assert len(res.outputs.session_info) == 2 - assert len(res.outputs.session_info[0]['regress']) == 0 - assert len(res.outputs.session_info[0]['cond']) == 1 + assert len(res.outputs.session_info[0]["regress"]) == 0 + assert len(res.outputs.session_info[0]["cond"]) == 1 npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][0]['onset']), - np.array([12, 300, 600, 1080])) + np.array(res.outputs.session_info[0]["cond"][0]["onset"]), + np.array([12, 300, 600, 1080]), + ) info = [ - Bunch(conditions=['cond1'], onsets=[[2]], durations=[[1]]), - Bunch(conditions=['cond1'], onsets=[[3]], durations=[[1]]) + Bunch(conditions=["cond1"], onsets=[[2]], durations=[[1]]), + Bunch(conditions=["cond1"], onsets=[[3]], durations=[[1]]), ] s.inputs.subject_info = deepcopy(info) res = s.run() npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][0]['duration']), - np.array([6.])) + np.array(res.outputs.session_info[0]["cond"][0]["duration"]), np.array([6.0]) + ) npt.assert_almost_equal( - np.array(res.outputs.session_info[1]['cond'][0]['duration']), - np.array([6.])) + np.array(res.outputs.session_info[1]["cond"][0]["duration"]), np.array([6.0]) + ) info = [ Bunch( - conditions=['cond1', 'cond2'], - onsets=[[2, 3], [2]], - durations=[[1, 1], [1]]), + conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]] + ), Bunch( - conditions=['cond1', 'cond2'], + conditions=["cond1", "cond2"], onsets=[[2, 3], [2, 4]], - durations=[[1, 1], [1, 1]]) + durations=[[1, 1], [1, 1]], + ), ] s.inputs.subject_info = deepcopy(info) - s.inputs.input_units = 'scans' + s.inputs.input_units = "scans" res = s.run() npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][0]['duration']), - np.array([6., 6.])) + np.array(res.outputs.session_info[0]["cond"][0]["duration"]), + np.array([6.0, 6.0]), + ) npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][1]['duration']), - np.array([ - 6., - ])) + np.array(res.outputs.session_info[0]["cond"][1]["duration"]), np.array([6.0]) + ) npt.assert_almost_equal( - np.array(res.outputs.session_info[1]['cond'][1]['duration']), - np.array([6., 6.])) + np.array(res.outputs.session_info[1]["cond"][1]["duration"]), + np.array([6.0, 6.0]), + ) def test_modelgen_spm_concat(tmpdir): - filename1 = tmpdir.join('test1.nii').strpath - filename2 = 
tmpdir.join('test2.nii').strpath - Nifti1Image(np.random.rand(10, 10, 10, 30), - np.eye(4)).to_filename(filename1) - Nifti1Image(np.random.rand(10, 10, 10, 30), - np.eye(4)).to_filename(filename2) + filename1 = tmpdir.join("test1.nii").strpath + filename2 = tmpdir.join("test2.nii").strpath + Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(filename1) + Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(filename2) # Test case when only one duration is passed, as being the same for all onsets. s = SpecifySPMModel() - s.inputs.input_units = 'secs' + s.inputs.input_units = "secs" s.inputs.concatenate_runs = True - setattr(s.inputs, 'output_units', 'secs') - assert s.inputs.output_units == 'secs' + s.inputs.output_units = "secs" + assert s.inputs.output_units == "secs" s.inputs.functional_runs = [filename1, filename2] s.inputs.time_repetition = 6 - s.inputs.high_pass_filter_cutoff = 128. + s.inputs.high_pass_filter_cutoff = 128.0 info = [ - Bunch( - conditions=['cond1'], onsets=[[2, 50, 100, 170]], durations=[[1]]), - Bunch( - conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]]) + Bunch(conditions=["cond1"], onsets=[[2, 50, 100, 170]], durations=[[1]]), + Bunch(conditions=["cond1"], onsets=[[30, 40, 100, 150]], durations=[[1]]), ] s.inputs.subject_info = deepcopy(info) res = s.run() assert len(res.outputs.session_info) == 1 - assert len(res.outputs.session_info[0]['regress']) == 1 - assert np.sum(res.outputs.session_info[0]['regress'][0]['val']) == 30 - assert len(res.outputs.session_info[0]['cond']) == 1 + assert len(res.outputs.session_info[0]["regress"]) == 1 + assert np.sum(res.outputs.session_info[0]["regress"][0]["val"]) == 30 + assert len(res.outputs.session_info[0]["cond"]) == 1 npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][0]['onset']), - np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0])) + np.array(res.outputs.session_info[0]["cond"][0]["onset"]), + np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0]), + ) npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][0]['duration']), - np.array([1., 1., 1., 1., 1., 1., 1., 1.])) + np.array(res.outputs.session_info[0]["cond"][0]["duration"]), + np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]), + ) # Test case of scans as output units instead of seconds - setattr(s.inputs, 'output_units', 'scans') - assert s.inputs.output_units == 'scans' + s.inputs.output_units = "scans" + assert s.inputs.output_units == "scans" s.inputs.subject_info = deepcopy(info) res = s.run() npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][0]['onset']), - np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0]) / 6) + np.array(res.outputs.session_info[0]["cond"][0]["onset"]), + np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0]) / 6, + ) # Test case for no concatenation with seconds as output units s.inputs.concatenate_runs = False s.inputs.subject_info = deepcopy(info) - s.inputs.output_units = 'secs' + s.inputs.output_units = "secs" res = s.run() npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][0]['onset']), - np.array([2.0, 50.0, 100.0, 170.0])) + np.array(res.outputs.session_info[0]["cond"][0]["onset"]), + np.array([2.0, 50.0, 100.0, 170.0]), + ) # Test case for variable number of events in separate runs, sometimes unique. 
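# [Editor's aside, illustrative only - not part of the patch] Before the
# variable-event cases below, a quick sanity check on the onset arithmetic
# asserted above: with input_units='scans' and time_repetition=6, onsets are
# scaled by the TR when expressed in seconds ([2, 50, 100, 180] becomes
# [12, 300, 600, 1080] in test_modelgen1), and with concatenate_runs=True the
# second run's onsets are shifted by n_volumes * TR = 30 * 6 = 180 s, so
# [30, 40, 100, 150] lands at [210, 220, 280, 330]; output_units='scans'
# then simply divides the concatenated onsets by the TR again.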
- filename3 = tmpdir.join('test3.nii').strpath - Nifti1Image(np.random.rand(10, 10, 10, 30), - np.eye(4)).to_filename(filename3) + filename3 = tmpdir.join("test3.nii").strpath + Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(filename3) s.inputs.functional_runs = [filename1, filename2, filename3] info = [ Bunch( - conditions=['cond1', 'cond2'], - onsets=[[2, 3], [2]], - durations=[[1, 1], [1]]), + conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]] + ), Bunch( - conditions=['cond1', 'cond2'], + conditions=["cond1", "cond2"], onsets=[[2, 3], [2, 4]], - durations=[[1, 1], [1, 1]]), + durations=[[1, 1], [1, 1]], + ), Bunch( - conditions=['cond1', 'cond2'], - onsets=[[2, 3], [2]], - durations=[[1, 1], [1]]) + conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]] + ), ] s.inputs.subject_info = deepcopy(info) res = s.run() npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][0]['duration']), - np.array([1., 1.])) + np.array(res.outputs.session_info[0]["cond"][0]["duration"]), + np.array([1.0, 1.0]), + ) npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][1]['duration']), - np.array([ - 1., - ])) + np.array(res.outputs.session_info[0]["cond"][1]["duration"]), np.array([1.0]) + ) npt.assert_almost_equal( - np.array(res.outputs.session_info[1]['cond'][1]['duration']), - np.array([1., 1.])) + np.array(res.outputs.session_info[1]["cond"][1]["duration"]), + np.array([1.0, 1.0]), + ) npt.assert_almost_equal( - np.array(res.outputs.session_info[2]['cond'][1]['duration']), - np.array([ - 1., - ])) + np.array(res.outputs.session_info[2]["cond"][1]["duration"]), np.array([1.0]) + ) # Test case for variable number of events in concatenated runs, sometimes unique. 
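# [Editor's aside, illustrative only - not part of the patch] The expected
# vector lengths asserted below follow from counting events across the three
# Bunch objects once the runs are concatenated: cond1 contributes
# 2 + 2 + 2 = 6 onsets and cond2 contributes 1 + 2 + 1 = 4, hence duration
# arrays of length 6 and 4.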
s.inputs.concatenate_runs = True info = [ Bunch( - conditions=['cond1', 'cond2'], - onsets=[[2, 3], [2]], - durations=[[1, 1], [1]]), + conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]] + ), Bunch( - conditions=['cond1', 'cond2'], + conditions=["cond1", "cond2"], onsets=[[2, 3], [2, 4]], - durations=[[1, 1], [1, 1]]), + durations=[[1, 1], [1, 1]], + ), Bunch( - conditions=['cond1', 'cond2'], - onsets=[[2, 3], [2]], - durations=[[1, 1], [1]]) + conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]] + ), ] s.inputs.subject_info = deepcopy(info) res = s.run() npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][0]['duration']), - np.array([1., 1., 1., 1., 1., 1.])) + np.array(res.outputs.session_info[0]["cond"][0]["duration"]), + np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0]), + ) npt.assert_almost_equal( - np.array(res.outputs.session_info[0]['cond'][1]['duration']), - np.array([1., 1., 1., 1.])) + np.array(res.outputs.session_info[0]["cond"][1]["duration"]), + np.array([1.0, 1.0, 1.0, 1.0]), + ) def test_modelgen_sparse(tmpdir): - filename1 = tmpdir.join('test1.nii').strpath - filename2 = tmpdir.join('test2.nii').strpath - Nifti1Image(np.random.rand(10, 10, 10, 50), - np.eye(4)).to_filename(filename1) - Nifti1Image(np.random.rand(10, 10, 10, 50), - np.eye(4)).to_filename(filename2) + filename1 = tmpdir.join("test1.nii").strpath + filename2 = tmpdir.join("test2.nii").strpath + Nifti1Image(np.random.rand(10, 10, 10, 50), np.eye(4)).to_filename(filename1) + Nifti1Image(np.random.rand(10, 10, 10, 50), np.eye(4)).to_filename(filename2) s = SpecifySparseModel() - s.inputs.input_units = 'secs' + s.inputs.input_units = "secs" s.inputs.functional_runs = [filename1, filename2] s.inputs.time_repetition = 6 info = [ - Bunch( - conditions=['cond1'], onsets=[[0, 50, 100, 180]], durations=[[2]]), - Bunch( - conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]]) + Bunch(conditions=["cond1"], onsets=[[0, 50, 100, 180]], durations=[[2]]), + Bunch(conditions=["cond1"], onsets=[[30, 40, 100, 150]], durations=[[1]]), ] s.inputs.subject_info = info s.inputs.volumes_in_cluster = 1 @@ -238,26 +245,26 @@ def test_modelgen_sparse(tmpdir): s.inputs.high_pass_filter_cutoff = np.inf res = s.run() assert len(res.outputs.session_info) == 2 - assert len(res.outputs.session_info[0]['regress']) == 1 - assert len(res.outputs.session_info[0]['cond']) == 0 + assert len(res.outputs.session_info[0]["regress"]) == 1 + assert len(res.outputs.session_info[0]["cond"]) == 0 s.inputs.stimuli_as_impulses = False res = s.run() - assert res.outputs.session_info[0]['regress'][0]['val'][0] == 1.0 + assert res.outputs.session_info[0]["regress"][0]["val"][0] == 1.0 s.inputs.model_hrf = True res = s.run() npt.assert_almost_equal( - res.outputs.session_info[0]['regress'][0]['val'][0], - 0.016675298129743384) - assert len(res.outputs.session_info[0]['regress']) == 1 + res.outputs.session_info[0]["regress"][0]["val"][0], 0.016675298129743384 + ) + assert len(res.outputs.session_info[0]["regress"]) == 1 s.inputs.use_temporal_deriv = True res = s.run() - assert len(res.outputs.session_info[0]['regress']) == 2 + assert len(res.outputs.session_info[0]["regress"]) == 2 npt.assert_almost_equal( - res.outputs.session_info[0]['regress'][0]['val'][0], - 0.016675298129743384) + res.outputs.session_info[0]["regress"][0]["val"][0], 0.016675298129743384 + ) npt.assert_almost_equal( - res.outputs.session_info[1]['regress'][1]['val'][5], - 0.007671459162258378) + 
res.outputs.session_info[1]["regress"][1]["val"][5], 0.007671459162258378 + ) diff --git a/nipype/algorithms/tests/test_moments.py b/nipype/algorithms/tests/test_moments.py index fa174a79e4..6fe60c4e61 100644 --- a/nipype/algorithms/tests/test_moments.py +++ b/nipype/algorithms/tests/test_moments.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import numpy as np from nipype.algorithms.misc import calc_moments @@ -130,8 +129,16 @@ def test_skew(tmpdir): skewness = calc_moments(f.strpath, 3) assert np.allclose( skewness, - np.array([ - -0.23418937314622, 0.2946365564954823, -0.05781002053540932, - -0.3512508282578762, -0.07035664150233077, -0.01935867699166935, - 0.00483863369427428, 0.21879460029850167 - ])) + np.array( + [ + -0.23418937314622, + 0.2946365564954823, + -0.05781002053540932, + -0.3512508282578762, + -0.07035664150233077, + -0.01935867699166935, + 0.00483863369427428, + 0.21879460029850167, + ] + ), + ) diff --git a/nipype/algorithms/tests/test_normalize_tpms.py b/nipype/algorithms/tests/test_normalize_tpms.py index a65cc66770..37eb9bb51b 100644 --- a/nipype/algorithms/tests/test_normalize_tpms.py +++ b/nipype/algorithms/tests/test_normalize_tpms.py @@ -1,26 +1,18 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from builtins import range -import os -import pytest from nipype.testing import example_data import numpy as np import nibabel as nb -import nipype.testing as nit from nipype.algorithms.misc import normalize_tpms -from nipype.utils import NUMPY_MMAP def test_normalize_tpms(tmpdir): - - in_mask = example_data('tpms_msk.nii.gz') - mskdata = nb.load(in_mask, mmap=NUMPY_MMAP).get_data() + in_mask = example_data("tpms_msk.nii.gz") + mskdata = np.asanyarray(nb.load(in_mask).dataobj) mskdata[mskdata > 0.0] = 1.0 mapdata = [] @@ -28,16 +20,17 @@ def test_normalize_tpms(tmpdir): out_files = [] for i in range(3): - mapname = example_data('tpm_%02d.nii.gz' % i) - filename = tmpdir.join('modtpm_%02d.nii.gz' % i).strpath - out_files.append(tmpdir.join('normtpm_%02d.nii.gz' % i).strpath) + mapname = example_data("tpm_%02d.nii.gz" % i) + filename = tmpdir.join("modtpm_%02d.nii.gz" % i).strpath + out_files.append(tmpdir.join("normtpm_%02d.nii.gz" % i).strpath) - im = nb.load(mapname, mmap=NUMPY_MMAP) - data = im.get_data() - mapdata.append(data.copy()) + im = nb.load(mapname) + data = im.get_fdata() + mapdata.append(data) - nb.Nifti1Image(2.0 * (data * mskdata), im.affine, - im.header).to_filename(filename) + nb.Nifti1Image(2.0 * (data * mskdata), im.affine, im.header).to_filename( + filename + ) in_files.append(filename) normalize_tpms(in_files, in_mask, out_files=out_files) @@ -45,7 +38,7 @@ def test_normalize_tpms(tmpdir): sumdata = np.zeros_like(mskdata) for i, tstfname in enumerate(out_files): - normdata = nb.load(tstfname, mmap=NUMPY_MMAP).get_data() + normdata = nb.load(tstfname).get_fdata() sumdata += normdata assert np.all(normdata[mskdata == 0] == 0) assert np.allclose(normdata, mapdata[i]) diff --git a/nipype/algorithms/tests/test_rapidart.py b/nipype/algorithms/tests/test_rapidart.py index 9c29648626..322d32ad2e 100644 --- a/nipype/algorithms/tests/test_rapidart.py +++ b/nipype/algorithms/tests/test_rapidart.py @@ -1,8 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import division - import numpy as np import numpy.testing as npt @@ -18,58 +15,79 @@ def 
test_ad_init(): def test_ad_output_filenames(): ad = ra.ArtifactDetect() - outputdir = '/tmp' - f = 'motion.nii' - (outlierfile, intensityfile, statsfile, normfile, plotfile, - displacementfile, maskfile) = ad._get_output_filenames(f, outputdir) - assert outlierfile == '/tmp/art.motion_outliers.txt' - assert intensityfile == '/tmp/global_intensity.motion.txt' - assert statsfile == '/tmp/stats.motion.txt' - assert normfile == '/tmp/norm.motion.txt' - assert plotfile == '/tmp/plot.motion.png' - assert displacementfile == '/tmp/disp.motion.nii' - assert maskfile == '/tmp/mask.motion.nii' + outputdir = "/tmp" + f = "motion.nii" + ( + outlierfile, + intensityfile, + statsfile, + normfile, + plotfile, + displacementfile, + maskfile, + ) = ad._get_output_filenames(f, outputdir) + assert outlierfile == "/tmp/art.motion_outliers.txt" + assert intensityfile == "/tmp/global_intensity.motion.txt" + assert statsfile == "/tmp/stats.motion.txt" + assert normfile == "/tmp/norm.motion.txt" + assert plotfile == "/tmp/plot.motion.png" + assert displacementfile == "/tmp/disp.motion.nii" + assert maskfile == "/tmp/mask.motion.nii" def test_ad_get_affine_matrix(): - matrix = ra._get_affine_matrix(np.array([0]), 'SPM') + matrix = ra._get_affine_matrix(np.array([0]), "SPM") npt.assert_equal(matrix, np.eye(4)) # test translation params = [1, 2, 3] - matrix = ra._get_affine_matrix(params, 'SPM') + matrix = ra._get_affine_matrix(params, "SPM") out = np.eye(4) out[0:3, 3] = params npt.assert_equal(matrix, out) # test rotation params = np.array([0, 0, 0, np.pi / 2, np.pi / 2, np.pi / 2]) - matrix = ra._get_affine_matrix(params, 'SPM') - out = np.array([0, 0, 1, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1]).reshape( - (4, 4)) + matrix = ra._get_affine_matrix(params, "SPM") + out = np.array([0, 0, 1, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_almost_equal(matrix, out) # test scaling params = np.array([0, 0, 0, 0, 0, 0, 1, 2, 3]) - matrix = ra._get_affine_matrix(params, 'SPM') - out = np.array([1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 3, 0, 0, 0, 0, 1]).reshape( - (4, 4)) + matrix = ra._get_affine_matrix(params, "SPM") + out = np.array([1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 3, 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_equal(matrix, out) # test shear params = np.array([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 3]) - matrix = ra._get_affine_matrix(params, 'SPM') - out = np.array([1, 1, 2, 0, 0, 1, 3, 0, 0, 0, 1, 0, 0, 0, 0, 1]).reshape( - (4, 4)) + matrix = ra._get_affine_matrix(params, "SPM") + out = np.array([1, 1, 2, 0, 0, 1, 3, 0, 0, 0, 1, 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_equal(matrix, out) def test_ad_get_norm(): - params = np.array([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, np.pi / 4, np.pi / 4, np.pi / 4, 0, 0, 0, - -np.pi / 4, -np.pi / 4, -np.pi / 4 - ]).reshape((3, 6)) - norm, _ = ra._calc_norm(params, False, 'SPM') - npt.assert_almost_equal(norm, - np.array([18.86436316, 37.74610158, 31.29780829])) - norm, _ = ra._calc_norm(params, True, 'SPM') - npt.assert_almost_equal(norm, np.array([0., 143.72192614, 173.92527131])) + params = np.array( + [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + np.pi / 4, + np.pi / 4, + np.pi / 4, + 0, + 0, + 0, + -np.pi / 4, + -np.pi / 4, + -np.pi / 4, + ] + ).reshape((3, 6)) + norm, _ = ra._calc_norm(params, False, "SPM") + npt.assert_almost_equal(norm, np.array([18.86436316, 37.74610158, 31.29780829])) + norm, _ = ra._calc_norm(params, True, "SPM") + npt.assert_almost_equal(norm, np.array([0.0, 143.72192614, 173.92527131])) def test_sc_init(): @@ -83,13 +101,14 @@ def 
test_sc_populate_inputs(): realignment_parameters=None, intensity_values=None, spm_mat_file=None, - concatenated_design=None) + concatenated_design=None, + ) assert set(sc.inputs.__dict__.keys()) == set(inputs.__dict__.keys()) def test_sc_output_filenames(): sc = ra.StimulusCorrelation() - outputdir = '/tmp' - f = 'motion.nii' + outputdir = "/tmp" + f = "motion.nii" corrfile = sc._get_output_filenames(f, outputdir) - assert corrfile == '/tmp/qa.motion_stimcorr.txt' + assert corrfile == "/tmp/qa.motion_stimcorr.txt" diff --git a/nipype/algorithms/tests/test_splitmerge.py b/nipype/algorithms/tests/test_splitmerge.py index f05d291028..af4a920be5 100644 --- a/nipype/algorithms/tests/test_splitmerge.py +++ b/nipype/algorithms/tests/test_splitmerge.py @@ -1,22 +1,19 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- from nipype.testing import example_data -from nipype.utils import NUMPY_MMAP def test_split_and_merge(tmpdir): import numpy as np import nibabel as nb - import os.path as op - import os from nipype.algorithms.misc import split_rois, merge_rois - in_mask = example_data('tpms_msk.nii.gz') - dwfile = tmpdir.join('dwi.nii.gz').strpath - mskdata = nb.load(in_mask, mmap=NUMPY_MMAP).get_data() - aff = nb.load(in_mask, mmap=NUMPY_MMAP).affine + in_mask = example_data("tpms_msk.nii.gz") + dwfile = tmpdir.join("dwi.nii.gz").strpath + mask_img = nb.load(in_mask) + mskdata = np.asanyarray(mask_img.dataobj) + aff = mask_img.affine dwshape = (mskdata.shape[0], mskdata.shape[1], mskdata.shape[2], 6) dwdata = np.random.normal(size=dwshape) @@ -25,7 +22,7 @@ def test_split_and_merge(tmpdir): resdw, resmsk, resid = split_rois(dwfile, in_mask, roishape=(20, 20, 2)) merged = merge_rois(resdw, resid, in_mask) - dwmerged = nb.load(merged, mmap=NUMPY_MMAP).get_data() + dwmerged = nb.load(merged).get_fdata(dtype=np.float32) dwmasked = dwdata * mskdata[:, :, :, np.newaxis] diff --git a/nipype/algorithms/tests/test_stats.py b/nipype/algorithms/tests/test_stats.py index 9a4c7525b5..ed698d47cf 100644 --- a/nipype/algorithms/tests/test_stats.py +++ b/nipype/algorithms/tests/test_stats.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -10,36 +9,41 @@ def test_ActivationCount(tmpdir): tmpdir.chdir() - in_files = ['{:d}.nii'.format(i) for i in range(3)] + in_files = [f"{i:d}.nii" for i in range(3)] for fname in in_files: - nb.Nifti1Image(np.random.normal(size=(5, 5, 5)), - np.eye(4)).to_filename(fname) + nb.Nifti1Image(np.random.normal(size=(5, 5, 5)), np.eye(4)).to_filename(fname) acm = ActivationCount(in_files=in_files, threshold=1.65) res = acm.run() diff = nb.load(res.outputs.out_file) pos = nb.load(res.outputs.acm_pos) neg = nb.load(res.outputs.acm_neg) - assert np.allclose(diff.get_data(), pos.get_data() - neg.get_data()) + assert np.allclose(diff.get_fdata(), pos.get_fdata() - neg.get_fdata()) -@pytest.mark.parametrize("threshold, above_thresh", [ - (1, 15.865), # above one standard deviation (one side) - (2, 2.275), # above two standard deviations (one side) - (3, 0.135) # above three standard deviations (one side) -]) +@pytest.mark.parametrize( + "threshold, above_thresh", + [ + (1, 15.865), # above one standard deviation (one side) + (2, 2.275), # above two standard deviations (one side) + (3, 0.135), # above three standard deviations (one side) + ], +) def test_ActivationCount_normaldistr(tmpdir, threshold, above_thresh): tmpdir.chdir() - in_files = ['{:d}.nii'.format(i) for i in range(3)] + 
in_files = [f"{i:d}.nii" for i in range(3)] for fname in in_files: - nb.Nifti1Image(np.random.normal(size=(100, 100, 100)), - np.eye(4)).to_filename(fname) + nb.Nifti1Image(np.random.normal(size=(100, 100, 100)), np.eye(4)).to_filename( + fname + ) acm = ActivationCount(in_files=in_files, threshold=threshold) res = acm.run() pos = nb.load(res.outputs.acm_pos) neg = nb.load(res.outputs.acm_neg) - assert np.isclose(pos.get_data().mean(), - above_thresh * 1.e-2, rtol=0.1, atol=1.e-4) - assert np.isclose(neg.get_data().mean(), - above_thresh * 1.e-2, rtol=0.1, atol=1.e-4) + assert np.isclose( + pos.get_fdata().mean(), above_thresh * 1.0e-2, rtol=0.1, atol=1.0e-4 + ) + assert np.isclose( + neg.get_fdata().mean(), above_thresh * 1.0e-2, rtol=0.1, atol=1.0e-4 + ) diff --git a/nipype/caching/__init__.py b/nipype/caching/__init__.py index 1e99ed4428..db0261ebea 100644 --- a/nipype/caching/__init__.py +++ b/nipype/caching/__init__.py @@ -1,2 +1 @@ -# -*- coding: utf-8 -*- from .memory import Memory diff --git a/nipype/caching/memory.py b/nipype/caching/memory.py index 9fcf694d4b..a1d45ffff2 100644 --- a/nipype/caching/memory.py +++ b/nipype/caching/memory.py @@ -1,11 +1,7 @@ -# -*- coding: utf-8 -*- """ Using nipype with persistence and lazy recomputation but without explicit name-steps pipeline: getting back scope in command-line based programming. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import object, open import os import hashlib @@ -22,42 +18,42 @@ # PipeFunc object: callable interface to nipype.interface objects -class PipeFunc(object): - """ Callable interface to nipype.interface objects +class PipeFunc: + """Callable interface to nipype.interface objects - Use this to wrap nipype.interface object and call them - specifying their input with keyword arguments:: + Use this to wrap nipype.interface object and call them + specifying their input with keyword arguments:: - fsl_merge = PipeFunc(fsl.Merge, base_dir='.') - out = fsl_merge(in_files=files, dimension='t') + fsl_merge = PipeFunc(fsl.Merge, base_dir='.') + out = fsl_merge(in_files=files, dimension='t') """ def __init__(self, interface, base_dir, callback=None): """ - Parameters - =========== - interface: a nipype interface class - The interface class to wrap - base_dir: a string - The directory in which the computation will be - stored - callback: a callable - An optional callable called each time after the function - is called. + Parameters + =========== + interface: a nipype interface class + The interface class to wrap + base_dir: a string + The directory in which the computation will be + stored + callback: a callable + An optional callable called each time after the function + is called. """ - if not (isinstance(interface, type) - and issubclass(interface, BaseInterface)): - raise ValueError('the interface argument should be a nipype ' - 'interface class, but %s (type %s) was passed.' % - (interface, type(interface))) + if not (isinstance(interface, type) and issubclass(interface, BaseInterface)): + raise ValueError( + "the interface argument should be a nipype " + "interface class, but %s (type %s) was passed." 
+                % (interface, type(interface)) + ) self.interface = interface base_dir = os.path.abspath(base_dir) if not os.path.exists(base_dir) and os.path.isdir(base_dir): - raise ValueError('base_dir should be an existing directory') + raise ValueError("base_dir should be an existing directory") self.base_dir = base_dir - doc = '%s\n%s' % (self.interface.__doc__, - self.interface.help(returnhelp=True)) + doc = f"{self.interface.__doc__}\n{self.interface.help(returnhelp=True)}" self.__doc__ = doc self.callback = callback @@ -68,10 +64,12 @@ def __call__(self, **kwargs): interface.inputs.trait_set(**kwargs) # Make a name for our node inputs = interface.inputs.get_hashval() - hasher = hashlib.new('md5') + hasher = hashlib.new("md5") hasher.update(pickle.dumps(inputs)) - dir_name = '%s-%s' % (interface.__class__.__module__.replace('.', '-'), - interface.__class__.__name__) + dir_name = "{}-{}".format( + interface.__class__.__module__.replace(".", "-"), + interface.__class__.__name__, + ) job_name = hasher.hexdigest() node = Node(interface, name=job_name) node.base_dir = os.path.join(self.base_dir, dir_name) @@ -88,9 +86,12 @@ def __call__(self, **kwargs): return out def __repr__(self): - return '{}({}.{}), base_dir={})'.format( - self.__class__.__name__, self.interface.__module__, - self.interface.__name__, self.base_dir) + return "{}({}.{}, base_dir={})".format( + self.__class__.__name__, + self.interface.__module__, + self.interface.__name__, + self.base_dir, + ) ############################################################################### @@ -102,9 +103,9 @@ def read_log(filename, run_dict=None): if run_dict is None: run_dict = dict() - with open(filename, 'r') as logfile: + with open(filename) as logfile: for line in logfile: - dir_name, job_name = line[:-1].split('/') + dir_name, job_name = line[:-1].split("/") jobs = run_dict.get(dir_name, set()) jobs.add(job_name) run_dict[dir_name] = jobs @@ -112,31 +113,31 @@ def read_log(filename, run_dict=None): def rm_all_but(base_dir, dirs_to_keep, warn=False): - """ Remove all the sub-directories of base_dir, but those listed - - Parameters - ============ - base_dir: string - The base directory - dirs_to_keep: set - The names of the directories to keep + """Remove all the sub-directories of base_dir, but those listed + + Parameters + ============ + base_dir: string + The base directory + dirs_to_keep: set + The names of the directories to keep """ try: all_dirs = os.listdir(base_dir) except OSError: "Dir has been deleted" return - all_dirs = [d for d in all_dirs if not d.startswith('log.')] + all_dirs = [d for d in all_dirs if not d.startswith("log.")] dirs_to_rm = list(dirs_to_keep.symmetric_difference(all_dirs)) for dir_name in dirs_to_rm: dir_name = os.path.join(base_dir, dir_name) if os.path.exists(dir_name): if warn: - print('removing directory: %s' % dir_name) + print("removing directory: %s" % dir_name) shutil.rmtree(dir_name) -class _MemoryCallback(object): +class _MemoryCallback: "An object to avoid closures and have everything pickle" def __init__(self, memory): @@ -146,139 +147,137 @@ def __call__(self, dir_name, job_name): self.memory._log_name(dir_name, job_name) -class Memory(object): - """ Memory context to provide caching for interfaces - - Parameters - ========== - base_dir: string - The directory name of the location for the caching - - Methods - ======= - cache - Creates a cacheable function from an nipype Interface class - clear_previous_runs - Removes from the disk all the runs that where not used after - the creation time of the
specific Memory instance - clear_previous_runs - Removes from the disk all the runs that where not used after - the given time +class Memory: + """Memory context to provide caching for interfaces + + Parameters + ========== + base_dir: string + The directory name of the location for the caching + + Methods + ======= + cache + Creates a cacheable function from a nipype Interface class + clear_previous_runs + Removes from the disk all the runs that were not used after + the creation time of the specific Memory instance + clear_runs_since + Removes from the disk all the runs that were not used after + the given time """ def __init__(self, base_dir): - base_dir = os.path.join(os.path.abspath(base_dir), 'nipype_mem') + base_dir = os.path.join(os.path.abspath(base_dir), "nipype_mem") if not os.path.exists(base_dir): os.mkdir(base_dir) elif not os.path.isdir(base_dir): - raise ValueError('base_dir should be a directory') + raise ValueError("base_dir should be a directory") self.base_dir = base_dir - open(os.path.join(base_dir, 'log.current'), 'a').close() + open(os.path.join(base_dir, "log.current"), "a").close() def cache(self, interface): - """ Returns a callable that caches the output of an interface + """Returns a callable that caches the output of an interface - Parameters - ========== - interface: nipype interface - The nipype interface class to be wrapped and cached + Parameters + ========== + interface: nipype interface + The nipype interface class to be wrapped and cached - Returns - ======= - pipe_func: a PipeFunc callable object - An object that can be used as a function to apply the - interface to arguments. Inputs of the interface are given - as keyword arguments, bearing the same name as the name - in the inputs specs of the interface. + Returns + ======= + pipe_func: a PipeFunc callable object + An object that can be used as a function to apply the + interface to arguments. Inputs of the interface are given + as keyword arguments, bearing the same name as the name + in the inputs specs of the interface. - Examples - ======== + Examples + ======== - >>> from tempfile import mkdtemp - >>> mem = Memory(mkdtemp()) - >>> from nipype.interfaces import fsl + >>> from tempfile import mkdtemp + >>> mem = Memory(mkdtemp()) + >>> from nipype.interfaces import fsl - Here we create a callable that can be used to apply an - fsl.Merge interface to files + Here we create a callable that can be used to apply an + fsl.Merge interface to files - >>> fsl_merge = mem.cache(fsl.Merge) + >>> fsl_merge = mem.cache(fsl.Merge) - Now we apply it to a list of files. We need to specify the - list of input files and the dimension along which the files - should be merged. + Now we apply it to a list of files. We need to specify the + list of input files and the dimension along which the files + should be merged. - >>> results = fsl_merge(in_files=['a.nii', 'b.nii'], - ... dimension='t') # doctest: +SKIP + >>> results = fsl_merge(in_files=['a.nii', 'b.nii'], + ... dimension='t') # doctest: +SKIP - We can retrieve the resulting file from the outputs: - >>> results.outputs.merged_file # doctest: +SKIP - '...' + We can retrieve the resulting file from the outputs: + >>> results.outputs.merged_file # doctest: +SKIP + '...' """ return PipeFunc(interface, self.base_dir, _MemoryCallback(self)) def _log_name(self, dir_name, job_name): - """ Increment counters tracking which cached function get executed.
-        """ +        """Increment counters tracking which cached functions get executed.""" base_dir = self.base_dir # Every counter is a file opened in append mode and closed # immediately to avoid race conditions in parallel computing: # file appends are atomic - with open(os.path.join(base_dir, 'log.current'), 'a') as currentlog: - currentlog.write('%s/%s\n' % (dir_name, job_name)) + with open(os.path.join(base_dir, "log.current"), "a") as currentlog: + currentlog.write(f"{dir_name}/{job_name}\n") t = time.localtime() - year_dir = os.path.join(base_dir, 'log.%i' % t.tm_year) + year_dir = os.path.join(base_dir, "log.%i" % t.tm_year) try: os.mkdir(year_dir) except OSError: "Dir exists" - month_dir = os.path.join(year_dir, '%02i' % t.tm_mon) + month_dir = os.path.join(year_dir, "%02i" % t.tm_mon) try: os.mkdir(month_dir) except OSError: "Dir exists" - with open(os.path.join(month_dir, '%02i.log' % t.tm_mday), - 'a') as rotatefile: - rotatefile.write('%s/%s\n' % (dir_name, job_name)) + with open(os.path.join(month_dir, "%02i.log" % t.tm_mday), "a") as rotatefile: + rotatefile.write(f"{dir_name}/{job_name}\n") def clear_previous_runs(self, warn=True): - """ Remove all the cache that where not used in the latest run of - the memory object: i.e. since the corresponding Python object - was created. - - Parameters - ========== - warn: boolean, optional - If true, echoes warning messages for all directory - removed + """Remove all the cache that were not used in the latest run of + the memory object: i.e. since the corresponding Python object + was created. + + Parameters + ========== + warn: boolean, optional + If true, echoes warning messages for each directory + removed """ base_dir = self.base_dir - latest_runs = read_log(os.path.join(base_dir, 'log.current')) + latest_runs = read_log(os.path.join(base_dir, "log.current")) self._clear_all_but(latest_runs, warn=warn) def clear_runs_since(self, day=None, month=None, year=None, warn=True): - """ Remove all the cache that where not used since the given date - - Parameters - ========== - day, month, year: integers, optional - The integers specifying the latest day (in localtime) that - a node should have been accessed to be kept. If not - given, the current date is used. - warn: boolean, optional - If true, echoes warning messages for all directory - removed + """Remove all the cache that were not used since the given date + + Parameters + ========== + day, month, year: integers, optional + The integers specifying the latest day (in localtime) that + a node should have been accessed to be kept. If not + given, the current date is used. + warn: boolean, optional + If true, echoes warning messages for each directory + removed """ t = time.localtime() day = day if day is not None else t.tm_mday month = month if month is not None else t.tm_mon year = year if year is not None else t.tm_year base_dir = self.base_dir - cut_off_file = '%s/log.%i/%02i/%02i.log' % (base_dir, year, month, day) + cut_off_file = "%s/log.%i/%02i/%02i.log" % (base_dir, year, month, day) logs_to_flush = list() recent_runs = dict() - for log_name in glob.glob('%s/log.*/*/*.log' % base_dir): + for log_name in glob.glob("%s/log.*/*/*.log" % base_dir): if log_name < cut_off_file: logs_to_flush.append(log_name) else: @@ -288,13 +287,12 @@ def clear_runs_since(self, day=None, month=None, year=None, warn=True): os.remove(log_name) def _clear_all_but(self, runs, warn=True): - """ Remove all the runs appart from those given to the function - input.
+ """Remove all the runs apart from those given to the function + input. """ rm_all_but(self.base_dir, set(runs.keys()), warn=warn) for dir_name, job_names in list(runs.items()): - rm_all_but( - os.path.join(self.base_dir, dir_name), job_names, warn=warn) + rm_all_but(os.path.join(self.base_dir, dir_name), job_names, warn=warn) def __repr__(self): - return '{}(base_dir={})'.format(self.__class__.__name__, self.base_dir) + return f"{self.__class__.__name__}(base_dir={self.base_dir})" diff --git a/nipype/caching/tests/__init__.py b/nipype/caching/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/caching/tests/__init__.py +++ b/nipype/caching/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/caching/tests/test_memory.py b/nipype/caching/tests/test_memory.py index 3ea594f22a..5bd9fad528 100644 --- a/nipype/caching/tests/test_memory.py +++ b/nipype/caching/tests/test_memory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Test the nipype interface caching mechanism """ @@ -6,6 +5,7 @@ from ...pipeline.engine.tests.test_engine import EngineTestInterface from ... import config + config.set_default_config() nb_runs = 0 @@ -15,15 +15,14 @@ class SideEffectInterface(EngineTestInterface): def _run_interface(self, runtime): global nb_runs nb_runs += 1 - runtime.returncode = 0 - return runtime + return super()._run_interface(runtime) def test_caching(tmpdir): - old_rerun = config.get('execution', 'stop_on_first_rerun') + old_rerun = config.get("execution", "stop_on_first_rerun") try: # Prevent rerun to check that evaluation is computed only once - config.set('execution', 'stop_on_first_rerun', 'true') + config.set("execution", "stop_on_first_rerun", "true") mem = Memory(tmpdir.strpath) first_nb_run = nb_runs results = mem.cache(SideEffectInterface)(input1=2, input2=1) @@ -38,4 +37,4 @@ def test_caching(tmpdir): assert nb_runs == first_nb_run + 2 assert results.outputs.output1 == [1, 1] finally: - config.set('execution', 'stop_on_first_rerun', old_rerun) + config.set("execution", "stop_on_first_rerun", old_rerun) diff --git a/nipype/conftest.py b/nipype/conftest.py index 9a9175ce28..18b8a1ca6d 100644 --- a/nipype/conftest.py +++ b/nipype/conftest.py @@ -6,16 +6,18 @@ import py.path as pp NIPYPE_DATADIR = os.path.realpath( - os.path.join(os.path.dirname(__file__), 'testing/data')) + os.path.join(os.path.dirname(__file__), "testing/data") +) temp_folder = mkdtemp() -data_dir = os.path.join(temp_folder, 'data') +data_dir = os.path.join(temp_folder, "data") shutil.copytree(NIPYPE_DATADIR, data_dir) @pytest.fixture(autouse=True) def add_np(doctest_namespace): - doctest_namespace['np'] = numpy - doctest_namespace['os'] = os + doctest_namespace["np"] = numpy + doctest_namespace["os"] = os + doctest_namespace["pytest"] = pytest doctest_namespace["datadir"] = data_dir @@ -25,7 +27,6 @@ def _docdir(request): # Trigger ONLY for the doctests. doctest_plugin = request.config.pluginmanager.getplugin("doctest") if isinstance(request.node, doctest_plugin.DoctestItem): - # Get the fixture dynamically by its name. 
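Taken together, the caching hunks above preserve the module's contract: results are keyed by an md5 hash of the pickled interface inputs under base_dir/nipype_mem, and log.current records which cache entries the current session touched. A minimal usage sketch of that API (assuming only a writable temporary directory and an installed FSL interface; fsl.Merge is the same example the docstring itself uses):

    from tempfile import mkdtemp
    from nipype.caching import Memory
    from nipype.interfaces import fsl

    mem = Memory(mkdtemp())           # cache root; creates <base_dir>/nipype_mem
    fsl_merge = mem.cache(fsl.Merge)  # returns a PipeFunc wrapper
    # The first call runs the node; a repeat call with identical inputs is a cache hit.
    # results = fsl_merge(in_files=['a.nii', 'b.nii'], dimension='t')
    # Flush every cache entry not reused since this Memory object was created:
    mem.clear_previous_runs(warn=True)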
tmpdir = pp.local(data_dir) diff --git a/nipype/external/__init__.py b/nipype/external/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/external/__init__.py +++ b/nipype/external/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/external/cloghandler.py b/nipype/external/cloghandler.py index 5fda934c84..289c8dfa2f 100644 --- a/nipype/external/cloghandler.py +++ b/nipype/external/cloghandler.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2008 Lowell Alleman # # Licensed under the Apache License, Version 2.0 (the "License"); you may not @@ -36,21 +35,14 @@ testing, performance was more than adequate, but if you need a high-volume or low-latency solution, I suggest you look elsewhere. -This module currently only support the 'nt' and 'posix' platforms due to the -usage of the portalocker module. I do not have access to any other platforms -for testing, patches are welcome. - See the README file for an example usage of this module. """ -from builtins import range __version__ = "$Id: cloghandler.py 6175 2009-11-02 18:40:35Z lowell $" __author__ = "Lowell Alleman" -__all__ = [ - "ConcurrentRotatingFileHandler", -] +__all__ = ["ConcurrentRotatingFileHandler"] import os import sys @@ -63,13 +55,7 @@ except ImportError: codecs = None -# Question/TODO: Should we have a fallback mode if we can't load portalocker / -# we should still be better off than with the standard RotattingFileHandler -# class, right? We do some rename checking... that should prevent some file -# clobbering that the builtin class allows. - -# sibling module than handles all the ugly platform-specific details of file locking -from .portalocker import lock, unlock, LOCK_EX, LOCK_NB, LockException +from filelock import SoftFileLock # A client can set this to true to automatically convert relative paths to # absolute paths (which will also hide the absolute path warnings) @@ -84,14 +70,16 @@ class ConcurrentRotatingFileHandler(BaseRotatingHandler): exceed the given size. """ - def __init__(self, - filename, - mode='a', - maxBytes=0, - backupCount=0, - encoding=None, - debug=True, - supress_abs_warn=False): + def __init__( + self, + filename, + mode="a", + maxBytes=0, + backupCount=0, + encoding=None, + debug=True, + supress_abs_warn=False, + ): """ Open the specified file and use it as the stream for logging. @@ -149,18 +137,21 @@ def __init__(self, # if the given filename contains no path, we make an absolute path if not os.path.isabs(filename): - if FORCE_ABSOLUTE_PATH or \ - not os.path.split(filename)[0]: + if FORCE_ABSOLUTE_PATH or not os.path.split(filename)[0]: filename = os.path.abspath(filename) elif not supress_abs_warn: from warnings import warn + warn( "The given 'filename' should be an absolute path. If your " "application calls os.chdir(), your logs may get messed up. " - "Use 'supress_abs_warn=True' to hide this message.") + "Use 'supress_abs_warn=True' to hide this message." + ) try: BaseRotatingHandler.__init__(self, filename, mode, encoding) - except TypeError: # Due to a different logging release without encoding support (Python 2.4.1 and earlier?) + except ( + TypeError + ): # Due to a different logging release without encoding support (Python 2.4.1 and earlier?) BaseRotatingHandler.__init__(self, filename, mode) self.encoding = encoding @@ -168,11 +159,8 @@ def __init__(self, self.maxBytes = maxBytes self.backupCount = backupCount # Prevent multiple extensions on the lock file (Only handles the normal "*.log" case.) 
-        if filename.endswith(".log"): - lock_file = filename[:-4] - else: - lock_file = filename - self.stream_lock = open(lock_file + ".lock", "w") + self.lock_file = "%s.lock" % filename + self.stream_lock = SoftFileLock(self.lock_file) # For debug mode, swap out the "_degrade()" method with a more verbose one. if debug: @@ -185,28 +173,28 @@ def _openFile(self, mode): self.stream = open(self.baseFilename, mode) def acquire(self): - """ Acquire thread and file locks. Also re-opening log file when running - in 'degraded' mode. """ + """Acquire thread and file locks. Also re-opening log file when running + in 'degraded' mode.""" # handle thread lock Handler.acquire(self) - lock(self.stream_lock, LOCK_EX) + self.stream_lock.acquire() if self.stream.closed: self._openFile(self.mode) def release(self): - """ Release file and thread locks. Flush stream and take care of closing - stream in 'degraded' mode. """ + """Release file and thread locks. Flush stream and take care of closing + stream in 'degraded' mode.""" try: if not self.stream.closed: self.stream.flush() if self._rotateFailed: self.stream.close() - except IOError: + except OSError: if self._rotateFailed: self.stream.close() finally: try: - unlock(self.stream_lock) + self.stream_lock.release() finally: # release thread lock Handler.release(self) @@ -221,7 +209,7 @@ def close(self): Handler.close(self) def flush(self): - """ flush(): Do nothing. + """flush(): Do nothing. Since a flush is issued in release(), we don't do it here. To do a flush here, it would be necessary to re-lock everything, and it is just easier Doing a flush() here would also introduce a window of opportunity for another process to write to the log file in between calling - stream.write() and stream.flush(), which seems like a bad thing. """ + stream.write() and stream.flush(), which seems like a bad thing.""" pass def _degrade(self, degrade, msg, *args): - """ Set degrade mode or not. Ignore msg. """ + """Set degrade mode or not. Ignore msg.""" self._rotateFailed = degrade del msg, args # avoid pychecker warnings def _degrade_debug(self, degrade, msg, *args): - """ A more colorful version of _degade(). (This is enabled by passing + """A more colorful version of _degrade(). (This is enabled by passing "debug=True" at initialization). """ if degrade: if not self._rotateFailed: - sys.stderr.write("Degrade mode - ENTERING - (pid=%d) %s\n" % - (os.getpid(), msg % args)) + sys.stderr.write( + "Degrade mode - ENTERING - (pid=%d) %s\n" + % (os.getpid(), msg % args) + ) self._rotateFailed = True else: if self._rotateFailed: - sys.stderr.write("Degrade mode - EXITING - (pid=%d) %s\n" % - (os.getpid(), msg % args)) + sys.stderr.write( + "Degrade mode - EXITING - (pid=%d) %s\n" + % (os.getpid(), msg % args) + ) self._rotateFailed = False def doRollover(self): @@ -268,15 +260,15 @@ def doRollover(self): # Attempt to rename logfile to tempname: There is a slight race-condition here, but it seems unavoidable tmpname = None while not tmpname or os.path.exists(tmpname): - tmpname = "%s.rotate.%08d" % (self.baseFilename, - randint(0, 99999999)) + tmpname = "%s.rotate.%08d" % (self.baseFilename, randint(0, 99999999)) try: # Do a rename test to determine if we can successfully rename the log file os.rename(self.baseFilename, tmpname) - except (IOError, OSError): + except OSError: exc_value = sys.exc_info()[1] - self._degrade(True, "rename failed. File in use? " - "exception=%s", exc_value) + self._degrade( + True, "rename failed. File in use? 
exception=%s", exc_value + ) return # Q: Is there some way to protect this code from a KeboardInterupt? @@ -324,19 +316,18 @@ def shouldRollover(self, record): def _shouldRollover(self): if self.maxBytes > 0: # are we rolling over? try: - self.stream.seek( - 0, 2) # due to non-posix-compliant Windows feature - except IOError: + self.stream.seek(0, 2) # due to non-posix-compliant Windows feature + except OSError: return True if self.stream.tell() >= self.maxBytes: return True else: - self._degrade(False, - "Rotation done or not needed at this time") + self._degrade(False, "Rotation done or not needed at this time") return False # Publish this class to the "logging.handlers" module so that it can be use # from a logging config file via logging.config.fileConfig(). import logging.handlers + logging.handlers.ConcurrentRotatingFileHandler = ConcurrentRotatingFileHandler diff --git a/nipype/external/due.py b/nipype/external/due.py index c360435bae..47a0ae4e0f 100644 --- a/nipype/external/due.py +++ b/nipype/external/due.py @@ -24,10 +24,10 @@ License: BSD-2 """ -__version__ = '0.0.5' +__version__ = "0.0.5" -class InactiveDueCreditCollector(object): +class InactiveDueCreditCollector: """Just a stub at the Collector which would not do anything""" def _donothing(self, *args, **kwargs): @@ -45,7 +45,7 @@ def nondecorating_decorator(func): cite = load = add = _donothing def __repr__(self): - return '{}()'.format(self.__class__.__name__) + return f"{self.__class__.__name__}()" def _donothing_func(*args, **kwargs): @@ -55,9 +55,9 @@ def _donothing_func(*args, **kwargs): try: from duecredit import due, BibTeX, Doi, Url - if 'due' in locals() and not hasattr(due, 'cite'): - raise RuntimeError( - "Imported due lacks .cite. DueCredit is now disabled") + + if "due" in locals() and not hasattr(due, "cite"): + raise RuntimeError("Imported due lacks .cite. DueCredit is now disabled") except ImportError: # Initiate due stub due = InactiveDueCreditCollector() diff --git a/nipype/external/fsl_imglob.py b/nipype/external/fsl_imglob.py index 3b9a4eea4d..46ae5f2549 100755 --- a/nipype/external/fsl_imglob.py +++ b/nipype/external/fsl_imglob.py @@ -63,10 +63,8 @@ # Innovation Limited ("Isis"), the technology transfer company of the # University, to negotiate a licence. Contact details are: # innovation@isis.ox.ac.uk quoting reference DE/9564. 
-from __future__ import print_function import sys import glob -from builtins import range def usage(): @@ -80,9 +78,9 @@ def usage(): # basename and extension pair ) def isImage(input, allExtensions): for extension in allExtensions: - if input[-len(extension):] == extension: - return True, input[:-len(extension)], extension - return False, input, '' + if input[-len(extension) :] == extension: + return True, input[: -len(extension)], extension + return False, input, "" def removeImageExtension(input, allExtensions): @@ -93,16 +91,9 @@ def main(): if len(sys.argv) <= 1: usage() - if sys.version_info < (2, 4): - import sets - from sets import Set - setAvailable = False - else: - setAvailable = True - deleteExtensions = True - primaryExtensions = ['.nii.gz', '.nii', '.hdr.gz', '.hdr'] - secondaryExtensions = ['.img.gz', '.img'] + primaryExtensions = [".nii.gz", ".nii", ".hdr.gz", ".hdr"] + secondaryExtensions = [".img.gz", ".img"] allExtensions = primaryExtensions + secondaryExtensions validExtensions = primaryExtensions startingArg = 1 @@ -126,23 +117,16 @@ def main(): for currentExtension in validExtensions: filelist.extend( glob.glob( - removeImageExtension(sys.argv[arg], allExtensions) + - currentExtension)) + removeImageExtension(sys.argv[arg], allExtensions) + + currentExtension + ) + ) if deleteExtensions: - for file in range(0, len(filelist)): - filelist[file] = removeImageExtension(filelist[file], - allExtensions) - if setAvailable: - filelist = list(set(filelist)) - else: - filelist = list(Set(filelist)) - filelist.sort() + filelist = [removeImageExtension(f, allExtensions) for f in filelist] + filelist = sorted(set(filelist)) - for file in range(0, len(filelist)): - print(filelist[file], end=' ') - if file < len(filelist) - 1: - print(" ", end=' ') + print(*filelist, sep=" ", end=" ") if __name__ == "__main__": diff --git a/nipype/external/portalocker.py b/nipype/external/portalocker.py deleted file mode 100644 index 1da24d894c..0000000000 --- a/nipype/external/portalocker.py +++ /dev/null @@ -1,145 +0,0 @@ -# -*- coding: utf-8 -*- -# portalocker.py - Cross-platform (posix/nt) API for flock-style file locking. -# Requires python 1.5.2 or better. -'''Cross-platform (posix/nt) API for flock-style file locking. - -Synopsis: - - import portalocker - file = open('somefile', 'r+') - portalocker.lock(file, portalocker.LOCK_EX) - file.seek(12) - file.write('foo') - file.close() - -If you know what you're doing, you may choose to - - portalocker.unlock(file) - -before closing the file, but why? - -Methods: - - lock( file, flags ) - unlock( file ) - -Constants: - - LOCK_EX - LOCK_SH - LOCK_NB - -Exceptions: - - LockException - -Notes: - -For the 'nt' platform, this module requires the Python Extensions for Windows. -Be aware that this may not work as expected on Windows 95/98/ME. - -History: - -I learned the win32 technique for locking files from sample code -provided by John Nielsen in the documentation -that accompanies the win32 modules. 
- -Author: Jonathan Feinberg , - Lowell Alleman -Version: $Id: portalocker.py 5474 2008-05-16 20:53:50Z lowell $ - -''' -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open - -__all__ = [ - 'lock', - 'unlock', - 'LOCK_EX', - 'LOCK_SH', - 'LOCK_NB', - 'LockException', -] - -import os - - -class LockException(Exception): - # Error codes: - LOCK_FAILED = 1 - - -if os.name == 'nt': - import win32con - import win32file - import pywintypes - LOCK_EX = win32con.LOCKFILE_EXCLUSIVE_LOCK - LOCK_SH = 0 # the default - LOCK_NB = win32con.LOCKFILE_FAIL_IMMEDIATELY - # is there any reason not to reuse the following structure? - __overlapped = pywintypes.OVERLAPPED() -elif os.name == 'posix': - import fcntl - LOCK_EX = fcntl.LOCK_EX - LOCK_SH = fcntl.LOCK_SH - LOCK_NB = fcntl.LOCK_NB -else: - raise RuntimeError('PortaLocker only defined for nt and posix platforms') - -if os.name == 'nt': - - def lock(file, flags): - hfile = win32file._get_osfhandle(file.fileno()) - try: - win32file.LockFileEx(hfile, flags, 0, -0x10000, __overlapped) - except pywintypes.error as exc_value: - # error: (33, 'LockFileEx', 'The process cannot access the file - # because another process has locked a portion of the file.') - if exc_value[0] == 33: - raise LockException(LockException.LOCK_FAILED, exc_value[2]) - else: - # Q: Are there exceptions/codes we should be dealing with here? - raise - - def unlock(file): - hfile = win32file._get_osfhandle(file.fileno()) - try: - win32file.UnlockFileEx(hfile, 0, -0x10000, __overlapped) - except pywintypes.error as exc_value: - if exc_value[0] == 158: - # error: (158, 'UnlockFileEx', 'The segment is already - # unlocked.') To match the 'posix' implementation, silently - # ignore this error - pass - else: - # Q: Are there exceptions/codes we should be dealing with here? - raise - -elif os.name == 'posix': - - def lock(file, flags): - try: - fcntl.flock(file.fileno(), flags) - except IOError as exc_value: - # The exception code varies on different systems so we'll catch - # every IO error - raise LockException(*exc_value) - - def unlock(file): - fcntl.flock(file.fileno(), fcntl.LOCK_UN) - - -if __name__ == '__main__': - from time import time, strftime, localtime - import sys - from . import portalocker - - log = open('log.txt', 'a+') - portalocker.lock(log, portalocker.LOCK_EX) - timestamp = strftime('%m/%d/%Y %H:%M:%S\n', localtime(time())) - log.write(timestamp) - - print('Wrote lines. Hit enter to release lock.') - dummy = sys.stdin.readline() - log.close() diff --git a/nipype/info.py b/nipype/info.py index 7b1a757789..7b68ec909c 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -2,16 +2,10 @@ settings in setup.py, the nipy top-level docstring, and for building the docs. In setup.py in particular, we exec this file, so it cannot import nipy """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -import sys - -# nipype version information. An empty version_extra corresponds to a -# full release. 
'.dev' as a version_extra string means this is a development -# version -# Remove -dev for release -__version__ = '1.1.8-dev' +# nipype version information +# Remove .dev0 for release +__version__ = "1.8.7.dev0" def get_nipype_gitversion(): @@ -24,45 +18,51 @@ def get_nipype_gitversion(): """ import os import subprocess + try: import nipype + gitpath = os.path.realpath( - os.path.join(os.path.dirname(nipype.__file__), os.path.pardir)) + os.path.join(os.path.dirname(nipype.__file__), os.path.pardir) + ) except: gitpath = os.getcwd() - gitpathgit = os.path.join(gitpath, '.git') + gitpathgit = os.path.join(gitpath, ".git") if not os.path.exists(gitpathgit): return None ver = None try: o, _ = subprocess.Popen( - 'git describe', shell=True, cwd=gitpath, - stdout=subprocess.PIPE).communicate() + "git describe", shell=True, cwd=gitpath, stdout=subprocess.PIPE + ).communicate() except Exception: pass else: - ver = o.decode().strip().split('-')[-1] + ver = o.decode().strip().split("-")[-1] return ver -if __version__.endswith('-dev'): +if __version__.endswith("-dev"): gitversion = get_nipype_gitversion() if gitversion: - __version__ = '{}+{}'.format(__version__, gitversion) + __version__ = f"{__version__}+{gitversion}" CLASSIFIERS = [ - 'Development Status :: 5 - Production/Stable', 'Environment :: Console', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: MacOS :: MacOS X', - 'Operating System :: POSIX :: Linux', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', 'Topic :: Scientific/Engineering' + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: Apache Software License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Scientific/Engineering", ] +PYTHON_REQUIRES = ">= 3.9" -description = 'Neuroimaging in Python: Pipelines and Interfaces' +description = "Neuroimaging in Python: Pipelines and Interfaces" # Note: this long_description is actually a copy/paste from the top-level # README.txt, so that it shows up nicely on PyPI. 
So please remember to edit @@ -99,97 +99,102 @@ def get_nipype_gitversion(): """ # versions -NIBABEL_MIN_VERSION = '2.1.0' -NETWORKX_MIN_VERSION = '1.9' -NUMPY_MIN_VERSION = '1.9.0' -# Numpy bug in python 3.7: -# https://www.opensourceanswers.com/blog/you-shouldnt-use-python-37-for-data-science-right-now.html -NUMPY_MIN_VERSION_37 = '1.15.3' -NUMPY_BAD_VERSION_27 = '1.16.0' -SCIPY_MIN_VERSION = '0.14' -TRAITS_MIN_VERSION = '4.6' -DATEUTIL_MIN_VERSION = '2.2' -PYTEST_MIN_VERSION = '3.6' -FUTURE_MIN_VERSION = '0.16.0' -SIMPLEJSON_MIN_VERSION = '3.8.0' -PROV_VERSION = '1.5.2' -CLICK_MIN_VERSION = '6.6.0' -PYDOT_MIN_VERSION = '1.2.3' - -NAME = 'nipype' -MAINTAINER = 'nipype developers' -MAINTAINER_EMAIL = 'neuroimaging@python.org' +NIBABEL_MIN_VERSION = "2.1.0" +NETWORKX_MIN_VERSION = "2.0" +NUMPY_MIN_VERSION = "1.17" +NUMPY_MAX_VERSION = "2.0" +SCIPY_MIN_VERSION = "0.14" +TRAITS_MIN_VERSION = "4.6" +DATEUTIL_MIN_VERSION = "2.2" +SIMPLEJSON_MIN_VERSION = "3.8.0" +PROV_MIN_VERSION = "1.5.2" +RDFLIB_MIN_VERSION = "5.0.0" +CLICK_MIN_VERSION = "6.6.0" +PYDOT_MIN_VERSION = "1.2.3" + +NAME = "nipype" +MAINTAINER = "nipype developers" +MAINTAINER_EMAIL = "neuroimaging@python.org" DESCRIPTION = description LONG_DESCRIPTION = long_description -URL = 'http://nipy.org/nipype' -DOWNLOAD_URL = 'http://github.com/nipy/nipype/archives/master' -LICENSE = 'Apache License, 2.0' -AUTHOR = 'nipype developers' -AUTHOR_EMAIL = 'neuroimaging@python.org' -PLATFORMS = 'OS Independent' -MAJOR = __version__.split('.')[0] -MINOR = __version__.split('.')[1] -MICRO = __version__.replace('-', '.').split('.')[2] -ISRELEASE = (len(__version__.replace('-', '.').split('.')) == 3 - or 'post' in __version__.replace('-', '.').split('.')[-1]) +URL = "http://nipy.org/nipype" +DOWNLOAD_URL = "http://github.com/nipy/nipype/archives/master" +LICENSE = "Apache License, 2.0" +AUTHOR = "nipype developers" +AUTHOR_EMAIL = "neuroimaging@python.org" +PLATFORMS = "OS Independent" +MAJOR = __version__.split(".")[0] +MINOR = __version__.split(".")[1] +MICRO = __version__.replace("-", ".").split(".")[2] +ISRELEASE = ( + len(__version__.replace("-", ".").split(".")) == 3 + or "post" in __version__.replace("-", ".").split(".")[-1] +) VERSION = __version__ -PROVIDES = ['nipype'] +PROVIDES = ["nipype"] REQUIRES = [ - 'nibabel>=%s' % NIBABEL_MIN_VERSION, - 'networkx>=%s' % NETWORKX_MIN_VERSION, - 'numpy>=%s,!=%s ; python_version == "2.7"' % (NUMPY_MIN_VERSION, NUMPY_BAD_VERSION_27), - 'numpy>=%s ; python_version > "3.0" and python_version < "3.7"' % NUMPY_MIN_VERSION, - 'numpy>=%s ; python_version >= "3.7"' % NUMPY_MIN_VERSION_37, - 'python-dateutil>=%s' % DATEUTIL_MIN_VERSION, - 'scipy>=%s' % SCIPY_MIN_VERSION, - 'traits>=%s' % TRAITS_MIN_VERSION, - 'future>=%s' % FUTURE_MIN_VERSION, - 'simplejson>=%s' % SIMPLEJSON_MIN_VERSION, - 'prov>=%s' % PROV_VERSION, - 'neurdflib', - 'click>=%s' % CLICK_MIN_VERSION, - 'funcsigs', - 'mock', - 'pydotplus', - 'pydot>=%s' % PYDOT_MIN_VERSION, - 'packaging', - 'futures; python_version == "2.7"', + "click>=%s" % CLICK_MIN_VERSION, + "networkx>=%s" % NETWORKX_MIN_VERSION, + "nibabel>=%s" % NIBABEL_MIN_VERSION, + "numpy>=%s,<%s" % (NUMPY_MIN_VERSION, NUMPY_MAX_VERSION), + "packaging", + "prov>=%s" % PROV_MIN_VERSION, + "pydot>=%s" % PYDOT_MIN_VERSION, + "python-dateutil>=%s" % DATEUTIL_MIN_VERSION, + "rdflib>=%s" % RDFLIB_MIN_VERSION, + "scipy>=%s" % SCIPY_MIN_VERSION, + "simplejson>=%s" % SIMPLEJSON_MIN_VERSION, + "traits>=%s,!=5.0" % TRAITS_MIN_VERSION, + "filelock>=3.0.0", + "etelemetry>=0.2.0", + 
"looseversion!=1.2", + "puremagic", ] -if sys.version_info <= (3, 4): - REQUIRES.append('configparser') - TESTS_REQUIRES = [ - 'pytest>=%s' % PYTEST_MIN_VERSION, - 'pytest-xdist', - 'pytest-cov', - 'codecov', - 'pytest-env', - 'coverage<5' + "codecov", + "coverage", + "pytest", + "pytest-cov", + "pytest-env", + "pytest-timeout", + "pytest-doctestplus", + "sphinx", ] EXTRA_REQUIRES = { - 'doc': ['Sphinx>=1.4', 'numpydoc', 'matplotlib', 'pydotplus', 'pydot>=1.2.3'], - 'tests': TESTS_REQUIRES, - 'specs': ['yapf'], - 'nipy': ['nitime', 'nilearn<0.5.0', 'dipy', 'nipy', 'matplotlib'], - 'profiler': ['psutil>=5.0'], - 'duecredit': ['duecredit'], - 'xvfbwrapper': ['xvfbwrapper'], - 'pybids': ['pybids==0.6.5'], - 'ssh': ['paramiko'], + "data": ["datalad"], + "doc": [ + "dipy", + "ipython", + "matplotlib", + "nbsphinx", + "sphinx-argparse", + "sphinx>=2.1.2", + "sphinxcontrib-apidoc", + ], + "duecredit": ["duecredit"], + "maint": ["GitPython", "fuzzywuzzy"], + "nipy": ["nitime", "nilearn", "dipy", "nipy", "matplotlib"], + "profiler": ["psutil>=5.0"], + "pybids": ["pybids>=0.7.0"], + "specs": ["black"], + "ssh": ["paramiko"], + "tests": TESTS_REQUIRES, + "xvfbwrapper": ["xvfbwrapper"], # 'mesh': ['mayavi'] # Enable when it works } def _list_union(iterable): - return list(set(sum(iterable, []))) + return list(set(x for sublist in iterable for x in sublist)) # Enable a handle to install all extra dependencies at once -EXTRA_REQUIRES['all'] = _list_union(EXTRA_REQUIRES.values()) +EXTRA_REQUIRES["all"] = _list_union(EXTRA_REQUIRES.values()) # dev = doc + tests + specs -EXTRA_REQUIRES['dev'] = _list_union(val for key, val in EXTRA_REQUIRES.items() - if key in ('doc', 'tests', 'specs')) +EXTRA_REQUIRES["dev"] = _list_union( + val for key, val in EXTRA_REQUIRES.items() if key in ("doc", "tests", "specs") +) -STATUS = 'stable' +STATUS = "stable" diff --git a/nipype/interfaces/__init__.py b/nipype/interfaces/__init__.py index a19efa64e5..d72a463882 100644 --- a/nipype/interfaces/__init__.py +++ b/nipype/interfaces/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -6,9 +5,7 @@ Requires Packages to be installed """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" from .io import DataGrabber, DataSink, SelectFiles, BIDSDataGrabber from .utility import IdentityInterface, Rename, Function, Select, Merge diff --git a/nipype/interfaces/afni/__init__.py b/nipype/interfaces/afni/__init__.py index 7af80059f2..7e6df345bc 100644 --- a/nipype/interfaces/afni/__init__.py +++ b/nipype/interfaces/afni/__init__.py @@ -1,24 +1,91 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""The afni module provides classes for interfacing with the `AFNI -`_ command line tools. +""" +AFNI_ is a software suite for the analysis and display of anatomical and functional MRI data. + +.. include:: ../../../doc/links_names.txt -Top-level namespace for afni. 
""" from .base import Info from .preprocess import ( - AlignEpiAnatPy, Allineate, Automask, AutoTcorrelate, AutoTLRC, Bandpass, - BlurInMask, BlurToFWHM, ClipLevel, DegreeCentrality, Despike, Detrend, ECM, - Fim, Fourier, Hist, LFCD, Maskave, Means, OutlierCount, QualityIndex, - ROIStats, Retroicor, Seg, SkullStrip, TCorr1D, TCorrMap, TCorrelate, TNorm, - TProject, TShift, Volreg, Warp, QwarpPlusMinus, Qwarp) -from .svm import (SVMTest, SVMTrain) + AlignEpiAnatPy, + Allineate, + Automask, + AutoTcorrelate, + AutoTLRC, + Bandpass, + BlurInMask, + BlurToFWHM, + ClipLevel, + DegreeCentrality, + Despike, + Detrend, + ECM, + Fim, + Fourier, + Hist, + LFCD, + Maskave, + Means, + NetCorr, + OutlierCount, + QualityIndex, + ROIStats, + Retroicor, + Seg, + SkullStrip, + TCorr1D, + TCorrMap, + TCorrelate, + TNorm, + TProject, + TShift, + TSmooth, + Volreg, + Warp, + QwarpPlusMinus, + Qwarp, +) +from .svm import SVMTest, SVMTrain from .utils import ( - ABoverlap, AFNItoNIFTI, Autobox, Axialize, BrickStat, Bucket, Calc, Cat, - CatMatvec, CenterMass, ConvertDset, Copy, Dot, Edge3, Eval, FWHMx, - LocalBistat, Localstat, MaskTool, Merge, Notes, NwarpApply, NwarpAdjust, - NwarpCat, OneDToolPy, Refit, ReHo, Resample, TCat, TCatSubBrick, TStat, - To3D, Unifize, Undump, ZCutUp, GCOR, Zcat, Zeropad) -from .model import (Deconvolve, Remlfit, Synthesize) + ABoverlap, + AFNItoNIFTI, + Autobox, + Axialize, + BrickStat, + Bucket, + Calc, + Cat, + CatMatvec, + CenterMass, + ConvertDset, + Copy, + Dot, + Edge3, + Eval, + FWHMx, + LocalBistat, + Localstat, + MaskTool, + Merge, + Notes, + NwarpApply, + NwarpAdjust, + NwarpCat, + OneDToolPy, + Refit, + ReHo, + Resample, + TCat, + TCatSubBrick, + TStat, + To3D, + Unifize, + Undump, + ZCutUp, + GCOR, + Zcat, + Zeropad, +) +from .model import Deconvolve, Remlfit, Synthesize diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py index d4b8e474ff..e883b22c6e 100644 --- a/nipype/interfaces/afni/base.py +++ b/nipype/interfaces/afni/base.py @@ -1,52 +1,56 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provide interface to AFNI commands.""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import object, str -from future.utils import raise_from - +"""Provide a base interface to AFNI commands.""" import os from sys import platform -from distutils import spawn +import shutil -from ... import logging, LooseVersion -from ...utils.filemanip import split_filename, fname_presuffix +from looseversion import LooseVersion -from ..base import (CommandLine, traits, CommandLineInputSpec, isdefined, File, - TraitedSpec, PackageInfo) +from ... import logging +from ...utils.filemanip import split_filename, fname_presuffix +from ..base import ( + CommandLine, + traits, + CommandLineInputSpec, + isdefined, + File, + TraitedSpec, + PackageInfo, +) from ...external.due import BibTeX # Use nipype's logging system -IFLOGGER = logging.getLogger('nipype.interface') +IFLOGGER = logging.getLogger("nipype.interface") class Info(PackageInfo): - """Handle afni output type and version information. 
- """ - __outputtype = 'AFNI' - ftypes = {'NIFTI': '.nii', 'AFNI': '', 'NIFTI_GZ': '.nii.gz'} - version_cmd = 'afni --version' + """Handle afni output type and version information.""" + + __outputtype = "AFNI" + ftypes = {"NIFTI": ".nii", "AFNI": "", "NIFTI_GZ": ".nii.gz"} + version_cmd = "afni --version" @staticmethod def parse_version(raw_info): - version_stamp = raw_info.split('\n')[0].split('Version ')[1] - if version_stamp.startswith('AFNI'): - version_stamp = version_stamp.split('AFNI_')[1] - elif version_stamp.startswith('Debian'): - version_stamp = version_stamp.split('Debian-')[1].split('~')[0] + """Check and parse AFNI's version.""" + version_stamp = raw_info.split("\n")[0].split("Version ")[1] + if version_stamp.startswith("AFNI"): + version_stamp = version_stamp.split("AFNI_")[1] + elif version_stamp.startswith("Debian"): + version_stamp = version_stamp.split("Debian-")[1].split("~")[0] else: return None - version = LooseVersion(version_stamp.replace('_', '.')).version[:3] + version = LooseVersion(version_stamp.replace("_", ".")).version[:3] if version[0] < 1000: version[0] = version[0] + 2000 return tuple(version) @classmethod def output_type_to_ext(cls, outputtype): - """Get the file extension for the given output type. + """ + Get the file extension for the given output type. Parameters ---------- @@ -57,40 +61,45 @@ def output_type_to_ext(cls, outputtype): ------- extension : str The file extension for the output type. - """ + """ try: return cls.ftypes[outputtype] except KeyError as e: - msg = 'Invalid AFNIOUTPUTTYPE: ', outputtype - raise_from(KeyError(msg), e) + msg = "Invalid AFNIOUTPUTTYPE: ", outputtype + raise KeyError(msg) from e @classmethod def outputtype(cls): - """AFNI has no environment variables, - Output filetypes get set in command line calls - Nipype uses AFNI as default + """ + Set default output filetype. + + AFNI has no environment variables; output filetypes get set in command-line calls. + Nipype uses ``AFNI`` as the default. Returns ------- None + """ - # warn(('AFNI has no environment variable that sets filetype ' - # 'Nipype uses NIFTI_GZ as default')) - return 'AFNI' + return "AFNI" @staticmethod def standard_image(img_name): - '''Grab an image from the standard location. + """ + Grab an image from the standard location. - Could be made more fancy to allow for more relocatability''' + Could be made more fancy to allow for more relocatability + + """ clout = CommandLine( - 'which afni', + "which afni", ignore_exception=True, resource_monitor=False, - terminal_output='allatonce').run() - if clout.runtime.returncode is not 0: + terminal_output="allatonce", + ).run() + if clout.runtime.returncode != 0: return None out = clout.runtime.stdout @@ -100,68 +109,84 @@ def standard_image(img_name): class AFNICommandBase(CommandLine): """ - A base class to fix a linking problem in OSX and afni. - See http://afni.nimh.nih.gov/afni/community/board/read.php?1,145346,145347#msg-145347 + A base class to fix a linking problem in OSX and AFNI. + + See Also + -------- + `This thread + <http://afni.nimh.nih.gov/afni/community/board/read.php?1,145346,145347#msg-145347>`__ + about the particular environment variable that fixes this problem.
+ """ - def _run_interface(self, runtime): - if platform == 'darwin': - runtime.environ['DYLD_FALLBACK_LIBRARY_PATH'] = '/usr/local/afni/' - return super(AFNICommandBase, self)._run_interface(runtime) + def _run_interface(self, runtime, correct_return_codes=(0,)): + if platform == "darwin": + runtime.environ["DYLD_FALLBACK_LIBRARY_PATH"] = "/usr/local/afni/" + return super()._run_interface(runtime, correct_return_codes) class AFNICommandInputSpec(CommandLineInputSpec): num_threads = traits.Int( - 1, usedefault=True, nohash=True, desc='set number of threads') + 1, usedefault=True, nohash=True, desc="set number of threads" + ) outputtype = traits.Enum( - 'AFNI', list(Info.ftypes.keys()), desc='AFNI output filetype') + "AFNI", list(Info.ftypes.keys()), desc="AFNI output filetype" + ) out_file = File( name_template="%s_afni", - desc='output image file name', - argstr='-prefix %s', - name_source=["in_file"]) + desc="output image file name", + argstr="-prefix %s", + name_source=["in_file"], + ) class AFNICommandOutputSpec(TraitedSpec): - out_file = File(desc='output file', exists=True) + out_file = File(desc="output file", exists=True) class AFNICommand(AFNICommandBase): - """Shared options for several AFNI commands """ + """Shared options for several AFNI commands.""" + input_spec = AFNICommandInputSpec _outputtype = None - references_ = [{ - 'entry': - BibTeX('@article{Cox1996,' - 'author={R.W. Cox},' - 'title={AFNI: software for analysis and ' - 'visualization of functional magnetic ' - 'resonance neuroimages},' - 'journal={Computers and Biomedical research},' - 'volume={29},' - 'number={3},' - 'pages={162-173},' - 'year={1996},' - '}'), - 'tags': ['implementation'], - }, { - 'entry': - BibTeX('@article{CoxHyde1997,' - 'author={R.W. Cox and J.S. Hyde},' - 'title={Software tools for analysis and ' - 'visualization of fMRI data},' - 'journal={NMR in Biomedicine},' - 'volume={10},' - 'number={45},' - 'pages={171-178},' - 'year={1997},' - '}'), - 'tags': ['implementation'], - }] + _references = [ + { + "entry": BibTeX( + "@article{Cox1996," + "author={R.W. Cox}," + "title={AFNI: software for analysis and " + "visualization of functional magnetic " + "resonance neuroimages}," + "journal={Computers and Biomedical research}," + "volume={29}," + "number={3}," + "pages={162-173}," + "year={1996}," + "}" + ), + "tags": ["implementation"], + }, + { + "entry": BibTeX( + "@article{CoxHyde1997," + "author={R.W. Cox and J.S. Hyde}," + "title={Software tools for analysis and " + "visualization of fMRI data}," + "journal={NMR in Biomedicine}," + "volume={10}," + "number={45}," + "pages={171-178}," + "year={1997}," + "}" + ), + "tags": ["implementation"], + }, + ] @property def num_threads(self): + """Get number of threads.""" return self.inputs.num_threads @num_threads.setter @@ -170,25 +195,26 @@ def num_threads(self, value): @classmethod def set_default_output_type(cls, outputtype): - """Set the default output type for AFNI classes. + """ + Set the default output type for AFNI classes. This method is used to set the default output type for all afni subclasses. However, setting this will not update the output type for any existing instances. For these, assign the .inputs.outputtype. 
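For reference, the output-type plumbing above reduces to a dictionary lookup plus a class-level default; a standalone sketch of that mapping (mirroring Info.ftypes and output_type_to_ext as defined earlier, not the full class):

    ftypes = {"NIFTI": ".nii", "AFNI": "", "NIFTI_GZ": ".nii.gz"}

    def output_type_to_ext(outputtype):
        try:
            return ftypes[outputtype]
        except KeyError as e:
            raise KeyError("Invalid AFNIOUTPUTTYPE: %s" % outputtype) from e

    assert output_type_to_ext("NIFTI_GZ") == ".nii.gz"
    assert output_type_to_ext("AFNI") == ""  # native BRIK/HEAD outputs carry no extension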
""" - if outputtype in Info.ftypes: cls._outputtype = outputtype else: - raise AttributeError('Invalid AFNI outputtype: %s' % outputtype) + raise AttributeError("Invalid AFNI outputtype: %s" % outputtype) def __init__(self, **inputs): - super(AFNICommand, self).__init__(**inputs) - self.inputs.on_trait_change(self._output_update, 'outputtype') + """Instantiate an AFNI command tool wrapper.""" + super().__init__(**inputs) + self.inputs.on_trait_change(self._output_update, "outputtype") - if hasattr(self.inputs, 'num_threads'): - self.inputs.on_trait_change(self._nthreads_update, 'num_threads') + if hasattr(self.inputs, "num_threads"): + self.inputs.on_trait_change(self._nthreads_update, "num_threads") if self._outputtype is None: self._outputtype = Info.outputtype() @@ -199,23 +225,27 @@ def __init__(self, **inputs): self._output_update() def _nthreads_update(self): - """Update environment with new number of threads""" - self.inputs.environ['OMP_NUM_THREADS'] = '%d' % self.inputs.num_threads + """Update environment with new number of threads.""" + self.inputs.environ["OMP_NUM_THREADS"] = "%d" % self.inputs.num_threads def _output_update(self): - """ i think? updates class private attribute based on instance input - in fsl also updates ENVIRON variable....not valid in afni - as it uses no environment variables + """ + Update the internal property with the provided input. + + i think? updates class private attribute based on instance input + in fsl also updates ENVIRON variable....not valid in afni + as it uses no environment variables """ self._outputtype = self.inputs.outputtype def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) return os.path.join( - path, base + Info.output_type_to_ext(self.inputs.outputtype)) + path, base + Info.output_type_to_ext(self.inputs.outputtype) + ) def _list_outputs(self): - outputs = super(AFNICommand, self)._list_outputs() + outputs = super()._list_outputs() metadata = dict(name_source=lambda t: t is not None) out_names = list(self.inputs.traits(**metadata).keys()) if out_names: @@ -226,17 +256,13 @@ def _list_outputs(self): outputs[name] = outputs[name] + "+orig.BRIK" return outputs - def _gen_fname(self, - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None): - """Generate a filename based on the given parameters. + def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None): + """ + Generate a filename based on the given parameters. The filename will take the form: cwd/basename. - If change_ext is True, it will use the extentions specified in - intputs.output_type. + If change_ext is True, it will use the extensions specified in + inputs.output_type. Parameters ---------- @@ -256,48 +282,48 @@ def _gen_fname(self, New filename based on given parameters. """ - - if basename == '': - msg = 'Unable to generate filename for command %s. ' % self.cmd - msg += 'basename is not set!' + if not basename: + msg = "Unable to generate filename for command %s. " % self.cmd + msg += "basename is not set!" 
raise ValueError(msg) + if cwd is None: cwd = os.getcwd() if ext is None: ext = Info.output_type_to_ext(self.inputs.outputtype) if change_ext: - if suffix: - suffix = ''.join((suffix, ext)) - else: - suffix = ext + suffix = f"{suffix}{ext}" if suffix else ext + if suffix is None: - suffix = '' - fname = fname_presuffix( - basename, suffix=suffix, use_ext=False, newpath=cwd) + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) return fname -def no_afni(): - """ Checks if AFNI is available """ - if Info.version() is None: - return True - return False - - class AFNIPythonCommandInputSpec(CommandLineInputSpec): outputtype = traits.Enum( - 'AFNI', list(Info.ftypes.keys()), desc='AFNI output filetype') + "AFNI", list(Info.ftypes.keys()), desc="AFNI output filetype" + ) py27_path = traits.Either( - 'python2', File(exists=True), usedefault=True, default='python2') + "python2", File(exists=True), usedefault=True, default="python2" + ) class AFNIPythonCommand(AFNICommand): + """A subtype of AFNI command line for Python scripts.""" + @property def cmd(self): - orig_cmd = super(AFNIPythonCommand, self).cmd - found = spawn.find_executable(orig_cmd) + """Revise the command path.""" + orig_cmd = super().cmd + found = shutil.which(orig_cmd) return found if found is not None else orig_cmd @property def _cmd_prefix(self): - return "{} ".format(self.inputs.py27_path) + return f"{self.inputs.py27_path} " + + +def no_afni(): + """Check whether AFNI is not available.""" + return Info.version() is None diff --git a/nipype/interfaces/afni/model.py b/nipype/interfaces/afni/model.py index 2cccdfe869..e3a7348b3b 100644 --- a/nipype/interfaces/afni/model.py +++ b/nipype/interfaces/afni/model.py @@ -1,231 +1,263 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft = python sts = 4 ts = 4 sw = 4 et: -"""AFNI modeling interfaces +""" +AFNI modeling interfaces. Examples -------- See the docstrings of the individual classes for examples. + """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os -from ..base import (CommandLineInputSpec, CommandLine, Directory, TraitedSpec, - traits, isdefined, File, InputMultiPath, Undefined, Str) -from ...external.due import BibTeX +from ..base import ( + TraitedSpec, + traits, + isdefined, + File, + InputMultiPath, + Str, + Tuple, +) -from .base import (AFNICommandBase, AFNICommand, AFNICommandInputSpec, - AFNICommandOutputSpec) +from .base import ( + AFNICommand, + AFNICommandInputSpec, + AFNICommandOutputSpec, +) class DeconvolveInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( File(exists=True), - desc='filenames of 3D+time input datasets. More than one filename can ' - 'be given and the datasets will be auto-catenated in time. ' - 'You can input a 1D time series file here, but the time axis ' - 'should run along the ROW direction, not the COLUMN direction as ' - 'in the \'input1D\' option.', - argstr='-input %s', + desc="filenames of 3D+time input datasets. More than one filename can " + "be given and the datasets will be auto-catenated in time. 
" + "You can input a 1D time series file here, but the time axis " + "should run along the ROW direction, not the COLUMN direction as " + "in the 'input1D' option.", + argstr="-input %s", copyfile=False, sep=" ", - position=1) + position=1, + ) sat = traits.Bool( - desc='check the dataset time series for initial saturation transients,' - ' which should normally have been excised before data analysis.', - argstr='-sat', - xor=['trans']) + desc="check the dataset time series for initial saturation transients," + " which should normally have been excised before data analysis.", + argstr="-sat", + xor=["trans"], + ) trans = traits.Bool( - desc='check the dataset time series for initial saturation transients,' - ' which should normally have been excised before data analysis.', - argstr='-trans', - xor=['sat']) + desc="check the dataset time series for initial saturation transients," + " which should normally have been excised before data analysis.", + argstr="-trans", + xor=["sat"], + ) noblock = traits.Bool( - desc='normally, if you input multiple datasets with \'input\', then ' - 'the separate datasets are taken to be separate image runs that ' - 'get separate baseline models. Use this options if you want to ' - 'have the program consider these to be all one big run.' - '* If any of the input dataset has only 1 sub-brick, then this ' - 'option is automatically invoked!' - '* If the auto-catenation feature isn\'t used, then this option ' - 'has no effect, no how, no way.', - argstr='-noblock') + desc="normally, if you input multiple datasets with 'input', then " + "the separate datasets are taken to be separate image runs that " + "get separate baseline models. Use this options if you want to " + "have the program consider these to be all one big run." + "* If any of the input dataset has only 1 sub-brick, then this " + "option is automatically invoked!" + "* If the auto-catenation feature isn't used, then this option " + "has no effect, no how, no way.", + argstr="-noblock", + ) force_TR = traits.Float( - desc='use this value instead of the TR in the \'input\' ' - 'dataset. (It\'s better to fix the input using Refit.)', - argstr='-force_TR %f', - position=0) + desc="use this value instead of the TR in the 'input' " + "dataset. (It's better to fix the input using Refit.)", + argstr="-force_TR %f", + position=0, + ) input1D = File( - desc='filename of single (fMRI) .1D time series where time runs down ' - 'the column.', - argstr='-input1D %s', - exists=True) + desc="filename of single (fMRI) .1D time series where time runs down " + "the column.", + argstr="-input1D %s", + exists=True, + ) TR_1D = traits.Float( - desc='TR to use with \'input1D\'. This option has no effect if you do ' - 'not also use \'input1D\'.', - argstr='-TR_1D %f') + desc="TR to use with 'input1D'. This option has no effect if you do " + "not also use 'input1D'.", + argstr="-TR_1D %f", + ) legendre = traits.Bool( - desc='use Legendre polynomials for null hypothesis (baseline model)', - argstr='-legendre') + desc="use Legendre polynomials for null hypothesis (baseline model)", + argstr="-legendre", + ) nolegendre = traits.Bool( - desc='use power polynomials for null hypotheses. Don\'t do this ' - 'unless you are crazy!', - argstr='-nolegendre') + desc="use power polynomials for null hypotheses. 
Don't do this "
+        "unless you are crazy!",
+        argstr="-nolegendre",
+    )
    nodmbase = traits.Bool(
-        desc='don\'t de-mean baseline time series', argstr='-nodmbase')
+        desc="don't de-mean baseline time series", argstr="-nodmbase"
+    )
    dmbase = traits.Bool(
-        desc='de-mean baseline time series (default if \'polort\' >= 0)',
-        argstr='-dmbase')
+        desc="de-mean baseline time series (default if 'polort' >= 0)", argstr="-dmbase"
+    )
    svd = traits.Bool(
-        desc='use SVD instead of Gaussian elimination (default)',
-        argstr='-svd')
-    nosvd = traits.Bool(
-        desc='use Gaussian elimination instead of SVD', argstr='-nosvd')
+        desc="use SVD instead of Gaussian elimination (default)", argstr="-svd"
+    )
+    nosvd = traits.Bool(desc="use Gaussian elimination instead of SVD", argstr="-nosvd")
    rmsmin = traits.Float(
-        desc='minimum rms error to reject reduced model (default = 0; don\'t '
-        'use this option normally!)',
-        argstr='-rmsmin %f')
+        desc="minimum rms error to reject reduced model (default = 0; don't "
+        "use this option normally!)",
+        argstr="-rmsmin %f",
+    )
    nocond = traits.Bool(
-        desc='DON\'T calculate matrix condition number', argstr='-nocond')
+        desc="DON'T calculate matrix condition number", argstr="-nocond"
+    )
    singvals = traits.Bool(
-        desc='print out the matrix singular values', argstr='-singvals')
+        desc="print out the matrix singular values", argstr="-singvals"
+    )
    goforit = traits.Int(
-        desc='use this to proceed even if the matrix has bad problems (e.g., '
-        'duplicate columns, large condition number, etc.).',
-        argstr='-GOFORIT %i')
+        desc="use this to proceed even if the matrix has bad problems (e.g., "
+        "duplicate columns, large condition number, etc.).",
+        argstr="-GOFORIT %i",
+    )
    allzero_OK = traits.Bool(
-        desc='don\'t consider all zero matrix columns to be the type of error '
-        'that \'gotforit\' is needed to ignore.',
-        argstr='-allzero_OK')
-    dname = traits.Tuple(
-        Str,
-        Str,
-        desc='set environmental variable to provided value',
-        argstr='-D%s=%s')
+        desc="don't consider all zero matrix columns to be the type of error "
+        "that 'goforit' is needed to ignore.",
+        argstr="-allzero_OK",
+    )
+    dname = Tuple(
+        Str, Str, desc="set environmental variable to provided value", argstr="-D%s=%s"
+    )
    mask = File(
-        desc='filename of 3D mask dataset; only data time series from within '
-        'the mask will be analyzed; results for voxels outside the mask '
-        'will be set to zero.',
-        argstr='-mask %s',
-        exists=True)
+        desc="filename of 3D mask dataset; only data time series from within "
+        "the mask will be analyzed; results for voxels outside the mask "
+        "will be set to zero.",
+        argstr="-mask %s",
+        exists=True,
+    )
    automask = traits.Bool(
-        desc='build a mask automatically from input data (will be slow for '
-        'long time series datasets)',
-        argstr='-automask')
+        desc="build a mask automatically from input data (will be slow for "
+        "long time series datasets)",
+        argstr="-automask",
+    )
    STATmask = File(
-        desc='build a mask from provided file, and use this mask for the '
-        'purpose of reporting truncation-to float issues AND for '
-        'computing the FDR curves. The actual results ARE not masked '
-        'with this option (only with \'mask\' or \'automask\' options).',
-        argstr='-STATmask %s',
-        exists=True)
+        desc="build a mask from provided file, and use this mask for the "
+        "purpose of reporting truncation-to float issues AND for "
+        "computing the FDR curves. 
The actual results ARE not masked " + "with this option (only with 'mask' or 'automask' options).", + argstr="-STATmask %s", + exists=True, + ) censor = File( - desc='filename of censor .1D time series. This is a file of 1s and ' - '0s, indicating which time points are to be included (1) and ' - 'which are to be excluded (0).', - argstr='-censor %s', - exists=True) + desc="filename of censor .1D time series. This is a file of 1s and " + "0s, indicating which time points are to be included (1) and " + "which are to be excluded (0).", + argstr="-censor %s", + exists=True, + ) polort = traits.Int( - desc='degree of polynomial corresponding to the null hypothesis ' - '[default: 1]', - argstr='-polort %d') - ortvec = traits.Tuple( - File(desc='filename', exists=True), - Str(desc='label'), - desc='this option lets you input a rectangular array of 1 or more ' - 'baseline vectors from a file. This method is a fast way to ' - 'include a lot of baseline regressors in one step. ', - argstr='-ortvec %s %s') - x1D = File(desc='specify name for saved X matrix', argstr='-x1D %s') + desc="degree of polynomial corresponding to the null hypothesis " + "[default: 1]", + argstr="-polort %d", + ) + ortvec = Tuple( + File(desc="filename", exists=True), + Str(desc="label"), + desc="this option lets you input a rectangular array of 1 or more " + "baseline vectors from a file. This method is a fast way to " + "include a lot of baseline regressors in one step. ", + argstr="-ortvec %s %s", + ) + x1D = File(desc="specify name for saved X matrix", argstr="-x1D %s") x1D_stop = traits.Bool( - desc='stop running after writing .xmat.1D file', argstr='-x1D_stop') + desc="stop running after writing .xmat.1D file", argstr="-x1D_stop" + ) cbucket = traits.Str( - desc='Name for dataset in which to save the regression ' - 'coefficients (no statistics). This dataset ' - 'will be used in a -xrestore run [not yet implemented] ' - 'instead of the bucket dataset, if possible.', - argstr='-cbucket %s') - out_file = File(desc='output statistics file', argstr='-bucket %s') + desc="Name for dataset in which to save the regression " + "coefficients (no statistics). 
This dataset " + "will be used in a -xrestore run [not yet implemented] " + "instead of the bucket dataset, if possible.", + argstr="-cbucket %s", + ) + out_file = File(desc="output statistics file", argstr="-bucket %s") num_threads = traits.Int( - desc='run the program with provided number of sub-processes', - argstr='-jobs %d', - nohash=True) - fout = traits.Bool( - desc='output F-statistic for each stimulus', argstr='-fout') + desc="run the program with provided number of sub-processes", + argstr="-jobs %d", + nohash=True, + ) + fout = traits.Bool(desc="output F-statistic for each stimulus", argstr="-fout") rout = traits.Bool( - desc='output the R^2 statistic for each stimulus', argstr='-rout') - tout = traits.Bool( - desc='output the T-statistic for each stimulus', argstr='-tout') + desc="output the R^2 statistic for each stimulus", argstr="-rout" + ) + tout = traits.Bool(desc="output the T-statistic for each stimulus", argstr="-tout") vout = traits.Bool( - desc='output the sample variance (MSE) for each stimulus', - argstr='-vout') + desc="output the sample variance (MSE) for each stimulus", argstr="-vout" + ) nofdr = traits.Bool( - desc="Don't compute the statistic-vs-FDR curves for the bucket " - "dataset.", - argstr='-noFDR') + desc="Don't compute the statistic-vs-FDR curves for the bucket dataset.", + argstr="-noFDR", + ) global_times = traits.Bool( - desc='use global timing for stimulus timing files', - argstr='-global_times', - xor=['local_times']) + desc="use global timing for stimulus timing files", + argstr="-global_times", + xor=["local_times"], + ) local_times = traits.Bool( - desc='use local timing for stimulus timing files', - argstr='-local_times', - xor=['global_times']) + desc="use local timing for stimulus timing files", + argstr="-local_times", + xor=["global_times"], + ) num_stimts = traits.Int( - desc='number of stimulus timing files', - argstr='-num_stimts %d', - position=-6) + desc="number of stimulus timing files", argstr="-num_stimts %d", position=-6 + ) stim_times = traits.List( - traits.Tuple( - traits.Int(desc='k-th response model'), - File(desc='stimulus timing file', exists=True), - Str(desc='model')), - desc='generate a response model from a set of stimulus times' - ' given in file.', - argstr='-stim_times %d %s \'%s\'...', - position=-5) + Tuple( + traits.Int(desc="k-th response model"), + File(desc="stimulus timing file", exists=True), + Str(desc="model"), + ), + desc="generate a response model from a set of stimulus times given in file.", + argstr="-stim_times %d %s '%s'...", + position=-5, + ) stim_label = traits.List( - traits.Tuple( - traits.Int(desc='k-th input stimulus'), - Str(desc='stimulus label')), - desc='label for kth input stimulus (e.g., Label1)', - argstr='-stim_label %d %s...', - requires=['stim_times'], - position=-4) + Tuple(traits.Int(desc="k-th input stimulus"), Str(desc="stimulus label")), + desc="label for kth input stimulus (e.g., Label1)", + argstr="-stim_label %d %s...", + requires=["stim_times"], + position=-4, + ) stim_times_subtract = traits.Float( - desc='this option means to subtract specified seconds from each time ' - 'encountered in any \'stim_times\' option. The purpose of this ' - 'option is to make it simple to adjust timing files for the ' - 'removal of images from the start of each imaging run.', - argstr='-stim_times_subtract %f') + desc="this option means to subtract specified seconds from each time " + "encountered in any 'stim_times' option. 
The purpose of this "
+        "option is to make it simple to adjust timing files for the "
+        "removal of images from the start of each imaging run.",
+        argstr="-stim_times_subtract %f",
+    )
    num_glt = traits.Int(
-        desc='number of general linear tests (i.e., contrasts)',
-        argstr='-num_glt %d',
-        position=-3)
+        desc="number of general linear tests (i.e., contrasts)",
+        argstr="-num_glt %d",
+        position=-3,
+    )
    gltsym = traits.List(
-        Str(desc='symbolic general linear test'),
-        desc='general linear tests (i.e., contrasts) using symbolic '
-        'conventions (e.g., \'+Label1 -Label2\')',
-        argstr='-gltsym \'SYM: %s\'...',
-        position=-2)
+        Str(desc="symbolic general linear test"),
+        desc="general linear tests (i.e., contrasts) using symbolic "
+        "conventions (e.g., '+Label1 -Label2')",
+        argstr="-gltsym 'SYM: %s'...",
+        position=-2,
+    )
    glt_label = traits.List(
-        traits.Tuple(
-            traits.Int(desc='k-th general linear test'),
-            Str(desc='GLT label')),
-        desc='general linear test (i.e., contrast) labels',
-        argstr='-glt_label %d %s...',
-        requires=['gltsym'],
-        position=-1)
+        Tuple(traits.Int(desc="k-th general linear test"), Str(desc="GLT label")),
+        desc="general linear test (i.e., contrast) labels",
+        argstr="-glt_label %d %s...",
+        requires=["gltsym"],
+        position=-1,
+    )


 class DeconvolveOutputSpec(TraitedSpec):
-    out_file = File(desc='output statistics file', exists=True)
+    out_file = File(desc="output statistics file", exists=True)
    reml_script = File(
-        desc='automatical generated script to run 3dREMLfit', exists=True)
-    x1D = File(desc='save out X matrix', exists=True)
-    cbucket = File(desc='output regression coefficients file (if generated)')
+        desc="automatically generated script to run 3dREMLfit", exists=True
+    )
+    x1D = File(desc="save out X matrix", exists=True)
+    cbucket = File(desc="output regression coefficients file (if generated)")


 class Deconvolve(AFNICommand):
@@ -252,57 +284,54 @@ class Deconvolve(AFNICommand):
    >>> res = deconvolve.run()  # doctest: +SKIP

    """

-    _cmd = '3dDeconvolve'
+    _cmd = "3dDeconvolve"
    input_spec = DeconvolveInputSpec
    output_spec = DeconvolveOutputSpec

    def _format_arg(self, name, trait_spec, value):
-        if name == 'gltsym':
+        if name == "gltsym":
            for n, val in enumerate(value):
-                if val.startswith('SYM: '):
-                    value[n] = val.lstrip('SYM: ')
+                if val.startswith("SYM: "):
+                    value[n] = val[len("SYM: "):]

-        return super(Deconvolve, self)._format_arg(name, trait_spec, value)
+        return super()._format_arg(name, trait_spec, value)

    def _parse_inputs(self, skip=None):
        if skip is None:
            skip = []
-        if len(self.inputs.stim_times) and not isdefined(
-                self.inputs.num_stimts):
+        if len(self.inputs.stim_times) and not isdefined(self.inputs.num_stimts):
            self.inputs.num_stimts = len(self.inputs.stim_times)
        if len(self.inputs.gltsym) and not isdefined(self.inputs.num_glt):
            self.inputs.num_glt = len(self.inputs.gltsym)
        if not isdefined(self.inputs.out_file):
-            self.inputs.out_file = 'Decon.nii'
+            self.inputs.out_file = "Decon.nii"

-        return super(Deconvolve, self)._parse_inputs(skip)
+        return super()._parse_inputs(skip)

    def _list_outputs(self):
        outputs = self.output_spec().get()

        _gen_fname_opts = {}
-        _gen_fname_opts['basename'] = self.inputs.out_file
-        _gen_fname_opts['cwd'] = os.getcwd()
+        _gen_fname_opts["basename"] = self.inputs.out_file
+        _gen_fname_opts["cwd"] = os.getcwd()

        if isdefined(self.inputs.x1D):
-            if not self.inputs.x1D.endswith('.xmat.1D'):
-                outputs['x1D'] = os.path.abspath(self.inputs.x1D + '.xmat.1D')
+            if not self.inputs.x1D.endswith(".xmat.1D"):
+                outputs["x1D"] = 
os.path.abspath(self.inputs.x1D + ".xmat.1D")
            else:
-                outputs['x1D'] = os.path.abspath(self.inputs.x1D)
+                outputs["x1D"] = os.path.abspath(self.inputs.x1D)
        else:
-            outputs['x1D'] = self._gen_fname(
-                suffix='.xmat.1D', **_gen_fname_opts)
+            outputs["x1D"] = self._gen_fname(suffix=".xmat.1D", **_gen_fname_opts)

        if isdefined(self.inputs.cbucket):
-            outputs['cbucket'] = os.path.abspath(self.inputs.cbucket)
+            outputs["cbucket"] = os.path.abspath(self.inputs.cbucket)

-        outputs['reml_script'] = self._gen_fname(
-            suffix='.REML_cmd', **_gen_fname_opts)
+        outputs["reml_script"] = self._gen_fname(suffix=".REML_cmd", **_gen_fname_opts)

        # remove out_file from outputs if x1d_stop set to True
        if self.inputs.x1D_stop:
-            del outputs['out_file'], outputs['cbucket']
+            del outputs["out_file"], outputs["cbucket"]
        else:
-            outputs['out_file'] = os.path.abspath(self.inputs.out_file)
+            outputs["out_file"] = os.path.abspath(self.inputs.out_file)

        return outputs
@@ -311,235 +340,259 @@ class RemlfitInputSpec(AFNICommandInputSpec):
    # mandatory files
    in_files = InputMultiPath(
        File(exists=True),
-        desc='Read time series dataset',
+        desc="Read time series dataset",
        argstr='-input "%s"',
        mandatory=True,
        copyfile=False,
-        sep=" ")
+        sep=" ",
+    )
    matrix = File(
-        desc='the design matrix file, which should have been output from '
-        'Deconvolve via the \'x1D\' option',
-        argstr='-matrix %s',
-        mandatory=True)
+        desc="the design matrix file, which should have been output from "
+        "Deconvolve via the 'x1D' option",
+        argstr="-matrix %s",
+        mandatory=True,
+    )

    # "Semi-Hidden Alternative Ways to Define the Matrix"
    polort = traits.Int(
-        desc='if no \'matrix\' option is given, AND no \'matim\' option, '
-        'create a matrix with Legendre polynomial regressors'
-        'up to the specified order. The default value is 0, which'
-        'produces a matrix with a single column of all ones',
-        argstr='-polort %d',
-        xor=['matrix'])
-    matim = traits.File(
-        desc='read a standard file as the matrix. You can use only Col as '
-        'a name in GLTs with these nonstandard matrix input methods, '
-        'since the other names come from the \'matrix\' file. '
-        'These mutually exclusive options are ignored if \'matrix\' '
-        'is used.',
-        argstr='-matim %s',
-        xor=['matrix'])
+        desc="if no 'matrix' option is given, AND no 'matim' option, "
+        "create a matrix with Legendre polynomial regressors "
+        "up to the specified order. The default value is 0, which "
+        "produces a matrix with a single column of all ones",
+        argstr="-polort %d",
+        xor=["matrix"],
+    )
+    matim = File(
+        desc="read a standard file as the matrix. You can use only Col as "
+        "a name in GLTs with these nonstandard matrix input methods, "
+        "since the other names come from the 'matrix' file. 
" + "These mutually exclusive options are ignored if 'matrix' " + "is used.", + argstr="-matim %s", + xor=["matrix"], + ) # Other arguments mask = File( - desc='filename of 3D mask dataset; only data time series from within ' - 'the mask will be analyzed; results for voxels outside the mask ' - 'will be set to zero.', - argstr='-mask %s', - exists=True) + desc="filename of 3D mask dataset; only data time series from within " + "the mask will be analyzed; results for voxels outside the mask " + "will be set to zero.", + argstr="-mask %s", + exists=True, + ) automask = traits.Bool( usedefault=True, - argstr='-automask', - desc='build a mask automatically from input data (will be slow for ' - 'long time series datasets)') + argstr="-automask", + desc="build a mask automatically from input data (will be slow for " + "long time series datasets)", + ) STATmask = File( - desc='filename of 3D mask dataset to be used for the purpose ' - 'of reporting truncation-to float issues AND for computing the ' - 'FDR curves. The actual results ARE not masked with this option ' - '(only with \'mask\' or \'automask\' options).', - argstr='-STATmask %s', - exists=True) + desc="filename of 3D mask dataset to be used for the purpose " + "of reporting truncation-to float issues AND for computing the " + "FDR curves. The actual results ARE not masked with this option " + "(only with 'mask' or 'automask' options).", + argstr="-STATmask %s", + exists=True, + ) addbase = InputMultiPath( - File( - exists=True, - desc='file containing columns to add to regression matrix'), - desc='file(s) to add baseline model columns to the matrix with this ' - 'option. Each column in the specified file(s) will be appended ' - 'to the matrix. File(s) must have at least as many rows as the ' - 'matrix does.', + File(exists=True, desc="file containing columns to add to regression matrix"), + desc="file(s) to add baseline model columns to the matrix with this " + "option. Each column in the specified file(s) will be appended " + "to the matrix. File(s) must have at least as many rows as the " + "matrix does.", copyfile=False, sep=" ", - argstr='-addbase %s') + argstr="-addbase %s", + ) slibase = InputMultiPath( - File( - exists=True, - desc='file containing columns to add to regression matrix'), - desc='similar to \'addbase\' in concept, BUT each specified file ' - 'must have an integer multiple of the number of slices ' - 'in the input dataset(s); then, separate regression ' - 'matrices are generated for each slice, with the ' - 'first column of the file appended to the matrix for ' - 'the first slice of the dataset, the second column of the file ' - 'appended to the matrix for the first slice of the dataset, ' - 'and so on. Intended to help model physiological noise in FMRI, ' - 'or other effects you want to regress out that might ' - 'change significantly in the inter-slice time intervals. This ' - 'will slow the program down, and make it use a lot more memory ' - '(to hold all the matrix stuff).', - argstr='-slibase %s') + File(exists=True, desc="file containing columns to add to regression matrix"), + desc="similar to 'addbase' in concept, BUT each specified file " + "must have an integer multiple of the number of slices " + "in the input dataset(s); then, separate regression " + "matrices are generated for each slice, with the " + "first column of the file appended to the matrix for " + "the first slice of the dataset, the second column of the file " + "appended to the matrix for the first slice of the dataset, " + "and so on. 
Intended to help model physiological noise in FMRI, "
+        "or other effects you want to regress out that might "
+        "change significantly in the inter-slice time intervals. This "
+        "will slow the program down, and make it use a lot more memory "
+        "(to hold all the matrix stuff).",
+        argstr="-slibase %s",
+    )
    slibase_sm = InputMultiPath(
-        File(
-            exists=True,
-            desc='file containing columns to add to regression matrix'),
-        desc='similar to \'slibase\', BUT each file much be in slice major '
-        'order (i.e. all slice0 columns come first, then all slice1 '
-        'columns, etc).',
-        argstr='-slibase_sm %s')
+        File(exists=True, desc="file containing columns to add to regression matrix"),
+        desc="similar to 'slibase', BUT each file must be in slice major "
+        "order (i.e. all slice0 columns come first, then all slice1 "
+        "columns, etc).",
+        argstr="-slibase_sm %s",
+    )
    usetemp = traits.Bool(
-        desc='write intermediate stuff to disk, to economize on RAM. '
-        'Using this option might be necessary to run with '
-        '\'slibase\' and with \'Grid\' values above the default, '
-        'since the program has to store a large number of '
-        'matrices for such a problem: two for every slice and '
-        'for every (a,b) pair in the ARMA parameter grid. Temporary '
-        'files are written to the directory given in environment '
-        'variable TMPDIR, or in /tmp, or in ./ (preference is in that '
-        'order)',
-        argstr='-usetemp')
+        desc="write intermediate stuff to disk, to economize on RAM. "
+        "Using this option might be necessary to run with "
+        "'slibase' and with 'Grid' values above the default, "
+        "since the program has to store a large number of "
+        "matrices for such a problem: two for every slice and "
+        "for every (a,b) pair in the ARMA parameter grid. Temporary "
+        "files are written to the directory given in environment "
+        "variable TMPDIR, or in /tmp, or in ./ (preference is in that "
+        "order)",
+        argstr="-usetemp",
+    )
    nodmbase = traits.Bool(
-        desc='by default, baseline columns added to the matrix via '
-        '\'addbase\' or \'slibase\' or \'dsort\' will each have their '
-        'mean removed (as is done in Deconvolve); this option turns this '
-        'centering off',
-        argstr='-nodmbase',
-        requires=['addbase', 'dsort'])
+        desc="by default, baseline columns added to the matrix via "
+        "'addbase' or 'slibase' or 'dsort' will each have their "
+        "mean removed (as is done in Deconvolve); this option turns this "
+        "centering off",
+        argstr="-nodmbase",
+        requires=["addbase", "dsort"],
+    )
    dsort = File(
-        desc='4D dataset to be used as voxelwise baseline regressor',
+        desc="4D dataset to be used as voxelwise baseline regressor",
        exists=True,
        copyfile=False,
-        argstr='-dsort %s')
+        argstr="-dsort %s",
+    )
    dsort_nods = traits.Bool(
-        desc='if \'dsort\' option is used, this command will output '
-        'additional results files excluding the \'dsort\' file',
-        argstr='-dsort_nods',
-        requires=['dsort'])
-    fout = traits.Bool(
-        desc='output F-statistic for each stimulus', argstr='-fout')
+        desc="if 'dsort' option is used, this command will output "
+        "additional results files excluding the 'dsort' file",
+        argstr="-dsort_nods",
+        requires=["dsort"],
+    )
+    fout = traits.Bool(desc="output F-statistic for each stimulus", argstr="-fout")
    rout = traits.Bool(
-        desc='output the R^2 statistic for each stimulus', argstr='-rout')
+        desc="output the R^2 statistic for each stimulus", argstr="-rout"
+    )
    tout = traits.Bool(
-        desc='output the T-statistic for each stimulus; if you use '
-        '\'out_file\' and do not give any of \'fout\', \'tout\','
-        'or 
\'rout\', then the program assumes \'fout\' is activated.',
-        argstr='-tout')
+        desc="output the T-statistic for each stimulus; if you use "
+        "'out_file' and do not give any of 'fout', 'tout', "
+        "or 'rout', then the program assumes 'fout' is activated.",
+        argstr="-tout",
+    )
    nofdr = traits.Bool(
-        desc='do NOT add FDR curve data to bucket datasets; FDR curves can '
-        'take a long time if \'tout\' is used',
-        argstr='-noFDR')
+        desc="do NOT add FDR curve data to bucket datasets; FDR curves can "
+        "take a long time if 'tout' is used",
+        argstr="-noFDR",
+    )
    nobout = traits.Bool(
-        desc='do NOT add baseline (null hypothesis) regressor betas '
-        'to the \'rbeta_file\' and/or \'obeta_file\' output datasets.',
-        argstr='-nobout')
+        desc="do NOT add baseline (null hypothesis) regressor betas "
+        "to the 'rbeta_file' and/or 'obeta_file' output datasets.",
+        argstr="-nobout",
+    )
    gltsym = traits.List(
-        traits.Either(
-            traits.Tuple(File(exists=True), Str()), traits.Tuple(Str(),
-                                                                 Str())),
-        desc='read a symbolic GLT from input file and associate it with a '
-        'label. As in Deconvolve, you can also use the \'SYM:\' method '
-        'to provide the definition of the GLT directly as a string '
-        '(e.g., with \'SYM: +Label1 -Label2\'). Unlike Deconvolve, you '
-        'MUST specify \'SYM: \' if providing the GLT directly as a '
-        'string instead of from a file',
-        argstr='-gltsym "%s" %s...')
+        traits.Either(Tuple(File(exists=True), Str()), Tuple(Str(), Str())),
+        desc="read a symbolic GLT from input file and associate it with a "
+        "label. As in Deconvolve, you can also use the 'SYM:' method "
+        "to provide the definition of the GLT directly as a string "
+        "(e.g., with 'SYM: +Label1 -Label2'). Unlike Deconvolve, you "
+        "MUST specify 'SYM: ' if providing the GLT directly as a "
+        "string instead of from a file",
+        argstr='-gltsym "%s" %s...',
+    )
    out_file = File(
-        desc='output dataset for beta + statistics from the REML estimation; '
-        'also contains the results of any GLT analysis requested '
-        'in the Deconvolve setup, similar to the \'bucket\' output '
-        'from Deconvolve. This dataset does NOT get the betas '
-        '(or statistics) of those regressors marked as \'baseline\' '
-        'in the matrix file.',
-        argstr='-Rbuck %s')
+        desc="output dataset for beta + statistics from the REML estimation; "
+        "also contains the results of any GLT analysis requested "
+        "in the Deconvolve setup, similar to the 'bucket' output "
+        "from Deconvolve. This dataset does NOT get the betas "
+        "(or statistics) of those regressors marked as 'baseline' "
+        "in the matrix file.",
+        argstr="-Rbuck %s",
+    )
    var_file = File(
-        desc='output dataset for REML variance parameters', argstr='-Rvar %s')
+        desc="output dataset for REML variance parameters", argstr="-Rvar %s"
+    )
    rbeta_file = File(
-        desc='output dataset for beta weights from the REML estimation, '
-        'similar to the \'cbucket\' output from Deconvolve. This dataset '
-        'will contain all the beta weights, for baseline and stimulus '
-        'regressors alike, unless the \'-nobout\' option is given -- '
-        'in that case, this dataset will only get the betas for the '
-        'stimulus regressors.',
-        argstr='-Rbeta %s')
+        desc="output dataset for beta weights from the REML estimation, "
+        "similar to the 'cbucket' output from Deconvolve. 
This dataset " + "will contain all the beta weights, for baseline and stimulus " + "regressors alike, unless the '-nobout' option is given -- " + "in that case, this dataset will only get the betas for the " + "stimulus regressors.", + argstr="-Rbeta %s", + ) glt_file = File( - desc='output dataset for beta + statistics from the REML estimation, ' - 'but ONLY for the GLTs added on the REMLfit command line itself ' - 'via \'gltsym\'; GLTs from Deconvolve\'s command line will NOT ' - 'be included.', - argstr='-Rglt %s') - fitts_file = File( - desc='ouput dataset for REML fitted model', argstr='-Rfitts %s') + desc="output dataset for beta + statistics from the REML estimation, " + "but ONLY for the GLTs added on the REMLfit command line itself " + "via 'gltsym'; GLTs from Deconvolve's command line will NOT " + "be included.", + argstr="-Rglt %s", + ) + fitts_file = File(desc="output dataset for REML fitted model", argstr="-Rfitts %s") errts_file = File( - desc='output dataset for REML residuals = data - fitted model', - argstr='-Rerrts %s') + desc="output dataset for REML residuals = data - fitted model", + argstr="-Rerrts %s", + ) wherr_file = File( - desc='dataset for REML residual, whitened using the estimated ' - 'ARMA(1,1) correlation matrix of the noise', - argstr='-Rwherr %s') - quiet = traits.Bool( - desc='turn off most progress messages', argstr='-quiet') + desc="dataset for REML residual, whitened using the estimated " + "ARMA(1,1) correlation matrix of the noise", + argstr="-Rwherr %s", + ) + quiet = traits.Bool(desc="turn off most progress messages", argstr="-quiet") verb = traits.Bool( - desc='turns on more progress messages, including memory usage ' - 'progress reports at various stages', - argstr='-verb') + desc="turns on more progress messages, including memory usage " + "progress reports at various stages", + argstr="-verb", + ) + goforit = traits.Bool( + desc="With potential issues flagged in the design matrix, an attempt " + "will nevertheless be made to fit the model", + argstr="-GOFORIT", + ) ovar = File( - desc='dataset for OLSQ st.dev. parameter (kind of boring)', - argstr='-Ovar %s') + desc="dataset for OLSQ st.dev. 
parameter (kind of boring)", argstr="-Ovar %s"
+    )
    obeta = File(
-        desc='dataset for beta weights from the OLSQ estimation',
-        argstr='-Obeta %s')
+        desc="dataset for beta weights from the OLSQ estimation", argstr="-Obeta %s"
+    )
    obuck = File(
-        desc='dataset for beta + statistics from the OLSQ estimation',
-        argstr='-Obuck %s')
+        desc="dataset for beta + statistics from the OLSQ estimation",
+        argstr="-Obuck %s",
+    )
    oglt = File(
-        desc='dataset for beta + statistics from \'gltsym\' options',
-        argstr='-Oglt %s')
-    ofitts = File(desc='dataset for OLSQ fitted model', argstr='-Ofitts %s')
+        desc="dataset for beta + statistics from 'gltsym' options", argstr="-Oglt %s"
+    )
+    ofitts = File(desc="dataset for OLSQ fitted model", argstr="-Ofitts %s")
    oerrts = File(
-        desc='dataset for OLSQ residuals (data - fitted model)',
-        argstr='-Oerrts %s')
+        desc="dataset for OLSQ residuals (data - fitted model)", argstr="-Oerrts %s"
+    )


 class RemlfitOutputSpec(AFNICommandOutputSpec):
    out_file = File(
-        desc='dataset for beta + statistics from the REML estimation (if '
-        'generated')
-    var_file = File(desc='dataset for REML variance parameters (if generated)')
+        desc="dataset for beta + statistics from the REML estimation (if generated)"
+    )
+    var_file = File(desc="dataset for REML variance parameters (if generated)")
    rbeta_file = File(
-        desc='dataset for beta weights from the REML estimation (if '
-        'generated)')
-    rbeta_file = File(
-        desc='output dataset for beta weights from the REML estimation (if '
-        'generated')
+        desc="output dataset for beta weights from the REML estimation (if generated)"
+    )
    glt_file = File(
-        desc='output dataset for beta + statistics from the REML estimation, '
-        'but ONLY for the GLTs added on the REMLfit command '
-        'line itself via \'gltsym\' (if generated)')
-    fitts_file = File(
-        desc='ouput dataset for REML fitted model (if generated)')
+        desc="output dataset for beta + statistics from the REML estimation, "
+        "but ONLY for the GLTs added on the REMLfit command "
+        "line itself via 'gltsym' (if generated)"
+    )
+    fitts_file = File(desc="output dataset for REML fitted model (if generated)")
    errts_file = File(
-        desc='output dataset for REML residuals = data - fitted model (if '
-        'generated')
+        desc="output dataset for REML residuals = data - fitted model (if generated)"
+    )
    wherr_file = File(
-        desc='dataset for REML residual, whitened using the estimated '
-        'ARMA(1,1) correlation matrix of the noise (if generated)')
-    ovar = File(desc='dataset for OLSQ st.dev. parameter (if generated)')
-    obeta = File(desc='dataset for beta weights from the OLSQ estimation (if '
-                 'generated)')
+        desc="dataset for REML residual, whitened using the estimated "
+        "ARMA(1,1) correlation matrix of the noise (if generated)"
+    )
+    ovar = File(desc="dataset for OLSQ st.dev. 
parameter (if generated)") + obeta = File( + desc="dataset for beta weights from the OLSQ estimation (if generated)" + ) obuck = File( - desc='dataset for beta + statistics from the OLSQ estimation (if ' - 'generated)') + desc="dataset for beta + statistics from the OLSQ estimation (if generated)" + ) oglt = File( - desc='dataset for beta + statistics from \'gltsym\' options (if ' - 'generated') - ofitts = File(desc='dataset for OLSQ fitted model (if generated)') - oerrts = File(desc='dataset for OLSQ residuals = data - fitted model (if ' - 'generated') + desc="dataset for beta + statistics from 'gltsym' options (if generated)" + ) + ofitts = File(desc="dataset for OLSQ fitted model (if generated)") + oerrts = File( + desc="dataset for OLSQ residuals = data - fitted model (if generated)" + ) class Remlfit(AFNICommand): @@ -564,19 +617,19 @@ class Remlfit(AFNICommand): >>> res = remlfit.run() # doctest: +SKIP """ - _cmd = '3dREMLfit' + _cmd = "3dREMLfit" input_spec = RemlfitInputSpec output_spec = RemlfitOutputSpec def _parse_inputs(self, skip=None): if skip is None: skip = [] - return super(Remlfit, self)._parse_inputs(skip) + return super()._parse_inputs(skip) def _list_outputs(self): outputs = self.output_spec().get() - for key in outputs.keys(): + for key in outputs: if isdefined(self.inputs.get()[key]): outputs[key] = os.path.abspath(self.inputs.get()[key]) @@ -585,50 +638,53 @@ def _list_outputs(self): class SynthesizeInputSpec(AFNICommandInputSpec): cbucket = File( - desc='Read the dataset output from ' - '3dDeconvolve via the \'-cbucket\' option.', - argstr='-cbucket %s', + desc="Read the dataset output from 3dDeconvolve via the '-cbucket' option.", + argstr="-cbucket %s", copyfile=False, - mandatory=True) + mandatory=True, + ) matrix = File( - desc='Read the matrix output from ' - '3dDeconvolve via the \'-x1D\' option.', - argstr='-matrix %s', + desc="Read the matrix output from 3dDeconvolve via the '-x1D' option.", + argstr="-matrix %s", copyfile=False, - mandatory=True) + mandatory=True, + ) select = traits.List( - Str(desc='selected columns to synthesize'), - argstr='-select %s', - desc='A list of selected columns from the matrix (and the ' - 'corresponding coefficient sub-bricks from the ' - 'cbucket). Valid types include \'baseline\', ' - ' \'polort\', \'allfunc\', \'allstim\', \'all\', ' - 'Can also provide \'something\' where something matches ' - 'a stim_label from 3dDeconvolve, and \'digits\' where digits ' - 'are the numbers of the select matrix columns by ' - 'numbers (starting at 0), or number ranges of the form ' - '\'3..7\' and \'3-7\'.', - mandatory=True) + Str(desc="selected columns to synthesize"), + argstr="-select %s", + desc="A list of selected columns from the matrix (and the " + "corresponding coefficient sub-bricks from the " + "cbucket). 
Valid types include 'baseline', " + " 'polort', 'allfunc', 'allstim', 'all', " + "Can also provide 'something' where something matches " + "a stim_label from 3dDeconvolve, and 'digits' where digits " + "are the numbers of the select matrix columns by " + "numbers (starting at 0), or number ranges of the form " + "'3..7' and '3-7'.", + mandatory=True, + ) out_file = File( - name_template='syn', - desc='output dataset prefix name (default \'syn\')', - argstr='-prefix %s') + name_template="syn", + desc="output dataset prefix name (default 'syn')", + argstr="-prefix %s", + ) dry_run = traits.Bool( - desc='Don\'t compute the output, just ' - 'check the inputs.', - argstr='-dry') + desc="Don't compute the output, just check the inputs.", argstr="-dry" + ) TR = traits.Float( - desc='TR to set in the output. The default value of ' - 'TR is read from the header of the matrix file.', - argstr='-TR %f') + desc="TR to set in the output. The default value of " + "TR is read from the header of the matrix file.", + argstr="-TR %f", + ) cenfill = traits.Enum( - 'zero', - 'nbhr', - 'none', - argstr='-cenfill %s', - desc='Determines how censored time points from the ' - '3dDeconvolve run will be filled. Valid types ' - 'are \'zero\', \'nbhr\' and \'none\'.') + "zero", + "nbhr", + "none", + argstr="-cenfill %s", + desc="Determines how censored time points from the " + "3dDeconvolve run will be filled. Valid types " + "are 'zero', 'nbhr' and 'none'.", + ) class Synthesize(AFNICommand): @@ -652,14 +708,14 @@ class Synthesize(AFNICommand): >>> syn = synthesize.run() # doctest: +SKIP """ - _cmd = '3dSynthesize' + _cmd = "3dSynthesize" input_spec = SynthesizeInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - for key in outputs.keys(): + for key in outputs: if isdefined(self.inputs.get()[key]): outputs[key] = os.path.abspath(self.inputs.get()[key]) diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index 0ecbe4b347..d3daebcf4c 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -1,122 +1,144 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""AFNI preprocessing interfaces -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open +"""AFNI preprocessing interfaces.""" import os import os.path as op -from ...utils.filemanip import (load_json, save_json, split_filename, - fname_presuffix) -from ..base import (CommandLineInputSpec, CommandLine, TraitedSpec, traits, - isdefined, File, InputMultiPath, Undefined, Str, - InputMultiObject) - -from .base import (AFNICommandBase, AFNICommand, AFNICommandInputSpec, - AFNICommandOutputSpec, AFNIPythonCommandInputSpec, - AFNIPythonCommand, Info, no_afni) - -from ...import logging -iflogger = logging.getLogger('nipype.interface') +from ...utils.filemanip import load_json, save_json, split_filename, fname_presuffix +from ..base import ( + CommandLineInputSpec, + CommandLine, + TraitedSpec, + traits, + Tuple, + isdefined, + File, + InputMultiPath, + Undefined, + Str, + InputMultiObject, +) + +from .base import ( + AFNICommandBase, + AFNICommand, + AFNICommandInputSpec, + AFNICommandOutputSpec, + AFNIPythonCommandInputSpec, + AFNIPythonCommand, + Info, + no_afni, +) + +from ... 
import logging
+
+iflogger = logging.getLogger("nipype.interface")


 class CentralityInputSpec(AFNICommandInputSpec):
-    """Common input spec class for all centrality-related commands
-    """
+    """Common input spec class for all centrality-related commands"""

-    mask = File(
-        desc='mask file to mask input data', argstr='-mask %s', exists=True)
+    mask = File(desc="mask file to mask input data", argstr="-mask %s", exists=True)
    thresh = traits.Float(
-        desc='threshold to exclude connections where corr <= thresh',
-        argstr='-thresh %f')
-    polort = traits.Int(desc='', argstr='-polort %d')
+        desc="threshold to exclude connections where corr <= thresh",
+        argstr="-thresh %f",
+    )
+    polort = traits.Int(desc="", argstr="-polort %d")
    autoclip = traits.Bool(
-        desc='Clip off low-intensity regions in the dataset',
-        argstr='-autoclip')
+        desc="Clip off low-intensity regions in the dataset", argstr="-autoclip"
+    )
    automask = traits.Bool(
-        desc='Mask the dataset to target brain-only voxels',
-        argstr='-automask')
+        desc="Mask the dataset to target brain-only voxels", argstr="-automask"
+    )


 class AlignEpiAnatPyInputSpec(AFNIPythonCommandInputSpec):
    in_file = File(
-        desc='EPI dataset to align',
-        argstr='-epi %s',
+        desc="EPI dataset to align",
+        argstr="-epi %s",
        mandatory=True,
        exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
    anat = File(
-        desc='name of structural dataset',
-        argstr='-anat %s',
+        desc="name of structural dataset",
+        argstr="-anat %s",
        mandatory=True,
        exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
    epi_base = traits.Either(
        traits.Range(low=0),
-        traits.Enum('mean', 'median', 'max'),
-        desc='the epi base used in alignment'
-        'should be one of (0/mean/median/max/subbrick#)',
+        traits.Enum("mean", "median", "max"),
+        desc="the epi base used in alignment; "
+        "should be one of (0/mean/median/max/subbrick#)",
        mandatory=True,
-        argstr='-epi_base %s')
+        argstr="-epi_base %s",
+    )
    anat2epi = traits.Bool(
-        desc='align anatomical to EPI dataset (default)', argstr='-anat2epi')
-    epi2anat = traits.Bool(
-        desc='align EPI to anatomical dataset', argstr='-epi2anat')
+        desc="align anatomical to EPI dataset (default)", argstr="-anat2epi"
+    )
+    epi2anat = traits.Bool(desc="align EPI to anatomical dataset", argstr="-epi2anat")
    save_skullstrip = traits.Bool(
-        desc='save skull-stripped (not aligned)', argstr='-save_skullstrip')
+        desc="save skull-stripped (not aligned)", argstr="-save_skullstrip"
+    )
    suffix = traits.Str(
-        '_al',
-        desc='append suffix to the original anat/epi dataset to use'
+        "_al",
+        desc="append suffix to the original anat/epi dataset to use "
        'in the resulting dataset names (default is "_al")',
        usedefault=True,
-        argstr='-suffix %s')
+        argstr="-suffix %s",
+    )
    epi_strip = traits.Enum(
-        ('3dSkullStrip', '3dAutomask', 'None'),
-        desc='method to mask brain in EPI data'
-        'should be one of[3dSkullStrip]/3dAutomask/None)',
-        argstr='-epi_strip %s')
+        ("3dSkullStrip", "3dAutomask", "None"),
+        desc="method to mask brain in EPI data; "
+        "should be one of [3dSkullStrip]/3dAutomask/None",
+        argstr="-epi_strip %s",
+    )
    volreg = traits.Enum(
-        'on',
-        'off',
+        "on",
+        "off",
        usedefault=True,
-        desc='do volume registration on EPI dataset before alignment'
-        'should be \'on\' or \'off\', defaults to \'on\'',
-        argstr='-volreg %s')
+        desc="do volume registration on EPI dataset before alignment; "
+        "should be 'on' or 'off', defaults to 'on'",
+        argstr="-volreg %s",
+    )
    tshift = traits.Enum(
-        'on',
-        'off',
+        "on",
+        "off",
        usedefault=True,
-        desc='do time shifting of EPI dataset before 
alignment'
-        'should be \'on\' or \'off\', defaults to \'on\'',
-        argstr='-tshift %s')
+        desc="do time shifting of EPI dataset before alignment; "
+        "should be 'on' or 'off', defaults to 'on'",
+        argstr="-tshift %s",
+    )


 class AlignEpiAnatPyOutputSpec(TraitedSpec):
-    anat_al_orig = File(
-        desc="A version of the anatomy that is aligned to the EPI")
-    epi_al_orig = File(
-        desc="A version of the EPI dataset aligned to the anatomy")
+    anat_al_orig = File(desc="A version of the anatomy that is aligned to the EPI")
+    epi_al_orig = File(desc="A version of the EPI dataset aligned to the anatomy")
    epi_tlrc_al = File(
-        desc="A version of the EPI dataset aligned to a standard template")
+        desc="A version of the EPI dataset aligned to a standard template"
+    )
    anat_al_mat = File(desc="matrix to align anatomy to the EPI")
    epi_al_mat = File(desc="matrix to align EPI to anatomy")
    epi_vr_al_mat = File(desc="matrix to volume register EPI")
-    epi_reg_al_mat = File(
-        desc="matrix to volume register and align epi to anatomy")
-    epi_al_tlrc_mat = File(desc="matrix to volume register and align epi"
-                           "to anatomy and put into standard space")
-    epi_vr_motion = File(desc="motion parameters from EPI time-series"
-                         "registration (tsh included in name if slice"
-                         "timing correction is also included).")
+    epi_reg_al_mat = File(desc="matrix to volume register and align epi to anatomy")
+    epi_al_tlrc_mat = File(
+        desc="matrix to volume register and align epi "
+        "to anatomy and put into standard space"
+    )
+    epi_vr_motion = File(
+        desc="motion parameters from EPI time-series "
+        "registration (tsh included in name if slice "
+        "timing correction is also included)."
+    )
    skullstrip = File(desc="skull-stripped (not aligned) volume")


 class AlignEpiAnatPy(AFNIPythonCommand):
-    """Align EPI to anatomical datasets or vice versa
+    """Align EPI to anatomical datasets or vice versa.
+
    This Python script computes the alignment between two datasets, typically
    an EPI and an anatomical structural dataset, and applies the resulting
    transformation to one or the other to bring them into alignment.
@@ -126,11 +148,12 @@ class AlignEpiAnatPy(AFNIPythonCommand):
    script combines multiple transformations, thereby minimizing the amount
    of interpolation applied to the data.

-    Basic Usage:
-        align_epi_anat.py -anat anat+orig -epi epi+orig -epi_base 5
+    Basic Usage::

-    The user must provide EPI and anatomical datasets and specify the EPI
-    sub-brick to use as a base in the alignment.
+        align_epi_anat.py -anat anat+orig -epi epi+orig -epi_base 5
+
+    The user must provide :abbr:`EPI (echo-planar imaging)` and anatomical datasets
+    and specify the EPI sub-brick to use as a base in the alignment.

    Internally, the script always aligns the anatomical to the EPI dataset,
    and the resulting transformation is saved to a 1D file.
@@ -144,11 +167,8 @@ class AlignEpiAnatPy(AFNIPythonCommand):
    and requested (with options to turn on and off each of the steps) in
    order to create the aligned datasets.

-    For complete details, see the `align_epi_anat.py' Documentation. 
- `_ - Examples - ======== + -------- >>> from nipype.interfaces import afni >>> al_ea = afni.AlignEpiAnatPy() >>> al_ea.inputs.anat = "structural.nii" @@ -159,297 +179,369 @@ class AlignEpiAnatPy(AFNIPythonCommand): >>> al_ea.inputs.tshift = 'off' >>> al_ea.inputs.save_skullstrip = True >>> al_ea.cmdline # doctest: +ELLIPSIS - 'python2 ...align_epi_anat.py -anat structural.nii -epi_base 0 -epi_strip 3dAutomask -epi functional.nii -save_skullstrip -suffix _al -tshift off -volreg off' - >>> res = allineate.run() # doctest: +SKIP + 'python2 ...align_epi_anat.py -anat structural.nii -epi_base 0 -epi_strip 3dAutomask -epi \ +functional.nii -save_skullstrip -suffix _al -tshift off -volreg off' + >>> res = al_ea.run() # doctest: +SKIP + + See Also + -------- + For complete details, see the `align_epi_anat.py documentation. + `__. + """ - _cmd = 'align_epi_anat.py' + + _cmd = "align_epi_anat.py" input_spec = AlignEpiAnatPyInputSpec output_spec = AlignEpiAnatPyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - anat_prefix = ''.join( - self._gen_fname(self.inputs.anat).split('+')[:-1]) - epi_prefix = ''.join( - self._gen_fname(self.inputs.in_file).split('+')[:-1]) + anat_prefix = self._gen_fname(self.inputs.anat) + epi_prefix = self._gen_fname(self.inputs.in_file) + if "+" in anat_prefix: + anat_prefix = "".join(anat_prefix.split("+")[:-1]) + if "+" in epi_prefix: + epi_prefix = "".join(epi_prefix.split("+")[:-1]) outputtype = self.inputs.outputtype - if outputtype == 'AFNI': - ext = '.HEAD' + if outputtype == "AFNI": + ext = ".HEAD" else: - Info.output_type_to_ext(outputtype) - matext = '.1D' + ext = Info.output_type_to_ext(outputtype) + matext = ".1D" suffix = self.inputs.suffix if self.inputs.anat2epi: - outputs['anat_al_orig'] = self._gen_fname( - anat_prefix, suffix=suffix + '+orig', ext=ext) - outputs['anat_al_mat'] = self._gen_fname( - anat_prefix, suffix=suffix + '_mat.aff12', ext=matext) + outputs["anat_al_orig"] = self._gen_fname( + anat_prefix, suffix=suffix + "+orig", ext=ext + ) + outputs["anat_al_mat"] = self._gen_fname( + anat_prefix, suffix=suffix + "_mat.aff12", ext=matext + ) if self.inputs.epi2anat: - outputs['epi_al_orig'] = self._gen_fname( - epi_prefix, suffix=suffix + '+orig', ext=ext) - outputs['epi_al_mat'] = self._gen_fname( - epi_prefix, suffix=suffix + '_mat.aff12', ext=matext) - if self.inputs.volreg == 'on': - outputs['epi_vr_al_mat'] = self._gen_fname( - epi_prefix, suffix='_vr' + suffix + '_mat.aff12', ext=matext) - if self.inputs.tshift == 'on': - outputs['epi_vr_motion'] = self._gen_fname( - epi_prefix, suffix='tsh_vr_motion', ext=matext) - elif self.inputs.tshift == 'off': - outputs['epi_vr_motion'] = self._gen_fname( - epi_prefix, suffix='vr_motion', ext=matext) - if self.inputs.volreg == 'on' and self.inputs.epi2anat: - outputs['epi_reg_al_mat'] = self._gen_fname( - epi_prefix, suffix='_reg' + suffix + '_mat.aff12', ext=matext) + outputs["epi_al_orig"] = self._gen_fname( + epi_prefix, suffix=suffix + "+orig", ext=ext + ) + outputs["epi_al_mat"] = self._gen_fname( + epi_prefix, suffix=suffix + "_mat.aff12", ext=matext + ) + if self.inputs.volreg == "on": + outputs["epi_vr_al_mat"] = self._gen_fname( + epi_prefix, suffix="_vr" + suffix + "_mat.aff12", ext=matext + ) + if self.inputs.tshift == "on": + outputs["epi_vr_motion"] = self._gen_fname( + epi_prefix, suffix="tsh_vr_motion", ext=matext + ) + elif self.inputs.tshift == "off": + outputs["epi_vr_motion"] = self._gen_fname( + epi_prefix, suffix="vr_motion", ext=matext + ) + if 
self.inputs.volreg == "on" and self.inputs.epi2anat:
+            outputs["epi_reg_al_mat"] = self._gen_fname(
+                epi_prefix, suffix="_reg" + suffix + "_mat.aff12", ext=matext
+            )
        if self.inputs.save_skullstrip:
            outputs.skullstrip = self._gen_fname(
-                anat_prefix, suffix='_ns' + '+orig', ext=ext)
+                anat_prefix, suffix="_ns" + "+orig", ext=ext
+            )
        return outputs


 class AllineateInputSpec(AFNICommandInputSpec):
    in_file = File(
-        desc='input file to 3dAllineate',
-        argstr='-source %s',
+        desc="input file to 3dAllineate",
+        argstr="-source %s",
        mandatory=True,
        exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
    reference = File(
        exists=True,
-        argstr='-base %s',
-        desc='file to be used as reference, the first volume will be used if '
-        'not given the reference will be the first volume of in_file.')
+        argstr="-base %s",
+        desc="file to be used as reference; if not given, the reference "
+        "will be the first volume of in_file.",
+    )
    out_file = File(
-        desc='output file from 3dAllineate',
-        argstr='-prefix %s',
-        name_template='%s_allineate',
-        name_source='in_file',
+        desc="output file from 3dAllineate",
+        argstr="-prefix %s",
+        name_template="%s_allineate",
+        name_source="in_file",
        hash_files=False,
-        xor=['allcostx'])
+        xor=["allcostx"],
+    )
    out_param_file = File(
-        argstr='-1Dparam_save %s',
-        desc='Save the warp parameters in ASCII (.1D) format.',
-        xor=['in_param_file', 'allcostx'])
+        argstr="-1Dparam_save %s",
+        desc="Save the warp parameters in ASCII (.1D) format.",
+        xor=["in_param_file", "allcostx"],
+    )
    in_param_file = File(
        exists=True,
-        argstr='-1Dparam_apply %s',
-        desc='Read warp parameters from file and apply them to '
-        'the source dataset, and produce a new dataset',
-        xor=['out_param_file'])
+        argstr="-1Dparam_apply %s",
+        desc="Read warp parameters from file and apply them to "
+        "the source dataset, and produce a new dataset",
+        xor=["out_param_file"],
+    )
    out_matrix = File(
-        argstr='-1Dmatrix_save %s',
-        desc='Save the transformation matrix for each volume.',
-        xor=['in_matrix', 'allcostx'])
+        argstr="-1Dmatrix_save %s",
+        desc="Save the transformation matrix for each volume.",
+        xor=["in_matrix", "allcostx"],
+    )
    in_matrix = File(
-        desc='matrix to align input file',
-        argstr='-1Dmatrix_apply %s',
+        desc="matrix to align input file",
+        argstr="-1Dmatrix_apply %s",
        position=-3,
-        xor=['out_matrix'])
+        xor=["out_matrix"],
+    )
    overwrite = traits.Bool(
-        desc='overwrite output file if it already exists', argstr='-overwrite')
+        desc="overwrite output file if it already exists", argstr="-overwrite"
+    )
    allcostx = File(
-        desc=
-        'Compute and print ALL available cost functionals for the un-warped inputs'
-        'AND THEN QUIT. If you use this option none of the other expected outputs will be produced',
-        argstr='-allcostx |& tee %s',
+        desc="Compute and print ALL available cost functionals for the un-warped inputs "
+        "AND THEN QUIT. 
If you use this option, none of the other expected outputs will be produced",
+        argstr="-allcostx |& tee %s",
        position=-1,
-        xor=['out_file', 'out_matrix', 'out_param_file', 'out_weight_file'])
+        xor=["out_file", "out_matrix", "out_param_file", "out_weight_file"],
+    )
    _cost_funcs = [
-        'leastsq', 'ls', 'mutualinfo', 'mi', 'corratio_mul', 'crM',
-        'norm_mutualinfo', 'nmi', 'hellinger', 'hel', 'corratio_add', 'crA',
-        'corratio_uns', 'crU'
+        "leastsq",
+        "ls",
+        "mutualinfo",
+        "mi",
+        "corratio_mul",
+        "crM",
+        "norm_mutualinfo",
+        "nmi",
+        "hellinger",
+        "hel",
+        "corratio_add",
+        "crA",
+        "corratio_uns",
+        "crU",
    ]
    cost = traits.Enum(
        *_cost_funcs,
-        argstr='-cost %s',
-        desc='Defines the \'cost\' function that defines the matching between '
-        'the source and the base')
-    _interp_funcs = [
-        'nearestneighbour', 'linear', 'cubic', 'quintic', 'wsinc5'
-    ]
+        argstr="-cost %s",
+        desc="Defines the 'cost' function that defines the matching between "
+        "the source and the base"
+    )
+    _interp_funcs = ["nearestneighbour", "linear", "cubic", "quintic", "wsinc5"]
    interpolation = traits.Enum(
        *_interp_funcs[:-1],
-        argstr='-interp %s',
-        desc='Defines interpolation method to use during matching')
+        argstr="-interp %s",
+        desc="Defines interpolation method to use during matching"
+    )
    final_interpolation = traits.Enum(
        *_interp_funcs,
-        argstr='-final %s',
-        desc='Defines interpolation method used to create the output dataset')
+        argstr="-final %s",
+        desc="Defines interpolation method used to create the output dataset"
+    )

    # TECHNICAL OPTIONS (used for fine control of the program):
    nmatch = traits.Int(
-        argstr='-nmatch %d',
-        desc='Use at most n scattered points to match the datasets.')
+        argstr="-nmatch %d",
+        desc="Use at most n scattered points to match the datasets.",
+    )
    no_pad = traits.Bool(
-        argstr='-nopad', desc='Do not use zero-padding on the base image.')
+        argstr="-nopad", desc="Do not use zero-padding on the base image."
+    )
    zclip = traits.Bool(
-        argstr='-zclip',
-        desc='Replace negative values in the input datasets (source & base) '
-        'with zero.')
+        argstr="-zclip",
+        desc="Replace negative values in the input datasets (source & base) "
+        "with zero.",
+    )
    convergence = traits.Float(
-        argstr='-conv %f',
-        desc='Convergence test in millimeters (default 0.05mm).')
-    usetemp = traits.Bool(argstr='-usetemp', desc='temporary file use')
+        argstr="-conv %f", desc="Convergence test in millimeters (default 0.05mm)."
+    )
+    usetemp = traits.Bool(argstr="-usetemp", desc="temporary file use")
    check = traits.List(
        traits.Enum(*_cost_funcs),
-        argstr='-check %s',
-        desc='After cost functional optimization is done, start at the final '
-        'parameters and RE-optimize using this new cost functions. If '
-        'the results are too different, a warning message will be '
-        'printed. However, the final parameters from the original '
-        'optimization will be used to create the output dataset.')
+        argstr="-check %s",
+        desc="After cost functional optimization is done, start at the final "
+        "parameters and RE-optimize using these new cost functions. If "
+        "the results are too different, a warning message will be "
+        "printed. However, the final parameters from the original "
+        "optimization will be used to create the output dataset.",
+    )

    # ** PARAMETERS THAT AFFECT THE COST OPTIMIZATION STRATEGY **
    one_pass = traits.Bool(
-        argstr='-onepass',
-        desc='Use only the refining pass -- do not try a coarse resolution '
-        'pass first. 
Useful if you know that only small amounts of ' - 'image alignment are needed.') + argstr="-onepass", + desc="Use only the refining pass -- do not try a coarse resolution " + "pass first. Useful if you know that only small amounts of " + "image alignment are needed.", + ) two_pass = traits.Bool( - argstr='-twopass', - desc='Use a two pass alignment strategy for all volumes, searching ' - 'for a large rotation+shift and then refining the alignment.') + argstr="-twopass", + desc="Use a two pass alignment strategy for all volumes, searching " + "for a large rotation+shift and then refining the alignment.", + ) two_blur = traits.Float( - argstr='-twoblur %f', - desc='Set the blurring radius for the first pass in mm.') + argstr="-twoblur %f", desc="Set the blurring radius for the first pass in mm." + ) two_first = traits.Bool( - argstr='-twofirst', - desc='Use -twopass on the first image to be registered, and ' - 'then on all subsequent images from the source dataset, ' - 'use results from the first image\'s coarse pass to start ' - 'the fine pass.') + argstr="-twofirst", + desc="Use -twopass on the first image to be registered, and " + "then on all subsequent images from the source dataset, " + "use results from the first image's coarse pass to start " + "the fine pass.", + ) two_best = traits.Int( - argstr='-twobest %d', - desc='In the coarse pass, use the best \'bb\' set of initial' - 'points to search for the starting point for the fine' - 'pass. If bb==0, then no search is made for the best' - 'starting point, and the identity transformation is' - 'used as the starting point. [Default=5; min=0 max=11]') + argstr="-twobest %d", + desc="In the coarse pass, use the best 'bb' set of initial " + "points to search for the starting point for the fine " + "pass. If bb==0, then no search is made for the best " + "starting point, and the identity transformation is " + "used as the starting point. [Default=5; min=0 max=11]", + ) fine_blur = traits.Float( - argstr='-fineblur %f', - desc='Set the blurring radius to use in the fine resolution ' - 'pass to \'x\' mm. A small amount (1-2 mm?) of blurring at ' - 'the fine step may help with convergence, if there is ' - 'some problem, especially if the base volume is very noisy. 
" + "[Default == 0 mm = no blurring at the final alignment pass]", + ) center_of_mass = Str( - argstr='-cmass%s', - desc='Use the center-of-mass calculation to bracket the shifts.') + argstr="-cmass%s", + desc="Use the center-of-mass calculation to bracket the shifts.", + ) autoweight = Str( - argstr='-autoweight%s', - desc='Compute a weight function using the 3dAutomask ' - 'algorithm plus some blurring of the base image.') + argstr="-autoweight%s", + desc="Compute a weight function using the 3dAutomask " + "algorithm plus some blurring of the base image.", + ) automask = traits.Int( - argstr='-automask+%d', - desc='Compute a mask function, set a value for dilation or 0.') + argstr="-automask+%d", + desc="Compute a mask function, set a value for dilation or 0.", + ) autobox = traits.Bool( - argstr='-autobox', - desc='Expand the -automask function to enclose a rectangular ' - 'box that holds the irregular mask.') + argstr="-autobox", + desc="Expand the -automask function to enclose a rectangular " + "box that holds the irregular mask.", + ) nomask = traits.Bool( - argstr='-nomask', - desc='Don\'t compute the autoweight/mask; if -weight is not ' - 'also used, then every voxel will be counted equally.') + argstr="-nomask", + desc="Don't compute the autoweight/mask; if -weight is not " + "also used, then every voxel will be counted equally.", + ) weight_file = File( - argstr='-weight %s', + argstr="-weight %s", exists=True, - deprecated='1.0.0', - new_name='weight', - desc='Set the weighting for each voxel in the base dataset; ' - 'larger weights mean that voxel count more in the cost function. ' - 'Must be defined on the same grid as the base dataset') + deprecated="1.0.0", + new_name="weight", + desc="Set the weighting for each voxel in the base dataset; " + "larger weights mean that voxel count more in the cost function. " + "Must be defined on the same grid as the base dataset", + ) weight = traits.Either( File(exists=True), traits.Float(), - argstr='-weight %s', - desc='Set the weighting for each voxel in the base dataset; ' - 'larger weights mean that voxel count more in the cost function. ' - 'If an image file is given, the volume must be defined on the ' - 'same grid as the base dataset') - out_weight_file = traits.File( - argstr='-wtprefix %s', - desc='Write the weight volume to disk as a dataset', - xor=['allcostx']) + argstr="-weight %s", + desc="Set the weighting for each voxel in the base dataset; " + "larger weights mean that voxel count more in the cost function. 
" + "If an image file is given, the volume must be defined on the " + "same grid as the base dataset", + ) + out_weight_file = File( + argstr="-wtprefix %s", + desc="Write the weight volume to disk as a dataset", + xor=["allcostx"], + ) source_mask = File( - exists=True, argstr='-source_mask %s', desc='mask the input dataset') + exists=True, argstr="-source_mask %s", desc="mask the input dataset" + ) source_automask = traits.Int( - argstr='-source_automask+%d', - desc='Automatically mask the source dataset with dilation or 0.') + argstr="-source_automask+%d", + desc="Automatically mask the source dataset with dilation or 0.", + ) warp_type = traits.Enum( - 'shift_only', - 'shift_rotate', - 'shift_rotate_scale', - 'affine_general', - argstr='-warp %s', - desc='Set the warp type.') + "shift_only", + "shift_rotate", + "shift_rotate_scale", + "affine_general", + argstr="-warp %s", + desc="Set the warp type.", + ) warpfreeze = traits.Bool( - argstr='-warpfreeze', - desc='Freeze the non-rigid body parameters after first volume.') + argstr="-warpfreeze", + desc="Freeze the non-rigid body parameters after first volume.", + ) replacebase = traits.Bool( - argstr='-replacebase', - desc='If the source has more than one volume, then after the first ' - 'volume is aligned to the base.') + argstr="-replacebase", + desc="If the source has more than one volume, then after the first " + "volume is aligned to the base.", + ) replacemeth = traits.Enum( *_cost_funcs, - argstr='-replacemeth %s', - desc='After first volume is aligned, switch method for later volumes. ' - 'For use with \'-replacebase\'.') + argstr="-replacemeth %s", + desc="After first volume is aligned, switch method for later volumes. " + "For use with '-replacebase'." + ) epi = traits.Bool( - argstr='-EPI', - desc='Treat the source dataset as being composed of warped ' - 'EPI slices, and the base as comprising anatomically ' - '\'true\' images. Only phase-encoding direction image ' - 'shearing and scaling will be allowed with this option.') + argstr="-EPI", + desc="Treat the source dataset as being composed of warped " + "EPI slices, and the base as comprising anatomically " + "'true' images. Only phase-encoding direction image " + "shearing and scaling will be allowed with this option.", + ) maxrot = traits.Float( - argstr='-maxrot %f', desc='Maximum allowed rotation in degrees.') - maxshf = traits.Float( - argstr='-maxshf %f', desc='Maximum allowed shift in mm.') - maxscl = traits.Float( - argstr='-maxscl %f', desc='Maximum allowed scaling factor.') - maxshr = traits.Float( - argstr='-maxshr %f', desc='Maximum allowed shearing factor.') + argstr="-maxrot %f", desc="Maximum allowed rotation in degrees." 
+ ) + maxshf = traits.Float(argstr="-maxshf %f", desc="Maximum allowed shift in mm.") + maxscl = traits.Float(argstr="-maxscl %f", desc="Maximum allowed scaling factor.") + maxshr = traits.Float(argstr="-maxshr %f", desc="Maximum allowed shearing factor.") master = File( exists=True, - argstr='-master %s', - desc='Write the output dataset on the same grid as this file.') + argstr="-master %s", + desc="Write the output dataset on the same grid as this file.", + ) newgrid = traits.Float( - argstr='-newgrid %f', - desc='Write the output dataset using isotropic grid spacing in mm.') + argstr="-newgrid %f", + desc="Write the output dataset using isotropic grid spacing in mm.", + ) # Non-linear experimental _nwarp_types = [ - 'bilinear', 'cubic', 'quintic', 'heptic', 'nonic', 'poly3', 'poly5', - 'poly7', 'poly9' + "bilinear", + "cubic", + "quintic", + "heptic", + "nonic", + "poly3", + "poly5", + "poly7", + "poly9", ] # same non-hellenistic nwarp = traits.Enum( *_nwarp_types, - argstr='-nwarp %s', - desc='Experimental nonlinear warping: bilinear or legendre poly.') - _dirs = ['X', 'Y', 'Z', 'I', 'J', 'K'] + argstr="-nwarp %s", + desc="Experimental nonlinear warping: bilinear or legendre poly." + ) + _dirs = ["X", "Y", "Z", "I", "J", "K"] nwarp_fixmot = traits.List( traits.Enum(*_dirs), - argstr='-nwarp_fixmot%s...', - desc='To fix motion along directions.') + argstr="-nwarp_fixmot%s...", + desc="To fix motion along directions.", + ) nwarp_fixdep = traits.List( traits.Enum(*_dirs), - argstr='-nwarp_fixdep%s...', - desc='To fix non-linear warp dependency along directions.') - verbose = traits.Bool( - argstr='-verb', desc='Print out verbose progress reports.') + argstr="-nwarp_fixdep%s...", + desc="To fix non-linear warp dependency along directions.", + ) + verbose = traits.Bool(argstr="-verb", desc="Print out verbose progress reports.") quiet = traits.Bool( - argstr='-quiet', desc="Don't print out verbose progress reports.") + argstr="-quiet", desc="Don't print out verbose progress reports." 
+ ) class AllineateOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output image file name') - out_matrix = File(exists=True, desc='matrix to align input file') - out_param_file = File(exists=True, desc='warp parameters') - out_weight_file = File(exists=True, desc='weight volume') + out_file = File(exists=True, desc="output image file name") + out_matrix = File(exists=True, desc="matrix to align input file") + out_param_file = File(exists=True, desc="warp parameters") + out_weight_file = File(exists=True, desc="weight volume") allcostx = File( - desc= - 'Compute and print ALL available cost functionals for the un-warped inputs' + desc="Compute and print ALL available cost functionals for the un-warped inputs" ) @@ -460,8 +552,7 @@ class Allineate(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> allineate = afni.Allineate() >>> allineate.inputs.in_file = 'functional.nii' @@ -488,66 +579,71 @@ class Allineate(AFNICommand): >>> res = allineate.run() # doctest: +SKIP """ - _cmd = '3dAllineate' + _cmd = "3dAllineate" input_spec = AllineateInputSpec output_spec = AllineateOutputSpec def _list_outputs(self): - outputs = super(Allineate, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.out_weight_file: - outputs['out_weight_file'] = op.abspath( - self.inputs.out_weight_file) + outputs["out_weight_file"] = op.abspath(self.inputs.out_weight_file) if self.inputs.out_matrix: - path, base, ext = split_filename(self.inputs.out_matrix) - if ext.lower() not in ['.1d', '.1D']: - outputs['out_matrix'] = self._gen_fname( - self.inputs.out_matrix, suffix='.aff12.1D') + ext = split_filename(self.inputs.out_matrix)[-1] + if ext.lower() != ".1d": + outputs["out_matrix"] = self._gen_fname( + self.inputs.out_matrix, suffix=".aff12.1D" + ) else: - outputs['out_matrix'] = op.abspath(self.inputs.out_matrix) + outputs["out_matrix"] = op.abspath(self.inputs.out_matrix) if self.inputs.out_param_file: - path, base, ext = split_filename(self.inputs.out_param_file) - if ext.lower() not in ['.1d', '.1D']: - outputs['out_param_file'] = self._gen_fname( - self.inputs.out_param_file, suffix='.param.1D') + ext = split_filename(self.inputs.out_param_file)[-1] + if ext.lower() != ".1d": + outputs["out_param_file"] = self._gen_fname( + self.inputs.out_param_file, suffix=".param.1D" + ) else: - outputs['out_param_file'] = op.abspath( - self.inputs.out_param_file) + outputs["out_param_file"] = op.abspath(self.inputs.out_param_file) if self.inputs.allcostx: - outputs['allcostX'] = os.path.abspath(self.inputs.allcostx) + outputs["allcostx"] = os.path.abspath(self.inputs.allcostx) return outputs class AutoTcorrelateInputSpec(AFNICommandInputSpec): in_file = File( - desc='timeseries x space (volume or surface) file', - argstr='%s', + desc="timeseries x space (volume or surface) file", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) polort = traits.Int( - desc='Remove polynomical trend of order m or -1 for no detrending', - argstr='-polort %d') - eta2 = traits.Bool(desc='eta^2 similarity', argstr='-eta2') - mask = File(exists=True, desc='mask of voxels', argstr='-mask %s') + desc="Remove polynomial trend of order m or -1 for no detrending", + argstr="-polort %d", + ) + eta2 = traits.Bool(desc="eta^2 similarity", argstr="-eta2") + mask = File(exists=True, desc="mask of voxels", argstr="-mask %s") mask_only_targets = traits.Bool( - desc='use mask only on targets voxels', - 
argstr='-mask_only_targets', - xor=['mask_source']) + desc="use mask only on target voxels", + argstr="-mask_only_targets", + xor=["mask_source"], + ) mask_source = File( exists=True, - desc='mask for source voxels', - argstr='-mask_source %s', - xor=['mask_only_targets']) + desc="mask for source voxels", + argstr="-mask_source %s", + xor=["mask_only_targets"], + ) out_file = File( - name_template='%s_similarity_matrix.1D', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_similarity_matrix.1D", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) class AutoTcorrelate(AFNICommand): @@ -559,8 +655,7 @@ class AutoTcorrelate(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> corr = afni.AutoTcorrelate() >>> corr.inputs.in_file = 'functional.nii' @@ -575,44 +670,48 @@ class AutoTcorrelate(AFNICommand): input_spec = AutoTcorrelateInputSpec output_spec = AFNICommandOutputSpec - _cmd = '3dAutoTcorrelate' + _cmd = "3dAutoTcorrelate" def _overload_extension(self, value, name=None): path, base, ext = split_filename(value) - if ext.lower() not in ['.1d', '.1D', '.nii.gz', '.nii']: - ext = ext + '.1D' + if ext.lower() not in [".1d", ".nii.gz", ".nii"]: + ext = ext + ".1D" return os.path.join(path, base + ext) class AutomaskInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dAutomask', - argstr='%s', + desc="input file to 3dAutomask", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_mask', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_mask", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) brain_file = File( - name_template='%s_masked', - desc='output file from 3dAutomask', - argstr='-apply_prefix %s', - name_source='in_file') + name_template="%s_masked", + desc="output file from 3dAutomask", + argstr="-apply_prefix %s", + name_source="in_file", + ) clfrac = traits.Float( - desc='sets the clip level fraction (must be 0.1-0.9). 
A small value " + "will tend to make the mask larger [default = 0.5].", + argstr="-clfrac %s", + ) + dilate = traits.Int(desc="dilate the mask outwards", argstr="-dilate %s") + erode = traits.Int(desc="erode the mask inwards", argstr="-erode %s") class AutomaskOutputSpec(TraitedSpec): - out_file = File(desc='mask file', exists=True) - brain_file = File(desc='brain file (skull stripped)', exists=True) + out_file = File(desc="mask file", exists=True) + brain_file = File(desc="brain file (skull stripped)", exists=True) class Automask(AFNICommand): @@ -622,8 +721,7 @@ class Automask(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> automask = afni.Automask() >>> automask.inputs.in_file = 'functional.nii' @@ -635,91 +733,101 @@ class Automask(AFNICommand): """ - _cmd = '3dAutomask' + _cmd = "3dAutomask" input_spec = AutomaskInputSpec output_spec = AutomaskOutputSpec class AutoTLRCInputSpec(CommandLineInputSpec): outputtype = traits.Enum( - 'AFNI', list(Info.ftypes.keys()), desc='AFNI output filetype') + "AFNI", list(Info.ftypes.keys()), desc="AFNI output filetype" + ) in_file = File( - desc='Original anatomical volume (+orig).' - 'The skull is removed by this script' - 'unless instructed otherwise (-no_ss).', - argstr='-input %s', + desc="Original anatomical volume (+orig). " + "The skull is removed by this script " + "unless instructed otherwise (-no_ss).", + argstr="-input %s", mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) base = traits.Str( - desc=' Reference anatomical volume' - ' Usually this volume is in some standard space like' - ' TLRC or MNI space and with afni dataset view of' - ' (+tlrc).' - ' Preferably, this reference volume should have had' - ' the skull removed but that is not mandatory.' - ' AFNI\'s distribution contains several templates.' - ' For a longer list, use "whereami -show_templates"' - 'TT_N27+tlrc --> Single subject, skull stripped volume.' - ' This volume is also known as ' - ' N27_SurfVol_NoSkull+tlrc elsewhere in ' - ' AFNI and SUMA land.' - ' (www.loni.ucla.edu, www.bic.mni.mcgill.ca)' - ' This template has a full set of FreeSurfer' - ' (surfer.nmr.mgh.harvard.edu)' - ' surface models that can be used in SUMA. ' - ' For details, see Talairach-related link:' - ' https://afni.nimh.nih.gov/afni/suma' - 'TT_icbm452+tlrc --> Average volume of 452 normal brains.' - ' Skull Stripped. (www.loni.ucla.edu)' - 'TT_avg152T1+tlrc --> Average volume of 152 normal brains.' - ' Skull Stripped.(www.bic.mni.mcgill.ca)' - 'TT_EPI+tlrc --> EPI template from spm2, masked as TT_avg152T1' - ' TT_avg152 and TT_EPI volume sources are from' - ' SPM\'s distribution. (www.fil.ion.ucl.ac.uk/spm/)' - 'If you do not specify a path for the template, the script' - 'will attempt to locate the template AFNI\'s binaries directory.' - 'NOTE: These datasets have been slightly modified from' - ' their original size to match the standard TLRC' - ' dimensions (Jean Talairach and Pierre Tournoux' - ' Co-Planar Stereotaxic Atlas of the Human Brain' - ' Thieme Medical Publishers, New York, 1988). ' - ' That was done for internal consistency in AFNI.' - ' You may use the original form of these' - ' volumes if you choose but your TLRC coordinates' - ' will not be consistent with AFNI\'s TLRC database' - ' (San Antonio Talairach Daemon database), for example.', + desc="""\ +Reference anatomical volume. +Usually this volume is in some standard space like +TLRC or MNI space and with afni dataset view of +(+tlrc). 
+Preferably, this reference volume should have had +the skull removed but that is not mandatory. +AFNI's distribution contains several templates. +For a longer list, use "whereami -show_templates" +TT_N27+tlrc --> Single subject, skull stripped volume. +This volume is also known as +N27_SurfVol_NoSkull+tlrc elsewhere in +AFNI and SUMA land. +(www.loni.ucla.edu, www.bic.mni.mcgill.ca) +This template has a full set of FreeSurfer +(surfer.nmr.mgh.harvard.edu) +surface models that can be used in SUMA. +For details, see Talairach-related link: +https://afni.nimh.nih.gov/afni/suma +TT_icbm452+tlrc --> Average volume of 452 normal brains. +Skull Stripped. (www.loni.ucla.edu) +TT_avg152T1+tlrc --> Average volume of 152 normal brains. +Skull Stripped.(www.bic.mni.mcgill.ca) +TT_EPI+tlrc --> EPI template from spm2, masked as TT_avg152T1 +TT_avg152 and TT_EPI volume sources are from +SPM's distribution. (www.fil.ion.ucl.ac.uk/spm/) +If you do not specify a path for the template, the script +will attempt to locate the template AFNI's binaries directory. +NOTE: These datasets have been slightly modified from +their original size to match the standard TLRC +dimensions (Jean Talairach and Pierre Tournoux +Co-Planar Stereotaxic Atlas of the Human Brain +Thieme Medical Publishers, New York, 1988). +That was done for internal consistency in AFNI. +You may use the original form of these +volumes if you choose but your TLRC coordinates +will not be consistent with AFNI's TLRC database +(San Antonio Talairach Daemon database), for example.""", mandatory=True, - argstr='-base %s') + argstr="-base %s", + ) no_ss = traits.Bool( - desc='Do not strip skull of input data set' - '(because skull has already been removed' - 'or because template still has the skull)' - 'NOTE: The -no_ss option is not all that optional.' - ' Here is a table of when you should and should not use -no_ss' - ' Template Template' - ' WITH skull WITHOUT skull' - ' Dset.' - ' WITH skull -no_ss xxx ' - ' ' - ' WITHOUT skull No Cigar -no_ss' - ' ' - ' Template means: Your template of choice' - ' Dset. means: Your anatomical dataset' - ' -no_ss means: Skull stripping should not be attempted on Dset' - ' xxx means: Don\'t put anything, the script will strip Dset' - ' No Cigar means: Don\'t try that combination, it makes no sense.', - argstr='-no_ss') + desc="""\ +Do not strip skull of input data set +(because skull has already been removed +or because template still has the skull) +NOTE: The ``-no_ss`` option is not all that optional. +Here is a table of when you should and should not use ``-no_ss`` + + +------------------+------------+---------------+ + | Dataset | Template | + +==================+============+===============+ + | | w/ skull | wo/ skull | + +------------------+------------+---------------+ + | WITH skull | ``-no_ss`` | xxx | + +------------------+------------+---------------+ + | WITHOUT skull | No Cigar | ``-no_ss`` | + +------------------+------------+---------------+ + +Template means: Your template of choice +Dset. means: Your anatomical dataset +``-no_ss`` means: Skull stripping should not be attempted on Dset +xxx means: Don't put anything, the script will strip Dset +No Cigar means: Don't try that combination, it makes no sense.""", + argstr="-no_ss", + ) class AutoTLRC(AFNICommand): - """A minmal wrapper for the AutoTLRC script + """A minimal wrapper for the AutoTLRC script The only option currently supported is no_ss. For complete details, see the `3dQwarp Documentation. 
`_ Examples - ======== + -------- >>> from nipype.interfaces import afni >>> autoTLRC = afni.AutoTLRC() >>> autoTLRC.inputs.in_file = 'structural.nii' @@ -730,86 +838,97 @@ class AutoTLRC(AFNICommand): >>> res = autoTLRC.run() # doctest: +SKIP """ - _cmd = '@auto_tlrc' + + _cmd = "@auto_tlrc" input_spec = AutoTLRCInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - ext = '.HEAD' - outputs['out_file'] = os.path.abspath( - self._gen_fname(self.inputs.in_file, suffix='+tlrc') + ext) + ext = ".HEAD" + outputs["out_file"] = os.path.abspath( + self._gen_fname(self.inputs.in_file, suffix="+tlrc") + ext + ) return outputs class BandpassInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dBandpass', - argstr='%s', + desc="input file to 3dBandpass", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_bp', - desc='output file from 3dBandpass', - argstr='-prefix %s', + name_template="%s_bp", + desc="output file from 3dBandpass", + argstr="-prefix %s", position=1, - name_source='in_file', - genfile=True) - lowpass = traits.Float( - desc='lowpass', argstr='%f', position=-2, mandatory=True) - highpass = traits.Float( - desc='highpass', argstr='%f', position=-3, mandatory=True) - mask = File(desc='mask file', position=2, argstr='-mask %s', exists=True) + name_source="in_file", + ) + lowpass = traits.Float(desc="lowpass", argstr="%f", position=-2, mandatory=True) + highpass = traits.Float(desc="highpass", argstr="%f", position=-3, mandatory=True) + mask = File(desc="mask file", position=2, argstr="-mask %s", exists=True) despike = traits.Bool( - argstr='-despike', - desc='Despike each time series before other processing. Hopefully, ' - 'you don\'t actually need to do this, which is why it is ' - 'optional.') + argstr="-despike", + desc="Despike each time series before other processing. Hopefully, " + "you don't actually need to do this, which is why it is " + "optional.", + ) orthogonalize_file = InputMultiPath( File(exists=True), - argstr='-ort %s', - desc='Also orthogonalize input to columns in f.1D. Multiple \'-ort\' ' - 'options are allowed.') + argstr="-ort %s", + desc="Also orthogonalize input to columns in f.1D. Multiple '-ort' " + "options are allowed.", + ) orthogonalize_dset = File( exists=True, - argstr='-dsort %s', - desc='Orthogonalize each voxel to the corresponding voxel time series ' - 'in dataset \'fset\', which must have the same spatial and ' - 'temporal grid structure as the main input dataset. At present, ' - 'only one \'-dsort\' option is allowed.') + argstr="-dsort %s", + desc="Orthogonalize each voxel to the corresponding voxel time series " + "in dataset 'fset', which must have the same spatial and " + "temporal grid structure as the main input dataset. At present, " + "only one '-dsort' option is allowed.", + ) no_detrend = traits.Bool( - argstr='-nodetrend', - desc='Skip the quadratic detrending of the input that occurs before ' - 'the FFT-based bandpassing. You would only want to do this if ' - 'the dataset had been detrended already in some other program.') + argstr="-nodetrend", + desc="Skip the quadratic detrending of the input that occurs before " + "the FFT-based bandpassing. 
You would only want to do this if " + "the dataset had been detrended already in some other program.", + ) tr = traits.Float( - argstr='-dt %f', - desc='Set time step (TR) in sec [default=from dataset header].') + argstr="-dt %f", desc="Set time step (TR) in sec [default=from dataset header]." + ) nfft = traits.Int( - argstr='-nfft %d', desc='Set the FFT length [must be a legal value].') + argstr="-nfft %d", desc="Set the FFT length [must be a legal value]." + ) normalize = traits.Bool( - argstr='-norm', - desc='Make all output time series have L2 norm = 1 (i.e., sum of ' - 'squares = 1).') + argstr="-norm", + desc="Make all output time series have L2 norm = 1 (i.e., sum of " + "squares = 1).", + ) automask = traits.Bool( - argstr='-automask', desc='Create a mask from the input dataset.') + argstr="-automask", desc="Create a mask from the input dataset." + ) blur = traits.Float( - argstr='-blur %f', - desc='Blur (inside the mask only) with a filter width (FWHM) of ' - '\'fff\' millimeters.') + argstr="-blur %f", + desc="Blur (inside the mask only) with a filter width (FWHM) of " + "'fff' millimeters.", + ) localPV = traits.Float( - argstr='-localPV %f', - desc='Replace each vector by the local Principal Vector (AKA first ' - 'singular vector) from a neighborhood of radius \'rrr\' ' - 'millimeters. Note that the PV time series is L2 normalized. ' - 'This option is mostly for Bob Cox to have fun with.') + argstr="-localPV %f", + desc="Replace each vector by the local Principal Vector (AKA first " + "singular vector) from a neighborhood of radius 'rrr' " + "millimeters. Note that the PV time series is L2 normalized. " + "This option is mostly for Bob Cox to have fun with.", + ) notrans = traits.Bool( - argstr='-notrans', - desc='Don\'t check for initial positive transients in the data. ' - 'The test is a little slow, so skipping it is OK, if you KNOW ' - 'the data time series are transient-free.') + argstr="-notrans", + desc="Don't check for initial positive transients in the data. " + "The test is a little slow, so skipping it is OK, if you KNOW " + "the data time series are transient-free.", + ) class Bandpass(AFNICommand): @@ -820,8 +939,7 @@ class Bandpass(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> from nipype.testing import example_data >>> bandpass = afni.Bandpass() @@ -834,46 +952,52 @@ class Bandpass(AFNICommand): """ - _cmd = '3dBandpass' + _cmd = "3dBandpass" input_spec = BandpassInputSpec output_spec = AFNICommandOutputSpec class BlurInMaskInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dSkullStrip', - argstr='-input %s', + desc="input file to 3dSkullStrip", + argstr="-input %s", position=1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_blur', - desc='output to the file', - argstr='-prefix %s', - name_source='in_file', - position=-1) + name_template="%s_blur", + desc="output to the file", + argstr="-prefix %s", + name_source="in_file", + position=-1, + ) mask = File( - desc='Mask dataset, if desired. Blurring will occur only within the ' - 'mask. Voxels NOT in the mask will be set to zero in the output.', - argstr='-mask %s') + desc="Mask dataset, if desired. Blurring will occur only within the " + "mask. 
Voxels NOT in the mask will be set to zero in the output.", + argstr="-mask %s", + ) multimask = File( - desc='Multi-mask dataset -- each distinct nonzero value in dataset ' - 'will be treated as a separate mask for blurring purposes.', - argstr='-Mmask %s') + desc="Multi-mask dataset -- each distinct nonzero value in dataset " + "will be treated as a separate mask for blurring purposes.", + argstr="-Mmask %s", + ) automask = traits.Bool( - desc='Create an automask from the input dataset.', argstr='-automask') - fwhm = traits.Float( - desc='fwhm kernel size', argstr='-FWHM %f', mandatory=True) + desc="Create an automask from the input dataset.", argstr="-automask" + ) + fwhm = traits.Float(desc="fwhm kernel size", argstr="-FWHM %f", mandatory=True) preserve = traits.Bool( - desc='Normally, voxels not in the mask will be set to zero in the ' - 'output. If you want the original values in the dataset to be ' - 'preserved in the output, use this option.', - argstr='-preserve') + desc="Normally, voxels not in the mask will be set to zero in the " + "output. If you want the original values in the dataset to be " + "preserved in the output, use this option.", + argstr="-preserve", + ) float_out = traits.Bool( - desc='Save dataset as floats, no matter what the input data type is.', - argstr='-float') - options = Str(desc='options', argstr='%s', position=2) + desc="Save dataset as floats, no matter what the input data type is.", + argstr="-float", + ) + options = Str(desc="options", argstr="%s", position=2) class BlurInMask(AFNICommand): @@ -883,8 +1007,7 @@ class BlurInMask(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> bim = afni.BlurInMask() >>> bim.inputs.in_file = 'functional.nii' @@ -896,34 +1019,39 @@ class BlurInMask(AFNICommand): """ - _cmd = '3dBlurInMask' + _cmd = "3dBlurInMask" input_spec = BlurInMaskInputSpec output_spec = AFNICommandOutputSpec class BlurToFWHMInputSpec(AFNICommandInputSpec): in_file = File( - desc='The dataset that will be smoothed', - argstr='-input %s', + desc="The dataset that will be smoothed", + argstr="-input %s", mandatory=True, - exists=True) + exists=True, + ) automask = traits.Bool( - desc='Create an automask from the input dataset.', argstr='-automask') + desc="Create an automask from the input dataset.", argstr="-automask" + ) fwhm = traits.Float( - desc='Blur until the 3D FWHM reaches this value (in mm)', - argstr='-FWHM %f') + desc="Blur until the 3D FWHM reaches this value (in mm)", argstr="-FWHM %f" + ) fwhmxy = traits.Float( - desc='Blur until the 2D (x,y)-plane FWHM reaches this value (in mm)', - argstr='-FWHMxy %f') + desc="Blur until the 2D (x,y)-plane FWHM reaches this value (in mm)", + argstr="-FWHMxy %f", + ) blurmaster = File( - desc='The dataset whose smoothness controls the process.', - argstr='-blurmaster %s', - exists=True) + desc="The dataset whose smoothness controls the process.", + argstr="-blurmaster %s", + exists=True, + ) mask = File( - desc='Mask dataset, if desired. Voxels NOT in mask will be set to zero ' - 'in output.', - argstr='-mask %s', - exists=True) + desc="Mask dataset, if desired. 
Voxels NOT in mask will be set to zero " + "in output.", + argstr="-mask %s", + exists=True, + ) class BlurToFWHM(AFNICommand): @@ -934,8 +1062,7 @@ class BlurToFWHM(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> blur = afni.preprocess.BlurToFWHM() >>> blur.inputs.in_file = 'epi.nii' @@ -945,37 +1072,42 @@ class BlurToFWHM(AFNICommand): >>> res = blur.run() # doctest: +SKIP """ - _cmd = '3dBlurToFWHM' + + _cmd = "3dBlurToFWHM" input_spec = BlurToFWHMInputSpec output_spec = AFNICommandOutputSpec class ClipLevelInputSpec(CommandLineInputSpec): in_file = File( - desc='input file to 3dClipLevel', - argstr='%s', + desc="input file to 3dClipLevel", + argstr="%s", position=-1, mandatory=True, - exists=True) + exists=True, + ) mfrac = traits.Float( - desc='Use the number ff instead of 0.50 in the algorithm', - argstr='-mfrac %s', - position=2) + desc="Use the number ff instead of 0.50 in the algorithm", + argstr="-mfrac %s", + position=2, + ) doall = traits.Bool( - desc='Apply the algorithm to each sub-brick separately.', - argstr='-doall', + desc="Apply the algorithm to each sub-brick separately.", + argstr="-doall", position=3, - xor=('grad')) - grad = traits.File( - desc='Also compute a \'gradual\' clip level as a function of voxel ' - 'position, and output that to a dataset.', - argstr='-grad %s', + xor=["grad"], + ) + grad = File( + desc="Also compute a 'gradual' clip level as a function of voxel " + "position, and output that to a dataset.", + argstr="-grad %s", position=3, - xor=('doall')) + xor=["doall"], + ) class ClipLevelOutputSpec(TraitedSpec): - clip_val = traits.Float(desc='output') + clip_val = traits.Float(desc="output") class ClipLevel(AFNICommandBase): @@ -986,8 +1118,7 @@ class ClipLevel(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces.afni import preprocess >>> cliplevel = preprocess.ClipLevel() >>> cliplevel.inputs.in_file = 'anatomical.nii' @@ -996,24 +1127,24 @@ class ClipLevel(AFNICommandBase): >>> res = cliplevel.run() # doctest: +SKIP """ - _cmd = '3dClipLevel' + + _cmd = "3dClipLevel" input_spec = ClipLevelInputSpec output_spec = ClipLevelOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): - outputs = self._outputs() - outfile = os.path.join(os.getcwd(), 'stat_result.json') + outfile = os.path.join(os.getcwd(), "stat_result.json") if runtime is None: try: - clip_val = load_json(outfile)['stat'] - except IOError: + clip_val = load_json(outfile)["stat"] + except OSError: return self.run().outputs else: clip_val = [] - for line in runtime.stdout.split('\n'): + for line in runtime.stdout.split("\n"): if line: values = line.split() if len(values) > 1: @@ -1030,31 +1161,32 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): class DegreeCentralityInputSpec(CentralityInputSpec): - """DegreeCentrality inputspec - """ + """DegreeCentrality inputspec""" in_file = File( - desc='input file to 3dDegreeCentrality', - argstr='%s', + desc="input file to 3dDegreeCentrality", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) sparsity = traits.Float( - desc='only take the top percent of connections', argstr='-sparsity %f') + desc="only take the top percent of connections", argstr="-sparsity %f" + ) oned_file = Str( - desc='output filepath to text dump of correlation matrix', - argstr='-out1D %s') + desc="output filepath to text dump of correlation matrix", argstr="-out1D %s" + ) class 
DegreeCentralityOutputSpec(AFNICommandOutputSpec): - """DegreeCentrality outputspec - """ + """DegreeCentrality outputspec""" oned_file = File( - desc='The text output of the similarity matrix computed after ' - 'thresholding with one-dimensional and ijk voxel indices, ' - 'correlations, image extents, and affine matrix.') + desc="The text output of the similarity matrix computed after " + "thresholding with one-dimensional and ijk voxel indices, " + "correlations, image extents, and affine matrix." + ) class DegreeCentrality(AFNICommand): @@ -1065,8 +1197,7 @@ class DegreeCentrality(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> degree = afni.DegreeCentrality() >>> degree.inputs.in_file = 'functional.nii' @@ -1079,36 +1210,35 @@ class DegreeCentrality(AFNICommand): """ - _cmd = '3dDegreeCentrality' + _cmd = "3dDegreeCentrality" input_spec = DegreeCentralityInputSpec output_spec = DegreeCentralityOutputSpec # Re-define generated inputs def _list_outputs(self): - # Import packages - import os - # Update outputs dictionary if oned file is defined - outputs = super(DegreeCentrality, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.oned_file: - outputs['oned_file'] = os.path.abspath(self.inputs.oned_file) + outputs["oned_file"] = os.path.abspath(self.inputs.oned_file) return outputs class DespikeInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dDespike', - argstr='%s', + desc="input file to 3dDespike", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_despike', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_despike", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) class Despike(AFNICommand): @@ -1118,8 +1248,7 @@ class Despike(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> despike = afni.Despike() >>> despike.inputs.in_file = 'functional.nii' @@ -1129,24 +1258,26 @@ class Despike(AFNICommand): """ - _cmd = '3dDespike' + _cmd = "3dDespike" input_spec = DespikeInputSpec output_spec = AFNICommandOutputSpec class DetrendInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dDetrend', - argstr='%s', + desc="input file to 3dDetrend", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_detrend', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_detrend", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) class Detrend(AFNICommand): @@ -1157,8 +1288,7 @@ class Detrend(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> detrend = afni.Detrend() >>> detrend.inputs.in_file = 'functional.nii' @@ -1170,53 +1300,62 @@ class Detrend(AFNICommand): """ - _cmd = '3dDetrend' + _cmd = "3dDetrend" input_spec = DetrendInputSpec output_spec = AFNICommandOutputSpec class ECMInputSpec(CentralityInputSpec): - """ECM inputspec - """ + """ECM inputspec""" in_file = File( - desc='input file to 3dECM', - argstr='%s', + desc="input file to 3dECM", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) sparsity = traits.Float( - desc='only take the top percent of connections', argstr='-sparsity %f') + desc="only take the top percent of connections", 
argstr="-sparsity %f" + ) full = traits.Bool( - desc='Full power method; enables thresholding; automatically selected ' - 'if -thresh or -sparsity are set', - argstr='-full') + desc="Full power method; enables thresholding; automatically selected " + "if -thresh or -sparsity are set", + argstr="-full", + ) fecm = traits.Bool( - desc='Fast centrality method; substantial speed increase but cannot ' - 'accomodate thresholding; automatically selected if -thresh or ' - '-sparsity are not set', - argstr='-fecm') + desc="Fast centrality method; substantial speed increase but cannot " + "accommodate thresholding; automatically selected if -thresh or " + "-sparsity are not set", + argstr="-fecm", + ) shift = traits.Float( - desc='shift correlation coefficients in similarity matrix to enforce ' - 'non-negativity, s >= 0.0; default = 0.0 for -full, 1.0 for -fecm', - argstr='-shift %f') + desc="shift correlation coefficients in similarity matrix to enforce " + "non-negativity, s >= 0.0; default = 0.0 for -full, 1.0 for -fecm", + argstr="-shift %f", + ) scale = traits.Float( - desc='scale correlation coefficients in similarity matrix to after ' - 'shifting, x >= 0.0; default = 1.0 for -full, 0.5 for -fecm', - argstr='-scale %f') + desc="scale correlation coefficients in similarity matrix to after " + "shifting, x >= 0.0; default = 1.0 for -full, 0.5 for -fecm", + argstr="-scale %f", + ) eps = traits.Float( - desc='sets the stopping criterion for the power iteration; ' - 'l2|v_old - v_new| < eps*|v_old|; default = 0.001', - argstr='-eps %f') + desc="sets the stopping criterion for the power iteration; " + ":math:`l2\\|v_\\text{old} - v_\\text{new}\\| < eps\\|v_\\text{old}\\|`; " + "default = 0.001", + argstr="-eps %f", + ) max_iter = traits.Int( - desc='sets the maximum number of iterations to use in the power ' - 'iteration; default = 1000', - argstr='-max_iter %d') + desc="sets the maximum number of iterations to use in the power " + "iteration; default = 1000", + argstr="-max_iter %d", + ) memory = traits.Float( - desc='Limit memory consumption on system by setting the amount of GB ' - 'to limit the algorithm to; default = 2GB', - argstr='-memory %f') + desc="Limit memory consumption on system by setting the amount of GB " + "to limit the algorithm to; default = 2GB", + argstr="-memory %f", + ) class ECM(AFNICommand): @@ -1227,8 +1366,7 @@ class ECM(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> ecm = afni.ECM() >>> ecm.inputs.in_file = 'functional.nii' @@ -1241,38 +1379,39 @@ class ECM(AFNICommand): """ - _cmd = '3dECM' + _cmd = "3dECM" input_spec = ECMInputSpec output_spec = AFNICommandOutputSpec class FimInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dfim+', - argstr='-input %s', + desc="input file to 3dfim+", + argstr="-input %s", position=1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_fim', - desc='output image file name', - argstr='-bucket %s', - name_source='in_file') + name_template="%s_fim", + desc="output image file name", + argstr="-bucket %s", + name_source="in_file", + ) ideal_file = File( - desc='ideal time series file name', - argstr='-ideal_file %s', + desc="ideal time series file name", + argstr="-ideal_file %s", position=2, mandatory=True, - exists=True) + exists=True, + ) fim_thr = traits.Float( - desc='fim internal mask threshold value', - argstr='-fim_thr %f', - position=3) + desc="fim internal mask threshold value", argstr="-fim_thr %f", position=3 
+ ) out = Str( - desc='Flag to output the specified parameter', - argstr='-out %s', - position=4) + desc="Flag to output the specified parameter", argstr="-out %s", position=4 + ) class Fim(AFNICommand): @@ -1283,8 +1422,7 @@ class Fim(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> fim = afni.Fim() >>> fim.inputs.in_file = 'functional.nii' @@ -1298,32 +1436,33 @@ class Fim(AFNICommand): """ - _cmd = '3dfim+' + _cmd = "3dfim+" input_spec = FimInputSpec output_spec = AFNICommandOutputSpec class FourierInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dFourier', - argstr='%s', + desc="input file to 3dFourier", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_fourier', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') - lowpass = traits.Float( - desc='lowpass', argstr='-lowpass %f', mandatory=True) - highpass = traits.Float( - desc='highpass', argstr='-highpass %f', mandatory=True) + name_template="%s_fourier", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) + lowpass = traits.Float(desc="lowpass", argstr="-lowpass %f", mandatory=True) + highpass = traits.Float(desc="highpass", argstr="-highpass %f", mandatory=True) retrend = traits.Bool( - desc='Any mean and linear trend are removed before filtering. This ' - 'will restore the trend after filtering.', - argstr='-retrend') + desc="Any mean and linear trend are removed before filtering. This " + "will restore the trend after filtering.", + argstr="-retrend", + ) class Fourier(AFNICommand): @@ -1334,8 +1473,7 @@ class Fourier(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> fourier = afni.Fourier() >>> fourier.inputs.in_file = 'functional.nii' @@ -1348,48 +1486,48 @@ class Fourier(AFNICommand): """ - _cmd = '3dFourier' + _cmd = "3dFourier" input_spec = FourierInputSpec output_spec = AFNICommandOutputSpec class HistInputSpec(CommandLineInputSpec): in_file = File( - desc='input file to 3dHist', - argstr='-input %s', + desc="input file to 3dHist", + argstr="-input %s", position=1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - desc='Write histogram to niml file with this prefix', - name_template='%s_hist', + desc="Write histogram to niml file with this prefix", + name_template="%s_hist", keep_extension=False, - argstr='-prefix %s', - name_source=['in_file']) + argstr="-prefix %s", + name_source=["in_file"], + ) showhist = traits.Bool( - False, - usedefault=True, - desc='write a text visual histogram', - argstr='-showhist') + False, usedefault=True, desc="write a text visual histogram", argstr="-showhist" + ) out_show = File( - name_template='%s_hist.out', - desc='output image file name', + name_template="%s_hist.out", + desc="output image file name", keep_extension=False, - argstr='> %s', - name_source='in_file', - position=-1) - mask = File( - desc='matrix to align input file', argstr='-mask %s', exists=True) - nbin = traits.Int(desc='number of bins', argstr='-nbin %d') - max_value = traits.Float(argstr='-max %f', desc='maximum intensity value') - min_value = traits.Float(argstr='-min %f', desc='minimum intensity value') - bin_width = traits.Float(argstr='-binwidth %f', desc='bin width') + argstr="> %s", + name_source="in_file", + position=-1, + ) + mask = File(desc="compute histogram only within this mask dataset", argstr="-mask %s", exists=True) + nbin = 
traits.Int(desc="number of bins", argstr="-nbin %d") + max_value = traits.Float(argstr="-max %f", desc="maximum intensity value") + min_value = traits.Float(argstr="-min %f", desc="minimum intensity value") + bin_width = traits.Float(argstr="-binwidth %f", desc="bin width") class HistOutputSpec(TraitedSpec): - out_file = File(desc='output file', exists=True) - out_show = File(desc='output visual histogram') + out_file = File(desc="output file", exists=True) + out_show = File(desc="output visual histogram") class Hist(AFNICommandBase): @@ -1400,8 +1538,7 @@ class Hist(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> hist = afni.Hist() >>> hist.inputs.in_file = 'functional.nii' @@ -1411,13 +1548,13 @@ class Hist(AFNICommandBase): """ - _cmd = '3dHist' + _cmd = "3dHist" input_spec = HistInputSpec output_spec = HistOutputSpec _redirect_x = True def __init__(self, **inputs): - super(Hist, self).__init__(**inputs) + super().__init__(**inputs) if not no_afni(): version = Info.version() @@ -1429,28 +1566,28 @@ def _parse_inputs(self, skip=None): if not self.inputs.showhist: if skip is None: skip = [] - skip += ['out_show'] - return super(Hist, self)._parse_inputs(skip=skip) + skip += ["out_show"] + return super()._parse_inputs(skip=skip) def _list_outputs(self): - outputs = super(Hist, self)._list_outputs() - outputs['out_file'] += '.niml.hist' + outputs = super()._list_outputs() + outputs["out_file"] += ".niml.hist" if not self.inputs.showhist: - outputs['out_show'] = Undefined + outputs["out_show"] = Undefined return outputs class LFCDInputSpec(CentralityInputSpec): - """LFCD inputspec - """ + """LFCD inputspec""" in_file = File( - desc='input file to 3dLFCD', - argstr='%s', + desc="input file to 3dLFCD", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) class LFCD(AFNICommand): @@ -1461,8 +1598,7 @@ class LFCD(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> lfcd = afni.LFCD() >>> lfcd.inputs.in_file = 'functional.nii' @@ -1474,33 +1610,32 @@ class LFCD(AFNICommand): >>> res = lfcd.run() # doctest: +SKIP """ - _cmd = '3dLFCD' + _cmd = "3dLFCD" input_spec = LFCDInputSpec output_spec = AFNICommandOutputSpec class MaskaveInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dmaskave', - argstr='%s', + desc="input file to 3dmaskave", + argstr="%s", position=-2, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_maskave.1D', - desc='output image file name', + name_template="%s_maskave.1D", + desc="output image file name", keep_extension=True, - argstr='> %s', - name_source='in_file', - position=-1) + argstr="> %s", + name_source="in_file", + position=-1, + ) mask = File( - desc='matrix to align input file', - argstr='-mask %s', - position=1, - exists=True) - quiet = traits.Bool( - desc='matrix to align input file', argstr='-quiet', position=2) + desc="matrix to align input file", argstr="-mask %s", position=1, exists=True + ) + quiet = traits.Bool(desc="matrix to align input file", argstr="-quiet", position=2) class Maskave(AFNICommand): @@ -1511,8 +1646,7 @@ class Maskave(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> maskave = afni.Maskave() >>> maskave.inputs.in_file = 'functional.nii' @@ -1524,40 +1658,39 @@ class Maskave(AFNICommand): """ - _cmd = '3dmaskave' + _cmd = "3dmaskave" input_spec = MaskaveInputSpec output_spec = 
AFNICommandOutputSpec class MeansInputSpec(AFNICommandInputSpec): in_file_a = File( - desc='input file to 3dMean', - argstr='%s', + desc="input file to 3dMean", + argstr="%s", position=-2, mandatory=True, - exists=True) + exists=True, + ) in_file_b = File( - desc='another input file to 3dMean', - argstr='%s', - position=-1, - exists=True) + desc="another input file to 3dMean", argstr="%s", position=-1, exists=True + ) datum = traits.Str( - desc='Sets the data type of the output dataset', argstr='-datum %s') + desc="Sets the data type of the output dataset", argstr="-datum %s" + ) out_file = File( - name_template='%s_mean', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file_a') - scale = Str(desc='scaling of output', argstr='-%sscale') - non_zero = traits.Bool(desc='use only non-zero values', argstr='-non_zero') - std_dev = traits.Bool(desc='calculate std dev', argstr='-stdev') - sqr = traits.Bool(desc='mean square instead of value', argstr='-sqr') - summ = traits.Bool(desc='take sum, (not average)', argstr='-sum') - count = traits.Bool( - desc='compute count of non-zero voxels', argstr='-count') - mask_inter = traits.Bool( - desc='create intersection mask', argstr='-mask_inter') - mask_union = traits.Bool(desc='create union mask', argstr='-mask_union') + name_template="%s_mean", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file_a", + ) + scale = Str(desc="scaling of output", argstr="-%sscale") + non_zero = traits.Bool(desc="use only non-zero values", argstr="-non_zero") + std_dev = traits.Bool(desc="calculate std dev", argstr="-stdev") + sqr = traits.Bool(desc="mean square instead of value", argstr="-sqr") + summ = traits.Bool(desc="take sum (not average)", argstr="-sum") + count = traits.Bool(desc="compute count of non-zero voxels", argstr="-count") + mask_inter = traits.Bool(desc="create intersection mask", argstr="-mask_inter") + mask_union = traits.Bool(desc="create union mask", argstr="-mask_union") class Means(AFNICommand): @@ -1567,8 +1700,7 @@ class Means(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> means = afni.Means() >>> means.inputs.in_file_a = 'im1.nii' @@ -1589,81 +1721,82 @@ class Means(AFNICommand): """ - _cmd = '3dMean' + _cmd = "3dMean" input_spec = MeansInputSpec output_spec = AFNICommandOutputSpec class OutlierCountInputSpec(CommandLineInputSpec): in_file = File( - argstr='%s', - mandatory=True, - exists=True, - position=-2, - desc='input dataset') + argstr="%s", mandatory=True, exists=True, position=-2, desc="input dataset" + ) mask = File( exists=True, - argstr='-mask %s', - xor=['autoclip', 'automask'], - desc='only count voxels within the given mask') + argstr="-mask %s", + xor=["autoclip", "automask"], + desc="only count voxels within the given mask", + ) qthr = traits.Range( value=1e-3, low=0.0, high=1.0, usedefault=True, - argstr='-qthr %.5f', - desc='indicate a value for q to compute alpha') + argstr="-qthr %.5f", + desc="indicate a value for q to compute alpha", + ) autoclip = traits.Bool( False, usedefault=True, - argstr='-autoclip', - xor=['mask'], - desc='clip off small voxels') + argstr="-autoclip", + xor=["mask"], + desc="clip off small voxels", + ) automask = traits.Bool( False, usedefault=True, - argstr='-automask', - xor=['mask'], - desc='clip off small voxels') + argstr="-automask", + xor=["mask"], + desc="clip off small voxels", + ) fraction = traits.Bool( False, usedefault=True, - argstr='-fraction', - desc='write out the fraction of masked 
voxels which are outliers at ' - 'each timepoint') + argstr="-fraction", + desc="write out the fraction of masked voxels which are outliers at " + "each timepoint", + ) interval = traits.Bool( False, usedefault=True, - argstr='-range', - desc='write out the median + 3.5 MAD of outlier count with each ' - 'timepoint') - save_outliers = traits.Bool( - False, usedefault=True, desc='enables out_file option') + argstr="-range", + desc="write out the median + 3.5 MAD of outlier count with each timepoint", + ) + save_outliers = traits.Bool(False, usedefault=True, desc="enables out_file option") outliers_file = File( - name_template='%s_outliers', - argstr='-save %s', - name_source=['in_file'], - output_name='out_outliers', + name_template="%s_outliers", + argstr="-save %s", + name_source=["in_file"], + output_name="out_outliers", keep_extension=True, - desc='output image file name') + desc="output image file name", + ) polort = traits.Int( - argstr='-polort %d', - desc='detrend each voxel timeseries with polynomials') + argstr="-polort %d", desc="detrend each voxel timeseries with polynomials" + ) legendre = traits.Bool( - False, - usedefault=True, - argstr='-legendre', - desc='use Legendre polynomials') + False, usedefault=True, argstr="-legendre", desc="use Legendre polynomials" + ) out_file = File( - name_template='%s_outliers', - name_source=['in_file'], + name_template="%s_outliers", + name_source=["in_file"], keep_extension=False, - desc='capture standard output') + desc="capture standard output", + ) class OutlierCountOutputSpec(TraitedSpec): - out_outliers = File(exists=True, desc='output image file name') - out_file = File(desc='capture standard output') + out_outliers = File(exists=True, desc="output image file name") + out_file = File(desc="capture standard output") class OutlierCount(CommandLine): @@ -1674,8 +1807,7 @@ class OutlierCount(CommandLine): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> toutcount = afni.OutlierCount() >>> toutcount.inputs.in_file = 'functional.nii' @@ -1685,10 +1817,10 @@ class OutlierCount(CommandLine): """ - _cmd = '3dToutcount' + _cmd = "3dToutcount" input_spec = OutlierCountInputSpec output_spec = OutlierCountOutputSpec - _terminal_output = 'file_split' + _terminal_output = "file_split" def _parse_inputs(self, skip=None): if skip is None: @@ -1696,97 +1828,96 @@ def _parse_inputs(self, skip=None): # This is not strictly an input, but needs be # set before run() is called. 
- if self.terminal_output == 'none': - self.terminal_output = 'file_split' + if self.terminal_output == "none": + self.terminal_output = "file_split" if not self.inputs.save_outliers: - skip += ['outliers_file'] - return super(OutlierCount, self)._parse_inputs(skip) + skip += ["outliers_file"] + return super()._parse_inputs(skip) - def _run_interface(self, runtime): - runtime = super(OutlierCount, self)._run_interface(runtime) + def _run_interface(self, runtime, correct_return_codes=(0,)): + runtime = super()._run_interface(runtime, correct_return_codes) # Read from runtime.stdout or runtime.merged - with open(op.abspath(self.inputs.out_file), 'w') as outfh: + with open(op.abspath(self.inputs.out_file), "w") as outfh: outfh.write(runtime.stdout or runtime.merged) return runtime def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) if self.inputs.save_outliers: - outputs['out_outliers'] = op.abspath(self.inputs.outliers_file) + outputs["out_outliers"] = op.abspath(self.inputs.outliers_file) return outputs class QualityIndexInputSpec(CommandLineInputSpec): in_file = File( - argstr='%s', - mandatory=True, - exists=True, - position=-2, - desc='input dataset') + argstr="%s", mandatory=True, exists=True, position=-2, desc="input dataset" + ) mask = File( exists=True, - argstr='-mask %s', - xor=['autoclip', 'automask'], - desc='compute correlation only across masked voxels') + argstr="-mask %s", + xor=["autoclip", "automask"], + desc="compute correlation only across masked voxels", + ) spearman = traits.Bool( False, usedefault=True, - argstr='-spearman', - desc='Quality index is 1 minus the Spearman (rank) correlation ' - 'coefficient of each sub-brick with the median sub-brick. ' - '(default).') + argstr="-spearman", + desc="Quality index is 1 minus the Spearman (rank) correlation " + "coefficient of each sub-brick with the median sub-brick. 
" + "(default).", + ) quadrant = traits.Bool( False, usedefault=True, - argstr='-quadrant', - desc='Similar to -spearman, but using 1 minus the quadrant correlation ' - 'coefficient as the quality index.') + argstr="-quadrant", + desc="Similar to -spearman, but using 1 minus the quadrant correlation " + "coefficient as the quality index.", + ) autoclip = traits.Bool( False, usedefault=True, - argstr='-autoclip', - xor=['mask'], - desc='clip off small voxels') + argstr="-autoclip", + xor=["mask"], + desc="clip off small voxels", + ) automask = traits.Bool( False, usedefault=True, - argstr='-automask', - xor=['mask'], - desc='clip off small voxels') - clip = traits.Float(argstr='-clip %f', desc='clip off values below') + argstr="-automask", + xor=["mask"], + desc="clip off small voxels", + ) + clip = traits.Float(argstr="-clip %f", desc="clip off values below") interval = traits.Bool( False, usedefault=True, - argstr='-range', - desc='write out the median + 3.5 MAD of outlier count with each ' - 'timepoint') + argstr="-range", + desc="write out the median + 3.5 MAD of outlier count with each timepoint", + ) out_file = File( - name_template='%s_tqual', - name_source=['in_file'], - argstr='> %s', + name_template="%s_tqual", + name_source=["in_file"], + argstr="> %s", keep_extension=False, position=-1, - desc='capture standard output') + desc="capture standard output", + ) class QualityIndexOutputSpec(TraitedSpec): - out_file = File(desc='file containing the captured standard output') + out_file = File(desc="file containing the captured standard output") class QualityIndex(CommandLine): - """Computes a `quality index' for each sub-brick in a 3D+time dataset. + """Computes a quality index for each sub-brick in a 3D+time dataset. The output is a 1D time series with the index for each sub-brick. The results are written to stdout. - For complete details, see the `3dTqual Documentation - `_ - Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tqual = afni.QualityIndex() >>> tqual.inputs.in_file = 'functional.nii' @@ -1794,111 +1925,141 @@ class QualityIndex(CommandLine): '3dTqual functional.nii > functional_tqual' >>> res = tqual.run() # doctest: +SKIP + See Also + -------- + For complete details, see the `3dTqual Documentation + `_ + """ - _cmd = '3dTqual' + + _cmd = "3dTqual" input_spec = QualityIndexInputSpec output_spec = QualityIndexOutputSpec class ROIStatsInputSpec(CommandLineInputSpec): in_file = File( - desc='input dataset', - argstr='%s', - position=-2, - mandatory=True, - exists=True) - mask = File(desc='input mask', argstr='-mask %s', position=3, exists=True, - deprecated='1.1.4', new_name='mask_file') - mask_file = File(desc='input mask', argstr='-mask %s', exists=True) + desc="input dataset", argstr="%s", position=-2, mandatory=True, exists=True + ) + mask = File( + desc="input mask", + argstr="-mask %s", + position=3, + exists=True, + deprecated="1.1.4", + new_name="mask_file", + ) + mask_file = File(desc="input mask", argstr="-mask %s", exists=True) mask_f2short = traits.Bool( - desc='Tells the program to convert a float mask to short integers, ' - 'by simple rounding.', - argstr='-mask_f2short') + desc="Tells the program to convert a float mask to short integers, " + "by simple rounding.", + argstr="-mask_f2short", + ) num_roi = traits.Int( - desc='Forces the assumption that the mask dataset\'s ROIs are ' - 'denoted by 1 to n inclusive. Normally, the program ' - 'figures out the ROIs on its own. 
This option is ' - 'useful if a) you are certain that the mask dataset ' - 'has no values outside the range [0 n], b) there may ' - 'be some ROIs missing between [1 n] in the mask data-' - 'set and c) you want those columns in the output any-' - 'way so the output lines up with the output from other ' - 'invocations of 3dROIstats.', - argstr='-numroi %s') + desc="Forces the assumption that the mask dataset's ROIs are " + "denoted by 1 to n inclusive. Normally, the program " + "figures out the ROIs on its own. This option is " + "useful if a) you are certain that the mask dataset " + "has no values outside the range [0 n], b) there may " + "be some ROIs missing between [1 n] in the mask data-" + "set and c) you want those columns in the output any-" + "way so the output lines up with the output from other " + "invocations of 3dROIstats.", + argstr="-numroi %s", + ) zerofill = traits.Str( - requires=['num_roi'], - desc='For ROI labels not found, use the provided string instead of ' - 'a \'0\' in the output file. Only active if `num_roi` is ' - 'enabled.', - argstr='-zerofill %s') - roisel = traits.File( - exists=True, - desc='Only considers ROIs denoted by values found in the specified ' - 'file. Note that the order of the ROIs as specified in the file ' - 'is not preserved. So an SEL.1D of \'2 8 20\' produces the same ' - 'output as \'8 20 2\'', - argstr='-roisel %s') - debug = traits.Bool( - desc='print debug information', - argstr='-debug') - quiet = traits.Bool( - desc='execute quietly', - argstr='-quiet') + requires=["num_roi"], + desc="For ROI labels not found, use the provided string instead of " + "a '0' in the output file. Only active if `num_roi` is " + "enabled.", + argstr="-zerofill %s", + ) + roisel = File( + exists=True, + desc="Only considers ROIs denoted by values found in the specified " + "file. Note that the order of the ROIs as specified in the file " + "is not preserved. So an SEL.1D of '2 8 20' produces the same " + "output as '8 20 2'", + argstr="-roisel %s", + ) + debug = traits.Bool(desc="print debug information", argstr="-debug") + quiet = traits.Bool(desc="execute quietly", argstr="-quiet") nomeanout = traits.Bool( - desc='Do not include the (zero-inclusive) mean among computed stats', - argstr='-nomeanout') + desc="Do not include the (zero-inclusive) mean among computed stats", + argstr="-nomeanout", + ) nobriklab = traits.Bool( - desc='Do not print the sub-brick label next to its index', - argstr='-nobriklab') + desc="Do not print the sub-brick label next to its index", argstr="-nobriklab" + ) format1D = traits.Bool( - xor=['format1DR'], - desc='Output results in a 1D format that includes commented labels', - argstr='-1Dformat') + xor=["format1DR"], + desc="Output results in a 1D format that includes commented labels", + argstr="-1Dformat", + ) format1DR = traits.Bool( - xor=['format1D'], - desc='Output results in a 1D format that includes uncommented ' - 'labels. May not work optimally with typical 1D functions, ' - 'but is useful for R functions.', - argstr='-1DRformat') - _stat_names = ['mean', 'sum', 'voxels', 'minmax', 'sigma', 'median', - 'mode', 'summary', 'zerominmax', 'zerosigma', 'zeromedian', - 'zeromode'] + xor=["format1D"], + desc="Output results in a 1D format that includes uncommented " + "labels. 
May not work optimally with typical 1D functions, " + "but is useful for R functions.", + argstr="-1DRformat", + ) + _stat_names = [ + "mean", + "sum", + "voxels", + "minmax", + "sigma", + "median", + "mode", + "summary", + "zerominmax", + "zerosigma", + "zeromedian", + "zeromode", + ] stat = InputMultiObject( traits.Enum(_stat_names), - desc='statistics to compute. Options include: ' - ' * mean = Compute the mean using only non_zero voxels.' - ' Implies the opposite for the mean computed ' - ' by default.\n' - ' * median = Compute the median of nonzero voxels\n' - ' * mode = Compute the mode of nonzero voxels.' - ' (integral valued sets only)\n' - ' * minmax = Compute the min/max of nonzero voxels\n' - ' * sum = Compute the sum using only nonzero voxels.\n' - ' * voxels = Compute the number of nonzero voxels\n' - ' * sigma = Compute the standard deviation of nonzero' - ' voxels\n' - 'Statistics that include zero-valued voxels:\n' - ' * zerominmax = Compute the min/max of all voxels.\n' - ' * zerosigma = Compute the standard deviation of all' - ' voxels.\n' - ' * zeromedian = Compute the median of all voxels.\n' - ' * zeromode = Compute the mode of all voxels.\n' - ' * summary = Only output a summary line with the grand ' - ' mean across all briks in the input dataset.' - ' This option cannot be used with nomeanout.\n' - 'More that one option can be specified.', - argstr='%s...') + desc="""\ +Statistics to compute. Options include: + + * mean = Compute the mean using only non_zero voxels. + Implies the opposite for the mean computed + by default. + * median = Compute the median of nonzero voxels + * mode = Compute the mode of nonzero voxels. + (integral valued sets only) + * minmax = Compute the min/max of nonzero voxels + * sum = Compute the sum using only nonzero voxels. + * voxels = Compute the number of nonzero voxels + * sigma = Compute the standard deviation of nonzero + voxels + +Statistics that include zero-valued voxels: + + * zerominmax = Compute the min/max of all voxels. + * zerosigma = Compute the standard deviation of all + voxels. + * zeromedian = Compute the median of all voxels. + * zeromode = Compute the mode of all voxels. + * summary = Only output a summary line with the grand + mean across all briks in the input dataset. + This option cannot be used with nomeanout. 
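+
+For example, ``stat=['mean', 'median']`` expands to ``-nzmean -nzmedian`` on
+the command line (see the ``_stat_dict`` mapping in ``_format_arg`` below).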
+
+More than one option can be specified.""",
+        argstr="%s...",
+    )
     out_file = File(
-        name_template='%s_roistat.1D',
-        desc='output file',
+        name_template="%s_roistat.1D",
+        desc="output file",
         keep_extension=False,
-        argstr='> %s',
-        name_source='in_file',
-        position=-1)
+        argstr="> %s",
+        name_source="in_file",
+        position=-1,
+    )
 
 
 class ROIStatsOutputSpec(TraitedSpec):
-    out_file = File(desc='output tab-separated values file', exists=True)
+    out_file = File(desc="output tab-separated values file", exists=True)
 
 
 class ROIStats(AFNICommandBase):
@@ -1908,8 +2069,7 @@ class ROIStats(AFNICommandBase):
     `_
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> roistats = afni.ROIStats()
     >>> roistats.inputs.in_file = 'functional.nii'
@@ -1921,75 +2081,84 @@ class ROIStats(AFNICommandBase):
     >>> res = roistats.run()  # doctest: +SKIP
 
     """
-    _cmd = '3dROIstats'
-    _terminal_output = 'allatonce'
+
+    _cmd = "3dROIstats"
+    _terminal_output = "allatonce"
     input_spec = ROIStatsInputSpec
     output_spec = ROIStatsOutputSpec
 
-    def _format_arg(self, name, spec, value):
+    def _format_arg(self, name, trait_spec, value):
         _stat_dict = {
-            'mean': '-nzmean',
-            'median': '-nzmedian',
-            'mode': '-nzmode',
-            'minmax': '-nzminmax',
-            'sigma': '-nzsigma',
-            'voxels': '-nzvoxels',
-            'sum': '-nzsum',
-            'summary': '-summary',
-            'zerominmax': '-minmax',
-            'zeromedian': '-median',
-            'zerosigma': '-sigma',
-            'zeromode': '-mode'
-        }
-        if name == 'stat':
+            "mean": "-nzmean",
+            "median": "-nzmedian",
+            "mode": "-nzmode",
+            "minmax": "-nzminmax",
+            "sigma": "-nzsigma",
+            "voxels": "-nzvoxels",
+            "sum": "-nzsum",
+            "summary": "-summary",
+            "zerominmax": "-minmax",
+            "zeromedian": "-median",
+            "zerosigma": "-sigma",
+            "zeromode": "-mode",
+        }
+        if name == "stat":
             value = [_stat_dict[v] for v in value]
-        return super(ROIStats, self)._format_arg(name, spec, value)
+        return super()._format_arg(name, trait_spec, value)
 
 
 class RetroicorInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='input file to 3dretroicor',
-        argstr='%s',
+        desc="input file to 3dretroicor",
+        argstr="%s",
         position=-1,
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     out_file = File(
-        name_template='%s_retroicor',
-        name_source=['in_file'],
-        desc='output image file name',
-        argstr='-prefix %s',
-        position=1)
+        name_template="%s_retroicor",
+        name_source=["in_file"],
+        desc="output image file name",
+        argstr="-prefix %s",
+        position=1,
+    )
     card = File(
-        desc='1D cardiac data file for cardiac correction',
-        argstr='-card %s',
+        desc="1D cardiac data file for cardiac correction",
+        argstr="-card %s",
         position=-2,
-        exists=True)
+        exists=True,
+    )
     resp = File(
-        desc='1D respiratory waveform data for correction',
-        argstr='-resp %s',
+        desc="1D respiratory waveform data for correction",
+        argstr="-resp %s",
         position=-3,
-        exists=True)
+        exists=True,
+    )
     threshold = traits.Int(
-        desc='Threshold for detection of R-wave peaks in input (Make sure it '
-        'is above the background noise level, Try 3/4 or 4/5 times range '
-        'plus minimum)',
-        argstr='-threshold %d',
-        position=-4)
+        desc="Threshold for detection of R-wave peaks in input (Make sure it "
+        "is above the background noise level, Try 3/4 or 4/5 times range "
+        "plus minimum)",
+        argstr="-threshold %d",
+        position=-4,
+    )
     order = traits.Int(
-        desc='The order of the correction (2 is typical)',
-        argstr='-order %s',
-        position=-5)
+        desc="The order of the correction (2 is typical)",
+        argstr="-order %s",
+        position=-5,
+    )
     cardphase = File(
-        desc='Filename for 1D
cardiac phase output', - argstr='-cardphase %s', + desc="Filename for 1D cardiac phase output", + argstr="-cardphase %s", position=-6, - hash_files=False) + hash_files=False, + ) respphase = File( - desc='Filename for 1D resp phase output', - argstr='-respphase %s', + desc="Filename for 1D resp phase output", + argstr="-respphase %s", position=-7, - hash_files=False) + hash_files=False, + ) class Retroicor(AFNICommand): @@ -2014,7 +2183,7 @@ class Retroicor(AFNICommand): `_ Examples - ======== + -------- >>> from nipype.interfaces import afni >>> ret = afni.Retroicor() >>> ret.inputs.in_file = 'functional.nii' @@ -2027,67 +2196,75 @@ class Retroicor(AFNICommand): """ - _cmd = '3dretroicor' + _cmd = "3dretroicor" input_spec = RetroicorInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'in_file': - if not isdefined(self.inputs.card) and not isdefined( - self.inputs.resp): + if name == "in_file": + if not isdefined(self.inputs.card) and not isdefined(self.inputs.resp): return None - return super(Retroicor, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) class SegInputSpec(CommandLineInputSpec): in_file = File( - desc='ANAT is the volume to segment', - argstr='-anat %s', + desc="ANAT is the volume to segment", + argstr="-anat %s", position=-1, mandatory=True, exists=True, - copyfile=True) + copyfile=True, + ) mask = traits.Either( - traits.Enum('AUTO'), + traits.Enum("AUTO"), File(exists=True), - desc='only non-zero voxels in mask are analyzed. mask can either be a ' + desc="only non-zero voxels in mask are analyzed. mask can either be a " 'dataset or the string "AUTO" which would use AFNI\'s automask ' - 'function to create the mask.', - argstr='-mask %s', + "function to create the mask.", + argstr="-mask %s", position=-2, - mandatory=True) + mandatory=True, + ) blur_meth = traits.Enum( - 'BFT', - 'BIM', - argstr='-blur_meth %s', - desc='set the blurring method for bias field estimation') + "BFT", + "BIM", + argstr="-blur_meth %s", + desc="set the blurring method for bias field estimation", + ) bias_fwhm = traits.Float( - desc='The amount of blurring used when estimating the field bias with ' - 'the Wells method', - argstr='-bias_fwhm %f') + desc="The amount of blurring used when estimating the field bias with " + "the Wells method", + argstr="-bias_fwhm %f", + ) classes = Str( - desc='CLASS_STRING is a semicolon delimited string of class labels', - argstr='-classes %s') + desc="CLASS_STRING is a semicolon delimited string of class labels", + argstr="-classes %s", + ) bmrf = traits.Float( - desc='Weighting factor controlling spatial homogeneity of the ' - 'classifications', - argstr='-bmrf %f') + desc="Weighting factor controlling spatial homogeneity of the " + "classifications", + argstr="-bmrf %f", + ) bias_classes = Str( - desc='A semicolon delimited string of classes that contribute to the ' - 'estimation of the bias field', - argstr='-bias_classes %s') + desc="A semicolon delimited string of classes that contribute to the " + "estimation of the bias field", + argstr="-bias_classes %s", + ) prefix = Str( - desc='the prefix for the output folder containing all output volumes', - argstr='-prefix %s') + desc="the prefix for the output folder containing all output volumes", + argstr="-prefix %s", + ) mixfrac = Str( - desc='MIXFRAC sets up the volume-wide (within mask) tissue fractions ' - 'while initializing the segmentation (see IGNORE for exception)', - argstr='-mixfrac %s') + 
desc="MIXFRAC sets up the volume-wide (within mask) tissue fractions " + "while initializing the segmentation (see IGNORE for exception)", + argstr="-mixfrac %s", + ) mixfloor = traits.Float( - desc='Set the minimum value for any class\'s mixing fraction', - argstr='-mixfloor %f') - main_N = traits.Int( - desc='Number of iterations to perform.', argstr='-main_N %d') + desc="Set the minimum value for any class's mixing fraction", + argstr="-mixfloor %f", + ) + main_N = traits.Int(desc="Number of iterations to perform.", argstr="-main_N %d") class Seg(AFNICommandBase): @@ -2099,8 +2276,7 @@ class Seg(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces.afni import preprocess >>> seg = preprocess.Seg() >>> seg.inputs.in_file = 'structural.nii' @@ -2111,21 +2287,19 @@ class Seg(AFNICommandBase): """ - _cmd = '3dSeg' + _cmd = "3dSeg" input_spec = SegInputSpec output_spec = AFNICommandOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): - import glob outputs = self._outputs() if isdefined(self.inputs.prefix): - outfile = os.path.join(os.getcwd(), self.inputs.prefix, - 'Classes+*.BRIK') + outfile = os.path.join(os.getcwd(), self.inputs.prefix, "Classes+*.BRIK") else: - outfile = os.path.join(os.getcwd(), 'Segsy', 'Classes+*.BRIK') + outfile = os.path.join(os.getcwd(), "Segsy", "Classes+*.BRIK") outputs.out_file = glob.glob(outfile)[0] @@ -2134,17 +2308,19 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): class SkullStripInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dSkullStrip', - argstr='-input %s', + desc="input file to 3dSkullStrip", + argstr="-input %s", position=1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_skullstrip', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_skullstrip", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) class SkullStrip(AFNICommand): @@ -2156,8 +2332,7 @@ class SkullStrip(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> skullstrip = afni.SkullStrip() >>> skullstrip.inputs.in_file = 'functional.nii' @@ -2167,13 +2342,14 @@ class SkullStrip(AFNICommand): >>> res = skullstrip.run() # doctest: +SKIP """ - _cmd = '3dSkullStrip' + + _cmd = "3dSkullStrip" _redirect_x = True input_spec = SkullStripInputSpec output_spec = AFNICommandOutputSpec def __init__(self, **inputs): - super(SkullStrip, self).__init__(**inputs) + super().__init__(**inputs) if not no_afni(): v = Info.version() @@ -2185,48 +2361,55 @@ def __init__(self, **inputs): class TCorr1DInputSpec(AFNICommandInputSpec): xset = File( - desc='3d+time dataset input', - argstr=' %s', + desc="3d+time dataset input", + argstr=" %s", position=-2, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) y_1d = File( - desc='1D time series file input', - argstr=' %s', + desc="1D time series file input", + argstr=" %s", position=-1, mandatory=True, - exists=True) + exists=True, + ) out_file = File( - desc='output filename prefix', - name_template='%s_correlation.nii.gz', - argstr='-prefix %s', - name_source='xset', - keep_extension=True) + desc="output filename prefix", + name_template="%s_correlation.nii.gz", + argstr="-prefix %s", + name_source="xset", + keep_extension=True, + ) pearson = traits.Bool( - desc='Correlation is the normal Pearson correlation coefficient', - argstr=' -pearson', - xor=['spearman', 'quadrant', 
'ktaub'], - position=1) + desc="Correlation is the normal Pearson correlation coefficient", + argstr=" -pearson", + xor=["spearman", "quadrant", "ktaub"], + position=1, + ) spearman = traits.Bool( - desc='Correlation is the Spearman (rank) correlation coefficient', - argstr=' -spearman', - xor=['pearson', 'quadrant', 'ktaub'], - position=1) + desc="Correlation is the Spearman (rank) correlation coefficient", + argstr=" -spearman", + xor=["pearson", "quadrant", "ktaub"], + position=1, + ) quadrant = traits.Bool( - desc='Correlation is the quadrant correlation coefficient', - argstr=' -quadrant', - xor=['pearson', 'spearman', 'ktaub'], - position=1) + desc="Correlation is the quadrant correlation coefficient", + argstr=" -quadrant", + xor=["pearson", "spearman", "ktaub"], + position=1, + ) ktaub = traits.Bool( - desc='Correlation is the Kendall\'s tau_b correlation coefficient', - argstr=' -ktaub', - xor=['pearson', 'spearman', 'quadrant'], - position=1) + desc="Correlation is the Kendall's tau_b correlation coefficient", + argstr=" -ktaub", + xor=["pearson", "spearman", "quadrant"], + position=1, + ) class TCorr1DOutputSpec(TraitedSpec): - out_file = File(desc='output file containing correlations', exists=True) + out_file = File(desc="output file containing correlations", exists=True) class TCorr1D(AFNICommand): @@ -2246,73 +2429,69 @@ class TCorr1D(AFNICommand): """ - _cmd = '3dTcorr1D' + _cmd = "3dTcorr1D" input_spec = TCorr1DInputSpec output_spec = TCorr1DOutputSpec class TCorrMapInputSpec(AFNICommandInputSpec): - in_file = File( - exists=True, argstr='-input %s', mandatory=True, copyfile=False) - seeds = File(exists=True, argstr='-seed %s', xor=('seeds_width')) - mask = File(exists=True, argstr='-mask %s') - automask = traits.Bool(argstr='-automask') - polort = traits.Int(argstr='-polort %d') - bandpass = traits.Tuple( - (traits.Float(), traits.Float()), argstr='-bpass %f %f') - regress_out_timeseries = traits.File(exists=True, argstr='-ort %s') - blur_fwhm = traits.Float(argstr='-Gblur %f') - seeds_width = traits.Float(argstr='-Mseed %f', xor=('seeds')) + in_file = File(exists=True, argstr="-input %s", mandatory=True, copyfile=False) + seeds = File(exists=True, argstr="-seed %s", xor=("seeds_width")) + mask = File(exists=True, argstr="-mask %s") + automask = traits.Bool(argstr="-automask") + polort = traits.Int(argstr="-polort %d") + bandpass = Tuple((traits.Float(), traits.Float()), argstr="-bpass %f %f") + regress_out_timeseries = File(exists=True, argstr="-ort %s") + blur_fwhm = traits.Float(argstr="-Gblur %f") + seeds_width = traits.Float(argstr="-Mseed %f", xor=("seeds")) # outputs - mean_file = File(argstr='-Mean %s', suffix='_mean', name_source='in_file') - zmean = File(argstr='-Zmean %s', suffix='_zmean', name_source='in_file') - qmean = File(argstr='-Qmean %s', suffix='_qmean', name_source='in_file') - pmean = File(argstr='-Pmean %s', suffix='_pmean', name_source='in_file') - - _thresh_opts = ('absolute_threshold', 'var_absolute_threshold', - 'var_absolute_threshold_normalize') + mean_file = File(argstr="-Mean %s", suffix="_mean", name_source="in_file") + zmean = File(argstr="-Zmean %s", suffix="_zmean", name_source="in_file") + qmean = File(argstr="-Qmean %s", suffix="_qmean", name_source="in_file") + pmean = File(argstr="-Pmean %s", suffix="_pmean", name_source="in_file") + + _thresh_opts = ( + "absolute_threshold", + "var_absolute_threshold", + "var_absolute_threshold_normalize", + ) thresholds = traits.List(traits.Int()) absolute_threshold = File( - argstr='-Thresh %f %s', 
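        # [Editor's note -- illustration only] The three threshold traits here
        # are declared mutually exclusive through ``xor=_thresh_opts``, so
        # assigning more than one raises a trait validation error at run time,
        # e.g. (hypothetical file names):
        #
        #     tcm = afni.TCorrMap(in_file="functional.nii")
        #     tcm.inputs.absolute_threshold = "thr.nii"
        #     tcm.inputs.var_absolute_threshold = "vthr.nii"  # -> raises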
- suffix='_thresh', - name_source='in_file', - xor=_thresh_opts) + argstr="-Thresh %f %s", + suffix="_thresh", + name_source="in_file", + xor=_thresh_opts, + ) var_absolute_threshold = File( - argstr='-VarThresh %f %f %f %s', - suffix='_varthresh', - name_source='in_file', - xor=_thresh_opts) + argstr="-VarThresh %f %f %f %s", + suffix="_varthresh", + name_source="in_file", + xor=_thresh_opts, + ) var_absolute_threshold_normalize = File( - argstr='-VarThreshN %f %f %f %s', - suffix='_varthreshn', - name_source='in_file', - xor=_thresh_opts) + argstr="-VarThreshN %f %f %f %s", + suffix="_varthreshn", + name_source="in_file", + xor=_thresh_opts, + ) - correlation_maps = File(argstr='-CorrMap %s', name_source='in_file') - correlation_maps_masked = File( - argstr='-CorrMask %s', name_source='in_file') + correlation_maps = File(argstr="-CorrMap %s", name_source="in_file") + correlation_maps_masked = File(argstr="-CorrMask %s", name_source="in_file") - _expr_opts = ('average_expr', 'average_expr_nonzero', 'sum_expr') + _expr_opts = ("average_expr", "average_expr_nonzero", "sum_expr") expr = Str() average_expr = File( - argstr='-Aexpr %s %s', - suffix='_aexpr', - name_source='in_file', - xor=_expr_opts) + argstr="-Aexpr %s %s", suffix="_aexpr", name_source="in_file", xor=_expr_opts + ) average_expr_nonzero = File( - argstr='-Cexpr %s %s', - suffix='_cexpr', - name_source='in_file', - xor=_expr_opts) + argstr="-Cexpr %s %s", suffix="_cexpr", name_source="in_file", xor=_expr_opts + ) sum_expr = File( - argstr='-Sexpr %s %s', - suffix='_sexpr', - name_source='in_file', - xor=_expr_opts) + argstr="-Sexpr %s %s", suffix="_sexpr", name_source="in_file", xor=_expr_opts + ) histogram_bin_numbers = traits.Int() - histogram = File( - name_source='in_file', argstr='-Hist %d %s', suffix='_hist') + histogram = File(name_source="in_file", argstr="-Hist %d %s", suffix="_hist") class TCorrMapOutputSpec(TraitedSpec): @@ -2340,8 +2519,7 @@ class TCorrMap(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tcm = afni.TCorrMap() >>> tcm.inputs.in_file = 'functional.nii' @@ -2353,48 +2531,251 @@ class TCorrMap(AFNICommand): """ - _cmd = '3dTcorrMap' + _cmd = "3dTcorrMap" input_spec = TCorrMapInputSpec output_spec = TCorrMapOutputSpec - _additional_metadata = ['suffix'] + _additional_metadata = ["suffix"] def _format_arg(self, name, trait_spec, value): if name in self.inputs._thresh_opts: return trait_spec.argstr % self.inputs.thresholds + [value] elif name in self.inputs._expr_opts: return trait_spec.argstr % (self.inputs.expr, value) - elif name == 'histogram': - return trait_spec.argstr % (self.inputs.histogram_bin_numbers, - value) + elif name == "histogram": + return trait_spec.argstr % (self.inputs.histogram_bin_numbers, value) + else: + return super()._format_arg(name, trait_spec, value) + + +class NetCorrInputSpec(AFNICommandInputSpec): + in_file = File( + desc="input time series file (4D data set)", + exists=True, + argstr="-inset %s", + mandatory=True, + ) + in_rois = File( + desc="input set of ROIs, each labelled with distinct integers", + exists=True, + argstr="-in_rois %s", + mandatory=True, + ) + mask = File( + desc="can include a whole brain mask within which to " + "calculate correlation. Otherwise, data should be " + "masked already", + exists=True, + argstr="-mask %s", + ) + weight_ts = File( + desc="input a 1D file WTS of weights that will be applied " + "multiplicatively to each ROI's average time series. 
" + "WTS can be a column- or row-file of values, but it " + "must have the same length as the input time series " + "volume. " + "If the initial average time series was A[n] for " + "n=0,..,(N-1) time points, then applying a set of " + "weights W[n] of the same length from WTS would " + "produce a new time series: B[n] = A[n] * W[n]", + exists=True, + argstr="-weight_ts %s", + ) + fish_z = traits.Bool( + desc="switch to also output a matrix of Fisher Z-transform " + "values for the corr coefs (r): " + "Z = atanh(r) , " + "(with Z=4 being output along matrix diagonals where " + "r=1, as the r-to-Z conversion is ceilinged at " + "Z = atanh(r=0.999329) = 4, which is still *quite* a " + "high Pearson-r value", + argstr="-fish_z", + ) + part_corr = traits.Bool( + desc="output the partial correlation matrix", argstr="-part_corr" + ) + ts_out = traits.Bool( + desc="switch to output the mean time series of the ROIs that " + "have been used to generate the correlation matrices. " + "Output filenames mirror those of the correlation " + "matrix files, with a '.netts' postfix", + argstr="-ts_out", + ) + ts_label = traits.Bool( + desc="additional switch when using '-ts_out'. Using this " + "option will insert the integer ROI label at the start " + "of each line of the *.netts file created. Thus, for " + "a time series of length N, each line will have N+1 " + "numbers, where the first is the integer ROI label " + "and the subsequent N are scientific notation values", + argstr="-ts_label", + ) + ts_indiv = traits.Bool( + desc="switch to create a directory for each network that " + "contains the average time series for each ROI in " + "individual files (each file has one line). " + "The directories are labelled PREFIX_000_INDIV/, " + "PREFIX_001_INDIV/, etc. (one per network). Within each " + "directory, the files are labelled ROI_001.netts, " + "ROI_002.netts, etc., with the numbers given by the " + "actual ROI integer labels", + argstr="-ts_indiv", + ) + ts_wb_corr = traits.Bool( + desc="switch to create a set of whole brain correlation maps. " + "Performs whole brain correlation for each " + "ROI's average time series; this will automatically " + "create a directory for each network that contains the " + "set of whole brain correlation maps (Pearson 'r's). " + "The directories are labelled as above for '-ts_indiv' " + "Within each directory, the files are labelled " + "WB_CORR_ROI_001+orig, WB_CORR_ROI_002+orig, etc., with " + "the numbers given by the actual ROI integer labels", + argstr="-ts_wb_corr", + ) + ts_wb_Z = traits.Bool( + desc="same as above in '-ts_wb_corr', except that the maps " + "have been Fisher transformed to Z-scores the relation: " + "Z=atanh(r). " + "To avoid infinities in the transform, Pearson values " + "are effectively capped at |r| = 0.999329 (where |Z| = 4.0). " + "Files are labelled WB_Z_ROI_001+orig, etc", + argstr="-ts_wb_Z", + ) + ts_wb_strlabel = traits.Bool( + desc="by default, '-ts_wb_{corr,Z}' output files are named " + "using the int number of a given ROI, such as: " + "WB_Z_ROI_001+orig. " + "With this option, one can replace the int (such as '001') " + "with the string label (such as 'L-thalamus') " + "*if* one has a labeltable attached to the file", + argstr="-ts_wb_strlabel", + ) + nifti = traits.Bool( + desc="output any correlation map files as NIFTI files " + "(default is BRIK/HEAD). 
Only useful if using " + "'-ts_wb_corr' and/or '-ts_wb_Z'", + argstr="-nifti", + ) + output_mask_nonnull = traits.Bool( + desc="internally, this program checks for where there are " + "nonnull time series, because we don't like those, in " + "general. With this flag, the user can output the " + "determined mask of non-null time series.", + argstr="-output_mask_nonnull", + ) + push_thru_many_zeros = traits.Bool( + desc="by default, this program will grind to a halt and " + "refuse to calculate if any ROI contains >10 percent " + "of voxels with null times series (i.e., each point is " + "0), as of April, 2017. This is because it seems most " + "likely that hidden badness is responsible. However, " + "if the user still wants to carry on the calculation " + "anyways, then this option will allow one to push on " + "through. However, if any ROI *only* has null time " + "series, then the program will not calculate and the " + "user will really, really, really need to address their masking", + argstr="-push_thru_many_zeros", + ) + ignore_LT = traits.Bool( + desc="switch to ignore any label table labels in the " + "'-in_rois' file, if there are any labels attached", + argstr="-ignore_LT", + ) + out_file = File( + desc="output file name part", + name_template="%s_netcorr", + argstr="-prefix %s", + position=1, + name_source="in_file", + ) + + +class NetCorrOutputSpec(TraitedSpec): + out_corr_matrix = File( + desc="output correlation matrix between ROIs written to a text file with .netcc suffix" + ) + out_corr_maps = traits.List( + File(), desc="output correlation maps in Pearson and/or Z-scores" + ) + + +class NetCorr(AFNICommand): + """Calculate correlation matrix of a set of ROIs (using mean time series of + each). Several networks may be analyzed simultaneously, one per brick. + + For complete details, see the `3dNetCorr Documentation + `_. 
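+
+    The correlation matrix lands in a ``*.netcc`` text file next to the
+    prefix; whole-brain maps, when requested, go into a per-network
+    ``<prefix>_000_INDIV/`` directory (see ``_list_outputs`` below). A rough,
+    editor-added sketch of reading the matrix afterwards (the ``_000`` suffix
+    and ``#``-comment layout are assumed from the AFNI documentation):
+
+    >>> import numpy as np
+    >>> mat = np.loadtxt('sub0.tp1.ncorr_000.netcc', comments='#')  # doctest: +SKIP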
+
+    Examples
+    --------
+    >>> from nipype.interfaces import afni
+    >>> ncorr = afni.NetCorr()
+    >>> ncorr.inputs.in_file = 'functional.nii'
+    >>> ncorr.inputs.mask = 'mask.nii'
+    >>> ncorr.inputs.in_rois = 'maps.nii'
+    >>> ncorr.inputs.ts_wb_corr = True
+    >>> ncorr.inputs.ts_wb_Z = True
+    >>> ncorr.inputs.fish_z = True
+    >>> ncorr.inputs.out_file = 'sub0.tp1.ncorr'
+    >>> ncorr.cmdline
+    '3dNetCorr -prefix sub0.tp1.ncorr -fish_z -inset functional.nii -in_rois maps.nii -mask mask.nii -ts_wb_Z -ts_wb_corr'
+    >>> res = ncorr.run()  # doctest: +SKIP
+
+    """
+
+    _cmd = "3dNetCorr"
+    input_spec = NetCorrInputSpec
+    output_spec = NetCorrOutputSpec
+
+    def _list_outputs(self):
+        import glob
+
+        outputs = self.output_spec().get()
+
+        if not isdefined(self.inputs.out_file):
+            prefix = self._gen_fname(self.inputs.in_file, suffix="_netcorr")
         else:
-            return super(TCorrMap, self)._format_arg(name, trait_spec, value)
+            prefix = self.inputs.out_file
+
+        # All outputs should be in the same directory as the prefix
+        odir = os.path.dirname(os.path.abspath(prefix))
+        outputs["out_corr_matrix"] = glob.glob(os.path.join(odir, "*.netcc"))[0]
+
+        if isdefined(self.inputs.ts_wb_corr) or isdefined(self.inputs.ts_wb_Z):
+            corrdir = os.path.join(odir, prefix + "_000_INDIV")
+            outputs["out_corr_maps"] = glob.glob(os.path.join(corrdir, "*.nii.gz"))
+
+        return outputs
 
 
 class TCorrelateInputSpec(AFNICommandInputSpec):
     xset = File(
-        desc='input xset',
-        argstr='%s',
+        desc="input xset",
+        argstr="%s",
         position=-2,
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     yset = File(
-        desc='input yset',
-        argstr='%s',
+        desc="input yset",
+        argstr="%s",
         position=-1,
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     out_file = File(
-        name_template='%s_tcorr',
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source='xset')
+        name_template="%s_tcorr",
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source="xset",
+    )
     pearson = traits.Bool(
-        desc='Correlation is the normal Pearson correlation coefficient',
-        argstr='-pearson')
-    polort = traits.Int(
-        desc='Remove polynomical trend of order m', argstr='-polort %d')
+        desc="Correlation is the normal Pearson correlation coefficient",
+        argstr="-pearson",
+    )
+    polort = traits.Int(desc="Remove polynomial trend of order m", argstr="-polort %d")
 
 
 class TCorrelate(AFNICommand):
@@ -2405,8 +2786,7 @@ class TCorrelate(AFNICommand):
     `_
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> tcorrelate = afni.TCorrelate()
     >>> tcorrelate.inputs.xset= 'u_rc1s1_Template.nii'
@@ -2420,56 +2800,62 @@ class TCorrelate(AFNICommand):
 
     """
-    _cmd = '3dTcorrelate'
+    _cmd = "3dTcorrelate"
     input_spec = TCorrelateInputSpec
     output_spec = AFNICommandOutputSpec
 
 
 class TNormInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='input file to 3dTNorm',
-        argstr='%s',
+        desc="input file to 3dTNorm",
+        argstr="%s",
         position=-1,
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     out_file = File(
-        name_template='%s_tnorm',
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source='in_file')
+        name_template="%s_tnorm",
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source="in_file",
+    )
     norm2 = traits.Bool(
-        desc='L2 normalize (sum of squares = 1) [DEFAULT]', argstr='-norm2')
+        desc="L2 normalize (sum of squares = 1) [DEFAULT]", argstr="-norm2"
+    )
     normR = traits.Bool(
-        desc=
-        'normalize so sum of squares = number of time points * e.g., so RMS = 1.',
-        argstr='-normR')
+
desc="normalize so sum of squares = number of time points \\* e.g., so RMS = 1.", + argstr="-normR", + ) norm1 = traits.Bool( - desc='L1 normalize (sum of absolute values = 1)', argstr='-norm1') + desc="L1 normalize (sum of absolute values = 1)", argstr="-norm1" + ) normx = traits.Bool( - desc='Scale so max absolute value = 1 (L_infinity norm)', - argstr='-normx') + desc="Scale so max absolute value = 1 (L_infinity norm)", argstr="-normx" + ) polort = traits.Int( - desc="""Detrend with polynomials of order p before normalizing - [DEFAULT = don't do this] - * Use '-polort 0' to remove the mean, for example""", - argstr='-polort %s') + desc="""\ +Detrend with polynomials of order p before normalizing [DEFAULT = don't do this]. +Use '-polort 0' to remove the mean, for example""", + argstr="-polort %s", + ) L1fit = traits.Bool( - desc="""Detrend with L1 regression (L2 is the default) - * This option is here just for the hell of it""", - argstr='-L1fit') + desc="""\ +Detrend with L1 regression (L2 is the default) +This option is here just for the hell of it""", + argstr="-L1fit", + ) class TNorm(AFNICommand): - """Shifts voxel time series from input so that seperate slices are aligned + """Shifts voxel time series from input so that separate slices are aligned to the same temporal origin. For complete details, see the `3dTnorm Documentation. `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tnorm = afni.TNorm() >>> tnorm.inputs.in_file = 'functional.nii' @@ -2480,150 +2866,199 @@ class TNorm(AFNICommand): >>> res = tshift.run() # doctest: +SKIP """ - _cmd = '3dTnorm' + + _cmd = "3dTnorm" input_spec = TNormInputSpec output_spec = AFNICommandOutputSpec class TProjectInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dTproject', - argstr='-input %s', + desc="input file to 3dTproject", + argstr="-input %s", position=1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_tproject', - desc='output image file name', + name_template="%s_tproject", + desc="output image file name", position=-1, - argstr='-prefix %s', - name_source='in_file') + argstr="-prefix %s", + name_source="in_file", + ) censor = File( - desc="""filename of censor .1D time series - * This is a file of 1s and 0s, indicating which - time points are to be included (1) and which are - to be excluded (0).""", + desc="""\ +Filename of censor .1D time series. +This is a file of 1s and 0s, indicating which +time points are to be included (1) and which are +to be excluded (0).""", argstr="-censor %s", - exists=True) + exists=True, + ) censortr = traits.List( traits.Str(), - desc="""list of strings that specify time indexes - to be removed from the analysis. Each string is - of one of the following forms: - 37 => remove global time index #37 - 2:37 => remove time index #37 in run #2 - 37..47 => remove global time indexes #37-47 - 37-47 => same as above - 2:37..47 => remove time indexes #37-47 in run #2 - *:0-2 => remove time indexes #0-2 in all runs - +Time indexes within each run start at 0. - +Run indexes start at 1 (just be to confusing). - +N.B.: 2:37,47 means index #37 in run #2 and - global time index 47; it does NOT mean - index #37 in run #2 AND index #47 in run #2.""", - argstr="-CENSORTR %s") + desc="""\ +List of strings that specify time indexes +to be removed from the analysis. 
Each string is
+of one of the following forms:
+
+* ``37`` => remove global time index #37
+* ``2:37`` => remove time index #37 in run #2
+* ``37..47`` => remove global time indexes #37-47
+* ``37-47`` => same as above
+* ``2:37..47`` => remove time indexes #37-47 in run #2
+* ``*:0-2`` => remove time indexes #0-2 in all runs
+
+  * Time indexes within each run start at 0.
+  * Run indexes start at 1 (just to be confusing).
+  * N.B.: 2:37,47 means index #37 in run #2 and
+    global time index 47; it does NOT mean
+    index #37 in run #2 AND index #47 in run #2.
+
+""",
+        argstr="-CENSORTR %s",
+    )
     cenmode = traits.Enum(
-        'KILL', 'ZERO', 'NTRP',
-        desc="""specifies how censored time points are treated in
-             the output dataset:
-             + mode = ZERO ==> put zero values in their place
-                           ==> output datset is same length as input
-             + mode = KILL ==> remove those time points
-                           ==> output dataset is shorter than input
-             + mode = NTRP ==> censored values are replaced by interpolated
-                               neighboring (in time) non-censored values,
-                               BEFORE any projections, and then the
-                               analysis proceeds without actual removal
-                               of any time points -- this feature is to
-                               keep the Spanish Inquisition happy.
-             * The default mode is KILL !!!""",
-        argstr='-cenmode %s')
+        "KILL",
+        "ZERO",
+        "NTRP",
+        desc="""\
+Specifies how censored time points are treated in
+the output dataset:
+
+* mode = ZERO -- put zero values in their place;
+  output dataset is same length as input
+* mode = KILL -- remove those time points;
+  output dataset is shorter than input
+* mode = NTRP -- censored values are replaced by interpolated
+  neighboring (in time) non-censored values,
+  BEFORE any projections, and then the
+  analysis proceeds without actual removal
+  of any time points -- this feature is to
+  keep the Spanish Inquisition happy.
+* The default mode is KILL !!!
+
+""",
+        argstr="-cenmode %s",
+    )
     concat = File(
-        desc="""The catenation file, as in 3dDeconvolve, containing the
-             TR indexes of the start points for each contiguous run
-             within the input dataset (the first entry should be 0).
-             ++ Also as in 3dDeconvolve, if the input dataset is
-                automatically catenated from a collection of datasets,
-                then the run start indexes are determined directly,
-                and '-concat' is not needed (and will be ignored).
-             ++ Each run must have at least 9 time points AFTER
-                censoring, or the program will not work!
-             ++ The only use made of this input is in setting up
-                the bandpass/stopband regressors.
-             ++ '-ort' and '-dsort' regressors run through all time
-                points, as read in. If you want separate projections
-                in each run, then you must either break these ort files
-                into appropriate components, OR you must run 3dTproject
-                for each run separately, using the appropriate pieces
-                from the ort files via the '{...}' selector for the
-                1D files and the '[...]' selector for the datasets.""",
-        exists=True,
-        argstr='-concat %s')
+        desc="""\
+The catenation file, as in 3dDeconvolve, containing the
+TR indexes of the start points for each contiguous run
+within the input dataset (the first entry should be 0).
+
+* Also as in 3dDeconvolve, if the input dataset is
+  automatically catenated from a collection of datasets,
+  then the run start indexes are determined directly,
+  and '-concat' is not needed (and will be ignored).
+* Each run must have at least 9 time points AFTER
+  censoring, or the program will not work!
+* The only use made of this input is in setting up
+  the bandpass/stopband regressors.
+* '-ort' and '-dsort' regressors run through all time + points, as read in. If you want separate projections + in each run, then you must either break these ort files + into appropriate components, OR you must run 3dTproject + for each run separately, using the appropriate pieces + from the ort files via the ``{...}`` selector for the + 1D files and the ``[...]`` selector for the datasets. + +""", + exists=True, + argstr="-concat %s", + ) noblock = traits.Bool( - desc="""Also as in 3dDeconvolve, if you want the program to treat - an auto-catenated dataset as one long run, use this option. - ++ However, '-noblock' will not affect catenation if you use - the '-concat' option.""", - argstr='-noblock') + desc="""\ +Also as in 3dDeconvolve, if you want the program to treat +an auto-catenated dataset as one long run, use this option. +However, '-noblock' will not affect catenation if you use +the '-concat' option.""", + argstr="-noblock", + ) ort = File( - desc="""Remove each column in file - ++ Each column will have its mean removed.""", + desc="""\ +Remove each column in file. +Each column will have its mean removed.""", exists=True, - argstr="-ort %s") + argstr="-ort %s", + ) polort = traits.Int( - desc="""Remove polynomials up to and including degree pp. - ++ Default value is 2. - ++ It makes no sense to use a value of pp greater than - 2, if you are bandpassing out the lower frequencies! - ++ For catenated datasets, each run gets a separate set - set of pp+1 Legendre polynomial regressors. - ++ Use of -polort -1 is not advised (if data mean != 0), - even if -ort contains constant terms, as all means are - removed.""", - argstr="-polort %d") + desc="""\ +Remove polynomials up to and including degree pp. + +* Default value is 2. +* It makes no sense to use a value of pp greater than + 2, if you are bandpassing out the lower frequencies! +* For catenated datasets, each run gets a separate set + set of pp+1 Legendre polynomial regressors. +* Use of -polort -1 is not advised (if data mean != 0), + even if -ort contains constant terms, as all means are + removed. + +""", + argstr="-polort %d", + ) dsort = InputMultiObject( - File( - exists=True, - copyfile=False), + File(exists=True, copyfile=False), argstr="-dsort %s...", - desc="""Remove the 3D+time time series in dataset fset. - ++ That is, 'fset' contains a different nuisance time - series for each voxel (e.g., from AnatICOR). - ++ Multiple -dsort options are allowed.""") - bandpass = traits.Tuple( - traits.Float, traits.Float, + desc="""\ +Remove the 3D+time time series in dataset fset. + +* That is, 'fset' contains a different nuisance time + series for each voxel (e.g., from AnatICOR). +* Multiple -dsort options are allowed. + +""", + ) + bandpass = Tuple( + traits.Float, + traits.Float, desc="""Remove all frequencies EXCEPT those in the range""", - argstr='-bandpass %g %g') - stopband = traits.Tuple( - traits.Float, traits.Float, + argstr="-bandpass %g %g", + ) + stopband = Tuple( + traits.Float, + traits.Float, desc="""Remove all frequencies in the range""", - argstr='-stopband %g %g') + argstr="-stopband %g %g", + ) TR = traits.Float( - desc="""Use time step dd for the frequency calculations, - rather than the value stored in the dataset header.""", - argstr='-TR %g') + desc="""\ +Use time step dd for the frequency calculations, +rather than the value stored in the dataset header.""", + argstr="-TR %g", + ) mask = File( exists=True, - desc="""Only operate on voxels nonzero in the mset dataset. 
- ++ Voxels outside the mask will be filled with zeros. - ++ If no masking option is given, then all voxels - will be processed.""", - argstr='-mask %s') + desc="""\ +Only operate on voxels nonzero in the mset dataset. + +* Voxels outside the mask will be filled with zeros. +* If no masking option is given, then all voxels + will be processed. + +""", + argstr="-mask %s", + ) automask = traits.Bool( - desc="""Generate a mask automatically""", - xor=['mask'], - argstr='-automask') + desc="""Generate a mask automatically""", xor=["mask"], argstr="-automask" + ) blur = traits.Float( - desc="""Blur (inside the mask only) with a filter that has - width (FWHM) of fff millimeters. - ++ Spatial blurring (if done) is after the time - series filtering.""", - argstr='-blur %g') + desc="""\ +Blur (inside the mask only) with a filter that has +width (FWHM) of fff millimeters. +Spatial blurring (if done) is after the time +series filtering.""", + argstr="-blur %g", + ) norm = traits.Bool( - desc="""Normalize each output time series to have sum of - squares = 1. This is the LAST operation.""", - argstr='-norm') + desc=""" +Normalize each output time series to have sum of +squares = 1. This is the LAST operation.""", + argstr="-norm", + ) class TProject(AFNICommand): @@ -2631,18 +3066,14 @@ class TProject(AFNICommand): This program projects (detrends) out various 'nuisance' time series from each voxel in the input dataset. Note that all the projections are done via linear regression, including the frequency-based options such - as '-passband'. In this way, you can bandpass time-censored data, and at + as ``-passband``. In this way, you can bandpass time-censored data, and at the same time, remove other time series of no interest (e.g., physiological estimates, motion parameters). - Shifts voxel time series from input so that seperate slices are aligned to + Shifts voxel time series from input so that separate slices are aligned to the same temporal origin. - For complete details, see the `3dTproject Documentation. - `_ - Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tproject = afni.TProject() >>> tproject.inputs.in_file = 'functional.nii' @@ -2654,77 +3085,98 @@ class TProject(AFNICommand): '3dTproject -input functional.nii -automask -bandpass 0.00667 99999 -polort 3 -prefix projected.nii.gz' >>> res = tproject.run() # doctest: +SKIP + See Also + -------- + For complete details, see the `3dTproject Documentation. + `__ + """ - _cmd = '3dTproject' + + _cmd = "3dTproject" input_spec = TProjectInputSpec output_spec = AFNICommandOutputSpec - class TShiftInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dTshift', - argstr='%s', + desc="input file to 3dTshift", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_tshift', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_tshift", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) tr = Str( desc='manually set the TR. 
You can attach suffix "s" for seconds ' 'or "ms" for milliseconds.', - argstr='-TR %s') + argstr="-TR %s", + ) tzero = traits.Float( - desc='align each slice to given time offset', - argstr='-tzero %s', - xor=['tslice']) + desc="align each slice to given time offset", argstr="-tzero %s", xor=["tslice"] + ) tslice = traits.Int( - desc='align each slice to time offset of given slice', - argstr='-slice %s', - xor=['tzero']) + desc="align each slice to time offset of given slice", + argstr="-slice %s", + xor=["tzero"], + ) ignore = traits.Int( - desc='ignore the first set of points specified', argstr='-ignore %s') + desc="ignore the first set of points specified", argstr="-ignore %s" + ) interp = traits.Enum( - ('Fourier', 'linear', 'cubic', 'quintic', 'heptic'), - desc='different interpolation methods (see 3dTshift for details) ' - 'default = Fourier', - argstr='-%s') + ("Fourier", "linear", "cubic", "quintic", "heptic"), + desc="different interpolation methods (see 3dTshift for details) " + "default = Fourier", + argstr="-%s", + ) tpattern = traits.Either( - traits.Enum('alt+z', 'altplus', # Synonyms - 'alt+z2', - 'alt-z', 'altminus', # Synonyms - 'alt-z2', - 'seq+z', 'seqplus', # Synonyms - 'seq-z', 'seqminus'), # Synonyms + traits.Enum( + "alt+z", + "altplus", # Synonyms + "alt+z2", + "alt-z", + "altminus", # Synonyms + "alt-z2", + "seq+z", + "seqplus", # Synonyms + "seq-z", + "seqminus", + ), # Synonyms Str, # For backwards compatibility - desc='use specified slice time pattern rather than one in header', - argstr='-tpattern %s', - xor=['slice_timing']) + desc="use specified slice time pattern rather than one in header", + argstr="-tpattern %s", + xor=["slice_timing"], + ) slice_timing = traits.Either( File(exists=True), traits.List(traits.Float), - desc='time offsets from the volume acquisition onset for each slice', - argstr='-tpattern @%s', - xor=['tpattern']) + desc="time offsets from the volume acquisition onset for each slice", + argstr="-tpattern @%s", + xor=["tpattern"], + ) slice_encoding_direction = traits.Enum( - 'k', 'k-', + "k", + "k-", usedefault=True, - desc='Direction in which slice_timing is specified (default: k). If negative,' - 'slice_timing is defined in reverse order, that is, the first entry ' - 'corresponds to the slice with the largest index, and the final entry ' - 'corresponds to slice index zero. Only in effect when slice_timing is ' - 'passed as list, not when it is passed as file.',) + desc="Direction in which slice_timing is specified (default: k). If negative," + "slice_timing is defined in reverse order, that is, the first entry " + "corresponds to the slice with the largest index, and the final entry " + "corresponds to slice index zero. 
Only in effect when slice_timing is " + "passed as list, not when it is passed as file.", + ) rlt = traits.Bool( - desc='Before shifting, remove the mean and linear trend', - argstr='-rlt') + desc="Before shifting, remove the mean and linear trend", argstr="-rlt" + ) rltplus = traits.Bool( - desc='Before shifting, remove the mean and linear trend and later put ' - 'back the mean', - argstr='-rlt+') + desc="Before shifting, remove the mean and linear trend and later put " + "back the mean", + argstr="-rlt+", + ) class TShiftOutputSpec(AFNICommandOutputSpec): @@ -2732,15 +3184,14 @@ class TShiftOutputSpec(AFNICommandOutputSpec): class TShift(AFNICommand): - """Shifts voxel time series from input so that seperate slices are aligned + """Shifts voxel time series from input so that separate slices are aligned to the same temporal origin. For complete details, see the `3dTshift Documentation. `_ Examples - ======== - + -------- Slice timing details may be specified explicitly via the ``slice_timing`` input: @@ -2821,103 +3272,194 @@ class TShift(AFNICommand): >>> res = tshift.run() # doctest: +SKIP """ - _cmd = '3dTshift' + + _cmd = "3dTshift" input_spec = TShiftInputSpec output_spec = TShiftOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'tpattern' and value.startswith('@'): - iflogger.warning('Passing a file prefixed by "@" will be deprecated' - '; please use the `slice_timing` input') - elif name == 'slice_timing' and isinstance(value, list): + if name == "tpattern" and value.startswith("@"): + iflogger.warning( + 'Passing a file prefixed by "@" will be deprecated' + "; please use the `slice_timing` input" + ) + elif name == "slice_timing" and isinstance(value, list): value = self._write_slice_timing() - return super(TShift, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _write_slice_timing(self): slice_timing = list(self.inputs.slice_timing) if self.inputs.slice_encoding_direction.endswith("-"): slice_timing.reverse() - fname = 'slice_timing.1D' - with open(fname, 'w') as fobj: - fobj.write('\t'.join(map(str, slice_timing))) + fname = "slice_timing.1D" + with open(fname, "w") as fobj: + fobj.write("\t".join(map(str, slice_timing))) return fname def _list_outputs(self): - outputs = super(TShift, self)._list_outputs() + outputs = super()._list_outputs() if isdefined(self.inputs.slice_timing): if isinstance(self.inputs.slice_timing, list): - outputs['timing_file'] = os.path.abspath('slice_timing.1D') + outputs["timing_file"] = os.path.abspath("slice_timing.1D") else: - outputs['timing_file'] = os.path.abspath(self.inputs.slice_timing) + outputs["timing_file"] = os.path.abspath(self.inputs.slice_timing) return outputs +class TSmoothInputSpec(AFNICommandInputSpec): + in_file = File( + desc="input file to 3dTSmooth", + argstr="%s", + position=-1, + mandatory=True, + exists=True, + copyfile=False, + ) + out_file = File( + name_template="%s_smooth", + desc="output file from 3dTSmooth", + argstr="-prefix %s", + name_source="in_file", + ) + datum = traits.Str( + desc="Sets the data type of the output dataset", argstr="-datum %s" + ) + lin = traits.Bool( + desc=r"3 point linear filter: :math:`0.15\,a + 0.70\,b + 0.15\,c`" + " [This is the default smoother]", + argstr="-lin", + ) + med = traits.Bool(desc="3 point median filter: median(a,b,c)", argstr="-med") + osf = traits.Bool( + desc="3 point order statistics filter:" + r":math:`0.15\,min(a,b,c) + 0.70\,median(a,b,c) + 0.15\,max(a,b,c)`", + argstr="-osf", + ) + lin3 = 
traits.Int( + desc=r"3 point linear filter: :math:`0.5\,(1-m)\,a + m\,b + 0.5\,(1-m)\,c`. " + "Here, 'm' is a number strictly between 0 and 1.", + argstr="-3lin %d", + ) + hamming = traits.Int( + argstr="-hamming %d", + desc="Use N point Hamming windows. (N must be odd and bigger than 1.)", + ) + blackman = traits.Int( + argstr="-blackman %d", + desc="Use N point Blackman windows. (N must be odd and bigger than 1.)", + ) + custom = File( + argstr="-custom %s", + desc="odd # of coefficients must be in a single column in ASCII file", + ) + adaptive = traits.Int( + argstr="-adaptive %d", + desc="use adaptive mean filtering of width N " + "(where N must be odd and bigger than 3).", + ) + + +class TSmooth(AFNICommand): + """Smooths each voxel time series in a 3D+time dataset and produces + as output a new 3D+time dataset (e.g., lowpass filter in time). + + For complete details, see the `3dTsmooth Documentation. + `_ + + Examples + -------- + >>> from nipype.interfaces import afni + >>> from nipype.testing import example_data + >>> smooth = afni.TSmooth() + >>> smooth.inputs.in_file = 'functional.nii' + >>> smooth.inputs.adaptive = 5 + >>> smooth.cmdline + '3dTsmooth -adaptive 5 -prefix functional_smooth functional.nii' + >>> res = smooth.run() # doctest: +SKIP + + """ + + _cmd = "3dTsmooth" + input_spec = TSmoothInputSpec + output_spec = AFNICommandOutputSpec + + class VolregInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dvolreg', - argstr='%s', + desc="input file to 3dvolreg", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) in_weight_volume = traits.Either( - traits.Tuple(File(exists=True), traits.Int), + Tuple(File(exists=True), traits.Int), File(exists=True), - desc='weights for each voxel specified by a file with an ' - 'optional volume number (defaults to 0)', - argstr="-weight '%s[%d]'") + desc="weights for each voxel specified by a file with an " + "optional volume number (defaults to 0)", + argstr="-weight '%s[%d]'", + ) out_file = File( - name_template='%s_volreg', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_volreg", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) basefile = File( - desc='base file for registration', - argstr='-base %s', - position=-6, - exists=True) + desc="base file for registration", argstr="-base %s", position=-6, exists=True + ) zpad = traits.Int( - desc='Zeropad around the edges by \'n\' voxels during rotations', - argstr='-zpad %d', - position=-5) + desc="Zeropad around the edges by 'n' voxels during rotations", + argstr="-zpad %d", + position=-5, + ) md1d_file = File( - name_template='%s_md.1D', - desc='max displacement output file', - argstr='-maxdisp1D %s', - name_source='in_file', + name_template="%s_md.1D", + desc="max displacement output file", + argstr="-maxdisp1D %s", + name_source="in_file", keep_extension=True, - position=-4) + position=-4, + ) oned_file = File( - name_template='%s.1D', - desc='1D movement parameters output file', - argstr='-1Dfile %s', - name_source='in_file', - keep_extension=True) + name_template="%s.1D", + desc="1D movement parameters output file", + argstr="-1Dfile %s", + name_source="in_file", + keep_extension=True, + ) verbose = traits.Bool( - desc='more detailed description of the process', argstr='-verbose') + desc="more detailed description of the process", argstr="-verbose" + ) timeshift = traits.Bool( - desc='time shift to mean slice time offset', 
 
 
 class VolregInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='input file to 3dvolreg',
-        argstr='%s',
-        position=-1,
-        mandatory=True,
-        exists=True,
-        copyfile=False)
+        desc="input file to 3dvolreg",
+        argstr="%s",
+        position=-1,
+        mandatory=True,
+        exists=True,
+        copyfile=False,
+    )
     in_weight_volume = traits.Either(
-        traits.Tuple(File(exists=True), traits.Int),
+        Tuple(File(exists=True), traits.Int),
         File(exists=True),
-        desc='weights for each voxel specified by a file with an '
-        'optional volume number (defaults to 0)',
-        argstr="-weight '%s[%d]'")
+        desc="weights for each voxel specified by a file with an "
+        "optional volume number (defaults to 0)",
+        argstr="-weight '%s[%d]'",
+    )
     out_file = File(
-        name_template='%s_volreg',
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source='in_file')
+        name_template="%s_volreg",
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source="in_file",
+    )
     basefile = File(
-        desc='base file for registration',
-        argstr='-base %s',
-        position=-6,
-        exists=True)
+        desc="base file for registration", argstr="-base %s", position=-6, exists=True
+    )
     zpad = traits.Int(
-        desc='Zeropad around the edges by \'n\' voxels during rotations',
-        argstr='-zpad %d',
-        position=-5)
+        desc="Zeropad around the edges by 'n' voxels during rotations",
+        argstr="-zpad %d",
+        position=-5,
+    )
     md1d_file = File(
-        name_template='%s_md.1D',
-        desc='max displacement output file',
-        argstr='-maxdisp1D %s',
-        name_source='in_file',
+        name_template="%s_md.1D",
+        desc="max displacement output file",
+        argstr="-maxdisp1D %s",
+        name_source="in_file",
         keep_extension=True,
-        position=-4)
+        position=-4,
+    )
     oned_file = File(
-        name_template='%s.1D',
-        desc='1D movement parameters output file',
-        argstr='-1Dfile %s',
-        name_source='in_file',
-        keep_extension=True)
+        name_template="%s.1D",
+        desc="1D movement parameters output file",
+        argstr="-1Dfile %s",
+        name_source="in_file",
+        keep_extension=True,
+    )
     verbose = traits.Bool(
-        desc='more detailed description of the process', argstr='-verbose')
+        desc="more detailed description of the process", argstr="-verbose"
+    )
     timeshift = traits.Bool(
-        desc='time shift to mean slice time offset',
-        argstr='-tshift 0')
+        desc="time shift to mean slice time offset", argstr="-tshift 0"
+    )
     copyorigin = traits.Bool(
-        desc='copy base file origin coords to output', argstr='-twodup')
+        desc="copy base file origin coords to output", argstr="-twodup"
+    )
     oned_matrix_save = File(
-        name_template='%s.aff12.1D',
-        desc='Save the matrix transformation',
-        argstr='-1Dmatrix_save %s',
+        name_template="%s.aff12.1D",
+        desc="Save the matrix transformation",
+        argstr="-1Dmatrix_save %s",
         keep_extension=True,
-        name_source='in_file')
+        name_source="in_file",
+    )
     interp = traits.Enum(
-        ('Fourier', 'cubic', 'heptic', 'quintic', 'linear'),
-        desc='spatial interpolation methods [default = heptic]',
-        argstr='-%s')
+        ("Fourier", "cubic", "heptic", "quintic", "linear"),
+        desc="spatial interpolation methods [default = heptic]",
+        argstr="-%s",
+    )
 
 
 class VolregOutputSpec(TraitedSpec):
-    out_file = File(desc='registered file', exists=True)
-    md1d_file = File(desc='max displacement info file', exists=True)
-    oned_file = File(desc='movement parameters info file', exists=True)
+    out_file = File(desc="registered file", exists=True)
+    md1d_file = File(desc="max displacement info file", exists=True)
+    oned_file = File(desc="movement parameters info file", exists=True)
     oned_matrix_save = File(
-        desc='matrix transformation from base to input', exists=True)
+        desc="matrix transformation from base to input", exists=True
+    )
 
 
 class Volreg(AFNICommand):
@@ -2927,8 +3469,7 @@ class Volreg(AFNICommand):
     <https://afni.nimh.nih.gov/pub/dist/doc/program_help/3dvolreg.html>`_
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> volreg = afni.Volreg()
     >>> volreg.inputs.in_file = 'functional.nii'
@@ -2936,7 +3477,8 @@ class Volreg(AFNICommand):
     >>> volreg.inputs.zpad = 4
     >>> volreg.inputs.outputtype = 'NIFTI'
     >>> volreg.cmdline  # doctest: +ELLIPSIS
-    '3dvolreg -Fourier -twopass -1Dfile functional.1D -1Dmatrix_save functional.aff12.1D -prefix functional_volreg.nii -zpad 4 -maxdisp1D functional_md.1D functional.nii'
+    '3dvolreg -Fourier -twopass -1Dfile functional.1D -1Dmatrix_save functional.aff12.1D -prefix \
+functional_volreg.nii -zpad 4 -maxdisp1D functional_md.1D functional.nii'
     >>> res = volreg.run()  # doctest: +SKIP
 
     >>> from nipype.interfaces import afni
@@ -2950,83 +3492,86 @@ class Volreg(AFNICommand):
     >>> volreg.inputs.oned_file = 'dfile.r1.1D'
     >>> volreg.inputs.oned_matrix_save = 'mat.r1.tshift+orig.1D'
     >>> volreg.cmdline
-    '3dvolreg -cubic -1Dfile dfile.r1.1D -1Dmatrix_save mat.r1.tshift+orig.1D -prefix rm.epi.volreg.r1 -verbose -base functional.nii -zpad 1 -maxdisp1D functional_md.1D functional.nii'
+    '3dvolreg -cubic -1Dfile dfile.r1.1D -1Dmatrix_save mat.r1.tshift+orig.1D -prefix \
+rm.epi.volreg.r1 -verbose -base functional.nii -zpad 1 -maxdisp1D functional_md.1D functional.nii'
     >>> res = volreg.run()  # doctest: +SKIP
 
     """
 
-    _cmd = '3dvolreg'
+    _cmd = "3dvolreg"
     input_spec = VolregInputSpec
     output_spec = VolregOutputSpec
 
     def _format_arg(self, name, trait_spec, value):
-        if name == 'in_weight_volume' and not isinstance(value, tuple):
+        if name == "in_weight_volume" and not isinstance(value, tuple):
             value = (value, 0)
-        return super(Volreg, self)._format_arg(name, trait_spec, value)
+        return super()._format_arg(name, trait_spec, value)
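
# --- Illustration (editor's sketch, not part of the patch) ------------------
# The in_weight_volume handling shown in Volreg._format_arg above: a bare
# filename is padded with volume index 0 before formatting, so both of the
# following assignments render as -weight 'weights.nii[0]'. Filenames are
# placeholders that must exist (File(exists=True) is validated on assignment).
from nipype.interfaces import afni

volreg = afni.Volreg()
volreg.inputs.in_file = "functional.nii"
volreg.inputs.in_weight_volume = "weights.nii"  # same as ("weights.nii", 0)
print(volreg.cmdline)  # doctest: +SKIP
# ... -weight 'weights.nii[0]' ... per the argstr "-weight '%s[%d]'"
# -----------------------------------------------------------------------------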
 
 
 class WarpInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='input file to 3dWarp',
-        argstr='%s',
-        position=-1,
-        mandatory=True,
-        exists=True,
-        copyfile=False)
+        desc="input file to 3dWarp",
+        argstr="%s",
+        position=-1,
+        mandatory=True,
+        exists=True,
+        copyfile=False,
+    )
     out_file = File(
-        name_template='%s_warp',
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source='in_file',
-        keep_extension=True)
+        name_template="%s_warp",
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source="in_file",
+        keep_extension=True,
+    )
     tta2mni = traits.Bool(
-        desc='transform dataset from Talairach to MNI152', argstr='-tta2mni')
+        desc="transform dataset from Talairach to MNI152", argstr="-tta2mni"
+    )
     mni2tta = traits.Bool(
-        desc='transform dataset from MNI152 to Talaraich', argstr='-mni2tta')
+        desc="transform dataset from MNI152 to Talairach", argstr="-mni2tta"
+    )
     matparent = File(
-        desc='apply transformation from 3dWarpDrive',
-        argstr='-matparent %s',
-        exists=True)
+        desc="apply transformation from 3dWarpDrive",
+        argstr="-matparent %s",
+        exists=True,
+    )
     oblique_parent = File(
-        desc='Read in the oblique transformation matrix from an oblique '
-        'dataset and make cardinal dataset oblique to match',
-        argstr='-oblique_parent %s',
-        exists=True)
+        desc="Read in the oblique transformation matrix from an oblique "
+        "dataset and make cardinal dataset oblique to match",
+        argstr="-oblique_parent %s",
+        exists=True,
+    )
     deoblique = traits.Bool(
-        desc='transform dataset from oblique to cardinal', argstr='-deoblique')
+        desc="transform dataset from oblique to cardinal", argstr="-deoblique"
+    )
     interp = traits.Enum(
-        ('linear', 'cubic', 'NN', 'quintic'),
-        desc='spatial interpolation methods [default = linear]',
-        argstr='-%s')
+        ("linear", "cubic", "NN", "quintic"),
+        desc="spatial interpolation methods [default = linear]",
+        argstr="-%s",
+    )
     gridset = File(
-        desc='copy grid of specified dataset',
-        argstr='-gridset %s',
-        exists=True)
-    newgrid = traits.Float(
-        desc='specify grid of this size (mm)', argstr='-newgrid %f')
+        desc="copy grid of specified dataset", argstr="-gridset %s", exists=True
+    )
+    newgrid = traits.Float(desc="specify grid of this size (mm)", argstr="-newgrid %f")
     zpad = traits.Int(
-        desc='pad input dataset with N planes of zero on all sides.',
-        argstr='-zpad %d')
+        desc="pad input dataset with N planes of zero on all sides.", argstr="-zpad %d"
+    )
     verbose = traits.Bool(
-        desc='Print out some information along the way.', argstr='-verb')
-    save_warp = traits.Bool(
-        desc='save warp as .mat file', requires=['verbose'])
+        desc="Print out some information along the way.", argstr="-verb"
+    )
+    save_warp = traits.Bool(desc="save warp as .mat file", requires=["verbose"])
 
 
 class WarpOutputSpec(TraitedSpec):
-    out_file = File(desc='Warped file.', exists=True)
-    warp_file = File(desc='warp transform .mat file')
+    out_file = File(desc="Warped file.", exists=True)
+    warp_file = File(desc="warp transform .mat file")
 
 
 class Warp(AFNICommand):
-    """Use 3dWarp for spatially transforming a dataset
-
-    For complete details, see the `3dWarp Documentation.
-    <https://afni.nimh.nih.gov/pub/dist/doc/program_help/3dWarp.html>`_
+    """Use 3dWarp for spatially transforming a dataset.
 
     Examples
-    ========
-
+    --------
     >>> from nipype.interfaces import afni
     >>> warp = afni.Warp()
     >>> warp.inputs.in_file = 'structural.nii'
@@ -3044,572 +3589,685 @@ class Warp(AFNICommand):
     '3dWarp -newgrid 1.000000 -prefix trans.nii.gz structural.nii'
     >>> res = warp_2.run()  # doctest: +SKIP
 
+    See Also
+    --------
+    For complete details, see the `3dWarp Documentation.
+    <https://afni.nimh.nih.gov/pub/dist/doc/program_help/3dWarp.html>`__.
+
     """
-    _cmd = '3dWarp'
+
+    _cmd = "3dWarp"
     input_spec = WarpInputSpec
     output_spec = WarpOutputSpec
 
-    def _run_interface(self, runtime):
-        runtime = super(Warp, self)._run_interface(runtime)
+    def _run_interface(self, runtime, correct_return_codes=(0,)):
+        runtime = super()._run_interface(runtime, correct_return_codes)
         if self.inputs.save_warp:
             import numpy as np
-            warp_file = self._list_outputs()['warp_file']
-            np.savetxt(warp_file, [runtime.stdout], fmt=str('%s'))
+
+            warp_file = self._list_outputs()["warp_file"]
+            np.savetxt(warp_file, [runtime.stdout], fmt="%s")
         return runtime
 
     def _list_outputs(self):
-        outputs = super(Warp, self)._list_outputs()
+        outputs = super()._list_outputs()
         if self.inputs.save_warp:
-            outputs['warp_file'] = fname_presuffix(outputs['out_file'],
-                                                   suffix='_transform.mat',
-                                                   use_ext=False)
+            outputs["warp_file"] = fname_presuffix(
+                outputs["out_file"], suffix="_transform.mat", use_ext=False
+            )
+
         return outputs
 
 
 class QwarpInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc=
-        'Source image (opposite phase encoding direction than base image).',
-        argstr='-source %s',
-        mandatory=True,
-        exists=True,
-        copyfile=False)
+        desc="Source image (opposite phase encoding direction than base image).",
+        argstr="-source %s",
+        mandatory=True,
+        exists=True,
+        copyfile=False,
+    )
     base_file = File(
-        desc=
-        'Base image (opposite phase encoding direction than source image).',
-        argstr='-base %s',
-        mandatory=True,
-        exists=True,
-        copyfile=False)
+        desc="Base image (opposite phase encoding direction than source image).",
+        argstr="-base %s",
+        mandatory=True,
+        exists=True,
+        copyfile=False,
+    )
     out_file = File(
-        argstr='-prefix %s',
-        name_template='%s_QW',
-        name_source=['in_file'],
-        genfile=True,
-        desc='out_file ppp'
-        'Sets the prefix for the output datasets.'
-        '* The source dataset is warped to match the base'
-        'and gets prefix \'ppp\'. (Except if \'-plusminus\' is used.)'
-        '* The final interpolation to this output dataset is'
-        'done using the \'wsinc5\' method. See the output of'
-        ' 3dAllineate -HELP'
-        '(in the "Modifying \'-final wsinc5\'" section) for'
-        'the lengthy technical details.'
-        '* The 3D warp used is saved in a dataset with'
-        'prefix \'ppp_WARP\' -- this dataset can be used'
-        'with 3dNwarpApply and 3dNwarpCat, for example.'
-        '* To be clear, this is the warp from source dataset'
-        ' coordinates to base dataset coordinates, where the'
-        ' values at each base grid point are the xyz displacments'
-        ' needed to move that grid point\'s xyz values to the'
-        ' corresponding xyz values in the source dataset:'
-        ' base( (x,y,z) + WARP(x,y,z) ) matches source(x,y,z)'
-        ' Another way to think of this warp is that it \'pulls\''
-        ' values back from source space to base space.'
-        '* 3dNwarpApply would use \'ppp_WARP\' to transform datasets'
-        'aligned with the source dataset to be aligned with the'
-        'base dataset.'
-        '** If you do NOT want this warp saved, use the option \'-nowarp\'.'
-        '-->> (However, this warp is usually the most valuable possible output!)'
-        '* If you want to calculate and save the inverse 3D warp,'
-        'use the option \'-iwarp\'. This inverse warp will then be'
-        'saved in a dataset with prefix \'ppp_WARPINV\'.'
-        '* This inverse warp could be used to transform data from base'
-        'space to source space, if you need to do such an operation.'
-        '* You can easily compute the inverse later, say by a command like'
-        ' 3dNwarpCat -prefix Z_WARPINV \'INV(Z_WARP+tlrc)\''
-        'or the inverse can be computed as needed in 3dNwarpApply, like'
-        ' 3dNwarpApply -nwarp \'INV(Z_WARP+tlrc)\' -source Dataset.nii ...')
+        argstr="-prefix %s",
+        name_template="ppp_%s",
+        name_source=["in_file"],
+        desc="""\
+Sets the prefix/suffix for the output datasets.
+
+* The source dataset is warped to match the base
+  and gets prefix 'ppp'. (Except if '-plusminus' is used.)
+* The final interpolation to this output dataset is
+  done using the 'wsinc5' method. See the output of
+  3dAllineate -HELP
+  (in the "Modifying '-final wsinc5'" section) for
+  the lengthy technical details.
+* The 3D warp used is saved in a dataset with
+  prefix 'ppp_WARP' -- this dataset can be used
+  with 3dNwarpApply and 3dNwarpCat, for example.
+* To be clear, this is the warp from source dataset
+  coordinates to base dataset coordinates, where the
+  values at each base grid point are the xyz displacements
+  needed to move that grid point's xyz values to the
+  corresponding xyz values in the source dataset:
+  base( (x,y,z) + WARP(x,y,z) ) matches source(x,y,z)
+  Another way to think of this warp is that it 'pulls'
+  values back from source space to base space.
+* 3dNwarpApply would use 'ppp_WARP' to transform datasets
+  aligned with the source dataset to be aligned with the
+  base dataset.
+
+**If you do NOT want this warp saved, use the option '-nowarp'**.
+(However, this warp is usually the most valuable possible output!)
+
+* If you want to calculate and save the inverse 3D warp,
+  use the option '-iwarp'. This inverse warp will then be
+  saved in a dataset with prefix 'ppp_WARPINV'.
+* This inverse warp could be used to transform data from base
+  space to source space, if you need to do such an operation.
+* You can easily compute the inverse later, say by a command like
+  3dNwarpCat -prefix Z_WARPINV 'INV(Z_WARP+tlrc)'
+  or the inverse can be computed as needed in 3dNwarpApply, like
+  3dNwarpApply -nwarp 'INV(Z_WARP+tlrc)' -source Dataset.nii ...
+
+""",
+    )
     resample = traits.Bool(
-        desc='This option simply resamples the source dataset to match the'
-        'base dataset grid. You can use this if the two datasets'
-        'overlap well (as seen in the AFNI GUI), but are not on the'
-        'same 3D grid.'
-        '* If they don\'t overlap well, allineate them first'
-        '* The reampling here is done with the'
-        '\'wsinc5\' method, which has very little blurring artifact.'
-        '* If the base and source datasets ARE on the same 3D grid,'
-        'then the -resample option will be ignored.'
-        '* You CAN use -resample with these 3dQwarp options:'
-        '-plusminus -inilev -iniwarp -duplo',
-        argstr='-resample')
+        desc="""\
+This option simply resamples the source dataset to match the
+base dataset grid. You can use this if the two datasets
+overlap well (as seen in the AFNI GUI), but are not on the
+same 3D grid.
+
+* If they don't overlap well, allineate them first
+* The resampling here is done with the
+  'wsinc5' method, which has very little blurring artifact.
+* If the base and source datasets ARE on the same 3D grid,
+  then the -resample option will be ignored.
+* You CAN use -resample with these 3dQwarp options:
+  -plusminus -inilev -iniwarp -duplo

+""",
+        argstr="-resample",
+    )
     allineate = traits.Bool(
-        desc='This option will make 3dQwarp run 3dAllineate first, to align '
-        'the source dataset to the base with an affine transformation. '
-        'It will then use that alignment as a starting point for the '
-        'nonlinear warping.',
-        argstr='-allineate')
+        desc="This option will make 3dQwarp run 3dAllineate first, to align "
+        "the source dataset to the base with an affine transformation. "
+        "It will then use that alignment as a starting point for the "
+        "nonlinear warping.",
+        argstr="-allineate",
+    )
     allineate_opts = traits.Str(
-        desc='add extra options to the 3dAllineate command to be run by '
-        '3dQwarp.',
-        argstr='-allineate_opts %s',
-        requires=['allineate'])
-    nowarp = traits.Bool(desc='Do not save the _WARP file.', argstr='-nowarp')
+        desc="add extra options to the 3dAllineate command to be run by 3dQwarp.",
+        argstr="-allineate_opts %s",
+        requires=["allineate"],
+    )
+    nowarp = traits.Bool(desc="Do not save the _WARP file.", argstr="-nowarp")
     iwarp = traits.Bool(
-        desc='Do compute and save the _WARPINV file.',
-        argstr='-iwarp',
-        xor=['plusminus'])
+        desc="Do compute and save the _WARPINV file.",
+        argstr="-iwarp",
+        xor=["plusminus"],
+    )
     pear = traits.Bool(
-        desc='Use strict Pearson correlation for matching.'
-        '* Not usually recommended, since the \'clipped Pearson\' method'
-        'used by default will reduce the impact of outlier values.',
-        argstr='-pear')
+        desc="Use strict Pearson correlation for matching. "
+        "Not usually recommended, since the 'clipped Pearson' method "
+        "used by default will reduce the impact of outlier values.",
+        argstr="-pear",
+    )
     noneg = traits.Bool(
-        desc='Replace negative values in either input volume with 0.'
-        '* If there ARE negative input values, and you do NOT use -noneg,'
-        'then strict Pearson correlation will be used, since the \'clipped\''
-        'method only is implemented for non-negative volumes.'
-        '* \'-noneg\' is not the default, since there might be situations where'
-        'you want to align datasets with positive and negative values mixed.'
-        '* But, in many cases, the negative values in a dataset are just the'
-        'result of interpolation artifacts (or other peculiarities), and so'
-        'they should be ignored. That is what \'-noneg\' is for.',
-        argstr='-noneg')
+        desc="""\
+Replace negative values in either input volume with 0.
+
+* If there ARE negative input values, and you do NOT use -noneg,
+  then strict Pearson correlation will be used, since the 'clipped'
+  method only is implemented for non-negative volumes.
+* '-noneg' is not the default, since there might be situations where
+  you want to align datasets with positive and negative values mixed.
+* But, in many cases, the negative values in a dataset are just the
+  result of interpolation artifacts (or other peculiarities), and so
+  they should be ignored. That is what '-noneg' is for.
+
+""",
+        argstr="-noneg",
+    )
     nopenalty = traits.Bool(
-        desc='Replace negative values in either input volume with 0.'
-        '* If there ARE negative input values, and you do NOT use -noneg,'
-        'then strict Pearson correlation will be used, since the \'clipped\''
-        'method only is implemented for non-negative volumes.'
-        '* \'-noneg\' is not the default, since there might be situations where'
-        'you want to align datasets with positive and negative values mixed.'
-        '* But, in many cases, the negative values in a dataset are just the'
-        'result of interpolation artifacts (or other peculiarities), and so'
-        'they should be ignored. That is what \'-noneg\' is for.',
-        argstr='-nopenalty')
+        desc="Don't use a penalty on the cost functional; the goodness of fit "
+        "will be the only determinant of the warp "
+        "('-nopenalty' is the same as '-penfac 0').",
+        argstr="-nopenalty",
+    )
     penfac = traits.Float(
-        desc='Use this value to weight the penalty.'
-        'The default value is 1.Larger values mean the'
-        'penalty counts more, reducing grid distortions,'
-        'insha\'Allah; \'-nopenalty\' is the same as \'-penfac 0\'.'
-        ' -->>* [23 Sep 2013] -- Zhark increased the default value of'
-        ' the penalty by a factor of 5, and also made it get'
-        ' progressively larger with each level of refinement.'
-        ' Thus, warping results will vary from earlier instances'
-        ' of 3dQwarp.'
-        ' * The progressive increase in the penalty at higher levels'
-        ' means that the \'cost function\' can actually look like the'
-        ' alignment is getting worse when the levels change.'
-        ' * IF you wish to turn off this progression, for whatever'
-        ' reason (e.g., to keep compatibility with older results),'
-        ' use the option \'-penold\'.To be completely compatible with'
-        ' the older 3dQwarp, you\'ll also have to use \'-penfac 0.2\'.',
-        argstr='-penfac %f')
+        argstr="-penfac %f",
+        desc="""\
+Use this value to weight the penalty.
+The default value is 1. Larger values mean the
+penalty counts more, reducing grid distortions,
+insha'Allah; '-nopenalty' is the same as '-penfac 0'.
+On 23 Sep 2013, Zhark increased the default value of
+the penalty by a factor of 5, and also made it get
+progressively larger with each level of refinement.
+Thus, warping results will vary from earlier instances
+of 3dQwarp.
+
+* The progressive increase in the penalty at higher levels
+  means that the 'cost function' can actually look like the
+  alignment is getting worse when the levels change.
+* IF you wish to turn off this progression, for whatever
+  reason (e.g., to keep compatibility with older results),
+  use the option '-penold'. To be completely compatible with
+  the older 3dQwarp, you'll also have to use '-penfac 0.2'.
+
+""",
+    )
     noweight = traits.Bool(
-        desc='If you want a binary weight (the old default), use this option.'
-        'That is, each voxel in the base volume automask will be'
-        'weighted the same in the computation of the cost functional.',
-        argstr='-noweight')
+        desc="If you want a binary weight (the old default), use this option. "
+        "That is, each voxel in the base volume automask will be "
+        "weighted the same in the computation of the cost functional.",
+        argstr="-noweight",
+    )
     weight = File(
-        desc='Instead of computing the weight from the base dataset,'
-        'directly input the weight volume from dataset \'www\'.'
-        '* Useful if you know what over parts of the base image you'
-        'want to emphasize or de-emphasize the matching functional.',
-        argstr='-weight %s',
-        exists=True)
+        desc="Instead of computing the weight from the base dataset, "
+        "directly input the weight volume from dataset 'www'. "
+        "Useful if you know over what parts of the base image you "
+        "want to emphasize or de-emphasize the matching functional.",
+        argstr="-weight %s",
+        exists=True,
+    )
+ "Useful if you know what over parts of the base image you" + "want to emphasize or de-emphasize the matching functional.", + argstr="-weight %s", + exists=True, + ) wball = traits.List( traits.Int(), - desc='-wball x y z r f' - 'Enhance automatic weight from \'-useweight\' by a factor' - 'of 1+f*Gaussian(FWHM=r) centered in the base image at' - 'DICOM coordinates (x,y,z) and with radius \'r\'. The' - 'goal of this option is to try and make the alignment' - 'better in a specific part of the brain.' - '* Example: -wball 0 14 6 30 40' - 'to emphasize the thalamic area (in MNI/Talairach space).' - '* The \'r\' parameter must be positive!' - '* The \'f\' parameter must be between 1 and 100 (inclusive).' - '* \'-wball\' does nothing if you input your own weight' - 'with the \'-weight\' option.' - '* \'-wball\' does change the binary weight created by' - 'the \'-noweight\' option.' - '* You can only use \'-wball\' once in a run of 3dQwarp.' - '*** The effect of \'-wball\' is not dramatic. The example' - 'above makes the average brain image across a collection' - 'of subjects a little sharper in the thalamic area, which' - 'might have some small value. If you care enough about' - 'alignment to use \'-wball\', then you should examine the' - 'results from 3dQwarp for each subject, to see if the' - 'alignments are good enough for your purposes.', - argstr='-wball %s', + desc=""""\ +``-wball x y z r f`` +Enhance automatic weight from '-useweight' by a factor +of 1+f\\*Gaussian(FWHM=r) centered in the base image at +DICOM coordinates (x,y,z) and with radius 'r'. The +goal of this option is to try and make the alignment +better in a specific part of the brain. +Example: -wball 0 14 6 30 40 +to emphasize the thalamic area (in MNI/Talairach space). + +* The 'r' parameter must be positive! +* The 'f' parameter must be between 1 and 100 (inclusive). +* '-wball' does nothing if you input your own weight + with the '-weight' option. +* '-wball' does change the binary weight created by + the '-noweight' option. +* You can only use '-wball' once in a run of 3dQwarp. + +**The effect of '-wball' is not dramatic.** The example +above makes the average brain image across a collection +of subjects a little sharper in the thalamic area, which +might have some small value. If you care enough about +alignment to use '-wball', then you should examine the +results from 3dQwarp for each subject, to see if the +alignments are good enough for your purposes.""", + argstr="-wball %s", minlen=5, - maxlen=5) - traits.Tuple((traits.Float(), traits.Float()), argstr='-bpass %f %f') - wmask = traits.Tuple( + maxlen=5, + xor=["wmask"], + ) + bandpass = Tuple((traits.Float(), traits.Float()), argstr="-bpass %f %f") + wmask = Tuple( (File(exists=True), traits.Float()), - desc='-wmask ws f' - 'Similar to \'-wball\', but here, you provide a dataset \'ws\'' - 'that indicates where to increase the weight.' - '* The \'ws\' dataset must be on the same 3D grid as the base dataset.' - '* \'ws\' is treated as a mask -- it only matters where it' - 'is nonzero -- otherwise, the values inside are not used.' - '* After \'ws\' comes the factor \'f\' by which to increase the' - 'automatically computed weight. Where \'ws\' is nonzero,' - 'the weighting will be multiplied by (1+f).' - '* As with \'-wball\', the factor \'f\' should be between 1 and 100.' 
- '* You cannot use \'-wball\' and \'-wmask\' together!', - argstr='-wpass %s %f') - out_weight_file = traits.File( - argstr='-wtprefix %s', - desc='Write the weight volume to disk as a dataset') + desc="""\ +Similar to '-wball', but here, you provide a dataset 'ws' +that indicates where to increase the weight. + +* The 'ws' dataset must be on the same 3D grid as the base dataset. +* 'ws' is treated as a mask -- it only matters where it + is nonzero -- otherwise, the values inside are not used. +* After 'ws' comes the factor 'f' by which to increase the + automatically computed weight. Where 'ws' is nonzero, + the weighting will be multiplied by (1+f). +* As with '-wball', the factor 'f' should be between 1 and 100. + +""", + argstr="-wpass %s %f", + xor=["wball"], + ) + out_weight_file = File( + argstr="-wtprefix %s", desc="Write the weight volume to disk as a dataset" + ) blur = traits.List( traits.Float(), - desc='Gaussian blur the input images by \'bb\' (FWHM) voxels before' - 'doing the alignment (the output dataset will not be blurred).' - 'The default is 2.345 (for no good reason).' - '* Optionally, you can provide 2 values for \'bb\', and then' - 'the first one is applied to the base volume, the second' - 'to the source volume.' - '-->>* e.g., \'-blur 0 3\' to skip blurring the base image' - '(if the base is a blurry template, for example).' - '* A negative blur radius means to use 3D median filtering,' - 'rather than Gaussian blurring. This type of filtering will' - 'better preserve edges, which can be important in alignment.' - '* If the base is a template volume that is already blurry,' - 'you probably don\'t want to blur it again, but blurring' - 'the source volume a little is probably a good idea, to' - 'help the program avoid trying to match tiny features.' - '* Note that -duplo will blur the volumes some extra' - 'amount for the initial small-scale warping, to make' - 'that phase of the program converge more rapidly.', - argstr='-blur %s', + desc="""\ +Gaussian blur the input images by 'bb' (FWHM) voxels before +doing the alignment (the output dataset will not be blurred). +The default is 2.345 (for no good reason). + +* Optionally, you can provide 2 values for 'bb', and then + the first one is applied to the base volume, the second + to the source volume. + e.g., '-blur 0 3' to skip blurring the base image + (if the base is a blurry template, for example). +* A negative blur radius means to use 3D median filtering, + rather than Gaussian blurring. This type of filtering will + better preserve edges, which can be important in alignment. +* If the base is a template volume that is already blurry, + you probably don't want to blur it again, but blurring + the source volume a little is probably a good idea, to + help the program avoid trying to match tiny features. +* Note that -duplo will blur the volumes some extra + amount for the initial small-scale warping, to make + that phase of the program converge more rapidly. + +""", + argstr="-blur %s", minlen=1, - maxlen=2) + maxlen=2, + ) pblur = traits.List( traits.Float(), - desc='Use progressive blurring; that is, for larger patch sizes,' - 'the amount of blurring is larger. The general idea is to' - 'avoid trying to match finer details when the patch size' - 'and incremental warps are coarse. When \'-blur\' is used' - 'as well, it sets a minimum amount of blurring that will' - 'be used. [06 Aug 2014 -- \'-pblur\' may become the default someday].' 
     pblur = traits.List(
         traits.Float(),
-        desc='Use progressive blurring; that is, for larger patch sizes,'
-        'the amount of blurring is larger. The general idea is to'
-        'avoid trying to match finer details when the patch size'
-        'and incremental warps are coarse. When \'-blur\' is used'
-        'as well, it sets a minimum amount of blurring that will'
-        'be used. [06 Aug 2014 -- \'-pblur\' may become the default someday].'
-        '* You can optionally give the fraction of the patch size that'
-        'is used for the progressive blur by providing a value between'
-        '0 and 0.25 after \'-pblur\'. If you provide TWO values, the'
-        'the first fraction is used for progressively blurring the'
-        'base image and the second for the source image. The default'
-        'parameters when just \'-pblur\' is given is the same as giving'
-        'the options as \'-pblur 0.09 0.09\'.'
-        '* \'-pblur\' is useful when trying to match 2 volumes with high'
-        'amounts of detail; e.g, warping one subject\'s brain image to'
-        'match another\'s, or trying to warp to match a detailed template.'
-        '* Note that using negative values with \'-blur\' means that the'
-        'progressive blurring will be done with median filters, rather'
-        'than Gaussian linear blurring.'
-        '-->>*** The combination of the -allineate and -pblur options will make'
-        'the results of using 3dQwarp to align to a template somewhat'
-        'less sensitive to initial head position and scaling.',
-        argstr='-pblur %s',
+        desc="""\
+Use progressive blurring; that is, for larger patch sizes,
+the amount of blurring is larger. The general idea is to
+avoid trying to match finer details when the patch size
+and incremental warps are coarse. When '-blur' is used
+as well, it sets a minimum amount of blurring that will
+be used. [06 Aug 2014 -- '-pblur' may become the default someday].
+
+* You can optionally give the fraction of the patch size that
+  is used for the progressive blur by providing a value between
+  0 and 0.25 after '-pblur'. If you provide TWO values, the
+  first fraction is used for progressively blurring the
+  base image and the second for the source image. The default
+  parameters when just '-pblur' is given is the same as giving
+  the options as '-pblur 0.09 0.09'.
+* '-pblur' is useful when trying to match 2 volumes with high
+  amounts of detail; e.g, warping one subject's brain image to
+  match another's, or trying to warp to match a detailed template.
+* Note that using negative values with '-blur' means that the
+  progressive blurring will be done with median filters, rather
+  than Gaussian linear blurring.
+
+Note: The combination of the -allineate and -pblur options will make
+the results of using 3dQwarp to align to a template somewhat
+less sensitive to initial head position and scaling.""",
+        argstr="-pblur %s",
         minlen=1,
-        maxlen=2)
+        maxlen=2,
+    )
     emask = File(
-        desc='Here, \'ee\' is a dataset to specify a mask of voxels'
-        'to EXCLUDE from the analysis -- all voxels in \'ee\''
-        'that are NONZERO will not be used in the alignment.'
-        '* The base image always automasked -- the emask is'
-        'extra, to indicate voxels you definitely DON\'T want'
-        'included in the matching process, even if they are'
-        'inside the brain.',
-        argstr='-emask %s',
-        exists=True,
-        copyfile=False)
-    noXdis = traits.Bool(
-        desc='Warp will not displace in x directoin', argstr='-noXdis')
-    noYdis = traits.Bool(
-        desc='Warp will not displace in y directoin', argstr='-noYdis')
-    noZdis = traits.Bool(
-        desc='Warp will not displace in z directoin', argstr='-noZdis')
+        desc="Here, 'ee' is a dataset to specify a mask of voxels "
+        "to EXCLUDE from the analysis -- all voxels in 'ee' "
+        "that are NONZERO will not be used in the alignment. "
+        "The base image is always automasked -- the emask is "
+        "extra, to indicate voxels you definitely DON'T want "
+        "included in the matching process, even if they are "
+        "inside the brain.",
+        argstr="-emask %s",
+        exists=True,
+        copyfile=False,
+    )
+    noXdis = traits.Bool(desc="Warp will not displace in x direction", argstr="-noXdis")
+    noYdis = traits.Bool(desc="Warp will not displace in y direction", argstr="-noYdis")
+    noZdis = traits.Bool(desc="Warp will not displace in z direction", argstr="-noZdis")
     iniwarp = traits.List(
         File(exists=True, copyfile=False),
-        desc='A dataset with an initial nonlinear warp to use.'
-        '* If this option is not used, the initial warp is the identity.'
-        '* You can specify a catenation of warps (in quotes) here, as in'
-        'program 3dNwarpApply.'
-        '* As a special case, if you just input an affine matrix in a .1D'
-        'file, that will work also -- it is treated as giving the initial'
-        'warp via the string "IDENT(base_dataset) matrix_file.aff12.1D".'
-        '* You CANNOT use this option with -duplo !!'
-        '* -iniwarp is usually used with -inilev to re-start 3dQwarp from'
-        'a previous stopping point.',
-        argstr='-iniwarp %s',
-        xor=['duplo'])
+        desc="""\
+A dataset with an initial nonlinear warp to use.
+
+* If this option is not used, the initial warp is the identity.
+* You can specify a catenation of warps (in quotes) here, as in
+  program 3dNwarpApply.
+* As a special case, if you just input an affine matrix in a .1D
+  file, that will work also -- it is treated as giving the initial
+  warp via the string "IDENT(base_dataset) matrix_file.aff12.1D".
+* You CANNOT use this option with -duplo !!
+* -iniwarp is usually used with -inilev to re-start 3dQwarp from
+  a previous stopping point.
+
+""",
+        argstr="-iniwarp %s",
+        xor=["duplo"],
+    )
     inilev = traits.Int(
-        desc='The initial refinement \'level\' at which to start.'
-        '* Usually used with -iniwarp; CANNOT be used with -duplo.'
-        '* The combination of -inilev and -iniwarp lets you take the'
-        'results of a previous 3dQwarp run and refine them further:'
-        'Note that the source dataset in the second run is the SAME as'
-        'in the first run. If you don\'t see why this is necessary,'
-        'then you probably need to seek help from an AFNI guru.',
-        argstr='-inilev %d',
-        xor=['duplo'])
+        desc="""\
+The initial refinement 'level' at which to start.
+
+* Usually used with -iniwarp; CANNOT be used with -duplo.
+* The combination of -inilev and -iniwarp lets you take the
+  results of a previous 3dQwarp run and refine them further:
+  Note that the source dataset in the second run is the SAME as
+  in the first run. If you don't see why this is necessary,
+  then you probably need to seek help from an AFNI guru.
+
+""",
+        argstr="-inilev %d",
+        xor=["duplo"],
+    )
     minpatch = traits.Int(
-        desc='* The value of mm should be an odd integer.'
-        '* The default value of mm is 25.'
-        '* For more accurate results than mm=25, try 19 or 13.'
-        '* The smallest allowed patch size is 5.'
-        '* You may want stop at a larger patch size (say 7 or 9) and use'
-        'the -Qfinal option to run that final level with quintic warps,'
-        'which might run faster and provide the same degree of warp detail.'
-        '* Trying to make two different brain volumes match in fine detail'
-        'is usually a waste of time, especially in humans. There is too'
-        'much variability in anatomy to match gyrus to gyrus accurately.'
-        'For this reason, the default minimum patch size is 25 voxels.'
-        'Using a smaller \'-minpatch\' might try to force the warp to'
-        'match features that do not match, and the result can be useless'
-        'image distortions -- another reason to LOOK AT THE RESULTS.',
-        argstr='-minpatch %d')
+        desc="""\
+The value of mm should be an odd integer.
+
+* The default value of mm is 25.
+* For more accurate results than mm=25, try 19 or 13.
+* The smallest allowed patch size is 5.
+* You may want stop at a larger patch size (say 7 or 9) and use
+  the -Qfinal option to run that final level with quintic warps,
+  which might run faster and provide the same degree of warp detail.
+* Trying to make two different brain volumes match in fine detail
+  is usually a waste of time, especially in humans. There is too
+  much variability in anatomy to match gyrus to gyrus accurately.
+  For this reason, the default minimum patch size is 25 voxels.
+  Using a smaller '-minpatch' might try to force the warp to
+  match features that do not match, and the result can be useless
+  image distortions -- another reason to LOOK AT THE RESULTS.
+
+""",
+        argstr="-minpatch %d",
+    )
     maxlev = traits.Int(
-        desc='The initial refinement \'level\' at which to start.'
-        '* Usually used with -iniwarp; CANNOT be used with -duplo.'
-        '* The combination of -inilev and -iniwarp lets you take the'
-        'results of a previous 3dQwarp run and refine them further:'
-        'Note that the source dataset in the second run is the SAME as'
-        'in the first run. If you don\'t see why this is necessary,'
-        'then you probably need to seek help from an AFNI guru.',
-        argstr='-maxlev %d',
-        xor=['duplo'],
-        position=-1)
+        desc="""\
+The maximum refinement 'level' to use.
+
+* CANNOT be used with -duplo.
+* The combination of -inilev and -iniwarp lets you take the
+  results of a previous 3dQwarp run and refine them further:
+  Note that the source dataset in the second run is the SAME as
+  in the first run. If you don't see why this is necessary,
+  then you probably need to seek help from an AFNI guru.
+
+""",
+        argstr="-maxlev %d",
+        xor=["duplo"],
+        position=-1,
+    )
     gridlist = File(
-        desc='This option provides an alternate way to specify the patch'
-        'grid sizes used in the warp optimization process. \'gl\' is'
-        'a 1D file with a list of patches to use -- in most cases,'
-        'you will want to use it in the following form:'
-        '-gridlist \'1D: 0 151 101 75 51\''
-        '* Here, a 0 patch size means the global domain. Patch sizes'
-        'otherwise should be odd integers >= 5.'
-        '* If you use the \'0\' patch size again after the first position,'
-        'you will actually get an iteration at the size of the'
-        'default patch level 1, where the patch sizes are 75% of'
-        'the volume dimension. There is no way to force the program'
-        'to literally repeat the sui generis step of lev=0.'
-        '* You cannot use -gridlist with -duplo or -plusminus!',
-        argstr='-gridlist %s',
+        desc="""\
+This option provides an alternate way to specify the patch
+grid sizes used in the warp optimization process. 'gl' is
+a 1D file with a list of patches to use -- in most cases,
+you will want to use it in the following form:
+``-gridlist '1D: 0 151 101 75 51'``
+
+* Here, a 0 patch size means the global domain. Patch sizes
+  otherwise should be odd integers >= 5.
+* If you use the '0' patch size again after the first position,
+  you will actually get an iteration at the size of the
+  default patch level 1, where the patch sizes are 75% of
+  the volume dimension. There is no way to force the program
+  to literally repeat the sui generis step of lev=0.
+
+""",
+        argstr="-gridlist %s",
         exists=True,
         copyfile=False,
-        xor=['duplo', 'plusminus'])
+        xor=["duplo", "plusminus"],
+    )
     allsave = traits.Bool(
-        desc='This option lets you save the output warps from each level'
-        'of the refinement process. Mostly used for experimenting.'
-        '* Cannot be used with -nopadWARP, -duplo, or -plusminus.'
-        '* Will only save all the outputs if the program terminates'
-        'normally -- if it crashes, or freezes, then all these'
-        'warps are lost.',
-        argstr='-allsave',
-        xor=['nopadWARP', 'duplo', 'plusminus'])
+        desc="""\
+This option lets you save the output warps from each level
+of the refinement process. Mostly used for experimenting.
+Will only save all the outputs if the program terminates
+normally -- if it crashes, or freezes, then all these
+warps are lost.""",
+        argstr="-allsave",
+        xor=["nopadWARP", "duplo", "plusminus"],
+    )
     duplo = traits.Bool(
-        desc='Start off with 1/2 scale versions of the volumes,'
-        'for getting a speedy coarse first alignment.'
-        '* Then scales back up to register the full volumes.'
-        'The goal is greater speed, and it seems to help this'
-        'positively piggish program to be more expeditious.'
-        '* However, accuracy is somewhat lower with \'-duplo\','
-        'for reasons that currenly elude Zhark; for this reason,'
-        'the Emperor does not usually use \'-duplo\'.',
-        argstr='-duplo',
-        xor=[
-            'gridlist', 'maxlev', 'inilev', 'iniwarp', 'plusminus', 'allsave'
-        ])
+        desc="""\
+Start off with 1/2 scale versions of the volumes,
+for getting a speedy coarse first alignment.
+
+* Then scales back up to register the full volumes.
+  The goal is greater speed, and it seems to help this
+  positively piggish program to be more expeditious.
+* However, accuracy is somewhat lower with '-duplo',
+  for reasons that currently elude Zhark; for this reason,
+  the Emperor does not usually use '-duplo'.
+
+""",
+        argstr="-duplo",
+        xor=["gridlist", "maxlev", "inilev", "iniwarp", "plusminus", "allsave"],
+    )
     workhard = traits.Bool(
-        desc='Iterate more times, which can help when the volumes are'
-        'hard to align at all, or when you hope to get a more precise'
-        'alignment.'
-        '* Slows the program down (possibly a lot), of course.'
-        '* When you combine \'-workhard\' with \'-duplo\', only the'
-        'full size volumes get the extra iterations.'
-        '* For finer control over which refinement levels work hard,'
-        'you can use this option in the form (for example)'
-        ' -workhard:4:7'
-        'which implies the extra iterations will be done at levels'
-        '4, 5, 6, and 7, but not otherwise.'
-        '* You can also use \'-superhard\' to iterate even more, but'
-        'this extra option will REALLY slow things down.'
-        '-->>* Under most circumstances, you should not need to use either'
-        '-workhard or -superhard.'
-        '-->>* The fastest way to register to a template image is via the'
-        '-duplo option, and without the -workhard or -superhard options.'
-        '-->>* If you use this option in the form \'-Workhard\' (first letter'
-        'in upper case), then the second iteration at each level is'
-        'done with quintic polynomial warps.',
-        argstr='-workhard',
-        xor=['boxopt', 'ballopt'])
+        desc="""\
+Iterate more times, which can help when the volumes are
+hard to align at all, or when you hope to get a more precise
+alignment.
+
+* Slows the program down (possibly a lot), of course.
+* When you combine '-workhard' with '-duplo', only the
+  full size volumes get the extra iterations.
+* For finer control over which refinement levels work hard,
+  you can use this option in the form (for example) ``-workhard:4:7``
+  which implies the extra iterations will be done at levels
+  4, 5, 6, and 7, but not otherwise.
+* You can also use '-superhard' to iterate even more, but
+  this extra option will REALLY slow things down.
+
+  * Under most circumstances, you should not need to use either
+    ``-workhard`` or ``-superhard``.
+  * The fastest way to register to a template image is via the
+    ``-duplo`` option, and without the ``-workhard`` or ``-superhard`` options.
+  * If you use this option in the form '-Workhard' (first letter
+    in upper case), then the second iteration at each level is
+    done with quintic polynomial warps.
+
+""",
+        argstr="-workhard",
+        xor=["boxopt", "ballopt"],
+    )
     Qfinal = traits.Bool(
-        desc='At the finest patch size (the final level), use Hermite'
-        'quintic polynomials for the warp instead of cubic polynomials.'
-        '* In a 3D \'patch\', there are 2x2x2x3=24 cubic polynomial basis'
-        'function parameters over which to optimize (2 polynomials'
-        'dependent on each of the x,y,z directions, and 3 different'
-        'directions of displacement).'
-        '* There are 3x3x3x3=81 quintic polynomial parameters per patch.'
-        '* With -Qfinal, the final level will have more detail in'
-        'the allowed warps, at the cost of yet more CPU time.'
-        '* However, no patch below 7x7x7 in size will be done with quintic'
-        'polynomials.'
-        '* This option is also not usually needed, and is experimental.',
-        argstr='-Qfinal')
+        desc="""\
+At the finest patch size (the final level), use Hermite
+quintic polynomials for the warp instead of cubic polynomials.
+
+* In a 3D 'patch', there are 2x2x2x3=24 cubic polynomial basis
+  function parameters over which to optimize (2 polynomials
+  dependent on each of the x,y,z directions, and 3 different
+  directions of displacement).
+* There are 3x3x3x3=81 quintic polynomial parameters per patch.
+* With -Qfinal, the final level will have more detail in
+  the allowed warps, at the cost of yet more CPU time.
+* However, no patch below 7x7x7 in size will be done with quintic
+  polynomials.
+* This option is also not usually needed, and is experimental.
+
+""",
+        argstr="-Qfinal",
+    )
     Qonly = traits.Bool(
-        desc='Use Hermite quintic polynomials at all levels.'
-        '* Very slow (about 4 times longer). Also experimental.'
-        '* Will produce a (discrete representation of a) C2 warp.',
-        argstr='-Qonly')
+        desc="""\
+Use Hermite quintic polynomials at all levels.
+
+* Very slow (about 4 times longer). Also experimental.
+* Will produce a (discrete representation of a) C2 warp.
+
+""",
+        argstr="-Qonly",
+    )
     plusminus = traits.Bool(
-        desc='Normally, the warp displacements dis(x) are defined to match'
-        'base(x) to source(x+dis(x)). With this option, the match'
-        'is between base(x-dis(x)) and source(x+dis(x)) -- the two'
-        'images \'meet in the middle\'.'
-        '* One goal is to mimic the warping done to MRI EPI data by'
-        'field inhomogeneities, when registering between a \'blip up\''
-        'and a \'blip down\' down volume, which will have opposite'
-        'distortions.'
-        '* Define Wp(x) = x+dis(x) and Wm(x) = x-dis(x). Then since'
-        'base(Wm(x)) matches source(Wp(x)), by substituting INV(Wm(x))'
-        'wherever we see x, we have base(x) matches source(Wp(INV(Wm(x))));'
-        'that is, the warp V(x) that one would get from the \'usual\' way'
-        'of running 3dQwarp is V(x) = Wp(INV(Wm(x))).'
-        '* Conversely, we can calculate Wp(x) in terms of V(x) as follows:'
-        'If V(x) = x + dv(x), define Vh(x) = x + dv(x)/2;'
-        'then Wp(x) = V(INV(Vh(x)))'
-        '* With the above formulas, it is possible to compute Wp(x) from'
-        'V(x) and vice-versa, using program 3dNwarpCalc. The requisite'
-        'commands are left as an exercise for the aspiring AFNI Jedi Master.'
-        '* You can use the semi-secret \'-pmBASE\' option to get the V(x)'
-        'warp and the source dataset warped to base space, in addition to'
-        'the Wp(x) \'_PLUS\' and Wm(x) \'_MINUS\' warps.'
-        '-->>* Alas: -plusminus does not work with -duplo or -allineate :-('
-        '* However, you can use -iniwarp with -plusminus :-)'
-        '-->>* The outputs have _PLUS (from the source dataset) and _MINUS'
-        '(from the base dataset) in their filenames, in addition to'
-        'the prefix. The -iwarp option, if present, will be ignored.',
-        argstr='-plusminus',
-        xor=['duplo', 'allsave', 'iwarp'])
+        desc="""\
+Normally, the warp displacements dis(x) are defined to match
+base(x) to source(x+dis(x)). With this option, the match
+is between base(x-dis(x)) and source(x+dis(x)) -- the two
+images 'meet in the middle'.
+
+* One goal is to mimic the warping done to MRI EPI data by
+  field inhomogeneities, when registering between a 'blip up'
+  and a 'blip down' down volume, which will have opposite
+  distortions.
+* Define Wp(x) = x+dis(x) and Wm(x) = x-dis(x). Then since
+  base(Wm(x)) matches source(Wp(x)), by substituting INV(Wm(x))
+  wherever we see x, we have base(x) matches source(Wp(INV(Wm(x))));
+  that is, the warp V(x) that one would get from the 'usual' way
+  of running 3dQwarp is V(x) = Wp(INV(Wm(x))).
+* Conversely, we can calculate Wp(x) in terms of V(x) as follows:
+  If V(x) = x + dv(x), define Vh(x) = x + dv(x)/2;
+  then Wp(x) = V(INV(Vh(x)))
+* With the above formulas, it is possible to compute Wp(x) from
+  V(x) and vice-versa, using program 3dNwarpCalc. The requisite
+  commands are left as an exercise for the aspiring AFNI Jedi Master.
+* You can use the semi-secret '-pmBASE' option to get the V(x)
+  warp and the source dataset warped to base space, in addition to
+  the Wp(x) '_PLUS' and Wm(x) '_MINUS' warps.
+
+  * Alas: -plusminus does not work with -duplo or -allineate :-(
+  * However, you can use -iniwarp with -plusminus :-)
+  * The outputs have _PLUS (from the source dataset) and _MINUS
+    (from the base dataset) in their filenames, in addition to
+    the prefix. The -iwarp option, if present, will be ignored.
+
+""",
+        argstr="-plusminus",
+        xor=["duplo", "allsave", "iwarp"],
+    )
     nopad = traits.Bool(
-        desc='Do NOT use zero-padding on the 3D base and source images.'
-        '[Default == zero-pad, if needed]'
-        '* The underlying model for deformations goes to zero at the'
-        'edge of the volume being warped. However, if there is'
-        'significant data near an edge of the volume, then it won\'t'
-        'get displaced much, and so the results might not be good.'
-        '* Zero padding is designed as a way to work around this potential'
-        'problem. You should NOT need the \'-nopad\' option for any'
-        'reason that Zhark can think of, but it is here to be symmetrical'
-        'with 3dAllineate.'
-        '* Note that the output (warped from source) dataset will be on the'
-        'base dataset grid whether or not zero-padding is allowed. However,'
-        'unless you use the following option, allowing zero-padding (i.e.,'
-        'the default operation) will make the output WARP dataset(s) be'
-        'on a larger grid (also see \'-expad\' below).',
-        argstr='-nopad')
+        desc="""\
+Do NOT use zero-padding on the 3D base and source images.
+[Default == zero-pad, if needed]
+
+* The underlying model for deformations goes to zero at the
+  edge of the volume being warped. However, if there is
+  significant data near an edge of the volume, then it won't
+  get displaced much, and so the results might not be good.
+* Zero padding is designed as a way to work around this potential
+  problem. You should NOT need the '-nopad' option for any
+  reason that Zhark can think of, but it is here to be symmetrical
+  with 3dAllineate.
+* Note that the output (warped from source) dataset will be on the
+  base dataset grid whether or not zero-padding is allowed. However,
+  unless you use the following option, allowing zero-padding (i.e.,
+  the default operation) will make the output WARP dataset(s) be
+  on a larger grid (also see '-expad' below).
+
+""",
+        argstr="-nopad",
+    )
     nopadWARP = traits.Bool(
-        desc='If for some reason you require the warp volume to'
-        'match the base volume, then use this option to have the output'
-        'WARP dataset(s) truncated.',
-        argstr='-nopadWARP',
-        xor=['allsave', 'expad'])
+        desc="If for some reason you require the warp volume to "
+        "match the base volume, then use this option to have the output "
+        "WARP dataset(s) truncated.",
+        argstr="-nopadWARP",
+        xor=["allsave", "expad"],
+    )
     expad = traits.Int(
-        desc='This option instructs the program to pad the warp by an extra'
-        '\'EE\' voxels (and then 3dQwarp starts optimizing it).'
-        '* This option is seldom needed, but can be useful if you'
-        'might later catenate the nonlinear warp -- via 3dNwarpCat --'
-        'with an affine transformation that contains a large shift.'
-        'Under that circumstance, the nonlinear warp might be shifted'
-        'partially outside its original grid, so expanding that grid'
-        'can avoid this problem.'
-        '* Note that this option perforce turns off \'-nopadWARP\'.',
-        argstr='-expad %d',
-        xor=['nopadWARP'])
+        desc="This option instructs the program to pad the warp by an extra "
+        "'EE' voxels (and then 3dQwarp starts optimizing it). "
+        "This option is seldom needed, but can be useful if you "
+        "might later catenate the nonlinear warp -- via 3dNwarpCat -- "
+        "with an affine transformation that contains a large shift. "
+        "Under that circumstance, the nonlinear warp might be shifted "
+        "partially outside its original grid, so expanding that grid "
+        "can avoid this problem. "
+        "Note that this option perforce turns off '-nopadWARP'.",
+        argstr="-expad %d",
+        xor=["nopadWARP"],
+    )
     ballopt = traits.Bool(
-        desc='Normally, the incremental warp parameters are optimized inside'
-        'a rectangular \'box\' (24 dimensional for cubic patches, 81 for'
-        'quintic patches), whose limits define the amount of distortion'
-        'allowed at each step. Using \'-ballopt\' switches these limits'
-        'to be applied to a \'ball\' (interior of a hypersphere), which'
-        'can allow for larger incremental displacements. Use this'
-        'option if you think things need to be able to move farther.',
-        argstr='-ballopt',
-        xor=['workhard', 'boxopt'])
+        desc="Normally, the incremental warp parameters are optimized inside "
+        "a rectangular 'box' (24 dimensional for cubic patches, 81 for "
+        "quintic patches), whose limits define the amount of distortion "
+        "allowed at each step. Using '-ballopt' switches these limits "
+        "to be applied to a 'ball' (interior of a hypersphere), which "
+        "can allow for larger incremental displacements. Use this "
+        "option if you think things need to be able to move farther.",
+        argstr="-ballopt",
+        xor=["workhard", "boxopt"],
+    )
     baxopt = traits.Bool(
-        desc='Use the \'box\' optimization limits instead of the \'ball\''
-        '[this is the default at present].'
-        '* Note that if \'-workhard\' is used, then ball and box optimization'
-        'are alternated in the different iterations at each level, so'
-        'these two options have no effect in that case.',
-        argstr='-boxopt',
-        xor=['workhard', 'ballopt'])
+        desc="Use the 'box' optimization limits instead of the 'ball' "
+        "[this is the default at present]. "
+        "Note that if '-workhard' is used, then ball and box optimization "
+        "are alternated in the different iterations at each level, so "
+        "these two options have no effect in that case.",
+        argstr="-boxopt",
+        xor=["workhard", "ballopt"],
+    )
     verb = traits.Bool(
-        desc='more detailed description of the process',
-        argstr='-verb',
-        xor=['quiet'])
+        desc="more detailed description of the process", argstr="-verb", xor=["quiet"]
+    )
     quiet = traits.Bool(
-        desc='Cut out most of the fun fun fun progress messages :-(',
-        argstr='-quiet',
-        xor=['verb'])
+        desc="Cut out most of the fun fun fun progress messages :-(",
+        argstr="-quiet",
+        xor=["verb"],
+    )
     # Hidden and semi-hidden options
-    overwrite = traits.Bool(desc='Overwrite outputs', argstr='-overwrite')
+    overwrite = traits.Bool(desc="Overwrite outputs", argstr="-overwrite")
     lpc = traits.Bool(
-        desc='Local Pearson minimization (i.e., EPI-T1 registration)'
-        'This option has not be extensively tested'
-        'If you use \'-lpc\', then \'-maxlev 0\' is automatically set.'
-        'If you want to go to more refined levels, you can set \'-maxlev\''
-        'This should be set up to have lpc as the second to last argument'
-        'and maxlev as the second to last argument, as needed by AFNI'
-        'Using maxlev > 1 is not recommended for EPI-T1 alignment.',
-        argstr='-lpc',
-        xor=['nmi', 'mi', 'hel', 'lpa', 'pear'],
-        position=-2)
+        desc="Local Pearson minimization (i.e., EPI-T1 registration). "
+        "This option has not been extensively tested. "
+        "If you use '-lpc', then '-maxlev 0' is automatically set. "
+        "If you want to go to more refined levels, you can set '-maxlev'. "
+        "This should be set up to have lpc as the second to last argument "
+        "and maxlev as the last argument, as needed by AFNI. "
+        "Using maxlev > 1 is not recommended for EPI-T1 alignment.",
+        argstr="-lpc",
+        xor=["nmi", "mi", "hel", "lpa", "pear"],
+        position=-2,
+    )
     lpa = traits.Bool(
-        desc='Local Pearson maximization'
-        'This option has not be extensively tested',
-        argstr='-lpa',
-        xor=['nmi', 'mi', 'lpc', 'hel', 'pear'])
+        desc="Local Pearson maximization. This option has not been extensively tested",
+        argstr="-lpa",
+        xor=["nmi", "mi", "lpc", "hel", "pear"],
+    )
     hel = traits.Bool(
-        desc='Hellinger distance: a matching function for the adventurous'
-        'This option has NOT be extensively tested for usefullness'
-        'and should be considered experimental at this infundibulum.',
-        argstr='-hel',
-        xor=['nmi', 'mi', 'lpc', 'lpa', 'pear'])
+        desc="Hellinger distance: a matching function for the adventurous. "
+        "This option has NOT been extensively tested for usefulness "
+        "and should be considered experimental at this infundibulum.",
+        argstr="-hel",
+        xor=["nmi", "mi", "lpc", "lpa", "pear"],
+    )
     mi = traits.Bool(
-        desc='Mutual Information: a matching function for the adventurous'
-        'This option has NOT be extensively tested for usefullness'
-        'and should be considered experimental at this infundibulum.',
-        argstr='-mi',
-        xor=['mi', 'hel', 'lpc', 'lpa', 'pear'])
+        desc="Mutual Information: a matching function for the adventurous. "
+        "This option has NOT been extensively tested for usefulness "
+        "and should be considered experimental at this infundibulum.",
+        argstr="-mi",
+        xor=["nmi", "hel", "lpc", "lpa", "pear"],
+    )
     nmi = traits.Bool(
-        desc=
-        'Normalized Mutual Information: a matching function for the adventurous'
-        'This option has NOT be extensively tested for usefullness'
-        'and should be considered experimental at this infundibulum.',
-        argstr='-nmi',
-        xor=['nmi', 'hel', 'lpc', 'lpa', 'pear'])
+        desc="Normalized Mutual Information: a matching function for the adventurous. "
+        "This option has NOT been extensively tested for usefulness "
+        "and should be considered experimental at this infundibulum.",
+        argstr="-nmi",
+        xor=["mi", "hel", "lpc", "lpa", "pear"],
+    )
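
# --- Illustration (editor's sketch, not part of the patch) ------------------
# A 1-D numeric check of the plusminus relation quoted in the input spec
# above: with Wp(x) = x + d(x) and Wm(x) = x - d(x), the "usual" warp is
# V(x) = Wp(INV(Wm)(x)). A toy linear displacement keeps INV(Wm) closed-form.
def d(x):
    return 0.1 * x          # toy displacement field

def Wp(x):
    return x + d(x)         # forward ("plus") half-warp

def Wm(x):
    return x - d(x)         # backward ("minus") half-warp

def inv_Wm(y):
    return y / 0.9          # inverse of Wm(x) = 0.9 * x

x = 2.0
print(Wp(inv_Wm(x)))        # V(x) = 1.1 * x / 0.9, i.e. about 2.444
# -----------------------------------------------------------------------------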
- `_ - Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> qwarp = afni.Qwarp() >>> qwarp.inputs.in_file = 'sub-01_dir-LR_epi.nii.gz' @@ -3617,7 +4275,8 @@ class Qwarp(AFNICommand): >>> qwarp.inputs.base_file = 'sub-01_dir-RL_epi.nii.gz' >>> qwarp.inputs.plusminus = True >>> qwarp.cmdline - '3dQwarp -base sub-01_dir-RL_epi.nii.gz -source sub-01_dir-LR_epi.nii.gz -nopadWARP -prefix sub-01_dir-LR_epi_QW -plusminus' + '3dQwarp -base sub-01_dir-RL_epi.nii.gz -source sub-01_dir-LR_epi.nii.gz -nopadWARP \ +-prefix ppp_sub-01_dir-LR_epi -plusminus' >>> res = qwarp.run() # doctest: +SKIP >>> from nipype.interfaces import afni @@ -3626,7 +4285,7 @@ class Qwarp(AFNICommand): >>> qwarp.inputs.base_file = 'mni.nii' >>> qwarp.inputs.resample = True >>> qwarp.cmdline - '3dQwarp -base mni.nii -source structural.nii -prefix structural_QW -resample' + '3dQwarp -base mni.nii -source structural.nii -prefix ppp_structural -resample' >>> res = qwarp.run() # doctest: +SKIP >>> from nipype.interfaces import afni @@ -3640,7 +4299,9 @@ class Qwarp(AFNICommand): >>> qwarp.inputs.iwarp = True >>> qwarp.inputs.blur = [0,3] >>> qwarp.cmdline - '3dQwarp -base epi.nii -blur 0.0 3.0 -source structural.nii -iwarp -prefix anatSSQ.nii.gz -resample -verb -lpc' + '3dQwarp -base epi.nii -blur 0.0 3.0 -source structural.nii -iwarp -prefix anatSSQ.nii.gz \ +-resample -verb -lpc' + >>> res = qwarp.run() # doctest: +SKIP >>> from nipype.interfaces import afni @@ -3650,7 +4311,8 @@ class Qwarp(AFNICommand): >>> qwarp.inputs.duplo = True >>> qwarp.inputs.blur = [0,3] >>> qwarp.cmdline - '3dQwarp -base mni.nii -blur 0.0 3.0 -duplo -source structural.nii -prefix structural_QW' + '3dQwarp -base mni.nii -blur 0.0 3.0 -duplo -source structural.nii -prefix ppp_structural' + >>> res = qwarp.run() # doctest: +SKIP >>> from nipype.interfaces import afni @@ -3663,6 +4325,7 @@ class Qwarp(AFNICommand): >>> qwarp.inputs.out_file = 'Q25' >>> qwarp.cmdline '3dQwarp -base mni.nii -blur 0.0 3.0 -duplo -source structural.nii -minpatch 25 -prefix Q25' + >>> res = qwarp.run() # doctest: +SKIP >>> qwarp2 = afni.Qwarp() >>> qwarp2.inputs.in_file = 'structural.nii' @@ -3672,7 +4335,9 @@ class Qwarp(AFNICommand): >>> qwarp2.inputs.inilev = 7 >>> qwarp2.inputs.iniwarp = ['Q25_warp+tlrc.HEAD'] >>> qwarp2.cmdline - '3dQwarp -base mni.nii -blur 0.0 2.0 -source structural.nii -inilev 7 -iniwarp Q25_warp+tlrc.HEAD -prefix Q11' + '3dQwarp -base mni.nii -blur 0.0 2.0 -source structural.nii -inilev 7 -iniwarp Q25_\ +warp+tlrc.HEAD -prefix Q11' + >>> res2 = qwarp2.run() # doctest: +SKIP >>> res2 = qwarp2.run() # doctest: +SKIP >>> qwarp3 = afni.Qwarp() @@ -3681,109 +4346,144 @@ class Qwarp(AFNICommand): >>> qwarp3.inputs.allineate = True >>> qwarp3.inputs.allineate_opts = '-cose lpa -verb' >>> qwarp3.cmdline - "3dQwarp -allineate -allineate_opts '-cose lpa -verb' -base mni.nii -source structural.nii -prefix structural_QW" - >>> res3 = qwarp3.run() # doctest: +SKIP """ - _cmd = '3dQwarp' + "3dQwarp -allineate -allineate_opts '-cose lpa -verb' -base mni.nii -source structural.nii \ +-prefix ppp_structural" + + >>> res3 = qwarp3.run() # doctest: +SKIP + + See Also + -------- + For complete details, see the `3dQwarp Documentation. 
+ `__ + + """ + + _cmd = "3dQwarp" input_spec = QwarpInputSpec output_spec = QwarpOutputSpec - def _format_arg(self, name, spec, value): - if name == 'allineate_opts': - return spec.argstr % ("'" + value + "'") - return super(Qwarp, self)._format_arg(name, spec, value) + def _format_arg(self, name, trait_spec, value): + if name == "allineate_opts": + return trait_spec.argstr % ("'" + value + "'") + return super()._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_file): - prefix = self._gen_fname(self.inputs.in_file, suffix='_QW') - ext = '.HEAD' - suffix = '+tlrc' + prefix = self._gen_fname(self.inputs.in_file, suffix="_QW") + outputtype = self.inputs.outputtype + if outputtype == "AFNI": + ext = ".HEAD" + suffix = "+tlrc" + else: + ext = Info.output_type_to_ext(outputtype) + suffix = "" else: prefix = self.inputs.out_file - ext_ind = max([ - prefix.lower().rfind('.nii.gz'), - prefix.lower().rfind('.nii.') - ]) + ext_ind = max( + [prefix.lower().rfind(".nii.gz"), prefix.lower().rfind(".nii")] + ) if ext_ind == -1: - ext = '.HEAD' - suffix = '+tlrc' + ext = ".HEAD" + suffix = "+tlrc" else: ext = prefix[ext_ind:] - suffix = '' + suffix = "" # All outputs should be in the same directory as the prefix out_dir = os.path.dirname(os.path.abspath(prefix)) - outputs['warped_source'] = fname_presuffix( - prefix, suffix=suffix, use_ext=False, newpath=out_dir) + ext + outputs["warped_source"] = ( + fname_presuffix(prefix, suffix=suffix, use_ext=False, newpath=out_dir) + ext + ) if not self.inputs.nowarp: - outputs['source_warp'] = fname_presuffix( - prefix, suffix='_WARP' + suffix, use_ext=False, - newpath=out_dir) + ext + outputs["source_warp"] = ( + fname_presuffix( + prefix, suffix="_WARP" + suffix, use_ext=False, newpath=out_dir + ) + + ext + ) if self.inputs.iwarp: - outputs['base_warp'] = fname_presuffix( - prefix, suffix='_WARPINV' + suffix, use_ext=False, - newpath=out_dir) + ext + outputs["base_warp"] = ( + fname_presuffix( + prefix, suffix="_WARPINV" + suffix, use_ext=False, newpath=out_dir + ) + + ext + ) if isdefined(self.inputs.out_weight_file): - outputs['weights'] = os.path.abspath(self.inputs.out_weight_file) + outputs["weights"] = os.path.abspath(self.inputs.out_weight_file) if self.inputs.plusminus: - outputs['warped_source'] = fname_presuffix( - prefix, suffix='_PLUS' + suffix, use_ext=False, - newpath=out_dir) + ext - outputs['warped_base'] = fname_presuffix( - prefix, suffix='_MINUS' + suffix, use_ext=False, - newpath=out_dir) + ext - outputs['source_warp'] = fname_presuffix( - prefix, suffix='_PLUS_WARP' + suffix, use_ext=False, - newpath=out_dir) + ext - outputs['base_warp'] = fname_presuffix( - prefix, suffix='_MINUS_WARP' + suffix, use_ext=False, - newpath=out_dir) + ext + outputs["warped_source"] = ( + fname_presuffix( + prefix, suffix="_PLUS" + suffix, use_ext=False, newpath=out_dir + ) + + ext + ) + outputs["warped_base"] = ( + fname_presuffix( + prefix, suffix="_MINUS" + suffix, use_ext=False, newpath=out_dir + ) + + ext + ) + outputs["source_warp"] = ( + fname_presuffix( + prefix, suffix="_PLUS_WARP" + suffix, use_ext=False, newpath=out_dir + ) + + ext + ) + outputs["base_warp"] = ( + fname_presuffix( + prefix, + suffix="_MINUS_WARP" + suffix, + use_ext=False, + newpath=out_dir, + ) + + ext + ) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._gen_fname(self.inputs.in_file, suffix='_QW') + if name == "out_file": + return self._gen_fname(self.inputs.in_file, 
suffix="_QW") class QwarpPlusMinusInputSpec(QwarpInputSpec): source_file = File( - desc='Source image (opposite phase encoding direction than base image)', - argstr='-source %s', exists=True, - deprecated='1.1.2', - new_name='in_file', - copyfile=False) + desc="Source image (opposite phase encoding direction to the base image)", + argstr="-source %s", exists=True, + deprecated="1.1.2", + new_name="in_file", + copyfile=False, + ) out_file = File( - argstr='-prefix %s', - value='Qwarp.nii.gz', + "Qwarp.nii.gz", + argstr="-prefix %s", position=0, usedefault=True, - desc="Output file") + desc="Output file", + ) plusminus = traits.Bool( True, usedefault=True, position=1, - desc='Normally, the warp displacements dis(x) are defined to match' - 'base(x) to source(x+dis(x)). With this option, the match' - 'is between base(x-dis(x)) and source(x+dis(x)) -- the two' - 'images \'meet in the middle\'. For more info, view Qwarp` interface', - argstr='-plusminus', - xor=['duplo', 'allsave', 'iwarp']) + desc="Normally, the warp displacements dis(x) are defined to match " + "base(x) to source(x+dis(x)). With this option, the match " + "is between base(x-dis(x)) and source(x+dis(x)) -- the two " + "images 'meet in the middle'. For more info, view the Qwarp interface.", + argstr="-plusminus", + xor=["duplo", "allsave", "iwarp"], + ) class QwarpPlusMinus(Qwarp): """A version of 3dQwarp for performing field susceptibility correction using two images with opposing phase encoding directions. - For complete details, see the `3dQwarp Documentation. - `_ - Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> qwarp = afni.QwarpPlusMinus() >>> qwarp.inputs.in_file = 'sub-01_dir-LR_epi.nii.gz' @@ -3791,9 +4491,14 @@ class QwarpPlusMinus(Qwarp): >>> qwarp.inputs.base_file = 'sub-01_dir-RL_epi.nii.gz' >>> qwarp.cmdline '3dQwarp -prefix Qwarp.nii.gz -plusminus -base sub-01_dir-RL_epi.nii.gz \ - -source sub-01_dir-LR_epi.nii.gz -nopadWARP' +-source sub-01_dir-LR_epi.nii.gz -nopadWARP' >>> res = qwarp.run() # doctest: +SKIP + See Also + -------- + For complete details, see the `3dQwarp Documentation. 
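# --- Editor's aside (not part of the patch): a minimal sketch of the output
# naming implemented in Qwarp._list_outputs above. fname_presuffix is the real
# nipype helper used there; it splices a suffix onto the extension-stripped
# prefix, and the extension is appended back separately. Values below are
# illustrative, mirroring the plusminus branch with out_file='Qwarp.nii.gz'.
from nipype.utils.filemanip import fname_presuffix

prefix, ext = "Qwarp", ".nii.gz"  # prefix after stripping the '.nii.gz' extension
print(fname_presuffix(prefix, suffix="_PLUS", use_ext=False) + ext)        # Qwarp_PLUS.nii.gz
print(fname_presuffix(prefix, suffix="_MINUS_WARP", use_ext=False) + ext)  # Qwarp_MINUS_WARP.nii.gz
# --- end of editor's aside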
+ `__ + + """ input_spec = QwarpPlusMinusInputSpec diff --git a/nipype/interfaces/afni/svm.py b/nipype/interfaces/afni/svm.py index d465c1caaa..e7bd3c520a 100644 --- a/nipype/interfaces/afni/svm.py +++ b/nipype/interfaces/afni/svm.py @@ -1,10 +1,6 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft = python sts = 4 ts = 4 sw = 4 et: -"""Afni svm interfaces -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""AFNI's svm interfaces.""" from ..base import TraitedSpec, traits, File from .base import AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec @@ -13,68 +9,74 @@ class SVMTrainInputSpec(AFNICommandInputSpec): # training options ttype = traits.Str( - desc='tname: classification or regression', - argstr='-type %s', - mandatory=True) + desc="tname: classification or regression", argstr="-type %s", mandatory=True + ) in_file = File( - desc='A 3D+t AFNI brik dataset to be used for training.', - argstr='-trainvol %s', + desc="A 3D+t AFNI brik dataset to be used for training.", + argstr="-trainvol %s", mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( name_template="%s_vectors", - desc='output sum of weighted linear support vectors file name', - argstr='-bucket %s', - suffix='_bucket', - name_source="in_file") + desc="output sum of weighted linear support vectors file name", + argstr="-bucket %s", + suffix="_bucket", + name_source="in_file", + ) model = File( name_template="%s_model", - desc='basename for the brik containing the SVM model', - argstr='-model %s', - suffix='_model', - name_source="in_file") + desc="basename for the brik containing the SVM model", + argstr="-model %s", + suffix="_model", + name_source="in_file", + ) alphas = File( name_template="%s_alphas", - desc='output alphas file name', - argstr='-alpha %s', - suffix='_alphas', - name_source="in_file") + desc="output alphas file name", + argstr="-alpha %s", + suffix="_alphas", + name_source="in_file", + ) mask = File( - desc='byte-format brik file used to mask voxels in the analysis', - argstr='-mask %s', + desc="byte-format brik file used to mask voxels in the analysis", + argstr="-mask %s", position=-1, exists=True, - copyfile=False) + copyfile=False, + ) nomodelmask = traits.Bool( - desc='Flag to enable the omission of a mask file', - argstr='-nomodelmask') + desc="Flag to enable the omission of a mask file", argstr="-nomodelmask" + ) trainlabels = File( - desc= - '.1D labels corresponding to the stimulus paradigm for the training data.', - argstr='-trainlabels %s', - exists=True) + desc=".1D labels corresponding to the stimulus paradigm for the training data.", + argstr="-trainlabels %s", + exists=True, + ) censor = File( - desc= - '.1D censor file that allows the user to ignore certain samples in the training data.', - argstr='-censor %s', - exists=True) + desc=".1D censor file that allows the user to ignore certain samples in the training data.", + argstr="-censor %s", + exists=True, + ) kernel = traits.Str( - desc= - 'string specifying type of kernel function:linear, polynomial, rbf, sigmoid', - argstr='-kernel %s') + desc="string specifying type of kernel function: linear, polynomial, rbf, sigmoid", + argstr="-kernel %s", + ) max_iterations = traits.Int( - desc='Specify the maximum number of iterations for the optimization.', - argstr='-max_iterations %d') + desc="Specify the maximum number of iterations for the optimization.", 
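# --- Editor's aside (not part of the patch): a hedged usage sketch for the
# SVMTrain spec being reformatted here, modeled on nipype's doctest style.
# Paths are placeholders (in_file and trainlabels must exist on disk), and
# AFNI's 3dsvm must be installed for run() to succeed.
from nipype.interfaces import afni

svm_train = afni.SVMTrain()
svm_train.inputs.ttype = "regression"                # -> '-type regression'
svm_train.inputs.in_file = "run1+orig"               # -> '-trainvol run1+orig'
svm_train.inputs.trainlabels = "run1_categories.1D"  # -> '-trainlabels ...'
svm_train.inputs.model = "model_run1"                # -> '-model model_run1'
# res = svm_train.run()  # doctest: +SKIP
# --- end of editor's aside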
argstr="-max_iterations %d", + ) w_out = traits.Bool( - desc='output sum of weighted linear support vectors', argstr='-wout') - options = traits.Str(desc='additional options for SVM-light', argstr='%s') + desc="output sum of weighted linear support vectors", argstr="-wout" + ) + options = traits.Str(desc="additional options for SVM-light", argstr="%s") class SVMTrainOutputSpec(TraitedSpec): - out_file = File(desc='sum of weighted linear support vectors file name') - model = File(desc='brik containing the SVM model file name') - alphas = File(desc='output alphas file name') + out_file = File(desc="sum of weighted linear support vectors file name") + model = File(desc="brik containing the SVM model file name") + alphas = File(desc="output alphas file name") class SVMTrain(AFNICommand): @@ -98,51 +100,55 @@ class SVMTrain(AFNICommand): """ - _cmd = '3dsvm' + _cmd = "3dsvm" input_spec = SVMTrainInputSpec output_spec = SVMTrainOutputSpec - _additional_metadata = ['suffix'] + _additional_metadata = ["suffix"] def _format_arg(self, name, trait_spec, value): - return super(SVMTrain, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) class SVMTestInputSpec(AFNICommandInputSpec): # testing options model = traits.Str( - desc='modname is the basename for the brik containing the SVM model', - argstr='-model %s', - mandatory=True) + desc="modname is the basename for the brik containing the SVM model", + argstr="-model %s", + mandatory=True, + ) in_file = File( - desc='A 3D or 3D+t AFNI brik dataset to be used for testing.', - argstr='-testvol %s', + desc="A 3D or 3D+t AFNI brik dataset to be used for testing.", + argstr="-testvol %s", exists=True, - mandatory=True) + mandatory=True, + ) out_file = File( name_template="%s_predictions", - desc='filename for .1D prediction file(s).', - argstr='-predictions %s') + desc="filename for .1D prediction file(s).", + argstr="-predictions %s", + ) testlabels = File( - desc= - '*true* class category .1D labels for the test dataset. It is used to calculate the prediction accuracy performance', + desc="*true* class category .1D labels for the test dataset. 
It is used to calculate the prediction accuracy performance", exists=True, - argstr='-testlabels %s') + argstr="-testlabels %s", + ) classout = traits.Bool( - desc= - 'Flag to specify that pname files should be integer-valued, corresponding to class category decisions.', - argstr='-classout') + desc="Flag to specify that pname files should be integer-valued, corresponding to class category decisions.", + argstr="-classout", + ) nopredcensord = traits.Bool( - desc= - 'Flag to prevent writing predicted values for censored time-points', - argstr='-nopredcensord') + desc="Flag to prevent writing predicted values for censored time-points", + argstr="-nopredcensord", + ) nodetrend = traits.Bool( - desc= - 'Flag to specify that pname files should not be linearly detrended', - argstr='-nodetrend') + desc="Flag to specify that pname files should not be linearly detrended", + argstr="-nodetrend", + ) multiclass = traits.Bool( - desc='Specifies multiclass algorithm for classification', - argstr='-multiclass %s') - options = traits.Str(desc='additional options for SVM-light', argstr='%s') + desc="Specifies multiclass algorithm for classification", + argstr="-multiclass %s", + ) + options = traits.Str(desc="additional options for SVM-light", argstr="%s") class SVMTest(AFNICommand): @@ -163,6 +169,7 @@ class SVMTest(AFNICommand): >>> res = svmTest.run() # doctest: +SKIP """ - _cmd = '3dsvm' + + _cmd = "3dsvm" input_spec = SVMTestInputSpec output_spec = AFNICommandOutputSpec diff --git a/nipype/interfaces/afni/tests/__init__.py b/nipype/interfaces/afni/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/interfaces/afni/tests/__init__.py +++ b/nipype/interfaces/afni/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/afni/tests/test_auto_ABoverlap.py b/nipype/interfaces/afni/tests/test_auto_ABoverlap.py index 55cb12c27f..501f5331b7 100644 --- a/nipype/interfaces/afni/tests/test_auto_ABoverlap.py +++ b/nipype/interfaces/afni/tests/test_auto_ABoverlap.py @@ -1,47 +1,63 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ABoverlap def test_ABoverlap_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file_a=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-3, ), in_file_b=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-2, ), - no_automask=dict(argstr='-no_automask', ), + no_automask=dict( + argstr="-no_automask", + ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr=' |& tee %s', + argstr=" |& tee %s", + extensions=None, position=-1, ), outputtype=dict(), - quiet=dict(argstr='-quiet', ), - verb=dict(argstr='-verb', ), + quiet=dict( + argstr="-quiet", + ), + verb=dict( + argstr="-verb", + ), ) inputs = ABoverlap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ABoverlap_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ABoverlap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_AFNICommand.py 
b/nipype/interfaces/afni/tests/test_auto_AFNICommand.py index 724c98dcb2..941667f49f 100644 --- a/nipype/interfaces/afni/tests/test_auto_AFNICommand.py +++ b/nipype/interfaces/afni/tests/test_auto_AFNICommand.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import AFNICommand def test_AFNICommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, @@ -15,9 +16,10 @@ def test_AFNICommand_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source=['in_file'], - name_template='%s_afni', + argstr="-prefix %s", + extensions=None, + name_source=["in_file"], + name_template="%s_afni", ), outputtype=dict(), ) diff --git a/nipype/interfaces/afni/tests/test_auto_AFNICommandBase.py b/nipype/interfaces/afni/tests/test_auto_AFNICommandBase.py index 431baec30b..de23f6c05b 100644 --- a/nipype/interfaces/afni/tests/test_auto_AFNICommandBase.py +++ b/nipype/interfaces/afni/tests/test_auto_AFNICommandBase.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import AFNICommandBase def test_AFNICommandBase_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/afni/tests/test_auto_AFNIPythonCommand.py b/nipype/interfaces/afni/tests/test_auto_AFNIPythonCommand.py index ba2411edfb..fd4682947b 100644 --- a/nipype/interfaces/afni/tests/test_auto_AFNIPythonCommand.py +++ b/nipype/interfaces/afni/tests/test_auto_AFNIPythonCommand.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import AFNIPythonCommand def test_AFNIPythonCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, @@ -15,9 +16,10 @@ def test_AFNIPythonCommand_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source=['in_file'], - name_template='%s_afni', + argstr="-prefix %s", + extensions=None, + name_source=["in_file"], + name_template="%s_afni", ), outputtype=dict(), ) diff --git a/nipype/interfaces/afni/tests/test_auto_AFNItoNIFTI.py b/nipype/interfaces/afni/tests/test_auto_AFNItoNIFTI.py index d89519d571..6983e839fb 100644 --- a/nipype/interfaces/afni/tests/test_auto_AFNItoNIFTI.py +++ b/nipype/interfaces/afni/tests/test_auto_AFNItoNIFTI.py @@ -1,50 +1,63 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import AFNItoNIFTI def test_AFNItoNIFTI_inputs(): input_map = dict( - args=dict(argstr='%s', ), - denote=dict(argstr='-denote', ), + args=dict( + argstr="%s", + ), + denote=dict( + argstr="-denote", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), newid=dict( - argstr='-newid', - xor=['oldid'], + argstr="-newid", + xor=["oldid"], ), num_threads=dict( nohash=True, usedefault=True, ), oldid=dict( - argstr='-oldid', - xor=['newid'], + argstr="-oldid", + xor=["newid"], ), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", + extensions=None, hash_files=False, - name_source='in_file', - name_template='%s.nii', + name_source="in_file", + name_template="%s.nii", ), outputtype=dict(), - pure=dict(argstr='-pure', ), + pure=dict( + 
argstr="-pure", + ), ) inputs = AFNItoNIFTI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AFNItoNIFTI_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = AFNItoNIFTI.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_AlignEpiAnatPy.py b/nipype/interfaces/afni/tests/test_auto_AlignEpiAnatPy.py index dfbfc648f7..a3b376f55b 100644 --- a/nipype/interfaces/afni/tests/test_auto_AlignEpiAnatPy.py +++ b/nipype/interfaces/afni/tests/test_auto_AlignEpiAnatPy.py @@ -1,45 +1,58 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import AlignEpiAnatPy def test_AlignEpiAnatPy_inputs(): input_map = dict( anat=dict( - argstr='-anat %s', + argstr="-anat %s", copyfile=False, + extensions=None, mandatory=True, ), - anat2epi=dict(argstr='-anat2epi', ), - args=dict(argstr='%s', ), + anat2epi=dict( + argstr="-anat2epi", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - epi2anat=dict(argstr='-epi2anat', ), + epi2anat=dict( + argstr="-epi2anat", + ), epi_base=dict( - argstr='-epi_base %s', + argstr="-epi_base %s", mandatory=True, ), - epi_strip=dict(argstr='-epi_strip %s', ), + epi_strip=dict( + argstr="-epi_strip %s", + ), in_file=dict( - argstr='-epi %s', + argstr="-epi %s", copyfile=False, + extensions=None, mandatory=True, ), outputtype=dict(), - py27_path=dict(usedefault=True, ), - save_skullstrip=dict(argstr='-save_skullstrip', ), + py27_path=dict( + usedefault=True, + ), + save_skullstrip=dict( + argstr="-save_skullstrip", + ), suffix=dict( - argstr='-suffix %s', + argstr="-suffix %s", usedefault=True, ), tshift=dict( - argstr='-tshift %s', + argstr="-tshift %s", usedefault=True, ), volreg=dict( - argstr='-volreg %s', + argstr="-volreg %s", usedefault=True, ), ) @@ -48,18 +61,40 @@ def test_AlignEpiAnatPy_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AlignEpiAnatPy_outputs(): output_map = dict( - anat_al_mat=dict(), - anat_al_orig=dict(), - epi_al_mat=dict(), - epi_al_orig=dict(), - epi_al_tlrc_mat=dict(), - epi_reg_al_mat=dict(), - epi_tlrc_al=dict(), - epi_vr_al_mat=dict(), - epi_vr_motion=dict(), - skullstrip=dict(), + anat_al_mat=dict( + extensions=None, + ), + anat_al_orig=dict( + extensions=None, + ), + epi_al_mat=dict( + extensions=None, + ), + epi_al_orig=dict( + extensions=None, + ), + epi_al_tlrc_mat=dict( + extensions=None, + ), + epi_reg_al_mat=dict( + extensions=None, + ), + epi_tlrc_al=dict( + extensions=None, + ), + epi_vr_al_mat=dict( + extensions=None, + ), + epi_vr_motion=dict( + extensions=None, + ), + skullstrip=dict( + extensions=None, + ), ) outputs = AlignEpiAnatPy.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Allineate.py b/nipype/interfaces/afni/tests/test_auto_Allineate.py index be02af14da..afe6c3f24d 100644 --- a/nipype/interfaces/afni/tests/test_auto_Allineate.py +++ b/nipype/interfaces/afni/tests/test_auto_Allineate.py @@ -1,119 +1,224 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Allineate def test_Allineate_inputs(): input_map = dict( allcostx=dict( - argstr='-allcostx |& tee %s', + argstr="-allcostx 
|& tee %s", + extensions=None, position=-1, - xor=[ - 'out_file', 'out_matrix', 'out_param_file', 'out_weight_file' - ], - ), - args=dict(argstr='%s', ), - autobox=dict(argstr='-autobox', ), - automask=dict(argstr='-automask+%d', ), - autoweight=dict(argstr='-autoweight%s', ), - center_of_mass=dict(argstr='-cmass%s', ), - check=dict(argstr='-check %s', ), - convergence=dict(argstr='-conv %f', ), - cost=dict(argstr='-cost %s', ), + xor=["out_file", "out_matrix", "out_param_file", "out_weight_file"], + ), + args=dict( + argstr="%s", + ), + autobox=dict( + argstr="-autobox", + ), + automask=dict( + argstr="-automask+%d", + ), + autoweight=dict( + argstr="-autoweight%s", + ), + center_of_mass=dict( + argstr="-cmass%s", + ), + check=dict( + argstr="-check %s", + ), + convergence=dict( + argstr="-conv %f", + ), + cost=dict( + argstr="-cost %s", + ), environ=dict( nohash=True, usedefault=True, ), - epi=dict(argstr='-EPI', ), - final_interpolation=dict(argstr='-final %s', ), - fine_blur=dict(argstr='-fineblur %f', ), + epi=dict( + argstr="-EPI", + ), + final_interpolation=dict( + argstr="-final %s", + ), + fine_blur=dict( + argstr="-fineblur %f", + ), in_file=dict( - argstr='-source %s', + argstr="-source %s", copyfile=False, + extensions=None, mandatory=True, ), in_matrix=dict( - argstr='-1Dmatrix_apply %s', + argstr="-1Dmatrix_apply %s", + extensions=None, position=-3, - xor=['out_matrix'], + xor=["out_matrix"], ), in_param_file=dict( - argstr='-1Dparam_apply %s', - xor=['out_param_file'], - ), - interpolation=dict(argstr='-interp %s', ), - master=dict(argstr='-master %s', ), - maxrot=dict(argstr='-maxrot %f', ), - maxscl=dict(argstr='-maxscl %f', ), - maxshf=dict(argstr='-maxshf %f', ), - maxshr=dict(argstr='-maxshr %f', ), - newgrid=dict(argstr='-newgrid %f', ), - nmatch=dict(argstr='-nmatch %d', ), - no_pad=dict(argstr='-nopad', ), - nomask=dict(argstr='-nomask', ), + argstr="-1Dparam_apply %s", + extensions=None, + xor=["out_param_file"], + ), + interpolation=dict( + argstr="-interp %s", + ), + master=dict( + argstr="-master %s", + extensions=None, + ), + maxrot=dict( + argstr="-maxrot %f", + ), + maxscl=dict( + argstr="-maxscl %f", + ), + maxshf=dict( + argstr="-maxshf %f", + ), + maxshr=dict( + argstr="-maxshr %f", + ), + newgrid=dict( + argstr="-newgrid %f", + ), + nmatch=dict( + argstr="-nmatch %d", + ), + no_pad=dict( + argstr="-nopad", + ), + nomask=dict( + argstr="-nomask", + ), num_threads=dict( nohash=True, usedefault=True, ), - nwarp=dict(argstr='-nwarp %s', ), - nwarp_fixdep=dict(argstr='-nwarp_fixdep%s...', ), - nwarp_fixmot=dict(argstr='-nwarp_fixmot%s...', ), - one_pass=dict(argstr='-onepass', ), + nwarp=dict( + argstr="-nwarp %s", + ), + nwarp_fixdep=dict( + argstr="-nwarp_fixdep%s...", + ), + nwarp_fixmot=dict( + argstr="-nwarp_fixmot%s...", + ), + one_pass=dict( + argstr="-onepass", + ), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", + extensions=None, hash_files=False, - name_source='in_file', - name_template='%s_allineate', - xor=['allcostx'], + name_source="in_file", + name_template="%s_allineate", + xor=["allcostx"], ), out_matrix=dict( - argstr='-1Dmatrix_save %s', - xor=['in_matrix', 'allcostx'], + argstr="-1Dmatrix_save %s", + extensions=None, + xor=["in_matrix", "allcostx"], ), out_param_file=dict( - argstr='-1Dparam_save %s', - xor=['in_param_file', 'allcostx'], + argstr="-1Dparam_save %s", + extensions=None, + xor=["in_param_file", "allcostx"], ), out_weight_file=dict( - argstr='-wtprefix %s', - xor=['allcostx'], + argstr="-wtprefix %s", + 
extensions=None, + xor=["allcostx"], ), outputtype=dict(), - overwrite=dict(argstr='-overwrite', ), - quiet=dict(argstr='-quiet', ), - reference=dict(argstr='-base %s', ), - replacebase=dict(argstr='-replacebase', ), - replacemeth=dict(argstr='-replacemeth %s', ), - source_automask=dict(argstr='-source_automask+%d', ), - source_mask=dict(argstr='-source_mask %s', ), - two_best=dict(argstr='-twobest %d', ), - two_blur=dict(argstr='-twoblur %f', ), - two_first=dict(argstr='-twofirst', ), - two_pass=dict(argstr='-twopass', ), - usetemp=dict(argstr='-usetemp', ), - verbose=dict(argstr='-verb', ), - warp_type=dict(argstr='-warp %s', ), - warpfreeze=dict(argstr='-warpfreeze', ), - weight=dict(argstr='-weight %s', ), + overwrite=dict( + argstr="-overwrite", + ), + quiet=dict( + argstr="-quiet", + ), + reference=dict( + argstr="-base %s", + extensions=None, + ), + replacebase=dict( + argstr="-replacebase", + ), + replacemeth=dict( + argstr="-replacemeth %s", + ), + source_automask=dict( + argstr="-source_automask+%d", + ), + source_mask=dict( + argstr="-source_mask %s", + extensions=None, + ), + two_best=dict( + argstr="-twobest %d", + ), + two_blur=dict( + argstr="-twoblur %f", + ), + two_first=dict( + argstr="-twofirst", + ), + two_pass=dict( + argstr="-twopass", + ), + usetemp=dict( + argstr="-usetemp", + ), + verbose=dict( + argstr="-verb", + ), + warp_type=dict( + argstr="-warp %s", + ), + warpfreeze=dict( + argstr="-warpfreeze", + ), + weight=dict( + argstr="-weight %s", + ), weight_file=dict( - argstr='-weight %s', - deprecated='1.0.0', - new_name='weight', + argstr="-weight %s", + deprecated="1.0.0", + extensions=None, + new_name="weight", + ), + zclip=dict( + argstr="-zclip", ), - zclip=dict(argstr='-zclip', ), ) inputs = Allineate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Allineate_outputs(): output_map = dict( - allcostx=dict(), - out_file=dict(), - out_matrix=dict(), - out_param_file=dict(), - out_weight_file=dict(), + allcostx=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + out_matrix=dict( + extensions=None, + ), + out_param_file=dict( + extensions=None, + ), + out_weight_file=dict( + extensions=None, + ), ) outputs = Allineate.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_AutoTLRC.py b/nipype/interfaces/afni/tests/test_auto_AutoTLRC.py index bb00b3b585..eebfc73b6b 100644 --- a/nipype/interfaces/afni/tests/test_auto_AutoTLRC.py +++ b/nipype/interfaces/afni/tests/test_auto_AutoTLRC.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import AutoTLRC def test_AutoTLRC_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), base=dict( - argstr='-base %s', + argstr="-base %s", mandatory=True, ), environ=dict( @@ -15,11 +16,14 @@ def test_AutoTLRC_inputs(): usedefault=True, ), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, + extensions=None, mandatory=True, ), - no_ss=dict(argstr='-no_ss', ), + no_ss=dict( + argstr="-no_ss", + ), outputtype=dict(), ) inputs = AutoTLRC.input_spec() @@ -27,8 +31,14 @@ def test_AutoTLRC_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AutoTLRC_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + 
out_file=dict( + extensions=None, + ), + ) outputs = AutoTLRC.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py b/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py index 424b7d25b1..14c59cba0c 100644 --- a/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py +++ b/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py @@ -1,50 +1,67 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import AutoTcorrelate def test_AutoTcorrelate_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - eta2=dict(argstr='-eta2', ), + eta2=dict( + argstr="-eta2", + ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - mask=dict(argstr='-mask %s', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), mask_only_targets=dict( - argstr='-mask_only_targets', - xor=['mask_source'], + argstr="-mask_only_targets", + xor=["mask_source"], ), mask_source=dict( - argstr='-mask_source %s', - xor=['mask_only_targets'], + argstr="-mask_source %s", + extensions=None, + xor=["mask_only_targets"], ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_similarity_matrix.1D', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_similarity_matrix.1D", ), outputtype=dict(), - polort=dict(argstr='-polort %d', ), + polort=dict( + argstr="-polort %d", + ), ) inputs = AutoTcorrelate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AutoTcorrelate_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = AutoTcorrelate.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Autobox.py b/nipype/interfaces/afni/tests/test_auto_Autobox.py index f158263c54..8a13b14742 100644 --- a/nipype/interfaces/afni/tests/test_auto_Autobox.py +++ b/nipype/interfaces/afni/tests/test_auto_Autobox.py @@ -1,41 +1,52 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Autobox def test_Autobox_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, + extensions=None, mandatory=True, ), - no_clustering=dict(argstr='-noclust', ), + no_clustering=dict( + argstr="-noclust", + ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_autobox', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_autobox", ), outputtype=dict(), - padding=dict(argstr='-npad %d', ), + padding=dict( + argstr="-npad %d", + ), ) inputs = Autobox.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Autobox_outputs(): output_map = dict( - out_file=dict(), + out_file=dict( + extensions=None, + ), x_max=dict(), x_min=dict(), y_max=dict(), diff --git 
a/nipype/interfaces/afni/tests/test_auto_Automask.py b/nipype/interfaces/afni/tests/test_auto_Automask.py index efffd19cba..1c2a3c4ee9 100644 --- a/nipype/interfaces/afni/tests/test_auto_Automask.py +++ b/nipype/interfaces/afni/tests/test_auto_Automask.py @@ -1,26 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Automask def test_Automask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), brain_file=dict( - argstr='-apply_prefix %s', - name_source='in_file', - name_template='%s_masked', + argstr="-apply_prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_masked", + ), + clfrac=dict( + argstr="-clfrac %s", + ), + dilate=dict( + argstr="-dilate %s", ), - clfrac=dict(argstr='-clfrac %s', ), - dilate=dict(argstr='-dilate %s', ), environ=dict( nohash=True, usedefault=True, ), - erode=dict(argstr='-erode %s', ), + erode=dict( + argstr="-erode %s", + ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), @@ -29,9 +38,10 @@ def test_Automask_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_mask', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_mask", ), outputtype=dict(), ) @@ -40,10 +50,16 @@ def test_Automask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Automask_outputs(): output_map = dict( - brain_file=dict(), - out_file=dict(), + brain_file=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), ) outputs = Automask.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Axialize.py b/nipype/interfaces/afni/tests/test_auto_Axialize.py index 62b425c932..bac640d601 100644 --- a/nipype/interfaces/afni/tests/test_auto_Axialize.py +++ b/nipype/interfaces/afni/tests/test_auto_Axialize.py @@ -1,26 +1,28 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Axialize def test_Axialize_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), axial=dict( - argstr='-axial', - xor=['coronal', 'sagittal'], + argstr="-axial", + xor=["coronal", "sagittal"], ), coronal=dict( - argstr='-coronal', - xor=['sagittal', 'axial'], + argstr="-coronal", + xor=["sagittal", "axial"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-2, ), @@ -28,26 +30,37 @@ def test_Axialize_inputs(): nohash=True, usedefault=True, ), - orientation=dict(argstr='-orient %s', ), + orientation=dict( + argstr="-orient %s", + ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_axialize', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_axialize", ), outputtype=dict(), sagittal=dict( - argstr='-sagittal', - xor=['coronal', 'axial'], + argstr="-sagittal", + xor=["coronal", "axial"], + ), + verb=dict( + argstr="-verb", ), - verb=dict(argstr='-verb', ), ) inputs = Axialize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Axialize_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + 
out_file=dict( + extensions=None, + ), + ) outputs = Axialize.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Bandpass.py b/nipype/interfaces/afni/tests/test_auto_Bandpass.py index fb0861a747..8ae9966240 100644 --- a/nipype/interfaces/afni/tests/test_auto_Bandpass.py +++ b/nipype/interfaces/afni/tests/test_auto_Bandpass.py @@ -1,66 +1,98 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Bandpass def test_Bandpass_inputs(): input_map = dict( - args=dict(argstr='%s', ), - automask=dict(argstr='-automask', ), - blur=dict(argstr='-blur %f', ), - despike=dict(argstr='-despike', ), + args=dict( + argstr="%s", + ), + automask=dict( + argstr="-automask", + ), + blur=dict( + argstr="-blur %f", + ), + despike=dict( + argstr="-despike", + ), environ=dict( nohash=True, usedefault=True, ), highpass=dict( - argstr='%f', + argstr="%f", mandatory=True, position=-3, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - localPV=dict(argstr='-localPV %f', ), + localPV=dict( + argstr="-localPV %f", + ), lowpass=dict( - argstr='%f', + argstr="%f", mandatory=True, position=-2, ), mask=dict( - argstr='-mask %s', + argstr="-mask %s", + extensions=None, position=2, ), - nfft=dict(argstr='-nfft %d', ), - no_detrend=dict(argstr='-nodetrend', ), - normalize=dict(argstr='-norm', ), - notrans=dict(argstr='-notrans', ), + nfft=dict( + argstr="-nfft %d", + ), + no_detrend=dict( + argstr="-nodetrend", + ), + normalize=dict( + argstr="-norm", + ), + notrans=dict( + argstr="-notrans", + ), num_threads=dict( nohash=True, usedefault=True, ), - orthogonalize_dset=dict(argstr='-dsort %s', ), - orthogonalize_file=dict(argstr='-ort %s', ), + orthogonalize_dset=dict( + argstr="-dsort %s", + extensions=None, + ), + orthogonalize_file=dict( + argstr="-ort %s", + ), out_file=dict( - argstr='-prefix %s', - genfile=True, - name_source='in_file', - name_template='%s_bp', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_bp", position=1, ), outputtype=dict(), - tr=dict(argstr='-dt %f', ), + tr=dict( + argstr="-dt %f", + ), ) inputs = Bandpass.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Bandpass_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Bandpass.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_BlurInMask.py b/nipype/interfaces/afni/tests/test_auto_BlurInMask.py index 334116d945..91114611dc 100644 --- a/nipype/interfaces/afni/tests/test_auto_BlurInMask.py +++ b/nipype/interfaces/afni/tests/test_auto_BlurInMask.py @@ -1,53 +1,74 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import BlurInMask def test_BlurInMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - automask=dict(argstr='-automask', ), + args=dict( + argstr="%s", + ), + automask=dict( + argstr="-automask", + ), environ=dict( nohash=True, usedefault=True, ), - float_out=dict(argstr='-float', ), + float_out=dict( + argstr="-float", + ), fwhm=dict( - argstr='-FWHM %f', + argstr="-FWHM %f", mandatory=True, ), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, + extensions=None, 
mandatory=True, position=1, ), - mask=dict(argstr='-mask %s', ), - multimask=dict(argstr='-Mmask %s', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), + multimask=dict( + argstr="-Mmask %s", + extensions=None, + ), num_threads=dict( nohash=True, usedefault=True, ), options=dict( - argstr='%s', + argstr="%s", position=2, ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_blur', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_blur", position=-1, ), outputtype=dict(), - preserve=dict(argstr='-preserve', ), + preserve=dict( + argstr="-preserve", + ), ) inputs = BlurInMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BlurInMask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = BlurInMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py b/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py index 2f88a1edcb..f164ae815e 100644 --- a/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py +++ b/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py @@ -1,32 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import BlurToFWHM def test_BlurToFWHM_inputs(): input_map = dict( - args=dict(argstr='%s', ), - automask=dict(argstr='-automask', ), - blurmaster=dict(argstr='-blurmaster %s', ), + args=dict( + argstr="%s", + ), + automask=dict( + argstr="-automask", + ), + blurmaster=dict( + argstr="-blurmaster %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), - fwhm=dict(argstr='-FWHM %f', ), - fwhmxy=dict(argstr='-FWHMxy %f', ), + fwhm=dict( + argstr="-FWHM %f", + ), + fwhmxy=dict( + argstr="-FWHMxy %f", + ), in_file=dict( - argstr='-input %s', + argstr="-input %s", + extensions=None, mandatory=True, ), - mask=dict(argstr='-mask %s', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source=['in_file'], - name_template='%s_afni', + argstr="-prefix %s", + extensions=None, + name_source=["in_file"], + name_template="%s_afni", ), outputtype=dict(), ) @@ -35,8 +50,14 @@ def test_BlurToFWHM_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BlurToFWHM_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = BlurToFWHM.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_BrickStat.py b/nipype/interfaces/afni/tests/test_auto_BrickStat.py index 0056df5980..a366953a5b 100644 --- a/nipype/interfaces/afni/tests/test_auto_BrickStat.py +++ b/nipype/interfaces/afni/tests/test_auto_BrickStat.py @@ -1,42 +1,61 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import BrickStat def test_BrickStat_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), mask=dict( - argstr='-mask %s', + 
argstr="-mask %s", + extensions=None, position=2, ), - max=dict(argstr='-max', ), - mean=dict(argstr='-mean', ), + max=dict( + argstr="-max", + ), + mean=dict( + argstr="-mean", + ), min=dict( - argstr='-min', + argstr="-min", position=1, ), - percentile=dict(argstr='-percentile %.3f %.3f %.3f', ), - slow=dict(argstr='-slow', ), - sum=dict(argstr='-sum', ), - var=dict(argstr='-var', ), + percentile=dict( + argstr="-percentile %.3f %.3f %.3f", + ), + slow=dict( + argstr="-slow", + ), + sum=dict( + argstr="-sum", + ), + var=dict( + argstr="-var", + ), ) inputs = BrickStat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BrickStat_outputs(): - output_map = dict(min_val=dict(), ) + output_map = dict( + min_val=dict(), + ) outputs = BrickStat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Bucket.py b/nipype/interfaces/afni/tests/test_auto_Bucket.py index c3faf87a09..34dbd18bc2 100644 --- a/nipype/interfaces/afni/tests/test_auto_Bucket.py +++ b/nipype/interfaces/afni/tests/test_auto_Bucket.py @@ -1,17 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Bucket def test_Bucket_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-1, ), @@ -20,8 +21,9 @@ def test_Bucket_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_template='buck', + argstr="-prefix %s", + extensions=None, + name_template="buck", ), outputtype=dict(), ) @@ -30,8 +32,14 @@ def test_Bucket_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Bucket_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Bucket.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Calc.py b/nipype/interfaces/afni/tests/test_auto_Calc.py index 28863d5a4b..dc50380317 100644 --- a/nipype/interfaces/afni/tests/test_auto_Calc.py +++ b/nipype/interfaces/afni/tests/test_auto_Calc.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Calc def test_Calc_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, @@ -16,41 +17,60 @@ def test_Calc_inputs(): position=3, ), in_file_a=dict( - argstr='-a %s', + argstr="-a %s", + extensions=None, mandatory=True, position=0, ), in_file_b=dict( - argstr='-b %s', + argstr="-b %s", + extensions=None, position=1, ), in_file_c=dict( - argstr='-c %s', + argstr="-c %s", + extensions=None, position=2, ), num_threads=dict( nohash=True, usedefault=True, ), - other=dict(argstr='', ), + other=dict( + argstr="", + extensions=None, + ), out_file=dict( - argstr='-prefix %s', - name_source='in_file_a', - name_template='%s_calc', + argstr="-prefix %s", + extensions=None, + name_source="in_file_a", + name_template="%s_calc", ), outputtype=dict(), - overwrite=dict(argstr='-overwrite', ), + overwrite=dict( + argstr="-overwrite", + ), single_idx=dict(), - start_idx=dict(requires=['stop_idx'], ), - 
stop_idx=dict(requires=['start_idx'], ), + start_idx=dict( + requires=["stop_idx"], + ), + stop_idx=dict( + requires=["start_idx"], + ), ) inputs = Calc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Calc_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Calc.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Cat.py b/nipype/interfaces/afni/tests/test_auto_Cat.py index 3da86c66d6..e5c76b34b1 100644 --- a/nipype/interfaces/afni/tests/test_auto_Cat.py +++ b/nipype/interfaces/afni/tests/test_auto_Cat.py @@ -1,73 +1,82 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Cat def test_Cat_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), - keepfree=dict(argstr='-nonfixed', ), + keepfree=dict( + argstr="-nonfixed", + ), num_threads=dict( nohash=True, usedefault=True, ), - omitconst=dict(argstr='-nonconst', ), + omitconst=dict( + argstr="-nonconst", + ), out_cint=dict( - xor=[ - 'out_format', 'out_nice', 'out_double', 'out_fint', 'out_int' - ], ), + xor=["out_format", "out_nice", "out_double", "out_fint", "out_int"], + ), out_double=dict( - argstr='-d', - xor=['out_format', 'out_nice', 'out_int', 'out_fint', 'out_cint'], + argstr="-d", + xor=["out_format", "out_nice", "out_int", "out_fint", "out_cint"], ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, mandatory=True, position=-1, usedefault=True, ), out_fint=dict( - argstr='-f', - xor=[ - 'out_format', 'out_nice', 'out_double', 'out_int', 'out_cint' - ], + argstr="-f", + xor=["out_format", "out_nice", "out_double", "out_int", "out_cint"], ), out_format=dict( - argstr='-form %s', - xor=['out_int', 'out_nice', 'out_double', 'out_fint', 'out_cint'], + argstr="-form %s", + xor=["out_int", "out_nice", "out_double", "out_fint", "out_cint"], ), out_int=dict( - argstr='-i', - xor=[ - 'out_format', 'out_nice', 'out_double', 'out_fint', 'out_cint' - ], + argstr="-i", + xor=["out_format", "out_nice", "out_double", "out_fint", "out_cint"], ), out_nice=dict( - argstr='-n', - xor=[ - 'out_format', 'out_int', 'out_double', 'out_fint', 'out_cint' - ], + argstr="-n", + xor=["out_format", "out_int", "out_double", "out_fint", "out_cint"], ), outputtype=dict(), - sel=dict(argstr='-sel %s', ), - stack=dict(argstr='-stack', ), + sel=dict( + argstr="-sel %s", + ), + stack=dict( + argstr="-stack", + ), ) inputs = Cat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Cat_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Cat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_CatMatvec.py b/nipype/interfaces/afni/tests/test_auto_CatMatvec.py index b67ab485d4..6b6c2630f6 100644 --- a/nipype/interfaces/afni/tests/test_auto_CatMatvec.py +++ b/nipype/interfaces/afni/tests/test_auto_CatMatvec.py @@ -1,42 +1,44 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from 
..utils import CatMatvec def test_CatMatvec_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fourxfour=dict( - argstr='-4x4', - xor=['matrix', 'oneline'], + argstr="-4x4", + xor=["matrix", "oneline"], ), in_file=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), matrix=dict( - argstr='-MATRIX', - xor=['oneline', 'fourxfour'], + argstr="-MATRIX", + xor=["oneline", "fourxfour"], ), num_threads=dict( nohash=True, usedefault=True, ), oneline=dict( - argstr='-ONELINE', - xor=['matrix', 'fourxfour'], + argstr="-ONELINE", + xor=["matrix", "fourxfour"], ), out_file=dict( - argstr=' > %s', + argstr=" > %s", + extensions=None, keep_extension=False, mandatory=True, - name_source='in_file', - name_template='%s_cat.aff12.1D', + name_source="in_file", + name_template="%s_cat.aff12.1D", position=-1, ), outputtype=dict(), @@ -46,8 +48,14 @@ def test_CatMatvec_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CatMatvec_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = CatMatvec.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_CenterMass.py b/nipype/interfaces/afni/tests/test_auto_CenterMass.py index 322218a50d..7ec95938b4 100644 --- a/nipype/interfaces/afni/tests/test_auto_CenterMass.py +++ b/nipype/interfaces/afni/tests/test_auto_CenterMass.py @@ -1,19 +1,25 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import CenterMass def test_CenterMass_inputs(): input_map = dict( - all_rois=dict(argstr='-all_rois', ), - args=dict(argstr='%s', ), - automask=dict(argstr='-automask', ), + all_rois=dict( + argstr="-all_rois", + ), + args=dict( + argstr="%s", + ), + automask=dict( + argstr="-automask", + ), cm_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, hash_files=False, keep_extension=False, - name_source='in_file', - name_template='%s_cm.out', + name_source="in_file", + name_template="%s_cm.out", position=-1, ), environ=dict( @@ -21,26 +27,42 @@ def test_CenterMass_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, mandatory=True, position=-2, ), - local_ijk=dict(argstr='-local_ijk', ), - mask_file=dict(argstr='-mask %s', ), - roi_vals=dict(argstr='-roi_vals %s', ), - set_cm=dict(argstr='-set %f %f %f', ), + local_ijk=dict( + argstr="-local_ijk", + ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), + roi_vals=dict( + argstr="-roi_vals %s", + ), + set_cm=dict( + argstr="-set %f %f %f", + ), ) inputs = CenterMass.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CenterMass_outputs(): output_map = dict( cm=dict(), - cm_file=dict(), - out_file=dict(), + cm_file=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), ) outputs = CenterMass.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_ClipLevel.py b/nipype/interfaces/afni/tests/test_auto_ClipLevel.py index 8b8c61208d..7a324fe7d4 100644 --- a/nipype/interfaces/afni/tests/test_auto_ClipLevel.py +++ b/nipype/interfaces/afni/tests/test_auto_ClipLevel.py @@ -1,32 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - 
DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ClipLevel def test_ClipLevel_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), doall=dict( - argstr='-doall', + argstr="-doall", position=3, - xor='grad', + xor="grad", ), environ=dict( nohash=True, usedefault=True, ), grad=dict( - argstr='-grad %s', + argstr="-grad %s", + extensions=None, position=3, - xor='doall', + xor="doall", ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), mfrac=dict( - argstr='-mfrac %s', + argstr="-mfrac %s", position=2, ), ) @@ -35,8 +38,12 @@ def test_ClipLevel_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ClipLevel_outputs(): - output_map = dict(clip_val=dict(), ) + output_map = dict( + clip_val=dict(), + ) outputs = ClipLevel.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_ConvertDset.py b/nipype/interfaces/afni/tests/test_auto_ConvertDset.py index 06ba3a54f2..226eac97b5 100644 --- a/nipype/interfaces/afni/tests/test_auto_ConvertDset.py +++ b/nipype/interfaces/afni/tests/test_auto_ConvertDset.py @@ -1,17 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ConvertDset def test_ConvertDset_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-input %s', + argstr="-input %s", + extensions=None, mandatory=True, position=-2, ), @@ -20,12 +22,13 @@ def test_ConvertDset_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", + extensions=None, mandatory=True, position=-1, ), out_type=dict( - argstr='-o_%s', + argstr="-o_%s", mandatory=True, position=0, ), @@ -36,8 +39,14 @@ def test_ConvertDset_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ConvertDset_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ConvertDset.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Copy.py b/nipype/interfaces/afni/tests/test_auto_Copy.py index a8e67d5607..e96592b184 100644 --- a/nipype/interfaces/afni/tests/test_auto_Copy.py +++ b/nipype/interfaces/afni/tests/test_auto_Copy.py @@ -1,18 +1,20 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Copy def test_Copy_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-2, ), @@ -21,21 +23,30 @@ def test_Copy_inputs(): usedefault=True, ), out_file=dict( - argstr='%s', - name_source='in_file', - name_template='%s_copy', + argstr="%s", + extensions=None, + name_source="in_file", + name_template="%s_copy", position=-1, ), outputtype=dict(), - verbose=dict(argstr='-verb', ), + verbose=dict( + argstr="-verb", + ), ) inputs = Copy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == 
+
+
 def test_Copy_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Copy.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Deconvolve.py b/nipype/interfaces/afni/tests/test_auto_Deconvolve.py
index ae42a77019..c4195807eb 100644
--- a/nipype/interfaces/afni/tests/test_auto_Deconvolve.py
+++ b/nipype/interfaces/afni/tests/test_auto_Deconvolve.py
@@ -1,115 +1,194 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..model import Deconvolve


 def test_Deconvolve_inputs():
     input_map = dict(
-        STATmask=dict(argstr='-STATmask %s', ),
-        TR_1D=dict(argstr='-TR_1D %f', ),
-        allzero_OK=dict(argstr='-allzero_OK', ),
-        args=dict(argstr='%s', ),
-        automask=dict(argstr='-automask', ),
-        cbucket=dict(argstr='-cbucket %s', ),
-        censor=dict(argstr='-censor %s', ),
-        dmbase=dict(argstr='-dmbase', ),
-        dname=dict(argstr='-D%s=%s', ),
+        STATmask=dict(
+            argstr="-STATmask %s",
+            extensions=None,
+        ),
+        TR_1D=dict(
+            argstr="-TR_1D %f",
+        ),
+        allzero_OK=dict(
+            argstr="-allzero_OK",
+        ),
+        args=dict(
+            argstr="%s",
+        ),
+        automask=dict(
+            argstr="-automask",
+        ),
+        cbucket=dict(
+            argstr="-cbucket %s",
+        ),
+        censor=dict(
+            argstr="-censor %s",
+            extensions=None,
+        ),
+        dmbase=dict(
+            argstr="-dmbase",
+        ),
+        dname=dict(
+            argstr="-D%s=%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         force_TR=dict(
-            argstr='-force_TR %f',
+            argstr="-force_TR %f",
             position=0,
         ),
-        fout=dict(argstr='-fout', ),
+        fout=dict(
+            argstr="-fout",
+        ),
         global_times=dict(
-            argstr='-global_times',
-            xor=['local_times'],
+            argstr="-global_times",
+            xor=["local_times"],
         ),
         glt_label=dict(
-            argstr='-glt_label %d %s...',
+            argstr="-glt_label %d %s...",
             position=-1,
-            requires=['gltsym'],
+            requires=["gltsym"],
         ),
         gltsym=dict(
             argstr="-gltsym 'SYM: %s'...",
             position=-2,
         ),
-        goforit=dict(argstr='-GOFORIT %i', ),
+        goforit=dict(
+            argstr="-GOFORIT %i",
+        ),
         in_files=dict(
-            argstr='-input %s',
+            argstr="-input %s",
             copyfile=False,
             position=1,
-            sep=' ',
+            sep=" ",
+        ),
+        input1D=dict(
+            argstr="-input1D %s",
+            extensions=None,
+        ),
+        legendre=dict(
+            argstr="-legendre",
         ),
-        input1D=dict(argstr='-input1D %s', ),
-        legendre=dict(argstr='-legendre', ),
         local_times=dict(
-            argstr='-local_times',
-            xor=['global_times'],
-        ),
-        mask=dict(argstr='-mask %s', ),
-        noblock=dict(argstr='-noblock', ),
-        nocond=dict(argstr='-nocond', ),
-        nodmbase=dict(argstr='-nodmbase', ),
-        nofdr=dict(argstr='-noFDR', ),
-        nolegendre=dict(argstr='-nolegendre', ),
-        nosvd=dict(argstr='-nosvd', ),
+            argstr="-local_times",
+            xor=["global_times"],
+        ),
+        mask=dict(
+            argstr="-mask %s",
+            extensions=None,
+        ),
+        noblock=dict(
+            argstr="-noblock",
+        ),
+        nocond=dict(
+            argstr="-nocond",
+        ),
+        nodmbase=dict(
+            argstr="-nodmbase",
+        ),
+        nofdr=dict(
+            argstr="-noFDR",
+        ),
+        nolegendre=dict(
+            argstr="-nolegendre",
+        ),
+        nosvd=dict(
+            argstr="-nosvd",
+        ),
         num_glt=dict(
-            argstr='-num_glt %d',
+            argstr="-num_glt %d",
             position=-3,
         ),
         num_stimts=dict(
-            argstr='-num_stimts %d',
+            argstr="-num_stimts %d",
             position=-6,
         ),
         num_threads=dict(
-            argstr='-jobs %d',
+            argstr="-jobs %d",
             nohash=True,
         ),
-        ortvec=dict(argstr='-ortvec %s %s', ),
-        out_file=dict(argstr='-bucket %s', ),
+        ortvec=dict(
+            argstr="-ortvec %s %s",
+        ),
+        out_file=dict(
+            argstr="-bucket %s",
+            extensions=None,
+        ),
         outputtype=dict(),
-        polort=dict(argstr='-polort %d', ),
-        rmsmin=dict(argstr='-rmsmin %f', ),
-        rout=dict(argstr='-rout', ),
+        polort=dict(
+            argstr="-polort %d",
+        ),
+        rmsmin=dict(
+            argstr="-rmsmin %f",
+        ),
+        rout=dict(
+            argstr="-rout",
+        ),
         sat=dict(
-            argstr='-sat',
-            xor=['trans'],
+            argstr="-sat",
+            xor=["trans"],
+        ),
+        singvals=dict(
+            argstr="-singvals",
         ),
-        singvals=dict(argstr='-singvals', ),
         stim_label=dict(
-            argstr='-stim_label %d %s...',
+            argstr="-stim_label %d %s...",
             position=-4,
-            requires=['stim_times'],
+            requires=["stim_times"],
         ),
         stim_times=dict(
             argstr="-stim_times %d %s '%s'...",
             position=-5,
         ),
-        stim_times_subtract=dict(argstr='-stim_times_subtract %f', ),
-        svd=dict(argstr='-svd', ),
-        tout=dict(argstr='-tout', ),
+        stim_times_subtract=dict(
+            argstr="-stim_times_subtract %f",
+        ),
+        svd=dict(
+            argstr="-svd",
+        ),
+        tout=dict(
+            argstr="-tout",
+        ),
         trans=dict(
-            argstr='-trans',
-            xor=['sat'],
+            argstr="-trans",
+            xor=["sat"],
+        ),
+        vout=dict(
+            argstr="-vout",
+        ),
+        x1D=dict(
+            argstr="-x1D %s",
+            extensions=None,
+        ),
+        x1D_stop=dict(
+            argstr="-x1D_stop",
         ),
-        vout=dict(argstr='-vout', ),
-        x1D=dict(argstr='-x1D %s', ),
-        x1D_stop=dict(argstr='-x1D_stop', ),
     )
     inputs = Deconvolve.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Deconvolve_outputs():
     output_map = dict(
-        cbucket=dict(),
-        out_file=dict(),
-        reml_script=dict(),
-        x1D=dict(),
+        cbucket=dict(
+            extensions=None,
+        ),
+        out_file=dict(
+            extensions=None,
+        ),
+        reml_script=dict(
+            extensions=None,
+        ),
+        x1D=dict(
+            extensions=None,
+        ),
     )
     outputs = Deconvolve.output_spec()
diff --git a/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py b/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py
index 664cca5985..afbc5a7d4f 100644
--- a/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py
+++ b/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py
@@ -1,48 +1,72 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import DegreeCentrality


 def test_DegreeCentrality_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        autoclip=dict(argstr='-autoclip', ),
-        automask=dict(argstr='-automask', ),
+        args=dict(
+            argstr="%s",
+        ),
+        autoclip=dict(
+            argstr="-autoclip",
+        ),
+        automask=dict(
+            argstr="-automask",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=False,
+            extensions=None,
             mandatory=True,
             position=-1,
         ),
-        mask=dict(argstr='-mask %s', ),
+        mask=dict(
+            argstr="-mask %s",
+            extensions=None,
+        ),
         num_threads=dict(
             nohash=True,
             usedefault=True,
         ),
-        oned_file=dict(argstr='-out1D %s', ),
+        oned_file=dict(
+            argstr="-out1D %s",
+        ),
         out_file=dict(
-            argstr='-prefix %s',
-            name_source=['in_file'],
-            name_template='%s_afni',
+            argstr="-prefix %s",
+            extensions=None,
+            name_source=["in_file"],
+            name_template="%s_afni",
         ),
         outputtype=dict(),
-        polort=dict(argstr='-polort %d', ),
-        sparsity=dict(argstr='-sparsity %f', ),
-        thresh=dict(argstr='-thresh %f', ),
+        polort=dict(
+            argstr="-polort %d",
+        ),
+        sparsity=dict(
+            argstr="-sparsity %f",
+        ),
+        thresh=dict(
+            argstr="-thresh %f",
+        ),
     )
     inputs = DegreeCentrality.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_DegreeCentrality_outputs():
     output_map = dict(
-        oned_file=dict(),
-        out_file=dict(),
+        oned_file=dict(
+            extensions=None,
+        ),
+        out_file=dict(
+            extensions=None,
+        ),
     )
     outputs = DegreeCentrality.output_spec()
diff --git a/nipype/interfaces/afni/tests/test_auto_Despike.py b/nipype/interfaces/afni/tests/test_auto_Despike.py
index ff0b8b532a..8835dd7e07 100644
--- a/nipype/interfaces/afni/tests/test_auto_Despike.py
+++ b/nipype/interfaces/afni/tests/test_auto_Despike.py
@@ -1,18 +1,20 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import Despike


 def test_Despike_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=False,
+            extensions=None,
             mandatory=True,
             position=-1,
         ),
@@ -21,9 +23,10 @@
             usedefault=True,
         ),
         out_file=dict(
-            argstr='-prefix %s',
-            name_source='in_file',
-            name_template='%s_despike',
+            argstr="-prefix %s",
+            extensions=None,
+            name_source="in_file",
+            name_template="%s_despike",
         ),
         outputtype=dict(),
     )
@@ -32,8 +35,14 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Despike_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Despike.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Detrend.py b/nipype/interfaces/afni/tests/test_auto_Detrend.py
index 1938529cf7..5edbdd74ee 100644
--- a/nipype/interfaces/afni/tests/test_auto_Detrend.py
+++ b/nipype/interfaces/afni/tests/test_auto_Detrend.py
@@ -1,18 +1,20 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import Detrend


 def test_Detrend_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=False,
+            extensions=None,
             mandatory=True,
             position=-1,
         ),
@@ -21,9 +23,10 @@
             usedefault=True,
         ),
         out_file=dict(
-            argstr='-prefix %s',
-            name_source='in_file',
-            name_template='%s_detrend',
+            argstr="-prefix %s",
+            extensions=None,
+            name_source="in_file",
+            name_template="%s_detrend",
         ),
         outputtype=dict(),
     )
@@ -32,8 +35,14 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Detrend_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Detrend.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Dot.py b/nipype/interfaces/afni/tests/test_auto_Dot.py
index 7623e90ca7..9cf8083ab9 100644
--- a/nipype/interfaces/afni/tests/test_auto_Dot.py
+++ b/nipype/interfaces/afni/tests/test_auto_Dot.py
@@ -1,48 +1,81 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import Dot


 def test_Dot_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        demean=dict(argstr='-demean', ),
-        docoef=dict(argstr='-docoef', ),
-        docor=dict(argstr='-docor', ),
-        dodice=dict(argstr='-dodice', ),
-        dodot=dict(argstr='-dodot', ),
-        doeta2=dict(argstr='-doeta2', ),
-        dosums=dict(argstr='-dosums', ),
+        args=dict(
+            argstr="%s",
+        ),
+        demean=dict(
+            argstr="-demean",
argstr="-demean", + ), + docoef=dict( + argstr="-docoef", + ), + docor=dict( + argstr="-docor", + ), + dodice=dict( + argstr="-dodice", + ), + dodot=dict( + argstr="-dodot", + ), + doeta2=dict( + argstr="-doeta2", + ), + dosums=dict( + argstr="-dosums", + ), environ=dict( nohash=True, usedefault=True, ), - full=dict(argstr='-full', ), + full=dict( + argstr="-full", + ), in_files=dict( - argstr='%s ...', + argstr="%s ...", position=-2, ), - mask=dict(argstr='-mask %s', ), - mrange=dict(argstr='-mrange %s %s', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), + mrange=dict( + argstr="-mrange %s %s", + ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr=' |& tee %s', + argstr=" |& tee %s", + extensions=None, position=-1, ), outputtype=dict(), - show_labels=dict(argstr='-show_labels', ), - upper=dict(argstr='-upper', ), + show_labels=dict( + argstr="-show_labels", + ), + upper=dict( + argstr="-upper", + ), ) inputs = Dot.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dot_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Dot.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_ECM.py b/nipype/interfaces/afni/tests/test_auto_ECM.py index 8a4793fb7f..030aaffe6a 100644 --- a/nipype/interfaces/afni/tests/test_auto_ECM.py +++ b/nipype/interfaces/afni/tests/test_auto_ECM.py @@ -1,52 +1,88 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ECM def test_ECM_inputs(): input_map = dict( - args=dict(argstr='%s', ), - autoclip=dict(argstr='-autoclip', ), - automask=dict(argstr='-automask', ), + args=dict( + argstr="%s", + ), + autoclip=dict( + argstr="-autoclip", + ), + automask=dict( + argstr="-automask", + ), environ=dict( nohash=True, usedefault=True, ), - eps=dict(argstr='-eps %f', ), - fecm=dict(argstr='-fecm', ), - full=dict(argstr='-full', ), + eps=dict( + argstr="-eps %f", + ), + fecm=dict( + argstr="-fecm", + ), + full=dict( + argstr="-full", + ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - mask=dict(argstr='-mask %s', ), - max_iter=dict(argstr='-max_iter %d', ), - memory=dict(argstr='-memory %f', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), + max_iter=dict( + argstr="-max_iter %d", + ), + memory=dict( + argstr="-memory %f", + ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source=['in_file'], - name_template='%s_afni', + argstr="-prefix %s", + extensions=None, + name_source=["in_file"], + name_template="%s_afni", ), outputtype=dict(), - polort=dict(argstr='-polort %d', ), - scale=dict(argstr='-scale %f', ), - shift=dict(argstr='-shift %f', ), - sparsity=dict(argstr='-sparsity %f', ), - thresh=dict(argstr='-thresh %f', ), + polort=dict( + argstr="-polort %d", + ), + scale=dict( + argstr="-scale %f", + ), + shift=dict( + argstr="-shift %f", + ), + sparsity=dict( + argstr="-sparsity %f", + ), + thresh=dict( + argstr="-thresh %f", + ), ) inputs = ECM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ECM_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + 
+            extensions=None,
+        ),
+    )
     outputs = ECM.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Edge3.py b/nipype/interfaces/afni/tests/test_auto_Edge3.py
index 8fc6953c28..45b49fd243 100644
--- a/nipype/interfaces/afni/tests/test_auto_Edge3.py
+++ b/nipype/interfaces/afni/tests/test_auto_Edge3.py
@@ -1,56 +1,69 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import Edge3


 def test_Edge3_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        datum=dict(argstr='-datum %s', ),
+        args=dict(
+            argstr="%s",
+        ),
+        datum=dict(
+            argstr="-datum %s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         fscale=dict(
-            argstr='-fscale',
-            xor=['gscale', 'nscale', 'scale_floats'],
+            argstr="-fscale",
+            xor=["gscale", "nscale", "scale_floats"],
         ),
         gscale=dict(
-            argstr='-gscale',
-            xor=['fscale', 'nscale', 'scale_floats'],
+            argstr="-gscale",
+            xor=["fscale", "nscale", "scale_floats"],
         ),
         in_file=dict(
-            argstr='-input %s',
+            argstr="-input %s",
             copyfile=False,
+            extensions=None,
             mandatory=True,
             position=0,
         ),
         nscale=dict(
-            argstr='-nscale',
-            xor=['fscale', 'gscale', 'scale_floats'],
+            argstr="-nscale",
+            xor=["fscale", "gscale", "scale_floats"],
         ),
         num_threads=dict(
             nohash=True,
             usedefault=True,
         ),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
+            extensions=None,
             position=-1,
         ),
         outputtype=dict(),
         scale_floats=dict(
-            argstr='-scale_floats %f',
-            xor=['fscale', 'gscale', 'nscale'],
+            argstr="-scale_floats %f",
+            xor=["fscale", "gscale", "nscale"],
+        ),
+        verbose=dict(
+            argstr="-verbose",
         ),
-        verbose=dict(argstr='-verbose', ),
     )
     inputs = Edge3.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Edge3_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Edge3.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Eval.py b/nipype/interfaces/afni/tests/test_auto_Eval.py
index 35c1360a6e..748bf05dfd 100644
--- a/nipype/interfaces/afni/tests/test_auto_Eval.py
+++ b/nipype/interfaces/afni/tests/test_auto_Eval.py
@@ -1,11 +1,12 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import Eval


 def test_Eval_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
@@ -16,41 +17,60 @@
             position=3,
         ),
         in_file_a=dict(
-            argstr='-a %s',
+            argstr="-a %s",
+            extensions=None,
             mandatory=True,
             position=0,
         ),
         in_file_b=dict(
-            argstr='-b %s',
+            argstr="-b %s",
+            extensions=None,
             position=1,
         ),
         in_file_c=dict(
-            argstr='-c %s',
+            argstr="-c %s",
+            extensions=None,
             position=2,
         ),
         num_threads=dict(
             nohash=True,
             usedefault=True,
         ),
-        other=dict(argstr='', ),
-        out1D=dict(argstr='-1D', ),
+        other=dict(
+            argstr="",
+            extensions=None,
+        ),
+        out1D=dict(
+            argstr="-1D",
+        ),
         out_file=dict(
-            argstr='-prefix %s',
-            name_source='in_file_a',
-            name_template='%s_calc',
+            argstr="-prefix %s",
+            extensions=None,
+            name_source="in_file_a",
+            name_template="%s_calc",
         ),
         outputtype=dict(),
         single_idx=dict(),
-        start_idx=dict(requires=['stop_idx'], ),
-        stop_idx=dict(requires=['start_idx'], ),
+        start_idx=dict(
+            requires=["stop_idx"],
+        ),
+        stop_idx=dict(
+            requires=["start_idx"],
+        ),
     )
     inputs = Eval.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Eval_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Eval.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_FWHMx.py b/nipype/interfaces/afni/tests/test_auto_FWHMx.py
index 14ab26fa4f..06151c569a 100644
--- a/nipype/interfaces/afni/tests/test_auto_FWHMx.py
+++ b/nipype/interfaces/afni/tests/test_auto_FWHMx.py
@@ -1,81 +1,105 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import FWHMx


 def test_FWHMx_inputs():
     input_map = dict(
         acf=dict(
-            argstr='-acf',
+            argstr="-acf",
             usedefault=True,
         ),
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         arith=dict(
-            argstr='-arith',
-            xor=['geom'],
+            argstr="-arith",
+            xor=["geom"],
         ),
         automask=dict(
-            argstr='-automask',
+            argstr="-automask",
             usedefault=True,
         ),
-        combine=dict(argstr='-combine', ),
-        compat=dict(argstr='-compat', ),
+        combine=dict(
+            argstr="-combine",
+        ),
+        compat=dict(
+            argstr="-compat",
+        ),
         demed=dict(
-            argstr='-demed',
-            xor=['detrend'],
+            argstr="-demed",
+            xor=["detrend"],
         ),
         detrend=dict(
-            argstr='-detrend',
+            argstr="-detrend",
             usedefault=True,
-            xor=['demed'],
+            xor=["demed"],
         ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         geom=dict(
-            argstr='-geom',
-            xor=['arith'],
+            argstr="-geom",
+            xor=["arith"],
         ),
         in_file=dict(
-            argstr='-input %s',
+            argstr="-input %s",
+            extensions=None,
             mandatory=True,
         ),
-        mask=dict(argstr='-mask %s', ),
+        mask=dict(
+            argstr="-mask %s",
+            extensions=None,
+        ),
         out_detrend=dict(
-            argstr='-detprefix %s',
+            argstr="-detprefix %s",
+            extensions=None,
             keep_extension=False,
-            name_source='in_file',
-            name_template='%s_detrend',
+            name_source="in_file",
+            name_template="%s_detrend",
         ),
         out_file=dict(
-            argstr='> %s',
+            argstr="> %s",
+            extensions=None,
             keep_extension=False,
-            name_source='in_file',
-            name_template='%s_fwhmx.out',
+            name_source="in_file",
+            name_template="%s_fwhmx.out",
             position=-1,
         ),
         out_subbricks=dict(
-            argstr='-out %s',
+            argstr="-out %s",
+            extensions=None,
             keep_extension=False,
-            name_source='in_file',
-            name_template='%s_subbricks.out',
+            name_source="in_file",
+            name_template="%s_subbricks.out",
+        ),
+        unif=dict(
+            argstr="-unif",
        ),
-        unif=dict(argstr='-unif', ),
    )
     inputs = FWHMx.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_FWHMx_outputs():
     output_map = dict(
         acf_param=dict(),
         fwhm=dict(),
-        out_acf=dict(),
-        out_detrend=dict(),
-        out_file=dict(),
-        out_subbricks=dict(),
+        out_acf=dict(
+            extensions=None,
+        ),
+        out_detrend=dict(
+            extensions=None,
+        ),
+        out_file=dict(
+            extensions=None,
+        ),
+        out_subbricks=dict(
+            extensions=None,
+        ),
     )
     outputs = FWHMx.output_spec()
diff --git a/nipype/interfaces/afni/tests/test_auto_Fim.py b/nipype/interfaces/afni/tests/test_auto_Fim.py
index 931e5ff244..aea43391bc 100644
--- a/nipype/interfaces/afni/tests/test_auto_Fim.py
+++ b/nipype/interfaces/afni/tests/test_auto_Fim.py
@@ -1,27 +1,30 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import Fim


 def test_Fim_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fim_thr=dict( - argstr='-fim_thr %f', + argstr="-fim_thr %f", position=3, ), ideal_file=dict( - argstr='-ideal_file %s', + argstr="-ideal_file %s", + extensions=None, mandatory=True, position=2, ), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, + extensions=None, mandatory=True, position=1, ), @@ -30,13 +33,14 @@ def test_Fim_inputs(): usedefault=True, ), out=dict( - argstr='-out %s', + argstr="-out %s", position=4, ), out_file=dict( - argstr='-bucket %s', - name_source='in_file', - name_template='%s_fim', + argstr="-bucket %s", + extensions=None, + name_source="in_file", + name_template="%s_fim", ), outputtype=dict(), ) @@ -45,8 +49,14 @@ def test_Fim_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Fim_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Fim.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Fourier.py b/nipype/interfaces/afni/tests/test_auto_Fourier.py index 69cd955cbc..97764a2b9b 100644 --- a/nipype/interfaces/afni/tests/test_auto_Fourier.py +++ b/nipype/interfaces/afni/tests/test_auto_Fourier.py @@ -1,27 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Fourier def test_Fourier_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), highpass=dict( - argstr='-highpass %f', + argstr="-highpass %f", mandatory=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), lowpass=dict( - argstr='-lowpass %f', + argstr="-lowpass %f", mandatory=True, ), num_threads=dict( @@ -29,20 +31,29 @@ def test_Fourier_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_fourier', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_fourier", ), outputtype=dict(), - retrend=dict(argstr='-retrend', ), + retrend=dict( + argstr="-retrend", + ), ) inputs = Fourier.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Fourier_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Fourier.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_GCOR.py b/nipype/interfaces/afni/tests/test_auto_GCOR.py index 9d603dda36..2e5f0f372d 100644 --- a/nipype/interfaces/afni/tests/test_auto_GCOR.py +++ b/nipype/interfaces/afni/tests/test_auto_GCOR.py @@ -1,35 +1,46 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import GCOR def test_GCOR_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), mask=dict( - argstr='-mask %s', + argstr="-mask %s", copyfile=False, + extensions=None, + ), + nfirst=dict( + argstr="-nfirst %d", + ), + no_demean=dict( + 
argstr="-no_demean", ), - nfirst=dict(argstr='-nfirst %d', ), - no_demean=dict(argstr='-no_demean', ), ) inputs = GCOR.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GCOR_outputs(): - output_map = dict(out=dict(), ) + output_map = dict( + out=dict(), + ) outputs = GCOR.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Hist.py b/nipype/interfaces/afni/tests/test_auto_Hist.py index 48499a9605..2263f3632b 100644 --- a/nipype/interfaces/afni/tests/test_auto_Hist.py +++ b/nipype/interfaces/afni/tests/test_auto_Hist.py @@ -1,41 +1,56 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Hist def test_Hist_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bin_width=dict(argstr='-binwidth %f', ), + args=dict( + argstr="%s", + ), + bin_width=dict( + argstr="-binwidth %f", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, + extensions=None, mandatory=True, position=1, ), - mask=dict(argstr='-mask %s', ), - max_value=dict(argstr='-max %f', ), - min_value=dict(argstr='-min %f', ), - nbin=dict(argstr='-nbin %d', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), + max_value=dict( + argstr="-max %f", + ), + min_value=dict( + argstr="-min %f", + ), + nbin=dict( + argstr="-nbin %d", + ), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", + extensions=None, keep_extension=False, - name_source=['in_file'], - name_template='%s_hist', + name_source=["in_file"], + name_template="%s_hist", ), out_show=dict( - argstr='> %s', + argstr="> %s", + extensions=None, keep_extension=False, - name_source='in_file', - name_template='%s_hist.out', + name_source="in_file", + name_template="%s_hist.out", position=-1, ), showhist=dict( - argstr='-showhist', + argstr="-showhist", usedefault=True, ), ) @@ -44,10 +59,16 @@ def test_Hist_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Hist_outputs(): output_map = dict( - out_file=dict(), - out_show=dict(), + out_file=dict( + extensions=None, + ), + out_show=dict( + extensions=None, + ), ) outputs = Hist.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_LFCD.py b/nipype/interfaces/afni/tests/test_auto_LFCD.py index 9cbde10b56..bd4b76baee 100644 --- a/nipype/interfaces/afni/tests/test_auto_LFCD.py +++ b/nipype/interfaces/afni/tests/test_auto_LFCD.py @@ -1,44 +1,64 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import LFCD def test_LFCD_inputs(): input_map = dict( - args=dict(argstr='%s', ), - autoclip=dict(argstr='-autoclip', ), - automask=dict(argstr='-automask', ), + args=dict( + argstr="%s", + ), + autoclip=dict( + argstr="-autoclip", + ), + automask=dict( + argstr="-automask", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - mask=dict(argstr='-mask %s', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source=['in_file'], - name_template='%s_afni', + argstr="-prefix %s", + extensions=None, + 
name_source=["in_file"], + name_template="%s_afni", ), outputtype=dict(), - polort=dict(argstr='-polort %d', ), - thresh=dict(argstr='-thresh %f', ), + polort=dict( + argstr="-polort %d", + ), + thresh=dict( + argstr="-thresh %f", + ), ) inputs = LFCD.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LFCD_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = LFCD.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_LocalBistat.py b/nipype/interfaces/afni/tests/test_auto_LocalBistat.py index 22f0b717d8..2ffe29dda0 100644 --- a/nipype/interfaces/afni/tests/test_auto_LocalBistat.py +++ b/nipype/interfaces/afni/tests/test_auto_LocalBistat.py @@ -1,30 +1,36 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import LocalBistat def test_LocalBistat_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), automask=dict( - argstr='-automask', - xor=['weight_file'], + argstr="-automask", + xor=["weight_file"], ), environ=dict( nohash=True, usedefault=True, ), in_file1=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), in_file2=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), - mask_file=dict(argstr='-mask %s', ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), neighborhood=dict( argstr="-nbhd '%s(%s)'", mandatory=True, @@ -34,20 +40,22 @@ def test_LocalBistat_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", + extensions=None, keep_extension=True, - name_source='in_file1', - name_template='%s_bistat', + name_source="in_file1", + name_template="%s_bistat", position=0, ), outputtype=dict(), stat=dict( - argstr='-stat %s...', + argstr="-stat %s...", mandatory=True, ), weight_file=dict( - argstr='-weight %s', - xor=['automask'], + argstr="-weight %s", + extensions=None, + xor=["automask"], ), ) inputs = LocalBistat.input_spec() @@ -55,8 +63,14 @@ def test_LocalBistat_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LocalBistat_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = LocalBistat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Localstat.py b/nipype/interfaces/afni/tests/test_auto_Localstat.py index c2f53c3db0..54c99b434d 100644 --- a/nipype/interfaces/afni/tests/test_auto_Localstat.py +++ b/nipype/interfaces/afni/tests/test_auto_Localstat.py @@ -1,59 +1,73 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Localstat def test_Localstat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - automask=dict(argstr='-automask', ), + args=dict( + argstr="%s", + ), + automask=dict( + argstr="-automask", + ), environ=dict( nohash=True, usedefault=True, ), grid_rmode=dict( - argstr='-grid_rmode %s', - requires=['reduce_restore_grid'], + argstr="-grid_rmode %s", + requires=["reduce_restore_grid"], ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), - 
+        mask_file=dict(
+            argstr="-mask %s",
+            extensions=None,
+        ),
         neighborhood=dict(
             argstr="-nbhd '%s(%s)'",
             mandatory=True,
         ),
-        nonmask=dict(argstr='-use_nonmask', ),
+        nonmask=dict(
+            argstr="-use_nonmask",
+        ),
         num_threads=dict(
             nohash=True,
             usedefault=True,
         ),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
+            extensions=None,
             keep_extension=True,
-            name_source='in_file',
-            name_template='%s_localstat',
+            name_source="in_file",
+            name_template="%s_localstat",
             position=0,
         ),
         outputtype=dict(),
-        overwrite=dict(argstr='-overwrite', ),
-        quiet=dict(argstr='-quiet', ),
+        overwrite=dict(
+            argstr="-overwrite",
+        ),
+        quiet=dict(
+            argstr="-quiet",
+        ),
         reduce_grid=dict(
-            argstr='-reduce_grid %s',
-            xor=['reduce_restore_grid', 'reduce_max_vox'],
+            argstr="-reduce_grid %s",
+            xor=["reduce_restore_grid", "reduce_max_vox"],
         ),
         reduce_max_vox=dict(
-            argstr='-reduce_max_vox %s',
-            xor=['reduce_restore_grid', 'reduce_grid'],
+            argstr="-reduce_max_vox %s",
+            xor=["reduce_restore_grid", "reduce_grid"],
         ),
         reduce_restore_grid=dict(
-            argstr='-reduce_restore_grid %s',
-            xor=['reduce_max_vox', 'reduce_grid'],
+            argstr="-reduce_restore_grid %s",
+            xor=["reduce_max_vox", "reduce_grid"],
         ),
         stat=dict(
-            argstr='-stat %s...',
+            argstr="-stat %s...",
             mandatory=True,
         ),
     )
@@ -62,8 +76,14 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Localstat_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Localstat.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_MaskTool.py b/nipype/interfaces/afni/tests/test_auto_MaskTool.py
index 1644bee878..a0520df606 100644
--- a/nipype/interfaces/afni/tests/test_auto_MaskTool.py
+++ b/nipype/interfaces/afni/tests/test_auto_MaskTool.py
@@ -1,55 +1,79 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import MaskTool


 def test_MaskTool_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         count=dict(
-            argstr='-count',
+            argstr="-count",
             position=2,
         ),
-        datum=dict(argstr='-datum %s', ),
-        dilate_inputs=dict(argstr='-dilate_inputs %s', ),
-        dilate_results=dict(argstr='-dilate_results %s', ),
+        datum=dict(
+            argstr="-datum %s",
+        ),
+        dilate_inputs=dict(
+            argstr="-dilate_inputs %s",
+        ),
+        dilate_results=dict(
+            argstr="-dilate_results %s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         fill_dirs=dict(
-            argstr='-fill_dirs %s',
-            requires=['fill_holes'],
+            argstr="-fill_dirs %s",
+            requires=["fill_holes"],
+        ),
+        fill_holes=dict(
+            argstr="-fill_holes",
+        ),
+        frac=dict(
+            argstr="-frac %s",
         ),
-        fill_holes=dict(argstr='-fill_holes', ),
-        frac=dict(argstr='-frac %s', ),
         in_file=dict(
-            argstr='-input %s',
+            argstr="-input %s",
             copyfile=False,
             mandatory=True,
             position=-1,
         ),
-        inter=dict(argstr='-inter', ),
+        inter=dict(
+            argstr="-inter",
+        ),
         num_threads=dict(
             nohash=True,
             usedefault=True,
         ),
         out_file=dict(
-            argstr='-prefix %s',
-            name_source='in_file',
-            name_template='%s_mask',
+            argstr="-prefix %s",
+            extensions=None,
+            name_source="in_file",
+            name_template="%s_mask",
         ),
         outputtype=dict(),
-        union=dict(argstr='-union', ),
-        verbose=dict(argstr='-verb %s', ),
+        union=dict(
+            argstr="-union",
+        ),
+        verbose=dict(
+            argstr="-verb %s",
+        ),
     )
     inputs = MaskTool.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MaskTool_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = MaskTool.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Maskave.py b/nipype/interfaces/afni/tests/test_auto_Maskave.py
index a318e685a9..ce7a07c95e 100644
--- a/nipype/interfaces/afni/tests/test_auto_Maskave.py
+++ b/nipype/interfaces/afni/tests/test_auto_Maskave.py
@@ -1,23 +1,26 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import Maskave


 def test_Maskave_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=False,
+            extensions=None,
             mandatory=True,
             position=-2,
         ),
         mask=dict(
-            argstr='-mask %s',
+            argstr="-mask %s",
+            extensions=None,
             position=1,
         ),
         num_threads=dict(
@@ -25,15 +28,16 @@
             usedefault=True,
         ),
         out_file=dict(
-            argstr='> %s',
+            argstr="> %s",
+            extensions=None,
             keep_extension=True,
-            name_source='in_file',
-            name_template='%s_maskave.1D',
+            name_source="in_file",
+            name_template="%s_maskave.1D",
             position=-1,
         ),
         outputtype=dict(),
         quiet=dict(
-            argstr='-quiet',
+            argstr="-quiet",
             position=2,
         ),
     )
@@ -42,8 +46,14 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Maskave_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Maskave.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Means.py b/nipype/interfaces/afni/tests/test_auto_Means.py
index 2e422e68db..3aa3ada375 100644
--- a/nipype/interfaces/afni/tests/test_auto_Means.py
+++ b/nipype/interfaces/afni/tests/test_auto_Means.py
@@ -1,51 +1,79 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import Means


 def test_Means_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        count=dict(argstr='-count', ),
-        datum=dict(argstr='-datum %s', ),
+        args=dict(
+            argstr="%s",
+        ),
+        count=dict(
+            argstr="-count",
+        ),
+        datum=dict(
+            argstr="-datum %s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file_a=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-2,
         ),
         in_file_b=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             position=-1,
         ),
-        mask_inter=dict(argstr='-mask_inter', ),
-        mask_union=dict(argstr='-mask_union', ),
-        non_zero=dict(argstr='-non_zero', ),
+        mask_inter=dict(
+            argstr="-mask_inter",
+        ),
+        mask_union=dict(
+            argstr="-mask_union",
+        ),
+        non_zero=dict(
+            argstr="-non_zero",
+        ),
         num_threads=dict(
             nohash=True,
             usedefault=True,
         ),
         out_file=dict(
-            argstr='-prefix %s',
-            name_source='in_file_a',
-            name_template='%s_mean',
+            argstr="-prefix %s",
+            extensions=None,
+            name_source="in_file_a",
+            name_template="%s_mean",
         ),
         outputtype=dict(),
-        scale=dict(argstr='-%sscale', ),
-        sqr=dict(argstr='-sqr', ),
-        std_dev=dict(argstr='-stdev', ),
-        summ=dict(argstr='-sum', ),
+        scale=dict(
+            argstr="-%sscale",
+        ),
+        sqr=dict(
+            argstr="-sqr",
+        ),
+        std_dev=dict(
+            argstr="-stdev",
+        ),
+        summ=dict(
+            argstr="-sum",
+        ),
     )
     inputs = Means.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Means_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Means.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Merge.py b/nipype/interfaces/afni/tests/test_auto_Merge.py
index c36ee6f7a1..ac974184ea 100644
--- a/nipype/interfaces/afni/tests/test_auto_Merge.py
+++ b/nipype/interfaces/afni/tests/test_auto_Merge.py
@@ -1,22 +1,25 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import Merge


 def test_Merge_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         blurfwhm=dict(
-            argstr='-1blur_fwhm %d',
-            units='mm',
+            argstr="-1blur_fwhm %d",
+            units="mm",
+        ),
+        doall=dict(
+            argstr="-doall",
         ),
-        doall=dict(argstr='-doall', ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_files=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=False,
             mandatory=True,
             position=-1,
@@ -26,9 +29,10 @@
             usedefault=True,
         ),
         out_file=dict(
-            argstr='-prefix %s',
-            name_source='in_files',
-            name_template='%s_merge',
+            argstr="-prefix %s",
+            extensions=None,
+            name_source="in_files",
+            name_template="%s_merge",
         ),
         outputtype=dict(),
     )
@@ -37,8 +41,14 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Merge_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Merge.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_NetCorr.py b/nipype/interfaces/afni/tests/test_auto_NetCorr.py
new file mode 100644
index 0000000000..e613dc13eb
--- /dev/null
+++ b/nipype/interfaces/afni/tests/test_auto_NetCorr.py
@@ -0,0 +1,99 @@
+# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
+from ..preprocess import NetCorr
+
+
+def test_NetCorr_inputs():
+    input_map = dict(
+        args=dict(
+            argstr="%s",
+        ),
+        environ=dict(
+            nohash=True,
+            usedefault=True,
+        ),
+        fish_z=dict(
+            argstr="-fish_z",
+        ),
+        ignore_LT=dict(
+            argstr="-ignore_LT",
+        ),
+        in_file=dict(
+            argstr="-inset %s",
+            extensions=None,
+            mandatory=True,
+        ),
+        in_rois=dict(
+            argstr="-in_rois %s",
+            extensions=None,
+            mandatory=True,
+        ),
+        mask=dict(
+            argstr="-mask %s",
+            extensions=None,
+        ),
+        nifti=dict(
+            argstr="-nifti",
+        ),
+        num_threads=dict(
+            nohash=True,
+            usedefault=True,
+        ),
+        out_file=dict(
+            argstr="-prefix %s",
+            extensions=None,
+            name_source="in_file",
+            name_template="%s_netcorr",
+            position=1,
+        ),
+        output_mask_nonnull=dict(
+            argstr="-output_mask_nonnull",
+        ),
+        outputtype=dict(),
+        part_corr=dict(
+            argstr="-part_corr",
+        ),
+        push_thru_many_zeros=dict(
+            argstr="-push_thru_many_zeros",
+        ),
+        ts_indiv=dict(
+            argstr="-ts_indiv",
+        ),
+        ts_label=dict(
+            argstr="-ts_label",
+        ),
+        ts_out=dict(
+            argstr="-ts_out",
+        ),
+        ts_wb_Z=dict(
+            argstr="-ts_wb_Z",
+        ),
+        ts_wb_corr=dict(
+            argstr="-ts_wb_corr",
+        ),
+        ts_wb_strlabel=dict(
+            argstr="-ts_wb_strlabel",
+        ),
+        weight_ts=dict(
+            argstr="-weight_ts %s",
+            extensions=None,
+        ),
+    )
+    inputs = NetCorr.input_spec()
+
+    for key, metadata in list(input_map.items()):
+        for metakey, value in list(metadata.items()):
+            assert getattr(inputs.traits()[key], metakey) == value
+
+
+def test_NetCorr_outputs():
+    output_map = dict(
+        out_corr_maps=dict(),
+        out_corr_matrix=dict(
+            extensions=None,
+        ),
+    )
+    outputs = NetCorr.output_spec()
+
+    for key, metadata in list(output_map.items()):
+        for metakey, value in list(metadata.items()):
+            assert getattr(outputs.traits()[key], metakey) == value
diff --git a/nipype/interfaces/afni/tests/test_auto_Notes.py b/nipype/interfaces/afni/tests/test_auto_Notes.py
index 965bacb000..c83a70f0f2 100644
--- a/nipype/interfaces/afni/tests/test_auto_Notes.py
+++ b/nipype/interfaces/afni/tests/test_auto_Notes.py
@@ -1,24 +1,30 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import Notes


 def test_Notes_inputs():
     input_map = dict(
-        add=dict(argstr='-a "%s"', ),
+        add=dict(
+            argstr='-a "%s"',
+        ),
         add_history=dict(
             argstr='-h "%s"',
-            xor=['rep_history'],
+            xor=["rep_history"],
+        ),
+        args=dict(
+            argstr="%s",
+        ),
+        delete=dict(
+            argstr="-d %d",
         ),
-        args=dict(argstr='%s', ),
-        delete=dict(argstr='-d %d', ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=False,
+            extensions=None,
             mandatory=True,
             position=-1,
         ),
@@ -26,21 +32,32 @@
             nohash=True,
             usedefault=True,
         ),
-        out_file=dict(argstr='%s', ),
+        out_file=dict(
+            argstr="%s",
+            extensions=None,
+        ),
         outputtype=dict(),
         rep_history=dict(
             argstr='-HH "%s"',
-            xor=['add_history'],
+            xor=["add_history"],
+        ),
+        ses=dict(
+            argstr="-ses",
         ),
-        ses=dict(argstr='-ses', ),
     )
     inputs = Notes.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Notes_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Notes.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_NwarpAdjust.py b/nipype/interfaces/afni/tests/test_auto_NwarpAdjust.py
index 48b59a2968..85fb2d3495 100644
--- a/nipype/interfaces/afni/tests/test_auto_NwarpAdjust.py
+++ b/nipype/interfaces/afni/tests/test_auto_NwarpAdjust.py
@@ -1,30 +1,34 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import NwarpAdjust


 def test_NwarpAdjust_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        in_files=dict(argstr='-source %s', ),
+        in_files=dict(
+            argstr="-source %s",
+        ),
         num_threads=dict(
             nohash=True,
             usedefault=True,
         ),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
+            extensions=None,
             keep_extension=True,
-            name_source='in_files',
-            name_template='%s_NwarpAdjust',
-            requires=['in_files'],
+            name_source="in_files",
+            name_template="%s_NwarpAdjust",
+            requires=["in_files"],
         ),
         outputtype=dict(),
         warps=dict(
-            argstr='-nwarp %s',
+            argstr="-nwarp %s",
             mandatory=True,
         ),
     )
@@ -33,8 +37,14 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_NwarpAdjust_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = NwarpAdjust.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_NwarpApply.py b/nipype/interfaces/afni/tests/test_auto_NwarpApply.py
index 63f6baa044..c9ebd2853e 100644
--- a/nipype/interfaces/afni/tests/test_auto_NwarpApply.py
+++ b/nipype/interfaces/afni/tests/test_auto_NwarpApply.py
@@ -1,42 +1,53 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import NwarpApply


 def test_NwarpApply_inputs():
     input_map = dict(
-        ainterp=dict(argstr='-ainterp %s', ),
-        args=dict(argstr='%s', ),
+        ainterp=dict(
+            argstr="-ainterp %s",
+        ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='-source %s',
+            argstr="-source %s",
             mandatory=True,
         ),
         interp=dict(
-            argstr='-interp %s',
+            argstr="-interp %s",
             usedefault=True,
         ),
-        inv_warp=dict(argstr='-iwarp', ),
-        master=dict(argstr='-master %s', ),
+        inv_warp=dict(
+            argstr="-iwarp",
+        ),
+        master=dict(
+            argstr="-master %s",
+            extensions=None,
+        ),
         out_file=dict(
-            argstr='-prefix %s',
-            name_source='in_file',
-            name_template='%s_Nwarp',
+            argstr="-prefix %s",
+            extensions=None,
+            name_source="in_file",
+            name_template="%s_Nwarp",
         ),
         quiet=dict(
-            argstr='-quiet',
-            xor=['verb'],
+            argstr="-quiet",
+            xor=["verb"],
+        ),
+        short=dict(
+            argstr="-short",
         ),
-        short=dict(argstr='-short', ),
         verb=dict(
-            argstr='-verb',
-            xor=['quiet'],
+            argstr="-verb",
+            xor=["quiet"],
         ),
         warp=dict(
-            argstr='-nwarp %s',
+            argstr="-nwarp %s",
             mandatory=True,
         ),
     )
@@ -45,8 +56,14 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_NwarpApply_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = NwarpApply.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_NwarpCat.py b/nipype/interfaces/afni/tests/test_auto_NwarpCat.py
index c6b3689c9e..b89aade9b0 100644
--- a/nipype/interfaces/afni/tests/test_auto_NwarpCat.py
+++ b/nipype/interfaces/afni/tests/test_auto_NwarpCat.py
@@ -1,46 +1,62 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import NwarpCat


 def test_NwarpCat_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        expad=dict(argstr='-expad %d', ),
+        expad=dict(
+            argstr="-expad %d",
+        ),
         in_files=dict(
-            argstr='%s',
+            argstr="%s",
             mandatory=True,
             position=-1,
         ),
         interp=dict(
-            argstr='-interp %s',
+            argstr="-interp %s",
             usedefault=True,
         ),
-        inv_warp=dict(argstr='-iwarp', ),
+        inv_warp=dict(
+            argstr="-iwarp",
+        ),
         num_threads=dict(
             nohash=True,
             usedefault=True,
         ),
         out_file=dict(
-            argstr='-prefix %s',
-            name_source='in_files',
-            name_template='%s_NwarpCat',
+            argstr="-prefix %s",
+            extensions=None,
+            name_source="in_files",
+            name_template="%s_NwarpCat",
         ),
         outputtype=dict(),
-        space=dict(argstr='-space %s', ),
-        verb=dict(argstr='-verb', ),
+        space=dict(
+            argstr="-space %s",
+        ),
+        verb=dict(
+            argstr="-verb",
+        ),
     )
     inputs = NwarpCat.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_NwarpCat_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = NwarpCat.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_OneDToolPy.py b/nipype/interfaces/afni/tests/test_auto_OneDToolPy.py
index fe037851af..bb47517e27 100644
--- a/nipype/interfaces/afni/tests/test_auto_OneDToolPy.py
+++ b/nipype/interfaces/afni/tests/test_auto_OneDToolPy.py
@@ -1,47 +1,77 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import OneDToolPy


 def test_OneDToolPy_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        censor_motion=dict(argstr='-censor_motion %f %s', ),
-        censor_prev_TR=dict(argstr='-censor_prev_TR', ),
-        demean=dict(argstr='-demean', ),
-        derivative=dict(argstr='-derivative', ),
+        args=dict(
+            argstr="%s",
+        ),
+        censor_motion=dict(
+            argstr="-censor_motion %f %s",
+        ),
+        censor_prev_TR=dict(
+            argstr="-censor_prev_TR",
+        ),
+        demean=dict(
+            argstr="-demean",
+        ),
+        derivative=dict(
+            argstr="-derivative",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='-infile %s',
+            argstr="-infile %s",
+            extensions=None,
             mandatory=True,
         ),
         out_file=dict(
-            argstr='-write %s',
-            xor=['show_cormat_warnings'],
+            argstr="-write %s",
+            extensions=None,
+            xor=["show_cormat_warnings"],
         ),
         outputtype=dict(),
-        py27_path=dict(usedefault=True, ),
-        set_nruns=dict(argstr='-set_nruns %d', ),
-        show_censor_count=dict(argstr='-show_censor_count', ),
+        py27_path=dict(
+            usedefault=True,
+        ),
+        set_nruns=dict(
+            argstr="-set_nruns %d",
+        ),
+        show_censor_count=dict(
+            argstr="-show_censor_count",
+        ),
         show_cormat_warnings=dict(
-            argstr='-show_cormat_warnings |& tee %s',
+            argstr="-show_cormat_warnings |& tee %s",
+            extensions=None,
             position=-1,
-            xor=['out_file'],
+            xor=["out_file"],
+        ),
+        show_indices_interest=dict(
+            argstr="-show_indices_interest",
+        ),
+        show_trs_run=dict(
+            argstr="-show_trs_run %d",
+        ),
+        show_trs_uncensored=dict(
+            argstr="-show_trs_uncensored %s",
         ),
-        show_indices_interest=dict(argstr='-show_indices_interest', ),
-        show_trs_run=dict(argstr='-show_trs_run %d', ),
-        show_trs_uncensored=dict(argstr='-show_trs_uncensored %s', ),
     )
     inputs = OneDToolPy.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_OneDToolPy_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = OneDToolPy.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_OutlierCount.py b/nipype/interfaces/afni/tests/test_auto_OutlierCount.py
index a63fbb8bef..511c1ca8f2 100644
--- a/nipype/interfaces/afni/tests/test_auto_OutlierCount.py
+++ b/nipype/interfaces/afni/tests/test_auto_OutlierCount.py
@@ -1,74 +1,89 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import OutlierCount


 def test_OutlierCount_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         autoclip=dict(
-            argstr='-autoclip',
+            argstr="-autoclip",
             usedefault=True,
-            xor=['mask'],
+            xor=["mask"],
         ),
         automask=dict(
-            argstr='-automask',
+            argstr="-automask",
             usedefault=True,
-            xor=['mask'],
+            xor=["mask"],
         ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         fraction=dict(
-            argstr='-fraction',
+            argstr="-fraction",
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-2,
         ),
         interval=dict(
-            argstr='-range',
+            argstr="-range",
             usedefault=True,
         ),
         legendre=dict(
-            argstr='-legendre',
+            argstr="-legendre",
             usedefault=True,
         ),
         mask=dict(
-            argstr='-mask %s',
-            xor=['autoclip', 'automask'],
+            argstr="-mask %s",
+            extensions=None,
+            xor=["autoclip", "automask"],
         ),
         out_file=dict(
+            extensions=None,
             keep_extension=False,
-            name_source=['in_file'],
-            name_template='%s_outliers',
+            name_source=["in_file"],
+            name_template="%s_outliers",
         ),
         outliers_file=dict(
-            argstr='-save %s',
+            argstr="-save %s",
+            extensions=None,
             keep_extension=True,
-            name_source=['in_file'],
-            name_template='%s_outliers',
-            output_name='out_outliers',
+            name_source=["in_file"],
+            name_template="%s_outliers",
+            output_name="out_outliers",
+        ),
+        polort=dict(
+            argstr="-polort %d",
         ),
-        polort=dict(argstr='-polort %d', ),
         qthr=dict(
-            argstr='-qthr %.5f',
+            argstr="-qthr %.5f",
+            usedefault=True,
+        ),
+        save_outliers=dict(
             usedefault=True,
         ),
-        save_outliers=dict(usedefault=True, ),
     )
     inputs = OutlierCount.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_OutlierCount_outputs():
     output_map = dict(
-        out_file=dict(),
-        out_outliers=dict(),
+        out_file=dict(
+            extensions=None,
+        ),
+        out_outliers=dict(
+            extensions=None,
+        ),
     )
     outputs = OutlierCount.output_spec()
diff --git a/nipype/interfaces/afni/tests/test_auto_QualityIndex.py b/nipype/interfaces/afni/tests/test_auto_QualityIndex.py
index 679a3e0393..c759be87a4 100644
--- a/nipype/interfaces/afni/tests/test_auto_QualityIndex.py
+++ b/nipype/interfaces/afni/tests/test_auto_QualityIndex.py
@@ -1,52 +1,58 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import QualityIndex


 def test_QualityIndex_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         autoclip=dict(
-            argstr='-autoclip',
+            argstr="-autoclip",
             usedefault=True,
-            xor=['mask'],
+            xor=["mask"],
         ),
         automask=dict(
-            argstr='-automask',
+            argstr="-automask",
             usedefault=True,
-            xor=['mask'],
+            xor=["mask"],
+        ),
+        clip=dict(
+            argstr="-clip %f",
         ),
-        clip=dict(argstr='-clip %f', ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-2,
         ),
         interval=dict(
-            argstr='-range',
+            argstr="-range",
             usedefault=True,
         ),
         mask=dict(
-            argstr='-mask %s',
-            xor=['autoclip', 'automask'],
+            argstr="-mask %s",
+            extensions=None,
+            xor=["autoclip", "automask"],
         ),
         out_file=dict(
-            argstr='> %s',
+            argstr="> %s",
+            extensions=None,
             keep_extension=False,
-            name_source=['in_file'],
-            name_template='%s_tqual',
+            name_source=["in_file"],
+            name_template="%s_tqual",
             position=-1,
         ),
         quadrant=dict(
-            argstr='-quadrant',
+            argstr="-quadrant",
             usedefault=True,
         ),
         spearman=dict(
-            argstr='-spearman',
+            argstr="-spearman",
             usedefault=True,
         ),
     )
@@ -55,8 +61,14 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_QualityIndex_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = QualityIndex.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Qwarp.py b/nipype/interfaces/afni/tests/test_auto_Qwarp.py
index f6df3d0ab5..01b7e32e17 100644
--- a/nipype/interfaces/afni/tests/test_auto_Qwarp.py
+++ b/nipype/interfaces/afni/tests/test_auto_Qwarp.py
@@ -1,151 +1,204 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import Qwarp


 def test_Qwarp_inputs():
     input_map = dict(
-        Qfinal=dict(argstr='-Qfinal', ),
-        Qonly=dict(argstr='-Qonly', ),
-        allineate=dict(argstr='-allineate', ),
+        Qfinal=dict(
+            argstr="-Qfinal",
+        ),
+        Qonly=dict(
+            argstr="-Qonly",
+        ),
+        allineate=dict(
+            argstr="-allineate",
+        ),
         allineate_opts=dict(
-            argstr='-allineate_opts %s',
-            requires=['allineate'],
+            argstr="-allineate_opts %s",
+            requires=["allineate"],
         ),
         allsave=dict(
-            argstr='-allsave',
-            xor=['nopadWARP', 'duplo', 'plusminus'],
+            argstr="-allsave",
+            xor=["nopadWARP", "duplo", "plusminus"],
+        ),
+        args=dict(
+            argstr="%s",
         ),
-        args=dict(argstr='%s', ),
         ballopt=dict(
-            argstr='-ballopt',
-            xor=['workhard', 'boxopt'],
+            argstr="-ballopt",
+            xor=["workhard", "boxopt"],
+        ),
+        bandpass=dict(
+            argstr="-bpass %f %f",
         ),
         base_file=dict(
-            argstr='-base %s',
+            argstr="-base %s",
             copyfile=False,
+            extensions=None,
             mandatory=True,
         ),
         baxopt=dict(
-            argstr='-boxopt',
-            xor=['workhard', 'ballopt'],
+            argstr="-boxopt",
+            xor=["workhard", "ballopt"],
+        ),
+        blur=dict(
+            argstr="-blur %s",
         ),
-        blur=dict(argstr='-blur %s', ),
         duplo=dict(
-            argstr='-duplo',
-            xor=[
-                'gridlist', 'maxlev', 'inilev', 'iniwarp', 'plusminus',
-                'allsave'
-            ],
+            argstr="-duplo",
+            xor=["gridlist", "maxlev", "inilev", "iniwarp", "plusminus", "allsave"],
         ),
         emask=dict(
-            argstr='-emask %s',
+            argstr="-emask %s",
             copyfile=False,
+            extensions=None,
         ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         expad=dict(
-            argstr='-expad %d',
-            xor=['nopadWARP'],
+            argstr="-expad %d",
+            xor=["nopadWARP"],
         ),
         gridlist=dict(
-            argstr='-gridlist %s',
+            argstr="-gridlist %s",
             copyfile=False,
-            xor=['duplo', 'plusminus'],
+            extensions=None,
+            xor=["duplo", "plusminus"],
         ),
         hel=dict(
-            argstr='-hel',
-            xor=['nmi', 'mi', 'lpc', 'lpa', 'pear'],
+            argstr="-hel",
+            xor=["nmi", "mi", "lpc", "lpa", "pear"],
         ),
         in_file=dict(
-            argstr='-source %s',
+            argstr="-source %s",
             copyfile=False,
+            extensions=None,
             mandatory=True,
         ),
         inilev=dict(
-            argstr='-inilev %d',
-            xor=['duplo'],
+            argstr="-inilev %d",
+            xor=["duplo"],
         ),
         iniwarp=dict(
-            argstr='-iniwarp %s',
-            xor=['duplo'],
+            argstr="-iniwarp %s",
+            xor=["duplo"],
         ),
         iwarp=dict(
-            argstr='-iwarp',
-            xor=['plusminus'],
+            argstr="-iwarp",
+            xor=["plusminus"],
         ),
         lpa=dict(
-            argstr='-lpa',
-            xor=['nmi', 'mi', 'lpc', 'hel', 'pear'],
+            argstr="-lpa",
+            xor=["nmi", "mi", "lpc", "hel", "pear"],
         ),
         lpc=dict(
-            argstr='-lpc',
+            argstr="-lpc",
             position=-2,
-            xor=['nmi', 'mi', 'hel', 'lpa', 'pear'],
+            xor=["nmi", "mi", "hel", "lpa", "pear"],
         ),
         maxlev=dict(
-            argstr='-maxlev %d',
+            argstr="-maxlev %d",
             position=-1,
-            xor=['duplo'],
+            xor=["duplo"],
         ),
         mi=dict(
-            argstr='-mi',
-            xor=['mi', 'hel', 'lpc', 'lpa', 'pear'],
+            argstr="-mi",
+            xor=["mi", "hel", "lpc", "lpa", "pear"],
+        ),
+        minpatch=dict(
+            argstr="-minpatch %d",
         ),
-        minpatch=dict(argstr='-minpatch %d', ),
         nmi=dict(
-            argstr='-nmi',
-            xor=['nmi', 'hel', 'lpc', 'lpa', 'pear'],
-        ),
-        noXdis=dict(argstr='-noXdis', ),
-        noYdis=dict(argstr='-noYdis', ),
-        noZdis=dict(argstr='-noZdis', ),
-        noneg=dict(argstr='-noneg', ),
-        nopad=dict(argstr='-nopad', ),
+            argstr="-nmi",
+            xor=["nmi", "hel", "lpc", "lpa", "pear"],
+        ),
+        noXdis=dict(
+            argstr="-noXdis",
+        ),
+        noYdis=dict(
+            argstr="-noYdis",
+        ),
+        noZdis=dict(
+            argstr="-noZdis",
+        ),
+        noneg=dict(
+            argstr="-noneg",
argstr="-noneg", + ), + nopad=dict( + argstr="-nopad", + ), nopadWARP=dict( - argstr='-nopadWARP', - xor=['allsave', 'expad'], + argstr="-nopadWARP", + xor=["allsave", "expad"], + ), + nopenalty=dict( + argstr="-nopenalty", + ), + nowarp=dict( + argstr="-nowarp", + ), + noweight=dict( + argstr="-noweight", ), - nopenalty=dict(argstr='-nopenalty', ), - nowarp=dict(argstr='-nowarp', ), - noweight=dict(argstr='-noweight', ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - genfile=True, - name_source=['in_file'], - name_template='%s_QW', + argstr="-prefix %s", + extensions=None, + name_source=["in_file"], + name_template="ppp_%s", + ), + out_weight_file=dict( + argstr="-wtprefix %s", + extensions=None, ), - out_weight_file=dict(argstr='-wtprefix %s', ), outputtype=dict(), - overwrite=dict(argstr='-overwrite', ), - pblur=dict(argstr='-pblur %s', ), - pear=dict(argstr='-pear', ), - penfac=dict(argstr='-penfac %f', ), + overwrite=dict( + argstr="-overwrite", + ), + pblur=dict( + argstr="-pblur %s", + ), + pear=dict( + argstr="-pear", + ), + penfac=dict( + argstr="-penfac %f", + ), plusminus=dict( - argstr='-plusminus', - xor=['duplo', 'allsave', 'iwarp'], + argstr="-plusminus", + xor=["duplo", "allsave", "iwarp"], ), quiet=dict( - argstr='-quiet', - xor=['verb'], + argstr="-quiet", + xor=["verb"], + ), + resample=dict( + argstr="-resample", ), - resample=dict(argstr='-resample', ), verb=dict( - argstr='-verb', - xor=['quiet'], + argstr="-verb", + xor=["quiet"], + ), + wball=dict( + argstr="-wball %s", + xor=["wmask"], + ), + weight=dict( + argstr="-weight %s", + extensions=None, + ), + wmask=dict( + argstr="-wpass %s %f", + xor=["wball"], ), - wball=dict(argstr='-wball %s', ), - weight=dict(argstr='-weight %s', ), - wmask=dict(argstr='-wpass %s %f', ), workhard=dict( - argstr='-workhard', - xor=['boxopt', 'ballopt'], + argstr="-workhard", + xor=["boxopt", "ballopt"], ), ) inputs = Qwarp.input_spec() @@ -153,13 +206,25 @@ def test_Qwarp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Qwarp_outputs(): output_map = dict( - base_warp=dict(), - source_warp=dict(), - warped_base=dict(), - warped_source=dict(), - weights=dict(), + base_warp=dict( + extensions=None, + ), + source_warp=dict( + extensions=None, + ), + warped_base=dict( + extensions=None, + ), + warped_source=dict( + extensions=None, + ), + weights=dict( + extensions=None, + ), ) outputs = Qwarp.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py b/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py index 340f3a0e4a..7e89576a3f 100644 --- a/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py +++ b/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py @@ -1,158 +1,213 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import QwarpPlusMinus def test_QwarpPlusMinus_inputs(): input_map = dict( - Qfinal=dict(argstr='-Qfinal', ), - Qonly=dict(argstr='-Qonly', ), - allineate=dict(argstr='-allineate', ), + Qfinal=dict( + argstr="-Qfinal", + ), + Qonly=dict( + argstr="-Qonly", + ), + allineate=dict( + argstr="-allineate", + ), allineate_opts=dict( - argstr='-allineate_opts %s', - requires=['allineate'], + argstr="-allineate_opts %s", + requires=["allineate"], ), allsave=dict( - argstr='-allsave', - xor=['nopadWARP', 'duplo', 'plusminus'], + argstr="-allsave", + xor=["nopadWARP", 
"duplo", "plusminus"], + ), + args=dict( + argstr="%s", ), - args=dict(argstr='%s', ), ballopt=dict( - argstr='-ballopt', - xor=['workhard', 'boxopt'], + argstr="-ballopt", + xor=["workhard", "boxopt"], + ), + bandpass=dict( + argstr="-bpass %f %f", ), base_file=dict( - argstr='-base %s', + argstr="-base %s", copyfile=False, + extensions=None, mandatory=True, ), baxopt=dict( - argstr='-boxopt', - xor=['workhard', 'ballopt'], + argstr="-boxopt", + xor=["workhard", "ballopt"], + ), + blur=dict( + argstr="-blur %s", ), - blur=dict(argstr='-blur %s', ), duplo=dict( - argstr='-duplo', - xor=[ - 'gridlist', 'maxlev', 'inilev', 'iniwarp', 'plusminus', - 'allsave' - ], + argstr="-duplo", + xor=["gridlist", "maxlev", "inilev", "iniwarp", "plusminus", "allsave"], ), emask=dict( - argstr='-emask %s', + argstr="-emask %s", copyfile=False, + extensions=None, ), environ=dict( nohash=True, usedefault=True, ), expad=dict( - argstr='-expad %d', - xor=['nopadWARP'], + argstr="-expad %d", + xor=["nopadWARP"], ), gridlist=dict( - argstr='-gridlist %s', + argstr="-gridlist %s", copyfile=False, - xor=['duplo', 'plusminus'], + extensions=None, + xor=["duplo", "plusminus"], ), hel=dict( - argstr='-hel', - xor=['nmi', 'mi', 'lpc', 'lpa', 'pear'], + argstr="-hel", + xor=["nmi", "mi", "lpc", "lpa", "pear"], ), in_file=dict( - argstr='-source %s', + argstr="-source %s", copyfile=False, + extensions=None, mandatory=True, ), inilev=dict( - argstr='-inilev %d', - xor=['duplo'], + argstr="-inilev %d", + xor=["duplo"], ), iniwarp=dict( - argstr='-iniwarp %s', - xor=['duplo'], + argstr="-iniwarp %s", + xor=["duplo"], ), iwarp=dict( - argstr='-iwarp', - xor=['plusminus'], + argstr="-iwarp", + xor=["plusminus"], ), lpa=dict( - argstr='-lpa', - xor=['nmi', 'mi', 'lpc', 'hel', 'pear'], + argstr="-lpa", + xor=["nmi", "mi", "lpc", "hel", "pear"], ), lpc=dict( - argstr='-lpc', + argstr="-lpc", position=-2, - xor=['nmi', 'mi', 'hel', 'lpa', 'pear'], + xor=["nmi", "mi", "hel", "lpa", "pear"], ), maxlev=dict( - argstr='-maxlev %d', + argstr="-maxlev %d", position=-1, - xor=['duplo'], + xor=["duplo"], ), mi=dict( - argstr='-mi', - xor=['mi', 'hel', 'lpc', 'lpa', 'pear'], + argstr="-mi", + xor=["mi", "hel", "lpc", "lpa", "pear"], + ), + minpatch=dict( + argstr="-minpatch %d", ), - minpatch=dict(argstr='-minpatch %d', ), nmi=dict( - argstr='-nmi', - xor=['nmi', 'hel', 'lpc', 'lpa', 'pear'], - ), - noXdis=dict(argstr='-noXdis', ), - noYdis=dict(argstr='-noYdis', ), - noZdis=dict(argstr='-noZdis', ), - noneg=dict(argstr='-noneg', ), - nopad=dict(argstr='-nopad', ), + argstr="-nmi", + xor=["nmi", "hel", "lpc", "lpa", "pear"], + ), + noXdis=dict( + argstr="-noXdis", + ), + noYdis=dict( + argstr="-noYdis", + ), + noZdis=dict( + argstr="-noZdis", + ), + noneg=dict( + argstr="-noneg", + ), + nopad=dict( + argstr="-nopad", + ), nopadWARP=dict( - argstr='-nopadWARP', - xor=['allsave', 'expad'], + argstr="-nopadWARP", + xor=["allsave", "expad"], + ), + nopenalty=dict( + argstr="-nopenalty", + ), + nowarp=dict( + argstr="-nowarp", + ), + noweight=dict( + argstr="-noweight", ), - nopenalty=dict(argstr='-nopenalty', ), - nowarp=dict(argstr='-nowarp', ), - noweight=dict(argstr='-noweight', ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", + extensions=None, position=0, usedefault=True, ), - out_weight_file=dict(argstr='-wtprefix %s', ), + out_weight_file=dict( + argstr="-wtprefix %s", + extensions=None, + ), outputtype=dict(), - overwrite=dict(argstr='-overwrite', ), - 
pblur=dict(argstr='-pblur %s', ), - pear=dict(argstr='-pear', ), - penfac=dict(argstr='-penfac %f', ), + overwrite=dict( + argstr="-overwrite", + ), + pblur=dict( + argstr="-pblur %s", + ), + pear=dict( + argstr="-pear", + ), + penfac=dict( + argstr="-penfac %f", + ), plusminus=dict( - argstr='-plusminus', + argstr="-plusminus", position=1, usedefault=True, - xor=['duplo', 'allsave', 'iwarp'], + xor=["duplo", "allsave", "iwarp"], ), quiet=dict( - argstr='-quiet', - xor=['verb'], + argstr="-quiet", + xor=["verb"], + ), + resample=dict( + argstr="-resample", ), - resample=dict(argstr='-resample', ), source_file=dict( - argstr='-source %s', + argstr="-source %s", copyfile=False, - deprecated='1.1.2', - new_name='in_file', + deprecated="1.1.2", + extensions=None, + new_name="in_file", ), verb=dict( - argstr='-verb', - xor=['quiet'], + argstr="-verb", + xor=["quiet"], + ), + wball=dict( + argstr="-wball %s", + xor=["wmask"], + ), + weight=dict( + argstr="-weight %s", + extensions=None, + ), + wmask=dict( + argstr="-wpass %s %f", + xor=["wball"], ), - wball=dict(argstr='-wball %s', ), - weight=dict(argstr='-weight %s', ), - wmask=dict(argstr='-wpass %s %f', ), workhard=dict( - argstr='-workhard', - xor=['boxopt', 'ballopt'], + argstr="-workhard", + xor=["boxopt", "ballopt"], ), ) inputs = QwarpPlusMinus.input_spec() @@ -160,13 +215,25 @@ def test_QwarpPlusMinus_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_QwarpPlusMinus_outputs(): output_map = dict( - base_warp=dict(), - source_warp=dict(), - warped_base=dict(), - warped_source=dict(), - weights=dict(), + base_warp=dict( + extensions=None, + ), + source_warp=dict( + extensions=None, + ), + warped_base=dict( + extensions=None, + ), + warped_source=dict( + extensions=None, + ), + weights=dict( + extensions=None, + ), ) outputs = QwarpPlusMinus.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_ROIStats.py b/nipype/interfaces/afni/tests/test_auto_ROIStats.py index fbda92e9a5..36e7546990 100644 --- a/nipype/interfaces/afni/tests/test_auto_ROIStats.py +++ b/nipype/interfaces/afni/tests/test_auto_ROIStats.py @@ -1,53 +1,77 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ROIStats def test_ROIStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict(argstr='-debug', ), + args=dict( + argstr="%s", + ), + debug=dict( + argstr="-debug", + ), environ=dict( nohash=True, usedefault=True, ), format1D=dict( - argstr='-1Dformat', - xor=['format1DR'], + argstr="-1Dformat", + xor=["format1DR"], ), format1DR=dict( - argstr='-1DRformat', - xor=['format1D'], + argstr="-1DRformat", + xor=["format1D"], ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), mask=dict( - argstr='-mask %s', - deprecated='1.1.4', - new_name='mask_file', + argstr="-mask %s", + deprecated="1.1.4", + extensions=None, + new_name="mask_file", position=3, ), - mask_f2short=dict(argstr='-mask_f2short', ), - mask_file=dict(argstr='-mask %s', ), - nobriklab=dict(argstr='-nobriklab', ), - nomeanout=dict(argstr='-nomeanout', ), - num_roi=dict(argstr='-numroi %s', ), + mask_f2short=dict( + argstr="-mask_f2short", + ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), + nobriklab=dict( + argstr="-nobriklab", + ), + nomeanout=dict( + argstr="-nomeanout", + ), + num_roi=dict( + argstr="-numroi %s", + ), out_file=dict( - 
argstr='> %s', + argstr="> %s", + extensions=None, keep_extension=False, - name_source='in_file', - name_template='%s_roistat.1D', + name_source="in_file", + name_template="%s_roistat.1D", position=-1, ), - quiet=dict(argstr='-quiet', ), - roisel=dict(argstr='-roisel %s', ), - stat=dict(argstr='%s...', ), + quiet=dict( + argstr="-quiet", + ), + roisel=dict( + argstr="-roisel %s", + extensions=None, + ), + stat=dict( + argstr="%s...", + ), zerofill=dict( - argstr='-zerofill %s', - requires=['num_roi'], + argstr="-zerofill %s", + requires=["num_roi"], ), ) inputs = ROIStats.input_spec() @@ -55,8 +79,14 @@ def test_ROIStats_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ROIStats_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ROIStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_ReHo.py b/nipype/interfaces/afni/tests/test_auto_ReHo.py index 1809759485..f9a1993ce1 100644 --- a/nipype/interfaces/afni/tests/test_auto_ReHo.py +++ b/nipype/interfaces/afni/tests/test_auto_ReHo.py @@ -1,42 +1,55 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ReHo def test_ReHo_inputs(): input_map = dict( - args=dict(argstr='%s', ), - chi_sq=dict(argstr='-chi_sq', ), + args=dict( + argstr="%s", + ), + chi_sq=dict( + argstr="-chi_sq", + ), ellipsoid=dict( - argstr='-neigh_X %s -neigh_Y %s -neigh_Z %s', - xor=['sphere', 'neighborhood'], + argstr="-neigh_X %s -neigh_Y %s -neigh_Z %s", + xor=["sphere", "neighborhood"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-inset %s', + argstr="-inset %s", + extensions=None, mandatory=True, position=1, ), - label_set=dict(argstr='-in_rois %s', ), - mask_file=dict(argstr='-mask %s', ), + label_set=dict( + argstr="-in_rois %s", + extensions=None, + ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), neighborhood=dict( - argstr='-nneigh %s', - xor=['sphere', 'ellipsoid'], + argstr="-nneigh %s", + xor=["sphere", "ellipsoid"], ), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_reho', + name_source="in_file", + name_template="%s_reho", position=0, ), - overwrite=dict(argstr='-overwrite', ), + overwrite=dict( + argstr="-overwrite", + ), sphere=dict( - argstr='-neigh_RAD %s', - xor=['neighborhood', 'ellipsoid'], + argstr="-neigh_RAD %s", + xor=["neighborhood", "ellipsoid"], ), ) inputs = ReHo.input_spec() @@ -44,10 +57,16 @@ def test_ReHo_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ReHo_outputs(): output_map = dict( - out_file=dict(), - out_vals=dict(), + out_file=dict( + extensions=None, + ), + out_vals=dict( + extensions=None, + ), ) outputs = ReHo.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Refit.py b/nipype/interfaces/afni/tests/test_auto_Refit.py index 4983eafc81..205f7e0190 100644 --- a/nipype/interfaces/afni/tests/test_auto_Refit.py +++ b/nipype/interfaces/afni/tests/test_auto_Refit.py @@ -1,45 +1,86 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Refit def test_Refit_inputs(): input_map = dict( 
- args=dict(argstr='%s', ), - atrcopy=dict(argstr='-atrcopy %s %s', ), - atrfloat=dict(argstr='-atrfloat %s %s', ), - atrint=dict(argstr='-atrint %s %s', ), - atrstring=dict(argstr='-atrstring %s %s', ), - deoblique=dict(argstr='-deoblique', ), - duporigin_file=dict(argstr='-duporigin %s', ), + args=dict( + argstr="%s", + ), + atrcopy=dict( + argstr="-atrcopy %s %s", + ), + atrfloat=dict( + argstr="-atrfloat %s %s", + ), + atrint=dict( + argstr="-atrint %s %s", + ), + atrstring=dict( + argstr="-atrstring %s %s", + ), + deoblique=dict( + argstr="-deoblique", + ), + duporigin_file=dict( + argstr="-duporigin %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, mandatory=True, position=-1, ), - nosaveatr=dict(argstr='-nosaveatr', ), - saveatr=dict(argstr='-saveatr', ), - space=dict(argstr='-space %s', ), - xdel=dict(argstr='-xdel %f', ), - xorigin=dict(argstr='-xorigin %s', ), - xyzscale=dict(argstr='-xyzscale %f', ), - ydel=dict(argstr='-ydel %f', ), - yorigin=dict(argstr='-yorigin %s', ), - zdel=dict(argstr='-zdel %f', ), - zorigin=dict(argstr='-zorigin %s', ), + nosaveatr=dict( + argstr="-nosaveatr", + ), + saveatr=dict( + argstr="-saveatr", + ), + space=dict( + argstr="-space %s", + ), + xdel=dict( + argstr="-xdel %f", + ), + xorigin=dict( + argstr="-xorigin %s", + ), + xyzscale=dict( + argstr="-xyzscale %f", + ), + ydel=dict( + argstr="-ydel %f", + ), + yorigin=dict( + argstr="-yorigin %s", + ), + zdel=dict( + argstr="-zdel %f", + ), + zorigin=dict( + argstr="-zorigin %s", + ), ) inputs = Refit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Refit_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Refit.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Remlfit.py b/nipype/interfaces/afni/tests/test_auto_Remlfit.py index 4cdc8b2ff2..cfffeeb40e 100644 --- a/nipype/interfaces/afni/tests/test_auto_Remlfit.py +++ b/nipype/interfaces/afni/tests/test_auto_Remlfit.py @@ -1,106 +1,208 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Remlfit def test_Remlfit_inputs(): input_map = dict( - STATmask=dict(argstr='-STATmask %s', ), + STATmask=dict( + argstr="-STATmask %s", + extensions=None, + ), addbase=dict( - argstr='-addbase %s', + argstr="-addbase %s", copyfile=False, - sep=' ', + sep=" ", + ), + args=dict( + argstr="%s", ), - args=dict(argstr='%s', ), automask=dict( - argstr='-automask', + argstr="-automask", usedefault=True, ), dsort=dict( - argstr='-dsort %s', + argstr="-dsort %s", copyfile=False, + extensions=None, ), dsort_nods=dict( - argstr='-dsort_nods', - requires=['dsort'], + argstr="-dsort_nods", + requires=["dsort"], ), environ=dict( nohash=True, usedefault=True, ), - errts_file=dict(argstr='-Rerrts %s', ), - fitts_file=dict(argstr='-Rfitts %s', ), - fout=dict(argstr='-fout', ), - glt_file=dict(argstr='-Rglt %s', ), - gltsym=dict(argstr='-gltsym "%s" %s...', ), + errts_file=dict( + argstr="-Rerrts %s", + extensions=None, + ), + fitts_file=dict( + argstr="-Rfitts %s", + extensions=None, + ), + fout=dict( + argstr="-fout", + ), + glt_file=dict( + argstr="-Rglt %s", + extensions=None, + ), + gltsym=dict( + argstr='-gltsym "%s" %s...', + ), + goforit=dict( + 
argstr="-GOFORIT", + ), in_files=dict( argstr='-input "%s"', copyfile=False, mandatory=True, - sep=' ', + sep=" ", + ), + mask=dict( + argstr="-mask %s", + extensions=None, ), - mask=dict(argstr='-mask %s', ), matim=dict( - argstr='-matim %s', - xor=['matrix'], + argstr="-matim %s", + extensions=None, + xor=["matrix"], ), matrix=dict( - argstr='-matrix %s', + argstr="-matrix %s", + extensions=None, mandatory=True, ), - nobout=dict(argstr='-nobout', ), + nobout=dict( + argstr="-nobout", + ), nodmbase=dict( - argstr='-nodmbase', - requires=['addbase', 'dsort'], + argstr="-nodmbase", + requires=["addbase", "dsort"], + ), + nofdr=dict( + argstr="-noFDR", ), - nofdr=dict(argstr='-noFDR', ), num_threads=dict( nohash=True, usedefault=True, ), - obeta=dict(argstr='-Obeta %s', ), - obuck=dict(argstr='-Obuck %s', ), - oerrts=dict(argstr='-Oerrts %s', ), - ofitts=dict(argstr='-Ofitts %s', ), - oglt=dict(argstr='-Oglt %s', ), - out_file=dict(argstr='-Rbuck %s', ), + obeta=dict( + argstr="-Obeta %s", + extensions=None, + ), + obuck=dict( + argstr="-Obuck %s", + extensions=None, + ), + oerrts=dict( + argstr="-Oerrts %s", + extensions=None, + ), + ofitts=dict( + argstr="-Ofitts %s", + extensions=None, + ), + oglt=dict( + argstr="-Oglt %s", + extensions=None, + ), + out_file=dict( + argstr="-Rbuck %s", + extensions=None, + ), outputtype=dict(), - ovar=dict(argstr='-Ovar %s', ), + ovar=dict( + argstr="-Ovar %s", + extensions=None, + ), polort=dict( - argstr='-polort %d', - xor=['matrix'], - ), - quiet=dict(argstr='-quiet', ), - rbeta_file=dict(argstr='-Rbeta %s', ), - rout=dict(argstr='-rout', ), - slibase=dict(argstr='-slibase %s', ), - slibase_sm=dict(argstr='-slibase_sm %s', ), - tout=dict(argstr='-tout', ), - usetemp=dict(argstr='-usetemp', ), - var_file=dict(argstr='-Rvar %s', ), - verb=dict(argstr='-verb', ), - wherr_file=dict(argstr='-Rwherr %s', ), + argstr="-polort %d", + xor=["matrix"], + ), + quiet=dict( + argstr="-quiet", + ), + rbeta_file=dict( + argstr="-Rbeta %s", + extensions=None, + ), + rout=dict( + argstr="-rout", + ), + slibase=dict( + argstr="-slibase %s", + ), + slibase_sm=dict( + argstr="-slibase_sm %s", + ), + tout=dict( + argstr="-tout", + ), + usetemp=dict( + argstr="-usetemp", + ), + var_file=dict( + argstr="-Rvar %s", + extensions=None, + ), + verb=dict( + argstr="-verb", + ), + wherr_file=dict( + argstr="-Rwherr %s", + extensions=None, + ), ) inputs = Remlfit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Remlfit_outputs(): output_map = dict( - errts_file=dict(), - fitts_file=dict(), - glt_file=dict(), - obeta=dict(), - obuck=dict(), - oerrts=dict(), - ofitts=dict(), - oglt=dict(), - out_file=dict(), - ovar=dict(), - rbeta_file=dict(), - var_file=dict(), - wherr_file=dict(), + errts_file=dict( + extensions=None, + ), + fitts_file=dict( + extensions=None, + ), + glt_file=dict( + extensions=None, + ), + obeta=dict( + extensions=None, + ), + obuck=dict( + extensions=None, + ), + oerrts=dict( + extensions=None, + ), + ofitts=dict( + extensions=None, + ), + oglt=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + ovar=dict( + extensions=None, + ), + rbeta_file=dict( + extensions=None, + ), + var_file=dict( + extensions=None, + ), + wherr_file=dict( + extensions=None, + ), ) outputs = Remlfit.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Resample.py b/nipype/interfaces/afni/tests/test_auto_Resample.py index 
9ab2309307..792c03aa9f 100644 --- a/nipype/interfaces/afni/tests/test_auto_Resample.py +++ b/nipype/interfaces/afni/tests/test_auto_Resample.py @@ -1,43 +1,61 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Resample def test_Resample_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-inset %s', + argstr="-inset %s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - master=dict(argstr='-master %s', ), + master=dict( + argstr="-master %s", + extensions=None, + ), num_threads=dict( nohash=True, usedefault=True, ), - orientation=dict(argstr='-orient %s', ), + orientation=dict( + argstr="-orient %s", + ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_resample', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_resample", ), outputtype=dict(), - resample_mode=dict(argstr='-rmode %s', ), - voxel_size=dict(argstr='-dxyz %f %f %f', ), + resample_mode=dict( + argstr="-rmode %s", + ), + voxel_size=dict( + argstr="-dxyz %f %f %f", + ), ) inputs = Resample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Resample_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Resample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Retroicor.py b/nipype/interfaces/afni/tests/test_auto_Retroicor.py index 4c2a1c2393..03039a291f 100644 --- a/nipype/interfaces/afni/tests/test_auto_Retroicor.py +++ b/nipype/interfaces/afni/tests/test_auto_Retroicor.py @@ -1,17 +1,20 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Retroicor def test_Retroicor_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), card=dict( - argstr='-card %s', + argstr="-card %s", + extensions=None, position=-2, ), cardphase=dict( - argstr='-cardphase %s', + argstr="-cardphase %s", + extensions=None, hash_files=False, position=-6, ), @@ -20,8 +23,9 @@ def test_Retroicor_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), @@ -30,27 +34,30 @@ def test_Retroicor_inputs(): usedefault=True, ), order=dict( - argstr='-order %s', + argstr="-order %s", position=-5, ), out_file=dict( - argstr='-prefix %s', - name_source=['in_file'], - name_template='%s_retroicor', + argstr="-prefix %s", + extensions=None, + name_source=["in_file"], + name_template="%s_retroicor", position=1, ), outputtype=dict(), resp=dict( - argstr='-resp %s', + argstr="-resp %s", + extensions=None, position=-3, ), respphase=dict( - argstr='-respphase %s', + argstr="-respphase %s", + extensions=None, hash_files=False, position=-7, ), threshold=dict( - argstr='-threshold %d', + argstr="-threshold %d", position=-4, ), ) @@ -59,8 +66,14 @@ def test_Retroicor_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Retroicor_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = 
Retroicor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_SVMTest.py b/nipype/interfaces/afni/tests/test_auto_SVMTest.py index 4b31d3a11c..665a4a6156 100644 --- a/nipype/interfaces/afni/tests/test_auto_SVMTest.py +++ b/nipype/interfaces/afni/tests/test_auto_SVMTest.py @@ -1,46 +1,68 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..svm import SVMTest def test_SVMTest_inputs(): input_map = dict( - args=dict(argstr='%s', ), - classout=dict(argstr='-classout', ), + args=dict( + argstr="%s", + ), + classout=dict( + argstr="-classout", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-testvol %s', + argstr="-testvol %s", + extensions=None, mandatory=True, ), model=dict( - argstr='-model %s', + argstr="-model %s", mandatory=True, ), - multiclass=dict(argstr='-multiclass %s', ), - nodetrend=dict(argstr='-nodetrend', ), - nopredcensord=dict(argstr='-nopredcensord', ), + multiclass=dict( + argstr="-multiclass %s", + ), + nodetrend=dict( + argstr="-nodetrend", + ), + nopredcensord=dict( + argstr="-nopredcensord", + ), num_threads=dict( nohash=True, usedefault=True, ), - options=dict(argstr='%s', ), + options=dict( + argstr="%s", + ), out_file=dict( - argstr='-predictions %s', - name_template='%s_predictions', + argstr="-predictions %s", + extensions=None, + name_template="%s_predictions", ), outputtype=dict(), - testlabels=dict(argstr='-testlabels %s', ), + testlabels=dict( + argstr="-testlabels %s", + extensions=None, + ), ) inputs = SVMTest.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SVMTest_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SVMTest.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_SVMTrain.py b/nipype/interfaces/afni/tests/test_auto_SVMTrain.py index ddb3f8c05f..f79bf1b9ac 100644 --- a/nipype/interfaces/afni/tests/test_auto_SVMTrain.py +++ b/nipype/interfaces/afni/tests/test_auto_SVMTrain.py @@ -1,70 +1,100 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..svm import SVMTrain def test_SVMTrain_inputs(): input_map = dict( alphas=dict( - argstr='-alpha %s', - name_source='in_file', - name_template='%s_alphas', - suffix='_alphas', + argstr="-alpha %s", + extensions=None, + name_source="in_file", + name_template="%s_alphas", + suffix="_alphas", + ), + args=dict( + argstr="%s", + ), + censor=dict( + argstr="-censor %s", + extensions=None, ), - args=dict(argstr='%s', ), - censor=dict(argstr='-censor %s', ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-trainvol %s', + argstr="-trainvol %s", copyfile=False, + extensions=None, mandatory=True, ), - kernel=dict(argstr='-kernel %s', ), + kernel=dict( + argstr="-kernel %s", + ), mask=dict( - argstr='-mask %s', + argstr="-mask %s", copyfile=False, + extensions=None, position=-1, ), - max_iterations=dict(argstr='-max_iterations %d', ), + max_iterations=dict( + argstr="-max_iterations %d", + ), model=dict( - argstr='-model %s', - name_source='in_file', - name_template='%s_model', - suffix='_model', + argstr="-model %s", + extensions=None, + name_source="in_file", + name_template="%s_model", + suffix="_model", + ), + nomodelmask=dict( + argstr="-nomodelmask", 
), - nomodelmask=dict(argstr='-nomodelmask', ), num_threads=dict( nohash=True, usedefault=True, ), - options=dict(argstr='%s', ), + options=dict( + argstr="%s", + ), out_file=dict( - argstr='-bucket %s', - name_source='in_file', - name_template='%s_vectors', - suffix='_bucket', + argstr="-bucket %s", + extensions=None, + name_source="in_file", + name_template="%s_vectors", + suffix="_bucket", ), outputtype=dict(), - trainlabels=dict(argstr='-trainlabels %s', ), + trainlabels=dict( + argstr="-trainlabels %s", + extensions=None, + ), ttype=dict( - argstr='-type %s', + argstr="-type %s", mandatory=True, ), - w_out=dict(argstr='-wout', ), + w_out=dict( + argstr="-wout", + ), ) inputs = SVMTrain.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SVMTrain_outputs(): output_map = dict( - alphas=dict(), - model=dict(), - out_file=dict(), + alphas=dict( + extensions=None, + ), + model=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), ) outputs = SVMTrain.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Seg.py b/nipype/interfaces/afni/tests/test_auto_Seg.py index e93d81a7aa..38b256d1ea 100644 --- a/nipype/interfaces/afni/tests/test_auto_Seg.py +++ b/nipype/interfaces/afni/tests/test_auto_Seg.py @@ -1,43 +1,69 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Seg def test_Seg_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bias_classes=dict(argstr='-bias_classes %s', ), - bias_fwhm=dict(argstr='-bias_fwhm %f', ), - blur_meth=dict(argstr='-blur_meth %s', ), - bmrf=dict(argstr='-bmrf %f', ), - classes=dict(argstr='-classes %s', ), + args=dict( + argstr="%s", + ), + bias_classes=dict( + argstr="-bias_classes %s", + ), + bias_fwhm=dict( + argstr="-bias_fwhm %f", + ), + blur_meth=dict( + argstr="-blur_meth %s", + ), + bmrf=dict( + argstr="-bmrf %f", + ), + classes=dict( + argstr="-classes %s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-anat %s', + argstr="-anat %s", copyfile=True, + extensions=None, mandatory=True, position=-1, ), - main_N=dict(argstr='-main_N %d', ), + main_N=dict( + argstr="-main_N %d", + ), mask=dict( - argstr='-mask %s', + argstr="-mask %s", mandatory=True, position=-2, ), - mixfloor=dict(argstr='-mixfloor %f', ), - mixfrac=dict(argstr='-mixfrac %s', ), - prefix=dict(argstr='-prefix %s', ), + mixfloor=dict( + argstr="-mixfloor %f", + ), + mixfrac=dict( + argstr="-mixfrac %s", + ), + prefix=dict( + argstr="-prefix %s", + ), ) inputs = Seg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Seg_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Seg.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_SkullStrip.py b/nipype/interfaces/afni/tests/test_auto_SkullStrip.py index e960369787..4f78254e47 100644 --- a/nipype/interfaces/afni/tests/test_auto_SkullStrip.py +++ b/nipype/interfaces/afni/tests/test_auto_SkullStrip.py @@ -1,18 +1,20 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import SkullStrip def test_SkullStrip_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + 
argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, + extensions=None, mandatory=True, position=1, ), @@ -21,9 +23,10 @@ def test_SkullStrip_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_skullstrip', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_skullstrip", ), outputtype=dict(), ) @@ -32,8 +35,14 @@ def test_SkullStrip_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SkullStrip_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SkullStrip.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Synthesize.py b/nipype/interfaces/afni/tests/test_auto_Synthesize.py index 87278098fb..9f787edbfc 100644 --- a/nipype/interfaces/afni/tests/test_auto_Synthesize.py +++ b/nipype/interfaces/afni/tests/test_auto_Synthesize.py @@ -1,26 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Synthesize def test_Synthesize_inputs(): input_map = dict( - TR=dict(argstr='-TR %f', ), - args=dict(argstr='%s', ), + TR=dict( + argstr="-TR %f", + ), + args=dict( + argstr="%s", + ), cbucket=dict( - argstr='-cbucket %s', + argstr="-cbucket %s", copyfile=False, + extensions=None, mandatory=True, ), - cenfill=dict(argstr='-cenfill %s', ), - dry_run=dict(argstr='-dry', ), + cenfill=dict( + argstr="-cenfill %s", + ), + dry_run=dict( + argstr="-dry", + ), environ=dict( nohash=True, usedefault=True, ), matrix=dict( - argstr='-matrix %s', + argstr="-matrix %s", copyfile=False, + extensions=None, mandatory=True, ), num_threads=dict( @@ -28,12 +37,13 @@ def test_Synthesize_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_template='syn', + argstr="-prefix %s", + extensions=None, + name_template="syn", ), outputtype=dict(), select=dict( - argstr='-select %s', + argstr="-select %s", mandatory=True, ), ) @@ -42,8 +52,14 @@ def test_Synthesize_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Synthesize_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Synthesize.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TCat.py b/nipype/interfaces/afni/tests/test_auto_TCat.py index 5c51b02637..595e91383e 100644 --- a/nipype/interfaces/afni/tests/test_auto_TCat.py +++ b/nipype/interfaces/afni/tests/test_auto_TCat.py @@ -1,17 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TCat def test_TCat_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( - argstr=' %s', + argstr=" %s", copyfile=False, mandatory=True, position=-1, @@ -21,24 +22,33 @@ def test_TCat_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='in_files', - name_template='%s_tcat', + argstr="-prefix %s", + extensions=None, + name_source="in_files", + name_template="%s_tcat", ), 
outputtype=dict(), rlt=dict( - argstr='-rlt%s', + argstr="-rlt%s", position=1, ), - verbose=dict(argstr='-verb', ), + verbose=dict( + argstr="-verb", + ), ) inputs = TCat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCat_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TCat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TCatSubBrick.py b/nipype/interfaces/afni/tests/test_auto_TCatSubBrick.py index 09b21ea408..728d281d27 100644 --- a/nipype/interfaces/afni/tests/test_auto_TCatSubBrick.py +++ b/nipype/interfaces/afni/tests/test_auto_TCatSubBrick.py @@ -1,17 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TCatSubBrick def test_TCatSubBrick_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( - argstr='%s%s ...', + argstr="%s%s ...", copyfile=False, mandatory=True, position=-1, @@ -21,12 +22,13 @@ def test_TCatSubBrick_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", + extensions=None, genfile=True, ), outputtype=dict(), rlt=dict( - argstr='-rlt%s', + argstr="-rlt%s", position=1, ), ) @@ -35,8 +37,14 @@ def test_TCatSubBrick_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCatSubBrick_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TCatSubBrick.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TCorr1D.py b/nipype/interfaces/afni/tests/test_auto_TCorr1D.py index e1b23a3387..665a0dfc3d 100644 --- a/nipype/interfaces/afni/tests/test_auto_TCorr1D.py +++ b/nipype/interfaces/afni/tests/test_auto_TCorr1D.py @@ -1,54 +1,58 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import TCorr1D def test_TCorr1D_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), ktaub=dict( - argstr=' -ktaub', + argstr=" -ktaub", position=1, - xor=['pearson', 'spearman', 'quadrant'], + xor=["pearson", "spearman", "quadrant"], ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", + extensions=None, keep_extension=True, - name_source='xset', - name_template='%s_correlation.nii.gz', + name_source="xset", + name_template="%s_correlation.nii.gz", ), outputtype=dict(), pearson=dict( - argstr=' -pearson', + argstr=" -pearson", position=1, - xor=['spearman', 'quadrant', 'ktaub'], + xor=["spearman", "quadrant", "ktaub"], ), quadrant=dict( - argstr=' -quadrant', + argstr=" -quadrant", position=1, - xor=['pearson', 'spearman', 'ktaub'], + xor=["pearson", "spearman", "ktaub"], ), spearman=dict( - argstr=' -spearman', + argstr=" -spearman", position=1, - xor=['pearson', 'quadrant', 'ktaub'], + xor=["pearson", "quadrant", "ktaub"], ), xset=dict( - argstr=' %s', + argstr=" %s", copyfile=False, + extensions=None, mandatory=True, position=-2, ), y_1d=dict( - argstr=' %s', + argstr=" %s", + 
extensions=None, mandatory=True, position=-1, ), @@ -58,8 +62,14 @@ def test_TCorr1D_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCorr1D_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TCorr1D.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TCorrMap.py b/nipype/interfaces/afni/tests/test_auto_TCorrMap.py index 32778fcf11..8e6b1860ff 100644 --- a/nipype/interfaces/afni/tests/test_auto_TCorrMap.py +++ b/nipype/interfaces/afni/tests/test_auto_TCorrMap.py @@ -1,40 +1,55 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import TCorrMap def test_TCorrMap_inputs(): input_map = dict( absolute_threshold=dict( - argstr='-Thresh %f %s', - name_source='in_file', - suffix='_thresh', - xor=('absolute_threshold', 'var_absolute_threshold', - 'var_absolute_threshold_normalize'), - ), - args=dict(argstr='%s', ), - automask=dict(argstr='-automask', ), + argstr="-Thresh %f %s", + extensions=None, + name_source="in_file", + suffix="_thresh", + xor=( + "absolute_threshold", + "var_absolute_threshold", + "var_absolute_threshold_normalize", + ), + ), + args=dict( + argstr="%s", + ), + automask=dict( + argstr="-automask", + ), average_expr=dict( - argstr='-Aexpr %s %s', - name_source='in_file', - suffix='_aexpr', - xor=('average_expr', 'average_expr_nonzero', 'sum_expr'), + argstr="-Aexpr %s %s", + extensions=None, + name_source="in_file", + suffix="_aexpr", + xor=("average_expr", "average_expr_nonzero", "sum_expr"), ), average_expr_nonzero=dict( - argstr='-Cexpr %s %s', - name_source='in_file', - suffix='_cexpr', - xor=('average_expr', 'average_expr_nonzero', 'sum_expr'), + argstr="-Cexpr %s %s", + extensions=None, + name_source="in_file", + suffix="_cexpr", + xor=("average_expr", "average_expr_nonzero", "sum_expr"), + ), + bandpass=dict( + argstr="-bpass %f %f", + ), + blur_fwhm=dict( + argstr="-Gblur %f", ), - bandpass=dict(argstr='-bpass %f %f', ), - blur_fwhm=dict(argstr='-Gblur %f', ), correlation_maps=dict( - argstr='-CorrMap %s', - name_source='in_file', + argstr="-CorrMap %s", + extensions=None, + name_source="in_file", ), correlation_maps_masked=dict( - argstr='-CorrMask %s', - name_source='in_file', + argstr="-CorrMask %s", + extensions=None, + name_source="in_file", ), environ=dict( nohash=True, @@ -42,77 +57,102 @@ def test_TCorrMap_inputs(): ), expr=dict(), histogram=dict( - argstr='-Hist %d %s', - name_source='in_file', - suffix='_hist', + argstr="-Hist %d %s", + extensions=None, + name_source="in_file", + suffix="_hist", ), histogram_bin_numbers=dict(), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, + extensions=None, mandatory=True, ), - mask=dict(argstr='-mask %s', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), mean_file=dict( - argstr='-Mean %s', - name_source='in_file', - suffix='_mean', + argstr="-Mean %s", + extensions=None, + name_source="in_file", + suffix="_mean", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source=['in_file'], - name_template='%s_afni', + argstr="-prefix %s", + extensions=None, + name_source=["in_file"], + name_template="%s_afni", ), outputtype=dict(), pmean=dict( - argstr='-Pmean %s', - name_source='in_file', - suffix='_pmean', + argstr="-Pmean %s", 
+ extensions=None, + name_source="in_file", + suffix="_pmean", + ), + polort=dict( + argstr="-polort %d", ), - polort=dict(argstr='-polort %d', ), qmean=dict( - argstr='-Qmean %s', - name_source='in_file', - suffix='_qmean', + argstr="-Qmean %s", + extensions=None, + name_source="in_file", + suffix="_qmean", + ), + regress_out_timeseries=dict( + argstr="-ort %s", + extensions=None, ), - regress_out_timeseries=dict(argstr='-ort %s', ), seeds=dict( - argstr='-seed %s', - xor='seeds_width', + argstr="-seed %s", + extensions=None, + xor="seeds_width", ), seeds_width=dict( - argstr='-Mseed %f', - xor='seeds', + argstr="-Mseed %f", + xor="seeds", ), sum_expr=dict( - argstr='-Sexpr %s %s', - name_source='in_file', - suffix='_sexpr', - xor=('average_expr', 'average_expr_nonzero', 'sum_expr'), + argstr="-Sexpr %s %s", + extensions=None, + name_source="in_file", + suffix="_sexpr", + xor=("average_expr", "average_expr_nonzero", "sum_expr"), ), thresholds=dict(), var_absolute_threshold=dict( - argstr='-VarThresh %f %f %f %s', - name_source='in_file', - suffix='_varthresh', - xor=('absolute_threshold', 'var_absolute_threshold', - 'var_absolute_threshold_normalize'), + argstr="-VarThresh %f %f %f %s", + extensions=None, + name_source="in_file", + suffix="_varthresh", + xor=( + "absolute_threshold", + "var_absolute_threshold", + "var_absolute_threshold_normalize", + ), ), var_absolute_threshold_normalize=dict( - argstr='-VarThreshN %f %f %f %s', - name_source='in_file', - suffix='_varthreshn', - xor=('absolute_threshold', 'var_absolute_threshold', - 'var_absolute_threshold_normalize'), + argstr="-VarThreshN %f %f %f %s", + extensions=None, + name_source="in_file", + suffix="_varthreshn", + xor=( + "absolute_threshold", + "var_absolute_threshold", + "var_absolute_threshold_normalize", + ), ), zmean=dict( - argstr='-Zmean %s', - name_source='in_file', - suffix='_zmean', + argstr="-Zmean %s", + extensions=None, + name_source="in_file", + suffix="_zmean", ), ) inputs = TCorrMap.input_spec() @@ -120,21 +160,49 @@ def test_TCorrMap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCorrMap_outputs(): output_map = dict( - absolute_threshold=dict(), - average_expr=dict(), - average_expr_nonzero=dict(), - correlation_maps=dict(), - correlation_maps_masked=dict(), - histogram=dict(), - mean_file=dict(), - pmean=dict(), - qmean=dict(), - sum_expr=dict(), - var_absolute_threshold=dict(), - var_absolute_threshold_normalize=dict(), - zmean=dict(), + absolute_threshold=dict( + extensions=None, + ), + average_expr=dict( + extensions=None, + ), + average_expr_nonzero=dict( + extensions=None, + ), + correlation_maps=dict( + extensions=None, + ), + correlation_maps_masked=dict( + extensions=None, + ), + histogram=dict( + extensions=None, + ), + mean_file=dict( + extensions=None, + ), + pmean=dict( + extensions=None, + ), + qmean=dict( + extensions=None, + ), + sum_expr=dict( + extensions=None, + ), + var_absolute_threshold=dict( + extensions=None, + ), + var_absolute_threshold_normalize=dict( + extensions=None, + ), + zmean=dict( + extensions=None, + ), ) outputs = TCorrMap.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_TCorrelate.py b/nipype/interfaces/afni/tests/test_auto_TCorrelate.py index 8d773f429e..1e85d44b68 100644 --- a/nipype/interfaces/afni/tests/test_auto_TCorrelate.py +++ b/nipype/interfaces/afni/tests/test_auto_TCorrelate.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by 
tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import TCorrelate def test_TCorrelate_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, @@ -15,22 +16,29 @@ def test_TCorrelate_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='xset', - name_template='%s_tcorr', + argstr="-prefix %s", + extensions=None, + name_source="xset", + name_template="%s_tcorr", ), outputtype=dict(), - pearson=dict(argstr='-pearson', ), - polort=dict(argstr='-polort %d', ), + pearson=dict( + argstr="-pearson", + ), + polort=dict( + argstr="-polort %d", + ), xset=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-2, ), yset=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), @@ -40,8 +48,14 @@ def test_TCorrelate_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCorrelate_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TCorrelate.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TNorm.py b/nipype/interfaces/afni/tests/test_auto_TNorm.py index 3fb246684b..975345d92a 100644 --- a/nipype/interfaces/afni/tests/test_auto_TNorm.py +++ b/nipype/interfaces/afni/tests/test_auto_TNorm.py @@ -1,45 +1,66 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import TNorm def test_TNorm_inputs(): input_map = dict( - L1fit=dict(argstr='-L1fit', ), - args=dict(argstr='%s', ), + L1fit=dict( + argstr="-L1fit", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - norm1=dict(argstr='-norm1', ), - norm2=dict(argstr='-norm2', ), - normR=dict(argstr='-normR', ), - normx=dict(argstr='-normx', ), + norm1=dict( + argstr="-norm1", + ), + norm2=dict( + argstr="-norm2", + ), + normR=dict( + argstr="-normR", + ), + normx=dict( + argstr="-normx", + ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_tnorm', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_tnorm", ), outputtype=dict(), - polort=dict(argstr='-polort %s', ), + polort=dict( + argstr="-polort %s", + ), ) inputs = TNorm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TNorm_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TNorm.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TProject.py b/nipype/interfaces/afni/tests/test_auto_TProject.py index ba4efd6189..d8f9990817 100644 --- a/nipype/interfaces/afni/tests/test_auto_TProject.py +++ b/nipype/interfaces/afni/tests/test_auto_TProject.py @@ -1,58 +1,99 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import TProject def test_TProject_inputs(): input_map = dict( - 
TR=dict(argstr='-TR %g', ), - args=dict(argstr='%s', ), + TR=dict( + argstr="-TR %g", + ), + args=dict( + argstr="%s", + ), automask=dict( - argstr='-automask', - xor=['mask'], - ), - bandpass=dict(argstr='-bandpass %g %g', ), - blur=dict(argstr='-blur %g', ), - cenmode=dict(argstr='-cenmode %s', ), - censor=dict(argstr='-censor %s', ), - censortr=dict(argstr='-CENSORTR %s', ), - concat=dict(argstr='-concat %s', ), - dsort=dict(argstr='-dsort %s...', ), + argstr="-automask", + xor=["mask"], + ), + bandpass=dict( + argstr="-bandpass %g %g", + ), + blur=dict( + argstr="-blur %g", + ), + cenmode=dict( + argstr="-cenmode %s", + ), + censor=dict( + argstr="-censor %s", + extensions=None, + ), + censortr=dict( + argstr="-CENSORTR %s", + ), + concat=dict( + argstr="-concat %s", + extensions=None, + ), + dsort=dict( + argstr="-dsort %s...", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, + extensions=None, mandatory=True, position=1, ), - mask=dict(argstr='-mask %s', ), - noblock=dict(argstr='-noblock', ), - norm=dict(argstr='-norm', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), + noblock=dict( + argstr="-noblock", + ), + norm=dict( + argstr="-norm", + ), num_threads=dict( nohash=True, usedefault=True, ), - ort=dict(argstr='-ort %s', ), + ort=dict( + argstr="-ort %s", + extensions=None, + ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_tproject', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_tproject", position=-1, ), outputtype=dict(), - polort=dict(argstr='-polort %d', ), - stopband=dict(argstr='-stopband %g %g', ), + polort=dict( + argstr="-polort %d", + ), + stopband=dict( + argstr="-stopband %g %g", + ), ) inputs = TProject.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TProject_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TProject.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TShift.py b/nipype/interfaces/afni/tests/test_auto_TShift.py index b267986952..78af699c7c 100644 --- a/nipype/interfaces/afni/tests/test_auto_TShift.py +++ b/nipype/interfaces/afni/tests/test_auto_TShift.py @@ -1,52 +1,67 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import TShift def test_TShift_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - ignore=dict(argstr='-ignore %s', ), + ignore=dict( + argstr="-ignore %s", + ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - interp=dict(argstr='-%s', ), + interp=dict( + argstr="-%s", + ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_tshift', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_tshift", ), outputtype=dict(), - rlt=dict(argstr='-rlt', ), - rltplus=dict(argstr='-rlt+', ), - slice_encoding_direction=dict(usedefault=True, ), + rlt=dict( + argstr="-rlt", + ), + rltplus=dict( + argstr="-rlt+", + ), + slice_encoding_direction=dict( + usedefault=True, + ), slice_timing=dict( - 
argstr='-tpattern @%s', - xor=['tpattern'], + argstr="-tpattern @%s", + xor=["tpattern"], ), tpattern=dict( - argstr='-tpattern %s', - xor=['slice_timing'], + argstr="-tpattern %s", + xor=["slice_timing"], + ), + tr=dict( + argstr="-TR %s", ), - tr=dict(argstr='-TR %s', ), tslice=dict( - argstr='-slice %s', - xor=['tzero'], + argstr="-slice %s", + xor=["tzero"], ), tzero=dict( - argstr='-tzero %s', - xor=['tslice'], + argstr="-tzero %s", + xor=["tslice"], ), ) inputs = TShift.input_spec() @@ -54,10 +69,16 @@ def test_TShift_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TShift_outputs(): output_map = dict( - out_file=dict(), - timing_file=dict(), + out_file=dict( + extensions=None, + ), + timing_file=dict( + extensions=None, + ), ) outputs = TShift.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_TSmooth.py b/nipype/interfaces/afni/tests/test_auto_TSmooth.py new file mode 100644 index 0000000000..1836d31c22 --- /dev/null +++ b/nipype/interfaces/afni/tests/test_auto_TSmooth.py @@ -0,0 +1,78 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..preprocess import TSmooth + + +def test_TSmooth_inputs(): + input_map = dict( + adaptive=dict( + argstr="-adaptive %d", + ), + args=dict( + argstr="%s", + ), + blackman=dict( + argstr="-blackman %d", + ), + custom=dict( + argstr="-custom %s", + extensions=None, + ), + datum=dict( + argstr="-datum %s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + hamming=dict( + argstr="-hamming %d", + ), + in_file=dict( + argstr="%s", + copyfile=False, + extensions=None, + mandatory=True, + position=-1, + ), + lin=dict( + argstr="-lin", + ), + lin3=dict( + argstr="-3lin %d", + ), + med=dict( + argstr="-med", + ), + num_threads=dict( + nohash=True, + usedefault=True, + ), + osf=dict( + argstr="-osf", + ), + out_file=dict( + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_smooth", + ), + outputtype=dict(), + ) + inputs = TSmooth.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_TSmooth_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = TSmooth.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/afni/tests/test_auto_TStat.py b/nipype/interfaces/afni/tests/test_auto_TStat.py index 5a3ebd8a21..b7499c5442 100644 --- a/nipype/interfaces/afni/tests/test_auto_TStat.py +++ b/nipype/interfaces/afni/tests/test_auto_TStat.py @@ -1,31 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TStat def test_TStat_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - mask=dict(argstr='-mask %s', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), num_threads=dict( nohash=True, usedefault=True, ), - options=dict(argstr='%s', ), + options=dict( + argstr="%s", + ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_tstat', + argstr="-prefix %s", + extensions=None, + 
name_source="in_file", + name_template="%s_tstat", ), outputtype=dict(), ) @@ -34,8 +42,14 @@ def test_TStat_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TStat_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TStat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_To3D.py b/nipype/interfaces/afni/tests/test_auto_To3D.py index 66ccabefcb..6121efbe57 100644 --- a/nipype/interfaces/afni/tests/test_auto_To3D.py +++ b/nipype/interfaces/afni/tests/test_auto_To3D.py @@ -1,21 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import To3D def test_To3D_inputs(): input_map = dict( - args=dict(argstr='%s', ), - assumemosaic=dict(argstr='-assume_dicom_mosaic', ), - datatype=dict(argstr='-datum %s', ), + args=dict( + argstr="%s", + ), + assumemosaic=dict( + argstr="-assume_dicom_mosaic", + ), + datatype=dict( + argstr="-datum %s", + ), environ=dict( nohash=True, usedefault=True, ), - filetype=dict(argstr='-%s', ), - funcparams=dict(argstr='-time:zt %s alt+z2', ), + filetype=dict( + argstr="-%s", + ), + funcparams=dict( + argstr="-time:zt %s alt+z2", + ), in_folder=dict( - argstr='%s/*.dcm', + argstr="%s/*.dcm", mandatory=True, position=-1, ), @@ -24,20 +33,29 @@ def test_To3D_inputs(): usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source=['in_folder'], - name_template='%s', + argstr="-prefix %s", + extensions=None, + name_source=["in_folder"], + name_template="%s", ), outputtype=dict(), - skipoutliers=dict(argstr='-skip_outliers', ), + skipoutliers=dict( + argstr="-skip_outliers", + ), ) inputs = To3D.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_To3D_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = To3D.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Undump.py b/nipype/interfaces/afni/tests/test_auto_Undump.py index 07d0ff8e81..64c98cf25a 100644 --- a/nipype/interfaces/afni/tests/test_auto_Undump.py +++ b/nipype/interfaces/afni/tests/test_auto_Undump.py @@ -1,46 +1,72 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Undump def test_Undump_inputs(): input_map = dict( - args=dict(argstr='%s', ), - coordinates_specification=dict(argstr='-%s', ), - datatype=dict(argstr='-datum %s', ), - default_value=dict(argstr='-dval %f', ), + args=dict( + argstr="%s", + ), + coordinates_specification=dict( + argstr="-%s", + ), + datatype=dict( + argstr="-datum %s", + ), + default_value=dict( + argstr="-dval %f", + ), environ=dict( nohash=True, usedefault=True, ), - fill_value=dict(argstr='-fval %f', ), - head_only=dict(argstr='-head_only', ), + fill_value=dict( + argstr="-fval %f", + ), + head_only=dict( + argstr="-head_only", + ), in_file=dict( - argstr='-master %s', + argstr="-master %s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - mask_file=dict(argstr='-mask %s', ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), num_threads=dict( nohash=True, usedefault=True, ), - orient=dict(argstr='-orient %s', ), 
+ orient=dict( + argstr="-orient %s", + ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', + argstr="-prefix %s", + extensions=None, + name_source="in_file", ), outputtype=dict(), - srad=dict(argstr='-srad %f', ), + srad=dict( + argstr="-srad %f", + ), ) inputs = Undump.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Undump_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Undump.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Unifize.py b/nipype/interfaces/afni/tests/test_auto_Unifize.py index 3215d51e64..15832152c2 100644 --- a/nipype/interfaces/afni/tests/test_auto_Unifize.py +++ b/nipype/interfaces/afni/tests/test_auto_Unifize.py @@ -1,55 +1,83 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Unifize def test_Unifize_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cl_frac=dict(argstr='-clfrac %f', ), + args=dict( + argstr="%s", + ), + cl_frac=dict( + argstr="-clfrac %f", + ), environ=dict( nohash=True, usedefault=True, ), epi=dict( - argstr='-EPI', - requires=['no_duplo', 't2'], - xor=['gm'], + argstr="-EPI", + requires=["no_duplo", "t2"], + xor=["gm"], + ), + gm=dict( + argstr="-GM", ), - gm=dict(argstr='-GM', ), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - no_duplo=dict(argstr='-noduplo', ), + no_duplo=dict( + argstr="-noduplo", + ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_unifized', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_unifized", ), outputtype=dict(), - quiet=dict(argstr='-quiet', ), - rbt=dict(argstr='-rbt %f %f %f', ), - scale_file=dict(argstr='-ssave %s', ), - t2=dict(argstr='-T2', ), - t2_up=dict(argstr='-T2up %f', ), - urad=dict(argstr='-Urad %s', ), + quiet=dict( + argstr="-quiet", + ), + rbt=dict( + argstr="-rbt %f %f %f", + ), + scale_file=dict( + argstr="-ssave %s", + extensions=None, + ), + t2=dict( + argstr="-T2", + ), + t2_up=dict( + argstr="-T2up %f", + ), + urad=dict( + argstr="-Urad %s", + ), ) inputs = Unifize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Unifize_outputs(): output_map = dict( - out_file=dict(), - scale_file=dict(), + out_file=dict( + extensions=None, + ), + scale_file=dict( + extensions=None, + ), ) outputs = Unifize.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Volreg.py b/nipype/interfaces/afni/tests/test_auto_Volreg.py index 293b7613e8..658b933391 100644 --- a/nipype/interfaces/afni/tests/test_auto_Volreg.py +++ b/nipype/interfaces/afni/tests/test_auto_Volreg.py @@ -1,33 +1,43 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Volreg def test_Volreg_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), basefile=dict( - argstr='-base %s', + argstr="-base %s", + extensions=None, position=-6, ), - copyorigin=dict(argstr='-twodup', ), + copyorigin=dict( + argstr="-twodup", + ), environ=dict( nohash=True, usedefault=True, ), 
in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - in_weight_volume=dict(argstr="-weight '%s[%d]'", ), - interp=dict(argstr='-%s', ), + in_weight_volume=dict( + argstr="-weight '%s[%d]'", + ), + interp=dict( + argstr="-%s", + ), md1d_file=dict( - argstr='-maxdisp1D %s', + argstr="-maxdisp1D %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_md.1D', + name_source="in_file", + name_template="%s_md.1D", position=-4, ), num_threads=dict( @@ -35,27 +45,34 @@ def test_Volreg_inputs(): usedefault=True, ), oned_file=dict( - argstr='-1Dfile %s', + argstr="-1Dfile %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s.1D', + name_source="in_file", + name_template="%s.1D", ), oned_matrix_save=dict( - argstr='-1Dmatrix_save %s', + argstr="-1Dmatrix_save %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s.aff12.1D', + name_source="in_file", + name_template="%s.aff12.1D", ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_volreg', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_volreg", ), outputtype=dict(), - timeshift=dict(argstr='-tshift 0', ), - verbose=dict(argstr='-verbose', ), + timeshift=dict( + argstr="-tshift 0", + ), + verbose=dict( + argstr="-verbose", + ), zpad=dict( - argstr='-zpad %d', + argstr="-zpad %d", position=-5, ), ) @@ -64,12 +81,22 @@ def test_Volreg_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Volreg_outputs(): output_map = dict( - md1d_file=dict(), - oned_file=dict(), - oned_matrix_save=dict(), - out_file=dict(), + md1d_file=dict( + extensions=None, + ), + oned_file=dict( + extensions=None, + ), + oned_matrix_save=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), ) outputs = Volreg.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Warp.py b/nipype/interfaces/afni/tests/test_auto_Warp.py index b85692310a..9d694f8425 100644 --- a/nipype/interfaces/afni/tests/test_auto_Warp.py +++ b/nipype/interfaces/afni/tests/test_auto_Warp.py @@ -1,53 +1,87 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Warp def test_Warp_inputs(): input_map = dict( - args=dict(argstr='%s', ), - deoblique=dict(argstr='-deoblique', ), + args=dict( + argstr="%s", + ), + deoblique=dict( + argstr="-deoblique", + ), environ=dict( nohash=True, usedefault=True, ), - gridset=dict(argstr='-gridset %s', ), + gridset=dict( + argstr="-gridset %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - interp=dict(argstr='-%s', ), - matparent=dict(argstr='-matparent %s', ), - mni2tta=dict(argstr='-mni2tta', ), - newgrid=dict(argstr='-newgrid %f', ), + interp=dict( + argstr="-%s", + ), + matparent=dict( + argstr="-matparent %s", + extensions=None, + ), + mni2tta=dict( + argstr="-mni2tta", + ), + newgrid=dict( + argstr="-newgrid %f", + ), num_threads=dict( nohash=True, usedefault=True, ), - oblique_parent=dict(argstr='-oblique_parent %s', ), + oblique_parent=dict( + argstr="-oblique_parent %s", + extensions=None, + ), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_warp', 
+ name_source="in_file", + name_template="%s_warp", ), outputtype=dict(), - save_warp=dict(requires=['verbose'], ), - tta2mni=dict(argstr='-tta2mni', ), - verbose=dict(argstr='-verb', ), - zpad=dict(argstr='-zpad %d', ), + save_warp=dict( + requires=["verbose"], + ), + tta2mni=dict( + argstr="-tta2mni", + ), + verbose=dict( + argstr="-verb", + ), + zpad=dict( + argstr="-zpad %d", + ), ) inputs = Warp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Warp_outputs(): output_map = dict( - out_file=dict(), - warp_file=dict(), + out_file=dict( + extensions=None, + ), + warp_file=dict( + extensions=None, + ), ) outputs = Warp.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_ZCutUp.py b/nipype/interfaces/afni/tests/test_auto_ZCutUp.py index cacb6b1534..3c51d6dd1d 100644 --- a/nipype/interfaces/afni/tests/test_auto_ZCutUp.py +++ b/nipype/interfaces/afni/tests/test_auto_ZCutUp.py @@ -1,30 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ZCutUp def test_ZCutUp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), - keep=dict(argstr='-keep %s', ), + keep=dict( + argstr="-keep %s", + ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='in_file', - name_template='%s_zcutup', + argstr="-prefix %s", + extensions=None, + name_source="in_file", + name_template="%s_zcutup", ), outputtype=dict(), ) @@ -33,8 +38,14 @@ def test_ZCutUp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ZCutUp_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ZCutUp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Zcat.py b/nipype/interfaces/afni/tests/test_auto_Zcat.py index 3a2b7ff853..e06f343591 100644 --- a/nipype/interfaces/afni/tests/test_auto_Zcat.py +++ b/nipype/interfaces/afni/tests/test_auto_Zcat.py @@ -1,49 +1,61 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Zcat def test_Zcat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - datum=dict(argstr='-datum %s', ), + args=dict( + argstr="%s", + ), + datum=dict( + argstr="-datum %s", + ), environ=dict( nohash=True, usedefault=True, ), fscale=dict( - argstr='-fscale', - xor=['nscale'], + argstr="-fscale", + xor=["nscale"], ), in_files=dict( - argstr='%s', + argstr="%s", copyfile=False, mandatory=True, position=-1, ), nscale=dict( - argstr='-nscale', - xor=['fscale'], + argstr="-nscale", + xor=["fscale"], ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_source='in_files', - name_template='%s_zcat', + argstr="-prefix %s", + extensions=None, + name_source="in_files", + name_template="%s_zcat", ), outputtype=dict(), - verb=dict(argstr='-verb', ), + verb=dict( + argstr="-verb", + ), ) inputs = Zcat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) 
== value + + def test_Zcat_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Zcat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Zeropad.py b/nipype/interfaces/afni/tests/test_auto_Zeropad.py index cfba8e2f3c..4d6742f21e 100644 --- a/nipype/interfaces/afni/tests/test_auto_Zeropad.py +++ b/nipype/interfaces/afni/tests/test_auto_Zeropad.py @@ -1,77 +1,81 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Zeropad def test_Zeropad_inputs(): input_map = dict( A=dict( - argstr='-A %i', - xor=['master'], + argstr="-A %i", + xor=["master"], ), AP=dict( - argstr='-AP %i', - xor=['master'], + argstr="-AP %i", + xor=["master"], ), I=dict( - argstr='-I %i', - xor=['master'], + argstr="-I %i", + xor=["master"], ), IS=dict( - argstr='-IS %i', - xor=['master'], + argstr="-IS %i", + xor=["master"], ), L=dict( - argstr='-L %i', - xor=['master'], + argstr="-L %i", + xor=["master"], ), P=dict( - argstr='-P %i', - xor=['master'], + argstr="-P %i", + xor=["master"], ), R=dict( - argstr='-R %i', - xor=['master'], + argstr="-R %i", + xor=["master"], ), RL=dict( - argstr='-RL %i', - xor=['master'], + argstr="-RL %i", + xor=["master"], ), S=dict( - argstr='-S %i', - xor=['master'], + argstr="-S %i", + xor=["master"], + ), + args=dict( + argstr="%s", ), - args=dict(argstr='%s', ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-1, ), master=dict( - argstr='-master %s', - xor=['I', 'S', 'A', 'P', 'L', 'R', 'z', 'RL', 'AP', 'IS', 'mm'], + argstr="-master %s", + extensions=None, + xor=["I", "S", "A", "P", "L", "R", "z", "RL", "AP", "IS", "mm"], ), mm=dict( - argstr='-mm', - xor=['master'], + argstr="-mm", + xor=["master"], ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='-prefix %s', - name_template='zeropad', + argstr="-prefix %s", + extensions=None, + name_template="zeropad", ), outputtype=dict(), z=dict( - argstr='-z %i', - xor=['master'], + argstr="-z %i", + xor=["master"], ), ) inputs = Zeropad.input_spec() @@ -79,8 +83,14 @@ def test_Zeropad_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Zeropad_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Zeropad.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_extra_Deconvolve.py b/nipype/interfaces/afni/tests/test_extra_Deconvolve.py index 93adc3b748..1efa14c66b 100644 --- a/nipype/interfaces/afni/tests/test_extra_Deconvolve.py +++ b/nipype/interfaces/afni/tests/test_extra_Deconvolve.py @@ -2,10 +2,11 @@ from ..model import Deconvolve + def test_x1dstop(): deconv = Deconvolve() - deconv.inputs.out_file = 'file.nii' - assert 'out_file' in deconv._list_outputs() + deconv.inputs.out_file = "file.nii" + assert "out_file" in deconv._list_outputs() deconv.inputs.x1D_stop = True - assert 'out_file' not in deconv._list_outputs() - assert 'cbucket' not in deconv._list_outputs() + assert "out_file" not in deconv._list_outputs() + assert "cbucket" not in deconv._list_outputs() diff --git a/nipype/interfaces/afni/utils.py b/nipype/interfaces/afni/utils.py index 987fcec135..54c6b44b49 100644 
--- a/nipype/interfaces/afni/utils.py +++ b/nipype/interfaces/afni/utils.py @@ -1,55 +1,64 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft = python sts = 4 ts = 4 sw = 4 et: -"""AFNI utility interfaces - -Examples --------- -See the docstrings of the individual classes for examples. -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, bytes - +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""AFNI utility interfaces.""" import os import os.path as op import re import numpy as np -from ...utils.filemanip import (load_json, save_json, split_filename) -from ..base import (CommandLineInputSpec, CommandLine, Directory, TraitedSpec, - traits, isdefined, File, InputMultiObject, InputMultiPath, - Undefined, Str) +from ...utils.filemanip import load_json, save_json, split_filename +from ..base import ( + CommandLineInputSpec, + CommandLine, + Directory, + TraitedSpec, + traits, + Tuple, + isdefined, + File, + InputMultiObject, + InputMultiPath, + Undefined, + Str, +) from ...external.due import BibTeX -from .base import (AFNICommandBase, AFNICommand, AFNICommandInputSpec, - AFNICommandOutputSpec, AFNIPythonCommandInputSpec, - AFNIPythonCommand) +from .base import ( + AFNICommandBase, + AFNICommand, + AFNICommandInputSpec, + AFNICommandOutputSpec, + AFNIPythonCommandInputSpec, + AFNIPythonCommand, +) class ABoverlapInputSpec(AFNICommandInputSpec): in_file_a = File( - desc='input file A', - argstr='%s', + desc="input file A", + argstr="%s", position=-3, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) in_file_b = File( - desc='input file B', - argstr='%s', + desc="input file B", + argstr="%s", position=-2, mandatory=True, exists=True, - copyfile=False) - out_file = File( - desc='collect output to a file', argstr=' |& tee %s', position=-1) + copyfile=False, + ) + out_file = File(desc="collect output to a file", argstr=" |& tee %s", position=-1) no_automask = traits.Bool( - desc='consider input datasets as masks', argstr='-no_automask') + desc="consider input datasets as masks", argstr="-no_automask" + ) quiet = traits.Bool( - desc='be as quiet as possible (without being entirely mute)', - argstr='-quiet') + desc="be as quiet as possible (without being entirely mute)", argstr="-quiet" + ) verb = traits.Bool( - desc='print out some progress reports (to stderr)', argstr='-verb') + desc="print out some progress reports (to stderr)", argstr="-verb" + ) class ABoverlap(AFNICommand): @@ -60,8 +69,7 @@ class ABoverlap(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> aboverlap = afni.ABoverlap() >>> aboverlap.inputs.in_file_a = 'functional.nii' @@ -73,50 +81,56 @@ class ABoverlap(AFNICommand): """ - _cmd = '3dABoverlap' + _cmd = "3dABoverlap" input_spec = ABoverlapInputSpec output_spec = AFNICommandOutputSpec class AFNItoNIFTIInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dAFNItoNIFTI', - argstr='%s', + desc="input file to 3dAFNItoNIFTI", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s.nii', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file', - hash_files=False) + name_template="%s.nii", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + hash_files=False, + ) float_ = traits.Bool( - desc='Force the output dataset to be 32-bit floats. 
This option ' - 'should be used when the input AFNI dataset has different float ' - 'scale factors for different sub-bricks, an option that ' - 'NIfTI-1.1 does not support.', - argstr='-float') + desc="Force the output dataset to be 32-bit floats. This option " + "should be used when the input AFNI dataset has different float " + "scale factors for different sub-bricks, an option that " + "NIfTI-1.1 does not support.", + argstr="-float", + ) pure = traits.Bool( - desc='Do NOT write an AFNI extension field into the output file. Only ' - 'use this option if needed. You can also use the \'nifti_tool\' ' - 'program to strip extensions from a file.', - argstr='-pure') + desc="Do NOT write an AFNI extension field into the output file. Only " + "use this option if needed. You can also use the 'nifti_tool' " + "program to strip extensions from a file.", + argstr="-pure", + ) denote = traits.Bool( - desc='When writing the AFNI extension field, remove text notes that ' - 'might contain subject identifying information.', - argstr='-denote') + desc="When writing the AFNI extension field, remove text notes that " + "might contain subject identifying information.", + argstr="-denote", + ) oldid = traits.Bool( - desc='Give the new dataset the input dataset' - 's AFNI ID code.', - argstr='-oldid', - xor=['newid']) + desc="Give the new dataset the input dataset's AFNI ID code.", + argstr="-oldid", + xor=["newid"], + ) newid = traits.Bool( - desc='Give the new dataset a new AFNI ID code, to distinguish it from ' - 'the input dataset.', - argstr='-newid', - xor=['oldid']) + desc="Give the new dataset a new AFNI ID code, to distinguish it from " + "the input dataset.", + argstr="-newid", + xor=["oldid"], + ) class AFNItoNIFTI(AFNICommand): @@ -127,8 +141,7 @@ class AFNItoNIFTI(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> a2n = afni.AFNItoNIFTI() >>> a2n.inputs.in_file = 'afni_output.3D' @@ -139,38 +152,41 @@ class AFNItoNIFTI(AFNICommand): """ - _cmd = '3dAFNItoNIFTI' + _cmd = "3dAFNItoNIFTI" input_spec = AFNItoNIFTIInputSpec output_spec = AFNICommandOutputSpec - def _overload_extension(self, value): + def _overload_extension(self, value, name=None): path, base, ext = split_filename(value) - if ext.lower() not in ['.nii', '.nii.gz', '.1d', '.1D']: - ext += '.nii' + if ext.lower() not in [".nii", ".nii.gz", ".1d", ".1D"]: + ext += ".nii" return os.path.join(path, base + ext) def _gen_filename(self, name): - return os.path.abspath(super(AFNItoNIFTI, self)._gen_filename(name)) + return os.path.abspath(super()._gen_filename(name)) class AutoboxInputSpec(AFNICommandInputSpec): in_file = File( exists=True, mandatory=True, - argstr='-input %s', - desc='input file', - copyfile=False) + argstr="-input %s", + desc="input file", + copyfile=False, + ) padding = traits.Int( - argstr='-npad %d', - desc='Number of extra voxels to pad on each side of box') + argstr="-npad %d", desc="Number of extra voxels to pad on each side of box" + ) out_file = File( - argstr='-prefix %s', name_source='in_file', name_template='%s_autobox') + argstr="-prefix %s", name_source="in_file", name_template="%s_autobox" + ) no_clustering = traits.Bool( - argstr='-noclust', - desc='Don\'t do any clustering to find box. Any non-zero voxel will ' - 'be preserved in the cropped volume. The default method uses ' - 'some clustering to find the cropping box, and will clip off ' - 'small isolated blobs.') + argstr="-noclust", + desc="Don't do any clustering to find box. 
Any non-zero voxel will " + "be preserved in the cropped volume. The default method uses " + "some clustering to find the cropping box, and will clip off " + "small isolated blobs.", + ) class AutoboxOutputSpec(TraitedSpec): # out_file not mandatory @@ -181,7 +197,7 @@ class AutoboxOutputSpec(TraitedSpec): # out_file not mandatory z_min = traits.Int() z_max = traits.Int() - out_file = File(desc='output file') + out_file = File(desc="output file") class Autobox(AFNICommand): @@ -192,8 +208,7 @@ class Autobox(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> abox = afni.Autobox() >>> abox.inputs.in_file = 'structural.nii' @@ -204,60 +219,62 @@ class Autobox(AFNICommand): """ - _cmd = '3dAutobox' + _cmd = "3dAutobox" input_spec = AutoboxInputSpec output_spec = AutoboxOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): - outputs = super(Autobox, self).aggregate_outputs( - runtime, needed_outputs) - pattern = 'x=(?P-?\d+)\.\.(?P-?\d+) '\ - 'y=(?P-?\d+)\.\.(?P-?\d+) '\ - 'z=(?P-?\d+)\.\.(?P-?\d+)' - for line in runtime.stderr.split('\n'): + outputs = super().aggregate_outputs(runtime, needed_outputs) + pattern = ( + r"x=(?P-?\d+)\.\.(?P-?\d+) " + r"y=(?P-?\d+)\.\.(?P-?\d+) " + r"z=(?P-?\d+)\.\.(?P-?\d+)" + ) + for line in runtime.stderr.split("\n"): m = re.search(pattern, line) if m: d = m.groupdict() - outputs.trait_set(**{k: int(d[k]) for k in d.keys()}) + outputs.trait_set(**{k: int(v) for k, v in d.items()}) return outputs class BrickStatInputSpec(CommandLineInputSpec): in_file = File( - desc='input file to 3dmaskave', - argstr='%s', + desc="input file to 3dmaskave", + argstr="%s", position=-1, mandatory=True, - exists=True) + exists=True, + ) mask = File( - desc='-mask dset = use dset as mask to include/exclude voxels', - argstr='-mask %s', + desc="-mask dset = use dset as mask to include/exclude voxels", + argstr="-mask %s", position=2, - exists=True) + exists=True, + ) min = traits.Bool( - desc='print the minimum value in dataset', argstr='-min', position=1) + desc="print the minimum value in dataset", argstr="-min", position=1 + ) slow = traits.Bool( - desc='read the whole dataset to find the min and max values', - argstr='-slow') - max = traits.Bool( - desc='print the maximum value in the dataset', argstr='-max') - mean = traits.Bool( - desc='print the mean value in the dataset', argstr='-mean') - sum = traits.Bool( - desc='print the sum of values in the dataset', argstr='-sum') - var = traits.Bool(desc='print the variance in the dataset', argstr='-var') - percentile = traits.Tuple( + desc="read the whole dataset to find the min and max values", argstr="-slow" + ) + max = traits.Bool(desc="print the maximum value in the dataset", argstr="-max") + mean = traits.Bool(desc="print the mean value in the dataset", argstr="-mean") + sum = traits.Bool(desc="print the sum of values in the dataset", argstr="-sum") + var = traits.Bool(desc="print the variance in the dataset", argstr="-var") + percentile = Tuple( traits.Float, traits.Float, traits.Float, - desc='p0 ps p1 write the percentile values starting ' - 'at p0% and ending at p1% at a step of ps%. ' - 'only one sub-brick is accepted.', - argstr='-percentile %.3f %.3f %.3f') + desc="p0 ps p1 write the percentile values starting " + "at p0% and ending at p1% at a step of ps%. 
" + "only one sub-brick is accepted.", + argstr="-percentile %.3f %.3f %.3f", + ) class BrickStatOutputSpec(TraitedSpec): - min_val = traits.Float(desc='output') + min_val = traits.Float(desc="output") class BrickStat(AFNICommandBase): @@ -268,8 +285,7 @@ class BrickStat(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> brickstat = afni.BrickStat() >>> brickstat.inputs.in_file = 'functional.nii' @@ -280,24 +296,24 @@ class BrickStat(AFNICommandBase): >>> res = brickstat.run() # doctest: +SKIP """ - _cmd = '3dBrickStat' + + _cmd = "3dBrickStat" input_spec = BrickStatInputSpec output_spec = BrickStatOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): - outputs = self._outputs() - outfile = os.path.join(os.getcwd(), 'stat_result.json') + outfile = os.path.join(os.getcwd(), "stat_result.json") if runtime is None: try: - min_val = load_json(outfile)['stat'] - except IOError: + min_val = load_json(outfile)["stat"] + except OSError: return self.run().outputs else: min_val = [] - for line in runtime.stdout.split('\n'): + for line in runtime.stdout.split("\n"): if line: values = line.split() if len(values) > 1: @@ -315,65 +331,71 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): class BucketInputSpec(AFNICommandInputSpec): in_file = traits.List( - traits.Tuple( + Tuple( (File(exists=True, copyfile=False), traits.Str(argstr="'%s'")), - artstr="%s%s"), + artstr="%s%s", + ), position=-1, mandatory=True, argstr="%s", - desc='List of tuples of input datasets and subbrick selection strings' - 'as described in more detail in the following afni help string' - 'Input dataset specified using one of these forms:' - ' \'prefix+view\', \'prefix+view.HEAD\', or \'prefix+view.BRIK\'.' - 'You can also add a sub-brick selection list after the end of the' - 'dataset name. This allows only a subset of the sub-bricks to be' - 'included into the output (by default, all of the input dataset' - 'is copied into the output). A sub-brick selection list looks like' - 'one of the following forms:' - ' fred+orig[5] ==> use only sub-brick #5' - ' fred+orig[5,9,17] ==> use #5, #9, and #17' - ' fred+orig[5..8] or [5-8] ==> use #5, #6, #7, and #8' - ' fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13' - 'Sub-brick indexes start at 0. You can use the character \'$\'' - 'to indicate the last sub-brick in a dataset; for example, you' - 'can select every third sub-brick by using the selection list' - ' fred+orig[0..$(3)]' - 'N.B.: The sub-bricks are output in the order specified, which may' - ' not be the order in the original datasets. For example, using' - ' fred+orig[0..$(2),1..$(2)]' - ' will cause the sub-bricks in fred+orig to be output into the' - ' new dataset in an interleaved fashion. Using' - ' fred+orig[$..0]' - ' will reverse the order of the sub-bricks in the output.' - 'N.B.: Bucket datasets have multiple sub-bricks, but do NOT have' - ' a time dimension. You can input sub-bricks from a 3D+time dataset' - ' into a bucket dataset. You can use the \'3dinfo\' program to see' - ' how many sub-bricks a 3D+time or a bucket dataset contains.' - 'N.B.: In non-bucket functional datasets (like the \'fico\' datasets' - ' output by FIM, or the \'fitt\' datasets output by 3dttest), sub-brick' - ' [0] is the \'intensity\' and sub-brick [1] is the statistical parameter' - ' used as a threshold. 
Thus, to create a bucket dataset using the' - ' intensity from dataset A and the threshold from dataset B, and' - ' calling the output dataset C, you would type' - ' 3dbucket -prefix C -fbuc \'A+orig[0]\' -fbuc \'B+orig[1]\'' - 'WARNING: using this program, it is possible to create a dataset that' - ' has different basic datum types for different sub-bricks' - ' (e.g., shorts for brick 0, floats for brick 1).' - ' Do NOT do this! Very few AFNI programs will work correctly' - ' with such datasets!') - out_file = File(argstr='-prefix %s', name_template='buck') + desc="""\ +List of tuples of input datasets and subbrick selection strings +as described in more detail in the following afni help string +Input dataset specified using one of these forms: +``prefix+view``, ``prefix+view.HEAD``, or ``prefix+view.BRIK``. +You can also add a sub-brick selection list after the end of the +dataset name. This allows only a subset of the sub-bricks to be +included into the output (by default, all of the input dataset +is copied into the output). A sub-brick selection list looks like +one of the following forms:: + + fred+orig[5] ==> use only sub-brick #5 + fred+orig[5,9,17] ==> use #5, #9, and #17 + fred+orig[5..8] or [5-8] ==> use #5, #6, #7, and #8 + fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13 + +Sub-brick indexes start at 0. You can use the character '$' +to indicate the last sub-brick in a dataset; for example, you +can select every third sub-brick by using the selection list +``fred+orig[0..$(3)]`` +N.B.: The sub-bricks are output in the order specified, which may +not be the order in the original datasets. For example, using +``fred+orig[0..$(2),1..$(2)]`` +will cause the sub-bricks in fred+orig to be output into the +new dataset in an interleaved fashion. Using ``fred+orig[$..0]`` +will reverse the order of the sub-bricks in the output. +N.B.: Bucket datasets have multiple sub-bricks, but do NOT have +a time dimension. You can input sub-bricks from a 3D+time dataset +into a bucket dataset. You can use the '3dinfo' program to see +how many sub-bricks a 3D+time or a bucket dataset contains. +N.B.: In non-bucket functional datasets (like the 'fico' datasets +output by FIM, or the 'fitt' datasets output by 3dttest), sub-brick +``[0]`` is the 'intensity' and sub-brick [1] is the statistical parameter +used as a threshold. Thus, to create a bucket dataset using the +intensity from dataset A and the threshold from dataset B, and +calling the output dataset C, you would type:: + + 3dbucket -prefix C -fbuc 'A+orig[0]' -fbuc 'B+orig[1] + +""", + ) + out_file = File(argstr="-prefix %s", name_template="buck") class Bucket(AFNICommand): """Concatenate sub-bricks from input datasets into one big 'bucket' dataset. - For complete details, see the `3dbucket Documentation. - `_ + .. danger:: - Examples - ======== + Using this program, it is possible to create a dataset that + has different basic datum types for different sub-bricks + (e.g., shorts for brick 0, floats for brick 1). + Do NOT do this! Very few AFNI programs will work correctly + with such datasets! + Examples + -------- >>> from nipype.interfaces import afni >>> bucket = afni.Bucket() >>> bucket.inputs.in_file = [('functional.nii',"{2..$}"), ('functional.nii',"{1}")] @@ -382,43 +404,49 @@ class Bucket(AFNICommand): "3dbucket -prefix vr_base functional.nii'{2..$}' functional.nii'{1}'" >>> res = bucket.run() # doctest: +SKIP + See Also + -------- + For complete details, see the `3dbucket Documentation. + `__. 
+ """ - _cmd = '3dbucket' + _cmd = "3dbucket" input_spec = BucketInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): - if name == 'in_file': - return spec.argstr % ( - ' '.join([i[0] + "'" + i[1] + "'" for i in value])) - return super(Bucket, self)._format_arg(name, spec, value) + if name == "in_file": + return spec.argstr % (" ".join([i[0] + "'" + i[1] + "'" for i in value])) + return super()._format_arg(name, spec, value) class CalcInputSpec(AFNICommandInputSpec): in_file_a = File( - desc='input file to 3dcalc', - argstr='-a %s', + desc="input file to 3dcalc", + argstr="-a %s", position=0, mandatory=True, - exists=True) + exists=True, + ) in_file_b = File( - desc='operand file to 3dcalc', argstr='-b %s', position=1, exists=True) + desc="operand file to 3dcalc", argstr="-b %s", position=1, exists=True + ) in_file_c = File( - desc='operand file to 3dcalc', argstr='-c %s', position=2, exists=True) + desc="operand file to 3dcalc", argstr="-c %s", position=2, exists=True + ) out_file = File( - name_template='%s_calc', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file_a') - expr = Str(desc='expr', argstr='-expr "%s"', position=3, mandatory=True) - start_idx = traits.Int( - desc='start index for in_file_a', requires=['stop_idx']) - stop_idx = traits.Int( - desc='stop index for in_file_a', requires=['start_idx']) - single_idx = traits.Int(desc='volume index for in_file_a') - overwrite = traits.Bool(desc='overwrite output', argstr='-overwrite') - other = File(desc='other options', argstr='') + name_template="%s_calc", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file_a", + ) + expr = Str(desc="expr", argstr='-expr "%s"', position=3, mandatory=True) + start_idx = traits.Int(desc="start index for in_file_a", requires=["stop_idx"]) + stop_idx = traits.Int(desc="stop index for in_file_a", requires=["start_idx"]) + single_idx = traits.Int(desc="volume index for in_file_a") + overwrite = traits.Bool(desc="overwrite output", argstr="-overwrite") + other = File(desc="other options", argstr="") class Calc(AFNICommand): @@ -428,8 +456,7 @@ class Calc(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> calc = afni.Calc() >>> calc.inputs.in_file_a = 'functional.nii' @@ -453,82 +480,87 @@ class Calc(AFNICommand): """ - _cmd = '3dcalc' + _cmd = "3dcalc" input_spec = CalcInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'in_file_a': + if name == "in_file_a": arg = trait_spec.argstr % value if isdefined(self.inputs.start_idx): - arg += '[%d..%d]' % (self.inputs.start_idx, - self.inputs.stop_idx) + arg += "[%d..%d]" % (self.inputs.start_idx, self.inputs.stop_idx) if isdefined(self.inputs.single_idx): - arg += '[%d]' % (self.inputs.single_idx) + arg += "[%d]" % (self.inputs.single_idx) return arg - return super(Calc, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): - """Skip the arguments without argstr metadata - """ - return super( - Calc, self)._parse_inputs(skip=('start_idx', 'stop_idx', 'other')) + """Skip the arguments without argstr metadata""" + return super()._parse_inputs(skip=("start_idx", "stop_idx", "other")) class CatInputSpec(AFNICommandInputSpec): - in_files = traits.List( - File(exists=True), argstr="%s", mandatory=True, position=-2) + in_files = traits.List(File(exists=True), argstr="%s", mandatory=True, position=-2) out_file 
= File( - argstr='> %s', - value='catout.1d', + argstr="> %s", + value="catout.1d", usedefault=True, - desc='output (concatenated) file name', + desc="output (concatenated) file name", position=-1, - mandatory=True) + mandatory=True, + ) omitconst = traits.Bool( - desc='Omit columns that are identically constant from output.', - argstr='-nonconst') + desc="Omit columns that are identically constant from output.", + argstr="-nonconst", + ) keepfree = traits.Bool( - desc='Keep only columns that are marked as \'free\' in the ' - '3dAllineate header from \'-1Dparam_save\'. ' - 'If there is no such header, all columns are kept.', - argstr='-nonfixed') + desc="Keep only columns that are marked as 'free' in the " + "3dAllineate header from '-1Dparam_save'. " + "If there is no such header, all columns are kept.", + argstr="-nonfixed", + ) out_format = traits.Enum( - 'int', - 'nice', - 'double', - 'fint', - 'cint', - argstr='-form %s', - desc='specify data type for output. Valid types are \'int\', ' - '\'nice\', \'double\', \'fint\', and \'cint\'.', - xor=['out_int', 'out_nice', 'out_double', 'out_fint', 'out_cint']) + "int", + "nice", + "double", + "fint", + "cint", + argstr="-form %s", + desc="specify data type for output.", + xor=["out_int", "out_nice", "out_double", "out_fint", "out_cint"], + ) stack = traits.Bool( - desc='Stack the columns of the resultant matrix in the output.', - argstr='-stack') + desc="Stack the columns of the resultant matrix in the output.", argstr="-stack" + ) sel = traits.Str( - desc='Apply the same column/row selection string to all filenames ' - 'on the command line.', - argstr='-sel %s') + desc="Apply the same column/row selection string to all filenames " + "on the command line.", + argstr="-sel %s", + ) out_int = traits.Bool( - desc='specifiy int data type for output', - argstr='-i', - xor=['out_format', 'out_nice', 'out_double', 'out_fint', 'out_cint']) + desc="specify int data type for output", + argstr="-i", + xor=["out_format", "out_nice", "out_double", "out_fint", "out_cint"], + ) out_nice = traits.Bool( - desc='specifiy nice data type for output', - argstr='-n', - xor=['out_format', 'out_int', 'out_double', 'out_fint', 'out_cint']) + desc="specify nice data type for output", + argstr="-n", + xor=["out_format", "out_int", "out_double", "out_fint", "out_cint"], + ) out_double = traits.Bool( - desc='specifiy double data type for output', - argstr='-d', - xor=['out_format', 'out_nice', 'out_int', 'out_fint', 'out_cint']) + desc="specify double data type for output", + argstr="-d", + xor=["out_format", "out_nice", "out_int", "out_fint", "out_cint"], + ) out_fint = traits.Bool( - desc='specifiy int, rounded down, data type for output', - argstr='-f', - xor=['out_format', 'out_nice', 'out_double', 'out_int', 'out_cint']) + desc="specify int, rounded down, data type for output", + argstr="-f", + xor=["out_format", "out_nice", "out_double", "out_int", "out_cint"], + ) out_cint = traits.Bool( - desc='specifiy int, rounded up, data type for output', - xor=['out_format', 'out_nice', 'out_double', 'out_fint', 'out_int']) + desc="specify int, rounded up, data type for output", + xor=["out_format", "out_nice", "out_double", "out_fint", "out_int"], + ) class Cat(AFNICommand): @@ -540,8 +572,7 @@ class Cat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> cat1d = afni.Cat() >>> cat1d.inputs.sel = "'[0,2]'" @@ -553,43 +584,48 @@ class Cat(AFNICommand): """ - _cmd = '1dcat' + _cmd = "1dcat" input_spec = CatInputSpec output_spec = 
AFNICommandOutputSpec class CatMatvecInputSpec(AFNICommandInputSpec): in_file = traits.List( - traits.Tuple(traits.Str(), traits.Str()), + Tuple(traits.Str(), traits.Str()), desc="list of tuples of mfiles and associated opkeys", mandatory=True, argstr="%s", - position=-2) + position=-2, + ) out_file = File( argstr=" > %s", - name_template='%s_cat.aff12.1D', - name_source='in_file', + name_template="%s_cat.aff12.1D", + name_source="in_file", keep_extension=False, desc="File to write concattenated matvecs to", position=-1, - mandatory=True) + mandatory=True, + ) matrix = traits.Bool( desc="indicates that the resulting matrix will" "be written to outfile in the 'MATRIX(...)' format (FORM 3)." "This feature could be used, with clever scripting, to input" "a matrix directly on the command line to program 3dWarp.", argstr="-MATRIX", - xor=['oneline', 'fourxfour']) + xor=["oneline", "fourxfour"], + ) oneline = traits.Bool( desc="indicates that the resulting matrix" "will simply be written as 12 numbers on one line.", argstr="-ONELINE", - xor=['matrix', 'fourxfour']) + xor=["matrix", "fourxfour"], + ) fourxfour = traits.Bool( desc="Output matrix in augmented form (last row is 0 0 0 1)" "This option does not work with -MATRIX or -ONELINE", argstr="-4x4", - xor=['matrix', 'oneline']) + xor=["matrix", "oneline"], + ) class CatMatvec(AFNICommand): @@ -599,8 +635,7 @@ class CatMatvec(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> cmv = afni.CatMatvec() >>> cmv.inputs.in_file = [('structural.BRIK::WARP_DATA','I')] @@ -611,67 +646,75 @@ class CatMatvec(AFNICommand): """ - _cmd = 'cat_matvec' + _cmd = "cat_matvec" input_spec = CatMatvecInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): - if name == 'in_file': - return spec.argstr % (' '.join([i[0] + ' -' + i[1] - for i in value])) - return super(CatMatvec, self)._format_arg(name, spec, value) + if name == "in_file": + # Concatenate a series of filenames, with optional opkeys + return " ".join( + f"{mfile} -{opkey}" if opkey else mfile for mfile, opkey in value + ) + return super()._format_arg(name, spec, value) class CenterMassInputSpec(CommandLineInputSpec): in_file = File( - desc='input file to 3dCM', - argstr='%s', + desc="input file to 3dCM", + argstr="%s", position=-2, mandatory=True, exists=True, - copyfile=True) + copyfile=True, + ) cm_file = File( - name_source='in_file', - name_template='%s_cm.out', + name_source="in_file", + name_template="%s_cm.out", hash_files=False, keep_extension=False, desc="File to write center of mass to", argstr="> %s", - position=-1) + position=-1, + ) mask_file = File( - desc='Only voxels with nonzero values in the provided mask will be ' - 'averaged.', - argstr='-mask %s', - exists=True) - automask = traits.Bool( - desc='Generate the mask automatically', argstr='-automask') - set_cm = traits.Tuple( + desc="Only voxels with nonzero values in the provided mask will be " + "averaged.", + argstr="-mask %s", + exists=True, + ) + automask = traits.Bool(desc="Generate the mask automatically", argstr="-automask") + set_cm = Tuple( (traits.Float(), traits.Float(), traits.Float()), - desc='After computing the center of mass, set the origin fields in ' - 'the header so that the center of mass will be at (x,y,z) in ' - 'DICOM coords.', - argstr='-set %f %f %f') + desc="After computing the center of mass, set the origin fields in " + "the header so that the center of mass will be at (x,y,z) in " + "DICOM coords.", + argstr="-set %f %f %f", + ) 
local_ijk = traits.Bool( - desc='Output values as (i,j,k) in local orienation', - argstr='-local_ijk') + desc="Output values as (i,j,k) in local orientation", argstr="-local_ijk" + ) roi_vals = traits.List( traits.Int, - desc='Compute center of mass for each blob with voxel value of v0, ' - 'v1, v2, etc. This option is handy for getting ROI centers of ' - 'mass.', - argstr='-roi_vals %s') + desc="Compute center of mass for each blob with voxel value of v0, " + "v1, v2, etc. This option is handy for getting ROI centers of " + "mass.", + argstr="-roi_vals %s", + ) all_rois = traits.Bool( - desc='Don\'t bother listing the values of ROIs you want: The program ' - 'will find all of them and produce a full list', - argstr='-all_rois') + desc="Don't bother listing the values of ROIs you want: The program " + "will find all of them and produce a full list", + argstr="-all_rois", + ) class CenterMassOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output file') - cm_file = File(desc='file with the center of mass coordinates') + out_file = File(exists=True, desc="output file") + cm_file = File(desc="file with the center of mass coordinates") cm = traits.List( - traits.Tuple(traits.Float(), traits.Float(), traits.Float()), - desc='center of mass') + Tuple(traits.Float(), traits.Float(), traits.Float()), + desc="center of mass", + ) class CenterMass(AFNICommandBase): @@ -687,8 +730,7 @@ class CenterMass(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> cm = afni.CenterMass() >>> cm.inputs.in_file = 'structural.nii' @@ -697,43 +739,56 @@ class CenterMass(AFNICommandBase): >>> cm.cmdline '3dCM -roi_vals 2 10 structural.nii > cm.txt' >>> res = 3dcm.run() # doctest: +SKIP + """ - _cmd = '3dCM' + _cmd = "3dCM" input_spec = CenterMassInputSpec output_spec = CenterMassOutputSpec def _list_outputs(self): - outputs = super(CenterMass, self)._list_outputs() - outputs['out_file'] = os.path.abspath(self.inputs.in_file) - outputs['cm_file'] = os.path.abspath(self.inputs.cm_file) - sout = np.loadtxt(outputs['cm_file'], ndmin=2) - outputs['cm'] = [tuple(s) for s in sout] + outputs = super()._list_outputs() + outputs["out_file"] = os.path.abspath(self.inputs.in_file) + outputs["cm_file"] = os.path.abspath(self.inputs.cm_file) + sout = np.loadtxt(outputs["cm_file"], ndmin=2) + outputs["cm"] = [tuple(s) for s in sout] return outputs class ConvertDsetInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to ConvertDset', - argstr='-input %s', + desc="input file to ConvertDset", + argstr="-input %s", position=-2, mandatory=True, - exists=True) + exists=True, + ) out_file = File( - desc='output file for ConvertDset', - argstr='-prefix %s', + desc="output file for ConvertDset", + argstr="-prefix %s", position=-1, - mandatory=True) + mandatory=True, + ) out_type = traits.Enum( - ('niml', 'niml_asc', 'niml_bi', - '1D', '1Dp', '1Dpt', - 'gii', 'gii_asc', 'gii_b64', 'gii_b64gz'), - desc='output type', - argstr='-o_%s', + ( + "niml", + "niml_asc", + "niml_bi", + "1D", + "1Dp", + "1Dpt", + "gii", + "gii_asc", + "gii_b64", + "gii_b64gz", + ), + desc="output type", + argstr="-o_%s", mandatory=True, - position=0) + position=0, + ) class ConvertDset(AFNICommandBase): @@ -743,8 +798,7 @@ class ConvertDset(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> convertdset = afni.ConvertDset() >>> convertdset.inputs.in_file = 'lh.pial_converted.gii' @@ -753,33 +807,36 @@ class ConvertDset(AFNICommandBase): >>> 
convertdset.cmdline 'ConvertDset -o_niml_asc -input lh.pial_converted.gii -prefix lh.pial_converted.niml.dset' >>> res = convertdset.run() # doctest: +SKIP + """ - _cmd = 'ConvertDset' + _cmd = "ConvertDset" input_spec = ConvertDsetInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class CopyInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dcopy', - argstr='%s', + desc="input file to 3dcopy", + argstr="%s", position=-2, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_copy', - desc='output image file name', - argstr='%s', + name_template="%s_copy", + desc="output image file name", + argstr="%s", position=-1, - name_source='in_file') - verbose = traits.Bool(desc='print progress reports', argstr='-verb') + name_source="in_file", + ) + verbose = traits.Bool(desc="print progress reports", argstr="-verb") class Copy(AFNICommand): @@ -787,11 +844,10 @@ class Copy(AFNICommand): or different type using 3dcopy command For complete details, see the `3dcopy Documentation. - `_ + `__ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> copy3d = afni.Copy() >>> copy3d.inputs.in_file = 'functional.nii' @@ -820,7 +876,7 @@ class Copy(AFNICommand): """ - _cmd = '3dcopy' + _cmd = "3dcopy" input_spec = CopyInputSpec output_spec = AFNICommandOutputSpec @@ -830,54 +886,59 @@ class DotInputSpec(AFNICommandInputSpec): (File()), desc="list of input files, possibly with subbrick selectors", argstr="%s ...", - position=-2) - out_file = File( - desc='collect output to a file', argstr=' |& tee %s', position=-1) - mask = File(desc='Use this dataset as a mask', argstr='-mask %s') - mrange = traits.Tuple( + position=-2, + ) + out_file = File(desc="collect output to a file", argstr=" |& tee %s", position=-1) + mask = File(desc="Use this dataset as a mask", argstr="-mask %s") + mrange = Tuple( (traits.Float(), traits.Float()), - desc='Means to further restrict the voxels from \'mset\' so that' - 'only those mask values within this range (inclusive) willbe used.', - argstr='-mrange %s %s') + desc="Means to further restrict the voxels from 'mset' so that " + "only those mask values within this range (inclusive) will be used.", + argstr="-mrange %s %s", + ) demean = traits.Bool( - desc= - 'Remove the mean from each volume prior to computing the correlation', - argstr='-demean') + desc="Remove the mean from each volume prior to computing the correlation", + argstr="-demean", + ) docor = traits.Bool( - desc='Return the correlation coefficient (default).', argstr='-docor') - dodot = traits.Bool( - desc='Return the dot product (unscaled).', argstr='-dodot') + desc="Return the correlation coefficient (default).", argstr="-docor" + ) + dodot = traits.Bool(desc="Return the dot product (unscaled).", argstr="-dodot") docoef = traits.Bool( - desc= - 'Return the least square fit coefficients {{a,b}} so that dset2 is approximately a + b*dset1', - argstr='-docoef') + desc="Return the least square fit coefficients {{a,b}} so that dset2 is approximately a + b\\*dset1", + argstr="-docoef", + ) dosums = traits.Bool( - desc= - 'Return the 6 numbers xbar= ybar= <(x-xbar)^2> <(y-ybar)^2> <(x-xbar)(y-ybar)> and the correlation coefficient.', - argstr='-dosums') + desc="Return the 6 numbers xbar= ybar= <(x-xbar)^2> <(y-ybar)^2> <(x-xbar)(y-ybar)> and the correlation 
coefficient.", + argstr="-dosums", + ) dodice = traits.Bool( - desc='Return the Dice coefficient (the Sorensen-Dice index).', - argstr='-dodice') + desc="Return the Dice coefficient (the Sorensen-Dice index).", argstr="-dodice" + ) doeta2 = traits.Bool( - desc='Return eta-squared (Cohen, NeuroImage 2008).', argstr='-doeta2') + desc="Return eta-squared (Cohen, NeuroImage 2008).", argstr="-doeta2" + ) full = traits.Bool( - desc= - 'Compute the whole matrix. A waste of time, but handy for parsing.', - argstr='-full') + desc="Compute the whole matrix. A waste of time, but handy for parsing.", + argstr="-full", + ) show_labels = traits.Bool( - desc= - 'Print sub-brick labels to help identify what is being correlated. This option is useful when' - 'you have more than 2 sub-bricks at input.', - argstr='-show_labels') - upper = traits.Bool( - desc='Compute upper triangular matrix', argstr='-upper') + desc="Print sub-brick labels to help identify what is being correlated. This option is useful when" + "you have more than 2 sub-bricks at input.", + argstr="-show_labels", + ) + upper = traits.Bool(desc="Compute upper triangular matrix", argstr="-upper") class Dot(AFNICommand): """Correlation coefficient between sub-brick pairs. All datasets in in_files list will be concatenated. You can use sub-brick selectors in the file specification. - Note: This program is not efficient when more than two subbricks are input. + + .. warning:: + + This program is not efficient when more than two subbricks are input. + For complete details, see the `3ddot Documentation. `_ @@ -891,87 +952,71 @@ class Dot(AFNICommand): >>> res = copy3d.run() # doctest: +SKIP """ - _cmd = '3dDot' + + _cmd = "3dDot" input_spec = DotInputSpec output_spec = AFNICommandOutputSpec class Edge3InputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dedge3', - argstr='-input %s', + desc="input file to 3dedge3", + argstr="-input %s", position=0, mandatory=True, exists=True, - copyfile=False) - out_file = File( - desc='output image file name', position=-1, argstr='-prefix %s') + copyfile=False, + ) + out_file = File(desc="output image file name", position=-1, argstr="-prefix %s") datum = traits.Enum( - 'byte', - 'short', - 'float', - argstr='-datum %s', - desc='specify data type for output. Valid types are \'byte\', ' - '\'short\' and \'float\'.') + "byte", + "short", + "float", + argstr="-datum %s", + desc="specify data type for output. Valid types are 'byte', " + "'short' and 'float'.", + ) fscale = traits.Bool( - desc='Force scaling of the output to the maximum integer range.', - argstr='-fscale', - xor=['gscale', 'nscale', 'scale_floats']) + desc="Force scaling of the output to the maximum integer range.", + argstr="-fscale", + xor=["gscale", "nscale", "scale_floats"], + ) gscale = traits.Bool( - desc='Same as \'-fscale\', but also forces each output sub-brick to ' - 'to get the same scaling factor.', - argstr='-gscale', - xor=['fscale', 'nscale', 'scale_floats']) + desc="Same as '-fscale', but also forces each output sub-brick to " + "to get the same scaling factor.", + argstr="-gscale", + xor=["fscale", "nscale", "scale_floats"], + ) nscale = traits.Bool( - desc='Don\'t do any scaling on output to byte or short datasets.', - argstr='-nscale', - xor=['fscale', 'gscale', 'scale_floats']) + desc="Don't do any scaling on output to byte or short datasets.", + argstr="-nscale", + xor=["fscale", "gscale", "scale_floats"], + ) scale_floats = traits.Float( - desc='Multiply input by VAL, but only if the input datum is ' - 'float. 
This is needed when the input dataset ' - 'has a small range, like 0 to 2.0 for instance. ' - 'With such a range, very few edges are detected due to ' - 'what I suspect to be truncation problems. ' - 'Multiplying such a dataset by 10000 fixes the problem ' - 'and the scaling is undone at the output.', - argstr='-scale_floats %f', - xor=['fscale', 'gscale', 'nscale']) + desc="Multiply input by VAL, but only if the input datum is " + "float. This is needed when the input dataset " + "has a small range, like 0 to 2.0 for instance. " + "With such a range, very few edges are detected due to " + "what I suspect to be truncation problems. " + "Multiplying such a dataset by 10000 fixes the problem " + "and the scaling is undone at the output.", + argstr="-scale_floats %f", + xor=["fscale", "gscale", "nscale"], + ) verbose = traits.Bool( - desc='Print out some information along the way.', argstr='-verbose') + desc="Print out some information along the way.", argstr="-verbose" + ) class Edge3(AFNICommand): """Does 3D Edge detection using the library 3DEdge - by Gregoire Malandain (gregoire.malandain@sophia.inria.fr). + by Gregoire Malandain. For complete details, see the `3dedge3 Documentation. `_ - references_ = [{'entry': BibTeX('@article{Deriche1987,' - 'author={R. Deriche},' - 'title={Optimal edge detection using recursive filtering},' - 'journal={International Journal of Computer Vision},' - 'volume={2},', - 'pages={167-187},' - 'year={1987},' - '}'), - 'tags': ['method'], - }, - {'entry': BibTeX('@article{MongaDericheMalandainCocquerez1991,' - 'author={O. Monga, R. Deriche, G. Malandain, J.P. Cocquerez},' - 'title={Recursive filtering and edge tracking: two primary tools for 3D edge detection},' - 'journal={Image and vision computing},' - 'volume={9},', - 'pages={203-214},' - 'year={1991},' - '}'), - 'tags': ['method'], - }, - ] - Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> edge3 = afni.Edge3() >>> edge3.inputs.in_file = 'functional.nii' @@ -983,35 +1028,67 @@ class Edge3(AFNICommand): """ - _cmd = '3dedge3' + _cmd = "3dedge3" input_spec = Edge3InputSpec output_spec = AFNICommandOutputSpec + _references = [ + { + "entry": BibTeX( + """\ +@article{Deriche1987, +author={R. Deriche}, +title={Optimal edge detection using recursive filtering}, +journal={International Journal of Computer Vision}, +volume={2}, +pages={167-187}, +year={1987}, +}""" + ), + "tags": ["method"], + }, + { + "entry": BibTeX( + """\ +@article{MongaDericheMalandainCocquerez1991, + author={O. Monga, R. Deriche, G. Malandain, J.P. 
Cocquerez}, + title={Recursive filtering and edge tracking: two primary tools for 3D edge detection}, + journal={Image and vision computing}, + volume={9}, + pages={203-214}, + year={1991}, +}""" + ), + "tags": ["method"], + }, + ] class EvalInputSpec(AFNICommandInputSpec): in_file_a = File( - desc='input file to 1deval', - argstr='-a %s', + desc="input file to 1deval", + argstr="-a %s", position=0, mandatory=True, - exists=True) + exists=True, + ) in_file_b = File( - desc='operand file to 1deval', argstr='-b %s', position=1, exists=True) + desc="operand file to 1deval", argstr="-b %s", position=1, exists=True + ) in_file_c = File( - desc='operand file to 1deval', argstr='-c %s', position=2, exists=True) + desc="operand file to 1deval", argstr="-c %s", position=2, exists=True + ) out_file = File( - name_template='%s_calc', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file_a') - out1D = traits.Bool(desc='output in 1D', argstr='-1D') - expr = Str(desc='expr', argstr='-expr "%s"', position=3, mandatory=True) - start_idx = traits.Int( - desc='start index for in_file_a', requires=['stop_idx']) - stop_idx = traits.Int( - desc='stop index for in_file_a', requires=['start_idx']) - single_idx = traits.Int(desc='volume index for in_file_a') - other = File(desc='other options', argstr='') + name_template="%s_calc", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file_a", + ) + out1D = traits.Bool(desc="output in 1D", argstr="-1D") + expr = Str(desc="expr", argstr='-expr "%s"', position=3, mandatory=True) + start_idx = traits.Int(desc="start index for in_file_a", requires=["stop_idx"]) + stop_idx = traits.Int(desc="stop index for in_file_a", requires=["start_idx"]) + single_idx = traits.Int(desc="volume index for in_file_a") + other = File(desc="other options", argstr="") class Eval(AFNICommand): @@ -1022,8 +1099,7 @@ class Eval(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> eval = afni.Eval() >>> eval.inputs.in_file_a = 'seed.1D' @@ -1037,125 +1113,131 @@ class Eval(AFNICommand): """ - _cmd = '1deval' + _cmd = "1deval" input_spec = EvalInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'in_file_a': + if name == "in_file_a": arg = trait_spec.argstr % value if isdefined(self.inputs.start_idx): - arg += '[%d..%d]' % (self.inputs.start_idx, - self.inputs.stop_idx) + arg += "[%d..%d]" % (self.inputs.start_idx, self.inputs.stop_idx) if isdefined(self.inputs.single_idx): - arg += '[%d]' % (self.inputs.single_idx) + arg += "[%d]" % (self.inputs.single_idx) return arg - return super(Eval, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): - """Skip the arguments without argstr metadata - """ - return super( - Eval, self)._parse_inputs(skip=('start_idx', 'stop_idx', 'other')) + """Skip the arguments without argstr metadata""" + return super()._parse_inputs(skip=("start_idx", "stop_idx", "other")) class FWHMxInputSpec(CommandLineInputSpec): in_file = File( - desc='input dataset', argstr='-input %s', mandatory=True, exists=True) + desc="input dataset", argstr="-input %s", mandatory=True, exists=True + ) out_file = File( - argstr='> %s', - name_source='in_file', - name_template='%s_fwhmx.out', + argstr="> %s", + name_source="in_file", + name_template="%s_fwhmx.out", position=-1, keep_extension=False, - desc='output file') + desc="output file", + ) out_subbricks = File( - 
argstr='-out %s', - name_source='in_file', - name_template='%s_subbricks.out', + argstr="-out %s", + name_source="in_file", + name_template="%s_subbricks.out", keep_extension=False, - desc='output file listing the subbricks FWHM') + desc="output file listing the subbricks FWHM", + ) mask = File( - desc='use only voxels that are nonzero in mask', - argstr='-mask %s', - exists=True) + desc="use only voxels that are nonzero in mask", argstr="-mask %s", exists=True + ) automask = traits.Bool( False, usedefault=True, - argstr='-automask', - desc='compute a mask from THIS dataset, a la 3dAutomask') + argstr="-automask", + desc="compute a mask from THIS dataset, a la 3dAutomask", + ) detrend = traits.Either( traits.Bool(), traits.Int(), default=False, - argstr='-detrend', - xor=['demed'], + argstr="-detrend", + xor=["demed"], usedefault=True, - desc='instead of demed (0th order detrending), detrend to the ' - 'specified order. If order is not given, the program picks ' - 'q=NT/30. -detrend disables -demed, and includes -unif.') + desc="instead of demed (0th order detrending), detrend to the " + "specified order. If order is not given, the program picks " + "q=NT/30. -detrend disables -demed, and includes -unif.", + ) demed = traits.Bool( False, - argstr='-demed', - xor=['detrend'], - desc='If the input dataset has more than one sub-brick (e.g., has a ' - 'time axis), then subtract the median of each voxel\'s time ' - 'series before processing FWHM. This will tend to remove ' - 'intrinsic spatial structure and leave behind the noise.') + argstr="-demed", + xor=["detrend"], + desc="If the input dataset has more than one sub-brick (e.g., has a " + "time axis), then subtract the median of each voxel's time " + "series before processing FWHM. This will tend to remove " + "intrinsic spatial structure and leave behind the noise.", + ) unif = traits.Bool( False, - argstr='-unif', - desc='If the input dataset has more than one sub-brick, then ' - 'normalize each voxel\'s time series to have the same MAD before ' - 'processing FWHM.') + argstr="-unif", + desc="If the input dataset has more than one sub-brick, then " + "normalize each voxel's time series to have the same MAD before " + "processing FWHM.", + ) out_detrend = File( - argstr='-detprefix %s', - name_source='in_file', - name_template='%s_detrend', + argstr="-detprefix %s", + name_source="in_file", + name_template="%s_detrend", keep_extension=False, - desc='Save the detrended file into a dataset') + desc="Save the detrended file into a dataset", + ) geom = traits.Bool( - argstr='-geom', - xor=['arith'], - desc='if in_file has more than one sub-brick, compute the final ' - 'estimate as the geometric mean of the individual sub-brick FWHM ' - 'estimates') + argstr="-geom", + xor=["arith"], + desc="if in_file has more than one sub-brick, compute the final " + "estimate as the geometric mean of the individual sub-brick FWHM " + "estimates", + ) arith = traits.Bool( - argstr='-arith', - xor=['geom'], - desc='if in_file has more than one sub-brick, compute the final ' - 'estimate as the arithmetic mean of the individual sub-brick ' - 'FWHM estimates') + argstr="-arith", + xor=["geom"], + desc="if in_file has more than one sub-brick, compute the final " + "estimate as the arithmetic mean of the individual sub-brick " + "FWHM estimates", + ) combine = traits.Bool( - argstr='-combine', - desc='combine the final measurements along each axis') - compat = traits.Bool( - argstr='-compat', desc='be compatible with the older 3dFWHM') + argstr="-combine", 
desc="combine the final measurements along each axis" + ) + compat = traits.Bool(argstr="-compat", desc="be compatible with the older 3dFWHM") acf = traits.Either( traits.Bool(), File(), - traits.Tuple(File(exists=True), traits.Float()), + Tuple(File(exists=True), traits.Float()), default=False, usedefault=True, - argstr='-acf', - desc='computes the spatial autocorrelation') + argstr="-acf", + desc="computes the spatial autocorrelation", + ) class FWHMxOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output file') - out_subbricks = File(exists=True, desc='output file (subbricks)') - out_detrend = File(desc='output file, detrended') + out_file = File(exists=True, desc="output file") + out_subbricks = File(exists=True, desc="output file (subbricks)") + out_detrend = File(desc="output file, detrended") fwhm = traits.Either( - traits.Tuple(traits.Float(), traits.Float(), traits.Float()), - traits.Tuple(traits.Float(), traits.Float(), traits.Float(), - traits.Float()), - desc='FWHM along each axis') + Tuple(traits.Float(), traits.Float(), traits.Float()), + Tuple(traits.Float(), traits.Float(), traits.Float(), traits.Float()), + desc="FWHM along each axis", + ) acf_param = traits.Either( - traits.Tuple(traits.Float(), traits.Float(), traits.Float()), - traits.Tuple(traits.Float(), traits.Float(), traits.Float(), - traits.Float()), - desc='fitted ACF model parameters') - out_acf = File(exists=True, desc='output acf file') + Tuple(traits.Float(), traits.Float(), traits.Float()), + Tuple(traits.Float(), traits.Float(), traits.Float(), traits.Float()), + desc="fitted ACF model parameters", + ) + out_acf = File(exists=True, desc="output acf file") class FWHMx(AFNICommandBase): @@ -1170,17 +1252,6 @@ class FWHMx(AFNICommandBase): For complete details, see the `3dFWHMx Documentation. `_ - Examples - -------- - - >>> from nipype.interfaces import afni - >>> fwhm = afni.FWHMx() - >>> fwhm.inputs.in_file = 'functional.nii' - >>> fwhm.cmdline - '3dFWHMx -input functional.nii -out functional_subbricks.out > functional_fwhmx.out' - >>> res = fwhm.run() # doctest: +SKIP - - (Classic) METHOD: * Calculate ratio of variance of first differences to data variance. @@ -1214,14 +1285,12 @@ class FWHMx(AFNICommandBase): 3dClustSim has also been modified to use the ACF model given above to generate noise random fields. - .. note:: TL;DR or summary The take-away message is that the 'classic' 3dFWHMx and 3dClustSim analysis, using a pure Gaussian ACF, is not very correct for FMRI data -- I cannot speak for PET or MEG data. - .. warning:: Do NOT use 3dFWHMx on the statistical results (e.g., '-bucket') from @@ -1229,7 +1298,6 @@ class FWHMx(AFNICommandBase): the smoothness of the time series NOISE, not of the statistics. This proscription is especially true if you plan to use 3dClustSim next!! - .. note:: Recommendations * For FMRI statistical purposes, you DO NOT want the FWHM to reflect @@ -1245,7 +1313,6 @@ class FWHMx(AFNICommandBase): * If you do not use '-detrend', the program attempts to find non-zero spatial structure in the input, and will print a warning message if it is detected. - .. note:: Notes on -demed * I recommend this option, and it is not the default only for historical @@ -1258,23 +1325,33 @@ class FWHMx(AFNICommandBase): structure in the image will bias the estimation of the FWHM of the image time series NOISE (which is usually the point of using 3dFWHMx).
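+ .. note:: Example usage -- a minimal, illustrative sketch only (it assumes a local 'functional.nii' and requests the ACF fit through the ``acf`` input; the exact command line produced is not asserted here): + + >>> from nipype.interfaces import afni + >>> fwhm = afni.FWHMx() + >>> fwhm.inputs.in_file = 'functional.nii' + >>> fwhm.inputs.acf = True + >>> res = fwhm.run() # doctest: +SKIP +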
+ Examples + -------- + >>> from nipype.interfaces import afni + >>> fwhm = afni.FWHMx() + >>> fwhm.inputs.in_file = 'functional.nii' + >>> fwhm.cmdline + '3dFWHMx -input functional.nii -out functional_subbricks.out > functional_fwhmx.out' + >>> res = fwhm.run() # doctest: +SKIP """ - _cmd = '3dFWHMx' + + _cmd = "3dFWHMx" input_spec = FWHMxInputSpec output_spec = FWHMxOutputSpec - references_ = [ + _references = [ { - 'entry': - BibTeX('@article{CoxReynoldsTaylor2016,' - 'author={R.W. Cox, R.C. Reynolds, and P.A. Taylor},' - 'title={AFNI and clustering: false positive rates redux},' - 'journal={bioRxiv},' - 'year={2016},' - '}'), - 'tags': ['method'], - }, + "entry": BibTeX( + "@article{CoxReynoldsTaylor2016," + "author={R.W. Cox, R.C. Reynolds, and P.A. Taylor}," + "title={AFNI and clustering: false positive rates redux}," + "journal={bioRxiv}," + "year={2016}," + "}" + ), + "tags": ["method"], + } ] _acf = True @@ -1282,59 +1359,57 @@ def _parse_inputs(self, skip=None): if not self.inputs.detrend: if skip is None: skip = [] - skip += ['out_detrend'] - return super(FWHMx, self)._parse_inputs(skip=skip) + skip += ["out_detrend"] + return super()._parse_inputs(skip=skip) def _format_arg(self, name, trait_spec, value): - if name == 'detrend': - if isinstance(value, bool): - if value: - return trait_spec.argstr - else: - return None + if name == "detrend": + if value is True: + return trait_spec.argstr + elif value is False: + return None elif isinstance(value, int): - return trait_spec.argstr + ' %d' % value - - if name == 'acf': - if isinstance(value, bool): - if value: - return trait_spec.argstr - else: - self._acf = False - return None + return trait_spec.argstr + " %d" % value + + if name == "acf": + if value is True: + return trait_spec.argstr + elif value is False: + self._acf = False + return None elif isinstance(value, tuple): - return trait_spec.argstr + ' %s %f' % value + return trait_spec.argstr + " %s %f" % value elif isinstance(value, (str, bytes)): - return trait_spec.argstr + ' ' + value - return super(FWHMx, self)._format_arg(name, trait_spec, value) + return trait_spec.argstr + " " + value + return super()._format_arg(name, trait_spec, value) def _list_outputs(self): - outputs = super(FWHMx, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.detrend: fname, ext = op.splitext(self.inputs.in_file) - if '.gz' in ext: + if ".gz" in ext: _, ext2 = op.splitext(fname) ext = ext2 + ext - outputs['out_detrend'] += ext + outputs["out_detrend"] += ext else: - outputs['out_detrend'] = Undefined + outputs["out_detrend"] = Undefined - sout = np.loadtxt(outputs['out_file']) + sout = np.loadtxt(outputs["out_file"]) # handle newer versions of AFNI if sout.size == 8: - outputs['fwhm'] = tuple(sout[0, :]) + outputs["fwhm"] = tuple(sout[0, :]) else: - outputs['fwhm'] = tuple(sout) + outputs["fwhm"] = tuple(sout) if self._acf: assert sout.size == 8, "Wrong number of elements in %s" % str(sout) - outputs['acf_param'] = tuple(sout[1]) + outputs["acf_param"] = tuple(sout[1]) - outputs['out_acf'] = op.abspath('3dFWHMx.1D') + outputs["out_acf"] = op.abspath("3dFWHMx.1D") if isinstance(self.inputs.acf, (str, bytes)): - outputs['out_acf'] = op.abspath(self.inputs.acf) + outputs["out_acf"] = op.abspath(self.inputs.acf) return outputs @@ -1343,81 +1418,106 @@ class LocalBistatInputSpec(AFNICommandInputSpec): in_file1 = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, - desc='Filename of the first image') + desc="Filename of the first image", + ) in_file2 
= File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-1, - desc='Filename of the second image') + desc="Filename of the second image", + ) neighborhood = traits.Either( - traits.Tuple(traits.Enum('SPHERE', 'RHDD', 'TOHD'), traits.Float()), - traits.Tuple(traits.Enum('RECT'), traits.Tuple(traits.Float(), - traits.Float(), - traits.Float())), + Tuple(traits.Enum("SPHERE", "RHDD", "TOHD"), traits.Float()), + Tuple( + traits.Enum("RECT"), + Tuple(traits.Float(), traits.Float(), traits.Float()), + ), mandatory=True, - desc='The region around each voxel that will be extracted for ' - 'the statistics calculation. Possible regions are: ' - '\'SPHERE\', \'RHDD\' (rhombic dodecahedron), \'TOHD\' ' - '(truncated octahedron) with a given radius in mm or ' - '\'RECT\' (rectangular block) with dimensions to specify in mm.', - argstr="-nbhd '%s(%s)'") - _stat_names = ['pearson', 'spearman', 'quadrant', 'mutinfo', 'normuti', - 'jointent', 'hellinger', 'crU', 'crM', 'crA', 'L2slope', - 'L1slope', 'num', 'ALL'] + desc="The region around each voxel that will be extracted for " + "the statistics calculation. Possible regions are: " + "'SPHERE', 'RHDD' (rhombic dodecahedron), 'TOHD' " + "(truncated octahedron) with a given radius in mm or " + "'RECT' (rectangular block) with dimensions to specify in mm.", + argstr="-nbhd '%s(%s)'", + ) + _stat_names = [ + "pearson", + "spearman", + "quadrant", + "mutinfo", + "normuti", + "jointent", + "hellinger", + "crU", + "crM", + "crA", + "L2slope", + "L1slope", + "num", + "ALL", + ] stat = InputMultiPath( traits.Enum(_stat_names), mandatory=True, - desc='statistics to compute. Possible names are :' - ' * pearson = Pearson correlation coefficient' - ' * spearman = Spearman correlation coefficient' - ' * quadrant = Quadrant correlation coefficient' - ' * mutinfo = Mutual Information' - ' * normuti = Normalized Mutual Information' - ' * jointent = Joint entropy' - ' * hellinger= Hellinger metric' - ' * crU = Correlation ratio (Unsymmetric)' - ' * crM = Correlation ratio (symmetrized by Multiplication)' - ' * crA = Correlation ratio (symmetrized by Addition)' - ' * L2slope = slope of least-squares (L2) linear regression of ' - ' the data from dataset1 vs. the dataset2 ' - ' (i.e., d2 = a + b*d1 ==> this is \'b\')' - ' * L1slope = slope of least-absolute-sum (L1) linear ' - ' regression of the data from dataset1 vs. ' - ' the dataset2' - ' * num = number of the values in the region: ' - ' with the use of -mask or -automask, ' - ' the size of the region around any given ' - ' voxel will vary; this option lets you ' - ' map that size.' - ' * ALL = all of the above, in that order' - 'More than one option can be used.', - argstr='-stat %s...') - mask_file = traits.File( + desc="""\ +Statistics to compute. Possible names are: + + * pearson = Pearson correlation coefficient + * spearman = Spearman correlation coefficient + * quadrant = Quadrant correlation coefficient + * mutinfo = Mutual Information + * normuti = Normalized Mutual Information + * jointent = Joint entropy + * hellinger= Hellinger metric + * crU = Correlation ratio (Unsymmetric) + * crM = Correlation ratio (symmetrized by Multiplication) + * crA = Correlation ratio (symmetrized by Addition) + * L2slope = slope of least-squares (L2) linear regression of + the data from dataset1 vs. the dataset2 + (i.e., d2 = a + b*d1 ==> this is 'b') + * L1slope = slope of least-absolute-sum (L1) linear + regression of the data from dataset1 vs. 
+ the dataset2 + * num = number of the values in the region: + with the use of -mask or -automask, + the size of the region around any given + voxel will vary; this option lets you + map that size. + * ALL = all of the above, in that order + +More than one option can be used.""", + argstr="-stat %s...", + ) + mask_file = File( exists=True, - desc='mask image file name. Voxels NOT in the mask will not be used ' - 'in the neighborhood of any voxel. Also, a voxel NOT in the mask ' - 'will have its statistic(s) computed as zero (0).', - argstr='-mask %s') + desc="mask image file name. Voxels NOT in the mask will not be used " + "in the neighborhood of any voxel. Also, a voxel NOT in the mask " + "will have its statistic(s) computed as zero (0).", + argstr="-mask %s", + ) automask = traits.Bool( - desc='Compute the mask as in program 3dAutomask.', - argstr='-automask', - xor=['weight_file']) - weight_file = traits.File( + desc="Compute the mask as in program 3dAutomask.", + argstr="-automask", + xor=["weight_file"], + ) + weight_file = File( exists=True, - desc='File name of an image to use as a weight. Only applies to ' - '\'pearson\' statistics.', - argstr='-weight %s', - xor=['automask']) - out_file = traits.File( - desc='Output dataset.', - argstr='-prefix %s', - name_source='in_file1', - name_template='%s_bistat', + desc="File name of an image to use as a weight. Only applies to " + "'pearson' statistics.", + argstr="-weight %s", + xor=["automask"], + ) + out_file = File( + desc="Output dataset.", + argstr="-prefix %s", + name_source="in_file1", + name_template="%s_bistat", keep_extension=True, - position=0) + position=0, + ) class LocalBistat(AFNICommand): @@ -1428,8 +1528,7 @@ class LocalBistat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> bistat = afni.LocalBistat() >>> bistat.inputs.in_file1 = 'functional.nii' @@ -1443,157 +1542,187 @@ class LocalBistat(AFNICommand): """ - _cmd = '3dLocalBistat' + _cmd = "3dLocalBistat" input_spec = LocalBistatInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): - if name == 'neighborhood' and value[0] == 'RECT': - value = ('RECT', '%s,%s,%s' % value[1]) + if name == "neighborhood" and value[0] == "RECT": + value = ("RECT", "%s,%s,%s" % value[1]) - return super(LocalBistat, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class LocalstatInputSpec(AFNICommandInputSpec): in_file = File( - exists=True, - mandatory=True, - argstr='%s', - position=-1, - desc='input dataset') + exists=True, mandatory=True, argstr="%s", position=-1, desc="input dataset" + ) neighborhood = traits.Either( - traits.Tuple(traits.Enum('SPHERE', 'RHDD', 'TOHD'), traits.Float()), - traits.Tuple(traits.Enum('RECT'), traits.Tuple(traits.Float(), - traits.Float(), - traits.Float())), + Tuple(traits.Enum("SPHERE", "RHDD", "TOHD"), traits.Float()), + Tuple( + traits.Enum("RECT"), + Tuple(traits.Float(), traits.Float(), traits.Float()), + ), mandatory=True, - desc='The region around each voxel that will be extracted for ' - 'the statistics calculation. 
Possible regions are: ' - '\'SPHERE\', \'RHDD\' (rhombic dodecahedron), \'TOHD\' ' - '(truncated octahedron) with a given radius in mm or ' - '\'RECT\' (rectangular block) with dimensions to specify in mm.', - argstr="-nbhd '%s(%s)'") - _stat_names = ['mean', 'stdev', 'var', 'cvar', 'median', 'MAD', 'min', - 'max', 'absmax', 'num', 'sum', 'FWHM', 'FWHMbar', 'rank', - 'frank', 'P2skew', 'ALL', 'mMP2s', 'mmMP2s'] + desc="The region around each voxel that will be extracted for " + "the statistics calculation. Possible regions are: " + "'SPHERE', 'RHDD' (rhombic dodecahedron), 'TOHD' " + "(truncated octahedron) with a given radius in mm or " + "'RECT' (rectangular block) with dimensions to specify in mm.", + argstr="-nbhd '%s(%s)'", + ) + _stat_names = [ + "mean", + "stdev", + "var", + "cvar", + "median", + "MAD", + "min", + "max", + "absmax", + "num", + "sum", + "FWHM", + "FWHMbar", + "rank", + "frank", + "P2skew", + "ALL", + "mMP2s", + "mmMP2s", + ] stat = InputMultiObject( traits.Either( - traits.Enum(_stat_names), - traits.Tuple(traits.Enum('perc'), - traits.Tuple(traits.Float, traits.Float, traits.Float))), + traits.Enum(_stat_names), + Tuple( + traits.Enum("perc"), + Tuple(traits.Float, traits.Float, traits.Float), + ), + ), mandatory=True, - desc='statistics to compute. Possible names are :\n' - ' * mean = average of the values\n' - ' * stdev = standard deviation\n' - ' * var = variance (stdev*stdev)\n' - ' * cvar = coefficient of variation = stdev/fabs(mean)\n' - ' * median = median of the values\n' - ' * MAD = median absolute deviation\n' - ' * min = minimum\n' - ' * max = maximum\n' - ' * absmax = maximum of the absolute values\n' - ' * num = number of the values in the region:\n' - ' with the use of -mask or -automask,' - ' the size of the region around any given' - ' voxel will vary; this option lets you' - ' map that size. It may be useful if you' - ' plan to compute a t-statistic (say) from' - ' the mean and stdev outputs.\n' - ' * sum = sum of the values in the region\n' - ' * FWHM = compute (like 3dFWHM) image smoothness' - ' inside each voxel\'s neighborhood. Results' - ' are in 3 sub-bricks: FWHMx, FHWMy, and FWHMz.' - ' Places where an output is -1 are locations' - ' where the FWHM value could not be computed' - ' (e.g., outside the mask).\n' - ' * FWHMbar= Compute just the average of the 3 FWHM values' - ' (normally would NOT do this with FWHM also).\n' - ' * perc:P0:P1:Pstep = \n' - ' Compute percentiles between P0 and P1 with a ' - ' step of Pstep.\n' - ' Default P1 is equal to P0 and default P2 = 1\n' - ' * rank = rank of the voxel\'s intensity\n' - ' * frank = rank / number of voxels in neighborhood\n' - ' * P2skew = Pearson\'s second skewness coefficient' - ' 3 * (mean - median) / stdev\n' - ' * ALL = all of the above, in that order ' - ' (except for FWHMbar and perc).\n' - ' * mMP2s = Exactly the same output as:' - ' median, MAD, P2skew,' - ' but a little faster\n' - ' * mmMP2s = Exactly the same output as:' - ' mean, median, MAD, P2skew\n' - 'More than one option can be used.', - argstr='-stat %s...') - mask_file = traits.File( + desc="""\ +statistics to compute. 
Possible names are: + + * mean = average of the values + * stdev = standard deviation + * var = variance (stdev\\*stdev) + * cvar = coefficient of variation = stdev/fabs(mean) + * median = median of the values + * MAD = median absolute deviation + * min = minimum + * max = maximum + * absmax = maximum of the absolute values + * num = number of the values in the region: + with the use of -mask or -automask, + the size of the region around any given + voxel will vary; this option lets you + map that size. It may be useful if you + plan to compute a t-statistic (say) from + the mean and stdev outputs. + * sum = sum of the values in the region + * FWHM = compute (like 3dFWHM) image smoothness + inside each voxel's neighborhood. Results + are in 3 sub-bricks: FWHMx, FHWMy, and FWHMz. + Places where an output is -1 are locations + where the FWHM value could not be computed + (e.g., outside the mask). + * FWHMbar= Compute just the average of the 3 FWHM values + (normally would NOT do this with FWHM also). + * perc:P0:P1:Pstep = + Compute percentiles between P0 and P1 with a + step of Pstep. + Default P1 is equal to P0 and default P2 = 1 + * rank = rank of the voxel's intensity + * frank = rank / number of voxels in neighborhood + * P2skew = Pearson's second skewness coefficient + 3 \\* (mean - median) / stdev + * ALL = all of the above, in that order + (except for FWHMbar and perc). + * mMP2s = Exactly the same output as: + median, MAD, P2skew, + but a little faster + * mmMP2s = Exactly the same output as: + mean, median, MAD, P2skew + +More than one option can be used.""", + argstr="-stat %s...", + ) + mask_file = File( exists=True, - desc='Mask image file name. Voxels NOT in the mask will not be used ' - 'in the neighborhood of any voxel. Also, a voxel NOT in the ' - 'mask will have its statistic(s) computed as zero (0) unless ' - 'the parameter \'nonmask\' is set to true.', - argstr='-mask %s') + desc="Mask image file name. Voxels NOT in the mask will not be used " + "in the neighborhood of any voxel. Also, a voxel NOT in the " + "mask will have its statistic(s) computed as zero (0) unless " + "the parameter 'nonmask' is set to true.", + argstr="-mask %s", + ) automask = traits.Bool( - desc='Compute the mask as in program 3dAutomask.', - argstr='-automask') + desc="Compute the mask as in program 3dAutomask.", argstr="-automask" + ) nonmask = traits.Bool( - desc='Voxels not in the mask WILL have their local statistics ' - 'computed from all voxels in their neighborhood that ARE in ' - 'the mask.\n' - ' * For instance, this option can be used to compute the ' - ' average local white matter time series, even at non-WM ' - ' voxels.', - argstr='-use_nonmask') + desc="""\ +Voxels not in the mask WILL have their local statistics +computed from all voxels in their neighborhood that ARE in +the mask. For instance, this option can be used to compute the +average local white matter time series, even at non-WM +voxels.""", + argstr="-use_nonmask", + ) reduce_grid = traits.Either( traits.Float, - traits.Tuple(traits.Float, traits.Float, traits.Float), - argstr='-reduce_grid %s', - xor=['reduce_restore_grid', 'reduce_max_vox'], - desc='Compute output on a grid that is reduced by the specified ' - 'factors. If a single value is passed, output is resampled ' - 'to the specified isotropic grid. Otherwise, the 3 inputs ' - 'describe the reduction in the X, Y, and Z directions. This ' - 'option speeds up computations at the expense of resolution. 
' - 'It should only be used when the nbhd is quite large with ' - 'respect to the input\'s resolution, and the resultant stats ' - 'are expected to be smooth.') + Tuple(traits.Float, traits.Float, traits.Float), + argstr="-reduce_grid %s", + xor=["reduce_restore_grid", "reduce_max_vox"], + desc="Compute output on a grid that is reduced by the specified " + "factors. If a single value is passed, output is resampled " + "to the specified isotropic grid. Otherwise, the 3 inputs " + "describe the reduction in the X, Y, and Z directions. This " + "option speeds up computations at the expense of resolution. " + "It should only be used when the nbhd is quite large with " + "respect to the input's resolution, and the resultant stats " + "are expected to be smooth.", + ) reduce_restore_grid = traits.Either( traits.Float, - traits.Tuple(traits.Float, traits.Float, traits.Float), - argstr='-reduce_restore_grid %s', - xor=['reduce_max_vox', 'reduce_grid'], - desc='Like reduce_grid, but also resample output back to input' - 'grid.') + Tuple(traits.Float, traits.Float, traits.Float), + argstr="-reduce_restore_grid %s", + xor=["reduce_max_vox", "reduce_grid"], + desc="Like reduce_grid, but also resample output back to input grid.", + ) reduce_max_vox = traits.Float( - argstr='-reduce_max_vox %s', - xor=['reduce_restore_grid', 'reduce_grid'], - desc='Like reduce_restore_grid, but automatically set Rx Ry Rz so' - 'that the computation grid is at a resolution of nbhd/MAX_VOX' - 'voxels.') + argstr="-reduce_max_vox %s", + xor=["reduce_restore_grid", "reduce_grid"], + desc="Like reduce_restore_grid, but automatically set Rx Ry Rz so " + "that the computation grid is at a resolution of nbhd/MAX_VOX " + "voxels.", + ) grid_rmode = traits.Enum( - 'NN', - 'Li', - 'Cu', - 'Bk', - argstr='-grid_rmode %s', - requires=['reduce_restore_grid'], - desc='Interpolant to use when resampling the output with the' - 'reduce_restore_grid option. The resampling method string ' - 'RESAM should come from the set {\'NN\', \'Li\', \'Cu\', ' - '\'Bk\'}. These stand for \'Nearest Neighbor\', \'Linear\', ' - '\'Cubic\', and \'Blocky\' interpolation, respectively.') + "NN", + "Li", + "Cu", + "Bk", + argstr="-grid_rmode %s", + requires=["reduce_restore_grid"], + desc="Interpolant to use when resampling the output with the " + "reduce_restore_grid option. The resampling method string " + "RESAM should come from the set {'NN', 'Li', 'Cu', " + "'Bk'}. These stand for 'Nearest Neighbor', 'Linear', " + "'Cubic', and 'Blocky' interpolation, respectively.", + ) quiet = traits.Bool( - argstr='-quiet', - desc='Stop the highly informative progress reports.') + argstr="-quiet", desc="Stop the highly informative progress reports." 
+ ) overwrite = traits.Bool( - desc='overwrite output file if it already exists', - argstr='-overwrite') - out_file = traits.File( - desc='Output dataset.', - argstr='-prefix %s', - name_source='in_file', - name_template='%s_localstat', + desc="overwrite output file if it already exists", argstr="-overwrite" + ) + out_file = File( + desc="Output dataset.", + argstr="-prefix %s", + name_source="in_file", + name_template="%s_localstat", keep_extension=True, - position=0) + position=0, + ) class Localstat(AFNICommand): @@ -1604,8 +1733,7 @@ class Localstat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> localstat = afni.Localstat() >>> localstat.inputs.in_file = 'functional.nii' @@ -1619,80 +1747,87 @@ class Localstat(AFNICommand): >>> res = localstat.run() # doctest: +SKIP """ - _cmd = '3dLocalstat' + + _cmd = "3dLocalstat" input_spec = LocalstatInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): - if name == 'neighborhood' and value[0] == 'RECT': - value = ('RECT', '%s,%s,%s' % value[1]) - if name == 'stat': - value = ['perc:%s:%s:%s' % v[1] if len(v) == 2 else v for v in value] - if name == 'reduce_grid' or name == 'reduce_restore_grid': + if name == "neighborhood" and value[0] == "RECT": + value = ("RECT", "%s,%s,%s" % value[1]) + if name == "stat": + value = ["perc:%s:%s:%s" % v[1] if len(v) == 2 else v for v in value] + if name == "reduce_grid" or name == "reduce_restore_grid": if len(value) == 3: - value = '%s %s %s' % value + value = "%s %s %s" % value - return super(Localstat, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class MaskToolInputSpec(AFNICommandInputSpec): - in_file = File( - desc='input file or files to 3dmask_tool', - argstr='-input %s', + in_file = InputMultiPath( + File(exists=True), + desc="input file or files to 3dmask_tool", + argstr="-input %s", position=-1, mandatory=True, - exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_mask', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_mask", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) count = traits.Bool( - desc='Instead of created a binary 0/1 mask dataset, create one with ' - 'counts of voxel overlap, i.e., each voxel will contain the ' - 'number of masks that it is set in.', - argstr='-count', - position=2) + desc="Instead of creating a binary 0/1 mask dataset, create one with " + "counts of voxel overlap, i.e., each voxel will contain the " + "number of masks that it is set in.", + argstr="-count", + position=2, + ) datum = traits.Enum( - 'byte', - 'short', - 'float', - argstr='-datum %s', - desc='specify data type for output. Valid types are \'byte\', ' - '\'short\' and \'float\'.') + "byte", + "short", + "float", + argstr="-datum %s", + desc="specify data type for output.", + ) dilate_inputs = Str( - desc='Use this option to dilate and/or erode datasets as they are ' - 'read. ex. \'5 -5\' to dilate and erode 5 times', - argstr='-dilate_inputs %s') + desc="Use this option to dilate and/or erode datasets as they are " + "read. ex. 
'5 -5' to dilate and erode 5 times", + argstr="-dilate_inputs %s", + ) dilate_results = Str( - desc='dilate and/or erode combined mask at the given levels.', - argstr='-dilate_results %s') + desc="dilate and/or erode combined mask at the given levels.", + argstr="-dilate_results %s", + ) frac = traits.Float( - desc='When combining masks (across datasets and sub-bricks), use ' - 'this option to restrict the result to a certain fraction of the ' - 'set of volumes', - argstr='-frac %s') - inter = traits.Bool( - desc='intersection, this means -frac 1.0', argstr='-inter') - union = traits.Bool(desc='union, this means -frac 0', argstr='-union') + desc="When combining masks (across datasets and sub-bricks), use " + "this option to restrict the result to a certain fraction of the " + "set of volumes", + argstr="-frac %s", + ) + inter = traits.Bool(desc="intersection, this means -frac 1.0", argstr="-inter") + union = traits.Bool(desc="union, this means -frac 0", argstr="-union") fill_holes = traits.Bool( - desc='This option can be used to fill holes in the resulting mask, ' - 'i.e. after all other processing has been done.', - argstr='-fill_holes') + desc="This option can be used to fill holes in the resulting mask, " + "i.e. after all other processing has been done.", + argstr="-fill_holes", + ) fill_dirs = Str( - desc='fill holes only in the given directions. This option is for use ' - 'with -fill holes. should be a single string that specifies ' - '1-3 of the axes using {x,y,z} labels (i.e. dataset axis order), ' - 'or using the labels in {R,L,A,P,I,S}.', - argstr='-fill_dirs %s', - requires=['fill_holes']) - verbose = traits.Int( - desc='specify verbosity level, for 0 to 3', argstr='-verb %s') + desc="fill holes only in the given directions. This option is for use " + "with -fill_holes. Should be a single string that specifies " + "1-3 of the axes using {x,y,z} labels (i.e. 
dataset axis order), " + "or using the labels in {R,L,A,P,I,S}.", + argstr="-fill_dirs %s", + requires=["fill_holes"], + ) + verbose = traits.Int(desc="specify verbosity level, for 0 to 3", argstr="-verb %s") class MaskToolOutputSpec(TraitedSpec): - out_file = File(desc='mask file', exists=True) + out_file = File(desc="mask file", exists=True) class MaskTool(AFNICommand): @@ -1702,8 +1837,7 @@ class MaskTool(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> masktool = afni.MaskTool() >>> masktool.inputs.in_file = 'functional.nii' @@ -1714,27 +1848,31 @@ class MaskTool(AFNICommand): """ - _cmd = '3dmask_tool' + _cmd = "3dmask_tool" input_spec = MaskToolInputSpec output_spec = MaskToolOutputSpec class MergeInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( - File(desc='input file to 3dmerge', exists=True), - argstr='%s', + File(desc="input file to 3dmerge", exists=True), + argstr="%s", position=-1, mandatory=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_merge', - desc='output image file name', - argstr='-prefix %s', - name_source='in_files') + name_template="%s_merge", + desc="output image file name", + argstr="-prefix %s", + name_source="in_files", + ) doall = traits.Bool( - desc='apply options to all sub-bricks in dataset', argstr='-doall') + desc="apply options to all sub-bricks in dataset", argstr="-doall" + ) blurfwhm = traits.Int( - desc='FWHM blur value (mm)', argstr='-1blur_fwhm %d', units='mm') + desc="FWHM blur value (mm)", argstr="-1blur_fwhm %d", units="mm" + ) class Merge(AFNICommand): @@ -1744,8 +1882,7 @@ class Merge(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> merge = afni.Merge() >>> merge.inputs.in_files = ['functional.nii', 'functional2.nii'] @@ -1758,29 +1895,32 @@ class Merge(AFNICommand): """ - _cmd = '3dmerge' + _cmd = "3dmerge" input_spec = MergeInputSpec output_spec = AFNICommandOutputSpec class NotesInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dNotes', - argstr='%s', + desc="input file to 3dNotes", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) - add = Str(desc='note to add', argstr='-a "%s"') + copyfile=False, + ) + add = Str(desc="note to add", argstr='-a "%s"') add_history = Str( - desc='note to add to history', argstr='-h "%s"', xor=['rep_history']) + desc="note to add to history", argstr='-h "%s"', xor=["rep_history"] + ) rep_history = Str( - desc='note with which to replace history', + desc="note with which to replace history", argstr='-HH "%s"', - xor=['add_history']) - delete = traits.Int(desc='delete note number num', argstr='-d %d') - ses = traits.Bool(desc='print to stdout the expanded notes', argstr='-ses') - out_file = File(desc='output image file name', argstr='%s') + xor=["add_history"], + ) + delete = traits.Int(desc="delete note number num", argstr="-d %d") + ses = traits.Bool(desc="print to stdout the expanded notes", argstr="-ses") + out_file = File(desc="output image file name", argstr="%s") class Notes(CommandLine): @@ -1790,8 +1930,7 @@ class Notes(CommandLine): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> notes = afni.Notes() >>> notes.inputs.in_file = 'functional.HEAD' @@ -1800,15 +1939,16 @@ class Notes(CommandLine): >>> notes.cmdline '3dNotes -a "This note is added." -h "This note is added to history." 
functional.HEAD' >>> res = notes.run() # doctest: +SKIP + """ - _cmd = '3dNotes' + _cmd = "3dNotes" input_spec = NotesInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.in_file) + outputs["out_file"] = os.path.abspath(self.inputs.in_file) return outputs @@ -1817,24 +1957,27 @@ class NwarpAdjustInputSpec(AFNICommandInputSpec): File(exists=True), minlen=5, mandatory=True, - argstr='-nwarp %s', - desc='List of input 3D warp datasets') + argstr="-nwarp %s", + desc="List of input 3D warp datasets", + ) in_files = InputMultiPath( File(exists=True), minlen=5, - argstr='-source %s', - desc='List of input 3D datasets to be warped by the adjusted warp ' - 'datasets. There must be exactly as many of these datasets as ' - 'there are input warps.') + argstr="-source %s", + desc="List of input 3D datasets to be warped by the adjusted warp " + "datasets. There must be exactly as many of these datasets as " + "there are input warps.", + ) out_file = File( - desc='Output mean dataset, only needed if in_files are also given. ' - 'The output dataset will be on the common grid shared by the ' - 'source datasets.', - argstr='-prefix %s', - name_source='in_files', - name_template='%s_NwarpAdjust', + desc="Output mean dataset, only needed if in_files are also given. " + "The output dataset will be on the common grid shared by the " + "source datasets.", + argstr="-prefix %s", + name_source="in_files", + name_template="%s_NwarpAdjust", keep_extension=True, - requires=['in_files']) + requires=["in_files"], + ) class NwarpAdjust(AFNICommandBase): @@ -1849,8 +1992,7 @@ class NwarpAdjust(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> adjust = afni.NwarpAdjust() >>> adjust.inputs.warps = ['func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz'] @@ -1859,7 +2001,8 @@ class NwarpAdjust(AFNICommandBase): >>> res = adjust.run() # doctest: +SKIP """ - _cmd = '3dNwarpAdjust' + + _cmd = "3dNwarpAdjust" input_spec = NwarpAdjustInputSpec output_spec = AFNICommandOutputSpec @@ -1867,23 +2010,24 @@ def _parse_inputs(self, skip=None): if not self.inputs.in_files: if skip is None: skip = [] - skip += ['out_file'] - return super(NwarpAdjust, self)._parse_inputs(skip=skip) + skip += ["out_file"] + return super()._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() if self.inputs.in_files: if self.inputs.out_file: - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) else: basename = os.path.basename(self.inputs.in_files[0]) basename_noext, ext = op.splitext(basename) - if '.gz' in ext: + if ".gz" in ext: basename_noext, ext2 = op.splitext(basename_noext) ext = ext2 + ext - outputs['out_file'] = os.path.abspath( - basename_noext + '_NwarpAdjust' + ext) + outputs["out_file"] = os.path.abspath( + basename_noext + "_NwarpAdjust" + ext + ) return outputs @@ -1892,62 +2036,67 @@ class NwarpApplyInputSpec(CommandLineInputSpec): File(exists=True), traits.List(File(exists=True)), mandatory=True, - argstr='-source %s', - desc='the name of the dataset to be warped ' - 'can be multiple datasets') + argstr="-source %s", + desc="the name of the dataset to be warped can be multiple datasets", + ) warp = traits.String( - desc='the name of the warp dataset. 
' - 'multiple warps can be concatenated (make sure they exist)', - argstr='-nwarp %s', - mandatory=True) + desc="the name of the warp dataset. " + "multiple warps can be concatenated (make sure they exist)", + argstr="-nwarp %s", + mandatory=True, + ) inv_warp = traits.Bool( - desc='After the warp specified in \'-nwarp\' is computed, invert it', - argstr='-iwarp') - master = traits.File( + desc="After the warp specified in '-nwarp' is computed, invert it", + argstr="-iwarp", + ) + master = File( exists=True, - desc='the name of the master dataset, which defines the output grid', - argstr='-master %s') + desc="the name of the master dataset, which defines the output grid", + argstr="-master %s", + ) interp = traits.Enum( - 'wsinc5', - 'NN', - 'nearestneighbour', - 'nearestneighbor', - 'linear', - 'trilinear', - 'cubic', - 'tricubic', - 'quintic', - 'triquintic', - desc='defines interpolation method to use during warp', - argstr='-interp %s', - usedefault=True) + "wsinc5", + "NN", + "nearestneighbour", + "nearestneighbor", + "linear", + "trilinear", + "cubic", + "tricubic", + "quintic", + "triquintic", + desc="defines interpolation method to use during warp", + argstr="-interp %s", + usedefault=True, + ) ainterp = traits.Enum( - 'NN', - 'nearestneighbour', - 'nearestneighbor', - 'linear', - 'trilinear', - 'cubic', - 'tricubic', - 'quintic', - 'triquintic', - 'wsinc5', - desc='specify a different interpolation method than might ' - 'be used for the warp', - argstr='-ainterp %s') + "NN", + "nearestneighbour", + "nearestneighbor", + "linear", + "trilinear", + "cubic", + "tricubic", + "quintic", + "triquintic", + "wsinc5", + desc="specify a different interpolation method than might " + "be used for the warp", + argstr="-ainterp %s", + ) out_file = File( - name_template='%s_Nwarp', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_Nwarp", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) short = traits.Bool( - desc='Write output dataset using 16-bit short integers, rather than ' - 'the usual 32-bit floats.', - argstr='-short') - quiet = traits.Bool( - desc='don\'t be verbose :(', argstr='-quiet', xor=['verb']) - verb = traits.Bool( - desc='be extra verbose :)', argstr='-verb', xor=['quiet']) + desc="Write output dataset using 16-bit short integers, rather than " + "the usual 32-bit floats.", + argstr="-short", + ) + quiet = traits.Bool(desc="don't be verbose :(", argstr="-quiet", xor=["verb"]) + verb = traits.Bool(desc="be extra verbose :)", argstr="-verb", xor=["quiet"]) class NwarpApply(AFNICommandBase): @@ -1959,8 +2108,7 @@ class NwarpApply(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> nwarp = afni.NwarpApply() >>> nwarp.inputs.in_file = 'Fred+orig' @@ -1971,46 +2119,49 @@ class NwarpApply(AFNICommandBase): >>> res = nwarp.run() # doctest: +SKIP """ - _cmd = '3dNwarpApply' + + _cmd = "3dNwarpApply" input_spec = NwarpApplyInputSpec output_spec = AFNICommandOutputSpec class NwarpCatInputSpec(AFNICommandInputSpec): in_files = traits.List( - traits.Either(traits.File(), - traits.Tuple( - traits.Enum('IDENT', 'INV', 'SQRT', 'SQRTINV'), - traits.File())), + traits.Either( + File(), Tuple(traits.Enum("IDENT", "INV", "SQRT", "SQRTINV"), File()) + ), desc="list of tuples of 3D warps and associated functions", mandatory=True, argstr="%s", - position=-1) + position=-1, + ) space = traits.String( - desc='string to attach to the output dataset as its atlas space ' - 
'marker.', - argstr='-space %s') - inv_warp = traits.Bool( - desc='invert the final warp before output', argstr='-iwarp') + desc="string to attach to the output dataset as its atlas space marker.", + argstr="-space %s", + ) + inv_warp = traits.Bool(desc="invert the final warp before output", argstr="-iwarp") interp = traits.Enum( - 'wsinc5', - 'linear', - 'quintic', - desc='specify a different interpolation method than might ' - 'be used for the warp', - argstr='-interp %s', - usedefault=True) + "wsinc5", + "linear", + "quintic", + desc="specify a different interpolation method than might " + "be used for the warp", + argstr="-interp %s", + usedefault=True, + ) expad = traits.Int( - desc='Pad the nonlinear warps by the given number of voxels voxels in ' - 'all directions. The warp displacements are extended by linear ' - 'extrapolation from the faces of the input grid..', - argstr='-expad %d') + desc="Pad the nonlinear warps by the given number of voxels in " + "all directions. The warp displacements are extended by linear " + "extrapolation from the faces of the input grid.", + argstr="-expad %d", + ) out_file = File( - name_template='%s_NwarpCat', - desc='output image file name', - argstr='-prefix %s', - name_source='in_files') - verb = traits.Bool(desc='be verbose', argstr='-verb') + name_template="%s_NwarpCat", + desc="output image file name", + argstr="-prefix %s", + name_source="in_files", + ) + verb = traits.Bool(desc="be verbose", argstr="-verb") class NwarpCat(AFNICommand): @@ -2054,8 +2205,7 @@ class NwarpCat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> nwarpcat = afni.NwarpCat() >>> nwarpcat.inputs.in_files = ['Q25_warp+tlrc.HEAD', ('IDENT', 'structural.nii')] @@ -2065,91 +2215,99 @@ class NwarpCat(AFNICommand): >>> res = nwarpcat.run() # doctest: +SKIP """ - _cmd = '3dNwarpCat' + + _cmd = "3dNwarpCat" input_spec = NwarpCatInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): - if name == 'in_files': - return spec.argstr % (' '.join([ - "'" + v[0] + "(" + v[1] + ")'" if isinstance(v, tuple) else v - for v in value - ])) - return super(NwarpCat, self)._format_arg(name, spec, value) + if name == "in_files": + return spec.argstr % ( + " ".join( + [ + "'" + v[0] + "(" + v[1] + ")'" if isinstance(v, tuple) else v + for v in value + ] + ) + ) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): - if name == 'out_file': - return self._gen_fname( - self.inputs.in_files[0][0], suffix='_NwarpCat') + if name == "out_file": + return self._gen_fname(self.inputs.in_files[0][0], suffix="_NwarpCat") def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) else: - outputs['out_file'] = os.path.abspath( + outputs["out_file"] = os.path.abspath( self._gen_fname( - self.inputs.in_files[0], - suffix='_NwarpCat+tlrc', - ext='.HEAD')) + self.inputs.in_files[0], suffix="_NwarpCat+tlrc", ext=".HEAD" + ) + ) return outputs class OneDToolPyInputSpec(AFNIPythonCommandInputSpec): in_file = File( - desc='input file to OneDTool', - argstr='-infile %s', - mandatory=True, - exists=True) + desc="input file to OneDTool", argstr="-infile %s", mandatory=True, exists=True + ) set_nruns = traits.Int( - desc='treat the input data as if it has nruns', argstr='-set_nruns %d') + desc="treat the input data as if it has nruns", argstr="-set_nruns %d" + ) 
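+    # Note: motion censoring via censor_motion (below) relies on the run structure being set, e.g. through set_nruns (above) or set_run_lengths; see the censor_motion trait description.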
derivative = traits.Bool( - desc= - 'take the temporal derivative of each vector (done as first backward difference)', - argstr='-derivative') + desc="take the temporal derivative of each vector (done as first backward difference)", + argstr="-derivative", + ) demean = traits.Bool( - desc='demean each run (new mean of each run = 0.0)', argstr='-demean') + desc="demean each run (new mean of each run = 0.0)", argstr="-demean" + ) out_file = File( - desc='write the current 1D data to FILE', - argstr='-write %s', - xor=['show_cormat_warnings']) + desc="write the current 1D data to FILE", + argstr="-write %s", + xor=["show_cormat_warnings"], + ) show_censor_count = traits.Bool( - desc= - 'display the total number of censored TRs Note : if input is a valid xmat.1D dataset, ' - 'then the count will come from the header. Otherwise the input is assumed to be a binary censor' - 'file, and zeros are simply counted.', - argstr="-show_censor_count") + desc="display the total number of censored TRs. Note: if input is a valid xmat.1D dataset, " + "then the count will come from the header. Otherwise the input is assumed to be a binary censor " + "file, and zeros are simply counted.", + argstr="-show_censor_count", + ) censor_motion = Tuple( (traits.Float(), File()), - desc= - 'Tuple of motion limit and outfile prefix. need to also set set_nruns -r set_run_lengths', - argstr="-censor_motion %f %s") + desc="Tuple of motion limit and outfile prefix. Need to also set set_nruns or set_run_lengths.", + argstr="-censor_motion %f %s", + ) censor_prev_TR = traits.Bool( - desc='for each censored TR, also censor previous', - argstr='-censor_prev_TR') + desc="for each censored TR, also censor previous", argstr="-censor_prev_TR" + ) show_trs_uncensored = traits.Enum( - 'comma', - 'space', - 'encoded', - 'verbose', - desc= - 'display a list of TRs which were not censored in the specified style', - argstr='-show_trs_uncensored %s') - show_cormat_warnings = traits.File( - desc='Write cormat warnings to a file', + "comma", + "space", + "encoded", + "verbose", + desc="display a list of TRs which were not censored in the specified style", + argstr="-show_trs_uncensored %s", + ) + show_cormat_warnings = File( + desc="Write cormat warnings to a file", argstr="-show_cormat_warnings |& tee %s", position=-1, - xor=['out_file']) + xor=["out_file"], + ) show_indices_interest = traits.Bool( desc="display column indices for regs of interest", - argstr="-show_indices_interest") + argstr="-show_indices_interest", + ) show_trs_run = traits.Int( desc="restrict -show_trs_[un]censored to the given 1-based run", - argstr="-show_trs_run %d") + argstr="-show_trs_run %d", + ) class OneDToolPyOutputSpec(AFNICommandOutputSpec): - out_file = File(desc='output of 1D_tool.py') + out_file = File(desc="output of 1D_tool.py") class OneDToolPy(AFNIPythonCommand): @@ -2164,10 +2322,9 @@ class OneDToolPy(AFNIPythonCommand): >>> odt.inputs.out_file = 'motion_dmean.1D' >>> odt.cmdline # doctest: +ELLIPSIS 'python2 ...1d_tool.py -demean -infile f1.1D -write motion_dmean.1D -set_nruns 3' - >>> res = odt.run() # doctest: +SKIP -""" + >>> res = odt.run() # doctest: +SKIP""" - _cmd = '1d_tool.py' + _cmd = "1d_tool.py" input_spec = OneDToolPyInputSpec output_spec = OneDToolPyOutputSpec @@ -2176,95 +2333,102 @@ def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): - outputs['out_file'] = os.path.join(os.getcwd(), - self.inputs.out_file) + outputs["out_file"] = os.path.join(os.getcwd(), 
self.inputs.out_file) if isdefined(self.inputs.show_cormat_warnings): - outputs['out_file'] = os.path.join( - os.getcwd(), self.inputs.show_cormat_warnings) + outputs["out_file"] = os.path.join( + os.getcwd(), self.inputs.show_cormat_warnings + ) if isdefined(self.inputs.censor_motion): - outputs['out_file'] = os.path.join(os.getcwd(), - self.inputs.censor_motion[1] + - '_censor.1D') + outputs["out_file"] = os.path.join( + os.getcwd(), self.inputs.censor_motion[1] + "_censor.1D" + ) return outputs class RefitInputSpec(CommandLineInputSpec): in_file = File( - desc='input file to 3drefit', - argstr='%s', + desc="input file to 3drefit", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=True) + copyfile=True, + ) deoblique = traits.Bool( - desc='replace current transformation matrix with cardinal matrix', - argstr='-deoblique') - xorigin = Str( - desc='x distance for edge voxel offset', argstr='-xorigin %s') - yorigin = Str( - desc='y distance for edge voxel offset', argstr='-yorigin %s') - zorigin = Str( - desc='z distance for edge voxel offset', argstr='-zorigin %s') + desc="replace current transformation matrix with cardinal matrix", + argstr="-deoblique", + ) + xorigin = Str(desc="x distance for edge voxel offset", argstr="-xorigin %s") + yorigin = Str(desc="y distance for edge voxel offset", argstr="-yorigin %s") + zorigin = Str(desc="z distance for edge voxel offset", argstr="-zorigin %s") duporigin_file = File( - argstr='-duporigin %s', + argstr="-duporigin %s", exists=True, - desc='Copies the xorigin, yorigin, and zorigin values from the header ' - 'of the given dataset') - xdel = traits.Float(desc='new x voxel dimension in mm', argstr='-xdel %f') - ydel = traits.Float(desc='new y voxel dimension in mm', argstr='-ydel %f') - zdel = traits.Float(desc='new z voxel dimension in mm', argstr='-zdel %f') + desc="Copies the xorigin, yorigin, and zorigin values from the header " + "of the given dataset", + ) + xdel = traits.Float(desc="new x voxel dimension in mm", argstr="-xdel %f") + ydel = traits.Float(desc="new y voxel dimension in mm", argstr="-ydel %f") + zdel = traits.Float(desc="new z voxel dimension in mm", argstr="-zdel %f") xyzscale = traits.Float( - desc='Scale the size of the dataset voxels by the given factor', - argstr='-xyzscale %f') + desc="Scale the size of the dataset voxels by the given factor", + argstr="-xyzscale %f", + ) space = traits.Enum( - 'TLRC', - 'MNI', - 'ORIG', - argstr='-space %s', - desc='Associates the dataset with a specific template type, e.g. ' - 'TLRC, MNI, ORIG') - atrcopy = traits.Tuple( - traits.File(exists=True), + "TLRC", + "MNI", + "ORIG", + argstr="-space %s", + desc="Associates the dataset with a specific template type, e.g. " + "TLRC, MNI, ORIG", + ) + atrcopy = Tuple( + File(exists=True), traits.Str(), - argstr='-atrcopy %s %s', - desc='Copy AFNI header attribute from the given file into the header ' - 'of the dataset(s) being modified. For more information on AFNI ' - 'header attributes, see documentation file README.attributes. ' - 'More than one \'-atrcopy\' option can be used. For AFNI ' - 'advanced users only. Do NOT use -atrcopy or -atrstring with ' - 'other modification options. See also -copyaux.') - atrstring = traits.Tuple( + argstr="-atrcopy %s %s", + desc="Copy AFNI header attribute from the given file into the header " + "of the dataset(s) being modified. For more information on AFNI " + "header attributes, see documentation file README.attributes. " + "More than one '-atrcopy' option can be used. 
For AFNI " + "advanced users only. Do NOT use -atrcopy or -atrstring with " + "other modification options. See also -copyaux.", + ) atrstring = Tuple( traits.Str(), traits.Str(), - argstr='-atrstring %s %s', - desc='Copy the last given string into the dataset(s) being modified, ' - 'giving it the attribute name given by the last string.' - 'To be safe, the last string should be in quotes.') - atrfloat = traits.Tuple( + argstr="-atrstring %s %s", + desc="Copy the last given string into the dataset(s) being modified, " + "giving it the attribute name given by the last string. " + "To be safe, the last string should be in quotes.", + ) atrfloat = Tuple( traits.Str(), traits.Str(), - argstr='-atrfloat %s %s', - desc='Create or modify floating point attributes. ' - 'The input values may be specified as a single string in quotes ' - 'or as a 1D filename or string, example ' - '\'1 0.2 0 0 -0.2 1 0 0 0 0 1 0\' or ' - 'flipZ.1D or \'1D:1,0.2,2@0,-0.2,1,2@0,2@0,1,0\'') - atrint = traits.Tuple( + argstr="-atrfloat %s %s", + desc="Create or modify floating point attributes. " + "The input values may be specified as a single string in quotes " + "or as a 1D filename or string, example " + "'1 0.2 0 0 -0.2 1 0 0 0 0 1 0' or " + "flipZ.1D or '1D:1,0.2,2@0,-0.2,1,2@0,2@0,1,0'", + ) atrint = Tuple( traits.Str(), traits.Str(), - argstr='-atrint %s %s', - desc='Create or modify integer attributes. ' - 'The input values may be specified as a single string in quotes ' - 'or as a 1D filename or string, example ' - '\'1 0 0 0 0 1 0 0 0 0 1 0\' or ' - 'flipZ.1D or \'1D:1,0,2@0,-0,1,2@0,2@0,1,0\'') + argstr="-atrint %s %s", + desc="Create or modify integer attributes. " + "The input values may be specified as a single string in quotes " + "or as a 1D filename or string, example " + "'1 0 0 0 0 1 0 0 0 0 1 0' or " + "flipZ.1D or '1D:1,0,2@0,-0,1,2@0,2@0,1,0'", + ) saveatr = traits.Bool( - argstr='-saveatr', - desc='(default) Copy the attributes that are known to AFNI into ' - 'the dset->dblk structure thereby forcing changes to known ' - 'attributes to be present in the output. This option only makes ' - 'sense with -atrcopy.') - nosaveatr = traits.Bool(argstr='-nosaveatr', desc='Opposite of -saveatr') + argstr="-saveatr", + desc="(default) Copy the attributes that are known to AFNI into " + "the dset->dblk structure thereby forcing changes to known " + "attributes to be present in the output. 
This option only makes " + "sense with -atrcopy.", + ) + nosaveatr = traits.Bool(argstr="-nosaveatr", desc="Opposite of -saveatr") class Refit(AFNICommandBase): @@ -2274,8 +2438,7 @@ class Refit(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> refit = afni.Refit() >>> refit.inputs.in_file = 'structural.nii' @@ -2290,106 +2453,118 @@ class Refit(AFNICommandBase): >>> refit_2.cmdline "3drefit -atrfloat IJK_TO_DICOM_REAL '1 0.2 0 0 -0.2 1 0 0 0 0 1 0' structural.nii" >>> res = refit_2.run() # doctest: +SKIP + """ - _cmd = '3drefit' + + _cmd = "3drefit" input_spec = RefitInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.in_file) + outputs["out_file"] = os.path.abspath(self.inputs.in_file) return outputs class ReHoInputSpec(CommandLineInputSpec): in_file = File( - desc='input dataset', - argstr='-inset %s', + desc="input dataset", + argstr="-inset %s", position=1, mandatory=True, - exists=True) - out_file = traits.File( - desc='Output dataset.', - argstr='-prefix %s', - name_source='in_file', - name_template='%s_reho', + exists=True, + ) + out_file = File( + desc="Output dataset.", + argstr="-prefix %s", + name_source="in_file", + name_template="%s_reho", keep_extension=True, - position=0) + position=0, + ) chi_sq = traits.Bool( - argstr='-chi_sq', - desc='Output the Friedman chi-squared value in addition to the ' - 'Kendall\'s W. This option is currently compatible only with ' - 'the AFNI (BRIK/HEAD) output type; the chi-squared value will ' - 'be the second sub-brick of the output dataset.') - mask_file = traits.File( - desc='Mask within which ReHo should be calculated voxelwise', - argstr='-mask %s') + argstr="-chi_sq", + desc="Output the Friedman chi-squared value in addition to the " + "Kendall's W. This option is currently compatible only with " + "the AFNI (BRIK/HEAD) output type; the chi-squared value will " + "be the second sub-brick of the output dataset.", + ) + mask_file = File( + desc="Mask within which ReHo should be calculated voxelwise", argstr="-mask %s" + ) neighborhood = traits.Enum( - 'faces', - 'edges', - 'vertices', - xor=['sphere', 'ellipsoid'], - argstr='-nneigh %s', - desc='voxels in neighborhood. can be: ' - '* faces (for voxel and 6 facewise neighbors, only),\n' - '* edges (for voxel and 18 face- and edge-wise neighbors),\n' - '* vertices (for voxel and 26 face-, edge-, and node-wise ' - 'neighbors).\n') + "faces", + "edges", + "vertices", + xor=["sphere", "ellipsoid"], + argstr="-nneigh %s", + desc=""" +voxels in neighborhood. can be: +``faces`` (for voxel and 6 facewise neighbors, only), +``edges`` (for voxel and 18 face- and edge-wise neighbors), +``vertices`` (for voxel and 26 face-, edge-, and node-wise neighbors).""", + ) sphere = traits.Float( - argstr='-neigh_RAD %s', - xor=['neighborhood', 'ellipsoid'], - desc='for additional voxelwise neighborhood control, the ' - 'radius R of a desired neighborhood can be put in; R is ' - 'a floating point number, and must be >1. 
Examples of ' - 'the numbers of voxels in a given radius are as follows ' - '(you can roughly approximate with the ol\' 4*PI*(R^3)/3 ' - 'thing):\n' - ' R=2.0 -> V=33,\n' - ' R=2.3 -> V=57, \n' - ' R=2.9 -> V=93, \n' - ' R=3.1 -> V=123, \n' - ' R=3.9 -> V=251, \n' - ' R=4.5 -> V=389, \n' - ' R=6.1 -> V=949, \n' - 'but you can choose most any value.') - ellipsoid = traits.Tuple( + argstr="-neigh_RAD %s", + xor=["neighborhood", "ellipsoid"], + desc=r"""\ +For additional voxelwise neighborhood control, the +radius R of a desired neighborhood can be put in; R is +a floating point number, and must be >1. Examples of +the numbers of voxels in a given radius are as follows +(you can roughly approximate with the ol' :math:`4\pi\,R^3/3` +thing): + + * R=2.0 -> V=33 + * R=2.3 -> V=57, + * R=2.9 -> V=93, + * R=3.1 -> V=123, + * R=3.9 -> V=251, + * R=4.5 -> V=389, + * R=6.1 -> V=949, + +but you can choose most any value.""", + ) + ellipsoid = Tuple( traits.Float, traits.Float, traits.Float, - xor=['sphere', 'neighborhood'], - argstr='-neigh_X %s -neigh_Y %s -neigh_Z %s', - desc='Tuple indicating the x, y, and z radius of an ellipsoid ' - 'defining the neighbourhood of each voxel.\n' - 'The \'hood is then made according to the following relation:' - '(i/A)^2 + (j/B)^2 + (k/C)^2 <=1.\n' - 'which will have approx. V=4*PI*A*B*C/3. The impetus for ' - 'this freedom was for use with data having anisotropic ' - 'voxel edge lengths.') + xor=["sphere", "neighborhood"], + argstr="-neigh_X %s -neigh_Y %s -neigh_Z %s", + desc=r"""\ +Tuple indicating the x, y, and z radius of an ellipsoid +defining the neighbourhood of each voxel. +The 'hood is then made according to the following relation: +:math:`(i/A)^2 + (j/B)^2 + (k/C)^2 \le 1.` +which will have approx. :math:`V=4 \pi \, A B C/3`. The impetus for +this freedom was for use with data having anisotropic +voxel edge lengths.""", + ) label_set = File( exists=True, - argstr='-in_rois %s', - desc='a set of ROIs, each labelled with distinct ' - 'integers. ReHo will then be calculated per ROI.') + argstr="-in_rois %s", + desc="a set of ROIs, each labelled with distinct " + "integers. ReHo will then be calculated per ROI.", + ) overwrite = traits.Bool( - desc='overwrite output file if it already exists', - argstr='-overwrite') + desc="overwrite output file if it already exists", argstr="-overwrite" + ) class ReHoOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Voxelwise regional homogeneity map') - out_vals = File(desc='Table of labelwise regional homogenity values') + out_file = File(exists=True, desc="Voxelwise regional homogeneity map") + out_vals = File(desc="Table of labelwise regional homogeneity values") class ReHo(AFNICommandBase): - """Compute regional homogenity for a given neighbourhood.l, + """Compute regional homogeneity for a given neighbourhood, based on a local neighborhood of that voxel. For complete details, see the `3dReHo Documentation. 
`_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> reho = afni.ReHo() >>> reho.inputs.in_file = 'functional.nii' @@ -2400,57 +2575,56 @@ class ReHo(AFNICommandBase): >>> res = reho.run() # doctest: +SKIP """ - _cmd = '3dReHo' + + _cmd = "3dReHo" input_spec = ReHoInputSpec output_spec = ReHoOutputSpec def _list_outputs(self): - outputs = super(ReHo, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.label_set: - outputs['out_vals'] = outputs['out_file'] + '_ROI_reho.vals' + outputs["out_vals"] = outputs["out_file"] + "_ROI_reho.vals" return outputs def _format_arg(self, name, spec, value): - _neigh_dict = { - 'faces': 7, - 'edges': 19, - 'vertices': 27, - } - if name == 'neighborhood': + _neigh_dict = {"faces": 7, "edges": 19, "vertices": 27} + if name == "neighborhood": value = _neigh_dict[value] - return super(ReHo, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class ResampleInputSpec(AFNICommandInputSpec): - in_file = File( - desc='input file to 3dresample', - argstr='-inset %s', + desc="input file to 3dresample", + argstr="-inset %s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_resample', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') - orientation = Str(desc='new orientation code', argstr='-orient %s') + name_template="%s_resample", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) + orientation = Str(desc="new orientation code", argstr="-orient %s") resample_mode = traits.Enum( - 'NN', - 'Li', - 'Cu', - 'Bk', - argstr='-rmode %s', + "NN", + "Li", + "Cu", + "Bk", + argstr="-rmode %s", desc='resampling method from set {"NN", "Li", "Cu", "Bk"}. These are ' 'for "Nearest Neighbor", "Linear", "Cubic" and "Blocky"' - 'interpolation, respectively. Default is NN.') - voxel_size = traits.Tuple( + " interpolation, respectively. Default is NN.", + ) + voxel_size = Tuple( *[traits.Float()] * 3, - argstr='-dxyz %f %f %f', - desc='resample to new dx, dy and dz') - master = traits.File( - argstr='-master %s', desc='align dataset grid to a reference file') + argstr="-dxyz %f %f %f", + desc="resample to new dx, dy and dz", + ) + master = File(argstr="-master %s", desc="align dataset grid to a reference file") class Resample(AFNICommand): @@ -2460,8 +2634,7 @@ class Resample(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> resample = afni.Resample() >>> resample.inputs.in_file = 'functional.nii' @@ -2473,7 +2646,7 @@ class Resample(AFNICommand): """ - _cmd = '3dresample' + _cmd = "3dresample" input_spec = ResampleInputSpec output_spec = AFNICommandOutputSpec @@ -2481,29 +2654,33 @@ class TCatInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( File(exists=True), - desc='input file to 3dTcat', - argstr=' %s', + desc="input file to 3dTcat", + argstr=" %s", position=-1, mandatory=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_tcat', - desc='output image file name', - argstr='-prefix %s', - name_source='in_files') + name_template="%s_tcat", + desc="output image file name", + argstr="-prefix %s", + name_source="in_files", + ) rlt = traits.Enum( - '', - '+', - '++', - argstr='-rlt%s', - desc='Remove linear trends in each voxel time series loaded from each ' - 'input dataset, SEPARATELY. 
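# A minimal illustrative sketch (not part of the patch) of the mapping that
# ReHo._format_arg above applies: the `neighborhood` enum is translated into
# the voxel counts 3dReHo expects for -nneigh. The helper name is hypothetical;
# only the 7/19/27 values come from `_neigh_dict` in the code above.
def _nneigh_value(neighborhood):
    # voxel + 6 face neighbors = 7; + 12 edge neighbors = 19; + 8 corners = 27
    return {"faces": 7, "edges": 19, "vertices": 27}[neighborhood]

assert _nneigh_value("edges") == 19  # rendered on the command line as: -nneigh 19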
Option -rlt removes the least squares ' - 'fit of \'a+b*t\' to each voxel time series. Option -rlt+ adds ' - 'dataset mean back in. Option -rlt++ adds overall mean of all ' - 'dataset timeseries back in.', - position=1) + "", + "+", + "++", + argstr="-rlt%s", + desc="Remove linear trends in each voxel time series loaded from each " + "input dataset, SEPARATELY. Option -rlt removes the least squares " + "fit of 'a+b*t' to each voxel time series. Option -rlt+ adds " + "dataset mean back in. Option -rlt++ adds overall mean of all " + "dataset timeseries back in.", + position=1, + ) verbose = traits.Bool( - desc='Print out some verbose output as the program', argstr='-verb') + desc="Print out some verbose output as the program", argstr="-verb" + ) class TCat(AFNICommand): @@ -2516,8 +2693,7 @@ `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tcat = afni.TCat() >>> tcat.inputs.in_files = ['functional.nii', 'functional2.nii'] @@ -2529,46 +2705,46 @@ """ - _cmd = '3dTcat' + _cmd = "3dTcat" input_spec = TCatInputSpec output_spec = AFNICommandOutputSpec class TCatSBInputSpec(AFNICommandInputSpec): in_files = traits.List( - traits.Tuple(File(exists=True), Str()), - desc='List of tuples of file names and subbrick selectors as strings.' - 'Don\'t forget to protect the single quotes in the subbrick selector' - 'so the contents are protected from the command line interpreter.', - argstr='%s%s ...', + Tuple(File(exists=True), Str()), + desc="List of tuples of file names and subbrick selectors as strings. " + "Don't forget to protect the single quotes in the subbrick selector " + "so the contents are protected from the command line interpreter.", + argstr="%s%s ...", position=-1, mandatory=True, - copyfile=False) - out_file = File( - desc='output image file name', argstr='-prefix %s', genfile=True) + copyfile=False, + ) + out_file = File(desc="output image file name", argstr="-prefix %s", genfile=True) rlt = traits.Enum( - '', - '+', - '++', - argstr='-rlt%s', - desc='Remove linear trends in each voxel time series loaded from each ' - 'input dataset, SEPARATELY. Option -rlt removes the least squares ' - 'fit of \'a+b*t\' to each voxel time series. Option -rlt+ adds ' - 'dataset mean back in. Option -rlt++ adds overall mean of all ' - 'dataset timeseries back in.', - position=1) + "", + "+", + "++", + argstr="-rlt%s", + desc="Remove linear trends in each voxel time series loaded from each " + "input dataset, SEPARATELY. Option -rlt removes the least squares " + "fit of 'a+b*t' to each voxel time series. Option -rlt+ adds " + "dataset mean back in. Option -rlt++ adds overall mean of all " + "dataset timeseries back in.", + position=1, + ) class TCatSubBrick(AFNICommand): """Hopefully a temporary function to allow sub-brick selection until - afni file managment is improved. + afni file management is improved. For complete details, see the `3dTcat Documentation. 
`_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tcsb = afni.TCatSubBrick() >>> tcsb.inputs.in_files = [('functional.nii', "'{2..$}'"), ('functional2.nii', "'{2..$}'")] @@ -2580,30 +2756,32 @@ class TCatSubBrick(AFNICommand): """ - _cmd = '3dTcat' + _cmd = "3dTcat" input_spec = TCatSBInputSpec output_spec = AFNICommandOutputSpec def _gen_filename(self, name): - if name == 'out_file': - return self._gen_fname(self.inputs.in_files[0][0], suffix='_tcat') + if name == "out_file": + return self._gen_fname(self.inputs.in_files[0][0], suffix="_tcat") class TStatInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dTstat', - argstr='%s', + desc="input file to 3dTstat", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_tstat', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') - mask = File(desc='mask file', argstr='-mask %s', exists=True) - options = Str(desc='selected statistical output', argstr='%s') + name_template="%s_tstat", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) + mask = File(desc="mask file", argstr="-mask %s", exists=True) + options = Str(desc="selected statistical output", argstr="%s") class TStat(AFNICommand): @@ -2613,8 +2791,7 @@ class TStat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tstat = afni.TStat() >>> tstat.inputs.in_file = 'functional.nii' @@ -2626,64 +2803,66 @@ class TStat(AFNICommand): """ - _cmd = '3dTstat' + _cmd = "3dTstat" input_spec = TStatInputSpec output_spec = AFNICommandOutputSpec class To3DInputSpec(AFNICommandInputSpec): out_file = File( - name_template='%s', - desc='output image file name', - argstr='-prefix %s', - name_source=['in_folder']) + name_template="%s", + desc="output image file name", + argstr="-prefix %s", + name_source=["in_folder"], + ) in_folder = Directory( - desc='folder with DICOM images to convert', - argstr='%s/*.dcm', + desc="folder with DICOM images to convert", + argstr="%s/*.dcm", position=-1, mandatory=True, - exists=True) + exists=True, + ) filetype = traits.Enum( - 'spgr', - 'fse', - 'epan', - 'anat', - 'ct', - 'spct', - 'pet', - 'mra', - 'bmap', - 'diff', - 'omri', - 'abuc', - 'fim', - 'fith', - 'fico', - 'fitt', - 'fift', - 'fizt', - 'fict', - 'fibt', - 'fibn', - 'figt', - 'fipt', - 'fbuc', - argstr='-%s', - desc='type of datafile being converted') - skipoutliers = traits.Bool( - desc='skip the outliers check', argstr='-skip_outliers') + "spgr", + "fse", + "epan", + "anat", + "ct", + "spct", + "pet", + "mra", + "bmap", + "diff", + "omri", + "abuc", + "fim", + "fith", + "fico", + "fitt", + "fift", + "fizt", + "fict", + "fibt", + "fibn", + "figt", + "fipt", + "fbuc", + argstr="-%s", + desc="type of datafile being converted", + ) + skipoutliers = traits.Bool(desc="skip the outliers check", argstr="-skip_outliers") assumemosaic = traits.Bool( - desc='assume that Siemens image is mosaic', - argstr='-assume_dicom_mosaic') + desc="assume that Siemens image is mosaic", argstr="-assume_dicom_mosaic" + ) datatype = traits.Enum( - 'short', - 'float', - 'byte', - 'complex', - desc='set output file datatype', - argstr='-datum %s') - funcparams = Str( - desc='parameters for functional data', argstr='-time:zt %s alt+z2') + "short", + "float", + "byte", + "complex", + desc="set output file datatype", + argstr="-datum %s", + ) + funcparams = Str(desc="parameters for functional data", 
argstr="-time:zt %s alt+z2") class To3D(AFNICommand): @@ -2693,8 +2872,7 @@ class To3D(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> to3d = afni.To3D() >>> to3d.inputs.datatype = 'float' @@ -2705,78 +2883,82 @@ class To3D(AFNICommand): 'to3d -datum float -anat -prefix dicomdir.nii ./*.dcm' >>> res = to3d.run() # doctest: +SKIP - """ + """ - _cmd = 'to3d' + _cmd = "to3d" input_spec = To3DInputSpec output_spec = AFNICommandOutputSpec class UndumpInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dUndump, whose geometry will determine' - 'the geometry of the output', - argstr='-master %s', + desc="input file to 3dUndump, whose geometry will determine" + "the geometry of the output", + argstr="-master %s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + desc="output image file name", argstr="-prefix %s", name_source="in_file" + ) mask_file = File( - desc='mask image file name. Only voxels that are nonzero in the mask ' - 'can be set.', - argstr='-mask %s') + desc="mask image file name. Only voxels that are nonzero in the mask " + "can be set.", + argstr="-mask %s", + ) datatype = traits.Enum( - 'short', - 'float', - 'byte', - desc='set output file datatype', - argstr='-datum %s') + "short", "float", "byte", desc="set output file datatype", argstr="-datum %s" + ) default_value = traits.Float( - desc='default value stored in each input voxel that does not have ' - 'a value supplied in the input file', - argstr='-dval %f') + desc="default value stored in each input voxel that does not have " + "a value supplied in the input file", + argstr="-dval %f", + ) fill_value = traits.Float( - desc='value, used for each voxel in the output dataset that is NOT ' - 'listed in the input file', - argstr='-fval %f') + desc="value, used for each voxel in the output dataset that is NOT " + "listed in the input file", + argstr="-fval %f", + ) coordinates_specification = traits.Enum( - 'ijk', - 'xyz', - desc='Coordinates in the input file as index triples (i, j, k) ' - 'or spatial coordinates (x, y, z) in mm', - argstr='-%s') + "ijk", + "xyz", + desc="Coordinates in the input file as index triples (i, j, k) " + "or spatial coordinates (x, y, z) in mm", + argstr="-%s", + ) srad = traits.Float( - desc='radius in mm of the sphere that will be filled about each input ' - '(x,y,z) or (i,j,k) voxel. If the radius is not given, or is 0, ' - 'then each input data line sets the value in only one voxel.', - argstr='-srad %f') - orient = traits.Tuple( - traits.Enum('R', 'L'), - traits.Enum('A', 'P'), - traits.Enum('I', 'S'), - desc='Specifies the coordinate order used by -xyz. ' - 'The code must be 3 letters, one each from the pairs ' - '{R,L} {A,P} {I,S}. The first letter gives the ' - 'orientation of the x-axis, the second the orientation ' - 'of the y-axis, the third the z-axis: ' - 'R = right-to-left L = left-to-right ' - 'A = anterior-to-posterior P = posterior-to-anterior ' - 'I = inferior-to-superior S = superior-to-inferior ' - 'If -orient isn\'t used, then the coordinate order of the ' - '-master (in_file) dataset is used to interpret (x,y,z) inputs.', - argstr='-orient %s') + desc="radius in mm of the sphere that will be filled about each input " + "(x,y,z) or (i,j,k) voxel. 
If the radius is not given, or is 0, " + "then each input data line sets the value in only one voxel.", + argstr="-srad %f", + ) + orient = Tuple( + traits.Enum("R", "L"), + traits.Enum("A", "P"), + traits.Enum("I", "S"), + desc="Specifies the coordinate order used by -xyz. " + "The code must be 3 letters, one each from the pairs " + "{R,L} {A,P} {I,S}. The first letter gives the " + "orientation of the x-axis, the second the orientation " + "of the y-axis, the third the z-axis: " + "R = right-to-left L = left-to-right " + "A = anterior-to-posterior P = posterior-to-anterior " + "I = inferior-to-superior S = superior-to-inferior " + "If -orient isn't used, then the coordinate order of the " + "-master (in_file) dataset is used to interpret (x,y,z) inputs.", + argstr="-orient %s", + ) head_only = traits.Bool( - desc='create only the .HEAD file which gets exploited by ' - 'the AFNI matlab library function New_HEAD.m', - argstr='-head_only') + desc="create only the .HEAD file which gets exploited by " + "the AFNI matlab library function New_HEAD.m", + argstr="-head_only", + ) class UndumpOutputSpec(TraitedSpec): - out_file = File(desc='assembled file', exists=True) + out_file = File(desc="assembled file", exists=True) class Undump(AFNICommand): @@ -2804,8 +2986,7 @@ class Undump(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> unndump = afni.Undump() >>> unndump.inputs.in_file = 'structural.nii' @@ -2816,89 +2997,99 @@ """ - _cmd = '3dUndump' + _cmd = "3dUndump" input_spec = UndumpInputSpec output_spec = UndumpOutputSpec class UnifizeInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dUnifize', - argstr='-input %s', + desc="input file to 3dUnifize", + argstr="-input %s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_unifized', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_unifized", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) t2 = traits.Bool( - desc='Treat the input as if it were T2-weighted, rather than ' - 'T1-weighted. This processing is done simply by inverting ' - 'the image contrast, processing it as if that result were ' - 'T1-weighted, and then re-inverting the results ' - 'counts of voxel overlap, i.e., each voxel will contain the ' - 'number of masks that it is set in.', - argstr='-T2') + desc="Treat the input as if it were T2-weighted, rather than " + "T1-weighted. This processing is done simply by inverting " + "the image contrast, processing it as if that result were " + "T1-weighted, and then re-inverting the results.", + argstr="-T2", + ) gm = traits.Bool( - desc='Also scale to unifize \'gray matter\' = lower intensity voxels ' - '(to aid in registering images from different scanners).', - argstr='-GM') + desc="Also scale to unifize 'gray matter' = lower intensity voxels " + "(to aid in registering images from different scanners).", + argstr="-GM", + ) urad = traits.Float( - desc='Sets the radius (in voxels) of the ball used for the sneaky ' - 'trick. Default value is 18.3, and should be changed ' - 'proportionally if the dataset voxel size differs significantly ' - 'from 1 mm.', - argstr='-Urad %s') + desc="Sets the radius (in voxels) of the ball used for the sneaky " + "trick. 
Default value is 18.3, and should be changed " + "proportionally if the dataset voxel size differs significantly " + "from 1 mm.", + argstr="-Urad %s", + ) scale_file = File( - desc='output file name to save the scale factor used at each voxel ', - argstr='-ssave %s') + desc="output file name to save the scale factor used at each voxel ", + argstr="-ssave %s", + ) no_duplo = traits.Bool( - desc='Do NOT use the \'duplo down\' step; this can be useful for ' - 'lower resolution datasets.', - argstr='-noduplo') + desc="Do NOT use the 'duplo down' step; this can be useful for " + "lower resolution datasets.", + argstr="-noduplo", + ) epi = traits.Bool( - desc='Assume the input dataset is a T2 (or T2*) weighted EPI time ' - 'series. After computing the scaling, apply it to ALL volumes ' - '(TRs) in the input dataset. That is, a given voxel will be ' - 'scaled by the same factor at each TR. ' - 'This option also implies \'-noduplo\' and \'-T2\'.' - 'This option turns off \'-GM\' if you turned it on.', - argstr='-EPI', - requires=['no_duplo', 't2'], - xor=['gm']) - rbt = traits.Tuple( + desc="Assume the input dataset is a T2 (or T2\\*) weighted EPI time " + "series. After computing the scaling, apply it to ALL volumes " + "(TRs) in the input dataset. That is, a given voxel will be " + "scaled by the same factor at each TR. " + "This option also implies '-noduplo' and '-T2'. " + "This option turns off '-GM' if you turned it on.", + argstr="-EPI", + requires=["no_duplo", "t2"], + xor=["gm"], + ) + rbt = Tuple( traits.Float(), traits.Float(), traits.Float(), - desc='Option for AFNI experts only.' - 'Specify the 3 parameters for the algorithm:\n' - 'R = radius; same as given by option \'-Urad\', [default=18.3]\n' - 'b = bottom percentile of normalizing data range, [default=70.0]\n' - 'r = top percentile of normalizing data range, [default=80.0]\n', - argstr='-rbt %f %f %f') + desc="Option for AFNI experts only. " + "Specify the 3 parameters for the algorithm:\n" + "R = radius; same as given by option '-Urad', [default=18.3]\n" + "b = bottom percentile of normalizing data range, [default=70.0]\n" + "r = top percentile of normalizing data range, [default=80.0]\n", + argstr="-rbt %f %f %f", + ) t2_up = traits.Float( - desc='Option for AFNI experts only.' - 'Set the upper percentile point used for T2-T1 inversion. ' - 'Allowed to be anything between 90 and 100 (inclusive), with ' - 'default to 98.5 (for no good reason).', - argstr='-T2up %f') + desc="Option for AFNI experts only. " + "Set the upper percentile point used for T2-T1 inversion. " + "Allowed to be anything between 90 and 100 (inclusive), with " + "default to 98.5 (for no good reason).", + argstr="-T2up %f", + ) cl_frac = traits.Float( - desc='Option for AFNI experts only.' - 'Set the automask \'clip level fraction\'. Must be between ' - '0.1 and 0.9. A small fraction means to make the initial ' - 'threshold for clipping (a la 3dClipLevel) smaller, which ' - 'will tend to make the mask larger. [default=0.1]', - argstr='-clfrac %f') - quiet = traits.Bool( - desc='Don\'t print the progress messages.', argstr='-quiet') + desc="Option for AFNI experts only. " + "Set the automask 'clip level fraction'. Must be between " + "0.1 and 0.9. A small fraction means to make the initial " + "threshold for clipping (a la 3dClipLevel) smaller, which " + "will tend to make the mask larger. 
[default=0.1]", + argstr="-clfrac %f", + ) + quiet = traits.Bool(desc="Don't print the progress messages.", argstr="-quiet") class UnifizeOutputSpec(TraitedSpec): - scale_file = File(desc='scale factor file') - out_file = File(desc='unifized file', exists=True) + scale_file = File(desc="scale factor file") + out_file = File(desc="unifized file", exists=True) class Unifize(AFNICommand): @@ -2933,8 +3124,7 @@ class Unifize(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> unifize = afni.Unifize() >>> unifize.inputs.in_file = 'structural.nii' @@ -2945,25 +3135,27 @@ class Unifize(AFNICommand): """ - _cmd = '3dUnifize' + _cmd = "3dUnifize" input_spec = UnifizeInputSpec output_spec = UnifizeOutputSpec class ZCutUpInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dZcutup', - argstr='%s', + desc="input file to 3dZcutup", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_zcutup', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') - keep = Str(desc='slice range to keep in output', argstr='-keep %s') + name_template="%s_zcutup", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) + keep = Str(desc="slice range to keep in output", argstr="-keep %s") class ZCutUp(AFNICommand): @@ -2973,8 +3165,7 @@ class ZCutUp(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> zcutup = afni.ZCutUp() >>> zcutup.inputs.in_file = 'functional.nii' @@ -2986,36 +3177,38 @@ class ZCutUp(AFNICommand): """ - _cmd = '3dZcutup' + _cmd = "3dZcutup" input_spec = ZCutUpInputSpec output_spec = AFNICommandOutputSpec class GCORInputSpec(CommandLineInputSpec): in_file = File( - desc='input dataset to compute the GCOR over', - argstr='-input %s', + desc="input dataset to compute the GCOR over", + argstr="-input %s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) mask = File( - desc='mask dataset, for restricting the computation', - argstr='-mask %s', + desc="mask dataset, for restricting the computation", + argstr="-mask %s", exists=True, - copyfile=False) + copyfile=False, + ) nfirst = traits.Int( - 0, argstr='-nfirst %d', desc='specify number of initial TRs to ignore') + 0, argstr="-nfirst %d", desc="specify number of initial TRs to ignore" + ) no_demean = traits.Bool( - False, - argstr='-no_demean', - desc='do not (need to) demean as first step') + False, argstr="-no_demean", desc="do not (need to) demean as first step" + ) class GCOROutputSpec(TraitedSpec): - out = traits.Float(desc='global correlation value') + out = traits.Float(desc="global correlation value") class GCOR(CommandLine): @@ -3028,8 +3221,7 @@ class GCOR(CommandLine): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> gcor = afni.GCOR() >>> gcor.inputs.in_file = 'structural.nii' @@ -3040,56 +3232,62 @@ class GCOR(CommandLine): """ - _cmd = '@compute_gcor' + _cmd = "@compute_gcor" input_spec = GCORInputSpec output_spec = GCOROutputSpec def _run_interface(self, runtime): - runtime = super(GCOR, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) gcor_line = [ - line.strip() for line in runtime.stdout.split('\n') - if line.strip().startswith('GCOR = ') + line.strip() + for line in runtime.stdout.split("\n") + if line.strip().startswith("GCOR = ") ][-1] - setattr(self, '_gcor', float(gcor_line[len('GCOR = '):])) + self._gcor = 
float(gcor_line[len("GCOR = ") :]) return runtime def _list_outputs(self): - return {'out': getattr(self, '_gcor')} + return {"out": self._gcor} class AxializeInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3daxialize', - argstr='%s', + desc="input file to 3daxialize", + argstr="%s", position=-2, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_axialize', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') - verb = traits.Bool(desc='Print out a progerss report', argstr='-verb') + name_template="%s_axialize", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) + verb = traits.Bool(desc="Print out a progerss report", argstr="-verb") sagittal = traits.Bool( - desc='Do sagittal slice order [-orient ASL]', - argstr='-sagittal', - xor=['coronal', 'axial']) + desc="Do sagittal slice order [-orient ASL]", + argstr="-sagittal", + xor=["coronal", "axial"], + ) coronal = traits.Bool( - desc='Do coronal slice order [-orient RSA]', - argstr='-coronal', - xor=['sagittal', 'axial']) + desc="Do coronal slice order [-orient RSA]", + argstr="-coronal", + xor=["sagittal", "axial"], + ) axial = traits.Bool( - desc='Do axial slice order [-orient RAI]' - 'This is the default AFNI axial order, and' - 'is the one currently required by the' - 'volume rendering plugin; this is also' - 'the default orientation output by this' + desc="Do axial slice order [-orient RAI]" + "This is the default AFNI axial order, and" + "is the one currently required by the" + "volume rendering plugin; this is also" + "the default orientation output by this" "program (hence the program's name).", - argstr='-axial', - xor=['coronal', 'sagittal']) - orientation = Str(desc='new orientation code', argstr='-orient %s') + argstr="-axial", + xor=["coronal", "sagittal"], + ) + orientation = Str(desc="new orientation code", argstr="-orient %s") class Axialize(AFNICommand): @@ -3097,10 +3295,10 @@ class Axialize(AFNICommand): with the data brick oriented as axial slices. For complete details, see the `3dcopy Documentation. - `_ + `__ Examples - ======== + -------- >>> from nipype.interfaces import afni >>> axial3d = afni.Axialize() >>> axial3d.inputs.in_file = 'functional.nii' @@ -3111,48 +3309,53 @@ class Axialize(AFNICommand): """ - _cmd = '3daxialize' + _cmd = "3daxialize" input_spec = AxializeInputSpec output_spec = AFNICommandOutputSpec class ZcatInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( - File(desc='input files to 3dZcat', exists=True), - argstr='%s', + File(desc="input files to 3dZcat", exists=True), + argstr="%s", position=-1, mandatory=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_zcat', - desc='output dataset prefix name (default \'zcat\')', - argstr='-prefix %s', - name_source='in_files') + name_template="%s_zcat", + desc="output dataset prefix name (default 'zcat')", + argstr="-prefix %s", + name_source="in_files", + ) datum = traits.Enum( - 'byte', - 'short', - 'float', - argstr='-datum %s', - desc='specify data type for output. Valid types are \'byte\', ' - '\'short\' and \'float\'.') + "byte", + "short", + "float", + argstr="-datum %s", + desc="specify data type for output. 
Valid types are 'byte', " + "'short' and 'float'.", + ) verb = traits.Bool( - desc='print out some verbositiness as the program proceeds.', - argstr='-verb') + desc="print out some verbosity as the program proceeds.", argstr="-verb" + ) fscale = traits.Bool( - desc='Force scaling of the output to the maximum integer ' - 'range. This only has effect if the output datum is ' - 'byte or short (either forced or defaulted). This ' - 'option is sometimes necessary to eliminate ' - 'unpleasant truncation artifacts.', - argstr='-fscale', - xor=['nscale']) + desc="Force scaling of the output to the maximum integer " + "range. This only has effect if the output datum is " + "byte or short (either forced or defaulted). This " + "option is sometimes necessary to eliminate " + "unpleasant truncation artifacts.", + argstr="-fscale", + xor=["nscale"], + ) nscale = traits.Bool( - desc='Don\'t do any scaling on output to byte or short ' - 'datasets. This may be especially useful when ' - 'operating on mask datasets whose output values ' - 'are only 0\'s and 1\'s.', - argstr='-nscale', - xor=['fscale']) + desc="Don't do any scaling on output to byte or short " + "datasets. This may be especially useful when " + "operating on mask datasets whose output values " + "are only 0's and 1's.", + argstr="-nscale", + xor=["fscale"], + ) class Zcat(AFNICommand): @@ -3163,8 +3366,7 @@ `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> zcat = afni.Zcat() >>> zcat.inputs.in_files = ['functional2.nii', 'functional3.nii'] @@ -3172,101 +3374,111 @@ >>> zcat.inputs.out_file = 'cat_functional.nii' >>> zcat.cmdline '3dZcat -prefix cat_functional.nii functional2.nii functional3.nii' >>> res = zcat.run() # doctest: +SKIP + """ - _cmd = '3dZcat' + _cmd = "3dZcat" input_spec = ZcatInputSpec output_spec = AFNICommandOutputSpec class ZeropadInputSpec(AFNICommandInputSpec): in_files = File( - desc='input dataset', - argstr='%s', + desc="input dataset", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='zeropad', - desc='output dataset prefix name (default \'zeropad\')', - argstr='-prefix %s') + name_template="zeropad", + desc="output dataset prefix name (default 'zeropad')", + argstr="-prefix %s", + ) I = traits.Int( - desc='adds \'n\' planes of zero at the Inferior edge', - argstr='-I %i', - xor=['master']) + desc="adds 'n' planes of zero at the Inferior edge", + argstr="-I %i", + xor=["master"], + ) S = traits.Int( - desc='adds \'n\' planes of zero at the Superior edge', - argstr='-S %i', - xor=['master']) + desc="adds 'n' planes of zero at the Superior edge", + argstr="-S %i", + xor=["master"], + ) A = traits.Int( - desc='adds \'n\' planes of zero at the Anterior edge', - argstr='-A %i', - xor=['master']) + desc="adds 'n' planes of zero at the Anterior edge", + argstr="-A %i", + xor=["master"], + ) P = traits.Int( - desc='adds \'n\' planes of zero at the Posterior edge', - argstr='-P %i', - xor=['master']) + desc="adds 'n' planes of zero at the Posterior edge", + argstr="-P %i", + xor=["master"], + ) L = traits.Int( - desc='adds \'n\' planes of zero at the Left edge', - argstr='-L %i', - xor=['master']) + desc="adds 'n' planes of zero at the Left edge", argstr="-L %i", xor=["master"] + ) R = traits.Int( - desc='adds \'n\' planes of zero at the Right edge', - argstr='-R %i', - xor=['master']) + desc="adds 'n' planes of zero at the Right edge", argstr="-R %i", xor=["master"] + ) z = traits.Int( - desc='adds \'n\' planes 
of zero on EACH of the ' - 'dataset z-axis (slice-direction) faces', - argstr='-z %i', - xor=['master']) + desc="adds 'n' planes of zero on EACH of the " + "dataset z-axis (slice-direction) faces", + argstr="-z %i", + xor=["master"], + ) RL = traits.Int( - desc='specify that planes should be added or cut ' - 'symmetrically to make the resulting volume have' - 'N slices in the right-left direction', - argstr='-RL %i', - xor=['master']) + desc="specify that planes should be added or cut " + "symmetrically to make the resulting volume have " + "N slices in the right-left direction", + argstr="-RL %i", + xor=["master"], + ) AP = traits.Int( - desc='specify that planes should be added or cut ' - 'symmetrically to make the resulting volume have' - 'N slices in the anterior-posterior direction', - argstr='-AP %i', - xor=['master']) + desc="specify that planes should be added or cut " + "symmetrically to make the resulting volume have " + "N slices in the anterior-posterior direction", + argstr="-AP %i", + xor=["master"], + ) IS = traits.Int( - desc='specify that planes should be added or cut ' - 'symmetrically to make the resulting volume have' - 'N slices in the inferior-superior direction', - argstr='-IS %i', - xor=['master']) + desc="specify that planes should be added or cut " + "symmetrically to make the resulting volume have " + "N slices in the inferior-superior direction", + argstr="-IS %i", + xor=["master"], + ) mm = traits.Bool( - desc='pad counts \'n\' are in mm instead of slices, ' - 'where each \'n\' is an integer and at least \'n\' ' - 'mm of slices will be added/removed; e.g., n = 3 ' - 'and slice thickness = 2.5 mm ==> 2 slices added', - argstr='-mm', - xor=['master']) - master = traits.File( - desc='match the volume described in dataset ' - '\'mset\', where mset must have the same ' - 'orientation and grid spacing as dataset to be ' - 'padded. the goal of -master is to make the ' - 'output dataset from 3dZeropad match the ' - 'spatial \'extents\' of mset by adding or ' - 'subtracting slices as needed. You can\'t use ' - '-I,-S,..., or -mm with -master', - argstr='-master %s', - xor=['I', 'S', 'A', 'P', 'L', 'R', 'z', 'RL', 'AP', 'IS', 'mm']) + desc="pad counts 'n' are in mm instead of slices, " + "where each 'n' is an integer and at least 'n' " + "mm of slices will be added/removed; e.g., n = 3 " + "and slice thickness = 2.5 mm ==> 2 slices added", + argstr="-mm", + xor=["master"], + ) + master = File( + desc="match the volume described in dataset " + "'mset', where mset must have the same " + "orientation and grid spacing as dataset to be " + "padded. the goal of -master is to make the " + "output dataset from 3dZeropad match the " + "spatial 'extents' of mset by adding or " + "subtracting slices as needed. You can't use " + "-I,-S,..., or -mm with -master", + argstr="-master %s", + xor=["I", "S", "A", "P", "L", "R", "z", "RL", "AP", "IS", "mm"], + ) class Zeropad(AFNICommand): """Adds planes of zeros to a dataset (i.e., pads it out). For complete details, see the `3dZeropad Documentation. 
- `_ + `__ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> zeropad = afni.Zeropad() >>> zeropad.inputs.in_files = 'functional.nii' @@ -3280,8 +3492,9 @@ class Zeropad(AFNICommand): >>> zeropad.cmdline '3dZeropad -A 10 -I 10 -L 10 -P 10 -R 10 -S 10 -prefix pad_functional.nii functional.nii' >>> res = zeropad.run() # doctest: +SKIP + """ - _cmd = '3dZeropad' + _cmd = "3dZeropad" input_spec = ZeropadInputSpec output_spec = AFNICommandOutputSpec diff --git a/nipype/interfaces/ants/__init__.py b/nipype/interfaces/ants/__init__.py index fa441944a2..e8157a0312 100644 --- a/nipype/interfaces/ants/__init__.py +++ b/nipype/interfaces/ants/__init__.py @@ -1,26 +1,83 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Top-level namespace for ants.""" -# Registraiton programs -from .registration import (ANTS, Registration, RegistrationSynQuick, - CompositeTransformUtil, MeasureImageSimilarity) +# Registration programs +from .registration import ( + ANTS, + CompositeTransformUtil, + MeasureImageSimilarity, + Registration, + RegistrationSynQuick, +) # Resampling Programs -from .resampling import (ApplyTransforms, ApplyTransformsToPoints, - WarpImageMultiTransform, - WarpTimeSeriesImageMultiTransform) +from .resampling import ( + ApplyTransforms, + ApplyTransformsToPoints, + WarpImageMultiTransform, + WarpTimeSeriesImageMultiTransform, +) # Segmentation Programs -from .segmentation import (Atropos, LaplacianThickness, N4BiasFieldCorrection, - JointFusion, CorticalThickness, BrainExtraction, - DenoiseImage, AntsJointFusion) +from .segmentation import ( + AntsJointFusion, + Atropos, + BrainExtraction, + CorticalThickness, + DenoiseImage, + JointFusion, + LaplacianThickness, + N4BiasFieldCorrection, +) # Visualization Programs from .visualization import ConvertScalarImageToRGB, CreateTiledMosaic # Utility Programs -from .utils import (AverageAffineTransform, AverageImages, MultiplyImages, - CreateJacobianDeterminantImage, AffineInitializer, - ComposeMultiTransform, LabelGeometry) +from .utils import ( + AffineInitializer, + AI, + AverageAffineTransform, + AverageImages, + ComposeMultiTransform, + CreateJacobianDeterminantImage, + ImageMath, + LabelGeometry, + MultiplyImages, + ResampleImageBySpacing, + ThresholdImage, +) + +__all__ = [ + "AI", + "ANTS", + "AffineInitializer", + "AntsJointFusion", + "ApplyTransforms", + "ApplyTransformsToPoints", + "Atropos", + "AverageAffineTransform", + "AverageImages", + "BrainExtraction", + "ComposeMultiTransform", + "CompositeTransformUtil", + "ConvertScalarImageToRGB", + "CorticalThickness", + "CreateJacobianDeterminantImage", + "CreateTiledMosaic", + "DenoiseImage", + "ImageMath", + "JointFusion", + "LabelGeometry", + "LaplacianThickness", + "MeasureImageSimilarity", + "MultiplyImages", + "N4BiasFieldCorrection", + "Registration", + "RegistrationSynQuick", + "ResampleImageBySpacing", + "ThresholdImage", + "WarpImageMultiTransform", + "WarpTimeSeriesImageMultiTransform", +] diff --git a/nipype/interfaces/ants/base.py b/nipype/interfaces/ants/base.py index 34b64a0ec1..c78a375b02 100644 --- a/nipype/interfaces/ants/base.py +++ b/nipype/interfaces/ants/base.py @@ -1,20 +1,16 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The ants module provides basic functions for interfacing with ANTS tools.""" -from __future__ import (print_function, division, 
unicode_literals, - absolute_import) -from builtins import str - import os +from packaging.version import parse # Local imports -from ... import logging, LooseVersion -from ..base import (CommandLine, CommandLineInputSpec, traits, isdefined, - PackageInfo) -iflogger = logging.getLogger('nipype.interface') +from ... import logging +from ..base import CommandLine, CommandLineInputSpec, traits, isdefined, PackageInfo + +iflogger = logging.getLogger("nipype.interface") -# -Using -1 gives primary responsibilty to ITKv4 to do the correct +# -Using -1 gives primary responsibility to ITKv4 to do the correct # thread limitings. # -Using 1 takes a very conservative approach to avoid overloading # the computer (when running MultiProc) by forcing everything to @@ -23,58 +19,62 @@ LOCAL_DEFAULT_NUMBER_OF_THREADS = 1 # -Using NSLOTS has the same behavior as ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS # as long as ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS is not set. Otherwise -# ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS takes precidence. +# ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS takes precedence. # This behavior states that you the user explicitly specifies # num_threads, then respect that no matter what SGE tries to limit. -PREFERED_ITKv4_THREAD_LIMIT_VARIABLE = 'NSLOTS' -ALT_ITKv4_THREAD_LIMIT_VARIABLE = 'ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS' +PREFERED_ITKv4_THREAD_LIMIT_VARIABLE = "NSLOTS" +ALT_ITKv4_THREAD_LIMIT_VARIABLE = "ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS" class Info(PackageInfo): - version_cmd = os.path.join(os.getenv('ANTSPATH', ''), - 'antsRegistration') + ' --version' + version_cmd = ( + os.path.join(os.getenv("ANTSPATH", ""), "antsRegistration") + " --version" + ) @staticmethod def parse_version(raw_info): for line in raw_info.splitlines(): - if line.startswith('ANTs Version: '): + if line.startswith("ANTs Version: "): v_string = line.split()[2] break else: return None # -githash may or may not be appended - v_string = v_string.split('-')[0] + v_string = v_string.split("-")[0] - # 2.2.0-equivalent version string - if 'post' in v_string and \ - LooseVersion(v_string) >= LooseVersion('2.1.0.post789'): - return '2.2.0' - else: - return '.'.join(v_string.split('.')[:3]) + version = parse(v_string) + + # Known mislabeled versions + if version.is_postrelease: + if version.base_version == "2.1.0" and version.post >= 789: + return "2.2.0" + + # Unless we know of a specific reason to re-version, we will + # treat the base version (before pre/post/dev) as authoritative + return version.base_version class ANTSCommandInputSpec(CommandLineInputSpec): - """Base Input Specification for all ANTS Commands - """ + """Base Input Specification for all ANTS Commands""" num_threads = traits.Int( LOCAL_DEFAULT_NUMBER_OF_THREADS, usedefault=True, nohash=True, - desc="Number of ITK threads to use") + desc="Number of ITK threads to use", + ) class ANTSCommand(CommandLine): - """Base class for ANTS interfaces - """ + """Base class for ANTS interfaces""" input_spec = ANTSCommandInputSpec _num_threads = LOCAL_DEFAULT_NUMBER_OF_THREADS def __init__(self, **inputs): - super(ANTSCommand, self).__init__(**inputs) - self.inputs.on_trait_change(self._num_threads_update, 'num_threads') + super().__init__(**inputs) + self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not isdefined(self.inputs.num_threads): self.inputs.num_threads = self._num_threads @@ -92,22 +92,21 @@ def _num_threads_update(self): # default behavior should be the one specified by ITKv4 rules # (i.e. 
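# A minimal illustrative sketch (not part of the patch) of what the new
# packaging-based Info.parse_version above returns. The raw version strings
# are invented; the "-githash" stripping and the 2.1.0.post789 re-versioning
# rule are taken from the code.
from packaging.version import parse

for v_string, expected in [
    ("2.1.0.post789", "2.2.0"),  # known mislabeled post-release
    ("2.3.1-gabcdef", "2.3.1"),  # git-hash suffix is stripped before parsing
]:
    version = parse(v_string.split("-")[0])
    if version.is_postrelease and version.base_version == "2.1.0" and version.post >= 789:
        result = "2.2.0"
    else:
        # base version (before pre/post/dev) is treated as authoritative
        result = version.base_version
    assert result == expected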
respect SGE $NSLOTS or environmental variables of threads, or # user environmental settings) - if (self.inputs.num_threads == -1): - if (ALT_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ): + if self.inputs.num_threads == -1: + if ALT_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ: del self.inputs.environ[ALT_ITKv4_THREAD_LIMIT_VARIABLE] - if (PREFERED_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ): + if PREFERED_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ: del self.inputs.environ[PREFERED_ITKv4_THREAD_LIMIT_VARIABLE] else: - self.inputs.environ.update({ - PREFERED_ITKv4_THREAD_LIMIT_VARIABLE: - '%s' % self.inputs.num_threads - }) + self.inputs.environ.update( + {PREFERED_ITKv4_THREAD_LIMIT_VARIABLE: "%s" % self.inputs.num_threads} + ) @staticmethod def _format_xarray(val): - """ Convenience method for converting input arrays [1,2,3] to - commandline format '1x2x3' """ - return 'x'.join([str(x) for x in val]) + """Convenience method for converting input arrays [1,2,3] to + commandline format '1x2x3'""" + return "x".join([str(x) for x in val]) @classmethod def set_default_num_threads(cls, num_threads): diff --git a/nipype/interfaces/ants/legacy.py b/nipype/interfaces/ants/legacy.py index 40f2def728..373ca4982a 100644 --- a/nipype/interfaces/ants/legacy.py +++ b/nipype/interfaces/ants/legacy.py @@ -1,12 +1,8 @@ -# -*- coding: utf-8 -*- -# NOTE: This implementation has been superceeded buy the antsApplyTransform -# implmeentation that more closely follows the strucutre and capabilities -# of the antsApplyTransform program. This implementation is here -# for backwards compatibility. -"""ANTS Apply Transforms interface -""" +"""ANTS Legacy Interfaces -from builtins import range +These interfaces are for programs that have been deprecated by ANTs, but +are preserved for backwards compatibility. 
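# A minimal illustrative sketch (not part of the patch): the 'x'-joined string
# produced by ANTSCommand._format_xarray above, the same format traits with
# sep="x" (e.g. max_iterations) render on the command line.
assert "x".join(str(x) for x in [100, 50, 30]) == "100x50x30"  # e.g. -m 100x50x30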
+""" import os from glob import glob @@ -20,88 +16,103 @@ class antsIntroductionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, - argstr='-d %d', + argstr="-d %d", usedefault=True, - desc='image dimension (2 or 3)', - position=1) + desc="image dimension (2 or 3)", + position=1, + ) reference_image = File( exists=True, - argstr='-r %s', - desc='template file to warp to', + argstr="-r %s", + desc="template file to warp to", mandatory=True, - copyfile=True) + copyfile=True, + ) input_image = File( exists=True, - argstr='-i %s', - desc='input image to warp to template', + argstr="-i %s", + desc="input image to warp to template", mandatory=True, - copyfile=False) + copyfile=False, + ) force_proceed = traits.Bool( - argstr='-f 1', - desc=('force script to proceed even if headers ' - 'may be incompatible')) + argstr="-f 1", + desc=("force script to proceed even if headers may be incompatible"), + ) inverse_warp_template_labels = traits.Bool( - argstr='-l', - desc=('Applies inverse warp to the template labels ' - 'to estimate label positions in target space (use ' - 'for template-based segmentation)')) + argstr="-l", + desc=( + "Applies inverse warp to the template labels " + "to estimate label positions in target space (use " + "for template-based segmentation)" + ), + ) max_iterations = traits.List( traits.Int, - argstr='-m %s', - sep='x', - desc=('maximum number of iterations (must be ' - 'list of integers in the form [J,K,L...]: ' - 'J = coarsest resolution iterations, K = ' - 'middle resolution interations, L = fine ' - 'resolution iterations')) + argstr="-m %s", + sep="x", + desc=( + "maximum number of iterations (must be " + "list of integers in the form [J,K,L...]: " + "J = coarsest resolution iterations, K = " + "middle resolution iterations, L = fine " + "resolution iterations" + ), + ) bias_field_correction = traits.Bool( - argstr='-n 1', - desc=('Applies bias field correction to moving ' - 'image')) + argstr="-n 1", desc=("Applies bias field correction to moving image") + ) similarity_metric = traits.Enum( - 'PR', - 'CC', - 'MI', - 'MSQ', - argstr='-s %s', - desc=('Type of similartiy metric used for registration ' - '(CC = cross correlation, MI = mutual information, ' - 'PR = probability mapping, MSQ = mean square difference)')) + "PR", + "CC", + "MI", + "MSQ", + argstr="-s %s", + desc=( + "Type of similartiy metric used for registration " + "(CC = cross correlation, MI = mutual information, " + "PR = probability mapping, MSQ = mean square difference)" + ), + ) transformation_model = traits.Enum( - 'GR', - 'EL', - 'SY', - 'S2', - 'EX', - 'DD', - 'RI', - 'RA', - argstr='-t %s', + "GR", + "EL", + "SY", + "S2", + "EX", + "DD", + "RI", + "RA", + argstr="-t %s", usedefault=True, - desc=('Type of transofmration model used for registration ' - '(EL = elastic transformation model, SY = SyN with time, ' - 'arbitrary number of time points, S2 = SyN with time ' - 'optimized for 2 time points, GR = greedy SyN, EX = ' - 'exponential, DD = diffeomorphic demons style exponential ' - 'mapping, RI = purely rigid, RA = affine rigid')) + desc=( + "Type of transofmration model used for registration " + "(EL = elastic transformation model, SY = SyN with time, " + "arbitrary number of time points, S2 = SyN with time " + "optimized for 2 time points, GR = greedy SyN, EX = " + "exponential, DD = diffeomorphic demons style exponential " + "mapping, RI = purely rigid, RA = affine rigid" + ), + ) out_prefix = traits.Str( - 'ants_', - argstr='-o %s', + "ants_", + argstr="-o %s", usedefault=True, - 
desc=('Prefix that is prepended to all output ' - 'files (default = ants_)')) + desc=("Prefix that is prepended to all output files (default = ants_)"), + ) quality_check = traits.Bool( - argstr='-q 1', desc='Perform a quality check of the result') + argstr="-q 1", desc="Perform a quality check of the result" + ) class antsIntroductionOutputSpec(TraitedSpec): - affine_transformation = File( - exists=True, desc='affine (prefix_Affine.txt)') - warp_field = File(exists=True, desc='warp field (prefix_Warp.nii)') + affine_transformation = File(exists=True, desc="affine (prefix_Affine.txt)") + warp_field = File(exists=True, desc="warp field (prefix_Warp.nii)") inverse_warp_field = File( - exists=True, desc='inverse warp field (prefix_InverseWarp.nii)') - input_file = File(exists=True, desc='input image (prefix_repaired.nii)') - output_file = File(exists=True, desc='output image (prefix_deformed.nii)') + exists=True, desc="inverse warp field (prefix_InverseWarp.nii)" + ) + input_file = File(exists=True, desc="input image (prefix_repaired.nii)") + output_file = File(exists=True, desc="output image (prefix_deformed.nii)") class antsIntroduction(ANTSCommand): @@ -120,7 +131,7 @@ class antsIntroduction(ANTSCommand): """ - _cmd = 'antsIntroduction.sh' + _cmd = "antsIntroduction.sh" input_spec = antsIntroductionInputSpec output_spec = antsIntroductionOutputSpec @@ -130,19 +141,25 @@ def _list_outputs(self): # When transform is set as 'RI'/'RA', wrap fields should not be expected # The default transformation is GR, which outputs the wrap fields - if not isdefined(transmodel) or (isdefined(transmodel) - and transmodel not in ['RI', 'RA']): - outputs['warp_field'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'Warp.nii.gz') - outputs['inverse_warp_field'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'InverseWarp.nii.gz') - - outputs['affine_transformation'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'Affine.txt') - outputs['input_file'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'repaired.nii.gz') - outputs['output_file'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'deformed.nii.gz') + if not isdefined(transmodel) or ( + isdefined(transmodel) and transmodel not in ["RI", "RA"] + ): + outputs["warp_field"] = os.path.join( + os.getcwd(), self.inputs.out_prefix + "Warp.nii.gz" + ) + outputs["inverse_warp_field"] = os.path.join( + os.getcwd(), self.inputs.out_prefix + "InverseWarp.nii.gz" + ) + + outputs["affine_transformation"] = os.path.join( + os.getcwd(), self.inputs.out_prefix + "Affine.txt" + ) + outputs["input_file"] = os.path.join( + os.getcwd(), self.inputs.out_prefix + "repaired.nii.gz" + ) + outputs["output_file"] = os.path.join( + os.getcwd(), self.inputs.out_prefix + "deformed.nii.gz" + ) return outputs @@ -159,103 +176,123 @@ class buildtemplateparallelInputSpec(ANTSCommandInputSpec): 3, 2, 4, - argstr='-d %d', + argstr="-d %d", usedefault=True, - desc='image dimension (2, 3 or 4)', - position=1) + desc="image dimension (2, 3 or 4)", + position=1, + ) out_prefix = traits.Str( - 'antsTMPL_', - argstr='-o %s', + "antsTMPL_", + argstr="-o %s", usedefault=True, - desc=('Prefix that is prepended to all output ' - 'files (default = antsTMPL_)')) + desc=("Prefix that is prepended to all output files (default = antsTMPL_)"), + ) in_files = traits.List( File(exists=True), mandatory=True, - desc='list of images to generate template from', - argstr='%s', - position=-1) + desc="list of images to generate template from", + argstr="%s", + position=-1, 
+ ) parallelization = traits.Enum( 0, 1, 2, - argstr='-c %d', + argstr="-c %d", usedefault=True, - desc=('control for parallel processing (0 = ' - 'serial, 1 = use PBS, 2 = use PEXEC, 3 = ' - 'use Apple XGrid')) + desc=( + "control for parallel processing (0 = " + "serial, 1 = use PBS, 2 = use PEXEC, 3 = " + "use Apple XGrid)" + ), + ) gradient_step_size = traits.Float( - argstr='-g %f', - desc=('smaller magnitude results in ' - 'more cautious steps (default = ' - '.25)')) + argstr="-g %f", + desc=("smaller magnitude results in more cautious steps (default = .25)"), + ) iteration_limit = traits.Int( - 4, - argstr='-i %d', - usedefault=True, - desc='iterations of template construction') + 4, argstr="-i %d", usedefault=True, desc="iterations of template construction" + ) num_cores = traits.Int( - argstr='-j %d', - requires=['parallelization'], - desc=('Requires parallelization = 2 (PEXEC). ' - 'Sets number of cpu cores to use')) + argstr="-j %d", + requires=["parallelization"], + desc=("Requires parallelization = 2 (PEXEC). Sets number of cpu cores to use"), + ) max_iterations = traits.List( traits.Int, - argstr='-m %s', - sep='x', - desc=('maximum number of iterations (must be ' - 'list of integers in the form [J,K,L...]: ' - 'J = coarsest resolution iterations, K = ' - 'middle resolution interations, L = fine ' - 'resolution iterations')) + argstr="-m %s", + sep="x", + desc=( + "maximum number of iterations (must be " + "list of integers in the form [J,K,L...]: " + "J = coarsest resolution iterations, K = " + "middle resolution iterations, L = fine " + "resolution iterations" + ), + ) bias_field_correction = traits.Bool( - argstr='-n 1', - desc=('Applies bias field correction to moving ' - 'image')) + argstr="-n 1", desc=("Applies bias field correction to moving image") + ) rigid_body_registration = traits.Bool( - argstr='-r 1', - desc=('registers inputs before creating template ' - '(useful if no initial template available)')) + argstr="-r 1", + desc=( + "registers inputs before creating template " + "(useful if no initial template available)" + ), + ) similarity_metric = traits.Enum( - 'PR', - 'CC', - 'MI', - 'MSQ', - argstr='-s %s', - desc=('Type of similartiy metric used for registration ' - '(CC = cross correlation, MI = mutual information, ' - 'PR = probability mapping, MSQ = mean square difference)')) + "PR", + "CC", + "MI", + "MSQ", + argstr="-s %s", + desc=( + "Type of similarity metric used for registration " + "(CC = cross correlation, MI = mutual information, " + "PR = probability mapping, MSQ = mean square difference)" + ), + ) transformation_model = traits.Enum( - 'GR', - 'EL', - 'SY', - 'S2', - 'EX', - 'DD', - argstr='-t %s', + "GR", + "EL", + "SY", + "S2", + "EX", + "DD", + argstr="-t %s", usedefault=True, - desc=('Type of transofmration model used for registration ' - '(EL = elastic transformation model, SY = SyN with time, ' - 'arbitrary number of time points, S2 = SyN with time ' - 'optimized for 2 time points, GR = greedy SyN, EX = ' - 'exponential, DD = diffeomorphic demons style exponential ' - 'mapping')) + desc=( + "Type of transformation model used for registration " + "(EL = elastic transformation model, SY = SyN with time, " + "arbitrary number of time points, S2 = SyN with time " + "optimized for 2 time points, GR = greedy SyN, EX = " + "exponential, DD = diffeomorphic demons style exponential " + "mapping" + ), + ) use_first_as_target = traits.Bool( - desc=('uses first volume as target of ' - 'all inputs. 
When not used, an ' - 'unbiased average image is used ' - 'to start.')) + desc=( + "uses first volume as target of " + "all inputs. When not used, an " + "unbiased average image is used " + "to start." + ) + ) class buildtemplateparallelOutputSpec(TraitedSpec): - final_template_file = File(exists=True, desc='final ANTS template') + final_template_file = File(exists=True, desc="final ANTS template") template_files = OutputMultiPath( - File(exists=True), desc='Templates from different stages of iteration') + File(exists=True), desc="Templates from different stages of iteration" + ) subject_outfiles = OutputMultiPath( File(exists=True), - desc=('Outputs for each input image. Includes warp ' - 'field, inverse warp, Affine, original image ' - '(repaired) and warped image (deformed)')) + desc=( + "Outputs for each input image. Includes warp " + "field, inverse warp, Affine, original image " + "(repaired) and warped image (deformed)" + ), + ) class buildtemplateparallel(ANTSCommand): @@ -277,49 +314,54 @@ class buildtemplateparallel(ANTSCommand): """ - _cmd = 'buildtemplateparallel.sh' + _cmd = "buildtemplateparallel.sh" input_spec = buildtemplateparallelInputSpec output_spec = buildtemplateparallelOutputSpec def _format_arg(self, opt, spec, val): - if opt == 'num_cores': + if opt == "num_cores": if self.inputs.parallelization == 2: - return '-j ' + str(val) + return "-j " + str(val) else: - return '' - if opt == 'in_files': + return "" + if opt == "in_files": if self.inputs.use_first_as_target: - start = '-z ' + start = "-z " else: - start = '' - return start + ' '.join(name for name in val) - return super(buildtemplateparallel, self)._format_arg(opt, spec, val) + start = "" + return start + " ".join(name for name in val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['template_files'] = [] - for i in range(len(glob(os.path.realpath('*iteration*')))): - temp = os.path.realpath('%s_iteration_%d/%stemplate.nii.gz' % - (self.inputs.transformation_model, i, - self.inputs.out_prefix)) + outputs["template_files"] = [] + for i in range(len(glob(os.path.realpath("*iteration*")))): + temp = os.path.realpath( + "%s_iteration_%d/%stemplate.nii.gz" + % (self.inputs.transformation_model, i, self.inputs.out_prefix) + ) os.rename( temp, - os.path.realpath('%s_iteration_%d/%stemplate_i%d.nii.gz' % - (self.inputs.transformation_model, i, - self.inputs.out_prefix, i))) - file_ = ('%s_iteration_%d/%stemplate_i%d.nii.gz' % - (self.inputs.transformation_model, i, - self.inputs.out_prefix, i)) - - outputs['template_files'].append(os.path.realpath(file_)) - outputs['final_template_file'] = \ - os.path.realpath('%stemplate.nii.gz' % - self.inputs.out_prefix) - outputs['subject_outfiles'] = [] + os.path.realpath( + "%s_iteration_%d/%stemplate_i%d.nii.gz" + % (self.inputs.transformation_model, i, self.inputs.out_prefix, i) + ), + ) + file_ = "%s_iteration_%d/%stemplate_i%d.nii.gz" % ( + self.inputs.transformation_model, + i, + self.inputs.out_prefix, + i, + ) + + outputs["template_files"].append(os.path.realpath(file_)) + outputs["final_template_file"] = os.path.realpath( + "%stemplate.nii.gz" % self.inputs.out_prefix + ) + outputs["subject_outfiles"] = [] for filename in self.inputs.in_files: _, base, _ = split_filename(filename) - temp = glob( - os.path.realpath('%s%s*' % (self.inputs.out_prefix, base))) + temp = glob(os.path.realpath(f"{self.inputs.out_prefix}{base}*")) for file_ in temp: - outputs['subject_outfiles'].append(file_) + 
outputs["subject_outfiles"].append(file_) return outputs diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 7c4a696c5a..41037ffc5f 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -1,36 +1,33 @@ -# -*- coding: utf-8 -*- """The ants module provides basic functions for interfacing with ants functions. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, str + import os from ...utils.filemanip import ensure_list -from ..base import TraitedSpec, File, Str, traits, InputMultiPath, isdefined +from ..base import TraitedSpec, File, Str, traits, InputMultiPath, isdefined, Tuple from .base import ANTSCommand, ANTSCommandInputSpec, LOCAL_DEFAULT_NUMBER_OF_THREADS class ANTSInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - position=1, - desc='image dimension (2 or 3)') + 3, 2, argstr="%d", position=1, desc="image dimension (2 or 3)" + ) fixed_image = InputMultiPath( File(exists=True), mandatory=True, - desc=('image to which the moving image is ' - 'warped')) + desc=("image to which the moving image is warped"), + ) moving_image = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, - desc=('image to apply transformation to ' - '(generally a coregistered' - 'functional)')) + desc=( + "image to apply transformation to " + "(generally a coregistered " + "functional)" + ), + ) # Not all metrics are appropriate for all modalities. Also, not all metrics # are efficient or appropriate at all resolution levels. Some metrics @@ -53,75 +50,80 @@ class ANTSInputSpec(ANTSCommandInputSpec): # # Cost = Sum_i ( metricweight[i] Metric_i ( fixedimage[i], movingimage[i]) ) metric = traits.List( - traits.Enum('CC', 'MI', 'SMI', 'PR', 'SSD', 'MSQ', 'PSE'), + traits.Enum("CC", "MI", "SMI", "PR", "SSD", "MSQ", "PSE"), mandatory=True, - desc='') + desc="", + ) metric_weight = traits.List( traits.Float(), value=[1.0], usedefault=True, - requires=['metric'], + requires=["metric"], mandatory=True, - desc='the metric weight(s) for each stage. ' - 'The weights must sum to 1 per stage.') + desc="the metric weight(s) for each stage. " + "The weights must sum to 1 per stage.", + ) radius = traits.List( traits.Int(), - requires=['metric'], + requires=["metric"], mandatory=True, - desc='radius of the region (i.e. 
number of layers around a voxel/pixel)" + " that is used for computing cross correlation", + ) output_transform_prefix = Str( - 'out', - usedefault=True, - argstr='--output-naming %s', - mandatory=True, - desc='') + "out", usedefault=True, argstr="--output-naming %s", mandatory=True, desc="" + ) transformation_model = traits.Enum( - 'Diff', - 'Elast', - 'Exp', - 'Greedy Exp', - 'SyN', - argstr='%s', + "Diff", + "Elast", + "Exp", + "Greedy Exp", + "SyN", + argstr="%s", mandatory=True, - desc='') - gradient_step_length = traits.Float( - requires=['transformation_model'], desc='') - number_of_time_steps = traits.Float( - requires=['gradient_step_length'], desc='') - delta_time = traits.Float(requires=['number_of_time_steps'], desc='') - symmetry_type = traits.Float(requires=['delta_time'], desc='') + desc="", + ) + gradient_step_length = traits.Float(requires=["transformation_model"], desc="") + number_of_time_steps = traits.Int(requires=["gradient_step_length"], desc="") + delta_time = traits.Float(requires=["number_of_time_steps"], desc="") + symmetry_type = traits.Float(requires=["delta_time"], desc="") use_histogram_matching = traits.Bool( - argstr='%s', default_value=True, usedefault=True) + argstr="%s", default_value=True, usedefault=True + ) number_of_iterations = traits.List( - traits.Int(), argstr='--number-of-iterations %s', sep='x') + traits.Int(), argstr="--number-of-iterations %s", sep="x" + ) smoothing_sigmas = traits.List( - traits.Int(), argstr='--gaussian-smoothing-sigmas %s', sep='x') + traits.Int(), argstr="--gaussian-smoothing-sigmas %s", sep="x" + ) subsampling_factors = traits.List( - traits.Int(), argstr='--subsampling-factors %s', sep='x') - affine_gradient_descent_option = traits.List(traits.Float(), argstr='%s') + traits.Int(), argstr="--subsampling-factors %s", sep="x" + ) + affine_gradient_descent_option = traits.List(traits.Float(), argstr="%s") - mi_option = traits.List(traits.Int(), argstr='--MI-option %s', sep='x') - regularization = traits.Enum('Gauss', 'DMFFD', argstr='%s', desc='') + mi_option = traits.List(traits.Int(), argstr="--MI-option %s", sep="x") + regularization = traits.Enum("Gauss", "DMFFD", argstr="%s", desc="") regularization_gradient_field_sigma = traits.Float( - requires=['regularization'], desc='') + requires=["regularization"], desc="" + ) regularization_deformation_field_sigma = traits.Float( - requires=['regularization'], desc='') + requires=["regularization"], desc="" + ) number_of_affine_iterations = traits.List( - traits.Int(), argstr='--number-of-affine-iterations %s', sep='x') + traits.Int(), argstr="--number-of-affine-iterations %s", sep="x" + ) class ANTSOutputSpec(TraitedSpec): - affine_transform = File(exists=True, desc='Affine transform file') - warp_transform = File(exists=True, desc='Warping deformation field') - inverse_warp_transform = File( - exists=True, desc='Inverse warping deformation field') - metaheader = File(exists=True, desc='VTK metaheader .mhd file') - metaheader_raw = File(exists=True, desc='VTK metaheader .raw file') + affine_transform = File(exists=True, desc="Affine transform file") + warp_transform = File(exists=True, desc="Warping deformation field") + inverse_warp_transform = File(exists=True, desc="Inverse warping deformation field") + metaheader = File(exists=True, desc="VTK metaheader .mhd file") + metaheader_raw = File(exists=True, desc="VTK metaheader .raw file") class ANTS(ANTSCommand): @@ -154,26 +156,32 @@ class ANTS(ANTSCommand): 10000x10000x10000x10000x10000 --number-of-iterations 50x35x15 
--output-naming MY --regularization Gauss[3.0,0.0] \ --transformation-model SyN[0.25] --use-Histogram-Matching 1' """ - _cmd = 'ANTS' + + _cmd = "ANTS" input_spec = ANTSInputSpec output_spec = ANTSOutputSpec def _image_metric_constructor(self): retval = [] - intensity_based = ['CC', 'MI', 'SMI', 'PR', 'SSD', 'MSQ'] - point_set_based = ['PSE', 'JTB'] + intensity_based = ["CC", "MI", "SMI", "PR", "SSD", "MSQ"] + point_set_based = ["PSE", "JTB"] for ii in range(len(self.inputs.moving_image)): if self.inputs.metric[ii] in intensity_based: retval.append( - '--image-metric %s[ %s, %s, %g, %d ]' % - (self.inputs.metric[ii], self.inputs.fixed_image[ii], - self.inputs.moving_image[ii], - self.inputs.metric_weight[ii], self.inputs.radius[ii])) + "--image-metric %s[ %s, %s, %g, %d ]" + % ( + self.inputs.metric[ii], + self.inputs.fixed_image[ii], + self.inputs.moving_image[ii], + self.inputs.metric_weight[ii], + self.inputs.radius[ii], + ) + ) elif self.inputs.metric[ii] == point_set_based: pass # retval.append('--image-metric %s[%s, %s, ...'.format(self.inputs.metric[ii], # self.inputs.fixed_image[ii], self.inputs.moving_image[ii], ...)) - return ' '.join(retval) + return " ".join(retval) def _transformation_constructor(self): model = self.inputs.transformation_model @@ -181,62 +189,68 @@ def _transformation_constructor(self): time_step = self.inputs.number_of_time_steps delta_time = self.inputs.delta_time symmetry_type = self.inputs.symmetry_type - retval = ['--transformation-model %s' % model] - parameters = [] - for elem in (step_length, time_step, delta_time, symmetry_type): - if elem is not traits.Undefined: - parameters.append('%#.2g' % elem) + retval = ["--transformation-model %s" % model] + parameters = [ + "%#.2g" % elem + for elem in (step_length, time_step, delta_time, symmetry_type) + if elem is not traits.Undefined + ] if len(parameters) > 0: if len(parameters) > 1: - parameters = ','.join(parameters) + parameters = ",".join(parameters) else: - parameters = ''.join(parameters) - retval.append('[%s]' % parameters) - return ''.join(retval) + parameters = "".join(parameters) + retval.append("[%s]" % parameters) + return "".join(retval) def _regularization_constructor(self): - return '--regularization {0}[{1},{2}]'.format( + return "--regularization {}[{},{}]".format( self.inputs.regularization, self.inputs.regularization_gradient_field_sigma, - self.inputs.regularization_deformation_field_sigma) + self.inputs.regularization_deformation_field_sigma, + ) def _affine_gradient_descent_option_constructor(self): values = self.inputs.affine_gradient_descent_option - defaults = [0.1, 0.5, 1.e-4, 1.e-4] + defaults = [0.1, 0.5, 1.0e-4, 1.0e-4] for ii in range(len(defaults)): try: defaults[ii] = values[ii] except IndexError: break parameters = self._format_xarray( - [('%g' % defaults[index]) for index in range(4)]) - retval = ['--affine-gradient-descent-option', parameters] - return ' '.join(retval) + [("%g" % defaults[index]) for index in range(4)] + ) + retval = ["--affine-gradient-descent-option", parameters] + return " ".join(retval) def _format_arg(self, opt, spec, val): - if opt == 'moving_image': + if opt == "moving_image": return self._image_metric_constructor() - elif opt == 'transformation_model': + elif opt == "transformation_model": return self._transformation_constructor() - elif opt == 'regularization': + elif opt == "regularization": return self._regularization_constructor() - elif opt == 'affine_gradient_descent_option': + elif opt == "affine_gradient_descent_option": return 
self._affine_gradient_descent_option_constructor() - elif opt == 'use_histogram_matching': + elif opt == "use_histogram_matching": if self.inputs.use_histogram_matching: - return '--use-Histogram-Matching 1' + return "--use-Histogram-Matching 1" else: - return '--use-Histogram-Matching 0' - return super(ANTS, self)._format_arg(opt, spec, val) + return "--use-Histogram-Matching 0" + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['affine_transform'] = os.path.abspath( - self.inputs.output_transform_prefix + 'Affine.txt') - outputs['warp_transform'] = os.path.abspath( - self.inputs.output_transform_prefix + 'Warp.nii.gz') - outputs['inverse_warp_transform'] = os.path.abspath( - self.inputs.output_transform_prefix + 'InverseWarp.nii.gz') + outputs["affine_transform"] = os.path.abspath( + self.inputs.output_transform_prefix + "Affine.txt" + ) + outputs["warp_transform"] = os.path.abspath( + self.inputs.output_transform_prefix + "Warp.nii.gz" + ) + outputs["inverse_warp_transform"] = os.path.abspath( + self.inputs.output_transform_prefix + "InverseWarp.nii.gz" + ) # outputs['metaheader'] = os.path.abspath(self.inputs.output_transform_prefix + 'velocity.mhd') # outputs['metaheader_raw'] = os.path.abspath(self.inputs.output_transform_prefix + 'velocity.raw') return outputs @@ -246,313 +260,368 @@ class RegistrationInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, - argstr='--dimensionality %d', + argstr="--dimensionality %d", usedefault=True, - desc='image dimension (2 or 3)') + desc="image dimension (2 or 3)", + ) fixed_image = InputMultiPath( File(exists=True), mandatory=True, - desc='Image to which the moving_image should be transformed' - '(usually a structural image)') + desc="Image to which the moving_image should be transformed " + "(usually a structural image)", + ) fixed_image_mask = File( exists=True, - argstr='%s', - max_ver='2.1.0', - xor=['fixed_image_masks'], - desc='Mask used to limit metric sampling region of the fixed image' - 'in all stages') + argstr="%s", + max_ver="2.1.0", + xor=["fixed_image_masks"], + desc="Mask used to limit metric sampling region of the fixed image " + "in all stages", + ) fixed_image_masks = InputMultiPath( - traits.Either('NULL', File(exists=True)), - min_ver='2.2.0', - xor=['fixed_image_mask'], - desc= - 'Masks used to limit metric sampling region of the fixed image, defined per registration stage' - '(Use "NULL" to omit a mask at a given stage)') + traits.Either("NULL", File(exists=True)), + min_ver="2.2.0", + xor=["fixed_image_mask"], + desc="Masks used to limit metric sampling region of the fixed image, defined per registration stage " + '(Use "NULL" to omit a mask at a given stage)', + ) moving_image = InputMultiPath( File(exists=True), mandatory=True, - desc= - 'Image that will be registered to the space of fixed_image. This is the' - 'image on which the transformations will be applied to') + desc="Image that will be registered to the space of fixed_image. 
This is the " + "image to which the transformations will be applied", + ) moving_image_mask = File( exists=True, - requires=['fixed_image_mask'], - max_ver='2.1.0', - xor=['moving_image_masks'], - desc='mask used to limit metric sampling region of the moving image' - 'in all stages') + requires=["fixed_image_mask"], + max_ver="2.1.0", + xor=["moving_image_masks"], + desc="mask used to limit metric sampling region of the moving image " + "in all stages", + ) moving_image_masks = InputMultiPath( - traits.Either('NULL', File(exists=True)), - min_ver='2.2.0', - xor=['moving_image_mask'], - desc= - 'Masks used to limit metric sampling region of the moving image, defined per registration stage' - '(Use "NULL" to omit a mask at a given stage)') + traits.Either("NULL", File(exists=True)), + min_ver="2.2.0", + xor=["moving_image_mask"], + desc="Masks used to limit metric sampling region of the moving image, defined per registration stage " + '(Use "NULL" to omit a mask at a given stage)', + ) save_state = File( - argstr='--save-state %s', + argstr="--save-state %s", exists=False, - desc= - 'Filename for saving the internal restorable state of the registration' + desc="Filename for saving the internal restorable state of the registration", ) restore_state = File( - argstr='--restore-state %s', + argstr="--restore-state %s", exists=True, - desc= - 'Filename for restoring the internal restorable state of the registration' + desc="Filename for restoring the internal restorable state of the registration", ) initial_moving_transform = InputMultiPath( File(exists=True), - argstr='%s', - desc='A transform or a list of transforms that should be applied' - 'before the registration begins. Note that, when a list is given,' - 'the transformations are applied in reverse order.', - xor=['initial_moving_transform_com']) + argstr="%s", + desc="A transform or a list of transforms that should be applied " + "before the registration begins. 
Note that, when a list is given, " + "the transformations are applied in reverse order.", + xor=["initial_moving_transform_com"], + ) invert_initial_moving_transform = InputMultiPath( traits.Bool(), requires=["initial_moving_transform"], - desc='One boolean or a list of booleans that indicate' - 'whether the inverse(s) of the transform(s) defined' - 'in initial_moving_transform should be used.', - xor=['initial_moving_transform_com']) + desc="One boolean or a list of booleans that indicate " + "whether the inverse(s) of the transform(s) defined " + "in initial_moving_transform should be used.", + xor=["initial_moving_transform_com"], + ) initial_moving_transform_com = traits.Enum( 0, 1, 2, - argstr='%s', - xor=['initial_moving_transform'], - desc="Align the moving_image nad fixed_image befor registration using" - "the geometric center of the images (=0), the image intensities (=1)," - "or the origin of the images (=2)") + argstr="%s", + xor=["initial_moving_transform"], + desc="Align the moving_image and fixed_image before registration using " + "the geometric center of the images (=0), the image intensities (=1), " + "or the origin of the images (=2).", + ) metric_item_trait = traits.Enum("CC", "MeanSquares", "Demons", "GC", "MI", "Mattes") metric_stage_trait = traits.Either( metric_item_trait, traits.List(metric_item_trait) ) metric = traits.List( metric_stage_trait, mandatory=True, - desc='the metric(s) to use for each stage. ' - 'Note that multiple metrics per stage are not supported ' - 'in ANTS 1.9.1 and earlier.') + desc="the metric(s) to use for each stage. " + "Note that multiple metrics per stage are not supported " + "in ANTS 1.9.1 and earlier.", + ) metric_weight_item_trait = traits.Float(1.0, usedefault=True) metric_weight_stage_trait = traits.Either( - metric_weight_item_trait, traits.List(metric_weight_item_trait)) + metric_weight_item_trait, traits.List(metric_weight_item_trait) + ) metric_weight = traits.List( metric_weight_stage_trait, value=[1.0], usedefault=True, - requires=['metric'], + requires=["metric"], mandatory=True, - desc='the metric weight(s) for each stage. 
" + "The weights must sum to 1 per stage.", + ) radius_bins_item_trait = traits.Int(5, usedefault=True) radius_bins_stage_trait = traits.Either( - radius_bins_item_trait, traits.List(radius_bins_item_trait)) + radius_bins_item_trait, traits.List(radius_bins_item_trait) + ) radius_or_number_of_bins = traits.List( radius_bins_stage_trait, value=[5], usedefault=True, - requires=['metric_weight'], - desc='the number of bins in each stage for the MI and Mattes metric, ' - 'the radius for other metrics') - sampling_strategy_item_trait = traits.Enum("None", "Regular", "Random", - None) + requires=["metric_weight"], + desc="the number of bins in each stage for the MI and Mattes metric, " + "the radius for other metrics", + ) + sampling_strategy_item_trait = traits.Enum("None", "Regular", "Random", None) sampling_strategy_stage_trait = traits.Either( - sampling_strategy_item_trait, - traits.List(sampling_strategy_item_trait)) + sampling_strategy_item_trait, traits.List(sampling_strategy_item_trait) + ) sampling_strategy = traits.List( trait=sampling_strategy_stage_trait, - requires=['metric_weight'], - desc='the metric sampling strategy (strategies) for each stage') + requires=["metric_weight"], + desc="the metric sampling strategy (strategies) for each stage", + ) sampling_percentage_item_trait = traits.Either( - traits.Range(low=0.0, high=1.0), None) + traits.Range(low=0.0, high=1.0), None + ) sampling_percentage_stage_trait = traits.Either( - sampling_percentage_item_trait, - traits.List(sampling_percentage_item_trait)) + sampling_percentage_item_trait, traits.List(sampling_percentage_item_trait) + ) sampling_percentage = traits.List( trait=sampling_percentage_stage_trait, - requires=['sampling_strategy'], - desc="the metric sampling percentage(s) to use for each stage") - use_estimate_learning_rate_once = traits.List(traits.Bool(), desc='') + requires=["sampling_strategy"], + desc="the metric sampling percentage(s) to use for each stage", + ) + use_estimate_learning_rate_once = traits.List(traits.Bool(), desc="") use_histogram_matching = traits.Either( traits.Bool, - traits.List(traits.Bool(argstr='%s')), + traits.List(traits.Bool(argstr="%s")), default=True, usedefault=True, - desc='Histogram match the images before registration.') + desc="Histogram match the images before registration.", + ) interpolation = traits.Enum( - 'Linear', - 'NearestNeighbor', - 'CosineWindowedSinc', - 'WelchWindowedSinc', - 'HammingWindowedSinc', - 'LanczosWindowedSinc', - 'BSpline', - 'MultiLabel', - 'Gaussian', - argstr='%s', - usedefault=True) + "Linear", + "NearestNeighbor", + "CosineWindowedSinc", + "WelchWindowedSinc", + "HammingWindowedSinc", + "LanczosWindowedSinc", + "BSpline", + "MultiLabel", + "Gaussian", + "GenericLabel", + argstr="%s", + usedefault=True, + ) interpolation_parameters = traits.Either( - traits.Tuple(traits.Int()), # BSpline (order) - traits.Tuple( - traits.Float(), # Gaussian/MultiLabel (sigma, alpha) - traits.Float())) + Tuple(traits.Int()), # BSpline (order) + Tuple(traits.Float(), traits.Float()), # Gaussian/MultiLabel (sigma, alpha) + Tuple(traits.Str()), # GenericLabel (interpolator) + ) write_composite_transform = traits.Bool( - argstr='--write-composite-transform %d', + argstr="--write-composite-transform %d", default_value=False, usedefault=True, - desc='') + desc="", + ) collapse_output_transforms = traits.Bool( - argstr='--collapse-output-transforms %d', + argstr="--collapse-output-transforms %d", default_value=True, usedefault=True, # This should be true for explicit 
completeness - desc=('Collapse output transforms. Specifically, enabling this option ' - 'combines all adjacent linear transforms and composes all ' - 'adjacent displacement field transforms before writing the ' - 'results to disk.')) + desc=( + "Collapse output transforms. Specifically, enabling this option " + "combines all adjacent linear transforms and composes all " + "adjacent displacement field transforms before writing the " + "results to disk." + ), + ) initialize_transforms_per_stage = traits.Bool( - argstr='--initialize-transforms-per-stage %d', + argstr="--initialize-transforms-per-stage %d", default_value=False, usedefault=True, # This should be true for explicit completeness - desc= - ('Initialize linear transforms from the previous stage. By enabling this option, ' - 'the current linear stage transform is directly intialized from the previous ' - 'stages linear transform; this allows multiple linear stages to be run where ' - 'each stage directly updates the estimated linear transform from the previous ' - 'stage. (e.g. Translation -> Rigid -> Affine). ')) + desc=( + "Initialize linear transforms from the previous stage. By enabling this option, " + "the current linear stage transform is directly initialized from the previous " + "stage's linear transform; this allows multiple linear stages to be run where " + "each stage directly updates the estimated linear transform from the previous " + "stage. (e.g. Translation -> Rigid -> Affine). " + ), + ) # NOTE: Even though only 0=False and 1=True are allowed, ants uses integer # values instead of booleans float = traits.Bool( - argstr='--float %d', + argstr="--float %d", default_value=False, - desc='Use float instead of double for computations.') + desc="Use float instead of double for computations.", + ) transforms = traits.List( - traits.Enum('Rigid', 'Affine', 'CompositeAffine', 'Similarity', - 'Translation', 'BSpline', 'GaussianDisplacementField', - 'TimeVaryingVelocityField', - 'TimeVaryingBSplineVelocityField', 'SyN', 'BSplineSyN', - 'Exponential', 'BSplineExponential'), - argstr='%s', - mandatory=True) + traits.Enum( + "Rigid", + "Affine", + "CompositeAffine", + "Similarity", + "Translation", + "BSpline", + "GaussianDisplacementField", + "TimeVaryingVelocityField", + "TimeVaryingBSplineVelocityField", + "SyN", + "BSplineSyN", + "Exponential", + "BSplineExponential", + ), + argstr="%s", + mandatory=True, + ) # TODO: input checking and allow defaults # All parameters must be specified for BSplineDisplacementField, TimeVaryingBSplineVelocityField, BSplineSyN, # Exponential, and BSplineExponential. EVEN DEFAULTS! 
transform_parameters = traits.List( traits.Either( - traits.Tuple(traits.Float()), # Translation, Rigid, Affine, + Tuple(traits.Float()), # Translation, Rigid, Affine, # CompositeAffine, Similarity - traits.Tuple( + Tuple( traits.Float(), # GaussianDisplacementField, SyN traits.Float(), - traits.Float()), - traits.Tuple( + traits.Float(), + ), + Tuple( traits.Float(), # BSplineSyn, traits.Int(), # BSplineDisplacementField, traits.Int(), # TimeVaryingBSplineVelocityField - traits.Int()), - traits.Tuple( + traits.Int(), + ), + Tuple( traits.Float(), # TimeVaryingVelocityField traits.Int(), traits.Float(), traits.Float(), traits.Float(), - traits.Float()), - traits.Tuple( + traits.Float(), + ), + Tuple( traits.Float(), # Exponential traits.Float(), traits.Float(), - traits.Int()), - traits.Tuple( + traits.Int(), + ), + Tuple( traits.Float(), # BSplineExponential traits.Int(), traits.Int(), traits.Int(), - traits.Int()), - )) + traits.Int(), + ), + ) + ) restrict_deformation = traits.List( - traits.List(traits.Enum(0, 1)), - desc=("This option allows the user to restrict the optimization of " - "the displacement field, translation, rigid or affine transform " - "on a per-component basis. For example, if one wants to limit " - "the deformation or rotation of 3-D volume to the first two " - "dimensions, this is possible by specifying a weight vector of " - "'1x1x0' for a deformation field or '1x1x0x1x1x0' for a rigid " - "transformation. Low-dimensional restriction only works if " - "there are no preceding transformations.")) + traits.List(traits.Range(low=0.0, high=1.0)), + desc=( + "This option allows the user to restrict the optimization of " + "the displacement field, translation, rigid or affine transform " + "on a per-component basis. For example, if one wants to limit " + "the deformation or rotation of 3-D volume to the first two " + "dimensions, this is possible by specifying a weight vector of " + "'1x1x0' for a deformation field or '1x1x0x1x1x0' for a rigid " + "transformation. Low-dimensional restriction only works if " + "there are no preceding transformations." 
+ ), + ) # Convergence flags number_of_iterations = traits.List(traits.List(traits.Int())) smoothing_sigmas = traits.List(traits.List(traits.Float()), mandatory=True) sigma_units = traits.List( - traits.Enum('mm', 'vox'), - requires=['smoothing_sigmas'], - desc="units for smoothing sigmas") + traits.Enum("mm", "vox"), + requires=["smoothing_sigmas"], + desc="units for smoothing sigmas", + ) shrink_factors = traits.List(traits.List(traits.Int()), mandatory=True) convergence_threshold = traits.List( trait=traits.Float(), value=[1e-6], minlen=1, - requires=['number_of_iterations'], - usedefault=True) + requires=["number_of_iterations"], + usedefault=True, + ) convergence_window_size = traits.List( trait=traits.Int(), value=[10], minlen=1, - requires=['convergence_threshold'], - usedefault=True) + requires=["convergence_threshold"], + usedefault=True, + ) # Output flags - output_transform_prefix = Str( - "transform", usedefault=True, argstr="%s", desc="") - output_warped_image = traits.Either( - traits.Bool, File(), hash_files=False, desc="") + output_transform_prefix = Str("transform", usedefault=True, argstr="%s", desc="") + output_warped_image = traits.Either(traits.Bool, File(), hash_files=False, desc="") output_inverse_warped_image = traits.Either( - traits.Bool, - File(), - hash_files=False, - requires=['output_warped_image'], - desc="") + traits.Bool, File(), hash_files=False, requires=["output_warped_image"], desc="" + ) winsorize_upper_quantile = traits.Range( low=0.0, high=1.0, value=1.0, - argstr='%s', + argstr="%s", usedefault=True, - desc="The Upper quantile to clip image ranges") + desc="The Upper quantile to clip image ranges", + ) winsorize_lower_quantile = traits.Range( low=0.0, high=1.0, value=0.0, - argstr='%s', + argstr="%s", usedefault=True, - desc="The Lower quantile to clip image ranges") - - verbose = traits.Bool(argstr='-v', default_value=False, usedefault=True) + desc="The Lower quantile to clip image ranges", + ) + random_seed = traits.Int( + argstr="--random-seed %d", + desc="Fixed seed for random number generation", + min_ver="2.3.0", + ) + verbose = traits.Bool( + argstr="-v", default_value=False, usedefault=True, nohash=True + ) class RegistrationOutputSpec(TraitedSpec): forward_transforms = traits.List( + File(exists=True), desc="List of output transforms for forward registration" + ) + reverse_forward_transforms = traits.List( File(exists=True), - desc='List of output transforms for forward registration') + desc="List of output transforms for forward registration reversed for antsApplyTransform", + ) reverse_transforms = traits.List( - File(exists=True), - desc='List of output transforms for reverse registration') + File(exists=True), desc="List of output transforms for reverse registration" + ) forward_invert_flags = traits.List( + traits.Bool(), desc="List of flags corresponding to the forward transforms" + ) + reverse_forward_invert_flags = traits.List( traits.Bool(), - desc='List of flags corresponding to the forward transforms') + desc="List of flags corresponding to the forward transforms reversed for antsApplyTransform", + ) reverse_invert_flags = traits.List( - traits.Bool(), - desc='List of flags corresponding to the reverse transforms') - composite_transform = File(exists=True, desc='Composite transform file') - inverse_composite_transform = File(desc='Inverse composite transform file') + traits.Bool(), desc="List of flags corresponding to the reverse transforms" + ) + composite_transform = File(exists=True, desc="Composite transform file") + 
inverse_composite_transform = File(desc="Inverse composite transform file") warped_image = File(desc="Outputs warped image") inverse_warped_image = File(desc="Outputs the inverse of the warped image") save_state = File(desc="The saved registration state to be restored") - metric_value = traits.Float(desc='the final value of metric') - elapsed_time = traits.Float( - desc='the total elapsed time as reported by ANTs') + metric_value = traits.Float(desc="the final value of metric") + elapsed_time = traits.Float(desc="the total elapsed time as reported by ANTs") class Registration(ANTSCommand): @@ -571,7 +640,7 @@ class Registration(ANTSCommand): *stages*. For example, first an Affine, then a Rigid, and ultimately a non-linear (Syn)-transformation. - antsRegistration can be initialized using one ore more transforms from moving_image + antsRegistration can be initialized using one or more transforms from moving_image to fixed_image with the ``initial_moving_transform``-input. For example, when you already have a warpfield that corrects for geometrical distortions in an EPI (functional) image, that you want to apply before an Affine registration to a structural image. @@ -743,6 +812,8 @@ class Registration(ANTSCommand): 'inverse_composite_transform': '...data/output_InverseComposite.h5', 'inverse_warped_image': <undefined>, 'metric_value': <undefined>, + 'reverse_forward_invert_flags': [], + 'reverse_forward_transforms': [], 'reverse_invert_flags': [], 'reverse_transforms': [], 'save_state': '...data/trans.mat', @@ -772,6 +843,9 @@ class Registration(ANTSCommand): 'inverse_composite_transform': <undefined>, 'inverse_warped_image': <undefined>, 'metric_value': <undefined>, + 'reverse_forward_invert_flags': [False, False], + 'reverse_forward_transforms': ['...data/output_1Warp.nii.gz', + '...data/output_0GenericAffine.mat'], 'reverse_invert_flags': [True, False], 'reverse_transforms': ['...data/output_0GenericAffine.mat', \ '...data/output_1InverseWarp.nii.gz'], @@ -790,7 +864,7 @@ class Registration(ANTSCommand): --write-composite-transform 0' One can use multiple similarity metrics in a single registration stage. The Node below first - performs a linear registation using only the Mutual Information ('Mattes')-metric. + performs a linear registration using only the Mutual Information ('Mattes')-metric. In a second stage, it performs a non-linear registration ('Syn') using both a Mutual Information and a local cross-correlation ('CC')-metric. Both metrics are weighted equally ('metric_weight' is .5 for both). The Mutual Information- metric uses 32 bins. 
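Note: the reverse_forward_transforms / reverse_forward_invert_flags outputs added in this changeset exist so the forward transform chain can be handed straight to antsApplyTransforms, which consumes transforms in last-applied-first order. A minimal wiring sketch under that assumption follows; the node and workflow names are hypothetical, not part of this diff:

from nipype.pipeline import engine as pe
from nipype.interfaces.ants import ApplyTransforms, Registration

reg = pe.Node(Registration(), name="reg")               # transforms/metrics configured as usual
resample = pe.Node(ApplyTransforms(), name="resample")  # warps another image into fixed space

wf = pe.Workflow(name="apply_forward")
# ApplyTransforms expects the last-applied transform first, which is exactly
# the order reverse_forward_transforms provides.
wf.connect(reg, "reverse_forward_transforms", resample, "transforms")
wf.connect(reg, "reverse_forward_invert_flags", resample, "invert_transform_flags")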
@@ -920,36 +994,42 @@ class Registration(ANTSCommand): --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' """ - DEF_SAMPLING_STRATEGY = 'None' + + DEF_SAMPLING_STRATEGY = "None" """The default sampling strategy argument.""" - _cmd = 'antsRegistration' + _cmd = "antsRegistration" input_spec = RegistrationInputSpec output_spec = RegistrationOutputSpec _quantilesDone = False _linear_transform_names = [ - 'Rigid', 'Affine', 'Translation', 'CompositeAffine', 'Similarity' + "Rigid", + "Affine", + "Translation", + "CompositeAffine", + "Similarity", ] def __init__(self, **inputs): - super(Registration, self).__init__(**inputs) + super().__init__(**inputs) self._elapsed_time = None self._metric_value = None - def _run_interface(self, runtime, correct_return_codes=(0, )): - runtime = super(Registration, self)._run_interface(runtime) + def _run_interface(self, runtime, correct_return_codes=(0,)): + runtime = super()._run_interface(runtime) # Parse some profiling info output = runtime.stdout or runtime.merged if output: - lines = output.split('\n') + lines = output.split("\n") for l in lines[::-1]: # This should be the last line - if l.strip().startswith('Total elapsed time:'): - self._elapsed_time = float(l.strip().replace( - 'Total elapsed time: ', '')) - elif 'DIAGNOSTIC' in l: - self._metric_value = float(l.split(',')[2]) + if l.strip().startswith("Total elapsed time:"): + self._elapsed_time = float( + l.strip().replace("Total elapsed time: ", "") + ) + elif "DIAGNOSTIC" in l: + self._metric_value = float(l.split(",")[2]) break return runtime @@ -971,18 +1051,20 @@ def _format_metric(self, index): metric=name_input, weight=self.inputs.metric_weight[index], radius_or_bins=self.inputs.radius_or_number_of_bins[index], - optional=self.inputs.radius_or_number_of_bins[index]) + optional=self.inputs.radius_or_number_of_bins[index], + ) # The optional sampling strategy and percentage. - if isdefined(self.inputs.sampling_strategy - ) and self.inputs.sampling_strategy: + if isdefined(self.inputs.sampling_strategy) and self.inputs.sampling_strategy: sampling_strategy = self.inputs.sampling_strategy[index] if sampling_strategy: - stage_inputs['sampling_strategy'] = sampling_strategy - if isdefined(self.inputs.sampling_percentage - ) and self.inputs.sampling_percentage: + stage_inputs["sampling_strategy"] = sampling_strategy + if ( + isdefined(self.inputs.sampling_percentage) + and self.inputs.sampling_percentage + ): sampling_percentage = self.inputs.sampling_percentage[index] if sampling_percentage: - stage_inputs['sampling_percentage'] = sampling_percentage + stage_inputs["sampling_percentage"] = sampling_percentage # Make a list of metric specifications, one per -m command line # argument for the current stage. @@ -992,10 +1074,10 @@ def _format_metric(self, index): # from the non-list inputs. 
if isinstance(name_input, list): items = list(stage_inputs.items()) - indexes = list(range(0, len(name_input))) + indexes = list(range(len(name_input))) specs = list() for i in indexes: - temp = dict([(k, v[i]) for k, v in items]) + temp = {k: v[i] for k, v in items} if len(self.inputs.fixed_image) == 1: temp["fixed_image"] = self.inputs.fixed_image[0] else: @@ -1016,16 +1098,18 @@ def _format_metric(self, index): @staticmethod def _format_metric_argument(**kwargs): - retval = '%s[ %s, %s, %g, %d' % (kwargs['metric'], - kwargs['fixed_image'], - kwargs['moving_image'], - kwargs['weight'], - kwargs['radius_or_bins']) + retval = "%s[ %s, %s, %g, %d" % ( + kwargs["metric"], + kwargs["fixed_image"], + kwargs["moving_image"], + kwargs["weight"], + kwargs["radius_or_bins"], + ) # The optional sampling strategy. - if 'sampling_strategy' in kwargs: - sampling_strategy = kwargs['sampling_strategy'] - elif 'sampling_percentage' in kwargs: + if "sampling_strategy" in kwargs: + sampling_strategy = kwargs["sampling_strategy"] + elif "sampling_percentage" in kwargs: # The sampling percentage is specified but not the # sampling strategy. Use the default strategy. sampling_strategy = Registration.DEF_SAMPLING_STRATEGY @@ -1033,44 +1117,48 @@ def _format_metric_argument(**kwargs): sampling_strategy = None # Format the optional sampling arguments. if sampling_strategy: - retval += ', %s' % sampling_strategy - if 'sampling_percentage' in kwargs: - retval += ', %g' % kwargs['sampling_percentage'] + retval += ", %s" % sampling_strategy + if "sampling_percentage" in kwargs: + retval += ", %g" % kwargs["sampling_percentage"] - retval += ' ]' + retval += " ]" return retval def _format_transform(self, index): - retval = [] - retval.append('%s[ ' % self.inputs.transforms[index]) - parameters = ', '.join([ - str(element) for element in self.inputs.transform_parameters[index] - ]) - retval.append('%s' % parameters) - retval.append(' ]') - return "".join(retval) + parameters = ", ".join( + [str(element) for element in self.inputs.transform_parameters[index]] + ) + return f"{self.inputs.transforms[index]}[ {parameters} ]" def _format_registration(self): retval = [] for ii in range(len(self.inputs.transforms)): - retval.append('--transform %s' % (self._format_transform(ii))) - for metric in self._format_metric(ii): - retval.append('--metric %s' % metric) - retval.append('--convergence %s' % self._format_convergence(ii)) + retval.append("--transform %s" % (self._format_transform(ii))) + retval.extend("--metric %s" % metric for metric in self._format_metric(ii)) + retval.append("--convergence %s" % self._format_convergence(ii)) if isdefined(self.inputs.sigma_units): retval.append( - '--smoothing-sigmas %s%s' % - (self._format_xarray(self.inputs.smoothing_sigmas[ii]), - self.inputs.sigma_units[ii])) + "--smoothing-sigmas %s%s" + % ( + self._format_xarray(self.inputs.smoothing_sigmas[ii]), + self.inputs.sigma_units[ii], + ) + ) else: - retval.append('--smoothing-sigmas %s' % self._format_xarray( - self.inputs.smoothing_sigmas[ii])) - retval.append('--shrink-factors %s' % self._format_xarray( - self.inputs.shrink_factors[ii])) + retval.append( + "--smoothing-sigmas %s" + % self._format_xarray(self.inputs.smoothing_sigmas[ii]) + ) + retval.append( + "--shrink-factors %s" + % self._format_xarray(self.inputs.shrink_factors[ii]) + ) if isdefined(self.inputs.use_estimate_learning_rate_once): - retval.append('--use-estimate-learning-rate-once %d' % - self.inputs.use_estimate_learning_rate_once[ii]) + retval.append( + 
"--use-estimate-learning-rate-once %d" + % self.inputs.use_estimate_learning_rate_once[ii] + ) if isdefined(self.inputs.use_histogram_matching): # use_histogram_matching is either a common flag for all transforms # or a list of transform-specific flags @@ -1078,50 +1166,59 @@ def _format_registration(self): histval = self.inputs.use_histogram_matching else: histval = self.inputs.use_histogram_matching[ii] - retval.append('--use-histogram-matching %d' % histval) + retval.append("--use-histogram-matching %d" % histval) if isdefined(self.inputs.restrict_deformation): retval.append( - '--restrict-deformation %s' % self._format_xarray( - self.inputs.restrict_deformation[ii])) - if any((isdefined(self.inputs.fixed_image_masks), - isdefined(self.inputs.moving_image_masks))): + "--restrict-deformation %s" + % self._format_xarray(self.inputs.restrict_deformation[ii]) + ) + if any( + ( + isdefined(self.inputs.fixed_image_masks), + isdefined(self.inputs.moving_image_masks), + ) + ): if isdefined(self.inputs.fixed_image_masks): - fixed_masks = ensure_list( - self.inputs.fixed_image_masks) + fixed_masks = ensure_list(self.inputs.fixed_image_masks) fixed_mask = fixed_masks[ii if len(fixed_masks) > 1 else 0] else: - fixed_mask = 'NULL' + fixed_mask = "NULL" if isdefined(self.inputs.moving_image_masks): - moving_masks = ensure_list( - self.inputs.moving_image_masks) - moving_mask = moving_masks[ii - if len(moving_masks) > 1 else 0] + moving_masks = ensure_list(self.inputs.moving_image_masks) + moving_mask = moving_masks[ii if len(moving_masks) > 1 else 0] else: - moving_mask = 'NULL' - retval.append('--masks [ %s, %s ]' % (fixed_mask, moving_mask)) + moving_mask = "NULL" + retval.append(f"--masks [ {fixed_mask}, {moving_mask} ]") return " ".join(retval) def _get_outputfilenames(self, inverse=False): output_filename = None if not inverse: - if isdefined(self.inputs.output_warped_image) and \ - self.inputs.output_warped_image: + if ( + isdefined(self.inputs.output_warped_image) + and self.inputs.output_warped_image + ): output_filename = self.inputs.output_warped_image if isinstance(output_filename, bool): - output_filename = '%s_Warped.nii.gz' % self.inputs.output_transform_prefix + output_filename = ( + "%s_Warped.nii.gz" % self.inputs.output_transform_prefix + ) return output_filename inv_output_filename = None - if isdefined(self.inputs.output_inverse_warped_image) and \ - self.inputs.output_inverse_warped_image: + if ( + isdefined(self.inputs.output_inverse_warped_image) + and self.inputs.output_inverse_warped_image + ): inv_output_filename = self.inputs.output_inverse_warped_image if isinstance(inv_output_filename, bool): - inv_output_filename = '%s_InverseWarped.nii.gz' % self.inputs.output_transform_prefix + inv_output_filename = ( + "%s_InverseWarped.nii.gz" % self.inputs.output_transform_prefix + ) return inv_output_filename def _format_convergence(self, ii): - convergence_iter = self._format_xarray( - self.inputs.number_of_iterations[ii]) + convergence_iter = self._format_xarray(self.inputs.number_of_iterations[ii]) if len(self.inputs.convergence_threshold) > ii: convergence_value = self.inputs.convergence_threshold[ii] else: @@ -1130,19 +1227,25 @@ def _format_convergence(self, ii): convergence_ws = self.inputs.convergence_window_size[ii] else: convergence_ws = self.inputs.convergence_window_size[0] - return '[ %s, %g, %d ]' % (convergence_iter, convergence_value, - convergence_ws) + return "[ %s, %g, %d ]" % (convergence_iter, convergence_value, convergence_ws) def 
_format_winsorize_image_intensities(self): - if not self.inputs.winsorize_upper_quantile > self.inputs.winsorize_lower_quantile: + if ( + not self.inputs.winsorize_upper_quantile + > self.inputs.winsorize_lower_quantile + ): raise RuntimeError( - "Upper bound MUST be more than lower bound: %g > %g" % - (self.inputs.winsorize_upper_quantile, - self.inputs.winsorize_lower_quantile)) + "Upper bound MUST be more than lower bound: %g > %g" + % ( + self.inputs.winsorize_upper_quantile, + self.inputs.winsorize_lower_quantile, + ) + ) self._quantilesDone = True - return '--winsorize-image-intensities [ %s, %s ]' % ( + return "--winsorize-image-intensities [ {}, {} ]".format( self.inputs.winsorize_lower_quantile, - self.inputs.winsorize_upper_quantile) + self.inputs.winsorize_upper_quantile, + ) def _get_initial_transform_filenames(self): n_transforms = len(self.inputs.initial_moving_transform) @@ -1153,75 +1256,92 @@ def _get_initial_transform_filenames(self): if len(self.inputs.invert_initial_moving_transform) != n_transforms: raise Exception( 'Inputs "initial_moving_transform" and "invert_initial_moving_transform"' - 'should have the same length.') + " should have the same length." + ) invert_flags = self.inputs.invert_initial_moving_transform retval = [ - "[ %s, %d ]" % (xfm, int(flag)) for xfm, flag in zip( - self.inputs.initial_moving_transform, invert_flags) + "[ %s, %d ]" % (xfm, int(flag)) + for xfm, flag in zip(self.inputs.initial_moving_transform, invert_flags) ] - return " ".join(['--initial-moving-transform'] + retval) + return " ".join(["--initial-moving-transform"] + retval) def _format_arg(self, opt, spec, val): - if opt == 'fixed_image_mask': + if opt == "fixed_image_mask": if isdefined(self.inputs.moving_image_mask): - return '--masks [ %s, %s ]' % (self.inputs.fixed_image_mask, - self.inputs.moving_image_mask) + return "--masks [ {}, {} ]".format( + self.inputs.fixed_image_mask, + self.inputs.moving_image_mask, + ) else: - return '--masks %s' % self.inputs.fixed_image_mask - elif opt == 'transforms': + return "--masks %s" % self.inputs.fixed_image_mask + elif opt == "transforms": return self._format_registration() - elif opt == 'initial_moving_transform': + elif opt == "initial_moving_transform": return self._get_initial_transform_filenames() - elif opt == 'initial_moving_transform_com': - do_center_of_mass_init = self.inputs.initial_moving_transform_com \ - if isdefined(self.inputs.initial_moving_transform_com) else 0 # Just do the default behavior - return '--initial-moving-transform [ %s, %s, %d ]' % ( - self.inputs.fixed_image[0], self.inputs.moving_image[0], - do_center_of_mass_init) - elif opt == 'interpolation': - if self.inputs.interpolation in ['BSpline', 'MultiLabel', 'Gaussian'] and \ - isdefined(self.inputs.interpolation_parameters): - return '--interpolation %s[ %s ]' % ( - self.inputs.interpolation, ', '.join([ - str(param) - for param in self.inputs.interpolation_parameters - ])) + elif opt == "initial_moving_transform_com": + do_center_of_mass_init = ( + self.inputs.initial_moving_transform_com + if isdefined(self.inputs.initial_moving_transform_com) + else 0 + ) # Just do the default behavior + return "--initial-moving-transform [ %s, %s, %d ]" % ( + self.inputs.fixed_image[0], + self.inputs.moving_image[0], + do_center_of_mass_init, + ) + elif opt == "interpolation": + if self.inputs.interpolation in [ + "BSpline", + "MultiLabel", + "Gaussian", + "GenericLabel", + ] and isdefined(self.inputs.interpolation_parameters): + return "--interpolation {}[ {} ]".format( 
self.inputs.interpolation, + ", ".join( + [str(param) for param in self.inputs.interpolation_parameters] + ), + ) else: - return '--interpolation %s' % self.inputs.interpolation - elif opt == 'output_transform_prefix': + return "--interpolation %s" % self.inputs.interpolation + elif opt == "output_transform_prefix": out_filename = self._get_outputfilenames(inverse=False) inv_out_filename = self._get_outputfilenames(inverse=True) if out_filename and inv_out_filename: - return '--output [ %s, %s, %s ]' % ( - self.inputs.output_transform_prefix, out_filename, - inv_out_filename) + return "--output [ {}, {}, {} ]".format( + self.inputs.output_transform_prefix, + out_filename, + inv_out_filename, + ) elif out_filename: - return '--output [ %s, %s ]' % ( - self.inputs.output_transform_prefix, out_filename) + return "--output [ {}, {} ]".format( + self.inputs.output_transform_prefix, + out_filename, + ) else: - return '--output %s' % self.inputs.output_transform_prefix - elif opt == 'winsorize_upper_quantile' or opt == 'winsorize_lower_quantile': + return "--output %s" % self.inputs.output_transform_prefix + elif opt == "winsorize_upper_quantile" or opt == "winsorize_lower_quantile": if not self._quantilesDone: return self._format_winsorize_image_intensities() else: self._quantilesDone = False - return '' # Must return something for argstr! + return "" # Must return something for argstr! # This feature was removed from recent versions of antsRegistration due to corrupt outputs. # elif opt == 'collapse_linear_transforms_to_fixed_image_header': # return self._formatCollapseLinearTransformsToFixedImageHeader() - return super(Registration, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _output_filenames(self, prefix, count, transform, inverse=False): self.low_dimensional_transform_map = { - 'Rigid': 'Rigid.mat', - 'Affine': 'Affine.mat', - 'GenericAffine': 'GenericAffine.mat', - 'CompositeAffine': 'Affine.mat', - 'Similarity': 'Similarity.mat', - 'Translation': 'Translation.mat', - 'BSpline': 'BSpline.txt', - 'Initial': 'DerivedInitialMovingTranslation.mat' + "Rigid": "Rigid.mat", + "Affine": "Affine.mat", + "GenericAffine": "GenericAffine.mat", + "CompositeAffine": "Affine.mat", + "Similarity": "Similarity.mat", + "Translation": "Translation.mat", + "BSpline": "BSpline.txt", + "Initial": "DerivedInitialMovingTranslation.mat", } if transform in list(self.low_dimensional_transform_map.keys()): suffix = self.low_dimensional_transform_map[transform] @@ -1229,126 +1349,150 @@ def _output_filenames(self, prefix, count, transform, inverse=False): else: inverse_mode = False # These are not analytically invertable if inverse: - suffix = 'InverseWarp.nii.gz' + suffix = "InverseWarp.nii.gz" else: - suffix = 'Warp.nii.gz' - return '%s%d%s' % (prefix, count, suffix), inverse_mode + suffix = "Warp.nii.gz" + return "%s%d%s" % (prefix, count, suffix), inverse_mode def _list_outputs(self): outputs = self._outputs().get() - outputs['forward_transforms'] = [] - outputs['forward_invert_flags'] = [] - outputs['reverse_transforms'] = [] - outputs['reverse_invert_flags'] = [] + outputs["forward_transforms"] = [] + outputs["forward_invert_flags"] = [] + outputs["reverse_transforms"] = [] + outputs["reverse_invert_flags"] = [] # invert_initial_moving_transform should be always defined, even if # there's no initial transform invert_initial_moving_transform = [False] * len( - self.inputs.initial_moving_transform) + self.inputs.initial_moving_transform + ) if 
isdefined(self.inputs.invert_initial_moving_transform): - invert_initial_moving_transform = self.inputs.invert_initial_moving_transform + invert_initial_moving_transform = ( + self.inputs.invert_initial_moving_transform + ) if self.inputs.write_composite_transform: - filename = self.inputs.output_transform_prefix + 'Composite.h5' - outputs['composite_transform'] = os.path.abspath(filename) - filename = self.inputs.output_transform_prefix + \ - 'InverseComposite.h5' - outputs['inverse_composite_transform'] = os.path.abspath(filename) + filename = self.inputs.output_transform_prefix + "Composite.h5" + outputs["composite_transform"] = os.path.abspath(filename) + filename = self.inputs.output_transform_prefix + "InverseComposite.h5" + outputs["inverse_composite_transform"] = os.path.abspath(filename) # If composite transforms are written, then individuals are not written (as of 2014-10-26 else: if not self.inputs.collapse_output_transforms: transform_count = 0 if isdefined(self.inputs.initial_moving_transform): outputs[ - 'forward_transforms'] += self.inputs.initial_moving_transform - outputs[ - 'forward_invert_flags'] += invert_initial_moving_transform - outputs['reverse_transforms'] = self.inputs.initial_moving_transform + \ - outputs['reverse_transforms'] - outputs['reverse_invert_flags'] = [ + "forward_transforms" + ] += self.inputs.initial_moving_transform + outputs["forward_invert_flags"] += invert_initial_moving_transform + outputs["reverse_transforms"] = ( + self.inputs.initial_moving_transform + + outputs["reverse_transforms"] + ) + outputs["reverse_invert_flags"] = [ not e for e in invert_initial_moving_transform - ] + outputs['reverse_invert_flags'] # Prepend - transform_count += len( - self.inputs.initial_moving_transform) + ] + outputs[ + "reverse_invert_flags" + ] # Prepend + transform_count += len(self.inputs.initial_moving_transform) elif isdefined(self.inputs.initial_moving_transform_com): forward_filename, forward_inversemode = self._output_filenames( - self.inputs.output_transform_prefix, transform_count, - 'Initial') + self.inputs.output_transform_prefix, transform_count, "Initial" + ) reverse_filename, reverse_inversemode = self._output_filenames( - self.inputs.output_transform_prefix, transform_count, - 'Initial', True) - outputs['forward_transforms'].append( - os.path.abspath(forward_filename)) - outputs['forward_invert_flags'].append(False) - outputs['reverse_transforms'].insert( - 0, os.path.abspath(reverse_filename)) - outputs['reverse_invert_flags'].insert(0, True) + self.inputs.output_transform_prefix, + transform_count, + "Initial", + True, + ) + outputs["forward_transforms"].append( + os.path.abspath(forward_filename) + ) + outputs["forward_invert_flags"].append(False) + outputs["reverse_transforms"].insert( + 0, os.path.abspath(reverse_filename) + ) + outputs["reverse_invert_flags"].insert(0, True) transform_count += 1 for count in range(len(self.inputs.transforms)): forward_filename, forward_inversemode = self._output_filenames( - self.inputs.output_transform_prefix, transform_count, - self.inputs.transforms[count]) + self.inputs.output_transform_prefix, + transform_count, + self.inputs.transforms[count], + ) reverse_filename, reverse_inversemode = self._output_filenames( - self.inputs.output_transform_prefix, transform_count, - self.inputs.transforms[count], True) - outputs['forward_transforms'].append( - os.path.abspath(forward_filename)) - outputs['forward_invert_flags'].append(forward_inversemode) - outputs['reverse_transforms'].insert( - 0, 
os.path.abspath(reverse_filename)) - outputs['reverse_invert_flags'].insert( - 0, reverse_inversemode) + self.inputs.output_transform_prefix, + transform_count, + self.inputs.transforms[count], + True, + ) + outputs["forward_transforms"].append( + os.path.abspath(forward_filename) + ) + outputs["forward_invert_flags"].append(forward_inversemode) + outputs["reverse_transforms"].insert( + 0, os.path.abspath(reverse_filename) + ) + outputs["reverse_invert_flags"].insert(0, reverse_inversemode) transform_count += 1 else: transform_count = 0 is_linear = [ - t in self._linear_transform_names - for t in self.inputs.transforms + t in self._linear_transform_names for t in self.inputs.transforms ] collapse_list = [] - if isdefined(self.inputs.initial_moving_transform) or \ - isdefined(self.inputs.initial_moving_transform_com): + if isdefined(self.inputs.initial_moving_transform) or isdefined( + self.inputs.initial_moving_transform_com + ): is_linear.insert(0, True) # Only files returned by collapse_output_transforms if any(is_linear): - collapse_list.append('GenericAffine') + collapse_list.append("GenericAffine") if not all(is_linear): - collapse_list.append('SyN') + collapse_list.append("SyN") for transform in collapse_list: forward_filename, forward_inversemode = self._output_filenames( self.inputs.output_transform_prefix, transform_count, transform, - inverse=False) + inverse=False, + ) reverse_filename, reverse_inversemode = self._output_filenames( self.inputs.output_transform_prefix, transform_count, transform, - inverse=True) - outputs['forward_transforms'].append( - os.path.abspath(forward_filename)) - outputs['forward_invert_flags'].append(forward_inversemode) - outputs['reverse_transforms'].append( - os.path.abspath(reverse_filename)) - outputs['reverse_invert_flags'].append(reverse_inversemode) + inverse=True, + ) + outputs["forward_transforms"].append( + os.path.abspath(forward_filename) + ) + outputs["forward_invert_flags"].append(forward_inversemode) + outputs["reverse_transforms"].append( + os.path.abspath(reverse_filename) + ) + outputs["reverse_invert_flags"].append(reverse_inversemode) transform_count += 1 out_filename = self._get_outputfilenames(inverse=False) inv_out_filename = self._get_outputfilenames(inverse=True) if out_filename: - outputs['warped_image'] = os.path.abspath(out_filename) + outputs["warped_image"] = os.path.abspath(out_filename) if inv_out_filename: - outputs['inverse_warped_image'] = os.path.abspath(inv_out_filename) + outputs["inverse_warped_image"] = os.path.abspath(inv_out_filename) if len(self.inputs.save_state): - outputs['save_state'] = os.path.abspath(self.inputs.save_state) + outputs["save_state"] = os.path.abspath(self.inputs.save_state) if self._metric_value: - outputs['metric_value'] = self._metric_value + outputs["metric_value"] = self._metric_value if self._elapsed_time: - outputs['elapsed_time'] = self._elapsed_time + outputs["elapsed_time"] = self._elapsed_time + + outputs["reverse_forward_transforms"] = outputs["forward_transforms"][::-1] + outputs["reverse_forward_invert_flags"] = outputs["forward_invert_flags"][::-1] + return outputs @@ -1357,67 +1501,58 @@ class MeasureImageSimilarityInputSpec(ANTSCommandInputSpec): 2, 3, 4, - argstr='--dimensionality %d', + argstr="--dimensionality %d", position=1, - desc='Dimensionality of the fixed/moving image pair', + desc="Dimensionality of the fixed/moving image pair", ) fixed_image = File( - exists=True, - mandatory=True, - desc='Image to which the moving image is warped', + exists=True, 
mandatory=True, desc="Image to which the moving image is warped" ) moving_image = File( exists=True, mandatory=True, - desc= - 'Image to apply transformation to (generally a coregistered functional)', + desc="Image to apply transformation to (generally a coregistered functional)", ) metric = traits.Enum( - "CC", - "MI", - "Mattes", - "MeanSquares", - "Demons", - "GC", - argstr="%s", - mandatory=True, + "CC", "MI", "Mattes", "MeanSquares", "Demons", "GC", argstr="%s", mandatory=True ) metric_weight = traits.Float( - requires=['metric'], + requires=["metric"], default_value=1.0, usedefault=True, desc='The "metricWeight" variable is not used.', ) radius_or_number_of_bins = traits.Int( - requires=['metric'], + requires=["metric"], mandatory=True, - desc='The number of bins in each stage for the MI and Mattes metric, ' - 'or the radius for other metrics', + desc="The number of bins in each stage for the MI and Mattes metric, " + "or the radius for other metrics", ) sampling_strategy = traits.Enum( "None", "Regular", "Random", - requires=['metric'], + requires=["metric"], usedefault=True, - desc='Manner of choosing point set over which to optimize the metric. ' - 'Defaults to "None" (i.e. a dense sampling of one sample per voxel).') + desc="Manner of choosing point set over which to optimize the metric. " + 'Defaults to "None" (i.e. a dense sampling of one sample per voxel).', + ) sampling_percentage = traits.Either( traits.Range(low=0.0, high=1.0), - requires=['metric'], + requires=["metric"], mandatory=True, - desc= - 'Percentage of points accessible to the sampling strategy over which ' - 'to optimize the metric.') + desc="Percentage of points accessible to the sampling strategy over which " + "to optimize the metric.", + ) fixed_image_mask = File( exists=True, - argstr='%s', - desc='mask used to limit metric sampling region of the fixed image', + argstr="%s", + desc="mask used to limit metric sampling region of the fixed image", ) moving_image_mask = File( exists=True, - requires=['fixed_image_mask'], - desc='mask used to limit metric sampling region of the moving image', + requires=["fixed_image_mask"], + desc="mask used to limit metric sampling region of the moving image", ) @@ -1448,14 +1583,15 @@ class MeasureImageSimilarity(ANTSCommand): 'MeasureImageSimilarity --dimensionality 3 --masks ["mask.nii","mask.nii.gz"] \ --metric MI["T1.nii","resting.nii",1.0,5,Regular,1.0]' """ - _cmd = 'MeasureImageSimilarity' + + _cmd = "MeasureImageSimilarity" input_spec = MeasureImageSimilarityInputSpec output_spec = MeasureImageSimilarityOutputSpec def _metric_constructor(self): - retval = '--metric {metric}["{fixed_image}","{moving_image}",{metric_weight},'\ - '{radius_or_number_of_bins},{sampling_strategy},{sampling_percentage}]'\ - .format( + retval = ( + '--metric {metric}["{fixed_image}","{moving_image}",{metric_weight},' + "{radius_or_number_of_bins},{sampling_strategy},{sampling_percentage}]".format( metric=self.inputs.metric, fixed_image=self.inputs.fixed_image, moving_image=self.inputs.moving_image, @@ -1464,78 +1600,125 @@ def _metric_constructor(self): sampling_strategy=self.inputs.sampling_strategy, sampling_percentage=self.inputs.sampling_percentage, ) + ) return retval def _mask_constructor(self): if self.inputs.moving_image_mask: - retval = '--masks ["{fixed_image_mask}","{moving_image_mask}"]'\ - .format( - fixed_image_mask=self.inputs.fixed_image_mask, - moving_image_mask=self.inputs.moving_image_mask, - ) + retval = '--masks ["{fixed_image_mask}","{moving_image_mask}"]'.format( + 
fixed_image_mask=self.inputs.fixed_image_mask, + moving_image_mask=self.inputs.moving_image_mask, + ) else: - retval = '--masks "{fixed_image_mask}"'\ - .format( - fixed_image_mask=self.inputs.fixed_image_mask, - ) + retval = '--masks "{fixed_image_mask}"'.format( + fixed_image_mask=self.inputs.fixed_image_mask + ) return retval def _format_arg(self, opt, spec, val): - if opt == 'metric': + if opt == "metric": return self._metric_constructor() - elif opt == 'fixed_image_mask': + elif opt == "fixed_image_mask": return self._mask_constructor() - return super(MeasureImageSimilarity, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() - stdout = runtime.stdout.split('\n') + stdout = runtime.stdout.split("\n") outputs.similarity = float(stdout[0]) return outputs class RegistrationSynQuickInputSpec(ANTSCommandInputSpec): - dimension = traits.Enum(3, 2, argstr='-d %d', - usedefault=True, desc='image dimension (2 or 3)') - fixed_image = InputMultiPath(File(exists=True), mandatory=True, argstr='-f %s...', - desc='Fixed image or source image or reference image') - moving_image = InputMultiPath(File(exists=True), mandatory=True, argstr='-m %s...', - desc='Moving image or target image') - output_prefix = Str("transform", usedefault=True, argstr='-o %s', - desc="A prefix that is prepended to all output files") - num_threads = traits.Int(default_value=LOCAL_DEFAULT_NUMBER_OF_THREADS, usedefault=True, - desc='Number of threads (default = 1)', argstr='-n %d') - - transform_type = traits.Enum('s', 't', 'r', 'a', 'sr', 'b', 'br', argstr='-t %s', - desc=""" - transform type - t: translation - r: rigid - a: rigid + affine - s: rigid + affine + deformable syn (default) - sr: rigid + deformable syn - b: rigid + affine + deformable b-spline syn - br: rigid + deformable b-spline syn""", - usedefault=True) - - use_histogram_matching = traits.Bool(False, argstr='-j %d', - desc='use histogram matching') - histogram_bins = traits.Int(default_value=32, usedefault=True, argstr='-r %d', - desc='histogram bins for mutual information in SyN stage \ - (default = 32)') - spline_distance = traits.Int(default_value=26, usedefault=True, argstr='-s %d', - desc='spline distance for deformable B-spline SyN transform \ - (default = 26)') - precision_type = traits.Enum('double', 'float', argstr='-p %s', - desc='precision type (default = double)', usedefault=True) + dimension = traits.Enum( + 3, 2, argstr="-d %d", usedefault=True, desc="image dimension (2 or 3)" + ) + fixed_image = InputMultiPath( + File(exists=True), + mandatory=True, + argstr="-f %s...", + desc="Fixed image or source image or reference image", + ) + moving_image = InputMultiPath( + File(exists=True), + mandatory=True, + argstr="-m %s...", + desc="Moving image or target image", + ) + output_prefix = Str( + "transform", + usedefault=True, + argstr="-o %s", + desc="A prefix that is prepended to all output files", + ) + num_threads = traits.Int( + default_value=LOCAL_DEFAULT_NUMBER_OF_THREADS, + usedefault=True, + desc="Number of threads (default = 1)", + argstr="-n %d", + ) + + transform_type = traits.Enum( + "s", + "t", + "r", + "a", + "sr", + "b", + "br", + argstr="-t %s", + desc="""\ +Transform type + + * t: translation + * r: rigid + * a: rigid + affine + * s: rigid + affine + deformable syn (default) + * sr: rigid + deformable syn + * b: rigid + affine + deformable b-spline syn + * br: rigid + deformable b-spline syn + +""", + usedefault=True, + ) 
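The transform_type letters above pass straight through to the -t flag of
antsRegistrationSyNQuick.sh, and the new random_seed input maps to -e. A
minimal sketch of driving the interface (file names are placeholders; the
inputs must exist on disk for trait validation, and the seed needs ANTs >= 2.3.0):

    from nipype.interfaces.ants import RegistrationSynQuick

    reg = RegistrationSynQuick()
    reg.inputs.fixed_image = "T1.nii"    # placeholder; File(exists=True) is enforced
    reg.inputs.moving_image = "epi.nii"  # placeholder
    reg.inputs.transform_type = "br"     # rigid + deformable b-spline syn
    reg.inputs.spline_distance = 40      # only consulted by the b/br transforms
    reg.inputs.random_seed = 13          # new -e flag, ANTs >= 2.3.0
    print(reg.cmdline)                   # 'antsRegistrationSyNQuick.sh ... -e 13 ... -t br'
    # reg.run()                          # needs antsRegistrationSyNQuick.sh on PATH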
+ + use_histogram_matching = traits.Bool( + False, argstr="-j %d", desc="use histogram matching" + ) + histogram_bins = traits.Int( + default_value=32, + usedefault=True, + argstr="-r %d", + desc="histogram bins for mutual information in SyN stage \ + (default = 32)", + ) + spline_distance = traits.Int( + default_value=26, + usedefault=True, + argstr="-s %d", + desc="spline distance for deformable B-spline SyN transform \ + (default = 26)", + ) + precision_type = traits.Enum( + "double", + "float", + argstr="-p %s", + desc="precision type (default = double)", + usedefault=True, + ) + random_seed = traits.Int( + argstr="-e %d", + desc="fixed random seed", + min_ver="2.3.0", + ) class RegistrationSynQuickOutputSpec(TraitedSpec): warped_image = File(exists=True, desc="Warped image") inverse_warped_image = File(exists=True, desc="Inverse warped image") - out_matrix = File(exists=True, desc='Affine matrix') - forward_warp_field = File(exists=True, desc='Forward warp field') - inverse_warp_field = File(exists=True, desc='Inverse warp field') + out_matrix = File(exists=True, desc="Affine matrix") + forward_warp_field = File(exists=True, desc="Forward warp field") + inverse_warp_field = File(exists=True, desc="Inverse warp field") class RegistrationSynQuick(ANTSCommand): @@ -1569,7 +1752,7 @@ class RegistrationSynQuick(ANTSCommand): >>> reg.run() # doctest: +SKIP """ - _cmd = 'antsRegistrationSyNQuick.sh' + _cmd = "antsRegistrationSyNQuick.sh" input_spec = RegistrationSynQuickInputSpec output_spec = RegistrationSynQuickOutputSpec @@ -1581,39 +1764,60 @@ def _num_threads_update(self): pass def _format_arg(self, name, spec, value): - if name == 'precision_type': + if name == "precision_type": return spec.argstr % value[0] - return super(RegistrationSynQuick, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() out_base = os.path.abspath(self.inputs.output_prefix) - outputs['warped_image'] = out_base + 'Warped.nii.gz' - outputs['inverse_warped_image'] = out_base + 'InverseWarped.nii.gz' - outputs['out_matrix'] = out_base + '0GenericAffine.mat' + outputs["warped_image"] = out_base + "Warped.nii.gz" + outputs["inverse_warped_image"] = out_base + "InverseWarped.nii.gz" + outputs["out_matrix"] = out_base + "0GenericAffine.mat" - if self.inputs.transform_type not in ('t', 'r', 'a'): - outputs['forward_warp_field'] = out_base + '1Warp.nii.gz' - outputs['inverse_warp_field'] = out_base + '1InverseWarp.nii.gz' + if self.inputs.transform_type not in ("t", "r", "a"): + outputs["forward_warp_field"] = out_base + "1Warp.nii.gz" + outputs["inverse_warp_field"] = out_base + "1InverseWarp.nii.gz" return outputs + class CompositeTransformUtilInputSpec(ANTSCommandInputSpec): - process = traits.Enum('assemble', 'disassemble', argstr='--%s', - position=1, usedefault=True, - desc='What to do with the transform inputs (assemble or disassemble)', - ) - out_file = File(exists=False, argstr='%s', position=2, - desc='Output file path (only used for disassembly).') - in_file = InputMultiPath(File(exists=True), mandatory=True, argstr='%s...', - position=3, desc='Input transform file(s)') - output_prefix = Str("transform", usedefault=True, argstr='%s', position=4, - desc="A prefix that is prepended to all output files (only used for assembly).") + process = traits.Enum( + "assemble", + "disassemble", + argstr="--%s", + position=1, + usedefault=True, + desc="What to do with the transform inputs (assemble or disassemble)", + ) + out_file = 
File( + exists=False, + argstr="%s", + position=2, + desc="Output file path (only used for disassembly).", + ) + in_file = InputMultiPath( + File(exists=True), + mandatory=True, + argstr="%s...", + position=3, + desc="Input transform file(s)", + ) + output_prefix = Str( + "transform", + usedefault=True, + argstr="%s", + position=4, + desc="A prefix that is prepended to all output files (only used for assembly).", + ) + class CompositeTransformUtilOutputSpec(TraitedSpec): affine_transform = File(desc="Affine transform component") displacement_field = File(desc="Displacement field component") out_file = File(desc="Compound transformation file") + class CompositeTransformUtil(ANTSCommand): """ ANTs utility which can combine or break apart transform files into their individual @@ -1642,7 +1846,7 @@ class CompositeTransformUtil(ANTSCommand): >>> tran.run() # doctest: +SKIP """ - _cmd = 'CompositeTransformUtil' + _cmd = "CompositeTransformUtil" input_spec = CompositeTransformUtilInputSpec output_spec = CompositeTransformUtilOutputSpec @@ -1654,19 +1858,23 @@ def _num_threads_update(self): pass def _format_arg(self, name, spec, value): - if name == 'output_prefix' and self.inputs.process == 'assemble': - return '' - if name == 'out_file' and self.inputs.process == 'disassemble': - return '' - return super(CompositeTransformUtil, self)._format_arg(name, spec, value) + if name == "output_prefix" and self.inputs.process == "assemble": + return "" + if name == "out_file" and self.inputs.process == "disassemble": + return "" + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - if self.inputs.process == 'disassemble': - outputs['affine_transform'] = os.path.abspath( - '00_{}_AffineTransform.mat'.format(self.inputs.output_prefix)) - outputs['displacement_field'] = os.path.abspath( - '01_{}_DisplacementFieldTransform.nii.gz'.format(self.inputs.output_prefix)) - if self.inputs.process == 'assemble': - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + if self.inputs.process == "disassemble": + outputs["affine_transform"] = os.path.abspath( + f"00_{self.inputs.output_prefix}_AffineTransform.mat" + ) + outputs["displacement_field"] = os.path.abspath( + "01_{}_DisplacementFieldTransform.nii.gz".format( + self.inputs.output_prefix + ) + ) + if self.inputs.process == "assemble": + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs diff --git a/nipype/interfaces/ants/resampling.py b/nipype/interfaces/ants/resampling.py index e26a48ed6a..95f29d5982 100644 --- a/nipype/interfaces/ants/resampling.py +++ b/nipype/interfaces/ants/resampling.py @@ -1,74 +1,77 @@ -# -*- coding: utf-8 -*- """ANTS Apply Transforms interface """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, str + import os from .base import ANTSCommand, ANTSCommandInputSpec -from ..base import TraitedSpec, File, traits, isdefined, InputMultiPath +from ..base import TraitedSpec, File, traits, Tuple, isdefined, InputMultiObject from ...utils.filemanip import split_filename class WarpTimeSeriesImageMultiTransformInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 4, - 3, - argstr='%d', - usedefault=True, - desc='image dimension (3 or 4)', - position=1) + 4, 3, argstr="%d", usedefault=True, desc="image dimension (3 or 4)", position=1 + ) input_image = File( - argstr='%s', + argstr="%s", mandatory=True, copyfile=True, - desc=('image to apply transformation to (generally a ' - 'coregistered 
functional)')) + desc=("image to apply transformation to (generally a coregistered functional)"), + ) out_postfix = traits.Str( - '_wtsimt', - argstr='%s', + "_wtsimt", + argstr="%s", usedefault=True, - desc=('Postfix that is prepended to all output ' - 'files (default = _wtsimt)')) + desc=("Postfix that is prepended to all output files (default = _wtsimt)"), + ) reference_image = File( - argstr='-R %s', - xor=['tightest_box'], - desc='reference image space that you wish to warp INTO') + argstr="-R %s", + xor=["tightest_box"], + desc="reference image space that you wish to warp INTO", + ) tightest_box = traits.Bool( - argstr='--tightest-bounding-box', - desc=('computes tightest bounding box (overrided by ' - 'reference_image if given)'), - xor=['reference_image']) + argstr="--tightest-bounding-box", + desc=( + "computes tightest bounding box (overridden by reference_image if given)" + ), + xor=["reference_image"], + ) reslice_by_header = traits.Bool( - argstr='--reslice-by-header', - desc=('Uses orientation matrix and origin encoded in ' - 'reference image file header. Not typically used ' - 'with additional transforms')) + argstr="--reslice-by-header", + desc=( + "Uses orientation matrix and origin encoded in " + "reference image file header. Not typically used " + "with additional transforms" + ), + ) use_nearest = traits.Bool( - argstr='--use-NN', desc='Use nearest neighbor interpolation') + argstr="--use-NN", desc="Use nearest neighbor interpolation" + ) use_bspline = traits.Bool( - argstr='--use-Bspline', desc='Use 3rd order B-Spline interpolation') - transformation_series = InputMultiPath( + argstr="--use-Bspline", desc="Use 3rd order B-Spline interpolation" + ) + transformation_series = InputMultiObject( File(exists=True), - argstr='%s', - desc='transformation file(s) to be applied', + argstr="%s", + desc="transformation file(s) to be applied", mandatory=True, - copyfile=False) + copyfile=False, + ) invert_affine = traits.List( traits.Int, desc=( - 'List of Affine transformations to invert.' - 'E.g.: [1,4,5] inverts the 1st, 4th, and 5th Affines ' - 'found in transformation_series. Note that indexing ' - 'starts with 1 and does not include warp fields. Affine ' - 'transformations are distinguished ' + "List of Affine transformations to invert." + "E.g.: [1,4,5] inverts the 1st, 4th, and 5th Affines " + "found in transformation_series. Note that indexing " + "starts with 1 and does not include warp fields. Affine " + "transformations are distinguished " 'from warp fields by the word "affine" included in their filenames.' 
- )) + ), + ) class WarpTimeSeriesImageMultiTransformOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='Warped image') + output_image = File(exists=True, desc="Warped image") class WarpTimeSeriesImageMultiTransform(ANTSCommand): @@ -96,50 +99,47 @@ class WarpTimeSeriesImageMultiTransform(ANTSCommand): -i ants_Affine.txt' """ - _cmd = 'WarpTimeSeriesImageMultiTransform' + _cmd = "WarpTimeSeriesImageMultiTransform" input_spec = WarpTimeSeriesImageMultiTransformInputSpec output_spec = WarpTimeSeriesImageMultiTransformOutputSpec def _format_arg(self, opt, spec, val): - if opt == 'out_postfix': - _, name, ext = split_filename( - os.path.abspath(self.inputs.input_image)) + if opt == "out_postfix": + _, name, ext = split_filename(os.path.abspath(self.inputs.input_image)) return name + val + ext - if opt == 'transformation_series': + if opt == "transformation_series": series = [] affine_counter = 0 affine_invert = [] for transformation in val: - if 'Affine' in transformation and \ - isdefined(self.inputs.invert_affine): + if "Affine" in transformation and isdefined(self.inputs.invert_affine): affine_counter += 1 if affine_counter in self.inputs.invert_affine: - series += ['-i'] + series += ["-i"] affine_invert.append(affine_counter) series += [transformation] if isdefined(self.inputs.invert_affine): diff_inv = set(self.inputs.invert_affine) - set(affine_invert) if diff_inv: - raise Exceptions( + raise Exception( "Review invert_affine, not all indexes from invert_affine were used, " - "check the description for the full definition") + "check the description for the full definition" + ) - return ' '.join(series) - return super(WarpTimeSeriesImageMultiTransform, self)._format_arg( - opt, spec, val) + return " ".join(series) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() _, name, ext = split_filename(os.path.abspath(self.inputs.input_image)) - outputs['output_image'] = os.path.join(os.getcwd(), ''.join( - (name, self.inputs.out_postfix, ext))) + outputs["output_image"] = os.path.join( + os.getcwd(), f"{name}{self.inputs.out_postfix}{ext}" + ) return outputs def _run_interface(self, runtime, correct_return_codes=[0]): - runtime = super(WarpTimeSeriesImageMultiTransform, - self)._run_interface( - runtime, correct_return_codes=[0, 1]) + runtime = super()._run_interface(runtime, correct_return_codes=[0, 1]) if "100 % complete" not in runtime.stdout: self.raise_exception(runtime) return runtime @@ -147,70 +147,77 @@ def _run_interface(self, runtime, correct_return_codes=[0]): class WarpImageMultiTransformInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - usedefault=True, - desc='image dimension (2 or 3)', - position=1) + 3, 2, argstr="%d", usedefault=True, desc="image dimension (2 or 3)", position=1 + ) input_image = File( - argstr='%s', + argstr="%s", mandatory=True, - desc=('image to apply transformation to (generally a ' - 'coregistered functional)'), - position=2) + desc=("image to apply transformation to (generally a coregistered functional)"), + position=2, + ) output_image = File( genfile=True, hash_files=False, - argstr='%s', - desc='name of the output warped image', + argstr="%s", + desc="name of the output warped image", position=3, - xor=['out_postfix']) + xor=["out_postfix"], + ) out_postfix = File( "_wimt", usedefault=True, hash_files=False, - desc=('Postfix that is prepended to all output ' - 'files (default = _wimt)'), - xor=['output_image']) + desc=("Postfix that is prepended to all 
output files (default = _wimt)"), + xor=["output_image"], + ) reference_image = File( - argstr='-R %s', - xor=['tightest_box'], - desc='reference image space that you wish to warp INTO') + argstr="-R %s", + xor=["tightest_box"], + desc="reference image space that you wish to warp INTO", + ) tightest_box = traits.Bool( - argstr='--tightest-bounding-box', - desc=('computes tightest bounding box (overrided by ' - 'reference_image if given)'), - xor=['reference_image']) + argstr="--tightest-bounding-box", + desc=( + "computes tightest bounding box (overridden by reference_image if given)" + ), + xor=["reference_image"], + ) reslice_by_header = traits.Bool( - argstr='--reslice-by-header', - desc=('Uses orientation matrix and origin encoded in ' - 'reference image file header. Not typically used ' - 'with additional transforms')) + argstr="--reslice-by-header", + desc=( + "Uses orientation matrix and origin encoded in " + "reference image file header. Not typically used " + "with additional transforms" + ), + ) use_nearest = traits.Bool( - argstr='--use-NN', desc='Use nearest neighbor interpolation') + argstr="--use-NN", desc="Use nearest neighbor interpolation" + ) use_bspline = traits.Bool( - argstr='--use-BSpline', desc='Use 3rd order B-Spline interpolation') - transformation_series = InputMultiPath( + argstr="--use-BSpline", desc="Use 3rd order B-Spline interpolation" + ) + transformation_series = InputMultiObject( File(exists=True), - argstr='%s', - desc='transformation file(s) to be applied', + argstr="%s", + desc="transformation file(s) to be applied", mandatory=True, - position=-1) + position=-1, + ) invert_affine = traits.List( traits.Int, desc=( - 'List of Affine transformations to invert.' - 'E.g.: [1,4,5] inverts the 1st, 4th, and 5th Affines ' - 'found in transformation_series. Note that indexing ' - 'starts with 1 and does not include warp fields. Affine ' - 'transformations are distinguished ' + "List of Affine transformations to invert." + "E.g.: [1,4,5] inverts the 1st, 4th, and 5th Affines " + "found in transformation_series. Note that indexing " + "starts with 1 and does not include warp fields. Affine " + "transformations are distinguished " 'from warp fields by the word "affine" included in their filenames.' 
- )) + ), + ) class WarpImageMultiTransformOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='Warped image') + output_image = File(exists=True, desc="Warped image") class WarpImageMultiTransform(ANTSCommand): @@ -240,49 +247,51 @@ class WarpImageMultiTransform(ANTSCommand): """ - _cmd = 'WarpImageMultiTransform' + _cmd = "WarpImageMultiTransform" input_spec = WarpImageMultiTransformInputSpec output_spec = WarpImageMultiTransformOutputSpec def _gen_filename(self, name): - if name == 'output_image': - _, name, ext = split_filename( - os.path.abspath(self.inputs.input_image)) - return ''.join((name, self.inputs.out_postfix, ext)) + if name == "output_image": + _, name, ext = split_filename(os.path.abspath(self.inputs.input_image)) + return f"{name}{self.inputs.out_postfix}{ext}" return None def _format_arg(self, opt, spec, val): - if opt == 'transformation_series': + if opt == "transformation_series": series = [] affine_counter = 0 affine_invert = [] for transformation in val: - if "affine" in transformation.lower() and \ - isdefined(self.inputs.invert_affine): + if "affine" in transformation.lower() and isdefined( + self.inputs.invert_affine + ): affine_counter += 1 if affine_counter in self.inputs.invert_affine: - series += ['-i'] + series += ["-i"] affine_invert.append(affine_counter) series += [transformation] if isdefined(self.inputs.invert_affine): diff_inv = set(self.inputs.invert_affine) - set(affine_invert) if diff_inv: - raise Exceptions( + raise Exception( "Review invert_affine, not all indexes from invert_affine were used, " - "check the description for the full definition") + "check the description for the full definition" + ) - return ' '.join(series) + return " ".join(series) - return super(WarpImageMultiTransform, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.output_image): - outputs['output_image'] = os.path.abspath(self.inputs.output_image) + outputs["output_image"] = os.path.abspath(self.inputs.output_image) else: - outputs['output_image'] = os.path.abspath( - self._gen_filename('output_image')) + outputs["output_image"] = os.path.abspath( + self._gen_filename("output_image") + ) return outputs @@ -291,81 +300,89 @@ class ApplyTransformsInputSpec(ANTSCommandInputSpec): 2, 3, 4, - argstr='--dimensionality %d', - desc=('This option forces the image to be treated ' - 'as a specified-dimensional image. If not ' - 'specified, antsWarp tries to infer the ' - 'dimensionality from the input image.')) + argstr="--dimensionality %d", + desc=( + "This option forces the image to be treated " + "as a specified-dimensional image. If not " + "specified, antsWarp tries to infer the " + "dimensionality from the input image." + ), + ) input_image_type = traits.Enum( 0, 1, 2, 3, - argstr='--input-image-type %d', - desc=('Option specifying the input image ' - 'type of scalar (default), vector, ' - 'tensor, or time series.')) + argstr="--input-image-type %d", + desc=( + "Option specifying the input image " + "type of scalar (default), vector, " + "tensor, or time series." 
+ ), + ) input_image = File( - argstr='--input %s', + argstr="--input %s", mandatory=True, - desc=('image to apply transformation to (generally a ' - 'coregistered functional)'), - exists=True) + desc=("image to apply transformation to (generally a coregistered functional)"), + exists=True, + ) output_image = traits.Str( - argstr='--output %s', - desc='output file name', - genfile=True, - hash_files=False) + argstr="--output %s", desc="output file name", genfile=True, hash_files=False + ) out_postfix = traits.Str( "_trans", usedefault=True, - desc=('Postfix that is appended to all output ' - 'files (default = _trans)')) + desc=("Postfix that is appended to all output files (default = _trans)"), + ) reference_image = File( - argstr='--reference-image %s', + argstr="--reference-image %s", mandatory=True, - desc='reference image space that you wish to warp INTO', - exists=True) + desc="reference image space that you wish to warp INTO", + exists=True, + ) interpolation = traits.Enum( - 'Linear', - 'NearestNeighbor', - 'CosineWindowedSinc', - 'WelchWindowedSinc', - 'HammingWindowedSinc', - 'LanczosWindowedSinc', - 'MultiLabel', - 'Gaussian', - 'BSpline', - argstr='%s', - usedefault=True) + "Linear", + "NearestNeighbor", + "CosineWindowedSinc", + "WelchWindowedSinc", + "HammingWindowedSinc", + "LanczosWindowedSinc", + "MultiLabel", + "Gaussian", + "BSpline", + "GenericLabel", + argstr="%s", + usedefault=True, + ) interpolation_parameters = traits.Either( - traits.Tuple(traits.Int()), # BSpline (order) - traits.Tuple( - traits.Float(), # Gaussian/MultiLabel (sigma, alpha) - traits.Float())) - transforms = traits.Either( - InputMultiPath(File(exists=True)), - 'identity', - argstr='%s', + Tuple(traits.Int()), # BSpline (order) + Tuple(traits.Float(), traits.Float()), # Gaussian/MultiLabel (sigma, alpha) + Tuple(traits.Str()), # GenericLabel + ) + transforms = InputMultiObject( + traits.Either(File(exists=True), "identity"), + argstr="%s", mandatory=True, - desc='transform files: will be applied in reverse order. For ' - 'example, the last specified transform will be applied first.') - invert_transform_flags = InputMultiPath(traits.Bool()) - default_value = traits.Float( - 0.0, argstr='--default-value %g', usedefault=True) + desc="transform files: will be applied in reverse order. 
For " + "example, the last specified transform will be applied first.", + ) + invert_transform_flags = InputMultiObject(traits.Bool()) + default_value = traits.Float(0.0, argstr="--default-value %g", usedefault=True) print_out_composite_warp_file = traits.Bool( False, requires=["output_image"], - desc='output a composite warp file instead of a transformed image') + desc="output a composite warp file instead of a transformed image", + ) float = traits.Bool( - argstr='--float %d', + argstr="--float %d", default_value=False, usedefault=True, - desc='Use float instead of double for computations.') + desc="Use float instead of double for computations.", + ) class ApplyTransformsOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='Warped image') + output_image = File(exists=True, desc="Warped image") class ApplyTransforms(ANTSCommand): @@ -383,7 +400,7 @@ class ApplyTransforms(ANTSCommand): >>> at.cmdline 'antsApplyTransforms --default-value 0 --float 0 --input moving1.nii \ --interpolation Linear --output moving1_trans.nii \ ---reference-image fixed1.nii -t identity' +--reference-image fixed1.nii --transform identity' >>> at = ApplyTransforms() >>> at.inputs.dimension = 3 @@ -393,11 +410,11 @@ class ApplyTransforms(ANTSCommand): >>> at.inputs.interpolation = 'Linear' >>> at.inputs.default_value = 0 >>> at.inputs.transforms = ['ants_Warp.nii.gz', 'trans.mat'] - >>> at.inputs.invert_transform_flags = [False, False] + >>> at.inputs.invert_transform_flags = [False, True] >>> at.cmdline 'antsApplyTransforms --default-value 0 --dimensionality 3 --float 0 --input moving1.nii \ --interpolation Linear --output deformed_moving1.nii --reference-image fixed1.nii \ ---transform [ ants_Warp.nii.gz, 0 ] --transform [ trans.mat, 0 ]' +--transform ants_Warp.nii.gz --transform [ trans.mat, 1 ]' >>> at1 = ApplyTransforms() >>> at1.inputs.dimension = 3 @@ -412,14 +429,31 @@ class ApplyTransforms(ANTSCommand): >>> at1.cmdline 'antsApplyTransforms --default-value 0 --dimensionality 3 --float 0 --input moving1.nii \ --interpolation BSpline[ 5 ] --output deformed_moving1.nii --reference-image fixed1.nii \ ---transform [ ants_Warp.nii.gz, 0 ] --transform [ trans.mat, 0 ]' +--transform ants_Warp.nii.gz --transform trans.mat' + + Identity transforms may be used as part of a chain: + + >>> at2 = ApplyTransforms() + >>> at2.inputs.dimension = 3 + >>> at2.inputs.input_image = 'moving1.nii' + >>> at2.inputs.reference_image = 'fixed1.nii' + >>> at2.inputs.output_image = 'deformed_moving1.nii' + >>> at2.inputs.interpolation = 'BSpline' + >>> at2.inputs.interpolation_parameters = (5,) + >>> at2.inputs.default_value = 0 + >>> at2.inputs.transforms = ['identity', 'ants_Warp.nii.gz', 'trans.mat'] + >>> at2.cmdline + 'antsApplyTransforms --default-value 0 --dimensionality 3 --float 0 --input moving1.nii \ +--interpolation BSpline[ 5 ] --output deformed_moving1.nii --reference-image fixed1.nii \ +--transform identity --transform ants_Warp.nii.gz --transform trans.mat' """ - _cmd = 'antsApplyTransforms' + + _cmd = "antsApplyTransforms" input_spec = ApplyTransformsInputSpec output_spec = ApplyTransformsOutputSpec def _gen_filename(self, name): - if name == 'output_image': + if name == "output_image": output = self.inputs.output_image if not isdefined(output): _, name, ext = split_filename(self.inputs.input_image) @@ -429,27 +463,28 @@ def _gen_filename(self, name): def _get_transform_filenames(self): retval = [] - for ii in range(len(self.inputs.transforms)): - if isdefined(self.inputs.invert_transform_flags): - if 
len(self.inputs.transforms) == len( - self.inputs.invert_transform_flags): - invert_code = 1 if self.inputs.invert_transform_flags[ - ii] else 0 - retval.append("--transform [ %s, %d ]" % - (self.inputs.transforms[ii], invert_code)) - else: - raise Exception(( - "ERROR: The useInverse list must have the same number " - "of entries as the transformsFileName list.")) + invert_flags = self.inputs.invert_transform_flags + if not isdefined(invert_flags): + invert_flags = [False] * len(self.inputs.transforms) + elif len(self.inputs.transforms) != len(invert_flags): + raise ValueError( + "ERROR: The invert_transform_flags list must have the same number " + "of entries as the transforms list." + ) + + for transform, invert in zip(self.inputs.transforms, invert_flags): + if invert: + retval.append(f"--transform [ {transform}, 1 ]") else: - retval.append("--transform %s" % self.inputs.transforms[ii]) + retval.append(f"--transform {transform}") return " ".join(retval) def _get_output_warped_filename(self): if isdefined(self.inputs.print_out_composite_warp_file): return "--output [ %s, %d ]" % ( self._gen_filename("output_image"), - int(self.inputs.print_out_composite_warp_file)) + int(self.inputs.print_out_composite_warp_file), + ) else: return "--output %s" % (self._gen_filename("output_image")) @@ -457,25 +492,27 @@ def _format_arg(self, opt, spec, val): if opt == "output_image": return self._get_output_warped_filename() elif opt == "transforms": - if val == 'identity': - return '-t identity' return self._get_transform_filenames() - elif opt == 'interpolation': - if self.inputs.interpolation in ['BSpline', 'MultiLabel', 'Gaussian'] and \ - isdefined(self.inputs.interpolation_parameters): - return '--interpolation %s[ %s ]' % ( - self.inputs.interpolation, ', '.join([ - str(param) - for param in self.inputs.interpolation_parameters - ])) + elif opt == "interpolation": + if self.inputs.interpolation in [ + "BSpline", + "MultiLabel", + "Gaussian", + "GenericLabel", + ] and isdefined(self.inputs.interpolation_parameters): + return "--interpolation {}[ {} ]".format( + self.inputs.interpolation, + ", ".join( + [str(param) for param in self.inputs.interpolation_parameters] + ), + ) else: - return '--interpolation %s' % self.inputs.interpolation - return super(ApplyTransforms, self)._format_arg(opt, spec, val) + return "--interpolation %s" % self.inputs.interpolation + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['output_image'] = os.path.abspath( - self._gen_filename('output_image')) + outputs["output_image"] = os.path.abspath(self._gen_filename("output_image")) return outputs @@ -484,44 +521,50 @@ class ApplyTransformsToPointsInputSpec(ANTSCommandInputSpec): 2, 3, 4, - argstr='--dimensionality %d', - desc=('This option forces the image to be treated ' - 'as a specified-dimensional image. If not ' - 'specified, antsWarp tries to infer the ' - 'dimensionality from the input image.')) + argstr="--dimensionality %d", + desc=( + "This option forces the image to be treated " + "as a specified-dimensional image. If not " + "specified, antsWarp tries to infer the " + "dimensionality from the input image." + ), + ) input_file = File( - argstr='--input %s', + argstr="--input %s", mandatory=True, - desc= - ("Currently, the only input supported is a csv file with" - " columns including x,y (2D), x,y,z (3D) or x,y,z,t,label (4D) column headers." - " The points should be defined in physical space." 
- " If in doubt how to convert coordinates from your files to the space" - " required by antsApplyTransformsToPoints try creating/drawing a simple" - " label volume with only one voxel set to 1 and all others set to 0." - " Write down the voxel coordinates. Then use ImageMaths LabelStats to find" - " out what coordinates for this voxel antsApplyTransformsToPoints is" - " expecting."), - exists=True) + desc=( + "Currently, the only input supported is a csv file with" + " columns including x,y (2D), x,y,z (3D) or x,y,z,t,label (4D) column headers." + " The points should be defined in physical space." + " If in doubt how to convert coordinates from your files to the space" + " required by antsApplyTransformsToPoints try creating/drawing a simple" + " label volume with only one voxel set to 1 and all others set to 0." + " Write down the voxel coordinates. Then use ImageMaths LabelStats to find" + " out what coordinates for this voxel antsApplyTransformsToPoints is" + " expecting." + ), + exists=True, + ) output_file = traits.Str( - argstr='--output %s', - desc='Name of the output CSV file', - name_source=['input_file'], + argstr="--output %s", + desc="Name of the output CSV file", + name_source=["input_file"], hash_files=False, - name_template='%s_transformed.csv') + name_template="%s_transformed.csv", + ) transforms = traits.List( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, - desc='transforms that will be applied to the points') + desc="transforms that will be applied to the points", + ) invert_transform_flags = traits.List( - traits.Bool(), - desc='list indicating if a transform should be reversed') + traits.Bool(), desc="list indicating if a transform should be reversed" + ) class ApplyTransformsToPointsOutputSpec(TraitedSpec): - output_file = File( - exists=True, desc='csv file with transformed coordinates') + output_file = File(exists=True, desc="csv file with transformed coordinates") class ApplyTransformsToPoints(ANTSCommand): @@ -543,7 +586,8 @@ class ApplyTransformsToPoints(ANTSCommand): """ - _cmd = 'antsApplyTransformsToPoints' + + _cmd = "antsApplyTransformsToPoints" input_spec = ApplyTransformsToPointsInputSpec output_spec = ApplyTransformsToPointsOutputSpec @@ -552,15 +596,18 @@ def _get_transform_filenames(self): for ii in range(len(self.inputs.transforms)): if isdefined(self.inputs.invert_transform_flags): if len(self.inputs.transforms) == len( - self.inputs.invert_transform_flags): - invert_code = 1 if self.inputs.invert_transform_flags[ - ii] else 0 - retval.append("--transform [ %s, %d ]" % - (self.inputs.transforms[ii], invert_code)) + self.inputs.invert_transform_flags + ): + invert_code = 1 if self.inputs.invert_transform_flags[ii] else 0 + retval.append( + "--transform [ %s, %d ]" + % (self.inputs.transforms[ii], invert_code) + ) else: - raise Exception(( + raise Exception( "ERROR: The useInverse list must have the same number " - "of entries as the transformsFileName list.")) + "of entries as the transformsFileName list." 
+ ) else: retval.append("--transform %s" % self.inputs.transforms[ii]) return " ".join(retval) @@ -568,4 +615,4 @@ def _get_transform_filenames(self): def _format_arg(self, opt, spec, val): if opt == "transforms": return self._get_transform_filenames() - return super(ApplyTransformsToPoints, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index 11bc0c48c7..3c87b71975 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -1,14 +1,19 @@ -# -*- coding: utf-8 -*- -"""The ants module provides basic functions for interfacing with ants functions. -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, str +"""Wrappers for segmentation utilities within ANTs.""" import os +from glob import glob from ...external.due import BibTeX -from ...utils.filemanip import split_filename, copyfile, which -from ..base import TraitedSpec, File, traits, InputMultiPath, OutputMultiPath, isdefined +from ...utils.filemanip import split_filename, copyfile, which, fname_presuffix +from ..base import ( + TraitedSpec, + File, + traits, + Tuple, + InputMultiPath, + OutputMultiPath, + isdefined, +) +from ..mixins import CopyHeaderInterface from .base import ANTSCommand, ANTSCommandInputSpec @@ -17,46 +22,55 @@ class AtroposInputSpec(ANTSCommandInputSpec): 3, 2, 4, - argstr='--image-dimensionality %d', + argstr="--image-dimensionality %d", usedefault=True, - desc='image dimension (2, 3, or 4)') + desc="image dimension (2, 3, or 4)", + ) intensity_images = InputMultiPath( - File(exists=True), argstr="--intensity-image %s...", mandatory=True) - mask_image = File(exists=True, argstr='--mask-image %s', mandatory=True) + File(exists=True), argstr="--intensity-image %s...", mandatory=True + ) + mask_image = File(exists=True, argstr="--mask-image %s", mandatory=True) initialization = traits.Enum( - 'Random', - 'Otsu', - 'KMeans', - 'PriorProbabilityImages', - 'PriorLabelImage', + "Random", + "Otsu", + "KMeans", + "PriorProbabilityImages", + "PriorLabelImage", argstr="%s", - requires=['number_of_tissue_classes'], - mandatory=True) - prior_probability_images = InputMultiPath(File(exists=True)) + requires=["number_of_tissue_classes"], + mandatory=True, + ) + kmeans_init_centers = traits.List(traits.Either(traits.Int, traits.Float), minlen=1) + prior_image = traits.Either( + File(exists=True), + traits.Str, + desc="either a string pattern (e.g., 'prior%02d.nii') or an existing vector-image file.", + ) number_of_tissue_classes = traits.Int(mandatory=True) prior_weighting = traits.Float() - prior_probability_threshold = traits.Float(requires=['prior_weighting']) + prior_probability_threshold = traits.Float(requires=["prior_weighting"]) likelihood_model = traits.Str(argstr="--likelihood-model %s") mrf_smoothing_factor = traits.Float(argstr="%s") - mrf_radius = traits.List(traits.Int(), requires=['mrf_smoothing_factor']) + mrf_radius = traits.List(traits.Int(), requires=["mrf_smoothing_factor"]) icm_use_synchronous_update = traits.Bool(argstr="%s") maximum_number_of_icm_terations = traits.Int( - requires=['icm_use_synchronous_update']) + requires=["icm_use_synchronous_update"] + ) n_iterations = traits.Int(argstr="%s") - convergence_threshold = traits.Float(requires=['n_iterations']) + convergence_threshold = traits.Float(requires=["n_iterations"]) posterior_formulation = traits.Str(argstr="%s") 
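One behavioral note on the serialization reworked above: ApplyTransforms now
emits a bare --transform argument unless a transform is flagged for inversion,
in which case it uses the bracketed [ path, 1 ] form, while
ApplyTransformsToPoints keeps the explicit [ path, 0|1 ] pairs. A standalone
sketch equivalent to the new ApplyTransforms helper:

    def format_transform_args(transforms, invert_flags=None):
        """Serialize transforms the way antsApplyTransforms expects them."""
        if invert_flags is None:
            invert_flags = [False] * len(transforms)
        if len(invert_flags) != len(transforms):
            raise ValueError("invert_transform_flags must match transforms one-to-one")
        return " ".join(
            "--transform [ %s, 1 ]" % path if invert else "--transform %s" % path
            for path, invert in zip(transforms, invert_flags)
        )

    format_transform_args(["ants_Warp.nii.gz", "trans.mat"], [False, True])
    # '--transform ants_Warp.nii.gz --transform [ trans.mat, 1 ]'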
use_random_seed = traits.Bool( True, - argstr='--use-random-seed %d', - desc='use random seed value over constant', - usedefault=True) - use_mixture_model_proportions = traits.Bool( - requires=['posterior_formulation']) - out_classified_image_name = File( - argstr="%s", genfile=True, hash_files=False) + argstr="--use-random-seed %d", + desc="use random seed value over constant", + usedefault=True, + ) + use_mixture_model_proportions = traits.Bool(requires=["posterior_formulation"]) + out_classified_image_name = File(argstr="%s", genfile=True, hash_files=False) save_posteriors = traits.Bool() output_posteriors_name_template = traits.Str( - 'POSTERIOR_%02d.nii.gz', usedefault=True) + "POSTERIOR_%02d.nii.gz", usedefault=True + ) class AtroposOutputSpec(TraitedSpec): @@ -65,7 +79,10 @@ class AtroposOutputSpec(TraitedSpec): class Atropos(ANTSCommand): - """A finite mixture modeling (FMM) segmentation approach with possibilities for + """ + A multivariate n-class segmentation algorithm. + + A finite mixture modeling (FMM) segmentation approach with possibilities for specifying prior constraints. These prior constraints include the specification of a prior label image, prior probability images (one for each class), and/or an MRF prior to enforce spatial smoothing of the labels. Similar algorithms include @@ -73,55 +90,131 @@ class Atropos(ANTSCommand): Examples -------- - >>> from nipype.interfaces.ants import Atropos - >>> at = Atropos() - >>> at.inputs.dimension = 3 - >>> at.inputs.intensity_images = 'structural.nii' - >>> at.inputs.mask_image = 'mask.nii' + >>> at = Atropos( + ... dimension=3, intensity_images='structural.nii', mask_image='mask.nii', + ... number_of_tissue_classes=2, likelihood_model='Gaussian', save_posteriors=True, + ... mrf_smoothing_factor=0.2, mrf_radius=[1, 1, 1], icm_use_synchronous_update=True, + ... maximum_number_of_icm_terations=1, n_iterations=5, convergence_threshold=0.000001, + ... posterior_formulation='Socrates', use_mixture_model_proportions=True) + >>> at.inputs.initialization = 'Random' + >>> at.cmdline + 'Atropos --image-dimensionality 3 --icm [1,1] + --initialization Random[2] --intensity-image structural.nii + --likelihood-model Gaussian --mask-image mask.nii --mrf [0.2,1x1x1] --convergence [5,1e-06] + --output [structural_labeled.nii,POSTERIOR_%02d.nii.gz] --posterior-formulation Socrates[1] + --use-random-seed 1' + + >>> at = Atropos( + ... dimension=3, intensity_images='structural.nii', mask_image='mask.nii', + ... number_of_tissue_classes=2, likelihood_model='Gaussian', save_posteriors=True, + ... mrf_smoothing_factor=0.2, mrf_radius=[1, 1, 1], icm_use_synchronous_update=True, + ... maximum_number_of_icm_terations=1, n_iterations=5, convergence_threshold=0.000001, + ... posterior_formulation='Socrates', use_mixture_model_proportions=True) + >>> at.inputs.initialization = 'KMeans' + >>> at.inputs.kmeans_init_centers = [100, 200] + >>> at.cmdline + 'Atropos --image-dimensionality 3 --icm [1,1] + --initialization KMeans[2,100,200] --intensity-image structural.nii + --likelihood-model Gaussian --mask-image mask.nii --mrf [0.2,1x1x1] --convergence [5,1e-06] + --output [structural_labeled.nii,POSTERIOR_%02d.nii.gz] --posterior-formulation Socrates[1] + --use-random-seed 1' + + >>> at = Atropos( + ... dimension=3, intensity_images='structural.nii', mask_image='mask.nii', + ... number_of_tissue_classes=2, likelihood_model='Gaussian', save_posteriors=True, + ... mrf_smoothing_factor=0.2, mrf_radius=[1, 1, 1], icm_use_synchronous_update=True, + ... 
maximum_number_of_icm_terations=1, n_iterations=5, convergence_threshold=0.000001, + ... posterior_formulation='Socrates', use_mixture_model_proportions=True) >>> at.inputs.initialization = 'PriorProbabilityImages' - >>> at.inputs.prior_probability_images = ['rc1s1.nii', 'rc1s2.nii'] - >>> at.inputs.number_of_tissue_classes = 2 + >>> at.inputs.prior_image = 'BrainSegmentationPrior%02d.nii.gz' >>> at.inputs.prior_weighting = 0.8 >>> at.inputs.prior_probability_threshold = 0.0000001 - >>> at.inputs.likelihood_model = 'Gaussian' - >>> at.inputs.mrf_smoothing_factor = 0.2 - >>> at.inputs.mrf_radius = [1, 1, 1] - >>> at.inputs.icm_use_synchronous_update = True - >>> at.inputs.maximum_number_of_icm_terations = 1 - >>> at.inputs.n_iterations = 5 - >>> at.inputs.convergence_threshold = 0.000001 - >>> at.inputs.posterior_formulation = 'Socrates' - >>> at.inputs.use_mixture_model_proportions = True - >>> at.inputs.save_posteriors = True >>> at.cmdline - 'Atropos --image-dimensionality 3 --icm [1,1] \ ---initialization PriorProbabilityImages[2,priors/priorProbImages%02d.nii,0.8,1e-07] --intensity-image structural.nii \ ---likelihood-model Gaussian --mask-image mask.nii --mrf [0.2,1x1x1] --convergence [5,1e-06] \ ---output [structural_labeled.nii,POSTERIOR_%02d.nii.gz] --posterior-formulation Socrates[1] --use-random-seed 1' + 'Atropos --image-dimensionality 3 --icm [1,1] + --initialization PriorProbabilityImages[2,BrainSegmentationPrior%02d.nii.gz,0.8,1e-07] + --intensity-image structural.nii --likelihood-model Gaussian --mask-image mask.nii + --mrf [0.2,1x1x1] --convergence [5,1e-06] + --output [structural_labeled.nii,POSTERIOR_%02d.nii.gz] + --posterior-formulation Socrates[1] --use-random-seed 1' + + >>> at = Atropos( + ... dimension=3, intensity_images='structural.nii', mask_image='mask.nii', + ... number_of_tissue_classes=2, likelihood_model='Gaussian', save_posteriors=True, + ... mrf_smoothing_factor=0.2, mrf_radius=[1, 1, 1], icm_use_synchronous_update=True, + ... maximum_number_of_icm_terations=1, n_iterations=5, convergence_threshold=0.000001, + ... 
posterior_formulation='Socrates', use_mixture_model_proportions=True) + >>> at.inputs.initialization = 'PriorLabelImage' + >>> at.inputs.prior_image = 'segmentation0.nii.gz' + >>> at.inputs.number_of_tissue_classes = 2 + >>> at.inputs.prior_weighting = 0.8 + >>> at.cmdline + 'Atropos --image-dimensionality 3 --icm [1,1] + --initialization PriorLabelImage[2,segmentation0.nii.gz,0.8] --intensity-image structural.nii + --likelihood-model Gaussian --mask-image mask.nii --mrf [0.2,1x1x1] --convergence [5,1e-06] + --output [structural_labeled.nii,POSTERIOR_%02d.nii.gz] --posterior-formulation Socrates[1] + --use-random-seed 1' """ + input_spec = AtroposInputSpec output_spec = AtroposOutputSpec - _cmd = 'Atropos' + _cmd = "Atropos" def _format_arg(self, opt, spec, val): - if opt == 'initialization': - retval = "--initialization %s[%d" % ( - val, self.inputs.number_of_tissue_classes) - if val == "PriorProbabilityImages": - _, _, ext = split_filename( - self.inputs.prior_probability_images[0]) - retval += ",priors/priorProbImages%02d" + \ - ext + ",%g" % self.inputs.prior_weighting - if isdefined(self.inputs.prior_probability_threshold): - retval += ",%g" % self.inputs.prior_probability_threshold - return retval + "]" - if opt == 'mrf_smoothing_factor': + if opt == "initialization": + n_classes = self.inputs.number_of_tissue_classes + brackets = ["%d" % n_classes] + if val == "KMeans" and isdefined(self.inputs.kmeans_init_centers): + centers = sorted(set(self.inputs.kmeans_init_centers)) + if len(centers) != n_classes: + raise ValueError( + "KMeans initialization with initial cluster centers requires " + "the number of centers to match number_of_tissue_classes" + ) + brackets += ["%g" % c for c in centers] + + if val in ("PriorProbabilityImages", "PriorLabelImage"): + if not isdefined(self.inputs.prior_image) or not isdefined( + self.inputs.prior_weighting + ): + raise ValueError( + "'%s' initialization requires setting " + "prior_image and prior_weighting" % val + ) + + priors_paths = [self.inputs.prior_image] + if "%02d" in priors_paths[0]: + if val == "PriorLabelImage": + raise ValueError( + "'PriorLabelImage' initialization does not " + "accept patterns for prior_image." + ) + priors_paths = [ + priors_paths[0] % i for i in range(1, n_classes + 1) + ] + + if not all(os.path.exists(p) for p in priors_paths): + raise FileNotFoundError( + "One or more prior images do not exist: " + "%s." 
% ", ".join(priors_paths) + ) + brackets += [ + self.inputs.prior_image, + "%g" % self.inputs.prior_weighting, + ] + + if val == "PriorProbabilityImages" and isdefined( + self.inputs.prior_probability_threshold + ): + brackets.append("%g" % self.inputs.prior_probability_threshold) + return "--initialization {}[{}]".format(val, ",".join(brackets)) + if opt == "mrf_smoothing_factor": retval = "--mrf [%g" % val if isdefined(self.inputs.mrf_radius): retval += ",%s" % self._format_xarray( - [str(s) for s in self.inputs.mrf_radius]) + [str(s) for s in self.inputs.mrf_radius] + ) return retval + "]" if opt == "icm_use_synchronous_update": retval = "--icm [%d" % val @@ -143,95 +236,90 @@ def _format_arg(self, opt, spec, val): if isdefined(self.inputs.save_posteriors): retval += ",%s" % self.inputs.output_posteriors_name_template return retval + "]" - return super(Atropos, self)._format_arg(opt, spec, val) - - def _run_interface(self, runtime, correct_return_codes=[0]): - if self.inputs.initialization == "PriorProbabilityImages": - priors_directory = os.path.join(os.getcwd(), "priors") - if not os.path.exists(priors_directory): - os.makedirs(priors_directory) - _, _, ext = split_filename(self.inputs.prior_probability_images[0]) - for i, f in enumerate(self.inputs.prior_probability_images): - target = os.path.join(priors_directory, - 'priorProbImages%02d' % (i + 1) + ext) - if not (os.path.exists(target) - and os.path.realpath(target) == os.path.abspath(f)): - copyfile( - os.path.abspath(f), - os.path.join(priors_directory, - 'priorProbImages%02d' % (i + 1) + ext)) - runtime = super(Atropos, self)._run_interface(runtime) - return runtime + return super()._format_arg(opt, spec, val) def _gen_filename(self, name): - if name == 'out_classified_image_name': + if name == "out_classified_image_name": output = self.inputs.out_classified_image_name if not isdefined(output): _, name, ext = split_filename(self.inputs.intensity_images[0]) - output = name + '_labeled' + ext + output = name + "_labeled" + ext return output - return None def _list_outputs(self): outputs = self._outputs().get() - outputs['classified_image'] = os.path.abspath( - self._gen_filename('out_classified_image_name')) - if isdefined( - self.inputs.save_posteriors) and self.inputs.save_posteriors: - outputs['posteriors'] = [] + outputs["classified_image"] = os.path.abspath( + self._gen_filename("out_classified_image_name") + ) + if isdefined(self.inputs.save_posteriors) and self.inputs.save_posteriors: + outputs["posteriors"] = [] for i in range(self.inputs.number_of_tissue_classes): - outputs['posteriors'].append( + outputs["posteriors"].append( os.path.abspath( - self.inputs.output_posteriors_name_template % (i + 1))) + self.inputs.output_posteriors_name_template % (i + 1) + ) + ) return outputs class LaplacianThicknessInputSpec(ANTSCommandInputSpec): input_wm = File( - argstr='%s', + argstr="%s", mandatory=True, copyfile=True, - desc='white matter segmentation image', - position=1) + desc="white matter segmentation image", + position=1, + ) input_gm = File( - argstr='%s', + argstr="%s", mandatory=True, copyfile=True, - desc='gray matter segmentation image', - position=2) - output_image = File( - desc='name of output file', - argstr='%s', + desc="gray matter segmentation image", + position=2, + ) + output_image = traits.Str( + desc="name of output file", + argstr="%s", position=3, - name_source=['input_wm'], - name_template='%s_thickness', + name_source=["input_wm"], + name_template="%s_thickness", keep_extension=True, - hash_files=False) 
+ hash_files=False, + ) smooth_param = traits.Float( - argstr='%f', - desc='Sigma of the Laplacian Recursive Image Filter (defaults to 1)', - position=4) + argstr="%s", + desc="Sigma of the Laplacian Recursive Image Filter (defaults to 1)", + position=4, + ) prior_thickness = traits.Float( - argstr='%f', - desc='Prior thickness (defaults to 500)', - position=5) + argstr="%s", + desc="Prior thickness (defaults to 500)", + requires=["smooth_param"], + position=5, + ) dT = traits.Float( - argstr='%f', - desc='Time delta used during integration (defaults to 0.01)', - position=6) + argstr="%s", + desc="Time delta used during integration (defaults to 0.01)", + requires=["prior_thickness"], + position=6, + ) sulcus_prior = traits.Float( - argstr='%f', - desc='Positive floating point number for sulcus prior. ' - 'Authors said that 0.15 might be a reasonable value', - position=7) + argstr="%s", + desc="Positive floating point number for sulcus prior. " + "Authors said that 0.15 might be a reasonable value", + requires=["dT"], + position=7, + ) tolerance = traits.Float( - argstr='%f', - desc='Tolerance to reach during optimization (defaults to 0.001)', - position=8) + argstr="%s", + desc="Tolerance to reach during optimization (defaults to 0.001)", + requires=["sulcus_prior"], + position=8, + ) class LaplacianThicknessOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='Cortical thickness') + output_image = File(exists=True, desc="Cortical thickness") class LaplacianThickness(ANTSCommand): @@ -253,64 +341,99 @@ class LaplacianThickness(ANTSCommand): """ - _cmd = 'LaplacianThickness' + _cmd = "LaplacianThickness" input_spec = LaplacianThicknessInputSpec output_spec = LaplacianThicknessOutputSpec class N4BiasFieldCorrectionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - 4, - argstr='-d %d', - usedefault=True, - desc='image dimension (2, 3 or 4)') + 3, 2, 4, argstr="-d %d", usedefault=True, desc="image dimension (2, 3 or 4)" + ) input_image = File( - argstr='--input-image %s', + argstr="--input-image %s", mandatory=True, - desc=('input for bias correction. Negative values or values close to ' - 'zero should be processed prior to correction')) + desc=( + "input for bias correction. Negative values or values close to " + "zero should be processed prior to correction" + ), + ) mask_image = File( - argstr='--mask-image %s', - desc=('image to specify region to perform final bias correction in')) + argstr="--mask-image %s", + desc=("image to specify region to perform final bias correction in"), + ) weight_image = File( - argstr='--weight-image %s', - desc=('image for relative weighting (e.g. probability map of the white ' - 'matter) of voxels during the B-spline fitting. ')) + argstr="--weight-image %s", + desc=( + "image for relative weighting (e.g. probability map of the white " + "matter) of voxels during the B-spline fitting. 
" + ), + ) output_image = traits.Str( - argstr='--output %s', - desc='output file name', - genfile=True, - hash_files=False) + argstr="--output %s", + desc="output file name", + name_source=["input_image"], + name_template="%s_corrected", + keep_extension=True, + hash_files=False, + ) bspline_fitting_distance = traits.Float(argstr="--bspline-fitting %s") - bspline_order = traits.Int(requires=['bspline_fitting_distance']) + bspline_order = traits.Int(requires=["bspline_fitting_distance"]) shrink_factor = traits.Int(argstr="--shrink-factor %d") n_iterations = traits.List(traits.Int(), argstr="--convergence %s") - convergence_threshold = traits.Float(requires=['n_iterations']) + convergence_threshold = traits.Float(requires=["n_iterations"]) save_bias = traits.Bool( False, mandatory=True, usedefault=True, - desc=('True if the estimated bias should be saved to file.'), - xor=['bias_image']) - bias_image = File( - desc='Filename for the estimated bias.', hash_files=False) + desc=("True if the estimated bias should be saved to file."), + xor=["bias_image"], + ) + bias_image = File(desc="Filename for the estimated bias.", hash_files=False) copy_header = traits.Bool( False, mandatory=True, usedefault=True, - desc='copy headers of the original image into the ' - 'output (corrected) file') + desc="copy headers of the original image into the output (corrected) file", + ) + rescale_intensities = traits.Bool( + False, + usedefault=True, + argstr="-r", + min_ver="2.1.0", + desc="""\ +[NOTE: Only ANTs>=2.1.0] +At each iteration, a new intensity mapping is calculated and applied but there +is nothing which constrains the new intensity range to be within certain values. +The result is that the range can "drift" from the original at each iteration. +This option rescales to the [min,max] range of the original image intensities +within the user-specified mask.""", + ) + histogram_sharpening = Tuple( + (0.15, 0.01, 200), + traits.Float, + traits.Float, + traits.Int, + argstr="--histogram-sharpening [%g,%g,%d]", + desc="""\ +Three-values tuple of histogram sharpening parameters \ +(FWHM, wienerNose, numberOfHistogramBins). +These options describe the histogram sharpening parameters, i.e. the \ +deconvolution step parameters described in the original N3 algorithm. +The default values have been shown to work fairly well.""", + ) class N4BiasFieldCorrectionOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='Warped image') - bias_image = File(exists=True, desc='Estimated bias') + output_image = File(exists=True, desc="Warped image") + bias_image = File(exists=True, desc="Estimated bias") + +class N4BiasFieldCorrection(ANTSCommand, CopyHeaderInterface): + """ + Bias field correction. -class N4BiasFieldCorrection(ANTSCommand): - """N4 is a variant of the popular N3 (nonparameteric nonuniform normalization) + N4 is a variant of the popular N3 (nonparameteric nonuniform normalization) retrospective bias correction algorithm. 
Based on the assumption that the corruption of the low frequency bias field can be modeled as a convolution of the intensity histogram by a Gaussian, the basic algorithmic protocol is to @@ -335,263 +458,276 @@ class N4BiasFieldCorrection(ANTSCommand): >>> n4.inputs.shrink_factor = 3 >>> n4.inputs.n_iterations = [50,50,30,20] >>> n4.cmdline - 'N4BiasFieldCorrection --bspline-fitting [ 300 ] \ --d 3 --input-image structural.nii \ ---convergence [ 50x50x30x20 ] --output structural_corrected.nii \ ---shrink-factor 3' + 'N4BiasFieldCorrection --bspline-fitting [ 300 ] + -d 3 --input-image structural.nii + --convergence [ 50x50x30x20 ] --output structural_corrected.nii + --shrink-factor 3' >>> n4_2 = copy.deepcopy(n4) >>> n4_2.inputs.convergence_threshold = 1e-6 >>> n4_2.cmdline - 'N4BiasFieldCorrection --bspline-fitting [ 300 ] \ --d 3 --input-image structural.nii \ ---convergence [ 50x50x30x20, 1e-06 ] --output structural_corrected.nii \ ---shrink-factor 3' + 'N4BiasFieldCorrection --bspline-fitting [ 300 ] + -d 3 --input-image structural.nii + --convergence [ 50x50x30x20, 1e-06 ] --output structural_corrected.nii + --shrink-factor 3' >>> n4_3 = copy.deepcopy(n4_2) >>> n4_3.inputs.bspline_order = 5 >>> n4_3.cmdline - 'N4BiasFieldCorrection --bspline-fitting [ 300, 5 ] \ --d 3 --input-image structural.nii \ ---convergence [ 50x50x30x20, 1e-06 ] --output structural_corrected.nii \ ---shrink-factor 3' + 'N4BiasFieldCorrection --bspline-fitting [ 300, 5 ] + -d 3 --input-image structural.nii + --convergence [ 50x50x30x20, 1e-06 ] --output structural_corrected.nii + --shrink-factor 3' >>> n4_4 = N4BiasFieldCorrection() >>> n4_4.inputs.input_image = 'structural.nii' >>> n4_4.inputs.save_bias = True >>> n4_4.inputs.dimension = 3 >>> n4_4.cmdline - 'N4BiasFieldCorrection -d 3 --input-image structural.nii \ ---output [ structural_corrected.nii, structural_bias.nii ]' + 'N4BiasFieldCorrection -d 3 --input-image structural.nii + --output [ structural_corrected.nii, structural_bias.nii ]' + + >>> n4_5 = N4BiasFieldCorrection() + >>> n4_5.inputs.input_image = 'structural.nii' + >>> n4_5.inputs.dimension = 3 + >>> n4_5.inputs.histogram_sharpening = (0.12, 0.02, 200) + >>> n4_5.cmdline + 'N4BiasFieldCorrection -d 3 --histogram-sharpening [0.12,0.02,200] + --input-image structural.nii --output structural_corrected.nii' + """ - _cmd = 'N4BiasFieldCorrection' + _cmd = "N4BiasFieldCorrection" input_spec = N4BiasFieldCorrectionInputSpec output_spec = N4BiasFieldCorrectionOutputSpec + _copy_header_map = { + "output_image": ("input_image", False), + "bias_image": ("input_image", True), + } - def _gen_filename(self, name): - if name == 'output_image': - output = self.inputs.output_image - if not isdefined(output): - _, name, ext = split_filename(self.inputs.input_image) - output = name + '_corrected' + ext - return output - - if name == 'bias_image': - output = self.inputs.bias_image - if not isdefined(output): - _, name, ext = split_filename(self.inputs.input_image) - output = name + '_bias' + ext - return output - return None + def __init__(self, *args, **kwargs): + """Instantiate the N4BiasFieldCorrection interface.""" + self._out_bias_file = None + super().__init__(*args, **kwargs) def _format_arg(self, name, trait_spec, value): - if ((name == 'output_image') and - (self.inputs.save_bias or isdefined(self.inputs.bias_image))): - bias_image = self._gen_filename('bias_image') - output = self._gen_filename('output_image') - newval = '[ %s, %s ]' % (output, bias_image) + if name == "output_image" and 
self._out_bias_file: + newval = f"[ {value}, {self._out_bias_file} ]" return trait_spec.argstr % newval - if name == 'bspline_fitting_distance': + if name == "bspline_fitting_distance": if isdefined(self.inputs.bspline_order): - newval = '[ %g, %d ]' % (value, self.inputs.bspline_order) + newval = "[ %g, %d ]" % (value, self.inputs.bspline_order) else: - newval = '[ %g ]' % value + newval = "[ %g ]" % value return trait_spec.argstr % newval - if name == 'n_iterations': + if name == "n_iterations": if isdefined(self.inputs.convergence_threshold): - newval = '[ %s, %g ]' % ( + newval = "[ {}, {:g} ]".format( self._format_xarray([str(elt) for elt in value]), - self.inputs.convergence_threshold) + self.inputs.convergence_threshold, + ) else: - newval = '[ %s ]' % self._format_xarray( - [str(elt) for elt in value]) + newval = "[ %s ]" % self._format_xarray([str(elt) for elt in value]) return trait_spec.argstr % newval - return super(N4BiasFieldCorrection, self)._format_arg( - name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): - if skip is None: - skip = [] - skip += ['save_bias', 'bias_image'] - return super(N4BiasFieldCorrection, self)._parse_inputs(skip=skip) + skip = (skip or []) + ["save_bias", "bias_image"] + self._out_bias_file = None + if self.inputs.save_bias or isdefined(self.inputs.bias_image): + bias_image = self.inputs.bias_image + if not isdefined(bias_image): + bias_image = fname_presuffix( + os.path.basename(self.inputs.input_image), suffix="_bias" + ) + self._out_bias_file = bias_image + return super()._parse_inputs(skip=skip) def _list_outputs(self): - outputs = self._outputs().get() - outputs['output_image'] = os.path.abspath( - self._gen_filename('output_image')) - - if self.inputs.save_bias or isdefined(self.inputs.bias_image): - outputs['bias_image'] = os.path.abspath( - self._gen_filename('bias_image')) + outputs = super()._list_outputs() + if self._out_bias_file: + outputs["bias_image"] = os.path.abspath(self._out_bias_file) return outputs - def _run_interface(self, runtime, correct_return_codes=(0, )): - runtime = super(N4BiasFieldCorrection, self)._run_interface( - runtime, correct_return_codes) - - if self.inputs.copy_header and runtime.returncode in correct_return_codes: - self._copy_header(self._gen_filename('output_image')) - if self.inputs.save_bias or isdefined(self.inputs.bias_image): - self._copy_header(self._gen_filename('bias_image')) - - return runtime - - def _copy_header(self, fname): - """Copy header from input image to an output image""" - import nibabel as nb - in_img = nb.load(self.inputs.input_image) - out_img = nb.load(fname, mmap=False) - new_img = out_img.__class__(out_img.get_data(), in_img.affine, - in_img.header) - new_img.set_data_dtype(out_img.get_data_dtype()) - new_img.to_filename(fname) - class CorticalThicknessInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, 2, argstr='-d %d', usedefault=True, desc='image dimension (2 or 3)') + 3, 2, argstr="-d %d", usedefault=True, desc="image dimension (2 or 3)" + ) anatomical_image = File( exists=True, - argstr='-a %s', - desc=('Structural *intensity* image, typically T1.' - ' If more than one anatomical image is specified,' - ' subsequently specified images are used during the' - ' segmentation process. However, only the first' - ' image is used in the registration of priors.' 
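# Illustrative usage sketch (not patch content): with the refactored interface
# above, setting ``save_bias`` makes ``_parse_inputs`` cache a bias filename and
# ``_format_arg`` fold it into the bracketed ``--output`` value. Filenames below
# are hypothetical.
#
#     from nipype.interfaces.ants import N4BiasFieldCorrection
#     n4 = N4BiasFieldCorrection()
#     n4.inputs.input_image = "structural.nii"  # hypothetical input volume
#     n4.inputs.save_bias = True                # triggers the [ corrected, bias ] form
#     print(n4.cmdline)
#     # N4BiasFieldCorrection --input-image structural.nii
#     #   --output [ structural_corrected.nii, structural_bias.nii ]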

 class CorticalThicknessInputSpec(ANTSCommandInputSpec):
     dimension = traits.Enum(
-        3, 2, argstr='-d %d', usedefault=True, desc='image dimension (2 or 3)')
+        3, 2, argstr="-d %d", usedefault=True, desc="image dimension (2 or 3)"
+    )
     anatomical_image = File(
         exists=True,
-        argstr='-a %s',
-        desc=('Structural *intensity* image, typically T1.'
-              ' If more than one anatomical image is specified,'
-              ' subsequently specified images are used during the'
-              ' segmentation process. However, only the first'
-              ' image is used in the registration of priors.'
-              ' Our suggestion would be to specify the T1'
-              ' as the first image.'),
-        mandatory=True)
+        argstr="-a %s",
+        desc=(
+            "Structural *intensity* image, typically T1."
+            " If more than one anatomical image is specified,"
+            " subsequently specified images are used during the"
+            " segmentation process. However, only the first"
+            " image is used in the registration of priors."
+            " Our suggestion would be to specify the T1"
+            " as the first image."
+        ),
+        mandatory=True,
+    )
     brain_template = File(
         exists=True,
-        argstr='-e %s',
-        desc=('Anatomical *intensity* template (possibly created using a'
-              ' population data set with buildtemplateparallel.sh in ANTs).'
-              ' This template is *not* skull-stripped.'),
-        mandatory=True)
+        argstr="-e %s",
+        desc=(
+            "Anatomical *intensity* template (possibly created using a"
+            " population data set with buildtemplateparallel.sh in ANTs)."
+            " This template is *not* skull-stripped."
+        ),
+        mandatory=True,
+    )
     brain_probability_mask = File(
         exists=True,
-        argstr='-m %s',
-        desc='brain probability mask in template space',
+        argstr="-m %s",
+        desc="brain probability mask in template space",
         copyfile=False,
-        mandatory=True)
+        mandatory=True,
+    )
     segmentation_priors = InputMultiPath(
-        File(exists=True), argstr='-p %s', mandatory=True)
+        File(exists=True), argstr="-p %s", mandatory=True
+    )
     out_prefix = traits.Str(
-        'antsCT_',
-        argstr='-o %s',
+        "antsCT_",
+        argstr="-o %s",
         usedefault=True,
-        desc=('Prefix that is prepended to all output'
-              ' files (default = antsCT_)'))
+        desc=("Prefix that is prepended to all output files"),
+    )
     image_suffix = traits.Str(
-        'nii.gz',
-        desc=('any of standard ITK formats,'
-              ' nii.gz is default'),
-        argstr='-s %s',
-        usedefault=True)
+        "nii.gz",
+        desc=("any of standard ITK formats, nii.gz is default"),
+        argstr="-s %s",
+        usedefault=True,
+    )
     t1_registration_template = File(
         exists=True,
-        desc=('Anatomical *intensity* template'
-              ' (assumed to be skull-stripped). A common'
-              ' case would be where this would be the same'
-              ' template as specified in the -e option which'
-              ' is not skull stripped.'),
-        argstr='-t %s',
-        mandatory=True)
+        desc=(
+            "Anatomical *intensity* template"
+            " (assumed to be skull-stripped). A common"
+            " case would be where this would be the same"
+            " template as specified in the -e option which"
+            " is not skull stripped."
+        ),
+        argstr="-t %s",
+        mandatory=True,
+    )
     extraction_registration_mask = File(
         exists=True,
-        argstr='-f %s',
-        desc=('Mask (defined in the template space) used during'
-              ' registration for brain extraction.'))
+        argstr="-f %s",
+        desc=(
+            "Mask (defined in the template space) used during"
+            " registration for brain extraction."
+        ),
+    )
     keep_temporary_files = traits.Int(
-        argstr='-k %d',
-        desc='Keep brain extraction/segmentation warps, etc (default = 0).')
+        argstr="-k %d",
+        desc="Keep brain extraction/segmentation warps, etc (default = 0).",
+    )
     max_iterations = traits.Int(
-        argstr='-i %d',
-        desc=('ANTS registration max iterations (default = 100x100x70x20)'))
+        argstr="-i %d",
+        desc=("ANTS registration max iterations (default = 100x100x70x20)"),
+    )
     prior_segmentation_weight = traits.Float(
-        argstr='-w %f',
-        desc=('Atropos spatial prior *probability* weight for'
-              ' the segmentation'))
+        argstr="-w %f",
+        desc=("Atropos spatial prior *probability* weight for the segmentation"),
+    )
     segmentation_iterations = traits.Int(
-        argstr='-n %d',
-        desc=('N4 -> Atropos -> N4 iterations during segmentation'
-              ' (default = 3)'))
+        argstr="-n %d",
+        desc=("N4 -> Atropos -> N4 iterations during segmentation (default = 3)"),
+    )
     posterior_formulation = traits.Str(
-        argstr='-b %s',
-        desc=('Atropos posterior formulation and whether or not'
-              ' to use mixture model proportions.'
-              ''' e.g 'Socrates[1]' (default) or 'Aristotle[1]'.'''
-              ' Choose the latter if you'
-              ' want use the distance priors (see also the -l option'
-              ' for label propagation control).'))
+        argstr="-b %s",
+        desc=(
+            "Atropos posterior formulation and whether or not"
+            " to use mixture model proportions."
+            """ e.g. 'Socrates[1]' (default) or 'Aristotle[1]'."""
+            " Choose the latter if you"
+            " want to use the distance priors (see also the -l option"
+            " for label propagation control)."
+        ),
+    )
     use_floatingpoint_precision = traits.Enum(
         0,
         1,
-        argstr='-j %d',
-        desc=('Use floating point precision in registrations (default = 0)'))
+        argstr="-j %d",
+        desc=("Use floating point precision in registrations (default = 0)"),
+    )
     use_random_seeding = traits.Enum(
         0,
         1,
-        argstr='-u %d',
-        desc=('Use random number generated from system clock in Atropos'
-              ' (default = 1)'))
+        argstr="-u %d",
+        desc=("Use random number generated from system clock in Atropos (default = 1)"),
+    )
     b_spline_smoothing = traits.Bool(
-        argstr='-v',
-        desc=('Use B-spline SyN for registrations and B-spline'
-              ' exponential mapping in DiReCT.'))
+        argstr="-v",
+        desc=(
+            "Use B-spline SyN for registrations and B-spline"
+            " exponential mapping in DiReCT."
+        ),
+    )
     cortical_label_image = File(
-        exists=True, desc='Cortical ROI labels to use as a prior for ATITH.')
+        exists=True, desc="Cortical ROI labels to use as a prior for ATITH."
+    )
     label_propagation = traits.Str(
-        argstr='-l %s',
-        desc=
-        ('Incorporate a distance prior one the posterior formulation. Should be'
-         ''' of the form 'label[lambda,boundaryProbability]' where label'''
-         ' is a value of 1,2,3,... denoting label ID. The label'
-         ' probability for anything outside the current label'
-         ' = boundaryProbability * exp( -lambda * distanceFromBoundary )'
-         ' Intuitively, smaller lambda values will increase the spatial capture'
-         ' range of the distance prior. To apply to all label values, simply omit'
-         ' specifying the label, i.e. -l [lambda,boundaryProbability].'))
+        argstr="-l %s",
+        desc=(
+            "Incorporate a distance prior on the posterior formulation. Should be"
+            """ of the form 'label[lambda,boundaryProbability]' where label"""
+            " is a value of 1,2,3,... denoting label ID. The label"
+            " probability for anything outside the current label"
+            " = boundaryProbability * exp( -lambda * distanceFromBoundary )"
+            " Intuitively, smaller lambda values will increase the spatial capture"
+            " range of the distance prior. To apply to all label values, simply omit"
+            " specifying the label, i.e. -l [lambda,boundaryProbability]."
+        ),
+    )
     quick_registration = traits.Bool(
-        argstr='-q 1',
-        desc=
-        ('If = 1, use antsRegistrationSyNQuick.sh as the basis for registration'
-         ' during brain extraction, brain segmentation, and'
-         ' (optional) normalization to a template.'
-         ' Otherwise use antsRegistrationSyN.sh (default = 0).'))
+        argstr="-q 1",
+        desc=(
+            "If = 1, use antsRegistrationSyNQuick.sh as the basis for registration"
+            " during brain extraction, brain segmentation, and"
+            " (optional) normalization to a template."
+            " Otherwise use antsRegistrationSyN.sh (default = 0)."
+        ),
+    )
     debug = traits.Bool(
-        argstr='-z 1',
+        argstr="-z 1",
         desc=(
-            'If > 0, runs a faster version of the script.'
-            ' Only for testing. Implies -u 0.'
-            ' Requires single thread computation for complete reproducibility.'
-        ))
+            "If > 0, runs a faster version of the script."
+            " Only for testing. Implies -u 0."
+            " Requires single thread computation for complete reproducibility."
+        ),
+    )


 class CorticalThicknessOutputSpec(TraitedSpec):
-    BrainExtractionMask = File(exists=True, desc='brain extraction mask')
-    ExtractedBrainN4 = File(exists=True, desc='extracted brain from N4 image')
-    BrainSegmentation = File(exists=True, desc='brain segmentaion image')
-    BrainSegmentationN4 = File(exists=True, desc='N4 corrected image')
+    BrainExtractionMask = File(exists=True, desc="brain extraction mask")
+    ExtractedBrainN4 = File(exists=True, desc="extracted brain from N4 image")
+    BrainSegmentation = File(exists=True, desc="brain segmentation image")
+    BrainSegmentationN4 = File(exists=True, desc="N4 corrected image")
     BrainSegmentationPosteriors = OutputMultiPath(
-        File(exists=True), desc='Posterior probability images')
-    CorticalThickness = File(exists=True, desc='cortical thickness file')
+        File(exists=True), desc="Posterior probability images"
+    )
+    CorticalThickness = File(exists=True, desc="cortical thickness file")
     TemplateToSubject1GenericAffine = File(
-        exists=True, desc='Template to subject affine')
-    TemplateToSubject0Warp = File(exists=True, desc='Template to subject warp')
-    SubjectToTemplate1Warp = File(
-        exists=True, desc='Template to subject inverse warp')
+        exists=True, desc="Template to subject affine"
+    )
+    TemplateToSubject0Warp = File(exists=True, desc="Template to subject warp")
+    SubjectToTemplate1Warp = File(exists=True, desc="Template to subject inverse warp")
     SubjectToTemplate0GenericAffine = File(
-        exists=True, desc='Template to subject inverse affine')
+        exists=True, desc="Template to subject inverse affine"
+    )
     SubjectToTemplateLogJacobian = File(
-        exists=True, desc='Template to subject log jacobian')
+        exists=True, desc="Template to subject log jacobian"
+    )
     CorticalThicknessNormedToTemplate = File(
-        exists=True, desc='Normalized cortical thickness')
-    BrainVolumes = File(exists=True, desc='Brain volumes as text')
+        exists=True, desc="Normalized cortical thickness"
+    )
+    BrainVolumes = File(exists=True, desc="Brain volumes as text")


 class CorticalThickness(ANTSCommand):
@@ -610,36 +746,37 @@ class CorticalThickness(ANTSCommand):
     ...                                          'BrainSegmentationPrior04.nii.gz']
     >>> corticalthickness.inputs.t1_registration_template = 'brain_study_template.nii.gz'
     >>> corticalthickness.cmdline
-    'antsCorticalThickness.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz -e study_template.nii.gz -d 3 \
--s nii.gz -o antsCT_ -p nipype_priors/BrainSegmentationPrior%02d.nii.gz -t brain_study_template.nii.gz'
+    'antsCorticalThickness.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz
+    -e study_template.nii.gz -d 3 -s nii.gz -o antsCT_
+    -p nipype_priors/BrainSegmentationPrior%02d.nii.gz -t brain_study_template.nii.gz'

     """

     input_spec = CorticalThicknessInputSpec
     output_spec = CorticalThicknessOutputSpec
-    _cmd = 'antsCorticalThickness.sh'
+    _cmd = "antsCorticalThickness.sh"

     def _format_arg(self, opt, spec, val):
-        if opt == 'anatomical_image':
-            retval = '-a %s' % val
+        if opt == "anatomical_image":
+            retval = "-a %s" % val
             return retval
-        if opt == 'brain_template':
-            retval = '-e %s' % val
+        if opt == "brain_template":
+            retval = "-e %s" % val
             return retval
-        if opt == 'brain_probability_mask':
-            retval = '-m %s' % val
+        if opt == "brain_probability_mask":
+            retval = "-m %s" % val
             return retval
-        if opt == 'out_prefix':
-            retval = '-o %s' % val
+        if opt == "out_prefix":
+            retval = "-o %s" % val
             return retval
-        if opt == 't1_registration_template':
-            retval = '-t %s' % val
+        if opt == "t1_registration_template":
+            retval = "-t %s" % val
             return retval
-        if opt == 'segmentation_priors':
+        if opt == "segmentation_priors":
             _, _, ext = split_filename(self.inputs.segmentation_priors[0])
             retval = "-p nipype_priors/BrainSegmentationPrior%02d" + ext
             return retval
-        return super(CorticalThickness, self)._format_arg(opt, spec, val)
+        return super()._format_arg(opt, spec, val)

     def _run_interface(self, runtime, correct_return_codes=[0]):
         priors_directory = os.path.join(os.getcwd(), "nipype_priors")
@@ -647,158 +784,200 @@ def _run_interface(self, runtime, correct_return_codes=[0]):
             os.makedirs(priors_directory)
         _, _, ext = split_filename(self.inputs.segmentation_priors[0])
         for i, f in enumerate(self.inputs.segmentation_priors):
-            target = os.path.join(priors_directory,
-                                  'BrainSegmentationPrior%02d' % (i + 1) + ext)
-            if not (os.path.exists(target)
-                    and os.path.realpath(target) == os.path.abspath(f)):
+            target = os.path.join(
+                priors_directory, "BrainSegmentationPrior%02d" % (i + 1) + ext
+            )
+            if not (
+                os.path.exists(target)
+                and os.path.realpath(target) == os.path.abspath(f)
+            ):
                 copyfile(os.path.abspath(f), target)
-        runtime = super(CorticalThickness, self)._run_interface(runtime)
+        runtime = super()._run_interface(runtime)
         return runtime

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['BrainExtractionMask'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'BrainExtractionMask.' +
-            self.inputs.image_suffix)
-        outputs['ExtractedBrainN4'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'ExtractedBrain0N4.' +
-            self.inputs.image_suffix)
-        outputs['BrainSegmentation'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'BrainSegmentation.' +
-            self.inputs.image_suffix)
-        outputs['BrainSegmentationN4'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'BrainSegmentation0N4.' +
-            self.inputs.image_suffix)
-        posteriors = []
-        for i in range(len(self.inputs.segmentation_priors)):
-            posteriors.append(
-                os.path.join(os.getcwd(), self.inputs.out_prefix +
-                             'BrainSegmentationPosteriors%02d.' %
-                             (i + 1) + self.inputs.image_suffix))
-        outputs['BrainSegmentationPosteriors'] = posteriors
-        outputs['CorticalThickness'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'CorticalThickness.' +
-            self.inputs.image_suffix)
-        outputs['TemplateToSubject1GenericAffine'] = os.path.join(
+        outputs["BrainExtractionMask"] = os.path.join(
+            os.getcwd(),
+            self.inputs.out_prefix + "BrainExtractionMask." + self.inputs.image_suffix,
+        )
+        outputs["ExtractedBrainN4"] = os.path.join(
+            os.getcwd(),
+            self.inputs.out_prefix + "ExtractedBrain0N4." + self.inputs.image_suffix,
+        )
+        outputs["BrainSegmentation"] = os.path.join(
+            os.getcwd(),
+            self.inputs.out_prefix + "BrainSegmentation." + self.inputs.image_suffix,
+        )
+        outputs["BrainSegmentationN4"] = os.path.join(
+            os.getcwd(),
+            self.inputs.out_prefix + "BrainSegmentation0N4." + self.inputs.image_suffix,
+        )
+        posteriors = [
+            os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainSegmentationPosteriors%02d." % (i + 1)
+                + self.inputs.image_suffix,
+            )
+            for i in range(len(self.inputs.segmentation_priors))
+        ]
+        outputs["BrainSegmentationPosteriors"] = posteriors
+        outputs["CorticalThickness"] = os.path.join(
+            os.getcwd(),
+            self.inputs.out_prefix + "CorticalThickness." + self.inputs.image_suffix,
+        )
+        outputs["TemplateToSubject1GenericAffine"] = os.path.join(
+            os.getcwd(), self.inputs.out_prefix + "TemplateToSubject1GenericAffine.mat"
+        )
+        outputs["TemplateToSubject0Warp"] = os.path.join(
+            os.getcwd(),
+            self.inputs.out_prefix
+            + "TemplateToSubject0Warp."
+            + self.inputs.image_suffix,
+        )
+        outputs["SubjectToTemplate1Warp"] = os.path.join(
             os.getcwd(),
-            self.inputs.out_prefix + 'TemplateToSubject1GenericAffine.mat')
-        outputs['TemplateToSubject0Warp'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'TemplateToSubject0Warp.' +
-            self.inputs.image_suffix)
-        outputs['SubjectToTemplate1Warp'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'SubjectToTemplate1Warp.' +
-            self.inputs.image_suffix)
-        outputs['SubjectToTemplate0GenericAffine'] = os.path.join(
+            self.inputs.out_prefix
+            + "SubjectToTemplate1Warp."
+            + self.inputs.image_suffix,
+        )
+        outputs["SubjectToTemplate0GenericAffine"] = os.path.join(
+            os.getcwd(), self.inputs.out_prefix + "SubjectToTemplate0GenericAffine.mat"
+        )
+        outputs["SubjectToTemplateLogJacobian"] = os.path.join(
             os.getcwd(),
-            self.inputs.out_prefix + 'SubjectToTemplate0GenericAffine.mat')
-        outputs['SubjectToTemplateLogJacobian'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix +
-            'SubjectToTemplateLogJacobian.' + self.inputs.image_suffix)
-        outputs['CorticalThicknessNormedToTemplate'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'CorticalThickness.' +
-            self.inputs.image_suffix)
-        outputs['BrainVolumes'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'brainvols.csv')
+            self.inputs.out_prefix
+            + "SubjectToTemplateLogJacobian."
+            + self.inputs.image_suffix,
+        )
+        outputs["CorticalThicknessNormedToTemplate"] = os.path.join(
+            os.getcwd(),
+            self.inputs.out_prefix + "CorticalThickness." + self.inputs.image_suffix,
+        )
+        outputs["BrainVolumes"] = os.path.join(
+            os.getcwd(), self.inputs.out_prefix + "brainvols.csv"
+        )
         return outputs
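# Illustrative sketch (standalone Python, not patch content), mirroring the
# prior-staging logic in _run_interface above: priors are copied into
# nipype_priors/ under numbered names so antsCorticalThickness.sh can take a
# single printf-style -p pattern. Filenames are hypothetical.
#
#     import os, shutil
#     priors = ["prior_csf.nii.gz", "prior_gm.nii.gz", "prior_wm.nii.gz"]  # hypothetical
#     os.makedirs("nipype_priors", exist_ok=True)
#     for i, f in enumerate(priors):
#         dst = "BrainSegmentationPrior%02d.nii.gz" % (i + 1)
#         shutil.copyfile(f, os.path.join("nipype_priors", dst))
#     # passed to the script as: -p nipype_priors/BrainSegmentationPrior%02d.nii.gz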

 class BrainExtractionInputSpec(ANTSCommandInputSpec):
     dimension = traits.Enum(
-        3, 2, argstr='-d %d', usedefault=True, desc='image dimension (2 or 3)')
+        3, 2, argstr="-d %d", usedefault=True, desc="image dimension (2 or 3)"
+    )
     anatomical_image = File(
         exists=True,
-        argstr='-a %s',
-        desc=('Structural image, typically T1. If more than one'
-              ' anatomical image is specified, subsequently specified'
-              ' images are used during the segmentation process. However,'
-              ' only the first image is used in the registration of priors.'
-              ' Our suggestion would be to specify the T1 as the first image.'
-              ' Anatomical template created using e.g. LPBA40 data set with'
-              ' buildtemplateparallel.sh in ANTs.'),
-        mandatory=True)
+        argstr="-a %s",
+        desc=(
+            "Structural image, typically T1. If more than one"
+            " anatomical image is specified, subsequently specified"
+            " images are used during the segmentation process. However,"
+            " only the first image is used in the registration of priors."
+            " Our suggestion would be to specify the T1 as the first image."
+            " Anatomical template created using e.g. LPBA40 data set with"
+            " buildtemplateparallel.sh in ANTs."
+        ),
+        mandatory=True,
+    )
     brain_template = File(
         exists=True,
-        argstr='-e %s',
-        desc=('Anatomical template created using e.g. LPBA40 data set with'
-              ' buildtemplateparallel.sh in ANTs.'),
-        mandatory=True)
+        argstr="-e %s",
+        desc=(
+            "Anatomical template created using e.g. LPBA40 data set with"
+            " buildtemplateparallel.sh in ANTs."
+        ),
+        mandatory=True,
+    )
     brain_probability_mask = File(
         exists=True,
-        argstr='-m %s',
-        desc=('Brain probability mask created using e.g. LPBA40 data set which'
-              ' have brain masks defined, and warped to anatomical template and'
-              ' averaged resulting in a probability image.'),
+        argstr="-m %s",
+        desc=(
+            "Brain probability mask created using e.g. LPBA40 data set which"
+            " have brain masks defined, and warped to anatomical template and"
+            " averaged resulting in a probability image."
+        ),
         copyfile=False,
-        mandatory=True)
+        mandatory=True,
+    )
     out_prefix = traits.Str(
-        'highres001_',
-        argstr='-o %s',
+        "highres001_",
+        argstr="-o %s",
         usedefault=True,
-        desc=('Prefix that is prepended to all output'
-              ' files (default = highress001_)'))
+        desc=("Prefix that is prepended to all output files"),
+    )
     extraction_registration_mask = File(
         exists=True,
-        argstr='-f %s',
-        desc=('Mask (defined in the template space) used during'
-              ' registration for brain extraction.'
-              ' To limit the metric computation to a specific region.'))
+        argstr="-f %s",
+        desc=(
+            "Mask (defined in the template space) used during"
+            " registration for brain extraction."
+            " To limit the metric computation to a specific region."
+        ),
+    )
     image_suffix = traits.Str(
-        'nii.gz',
-        desc=('any of standard ITK formats,'
-              ' nii.gz is default'),
-        argstr='-s %s',
-        usedefault=True)
+        "nii.gz",
+        desc=("any of standard ITK formats, nii.gz is default"),
+        argstr="-s %s",
+        usedefault=True,
+    )
     use_random_seeding = traits.Enum(
         0,
         1,
-        argstr='-u %d',
-        desc=('Use random number generated from system clock in Atropos'
-              ' (default = 1)'))
+        argstr="-u %d",
+        desc=("Use random number generated from system clock in Atropos (default = 1)"),
+    )
     keep_temporary_files = traits.Int(
-        argstr='-k %d',
-        desc='Keep brain extraction/segmentation warps, etc (default = 0).')
+        argstr="-k %d",
+        desc="Keep brain extraction/segmentation warps, etc (default = 0).",
+    )
     use_floatingpoint_precision = traits.Enum(
         0,
         1,
-        argstr='-q %d',
-        desc=('Use floating point precision in registrations (default = 0)'))
+        argstr="-q %d",
+        desc=("Use floating point precision in registrations (default = 0)"),
+    )
     debug = traits.Bool(
-        argstr='-z 1',
+        argstr="-z 1",
        desc=(
-            'If > 0, runs a faster version of the script.'
-            ' Only for testing. Implies -u 0.'
-            ' Requires single thread computation for complete reproducibility.'
-        ))
+            "If > 0, runs a faster version of the script."
+            " Only for testing. Implies -u 0."
+            " Requires single thread computation for complete reproducibility."
+        ),
+    )


 class BrainExtractionOutputSpec(TraitedSpec):
-    BrainExtractionMask = File(exists=True, desc='brain extraction mask')
-    BrainExtractionBrain = File(exists=True, desc='brain extraction image')
-    BrainExtractionCSF = File(
-        exists=True, desc='segmentation mask with only CSF')
+    BrainExtractionMask = File(exists=True, desc="brain extraction mask")
+    BrainExtractionBrain = File(exists=True, desc="brain extraction image")
+    BrainExtractionCSF = File(exists=True, desc="segmentation mask with only CSF")
     BrainExtractionGM = File(
-        exists=True, desc='segmentation mask with only grey matter')
-    BrainExtractionInitialAffine = File(exists=True, desc='')
-    BrainExtractionInitialAffineFixed = File(exists=True, desc='')
-    BrainExtractionInitialAffineMoving = File(exists=True, desc='')
-    BrainExtractionLaplacian = File(exists=True, desc='')
-    BrainExtractionPrior0GenericAffine = File(exists=True, desc='')
-    BrainExtractionPrior1InverseWarp = File(exists=True, desc='')
-    BrainExtractionPrior1Warp = File(exists=True, desc='')
-    BrainExtractionPriorWarped = File(exists=True, desc='')
+        exists=True, desc="segmentation mask with only grey matter"
+    )
+    BrainExtractionInitialAffine = File(exists=True, desc="")
+    BrainExtractionInitialAffineFixed = File(exists=True, desc="")
+    BrainExtractionInitialAffineMoving = File(exists=True, desc="")
+    BrainExtractionLaplacian = File(exists=True, desc="")
+    BrainExtractionPrior0GenericAffine = File(exists=True, desc="")
+    BrainExtractionPrior1InverseWarp = File(exists=True, desc="")
+    BrainExtractionPrior1Warp = File(exists=True, desc="")
+    BrainExtractionPriorWarped = File(exists=True, desc="")
     BrainExtractionSegmentation = File(
-        exists=True, desc='segmentation mask with CSF, GM, and WM')
-    BrainExtractionTemplateLaplacian = File(exists=True, desc='')
-    BrainExtractionTmp = File(exists=True, desc='')
+        exists=True, desc="segmentation mask with CSF, GM, and WM"
+    )
+    BrainExtractionTemplateLaplacian = File(exists=True, desc="")
+    BrainExtractionTmp = File(exists=True, desc="")
     BrainExtractionWM = File(
-        exists=True, desc='segmenration mask with only white matter')
-    N4Corrected0 = File(exists=True, desc='N4 bias field corrected image')
-    N4Truncated0 = File(exists=True, desc='')
+        exists=True, desc="segmentation mask with only white matter"
+    )
+    N4Corrected0 = File(exists=True, desc="N4 bias field corrected image")
+    N4Truncated0 = File(exists=True, desc="")


 class BrainExtraction(ANTSCommand):
     """
+    Atlas-based brain extraction.
+
     Examples
     --------
     >>> from nipype.interfaces.ants.segmentation import BrainExtraction
@@ -808,48 +987,50 @@ class BrainExtraction(ANTSCommand):
     >>> brainextraction.inputs.brain_template = 'study_template.nii.gz'
     >>> brainextraction.inputs.brain_probability_mask ='ProbabilityMaskOfStudyTemplate.nii.gz'
     >>> brainextraction.cmdline
-    'antsBrainExtraction.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz -e study_template.nii.gz -d 3 \
--s nii.gz -o highres001_'
+    'antsBrainExtraction.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz
+    -e study_template.nii.gz -d 3 -s nii.gz -o highres001_'
+
     """
+
     input_spec = BrainExtractionInputSpec
     output_spec = BrainExtractionOutputSpec
-    _cmd = 'antsBrainExtraction.sh'
+    _cmd = "antsBrainExtraction.sh"

-    def _run_interface(self, runtime, correct_return_codes=(0, )):
+    def _run_interface(self, runtime, correct_return_codes=(0,)):
         # antsBrainExtraction.sh requires ANTSPATH to be defined
         out_environ = self._get_environ()
-        ants_path = out_environ.get('ANTSPATH', None) or os.getenv(
-            'ANTSPATH', None)
+        ants_path = out_environ.get("ANTSPATH", None) or os.getenv("ANTSPATH", None)
         if ants_path is None:
             # Check for antsRegistration, which is under bin/ (the $ANTSPATH) instead of
             # checking for antsBrainExtraction.sh which is under script/
-            cmd_path = which('antsRegistration', env=runtime.environ)
+            cmd_path = which("antsRegistration", env=runtime.environ)
             if not cmd_path:
                 raise RuntimeError(
                     'The environment variable $ANTSPATH is not defined in host "%s", '
-                    'and Nipype could not determine it automatically.' %
-                    runtime.hostname)
+                    "and Nipype could not determine it automatically."
+                    % runtime.hostname
+                )
             ants_path = os.path.dirname(cmd_path)

-        self.inputs.environ.update({'ANTSPATH': ants_path})
-        runtime.environ.update({'ANTSPATH': ants_path})
-        runtime = super(BrainExtraction, self)._run_interface(runtime)
+        self.inputs.environ.update({"ANTSPATH": ants_path})
+        runtime.environ.update({"ANTSPATH": ants_path})
+        runtime = super()._run_interface(runtime)

         # Still, double-check if it didn't found N4
-        if 'we cant find' in runtime.stdout:
-            for line in runtime.stdout.split('\n'):
-                if line.strip().startswith('we cant find'):
-                    tool = line.strip().replace('we cant find the',
-                                                '').split(' ')[0]
+        if "we can't find" in runtime.stdout:
+            for line in runtime.stdout.split("\n"):
+                if line.strip().startswith("we can't find"):
+                    tool = line.strip().replace("we can't find the", "").split(" ")[0]
                     break

             errmsg = (
                 'antsBrainExtraction.sh requires "%s" to be found in $ANTSPATH '
-                '($ANTSPATH="%s").') % (tool, ants_path)
+                '($ANTSPATH="%s").'
+            ) % (tool, ants_path)
             if runtime.stderr is None:
                 runtime.stderr = errmsg
             else:
-                runtime.stderr += '\n' + errmsg
+                runtime.stderr += "\n" + errmsg
             runtime.returncode = 1
             self.raise_exception(runtime)
@@ -857,261 +1038,169 @@ def _run_interface(self, runtime, correct_return_codes=(0, )):

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['BrainExtractionMask'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'BrainExtractionMask.' +
-            self.inputs.image_suffix)
-        outputs['BrainExtractionBrain'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'BrainExtractionBrain.' +
-            self.inputs.image_suffix)
-        if isdefined(self.inputs.keep_temporary_files
-                     ) and self.inputs.keep_temporary_files != 0:
-            outputs['BrainExtractionCSF'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix + 'BrainExtractionCSF.' +
-                self.inputs.image_suffix)
-            outputs['BrainExtractionGM'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix + 'BrainExtractionGM.' +
-                self.inputs.image_suffix)
-            outputs['BrainExtractionInitialAffine'] = os.path.join(
-                os.getcwd(),
-                self.inputs.out_prefix + 'BrainExtractionInitialAffine.mat')
-            outputs['BrainExtractionInitialAffineFixed'] = os.path.join(
-                os.getcwd(),
-                self.inputs.out_prefix + 'BrainExtractionInitialAffineFixed.' +
-                self.inputs.image_suffix)
-            outputs['BrainExtractionInitialAffineMoving'] = os.path.join(
-                os.getcwd(),
-                self.inputs.out_prefix + 'BrainExtractionInitialAffineMoving.'
-                + self.inputs.image_suffix)
-            outputs['BrainExtractionLaplacian'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix +
-                'BrainExtractionLaplacian.' + self.inputs.image_suffix)
-            outputs['BrainExtractionPrior0GenericAffine'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix +
-                'BrainExtractionPrior0GenericAffine.mat')
-            outputs['BrainExtractionPrior1InverseWarp'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix +
-                'BrainExtractionPrior1InverseWarp.' + self.inputs.image_suffix)
-            outputs['BrainExtractionPrior1Warp'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix +
-                'BrainExtractionPrior1Warp.' + self.inputs.image_suffix)
-            outputs['BrainExtractionPriorWarped'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix +
-                'BrainExtractionPriorWarped.' + self.inputs.image_suffix)
-            outputs['BrainExtractionSegmentation'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix +
-                'BrainExtractionSegmentation.' + self.inputs.image_suffix)
-            outputs['BrainExtractionTemplateLaplacian'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix +
-                'BrainExtractionTemplateLaplacian.' + self.inputs.image_suffix)
-            outputs['BrainExtractionTmp'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix + 'BrainExtractionTmp.' +
-                self.inputs.image_suffix)
-            outputs['BrainExtractionWM'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix + 'BrainExtractionWM.' +
-                self.inputs.image_suffix)
-            outputs['N4Corrected0'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix + 'N4Corrected0.' +
-                self.inputs.image_suffix)
-            outputs['N4Truncated0'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix + 'N4Truncated0.' +
-                self.inputs.image_suffix)
+        outputs["BrainExtractionMask"] = os.path.join(
+            os.getcwd(),
+            self.inputs.out_prefix + "BrainExtractionMask." + self.inputs.image_suffix,
+        )
+        outputs["BrainExtractionBrain"] = os.path.join(
+            os.getcwd(),
+            self.inputs.out_prefix + "BrainExtractionBrain." + self.inputs.image_suffix,
+        )
+        if (
+            isdefined(self.inputs.keep_temporary_files)
+            and self.inputs.keep_temporary_files != 0
+        ):
+            outputs["BrainExtractionCSF"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionCSF."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionGM"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionGM."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionInitialAffine"] = os.path.join(
+                os.getcwd(), self.inputs.out_prefix + "BrainExtractionInitialAffine.mat"
+            )
+            outputs["BrainExtractionInitialAffineFixed"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionInitialAffineFixed."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionInitialAffineMoving"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionInitialAffineMoving."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionLaplacian"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionLaplacian."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionPrior0GenericAffine"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix + "BrainExtractionPrior0GenericAffine.mat",
+            )
+            outputs["BrainExtractionPrior1InverseWarp"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionPrior1InverseWarp."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionPrior1Warp"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionPrior1Warp."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionPriorWarped"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionPriorWarped."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionSegmentation"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionSegmentation."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionTemplateLaplacian"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionTemplateLaplacian."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionTmp"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionTmp."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionWM"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionWM."
+                + self.inputs.image_suffix,
+            )
+            outputs["N4Corrected0"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix + "N4Corrected0." + self.inputs.image_suffix,
+            )
+            outputs["N4Truncated0"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix + "N4Truncated0." + self.inputs.image_suffix,
+            )
         return outputs
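# Illustrative sketch (standalone Python, not patch content). The ANTSPATH
# fallback in _run_interface above amounts to locating antsRegistration on the
# search path and using its directory; shutil.which is a stand-in here for
# nipype's own filemanip.which(cmd, env=...).
#
#     import os
#     from shutil import which
#     ants_path = os.getenv("ANTSPATH") or os.path.dirname(which("antsRegistration") or "")
#     if not ants_path:
#         raise RuntimeError("$ANTSPATH is not defined and could not be inferred")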
-class JointFusionInputSpec(ANTSCommandInputSpec):
-    dimension = traits.Enum(
-        3,
-        2,
-        4,
-        argstr='%d',
-        position=0,
-        usedefault=True,
-        mandatory=True,
-        desc='image dimension (2, 3, or 4)')
-    modalities = traits.Int(
-        argstr='%d',
-        position=1,
-        mandatory=True,
-        desc='Number of modalities or features')
-    warped_intensity_images = InputMultiPath(
-        File(exists=True),
-        argstr="-g %s...",
-        mandatory=True,
-        desc='Warped atlas images')
-    target_image = InputMultiPath(
-        File(exists=True),
-        argstr='-tg %s...',
-        mandatory=True,
-        desc='Target image(s)')
-    warped_label_images = InputMultiPath(
-        File(exists=True),
-        argstr="-l %s...",
-        mandatory=True,
-        desc='Warped atlas segmentations')
-    method = traits.Str(
-        default='Joint',
-        argstr='-m %s',
-        usedefault=True,
-        desc=('Select voting method. Options: Joint (Joint'
-              ' Label Fusion). May be followed by optional'
-              ' parameters in brackets, e.g., -m Joint[0.1,2]'))
-    alpha = traits.Float(
-        default=0.1,
-        usedefault=True,
-        requires=['method'],
-        desc=('Regularization term added to matrix Mx for inverse'))
-    beta = traits.Int(
-        default=2,
-        usedefault=True,
-        requires=['method'],
-        desc=('Exponent for mapping intensity difference to joint error'))
-    output_label_image = File(
-        argstr='%s',
-        mandatory=True,
-        position=-1,
-        name_template='%s',
-        output_name='output_label_image',
-        desc='Output fusion label map image')
-    patch_radius = traits.ListInt(
-        minlen=3,
-        maxlen=3,
-        argstr='-rp %s',
-        desc=('Patch radius for similarity measures, '
-              'scalar or vector. Default: 2x2x2'))
-    search_radius = traits.ListInt(
-        minlen=3,
-        maxlen=3,
-        argstr='-rs %s',
-        desc='Local search radius. Default: 3x3x3')
-    exclusion_region = File(
-        exists=True,
-        argstr='-x %s',
-        desc=('Specify an exclusion region for the given label.'))
-    atlas_group_id = traits.ListInt(
-        argstr='-gp %d...', desc='Assign a group ID for each atlas')
-    atlas_group_weights = traits.ListInt(
-        argstr='-gpw %d...',
-        desc=('Assign the voting weights to each atlas group'))
-
-
-class JointFusionOutputSpec(TraitedSpec):
-    output_label_image = File(exists=True)
-    # TODO: optional outputs - output_posteriors, output_voting_weights
-
-
-class JointFusion(ANTSCommand):
-    """
-    Examples
-    --------
-
-    >>> from nipype.interfaces.ants import JointFusion
-    >>> at = JointFusion()
-    >>> at.inputs.dimension = 3
-    >>> at.inputs.modalities = 1
-    >>> at.inputs.method = 'Joint[0.1,2]'
-    >>> at.inputs.output_label_image ='fusion_labelimage_output.nii'
-    >>> at.inputs.warped_intensity_images = ['im1.nii',
-    ...                                      'im2.nii',
-    ...                                      'im3.nii']
-    >>> at.inputs.warped_label_images = ['segmentation0.nii.gz',
-    ...                                  'segmentation1.nii.gz',
-    ...                                  'segmentation1.nii.gz']
-    >>> at.inputs.target_image = 'T1.nii'
-    >>> at.cmdline
-    'jointfusion 3 1 -m Joint[0.1,2] -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz \
--l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii'
-
-    >>> at.inputs.method = 'Joint'
-    >>> at.inputs.alpha = 0.5
-    >>> at.inputs.beta = 1
-    >>> at.inputs.patch_radius = [3,2,1]
-    >>> at.inputs.search_radius = [1,2,3]
-    >>> at.cmdline
-    'jointfusion 3 1 -m Joint[0.5,1] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -g im3.nii \
--l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii'
-    """
-    input_spec = JointFusionInputSpec
-    output_spec = JointFusionOutputSpec
-    _cmd = 'jointfusion'
-
-    def _format_arg(self, opt, spec, val):
-        if opt == 'method':
-            if '[' in val:
-                retval = '-m {0}'.format(val)
-            else:
-                retval = '-m {0}[{1},{2}]'.format(
-                    self.inputs.method, self.inputs.alpha, self.inputs.beta)
-        elif opt == 'patch_radius':
-            retval = '-rp {0}'.format(self._format_xarray(val))
-        elif opt == 'search_radius':
-            retval = '-rs {0}'.format(self._format_xarray(val))
-        else:
-            if opt == 'warped_intensity_images':
-                assert len(val) == self.inputs.modalities * len(self.inputs.warped_label_images), \
-                    "Number of intensity images and label maps must be the same {0}!={1}".format(
-                        len(val), len(self.inputs.warped_label_images))
-            return super(JointFusion, self)._format_arg(opt, spec, val)
-        return retval
-
-    def _list_outputs(self):
-        outputs = self._outputs().get()
-        outputs['output_label_image'] = os.path.abspath(
-            self.inputs.output_label_image)
-        return outputs
-
-
 class DenoiseImageInputSpec(ANTSCommandInputSpec):
     dimension = traits.Enum(
         2,
         3,
         4,
-        argstr='-d %d',
-        desc='This option forces the image to be treated '
-        'as a specified-dimensional image. If not '
-        'specified, the program tries to infer the '
-        'dimensionality from the input image.')
+        argstr="-d %d",
+        desc="This option forces the image to be treated "
+        "as a specified-dimensional image. If not "
+        "specified, the program tries to infer the "
+        "dimensionality from the input image.",
+    )
     input_image = File(
         exists=True,
         argstr="-i %s",
         mandatory=True,
-        desc='A scalar image is expected as input for noise correction.')
+        desc="A scalar image is expected as input for noise correction.",
+    )
     noise_model = traits.Enum(
-        'Gaussian',
-        'Rician',
-        argstr='-n %s',
+        "Gaussian",
+        "Rician",
+        argstr="-n %s",
         usedefault=True,
-        desc=('Employ a Rician or Gaussian noise model.'))
+        desc=("Employ a Rician or Gaussian noise model."),
+    )
     shrink_factor = traits.Int(
         default_value=1,
         usedefault=True,
-        argstr='-s %s',
-        desc=('Running noise correction on large images can'
-              ' be time consuming. To lessen computation time,'
-              ' the input image can be resampled. The shrink'
-              ' factor, specified as a single integer, describes'
-              ' this resampling. Shrink factor = 1 is the default.'))
+        argstr="-s %s",
+        desc=(
+            "Running noise correction on large images can"
+            " be time consuming. To lessen computation time,"
+            " the input image can be resampled. The shrink"
+            " factor, specified as a single integer, describes"
+            " this resampling. Shrink factor = 1 is the default."
+        ),
+    )
     output_image = File(
         argstr="-o %s",
-        name_source=['input_image'],
+        name_source=["input_image"],
         hash_files=False,
         keep_extension=True,
-        name_template='%s_noise_corrected',
-        desc='The output consists of the noise corrected'
-        ' version of the input image.')
+        name_template="%s_noise_corrected",
+        desc="The output consists of the noise corrected"
+        " version of the input image.",
+    )
     save_noise = traits.Bool(
         False,
         mandatory=True,
         usedefault=True,
-        desc=('True if the estimated noise should be saved to file.'),
-        xor=['noise_image'])
+        desc=("True if the estimated noise should be saved to file."),
+        xor=["noise_image"],
+    )
     noise_image = File(
-        name_source=['input_image'],
+        name_source=["input_image"],
         hash_files=False,
         keep_extension=True,
-        name_template='%s_noise',
-        desc='Filename for the estimated noise.')
-    verbose = traits.Bool(False, argstr="-v", desc=('Verbose output.'))
+        name_template="%s_noise",
+        desc="Filename for the estimated noise.",
+    )
+    verbose = traits.Bool(False, argstr="-v", desc=("Verbose output."))


 class DenoiseImageOutputSpec(TraitedSpec):
@@ -1143,350 +1232,397 @@ class DenoiseImage(ANTSCommand):
     >>> denoise_3.inputs.save_noise = True
     >>> denoise_3.cmdline
     'DenoiseImage -i im1.nii -n Gaussian -o [ im1_noise_corrected.nii, im1_noise.nii ] -s 1'
+
     """
+
     input_spec = DenoiseImageInputSpec
     output_spec = DenoiseImageOutputSpec
-    _cmd = 'DenoiseImage'
+    _cmd = "DenoiseImage"

     def _format_arg(self, name, trait_spec, value):
-        if ((name == 'output_image') and
-                (self.inputs.save_noise or isdefined(self.inputs.noise_image))):
-            newval = '[ %s, %s ]' % (
-                self._filename_from_source('output_image'),
-                self._filename_from_source('noise_image'))
+        if (name == "output_image") and (
+            self.inputs.save_noise or isdefined(self.inputs.noise_image)
+        ):
+            newval = "[ {}, {} ]".format(
+                self._filename_from_source("output_image"),
+                self._filename_from_source("noise_image"),
+            )
             return trait_spec.argstr % newval

-        return super(DenoiseImage, self)._format_arg(name, trait_spec, value)
+        return super()._format_arg(name, trait_spec, value)
If not ' - 'specified, the program tries to infer the ' - 'dimensionality from the input image.') + argstr="-d %d", + desc="This option forces the image to be treated " + "as a specified-dimensional image. If not " + "specified, the program tries to infer the " + "dimensionality from the input image.", + ) target_image = traits.List( InputMultiPath(File(exists=True)), - argstr='-t %s', + argstr="-t %s", mandatory=True, - desc='The target image (or ' - 'multimodal target images) assumed to be ' - 'aligned to a common image domain.') + desc="The target image (or " + "multimodal target images) assumed to be " + "aligned to a common image domain.", + ) atlas_image = traits.List( InputMultiPath(File(exists=True)), argstr="-g %s...", mandatory=True, - desc='The atlas image (or ' - 'multimodal atlas images) assumed to be ' - 'aligned to a common image domain.') + desc="The atlas image (or " + "multimodal atlas images) assumed to be " + "aligned to a common image domain.", + ) atlas_segmentation_image = InputMultiPath( File(exists=True), argstr="-l %s...", mandatory=True, - desc='The atlas segmentation ' - 'images. For performing label fusion the number ' - 'of specified segmentations should be identical ' - 'to the number of atlas image sets.') + desc="The atlas segmentation " + "images. For performing label fusion the number " + "of specified segmentations should be identical " + "to the number of atlas image sets.", + ) alpha = traits.Float( default_value=0.1, usedefault=True, - argstr='-a %s', + argstr="-a %s", desc=( - 'Regularization ' - 'term added to matrix Mx for calculating the inverse. Default = 0.1' - )) + "Regularization " + "term added to matrix Mx for calculating the inverse. Default = 0.1" + ), + ) beta = traits.Float( default_value=2.0, usedefault=True, - argstr='-b %s', - desc=('Exponent for mapping ' - 'intensity difference to the joint error. Default = 2.0')) + argstr="-b %s", + desc=( + "Exponent for mapping " + "intensity difference to the joint error. Default = 2.0" + ), + ) retain_label_posterior_images = traits.Bool( False, - argstr='-r', + argstr="-r", usedefault=True, - requires=['atlas_segmentation_image'], - desc=('Retain label posterior probability images. Requires ' - 'atlas segmentations to be specified. Default = false')) + requires=["atlas_segmentation_image"], + desc=( + "Retain label posterior probability images. Requires " + "atlas segmentations to be specified. Default = false" + ), + ) retain_atlas_voting_images = traits.Bool( False, - argstr='-f', + argstr="-f", usedefault=True, - desc=('Retain atlas voting images. Default = false')) + desc=("Retain atlas voting images. Default = false"), + ) constrain_nonnegative = traits.Bool( False, - argstr='-c', + argstr="-c", usedefault=True, - desc=('Constrain solution to non-negative weights.')) + desc=("Constrain solution to non-negative weights."), + ) patch_radius = traits.ListInt( minlen=3, maxlen=3, - argstr='-p %s', - desc=('Patch radius for similarity measures.' - 'Default: 2x2x2')) + argstr="-p %s", + desc=("Patch radius for similarity measures. Default: 2x2x2"), + ) patch_metric = traits.Enum( - 'PC', - 'MSQ', - argstr='-m %s', - desc=('Metric to be used in determining the most similar ' - 'neighborhood patch. Options include Pearson\'s ' - 'correlation (PC) and mean squares (MSQ). Default = ' - 'PC (Pearson correlation).')) + "PC", + "MSQ", + argstr="-m %s", + desc=( + "Metric to be used in determining the most similar " + "neighborhood patch. 
Options include Pearson's " + "correlation (PC) and mean squares (MSQ). Default = " + "PC (Pearson correlation)." + ), + ) search_radius = traits.List( [3, 3, 3], minlen=1, maxlen=3, - argstr='-s %s', + argstr="-s %s", usedefault=True, - desc=('Search radius for similarity measures. Default = 3x3x3. ' - 'One can also specify an image where the value at the ' - 'voxel specifies the isotropic search radius at that voxel.')) + desc=( + "Search radius for similarity measures. Default = 3x3x3. " + "One can also specify an image where the value at the " + "voxel specifies the isotropic search radius at that voxel." + ), + ) exclusion_image_label = traits.List( traits.Str(), - argstr='-e %s', - requires=['exclusion_image'], - desc=('Specify a label for the exclusion region.')) + argstr="-e %s", + requires=["exclusion_image"], + desc=("Specify a label for the exclusion region."), + ) exclusion_image = traits.List( - File(exists=True), - desc=('Specify an exclusion region for the given label.')) + File(exists=True), desc=("Specify an exclusion region for the given label.") + ) mask_image = File( - argstr='-x %s', + argstr="-x %s", exists=True, - desc='If a mask image ' - 'is specified, fusion is only performed in the mask region.') + desc="If a mask image " + "is specified, fusion is only performed in the mask region.", + ) out_label_fusion = File( - argstr="%s", hash_files=False, desc='The output label fusion image.') + argstr="%s", hash_files=False, desc="The output label fusion image." + ) out_intensity_fusion_name_format = traits.Str( argstr="", - desc='Optional intensity fusion ' - 'image file name format. ' - '(e.g. "antsJointFusionIntensity_%d.nii.gz")') + desc="Optional intensity fusion " + "image file name format. " + '(e.g. "antsJointFusionIntensity_%d.nii.gz")', + ) out_label_post_prob_name_format = traits.Str( - 'antsJointFusionPosterior_%d.nii.gz', - requires=['out_label_fusion', 'out_intensity_fusion_name_format'], - desc='Optional label posterior probability ' - 'image file name format.') + "antsJointFusionPosterior_%d.nii.gz", + requires=["out_label_fusion", "out_intensity_fusion_name_format"], + desc="Optional label posterior probability image file name format.", + ) out_atlas_voting_weight_name_format = traits.Str( - 'antsJointFusionVotingWeight_%d.nii.gz', + "antsJointFusionVotingWeight_%d.nii.gz", requires=[ - 'out_label_fusion', 'out_intensity_fusion_name_format', - 'out_label_post_prob_name_format' + "out_label_fusion", + "out_intensity_fusion_name_format", + "out_label_post_prob_name_format", ], - desc='Optional atlas voting weight image ' - 'file name format.') - verbose = traits.Bool(False, argstr="-v", desc=('Verbose output.')) + desc="Optional atlas voting weight image file name format.", + ) + verbose = traits.Bool(False, argstr="-v", desc=("Verbose output.")) -class AntsJointFusionOutputSpec(TraitedSpec): +class JointFusionOutputSpec(TraitedSpec): out_label_fusion = File(exists=True) - out_intensity_fusion_name_format = traits.Str() - out_label_post_prob_name_format = traits.Str() - out_atlas_voting_weight_name_format = traits.Str() + out_intensity_fusion = OutputMultiPath(File(exists=True)) + out_label_post_prob = OutputMultiPath(File(exists=True)) + out_atlas_voting_weight = OutputMultiPath(File(exists=True)) -class AntsJointFusion(ANTSCommand): +class JointFusion(ANTSCommand): """ + An image fusion algorithm. + + Developed by Hongzhi Wang and Paul Yushkevich, and it won segmentation challenges + at MICCAI 2012 and MICCAI 2013. 
+ The original label fusion framework was extended to accommodate intensities by Brian + Avants. + This implementation is based on Paul's original ITK-style implementation + and Brian's ANTsR implementation. + + References include 1) H. Wang, J. W. Suh, S. + Das, J. Pluta, C. Craige, P. Yushkevich, Multi-atlas segmentation with joint + label fusion IEEE Trans. on Pattern Analysis and Machine Intelligence, 35(3), + 611-623, 2013. and 2) H. Wang and P. A. Yushkevich, Multi-atlas segmentation + with joint label fusion and corrective learning--an open source implementation, + Front. Neuroinform., 2013. + Examples -------- - - >>> from nipype.interfaces.ants import AntsJointFusion - >>> antsjointfusion = AntsJointFusion() - >>> antsjointfusion.inputs.out_label_fusion = 'ants_fusion_label_output.nii' - >>> antsjointfusion.inputs.atlas_image = [ ['rc1s1.nii','rc1s2.nii'] ] - >>> antsjointfusion.inputs.atlas_segmentation_image = ['segmentation0.nii.gz'] - >>> antsjointfusion.inputs.target_image = ['im1.nii'] - >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz \ --b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii']" - - >>> antsjointfusion.inputs.target_image = [ ['im1.nii', 'im2.nii'] ] - >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz \ --b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" - - >>> antsjointfusion.inputs.atlas_image = [ ['rc1s1.nii','rc1s2.nii'], + >>> from nipype.interfaces.ants import JointFusion + >>> jf = JointFusion() + >>> jf.inputs.out_label_fusion = 'ants_fusion_label_output.nii' + >>> jf.inputs.atlas_image = [ ['rc1s1.nii','rc1s2.nii'] ] + >>> jf.inputs.atlas_segmentation_image = ['segmentation0.nii.gz'] + >>> jf.inputs.target_image = ['im1.nii'] + >>> jf.cmdline + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz + -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii']" + + >>> jf.inputs.target_image = [ ['im1.nii', 'im2.nii'] ] + >>> jf.cmdline + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz + -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" + + >>> jf.inputs.atlas_image = [ ['rc1s1.nii','rc1s2.nii'], ... ['rc2s1.nii','rc2s2.nii'] ] - >>> antsjointfusion.inputs.atlas_segmentation_image = ['segmentation0.nii.gz', + >>> jf.inputs.atlas_segmentation_image = ['segmentation0.nii.gz', ... 
'segmentation1.nii.gz'] - >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 2.0 -o ants_fusion_label_output.nii \ --s 3x3x3 -t ['im1.nii', 'im2.nii']" - - >>> antsjointfusion.inputs.dimension = 3 - >>> antsjointfusion.inputs.alpha = 0.5 - >>> antsjointfusion.inputs.beta = 1.0 - >>> antsjointfusion.inputs.patch_radius = [3,2,1] - >>> antsjointfusion.inputs.search_radius = [3] - >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -o ants_fusion_label_output.nii \ --p 3x2x1 -s 3 -t ['im1.nii', 'im2.nii']" - - >>> antsjointfusion.inputs.search_radius = ['mask.nii'] - >>> antsjointfusion.inputs.verbose = True - >>> antsjointfusion.inputs.exclusion_image = ['roi01.nii', 'roi02.nii'] - >>> antsjointfusion.inputs.exclusion_image_label = ['1','2'] - >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] \ --o ants_fusion_label_output.nii -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" - - >>> antsjointfusion.inputs.out_label_fusion = 'ants_fusion_label_output.nii' - >>> antsjointfusion.inputs.out_intensity_fusion_name_format = 'ants_joint_fusion_intensity_%d.nii.gz' - >>> antsjointfusion.inputs.out_label_post_prob_name_format = 'ants_joint_fusion_posterior_%d.nii.gz' - >>> antsjointfusion.inputs.out_atlas_voting_weight_name_format = 'ants_joint_fusion_voting_weight_%d.nii.gz' - >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] \ --o [ants_fusion_label_output.nii, ants_joint_fusion_intensity_%d.nii.gz, \ -ants_joint_fusion_posterior_%d.nii.gz, ants_joint_fusion_voting_weight_%d.nii.gz] \ --p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" + >>> jf.cmdline + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] + -l segmentation0.nii.gz -l segmentation1.nii.gz -b 2.0 -o ants_fusion_label_output.nii + -s 3x3x3 -t ['im1.nii', 'im2.nii']" + + >>> jf.inputs.dimension = 3 + >>> jf.inputs.alpha = 0.5 + >>> jf.inputs.beta = 1.0 + >>> jf.inputs.patch_radius = [3,2,1] + >>> jf.inputs.search_radius = [3] + >>> jf.cmdline + "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] + -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -o ants_fusion_label_output.nii + -p 3x2x1 -s 3 -t ['im1.nii', 'im2.nii']" + + >>> jf.inputs.search_radius = ['mask.nii'] + >>> jf.inputs.verbose = True + >>> jf.inputs.exclusion_image = ['roi01.nii', 'roi02.nii'] + >>> jf.inputs.exclusion_image_label = ['1','2'] + >>> jf.cmdline + "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] + -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] + -o ants_fusion_label_output.nii -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" + + >>> jf.inputs.out_label_fusion = 'ants_fusion_label_output.nii' + >>> jf.inputs.out_intensity_fusion_name_format = 'ants_joint_fusion_intensity_%d.nii.gz' + >>> jf.inputs.out_label_post_prob_name_format = 'ants_joint_fusion_posterior_%d.nii.gz' + >>> jf.inputs.out_atlas_voting_weight_name_format = 
    'ants_joint_fusion_voting_weight_%d.nii.gz'
+    >>> jf.cmdline
+    "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii']
+    -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii]
+    -o [ants_fusion_label_output.nii, ants_joint_fusion_intensity_%d.nii.gz,
+    ants_joint_fusion_posterior_%d.nii.gz, ants_joint_fusion_voting_weight_%d.nii.gz]
+    -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v"
     """
-    input_spec = AntsJointFusionInputSpec
-    output_spec = AntsJointFusionOutputSpec
-    _cmd = 'antsJointFusion'
+
+    input_spec = JointFusionInputSpec
+    output_spec = JointFusionOutputSpec
+    _cmd = "antsJointFusion"

     def _format_arg(self, opt, spec, val):
-        if opt == 'exclusion_image_label':
-            retval = []
-            for ii in range(len(self.inputs.exclusion_image_label)):
-                retval.append(
-                    '-e {0}[{1}]'.format(self.inputs.exclusion_image_label[ii],
-                                         self.inputs.exclusion_image[ii]))
-            retval = ' '.join(retval)
-        elif opt == 'patch_radius':
-            retval = '-p {0}'.format(self._format_xarray(val))
-        elif opt == 'search_radius':
-            retval = '-s {0}'.format(self._format_xarray(val))
-        elif opt == 'out_label_fusion':
-            if isdefined(self.inputs.out_intensity_fusion_name_format):
-                if isdefined(self.inputs.out_label_post_prob_name_format):
-                    if isdefined(
-                            self.inputs.out_atlas_voting_weight_name_format):
-                        retval = '-o [{0}, {1}, {2}, {3}]'.format(
-                            self.inputs.out_label_fusion,
-                            self.inputs.out_intensity_fusion_name_format,
-                            self.inputs.out_label_post_prob_name_format,
-                            self.inputs.out_atlas_voting_weight_name_format)
-                    else:
-                        retval = '-o [{0}, {1}, {2}]'.format(
-                            self.inputs.out_label_fusion,
-                            self.inputs.out_intensity_fusion_name_format,
-                            self.inputs.out_label_post_prob_name_format)
+        if opt == "exclusion_image_label":
+            return " ".join(
+                "-e {}[{}]".format(
+                    self.inputs.exclusion_image_label[ii],
+                    self.inputs.exclusion_image[ii],
+                )
+                for ii in range(len(self.inputs.exclusion_image_label))
+            )
+        if opt == "patch_radius":
+            return f"-p {self._format_xarray(val)}"
+        if opt == "search_radius":
+            return f"-s {self._format_xarray(val)}"
+        if opt == "out_label_fusion":
+            args = [self.inputs.out_label_fusion]
+            for option in (
+                self.inputs.out_intensity_fusion_name_format,
+                self.inputs.out_label_post_prob_name_format,
+                self.inputs.out_atlas_voting_weight_name_format,
+            ):
+                if isdefined(option):
+                    args.append(option)
                 else:
-                    retval = '-o [{0}, {1}]'.format(
-                        self.inputs.out_label_fusion,
-                        self.inputs.out_intensity_fusion_name_format)
-            else:
-                retval = '-o {0}'.format(self.inputs.out_label_fusion)
-        elif opt == 'out_intensity_fusion_name_format':
-            retval = ''
+                    break
+            if len(args) == 1:
+                return " ".join(("-o", args[0]))
+            return "-o [{}]".format(", ".join(args))
+        if opt == "out_intensity_fusion_name_format":
             if not isdefined(self.inputs.out_label_fusion):
-                retval = '-o {0}'.format(
-                    self.inputs.out_intensity_fusion_name_format)
-        elif opt == 'atlas_image':
-            atlas_image_cmd = " ".join([
-                '-g [{0}]'.format(", ".join("'%s'" % fn for fn in ai))
-                for ai in self.inputs.atlas_image
-            ])
-            retval = atlas_image_cmd
-        elif opt == 'target_image':
-            target_image_cmd = " ".join([
-                '-t [{0}]'.format(", ".join("'%s'" % fn for fn in ai))
-                for ai in self.inputs.target_image
-            ])
-            retval = target_image_cmd
-        elif opt == 'atlas_segmentation_image':
-            assert len(val) == len(self.inputs.atlas_image), "Number of specified " \
-                "segmentations should be identical to the number of atlas image " \
-                "sets {0}!={1}".format(len(val), len(self.inputs.atlas_image))
-
-            atlas_segmentation_image_cmd = " ".join([
-                '-l {0}'.format(fn)
-                for fn in self.inputs.atlas_segmentation_image
-            ])
-            retval = atlas_segmentation_image_cmd
-        else:
-
-            return super(AntsJointFusion, self)._format_arg(opt, spec, val)
-        return retval
+                return f"-o {self.inputs.out_intensity_fusion_name_format}"
+            return ""
+        if opt == "atlas_image":
+            return " ".join(
+                [
+                    "-g [{}]".format(", ".join("'%s'" % fn for fn in ai))
+                    for ai in self.inputs.atlas_image
+                ]
+            )
+        if opt == "target_image":
+            return " ".join(
+                [
+                    "-t [{}]".format(", ".join("'%s'" % fn for fn in ai))
+                    for ai in self.inputs.target_image
+                ]
+            )
+        if opt == "atlas_segmentation_image":
+            if len(val) != len(self.inputs.atlas_image):
+                raise ValueError(
+                    "Number of specified segmentations should be identical to the number "
+                    "of atlas image sets {}!={}".format(
+                        len(val), len(self.inputs.atlas_image)
+                    )
+                )
+
+            return " ".join([f"-l {fn}" for fn in self.inputs.atlas_segmentation_image])
+        return super(AntsJointFusion, self)._format_arg(opt, spec, val)

     def _list_outputs(self):
         outputs = self._outputs().get()
         if isdefined(self.inputs.out_label_fusion):
-            outputs['out_label_fusion'] = os.path.abspath(
-                self.inputs.out_label_fusion)
+            outputs["out_label_fusion"] = os.path.abspath(self.inputs.out_label_fusion)
         if isdefined(self.inputs.out_intensity_fusion_name_format):
-            outputs['out_intensity_fusion_name_format'] = os.path.abspath(
-                self.inputs.out_intensity_fusion_name_format)
+            outputs["out_intensity_fusion"] = glob(
+                os.path.abspath(
+                    self.inputs.out_intensity_fusion_name_format.replace("%d", "*")
+                )
+            )
         if isdefined(self.inputs.out_label_post_prob_name_format):
-            outputs['out_label_post_prob_name_format'] = os.path.abspath(
-                self.inputs.out_label_post_prob_name_format)
+            outputs["out_label_post_prob"] = glob(
+                os.path.abspath(
+                    self.inputs.out_label_post_prob_name_format.replace("%d", "*")
+                )
+            )
         if isdefined(self.inputs.out_atlas_voting_weight_name_format):
-            outputs['out_atlas_voting_weight_name_format'] = os.path.abspath(
-                self.inputs.out_atlas_voting_weight_name_format)
-
+            outputs["out_atlas_voting_weight"] = glob(
+                os.path.abspath(
+                    self.inputs.out_atlas_voting_weight_name_format.replace("%d", "*")
+                )
+            )
         return outputs


+# For backwards compatibility
+AntsJointFusion = JointFusion
+AntsJointFusionInputSpec = JointFusionInputSpec
+AntsJointFusionOutputSpec = JointFusionOutputSpec
+
+
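A side effect worth flagging in review: `_list_outputs` used to hand back the raw `%d` name templates, whereas it now resolves them to the files the tool actually wrote. A minimal standalone sketch of that resolution (the template value here is hypothetical):

    import glob
    import os

    # Mirrors the replace("%d", "*") + glob pattern used in _list_outputs above.
    name_format = "ants_joint_fusion_intensity_%d.nii.gz"  # hypothetical template
    matches = glob.glob(os.path.abspath(name_format.replace("%d", "*")))
    # After a run this would yield e.g. ['.../ants_joint_fusion_intensity_0.nii.gz', ...]
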
+ desc="The label value for the white matter label in the segmentation_image.", ) gray_matter_prob_image = File( exists=True, argstr='--gray-matter-probability-image "%s"', - desc= - "In addition to the segmentation image, a gray matter probability image can be" + desc="In addition to the segmentation image, a gray matter probability image can be" " used. If no such image is supplied, one is created using the segmentation image" - " and a variance of 1.0 mm.") + " and a variance of 1.0 mm.", + ) white_matter_prob_image = File( exists=True, argstr='--white-matter-probability-image "%s"', - desc= - "In addition to the segmentation image, a white matter probability image can be" + desc="In addition to the segmentation image, a white matter probability image can be" " used. If no such image is supplied, one is created using the segmentation image" - " and a variance of 1.0 mm.") + " and a variance of 1.0 mm.", + ) convergence = traits.Str( - default="[50,0.001,10]", + "[50,0.001,10]", argstr='--convergence "%s"', usedefault=True, - desc= - "Convergence is determined by fitting a line to the normalized energy profile of" + desc="Convergence is determined by fitting a line to the normalized energy profile of" " the last N iterations (where N is specified by the window size) and determining" " the slope which is then compared with the convergence threshold.", ) @@ -1495,73 +1631,86 @@ class KellyKapowskiInputSpec(ANTSCommandInputSpec): 10, usedefault=True, argstr="--thickness-prior-estimate %f", - desc= - "Provides a prior constraint on the final thickness measurement in mm." + desc="Provides a prior constraint on the final thickness measurement in mm.", ) thickness_prior_image = File( exists=True, argstr='--thickness-prior-image "%s"', - desc="An image containing spatially varying prior thickness values.") + desc="An image containing spatially varying prior thickness values.", + ) gradient_step = traits.Float( 0.025, usedefault=True, argstr="--gradient-step %f", - desc="Gradient step size for the optimization.") + desc="Gradient step size for the optimization.", + ) smoothing_variance = traits.Float( - 1.0, usedefault=True, + 1.0, + usedefault=True, argstr="--smoothing-variance %f", - desc="Defines the Gaussian smoothing of the hit and total images.") + desc="Defines the Gaussian smoothing of the hit and total images.", + ) smoothing_velocity_field = traits.Float( - 1.5, usedefault=True, + 1.5, + usedefault=True, argstr="--smoothing-velocity-field-parameter %f", - desc= - "Defines the Gaussian smoothing of the velocity field (default = 1.5)." + desc="Defines the Gaussian smoothing of the velocity field (default = 1.5)." 
" If the b-spline smoothing option is chosen, then this defines the" - " isotropic mesh spacing for the smoothing spline (default = 15).") + " isotropic mesh spacing for the smoothing spline (default = 15).", + ) use_bspline_smoothing = traits.Bool( argstr="--use-bspline-smoothing 1", - desc="Sets the option for B-spline smoothing of the velocity field.") + desc="Sets the option for B-spline smoothing of the velocity field.", + ) number_integration_points = traits.Int( - 10, usedefault=True, + 10, + usedefault=True, argstr="--number-of-integration-points %d", - desc="Number of compositions of the diffeomorphism per iteration.") + desc="Number of compositions of the diffeomorphism per iteration.", + ) max_invert_displacement_field_iters = traits.Int( - 20, usedefault=True, + 20, + usedefault=True, argstr="--maximum-number-of-invert-displacement-field-iterations %d", desc="Maximum number of iterations for estimating the invert" - "displacement field.") + "displacement field.", + ) cortical_thickness = File( argstr='--output "%s"', keep_extension=True, name_source=["segmentation_image"], - name_template='%s_cortical_thickness', - desc='Filename for the cortical thickness.', - hash_files=False) + name_template="%s_cortical_thickness", + desc="Filename for the cortical thickness.", + hash_files=False, + ) warped_white_matter = File( name_source=["segmentation_image"], keep_extension=True, - name_template='%s_warped_white_matter', - desc='Filename for the warped white matter file.', - hash_files=False) + name_template="%s_warped_white_matter", + desc="Filename for the warped white matter file.", + hash_files=False, + ) class KellyKapowskiOutputSpec(TraitedSpec): cortical_thickness = File( - desc="A thickness map defined in the segmented gray matter.") + desc="A thickness map defined in the segmented gray matter." + ) warped_white_matter = File(desc="A warped white matter image.") class KellyKapowski(ANTSCommand): - """ Nipype Interface to ANTs' KellyKapowski, also known as DiReCT. + """ + Nipype Interface to ANTs' KellyKapowski, also known as DiReCT. DiReCT is a registration based estimate of cortical thickness. It was published in S. R. Das, B. B. Avants, M. Grossman, and J. C. Gee, Registration based @@ -1576,74 +1725,75 @@ class KellyKapowski(ANTSCommand): >>> kk.inputs.convergence = "[45,0.0,10]" >>> kk.inputs.thickness_prior_estimate = 10 >>> kk.cmdline - 'KellyKapowski --convergence "[45,0.0,10]" \ ---output "[segmentation0_cortical_thickness.nii.gz,segmentation0_warped_white_matter.nii.gz]" \ ---image-dimensionality 3 --gradient-step 0.025000 \ ---maximum-number-of-invert-displacement-field-iterations 20 --number-of-integration-points 10 \ ---segmentation-image "[segmentation0.nii.gz,2,3]" --smoothing-variance 1.000000 \ ---smoothing-velocity-field-parameter 1.500000 --thickness-prior-estimate 10.000000' + 'KellyKapowski --convergence "[45,0.0,10]" + --output "[segmentation0_cortical_thickness.nii.gz,segmentation0_warped_white_matter.nii.gz]" + --image-dimensionality 3 --gradient-step 0.025000 + --maximum-number-of-invert-displacement-field-iterations 20 --number-of-integration-points 10 + --segmentation-image "[segmentation0.nii.gz,2,3]" --smoothing-variance 1.000000 + --smoothing-velocity-field-parameter 1.500000 --thickness-prior-estimate 10.000000' """ + _cmd = "KellyKapowski" input_spec = KellyKapowskiInputSpec output_spec = KellyKapowskiOutputSpec - references_ = [{ - 'entry': - BibTeX( - "@book{Das2009867," - "author={Sandhitsu R. Das and Brian B. 

-    references_ = [{
-        'entry':
-        BibTeX(
-            "@book{Das2009867,"
-            "author={Sandhitsu R. Das and Brian B. Avants and Murray Grossman and James C. Gee},"
-            "title={Registration based cortical thickness measurement.},"
-            "journal={NeuroImage},"
-            "volume={45},"
-            "number={37},"
-            "pages={867--879},"
-            "year={2009},"
-            "issn={1053-8119},"
-            "url={http://www.sciencedirect.com/science/article/pii/S1053811908012780},"
-            "doi={https://doi.org/10.1016/j.neuroimage.2008.12.016}"
-            "}"),
-        'description':
-        'The details on the implementation of DiReCT.',
-        'tags': ['implementation'],
-    }]
+    _references = [
+        {
+            "entry": BibTeX(
+                """\
+@book{Das2009867,
+  author={Sandhitsu R. Das and Brian B. Avants and Murray Grossman and James C. Gee},
+  title={Registration based cortical thickness measurement.},
+  journal={NeuroImage},
+  volume={45},
+  number={37},
+  pages={867--879},
+  year={2009},
+  issn={1053-8119},
+  url={http://www.sciencedirect.com/science/article/pii/S1053811908012780},
+  doi={https://doi.org/10.1016/j.neuroimage.2008.12.016}
+}"""
+            ),
+            "description": "The details on the implementation of DiReCT.",
+            "tags": ["implementation"],
+        }
+    ]

     def _parse_inputs(self, skip=None):
         if skip is None:
             skip = []
-        skip += [
-            'warped_white_matter', 'gray_matter_label', 'white_matter_label'
-        ]
-        return super(KellyKapowski, self)._parse_inputs(skip=skip)
+        skip += ["warped_white_matter", "gray_matter_label", "white_matter_label"]
+        return super()._parse_inputs(skip=skip)

     def _gen_filename(self, name):
-        if name == 'cortical_thickness':
+        if name == "cortical_thickness":
             output = self.inputs.cortical_thickness
             if not isdefined(output):
                 _, name, ext = split_filename(self.inputs.segmentation_image)
-                output = name + '_cortical_thickness' + ext
+                output = name + "_cortical_thickness" + ext
             return output

-        if name == 'warped_white_matter':
+        if name == "warped_white_matter":
             output = self.inputs.warped_white_matter
             if not isdefined(output):
                 _, name, ext = split_filename(self.inputs.segmentation_image)
-                output = name + '_warped_white_matter' + ext
+                output = name + "_warped_white_matter" + ext
             return output

-        return None
-
     def _format_arg(self, opt, spec, val):
         if opt == "segmentation_image":
-            newval = '[{0},{1},{2}]'.format(self.inputs.segmentation_image,
-                                            self.inputs.gray_matter_label,
-                                            self.inputs.white_matter_label)
+            newval = "[{},{},{}]".format(
+                self.inputs.segmentation_image,
+                self.inputs.gray_matter_label,
+                self.inputs.white_matter_label,
+            )
             return spec.argstr % newval

         if opt == "cortical_thickness":
             ct = self._gen_filename("cortical_thickness")
             wm = self._gen_filename("warped_white_matter")
-            newval = '[{},{}]'.format(ct, wm)
+            newval = f"[{ct},{wm}]"
             return spec.argstr % newval

-        return super(KellyKapowski, self)._format_arg(opt, spec, val)
+        return super()._format_arg(opt, spec, val)
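That closes the segmentation.py changes. The rename at the top of the file keeps the old names importable via the compatibility aliases; a throwaway check, assuming this tree is on the Python path:

    from nipype.interfaces.ants.segmentation import AntsJointFusion, JointFusion

    # The backwards-compatibility block binds the old names to the new classes.
    assert AntsJointFusion is JointFusion
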
diff --git a/nipype/interfaces/ants/tests/__init__.py b/nipype/interfaces/ants/tests/__init__.py
index 99fb243f19..349937997e 100644
--- a/nipype/interfaces/ants/tests/__init__.py
+++ b/nipype/interfaces/ants/tests/__init__.py
@@ -1,3 +1,2 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
diff --git a/nipype/interfaces/ants/tests/test_auto_AI.py b/nipype/interfaces/ants/tests/test_auto_AI.py
new file mode 100644
index 0000000000..bef56b7ee6
--- /dev/null
+++ b/nipype/interfaces/ants/tests/test_auto_AI.py
@@ -0,0 +1,90 @@
+# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
+from ..utils import AI
+
+
+def test_AI_inputs():
+    input_map = dict(
+        args=dict(
+            argstr="%s",
+        ),
+        convergence=dict(
+            argstr="-c [%d,%g,%d]",
+            usedefault=True,
+        ),
+        dimension=dict(
+            argstr="-d %d",
+            usedefault=True,
+        ),
+        environ=dict(
+            nohash=True,
+            usedefault=True,
+        ),
+        fixed_image=dict(
+            extensions=None,
+            mandatory=True,
+        ),
+        fixed_image_mask=dict(
+            argstr="-x %s",
+            extensions=None,
+        ),
+        metric=dict(
+            argstr="-m %s",
+            mandatory=True,
+        ),
+        moving_image=dict(
+            extensions=None,
+            mandatory=True,
+        ),
+        moving_image_mask=dict(
+            extensions=None,
+            requires=["fixed_image_mask"],
+        ),
+        num_threads=dict(
+            nohash=True,
+            usedefault=True,
+        ),
+        output_transform=dict(
+            argstr="-o %s",
+            extensions=None,
+            usedefault=True,
+        ),
+        principal_axes=dict(
+            argstr="-p %d",
+            usedefault=True,
+            xor=["blobs"],
+        ),
+        search_factor=dict(
+            argstr="-s [%g,%g]",
+            usedefault=True,
+        ),
+        search_grid=dict(
+            argstr="-g %s",
+            min_ver="2.3.0",
+        ),
+        transform=dict(
+            argstr="-t %s[%g]",
+            usedefault=True,
+        ),
+        verbose=dict(
+            argstr="-v %d",
+            usedefault=True,
+        ),
+    )
+    inputs = AI.input_spec()
+
+    for key, metadata in list(input_map.items()):
+        for metakey, value in list(metadata.items()):
+            assert getattr(inputs.traits()[key], metakey) == value
+
+
+def test_AI_outputs():
+    output_map = dict(
+        output_transform=dict(
+            extensions=None,
+        ),
+    )
+    outputs = AI.output_spec()
+
+    for key, metadata in list(output_map.items()):
+        for metakey, value in list(metadata.items()):
+            assert getattr(outputs.traits()[key], metakey) == value
diff --git a/nipype/interfaces/ants/tests/test_auto_ANTS.py b/nipype/interfaces/ants/tests/test_auto_ANTS.py
index 7c7ef6682c..17f456e0dd 100644
--- a/nipype/interfaces/ants/tests/test_auto_ANTS.py
+++ b/nipype/interfaces/ants/tests/test_auto_ANTS.py
@@ -1,35 +1,46 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..registration import ANTS


 def test_ANTS_inputs():
     input_map = dict(
-        affine_gradient_descent_option=dict(argstr='%s', ),
-        args=dict(argstr='%s', ),
-        delta_time=dict(requires=['number_of_time_steps'], ),
+        affine_gradient_descent_option=dict(
+            argstr="%s",
+        ),
+        args=dict(
+            argstr="%s",
+        ),
+        delta_time=dict(
+            requires=["number_of_time_steps"],
+        ),
         dimension=dict(
-            argstr='%d',
+            argstr="%d",
             position=1,
         ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        fixed_image=dict(mandatory=True, ),
-        gradient_step_length=dict(requires=['transformation_model'], ),
-        metric=dict(mandatory=True, ),
+        fixed_image=dict(
+            mandatory=True,
+        ),
+        gradient_step_length=dict(
+            requires=["transformation_model"],
+        ),
+        metric=dict(
+            mandatory=True,
+        ),
         metric_weight=dict(
             mandatory=True,
-            requires=['metric'],
+            requires=["metric"],
             usedefault=True,
         ),
         mi_option=dict(
-            argstr='--MI-option %s',
-            sep='x',
+            argstr="--MI-option %s",
+            sep="x",
         ),
         moving_image=dict(
-            argstr='%s',
+            argstr="%s",
             mandatory=True,
         ),
         num_threads=dict(
@@ -37,43 +48,51 @@ def test_ANTS_inputs():
             usedefault=True,
         ),
         number_of_affine_iterations=dict(
-            argstr='--number-of-affine-iterations %s',
-            sep='x',
+            argstr="--number-of-affine-iterations %s",
+            sep="x",
         ),
         number_of_iterations=dict(
-            argstr='--number-of-iterations %s',
-            sep='x',
+            argstr="--number-of-iterations %s",
+            sep="x",
+        ),
+        number_of_time_steps=dict(
+            requires=["gradient_step_length"],
         ),
-        number_of_time_steps=dict(requires=['gradient_step_length'], ),
         output_transform_prefix=dict(
-            argstr='--output-naming %s',
+            argstr="--output-naming %s",
             mandatory=True,
             usedefault=True,
         ),
         radius=dict(
             mandatory=True,
-            requires=['metric'],
+            requires=["metric"],
+        ),
+        regularization=dict(
+            argstr="%s",
), - regularization=dict(argstr='%s', ), regularization_deformation_field_sigma=dict( - requires=['regularization'], ), + requires=["regularization"], + ), regularization_gradient_field_sigma=dict( - requires=['regularization'], ), + requires=["regularization"], + ), smoothing_sigmas=dict( - argstr='--gaussian-smoothing-sigmas %s', - sep='x', + argstr="--gaussian-smoothing-sigmas %s", + sep="x", ), subsampling_factors=dict( - argstr='--subsampling-factors %s', - sep='x', + argstr="--subsampling-factors %s", + sep="x", + ), + symmetry_type=dict( + requires=["delta_time"], ), - symmetry_type=dict(requires=['delta_time'], ), transformation_model=dict( - argstr='%s', + argstr="%s", mandatory=True, ), use_histogram_matching=dict( - argstr='%s', + argstr="%s", usedefault=True, ), ) @@ -82,13 +101,25 @@ def test_ANTS_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ANTS_outputs(): output_map = dict( - affine_transform=dict(), - inverse_warp_transform=dict(), - metaheader=dict(), - metaheader_raw=dict(), - warp_transform=dict(), + affine_transform=dict( + extensions=None, + ), + inverse_warp_transform=dict( + extensions=None, + ), + metaheader=dict( + extensions=None, + ), + metaheader_raw=dict( + extensions=None, + ), + warp_transform=dict( + extensions=None, + ), ) outputs = ANTS.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_ANTSCommand.py b/nipype/interfaces/ants/tests/test_auto_ANTSCommand.py index 7423579ef7..8907c4ab91 100644 --- a/nipype/interfaces/ants/tests/test_auto_ANTSCommand.py +++ b/nipype/interfaces/ants/tests/test_auto_ANTSCommand.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import ANTSCommand def test_ANTSCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/ants/tests/test_auto_AffineInitializer.py b/nipype/interfaces/ants/tests/test_auto_AffineInitializer.py index fed21cdbef..24ef0d655e 100644 --- a/nipype/interfaces/ants/tests/test_auto_AffineInitializer.py +++ b/nipype/interfaces/ants/tests/test_auto_AffineInitializer.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import AffineInitializer def test_AffineInitializer_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='%s', + argstr="%s", position=0, usedefault=True, ), @@ -16,17 +17,19 @@ def test_AffineInitializer_inputs(): usedefault=True, ), fixed_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), local_search=dict( - argstr='%d', + argstr="%d", position=7, usedefault=True, ), moving_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), @@ -35,22 +38,23 @@ def test_AffineInitializer_inputs(): usedefault=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=3, usedefault=True, ), principal_axes=dict( - argstr='%d', + argstr="%d", position=6, usedefault=True, ), radian_fraction=dict( - argstr='%f', + argstr="%f", position=5, usedefault=True, ), search_factor=dict( - argstr='%f', + argstr="%f", position=4, usedefault=True, ), @@ -60,8 +64,14 @@ def test_AffineInitializer_inputs(): for key, metadata in list(input_map.items()): for metakey, value 
in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AffineInitializer_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = AffineInitializer.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_AntsJointFusion.py b/nipype/interfaces/ants/tests/test_auto_AntsJointFusion.py deleted file mode 100644 index 341f250a2c..0000000000 --- a/nipype/interfaces/ants/tests/test_auto_AntsJointFusion.py +++ /dev/null @@ -1,98 +0,0 @@ -# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals -from ..segmentation import AntsJointFusion - - -def test_AntsJointFusion_inputs(): - input_map = dict( - alpha=dict( - argstr='-a %s', - usedefault=True, - ), - args=dict(argstr='%s', ), - atlas_image=dict( - argstr='-g %s...', - mandatory=True, - ), - atlas_segmentation_image=dict( - argstr='-l %s...', - mandatory=True, - ), - beta=dict( - argstr='-b %s', - usedefault=True, - ), - constrain_nonnegative=dict( - argstr='-c', - usedefault=True, - ), - dimension=dict(argstr='-d %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - exclusion_image=dict(), - exclusion_image_label=dict( - argstr='-e %s', - requires=['exclusion_image'], - ), - mask_image=dict(argstr='-x %s', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_atlas_voting_weight_name_format=dict( - requires=[ - 'out_label_fusion', 'out_intensity_fusion_name_format', - 'out_label_post_prob_name_format' - ], ), - out_intensity_fusion_name_format=dict(argstr='', ), - out_label_fusion=dict( - argstr='%s', - hash_files=False, - ), - out_label_post_prob_name_format=dict( - requires=['out_label_fusion', - 'out_intensity_fusion_name_format'], ), - patch_metric=dict(argstr='-m %s', ), - patch_radius=dict( - argstr='-p %s', - maxlen=3, - minlen=3, - ), - retain_atlas_voting_images=dict( - argstr='-f', - usedefault=True, - ), - retain_label_posterior_images=dict( - argstr='-r', - requires=['atlas_segmentation_image'], - usedefault=True, - ), - search_radius=dict( - argstr='-s %s', - usedefault=True, - ), - target_image=dict( - argstr='-t %s', - mandatory=True, - ), - verbose=dict(argstr='-v', ), - ) - inputs = AntsJointFusion.input_spec() - - for key, metadata in list(input_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(inputs.traits()[key], metakey) == value -def test_AntsJointFusion_outputs(): - output_map = dict( - out_atlas_voting_weight_name_format=dict(), - out_intensity_fusion_name_format=dict(), - out_label_fusion=dict(), - out_label_post_prob_name_format=dict(), - ) - outputs = AntsJointFusion.output_spec() - - for key, metadata in list(output_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py b/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py index c2bbffa14f..a18a3b60b4 100644 --- a/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py +++ b/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py @@ -1,31 +1,37 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..resampling import ApplyTransforms def test_ApplyTransforms_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), default_value=dict( - argstr='--default-value %g', + argstr="--default-value 
%g", usedefault=True, ), - dimension=dict(argstr='--dimensionality %d', ), + dimension=dict( + argstr="--dimensionality %d", + ), environ=dict( nohash=True, usedefault=True, ), float=dict( - argstr='--float %d', + argstr="--float %d", usedefault=True, ), input_image=dict( - argstr='--input %s', + argstr="--input %s", + extensions=None, mandatory=True, ), - input_image_type=dict(argstr='--input-image-type %d', ), + input_image_type=dict( + argstr="--input-image-type %d", + ), interpolation=dict( - argstr='%s', + argstr="%s", usedefault=True, ), interpolation_parameters=dict(), @@ -34,19 +40,24 @@ def test_ApplyTransforms_inputs(): nohash=True, usedefault=True, ), - out_postfix=dict(usedefault=True, ), + out_postfix=dict( + usedefault=True, + ), output_image=dict( - argstr='--output %s', + argstr="--output %s", genfile=True, hash_files=False, ), - print_out_composite_warp_file=dict(requires=['output_image'], ), + print_out_composite_warp_file=dict( + requires=["output_image"], + ), reference_image=dict( - argstr='--reference-image %s', + argstr="--reference-image %s", + extensions=None, mandatory=True, ), transforms=dict( - argstr='%s', + argstr="%s", mandatory=True, ), ) @@ -55,8 +66,14 @@ def test_ApplyTransforms_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyTransforms_outputs(): - output_map = dict(output_image=dict(), ) + output_map = dict( + output_image=dict( + extensions=None, + ), + ) outputs = ApplyTransforms.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py b/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py index 92c6c21ec1..472c22c8b2 100644 --- a/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py +++ b/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py @@ -1,18 +1,22 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..resampling import ApplyTransformsToPoints def test_ApplyTransformsToPoints_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict(argstr='--dimensionality %d', ), + args=dict( + argstr="%s", + ), + dimension=dict( + argstr="--dimensionality %d", + ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( - argstr='--input %s', + argstr="--input %s", + extensions=None, mandatory=True, ), invert_transform_flags=dict(), @@ -21,13 +25,13 @@ def test_ApplyTransformsToPoints_inputs(): usedefault=True, ), output_file=dict( - argstr='--output %s', + argstr="--output %s", hash_files=False, - name_source=['input_file'], - name_template='%s_transformed.csv', + name_source=["input_file"], + name_template="%s_transformed.csv", ), transforms=dict( - argstr='%s', + argstr="%s", mandatory=True, ), ) @@ -36,8 +40,14 @@ def test_ApplyTransformsToPoints_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyTransformsToPoints_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = ApplyTransformsToPoints.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_Atropos.py b/nipype/interfaces/ants/tests/test_auto_Atropos.py index 90bc13778a..9f02bb5151 100644 --- 
a/nipype/interfaces/ants/tests/test_auto_Atropos.py +++ b/nipype/interfaces/ants/tests/test_auto_Atropos.py @@ -1,60 +1,86 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..segmentation import Atropos def test_Atropos_inputs(): input_map = dict( - args=dict(argstr='%s', ), - convergence_threshold=dict(requires=['n_iterations'], ), + args=dict( + argstr="%s", + ), + convergence_threshold=dict( + requires=["n_iterations"], + ), dimension=dict( - argstr='--image-dimensionality %d', + argstr="--image-dimensionality %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), - icm_use_synchronous_update=dict(argstr='%s', ), + icm_use_synchronous_update=dict( + argstr="%s", + ), initialization=dict( - argstr='%s', + argstr="%s", mandatory=True, - requires=['number_of_tissue_classes'], + requires=["number_of_tissue_classes"], ), intensity_images=dict( - argstr='--intensity-image %s...', + argstr="--intensity-image %s...", mandatory=True, ), - likelihood_model=dict(argstr='--likelihood-model %s', ), + kmeans_init_centers=dict(), + likelihood_model=dict( + argstr="--likelihood-model %s", + ), mask_image=dict( - argstr='--mask-image %s', + argstr="--mask-image %s", + extensions=None, mandatory=True, ), maximum_number_of_icm_terations=dict( - requires=['icm_use_synchronous_update'], ), - mrf_radius=dict(requires=['mrf_smoothing_factor'], ), - mrf_smoothing_factor=dict(argstr='%s', ), - n_iterations=dict(argstr='%s', ), + requires=["icm_use_synchronous_update"], + ), + mrf_radius=dict( + requires=["mrf_smoothing_factor"], + ), + mrf_smoothing_factor=dict( + argstr="%s", + ), + n_iterations=dict( + argstr="%s", + ), num_threads=dict( nohash=True, usedefault=True, ), - number_of_tissue_classes=dict(mandatory=True, ), + number_of_tissue_classes=dict( + mandatory=True, + ), out_classified_image_name=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, ), - output_posteriors_name_template=dict(usedefault=True, ), - posterior_formulation=dict(argstr='%s', ), - prior_probability_images=dict(), - prior_probability_threshold=dict(requires=['prior_weighting'], ), + output_posteriors_name_template=dict( + usedefault=True, + ), + posterior_formulation=dict( + argstr="%s", + ), + prior_image=dict(), + prior_probability_threshold=dict( + requires=["prior_weighting"], + ), prior_weighting=dict(), save_posteriors=dict(), use_mixture_model_proportions=dict( - requires=['posterior_formulation'], ), + requires=["posterior_formulation"], + ), use_random_seed=dict( - argstr='--use-random-seed %d', + argstr="--use-random-seed %d", usedefault=True, ), ) @@ -63,9 +89,13 @@ def test_Atropos_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Atropos_outputs(): output_map = dict( - classified_image=dict(), + classified_image=dict( + extensions=None, + ), posteriors=dict(), ) outputs = Atropos.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_AverageAffineTransform.py b/nipype/interfaces/ants/tests/test_auto_AverageAffineTransform.py index 3fe3abe5cf..3da6956de2 100644 --- a/nipype/interfaces/ants/tests/test_auto_AverageAffineTransform.py +++ b/nipype/interfaces/ants/tests/test_auto_AverageAffineTransform.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import AverageAffineTransform def 
test_AverageAffineTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='%d', + argstr="%d", mandatory=True, position=0, ), @@ -20,12 +21,13 @@ def test_AverageAffineTransform_inputs(): usedefault=True, ), output_affine_transform=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), transforms=dict( - argstr='%s', + argstr="%s", mandatory=True, position=3, ), @@ -35,8 +37,14 @@ def test_AverageAffineTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AverageAffineTransform_outputs(): - output_map = dict(affine_transform=dict(), ) + output_map = dict( + affine_transform=dict( + extensions=None, + ), + ) outputs = AverageAffineTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_AverageImages.py b/nipype/interfaces/ants/tests/test_auto_AverageImages.py index 41e0c99007..8eb03ea7b4 100644 --- a/nipype/interfaces/ants/tests/test_auto_AverageImages.py +++ b/nipype/interfaces/ants/tests/test_auto_AverageImages.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import AverageImages def test_AverageImages_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='%d', + argstr="%d", mandatory=True, position=0, ), @@ -16,12 +17,12 @@ def test_AverageImages_inputs(): usedefault=True, ), images=dict( - argstr='%s', + argstr="%s", mandatory=True, position=3, ), normalize=dict( - argstr='%d', + argstr="%d", mandatory=True, position=2, ), @@ -30,7 +31,8 @@ def test_AverageImages_inputs(): usedefault=True, ), output_average_image=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, position=1, usedefault=True, @@ -41,8 +43,14 @@ def test_AverageImages_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AverageImages_outputs(): - output_map = dict(output_average_image=dict(), ) + output_map = dict( + output_average_image=dict( + extensions=None, + ), + ) outputs = AverageImages.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_BrainExtraction.py b/nipype/interfaces/ants/tests/test_auto_BrainExtraction.py index 2d48192199..454a102f2d 100644 --- a/nipype/interfaces/ants/tests/test_auto_BrainExtraction.py +++ b/nipype/interfaces/ants/tests/test_auto_BrainExtraction.py @@ -1,75 +1,128 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..segmentation import BrainExtraction def test_BrainExtraction_inputs(): input_map = dict( anatomical_image=dict( - argstr='-a %s', + argstr="-a %s", + extensions=None, mandatory=True, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), brain_probability_mask=dict( - argstr='-m %s', + argstr="-m %s", copyfile=False, + extensions=None, mandatory=True, ), brain_template=dict( - argstr='-e %s', + argstr="-e %s", + extensions=None, mandatory=True, ), - debug=dict(argstr='-z 1', ), + debug=dict( + argstr="-z 1", + ), dimension=dict( - argstr='-d %d', + argstr="-d %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), - extraction_registration_mask=dict(argstr='-f %s', ), + 
extraction_registration_mask=dict( + argstr="-f %s", + extensions=None, + ), image_suffix=dict( - argstr='-s %s', + argstr="-s %s", usedefault=True, ), - keep_temporary_files=dict(argstr='-k %d', ), + keep_temporary_files=dict( + argstr="-k %d", + ), num_threads=dict( nohash=True, usedefault=True, ), out_prefix=dict( - argstr='-o %s', + argstr="-o %s", usedefault=True, ), - use_floatingpoint_precision=dict(argstr='-q %d', ), - use_random_seeding=dict(argstr='-u %d', ), + use_floatingpoint_precision=dict( + argstr="-q %d", + ), + use_random_seeding=dict( + argstr="-u %d", + ), ) inputs = BrainExtraction.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BrainExtraction_outputs(): output_map = dict( - BrainExtractionBrain=dict(), - BrainExtractionCSF=dict(), - BrainExtractionGM=dict(), - BrainExtractionInitialAffine=dict(), - BrainExtractionInitialAffineFixed=dict(), - BrainExtractionInitialAffineMoving=dict(), - BrainExtractionLaplacian=dict(), - BrainExtractionMask=dict(), - BrainExtractionPrior0GenericAffine=dict(), - BrainExtractionPrior1InverseWarp=dict(), - BrainExtractionPrior1Warp=dict(), - BrainExtractionPriorWarped=dict(), - BrainExtractionSegmentation=dict(), - BrainExtractionTemplateLaplacian=dict(), - BrainExtractionTmp=dict(), - BrainExtractionWM=dict(), - N4Corrected0=dict(), - N4Truncated0=dict(), + BrainExtractionBrain=dict( + extensions=None, + ), + BrainExtractionCSF=dict( + extensions=None, + ), + BrainExtractionGM=dict( + extensions=None, + ), + BrainExtractionInitialAffine=dict( + extensions=None, + ), + BrainExtractionInitialAffineFixed=dict( + extensions=None, + ), + BrainExtractionInitialAffineMoving=dict( + extensions=None, + ), + BrainExtractionLaplacian=dict( + extensions=None, + ), + BrainExtractionMask=dict( + extensions=None, + ), + BrainExtractionPrior0GenericAffine=dict( + extensions=None, + ), + BrainExtractionPrior1InverseWarp=dict( + extensions=None, + ), + BrainExtractionPrior1Warp=dict( + extensions=None, + ), + BrainExtractionPriorWarped=dict( + extensions=None, + ), + BrainExtractionSegmentation=dict( + extensions=None, + ), + BrainExtractionTemplateLaplacian=dict( + extensions=None, + ), + BrainExtractionTmp=dict( + extensions=None, + ), + BrainExtractionWM=dict( + extensions=None, + ), + N4Corrected0=dict( + extensions=None, + ), + N4Truncated0=dict( + extensions=None, + ), ) outputs = BrainExtraction.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_ComposeMultiTransform.py b/nipype/interfaces/ants/tests/test_auto_ComposeMultiTransform.py index 83fb2ed2aa..78afc21df2 100644 --- a/nipype/interfaces/ants/tests/test_auto_ComposeMultiTransform.py +++ b/nipype/interfaces/ants/tests/test_auto_ComposeMultiTransform.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ComposeMultiTransform def test_ComposeMultiTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='%d', + argstr="%d", position=0, usedefault=True, ), @@ -20,18 +21,20 @@ def test_ComposeMultiTransform_inputs(): usedefault=True, ), output_transform=dict( - argstr='%s', + argstr="%s", + extensions=None, keep_extension=True, - name_source=['transforms'], - name_template='%s_composed', + name_source=["transforms"], + name_template="%s_composed", position=1, ), reference_image=dict( - argstr='%s', + argstr="%s", 
+ extensions=None, position=2, ), transforms=dict( - argstr='%s', + argstr="%s", mandatory=True, position=3, ), @@ -41,8 +44,14 @@ def test_ComposeMultiTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComposeMultiTransform_outputs(): - output_map = dict(output_transform=dict(), ) + output_map = dict( + output_transform=dict( + extensions=None, + ), + ) outputs = ComposeMultiTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_CompositeTransformUtil.py b/nipype/interfaces/ants/tests/test_auto_CompositeTransformUtil.py index d2e6f943e2..54b428db2a 100644 --- a/nipype/interfaces/ants/tests/test_auto_CompositeTransformUtil.py +++ b/nipype/interfaces/ants/tests/test_auto_CompositeTransformUtil.py @@ -1,17 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import CompositeTransformUtil def test_CompositeTransformUtil_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s...', + argstr="%s...", mandatory=True, position=3, ), @@ -20,16 +21,17 @@ def test_CompositeTransformUtil_inputs(): usedefault=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, ), output_prefix=dict( - argstr='%s', + argstr="%s", position=4, usedefault=True, ), process=dict( - argstr='--%s', + argstr="--%s", position=1, usedefault=True, ), @@ -39,11 +41,19 @@ def test_CompositeTransformUtil_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CompositeTransformUtil_outputs(): output_map = dict( - affine_transform=dict(), - displacement_field=dict(), - out_file=dict(), + affine_transform=dict( + extensions=None, + ), + displacement_field=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), ) outputs = CompositeTransformUtil.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py b/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py index ea9a16cbe2..0ffdaef6fc 100644 --- a/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py +++ b/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py @@ -1,24 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..visualization import ConvertScalarImageToRGB def test_ConvertScalarImageToRGB_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), colormap=dict( - argstr='%s', + argstr="%s", mandatory=True, position=4, - usedefault=True, ), custom_color_map_file=dict( - argstr='%s', + argstr="%s", position=5, usedefault=True, ), dimension=dict( - argstr='%d', + argstr="%d", mandatory=True, position=0, usedefault=True, @@ -28,32 +28,33 @@ def test_ConvertScalarImageToRGB_inputs(): usedefault=True, ), input_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), mask_image=dict( - argstr='%s', + argstr="%s", position=3, usedefault=True, ), maximum_RGB_output=dict( - argstr='%d', + argstr="%d", position=9, usedefault=True, ), maximum_input=dict( - argstr='%d', + argstr="%d", mandatory=True, position=7, ), minimum_RGB_output=dict( - argstr='%d', + argstr="%d", 
position=8, usedefault=True, ), minimum_input=dict( - argstr='%d', + argstr="%d", mandatory=True, position=6, ), @@ -62,7 +63,7 @@ def test_ConvertScalarImageToRGB_inputs(): usedefault=True, ), output_image=dict( - argstr='%s', + argstr="%s", position=2, usedefault=True, ), @@ -72,8 +73,14 @@ def test_ConvertScalarImageToRGB_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ConvertScalarImageToRGB_outputs(): - output_map = dict(output_image=dict(), ) + output_map = dict( + output_image=dict( + extensions=None, + ), + ) outputs = ConvertScalarImageToRGB.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_CorticalThickness.py b/nipype/interfaces/ants/tests/test_auto_CorticalThickness.py index e2595a710f..a42551788b 100644 --- a/nipype/interfaces/ants/tests/test_auto_CorticalThickness.py +++ b/nipype/interfaces/ants/tests/test_auto_CorticalThickness.py @@ -1,86 +1,144 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..segmentation import CorticalThickness def test_CorticalThickness_inputs(): input_map = dict( anatomical_image=dict( - argstr='-a %s', + argstr="-a %s", + extensions=None, mandatory=True, ), - args=dict(argstr='%s', ), - b_spline_smoothing=dict(argstr='-v', ), + args=dict( + argstr="%s", + ), + b_spline_smoothing=dict( + argstr="-v", + ), brain_probability_mask=dict( - argstr='-m %s', + argstr="-m %s", copyfile=False, + extensions=None, mandatory=True, ), brain_template=dict( - argstr='-e %s', + argstr="-e %s", + extensions=None, mandatory=True, ), - cortical_label_image=dict(), - debug=dict(argstr='-z 1', ), + cortical_label_image=dict( + extensions=None, + ), + debug=dict( + argstr="-z 1", + ), dimension=dict( - argstr='-d %d', + argstr="-d %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), - extraction_registration_mask=dict(argstr='-f %s', ), + extraction_registration_mask=dict( + argstr="-f %s", + extensions=None, + ), image_suffix=dict( - argstr='-s %s', + argstr="-s %s", usedefault=True, ), - keep_temporary_files=dict(argstr='-k %d', ), - label_propagation=dict(argstr='-l %s', ), - max_iterations=dict(argstr='-i %d', ), + keep_temporary_files=dict( + argstr="-k %d", + ), + label_propagation=dict( + argstr="-l %s", + ), + max_iterations=dict( + argstr="-i %d", + ), num_threads=dict( nohash=True, usedefault=True, ), out_prefix=dict( - argstr='-o %s', + argstr="-o %s", usedefault=True, ), - posterior_formulation=dict(argstr='-b %s', ), - prior_segmentation_weight=dict(argstr='-w %f', ), - quick_registration=dict(argstr='-q 1', ), - segmentation_iterations=dict(argstr='-n %d', ), + posterior_formulation=dict( + argstr="-b %s", + ), + prior_segmentation_weight=dict( + argstr="-w %f", + ), + quick_registration=dict( + argstr="-q 1", + ), + segmentation_iterations=dict( + argstr="-n %d", + ), segmentation_priors=dict( - argstr='-p %s', + argstr="-p %s", mandatory=True, ), t1_registration_template=dict( - argstr='-t %s', + argstr="-t %s", + extensions=None, mandatory=True, ), - use_floatingpoint_precision=dict(argstr='-j %d', ), - use_random_seeding=dict(argstr='-u %d', ), + use_floatingpoint_precision=dict( + argstr="-j %d", + ), + use_random_seeding=dict( + argstr="-u %d", + ), ) inputs = CorticalThickness.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_CorticalThickness_outputs(): output_map = dict( - BrainExtractionMask=dict(), - BrainSegmentation=dict(), - BrainSegmentationN4=dict(), + BrainExtractionMask=dict( + extensions=None, + ), + BrainSegmentation=dict( + extensions=None, + ), + BrainSegmentationN4=dict( + extensions=None, + ), BrainSegmentationPosteriors=dict(), - BrainVolumes=dict(), - CorticalThickness=dict(), - CorticalThicknessNormedToTemplate=dict(), - ExtractedBrainN4=dict(), - SubjectToTemplate0GenericAffine=dict(), - SubjectToTemplate1Warp=dict(), - SubjectToTemplateLogJacobian=dict(), - TemplateToSubject0Warp=dict(), - TemplateToSubject1GenericAffine=dict(), + BrainVolumes=dict( + extensions=None, + ), + CorticalThickness=dict( + extensions=None, + ), + CorticalThicknessNormedToTemplate=dict( + extensions=None, + ), + ExtractedBrainN4=dict( + extensions=None, + ), + SubjectToTemplate0GenericAffine=dict( + extensions=None, + ), + SubjectToTemplate1Warp=dict( + extensions=None, + ), + SubjectToTemplateLogJacobian=dict( + extensions=None, + ), + TemplateToSubject0Warp=dict( + extensions=None, + ), + TemplateToSubject1GenericAffine=dict( + extensions=None, + ), ) outputs = CorticalThickness.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_CreateJacobianDeterminantImage.py b/nipype/interfaces/ants/tests/test_auto_CreateJacobianDeterminantImage.py index b32e7b98e0..197ec2ad23 100644 --- a/nipype/interfaces/ants/tests/test_auto_CreateJacobianDeterminantImage.py +++ b/nipype/interfaces/ants/tests/test_auto_CreateJacobianDeterminantImage.py @@ -1,18 +1,20 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import CreateJacobianDeterminantImage def test_CreateJacobianDeterminantImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), deformationField=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), doLogJacobian=dict( - argstr='%d', + argstr="%d", position=3, ), environ=dict( @@ -20,7 +22,7 @@ def test_CreateJacobianDeterminantImage_inputs(): usedefault=True, ), imageDimension=dict( - argstr='%d', + argstr="%d", mandatory=True, position=0, ), @@ -29,12 +31,13 @@ def test_CreateJacobianDeterminantImage_inputs(): usedefault=True, ), outputImage=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), useGeometric=dict( - argstr='%d', + argstr="%d", position=4, ), ) @@ -43,8 +46,14 @@ def test_CreateJacobianDeterminantImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CreateJacobianDeterminantImage_outputs(): - output_map = dict(jacobian_image=dict(), ) + output_map = dict( + jacobian_image=dict( + extensions=None, + ), + ) outputs = CreateJacobianDeterminantImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_CreateTiledMosaic.py b/nipype/interfaces/ants/tests/test_auto_CreateTiledMosaic.py index 74c2b0a7c2..6516b03ad7 100644 --- a/nipype/interfaces/ants/tests/test_auto_CreateTiledMosaic.py +++ b/nipype/interfaces/ants/tests/test_auto_CreateTiledMosaic.py @@ -1,47 +1,73 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..visualization import CreateTiledMosaic def test_CreateTiledMosaic_inputs(): input_map = dict( - alpha_value=dict(argstr='-a %.2f', ), - 
args=dict(argstr='%s', ), - direction=dict(argstr='-d %d', ), + alpha_value=dict( + argstr="-a %.2f", + ), + args=dict( + argstr="%s", + ), + direction=dict( + argstr="-d %d", + ), environ=dict( nohash=True, usedefault=True, ), - flip_slice=dict(argstr='-f %s', ), + flip_slice=dict( + argstr="-f %s", + ), input_image=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), - mask_image=dict(argstr='-x %s', ), + mask_image=dict( + argstr="-x %s", + extensions=None, + ), num_threads=dict( nohash=True, usedefault=True, ), output_image=dict( - argstr='-o %s', + argstr="-o %s", usedefault=True, ), - pad_or_crop=dict(argstr='-p %s', ), - permute_axes=dict(argstr='-g', ), + pad_or_crop=dict( + argstr="-p %s", + ), + permute_axes=dict( + argstr="-g", + ), rgb_image=dict( - argstr='-r %s', + argstr="-r %s", + extensions=None, mandatory=True, ), - slices=dict(argstr='-s %s', ), - tile_geometry=dict(argstr='-t %s', ), + slices=dict( + argstr="-s %s", + ), + tile_geometry=dict( + argstr="-t %s", + ), ) inputs = CreateTiledMosaic.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CreateTiledMosaic_outputs(): - output_map = dict(output_image=dict(), ) + output_map = dict( + output_image=dict( + extensions=None, + ), + ) outputs = CreateTiledMosaic.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py b/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py index 882cb21854..7af4764633 100644 --- a/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py +++ b/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py @@ -1,28 +1,33 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..segmentation import DenoiseImage def test_DenoiseImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict(argstr='-d %d', ), + args=dict( + argstr="%s", + ), + dimension=dict( + argstr="-d %d", + ), environ=dict( nohash=True, usedefault=True, ), input_image=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), noise_image=dict( + extensions=None, hash_files=False, keep_extension=True, - name_source=['input_image'], - name_template='%s_noise', + name_source=["input_image"], + name_template="%s_noise", ), noise_model=dict( - argstr='-n %s', + argstr="-n %s", usedefault=True, ), num_threads=dict( @@ -30,32 +35,41 @@ def test_DenoiseImage_inputs(): usedefault=True, ), output_image=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, hash_files=False, keep_extension=True, - name_source=['input_image'], - name_template='%s_noise_corrected', + name_source=["input_image"], + name_template="%s_noise_corrected", ), save_noise=dict( mandatory=True, usedefault=True, - xor=['noise_image'], + xor=["noise_image"], ), shrink_factor=dict( - argstr='-s %s', + argstr="-s %s", usedefault=True, ), - verbose=dict(argstr='-v', ), + verbose=dict( + argstr="-v", + ), ) inputs = DenoiseImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DenoiseImage_outputs(): output_map = dict( - noise_image=dict(), - output_image=dict(), + noise_image=dict( + extensions=None, + ), + output_image=dict( + extensions=None, + ), ) outputs = DenoiseImage.output_spec() diff --git 
a/nipype/interfaces/ants/tests/test_auto_GenWarpFields.py b/nipype/interfaces/ants/tests/test_auto_GenWarpFields.py index af91f9a8af..6fde6f5b44 100644 --- a/nipype/interfaces/ants/tests/test_auto_GenWarpFields.py +++ b/nipype/interfaces/ants/tests/test_auto_GenWarpFields.py @@ -1,14 +1,17 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..legacy import GenWarpFields def test_GenWarpFields_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bias_field_correction=dict(argstr='-n 1', ), + args=dict( + argstr="%s", + ), + bias_field_correction=dict( + argstr="-n 1", + ), dimension=dict( - argstr='-d %d', + argstr="-d %d", position=1, usedefault=True, ), @@ -16,34 +19,44 @@ def test_GenWarpFields_inputs(): nohash=True, usedefault=True, ), - force_proceed=dict(argstr='-f 1', ), + force_proceed=dict( + argstr="-f 1", + ), input_image=dict( - argstr='-i %s', + argstr="-i %s", copyfile=False, + extensions=None, mandatory=True, ), - inverse_warp_template_labels=dict(argstr='-l', ), + inverse_warp_template_labels=dict( + argstr="-l", + ), max_iterations=dict( - argstr='-m %s', - sep='x', + argstr="-m %s", + sep="x", ), num_threads=dict( nohash=True, usedefault=True, ), out_prefix=dict( - argstr='-o %s', + argstr="-o %s", usedefault=True, ), - quality_check=dict(argstr='-q 1', ), + quality_check=dict( + argstr="-q 1", + ), reference_image=dict( - argstr='-r %s', + argstr="-r %s", copyfile=True, + extensions=None, mandatory=True, ), - similarity_metric=dict(argstr='-s %s', ), + similarity_metric=dict( + argstr="-s %s", + ), transformation_model=dict( - argstr='-t %s', + argstr="-t %s", usedefault=True, ), ) @@ -52,13 +65,25 @@ def test_GenWarpFields_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenWarpFields_outputs(): output_map = dict( - affine_transformation=dict(), - input_file=dict(), - inverse_warp_field=dict(), - output_file=dict(), - warp_field=dict(), + affine_transformation=dict( + extensions=None, + ), + input_file=dict( + extensions=None, + ), + inverse_warp_field=dict( + extensions=None, + ), + output_file=dict( + extensions=None, + ), + warp_field=dict( + extensions=None, + ), ) outputs = GenWarpFields.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_ImageMath.py b/nipype/interfaces/ants/tests/test_auto_ImageMath.py new file mode 100644 index 0000000000..d720233eb4 --- /dev/null +++ b/nipype/interfaces/ants/tests/test_auto_ImageMath.py @@ -0,0 +1,68 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import ImageMath + + +def test_ImageMath_inputs(): + input_map = dict( + args=dict( + argstr="%s", + position=-1, + ), + copy_header=dict( + usedefault=True, + ), + dimension=dict( + argstr="%d", + position=1, + usedefault=True, + ), + environ=dict( + nohash=True, + usedefault=True, + ), + num_threads=dict( + nohash=True, + usedefault=True, + ), + op1=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-3, + ), + op2=dict( + argstr="%s", + position=-2, + ), + operation=dict( + argstr="%s", + mandatory=True, + position=3, + ), + output_image=dict( + argstr="%s", + extensions=None, + keep_extension=True, + name_source=["op1"], + name_template="%s_maths", + position=2, + ), + ) + inputs = ImageMath.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == 
value + + +def test_ImageMath_outputs(): + output_map = dict( + output_image=dict( + extensions=None, + ), + ) + outputs = ImageMath.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/ants/tests/test_auto_JointFusion.py b/nipype/interfaces/ants/tests/test_auto_JointFusion.py index 9e82584729..f234ceea7c 100644 --- a/nipype/interfaces/ants/tests/test_auto_JointFusion.py +++ b/nipype/interfaces/ants/tests/test_auto_JointFusion.py @@ -1,73 +1,97 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..segmentation import JointFusion def test_JointFusion_inputs(): input_map = dict( alpha=dict( - requires=['method'], + argstr="-a %s", usedefault=True, ), - args=dict(argstr='%s', ), - atlas_group_id=dict(argstr='-gp %d...', ), - atlas_group_weights=dict(argstr='-gpw %d...', ), + args=dict( + argstr="%s", + ), + atlas_image=dict( + argstr="-g %s...", + mandatory=True, + ), + atlas_segmentation_image=dict( + argstr="-l %s...", + mandatory=True, + ), beta=dict( - requires=['method'], + argstr="-b %s", usedefault=True, ), - dimension=dict( - argstr='%d', - mandatory=True, - position=0, + constrain_nonnegative=dict( + argstr="-c", usedefault=True, ), + dimension=dict( + argstr="-d %d", + ), environ=dict( nohash=True, usedefault=True, ), - exclusion_region=dict(argstr='-x %s', ), - method=dict( - argstr='-m %s', - usedefault=True, + exclusion_image=dict(), + exclusion_image_label=dict( + argstr="-e %s", + requires=["exclusion_image"], ), - modalities=dict( - argstr='%d', - mandatory=True, - position=1, + mask_image=dict( + argstr="-x %s", + extensions=None, ), num_threads=dict( nohash=True, usedefault=True, ), - output_label_image=dict( - argstr='%s', - mandatory=True, - name_template='%s', - output_name='output_label_image', - position=-1, + out_atlas_voting_weight_name_format=dict( + requires=[ + "out_label_fusion", + "out_intensity_fusion_name_format", + "out_label_post_prob_name_format", + ], + ), + out_intensity_fusion_name_format=dict( + argstr="", + ), + out_label_fusion=dict( + argstr="%s", + extensions=None, + hash_files=False, + ), + out_label_post_prob_name_format=dict( + requires=["out_label_fusion", "out_intensity_fusion_name_format"], + ), + patch_metric=dict( + argstr="-m %s", ), patch_radius=dict( - argstr='-rp %s', + argstr="-p %s", maxlen=3, minlen=3, ), + retain_atlas_voting_images=dict( + argstr="-f", + usedefault=True, + ), + retain_label_posterior_images=dict( + argstr="-r", + requires=["atlas_segmentation_image"], + usedefault=True, + ), search_radius=dict( - argstr='-rs %s', - maxlen=3, - minlen=3, + argstr="-s %s", + usedefault=True, ), target_image=dict( - argstr='-tg %s...', + argstr="-t %s", mandatory=True, ), - warped_intensity_images=dict( - argstr='-g %s...', - mandatory=True, - ), - warped_label_images=dict( - argstr='-l %s...', - mandatory=True, + verbose=dict( + argstr="-v", ), ) inputs = JointFusion.input_spec() @@ -75,8 +99,17 @@ def test_JointFusion_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JointFusion_outputs(): - output_map = dict(output_label_image=dict(), ) + output_map = dict( + out_atlas_voting_weight=dict(), + out_intensity_fusion=dict(), + out_label_fusion=dict( + extensions=None, + ), + out_label_post_prob=dict(), + ) outputs = 
JointFusion.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_KellyKapowski.py b/nipype/interfaces/ants/tests/test_auto_KellyKapowski.py index 6129670d39..00c1ec53a9 100644 --- a/nipype/interfaces/ants/tests/test_auto_KellyKapowski.py +++ b/nipype/interfaces/ants/tests/test_auto_KellyKapowski.py @@ -1,24 +1,26 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..segmentation import KellyKapowski def test_KellyKapowski_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), convergence=dict( argstr='--convergence "%s"', usedefault=True, ), cortical_thickness=dict( argstr='--output "%s"', + extensions=None, hash_files=False, keep_extension=True, - name_source=['segmentation_image'], - name_template='%s_cortical_thickness', + name_source=["segmentation_image"], + name_template="%s_cortical_thickness", ), dimension=dict( - argstr='--image-dimensionality %d', + argstr="--image-dimensionality %d", usedefault=True, ), environ=dict( @@ -26,15 +28,18 @@ def test_KellyKapowski_inputs(): usedefault=True, ), gradient_step=dict( - argstr='--gradient-step %f', + argstr="--gradient-step %f", + usedefault=True, + ), + gray_matter_label=dict( usedefault=True, ), - gray_matter_label=dict(usedefault=True, ), gray_matter_prob_image=dict( - argstr='--gray-matter-probability-image "%s"', ), + argstr='--gray-matter-probability-image "%s"', + extensions=None, + ), max_invert_displacement_field_iters=dict( - argstr= - '--maximum-number-of-invert-displacement-field-iterations %d', + argstr="--maximum-number-of-invert-displacement-field-iterations %d", usedefault=True, ), num_threads=dict( @@ -42,46 +47,63 @@ def test_KellyKapowski_inputs(): usedefault=True, ), number_integration_points=dict( - argstr='--number-of-integration-points %d', + argstr="--number-of-integration-points %d", usedefault=True, ), segmentation_image=dict( argstr='--segmentation-image "%s"', + extensions=None, mandatory=True, ), smoothing_variance=dict( - argstr='--smoothing-variance %f', + argstr="--smoothing-variance %f", usedefault=True, ), smoothing_velocity_field=dict( - argstr='--smoothing-velocity-field-parameter %f', + argstr="--smoothing-velocity-field-parameter %f", usedefault=True, ), thickness_prior_estimate=dict( - argstr='--thickness-prior-estimate %f', + argstr="--thickness-prior-estimate %f", usedefault=True, ), - thickness_prior_image=dict(argstr='--thickness-prior-image "%s"', ), - use_bspline_smoothing=dict(argstr='--use-bspline-smoothing 1', ), + thickness_prior_image=dict( + argstr='--thickness-prior-image "%s"', + extensions=None, + ), + use_bspline_smoothing=dict( + argstr="--use-bspline-smoothing 1", + ), warped_white_matter=dict( + extensions=None, hash_files=False, keep_extension=True, - name_source=['segmentation_image'], - name_template='%s_warped_white_matter', + name_source=["segmentation_image"], + name_template="%s_warped_white_matter", + ), + white_matter_label=dict( + usedefault=True, ), - white_matter_label=dict(usedefault=True, ), white_matter_prob_image=dict( - argstr='--white-matter-probability-image "%s"', ), + argstr='--white-matter-probability-image "%s"', + extensions=None, + ), ) inputs = KellyKapowski.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_KellyKapowski_outputs(): output_map = dict( - cortical_thickness=dict(), - 
warped_white_matter=dict(), + cortical_thickness=dict( + extensions=None, + ), + warped_white_matter=dict( + extensions=None, + ), ) outputs = KellyKapowski.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_LabelGeometry.py b/nipype/interfaces/ants/tests/test_auto_LabelGeometry.py index a5de244711..306c7aa17e 100644 --- a/nipype/interfaces/ants/tests/test_auto_LabelGeometry.py +++ b/nipype/interfaces/ants/tests/test_auto_LabelGeometry.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import LabelGeometry def test_LabelGeometry_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='%d', + argstr="%d", position=0, usedefault=True, ), @@ -16,13 +17,15 @@ def test_LabelGeometry_inputs(): usedefault=True, ), intensity_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, usedefault=True, ), label_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), @@ -31,9 +34,9 @@ def test_LabelGeometry_inputs(): usedefault=True, ), output_file=dict( - argstr='%s', - name_source=['label_image'], - name_template='%s.csv', + argstr="%s", + name_source=["label_image"], + name_template="%s.csv", position=3, ), ) @@ -42,8 +45,14 @@ def test_LabelGeometry_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LabelGeometry_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = LabelGeometry.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_LaplacianThickness.py b/nipype/interfaces/ants/tests/test_auto_LaplacianThickness.py index 1bb82f0e33..f6a8ffde25 100644 --- a/nipype/interfaces/ants/tests/test_auto_LaplacianThickness.py +++ b/nipype/interfaces/ants/tests/test_auto_LaplacianThickness.py @@ -1,28 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..segmentation import LaplacianThickness def test_LaplacianThickness_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dT=dict( - argstr='%f', + argstr="%s", position=6, + requires=["prior_thickness"], ), environ=dict( nohash=True, usedefault=True, ), input_gm=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, mandatory=True, position=2, ), input_wm=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, mandatory=True, position=1, ), @@ -31,28 +35,31 @@ def test_LaplacianThickness_inputs(): usedefault=True, ), output_image=dict( - argstr='%s', + argstr="%s", hash_files=False, keep_extension=True, - name_source=['input_wm'], - name_template='%s_thickness', + name_source=["input_wm"], + name_template="%s_thickness", position=3, ), prior_thickness=dict( - argstr='%f', + argstr="%s", position=5, + requires=["smooth_param"], ), smooth_param=dict( - argstr='%f', + argstr="%s", position=4, ), sulcus_prior=dict( - argstr='%f', + argstr="%s", position=7, + requires=["dT"], ), tolerance=dict( - argstr='%f', + argstr="%s", position=8, + requires=["sulcus_prior"], ), ) inputs = LaplacianThickness.input_spec() @@ -60,8 +67,14 @@ def test_LaplacianThickness_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_LaplacianThickness_outputs(): - output_map = dict(output_image=dict(), ) + output_map = dict( + output_image=dict( + extensions=None, + ), + ) outputs = LaplacianThickness.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_MeasureImageSimilarity.py b/nipype/interfaces/ants/tests/test_auto_MeasureImageSimilarity.py index 1a5041ae74..05279d8017 100644 --- a/nipype/interfaces/ants/tests/test_auto_MeasureImageSimilarity.py +++ b/nipype/interfaces/ants/tests/test_auto_MeasureImageSimilarity.py @@ -1,45 +1,58 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import MeasureImageSimilarity def test_MeasureImageSimilarity_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='--dimensionality %d', + argstr="--dimensionality %d", position=1, ), environ=dict( nohash=True, usedefault=True, ), - fixed_image=dict(mandatory=True, ), - fixed_image_mask=dict(argstr='%s', ), + fixed_image=dict( + extensions=None, + mandatory=True, + ), + fixed_image_mask=dict( + argstr="%s", + extensions=None, + ), metric=dict( - argstr='%s', + argstr="%s", mandatory=True, ), metric_weight=dict( - requires=['metric'], + requires=["metric"], usedefault=True, ), - moving_image=dict(mandatory=True, ), - moving_image_mask=dict(requires=['fixed_image_mask'], ), + moving_image=dict( + extensions=None, + mandatory=True, + ), + moving_image_mask=dict( + extensions=None, + requires=["fixed_image_mask"], + ), num_threads=dict( nohash=True, usedefault=True, ), radius_or_number_of_bins=dict( mandatory=True, - requires=['metric'], + requires=["metric"], ), sampling_percentage=dict( mandatory=True, - requires=['metric'], + requires=["metric"], ), sampling_strategy=dict( - requires=['metric'], + requires=["metric"], usedefault=True, ), ) @@ -48,8 +61,12 @@ def test_MeasureImageSimilarity_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MeasureImageSimilarity_outputs(): - output_map = dict(similarity=dict(), ) + output_map = dict( + similarity=dict(), + ) outputs = MeasureImageSimilarity.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_MultiplyImages.py b/nipype/interfaces/ants/tests/test_auto_MultiplyImages.py index 1bf787018d..89f8fa60ae 100644 --- a/nipype/interfaces/ants/tests/test_auto_MultiplyImages.py +++ b/nipype/interfaces/ants/tests/test_auto_MultiplyImages.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MultiplyImages def test_MultiplyImages_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='%d', + argstr="%d", mandatory=True, position=0, ), @@ -16,7 +17,8 @@ def test_MultiplyImages_inputs(): usedefault=True, ), first_input=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), @@ -25,12 +27,13 @@ def test_MultiplyImages_inputs(): usedefault=True, ), output_product_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=3, ), second_input=dict( - argstr='%s', + argstr="%s", mandatory=True, position=2, ), @@ -40,8 +43,14 @@ def test_MultiplyImages_inputs(): for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultiplyImages_outputs(): - output_map = dict(output_product_image=dict(), ) + output_map = dict( + output_product_image=dict( + extensions=None, + ), + ) outputs = MultiplyImages.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py b/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py index 66edf0c3d6..59775df2ea 100644 --- a/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py +++ b/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py @@ -1,59 +1,96 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..segmentation import N4BiasFieldCorrection def test_N4BiasFieldCorrection_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bias_image=dict(hash_files=False, ), - bspline_fitting_distance=dict(argstr='--bspline-fitting %s', ), - bspline_order=dict(requires=['bspline_fitting_distance'], ), - convergence_threshold=dict(requires=['n_iterations'], ), + args=dict( + argstr="%s", + ), + bias_image=dict( + extensions=None, + hash_files=False, + ), + bspline_fitting_distance=dict( + argstr="--bspline-fitting %s", + ), + bspline_order=dict( + requires=["bspline_fitting_distance"], + ), + convergence_threshold=dict( + requires=["n_iterations"], + ), copy_header=dict( mandatory=True, usedefault=True, ), dimension=dict( - argstr='-d %d', + argstr="-d %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), + histogram_sharpening=dict( + argstr="--histogram-sharpening [%g,%g,%d]", + ), input_image=dict( - argstr='--input-image %s', + argstr="--input-image %s", + extensions=None, mandatory=True, ), - mask_image=dict(argstr='--mask-image %s', ), - n_iterations=dict(argstr='--convergence %s', ), + mask_image=dict( + argstr="--mask-image %s", + extensions=None, + ), + n_iterations=dict( + argstr="--convergence %s", + ), num_threads=dict( nohash=True, usedefault=True, ), output_image=dict( - argstr='--output %s', - genfile=True, + argstr="--output %s", hash_files=False, + keep_extension=True, + name_source=["input_image"], + name_template="%s_corrected", + ), + rescale_intensities=dict( + argstr="-r", + min_ver="2.1.0", + usedefault=True, ), save_bias=dict( mandatory=True, usedefault=True, - xor=['bias_image'], + xor=["bias_image"], + ), + shrink_factor=dict( + argstr="--shrink-factor %d", + ), + weight_image=dict( + argstr="--weight-image %s", + extensions=None, ), - shrink_factor=dict(argstr='--shrink-factor %d', ), - weight_image=dict(argstr='--weight-image %s', ), ) inputs = N4BiasFieldCorrection.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_N4BiasFieldCorrection_outputs(): output_map = dict( - bias_image=dict(), - output_image=dict(), + bias_image=dict( + extensions=None, + ), + output_image=dict( + extensions=None, + ), ) outputs = N4BiasFieldCorrection.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_Registration.py b/nipype/interfaces/ants/tests/test_auto_Registration.py index 4bd253c3d0..814948835c 100644 --- a/nipype/interfaces/ants/tests/test_auto_Registration.py +++ b/nipype/interfaces/ants/tests/test_auto_Registration.py @@ -1,82 +1,95 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import 
unicode_literals from ..registration import Registration def test_Registration_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), collapse_output_transforms=dict( - argstr='--collapse-output-transforms %d', + argstr="--collapse-output-transforms %d", usedefault=True, ), convergence_threshold=dict( - requires=['number_of_iterations'], + requires=["number_of_iterations"], usedefault=True, ), convergence_window_size=dict( - requires=['convergence_threshold'], + requires=["convergence_threshold"], usedefault=True, ), dimension=dict( - argstr='--dimensionality %d', + argstr="--dimensionality %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), - fixed_image=dict(mandatory=True, ), + fixed_image=dict( + mandatory=True, + ), fixed_image_mask=dict( - argstr='%s', - max_ver='2.1.0', - xor=['fixed_image_masks'], + argstr="%s", + extensions=None, + max_ver="2.1.0", + xor=["fixed_image_masks"], ), fixed_image_masks=dict( - min_ver='2.2.0', - xor=['fixed_image_mask'], + min_ver="2.2.0", + xor=["fixed_image_mask"], + ), + float=dict( + argstr="--float %d", ), - float=dict(argstr='--float %d', ), initial_moving_transform=dict( - argstr='%s', - xor=['initial_moving_transform_com'], + argstr="%s", + xor=["initial_moving_transform_com"], ), initial_moving_transform_com=dict( - argstr='%s', - xor=['initial_moving_transform'], + argstr="%s", + xor=["initial_moving_transform"], ), initialize_transforms_per_stage=dict( - argstr='--initialize-transforms-per-stage %d', + argstr="--initialize-transforms-per-stage %d", usedefault=True, ), interpolation=dict( - argstr='%s', + argstr="%s", usedefault=True, ), interpolation_parameters=dict(), invert_initial_moving_transform=dict( - requires=['initial_moving_transform'], - xor=['initial_moving_transform_com'], + requires=["initial_moving_transform"], + xor=["initial_moving_transform_com"], + ), + metric=dict( + mandatory=True, ), - metric=dict(mandatory=True, ), metric_item_trait=dict(), metric_stage_trait=dict(), metric_weight=dict( mandatory=True, - requires=['metric'], + requires=["metric"], + usedefault=True, + ), + metric_weight_item_trait=dict( usedefault=True, ), - metric_weight_item_trait=dict(usedefault=True, ), metric_weight_stage_trait=dict(), - moving_image=dict(mandatory=True, ), + moving_image=dict( + mandatory=True, + ), moving_image_mask=dict( - max_ver='2.1.0', - requires=['fixed_image_mask'], - xor=['moving_image_masks'], + extensions=None, + max_ver="2.1.0", + requires=["fixed_image_mask"], + xor=["moving_image_masks"], ), moving_image_masks=dict( - min_ver='2.2.0', - xor=['moving_image_mask'], + min_ver="2.2.0", + xor=["moving_image_mask"], ), num_threads=dict( nohash=True, @@ -85,52 +98,79 @@ def test_Registration_inputs(): number_of_iterations=dict(), output_inverse_warped_image=dict( hash_files=False, - requires=['output_warped_image'], + requires=["output_warped_image"], ), output_transform_prefix=dict( - argstr='%s', + argstr="%s", + usedefault=True, + ), + output_warped_image=dict( + hash_files=False, + ), + radius_bins_item_trait=dict( usedefault=True, ), - output_warped_image=dict(hash_files=False, ), - radius_bins_item_trait=dict(usedefault=True, ), radius_bins_stage_trait=dict(), radius_or_number_of_bins=dict( - requires=['metric_weight'], + requires=["metric_weight"], usedefault=True, ), - restore_state=dict(argstr='--restore-state %s', ), + random_seed=dict( + argstr="--random-seed %d", + min_ver="2.3.0", + ), + restore_state=dict( + argstr="--restore-state %s", + extensions=None, 
+ ), restrict_deformation=dict(), - sampling_percentage=dict(requires=['sampling_strategy'], ), + sampling_percentage=dict( + requires=["sampling_strategy"], + ), sampling_percentage_item_trait=dict(), sampling_percentage_stage_trait=dict(), - sampling_strategy=dict(requires=['metric_weight'], ), + sampling_strategy=dict( + requires=["metric_weight"], + ), sampling_strategy_item_trait=dict(), sampling_strategy_stage_trait=dict(), - save_state=dict(argstr='--save-state %s', ), - shrink_factors=dict(mandatory=True, ), - sigma_units=dict(requires=['smoothing_sigmas'], ), - smoothing_sigmas=dict(mandatory=True, ), + save_state=dict( + argstr="--save-state %s", + extensions=None, + ), + shrink_factors=dict( + mandatory=True, + ), + sigma_units=dict( + requires=["smoothing_sigmas"], + ), + smoothing_sigmas=dict( + mandatory=True, + ), transform_parameters=dict(), transforms=dict( - argstr='%s', + argstr="%s", mandatory=True, ), use_estimate_learning_rate_once=dict(), - use_histogram_matching=dict(usedefault=True, ), + use_histogram_matching=dict( + usedefault=True, + ), verbose=dict( - argstr='-v', + argstr="-v", + nohash=True, usedefault=True, ), winsorize_lower_quantile=dict( - argstr='%s', + argstr="%s", usedefault=True, ), winsorize_upper_quantile=dict( - argstr='%s', + argstr="%s", usedefault=True, ), write_composite_transform=dict( - argstr='--write-composite-transform %d', + argstr="--write-composite-transform %d", usedefault=True, ), ) @@ -139,19 +179,33 @@ def test_Registration_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Registration_outputs(): output_map = dict( - composite_transform=dict(), + composite_transform=dict( + extensions=None, + ), elapsed_time=dict(), forward_invert_flags=dict(), forward_transforms=dict(), - inverse_composite_transform=dict(), - inverse_warped_image=dict(), + inverse_composite_transform=dict( + extensions=None, + ), + inverse_warped_image=dict( + extensions=None, + ), metric_value=dict(), + reverse_forward_invert_flags=dict(), + reverse_forward_transforms=dict(), reverse_invert_flags=dict(), reverse_transforms=dict(), - save_state=dict(), - warped_image=dict(), + save_state=dict( + extensions=None, + ), + warped_image=dict( + extensions=None, + ), ) outputs = Registration.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py b/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py index 8bc79392e1..c799f47299 100644 --- a/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py +++ b/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import RegistrationSynQuick def test_RegistrationSynQuick_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='-d %d', + argstr="-d %d", usedefault=True, ), environ=dict( @@ -15,51 +16,69 @@ def test_RegistrationSynQuick_inputs(): usedefault=True, ), fixed_image=dict( - argstr='-f %s...', + argstr="-f %s...", mandatory=True, ), histogram_bins=dict( - argstr='-r %d', + argstr="-r %d", usedefault=True, ), moving_image=dict( - argstr='-m %s...', + argstr="-m %s...", mandatory=True, ), num_threads=dict( - argstr='-n %d', + argstr="-n %d", usedefault=True, ), output_prefix=dict( - argstr='-o %s', + argstr="-o %s", usedefault=True, ), precision_type=dict( - 
argstr='-p %s', + argstr="-p %s", usedefault=True, ), + random_seed=dict( + argstr="-e %d", + min_ver="2.3.0", + ), spline_distance=dict( - argstr='-s %d', + argstr="-s %d", usedefault=True, ), transform_type=dict( - argstr='-t %s', + argstr="-t %s", usedefault=True, ), - use_histogram_matching=dict(argstr='-j %d', ), + use_histogram_matching=dict( + argstr="-j %d", + ), ) inputs = RegistrationSynQuick.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegistrationSynQuick_outputs(): output_map = dict( - forward_warp_field=dict(), - inverse_warp_field=dict(), - inverse_warped_image=dict(), - out_matrix=dict(), - warped_image=dict(), + forward_warp_field=dict( + extensions=None, + ), + inverse_warp_field=dict( + extensions=None, + ), + inverse_warped_image=dict( + extensions=None, + ), + out_matrix=dict( + extensions=None, + ), + warped_image=dict( + extensions=None, + ), ) outputs = RegistrationSynQuick.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_ResampleImageBySpacing.py b/nipype/interfaces/ants/tests/test_auto_ResampleImageBySpacing.py new file mode 100644 index 0000000000..9bf5c6c6a6 --- /dev/null +++ b/nipype/interfaces/ants/tests/test_auto_ResampleImageBySpacing.py @@ -0,0 +1,74 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import ResampleImageBySpacing + + +def test_ResampleImageBySpacing_inputs(): + input_map = dict( + addvox=dict( + argstr="%d", + position=6, + requires=["apply_smoothing"], + ), + apply_smoothing=dict( + argstr="%d", + position=5, + ), + args=dict( + argstr="%s", + ), + dimension=dict( + argstr="%d", + position=1, + usedefault=True, + ), + environ=dict( + nohash=True, + usedefault=True, + ), + input_image=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=2, + ), + nn_interp=dict( + argstr="%d", + position=-1, + requires=["addvox"], + ), + num_threads=dict( + nohash=True, + usedefault=True, + ), + out_spacing=dict( + argstr="%s", + mandatory=True, + position=4, + ), + output_image=dict( + argstr="%s", + extensions=None, + keep_extension=True, + name_source=["input_image"], + name_template="%s_resampled", + position=3, + ), + ) + inputs = ResampleImageBySpacing.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_ResampleImageBySpacing_outputs(): + output_map = dict( + output_image=dict( + extensions=None, + ), + ) + outputs = ResampleImageBySpacing.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/ants/tests/test_auto_ThresholdImage.py b/nipype/interfaces/ants/tests/test_auto_ThresholdImage.py new file mode 100644 index 0000000000..c17f340b69 --- /dev/null +++ b/nipype/interfaces/ants/tests/test_auto_ThresholdImage.py @@ -0,0 +1,94 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import ThresholdImage + + +def test_ThresholdImage_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + copy_header=dict( + mandatory=True, + usedefault=True, + ), + dimension=dict( + argstr="%d", + position=1, + usedefault=True, + ), + environ=dict( + nohash=True, + usedefault=True, + ), + input_image=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=2, + ), + 
input_mask=dict( + argstr="%s", + extensions=None, + requires=["num_thresholds"], + ), + inside_value=dict( + argstr="%f", + position=6, + requires=["th_low"], + ), + mode=dict( + argstr="%s", + position=4, + requires=["num_thresholds"], + xor=["th_low", "th_high"], + ), + num_threads=dict( + nohash=True, + usedefault=True, + ), + num_thresholds=dict( + argstr="%d", + position=5, + ), + output_image=dict( + argstr="%s", + extensions=None, + keep_extension=True, + name_source=["input_image"], + name_template="%s_resampled", + position=3, + ), + outside_value=dict( + argstr="%f", + position=7, + requires=["th_low"], + ), + th_high=dict( + argstr="%f", + position=5, + xor=["mode"], + ), + th_low=dict( + argstr="%f", + position=4, + xor=["mode"], + ), + ) + inputs = ThresholdImage.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_ThresholdImage_outputs(): + output_map = dict( + output_image=dict( + extensions=None, + ), + ) + outputs = ThresholdImage.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py b/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py index 42020f6db9..66f45d0cd7 100644 --- a/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py +++ b/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..resampling import WarpImageMultiTransform def test_WarpImageMultiTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='%d', + argstr="%d", position=1, usedefault=True, ), @@ -16,7 +17,8 @@ def test_WarpImageMultiTransform_inputs(): usedefault=True, ), input_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), @@ -26,41 +28,56 @@ def test_WarpImageMultiTransform_inputs(): usedefault=True, ), out_postfix=dict( + extensions=None, hash_files=False, usedefault=True, - xor=['output_image'], + xor=["output_image"], ), output_image=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=3, - xor=['out_postfix'], + xor=["out_postfix"], ), reference_image=dict( - argstr='-R %s', - xor=['tightest_box'], + argstr="-R %s", + extensions=None, + xor=["tightest_box"], + ), + reslice_by_header=dict( + argstr="--reslice-by-header", ), - reslice_by_header=dict(argstr='--reslice-by-header', ), tightest_box=dict( - argstr='--tightest-bounding-box', - xor=['reference_image'], + argstr="--tightest-bounding-box", + xor=["reference_image"], ), transformation_series=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-1, ), - use_bspline=dict(argstr='--use-BSpline', ), - use_nearest=dict(argstr='--use-NN', ), + use_bspline=dict( + argstr="--use-BSpline", + ), + use_nearest=dict( + argstr="--use-NN", + ), ) inputs = WarpImageMultiTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WarpImageMultiTransform_outputs(): - output_map = dict(output_image=dict(), ) + output_map = dict( + output_image=dict( + extensions=None, + ), + ) outputs = 
WarpImageMultiTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py b/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py index de3131f056..63d8d8365e 100644 --- a/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py +++ b/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..resampling import WarpTimeSeriesImageMultiTransform def test_WarpTimeSeriesImageMultiTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='%d', + argstr="%d", position=1, usedefault=True, ), @@ -16,8 +17,9 @@ def test_WarpTimeSeriesImageMultiTransform_inputs(): usedefault=True, ), input_image=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, mandatory=True, ), invert_affine=dict(), @@ -26,33 +28,46 @@ def test_WarpTimeSeriesImageMultiTransform_inputs(): usedefault=True, ), out_postfix=dict( - argstr='%s', + argstr="%s", usedefault=True, ), reference_image=dict( - argstr='-R %s', - xor=['tightest_box'], + argstr="-R %s", + extensions=None, + xor=["tightest_box"], + ), + reslice_by_header=dict( + argstr="--reslice-by-header", ), - reslice_by_header=dict(argstr='--reslice-by-header', ), tightest_box=dict( - argstr='--tightest-bounding-box', - xor=['reference_image'], + argstr="--tightest-bounding-box", + xor=["reference_image"], ), transformation_series=dict( - argstr='%s', + argstr="%s", copyfile=False, mandatory=True, ), - use_bspline=dict(argstr='--use-Bspline', ), - use_nearest=dict(argstr='--use-NN', ), + use_bspline=dict( + argstr="--use-Bspline", + ), + use_nearest=dict( + argstr="--use-NN", + ), ) inputs = WarpTimeSeriesImageMultiTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WarpTimeSeriesImageMultiTransform_outputs(): - output_map = dict(output_image=dict(), ) + output_map = dict( + output_image=dict( + extensions=None, + ), + ) outputs = WarpTimeSeriesImageMultiTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_antsIntroduction.py b/nipype/interfaces/ants/tests/test_auto_antsIntroduction.py index fe21858500..0318db7624 100644 --- a/nipype/interfaces/ants/tests/test_auto_antsIntroduction.py +++ b/nipype/interfaces/ants/tests/test_auto_antsIntroduction.py @@ -1,14 +1,17 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..legacy import antsIntroduction def test_antsIntroduction_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bias_field_correction=dict(argstr='-n 1', ), + args=dict( + argstr="%s", + ), + bias_field_correction=dict( + argstr="-n 1", + ), dimension=dict( - argstr='-d %d', + argstr="-d %d", position=1, usedefault=True, ), @@ -16,34 +19,44 @@ def test_antsIntroduction_inputs(): nohash=True, usedefault=True, ), - force_proceed=dict(argstr='-f 1', ), + force_proceed=dict( + argstr="-f 1", + ), input_image=dict( - argstr='-i %s', + argstr="-i %s", copyfile=False, + extensions=None, mandatory=True, ), - inverse_warp_template_labels=dict(argstr='-l', ), + inverse_warp_template_labels=dict( + argstr="-l", + ), max_iterations=dict( - argstr='-m %s', - sep='x', + 
argstr="-m %s", + sep="x", ), num_threads=dict( nohash=True, usedefault=True, ), out_prefix=dict( - argstr='-o %s', + argstr="-o %s", usedefault=True, ), - quality_check=dict(argstr='-q 1', ), + quality_check=dict( + argstr="-q 1", + ), reference_image=dict( - argstr='-r %s', + argstr="-r %s", copyfile=True, + extensions=None, mandatory=True, ), - similarity_metric=dict(argstr='-s %s', ), + similarity_metric=dict( + argstr="-s %s", + ), transformation_model=dict( - argstr='-t %s', + argstr="-t %s", usedefault=True, ), ) @@ -52,13 +65,25 @@ def test_antsIntroduction_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_antsIntroduction_outputs(): output_map = dict( - affine_transformation=dict(), - input_file=dict(), - inverse_warp_field=dict(), - output_file=dict(), - warp_field=dict(), + affine_transformation=dict( + extensions=None, + ), + input_file=dict( + extensions=None, + ), + inverse_warp_field=dict( + extensions=None, + ), + output_file=dict( + extensions=None, + ), + warp_field=dict( + extensions=None, + ), ) outputs = antsIntroduction.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_buildtemplateparallel.py b/nipype/interfaces/ants/tests/test_auto_buildtemplateparallel.py index 8513003c29..2713c6af54 100644 --- a/nipype/interfaces/ants/tests/test_auto_buildtemplateparallel.py +++ b/nipype/interfaces/ants/tests/test_auto_buildtemplateparallel.py @@ -1,14 +1,17 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..legacy import buildtemplateparallel def test_buildtemplateparallel_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bias_field_correction=dict(argstr='-n 1', ), + args=dict( + argstr="%s", + ), + bias_field_correction=dict( + argstr="-n 1", + ), dimension=dict( - argstr='-d %d', + argstr="-d %d", position=1, usedefault=True, ), @@ -16,40 +19,46 @@ def test_buildtemplateparallel_inputs(): nohash=True, usedefault=True, ), - gradient_step_size=dict(argstr='-g %f', ), + gradient_step_size=dict( + argstr="-g %f", + ), in_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-1, ), iteration_limit=dict( - argstr='-i %d', + argstr="-i %d", usedefault=True, ), max_iterations=dict( - argstr='-m %s', - sep='x', + argstr="-m %s", + sep="x", ), num_cores=dict( - argstr='-j %d', - requires=['parallelization'], + argstr="-j %d", + requires=["parallelization"], ), num_threads=dict( nohash=True, usedefault=True, ), out_prefix=dict( - argstr='-o %s', + argstr="-o %s", usedefault=True, ), parallelization=dict( - argstr='-c %d', + argstr="-c %d", usedefault=True, ), - rigid_body_registration=dict(argstr='-r 1', ), - similarity_metric=dict(argstr='-s %s', ), + rigid_body_registration=dict( + argstr="-r 1", + ), + similarity_metric=dict( + argstr="-s %s", + ), transformation_model=dict( - argstr='-t %s', + argstr="-t %s", usedefault=True, ), use_first_as_target=dict(), @@ -59,9 +68,13 @@ def test_buildtemplateparallel_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_buildtemplateparallel_outputs(): output_map = dict( - final_template_file=dict(), + final_template_file=dict( + extensions=None, + ), subject_outfiles=dict(), template_files=dict(), ) diff --git a/nipype/interfaces/ants/tests/test_base.py b/nipype/interfaces/ants/tests/test_base.py new file mode 100644 index 
0000000000..293aed72e6 --- /dev/null +++ b/nipype/interfaces/ants/tests/test_base.py @@ -0,0 +1,25 @@ +from nipype.interfaces.ants.base import Info + +import pytest + +# fmt: off +ANTS_VERSIONS = [("""\ +ANTs Version: 2.3.3.dev168-g29bdf +Compiled: Jun 9 2020 03:44:55 + +""", "2.3.3"), ("""\ +ANTs Version: v2.3.5.post76-g28dd25c +Compiled: Nov 16 2021 14:57:48 + +""", "2.3.5"), ("""\ +ANTs Version: v2.1.0.post789-g0740f +Compiled: I don't still have this so not going to pretend + +""", "2.2.0"), +] +# fmt: on + + +@pytest.mark.parametrize("raw_info, version", ANTS_VERSIONS) +def test_version_parser(raw_info, version): + assert Info.parse_version(raw_info) == version diff --git a/nipype/interfaces/ants/tests/test_extra_Registration.py b/nipype/interfaces/ants/tests/test_extra_Registration.py index 745b825c65..d134324253 100644 --- a/nipype/interfaces/ants/tests/test_extra_Registration.py +++ b/nipype/interfaces/ants/tests/test_extra_Registration.py @@ -1,6 +1,5 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import unicode_literals from nipype.interfaces.ants import registration import os import pytest @@ -9,13 +8,13 @@ def test_ants_mand(tmpdir): tmpdir.chdir() filepath = os.path.dirname(os.path.realpath(__file__)) - datadir = os.path.realpath(os.path.join(filepath, '../../../testing/data')) + datadir = os.path.realpath(os.path.join(filepath, "../../../testing/data")) ants = registration.ANTS() ants.inputs.transformation_model = "SyN" - ants.inputs.moving_image = [os.path.join(datadir, 'resting.nii')] - ants.inputs.fixed_image = [os.path.join(datadir, 'T1.nii')] - ants.inputs.metric = ['MI'] + ants.inputs.moving_image = [os.path.join(datadir, "resting.nii")] + ants.inputs.fixed_image = [os.path.join(datadir, "T1.nii")] + ants.inputs.metric = ["MI"] with pytest.raises(ValueError) as er: ants.run() diff --git a/nipype/interfaces/ants/tests/test_resampling.py b/nipype/interfaces/ants/tests/test_resampling.py index 14903f0137..3b1da9d3ee 100644 --- a/nipype/interfaces/ants/tests/test_resampling.py +++ b/nipype/interfaces/ants/tests/test_resampling.py @@ -1,7 +1,10 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from nipype.interfaces.ants import WarpImageMultiTransform, WarpTimeSeriesImageMultiTransform +from nipype.interfaces.ants import ( + WarpImageMultiTransform, + WarpTimeSeriesImageMultiTransform, +) import os import pytest @@ -10,7 +13,7 @@ def change_dir(request): orig_dir = os.getcwd() filepath = os.path.dirname(os.path.realpath(__file__)) - datadir = os.path.realpath(os.path.join(filepath, '../../../testing/data')) + datadir = os.path.realpath(os.path.join(filepath, "../../../testing/data")) os.chdir(datadir) def move2orig(): @@ -22,32 +25,43 @@ def move2orig(): @pytest.fixture() def create_wimt(): wimt = WarpImageMultiTransform() - wimt.inputs.input_image = 'diffusion_weighted.nii' - wimt.inputs.reference_image = 'functional.nii' + wimt.inputs.input_image = "diffusion_weighted.nii" + wimt.inputs.reference_image = "functional.nii" wimt.inputs.transformation_series = [ - 'func2anat_coreg_Affine.txt', 'func2anat_InverseWarp.nii.gz', - 'dwi2anat_Warp.nii.gz', 'dwi2anat_coreg_Affine.txt' + "func2anat_coreg_Affine.txt", + "func2anat_InverseWarp.nii.gz", + "dwi2anat_Warp.nii.gz", + "dwi2anat_coreg_Affine.txt", ] return wimt def test_WarpImageMultiTransform(change_dir, create_wimt): wimt = create_wimt - assert wimt.cmdline == 
'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ -func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt' + assert ( + wimt.cmdline + == "WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ +func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt" + ) def test_WarpImageMultiTransform_invaffine_1(change_dir, create_wimt): wimt = create_wimt wimt.inputs.invert_affine = [1] - assert wimt.cmdline == 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ --i func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt' + assert ( + wimt.cmdline + == "WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ +-i func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt" + ) def test_WarpImageMultiTransform_invaffine_2(change_dir, create_wimt): wimt = create_wimt wimt.inputs.invert_affine = [2] - assert wimt.cmdline == 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz -i dwi2anat_coreg_Affine.txt' + assert ( + wimt.cmdline + == "WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz -i dwi2anat_coreg_Affine.txt" + ) def test_WarpImageMultiTransform_invaffine_wrong(change_dir, create_wimt): @@ -60,30 +74,32 @@ def test_WarpImageMultiTransform_invaffine_wrong(change_dir, create_wimt): @pytest.fixture() def create_wtsimt(): wtsimt = WarpTimeSeriesImageMultiTransform() - wtsimt.inputs.input_image = 'resting.nii' - wtsimt.inputs.reference_image = 'ants_deformed.nii.gz' - wtsimt.inputs.transformation_series = [ - 'ants_Warp.nii.gz', 'ants_Affine.txt' - ] + wtsimt.inputs.input_image = "resting.nii" + wtsimt.inputs.reference_image = "ants_deformed.nii.gz" + wtsimt.inputs.transformation_series = ["ants_Warp.nii.gz", "ants_Affine.txt"] return wtsimt def test_WarpTimeSeriesImageMultiTransform(change_dir, create_wtsimt): wtsimt = create_wtsimt - assert wtsimt.cmdline == 'WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ --R ants_deformed.nii.gz ants_Warp.nii.gz ants_Affine.txt' + assert ( + wtsimt.cmdline + == "WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ +-R ants_deformed.nii.gz ants_Warp.nii.gz ants_Affine.txt" + ) -def test_WarpTimeSeriesImageMultiTransform_invaffine(change_dir, - create_wtsimt): +def test_WarpTimeSeriesImageMultiTransform_invaffine(change_dir, create_wtsimt): wtsimt = create_wtsimt wtsimt.inputs.invert_affine = [1] - assert wtsimt.cmdline == 'WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ --R ants_deformed.nii.gz ants_Warp.nii.gz -i ants_Affine.txt' + assert ( + wtsimt.cmdline + == "WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ +-R ants_deformed.nii.gz ants_Warp.nii.gz -i ants_Affine.txt" + ) -def test_WarpTimeSeriesImageMultiTransform_invaffine_wrong( - change_dir, create_wtsimt): +def test_WarpTimeSeriesImageMultiTransform_invaffine_wrong(change_dir, create_wtsimt): wtsimt = create_wtsimt wtsimt.inputs.invert_affine = [0] with pytest.raises(Exception): diff --git 
a/nipype/interfaces/ants/tests/test_segmentation.py b/nipype/interfaces/ants/tests/test_segmentation.py new file mode 100644 index 0000000000..4fc22ee34a --- /dev/null +++ b/nipype/interfaces/ants/tests/test_segmentation.py @@ -0,0 +1,67 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: + +from ..segmentation import LaplacianThickness +from .test_resampling import change_dir + +import os +import pytest + + +@pytest.fixture() +def change_dir(request): + orig_dir = os.getcwd() + filepath = os.path.dirname(os.path.realpath(__file__)) + datadir = os.path.realpath(os.path.join(filepath, "../../../testing/data")) + os.chdir(datadir) + + def move2orig(): + os.chdir(orig_dir) + + request.addfinalizer(move2orig) + + +@pytest.fixture() +def create_lt(): + lt = LaplacianThickness() + # we do not run, so I stick some not really proper files as input + lt.inputs.input_gm = "diffusion_weighted.nii" + lt.inputs.input_wm = "functional.nii" + return lt + + +def test_LaplacianThickness_defaults(change_dir, create_lt): + lt = create_lt + base_cmd = "LaplacianThickness functional.nii diffusion_weighted.nii functional_thickness.nii" + assert lt.cmdline == base_cmd + lt.inputs.smooth_param = 4.5 + assert lt.cmdline == base_cmd + " 4.5" + lt.inputs.prior_thickness = 5.9 + assert lt.cmdline == base_cmd + " 4.5 5.9" + + +def test_LaplacianThickness_wrongargs(change_dir, create_lt): + lt = create_lt + lt.inputs.tolerance = 0.001 + with pytest.raises( + ValueError, match=r".* requires a value for input 'sulcus_prior' .*" + ): + lt.cmdline + lt.inputs.sulcus_prior = 0.15 + with pytest.raises(ValueError, match=r".* requires a value for input 'dT' .*"): + lt.cmdline + lt.inputs.dT = 0.01 + with pytest.raises( + ValueError, match=r".* requires a value for input 'prior_thickness' .*" + ): + lt.cmdline + lt.inputs.prior_thickness = 5.9 + with pytest.raises( + ValueError, match=r".* requires a value for input 'smooth_param' .*" + ): + lt.cmdline + lt.inputs.smooth_param = 4.5 + assert ( + lt.cmdline == "LaplacianThickness functional.nii diffusion_weighted.nii " + "functional_thickness.nii 4.5 5.9 0.01 0.15 0.001" + ) diff --git a/nipype/interfaces/ants/tests/test_spec_JointFusion.py b/nipype/interfaces/ants/tests/test_spec_JointFusion.py deleted file mode 100644 index b2ca69926a..0000000000 --- a/nipype/interfaces/ants/tests/test_spec_JointFusion.py +++ /dev/null @@ -1,87 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import division -from builtins import range -from nipype.testing import example_data -from nipype.interfaces.base import InputMultiPath -from traits.trait_errors import TraitError -from nipype.interfaces.ants import JointFusion -import pytest - - -def test_JointFusion_dimension(): - at = JointFusion() - set_dimension = lambda d: setattr(at.inputs, 'dimension', int(d)) - for d in range(2, 5): - set_dimension(d) - assert at.inputs.dimension == int(d) - for d in [0, 1, 6, 7]: - with pytest.raises(TraitError): - set_dimension(d) - - -@pytest.mark.parametrize("m", range(1, 5)) -def test_JointFusion_modalities(m): - at = JointFusion() - setattr(at.inputs, 'modalities', int(m)) - assert at.inputs.modalities == int(m) - - -@pytest.mark.parametrize("a, b", - [(a, b) for a in range(10) for b in range(10)]) -def test_JointFusion_method(a, b): - at = JointFusion() - set_method = lambda a, b: setattr(at.inputs, 'method', 'Joint[%.1f,%d]'.format(a, b)) - _a = a / 10.0 - set_method(_a, b) - # set directly - assert at.inputs.method == 
'Joint[%.1f,%d]'.format(_a, b) - aprime = _a + 0.1 - bprime = b + 1 - at.inputs.alpha = aprime - at.inputs.beta = bprime - # set with alpha/beta - assert at.inputs.method == 'Joint[%.1f,%d]'.format(aprime, bprime) - - -@pytest.mark.parametrize("attr, x", - [(attr, x) - for attr in ['patch_radius', 'search_radius'] - for x in range(5)]) -def test_JointFusion_radius(attr, x): - at = JointFusion() - setattr(at.inputs, attr, [x, x + 1, x**x]) - assert at._format_arg(attr, None, getattr( - at.inputs, attr))[4:] == '{0}x{1}x{2}'.format(x, x + 1, x**x) - - -def test_JointFusion_cmd(): - at = JointFusion() - at.inputs.dimension = 3 - at.inputs.modalities = 1 - at.inputs.method = 'Joint[0.1,2]' - at.inputs.output_label_image = 'fusion_labelimage_output.nii' - warped_intensity_images = [ - example_data('im1.nii'), - example_data('im2.nii') - ] - at.inputs.warped_intensity_images = warped_intensity_images - segmentation_images = [ - example_data('segmentation0.nii.gz'), - example_data('segmentation1.nii.gz') - ] - at.inputs.warped_label_images = segmentation_images - T1_image = example_data('T1.nii') - at.inputs.target_image = T1_image - at.inputs.patch_radius = [3, 2, 1] - at.inputs.search_radius = [1, 2, 3] - expected_command = ('jointfusion 3 1 -m Joint[0.1,2] -rp 3x2x1 -rs 1x2x3' - ' -tg %s -g %s -g %s -l %s -l %s' - ' fusion_labelimage_output.nii') % ( - T1_image, warped_intensity_images[0], - warped_intensity_images[1], segmentation_images[0], - segmentation_images[1]) - assert at.cmdline == expected_command - # setting intensity or labels with unequal lengths raises error - with pytest.raises(AssertionError): - at._format_arg('warped_intensity_images', InputMultiPath, - warped_intensity_images + [example_data('im3.nii')]) diff --git a/nipype/interfaces/ants/utils.py b/nipype/interfaces/ants/utils.py index 5d284b89c0..57202f5a34 100644 --- a/nipype/interfaces/ants/utils.py +++ b/nipype/interfaces/ants/utils.py @@ -1,38 +1,565 @@ -# -*- coding: utf-8 -*- -"""ANTS Apply Transforms interface -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) +"""ANTs' utilities.""" import os +from warnings import warn +from ..base import traits, Tuple, isdefined, TraitedSpec, File, Str, InputMultiObject +from ..mixins import CopyHeaderInterface +from .base import ANTSCommandInputSpec, ANTSCommand -from ..base import TraitedSpec, File, traits, InputMultiPath -from .base import ANTSCommand, ANTSCommandInputSpec +class ImageMathInputSpec(ANTSCommandInputSpec): + dimension = traits.Int( + 3, usedefault=True, position=1, argstr="%d", desc="dimension of output image" + ) + output_image = File( + position=2, + argstr="%s", + name_source=["op1"], + name_template="%s_maths", + desc="output image file", + keep_extension=True, + ) + operation = traits.Enum( + # Mathematical Operations + "m", + "vm", + "+", + "v+", + "-", + "v-", + "/", + "^", + "max", + "exp", + "addtozero", + "overadd", + "abs", + "total", + "mean", + "vtotal", + "Decision", + "Neg", + # Spatial Filtering Operations + "Project", + "G", + "MD", + "ME", + "MO", + "MC", + "GD", + "GE", + "GO", + "GC", + "ExtractContours", + # Transform Image Operations + "Translate", + # Tensor Operations + "4DTensorTo3DTensor", + "ExtractVectorComponent", + "TensorColor", + "TensorFA", + "TensorFADenominator", + "TensorFANumerator", + "TensorMeanDiffusion", + "TensorRadialDiffusion", + "TensorAxialDiffusion", + "TensorEigenvalue", + "TensorToVector", + "TensorToVectorComponent", + "TensorMask", + # Unclassified Operators + "Byte", + 
"CorruptImage", + "D", + "MaurerDistance", + "ExtractSlice", + "FillHoles", + "Convolve", + "Finite", + "FlattenImage", + "GetLargestComponent", + "Grad", + "RescaleImage", + "WindowImage", + "NeighborhoodStats", + "ReplicateDisplacement", + "ReplicateImage", + "LabelStats", + "Laplacian", + "Canny", + "Lipschitz", + "MTR", + "Normalize", + "PadImage", + "SigmoidImage", + "Sharpen", + "UnsharpMask", + "PValueImage", + "ReplaceVoxelValue", + "SetTimeSpacing", + "SetTimeSpacingWarp", + "stack", + "ThresholdAtMean", + "TriPlanarView", + "TruncateImageIntensity", + mandatory=True, + position=3, + argstr="%s", + desc="mathematical operations", + ) + op1 = File( + exists=True, mandatory=True, position=-3, argstr="%s", desc="first operator" + ) + op2 = traits.Either( + File(exists=True), Str, position=-2, argstr="%s", desc="second operator" + ) -class AverageAffineTransformInputSpec(ANTSCommandInputSpec): + args = Str(position=-1, argstr="%s", desc="Additional parameters to the command") + + copy_header = traits.Bool( + True, + usedefault=True, + desc="copy headers of the original image into the output (corrected) file", + ) + + +class ImageMathOuputSpec(TraitedSpec): + output_image = File(exists=True, desc="output image file") + + +class ImageMath(ANTSCommand, CopyHeaderInterface): + """ + Operations over images. + + Examples + -------- + >>> ImageMath( + ... op1='structural.nii', + ... operation='+', + ... op2='2').cmdline + 'ImageMath 3 structural_maths.nii + structural.nii 2' + + >>> ImageMath( + ... op1='structural.nii', + ... operation='Project', + ... op2='1 2').cmdline + 'ImageMath 3 structural_maths.nii Project structural.nii 1 2' + + >>> ImageMath( + ... op1='structural.nii', + ... operation='G', + ... op2='4').cmdline + 'ImageMath 3 structural_maths.nii G structural.nii 4' + + >>> ImageMath( + ... op1='structural.nii', + ... operation='TruncateImageIntensity', + ... op2='0.005 0.999 256').cmdline + 'ImageMath 3 structural_maths.nii TruncateImageIntensity structural.nii 0.005 0.999 256' + + By default, Nipype copies headers from the first input image (``op1``) + to the output image. + For some operations, as the ``PadImage`` operation, the header cannot be copied from inputs to + outputs, and so ``copy_header`` option is automatically set to ``False``. + + >>> pad = ImageMath( + ... op1='structural.nii', + ... operation='PadImage') + >>> pad.inputs.copy_header + False + + While the operation is set to ``PadImage``, + setting ``copy_header = True`` will have no effect. 
+ + >>> pad.inputs.copy_header = True + >>> pad.inputs.copy_header + False + + For any other operation, ``copy_header`` can be enabled/disabled normally: + + >>> pad.inputs.operation = "ME" + >>> pad.inputs.copy_header = True + >>> pad.inputs.copy_header + True + + """ + + _cmd = "ImageMath" + input_spec = ImageMathInputSpec + output_spec = ImageMathOuputSpec + _copy_header_map = {"output_image": "op1"} + _no_copy_header_operation = ( + "PadImage", + "LabelStats", + "SetTimeSpacing", + "SetTimeSpacingWarp", + "TriPlanarView", + ) + + def __init__(self, **inputs): + super().__init__(**inputs) + if self.inputs.operation in self._no_copy_header_operation: + self.inputs.copy_header = False + + self.inputs.on_trait_change(self._operation_update, "operation") + self.inputs.on_trait_change(self._copyheader_update, "copy_header") + + def _operation_update(self): + if self.inputs.operation in self._no_copy_header_operation: + self.inputs.copy_header = False + + def _copyheader_update(self): + if ( + self.inputs.copy_header + and self.inputs.operation in self._no_copy_header_operation + ): + warn( + f"copy_header cannot be updated to True with {self.inputs.operation} as operation." + ) + self.inputs.copy_header = False + + +class ResampleImageBySpacingInputSpec(ANTSCommandInputSpec): + dimension = traits.Int( + 3, usedefault=True, position=1, argstr="%d", desc="dimension of output image" + ) + input_image = File( + exists=True, mandatory=True, position=2, argstr="%s", desc="input image file" + ) + output_image = File( + position=3, + argstr="%s", + name_source=["input_image"], + name_template="%s_resampled", + desc="output image file", + keep_extension=True, + ) + out_spacing = traits.Either( + traits.List(traits.Float, minlen=2, maxlen=3), + Tuple(traits.Float, traits.Float, traits.Float), + Tuple(traits.Float, traits.Float), + position=4, + argstr="%s", + mandatory=True, + desc="output spacing", + ) + apply_smoothing = traits.Bool( + False, argstr="%d", position=5, desc="smooth before resampling" + ) + addvox = traits.Int( + argstr="%d", + position=6, + requires=["apply_smoothing"], + desc="addvox pads each dimension by addvox", + ) + nn_interp = traits.Bool( + argstr="%d", desc="nn interpolation", position=-1, requires=["addvox"] + ) + + +class ResampleImageBySpacingOutputSpec(TraitedSpec): + output_image = File(exists=True, desc="resampled file") + + +class ResampleImageBySpacing(ANTSCommand): + """ + Resample an image with a given spacing. 
+ + Examples + -------- + >>> res = ResampleImageBySpacing(dimension=3) + >>> res.inputs.input_image = 'structural.nii' + >>> res.inputs.output_image = 'output.nii.gz' + >>> res.inputs.out_spacing = (4, 4, 4) + >>> res.cmdline #doctest: +ELLIPSIS + 'ResampleImageBySpacing 3 structural.nii output.nii.gz 4 4 4' + + >>> res = ResampleImageBySpacing(dimension=3) + >>> res.inputs.input_image = 'structural.nii' + >>> res.inputs.output_image = 'output.nii.gz' + >>> res.inputs.out_spacing = (4, 4, 4) + >>> res.inputs.apply_smoothing = True + >>> res.cmdline #doctest: +ELLIPSIS + 'ResampleImageBySpacing 3 structural.nii output.nii.gz 4 4 4 1' + + >>> res = ResampleImageBySpacing(dimension=3) + >>> res.inputs.input_image = 'structural.nii' + >>> res.inputs.output_image = 'output.nii.gz' + >>> res.inputs.out_spacing = (0.4, 0.4, 0.4) + >>> res.inputs.apply_smoothing = True + >>> res.inputs.addvox = 2 + >>> res.inputs.nn_interp = False + >>> res.cmdline #doctest: +ELLIPSIS + 'ResampleImageBySpacing 3 structural.nii output.nii.gz 0.4 0.4 0.4 1 2 0' + + """ + + _cmd = "ResampleImageBySpacing" + input_spec = ResampleImageBySpacingInputSpec + output_spec = ResampleImageBySpacingOutputSpec + + def _format_arg(self, name, trait_spec, value): + if name == "out_spacing": + if len(value) != self.inputs.dimension: + raise ValueError("out_spacing dimensions should match dimension") + + value = " ".join(["%g" % d for d in value]) + + return super()._format_arg(name, trait_spec, value) + + +class ThresholdImageInputSpec(ANTSCommandInputSpec): + dimension = traits.Int( + 3, usedefault=True, position=1, argstr="%d", desc="dimension of output image" + ) + input_image = File( + exists=True, mandatory=True, position=2, argstr="%s", desc="input image file" + ) + output_image = File( + position=3, + argstr="%s", + name_source=["input_image"], + name_template="%s_resampled", + desc="output image file", + keep_extension=True, + ) + + mode = traits.Enum( + "Otsu", + "Kmeans", + argstr="%s", + position=4, + requires=["num_thresholds"], + xor=["th_low", "th_high"], + desc="whether to run Otsu / Kmeans thresholding", + ) + num_thresholds = traits.Int(position=5, argstr="%d", desc="number of thresholds") + input_mask = File( + exists=True, + requires=["num_thresholds"], + argstr="%s", + desc="input mask for Otsu, Kmeans", + ) + + th_low = traits.Float(position=4, argstr="%f", xor=["mode"], desc="lower threshold") + th_high = traits.Float( + position=5, argstr="%f", xor=["mode"], desc="upper threshold" + ) + inside_value = traits.Float( + 1, position=6, argstr="%f", requires=["th_low"], desc="inside value" + ) + outside_value = traits.Float( + 0, position=7, argstr="%f", requires=["th_low"], desc="outside value" + ) + copy_header = traits.Bool( + True, + mandatory=True, + usedefault=True, + desc="copy headers of the original image into the output (corrected) file", + ) + + +class ThresholdImageOutputSpec(TraitedSpec): + output_image = File(exists=True, desc="resampled file") + + +class ThresholdImage(ANTSCommand, CopyHeaderInterface): + """ + Apply thresholds on images. 
+
+    Examples
+    --------
+    >>> thres = ThresholdImage(dimension=3)
+    >>> thres.inputs.input_image = 'structural.nii'
+    >>> thres.inputs.output_image = 'output.nii.gz'
+    >>> thres.inputs.th_low = 0.5
+    >>> thres.inputs.th_high = 1.0
+    >>> thres.inputs.inside_value = 1.0
+    >>> thres.inputs.outside_value = 0.0
+    >>> thres.cmdline #doctest: +ELLIPSIS
+    'ThresholdImage 3 structural.nii output.nii.gz 0.500000 1.000000 1.000000 0.000000'
+
+    >>> thres = ThresholdImage(dimension=3)
+    >>> thres.inputs.input_image = 'structural.nii'
+    >>> thres.inputs.output_image = 'output.nii.gz'
+    >>> thres.inputs.mode = 'Kmeans'
+    >>> thres.inputs.num_thresholds = 4
+    >>> thres.cmdline #doctest: +ELLIPSIS
+    'ThresholdImage 3 structural.nii output.nii.gz Kmeans 4'
+
+    """
+
+    _cmd = "ThresholdImage"
+    input_spec = ThresholdImageInputSpec
+    output_spec = ThresholdImageOutputSpec
+    _copy_header_map = {"output_image": "input_image"}
+
+
+class AIInputSpec(ANTSCommandInputSpec):
     dimension = traits.Enum(
-        3,
-        2,
-        argstr='%d',
+        3, 2, usedefault=True, argstr="-d %d", desc="dimension of output image"
+    )
+    verbose = traits.Bool(
+        False, usedefault=True, argstr="-v %d", desc="enable verbosity"
+    )
+
+    fixed_image = File(
+        exists=True,
+        mandatory=True,
+        desc="Image to which the moving_image should be transformed",
+    )
+    moving_image = File(
+        exists=True,
         mandatory=True,
-        position=0,
-        desc='image dimension (2 or 3)')
+        desc="Image that will be transformed to fixed_image",
+    )
+
+    fixed_image_mask = File(exists=True, argstr="-x %s", desc="fixed image mask")
+    moving_image_mask = File(
+        exists=True, requires=["fixed_image_mask"], desc="moving image mask"
+    )
+
+    metric_trait = (
+        traits.Enum("Mattes", "GC", "MI"),
+        traits.Int(32),
+        traits.Enum("Regular", "Random", "None"),
+        traits.Range(value=0.2, low=0.0, high=1.0),
+    )
+    metric = Tuple(
+        *metric_trait, argstr="-m %s", mandatory=True, desc="the metric(s) to use."
+    )
+
+    transform = Tuple(
+        traits.Enum("Affine", "Rigid", "Similarity"),
+        traits.Range(value=0.1, low=0.0, exclude_low=True),
+        argstr="-t %s[%g]",
+        usedefault=True,
+        desc="transformation model and gradient step, rendered as Transform[gradientStep]",
+    )
+
+    principal_axes = traits.Bool(
+        False,
+        usedefault=True,
+        argstr="-p %d",
+        xor=["blobs"],
+        desc="align using principal axes",
+    )
+    search_factor = Tuple(
+        traits.Float(20),
+        traits.Range(value=0.12, low=0.0, high=1.0),
+        usedefault=True,
+        argstr="-s [%g,%g]",
+        desc="search factor",
+    )
+
+    search_grid = traits.Either(
+        Tuple(traits.Float, Tuple(traits.Float, traits.Float, traits.Float)),
+        Tuple(traits.Float, traits.Tuple(traits.Float, traits.Float)),
+        argstr="-g %s",
+        desc="Translation search grid in mm",
+        min_ver="2.3.0",
+    )
+
+    convergence = Tuple(
+        traits.Range(low=1, high=10000, value=10),
+        traits.Float(1e-6),
+        traits.Range(low=1, high=100, value=10),
+        usedefault=True,
+        argstr="-c [%d,%g,%d]",
+        desc="convergence",
+    )
+
+    output_transform = File(
+        "initialization.mat", usedefault=True, argstr="-o %s", desc="output file name"
+    )
+
+
+class AIOuputSpec(TraitedSpec):
+    output_transform = File(exists=True, desc="output file name")
+
+
+class AI(ANTSCommand):
+    """
+    Calculate the optimal linear transform parameters for aligning two images.
+
+    Examples
+    --------
+    >>> AI(
+    ...     fixed_image='structural.nii',
+    ...     moving_image='epi.nii',
+    ...     metric=('Mattes', 32, 'Regular', 1),
+    ...
).cmdline + 'antsAI -c [10,1e-06,10] -d 3 -m Mattes[structural.nii,epi.nii,32,Regular,1] + -o initialization.mat -p 0 -s [20,0.12] -t Affine[0.1] -v 0' + + >>> AI(fixed_image='structural.nii', + ... moving_image='epi.nii', + ... metric=('Mattes', 32, 'Regular', 1), + ... search_grid=(12, (1, 1, 1)), + ... ).cmdline + 'antsAI -c [10,1e-06,10] -d 3 -m Mattes[structural.nii,epi.nii,32,Regular,1] + -o initialization.mat -p 0 -s [20,0.12] -g [12.0,1x1x1] -t Affine[0.1] -v 0' + + """ + + _cmd = "antsAI" + input_spec = AIInputSpec + output_spec = AIOuputSpec + + def _run_interface(self, runtime, correct_return_codes=(0,)): + runtime = super()._run_interface(runtime, correct_return_codes) + + self._output = { + "output_transform": os.path.join( + runtime.cwd, os.path.basename(self.inputs.output_transform) + ) + } + return runtime + + def _format_arg(self, opt, spec, val): + if opt == "metric": + val = "%s[{fixed_image},{moving_image},%d,%s,%g]" % val + val = val.format( + fixed_image=self.inputs.fixed_image, + moving_image=self.inputs.moving_image, + ) + return spec.argstr % val + + if opt == "search_grid": + fmtval = "[{},{}]".format(val[0], "x".join("%g" % v for v in val[1])) + return spec.argstr % fmtval + + if opt == "fixed_image_mask": + if isdefined(self.inputs.moving_image_mask): + return spec.argstr % (f"[{val},{self.inputs.moving_image_mask}]") + + return super()._format_arg(opt, spec, val) + + def _list_outputs(self): + return self._output + + +class AverageAffineTransformInputSpec(ANTSCommandInputSpec): + dimension = traits.Enum( + 3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)" + ) output_affine_transform = File( - argstr='%s', + argstr="%s", mandatory=True, position=1, - desc='Outputfname.txt: the name of the resulting transform.') - transforms = InputMultiPath( + desc="Outputfname.txt: the name of the resulting transform.", + ) + transforms = InputMultiObject( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=3, - desc='transforms to average') + desc="transforms to average", + ) class AverageAffineTransformOutputSpec(TraitedSpec): - affine_transform = File(exists=True, desc='average transform file') + affine_transform = File(exists=True, desc="average transform file") class AverageAffineTransform(ANTSCommand): @@ -46,54 +573,54 @@ class AverageAffineTransform(ANTSCommand): >>> avg.inputs.output_affine_transform = 'MYtemplatewarp.mat' >>> avg.cmdline 'AverageAffineTransform 3 MYtemplatewarp.mat trans.mat func_to_struct.mat' + """ - _cmd = 'AverageAffineTransform' + + _cmd = "AverageAffineTransform" input_spec = AverageAffineTransformInputSpec output_spec = AverageAffineTransformOutputSpec def _format_arg(self, opt, spec, val): - return super(AverageAffineTransform, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['affine_transform'] = os.path.abspath( - self.inputs.output_affine_transform) + outputs["affine_transform"] = os.path.abspath( + self.inputs.output_affine_transform + ) return outputs class AverageImagesInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - mandatory=True, - position=0, - desc='image dimension (2 or 3)') + 3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)" + ) output_average_image = File( "average.nii", - argstr='%s', + argstr="%s", position=1, usedefault=True, hash_files=False, - desc='the name of the resulting image.') + desc="the name of the 
resulting image.",
+    )
     normalize = traits.Bool(
         argstr="%d",
         mandatory=True,
         position=2,
-        desc='Normalize: if true, the 2nd image is divided by its mean. '
-        'This will select the largest image to average into.')
-    images = InputMultiPath(
+        desc="Normalize: if true, the 2nd image is divided by its mean. "
+        "This will select the largest image to average into.",
+    )
+    images = InputMultiObject(
         File(exists=True),
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=3,
-        desc=
-        'image to apply transformation to (generally a coregistered functional)'
+        desc="input images to average",
     )


 class AverageImagesOutputSpec(TraitedSpec):
-    output_average_image = File(exists=True, desc='average image file')
+    output_average_image = File(exists=True, desc="average image file")


 class AverageImages(ANTSCommand):
@@ -109,46 +636,47 @@ class AverageImages(ANTSCommand):
     >>> avg.cmdline
     'AverageImages 3 average.nii.gz 1 rc1s1.nii rc1s1.nii'
     """
-    _cmd = 'AverageImages'
+
+    _cmd = "AverageImages"
     input_spec = AverageImagesInputSpec
     output_spec = AverageImagesOutputSpec

     def _format_arg(self, opt, spec, val):
-        return super(AverageImages, self)._format_arg(opt, spec, val)
+        return super()._format_arg(opt, spec, val)

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['output_average_image'] = os.path.realpath(
-            self.inputs.output_average_image)
+        outputs["output_average_image"] = os.path.realpath(
+            self.inputs.output_average_image
+        )
         return outputs


 class MultiplyImagesInputSpec(ANTSCommandInputSpec):
     dimension = traits.Enum(
-        3,
-        2,
-        argstr='%d',
-        mandatory=True,
-        position=0,
-        desc='image dimension (2 or 3)')
+        3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)"
+    )
     first_input = File(
-        argstr='%s', exists=True, mandatory=True, position=1, desc='image 1')
+        argstr="%s", exists=True, mandatory=True, position=1, desc="image 1"
+    )
     second_input = traits.Either(
         File(exists=True),
         traits.Float,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=2,
-        desc='image 2 or multiplication weight')
+        desc="image 2 or multiplication weight",
+    )
     output_product_image = File(
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=3,
-        desc='Outputfname.nii.gz: the name of the resulting image.')
+        desc="Outputfname.nii.gz: the name of the resulting image.",
+    )


 class MultiplyImagesOutputSpec(TraitedSpec):
-    output_product_image = File(exists=True, desc='average image file')
+    output_product_image = File(exists=True, desc="product image file")


 class MultiplyImages(ANTSCommand):
@@ -164,44 +692,44 @@ class MultiplyImages(ANTSCommand):
     >>> test.cmdline
     'MultiplyImages 3 moving2.nii 0.25 out.nii'
     """
-    _cmd = 'MultiplyImages'
+
+    _cmd = "MultiplyImages"
     input_spec = MultiplyImagesInputSpec
     output_spec = MultiplyImagesOutputSpec

     def _format_arg(self, opt, spec, val):
-        return super(MultiplyImages, self)._format_arg(opt, spec, val)
+        return super()._format_arg(opt, spec, val)

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['output_product_image'] = os.path.abspath(
-            self.inputs.output_product_image)
+        outputs["output_product_image"] = os.path.abspath(
+            self.inputs.output_product_image
+        )
         return outputs


 class CreateJacobianDeterminantImageInputSpec(ANTSCommandInputSpec):
     imageDimension = traits.Enum(
-        3,
-        2,
-        argstr='%d',
-        mandatory=True,
-        position=0,
-        desc='image dimension (2 or 3)')
+        3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)"
+    )
     deformationField = File(
-
argstr='%s', + argstr="%s", exists=True, mandatory=True, position=1, - desc='deformation transformation file') - outputImage = File( - argstr='%s', mandatory=True, position=2, desc='output filename') + desc="deformation transformation file", + ) + outputImage = File(argstr="%s", mandatory=True, position=2, desc="output filename") doLogJacobian = traits.Enum( - 0, 1, argstr='%d', position=3, desc='return the log jacobian') + 0, 1, argstr="%d", position=3, desc="return the log jacobian" + ) useGeometric = traits.Enum( - 0, 1, argstr='%d', position=4, desc='return the geometric jacobian') + 0, 1, argstr="%d", position=4, desc="return the geometric jacobian" + ) class CreateJacobianDeterminantImageOutputSpec(TraitedSpec): - jacobian_image = File(exists=True, desc='jacobian image') + jacobian_image = File(exists=True, desc="jacobian image") class CreateJacobianDeterminantImage(ANTSCommand): @@ -217,76 +745,72 @@ class CreateJacobianDeterminantImage(ANTSCommand): 'CreateJacobianDeterminantImage 3 ants_Warp.nii.gz out_name.nii.gz' """ - _cmd = 'CreateJacobianDeterminantImage' + _cmd = "CreateJacobianDeterminantImage" input_spec = CreateJacobianDeterminantImageInputSpec output_spec = CreateJacobianDeterminantImageOutputSpec def _format_arg(self, opt, spec, val): - return super(CreateJacobianDeterminantImage, self)._format_arg( - opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['jacobian_image'] = os.path.abspath(self.inputs.outputImage) + outputs["jacobian_image"] = os.path.abspath(self.inputs.outputImage) return outputs class AffineInitializerInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, 2, usedefault=True, position=0, argstr='%s', desc='dimension') + 3, 2, usedefault=True, position=0, argstr="%s", desc="dimension" + ) fixed_image = File( - exists=True, - mandatory=True, - position=1, - argstr='%s', - desc='reference image') + exists=True, mandatory=True, position=1, argstr="%s", desc="reference image" + ) moving_image = File( - exists=True, - mandatory=True, - position=2, - argstr='%s', - desc='moving image') + exists=True, mandatory=True, position=2, argstr="%s", desc="moving image" + ) out_file = File( - 'transform.mat', + "transform.mat", usedefault=True, position=3, - argstr='%s', - desc='output transform file') + argstr="%s", + desc="output transform file", + ) # Defaults in antsBrainExtraction.sh -> 15 0.1 0 10 search_factor = traits.Float( 15.0, usedefault=True, position=4, - argstr='%f', - desc='increments (degrees) for affine search') + argstr="%f", + desc="increments (degrees) for affine search", + ) radian_fraction = traits.Range( 0.0, 1.0, value=0.1, usedefault=True, position=5, - argstr='%f', - desc='search this arc +/- principal axes') + argstr="%f", + desc="search this arc +/- principal axes", + ) principal_axes = traits.Bool( False, usedefault=True, position=6, - argstr='%d', - desc= - 'whether the rotation is searched around an initial principal axis alignment.' 
+ argstr="%d", + desc="whether the rotation is searched around an initial principal axis alignment.", ) local_search = traits.Int( 10, usedefault=True, position=7, - argstr='%d', - desc= - ' determines if a local optimization is run at each search point for the set ' - 'number of iterations') + argstr="%d", + desc=" determines if a local optimization is run at each search point for the set " + "number of iterations", + ) class AffineInitializerOutputSpec(TraitedSpec): - out_file = File(desc='output transform file') + out_file = File(desc="output transform file") class AffineInitializer(ANTSCommand): @@ -301,43 +825,43 @@ class AffineInitializer(ANTSCommand): 'antsAffineInitializer 3 fixed1.nii moving1.nii transform.mat 15.000000 0.100000 0 10' """ - _cmd = 'antsAffineInitializer' + + _cmd = "antsAffineInitializer" input_spec = AffineInitializerInputSpec output_spec = AffineInitializerOutputSpec def _list_outputs(self): - return {'out_file': os.path.abspath(self.inputs.out_file)} + return {"out_file": os.path.abspath(self.inputs.out_file)} class ComposeMultiTransformInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - usedefault=True, - position=0, - desc='image dimension (2 or 3)') + 3, 2, argstr="%d", usedefault=True, position=0, desc="image dimension (2 or 3)" + ) output_transform = File( - argstr='%s', + argstr="%s", position=1, - name_source=['transforms'], - name_template='%s_composed', + name_source=["transforms"], + name_template="%s_composed", keep_extension=True, - desc='the name of the resulting transform.') + desc="the name of the resulting transform.", + ) reference_image = File( - argstr='%s', + argstr="%s", position=2, - desc='Reference image (only necessary when output is warpfield)') - transforms = InputMultiPath( + desc="Reference image (only necessary when output is warpfield)", + ) + transforms = InputMultiObject( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=3, - desc='transforms to average') + desc="transforms to average", + ) class ComposeMultiTransformOutputSpec(TraitedSpec): - output_transform = File(exists=True, desc='Composed transform file') + output_transform = File(exists=True, desc="Composed transform file") class ComposeMultiTransform(ANTSCommand): @@ -351,46 +875,46 @@ class ComposeMultiTransform(ANTSCommand): >>> compose_transform.inputs.dimension = 3 >>> compose_transform.inputs.transforms = ['struct_to_template.mat', 'func_to_struct.mat'] >>> compose_transform.cmdline - 'ComposeMultiTransform 3 struct_to_template_composed.mat struct_to_template.mat func_to_struct.mat' + 'ComposeMultiTransform 3 struct_to_template_composed.mat + struct_to_template.mat func_to_struct.mat' """ - _cmd = 'ComposeMultiTransform' + + _cmd = "ComposeMultiTransform" input_spec = ComposeMultiTransformInputSpec output_spec = ComposeMultiTransformOutputSpec class LabelGeometryInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - usedefault=True, - position=0, - desc='image dimension (2 or 3)') + 3, 2, argstr="%d", usedefault=True, position=0, desc="image dimension (2 or 3)" + ) label_image = File( - argstr='%s', + argstr="%s", position=1, mandatory=True, - desc='label image to use for extracting geometry measures') + desc="label image to use for extracting geometry measures", + ) intensity_image = File( - value='[]', + value="[]", exists=True, - argstr='%s', + argstr="%s", mandatory=True, usedefault=True, position=2, - desc='Intensity image to extract values from. 
' - 'This is an optional input') + desc="Intensity image to extract values from. This is an optional input", + ) output_file = traits.Str( - name_source=['label_image'], - name_template='%s.csv', - argstr='%s', + name_source=["label_image"], + name_template="%s.csv", + argstr="%s", position=3, - desc='name of output file') + desc="name of output file", + ) class LabelGeometryOutputSpec(TraitedSpec): - output_file = File(exists=True, desc='CSV file of geometry measures') + output_file = File(exists=True, desc="CSV file of geometry measures") class LabelGeometry(ANTSCommand): @@ -411,6 +935,7 @@ class LabelGeometry(ANTSCommand): 'LabelGeometryMeasures 3 atlas.nii.gz ants_Warp.nii.gz atlas.csv' """ - _cmd = 'LabelGeometryMeasures' + + _cmd = "LabelGeometryMeasures" input_spec = LabelGeometryInputSpec output_spec = LabelGeometryOutputSpec diff --git a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index 21186931ce..c73b64c632 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -1,8 +1,5 @@ -# -*- coding: utf-8 -*- """The ants visualisation module provides basic functions based on ITK. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os @@ -14,60 +11,73 @@ class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, - argstr='%d', + argstr="%d", usedefault=True, - desc='image dimension (2 or 3)', + desc="image dimension (2 or 3)", mandatory=True, - position=0) + position=0, + ) input_image = File( - argstr='%s', + argstr="%s", exists=True, - desc='Main input is a 3-D grayscale image.', + desc="Main input is a 3-D grayscale image.", mandatory=True, - position=1) + position=1, + ) output_image = traits.Str( - 'rgb.nii.gz', - argstr='%s', - usedefault=True, - desc='rgb output image', - position=2) - mask_image = File( - 'none', - argstr='%s', - exists=True, - desc='mask image', + "rgb.nii.gz", argstr="%s", usedefault=True, desc="rgb output image", position=2 + ) + mask_image = traits.Either( + "none", + traits.File(exists=True), + argstr="%s", + desc="mask image", position=3, - usedefault=True) - colormap = traits.Str( - argstr='%s', + default="none", usedefault=True, - desc=('Possible colormaps: grey, red, green, ' - 'blue, copper, jet, hsv, spring, summer, ' - 'autumn, winter, hot, cool, overunder, custom '), + ) + colormap = traits.Enum( + "grey", + "red", + "green", + "blue", + "copper", + "jet", + "hsv", + "spring", + "summer", + "autumn", + "winter", + "hot", + "cool", + "overunder", + "custom", + argstr="%s", + desc="Select a colormap", mandatory=True, - position=4) + position=4, + ) custom_color_map_file = traits.Str( - 'none', - argstr='%s', - usedefault=True, - desc='custom color map file', - position=5) + "none", argstr="%s", usedefault=True, desc="custom color map file", position=5 + ) minimum_input = traits.Int( - argstr='%d', desc='minimum input', mandatory=True, position=6) + argstr="%d", desc="minimum input", mandatory=True, position=6 + ) maximum_input = traits.Int( - argstr='%d', desc='maximum input', mandatory=True, position=7) - minimum_RGB_output = traits.Int( - 0, usedefault=True, argstr='%d', desc='', position=8) - maximum_RGB_output = traits.Int( - 255, usedefault=True, argstr='%d', desc='', position=9) + argstr="%d", desc="maximum input", mandatory=True, position=7 + ) + minimum_RGB_output = traits.Int(0, usedefault=True, argstr="%d", position=8) + maximum_RGB_output = traits.Int(255, usedefault=True, 
argstr="%d", position=9) class ConvertScalarImageToRGBOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='converted RGB image') + output_image = File(exists=True, desc="converted RGB image") class ConvertScalarImageToRGB(ANTSCommand): """ + Convert scalar images to RGB. + Examples -------- >>> from nipype.interfaces.ants.visualization import ConvertScalarImageToRGB @@ -79,83 +89,100 @@ class ConvertScalarImageToRGB(ANTSCommand): >>> converter.inputs.maximum_input = 6 >>> converter.cmdline 'ConvertScalarImageToRGB 3 T1.nii.gz rgb.nii.gz none jet none 0 6 0 255' + """ - _cmd = 'ConvertScalarImageToRGB' + + _cmd = "ConvertScalarImageToRGB" input_spec = ConvertScalarImageToRGBInputSpec output_spec = ConvertScalarImageToRGBOutputSpec def _format_arg(self, opt, spec, val): - return super(ConvertScalarImageToRGB, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['output_image'] = os.path.join(os.getcwd(), - self.inputs.output_image) + outputs["output_image"] = os.path.join(os.getcwd(), self.inputs.output_image) return outputs class CreateTiledMosaicInputSpec(ANTSCommandInputSpec): input_image = File( - argstr='-i %s', + argstr="-i %s", exists=True, - desc='Main input is a 3-D grayscale image.', - mandatory=True) + desc="Main input is a 3-D grayscale image.", + mandatory=True, + ) rgb_image = File( - argstr='-r %s', + argstr="-r %s", exists=True, - desc=('An optional Rgb image can be added as an overlay.' - 'It must have the same image' - 'geometry as the input grayscale image.'), - mandatory=True) + desc=( + "An optional Rgb image can be added as an overlay." + "It must have the same image" + "geometry as the input grayscale image." + ), + mandatory=True, + ) mask_image = File( - argstr='-x %s', - exists=True, - desc='Specifies the ROI of the RGB voxels used.') + argstr="-x %s", exists=True, desc="Specifies the ROI of the RGB voxels used." + ) alpha_value = traits.Float( - argstr='-a %.2f', - desc=('If an Rgb image is provided, render the overlay ' - 'using the specified alpha parameter.')) + argstr="-a %.2f", + desc=( + "If an Rgb image is provided, render the overlay " + "using the specified alpha parameter." + ), + ) output_image = traits.Str( - 'output.png', - argstr='-o %s', - desc='The output consists of the tiled mosaic image.', - usedefault=True) + "output.png", + argstr="-o %s", + desc="The output consists of the tiled mosaic image.", + usedefault=True, + ) tile_geometry = traits.Str( - argstr='-t %s', + argstr="-t %s", desc=( - 'The tile geometry specifies the number of rows and columns' + "The tile geometry specifies the number of rows and columns" 'in the output image. For example, if the user specifies "5x10", ' - 'then 5 rows by 10 columns of slices are rendered. If R < 0 and C > ' - '0 (or vice versa), the negative value is selected' - 'based on direction.')) + "then 5 rows by 10 columns of slices are rendered. If R < 0 and C > " + "0 (or vice versa), the negative value is selected" + "based on direction." + ), + ) direction = traits.Int( - argstr='-d %d', - desc=('Specifies the direction of ' - 'the slices. If no direction is specified, the ' - 'direction with the coarsest spacing is chosen.')) + argstr="-d %d", + desc=( + "Specifies the direction of " + "the slices. If no direction is specified, the " + "direction with the coarsest spacing is chosen." 
+        ),
+    )
     pad_or_crop = traits.Str(
-        argstr='-p %s',
-        desc='argument passed to -p flag:'
-        '[padVoxelWidth,]'
-        '[lowerPadding[0]xlowerPadding[1],upperPadding[0]xupperPadding[1],'
-        'constantValue]'
-        'The user can specify whether to pad or crop a specified '
-        'voxel-width boundary of each individual slice. For this '
-        'program, cropping is simply padding with negative voxel-widths.'
-        'If one pads (+), the user can also specify a constant pad '
-        'value (default = 0). If a mask is specified, the user can use '
+        argstr="-p %s",
+        desc="argument passed to -p flag:"
+        "[padVoxelWidth,]"
+        "[lowerPadding[0]xlowerPadding[1],upperPadding[0]xupperPadding[1],"
+        "constantValue] "
+        "The user can specify whether to pad or crop a specified "
+        "voxel-width boundary of each individual slice. For this "
+        "program, cropping is simply padding with negative voxel-widths. "
+        "If one pads (+), the user can also specify a constant pad "
+        "value (default = 0). If a mask is specified, the user can use "
         'the mask to define the region, by using the keyword "mask"'
-        ' plus an offset, e.g. "-p mask+3".')
+        ' plus an offset, e.g. "-p mask+3".',
+    )
     slices = traits.Str(
-        argstr='-s %s',
-        desc=('Number of slices to increment Slice1xSlice2xSlice3'
-              '[numberOfSlicesToIncrement,,]'))
-    flip_slice = traits.Str(argstr='-f %s', desc='flipXxflipY')
-    permute_axes = traits.Bool(argstr='-g', desc='doPermute')
+        argstr="-s %s",
+        desc=(
+            "Number of slices to increment Slice1xSlice2xSlice3"
+            "[numberOfSlicesToIncrement,,]"
+        ),
+    )
+    flip_slice = traits.Str(argstr="-f %s", desc="flipXxflipY")
+    permute_axes = traits.Bool(argstr="-g", desc="doPermute")


 class CreateTiledMosaicOutputSpec(TraitedSpec):
-    output_image = File(exists=True, desc='image file')
+    output_image = File(exists=True, desc="image file")


 class CreateTiledMosaic(ANTSCommand):
@@ -182,12 +209,11 @@ class CreateTiledMosaic(ANTSCommand):
     -r rgb.nii.gz -s [2 ,100 ,160]'

     """
-    _cmd = 'CreateTiledMosaic'
+    _cmd = "CreateTiledMosaic"
     input_spec = CreateTiledMosaicInputSpec
     output_spec = CreateTiledMosaicOutputSpec

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['output_image'] = os.path.join(os.getcwd(),
-                                               self.inputs.output_image)
+        outputs["output_image"] = os.path.join(os.getcwd(), self.inputs.output_image)
         return outputs
diff --git a/nipype/interfaces/base/__init__.py b/nipype/interfaces/base/__init__.py
index 2284c1763a..2e54847958 100644
--- a/nipype/interfaces/base/__init__.py
+++ b/nipype/interfaces/base/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """
@@ -8,18 +7,45 @@
 This module defines the API of all nipype interfaces.
""" -from .core import (Interface, BaseInterface, SimpleInterface, CommandLine, - StdOutCommandLine, MpiCommandLine, SEMLikeCommandLine, - LibraryBaseInterface, PackageInfo) +from traits.trait_handlers import TraitDictObject, TraitListObject +from traits.trait_errors import TraitError -from .specs import (BaseTraitedSpec, TraitedSpec, DynamicTraitedSpec, - BaseInterfaceInputSpec, CommandLineInputSpec, - StdOutCommandLineInputSpec) +from .core import ( + Interface, + BaseInterface, + SimpleInterface, + CommandLine, + StdOutCommandLine, + MpiCommandLine, + SEMLikeCommandLine, + LibraryBaseInterface, + PackageInfo, +) + +from .specs import ( + BaseTraitedSpec, + TraitedSpec, + DynamicTraitedSpec, + BaseInterfaceInputSpec, + CommandLineInputSpec, + StdOutCommandLineInputSpec, +) from .traits_extension import ( - traits, Undefined, TraitDictObject, TraitListObject, TraitError, isdefined, - File, Directory, Str, DictStrStr, has_metadata, ImageFile, - OutputMultiObject, InputMultiObject, - OutputMultiPath, InputMultiPath) + traits, + Undefined, + isdefined, + has_metadata, + File, + ImageFile, + Directory, + Str, + DictStrStr, + OutputMultiObject, + InputMultiObject, + OutputMultiPath, + InputMultiPath, + Tuple, +) -from .support import (Bunch, InterfaceResult, NipypeInterfaceError) +from .support import Bunch, InterfaceResult, NipypeInterfaceError diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py index ae002cf17f..8fadd9cc2d 100644 --- a/nipype/interfaces/base/core.py +++ b/nipype/interfaces/base/core.py @@ -1,63 +1,67 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Nipype interfaces core ...................... - Defines the ``Interface`` API and the body of the most basic interfaces. The I/O specifications corresponding to these base interfaces are found in the ``specs`` module. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from builtins import object, open, str, bytes - -from copy import deepcopy -from datetime import datetime as dt import os -import platform import subprocess as sp import shlex -import sys import simplejson as json -from dateutil.parser import parse as parseutc -from future import standard_library +from traits.trait_errors import TraitError -from ... import config, logging, LooseVersion +from looseversion import LooseVersion + +from ... 
import config, logging from ...utils.provenance import write_provenance -from ...utils.misc import str2bool, rgetcwd -from ...utils.filemanip import (FileNotFoundError, split_filename, - which, get_dependencies) +from ...utils.misc import str2bool +from ...utils.filemanip import ( + canonicalize_env, + get_dependencies, + indirectory, + split_filename, + which, +) from ...utils.subprocess import run_command from ...external.due import due -from .traits_extension import traits, isdefined, TraitError -from .specs import (BaseInterfaceInputSpec, CommandLineInputSpec, - StdOutCommandLineInputSpec, MpiCommandLineInputSpec, - get_filecopy_info) -from .support import (Bunch, InterfaceResult, NipypeInterfaceError, - format_help) - -standard_library.install_aliases() - -iflogger = logging.getLogger('nipype.interface') +from .traits_extension import traits, isdefined, Undefined +from .specs import ( + BaseInterfaceInputSpec, + CommandLineInputSpec, + StdOutCommandLineInputSpec, + MpiCommandLineInputSpec, + get_filecopy_info, +) +from .support import ( + RuntimeContext, + InterfaceResult, + NipypeInterfaceError, + format_help, +) + +iflogger = logging.getLogger("nipype.interface") -PY35 = sys.version_info >= (3, 5) -PY3 = sys.version_info[0] > 2 VALID_TERMINAL_OUTPUT = [ - 'stream', 'allatonce', 'file', 'file_split', 'file_stdout', 'file_stderr', - 'none' + "stream", + "allatonce", + "file", + "file_split", + "file_stdout", + "file_stderr", + "none", ] -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" -class Interface(object): +class Interface: """This is an abstract definition for Interface objects. It provides no functionality. It defines the necessary attributes @@ -65,8 +69,15 @@ class Interface(object): """ - input_spec = None # A traited input specification - output_spec = None # A traited output specification + input_spec = None + """ + The specification of the input, defined by a :py:class:`~traits.has_traits.HasTraits` class. + """ + output_spec = None + """ + The specification of the output, defined by a :py:class:`~traits.has_traits.HasTraits` class. + """ + _can_resume = False # See property below _always_run = False # See property below @@ -89,12 +100,12 @@ def version(self): @classmethod def _outputs(cls): - """ Initializes outputs""" + """Initializes outputs""" raise NotImplementedError @classmethod def help(cls, returnhelp=False): - """ Prints class help """ + """Prints class help""" allhelp = format_help(cls) if returnhelp: return allhelp @@ -114,7 +125,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): raise NotImplementedError def _list_outputs(self): - """ List expected outputs""" + """List expected outputs""" raise NotImplementedError @classmethod @@ -123,16 +134,14 @@ def _get_filecopy_info(cls): Necessary for pipeline operation """ iflogger.warning( - '_get_filecopy_info member of Interface was deprecated ' - 'in nipype-1.1.6 and will be removed in 1.2.0') + "_get_filecopy_info member of Interface was deprecated " + "in nipype-1.1.6 and will be removed in 1.2.0" + ) return get_filecopy_info(cls) class BaseInterface(Interface): - """Implements common interface functionality. - - Implements - ---------- + """Implement common interface functionality. * Initializes inputs/outputs from input_spec/output_spec * Provides help based on input_spec and output_spec @@ -145,34 +154,55 @@ class BaseInterface(Interface): This class cannot be instantiated. 
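+
+    Concrete interfaces assign ``input_spec``/``output_spec`` and implement
+    ``_run_interface`` (plus, typically, ``_list_outputs``). A minimal
+    sketch, illustrative only and not an interface from the codebase:
+
+    .. code-block:: python
+
+        from nipype.interfaces.base import (
+            BaseInterface,
+            BaseInterfaceInputSpec,
+            TraitedSpec,
+            traits,
+        )
+
+        class _AddOneInputSpec(BaseInterfaceInputSpec):
+            x = traits.Int(mandatory=True, desc="input integer")
+
+        class _AddOneOutputSpec(TraitedSpec):
+            y = traits.Int(desc="input plus one")
+
+        class AddOne(BaseInterface):
+            input_spec = _AddOneInputSpec
+            output_spec = _AddOneOutputSpec
+
+            def _run_interface(self, runtime):
+                self._y = self.inputs.x + 1  # compute and stash the result
+                return runtime
+
+            def _list_outputs(self):
+                return {"y": self._y}
+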
- - Relevant Interface attributes - ----------------------------- - - ``input_spec`` points to the traited class for the inputs - ``output_spec`` points to the traited class for the outputs - ``_redirect_x`` should be set to ``True`` when the interface requires - connecting to a ``$DISPLAY`` (default is ``False``). - ``resource_monitor`` if ``False`` prevents resource-monitoring this - interface, if ``True`` monitoring will be enabled IFF the general - Nipype config is set on (``resource_monitor = true``). - + Attributes + ---------- + input_spec: :obj:`nipype.interfaces.base.specs.TraitedSpec` + points to the traited class for the inputs + output_spec: :obj:`nipype.interfaces.base.specs.TraitedSpec` + points to the traited class for the outputs + _redirect_x: bool + should be set to ``True`` when the interface requires + connecting to a ``$DISPLAY`` (default is ``False``). + resource_monitor: bool + If ``False``, prevents resource-monitoring this interface + If ``True`` monitoring will be enabled IFF the general + Nipype config is set on (``resource_monitor = true``). """ + input_spec = BaseInterfaceInputSpec _version = None _additional_metadata = [] _redirect_x = False - references_ = [] + _references = [] resource_monitor = True # Enabled for this interface IFF enabled in the config + _etelemetry_version_data = None + + def __init__( + self, from_file=None, resource_monitor=None, ignore_exception=False, **inputs + ): + if ( + config.getboolean("execution", "check_version") + and "NIPYPE_NO_ET" not in os.environ + ): + from ... import check_latest_version + + if BaseInterface._etelemetry_version_data is None: + BaseInterface._etelemetry_version_data = check_latest_version() or "n/a" - def __init__(self, from_file=None, resource_monitor=None, - ignore_exception=False, **inputs): if not self.input_spec: - raise Exception( - 'No input_spec in class: %s' % self.__class__.__name__) + raise Exception("No input_spec in class: %s" % self.__class__.__name__) + + # Create input spec, disable any defaults that are unavailable due to + # version, and then apply the inputs that were passed. + self.inputs = self.input_spec() + unavailable_traits = self._check_version_requirements( + self.inputs, permissive=True + ) + if unavailable_traits: + self.inputs.trait_set(**{k: Undefined for k in unavailable_traits}) + self.inputs.trait_set(**inputs) - self.inputs = self.input_spec(**inputs) self.ignore_exception = ignore_exception if resource_monitor is not None: @@ -185,8 +215,7 @@ def __init__(self, from_file=None, resource_monitor=None, setattr(self.inputs, name, value) def _outputs(self): - """ Returns a bunch containing output fields for the class - """ + """Returns a bunch containing output fields for the class""" outputs = None if self.output_spec: outputs = self.output_spec() @@ -194,54 +223,71 @@ def _outputs(self): return outputs def _check_requires(self, spec, name, value): - """ check if required inputs are satisfied - """ + """check if required inputs are satisfied""" if spec.requires: values = [ - not isdefined(getattr(self.inputs, field)) - for field in spec.requires + not isdefined(getattr(self.inputs, field)) for field in spec.requires ] if any(values) and isdefined(value): - msg = ("%s requires a value for input '%s' because one of %s " - "is set. For a list of required inputs, see %s.help()" % - (self.__class__.__name__, name, - ', '.join(spec.requires), self.__class__.__name__)) + if len(values) > 1: + fmt = ( + "%s requires values for inputs %s because '%s' is set. 
" + "For a list of required inputs, see %s.help()" + ) + else: + fmt = ( + "%s requires a value for input %s because '%s' is set. " + "For a list of required inputs, see %s.help()" + ) + msg = fmt % ( + self.__class__.__name__, + ", ".join("'%s'" % req for req in spec.requires), + name, + self.__class__.__name__, + ) raise ValueError(msg) def _check_xor(self, spec, name, value): - """ check if mutually exclusive inputs are satisfied - """ + """check if mutually exclusive inputs are satisfied""" if spec.xor: - values = [ - isdefined(getattr(self.inputs, field)) for field in spec.xor - ] + values = [isdefined(getattr(self.inputs, field)) for field in spec.xor] if not any(values) and not isdefined(value): - msg = ("%s requires a value for one of the inputs '%s'. " - "For a list of required inputs, see %s.help()" % - (self.__class__.__name__, ', '.join(spec.xor), - self.__class__.__name__)) + msg = ( + "%s requires a value for one of the inputs '%s'. " + "For a list of required inputs, see %s.help()" + % ( + self.__class__.__name__, + ", ".join(spec.xor), + self.__class__.__name__, + ) + ) raise ValueError(msg) def _check_mandatory_inputs(self): - """ Raises an exception if a mandatory input is Undefined - """ + """Raises an exception if a mandatory input is Undefined""" for name, spec in list(self.inputs.traits(mandatory=True).items()): value = getattr(self.inputs, name) self._check_xor(spec, name, value) if not isdefined(value) and spec.xor is None: - msg = ("%s requires a value for input '%s'. " - "For a list of required inputs, see %s.help()" % - (self.__class__.__name__, name, - self.__class__.__name__)) + msg = ( + "%s requires a value for input '%s'. " + "For a list of required inputs, see %s.help()" + % (self.__class__.__name__, name, self.__class__.__name__) + ) raise ValueError(msg) if isdefined(value): self._check_requires(spec, name, value) for name, spec in list( - self.inputs.traits(mandatory=None, transient=None).items()): + self.inputs.traits(mandatory=None, transient=None).items() + ): self._check_requires(spec, name, getattr(self.inputs, name)) - def _check_version_requirements(self, trait_object, raise_exception=True): - """ Raises an exception on version mismatch + def _check_version_requirements(self, trait_object, permissive=False): + """Raises an exception on version mismatch + + Set the ``permissive`` attribute to True to suppress warnings and exceptions. + This is currently only used in __init__ to silently identify unavailable + traits. """ unavailable_traits = [] # check minimum version @@ -251,16 +297,28 @@ def _check_version_requirements(self, trait_object, raise_exception=True): if names and self.version: version = LooseVersion(str(self.version)) for name in names: - min_ver = LooseVersion( - str(trait_object.traits()[name].min_ver)) - if min_ver > version: + min_ver = LooseVersion(str(trait_object.traits()[name].min_ver)) + try: + too_old = min_ver > version + except TypeError as err: + msg = ( + f"Nipype cannot validate the package version {version!r} for " + f"{self.__class__.__name__}. Trait {name} requires version >={min_ver}." + ) + if not permissive: + iflogger.warning(f"{msg}. 
Please verify validity.") + if config.getboolean("execution", "stop_on_unknown_version"): + raise ValueError(msg) from err + continue + if too_old: unavailable_traits.append(name) if not isdefined(getattr(trait_object, name)): continue - if raise_exception: + if not permissive: raise Exception( - 'Trait %s (%s) (version %s < required %s)' % - (name, self.__class__.__name__, version, min_ver)) + "Trait %s (%s) (version %s < required %s)" + % (name, self.__class__.__name__, version, min_ver) + ) # check maximum version check = dict(max_ver=lambda t: t is not None) @@ -268,28 +326,38 @@ def _check_version_requirements(self, trait_object, raise_exception=True): if names and self.version: version = LooseVersion(str(self.version)) for name in names: - max_ver = LooseVersion( - str(trait_object.traits()[name].max_ver)) - if max_ver < version: + max_ver = LooseVersion(str(trait_object.traits()[name].max_ver)) + try: + too_new = max_ver < version + except TypeError as err: + msg = ( + f"Nipype cannot validate the package version {version!r} for " + f"{self.__class__.__name__}. Trait {name} requires version <={max_ver}." + ) + if not permissive: + iflogger.warning(f"{msg}. Please verify validity.") + if config.getboolean("execution", "stop_on_unknown_version"): + raise ValueError(msg) from err + continue + if too_new: unavailable_traits.append(name) if not isdefined(getattr(trait_object, name)): continue - if raise_exception: + if not permissive: raise Exception( - 'Trait %s (%s) (version %s > required %s)' % - (name, self.__class__.__name__, version, max_ver)) + "Trait %s (%s) (version %s > required %s)" + % (name, self.__class__.__name__, version, max_ver) + ) return unavailable_traits def _run_interface(self, runtime): - """ Core function that executes interface - """ + """Core function that executes interface""" raise NotImplementedError def _duecredit_cite(self): - """ Add the interface references to the duecredit citations - """ - for r in self.references_: - r['path'] = self.__module__ + """Add the interface references to the duecredit citations""" + for r in self._references: + r["path"] = self.__module__ due.cite(**r) def run(self, cwd=None, ignore_exception=None, **inputs): @@ -300,185 +368,110 @@ def run(self, cwd=None, ignore_exception=None, **inputs): Parameters ---------- - cwd : specify a folder where the interface should be run inputs : allows the interface settings to be updated Returns ------- - results : an InterfaceResult object containing a copy of the instance - that was executed, provenance information and, if successful, results - """ - from ...utils.profiler import ResourceMonitor - - # if ignore_exception is not provided, taking self.ignore_exception - if ignore_exception is None: - ignore_exception = self.ignore_exception - - # Tear-up: get current and prev directories - syscwd = rgetcwd(error=False) # Recover when wd does not exist - if cwd is None: - cwd = syscwd - - os.chdir(cwd) # Change to the interface wd + results : :obj:`nipype.interfaces.base.support.InterfaceResult` + A copy of the instance that was executed, provenance information and, + if successful, results - enable_rm = config.resource_monitor and self.resource_monitor - self.inputs.trait_set(**inputs) + """ + rtc = RuntimeContext( + resource_monitor=config.resource_monitor and self.resource_monitor, + ignore_exception=( + ignore_exception + if ignore_exception is not None + else self.ignore_exception + ), + ) + + with indirectory(cwd or os.getcwd()): + self.inputs.trait_set(**inputs) 
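+            # Two validation passes run before execution: mandatory and
+            # mutually-exclusive (xor) inputs are checked first, then traits
+            # carrying min_ver/max_ver metadata are compared against the
+            # resolved interface version.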
self._check_mandatory_inputs() self._check_version_requirements(self.inputs) - interface = self.__class__ - self._duecredit_cite() - # initialize provenance tracking - store_provenance = str2bool( - config.get('execution', 'write_provenance', 'false')) - env = deepcopy(dict(os.environ)) - if self._redirect_x: - env['DISPLAY'] = config.get_display() - - runtime = Bunch( - cwd=cwd, - prevcwd=syscwd, - returncode=None, - duration=None, - environ=env, - startTime=dt.isoformat(dt.utcnow()), - endTime=None, - platform=platform.platform(), - hostname=platform.node(), - version=self.version) - runtime_attrs = set(runtime.dictcopy()) - - mon_sp = None - if enable_rm: - mon_freq = float( - config.get('execution', 'resource_monitor_frequency', 1)) - proc_pid = os.getpid() - iflogger.debug( - 'Creating a ResourceMonitor on a %s interface, PID=%d.', - self.__class__.__name__, proc_pid) - mon_sp = ResourceMonitor(proc_pid, freq=mon_freq) - mon_sp.start() - - # Grab inputs now, as they should not change during execution - inputs = self.inputs.get_traitsfree() - outputs = None - - try: + with rtc(self, cwd=cwd, redirect_x=self._redirect_x) as runtime: + # Grab inputs now, as they should not change during execution + inputs = self.inputs.get_traitsfree() + outputs = None + # Run interface runtime = self._pre_run_hook(runtime) runtime = self._run_interface(runtime) runtime = self._post_run_hook(runtime) + # Collect outputs outputs = self.aggregate_outputs(runtime) - except Exception as e: - import traceback - # Retrieve the maximum info fast - runtime.traceback = traceback.format_exc() - # Gather up the exception arguments and append nipype info. - exc_args = e.args if getattr(e, 'args') else tuple() - exc_args += ( - 'An exception of type %s occurred while running interface %s.' 
- % (type(e).__name__, self.__class__.__name__), ) - if config.get('logging', 'interface_level', - 'info').lower() == 'debug': - exc_args += ('Inputs: %s' % str(self.inputs), ) - - runtime.traceback_args = ('\n'.join( - ['%s' % arg for arg in exc_args]), ) - - if not ignore_exception: - raise - finally: - if runtime is None or runtime_attrs - set(runtime.dictcopy()): - raise RuntimeError("{} interface failed to return valid " - "runtime object".format( - interface.__class__.__name__)) - # This needs to be done always - runtime.endTime = dt.isoformat(dt.utcnow()) - timediff = parseutc(runtime.endTime) - parseutc(runtime.startTime) - runtime.duration = (timediff.days * 86400 + timediff.seconds + - timediff.microseconds / 1e6) - results = InterfaceResult( - interface, - runtime, - inputs=inputs, - outputs=outputs, - provenance=None) - - # Add provenance (if required) - if store_provenance: - # Provenance will only throw a warning if something went wrong - results.provenance = write_provenance(results) - - # Make sure runtime profiler is shut down - if enable_rm: - import numpy as np - mon_sp.stop() - - runtime.mem_peak_gb = None - runtime.cpu_percent = None - - # Read .prof file in and set runtime values - vals = np.loadtxt(mon_sp.fname, delimiter=',') - if vals.size: - vals = np.atleast_2d(vals) - runtime.mem_peak_gb = vals[:, 2].max() / 1024 - runtime.cpu_percent = vals[:, 1].max() - - runtime.prof_dict = { - 'time': vals[:, 0].tolist(), - 'cpus': vals[:, 1].tolist(), - 'rss_GiB': (vals[:, 2] / 1024).tolist(), - 'vms_GiB': (vals[:, 3] / 1024).tolist(), - } - os.chdir(syscwd) + + results = InterfaceResult( + self.__class__, + rtc.runtime, + inputs=inputs, + outputs=outputs, + provenance=None, + ) + + # Add provenance (if required) + if str2bool(config.get("execution", "write_provenance", "false")): + # Provenance will only throw a warning if something went wrong + results.provenance = write_provenance(results) + + self._duecredit_cite() return results def _list_outputs(self): - """ List the expected outputs - """ + """List the expected outputs""" if self.output_spec: raise NotImplementedError else: return None def aggregate_outputs(self, runtime=None, needed_outputs=None): - """ Collate expected outputs and check for existence - """ + """Collate expected outputs and apply output traits validation.""" + outputs = self._outputs() # Generate an empty output spec object + predicted_outputs = self._list_outputs() # Predictions from _list_outputs + if not predicted_outputs: + return outputs - predicted_outputs = self._list_outputs() - outputs = self._outputs() - if predicted_outputs: - _unavailable_outputs = [] - if outputs: - _unavailable_outputs = \ - self._check_version_requirements(self._outputs()) - for key, val in list(predicted_outputs.items()): - if needed_outputs and key not in needed_outputs: - continue - if key in _unavailable_outputs: - raise KeyError(('Output trait %s not available in version ' - '%s of interface %s. Please inform ' - 'developers.') % (key, self.version, - self.__class__.__name__)) - try: - setattr(outputs, key, val) - except TraitError as error: - if getattr(error, 'info', - 'default').startswith('an existing'): - msg = ("File/Directory '%s' not found for %s output " - "'%s'." 
% (val, self.__class__.__name__, key)) - raise FileNotFoundError(msg) - raise error + # Precalculate the list of output trait names that should be aggregated + aggregate_names = set(predicted_outputs) + if needed_outputs is not None: + aggregate_names = set(needed_outputs).intersection(aggregate_names) + + if aggregate_names: # Make sure outputs are compatible + _na_outputs = self._check_version_requirements(outputs) + na_names = aggregate_names.intersection(_na_outputs) + if na_names: + # XXX Change to TypeError in Nipype 2.0 + raise KeyError( + """\ +Output trait(s) %s not available in version %s of interface %s.\ +""" + % (", ".join(na_names), self.version, self.__class__.__name__) + ) + for key in aggregate_names: # Final aggregation + val = predicted_outputs[key] + try: + setattr(outputs, key, val) + except TraitError as error: + if "an existing" in getattr(error, "info", "default"): + msg = ( + "No such file or directory '%s' for output '%s' of a %s interface" + % (val, key, self.__class__.__name__) + ) + raise FileNotFoundError(msg) + raise error return outputs @property def version(self): if self._version is None: - if str2bool(config.get('execution', 'stop_on_unknown_version')): - raise ValueError('Interface %s has no version information' % - self.__class__.__name__) + if str2bool(config.get("execution", "stop_on_unknown_version")): + raise ValueError( + "Interface %s has no version information" % self.__class__.__name__ + ) return self._version def load_inputs_from_json(self, json_file, overwrite=True): @@ -493,7 +486,7 @@ def load_inputs_from_json(self, json_file, overwrite=True): if not overwrite: def_inputs = list(self.inputs.get_traitsfree().keys()) - new_inputs = list(set(list(inputs_dict.keys())) - set(def_inputs)) + new_inputs = set(inputs_dict) - set(def_inputs) for key in new_inputs: if hasattr(self.inputs, key): setattr(self.inputs, key, inputs_dict[key]) @@ -503,8 +496,8 @@ def save_inputs_to_json(self, json_file): A convenient way to save current inputs to a JSON file. """ inputs = self.inputs.get_traitsfree() - iflogger.debug('saving inputs %s', inputs) - with open(json_file, 'w' if PY3 else 'wb') as fhandle: + iflogger.debug("saving inputs %s", inputs) + with open(json_file, "w") as fhandle: json.dump(inputs, fhandle, indent=4, ensure_ascii=False) def _pre_run_hook(self, runtime): @@ -531,7 +524,7 @@ def _post_run_hook(self, runtime): class SimpleInterface(BaseInterface): - """ An interface pattern that allows outputs to be set in a dictionary + """An interface pattern that allows outputs to be set in a dictionary called ``_results`` that is automatically interpreted by ``_list_outputs()`` to find the outputs. @@ -543,7 +536,6 @@ class SimpleInterface(BaseInterface): Examples -------- - >>> from nipype.interfaces.base import ( ... 
SimpleInterface, BaseInterfaceInputSpec, TraitedSpec) @@ -568,11 +560,13 @@ class SimpleInterface(BaseInterface): >>> dbl.inputs.x = 2 >>> dbl.run().outputs.doubled 4.0 + """ def __init__(self, from_file=None, resource_monitor=None, **inputs): - super(SimpleInterface, self).__init__( - from_file=from_file, resource_monitor=resource_monitor, **inputs) + super().__init__( + from_file=from_file, resource_monitor=resource_monitor, **inputs + ) self._results = {} def _list_outputs(self): @@ -585,14 +579,11 @@ class must be instantiated with a command argument Parameters ---------- - - command : string + command : str define base immutable `command` you wish to run - - args : string, optional + args : str, optional optional arguments passed to base `command` - Examples -------- >>> import pprint @@ -602,7 +593,7 @@ class must be instantiated with a command argument >>> cli.cmdline 'ls -al' - # Use get_traitsfree() to check all inputs set + >>> # Use get_traitsfree() to check all inputs set >>> pprint.pprint(cli.inputs.get_traitsfree()) # doctest: {'args': '-al', 'environ': {'DISPLAY': ':1'}} @@ -613,11 +604,13 @@ class must be instantiated with a command argument '11c37f97649cd61627f4afe5136af8c0' """ + input_spec = CommandLineInputSpec - _cmd_prefix = '' + _cmd_prefix = "" _cmd = None _version = None - _terminal_output = 'stream' + _terminal_output = "stream" + _write_cmdline = False @classmethod def set_default_terminal_output(cls, output_type): @@ -632,18 +625,18 @@ def set_default_terminal_output(cls, output_type): if output_type in VALID_TERMINAL_OUTPUT: cls._terminal_output = output_type else: - raise AttributeError( - 'Invalid terminal output_type: %s' % output_type) + raise AttributeError("Invalid terminal output_type: %s" % output_type) - def __init__(self, command=None, terminal_output=None, **inputs): - super(CommandLine, self).__init__(**inputs) + def __init__( + self, command=None, terminal_output=None, write_cmdline=False, **inputs + ): + super().__init__(**inputs) self._environ = None # Set command. Input argument takes precedence - self._cmd = command or getattr(self, '_cmd', None) + self._cmd = command or getattr(self, "_cmd", None) # Store dependencies in runtime object - self._ldd = str2bool( - config.get('execution', 'get_linked_libs', 'true')) + self._ldd = str2bool(config.get("execution", "get_linked_libs", "true")) if self._cmd is None: raise Exception("Missing command") @@ -651,22 +644,25 @@ def __init__(self, command=None, terminal_output=None, **inputs): if terminal_output is not None: self.terminal_output = terminal_output + self._write_cmdline = write_cmdline + @property def cmd(self): """sets base command, immutable""" if not self._cmd: raise NotImplementedError( - 'CommandLineInterface should wrap an executable, but ' - 'none has been set.') + "CommandLineInterface should wrap an executable, but " + "none has been set." + ) return self._cmd @property def cmdline(self): - """ `command` plus any arguments (args) + """`command` plus any arguments (args) validates arguments and generates command line""" self._check_mandatory_inputs() allargs = [self._cmd_prefix + self.cmd] + self._parse_inputs() - return ' '.join(allargs) + return " ".join(allargs) @property def terminal_output(self): @@ -677,23 +673,34 @@ def terminal_output(self, value): if value not in VALID_TERMINAL_OUTPUT: raise RuntimeError( 'Setting invalid value "%s" for terminal_output. Valid values are ' - '%s.' % (value, - ', '.join(['"%s"' % v - for v in VALID_TERMINAL_OUTPUT]))) + "%s." 
% (value, ", ".join(['"%s"' % v for v in VALID_TERMINAL_OUTPUT])) + ) self._terminal_output = value + @property + def write_cmdline(self): + return self._write_cmdline + + @write_cmdline.setter + def write_cmdline(self, value): + self._write_cmdline = value is True + def raise_exception(self, runtime): raise RuntimeError( - ('Command:\n{cmdline}\nStandard output:\n{stdout}\n' - 'Standard error:\n{stderr}\nReturn code: {returncode}' - ).format(**runtime.dictcopy())) + ( + "Command:\n{cmdline}\nStandard output:\n{stdout}\n" + "Standard error:\n{stderr}\nReturn code: {returncode}" + ).format(**runtime.dictcopy()) + ) def _get_environ(self): - return getattr(self.inputs, 'environ', {}) + return getattr(self.inputs, "environ", {}) - def version_from_command(self, flag='-v', cmd=None): - iflogger.warning('version_from_command member of CommandLine was ' - 'Deprecated in nipype-1.0.0 and deleted in 1.1.0') + def version_from_command(self, flag="-v", cmd=None): + iflogger.warning( + "version_from_command member of CommandLine was " + "Deprecated in nipype-1.0.0 and deleted in 1.1.0" + ) if cmd is None: cmd = self.cmd.split()[0] @@ -702,16 +709,16 @@ def version_from_command(self, flag='-v', cmd=None): out_environ = self._get_environ() env.update(out_environ) proc = sp.Popen( - ' '.join((cmd, flag)), + f"{cmd} {flag}", shell=True, - env=env, + env=canonicalize_env(env), stdout=sp.PIPE, stderr=sp.PIPE, ) o, e = proc.communicate() return o - def _run_interface(self, runtime, correct_return_codes=(0, )): + def _run_interface(self, runtime, correct_return_codes=(0,)): """Execute command via subprocess Parameters @@ -720,37 +727,49 @@ def _run_interface(self, runtime, correct_return_codes=(0, )): Returns ------- - runtime : updated runtime information + runtime : + updated runtime information adds stdout, stderr, merged, cmdline, dependencies, command_path """ - out_environ = self._get_environ() # Initialize runtime Bunch + + try: + runtime.cmdline = self.cmdline + except Exception as exc: + raise RuntimeError( + "Error raised when interpolating the command line" + ) from exc + runtime.stdout = None runtime.stderr = None runtime.cmdline = self.cmdline runtime.environ.update(out_environ) + runtime.success_codes = correct_return_codes # which $cmd executable_name = shlex.split(self._cmd_prefix + self.cmd)[0] cmd_path = which(executable_name, env=runtime.environ) if cmd_path is None: - raise IOError( + raise OSError( 'No command "%s" found on host %s. Please check that the ' - 'corresponding package is installed.' % (executable_name, - runtime.hostname)) + "corresponding package is installed." 
+                % (executable_name, runtime.hostname)
+            )
         runtime.command_path = cmd_path
-        runtime.dependencies = (get_dependencies(executable_name,
-                                                 runtime.environ)
-                                if self._ldd else '')
-        runtime = run_command(runtime, output=self.terminal_output)
-        if runtime.returncode is None or \
-                runtime.returncode not in correct_return_codes:
-            self.raise_exception(runtime)
-
+        runtime.dependencies = (
+            get_dependencies(executable_name, runtime.environ)
+            if self._ldd
+            else ""
+        )
+        runtime = run_command(
+            runtime,
+            output=self.terminal_output,
+            write_cmdline=self.write_cmdline,
+        )
         return runtime
 
     def _format_arg(self, name, trait_spec, value):
@@ -759,15 +778,15 @@ def _format_arg(self, name, trait_spec, value):
         Formats a trait containing argstr metadata
         """
         argstr = trait_spec.argstr
-        iflogger.debug('%s_%s', name, value)
+        iflogger.debug("%s_%s", name, value)
         if trait_spec.is_trait_type(traits.Bool) and "%" not in argstr:
             # Boolean options have no format string. Just append options if True.
             return argstr if value else None
         # traits.Either turns into traits.TraitCompound and does not have any
         # inner_traits
-        elif trait_spec.is_trait_type(traits.List) \
-            or (trait_spec.is_trait_type(traits.TraitCompound) and
-                isinstance(value, list)):
+        elif trait_spec.is_trait_type(traits.List) or (
+            trait_spec.is_trait_type(traits.TraitCompound) and isinstance(value, list)
+        ):
             # This is a bit simple-minded at present, and should be
             # construed as the default. If more sophisticated behavior
             # is needed, it can be accomplished with metadata (e.g.
             # format string for list member str'ification, specifying
             # the separator, etc.)
 
             # Depending on whether we stick with traitlets, and whether or
             # not we beef up traitlets.List, we may want to put some
             # type-checking code here as well
-            sep = trait_spec.sep if trait_spec.sep is not None else ' '
+            sep = trait_spec.sep if trait_spec.sep is not None else " "
 
-            if argstr.endswith('...'):
+            if argstr.endswith("..."):
                 # repeatable option
                 # --id %d... will expand to
                 # --id 1 --id 2 --id 3 etc.,.
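# ---------------------------------------------------------------------------
# Editor's aside (not part of the diff): a minimal sketch of the two list
# branches of ``_format_arg`` above. The spec, interface, and executable are
# hypothetical; only the ``argstr``/``sep`` handling is being illustrated.
from nipype.interfaces.base import CommandLine, CommandLineInputSpec, traits

class _DemoInputSpec(CommandLineInputSpec):
    ids = traits.List(traits.Int, argstr="--id %d...")       # trailing '...': flag repeats per element
    cols = traits.List(traits.Str, argstr="-c %s", sep=",")  # no '...': elements sep-joined, flag once

class _Demo(CommandLine):
    _cmd = "demo-tool"  # hypothetical executable
    input_spec = _DemoInputSpec

cli = _Demo(ids=[1, 2, 3], cols=["a", "b"])
print(cli.cmdline)  # e.g. 'demo-tool -c a,b --id 1 --id 2 --id 3' (unpositioned args sort by name)
# ---------------------------------------------------------------------------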
- argstr = argstr.replace('...', '') + argstr = argstr.replace("...", "") return sep.join([argstr % elt for elt in value]) else: return argstr % sep.join(str(elt) for elt in value) @@ -803,13 +822,16 @@ def _filename_from_source(self, name, chain=None): return retval # Do not generate filename when excluded by other inputs - if any(isdefined(getattr(self.inputs, field)) - for field in trait_spec.xor or ()): + if any( + isdefined(getattr(self.inputs, field)) for field in trait_spec.xor or () + ): return retval # Do not generate filename when required fields are missing - if not all(isdefined(getattr(self.inputs, field)) - for field in trait_spec.requires or ()): + if not all( + isdefined(getattr(self.inputs, field)) + for field in trait_spec.requires or () + ): return retval if isdefined(retval) and "%s" in retval: @@ -822,15 +844,14 @@ def _filename_from_source(self, name, chain=None): ns = trait_spec.name_source while isinstance(ns, (list, tuple)): if len(ns) > 1: - iflogger.warning( - 'Only one name_source per trait is allowed') + iflogger.warning("Only one name_source per trait is allowed") ns = ns[0] if not isinstance(ns, (str, bytes)): raise ValueError( - 'name_source of \'{}\' trait should be an input trait ' - 'name, but a type {} object was found'.format( - name, type(ns))) + "name_source of '{}' trait should be an input trait " + "name, but a type {} object was found".format(name, type(ns)) + ) if isdefined(getattr(self.inputs, ns)): name_source = ns @@ -845,8 +866,7 @@ def _filename_from_source(self, name, chain=None): base = source else: if name in chain: - raise NipypeInterfaceError( - 'Mutually pointing name_sources') + raise NipypeInterfaceError("Mutually pointing name_sources") chain.append(name) base = self._filename_from_source(ns, chain) @@ -914,7 +934,14 @@ def _parse_inputs(self, skip=None): if not isdefined(value): continue - arg = self._format_arg(name, spec, value) + + try: + arg = self._format_arg(name, spec, value) + except Exception as exc: + raise ValueError( + f"Error formatting command line argument '{name}' with value '{value}'" + ) from exc + if arg is None: continue pos = spec.position @@ -934,7 +961,7 @@ class StdOutCommandLine(CommandLine): input_spec = StdOutCommandLineInputSpec def _gen_filename(self, name): - return self._gen_outfilename() if name == 'out_file' else None + return self._gen_outfilename() if name == "out_file" else None def _gen_outfilename(self): raise NotImplementedError @@ -956,19 +983,21 @@ class MpiCommandLine(CommandLine): >>> mpi_cli.inputs.n_procs = 8 >>> mpi_cli.cmdline 'mpiexec -n 8 my_mpi_prog -v' + """ + input_spec = MpiCommandLineInputSpec @property def cmdline(self): - """Adds 'mpiexec' to begining of command""" + """Adds 'mpiexec' to beginning of command""" result = [] if self.inputs.use_mpi: - result.append('mpiexec') + result.append("mpiexec") if self.inputs.n_procs: - result.append('-n %d' % self.inputs.n_procs) - result.append(super(MpiCommandLine, self).cmdline) - return ' '.join(result) + result.append("-n %d" % self.inputs.n_procs) + result.append(super().cmdline) + return " ".join(result) class SEMLikeCommandLine(CommandLine): @@ -988,10 +1017,8 @@ def _outputs_from_inputs(self, outputs): for name in list(outputs.keys()): corresponding_input = getattr(self.inputs, name) if isdefined(corresponding_input): - if (isinstance(corresponding_input, bool) - and corresponding_input): - outputs[name] = \ - os.path.abspath(self._outputs_filenames[name]) + if isinstance(corresponding_input, bool) and corresponding_input: + 
outputs[name] = os.path.abspath(self._outputs_filenames[name]) else: if isinstance(corresponding_input, list): outputs[name] = [ @@ -1008,7 +1035,7 @@ def _format_arg(self, name, spec, value): value = os.path.abspath(self._outputs_filenames[name]) else: return "" - return super(SEMLikeCommandLine, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class LibraryBaseInterface(BaseInterface): @@ -1016,29 +1043,35 @@ class LibraryBaseInterface(BaseInterface): imports = () def __init__(self, check_import=True, *args, **kwargs): - super(LibraryBaseInterface, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) if check_import: - import pkgutil - failed_imports = [] - for pkg in (self._pkg,) + tuple(self.imports): - if pkgutil.find_loader(pkg) is None: - failed_imports.append(pkg) + import importlib.util + + failed_imports = [ + pkg + for pkg in (self._pkg,) + tuple(self.imports) + if importlib.util.find_spec(pkg) is None + ] if failed_imports: - iflogger.warning('Unable to import %s; %s interface may fail to ' - 'run', failed_imports, self.__class__.__name__) + iflogger.warning( + "Unable to import %s; %s interface may fail to run", + failed_imports, + self.__class__.__name__, + ) @property def version(self): if self._version is None: import importlib + try: self._version = importlib.import_module(self._pkg).__version__ except (ImportError, AttributeError): pass - return super(LibraryBaseInterface, self).version + return super().version -class PackageInfo(object): +class PackageInfo: _version = None version_cmd = None version_file = None @@ -1051,14 +1084,15 @@ def version(klass): clout = CommandLine( command=klass.version_cmd, resource_monitor=False, - terminal_output='allatonce').run() - except IOError: + terminal_output="allatonce", + ).run() + except OSError: return None raw_info = clout.runtime.stdout elif klass.version_file is not None: try: - with open(klass.version_file, 'rt') as fobj: + with open(klass.version_file) as fobj: raw_info = fobj.read() except OSError: return None diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py index dbbc816dc9..a7f61e6889 100644 --- a/nipype/interfaces/base/specs.py +++ b/nipype/interfaces/base/specs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -9,30 +8,28 @@ Define the API for the I/O of interfaces """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) - import os from inspect import isclass from copy import deepcopy from warnings import warn -from builtins import str, bytes from packaging.version import Version -from ...utils.filemanip import md5, hash_infile, hash_timestamp, to_str +from traits.trait_errors import TraitError +from traits.trait_handlers import TraitDictObject, TraitListObject +from ...utils.filemanip import md5, hash_infile, hash_timestamp from .traits_extension import ( traits, + File, + Str, Undefined, isdefined, - TraitError, - TraitDictObject, - TraitListObject, has_metadata, + OutputMultiObject, ) from ... import config, __version__ -FLOAT_FORMAT = '{:.10f}'.format +_float_fmt = "{:.10f}".format nipype_version = Version(__version__) @@ -56,15 +53,16 @@ class BaseTraitedSpec(traits.HasTraits): XXX Reconsider this in the long run, but it seems like the best solution to move forward on the refactoring. 
""" + package_version = nipype_version def __init__(self, **kwargs): - """ Initialize handlers and inputs""" + """Initialize handlers and inputs""" # NOTE: In python 2.6, object.__init__ no longer accepts input # arguments. HasTraits does not define an __init__ and # therefore these args were being ignored. # super(TraitedSpec, self).__init__(*args, **kwargs) - super(BaseTraitedSpec, self).__init__(**kwargs) + super().__init__(**kwargs) traits.push_exception_handler(reraise_exceptions=True) undefined_traits = {} for trait in self.copyable_trait_names(): @@ -75,17 +73,16 @@ def __init__(self, **kwargs): self.trait_set(**kwargs) def items(self): - """ Name, trait generator for user modifiable traits - """ + """Name, trait generator for user modifiable traits""" for name in sorted(self.copyable_trait_names()): yield name, self.traits()[name] def __repr__(self): - """ Return a well-formatted representation of the traits """ + """Return a well-formatted representation of the traits""" outstr = [] for name, value in sorted(self.trait_get().items()): - outstr.append('%s = %s' % (name, value)) - return '\n{}\n'.format('\n'.join(outstr)) + outstr.append(f"{name} = {value}") + return "\n{}\n".format("\n".join(outstr)) def _generate_handlers(self): """Find all traits with the 'xor' metadata and attach an event @@ -101,8 +98,7 @@ def _generate_handlers(self): self.on_trait_change(self._deprecated_warn, elem) def _xor_warn(self, obj, name, old, new): - """ Generates warnings for xor traits - """ + """Generates warnings for xor traits""" if isdefined(new): trait_spec = self.traits()[name] # for each xor, set to default_value @@ -112,72 +108,76 @@ def _xor_warn(self, obj, name, old, new): continue if isdefined(getattr(self, trait_name)): self.trait_set( - trait_change_notify=False, **{ - '%s' % name: Undefined - }) - msg = ('Input "%s" is mutually exclusive with input "%s", ' - 'which is already set') % (name, trait_name) - raise IOError(msg) + trait_change_notify=False, **{"%s" % name: Undefined} + ) + msg = ( + 'Input "%s" is mutually exclusive with input "%s", ' + "which is already set" + ) % (name, trait_name) + raise OSError(msg) def _deprecated_warn(self, obj, name, old, new): - """Checks if a user assigns a value to a deprecated trait - """ + """Checks if a user assigns a value to a deprecated trait""" if isdefined(new): trait_spec = self.traits()[name] - msg1 = ('Input %s in interface %s is deprecated.' % - (name, self.__class__.__name__.split('InputSpec')[0])) - msg2 = ('Will be removed or raise an error as of release %s' % - trait_spec.deprecated) + msg1 = "Input {} in interface {} is deprecated.".format( + name, + self.__class__.__name__.split("InputSpec")[0], + ) + msg2 = ( + "Will be removed or raise an error as of release %s" + % trait_spec.deprecated + ) if trait_spec.new_name: if trait_spec.new_name not in self.copyable_trait_names(): - raise TraitError(msg1 + ' Replacement trait %s not found' % - trait_spec.new_name) - msg3 = 'It has been replaced by %s.' % trait_spec.new_name + raise TraitError( + msg1 + " Replacement trait %s not found" % trait_spec.new_name + ) + msg3 = "It has been replaced by %s." % trait_spec.new_name else: - msg3 = '' - msg = ' '.join((msg1, msg2, msg3)) + msg3 = "" + msg = f"{msg1} {msg2} {msg3}" if Version(str(trait_spec.deprecated)) < self.package_version: raise TraitError(msg) else: if trait_spec.new_name: - msg += 'Unsetting old value %s; setting new value %s.' 
% ( - name, trait_spec.new_name) + msg += "Unsetting old value {}; setting new value {}.".format( + name, + trait_spec.new_name, + ) warn(msg) if trait_spec.new_name: self.trait_set( trait_change_notify=False, - **{ - '%s' % name: Undefined, - '%s' % trait_spec.new_name: new - }) + **{"%s" % name: Undefined, "%s" % trait_spec.new_name: new}, + ) def trait_get(self, **kwargs): - """ Returns traited class as a dict + """Returns traited class as a dict Augments the trait get function to return a dictionary without notification handles """ - out = super(BaseTraitedSpec, self).trait_get(**kwargs) + out = super().trait_get(**kwargs) out = self._clean_container(out, Undefined) return out get = trait_get def get_traitsfree(self, **kwargs): - """ Returns traited class as a dict + """Returns traited class as a dict Augments the trait get function to return a dictionary without any traits. The dictionary does not contain any attributes that were Undefined """ - out = super(BaseTraitedSpec, self).trait_get(**kwargs) + out = super().trait_get(**kwargs) out = self._clean_container(out, skipundefined=True) return out def _clean_container(self, objekt, undefinedval=None, skipundefined=False): - """Convert a traited obejct into a pure python representation. - """ - if isinstance(objekt, TraitDictObject) or isinstance(objekt, dict): + """Convert a traited object into a pure python representation.""" + if isinstance(objekt, (TraitDictObject, dict)): out = {} for key, val in list(objekt.items()): if isdefined(val): @@ -185,8 +185,7 @@ def _clean_container(self, objekt, undefinedval=None, skipundefined=False): else: if not skipundefined: out[key] = undefinedval - elif (isinstance(objekt, TraitListObject) or isinstance(objekt, list) or - isinstance(objekt, tuple)): + elif isinstance(objekt, (TraitListObject, list, tuple)): out = [] for val in objekt: if isdefined(val): @@ -212,8 +211,7 @@ def has_metadata(self, name, metadata, value=None, recursive=True): Return has_metadata for the requested trait name in this interface """ - return has_metadata( - self.trait(name).trait_type, metadata, value, recursive) + return has_metadata(self.trait(name).trait_type, metadata, value, recursive) def get_hashval(self, hash_method=None): """Return a dictionary of our items with hashes for each file. 
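# ---------------------------------------------------------------------------
# Editor's aside (not part of the diff): a minimal sketch of the mutual-
# exclusion behavior enforced by the ``_xor_warn`` handler above. ``_DemoSpec``
# is hypothetical; note the handler now raises OSError (formerly IOError).
from nipype.interfaces.base import TraitedSpec, traits

class _DemoSpec(TraitedSpec):
    _xor = ("foo", "bar")
    foo = traits.Int(xor=_xor, desc="foo or bar, not both")
    bar = traits.Int(xor=_xor, desc="bar or foo, not both")

spec = _DemoSpec()
spec.foo = 1
try:
    spec.bar = 2  # handler resets 'bar' to Undefined, then raises
except OSError as err:
    print(err)  # Input "bar" is mutually exclusive with input "foo", which is already set
# ---------------------------------------------------------------------------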
@@ -242,36 +240,45 @@ def get_hashval(self, hash_method=None): # skip undefined traits and traits with nohash=True continue - hash_files = (not self.has_metadata(name, "hash_files", False) and - not self.has_metadata(name, "name_source")) - list_nofilename.append((name, - self._get_sorteddict( - val, - hash_method=hash_method, - hash_files=hash_files))) - list_withhash.append((name, - self._get_sorteddict( - val, - True, - hash_method=hash_method, - hash_files=hash_files))) - return list_withhash, md5(to_str(list_nofilename).encode()).hexdigest() - - def _get_sorteddict(self, - objekt, - dictwithhash=False, - hash_method=None, - hash_files=True): + hash_files = not self.has_metadata( + name, "hash_files", False + ) and not self.has_metadata(name, "name_source") + list_nofilename.append( + ( + name, + self._get_sorteddict( + val, hash_method=hash_method, hash_files=hash_files + ), + ) + ) + list_withhash.append( + ( + name, + self._get_sorteddict( + val, True, hash_method=hash_method, hash_files=hash_files + ), + ) + ) + return list_withhash, md5(str(list_nofilename).encode()).hexdigest() + + def _get_sorteddict( + self, objekt, dictwithhash=False, hash_method=None, hash_files=True + ): if isinstance(objekt, dict): out = [] for key, val in sorted(objekt.items()): if isdefined(val): - out.append((key, - self._get_sorteddict( - val, - dictwithhash, - hash_method=hash_method, - hash_files=hash_files))) + out.append( + ( + key, + self._get_sorteddict( + val, + dictwithhash, + hash_method=hash_method, + hash_files=hash_files, + ), + ) + ) elif isinstance(objekt, (list, tuple)): out = [] for val in objekt: @@ -281,30 +288,34 @@ def _get_sorteddict(self, val, dictwithhash, hash_method=hash_method, - hash_files=hash_files)) + hash_files=hash_files, + ) + ) if isinstance(objekt, tuple): out = tuple(out) else: out = None if isdefined(objekt): - if (hash_files and isinstance(objekt, (str, bytes)) and - os.path.isfile(objekt)): + if ( + hash_files + and isinstance(objekt, (str, bytes)) + and os.path.isfile(objekt) + ): if hash_method is None: - hash_method = config.get('execution', 'hash_method') + hash_method = config.get("execution", "hash_method") - if hash_method.lower() == 'timestamp': + if hash_method.lower() == "timestamp": hash = hash_timestamp(objekt) - elif hash_method.lower() == 'content': + elif hash_method.lower() == "content": hash = hash_infile(objekt) else: - raise Exception( - "Unknown hash method: %s" % hash_method) + raise Exception("Unknown hash method: %s" % hash_method) if dictwithhash: out = (objekt, hash) else: out = hash elif isinstance(objekt, float): - out = FLOAT_FORMAT(objekt) + out = _float_fmt(objekt) else: out = objekt return out @@ -313,12 +324,39 @@ def _get_sorteddict(self, def __all__(self): return self.copyable_trait_names() + def __getstate__(self): + """ + Override __getstate__ so that OutputMultiObjects are correctly pickled. + + >>> class OutputSpec(TraitedSpec): + ... out = OutputMultiObject(traits.List(traits.Int)) + >>> spec = OutputSpec() + >>> spec.out = [[4]] + >>> spec.out + [4] + + >>> spec.__getstate__()['out'] + [[4]] + + >>> spec.__setstate__(spec.__getstate__()) + >>> spec.out + [4] + + """ + state = super().__getstate__() + for key in self.__all__: + _trait_spec = self.trait(key) + if _trait_spec.is_trait_type(OutputMultiObject): + state[key] = _trait_spec.handler.get_value(self, key) + return state + class TraitedSpec(BaseTraitedSpec): - """ Create a subclass with strict traits. + """Create a subclass with strict traits. 
This is used in 90% of the cases. """ + _ = traits.Disallow @@ -327,15 +365,17 @@ class BaseInterfaceInputSpec(TraitedSpec): class DynamicTraitedSpec(BaseTraitedSpec): - """ A subclass to handle dynamic traits + """A subclass to handle dynamic traits This class is a workaround for add_traits and clone_traits not functioning well together. """ def __deepcopy__(self, memo): - """ bug in deepcopy for HasTraits results in weird cloning behavior for - added traits + """ + Replace the ``__deepcopy__`` member with a traits-friendly implementation. + + A bug in ``__deepcopy__`` for ``HasTraits`` results in weird cloning behaviors. """ id_self = id(self) if id_self in memo: @@ -343,7 +383,7 @@ def __deepcopy__(self, memo): dup_dict = deepcopy(self.trait_get(), memo) # access all keys for key in self.copyable_trait_names(): - if key in self.__dict__.keys(): + if key in self.__dict__: _ = getattr(self, key) # clone once dup = self.clone_traits(memo=memo) @@ -359,23 +399,25 @@ def __deepcopy__(self, memo): class CommandLineInputSpec(BaseInterfaceInputSpec): - args = traits.Str(argstr='%s', desc='Additional parameters to the command') + args = Str(argstr="%s", desc="Additional parameters to the command") environ = traits.DictStrStr( - desc='Environment variables', usedefault=True, nohash=True) + desc="Environment variables", usedefault=True, nohash=True + ) class StdOutCommandLineInputSpec(CommandLineInputSpec): - out_file = traits.File(argstr="> %s", position=-1, genfile=True) + out_file = File(argstr="> %s", position=-1, genfile=True) class MpiCommandLineInputSpec(CommandLineInputSpec): use_mpi = traits.Bool( - False, - desc="Whether or not to run the command with mpiexec", - usedefault=True) - n_procs = traits.Int(desc="Num processors to specify to mpiexec. Do not " - "specify if this is managed externally (e.g. through " - "SGE)") + False, desc="Whether or not to run the command with mpiexec", usedefault=True + ) + n_procs = traits.Int( + desc="Num processors to specify to mpiexec. Do not " + "specify if this is managed externally (e.g. through " + "SGE)" + ) def get_filecopy_info(cls): @@ -386,7 +428,7 @@ def get_filecopy_info(cls): return None # normalize_filenames is not a classmethod, hence check first - if not isclass(cls) and hasattr(cls, 'normalize_filenames'): + if not isclass(cls) and hasattr(cls, "normalize_filenames"): cls.normalize_filenames() info = [] inputs = cls.input_spec() if isclass(cls) else cls.inputs diff --git a/nipype/interfaces/base/support.py b/nipype/interfaces/base/support.py index de9d46f61a..45aeed5917 100644 --- a/nipype/interfaces/base/support.py +++ b/nipype/interfaces/base/support.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -7,22 +6,122 @@ ...................................................... """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import object, str - import os +from contextlib import AbstractContextManager from copy import deepcopy from textwrap import wrap import re +from datetime import datetime as dt +from dateutil.parser import parse as parseutc +import platform + +from ... import logging, config +from ...utils.datetime import utcnow +from ...utils.misc import is_container, rgetcwd +from ...utils.filemanip import md5, hash_infile -from ... 
import logging -from ...utils.misc import is_container -from ...utils.filemanip import md5, to_str, hash_infile -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") HELP_LINEWIDTH = 70 + +class RuntimeContext(AbstractContextManager): + """A context manager to run NiPype interfaces.""" + + __slots__ = ("_ignore_exc", "_resmon", "_runtime") + + def __init__(self, resource_monitor=False, ignore_exception=False): + """Initialize the context manager object.""" + self._ignore_exc = ignore_exception + _proc_pid = os.getpid() + if resource_monitor: + from ...utils.profiler import ResourceMonitor + else: + from ...utils.profiler import ResourceMonitorMock as ResourceMonitor + + self._resmon = ResourceMonitor( + _proc_pid, + freq=float(config.get("execution", "resource_monitor_frequency", 1)), + ) + + def __call__(self, interface, cwd=None, redirect_x=False): + """Generate a new runtime object.""" + # Tear-up: get current and prev directories + _syscwd = rgetcwd(error=False) # Recover when wd does not exist + if cwd is None: + cwd = _syscwd + + self._runtime = Bunch( + cwd=str(cwd), + duration=None, + endTime=None, + environ=deepcopy(dict(os.environ)), + hostname=platform.node(), + interface=interface.__class__.__name__, + platform=platform.platform(), + prevcwd=str(_syscwd), + redirect_x=redirect_x, + resmon=self._resmon.fname or "off", + returncode=None, + startTime=None, + version=interface.version, + ) + return self + + def __enter__(self): + """Tear-up the execution of an interface.""" + if self._runtime.redirect_x: + self._runtime.environ["DISPLAY"] = config.get_display() + + self._runtime.startTime = dt.isoformat(utcnow()) + self._resmon.start() + # TODO: Perhaps clean-up path and ensure it exists? + os.chdir(self._runtime.cwd) + return self._runtime + + def __exit__(self, exc_type, exc_value, exc_tb): + """Tear-down interface execution.""" + self._runtime.endTime = dt.isoformat(utcnow()) + timediff = parseutc(self._runtime.endTime) - parseutc(self._runtime.startTime) + self._runtime.duration = ( + timediff.days * 86400 + timediff.seconds + timediff.microseconds / 1e6 + ) + # Collect monitored data + for k, v in self._resmon.stop().items(): + setattr(self._runtime, k, v) + + os.chdir(self._runtime.prevcwd) + + if exc_type is not None or exc_value is not None or exc_tb is not None: + import traceback + + # Retrieve the maximum info fast + self._runtime.traceback = "".join( + traceback.format_exception(exc_type, exc_value, exc_tb) + ) + # Gather up the exception arguments and append nipype info. + exc_args = exc_value.args or () + exc_args += ( + f"An exception of type {exc_type.__name__} occurred while " + f"running interface {self._runtime.interface}.", + ) + self._runtime.traceback_args = ("\n".join([f"{arg}" for arg in exc_args]),) + + if self._ignore_exc: + return True + + if hasattr(self._runtime, "cmdline"): + retcode = self._runtime.returncode + if retcode not in self._runtime.success_codes: + self._runtime.traceback = ( + f"RuntimeError: subprocess exited with code {retcode}." + ) + + @property + def runtime(self): + return self._runtime + + class NipypeInterfaceError(Exception): """Custom error for interfaces""" @@ -30,14 +129,15 @@ def __init__(self, value): self.value = value def __str__(self): - return '{}'.format(self.value) + return f"{self.value}" -class Bunch(object): - """Dictionary-like class that provides attribute-style access to it's items. 
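# ---------------------------------------------------------------------------
# Editor's aside (not part of the diff): a minimal sketch of driving the
# ``RuntimeContext`` added above. The interface is just a stand-in; timing,
# working-directory handling, and exception capture are what is illustrated.
from nipype.interfaces.base import CommandLine
from nipype.interfaces.base.support import RuntimeContext

ctx = RuntimeContext(resource_monitor=False, ignore_exception=False)
iface = CommandLine(command="ls")  # any interface instance would do
with ctx(iface, cwd=None, redirect_x=False) as runtime:
    # startTime is stamped and cwd entered on __enter__; any exception raised
    # here is recorded in runtime.traceback on __exit__ (and suppressed when
    # ignore_exception=True)
    runtime.returncode = 0
print(ctx.runtime.duration)  # wall-clock seconds spent inside the block
# ---------------------------------------------------------------------------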
+class Bunch: + """ + Dictionary-like class that provides attribute-style access to its items. - A `Bunch` is a simple container that stores it's items as class - attributes. Internally all items are stored in a dictionary and + A ``Bunch`` is a simple container that stores its items as class + attributes [1]_. Internally all items are stored in a dictionary and the class exposes several of the dictionary methods. Examples @@ -50,10 +150,8 @@ class Bunch(object): >>> inputs Bunch(fwhm=6.0, infile='subj.nii', register_to_mean=False) - Notes - ----- - The Bunch pattern came from the Python Cookbook: - + References + ---------- .. [1] A. Martelli, D. Hudgeon, "Collecting a Bunch of Named Items", Python Cookbook, 2nd Ed, Chapter 4.18, 2005. @@ -74,17 +172,15 @@ def items(self): def iteritems(self): """iterates over bunch attributes as key, value pairs""" - iflogger.warning('iteritems is deprecated, use items instead') + iflogger.warning("iteritems is deprecated, use items instead") return list(self.items()) def get(self, *args): - """Support dictionary get() functionality - """ + """Support dictionary get() functionality""" return self.__dict__.get(*args) def set(self, **kwargs): - """Support dictionary get() functionality - """ + """Support dictionary get() functionality""" return self.__dict__.update(**kwargs) def dictcopy(self): @@ -99,22 +195,22 @@ def __repr__(self): needs setting or not. Till that mechanism changes, only alter this after careful consideration. """ - outstr = ['Bunch('] + outstr = ["Bunch("] first = True for k, v in sorted(self.items()): if not first: - outstr.append(', ') + outstr.append(", ") if isinstance(v, dict): pairs = [] for key, value in sorted(v.items()): - pairs.append("'%s': %s" % (key, value)) - v = '{' + ', '.join(pairs) + '}' - outstr.append('%s=%s' % (k, v)) + pairs.append(f"'{key}': {value}") + v = "{" + ", ".join(pairs) + "}" + outstr.append(f"{k}={v}") else: - outstr.append('%s=%r' % (k, v)) + outstr.append(f"{k}={v!r}") first = False - outstr.append(')') - return ''.join(outstr) + outstr.append(")") + return "".join(outstr) def _get_bunch_hash(self): """Return a dictionary of our items with hashes for each file. @@ -147,7 +243,7 @@ def _get_bunch_hash(self): item = None else: if len(val) == 0: - raise AttributeError('%s attribute is empty' % key) + raise AttributeError("%s attribute is empty" % key) item = val[0] else: item = val @@ -165,24 +261,24 @@ def _get_bunch_hash(self): # Sort the items of the dictionary, before hashing the string # representation so we get a predictable order of the # dictionary. - sorted_dict = to_str(sorted(dict_nofilename.items())) + sorted_dict = str(sorted(dict_nofilename.items())) return dict_withhash, md5(sorted_dict.encode()).hexdigest() def _repr_pretty_(self, p, cycle): """Support for the pretty module from ipython.externals""" if cycle: - p.text('Bunch(...)') + p.text("Bunch(...)") else: - p.begin_group(6, 'Bunch(') + p.begin_group(6, "Bunch(") first = True for k, v in sorted(self.items()): if not first: - p.text(',') + p.text(",") p.breakable() - p.text(k + '=') + p.text(k + "=") p.pretty(v) first = False - p.end_group(6, ')') + p.end_group(6, ")") def _hash_bunch_dict(adict, key): @@ -193,7 +289,7 @@ def _hash_bunch_dict(adict, key): return [(afile, hash_infile(afile)) for afile in stuff] -class InterfaceResult(object): +class InterfaceResult: """Object that contains the results of running a particular Interface. 
Attributes @@ -220,12 +316,7 @@ class InterfaceResult(object): """ - def __init__(self, - interface, - runtime, - inputs=None, - outputs=None, - provenance=None): + def __init__(self, interface, runtime, inputs=None, outputs=None, provenance=None): self._version = 2.0 self.interface = interface self.runtime = runtime @@ -256,18 +347,20 @@ def format_help(cls): from ...utils.misc import trim docstring = [] - cmd = getattr(cls, '_cmd', None) + cmd = getattr(cls, "_cmd", None) if cmd: - docstring += ['Wraps the executable command ``%s``.' % cmd, ''] + docstring += ["Wraps the executable command ``%s``." % cmd, ""] if cls.__doc__: - docstring += trim(cls.__doc__).split('\n') + [''] - - allhelp = '\n'.join( - docstring + - _inputs_help(cls) + [''] + - _outputs_help(cls) + [''] + - _refs_help(cls) + docstring += trim(cls.__doc__).split("\n") + [""] + + allhelp = "\n".join( + docstring + + _inputs_help(cls) + + [""] + + _outputs_help(cls) + + [""] + + _refs_help(cls) ) return allhelp.expandtabs(8) @@ -278,10 +371,10 @@ def _inputs_help(cls): >>> from nipype.interfaces.afni import GCOR >>> _inputs_help(GCOR) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE - ['Inputs::', '', '\t[Mandatory]', '\tin_file: (an existing file name)', ... + ['Inputs::', '', '\t[Mandatory]', '\tin_file: (a pathlike object or string... """ - helpstr = ['Inputs::'] + helpstr = ["Inputs::"] mandatory_keys = [] optional_items = [] @@ -289,19 +382,21 @@ def _inputs_help(cls): inputs = cls.input_spec() mandatory_items = list(inputs.traits(mandatory=True).items()) if mandatory_items: - helpstr += ['', '\t[Mandatory]'] + helpstr += ["", "\t[Mandatory]"] for name, spec in mandatory_items: helpstr += get_trait_desc(inputs, name, spec) mandatory_keys = {item[0] for item in mandatory_items} - optional_items = ['\n'.join(get_trait_desc(inputs, name, val)) - for name, val in inputs.traits(transient=None).items() - if name not in mandatory_keys] + optional_items = [ + "\n".join(get_trait_desc(inputs, name, val)) + for name, val in inputs.traits(transient=None).items() + if name not in mandatory_keys + ] if optional_items: - helpstr += ['', '\t[Optional]'] + optional_items + helpstr += ["", "\t[Optional]"] + optional_items if not mandatory_keys and not optional_items: - helpstr += ['', '\tNone'] + helpstr += ["", "\tNone"] return helpstr @@ -314,12 +409,13 @@ def _outputs_help(cls): ['Outputs::', '', '\tout: (a float)\n\t\tglobal correlation value'] """ - helpstr = ['Outputs::', '', '\tNone'] + helpstr = ["Outputs::", "", "\tNone"] if cls.output_spec: outputs = cls.output_spec() outhelpstr = [ - '\n'.join(get_trait_desc(outputs, name, spec)) - for name, spec in outputs.traits(transient=None).items()] + "\n".join(get_trait_desc(outputs, name, spec)) + for name, spec in outputs.traits(transient=None).items() + ] if outhelpstr: helpstr = helpstr[:-1] + outhelpstr return helpstr @@ -327,13 +423,13 @@ def _outputs_help(cls): def _refs_help(cls): """Prints interface references.""" - references = getattr(cls, 'references_', None) + references = getattr(cls, "_references", None) if not references: return [] - helpstr = ['References:', '-----------'] + helpstr = ["References:", "-----------"] for r in references: - helpstr += ['{}'.format(r['entry'])] + helpstr += ["{}".format(r["entry"])] return helpstr @@ -345,59 +441,62 @@ def get_trait_desc(inputs, name, spec): requires = spec.requires argstr = spec.argstr - manhelpstr = ['\t%s' % name] + manhelpstr = ["\t%s" % name] type_info = spec.full_info(inputs, name, None) - default = '' + default = "" if 
spec.usedefault: - default = ', nipype default value: %s' % str( - spec.default_value()[1]) - line = "(%s%s)" % (type_info, default) + default = ", nipype default value: %s" % str(spec.default_value()[1]) + line = f"({type_info}{default})" manhelpstr = wrap( line, HELP_LINEWIDTH, - initial_indent=manhelpstr[0] + ': ', - subsequent_indent='\t\t ') + initial_indent=manhelpstr[0] + ": ", + subsequent_indent="\t\t ", + ) if desc: - for line in desc.split('\n'): + for line in desc.split("\n"): line = re.sub(r"\s+", " ", line) manhelpstr += wrap( - line, HELP_LINEWIDTH, - initial_indent='\t\t', - subsequent_indent='\t\t') + line, HELP_LINEWIDTH, initial_indent="\t\t", subsequent_indent="\t\t" + ) if argstr: pos = spec.position if pos is not None: manhelpstr += wrap( - 'argument: ``%s``, position: %s' % (argstr, pos), + f"argument: ``{argstr}``, position: {pos}", HELP_LINEWIDTH, - initial_indent='\t\t', - subsequent_indent='\t\t') + initial_indent="\t\t", + subsequent_indent="\t\t", + ) else: manhelpstr += wrap( - 'argument: ``%s``' % argstr, + "argument: ``%s``" % argstr, HELP_LINEWIDTH, - initial_indent='\t\t', - subsequent_indent='\t\t') + initial_indent="\t\t", + subsequent_indent="\t\t", + ) if xor: - line = '%s' % ', '.join(xor) + line = "%s" % ", ".join(xor) manhelpstr += wrap( line, HELP_LINEWIDTH, - initial_indent='\t\tmutually_exclusive: ', - subsequent_indent='\t\t ') + initial_indent="\t\tmutually_exclusive: ", + subsequent_indent="\t\t ", + ) if requires: others = [field for field in requires if field != name] - line = '%s' % ', '.join(others) + line = "%s" % ", ".join(others) manhelpstr += wrap( line, HELP_LINEWIDTH, - initial_indent='\t\trequires: ', - subsequent_indent='\t\t ') + initial_indent="\t\trequires: ", + subsequent_indent="\t\t ", + ) return manhelpstr diff --git a/nipype/interfaces/base/tests/__init__.py b/nipype/interfaces/base/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/base/tests/__init__.py +++ b/nipype/interfaces/base/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/base/tests/test_auto_BaseInterface.py b/nipype/interfaces/base/tests/test_auto_BaseInterface.py index eb0272d495..4ee8ea9359 100644 --- a/nipype/interfaces/base/tests/test_auto_BaseInterface.py +++ b/nipype/interfaces/base/tests/test_auto_BaseInterface.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..core import BaseInterface diff --git a/nipype/interfaces/base/tests/test_auto_CommandLine.py b/nipype/interfaces/base/tests/test_auto_CommandLine.py index fb16422864..b03e4adfca 100644 --- a/nipype/interfaces/base/tests/test_auto_CommandLine.py +++ b/nipype/interfaces/base/tests/test_auto_CommandLine.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..core import CommandLine def test_CommandLine_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/base/tests/test_auto_LibraryBaseInterface.py b/nipype/interfaces/base/tests/test_auto_LibraryBaseInterface.py index 19beef071f..9893a781a9 100644 --- a/nipype/interfaces/base/tests/test_auto_LibraryBaseInterface.py +++ b/nipype/interfaces/base/tests/test_auto_LibraryBaseInterface.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..core import LibraryBaseInterface diff --git 
a/nipype/interfaces/base/tests/test_auto_MpiCommandLine.py b/nipype/interfaces/base/tests/test_auto_MpiCommandLine.py index 2d66f2f623..908943c754 100644 --- a/nipype/interfaces/base/tests/test_auto_MpiCommandLine.py +++ b/nipype/interfaces/base/tests/test_auto_MpiCommandLine.py @@ -1,17 +1,20 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..core import MpiCommandLine def test_MpiCommandLine_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), n_procs=dict(), - use_mpi=dict(usedefault=True, ), + use_mpi=dict( + usedefault=True, + ), ) inputs = MpiCommandLine.input_spec() diff --git a/nipype/interfaces/base/tests/test_auto_SEMLikeCommandLine.py b/nipype/interfaces/base/tests/test_auto_SEMLikeCommandLine.py index aceabfe2a2..1197b2479c 100644 --- a/nipype/interfaces/base/tests/test_auto_SEMLikeCommandLine.py +++ b/nipype/interfaces/base/tests/test_auto_SEMLikeCommandLine.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..core import SEMLikeCommandLine def test_SEMLikeCommandLine_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/base/tests/test_auto_SimpleInterface.py b/nipype/interfaces/base/tests/test_auto_SimpleInterface.py index 2a4454dd9f..ac62059628 100644 --- a/nipype/interfaces/base/tests/test_auto_SimpleInterface.py +++ b/nipype/interfaces/base/tests/test_auto_SimpleInterface.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..core import SimpleInterface diff --git a/nipype/interfaces/base/tests/test_auto_StdOutCommandLine.py b/nipype/interfaces/base/tests/test_auto_StdOutCommandLine.py index e39dc3acaa..39b80d487b 100644 --- a/nipype/interfaces/base/tests/test_auto_StdOutCommandLine.py +++ b/nipype/interfaces/base/tests/test_auto_StdOutCommandLine.py @@ -1,17 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..core import StdOutCommandLine def test_StdOutCommandLine_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), diff --git a/nipype/interfaces/base/tests/test_core.py b/nipype/interfaces/base/tests/test_core.py index 265edc444f..d86142ff3b 100644 --- a/nipype/interfaces/base/tests/test_core.py +++ b/nipype/interfaces/base/tests/test_core.py @@ -1,21 +1,17 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, unicode_literals -from future import standard_library -from builtins import open import os import simplejson as json +import logging import pytest +from unittest import mock from .... import config from ....testing import example_data from ... 
import base as nib from ..support import _inputs_help -standard_library.install_aliases() - def check_dict(ref_dict, tst_dict): """Compare dictionaries of inputs and and those loaded from json files""" @@ -58,27 +54,27 @@ def __init__(self): def test_BaseInterface(): - config.set('monitoring', 'enable', '0') + config.set("monitoring", "enable", "0") assert nib.BaseInterface.help() is None class InputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int') - goo = nib.traits.Int(desc='a random int', mandatory=True) - moo = nib.traits.Int(desc='a random int', mandatory=False) - hoo = nib.traits.Int(desc='a random int', usedefault=True) - zoo = nib.File(desc='a file', copyfile=False) - woo = nib.File(desc='a file', copyfile=True) + foo = nib.traits.Int(desc="a random int") + goo = nib.traits.Int(desc="a random int", mandatory=True) + moo = nib.traits.Int(desc="a random int", mandatory=False) + hoo = nib.traits.Int(desc="a random int", usedefault=True) + zoo = nib.File(desc="a file", copyfile=False) + woo = nib.File(desc="a file", copyfile=True) class OutputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int') + foo = nib.traits.Int(desc="a random int") class DerivedInterface(nib.BaseInterface): input_spec = InputSpec resource_monitor = False assert DerivedInterface.help() is None - assert 'moo' in ''.join(_inputs_help(DerivedInterface)) + assert "moo" in "".join(_inputs_help(DerivedInterface)) assert DerivedInterface()._outputs() is None assert DerivedInterface().inputs.foo == nib.Undefined with pytest.raises(ValueError): @@ -108,7 +104,7 @@ def _run_interface(self, runtime): def test_BaseInterface_load_save_inputs(tmpdir): - tmp_json = tmpdir.join('settings.json').strpath + tmp_json = tmpdir.join("settings.json").strpath class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int() @@ -120,9 +116,9 @@ class DerivedInterface(nib.BaseInterface): input_spec = InputSpec def __init__(self, **inputs): - super(DerivedInterface, self).__init__(**inputs) + super().__init__(**inputs) - inputs_dict = {'input1': 12, 'input3': True, 'input4': 'some string'} + inputs_dict = {"input1": 12, "input3": True, "input4": "some string"} bif = DerivedInterface(**inputs_dict) bif.save_inputs_to_json(tmp_json) bif2 = DerivedInterface() @@ -133,22 +129,22 @@ def __init__(self, **inputs): assert bif3.inputs.get_traitsfree() == inputs_dict inputs_dict2 = inputs_dict.copy() - inputs_dict2.update({'input4': 'some other string'}) - bif4 = DerivedInterface(from_file=tmp_json, input4=inputs_dict2['input4']) + inputs_dict2.update({"input4": "some other string"}) + bif4 = DerivedInterface(from_file=tmp_json, input4=inputs_dict2["input4"]) assert bif4.inputs.get_traitsfree() == inputs_dict2 - bif5 = DerivedInterface(input4=inputs_dict2['input4']) + bif5 = DerivedInterface(input4=inputs_dict2["input4"]) bif5.load_inputs_from_json(tmp_json, overwrite=False) assert bif5.inputs.get_traitsfree() == inputs_dict2 - bif6 = DerivedInterface(input4=inputs_dict2['input4']) + bif6 = DerivedInterface(input4=inputs_dict2["input4"]) bif6.load_inputs_from_json(tmp_json) assert bif6.inputs.get_traitsfree() == inputs_dict # test get hashval in a complex interface from nipype.interfaces.ants import Registration - settings = example_data( - example_data('smri_ants_registration_settings.json')) + + settings = example_data(example_data("smri_ants_registration_settings.json")) with open(settings) as setf: data_dict = json.load(setf) @@ -159,15 +155,16 @@ def __init__(self, **inputs): tsthash2 = Registration(from_file=settings) 
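# ---------------------------------------------------------------------------
# Editor's aside (not part of the diff): the JSON save/load round trip the
# test above exercises, reduced to its core. ``_Touch`` is hypothetical.
from nipype.interfaces.base import BaseInterface, TraitedSpec, traits

class _TouchInputSpec(TraitedSpec):
    value = traits.Int()

class _Touch(BaseInterface):
    input_spec = _TouchInputSpec

a = _Touch(value=12)
a.save_inputs_to_json("settings.json")  # serializes only the defined inputs
b = _Touch(from_file="settings.json")   # or: b.load_inputs_from_json("settings.json")
assert b.inputs.get_traitsfree() == {"value": 12}
# ---------------------------------------------------------------------------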
assert {} == check_dict(data_dict, tsthash2.inputs.get_traitsfree()) - _, hashvalue = tsthash.inputs.get_hashval(hash_method='timestamp') - assert '8562a5623562a871115eb14822ee8d02' == hashvalue + _, hashvalue = tsthash.inputs.get_hashval(hash_method="timestamp") + assert hashvalue == "e35bf07fea8049cc02de9235f85e8903" class MinVerInputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int', min_ver='0.9') + foo = nib.traits.Int(desc="a random int", min_ver="0.9") + class MaxVerInputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int', max_ver='0.7') + foo = nib.traits.Int(desc="a random int", max_ver="0.7") def test_input_version_1(): @@ -177,7 +174,7 @@ class DerivedInterface1(nib.BaseInterface): obj = DerivedInterface1() obj._check_version_requirements(obj.inputs) - config.set('execution', 'stop_on_unknown_version', True) + config.set("execution", "stop_on_unknown_version", True) with pytest.raises(ValueError) as excinfo: obj._check_version_requirements(obj.inputs) @@ -189,7 +186,7 @@ class DerivedInterface1(nib.BaseInterface): def test_input_version_2(): class DerivedInterface1(nib.BaseInterface): input_spec = MinVerInputSpec - _version = '0.8' + _version = "0.8" obj = DerivedInterface1() obj.inputs.foo = 1 @@ -201,7 +198,7 @@ class DerivedInterface1(nib.BaseInterface): def test_input_version_3(): class DerivedInterface1(nib.BaseInterface): input_spec = MinVerInputSpec - _version = '0.10' + _version = "0.10" obj = DerivedInterface1() obj._check_version_requirements(obj.inputs) @@ -210,7 +207,7 @@ class DerivedInterface1(nib.BaseInterface): def test_input_version_4(): class DerivedInterface1(nib.BaseInterface): input_spec = MinVerInputSpec - _version = '0.9' + _version = "0.9" obj = DerivedInterface1() obj.inputs.foo = 1 @@ -220,7 +217,7 @@ class DerivedInterface1(nib.BaseInterface): def test_input_version_5(): class DerivedInterface2(nib.BaseInterface): input_spec = MaxVerInputSpec - _version = '0.8' + _version = "0.8" obj = DerivedInterface2() obj.inputs.foo = 1 @@ -232,61 +229,138 @@ class DerivedInterface2(nib.BaseInterface): def test_input_version_6(): class DerivedInterface1(nib.BaseInterface): input_spec = MaxVerInputSpec - _version = '0.7' + _version = "0.7" obj = DerivedInterface1() obj.inputs.foo = 1 obj._check_version_requirements(obj.inputs) +def test_input_version_missing(caplog): + class DerivedInterface(nib.BaseInterface): + class input_spec(nib.TraitedSpec): + foo = nib.traits.Int(min_ver="0.9") + bar = nib.traits.Int(max_ver="0.9") + + _version = "misparsed-garbage" + + obj = DerivedInterface() + obj.inputs.foo = 1 + obj.inputs.bar = 1 + with caplog.at_level(logging.WARNING, logger="nipype.interface"): + obj._check_version_requirements(obj.inputs) + assert len(caplog.records) == 2 + + +def test_input_version_missing_error(caplog): + from nipype import config + + class DerivedInterface(nib.BaseInterface): + class input_spec(nib.TraitedSpec): + foo = nib.traits.Int(min_ver="0.9") + bar = nib.traits.Int(max_ver="0.9") + + _version = "misparsed-garbage" + + obj1 = DerivedInterface(foo=1) + obj2 = DerivedInterface(bar=1) + with caplog.at_level(logging.WARNING, logger="nipype.interface"): + with mock.patch.object(config, "getboolean", return_value=True): + with pytest.raises(ValueError): + obj1._check_version_requirements(obj1.inputs) + with pytest.raises(ValueError): + obj2._check_version_requirements(obj2.inputs) + assert len(caplog.records) == 2 + + +def test_unavailable_input(): + class WithInput(nib.BaseInterface): + class 
input_spec(nib.TraitedSpec): + foo = nib.traits.Int(3, usedefault=True, max_ver="0.5") + + _version = "0.4" + + def _run_interface(self, runtime): + return runtime + + class WithoutInput(WithInput): + _version = "0.6" + + has = WithInput() + hasnot = WithoutInput() + trying_anyway = WithoutInput(foo=3) + assert has.inputs.foo == 3 + assert not nib.isdefined(hasnot.inputs.foo) + assert trying_anyway.inputs.foo == 3 + + has.run() + hasnot.run() + with pytest.raises(Exception): + trying_anyway.run() + + # Still settable + has.inputs.foo = 4 + hasnot.inputs.foo = 4 + trying_anyway.inputs.foo = 4 + assert has.inputs.foo == 4 + assert hasnot.inputs.foo == 4 + assert trying_anyway.inputs.foo == 4 + + has.run() + with pytest.raises(Exception): + hasnot.run() + with pytest.raises(Exception): + trying_anyway.run() + + def test_output_version(): class InputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int') + foo = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int', min_ver='0.9') + foo = nib.traits.Int(desc="a random int", min_ver="0.9") class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec - _version = '0.10' + _version = "0.10" resource_monitor = False obj = DerivedInterface1() assert obj._check_version_requirements(obj._outputs()) == [] class InputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int') + foo = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int', min_ver='0.11') + foo = nib.traits.Int(desc="a random int", min_ver="0.11") class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec - _version = '0.10' + _version = "0.10" resource_monitor = False obj = DerivedInterface1() - assert obj._check_version_requirements(obj._outputs()) == ['foo'] + assert obj._check_version_requirements(obj._outputs()) == ["foo"] class InputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int') + foo = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int', min_ver='0.11') + foo = nib.traits.Int(desc="a random int", min_ver="0.11") class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec - _version = '0.10' + _version = "0.10" resource_monitor = False def _run_interface(self, runtime): return runtime def _list_outputs(self): - return {'foo': 1} + return {"foo": 1} obj = DerivedInterface1() with pytest.raises(KeyError): @@ -296,51 +370,50 @@ def _list_outputs(self): def test_Commandline(): with pytest.raises(Exception): nib.CommandLine() - ci = nib.CommandLine(command='which') - assert ci.cmd == 'which' + ci = nib.CommandLine(command="which") + assert ci.cmd == "which" assert ci.inputs.args == nib.Undefined - ci2 = nib.CommandLine(command='which', args='ls') - assert ci2.cmdline == 'which ls' - ci3 = nib.CommandLine(command='echo') + ci2 = nib.CommandLine(command="which", args="ls") + assert ci2.cmdline == "which ls" + ci3 = nib.CommandLine(command="echo") ci3.resource_monitor = False - ci3.inputs.environ = {'MYENV': 'foo'} + ci3.inputs.environ = {"MYENV": "foo"} res = ci3.run() - assert res.runtime.environ['MYENV'] == 'foo' + assert res.runtime.environ["MYENV"] == "foo" assert res.outputs is None class CommandLineInputSpec1(nib.CommandLineInputSpec): - foo = nib.Str(argstr='%s', desc='a str') - goo = nib.traits.Bool(argstr='-g', desc='a bool', position=0) - hoo = 
nib.traits.List(argstr='-l %s', desc='a list') - moo = nib.traits.List( - argstr='-i %d...', desc='a repeated list', position=-1) - noo = nib.traits.Int(argstr='-x %d', desc='an int') - roo = nib.traits.Str(desc='not on command line') + foo = nib.Str(argstr="%s", desc="a str") + goo = nib.traits.Bool(argstr="-g", desc="a bool", position=0) + hoo = nib.traits.List(argstr="-l %s", desc="a list") + moo = nib.traits.List(argstr="-i %d...", desc="a repeated list", position=-1) + noo = nib.traits.Int(argstr="-x %d", desc="an int") + roo = nib.traits.Str(desc="not on command line") soo = nib.traits.Bool(argstr="-soo") nib.CommandLine.input_spec = CommandLineInputSpec1 - ci4 = nib.CommandLine(command='cmd') - ci4.inputs.foo = 'foo' + ci4 = nib.CommandLine(command="cmd") + ci4.inputs.foo = "foo" ci4.inputs.goo = True - ci4.inputs.hoo = ['a', 'b'] + ci4.inputs.hoo = ["a", "b"] ci4.inputs.moo = [1, 2, 3] ci4.inputs.noo = 0 - ci4.inputs.roo = 'hello' + ci4.inputs.roo = "hello" ci4.inputs.soo = False cmd = ci4._parse_inputs() - assert cmd[0] == '-g' - assert cmd[-1] == '-i 1 -i 2 -i 3' - assert 'hello' not in ' '.join(cmd) - assert '-soo' not in ' '.join(cmd) + assert cmd[0] == "-g" + assert cmd[-1] == "-i 1 -i 2 -i 3" + assert "hello" not in " ".join(cmd) + assert "-soo" not in " ".join(cmd) ci4.inputs.soo = True cmd = ci4._parse_inputs() - assert '-soo' in ' '.join(cmd) + assert "-soo" in " ".join(cmd) class CommandLineInputSpec2(nib.CommandLineInputSpec): - foo = nib.File(argstr='%s', desc='a str', genfile=True) + foo = nib.File(argstr="%s", desc="a str", genfile=True) nib.CommandLine.input_spec = CommandLineInputSpec2 - ci5 = nib.CommandLine(command='cmd') + ci5 = nib.CommandLine(command="cmd") with pytest.raises(NotImplementedError): ci5._parse_inputs() @@ -348,102 +421,106 @@ class DerivedClass(nib.CommandLine): input_spec = CommandLineInputSpec2 def _gen_filename(self, name): - return 'filename' + return "filename" - ci6 = DerivedClass(command='cmd') - assert ci6._parse_inputs()[0] == 'filename' + ci6 = DerivedClass(command="cmd") + assert ci6._parse_inputs()[0] == "filename" nib.CommandLine.input_spec = nib.CommandLineInputSpec def test_Commandline_environ(monkeypatch, tmpdir): from nipype import config + config.set_default_config() tmpdir.chdir() - monkeypatch.setitem(os.environ, 'DISPLAY', ':1') + monkeypatch.setitem(os.environ, "DISPLAY", ":1") # Test environment - ci3 = nib.CommandLine(command='echo') + ci3 = nib.CommandLine(command="echo") res = ci3.run() - assert res.runtime.environ['DISPLAY'] == ':1' + assert res.runtime.environ["DISPLAY"] == ":1" # Test display_variable option - monkeypatch.delitem(os.environ, 'DISPLAY', raising=False) - config.set('execution', 'display_variable', ':3') + monkeypatch.delitem(os.environ, "DISPLAY", raising=False) + config.set("execution", "display_variable", ":3") res = ci3.run() - assert 'DISPLAY' not in ci3.inputs.environ - assert 'DISPLAY' not in res.runtime.environ + assert "DISPLAY" not in ci3.inputs.environ + assert "DISPLAY" not in res.runtime.environ # If the interface has _redirect_x then yes, it should be set ci3._redirect_x = True res = ci3.run() - assert res.runtime.environ['DISPLAY'] == ':3' + assert res.runtime.environ["DISPLAY"] == ":3" # Test overwrite - monkeypatch.setitem(os.environ, 'DISPLAY', ':1') - ci3.inputs.environ = {'DISPLAY': ':2'} + monkeypatch.setitem(os.environ, "DISPLAY", ":1") + ci3.inputs.environ = {"DISPLAY": ":2"} res = ci3.run() - assert res.runtime.environ['DISPLAY'] == ':2' + assert res.runtime.environ["DISPLAY"] == 
":2" def test_CommandLine_output(tmpdir): # Create one file tmpdir.chdir() - file = tmpdir.join('foo.txt') - file.write('123456\n') + file = tmpdir.join("foo.txt") + file.write("123456\n") name = os.path.basename(file.strpath) - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'allatonce' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "allatonce" res = ci.run() - assert res.runtime.merged == '' + assert res.runtime.merged == "" assert name in res.runtime.stdout # Check stdout is written - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'file_stdout' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "file_stdout" res = ci.run() - assert os.path.isfile('stdout.nipype') + assert os.path.isfile("stdout.nipype") assert name in res.runtime.stdout - tmpdir.join('stdout.nipype').remove(ignore_errors=True) + tmpdir.join("stdout.nipype").remove(ignore_errors=True) # Check stderr is written - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'file_stderr' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "file_stderr" res = ci.run() - assert os.path.isfile('stderr.nipype') - tmpdir.join('stderr.nipype').remove(ignore_errors=True) + assert os.path.isfile("stderr.nipype") + tmpdir.join("stderr.nipype").remove(ignore_errors=True) # Check outputs are thrown away - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'none' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "none" res = ci.run() - assert res.runtime.stdout == '' and \ - res.runtime.stderr == '' and \ - res.runtime.merged == '' + assert ( + res.runtime.stdout == "" + and res.runtime.stderr == "" + and res.runtime.merged == "" + ) # Check that new interfaces are set to default 'stream' - ci = nib.CommandLine(command='ls -l') + ci = nib.CommandLine(command="ls -l") res = ci.run() - assert ci.terminal_output == 'stream' - assert name in res.runtime.stdout and \ - res.runtime.stderr == '' + assert ci.terminal_output == "stream" + assert name in res.runtime.stdout and res.runtime.stderr == "" # Check only one file is generated - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'file' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "file" res = ci.run() - assert os.path.isfile('output.nipype') - assert name in res.runtime.merged and \ - res.runtime.stdout == '' and \ - res.runtime.stderr == '' - tmpdir.join('output.nipype').remove(ignore_errors=True) + assert os.path.isfile("output.nipype") + assert ( + name in res.runtime.merged + and res.runtime.stdout == "" + and res.runtime.stderr == "" + ) + tmpdir.join("output.nipype").remove(ignore_errors=True) # Check split files are generated - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'file_split' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "file_split" res = ci.run() - assert os.path.isfile('stdout.nipype') - assert os.path.isfile('stderr.nipype') + assert os.path.isfile("stdout.nipype") + assert os.path.isfile("stderr.nipype") assert name in res.runtime.stdout @@ -451,34 +528,35 @@ def test_global_CommandLine_output(tmpdir): """Ensures CommandLine.set_default_terminal_output works""" from nipype.interfaces.fsl import BET - ci = nib.CommandLine(command='ls -l') - assert ci.terminal_output == 'stream' # default case + ci = nib.CommandLine(command="ls -l") + assert ci.terminal_output == "stream" # default case ci = BET() - assert ci.terminal_output == 'stream' # default case + assert ci.terminal_output == "stream" # default case - 
nib.CommandLine.set_default_terminal_output('allatonce') - ci = nib.CommandLine(command='ls -l') - assert ci.terminal_output == 'allatonce' + with mock.patch.object(nib.CommandLine, "_terminal_output"): + nib.CommandLine.set_default_terminal_output("allatonce") + ci = nib.CommandLine(command="ls -l") + assert ci.terminal_output == "allatonce" - nib.CommandLine.set_default_terminal_output('file') - ci = nib.CommandLine(command='ls -l') - assert ci.terminal_output == 'file' + nib.CommandLine.set_default_terminal_output("file") + ci = nib.CommandLine(command="ls -l") + assert ci.terminal_output == "file" - # Check default affects derived interfaces - ci = BET() - assert ci.terminal_output == 'file' + # Check default affects derived interfaces + ci = BET() + assert ci.terminal_output == "file" def test_CommandLine_prefix(tmpdir): tmpdir.chdir() - oop = 'out/of/path' + oop = "out/of/path" os.makedirs(oop) - script_name = 'test_script.sh' + script_name = "test_script.sh" script_path = os.path.join(oop, script_name) - with open(script_path, 'w') as script_f: - script_f.write('#!/usr/bin/env bash\necho Success!') + with open(script_path, "w") as script_f: + script_f.write("#!/usr/bin/env bash\necho Success!") os.chmod(script_path, 0o755) ci = nib.CommandLine(command=script_name) @@ -486,19 +564,19 @@ def test_CommandLine_prefix(tmpdir): ci.run() class OOPCLI(nib.CommandLine): - _cmd_prefix = oop + '/' + _cmd_prefix = oop + "/" ci = OOPCLI(command=script_name) ci.run() class OOPShell(nib.CommandLine): - _cmd_prefix = 'bash {}/'.format(oop) + _cmd_prefix = f"bash {oop}/" ci = OOPShell(command=script_name) ci.run() class OOPBadShell(nib.CommandLine): - _cmd_prefix = 'shell_dne {}/'.format(oop) + _cmd_prefix = f"shell_dne {oop}/" ci = OOPBadShell(command=script_name) with pytest.raises(IOError): @@ -509,6 +587,7 @@ def test_runtime_checks(): class TestInterface(nib.BaseInterface): class input_spec(nib.TraitedSpec): a = nib.traits.Any() + class output_spec(nib.TraitedSpec): b = nib.traits.Any() @@ -521,7 +600,7 @@ def _run_interface(self, runtime): class BrokenRuntime(TestInterface): def _run_interface(self, runtime): - del runtime.__dict__['cwd'] + del runtime.__dict__["cwd"] return runtime with pytest.raises(RuntimeError): diff --git a/nipype/interfaces/base/tests/test_resource_monitor.py b/nipype/interfaces/base/tests/test_resource_monitor.py index d40ecbebde..802e8e6ec9 100644 --- a/nipype/interfaces/base/tests/test_resource_monitor.py +++ b/nipype/interfaces/base/tests/test_resource_monitor.py @@ -1,13 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Module to unit test the resource_monitor in nipype """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os import pytest @@ -30,22 +27,26 @@ def use_resource_monitor(): class UseResourcesInputSpec(CommandLineInputSpec): mem_gb = traits.Float( - desc='Number of GB of RAM to use', argstr='-g %f', mandatory=True) + desc="Number of GB of RAM to use", argstr="-g %f", mandatory=True + ) n_procs = traits.Int( - desc='Number of threads to use', argstr='-p %d', mandatory=True) + desc="Number of threads to use", argstr="-p %d", mandatory=True + ) class UseResources(CommandLine): """ use_resources cmd interface """ + from nipype import __path__ + # Init attributes input_spec = UseResourcesInputSpec # Get path of executable exec_dir = os.path.realpath(__path__[0]) - exec_path = 
os.path.join(exec_dir, 'utils', 'tests', 'use_resources') + exec_path = os.path.join(exec_dir, "utils", "tests", "use_resources") # Init cmd _cmd = exec_path @@ -53,39 +54,41 @@ class UseResources(CommandLine): @pytest.mark.skip(reason="inconsistent readings") -@pytest.mark.skipif( - os.getenv('CI_SKIP_TEST', False), reason='disabled in CI tests') -@pytest.mark.parametrize("mem_gb,n_procs", [(0.5, 3), (2.2, 8), (0.8, 4), - (1.5, 1)]) +@pytest.mark.skipif(os.getenv("CI_SKIP_TEST", False), reason="disabled in CI tests") +@pytest.mark.parametrize("mem_gb,n_procs", [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)]) def test_cmdline_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor): """ Test runtime profiler correctly records workflow RAM/CPUs consumption of a CommandLine-derived interface """ from nipype import config - config.set('monitoring', 'sample_frequency', '0.2') # Force sampling fast + + config.set("monitoring", "sample_frequency", "0.2") # Force sampling fast tmpdir.chdir() iface = UseResources(mem_gb=mem_gb, n_procs=n_procs) result = iface.run() - assert abs(mem_gb - result.runtime.mem_peak_gb - ) < 0.3, 'estimated memory error above .3GB' - assert int(result.runtime.cpu_percent / 100 + 0.2 - ) == n_procs, 'wrong number of threads estimated' + assert ( + abs(mem_gb - result.runtime.mem_peak_gb) < 0.3 + ), "estimated memory error above .3GB" + assert ( + int(result.runtime.cpu_percent / 100 + 0.2) == n_procs + ), "wrong number of threads estimated" @pytest.mark.skipif( - True, reason='test disabled temporarily, until funcion profiling works') -@pytest.mark.parametrize("mem_gb,n_procs", [(0.5, 3), (2.2, 8), (0.8, 4), - (1.5, 1)]) + True, reason="test disabled temporarily, until function profiling works" +) +@pytest.mark.parametrize("mem_gb,n_procs", [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)]) def test_function_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor): """ Test runtime profiler correctly records workflow RAM/CPUs consumption of a Function interface """ from nipype import config - config.set('monitoring', 'sample_frequency', '0.2') # Force sampling fast + + config.set("monitoring", "sample_frequency", "0.2") # Force sampling fast tmpdir.chdir() iface = niu.Function(function=_use_resources) @@ -93,6 +96,7 @@ def test_function_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor): iface.inputs.n_procs = n_procs result = iface.run() - assert abs(mem_gb - result.runtime.mem_peak_gb - ) < 0.3, 'estimated memory error above .3GB' + assert ( + abs(mem_gb - result.runtime.mem_peak_gb) < 0.3 + ), "estimated memory error above .3GB" assert int(result.runtime.cpu_percent / 100 + 0.2) >= n_procs diff --git a/nipype/interfaces/base/tests/test_specs.py b/nipype/interfaces/base/tests/test_specs.py index bab112e96d..44a9c014c4 100644 --- a/nipype/interfaces/base/tests/test_specs.py +++ b/nipype/interfaces/base/tests/test_specs.py @@ -1,8 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, unicode_literals -from future import standard_library import os import warnings @@ -16,15 +13,13 @@ from ....pipeline import Node from ..specs import get_filecopy_info -standard_library.install_aliases() - @pytest.fixture(scope="module") def setup_file(request, tmpdir_factory): - tmp_dir = tmpdir_factory.mktemp('files') - tmp_infile = tmp_dir.join('foo.txt') - with tmp_infile.open('w') as fp: - fp.writelines(['123456789']) + tmp_dir = tmpdir_factory.mktemp("files") + 
tmp_infile = tmp_dir.join("foo.txt") + with tmp_infile.open("w") as fp: + fp.writelines(["123456789"]) tmp_dir.chdir() @@ -33,7 +28,7 @@ def setup_file(request, tmpdir_factory): def test_TraitedSpec(): assert nib.TraitedSpec().get_hashval() - assert nib.TraitedSpec().__repr__() == '\n\n' + assert nib.TraitedSpec().__repr__() == "\n\n" class spec(nib.TraitedSpec): foo = nib.traits.Int @@ -45,14 +40,16 @@ class spec(nib.TraitedSpec): with pytest.raises(nib.traits.TraitError): specfunc(1) infields = spec(foo=1) - hashval = ([('foo', 1), ('goo', '0.0000000000')], - 'e89433b8c9141aa0fda2f8f4d662c047') + hashval = ( + [("foo", 1), ("goo", "0.0000000000")], + "e89433b8c9141aa0fda2f8f4d662c047", + ) assert infields.get_hashval() == hashval - assert infields.__repr__() == '\nfoo = 1\ngoo = 0.0\n' + assert infields.__repr__() == "\nfoo = 1\ngoo = 0.0\n" def test_TraitedSpec_tab_completion(): - bet_nd = Node(fsl.BET(), name='bet') + bet_nd = Node(fsl.BET(), name="bet") bet_interface = fsl.BET() bet_inputs = bet_nd.inputs.class_editable_traits() bet_outputs = bet_nd.outputs.class_editable_traits() @@ -68,15 +65,16 @@ def test_TraitedSpec_tab_completion(): @pytest.mark.skip def test_TraitedSpec_dynamic(): from pickle import dumps, loads + a = nib.BaseTraitedSpec() - a.add_trait('foo', nib.traits.Int) + a.add_trait("foo", nib.traits.Int) a.foo = 1 - assign_a = lambda: setattr(a, 'foo', 'a') + assign_a = lambda: setattr(a, "foo", "a") with pytest.raises(Exception): assign_a pkld_a = dumps(a) unpkld_a = loads(pkld_a) - assign_a_again = lambda: setattr(unpkld_a, 'foo', 'a') + assign_a_again = lambda: setattr(unpkld_a, "foo", "a") with pytest.raises(Exception): assign_a_again @@ -86,39 +84,43 @@ def extract_func(list_out): return list_out[0] # Define interface - func_interface = Function(input_names=["list_out"], - output_names=["out_file", "another_file"], - function=extract_func) + func_interface = Function( + input_names=["list_out"], + output_names=["out_file", "another_file"], + function=extract_func, + ) # Define node - list_extract = Node(Function( - input_names=["list_out"], output_names=["out_file"], - function=extract_func), name="list_extract") + list_extract = Node( + Function( + input_names=["list_out"], output_names=["out_file"], function=extract_func + ), + name="list_extract", + ) # Check __all__ for interface inputs expected_input = set(list_extract.inputs.editable_traits()) - assert(set(func_interface.inputs.__all__) == expected_input) + assert set(func_interface.inputs.__all__) == expected_input # Check __all__ for node inputs - assert(set(list_extract.inputs.__all__) == expected_input) + assert set(list_extract.inputs.__all__) == expected_input # Check __all__ for node outputs expected_output = set(list_extract.outputs.editable_traits()) - assert(set(list_extract.outputs.__all__) == expected_output) + assert set(list_extract.outputs.__all__) == expected_output # Add trait and retest - list_extract._interface._output_names.append('added_out_trait') - expected_output.add('added_out_trait') - assert(set(list_extract.outputs.__all__) == expected_output) + list_extract._interface._output_names.append("added_out_trait") + expected_output.add("added_out_trait") + assert set(list_extract.outputs.__all__) == expected_output def test_TraitedSpec_logic(): class spec3(nib.TraitedSpec): - _xor_inputs = ('foo', 'bar') + _xor_inputs = ("foo", "bar") - foo = nib.traits.Int(xor=_xor_inputs, desc='foo or bar, not both') - bar = nib.traits.Int(xor=_xor_inputs, desc='bar or foo, not both') - kung = 
nib.traits.Float( - requires=('foo', ), position=0, desc='kung foo') + foo = nib.traits.Int(xor=_xor_inputs, desc="foo or bar, not both") + bar = nib.traits.Int(xor=_xor_inputs, desc="bar or foo, not both") + kung = nib.traits.Float(requires=("foo",), position=0, desc="kung foo") class out3(nib.TraitedSpec): output = nib.traits.Int @@ -133,7 +135,7 @@ class MyInterface(nib.BaseInterface): # setattr(myif.inputs, 'kung', 10.0) myif.inputs.foo = 1 assert myif.inputs.foo == 1 - set_bar = lambda: setattr(myif.inputs, 'bar', 1) + set_bar = lambda: setattr(myif.inputs, "bar", 1) with pytest.raises(IOError): set_bar() assert myif.inputs.foo == 1 @@ -143,34 +145,34 @@ class MyInterface(nib.BaseInterface): def test_deprecation(): with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', '', UserWarning) + warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec1(nib.TraitedSpec): - foo = nib.traits.Int(deprecated='0.1') + foo = nib.traits.Int(deprecated="0.1") spec_instance = DeprecationSpec1() - set_foo = lambda: setattr(spec_instance, 'foo', 1) + set_foo = lambda: setattr(spec_instance, "foo", 1) with pytest.raises(nib.TraitError): set_foo() - assert len(w) == 0, 'no warnings, just errors' + assert len(w) == 0, "no warnings, just errors" with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', '', UserWarning) + warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec2(nib.TraitedSpec): - foo = nib.traits.Int(deprecated='100', new_name='bar') + foo = nib.traits.Int(deprecated="100", new_name="bar") spec_instance = DeprecationSpec2() - set_foo = lambda: setattr(spec_instance, 'foo', 1) + set_foo = lambda: setattr(spec_instance, "foo", 1) with pytest.raises(nib.TraitError): set_foo() - assert len(w) == 0, 'no warnings, just errors' + assert len(w) == 0, "no warnings, just errors" with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', '', UserWarning) + warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec3(nib.TraitedSpec): - foo = nib.traits.Int(deprecated='1000', new_name='bar') + foo = nib.traits.Int(deprecated="1000", new_name="bar") bar = nib.traits.Int() spec_instance = DeprecationSpec3() @@ -180,14 +182,13 @@ class DeprecationSpec3(nib.TraitedSpec): except nib.TraitError: not_raised = False assert not_raised - assert len( - w) == 1, 'deprecated warning 1 %s' % [w1.message for w1 in w] + assert len(w) == 1, f"deprecated warning 1 {[str(w1) for w1 in w]}" with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', '', UserWarning) + warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec3(nib.TraitedSpec): - foo = nib.traits.Int(deprecated='1000', new_name='bar') + foo = nib.traits.Int(deprecated="1000", new_name="bar") bar = nib.traits.Int() spec_instance = DeprecationSpec3() @@ -199,8 +200,7 @@ class DeprecationSpec3(nib.TraitedSpec): assert not_raised assert spec_instance.foo == Undefined assert spec_instance.bar == 1 - assert len( - w) == 1, 'deprecated warning 2 %s' % [w1.message for w1 in w] + assert len(w) == 1, f"deprecated warning 2 {[str(w1) for w1 in w]}" def test_namesource(setup_file): @@ -208,12 +208,10 @@ def test_namesource(setup_file): tmpd, nme, ext = split_filename(tmp_infile) class spec2(nib.CommandLineInputSpec): - moo = nib.File( - name_source=['doo'], hash_files=False, argstr="%s", position=2) + moo = nib.File(name_source=["doo"], hash_files=False, argstr="%s", position=2) doo = nib.File(exists=True, 
argstr="%s", position=1) goo = traits.Int(argstr="%d", position=4) - poo = nib.File( - name_source=['goo'], hash_files=False, argstr="%s", position=3) + poo = nib.File(name_source=["goo"], hash_files=False, argstr="%s", position=3) class TestName(nib.CommandLine): _cmd = "mycommand" @@ -222,10 +220,10 @@ class TestName(nib.CommandLine): testobj = TestName() testobj.inputs.doo = tmp_infile testobj.inputs.goo = 99 - assert '%s_generated' % nme in testobj.cmdline - assert '%d_generated' % testobj.inputs.goo in testobj.cmdline + assert "%s_generated" % nme in testobj.cmdline + assert "%d_generated" % testobj.inputs.goo in testobj.cmdline testobj.inputs.moo = "my_%s_template" - assert 'my_%s_template' % nme in testobj.cmdline + assert "my_%s_template" % nme in testobj.cmdline def test_chained_namesource(setup_file): @@ -235,13 +233,13 @@ def test_chained_namesource(setup_file): class spec2(nib.CommandLineInputSpec): doo = nib.File(exists=True, argstr="%s", position=1) moo = nib.File( - name_source=['doo'], + name_source=["doo"], hash_files=False, argstr="%s", position=2, - name_template='%s_mootpl') - poo = nib.File( - name_source=['moo'], hash_files=False, argstr="%s", position=3) + name_template="%s_mootpl", + ) + poo = nib.File(name_source=["moo"], hash_files=False, argstr="%s", position=3) class TestName(nib.CommandLine): _cmd = "mycommand" @@ -250,9 +248,9 @@ class TestName(nib.CommandLine): testobj = TestName() testobj.inputs.doo = tmp_infile res = testobj.cmdline - assert '%s' % tmp_infile in res - assert '%s_mootpl ' % nme in res - assert '%s_mootpl_generated' % nme in res + assert "%s" % tmp_infile in res + assert "%s_mootpl " % nme in res + assert "%s_mootpl_generated" % nme in res def test_cycle_namesource1(setup_file): @@ -261,15 +259,14 @@ def test_cycle_namesource1(setup_file): class spec3(nib.CommandLineInputSpec): moo = nib.File( - name_source=['doo'], + name_source=["doo"], hash_files=False, argstr="%s", position=1, - name_template='%s_mootpl') - poo = nib.File( - name_source=['moo'], hash_files=False, argstr="%s", position=2) - doo = nib.File( - name_source=['poo'], hash_files=False, argstr="%s", position=3) + name_template="%s_mootpl", + ) + poo = nib.File(name_source=["moo"], hash_files=False, argstr="%s", position=2) + doo = nib.File(name_source=["poo"], hash_files=False, argstr="%s", position=3) class TestCycle(nib.CommandLine): _cmd = "mycommand" @@ -291,15 +288,14 @@ def test_cycle_namesource2(setup_file): class spec3(nib.CommandLineInputSpec): moo = nib.File( - name_source=['doo'], + name_source=["doo"], hash_files=False, argstr="%s", position=1, - name_template='%s_mootpl') - poo = nib.File( - name_source=['moo'], hash_files=False, argstr="%s", position=2) - doo = nib.File( - name_source=['poo'], hash_files=False, argstr="%s", position=3) + name_template="%s_mootpl", + ) + poo = nib.File(name_source=["moo"], hash_files=False, argstr="%s", position=2) + doo = nib.File(name_source=["poo"], hash_files=False, argstr="%s", position=3) class TestCycle(nib.CommandLine): _cmd = "mycommand" @@ -317,9 +313,9 @@ class TestCycle(nib.CommandLine): print(res) assert not_raised - assert '%s' % tmp_infile in res - assert '%s_generated' % nme in res - assert '%s_generated_mootpl' % nme in res + assert "%s" % tmp_infile in res + assert "%s_generated" % nme in res + assert "%s_generated_mootpl" % nme in res def test_namesource_constraints(setup_file): @@ -328,30 +324,30 @@ def test_namesource_constraints(setup_file): class constrained_spec(nib.CommandLineInputSpec): in_file = 
nib.File(argstr="%s", position=1) - threshold = traits.Float( - argstr="%g", - xor=['mask_file'], - position=2) + threshold = traits.Float(argstr="%g", xor=["mask_file"], position=2) mask_file = nib.File( argstr="%s", - name_source=['in_file'], - name_template='%s_mask', + name_source=["in_file"], + name_template="%s_mask", keep_extension=True, - xor=['threshold'], - position=2) + xor=["threshold"], + position=2, + ) out_file1 = nib.File( argstr="%s", - name_source=['in_file'], - name_template='%s_out1', + name_source=["in_file"], + name_template="%s_out1", keep_extension=True, - position=3) + position=3, + ) out_file2 = nib.File( argstr="%s", - name_source=['in_file'], - name_template='%s_out2', + name_source=["in_file"], + name_template="%s_out2", keep_extension=True, - requires=['threshold'], - position=4) + requires=["threshold"], + position=4, + ) class TestConstrained(nib.CommandLine): _cmd = "mycommand" @@ -360,15 +356,15 @@ class TestConstrained(nib.CommandLine): tc = TestConstrained() # name_source undefined, so template traits remain undefined - assert tc.cmdline == 'mycommand' + assert tc.cmdline == "mycommand" # mask_file and out_file1 enabled by name_source definition tc.inputs.in_file = os.path.basename(tmp_infile) - assert tc.cmdline == 'mycommand foo.txt foo_mask.txt foo_out1.txt' + assert tc.cmdline == "mycommand foo.txt foo_mask.txt foo_out1.txt" # mask_file disabled by threshold, out_file2 enabled by threshold - tc.inputs.threshold = 10. - assert tc.cmdline == 'mycommand foo.txt 10 foo_out1.txt foo_out2.txt' + tc.inputs.threshold = 10.0 + assert tc.cmdline == "mycommand foo.txt 10 foo_out1.txt foo_out2.txt" def test_TraitedSpec_withFile(setup_file): @@ -381,8 +377,8 @@ class spec2(nib.TraitedSpec): doo = nib.traits.List(nib.File(exists=True)) infields = spec2(moo=tmp_infile, doo=[tmp_infile]) - hashval = infields.get_hashval(hash_method='content') - assert hashval[1] == 'a00e9ee24f5bfa9545a515b7a759886b' + hashval = infields.get_hashval(hash_method="content") + assert hashval[1] == "a00e9ee24f5bfa9545a515b7a759886b" def test_TraitedSpec_withNoFileHashing(setup_file): @@ -395,22 +391,22 @@ class spec2(nib.TraitedSpec): doo = nib.traits.List(nib.File(exists=True)) infields = spec2(moo=nme, doo=[tmp_infile]) - hashval = infields.get_hashval(hash_method='content') - assert hashval[1] == '8da4669ff5d72f670a46ea3e7a203215' + hashval = infields.get_hashval(hash_method="content") + assert hashval[1] == "8da4669ff5d72f670a46ea3e7a203215" class spec3(nib.TraitedSpec): moo = nib.File(exists=True, name_source="doo") doo = nib.traits.List(nib.File(exists=True)) infields = spec3(moo=nme, doo=[tmp_infile]) - hashval1 = infields.get_hashval(hash_method='content') + hashval1 = infields.get_hashval(hash_method="content") class spec4(nib.TraitedSpec): moo = nib.File(exists=True) doo = nib.traits.List(nib.File(exists=True)) infields = spec4(moo=nme, doo=[tmp_infile]) - hashval2 = infields.get_hashval(hash_method='content') + hashval2 = infields.get_hashval(hash_method="content") assert hashval1[1] != hashval2[1] @@ -418,31 +414,29 @@ def test_ImageFile(): x = nib.BaseInterface().inputs # setup traits - x.add_trait('nifti', nib.ImageFile(types=['nifti1', 'dicom'])) - x.add_trait('anytype', nib.ImageFile()) - x.add_trait('newtype', nib.ImageFile(types=['nifti10'])) - x.add_trait('nocompress', - nib.ImageFile(types=['mgh'], allow_compressed=False)) + x.add_trait("nifti", nib.ImageFile(types=["nifti1", "dicom"])) + x.add_trait("anytype", nib.ImageFile()) + with pytest.raises(ValueError): + 
x.add_trait("newtype", nib.ImageFile(types=["nifti10"])) + x.add_trait("nocompress", nib.ImageFile(types=["mgh"], allow_compressed=False)) with pytest.raises(nib.TraitError): - x.nifti = 'test.mgz' - x.nifti = 'test.nii' - x.anytype = 'test.xml' - with pytest.raises(AttributeError): - x.newtype = 'test.nii' + x.nifti = "test.mgz" + x.nifti = "test.nii" + x.anytype = "test.xml" with pytest.raises(nib.TraitError): - x.nocompress = 'test.nii.gz' - x.nocompress = 'test.mgh' + x.nocompress = "test.mgz" + x.nocompress = "test.mgh" def test_filecopy_info(): class InputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int') - goo = nib.traits.Int(desc='a random int', mandatory=True) - moo = nib.traits.Int(desc='a random int', mandatory=False) - hoo = nib.traits.Int(desc='a random int', usedefault=True) - zoo = nib.File(desc='a file', copyfile=False) - woo = nib.File(desc='a file', copyfile=True) + foo = nib.traits.Int(desc="a random int") + goo = nib.traits.Int(desc="a random int", mandatory=True) + moo = nib.traits.Int(desc="a random int", mandatory=False) + hoo = nib.traits.Int(desc="a random int", usedefault=True) + zoo = nib.File(desc="a file", copyfile=False) + woo = nib.File(desc="a file", copyfile=True) class DerivedInterface(nib.BaseInterface): input_spec = InputSpec @@ -450,16 +444,16 @@ class DerivedInterface(nib.BaseInterface): def normalize_filenames(self): """A mock normalize_filenames for freesurfer interfaces that have one""" - self.inputs.zoo = 'normalized_filename.ext' + self.inputs.zoo = "normalized_filename.ext" assert get_filecopy_info(nib.BaseInterface) == [] # Test on interface class, not instantiated info = get_filecopy_info(DerivedInterface) - assert info[0]['key'] == 'woo' - assert info[0]['copy'] - assert info[1]['key'] == 'zoo' - assert not info[1]['copy'] + assert info[0]["key"] == "woo" + assert info[0]["copy"] + assert info[1]["key"] == "zoo" + assert not info[1]["copy"] info = None # Test with instantiated interface @@ -469,9 +463,9 @@ def normalize_filenames(self): # After the first call to get_filecopy_info zoo is defined info = get_filecopy_info(derived) # Ensure that normalize_filenames was called - assert derived.inputs.zoo == 'normalized_filename.ext' + assert derived.inputs.zoo == "normalized_filename.ext" # Check the results are consistent - assert info[0]['key'] == 'woo' - assert info[0]['copy'] - assert info[1]['key'] == 'zoo' - assert not info[1]['copy'] + assert info[0]["key"] == "woo" + assert info[0]["copy"] + assert info[1]["key"] == "zoo" + assert not info[1]["copy"] diff --git a/nipype/interfaces/base/tests/test_support.py b/nipype/interfaces/base/tests/test_support.py index e6db69a458..52770e476c 100644 --- a/nipype/interfaces/base/tests/test_support.py +++ b/nipype/interfaces/base/tests/test_support.py @@ -1,20 +1,15 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, unicode_literals import os import pytest -from builtins import open -from future import standard_library + from pkg_resources import resource_filename as pkgrf from ....utils.filemanip import md5 from ... 
import base as nib -standard_library.install_aliases() - -@pytest.mark.parametrize("args", [{}, {'a': 1, 'b': [2, 3]}]) +@pytest.mark.parametrize("args", [{}, {"a": 1, "b": [2, 3]}]) def test_bunch(args): b = nib.Bunch(**args) assert b.__dict__ == args @@ -37,25 +32,24 @@ def test_bunch_methods(): b.update(a=3) newb = b.dictcopy() assert b.a == 3 - assert b.get('a') == 3 - assert b.get('badkey', 'otherthing') == 'otherthing' + assert b.get("a") == 3 + assert b.get("badkey", "otherthing") == "otherthing" assert b != newb - assert type(dict()) == type(newb) - assert newb['a'] == 3 + assert type(newb) is dict + assert newb["a"] == 3 def test_bunch_hash(): # NOTE: Since the path to the json file is included in the Bunch, # the hash will be unique to each machine. - json_pth = pkgrf('nipype', - os.path.join('testing', 'data', 'realign_json.json')) + json_pth = pkgrf("nipype", os.path.join("testing", "data", "realign_json.json")) - b = nib.Bunch(infile=json_pth, otherthing='blue', yat=True) + b = nib.Bunch(infile=json_pth, otherthing="blue", yat=True) newbdict, bhash = b._get_bunch_hash() - assert bhash == 'd1f46750044c3de102efc847720fc35f' + assert bhash == "d1f46750044c3de102efc847720fc35f" # Make sure the hash stored in the json file for `infile` is correct. jshash = md5() - with open(json_pth, 'r') as fp: - jshash.update(fp.read().encode('utf-8')) - assert newbdict['infile'][0][1] == jshash.hexdigest() - assert newbdict['yat'] is True + with open(json_pth) as fp: + jshash.update(fp.read().encode("utf-8")) + assert newbdict["infile"][0][1] == jshash.hexdigest() + assert newbdict["yat"] is True diff --git a/nipype/interfaces/base/tests/test_traits_extension.py b/nipype/interfaces/base/tests/test_traits_extension.py new file mode 100644 index 0000000000..91682f459e --- /dev/null +++ b/nipype/interfaces/base/tests/test_traits_extension.py @@ -0,0 +1,322 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Check the resolving/rebasing feature of ``BasePath``s.""" +from ... 
import base as nib +from ..traits_extension import rebase_path_traits, resolve_path_traits, Path + + +class _test_spec(nib.TraitedSpec): + a = nib.File() + b = nib.Tuple(nib.File(), nib.File()) + c = nib.traits.List(nib.File()) + d = nib.traits.Either(nib.File(), nib.traits.Float()) + e = nib.OutputMultiObject(nib.File()) + ee = nib.OutputMultiObject(nib.Str) + f = nib.traits.Dict(nib.Str, nib.File()) + g = nib.traits.Either(nib.File, nib.Str) + h = nib.Str + i = nib.traits.Either(nib.File, nib.Tuple(nib.File, nib.traits.Int)) + j = nib.traits.Either( + nib.File, + nib.Tuple(nib.File, nib.traits.Int), + nib.traits.Dict(nib.Str, nib.File()), + ) + k = nib.DictStrStr + + +def test_rebase_resolve_path_traits(): + """Check rebase_path_traits and resolve_path_traits and idempotence.""" + spec = _test_spec() + + v = "/some/path/f1.txt" + a = rebase_path_traits(spec.trait("a"), v, "/some/path") + assert a == Path("f1.txt") + + # Idempotence + assert rebase_path_traits(spec.trait("a"), a, "/some/path") == a + + a = resolve_path_traits(spec.trait("a"), a, "/some/path") + assert a == Path(v) + + # Idempotence + assert resolve_path_traits(spec.trait("a"), a, "/some/path") == a + + a = rebase_path_traits(spec.trait("a"), v, "/some/other/path") + assert a == Path(v) + + # Idempotence + assert rebase_path_traits(spec.trait("a"), a, "/some/other/path") == a + + a = resolve_path_traits(spec.trait("a"), a, "/some/path") + assert a == Path(v) + + # Idempotence + assert resolve_path_traits(spec.trait("a"), a, "/some/path") == a + + v = ("/some/path/f1.txt", "/some/path/f2.txt") + b = rebase_path_traits(spec.trait("b"), v, "/some/path") + assert b == (Path("f1.txt"), Path("f2.txt")) + + # Idempotence + assert rebase_path_traits(spec.trait("b"), b, "/some/path") == b + + b = resolve_path_traits(spec.trait("b"), b, "/some/path") + assert b == (Path(v[0]), Path(v[1])) + + # Idempotence + assert resolve_path_traits(spec.trait("b"), b, "/some/path") == b + + v = ["/some/path/f1.txt", "/some/path/f2.txt", "/some/path/f3.txt"] + c = rebase_path_traits(spec.trait("c"), v, "/some/path") + assert c == [Path("f1.txt"), Path("f2.txt"), Path("f3.txt")] + + # Idempotence + assert rebase_path_traits(spec.trait("c"), c, "/some/path") == c + + c = resolve_path_traits(spec.trait("c"), c, "/some/path") + assert c == [Path(vp) for vp in v] + + # Idempotence + assert resolve_path_traits(spec.trait("c"), c, "/some/path") == c + + v = 2.0 + d = rebase_path_traits(spec.trait("d"), v, "/some/path") + assert d == v + + d = resolve_path_traits(spec.trait("d"), d, "/some/path") + assert d == v + + v = "/some/path/either.txt" + d = rebase_path_traits(spec.trait("d"), v, "/some/path") + assert d == Path("either.txt") + + # Idempotence + assert rebase_path_traits(spec.trait("d"), d, "/some/path") == d + + d = resolve_path_traits(spec.trait("d"), d, "/some/path") + assert d == Path(v) + + # Idempotence + assert resolve_path_traits(spec.trait("d"), d, "/some/path") == d + + v = ["/some/path/f1.txt", "/some/path/f2.txt", "/some/path/f3.txt"] + e = rebase_path_traits(spec.trait("e"), v, "/some/path") + assert e == [Path("f1.txt"), Path("f2.txt"), Path("f3.txt")] + + # Idempotence + assert rebase_path_traits(spec.trait("e"), e, "/some/path") == e + + e = resolve_path_traits(spec.trait("e"), e, "/some/path") + assert e == [Path(vp) for vp in v] + + # Idempotence + assert resolve_path_traits(spec.trait("e"), e, "/some/path") == e + + v = [["/some/path/f1.txt", "/some/path/f2.txt"], [["/some/path/f3.txt"]]] + e = 
rebase_path_traits(spec.trait("e"), v, "/some/path")
+    assert e == [[Path("f1.txt"), Path("f2.txt")], [[Path("f3.txt")]]]
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("e"), e, "/some/path") == e
+
+    e = resolve_path_traits(spec.trait("e"), e, "/some/path")
+    assert e == [
+        [
+            [Path(vpp) for vpp in vp] if isinstance(vp, list) else Path(vp)
+            for vp in inner
+        ]
+        for inner in v
+    ]
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("e"), e, "/some/path") == e
+
+    # These are Str - no rebasing/resolving should happen
+    v = [["/some/path/f1.txt", "/some/path/f2.txt"], [["/some/path/f3.txt"]]]
+    ee = rebase_path_traits(spec.trait("ee"), v, "/some/path")
+    assert ee == v
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("ee"), ee, "/some/path") == ee
+
+    ee = resolve_path_traits(
+        spec.trait("ee"), [["f1.txt", "f2.txt"], [["f3.txt"]]], "/some/path"
+    )
+    assert ee == [["f1.txt", "f2.txt"], [["f3.txt"]]]
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("ee"), ee, "/some/path") == ee
+
+    v = {"1": "/some/path/f1.txt"}
+    f = rebase_path_traits(spec.trait("f"), v, "/some")
+    assert f == {"1": Path("path/f1.txt")}
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("f"), f, "/some") == f
+
+    f = resolve_path_traits(spec.trait("f"), f, "/some")
+    assert f == {k: Path(val) for k, val in v.items()}
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("f"), f, "/some") == f
+
+    # Either(Str, File): passing in path-like applies manipulation
+    v = "/some/path/either.txt"
+    g = rebase_path_traits(spec.trait("g"), v, "/some/path")
+    assert g == Path("either.txt")
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g
+
+    g = resolve_path_traits(spec.trait("g"), g, "/some/path")
+    assert g == Path(v)
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g
+
+    g = rebase_path_traits(spec.trait("g"), v, "/some")
+    assert g == Path("path/either.txt")
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g
+
+    g = resolve_path_traits(spec.trait("g"), g, "/some")
+    assert g == Path(v)
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g
+
+    # Either(Str, File): passing str discards File
+    v = "either.txt"
+    g = rebase_path_traits(spec.trait("g"), v, "/some/path")
+    assert g == v
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g
+
+    # This is a problematic case, it is impossible to know whether this
+    # was meant to be a string or a file.
+    # In this implementation, strings take precedence
+    g = resolve_path_traits(spec.trait("g"), g, "/some/path")
+    assert g == v
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g
+
+    v = "string"
+    g = rebase_path_traits(spec.trait("g"), v, "/some")
+    assert g == v
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("g"), g, "/some") == g
+
+    # This is a problematic case, it is impossible to know whether this
+    # was meant to be a string or a file.
+    g = resolve_path_traits(spec.trait("g"), v, "/some")
+    assert g == v
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("g"), g, "/some") == g
+
+    g = rebase_path_traits(spec.trait("g"), v, "/some/path")
+    assert g == v  # You don't want this one to be a Path
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g
+
+    # This is a problematic case, it is impossible to know whether this
+    # was meant to be a string or a file.
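+    # A sketch of the precedence rule exercised here, assuming the
+    # _recurse_on_path_traits() helper introduced later in this patch:
+    # for Either(File, Str) traits, a relative string such as "string"
+    # already satisfies the Str handler and is returned untouched, e.g.
+    # resolve_path_traits(spec.trait("g"), "string", "/some") == "string",
+    # while only absolute, path-looking values reach the File handler
+    # and get rebased/resolved.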
+    g = resolve_path_traits(spec.trait("g"), g, "/some/path")
+    assert g == v  # You don't want this one to be a Path
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g
+
+    h = rebase_path_traits(spec.trait("h"), v, "/some/path")
+    assert h == v
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("h"), h, "/some/path") == h
+
+    h = resolve_path_traits(spec.trait("h"), h, "/some/path")
+    assert h == v
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("h"), h, "/some/path") == h
+
+    v = "/some/path/either/file.txt"
+    i = rebase_path_traits(spec.trait("i"), v, "/some/path")
+    assert i == Path("either/file.txt")
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("i"), i, "/some/path") == i
+
+    i = resolve_path_traits(spec.trait("i"), i, "/some/path")
+    assert i == Path(v)
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("i"), i, "/some/path") == i
+
+    v = ("/some/path/either/tuple/file.txt", 2)
+    i = rebase_path_traits(spec.trait("i"), v, "/some/path")
+    assert i == (Path("either/tuple/file.txt"), 2)
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("i"), i, "/some/path") == i
+
+    i = resolve_path_traits(spec.trait("i"), i, "/some/path")
+    assert i == (Path(v[0]), v[1])
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("i"), i, "/some/path") == i
+
+    v = "/some/path/either/file.txt"
+    j = rebase_path_traits(spec.trait("j"), v, "/some/path")
+    assert j == Path("either/file.txt")
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("j"), j, "/some/path") == j
+
+    j = resolve_path_traits(spec.trait("j"), j, "/some/path")
+    assert j == Path(v)
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("j"), j, "/some/path") == j
+
+    v = ("/some/path/either/tuple/file.txt", 2)
+    j = rebase_path_traits(
+        spec.trait("j"), ("/some/path/either/tuple/file.txt", 2), "/some/path"
+    )
+    assert j == (Path("either/tuple/file.txt"), 2)
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("j"), j, "/some/path") == j
+
+    j = resolve_path_traits(spec.trait("j"), j, "/some/path")
+    assert j == (Path(v[0]), v[1])
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("j"), j, "/some/path") == j
+
+    v = {"a": "/some/path/either/dict/file.txt"}
+    j = rebase_path_traits(spec.trait("j"), v, "/some/path")
+    assert j == {"a": Path("either/dict/file.txt")}
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("j"), j, "/some/path") == j
+
+    j = resolve_path_traits(spec.trait("j"), j, "/some/path")
+    assert j == {k: Path(val) for k, val in v.items()}
+
+    # Idempotence
+    assert resolve_path_traits(spec.trait("j"), j, "/some/path") == j
+
+    v = {"path": "/some/path/f1.txt"}
+    k = rebase_path_traits(spec.trait("k"), v, "/some/path")
+    assert k == v
+
+    # Idempotence
+    assert rebase_path_traits(spec.trait("k"), k, "/some/path") == k
+
+    k = resolve_path_traits(spec.trait("k"), k, "/some/path")
+    assert k == v
diff --git a/nipype/interfaces/base/traits_extension.py b/nipype/interfaces/base/traits_extension.py
index 7a464cc557..49ba234ba8 100644
--- a/nipype/interfaces/base/traits_extension.py
+++ b/nipype/interfaces/base/traits_extension.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """
@@ -20,32 +19,71 @@
 (usually by Robert Kern).
""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from builtins import str, bytes -import os -from collections import Sequence +from collections.abc import Sequence # perform all external trait imports here from traits import __version__ as traits_version import traits.api as traits -from traits.trait_handlers import TraitDictObject, TraitListObject -from traits.trait_errors import TraitError -from traits.trait_base import _Undefined, class_of +from traits.api import TraitType, Unicode +from traits.trait_base import _Undefined + +try: + # Moved in traits 6.0 + from traits.trait_type import NoDefaultSpecified +except ImportError: + # Pre-6.0 + from traits.trait_handlers import NoDefaultSpecified + +from pathlib import Path +from ...utils.filemanip import path_resolve + +if traits_version < "3.7.0": + raise ImportError("Traits version 3.7.0 or higher must be installed") + +IMG_FORMATS = { + "afni": (".HEAD", ".BRIK"), + "cifti2": (".nii", ".nii.gz"), + "dicom": (".dcm", ".IMA", ".tar", ".tar.gz"), + "gifti": (".gii", ".gii.gz"), + "mgh": (".mgh", ".mgz", ".mgh.gz"), + "nifti1": (".nii", ".nii.gz", ".hdr", ".img", ".img.gz"), + "nifti2": (".nii", ".nii.gz"), + "nrrd": (".nrrd", ".nhdr"), +} +IMG_ZIP_FMT = {".nii.gz", "tar.gz", ".gii.gz", ".mgz", ".mgh.gz", "img.gz"} -from traits.api import BaseUnicode -from traits.api import Unicode -from future import standard_library +""" +The functions that pop-up the Traits GUIs, edit_traits and +configure_traits, were failing because all of our inputs default to +Undefined deep and down in traits/ui/wx/list_editor.py it checks for +the len() of the elements of the list. The _Undefined class in traits +does not define the __len__ method and would error. I tried defining +our own Undefined and even subclassing Undefined, but both of those +failed with a TraitError in our initializer when we assign the +Undefined to the inputs because of an incompatible type: + +TraitError: The 'vertical_gradient' trait of a BetInputSpec instance must be \ +a float, but a value of was specified. + +So... in order to keep the same type but add the missing method, I +monkey patched. +""" -if traits_version < '3.7.0': - raise ImportError('Traits version 3.7.0 or higher must be installed') -standard_library.install_aliases() +def _length(self): + return 0 + + +########################################################################## +# Apply monkeypatch here +_Undefined.__len__ = _length +########################################################################## + +Undefined = _Undefined() class Str(Unicode): - """Replacement for the default traits.Str based in bytes""" + """Replaces the default traits.Str based in bytes.""" # Monkeypatch Str and DictStrStr for Python 2 compatibility @@ -54,261 +92,312 @@ class Str(Unicode): traits.DictStrStr = DictStrStr -class File(BaseUnicode): - """ Defines a trait whose value must be the name of a file. - """ +class BasePath(TraitType): + """Defines a trait whose value must be a valid filesystem path.""" # A description of the type of value this trait accepts: - info_text = 'a file name' - - def __init__(self, - value='', - filter=None, - auto_set=False, - entries=0, - exists=False, - **metadata): - """ Creates a File trait. - - Parameters - ---------- - value : string - The default value for the trait - filter : string - A wildcard string to filter filenames in the file dialog box used by - the attribute trait editor. 
-        auto_set : boolean
-            Indicates whether the file editor updates the trait value after
-            every key stroke.
-        exists : boolean
-            Indicates whether the trait value must be an existing file or
-            not.
-
-        Default Value
-        -------------
-        *value* or ''
-        """
-        self.filter = filter
-        self.auto_set = auto_set
-        self.entries = entries
+    exists = False
+    resolve = False
+    _is_file = False
+    _is_dir = False
+
+    @property
+    def info_text(self):
+        """Create the trait's general description."""
+        info_text = "a pathlike object or string"
+        if any((self.exists, self._is_file, self._is_dir)):
+            info_text += " representing a"
+            if self.exists:
+                info_text += "n existing"
+            if self._is_file:
+                info_text += " file"
+            elif self._is_dir:
+                info_text += " directory"
+            else:
+                info_text += " file or directory"
+        return info_text
+
+    def __init__(self, value=Undefined, exists=False, resolve=False, **metadata):
+        """Create a BasePath trait."""
        self.exists = exists
+        self.resolve = resolve
+        super().__init__(value, **metadata)

-        if exists:
-            self.info_text = 'an existing file name'
+    def validate(self, objekt, name, value, return_pathlike=False):
+        """Validate a value change."""
+        try:
+            value = Path(value)  # Use pathlib's validation
+        except Exception:
+            self.error(objekt, name, str(value))

-        super(File, self).__init__(value, **metadata)
+        if self.exists:
+            if not value.exists():
+                self.error(objekt, name, str(value))

-    def validate(self, object, name, value):
-        """ Validates that a specified value is valid for this trait."""
-        validated_value = super(File, self).validate(object, name, value)
-        if not self.exists:
-            return validated_value
-        elif os.path.isfile(value):
-            return validated_value
-        else:
-            raise TraitError(
-                args='The trait \'{}\' of {} instance is {}, but the path '
-                ' \'{}\' does not exist.'.format(name, class_of(object),
-                                                 self.info_text, value))
+        if self._is_file and not value.is_file():
+            self.error(objekt, name, str(value))

-        self.error(object, name, value)
+        if self._is_dir and not value.is_dir():
+            self.error(objekt, name, str(value))

+        if self.resolve:
+            value = path_resolve(value, strict=self.exists)

-# -------------------------------------------------------------------------------
-# 'Directory' trait
-# -------------------------------------------------------------------------------
+        if not return_pathlike:
+            value = str(value)

+        return value

-class Directory(BaseUnicode):
+
+class Directory(BasePath):
     """
-    Defines a trait whose value must be the name of a directory.
+    Defines a trait whose value must be a directory path.
+
+    >>> from nipype.interfaces.base import Directory, TraitedSpec, TraitError
+    >>> class A(TraitedSpec):
+    ...     foo = Directory(exists=False)
+    >>> a = A()
+    >>> a.foo
+    <undefined>
+
+    >>> a.foo = '/some/made/out/path'
+    >>> a.foo
+    '/some/made/out/path'
+
+    >>> class A(TraitedSpec):
+    ...     foo = Directory(exists=False, resolve=True)
+    >>> a = A(foo='relative_dir')
+    >>> a.foo  # doctest: +ELLIPSIS
+    '.../relative_dir'
+
+    >>> class A(TraitedSpec):
+    ...     foo = Directory(exists=True, resolve=True)
+    >>> a = A()
+    >>> a.foo = 'relative_dir'  # doctest: +IGNORE_EXCEPTION_DETAIL
+    Traceback (most recent call last):
+    TraitError:
+
+    >>> from os import mkdir
+    >>> mkdir('relative_dir')
+    >>> a.foo = 'relative_dir'
+    >>> a.foo  # doctest: +ELLIPSIS
+    '.../relative_dir'
+
+    >>> class A(TraitedSpec):
+    ...     foo = Directory(exists=True, resolve=False)
+    >>> a = A(foo='relative_dir')
+    >>> a.foo
+    'relative_dir'
+
+    >>> class A(TraitedSpec):
+    ...
foo = Directory('tmpdir')
+    >>> a = A()
+    >>> a.foo  # doctest: +ELLIPSIS
+    <undefined>
+
+    >>> class A(TraitedSpec):
+    ...     foo = Directory('tmpdir', usedefault=True)
+    >>> a = A()
+    >>> a.foo  # doctest: +ELLIPSIS
+    'tmpdir'
+
+    """

-    # A description of the type of value this trait accepts:
-    info_text = 'a directory name'
-
-    def __init__(self,
-                 value='',
-                 auto_set=False,
-                 entries=0,
-                 exists=False,
-                 **metadata):
-        """ Creates a Directory trait.
-
-        Parameters
-        ----------
-        value : string
-            The default value for the trait
-        auto_set : boolean
-            Indicates whether the directory editor updates the trait value
-            after every key stroke.
-        exists : boolean
-            Indicates whether the trait value must be an existing directory or
-            not.
-
-        Default Value
-        -------------
-        *value* or ''
-        """
-        self.entries = entries
-        self.auto_set = auto_set
-        self.exists = exists
+    _is_dir = True

-        if exists:
-            self.info_text = 'an existing directory name'
-        super(Directory, self).__init__(value, **metadata)

+class File(BasePath):
+    """
+    Defines a trait whose value must be a file path.

-    def validate(self, object, name, value):
-        """ Validates that a specified value is valid for this trait."""
-        if isinstance(value, (str, bytes)):
-            if not self.exists:
-                return value
-            if os.path.isdir(value):
-                return value
-            else:
-                raise TraitError(
-                    args='The trait \'{}\' of {} instance is {}, but the path '
-                    ' \'{}\' does not exist.'.format(name, class_of(object),
-                                                     self.info_text, value))
-
-        self.error(object, name, value)
-
-
-# lists of tuples
-# each element consists of :
-# - uncompressed (tuple[0]) extension
-# - compressed (tuple[1]) extension
-img_fmt_types = {
-    'nifti1': [('.nii', '.nii.gz'), (('.hdr', '.img'), ('.hdr', '.img.gz'))],
-    'mgh': [('.mgh', '.mgz'), ('.mgh', '.mgh.gz')],
-    'nifti2': [('.nii', '.nii.gz')],
-    'cifti2': [('.nii', '.nii.gz')],
-    'gifti': [('.gii', '.gii.gz')],
-    'dicom': [('.dcm', '.dcm'), ('.IMA', '.IMA'), ('.tar', '.tar.gz')],
-    'nrrd': [('.nrrd', 'nrrd'), ('nhdr', 'nhdr')],
-    'afni': [('.HEAD', '.HEAD'), ('.BRIK', '.BRIK')]
-}
+    >>> from nipype.interfaces.base import File, TraitedSpec, TraitError
+    >>> class A(TraitedSpec):
+    ...     foo = File()
+    >>> a = A()
+    >>> a.foo
+    <undefined>

-class ImageFile(File):
-    """ Defines a trait of specific neuroimaging files """
-
-    def __init__(self,
-                 value='',
-                 filter=None,
-                 auto_set=False,
-                 entries=0,
-                 exists=False,
-                 types=[],
-                 allow_compressed=True,
-                 **metadata):
-        """ Trait handles neuroimaging files.
- - Parameters - ---------- - types : list - Strings of file format types accepted - compressed : boolean - Indicates whether the file format can compressed - """ - self.types = types - self.allow_compressed = allow_compressed - super(ImageFile, self).__init__(value, filter, auto_set, entries, - exists, **metadata) - - def info(self): - existing = 'n existing' if self.exists else '' - comma = ',' if self.exists and not self.allow_compressed else '' - uncompressed = ' uncompressed' if not self.allow_compressed else '' - with_ext = ' (valid extensions: [{}])'.format( - ', '.join(self.grab_exts())) if self.types else '' - return 'a{existing}{comma}{uncompressed} file{with_ext}'.format( - existing=existing, comma=comma, uncompressed=uncompressed, - with_ext=with_ext) - - def grab_exts(self): - # TODO: file type validation - exts = [] - for fmt in self.types: - if fmt in img_fmt_types: - exts.extend( - sum([[u for u in y[0]] - if isinstance(y[0], tuple) else [y[0]] - for y in img_fmt_types[fmt]], [])) - if self.allow_compressed: - exts.extend( - sum([[u for u in y[-1]] - if isinstance(y[-1], tuple) else [y[-1]] - for y in img_fmt_types[fmt]], [])) - else: - raise AttributeError( - 'Information has not been added for format' - ' type {} yet. Supported formats include: ' - '{}'.format(fmt, ', '.join(img_fmt_types.keys()))) - return list(set(exts)) - - def validate(self, object, name, value): - """ Validates that a specified value is valid for this trait. - """ - validated_value = super(ImageFile, self).validate(object, name, value) - if validated_value and self.types: - _exts = self.grab_exts() - if not any(validated_value.endswith(x) for x in _exts): - raise TraitError( - args="{} is not included in allowed types: {}".format( - validated_value, ', '.join(_exts))) - return validated_value + >>> class A(TraitedSpec): + ... foo = File(exists=False, resolve=True) + >>> a = A(foo='idontexist.txt') + >>> a.foo # doctest: +ELLIPSIS + '.../idontexist.txt' + >>> class A(TraitedSpec): + ... foo = File(exists=True, resolve=True) + >>> a = A() + >>> a.foo = 'idontexist.txt' # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + TraitError: -""" -The functions that pop-up the Traits GUIs, edit_traits and -configure_traits, were failing because all of our inputs default to -Undefined deep and down in traits/ui/wx/list_editor.py it checks for -the len() of the elements of the list. The _Undefined class in traits -does not define the __len__ method and would error. I tried defining -our own Undefined and even sublassing Undefined, but both of those -failed with a TraitError in our initializer when we assign the -Undefined to the inputs because of an incompatible type: + >>> open('idoexist.txt', 'w').close() + >>> a.foo = 'idoexist.txt' + >>> a.foo # doctest: +ELLIPSIS + '.../idoexist.txt' -TraitError: The 'vertical_gradient' trait of a BetInputSpec instance must be a float, but a value of was specified. + >>> class A(TraitedSpec): + ... foo = File('idoexist.txt') + >>> a = A() + >>> a.foo + -So... in order to keep the same type but add the missing method, I -monkey patched. -""" + >>> class A(TraitedSpec): + ... foo = File('idoexist.txt', usedefault=True) + >>> a = A() + >>> a.foo + 'idoexist.txt' + >>> class A(TraitedSpec): + ... 
foo = File(exists=True, resolve=True, extensions=['.txt', 'txt.gz']) + >>> a = A() + >>> a.foo = 'idoexist.badtxt' # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + TraitError: -def length(self): - return 0 + >>> a.foo = 'idoexist.txt' + >>> a.foo # doctest: +ELLIPSIS + '.../idoexist.txt' + >>> class A(TraitedSpec): + ... foo = File(extensions=['.nii', '.nii.gz']) + >>> a = A() + >>> a.foo = 'badext.txt' # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + TraitError: -########################################################################## -# Apply monkeypatch here -_Undefined.__len__ = length -########################################################################## + >>> class A(TraitedSpec): + ... foo = File(extensions=['.nii', '.nii.gz']) + >>> a = A() + >>> a.foo = 'goodext.nii' + >>> a.foo + 'goodext.nii' + + >>> a = A() + >>> a.foo = 'idontexist.000.nii' + >>> a.foo # doctest: +ELLIPSIS + 'idontexist.000.nii' + + >>> a = A() + >>> a.foo = 'idontexist.000.nii.gz' + >>> a.foo # doctest: +ELLIPSIS + 'idontexist.000.nii.gz' + + """ + + _is_file = True + _exts = None + + def __init__( + self, + value=NoDefaultSpecified, + exists=False, + resolve=False, + allow_compressed=True, + extensions=None, + **metadata, + ): + """Create a File trait.""" + if extensions is not None: + if isinstance(extensions, (bytes, str)): + extensions = [extensions] + + if allow_compressed is False: + extensions = list(set(extensions) - IMG_ZIP_FMT) + + self._exts = sorted( + {f".{ext}" if not ext.startswith(".") else ext for ext in extensions} + ) + + super().__init__( + value=value, + exists=exists, + resolve=resolve, + extensions=self._exts, + **metadata, + ) + + def validate(self, objekt, name, value, return_pathlike=False): + """Validate a value change.""" + value = super().validate(objekt, name, value, return_pathlike=True) + if self._exts: + fname = value.name + if not any(fname.endswith(e) for e in self._exts): + self.error(objekt, name, str(value)) + + if not return_pathlike: + value = str(value) + + return value -Undefined = _Undefined() + +class ImageFile(File): + """Defines a trait whose value must be a known neuroimaging file.""" + + def __init__( + self, + value=NoDefaultSpecified, + exists=False, + resolve=False, + types=None, + **metadata, + ): + """Create an ImageFile trait.""" + extensions = None + if types is not None: + if isinstance(types, (bytes, str)): + types = [types] + + if set(types) - set(IMG_FORMATS.keys()): + invalid = set(types) - set(IMG_FORMATS.keys()) + raise ValueError( + """\ +Unknown value(s) %s for metadata type of an ImageFile input.\ +""" + % ", ".join(['"%s"' % t for t in invalid]) + ) + extensions = [ext for t in types for ext in IMG_FORMATS[t]] + + super().__init__( + value=value, + exists=exists, + extensions=extensions, + resolve=resolve, + **metadata, + ) -def isdefined(object): - return not isinstance(object, _Undefined) +class Tuple(traits.BaseTuple): + def validate(self, objekt, name, value): + if isinstance(value, list): + value = tuple(value) + + return super().validate(objekt, name, value) + + +def isdefined(objekt): + return not isinstance(objekt, _Undefined) def has_metadata(trait, metadata, value=None, recursive=True): - ''' + """ Checks if a given trait has a metadata (and optionally if it is set to particular value) - ''' + """ count = 0 - if hasattr(trait, "_metadata") and metadata in list( - trait._metadata.keys()) and (trait._metadata[metadata] == value - or value is None): + if ( + hasattr(trait, 
"_metadata") + and metadata in list(trait._metadata.keys()) + and (trait._metadata[metadata] == value or value is None) + ): count += 1 if recursive: - if hasattr(trait, 'inner_traits'): + if hasattr(trait, "inner_traits"): for inner_trait in trait.inner_traits(): - count += has_metadata(inner_trait.trait_type, metadata, - recursive) - if hasattr(trait, 'handlers') and trait.handlers is not None: + count += has_metadata(inner_trait.trait_type, metadata, recursive) + if hasattr(trait, "handlers") and trait.handlers is not None: for handler in trait.handlers: count += has_metadata(handler, metadata, recursive) @@ -316,38 +405,35 @@ def has_metadata(trait, metadata, value=None, recursive=True): class MultiObject(traits.List): - """ Abstract class - shared functionality of input and output MultiObject - """ - - def validate(self, object, name, value): + """Abstract class - shared functionality of input and output MultiObject""" + def validate(self, objekt, name, value): # want to treat range and other sequences (except str) as list - if not isinstance(value, (str, bytes)) and isinstance( - value, Sequence): + if not isinstance(value, (str, bytes)) and isinstance(value, Sequence): value = list(value) - if not isdefined(value) or \ - (isinstance(value, list) and len(value) == 0): + if not isdefined(value) or (isinstance(value, list) and len(value) == 0): return Undefined newvalue = value inner_trait = self.inner_traits()[0] - if not isinstance(value, list) \ - or (isinstance(inner_trait.trait_type, traits.List) and - not isinstance(inner_trait.trait_type, InputMultiObject) and - not isinstance(value[0], list)): + if not isinstance(value, list) or ( + isinstance(inner_trait.trait_type, traits.List) + and not isinstance(inner_trait.trait_type, InputMultiObject) + and not isinstance(value[0], list) + ): newvalue = [value] - value = super(MultiObject, self).validate(object, name, newvalue) + value = super().validate(objekt, name, newvalue) if value: return value - self.error(object, name, value) + self.error(objekt, name, value) class OutputMultiObject(MultiObject): - """ Implements a user friendly traits that accepts one or more + """Implements a user friendly traits that accepts one or more paths to files or directories. This is the output version which return a single string whenever possible (when it was set to a single value or a list of length 1). Default value of this trait @@ -379,8 +465,8 @@ class OutputMultiObject(MultiObject): """ - def get(self, object, name): - value = self.get_value(object, name) + def get(self, objekt, name): + value = self.get_value(objekt, name) if len(value) == 0: return Undefined elif len(value) == 1: @@ -388,12 +474,12 @@ def get(self, object, name): else: return value - def set(self, object, name, value): - self.set_value(object, name, value) + def set(self, objekt, name, value): + self.set_value(objekt, name, value) class InputMultiObject(MultiObject): - """ Implements a user friendly traits that accepts one or more + """Implements a user friendly traits that accepts one or more paths to files or directories. This is the input version which always returns a list. Default value of this trait is _Undefined. It does not accept empty lists. 
@@ -423,7 +509,93 @@ class InputMultiObject(MultiObject): ['/software/temp/foo.txt', '/software/temp/goo.txt'] """ + pass + InputMultiPath = InputMultiObject OutputMultiPath = OutputMultiObject + + +def _rebase_path(value, cwd): + if isinstance(value, list): + return [_rebase_path(v, cwd) for v in value] + + try: + value = Path(value) + except TypeError: + pass + else: + try: + value = value.relative_to(cwd) + except ValueError: + pass + return value + + +def _resolve_path(value, cwd): + if isinstance(value, list): + return [_resolve_path(v, cwd) for v in value] + + try: + value = Path(value) + except TypeError: + pass + else: + if not value.is_absolute(): + value = Path(cwd).absolute() / value + return value + + +def _recurse_on_path_traits(func, thistrait, value, cwd): + """Run func recursively on BasePath-derived traits.""" + if thistrait.is_trait_type(BasePath): + value = func(value, cwd) + elif thistrait.is_trait_type(traits.List): + (innertrait,) = thistrait.inner_traits + if not isinstance(value, (list, tuple)): + return _recurse_on_path_traits(func, innertrait, value, cwd) + + value = [_recurse_on_path_traits(func, innertrait, v, cwd) for v in value] + elif isinstance(value, dict) and thistrait.is_trait_type(traits.Dict): + _, innertrait = thistrait.inner_traits + value = { + k: _recurse_on_path_traits(func, innertrait, v, cwd) + for k, v in value.items() + } + elif isinstance(value, tuple) and thistrait.is_trait_type(traits.BaseTuple): + value = tuple( + _recurse_on_path_traits(func, subtrait, v, cwd) + for subtrait, v in zip(thistrait.handler.types, value) + ) + elif thistrait.is_trait_type(traits.TraitCompound): + is_str = [ + isinstance(f, (traits.String, traits.BaseStr, traits.BaseBytes, Str)) + for f in thistrait.handler.handlers + ] + if ( + any(is_str) + and isinstance(value, (bytes, str)) + and not value.startswith("/") + ): + return value + + for subtrait in thistrait.handler.handlers: + try: + sb_instance = subtrait() + except TypeError: + return value + else: + value = _recurse_on_path_traits(func, sb_instance, value, cwd) + + return value + + +def rebase_path_traits(thistrait, value, cwd): + """Rebase a BasePath-derived trait given an interface spec.""" + return _recurse_on_path_traits(_rebase_path, thistrait, value, cwd) + + +def resolve_path_traits(thistrait, value, cwd): + """Resolve a BasePath-derived trait given an interface spec.""" + return _recurse_on_path_traits(_resolve_path, thistrait, value, cwd) diff --git a/nipype/interfaces/brainsuite/__init__.py b/nipype/interfaces/brainsuite/__init__.py index 6611aedff4..5fb27d6ae1 100644 --- a/nipype/interfaces/brainsuite/__init__.py +++ b/nipype/interfaces/brainsuite/__init__.py @@ -1,4 +1,17 @@ -# -*- coding: utf-8 -*- -from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, - Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit, SVReg, - BDP, ThicknessPVC) +from .brainsuite import ( + Bse, + Bfc, + Pvc, + Cerebro, + Cortex, + Scrubmask, + Tca, + Dewisp, + Dfs, + Pialmesh, + Skullfinder, + Hemisplit, + SVReg, + BDP, + ThicknessPVC, +) diff --git a/nipype/interfaces/brainsuite/brainsuite.py b/nipype/interfaces/brainsuite/brainsuite.py index aa75d2bf81..cf7161c030 100644 --- a/nipype/interfaces/brainsuite/brainsuite.py +++ b/nipype/interfaces/brainsuite/brainsuite.py @@ -1,73 +1,77 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -import os -import re as regex - -from ..base import TraitedSpec, CommandLineInputSpec, CommandLine, File, 
traits, isdefined """This script provides interfaces for BrainSuite command line tools. Please see brainsuite.org for more information. Author: Jason Wong """ +import os +import re as regex + +from ..base import ( + TraitedSpec, + CommandLineInputSpec, + CommandLine, + File, + traits, + isdefined, +) -class BseInputSpec(CommandLineInputSpec): - inputMRIFile = File( - mandatory=True, argstr='-i %s', desc='input MRI volume') +class BseInputSpec(CommandLineInputSpec): + inputMRIFile = File(mandatory=True, argstr="-i %s", desc="input MRI volume") outputMRIVolume = File( - desc= - 'output brain-masked MRI volume. If unspecified, output file name will be auto generated.', - argstr='-o %s', + desc="output brain-masked MRI volume. If unspecified, output file name will be auto generated.", + argstr="-o %s", hash_files=False, - genfile=True) + genfile=True, + ) outputMaskFile = File( - desc= - 'save smooth brain mask. If unspecified, output file name will be auto generated.', - argstr='--mask %s', + desc="save smooth brain mask. If unspecified, output file name will be auto generated.", + argstr="--mask %s", hash_files=False, - genfile=True) + genfile=True, + ) diffusionConstant = traits.Float( - 25, usedefault=True, desc='diffusion constant', argstr='-d %f') + 25, usedefault=True, desc="diffusion constant", argstr="-d %f" + ) diffusionIterations = traits.Int( - 3, usedefault=True, desc='diffusion iterations', argstr='-n %d') + 3, usedefault=True, desc="diffusion iterations", argstr="-n %d" + ) edgeDetectionConstant = traits.Float( - 0.64, usedefault=True, desc='edge detection constant', argstr='-s %f') + 0.64, usedefault=True, desc="edge detection constant", argstr="-s %f" + ) radius = traits.Float( - 1, - usedefault=True, - desc='radius of erosion/dilation filter', - argstr='-r %f') + 1, usedefault=True, desc="radius of erosion/dilation filter", argstr="-r %f" + ) dilateFinalMask = traits.Bool( - True, usedefault=True, desc='dilate final mask', argstr='-p') - trim = traits.Bool( - True, usedefault=True, desc='trim brainstem', argstr='--trim') + True, usedefault=True, desc="dilate final mask", argstr="-p" + ) + trim = traits.Bool(True, usedefault=True, desc="trim brainstem", argstr="--trim") outputDiffusionFilter = File( - desc='diffusion filter output', argstr='--adf %s', hash_files=False) - outputEdgeMap = File( - desc='edge map output', argstr='--edge %s', hash_files=False) + desc="diffusion filter output", argstr="--adf %s", hash_files=False + ) + outputEdgeMap = File(desc="edge map output", argstr="--edge %s", hash_files=False) outputDetailedBrainMask = File( - desc='save detailed brain mask', argstr='--hires %s', hash_files=False) - outputCortexFile = File( - desc='cortex file', argstr='--cortex %s', hash_files=False) + desc="save detailed brain mask", argstr="--hires %s", hash_files=False + ) + outputCortexFile = File(desc="cortex file", argstr="--cortex %s", hash_files=False) verbosityLevel = traits.Float( - 1, usedefault=True, desc=' verbosity level (0=silent)', argstr='-v %f') + 1, usedefault=True, desc=" verbosity level (0=silent)", argstr="-v %f" + ) noRotate = traits.Bool( - desc= - 'retain original orientation(default behavior will auto-rotate input NII files to LPI orientation)', - argstr='--norotate') - timer = traits.Bool(desc='show timing', argstr='--timer') + desc="retain original orientation(default behavior will auto-rotate input NII files to LPI orientation)", + argstr="--norotate", + ) + timer = traits.Bool(desc="show timing", argstr="--timer") class 
BseOutputSpec(TraitedSpec): - outputMRIVolume = File(desc='path/name of brain-masked MRI volume') - outputMaskFile = File(desc='path/name of smooth brain mask') - outputDiffusionFilter = File(desc='path/name of diffusion filter output') - outputEdgeMap = File(desc='path/name of edge map output') - outputDetailedBrainMask = File(desc='path/name of detailed brain mask') - outputCortexFile = File(desc='path/name of cortex file') + outputMRIVolume = File(desc="path/name of brain-masked MRI volume") + outputMaskFile = File(desc="path/name of smooth brain mask") + outputDiffusionFilter = File(desc="path/name of diffusion filter output") + outputEdgeMap = File(desc="path/name of edge map output") + outputDetailedBrainMask = File(desc="path/name of detailed brain mask") + outputCortexFile = File(desc="path/name of cortex file") class Bse(CommandLine): @@ -90,7 +94,7 @@ class Bse(CommandLine): input_spec = BseInputSpec output_spec = BseOutputSpec - _cmd = 'bse' + _cmd = "bse" def _gen_filename(self, name): inputs = self.inputs.get() @@ -98,8 +102,8 @@ def _gen_filename(self, name): return os.path.abspath(inputs[name]) fileToSuffixMap = { - 'outputMRIVolume': '.bse.nii.gz', - 'outputMaskFile': '.mask.nii.gz' + "outputMRIVolume": ".bse.nii.gz", + "outputMaskFile": ".mask.nii.gz", } if name in fileToSuffixMap: @@ -113,87 +117,102 @@ def _list_outputs(self): class BfcInputSpec(CommandLineInputSpec): inputMRIFile = File( - mandatory=True, desc='input skull-stripped MRI volume', argstr='-i %s') - inputMaskFile = File(desc='mask file', argstr='-m %s', hash_files=False) + mandatory=True, desc="input skull-stripped MRI volume", argstr="-i %s" + ) + inputMaskFile = File(desc="mask file", argstr="-m %s", hash_files=False) outputMRIVolume = File( - desc= - 'output bias-corrected MRI volume.If unspecified, output file name will be auto generated.', - argstr='-o %s', + desc="output bias-corrected MRI volume. 
If unspecified, output file name will be auto generated.", + argstr="-o %s", hash_files=False, - genfile=True) + genfile=True, + ) outputBiasField = File( - desc='save bias field estimate', argstr='--bias %s', hash_files=False) + desc="save bias field estimate", argstr="--bias %s", hash_files=False + ) outputMaskedBiasField = File( - desc='save bias field estimate (masked)', - argstr='--maskedbias %s', - hash_files=False) - histogramRadius = traits.Int( - desc='histogram radius (voxels)', argstr='-r %d') + desc="save bias field estimate (masked)", + argstr="--maskedbias %s", + hash_files=False, + ) + histogramRadius = traits.Int(desc="histogram radius (voxels)", argstr="-r %d") biasEstimateSpacing = traits.Int( - desc='bias sample spacing (voxels)', argstr='-s %d') + desc="bias sample spacing (voxels)", argstr="-s %d" + ) controlPointSpacing = traits.Int( - desc='control point spacing (voxels)', argstr='-c %d') + desc="control point spacing (voxels)", argstr="-c %d" + ) splineLambda = traits.Float( - desc='spline stiffness weighting parameter', argstr='-w %f') + desc="spline stiffness weighting parameter", argstr="-w %f" + ) histogramType = traits.Enum( - 'ellipse', - 'block', - desc= - 'Options for type of histogram\nellipse: use ellipsoid for ROI histogram\nblock :use block for ROI histogram', - argstr='%s') + "ellipse", + "block", + desc="""\ +Options for type of histogram: + + * ``ellipse``: use ellipsoid for ROI histogram + * ``block``:use block for ROI histogram + +""", + argstr="%s", + ) iterativeMode = traits.Bool( - desc='iterative mode (overrides -r, -s, -c, -w settings)', - argstr='--iterate') - correctionScheduleFile = File( - desc='list of parameters ', argstr='--schedule %s') + desc="iterative mode (overrides -r, -s, -c, -w settings)", argstr="--iterate" + ) + correctionScheduleFile = File(desc="list of parameters ", argstr="--schedule %s") biasFieldEstimatesOutputPrefix = traits.Str( - desc='save iterative bias field estimates as .n.field.nii.gz', - argstr='--biasprefix %s') + desc="save iterative bias field estimates as .n.field.nii.gz", + argstr="--biasprefix %s", + ) correctedImagesOutputPrefix = traits.Str( - desc='save iterative corrected images as .n.bfc.nii.gz', - argstr='--prefix %s') + desc="save iterative corrected images as .n.bfc.nii.gz", + argstr="--prefix %s", + ) correctWholeVolume = traits.Bool( - desc='apply correction field to entire volume', argstr='--extrapolate') + desc="apply correction field to entire volume", argstr="--extrapolate" + ) minBias = traits.Float( - 0.5, - usedefault=True, - desc='minimum allowed bias value', - argstr='-L %f') + 0.5, usedefault=True, desc="minimum allowed bias value", argstr="-L %f" + ) maxBias = traits.Float( - 1.5, - usedefault=True, - desc='maximum allowed bias value', - argstr='-U %f') + 1.5, usedefault=True, desc="maximum allowed bias value", argstr="-U %f" + ) biasRange = traits.Enum( "low", "medium", "high", - desc= - 'Preset options for bias_model\n low: small bias model [0.95,1.05]\n' - 'medium: medium bias model [0.90,1.10]\n high: high bias model [0.80,1.20]', - argstr='%s') + desc="""\ +Preset options for bias_model + + * low: small bias model [0.95,1.05] + * medium: medium bias model [0.90,1.10] + * high: high bias model [0.80,1.20] + +""", + argstr="%s", + ) intermediate_file_type = traits.Enum( "analyze", "nifti", "gzippedAnalyze", "gzippedNifti", - desc='Options for the format in which intermediate files are generated', - argstr='%s') - convergenceThreshold = traits.Float( - desc='convergence threshold', 
argstr='--eps %f') + desc="Options for the format in which intermediate files are generated", + argstr="%s", + ) + convergenceThreshold = traits.Float(desc="convergence threshold", argstr="--eps %f") biasEstimateConvergenceThreshold = traits.Float( - desc='bias estimate convergence threshold (values > 0.1 disable)', - argstr='--beps %f') - verbosityLevel = traits.Int( - desc='verbosity level (0=silent)', argstr='-v %d') - timer = traits.Bool(desc='display timing information', argstr='--timer') + desc="bias estimate convergence threshold (values > 0.1 disable)", + argstr="--beps %f", + ) + verbosityLevel = traits.Int(desc="verbosity level (0=silent)", argstr="-v %d") + timer = traits.Bool(desc="display timing information", argstr="--timer") class BfcOutputSpec(TraitedSpec): - outputMRIVolume = File(desc='path/name of output file') - outputBiasField = File(desc='path/name of bias field output file') - outputMaskedBiasField = File(desc='path/name of masked bias field output') - correctionScheduleFile = File(desc='path/name of schedule file') + outputMRIVolume = File(desc="path/name of output file") + outputBiasField = File(desc="path/name of bias field output file") + outputMaskedBiasField = File(desc="path/name of masked bias field output") + correctionScheduleFile = File(desc="path/name of schedule file") class Bfc(CommandLine): @@ -217,65 +236,66 @@ class Bfc(CommandLine): input_spec = BfcInputSpec output_spec = BfcOutputSpec - _cmd = 'bfc' + _cmd = "bfc" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) - fileToSuffixMap = {'outputMRIVolume': '.bfc.nii.gz'} + fileToSuffixMap = {"outputMRIVolume": ".bfc.nii.gz"} if name in fileToSuffixMap: return getFileName(self.inputs.inputMRIFile, fileToSuffixMap[name]) return None def _format_arg(self, name, spec, value): - if name == 'histogramType': - return spec.argstr % { - "ellipse": "--ellipse", - "block": "--block" - }[value] - if name == 'biasRange': - return spec.argstr % { - "low": "--low", - "medium": "--medium", - "high": "--high" - }[value] - if name == 'intermediate_file_type': - return spec.argstr % { - "analyze": "--analyze", - "nifti": "--nifti", - "gzippedAnalyze": "--analyzegz", - "gzippedNifti": "--niftigz" - }[value] - - return super(Bfc, self)._format_arg(name, spec, value) + if name == "histogramType": + return spec.argstr % {"ellipse": "--ellipse", "block": "--block"}[value] + if name == "biasRange": + return ( + spec.argstr + % {"low": "--low", "medium": "--medium", "high": "--high"}[value] + ) + if name == "intermediate_file_type": + return ( + spec.argstr + % { + "analyze": "--analyze", + "nifti": "--nifti", + "gzippedAnalyze": "--analyzegz", + "gzippedNifti": "--niftigz", + }[value] + ) + + return super()._format_arg(name, spec, value) def _list_outputs(self): return l_outputs(self) class PvcInputSpec(CommandLineInputSpec): - inputMRIFile = File(mandatory=True, desc='MRI file', argstr='-i %s') - inputMaskFile = File(desc='brain mask file', argstr='-m %s') + inputMRIFile = File(mandatory=True, desc="MRI file", argstr="-i %s") + inputMaskFile = File(desc="brain mask file", argstr="-m %s") outputLabelFile = File( - desc= - 'output label file. If unspecified, output file name will be auto generated.', - argstr='-o %s', - genfile=True) + desc="output label file. 
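# --- Illustrative note (not part of the patch) -------------------------------
# Bfc does not pass its enum-valued inputs (histogramType, biasRange,
# intermediate_file_type) to the command verbatim; Bfc._format_arg, shown in
# this hunk, maps each value onto the corresponding CLI flag. A minimal sketch
# (the input file name is hypothetical; .cmdline only renders the command):
from nipype.interfaces.brainsuite import Bfc

bfc = Bfc()
bfc.inputs.inputMRIFile = "sub-01.bse.nii.gz"
bfc.inputs.biasRange = "medium"
assert "--medium" in bfc.cmdline  # rendered flag, not the literal "medium"
# -----------------------------------------------------------------------------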
If unspecified, output file name will be auto generated.", + argstr="-o %s", + genfile=True, + ) outputTissueFractionFile = File( - desc='output tissue fraction file', argstr='-f %s', genfile=True) - spatialPrior = traits.Float(desc='spatial prior strength', argstr='-l %f') - verbosity = traits.Int(desc='verbosity level (0 = silent)', argstr='-v %d') + desc="output tissue fraction file", argstr="-f %s", genfile=True + ) + spatialPrior = traits.Float(desc="spatial prior strength", argstr="-l %f") + verbosity = traits.Int(desc="verbosity level (0 = silent)", argstr="-v %d") threeClassFlag = traits.Bool( - desc='use a three-class (CSF=0,GM=1,WM=2) labeling', argstr='-3') - timer = traits.Bool(desc='time processing', argstr='--timer') + desc="use a three-class (CSF=0,GM=1,WM=2) labeling", argstr="-3" + ) + timer = traits.Bool(desc="time processing", argstr="--timer") class PvcOutputSpec(TraitedSpec): - outputLabelFile = File(desc='path/name of label file') - outputTissueFractionFile = File(desc='path/name of tissue fraction file') + outputLabelFile = File(desc="path/name of label file") + outputTissueFractionFile = File(desc="path/name of tissue fraction file") class Pvc(CommandLine): @@ -300,7 +320,7 @@ class Pvc(CommandLine): input_spec = PvcInputSpec output_spec = PvcOutputSpec - _cmd = 'pvc' + _cmd = "pvc" def _gen_filename(self, name): inputs = self.inputs.get() @@ -308,8 +328,8 @@ def _gen_filename(self, name): return os.path.abspath(inputs[name]) fileToSuffixMap = { - 'outputLabelFile': '.pvc.label.nii.gz', - 'outputTissueFractionFile': '.pvc.frac.nii.gz' + "outputLabelFile": ".pvc.label.nii.gz", + "outputTissueFractionFile": ".pvc.frac.nii.gz", } if name in fileToSuffixMap: return getFileName(self.inputs.inputMRIFile, fileToSuffixMap[name]) @@ -321,53 +341,53 @@ def _list_outputs(self): class CerebroInputSpec(CommandLineInputSpec): - inputMRIFile = File( - mandatory=True, desc='input 3D MRI volume', argstr='-i %s') + inputMRIFile = File(mandatory=True, desc="input 3D MRI volume", argstr="-i %s") inputAtlasMRIFile = File( - mandatory=True, desc='atlas MRI volume', argstr='--atlas %s') + mandatory=True, desc="atlas MRI volume", argstr="--atlas %s" + ) inputAtlasLabelFile = File( - mandatory=True, desc='atlas labeling', argstr='--atlaslabels %s') - inputBrainMaskFile = File(desc='brain mask file', argstr='-m %s') + mandatory=True, desc="atlas labeling", argstr="--atlaslabels %s" + ) + inputBrainMaskFile = File(desc="brain mask file", argstr="-m %s") outputCerebrumMaskFile = File( - desc= - 'output cerebrum mask volume. If unspecified, output file name will be auto generated.', - argstr='-o %s', - genfile=True) + desc="output cerebrum mask volume. If unspecified, output file name will be auto generated.", + argstr="-o %s", + genfile=True, + ) outputLabelVolumeFile = File( - desc= - 'output labeled hemisphere/cerebrum volume. If unspecified, output file name will be auto generated.', - argstr='-l %s', - genfile=True) - costFunction = traits.Int(2, usedefault=True, desc='0,1,2', argstr='-c %d') + desc="output labeled hemisphere/cerebrum volume. 
If unspecified, output file name will be auto generated.", + argstr="-l %s", + genfile=True, + ) + costFunction = traits.Int(2, usedefault=True, desc="0,1,2", argstr="-c %d") useCentroids = traits.Bool( - desc='use centroids of data to initialize position', - argstr='--centroids') + desc="use centroids of data to initialize position", argstr="--centroids" + ) outputAffineTransformFile = File( - desc='save affine transform to file.', argstr='--air %s', genfile=True) + desc="save affine transform to file.", argstr="--air %s", genfile=True + ) outputWarpTransformFile = File( - desc='save warp transform to file.', argstr='--warp %s', genfile=True) - verbosity = traits.Int(desc='verbosity level (0=silent)', argstr='-v %d') - linearConvergence = traits.Float( - desc='linear convergence', argstr='--linconv %f') - warpLabel = traits.Int( - desc='warp order (2,3,4,5,6,7,8)', argstr='--warplevel %d') - warpConvergence = traits.Float( - desc='warp convergence', argstr='--warpconv %f') - keepTempFiles = traits.Bool( - desc="don't remove temporary files", argstr='--keep') + desc="save warp transform to file.", argstr="--warp %s", genfile=True + ) + verbosity = traits.Int(desc="verbosity level (0=silent)", argstr="-v %d") + linearConvergence = traits.Float(desc="linear convergence", argstr="--linconv %f") + warpLabel = traits.Int(desc="warp order (2,3,4,5,6,7,8)", argstr="--warplevel %d") + warpConvergence = traits.Float(desc="warp convergence", argstr="--warpconv %f") + keepTempFiles = traits.Bool(desc="don't remove temporary files", argstr="--keep") tempDirectory = traits.Str( - desc='specify directory to use for temporary files', - argstr='--tempdir %s') + desc="specify directory to use for temporary files", argstr="--tempdir %s" + ) tempDirectoryBase = traits.Str( - desc='create a temporary directory within this directory', - argstr='--tempdirbase %s') + desc="create a temporary directory within this directory", + argstr="--tempdirbase %s", + ) class CerebroOutputSpec(TraitedSpec): - outputCerebrumMaskFile = File(desc='path/name of cerebrum mask file') - outputLabelVolumeFile = File(desc='path/name of label mask file') - outputAffineTransformFile = File(desc='path/name of affine transform file') - outputWarpTransformFile = File(desc='path/name of warp transform file') + outputCerebrumMaskFile = File(desc="path/name of cerebrum mask file") + outputLabelVolumeFile = File(desc="path/name of label mask file") + outputAffineTransformFile = File(desc="path/name of affine transform file") + outputWarpTransformFile = File(desc="path/name of warp transform file") class Cerebro(CommandLine): @@ -395,7 +415,7 @@ class Cerebro(CommandLine): input_spec = CerebroInputSpec output_spec = CerebroOutputSpec - _cmd = 'cerebro' + _cmd = "cerebro" def _gen_filename(self, name): inputs = self.inputs.get() @@ -403,10 +423,10 @@ def _gen_filename(self, name): return os.path.abspath(inputs[name]) fileToSuffixMap = { - 'outputCerebrumMaskFile': '.cerebrum.mask.nii.gz', - 'outputLabelVolumeFile': '.hemi.label.nii.gz', - 'outputWarpTransformFile': '.warp', - 'outputAffineTransformFile': '.air' + "outputCerebrumMaskFile": ".cerebrum.mask.nii.gz", + "outputLabelVolumeFile": ".hemi.label.nii.gz", + "outputWarpTransformFile": ".warp", + "outputAffineTransformFile": ".air", } if name in fileToSuffixMap: return getFileName(self.inputs.inputMRIFile, fileToSuffixMap[name]) @@ -419,36 +439,38 @@ def _list_outputs(self): class CortexInputSpec(CommandLineInputSpec): inputHemisphereLabelFile = File( - mandatory=True, desc='hemisphere / 
lobe label volume', argstr='-h %s') + mandatory=True, desc="hemisphere / lobe label volume", argstr="-h %s" + ) outputCerebrumMask = File( - desc= - 'output structure mask. If unspecified, output file name will be auto generated.', - argstr='-o %s', - genfile=True) + desc="output structure mask. If unspecified, output file name will be auto generated.", + argstr="-o %s", + genfile=True, + ) inputTissueFractionFile = File( - mandatory=True, - desc='tissue fraction file (32-bit float)', - argstr='-f %s') + mandatory=True, desc="tissue fraction file (32-bit float)", argstr="-f %s" + ) tissueFractionThreshold = traits.Float( 50.0, usedefault=True, - desc='tissue fraction threshold (percentage)', - argstr='-p %f') + desc="tissue fraction threshold (percentage)", + argstr="-p %f", + ) computeWGBoundary = traits.Bool( - True, usedefault=True, desc='compute WM/GM boundary', argstr='-w') - computeGCBoundary = traits.Bool( - desc='compute GM/CSF boundary', argstr='-g') + True, usedefault=True, desc="compute WM/GM boundary", argstr="-w" + ) + computeGCBoundary = traits.Bool(desc="compute GM/CSF boundary", argstr="-g") includeAllSubcorticalAreas = traits.Bool( True, usedefault=True, - desc='include all subcortical areas in WM mask', - argstr='-a') - verbosity = traits.Int(desc='verbosity level', argstr='-v %d') - timer = traits.Bool(desc='timing function', argstr='--timer') + desc="include all subcortical areas in WM mask", + argstr="-a", + ) + verbosity = traits.Int(desc="verbosity level", argstr="-v %d") + timer = traits.Bool(desc="timing function", argstr="--timer") class CortexOutputSpec(TraitedSpec): - outputCerebrumMask = File(desc='path/name of cerebrum mask') + outputCerebrumMask = File(desc="path/name of cerebrum mask") class Cortex(CommandLine): @@ -473,16 +495,17 @@ class Cortex(CommandLine): input_spec = CortexInputSpec output_spec = CortexOutputSpec - _cmd = 'cortex' + _cmd = "cortex" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) - if name == 'outputCerebrumMask': - return getFileName(self.inputs.inputHemisphereLabelFile, - '.init.cortex.mask.nii.gz') + if name == "outputCerebrumMask": + return getFileName( + self.inputs.inputHemisphereLabelFile, ".init.cortex.mask.nii.gz" + ) return None def _list_outputs(self): @@ -491,23 +514,26 @@ def _list_outputs(self): class ScrubmaskInputSpec(CommandLineInputSpec): inputMaskFile = File( - mandatory=True, desc='input structure mask file', argstr='-i %s') + mandatory=True, desc="input structure mask file", argstr="-i %s" + ) outputMaskFile = File( - desc= - 'output structure mask file. If unspecified, output file name will be auto generated.', - argstr='-o %s', - genfile=True) + desc="output structure mask file. 
If unspecified, output file name will be auto generated.", + argstr="-o %s", + genfile=True, + ) backgroundFillThreshold = traits.Int( - 2, usedefault=True, desc='background fill threshold', argstr='-b %d') + 2, usedefault=True, desc="background fill threshold", argstr="-b %d" + ) foregroundTrimThreshold = traits.Int( - 0, usedefault=True, desc='foreground trim threshold', argstr='-f %d') - numberIterations = traits.Int(desc='number of iterations', argstr='-n %d') - verbosity = traits.Int(desc='verbosity (0=silent)', argstr='-v %d') - timer = traits.Bool(desc='timing function', argstr='--timer') + 0, usedefault=True, desc="foreground trim threshold", argstr="-f %d" + ) + numberIterations = traits.Int(desc="number of iterations", argstr="-n %d") + verbosity = traits.Int(desc="verbosity (0=silent)", argstr="-v %d") + timer = traits.Bool(desc="timing function", argstr="--timer") class ScrubmaskOutputSpec(TraitedSpec): - outputMaskFile = File(desc='path/name of mask file') + outputMaskFile = File(desc="path/name of mask file") class Scrubmask(CommandLine): @@ -528,18 +554,20 @@ class Scrubmask(CommandLine): >>> results = scrubmask.run() #doctest: +SKIP """ + input_spec = ScrubmaskInputSpec output_spec = ScrubmaskOutputSpec - _cmd = 'scrubmask' + _cmd = "scrubmask" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) - if name == 'outputMaskFile': - return getFileName(self.inputs.inputMaskFile, - '.cortex.scrubbed.mask.nii.gz') + if name == "outputMaskFile": + return getFileName( + self.inputs.inputMaskFile, ".cortex.scrubbed.mask.nii.gz" + ) return None @@ -548,25 +576,25 @@ def _list_outputs(self): class TcaInputSpec(CommandLineInputSpec): - inputMaskFile = File( - mandatory=True, desc='input mask volume', argstr='-i %s') + inputMaskFile = File(mandatory=True, desc="input mask volume", argstr="-i %s") outputMaskFile = File( - desc= - 'output mask volume. If unspecified, output file name will be auto generated.', - argstr='-o %s', - genfile=True) + desc="output mask volume. 
If unspecified, output file name will be auto generated.", + argstr="-o %s", + genfile=True, + ) minCorrectionSize = traits.Int( - 2500, usedefault=True, desc='maximum correction size', argstr='-m %d') - maxCorrectionSize = traits.Int( - desc='minimum correction size', argstr='-n %d') + 2500, usedefault=True, desc="maximum correction size", argstr="-m %d" + ) + maxCorrectionSize = traits.Int(desc="minimum correction size", argstr="-n %d") foregroundDelta = traits.Int( - 20, usedefault=True, desc='foreground delta', argstr='--delta %d') - verbosity = traits.Int(desc='verbosity (0 = quiet)', argstr='-v %d') - timer = traits.Bool(desc='timing function', argstr='--timer') + 20, usedefault=True, desc="foreground delta", argstr="--delta %d" + ) + verbosity = traits.Int(desc="verbosity (0 = quiet)", argstr="-v %d") + timer = traits.Bool(desc="timing function", argstr="--timer") class TcaOutputSpec(TraitedSpec): - outputMaskFile = File(desc='path/name of mask file') + outputMaskFile = File(desc="path/name of mask file") class Tca(CommandLine): @@ -585,18 +613,18 @@ class Tca(CommandLine): >>> results = tca.run() #doctest: +SKIP """ + input_spec = TcaInputSpec output_spec = TcaOutputSpec - _cmd = 'tca' + _cmd = "tca" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) - if name == 'outputMaskFile': - return getFileName(self.inputs.inputMaskFile, - '.cortex.tca.mask.nii.gz') + if name == "outputMaskFile": + return getFileName(self.inputs.inputMaskFile, ".cortex.tca.mask.nii.gz") return None @@ -605,21 +633,20 @@ def _list_outputs(self): class DewispInputSpec(CommandLineInputSpec): - inputMaskFile = File(mandatory=True, desc='input file', argstr='-i %s') + inputMaskFile = File(mandatory=True, desc="input file", argstr="-i %s") outputMaskFile = File( - desc= - 'output file. If unspecified, output file name will be auto generated.', - argstr='-o %s', - genfile=True) - verbosity = traits.Int(desc='verbosity', argstr='-v %d') - sizeThreshold = traits.Int(desc='size threshold', argstr='-t %d') - maximumIterations = traits.Int( - desc='maximum number of iterations', argstr='-n %d') - timer = traits.Bool(desc='time processing', argstr='--timer') + desc="output file. 
If unspecified, output file name will be auto generated.", + argstr="-o %s", + genfile=True, + ) + verbosity = traits.Int(desc="verbosity", argstr="-v %d") + sizeThreshold = traits.Int(desc="size threshold", argstr="-t %d") + maximumIterations = traits.Int(desc="maximum number of iterations", argstr="-n %d") + timer = traits.Bool(desc="time processing", argstr="--timer") class DewispOutputSpec(TraitedSpec): - outputMaskFile = File(desc='path/name of mask file') + outputMaskFile = File(desc="path/name of mask file") class Dewisp(CommandLine): @@ -647,16 +674,15 @@ class Dewisp(CommandLine): input_spec = DewispInputSpec output_spec = DewispOutputSpec - _cmd = 'dewisp' + _cmd = "dewisp" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) - if name == 'outputMaskFile': - return getFileName(self.inputs.inputMaskFile, - '.cortex.dewisp.mask.nii.gz') + if name == "outputMaskFile": + return getFileName(self.inputs.inputMaskFile, ".cortex.dewisp.mask.nii.gz") return None @@ -665,55 +691,59 @@ def _list_outputs(self): class DfsInputSpec(CommandLineInputSpec): - inputVolumeFile = File( - mandatory=True, desc='input 3D volume', argstr='-i %s') + inputVolumeFile = File(mandatory=True, desc="input 3D volume", argstr="-i %s") outputSurfaceFile = File( - desc= - 'output surface mesh file. If unspecified, output file name will be auto generated.', - argstr='-o %s', - genfile=True) + desc="output surface mesh file. If unspecified, output file name will be auto generated.", + argstr="-o %s", + genfile=True, + ) inputShadingVolume = File( - desc='shade surface model with data from image volume', argstr='-c %s') + desc="shade surface model with data from image volume", argstr="-c %s" + ) smoothingIterations = traits.Int( - 10, - usedefault=True, - desc='number of smoothing iterations', - argstr='-n %d') + 10, usedefault=True, desc="number of smoothing iterations", argstr="-n %d" + ) smoothingConstant = traits.Float( - 0.5, usedefault=True, desc='smoothing constant', argstr='-a %f') + 0.5, usedefault=True, desc="smoothing constant", argstr="-a %f" + ) curvatureWeighting = traits.Float( - 5.0, usedefault=True, desc='curvature weighting', argstr='-w %f') - scalingPercentile = traits.Float(desc='scaling percentile', argstr='-f %f') + 5.0, usedefault=True, desc="curvature weighting", argstr="-w %f" + ) + scalingPercentile = traits.Float(desc="scaling percentile", argstr="-f %f") nonZeroTessellation = traits.Bool( - desc='tessellate non-zero voxels', - argstr='-nz', - xor=('nonZeroTessellation', 'specialTessellation')) + desc="tessellate non-zero voxels", + argstr="-nz", + xor=("nonZeroTessellation", "specialTessellation"), + ) tessellationThreshold = traits.Float( - desc= - 'To be used with specialTessellation. Set this value first, then set specialTessellation value.\nUsage: tessellate voxels greater_than, less_than, or equal_to ', - argstr='%f') + desc="To be used with specialTessellation. Set this value first, then set specialTessellation value.\nUsage: tessellate voxels greater_than, less_than, or equal_to ", + argstr="%f", + ) specialTessellation = traits.Enum( - 'greater_than', - 'less_than', - 'equal_to', - desc= - 'To avoid throwing a UserWarning, set tessellationThreshold first. 
Then set this attribute.\nUsage: tessellate voxels greater_than, less_than, or equal_to ', - argstr='%s', - xor=('nonZeroTessellation', 'specialTessellation'), - requires=['tessellationThreshold'], - position=-1) + "greater_than", + "less_than", + "equal_to", + desc="To avoid throwing a UserWarning, set tessellationThreshold first. Then set this attribute.\nUsage: tessellate voxels greater_than, less_than, or equal_to ", + argstr="%s", + xor=("nonZeroTessellation", "specialTessellation"), + requires=["tessellationThreshold"], + position=-1, + ) zeroPadFlag = traits.Bool( - desc='zero-pad volume (avoids clipping at edges)', argstr='-z') + desc="zero-pad volume (avoids clipping at edges)", argstr="-z" + ) noNormalsFlag = traits.Bool( - desc='do not compute vertex normals', argstr='--nonormals') + desc="do not compute vertex normals", argstr="--nonormals" + ) postSmoothFlag = traits.Bool( - desc='smooth vertices after coloring', argstr='--postsmooth') - verbosity = traits.Int(desc='verbosity (0 = quiet)', argstr='-v %d') - timer = traits.Bool(desc='timing function', argstr='--timer') + desc="smooth vertices after coloring", argstr="--postsmooth" + ) + verbosity = traits.Int(desc="verbosity (0 = quiet)", argstr="-v %d") + timer = traits.Bool(desc="timing function", argstr="--timer") class DfsOutputSpec(TraitedSpec): - outputSurfaceFile = File(desc='path/name of surface file') + outputSurfaceFile = File(desc="path/name of surface file") class Dfs(CommandLine): @@ -736,28 +766,30 @@ class Dfs(CommandLine): input_spec = DfsInputSpec output_spec = DfsOutputSpec - _cmd = 'dfs' + _cmd = "dfs" def _format_arg(self, name, spec, value): - if name == 'tessellationThreshold': - return '' # blank argstr - if name == 'specialTessellation': + if name == "tessellationThreshold": + return "" # blank argstr + if name == "specialTessellation": threshold = self.inputs.tessellationThreshold - return spec.argstr % { - "greater_than": ''.join(("-gt %f" % threshold)), - "less_than": ''.join(("-lt %f" % threshold)), - "equal_to": ''.join(("-eq %f" % threshold)) - }[value] - return super(Dfs, self)._format_arg(name, spec, value) + return ( + spec.argstr + % { + "greater_than": "".join("-gt %f" % threshold), + "less_than": "".join("-lt %f" % threshold), + "equal_to": "".join("-eq %f" % threshold), + }[value] + ) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) - if name == 'outputSurfaceFile': - return getFileName(self.inputs.inputVolumeFile, - '.inner.cortex.dfs') + if name == "outputSurfaceFile": + return getFileName(self.inputs.inputVolumeFile, ".inner.cortex.dfs") return None @@ -766,60 +798,57 @@ def _list_outputs(self): class PialmeshInputSpec(CommandLineInputSpec): - inputSurfaceFile = File(mandatory=True, desc='input file', argstr='-i %s') + inputSurfaceFile = File(mandatory=True, desc="input file", argstr="-i %s") outputSurfaceFile = File( - desc= - 'output file. If unspecified, output file name will be auto generated.', - argstr='-o %s', - genfile=True) - verbosity = traits.Int(desc='verbosity', argstr='-v %d') + desc="output file. 
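# --- Illustrative note (not part of the patch) -------------------------------
# tessellationThreshold deliberately renders as an empty argstr, and
# Dfs._format_arg (in this hunk) folds its value into the specialTessellation
# flag, so the pair emits a single trailing "-gt/-lt/-eq <threshold>" argument.
# A minimal sketch (the input file name is hypothetical):
from nipype.interfaces.brainsuite import Dfs

dfs = Dfs()
dfs.inputs.inputVolumeFile = "sub-01.cortex.dewisp.mask.nii.gz"
dfs.inputs.tessellationThreshold = 100.0  # set the threshold first (see desc)
dfs.inputs.specialTessellation = "greater_than"
assert dfs.cmdline.endswith("-gt 100.000000")  # position=-1 puts it last
# -----------------------------------------------------------------------------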
If unspecified, output file name will be auto generated.", + argstr="-o %s", + genfile=True, + ) + verbosity = traits.Int(desc="verbosity", argstr="-v %d") inputTissueFractionFile = File( - mandatory=True, - desc='floating point (32) tissue fraction image', - argstr='-f %s') + mandatory=True, desc="floating point (32) tissue fraction image", argstr="-f %s" + ) numIterations = traits.Int( - 100, usedefault=True, desc='number of iterations', argstr='-n %d') + 100, usedefault=True, desc="number of iterations", argstr="-n %d" + ) searchRadius = traits.Float( - 1, usedefault=True, desc='search radius', argstr='-r %f') - stepSize = traits.Float( - 0.4, usedefault=True, desc='step size', argstr='-s %f') + 1, usedefault=True, desc="search radius", argstr="-r %f" + ) + stepSize = traits.Float(0.4, usedefault=True, desc="step size", argstr="-s %f") inputMaskFile = File( - mandatory=True, - desc='restrict growth to mask file region', - argstr='-m %s') + mandatory=True, desc="restrict growth to mask file region", argstr="-m %s" + ) maxThickness = traits.Float( - 20, - usedefault=True, - desc='maximum allowed tissue thickness', - argstr='--max %f') + 20, usedefault=True, desc="maximum allowed tissue thickness", argstr="--max %f" + ) tissueThreshold = traits.Float( - 1.05, usedefault=True, desc='tissue threshold', argstr='-t %f') + 1.05, usedefault=True, desc="tissue threshold", argstr="-t %f" + ) # output interval is not an output -- it specifies how frequently the # output surfaces are generated outputInterval = traits.Int( - 10, usedefault=True, desc='output interval', argstr='--interval %d') + 10, usedefault=True, desc="output interval", argstr="--interval %d" + ) exportPrefix = traits.Str( - desc='prefix for exporting surfaces if interval is set', - argstr='--prefix %s') + desc="prefix for exporting surfaces if interval is set", argstr="--prefix %s" + ) laplacianSmoothing = traits.Float( - 0.025, - usedefault=True, - desc='apply Laplacian smoothing', - argstr='--smooth %f') - timer = traits.Bool(desc='show timing', argstr='--timer') + 0.025, usedefault=True, desc="apply Laplacian smoothing", argstr="--smooth %f" + ) + timer = traits.Bool(desc="show timing", argstr="--timer") recomputeNormals = traits.Bool( - desc='recompute normals at each iteration', argstr='--norm') + desc="recompute normals at each iteration", argstr="--norm" + ) normalSmoother = traits.Float( - 0.2, - usedefault=True, - desc='strength of normal smoother.', - argstr='--nc %f') + 0.2, usedefault=True, desc="strength of normal smoother.", argstr="--nc %f" + ) tangentSmoother = traits.Float( - desc='strength of tangential smoother.', argstr='--tc %f') + desc="strength of tangential smoother.", argstr="--tc %f" + ) class PialmeshOutputSpec(TraitedSpec): - outputSurfaceFile = File(desc='path/name of surface file') + outputSurfaceFile = File(desc="path/name of surface file") class Pialmesh(CommandLine): @@ -844,16 +873,15 @@ class Pialmesh(CommandLine): input_spec = PialmeshInputSpec output_spec = PialmeshOutputSpec - _cmd = 'pialmesh' + _cmd = "pialmesh" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) - if name == 'outputSurfaceFile': - return getFileName(self.inputs.inputSurfaceFile, - '.pial.cortex.dfs') + if name == "outputSurfaceFile": + return getFileName(self.inputs.inputSurfaceFile, ".pial.cortex.dfs") return None @@ -862,42 +890,43 @@ def _list_outputs(self): class HemisplitInputSpec(CommandLineInputSpec): - inputSurfaceFile = File( - mandatory=True, 
desc='input surface', argstr='-i %s') + inputSurfaceFile = File(mandatory=True, desc="input surface", argstr="-i %s") inputHemisphereLabelFile = File( - mandatory=True, desc='input hemisphere label volume', argstr='-l %s') + mandatory=True, desc="input hemisphere label volume", argstr="-l %s" + ) outputLeftHemisphere = File( - desc= - 'output surface file, left hemisphere. If unspecified, output file name will be auto generated.', - argstr='--left %s', - genfile=True) + desc="output surface file, left hemisphere. If unspecified, output file name will be auto generated.", + argstr="--left %s", + genfile=True, + ) outputRightHemisphere = File( - desc= - 'output surface file, right hemisphere. If unspecified, output file name will be auto generated.', - argstr='--right %s', - genfile=True) + desc="output surface file, right hemisphere. If unspecified, output file name will be auto generated.", + argstr="--right %s", + genfile=True, + ) pialSurfaceFile = File( - desc='pial surface file -- must have same geometry as input surface', - argstr='-p %s') + desc="pial surface file -- must have same geometry as input surface", + argstr="-p %s", + ) outputLeftPialHemisphere = File( - desc= - 'output pial surface file, left hemisphere. If unspecified, output file name will be auto generated.', - argstr='-pl %s', - genfile=True) + desc="output pial surface file, left hemisphere. If unspecified, output file name will be auto generated.", + argstr="-pl %s", + genfile=True, + ) outputRightPialHemisphere = File( - desc= - 'output pial surface file, right hemisphere. If unspecified, output file name will be auto generated.', - argstr='-pr %s', - genfile=True) - verbosity = traits.Int(desc='verbosity (0 = silent)', argstr='-v %d') - timer = traits.Bool(desc='timing function', argstr='--timer') + desc="output pial surface file, right hemisphere. 
If unspecified, output file name will be auto generated.", + argstr="-pr %s", + genfile=True, + ) + verbosity = traits.Int(desc="verbosity (0 = silent)", argstr="-v %d") + timer = traits.Bool(desc="timing function", argstr="--timer") class HemisplitOutputSpec(TraitedSpec): - outputLeftHemisphere = File(desc='path/name of left hemisphere') - outputRightHemisphere = File(desc='path/name of right hemisphere') - outputLeftPialHemisphere = File(desc='path/name of left pial hemisphere') - outputRightPialHemisphere = File(desc='path/name of right pial hemisphere') + outputLeftHemisphere = File(desc="path/name of left hemisphere") + outputRightHemisphere = File(desc="path/name of right hemisphere") + outputLeftPialHemisphere = File(desc="path/name of left pial hemisphere") + outputRightPialHemisphere = File(desc="path/name of right pial hemisphere") class Hemisplit(CommandLine): @@ -922,7 +951,7 @@ class Hemisplit(CommandLine): input_spec = HemisplitInputSpec output_spec = HemisplitOutputSpec - _cmd = 'hemisplit' + _cmd = "hemisplit" def _gen_filename(self, name): inputs = self.inputs.get() @@ -930,14 +959,13 @@ def _gen_filename(self, name): return os.path.abspath(inputs[name]) fileToSuffixMap = { - 'outputLeftHemisphere': '.left.inner.cortex.dfs', - 'outputLeftPialHemisphere': '.left.pial.cortex.dfs', - 'outputRightHemisphere': '.right.inner.cortex.dfs', - 'outputRightPialHemisphere': '.right.pial.cortex.dfs' + "outputLeftHemisphere": ".left.inner.cortex.dfs", + "outputLeftPialHemisphere": ".left.pial.cortex.dfs", + "outputRightHemisphere": ".right.inner.cortex.dfs", + "outputRightPialHemisphere": ".right.pial.cortex.dfs", } if name in fileToSuffixMap: - return getFileName(self.inputs.inputSurfaceFile, - fileToSuffixMap[name]) + return getFileName(self.inputs.inputSurfaceFile, fileToSuffixMap[name]) return None @@ -946,42 +974,48 @@ def _list_outputs(self): class SkullfinderInputSpec(CommandLineInputSpec): - inputMRIFile = File(mandatory=True, desc='input file', argstr='-i %s') + inputMRIFile = File(mandatory=True, desc="input file", argstr="-i %s") inputMaskFile = File( mandatory=True, - desc='A brain mask file, 8-bit image (0=non-brain, 255=brain)', - argstr='-m %s') + desc="A brain mask file, 8-bit image (0=non-brain, 255=brain)", + argstr="-m %s", + ) outputLabelFile = File( - desc= - 'output multi-colored label volume segmenting brain, scalp, inner skull & outer skull ' - 'If unspecified, output file name will be auto generated.', - argstr='-o %s', - genfile=True) - verbosity = traits.Int(desc='verbosity', argstr='-v %d') - lowerThreshold = traits.Int( - desc='Lower threshold for segmentation', argstr='-l %d') - upperThreshold = traits.Int( - desc='Upper threshold for segmentation', argstr='-u %d') + desc="output multi-colored label volume segmenting brain, scalp, inner skull & outer skull " + "If unspecified, output file name will be auto generated.", + argstr="-o %s", + genfile=True, + ) + verbosity = traits.Int(desc="verbosity", argstr="-v %d") + lowerThreshold = traits.Int(desc="Lower threshold for segmentation", argstr="-l %d") + upperThreshold = traits.Int(desc="Upper threshold for segmentation", argstr="-u %d") surfaceFilePrefix = traits.Str( - desc='if specified, generate surface files for brain, skull, and scalp', - argstr='-s %s') + desc="if specified, generate surface files for brain, skull, and scalp", + argstr="-s %s", + ) bgLabelValue = traits.Int( - desc='background label value (0-255)', argstr='--bglabel %d') + desc="background label value (0-255)", argstr="--bglabel %d" + ) 
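# --- Illustrative note (not part of the patch) -------------------------------
# Every _gen_filename in this module maps an input file onto an interface-
# specific suffix via the module-level getFileName helper (defined elsewhere
# in brainsuite.py, not shown in this hunk). The stand-in below only mimics
# that convention -- basename up to the first dot, plus the suffix -- and is
# not the real implementation:
import os


def _auto_name(input_name, suffix):
    base = os.path.basename(input_name).split(".")[0]
    return os.path.abspath(base + suffix)


print(_auto_name("/data/sub-01.nii.gz", ".left.inner.cortex.dfs"))
# -> <cwd>/sub-01.left.inner.cortex.dfs
# -----------------------------------------------------------------------------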
scalpLabelValue = traits.Int( - desc='scalp label value (0-255)', argstr='--scalplabel %d') + desc="scalp label value (0-255)", argstr="--scalplabel %d" + ) skullLabelValue = traits.Int( - desc='skull label value (0-255)', argstr='--skulllabel %d') + desc="skull label value (0-255)", argstr="--skulllabel %d" + ) spaceLabelValue = traits.Int( - desc='space label value (0-255)', argstr='--spacelabel %d') + desc="space label value (0-255)", argstr="--spacelabel %d" + ) brainLabelValue = traits.Int( - desc='brain label value (0-255)', argstr='--brainlabel %d') + desc="brain label value (0-255)", argstr="--brainlabel %d" + ) performFinalOpening = traits.Bool( - desc='perform a final opening operation on the scalp mask', - argstr='--finalOpening') + desc="perform a final opening operation on the scalp mask", + argstr="--finalOpening", + ) class SkullfinderOutputSpec(TraitedSpec): - outputLabelFile = File(desc='path/name of label file') + outputLabelFile = File(desc="path/name of label file") class Skullfinder(CommandLine): @@ -999,18 +1033,18 @@ class Skullfinder(CommandLine): >>> results = skullfinder.run() #doctest: +SKIP """ + input_spec = SkullfinderInputSpec output_spec = SkullfinderOutputSpec - _cmd = 'skullfinder' + _cmd = "skullfinder" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) - if name == 'outputLabelFile': - return getFileName(self.inputs.inputMRIFile, - '.skullfinder.label.nii.gz') + if name == "outputLabelFile": + return getFileName(self.inputs.inputMRIFile, ".skullfinder.label.nii.gz") return None @@ -1020,116 +1054,117 @@ def _list_outputs(self): class SVRegInputSpec(CommandLineInputSpec): subjectFilePrefix = traits.Str( - argstr='\'%s\'', + argstr="'%s'", mandatory=True, position=0, - desc= - 'Absolute path and filename prefix of the subjects output from BrainSuite ' - 'Cortical Surface Extraction Sequence') + desc="Absolute path and filename prefix of the subjects output from BrainSuite " + "Cortical Surface Extraction Sequence", + ) dataSinkDelay = traits.List( traits.Str, - argstr='%s', - desc= - 'Connect datasink out_file to dataSinkDelay to delay execution of SVReg ' - 'until dataSink has finished sinking CSE outputs.' - 'For use with parallel processing workflows including Brainsuites Cortical ' - 'Surface Extraction sequence (SVReg requires certain files from Brainsuite ' - 'CSE, which must all be in the pathway specified by subjectFilePrefix. see ' - 'http://brainsuite.org/processing/svreg/usage/ for list of required inputs ' + argstr="%s", + desc="Connect datasink out_file to dataSinkDelay to delay execution of SVReg " + "until dataSink has finished sinking CSE outputs." + "For use with parallel processing workflows including Brainsuites Cortical " + "Surface Extraction sequence (SVReg requires certain files from Brainsuite " + "CSE, which must all be in the pathway specified by subjectFilePrefix. see " + "http://brainsuite.org/processing/svreg/usage/ for list of required inputs ", ) atlasFilePrefix = traits.Str( position=1, - argstr='\'%s\'', - desc= - 'Optional: Absolute Path and filename prefix of atlas files and labels to which ' - 'the subject will be registered. If unspecified, SVReg' - 'will use its own included atlas files') + argstr="'%s'", + desc="Optional: Absolute Path and filename prefix of atlas files and labels to which " + "the subject will be registered. 
If unspecified, SVReg" + "will use its own included atlas files", + ) iterations = traits.Int( - argstr='\'-H %d\'', - desc='Assigns a number of iterations in the intensity registration step.' - 'if unspecified, performs 100 iterations') + argstr="'-H %d'", + desc="Assigns a number of iterations in the intensity registration step." + "if unspecified, performs 100 iterations", + ) refineOutputs = traits.Bool( - argstr='\'-r\'', - desc='Refine outputs at the expense of more processing time.') + argstr="'-r'", desc="Refine outputs at the expense of more processing time." + ) skipToVolumeReg = traits.Bool( - argstr='\'-s\'', - desc= - 'If surface registration was already performed at an earlier time and the ' - 'user would not like to redo this step, then this flag may be used to skip ' - 'ahead to the volumetric registration. Necessary input files will need to ' - 'be present in the input directory called by the command.') + argstr="'-s'", + desc="If surface registration was already performed at an earlier time and the " + "user would not like to redo this step, then this flag may be used to skip " + "ahead to the volumetric registration. Necessary input files will need to " + "be present in the input directory called by the command.", + ) skipToIntensityReg = traits.Bool( - argstr='\'-p\'', - desc= - 'If the p-harmonic volumetric registration was already performed at an ' - 'earlier time and the user would not like to redo this step, then this ' - 'flag may be used to skip ahead to the intensity registration and ' - 'label transfer step.') + argstr="'-p'", + desc="If the p-harmonic volumetric registration was already performed at an " + "earlier time and the user would not like to redo this step, then this " + "flag may be used to skip ahead to the intensity registration and " + "label transfer step.", + ) useManualMaskFile = traits.Bool( - argstr='\'-cbm\'', - desc= - 'Can call a manually edited cerebrum mask to limit boundaries. Will ' - 'use file: subbasename.cerebrum.mask.nii.gz Make sure to correctly ' - 'replace your manually edited mask file in your input folder with the ' - 'correct subbasename.') + argstr="'-cbm'", + desc="Can call a manually edited cerebrum mask to limit boundaries. Will " + "use file: subbasename.cerebrum.mask.nii.gz Make sure to correctly " + "replace your manually edited mask file in your input folder with the " + "correct subbasename.", + ) curveMatchingInstructions = traits.Str( - argstr='\'-cur %s\'', - desc= - 'Used to take control of the curve matching process between the atlas ' - 'and subject. One can specify the name of the .dfc file and ' - 'the sulcal numbers <#sul> to be used as constraints. ' - 'example: curveMatchingInstructions = "subbasename.right.dfc 1 2 20"') + argstr="'-cur %s'", + desc="Used to take control of the curve matching process between the atlas " + "and subject. One can specify the name of the .dfc file and " + "the sulcal numbers <#sul> to be used as constraints. " + 'example: curveMatchingInstructions = "subbasename.right.dfc 1 2 20"', + ) useCerebrumMask = traits.Bool( - argstr='\'-C\'', - desc= - 'The cerebrum mask will be used for ' - 'masking the final labels instead of the default pial surface mask. ' - 'Every voxel will be labeled within the cerebrum mask regardless of ' - 'the boundaries of the pial surface.') + argstr="'-C'", + desc="The cerebrum mask will be used for " + "masking the final labels instead of the default pial surface mask. 
" + "Every voxel will be labeled within the cerebrum mask regardless of " + "the boundaries of the pial surface.", + ) pialSurfaceMaskDilation = traits.Int( - argstr='\'-D %d\'', - desc= - 'Cortical volume labels found in file output subbasename.svreg.label.nii.gz ' - 'find its boundaries by using the pial surface then dilating by 1 voxel. ' - 'Use this flag in order to control the number of pial surface mask dilation. ' - '(ie. -D 0 will assign no voxel dilation)') + argstr="'-D %d'", + desc="Cortical volume labels found in file output subbasename.svreg.label.nii.gz " + "find its boundaries by using the pial surface then dilating by 1 voxel. " + "Use this flag in order to control the number of pial surface mask dilation. " + "(ie. -D 0 will assign no voxel dilation)", + ) keepIntermediates = traits.Bool( - argstr='\'-k\'', - desc='Keep the intermediate files after the svreg sequence is complete.' + argstr="'-k'", + desc="Keep the intermediate files after the svreg sequence is complete.", ) - _XOR_verbosity = ('verbosity0', 'verbosity1', 'verbosity2') + _XOR_verbosity = ("verbosity0", "verbosity1", "verbosity2") verbosity0 = traits.Bool( - argstr='\'-v0\'', - xor=_XOR_verbosity, - desc='no messages will be reported') + argstr="'-v0'", xor=_XOR_verbosity, desc="no messages will be reported" + ) verbosity1 = traits.Bool( - argstr='\'-v1\'', + argstr="'-v1'", xor=_XOR_verbosity, - desc= - 'messages will be reported but not the iteration-wise detailed messages' + desc="messages will be reported but not the iteration-wise detailed messages", ) verbosity2 = traits.Bool( - argstr='\'v2\'', + argstr="'v2'", xor=_XOR_verbosity, - desc='all the messages, including per-iteration, will be displayed') + desc="all the messages, including per-iteration, will be displayed", + ) shortMessages = traits.Bool( - argstr='\'-gui\'', desc='Short messages instead of detailed messages') + argstr="'-gui'", desc="Short messages instead of detailed messages" + ) displayModuleName = traits.Bool( - argstr='\'-m\'', desc='Module name will be displayed in the messages') + argstr="'-m'", desc="Module name will be displayed in the messages" + ) displayTimestamps = traits.Bool( - argstr='\'-t\'', desc='Timestamps will be displayed in the messages') + argstr="'-t'", desc="Timestamps will be displayed in the messages" + ) skipVolumetricProcessing = traits.Bool( - argstr='\'-S\'', - desc= - 'Only surface registration and labeling will be performed. Volumetric ' - 'processing will be skipped.') + argstr="'-S'", + desc="Only surface registration and labeling will be performed. 
Volumetric " + "processing will be skipped.", + ) useMultiThreading = traits.Bool( - argstr='\'-P\'', - desc= - 'If multiple CPUs are present on the system, the code will try to use ' - 'multithreading to make the execution fast.') - useSingleThreading = traits.Bool( - argstr='\'-U\'', desc='Use single threaded mode.') + argstr="'-P'", + desc="If multiple CPUs are present on the system, the code will try to use " + "multithreading to make the execution fast.", + ) + useSingleThreading = traits.Bool(argstr="'-U'", desc="Use single threaded mode.") class SVReg(CommandLine): @@ -1159,64 +1194,66 @@ class SVReg(CommandLine): """ input_spec = SVRegInputSpec - _cmd = 'svreg.sh' + _cmd = "svreg.sh" def _format_arg(self, name, spec, value): - if name == 'subjectFilePrefix' or name == 'atlasFilePrefix' or name == 'curveMatchingInstructions': + if ( + name == "subjectFilePrefix" + or name == "atlasFilePrefix" + or name == "curveMatchingInstructions" + ): return spec.argstr % os.path.expanduser(value) - if name == 'dataSinkDelay': - return spec.argstr % '' - return super(SVReg, self)._format_arg(name, spec, value) + if name == "dataSinkDelay": + return spec.argstr % "" + return super()._format_arg(name, spec, value) class BDPInputSpec(CommandLineInputSpec): bfcFile = File( - argstr='%s', + argstr="%s", mandatory=True, position=0, - xor=['noStructuralRegistration'], - desc= - 'Specify absolute path to file produced by bfc. By default, bfc produces the file in ' - 'the format: prefix.bfc.nii.gz') + xor=["noStructuralRegistration"], + desc="Specify absolute path to file produced by bfc. By default, bfc produces the file in " + "the format: prefix.bfc.nii.gz", + ) noStructuralRegistration = traits.Bool( - argstr='--no-structural-registration', + argstr="--no-structural-registration", mandatory=True, position=0, - xor=['bfcFile'], - desc= - 'Allows BDP to work without any structural input. This can useful when ' - 'one is only interested in diffusion modelling part of BDP. With this ' - 'flag only fieldmap-based distortion correction is supported. ' - 'outPrefix can be used to specify fileprefix of the output ' - 'filenames. Change dwiMask to define region of interest ' - 'for diffusion modelling.') + xor=["bfcFile"], + desc="Allows BDP to work without any structural input. This can useful when " + "one is only interested in diffusion modelling part of BDP. With this " + "flag only fieldmap-based distortion correction is supported. " + "outPrefix can be used to specify fileprefix of the output " + "filenames. Change dwiMask to define region of interest " + "for diffusion modelling.", + ) inputDiffusionData = File( - argstr='--nii %s', + argstr="--nii %s", mandatory=True, position=-2, - desc= - 'Specifies the absolute path and filename of the input diffusion data in 4D NIfTI-1 ' - 'format. The flag must be followed by the filename. Only NIfTI-1 files ' - 'with extension .nii or .nii.gz are supported. Furthermore, either ' - 'bMatrixFile, or a combination of both bValueFile and diffusionGradientFile ' - 'must be used to provide the necessary b-matrices/b-values and gradient vectors. ' + desc="Specifies the absolute path and filename of the input diffusion data in 4D NIfTI-1 " + "format. The flag must be followed by the filename. Only NIfTI-1 files " + "with extension .nii or .nii.gz are supported. Furthermore, either " + "bMatrixFile, or a combination of both bValueFile and diffusionGradientFile " + "must be used to provide the necessary b-matrices/b-values and gradient vectors. 
", ) bMatrixFile = File( - argstr='--bmat %s', + argstr="--bmat %s", mandatory=True, - xor=['BVecBValPair'], + xor=["BVecBValPair"], position=-1, - desc= - 'Specifies the absolute path and filename of the file containing b-matrices for ' - 'diffusion-weighted scans. The flag must be followed by the filename. ' - 'This file must be a plain text file containing 3x3 matrices for each ' - 'diffusion encoding direction. It should contain zero matrices ' + desc="Specifies the absolute path and filename of the file containing b-matrices for " + "diffusion-weighted scans. The flag must be followed by the filename. " + "This file must be a plain text file containing 3x3 matrices for each " + "diffusion encoding direction. It should contain zero matrices " 'corresponding to b=0 images. This file usually has ".bmat" as its ' - 'extension, and can be used to provide BDP with the more-accurate ' - 'b-matrices as saved by some proprietary scanners. The b-matrices ' - 'specified by the file must be in the voxel coordinates of the input ' - 'diffusion weighted image (NIfTI file). In case b-matrices are not known/calculated, ' - 'bvec and .bval files can be used instead (see diffusionGradientFile and bValueFile). ' + "extension, and can be used to provide BDP with the more-accurate " + "b-matrices as saved by some proprietary scanners. The b-matrices " + "specified by the file must be in the voxel coordinates of the input " + "diffusion weighted image (NIfTI file). In case b-matrices are not known/calculated, " + "bvec and .bval files can be used instead (see diffusionGradientFile and bValueFile). ", ) BVecBValPair = traits.List( traits.Str, @@ -1224,452 +1261,465 @@ class BDPInputSpec(CommandLineInputSpec): maxlen=2, mandatory=True, position=-1, - xor=['bMatrixFile'], - argstr='--bvec %s --bval %s', - desc= - 'Must input a list containing first the BVector file, then the BValue file (both must be absolute paths)\n' - 'Example: bdp.inputs.BVecBValPair = [\'/directory/subdir/prefix.dwi.bvec\', \'/directory/subdir/prefix.dwi.bval\'] ' - 'The first item in the list specifies the filename of the file containing b-values for the ' - 'diffusion scan. The b-value file must be a plain-text file and usually has an ' - 'extension of .bval\n' - 'The second item in the list specifies the filename of the file containing the diffusion gradient ' - 'directions (specified in the voxel coordinates of the input ' - 'diffusion-weighted image)The b-vectors file must be a plain text file and ' - 'usually has an extension of .bvec ') + xor=["bMatrixFile"], + argstr="--bvec %s --bval %s", + desc="Must input a list containing first the BVector file, then the BValue file (both must be absolute paths)\n" + "Example: bdp.inputs.BVecBValPair = ['/directory/subdir/prefix.dwi.bvec', '/directory/subdir/prefix.dwi.bval'] " + "The first item in the list specifies the filename of the file containing b-values for the " + "diffusion scan. The b-value file must be a plain-text file and usually has an " + "extension of .bval\n" + "The second item in the list specifies the filename of the file containing the diffusion gradient " + "directions (specified in the voxel coordinates of the input " + "diffusion-weighted image)The b-vectors file must be a plain text file and " + "usually has an extension of .bvec ", + ) dataSinkDelay = traits.List( traits.Str, - argstr='%s', - desc= - 'For use in parallel processing workflows including Brainsuite Cortical ' - 'Surface Extraction sequence. 
Connect datasink out_file to dataSinkDelay ' - 'to delay execution of BDP until dataSink has finished sinking outputs. ' - 'In particular, BDP may be run after BFC has finished. For more information ' - 'see http://brainsuite.org/processing/diffusion/pipeline/') + argstr="%s", + desc="For use in parallel processing workflows including Brainsuite Cortical " + "Surface Extraction sequence. Connect datasink out_file to dataSinkDelay " + "to delay execution of BDP until dataSink has finished sinking outputs. " + "In particular, BDP may be run after BFC has finished. For more information " + "see http://brainsuite.org/processing/diffusion/pipeline/", + ) phaseEncodingDirection = traits.Enum( - 'x', - 'x-', - 'y', - 'y-', - 'z', - 'z-', - argstr='--dir=%s', - desc= - 'Specifies the phase-encoding direction of the EPI (diffusion) images. ' - 'It is same as the dominant direction of distortion in the images. This ' - 'information is used to constrain the distortion correction along the ' - 'specified direction. Directions are represented by any one of x, x-, y, ' + "x", + "x-", + "y", + "y-", + "z", + "z-", + argstr="--dir=%s", + desc="Specifies the phase-encoding direction of the EPI (diffusion) images. " + "It is same as the dominant direction of distortion in the images. This " + "information is used to constrain the distortion correction along the " + "specified direction. Directions are represented by any one of x, x-, y, " 'y-, z or z-. "x" direction increases towards the right side of the ' 'subject, while "x-" increases towards the left side of the subject. ' 'Similarly, "y" and "y-" are along the anterior-posterior direction of ' 'the subject, and "z" & "z-" are along the inferior-superior direction. ' 'When this flag is not used, BDP uses "y" as the default phase-encoding ' - 'direction. ') + "direction. ", + ) echoSpacing = traits.Float( - argstr='--echo-spacing=%f', - desc= - 'Sets the echo spacing to t seconds, which is used for fieldmap-based ' - 'distortion correction. This flag is required when using fieldmapCorrection' + argstr="--echo-spacing=%f", + desc="Sets the echo spacing to t seconds, which is used for fieldmap-based " + "distortion correction. This flag is required when using fieldmapCorrection", ) bValRatioThreshold = traits.Float( - argstr='--bval-ratio-threshold %f', - desc= - 'Sets a threshold which is used to determine b=0 images. When there are ' - 'no diffusion weighted image with b-value of zero, then BDP tries to use ' - 'diffusion weighted images with a low b-value in place of b=0 image. The ' - 'diffusion images with minimum b-value is used as b=0 image only if the ' - 'ratio of the maximum and minimum b-value is more than the specified ' - 'threshold. A lower value of threshold will allow diffusion images with ' - 'higher b-value to be used as b=0 image. The default value of this ' - 'threshold is set to 45, if this trait is not set. ') + argstr="--bval-ratio-threshold %f", + desc="Sets a threshold which is used to determine b=0 images. When there are " + "no diffusion weighted image with b-value of zero, then BDP tries to use " + "diffusion weighted images with a low b-value in place of b=0 image. The " + "diffusion images with minimum b-value is used as b=0 image only if the " + "ratio of the maximum and minimum b-value is more than the specified " + "threshold. A lower value of threshold will allow diffusion images with " + "higher b-value to be used as b=0 image. The default value of this " + "threshold is set to 45, if this trait is not set. 
", + ) estimateTensors = traits.Bool( - argstr='--tensors', - desc= - 'Estimates diffusion tensors using a weighted log-linear estimation and ' - 'saves derived diffusion tensor parameters (FA, MD, axial, radial, L2, ' - 'L3). This is the default behavior if no diffusion modeling flags are ' - 'specified. The estimated diffusion tensors can be visualized by loading ' - 'the saved *.eig.nii.gz file in BrainSuite. BDP reports diffusivity (MD, ' - 'axial, radial, L2 and L3) in a unit which is reciprocal inverse of the ' - 'unit of input b-value. ') + argstr="--tensors", + desc="Estimates diffusion tensors using a weighted log-linear estimation and " + "saves derived diffusion tensor parameters (FA, MD, axial, radial, L2, " + "L3). This is the default behavior if no diffusion modeling flags are " + "specified. The estimated diffusion tensors can be visualized by loading " + "the saved ``*.eig.nii.gz`` file in BrainSuite. BDP reports diffusivity (MD, " + "axial, radial, L2 and L3) in a unit which is reciprocal inverse of the " + "unit of input b-value. ", + ) estimateODF_FRACT = traits.Bool( - argstr='--FRACT', - desc= - 'Estimates ODFs using the Funk-Radon and Cosine Transformation (FRACT). ' + argstr="--FRACT", + desc="Estimates ODFs using the Funk-Radon and Cosine Transformation (FRACT). " 'The outputs are saved in a separate directory with name "FRACT" and the ' - 'ODFs can be visualized by loading the saved ".odf" file in BrainSuite. ' + 'ODFs can be visualized by loading the saved ".odf" file in BrainSuite. ', ) estimateODF_FRT = traits.Bool( - argstr='--FRT', - desc= - 'Estimates ODFs using Funk-Radon Transformation (FRT). The coefficient ' + argstr="--FRT", + desc="Estimates ODFs using Funk-Radon Transformation (FRT). The coefficient " 'maps for ODFs are saved in a separate directory with name "FRT" and the ' 'ODFs can be visualized by loading the saved ".odf" file in BrainSuite. ' - 'The derived generalized-FA (GFA) maps are also saved in the output ' - 'directory. ') + "The derived generalized-FA (GFA) maps are also saved in the output " + "directory. ", + ) estimateODF_3DShore = traits.Float( - argstr='--3dshore --diffusion_time_ms %f', - desc='Estimates ODFs using 3Dshore. Pass in diffusion time, in ms') + argstr="--3dshore --diffusion_time_ms %f", + desc="Estimates ODFs using 3Dshore. Pass in diffusion time, in ms", + ) odfLambta = traits.Bool( - argstr='--odf-lambda ', - desc= - 'Sets the regularization parameter, lambda, of the Laplace-Beltrami ' - 'operator while estimating ODFs. The default value is set to 0.006 . This ' - 'can be used to set the appropriate regularization for the input ' - 'diffusion data. ') + argstr="--odf-lambda ", + desc="Sets the regularization parameter, lambda, of the Laplace-Beltrami " + "operator while estimating ODFs. The default value is set to 0.006 . This " + "can be used to set the appropriate regularization for the input " + "diffusion data. ", + ) t1Mask = File( - argstr='--t1-mask %s', - desc= - 'Specifies the filename of the brain-mask file for input T1-weighted ' - 'image. This mask can be same as the brain mask generated during ' - 'BrainSuite extraction sequence. For best results, the mask should not ' - 'include any extra-meningial tissues from T1-weighted image. The mask ' - 'must be in the same coordinates as input T1-weighted image (i.e. should ' - 'overlay correctly with input .bfc.nii.gz file in ' - 'BrainSuite). This mask is used for co-registration and defining brain ' - 'boundary for statistics computation. 
The mask can be generated and/or ' - 'edited in BrainSuite. In case outputDiffusionCoordinates is also ' - 'used, this mask is first transformed to diffusion coordinate and the ' - 'transformed mask is used for defining brain boundary in diffusion ' - 'coordinates. When t1Mask is not set, BDP will try to use ' - 'fileprefix>.mask.nii.gz as brain-mask. If .mask.nii.gz is ' - 'not found, then BDP will use the input .bfc.nii.gz itself as ' - 'mask (i.e. all non-zero voxels in .bfc.nii.gz is assumed to ' - 'constitute brain mask). ') + argstr="--t1-mask %s", + desc="Specifies the filename of the brain-mask file for the input T1-weighted " + "image. This mask can be the same as the brain mask generated during " + "the BrainSuite extraction sequence. For best results, the mask should not " + "include any extra-meningeal tissues from the T1-weighted image. The mask " + "must be in the same coordinates as the input T1-weighted image (i.e. should " + "overlay correctly with the input .bfc.nii.gz file in " + "BrainSuite). This mask is used for co-registration and defining the brain " + "boundary for statistics computation. The mask can be generated and/or " + "edited in BrainSuite. In case outputDiffusionCoordinates is also " + "used, this mask is first transformed to diffusion coordinate and the " + "transformed mask is used for defining brain boundary in diffusion " + "coordinates. When t1Mask is not set, BDP will try to use " + "<fileprefix>.mask.nii.gz as the brain-mask. If <fileprefix>.mask.nii.gz is " + "not found, then BDP will use the input .bfc.nii.gz itself as the " + "mask (i.e. all non-zero voxels in the .bfc.nii.gz are assumed to " + "constitute the brain mask). ", + ) dwiMask = File( - argstr='--dwi-mask %s', - desc= - 'Specifies the filename of the brain-mask file for diffusion data. This ' - 'mask is used only for co-registration purposes and can affect overall ' - 'quality of co-registration (see t1Mask for definition of brain mask ' - 'for statistics computation). The mask must be a 3D volume and should be ' - 'in the same coordinates as input Diffusion file/data (i.e. should ' - 'overlay correctly with input diffusion data in BrainSuite). For best ' - 'results, the mask should include only brain voxels (CSF voxels around ' - 'brain is also acceptable). When this flag is not used, BDP will generate ' - 'a pseudo mask using first b=0 image volume and would save it as ' - 'fileprefix>.dwi.RSA.mask.nii.gz. In case co-registration is not ' - 'accurate with automatically generated pseudo mask, BDP should be re-run ' - 'with a refined diffusion mask. The mask can be generated and/or edited ' - 'in BrainSuite. ') + argstr="--dwi-mask %s", + desc="Specifies the filename of the brain-mask file for diffusion data. This " + "mask is used only for co-registration purposes and can affect overall " + "quality of co-registration (see t1Mask for definition of the brain mask " + "for statistics computation). The mask must be a 3D volume and should be " + "in the same coordinates as the input diffusion file/data (i.e. should " + "overlay correctly with the input diffusion data in BrainSuite). For best " + "results, the mask should include only brain voxels (CSF voxels around " + "the brain are also acceptable). When this flag is not used, BDP will generate " + "a pseudo mask using the first b=0 image volume and save it as " + "<fileprefix>.dwi.RSA.mask.nii.gz. In case co-registration is not " + "accurate with the automatically generated pseudo mask, BDP should be re-run " + "with a refined diffusion mask. The mask can be generated and/or edited " + "in BrainSuite. 
", + ) rigidRegMeasure = traits.Enum( - 'MI', - 'INVERSION', - 'BDP', - argstr='--rigid-reg-measure %s', - desc='Defines the similarity measure to be used for rigid registration. ' + "MI", + "INVERSION", + "BDP", + argstr="--rigid-reg-measure %s", + desc="Defines the similarity measure to be used for rigid registration. " 'Possible measures are "MI", "INVERSION" and "BDP". MI measure uses ' - 'normalized mutual information based cost function. INVERSION measure ' - 'uses simpler cost function based on sum of squared difference by ' - 'exploiting the approximate inverse-contrast relationship in T1- and ' - 'T2-weighted images. BDP measure combines MI and INVERSION. It starts ' - 'with INVERSION measure and refines the result with MI measure. BDP is ' - 'the default measure when this trait is not set. ') + "normalized mutual information based cost function. INVERSION measure " + "uses simpler cost function based on sum of squared difference by " + "exploiting the approximate inverse-contrast relationship in T1- and " + "T2-weighted images. BDP measure combines MI and INVERSION. It starts " + "with INVERSION measure and refines the result with MI measure. BDP is " + "the default measure when this trait is not set. ", + ) dcorrRegMeasure = traits.Enum( - 'MI', - 'INVERSION-EPI', - 'INVERSION-T1', - 'INVERSION-BOTH', - 'BDP', - argstr='--dcorr-reg-method %s', - desc='Defines the method for registration-based distortion correction. ' + "MI", + "INVERSION-EPI", + "INVERSION-T1", + "INVERSION-BOTH", + "BDP", + argstr="--dcorr-reg-method %s", + desc="Defines the method for registration-based distortion correction. " 'Possible methods are "MI", "INVERSION-EPI", "INVERSION-T1", ' 'INVERSION-BOTH", and "BDP". MI method uses normalized mutual ' - 'information based cost-function while estimating the distortion field. ' - 'INVERSION-based method uses simpler cost function based on sum of ' - 'squared difference by exploiting the known approximate contrast ' - 'relationship in T1- and T2-weighted images. T2-weighted EPI is inverted ' - 'when INVERSION-EPI is used; T1-image is inverted when INVERSION-T1 is ' - 'used; and both are inverted when INVERSION-BOTH is used. BDP method add ' - 'the MI-based refinement after the correction using INVERSION-BOTH ' - 'method. BDP is the default method when this trait is not set. ') + "information based cost-function while estimating the distortion field. " + "INVERSION-based method uses simpler cost function based on sum of " + "squared difference by exploiting the known approximate contrast " + "relationship in T1- and T2-weighted images. T2-weighted EPI is inverted " + "when INVERSION-EPI is used; T1-image is inverted when INVERSION-T1 is " + "used; and both are inverted when INVERSION-BOTH is used. BDP method add " + "the MI-based refinement after the correction using INVERSION-BOTH " + "method. BDP is the default method when this trait is not set. ", + ) dcorrWeight = traits.Float( - argstr='--dcorr-regularization-wt %f', - desc= - 'Sets the (scalar) weighting parameter for regularization penalty in ' - 'registration-based distortion correction. Set this trait to a single, non-negative ' - 'number which specifies the weight. A large regularization weight encourages ' - 'smoother distortion field at the cost of low measure of image similarity ' - 'after distortion correction. On the other hand, a smaller regularization ' - 'weight can result into higher measure of image similarity but with ' - 'unrealistic and unsmooth distortion field. 
A weight of 0.5 would reduce ' - 'the penalty to half of the default regularization penalty (By default, this weight ' - 'is set to 1.0). Similarly, a weight of 2.0 ' - 'would increase the penalty to twice of the default penalty. ') + argstr="--dcorr-regularization-wt %f", + desc="Sets the (scalar) weighting parameter for regularization penalty in " + "registration-based distortion correction. Set this trait to a single, non-negative " + "number which specifies the weight. A large regularization weight encourages " + "a smoother distortion field at the cost of a lower measure of image similarity " + "after distortion correction. On the other hand, a smaller regularization " + "weight can result in a higher measure of image similarity but with " + "an unrealistic and unsmooth distortion field. A weight of 0.5 would reduce " + "the penalty to half of the default regularization penalty (by default, this weight " + "is set to 1.0). Similarly, a weight of 2.0 " + "would increase the penalty to twice the default penalty. ", + ) skipDistortionCorr = traits.Bool( - argstr='--no-distortion-correction', - desc='Skips distortion correction completely and performs only a rigid ' - 'registration of diffusion and T1-weighted image. This can be useful when ' - 'the input diffusion images do not have any distortion or they have been ' - 'corrected for distortion. ') + argstr="--no-distortion-correction", + desc="Skips distortion correction completely and performs only a rigid " + "registration of the diffusion and T1-weighted images. This can be useful when " + "the input diffusion images do not have any distortion or they have been " + "corrected for distortion. ", + ) skipNonuniformityCorr = traits.Bool( - argstr='--no-nonuniformity-correction', - desc='Skips intensity non-uniformity correction in b=0 image for ' - 'registration-based distortion correction. The intensity non-uniformity ' - 'correction does not affect any diffusion modeling. ') + argstr="--no-nonuniformity-correction", + desc="Skips intensity non-uniformity correction in the b=0 image for " + "registration-based distortion correction. The intensity non-uniformity " + "correction does not affect any diffusion modeling. ", + ) skipIntensityCorr = traits.Bool( - argstr='--no-intensity-correction', - xor=['fieldmapCorrectionMethod'], - desc= - 'Disables intensity correction when performing distortion correction. ' - 'Intensity correction can change the noise distribution in the corrected ' - 'image, but it does not affect estimated diffusion parameters like FA, ' - 'etc. ') + argstr="--no-intensity-correction", + xor=["fieldmapCorrectionMethod"], + desc="Disables intensity correction when performing distortion correction. " + "Intensity correction can change the noise distribution in the corrected " + "image, but it does not affect estimated diffusion parameters like FA, " + "etc. ", + ) fieldmapCorrection = File( - argstr='--fieldmap-correction %s', - requires=['echoSpacing'], - desc= - 'Use an acquired fieldmap for distortion correction. The fieldmap must ' - 'have units of radians/second. Specify the filename of the fieldmap file. ' - 'The field of view (FOV) of the fieldmap scan must cover the FOV of the diffusion ' - 'scan. BDP will try to check the overlap of the FOV of the two scans and ' + argstr="--fieldmap-correction %s", + requires=["echoSpacing"], + desc="Use an acquired fieldmap for distortion correction. The fieldmap must " + "have units of radians/second. Specify the filename of the fieldmap file. 
" + "The field of view (FOV) of the fieldmap scan must cover the FOV of the diffusion " + "scan. BDP will try to check the overlap of the FOV of the two scans and " 'will issue a warning/error if the diffusion scan"s FOV is not fully ' 'covered by the fieldmap"s FOV. BDP uses all of the information saved in ' - 'the NIfTI header to compute the FOV. If you get this error and think ' - 'that it is incorrect, then it can be suppressed using the flag ' - 'ignore-fieldmap-FOV. Neither the image matrix size nor the imaging ' - 'grid resolution of the fieldmap needs to be the same as that of the ' - 'diffusion scan, but the fieldmap must be pre-registred to the diffusion ' - 'scan. BDP does NOT align the fieldmap to the diffusion scan, nor does it ' - 'check the alignment of the fieldmap and diffusion scans. Only NIfTI ' - 'files with extension of .nii or .nii.gz are supported. Fieldmap-based ' - 'distortion correction also requires the echoSpacing. Also ' - 'fieldmapCorrectionMethod allows you to define method for ' - 'distortion correction. least squares is the default method. ') + "the NIfTI header to compute the FOV. If you get this error and think " + "that it is incorrect, then it can be suppressed using the flag " + "ignore-fieldmap-FOV. Neither the image matrix size nor the imaging " + "grid resolution of the fieldmap needs to be the same as that of the " + "diffusion scan, but the fieldmap must be pre-registred to the diffusion " + "scan. BDP does NOT align the fieldmap to the diffusion scan, nor does it " + "check the alignment of the fieldmap and diffusion scans. Only NIfTI " + "files with extension of .nii or .nii.gz are supported. Fieldmap-based " + "distortion correction also requires the echoSpacing. Also " + "fieldmapCorrectionMethod allows you to define method for " + "distortion correction. least squares is the default method. ", + ) fieldmapCorrectionMethod = traits.Enum( - 'pixelshift', - 'leastsq', - xor=['skipIntensityCorr'], - argstr='--fieldmap-correction-method %s', - desc='Defines the distortion correction method while using fieldmap. ' + "pixelshift", + "leastsq", + xor=["skipIntensityCorr"], + argstr="--fieldmap-correction-method %s", + desc="Defines the distortion correction method while using fieldmap. " 'Possible methods are "pixelshift" and "leastsq". leastsq is the default ' - 'method when this flag is not used. Pixel-shift (pixelshift) method uses ' - 'image interpolation to un-distort the distorted diffusion images. Least ' - 'squares (leastsq) method uses a physical model of distortion which is ' - 'more accurate (and more computationally expensive) than pixel-shift ' - 'method.') + "method when this flag is not used. Pixel-shift (pixelshift) method uses " + "image interpolation to un-distort the distorted diffusion images. Least " + "squares (leastsq) method uses a physical model of distortion which is " + "more accurate (and more computationally expensive) than pixel-shift " + "method.", + ) ignoreFieldmapFOV = traits.Bool( - argstr='--ignore-fieldmap-fov', - desc= - 'Supresses the error generated by an insufficient field of view of the ' - 'input fieldmap and continues with the processing. It is useful only when ' - 'used with fieldmap-based distortion correction. See ' - 'fieldmap-correction for a detailed explanation. ') + argstr="--ignore-fieldmap-fov", + desc="Suppresses the error generated by an insufficient field of view of the " + "input fieldmap and continues with the processing. 
It is useful only when " + "used with fieldmap-based distortion correction. See " + "fieldmap-correction for a detailed explanation. ", + ) fieldmapSmooth = traits.Float( - argstr='--fieldmap-smooth3=%f', - desc='Applies 3D Gaussian smoothing with a standard deviation of S ' - 'millimeters (mm) to the input fieldmap before applying distortion ' - 'correction. This trait is only useful with ' - 'fieldmapCorrection. Skip this trait for no smoothing. ') + argstr="--fieldmap-smooth3=%f", + desc="Applies 3D Gaussian smoothing with a standard deviation of S " + "millimeters (mm) to the input fieldmap before applying distortion " + "correction. This trait is only useful with " + "fieldmapCorrection. Skip this trait for no smoothing. ", + ) transformDiffusionVolume = File( - argstr='--transform-diffusion-volume %s', - desc='This flag allows to define custom volumes in diffusion coordinate ' - 'which would be transformed into T1 coordinate in a rigid fashion. The ' - 'flag must be followed by the name of either a NIfTI file or of a folder ' - 'that contains one or more NIfTI files. All of the files must be in ' - 'diffusion coordinate, i.e. the files should overlay correctly with the ' - 'diffusion scan in BrainSuite. Only NIfTI files with an extension of .nii ' - 'or .nii.gz are supported. The transformed files are written to the ' + argstr="--transform-diffusion-volume %s", + desc="This flag allows to define custom volumes in diffusion coordinate " + "which would be transformed into T1 coordinate in a rigid fashion. The " + "flag must be followed by the name of either a NIfTI file or of a folder " + "that contains one or more NIfTI files. All of the files must be in " + "diffusion coordinate, i.e. the files should overlay correctly with the " + "diffusion scan in BrainSuite. Only NIfTI files with an extension of .nii " + "or .nii.gz are supported. The transformed files are written to the " 'output directory with suffix ".T1_coord" in the filename and will not be ' - 'corrected for distortion, if any. The trait transformInterpolation can ' - 'be used to define the type of interpolation that would be used (default ' - 'is set to linear). If you are attempting to transform a label file or ' + "corrected for distortion, if any. The trait transformInterpolation can " + "be used to define the type of interpolation that would be used (default " + "is set to linear). If you are attempting to transform a label file or " 'mask file, use "nearest" interpolation method with transformInterpolation. ' - 'See also transformT1Volume and transformInterpolation') + "See also transformT1Volume and transformInterpolation", + ) transformT1Volume = File( - argstr='--transform-t1-volume %s', - desc='Same as transformDiffusionVolume except that files specified must ' - 'be in T1 coordinate, i.e. the files should overlay correctly with the ' - 'input .bfc.nii.gz files in BrainSuite. BDP transforms these ' - 'data/images from T1 coordinate to diffusion coordinate. The transformed ' + argstr="--transform-t1-volume %s", + desc="Same as transformDiffusionVolume except that files specified must " + "be in T1 coordinate, i.e. the files should overlay correctly with the " + "input .bfc.nii.gz files in BrainSuite. BDP transforms these " + "data/images from T1 coordinate to diffusion coordinate. The transformed " 'files are written to the output directory with suffix ".D_coord" in the ' - 'filename. See also transformDiffusionVolume and transformInterpolation. ' + "filename. 
See also transformDiffusionVolume and transformInterpolation. ", ) transformInterpolation = traits.Enum( - 'linear', - 'nearest', - 'cubic', - 'spline', - argstr='--transform-interpolation %s', - desc= - 'Defines the type of interpolation method which would be used while ' - 'transforming volumes defined by transformT1Volume and ' + "linear", + "nearest", + "cubic", + "spline", + argstr="--transform-interpolation %s", + desc="Defines the type of interpolation method which would be used while " + "transforming volumes defined by transformT1Volume and " 'transformDiffusionVolume. Possible methods are "linear", "nearest", ' - '"cubic" and "spline". By default, "linear" interpolation is used. ') + '"cubic" and "spline". By default, "linear" interpolation is used. ', + ) transformT1Surface = File( - argstr='--transform-t1-surface %s', - desc='Similar to transformT1Volume, except that this flag allows ' - 'transforming surfaces (instead of volumes) in T1 coordinate into ' - 'diffusion coordinate in a rigid fashion. The flag must be followed by ' - 'the name of either a .dfs file or of a folder that contains one or more ' - 'dfs files. All of the files must be in T1 coordinate, i.e. the files ' - 'should overlay correctly with the T1-weighted scan in BrainSuite. The ' - 'transformed files are written to the output directory with suffix ' - 'D_coord" in the filename. ') + argstr="--transform-t1-surface %s", + desc="Similar to transformT1Volume, except that this flag allows " + "transforming surfaces (instead of volumes) in T1 coordinate into " + "diffusion coordinate in a rigid fashion. The flag must be followed by " + "the name of either a .dfs file or of a folder that contains one or more " + ".dfs files. All of the files must be in T1 coordinate, i.e. the files " + "should overlay correctly with the T1-weighted scan in BrainSuite. The " + "transformed files are written to the output directory with suffix " + '".D_coord" in the filename. ', + ) transformDiffusionSurface = File( - argstr='--transform-diffusion-surface %s', - desc='Same as transformT1Volume, except that the .dfs files specified ' - 'must be in diffusion coordinate, i.e. the surface files should overlay ' - 'correctly with the diffusion scan in BrainSuite. The transformed files ' + argstr="--transform-diffusion-surface %s", + desc="Same as transformT1Surface, except that the .dfs files specified " + "must be in diffusion coordinate, i.e. the surface files should overlay " + "correctly with the diffusion scan in BrainSuite. The transformed files " 'are written to the output directory with suffix ".T1_coord" in the ' - 'filename. See also transformT1Volume. ') + "filename. See also transformT1Surface. ", + ) transformDataOnly = traits.Bool( - argstr='--transform-data-only', - desc= - 'Skip all of the processing (co-registration, distortion correction and ' - 'tensor/ODF estimation) and directly start transformation of defined ' - 'custom volumes, mask and labels (using transformT1Volume, ' - 'transformDiffusionVolume, transformT1Surface, ' - 'transformDiffusionSurface, customDiffusionLabel, ' - 'customT1Label). This flag is useful when BDP was previously run on a ' - 'subject (or ) and some more data (volumes, mask or labels) ' - 'need to be transformed across the T1-diffusion coordinate spaces. This ' - 'assumes that all the necessary files were generated earlier and all of ' - 'the other flags MUST be used in the same way as they were in the initial ' - 'BDP run that processed the data. 
') + argstr="--transform-data-only", + desc="Skip all of the processing (co-registration, distortion correction and " + "tensor/ODF estimation) and directly start transformation of defined " + "custom volumes, mask and labels (using transformT1Volume, " + "transformDiffusionVolume, transformT1Surface, " + "transformDiffusionSurface, customDiffusionLabel, " + "customT1Label). This flag is useful when BDP was previously run on a " + "subject (or ) and some more data (volumes, mask or labels) " + "need to be transformed across the T1-diffusion coordinate spaces. This " + "assumes that all the necessary files were generated earlier and all of " + "the other flags MUST be used in the same way as they were in the initial " + "BDP run that processed the data. ", + ) generateStats = traits.Bool( - argstr='--generate-stats', - desc= - 'Generate ROI-wise statistics of estimated diffusion tensor parameters. ' - 'Units of the reported statistics are same as that of the estimated ' - 'tensor parameters (see estimateTensors). Mean, variance, and voxel counts of ' - 'white matter(WM), grey matter(GM), and both WM and GM combined are ' - 'written for each estimated parameter in a separate comma-seperated value ' - 'csv) file. BDP uses the ROI labels generated by Surface-Volume ' - 'Registration (SVReg) in the BrainSuite extraction sequence. ' - 'Specifically, it looks for labels saved in either ' - 'fileprefix>.svreg.corr.label.nii.gz or .svreg.label.nii.gz. ' - 'In case both files are present, only the first file is used. Also see ' - 'customDiffusionLabel and customT1Label for specifying your own ' - 'ROIs. It is also possible to forgo computing the SVReg ROI-wise ' - 'statistics and only compute stats with custom labels if SVReg label is ' - 'missing. BDP also transfers (and saves) the label/mask files to ' - 'appropriate coordinates before computing statistics. Also see ' - 'outputDiffusionCoordinates for outputs in diffusion coordinate and ' - 'forcePartialROIStats for an important note about field of view of ' - 'diffusion and T1-weighted scans. ') + argstr="--generate-stats", + desc="Generate ROI-wise statistics of estimated diffusion tensor parameters. " + "Units of the reported statistics are same as that of the estimated " + "tensor parameters (see estimateTensors). Mean, variance, and voxel counts of " + "white matter(WM), grey matter(GM), and both WM and GM combined are " + "written for each estimated parameter in a separate comma-seperated value " + "csv) file. BDP uses the ROI labels generated by Surface-Volume " + "Registration (SVReg) in the BrainSuite extraction sequence. " + "Specifically, it looks for labels saved in either " + "fileprefix>.svreg.corr.label.nii.gz or .svreg.label.nii.gz. " + "In case both files are present, only the first file is used. Also see " + "customDiffusionLabel and customT1Label for specifying your own " + "ROIs. It is also possible to forgo computing the SVReg ROI-wise " + "statistics and only compute stats with custom labels if SVReg label is " + "missing. BDP also transfers (and saves) the label/mask files to " + "appropriate coordinates before computing statistics. Also see " + "outputDiffusionCoordinates for outputs in diffusion coordinate and " + "forcePartialROIStats for an important note about field of view of " + "diffusion and T1-weighted scans. 
", + ) onlyStats = traits.Bool( - argstr='--generate-only-stats', - desc= - 'Skip all of the processing (co-registration, distortion correction and ' - 'tensor/ODF estimation) and directly start computation of statistics. ' - 'This flag is useful when BDP was previously run on a subject (or ' - 'fileprefix>) and statistics need to be (re-)computed later. This ' - 'assumes that all the necessary files were generated earlier. All of the ' - 'other flags MUST be used in the same way as they were in the initial BDP ' - 'run that processed the data. ') + argstr="--generate-only-stats", + desc="Skip all of the processing (co-registration, distortion correction and " + "tensor/ODF estimation) and directly start computation of statistics. " + "This flag is useful when BDP was previously run on a subject (or " + "fileprefix>) and statistics need to be (re-)computed later. This " + "assumes that all the necessary files were generated earlier. All of the " + "other flags MUST be used in the same way as they were in the initial BDP " + "run that processed the data. ", + ) forcePartialROIStats = traits.Bool( - argstr='--force-partial-roi-stats', - desc= - 'The field of view (FOV) of the diffusion and T1-weighted scans may ' - 'differ significantly in some situations. This may result in partial ' - 'acquisitions of some ROIs in the diffusion scan. By default, BDP does ' - 'not compute statistics for partially acquired ROIs and shows warnings. ' - 'This flag forces computation of statistics for all ROIs, including those ' - 'which are partially acquired. When this flag is used, number of missing ' - 'voxels are also reported for each ROI in statistics files. Number of ' - 'missing voxels are reported in the same coordinate system as the ' - 'statistics file. ') + argstr="--force-partial-roi-stats", + desc="The field of view (FOV) of the diffusion and T1-weighted scans may " + "differ significantly in some situations. This may result in partial " + "acquisitions of some ROIs in the diffusion scan. By default, BDP does " + "not compute statistics for partially acquired ROIs and shows warnings. " + "This flag forces computation of statistics for all ROIs, including those " + "which are partially acquired. When this flag is used, number of missing " + "voxels are also reported for each ROI in statistics files. Number of " + "missing voxels are reported in the same coordinate system as the " + "statistics file. ", + ) customDiffusionLabel = File( - argstr='--custom-diffusion-label %s', - desc= - 'BDP supports custom ROIs in addition to those generated by BrainSuite ' - 'SVReg) for ROI-wise statistics calculation. The flag must be followed ' - 'by the name of either a file (custom ROI file) or of a folder that ' - 'contains one or more ROI files. All of the files must be in diffusion ' - 'coordinate, i.e. the label files should overlay correctly with the ' - 'diffusion scan in BrainSuite. These input label files are also ' - 'transferred (and saved) to T1 coordinate for statistics in T1 ' - 'coordinate. BDP uses nearest-neighborhood interpolation for this ' - 'transformation. Only NIfTI files, with an extension of .nii or .nii.gz ' - 'are supported. In order to avoid confusion with other ROI IDs in the ' - 'statistic files, a 5-digit ROI ID is generated for each custom label ' - 'found and the mapping of ID to label file is saved in the file ' - 'fileprefix>.BDP_ROI_MAP.xml. Custom label files can also be generated ' - 'by using the label painter tool in BrainSuite. 
See also ' - 'customLabelXML') + argstr="--custom-diffusion-label %s", + desc="BDP supports custom ROIs in addition to those generated by BrainSuite " + "(SVReg) for ROI-wise statistics calculation. The flag must be followed " + "by the name of either a file (custom ROI file) or of a folder that " + "contains one or more ROI files. All of the files must be in diffusion " + "coordinate, i.e. the label files should overlay correctly with the " + "diffusion scan in BrainSuite. These input label files are also " + "transferred (and saved) to T1 coordinate for statistics in T1 " + "coordinate. BDP uses nearest-neighborhood interpolation for this " + "transformation. Only NIfTI files, with an extension of .nii or .nii.gz " + "are supported. In order to avoid confusion with other ROI IDs in the " + "statistic files, a 5-digit ROI ID is generated for each custom label " + "found and the mapping of ID to label file is saved in the file " + "<fileprefix>.BDP_ROI_MAP.xml. Custom label files can also be generated " + "by using the label painter tool in BrainSuite. See also " + "customLabelXML.", + ) customT1Label = File( - argstr='--custom-t1-label %s', - desc='Same as customDiffusionLabelexcept that the label files specified ' - 'must be in T1 coordinate, i.e. the label files should overlay correctly ' - 'with the T1-weighted scan in BrainSuite. If the trait ' - 'outputDiffusionCoordinates is also used then these input label files ' - 'are also transferred (and saved) to diffusion coordinate for statistics ' - 'in diffusion coordinate. BDP uses nearest-neighborhood interpolation for ' - 'this transformation. See also customLabelXML. ') + argstr="--custom-t1-label %s", + desc="Same as customDiffusionLabel, except that the label files specified " + "must be in T1 coordinate, i.e. the label files should overlay correctly " + "with the T1-weighted scan in BrainSuite. If the trait " + "outputDiffusionCoordinates is also used then these input label files " + "are also transferred (and saved) to diffusion coordinate for statistics " + "in diffusion coordinate. BDP uses nearest-neighborhood interpolation for " + "this transformation. See also customLabelXML. ", + ) customLabelXML = File( - argstr='--custom-label-xml %s', - desc= - 'BrainSuite saves a descriptions of the SVReg labels (ROI name, ID, ' - 'color, and description) in an .xml file ' + argstr="--custom-label-xml %s", + desc="BrainSuite saves a description of the SVReg labels (ROI name, ID, " + "color, and description) in an .xml file " 'brainsuite_labeldescription.xml). BDP uses the ROI ID"s from this xml ' - 'file to report statistics. This flag allows for the use of a custom ' - 'label description xml file. The flag must be followed by an xml ' - 'filename. This can be useful when you want to limit the ROIs for which ' - 'you compute statistics. You can also use custom xml files to name your ' + "file to report statistics. This flag allows for the use of a custom " + "label description xml file. The flag must be followed by an xml " + "filename. This can be useful when you want to limit the ROIs for which " + "you compute statistics. You can also use custom xml files to name your " 'own ROIs (assign ID"s) for custom labels. BrainSuite can save a label ' - 'description in .xml format after using the label painter tool to create ' + "description in .xml format after using the label painter tool to create " 'a ROI label. 
The xml file MUST be in the same format as BrainSuite"s ' - 'label description file (see brainsuite_labeldescription.xml for an ' - 'example). When this flag is used, NO 5-digit ROI ID is generated for ' - 'custom label files and NO Statistics will be calculated for ROIs not ' - 'identified in the custom xml file. See also customDiffusionLabel and ' - 'customT1Label.') + "label description file (see brainsuite_labeldescription.xml for an " + "example). When this flag is used, NO 5-digit ROI ID is generated for " + "custom label files and NO statistics will be calculated for ROIs not " + "identified in the custom xml file. See also customDiffusionLabel and " + "customT1Label.", + ) outputSubdir = traits.Str( - argstr='--output-subdir %s', - desc= - 'By default, BDP writes out all the output (and intermediate) files in ' - 'the same directory (or folder) as the BFC file. This flag allows to ' - 'specify a sub-directory name in which output (and intermediate) files ' - 'would be written. BDP will create the sub-directory in the same ' - 'directory as BFC file. should be the name of the ' - 'sub-directory without any path. This can be useful to organize all ' - 'outputs generated by BDP in a separate sub-directory. ') + argstr="--output-subdir %s", + desc="By default, BDP writes out all the output (and intermediate) files in " + "the same directory (or folder) as the BFC file. This flag allows you to " + "specify a sub-directory name in which output (and intermediate) files " + "would be written. BDP will create the sub-directory in the same " + "directory as the BFC file. The given value should be the name of the " + "sub-directory without any path. This can be useful to organize all " + "outputs generated by BDP in a separate sub-directory. ", + ) outputDiffusionCoordinates = traits.Bool( - argstr='--output-diffusion-coordinate', - desc= - 'Enables estimation of diffusion tensors and/or ODFs (and statistics if ' - 'applicable) in the native diffusion coordinate in addition to the ' - 'default T1-coordinate. All native diffusion coordinate files are saved ' - 'in a seperate folder named "diffusion_coord_outputs". In case statistics ' - 'computation is required, it will also transform/save all label/mask ' - 'files required to diffusion coordinate (see generateStats for ' - 'details). ') + argstr="--output-diffusion-coordinate", + desc="Enables estimation of diffusion tensors and/or ODFs (and statistics if " + "applicable) in the native diffusion coordinate in addition to the " + "default T1-coordinate. All native diffusion coordinate files are saved " + 'in a separate folder named "diffusion_coord_outputs". In case statistics ' + "computation is required, it will also transform/save all label/mask " + "files required to diffusion coordinate (see generateStats for " + "details). ", + ) flagConfigFile = File( - argstr='--flag-conf-file %s', - desc= - 'Uses the defined file to specify BDP flags which can be useful for ' - 'batch processing. A flag configuration file is a plain text file which ' 'can contain any number of BDP"s optional flags (and their parameters) ' - 'separated by whitespace. Everything coming after # until end-of-line is ' - 'treated as comment and is ignored. 
If a flag is defined in configuration ' - 'file and is also specified in the command used to run BDP, then the ' - 'later get preference and overrides the definition in configuration ' - 'file. ') + "separated by whitespace. Everything coming after # until end-of-line is " + "treated as a comment and is ignored. If a flag is defined in the configuration " + "file and is also specified in the command used to run BDP, then the " + "latter gets preference and overrides the definition in the configuration " + "file. ", + ) outPrefix = traits.Str( - argstr='--output-fileprefix %s', - desc='Specifies output fileprefix when noStructuralRegistration is ' - 'used. The fileprefix can not start with a dash (-) and should be a ' - 'simple string reflecting the absolute path to desired location, along with outPrefix. When this flag is ' - 'not specified (and noStructuralRegistration is used) then the output ' - 'files have same file-base as the input diffusion file. This trait is ' - 'ignored when noStructuralRegistration is not used. ') + argstr="--output-fileprefix %s", + desc="Specifies the output fileprefix when noStructuralRegistration is " + "used. The fileprefix cannot start with a dash (-) and should be a " + "simple string reflecting the absolute path to the desired location, along with outPrefix. When this flag is " + "not specified (and noStructuralRegistration is used) then the output " + "files have the same file-base as the input diffusion file. This trait is " + "ignored when noStructuralRegistration is not used. ", + ) threads = traits.Int( - argstr='--threads=%d', - desc='Sets the number of parallel process threads which can be used for ' - 'computations to N, where N must be an integer. Default value of N is ' - ' ') + argstr="--threads=%d", + desc="Sets the number of parallel process threads which can be used for " + "computations to N, where N must be an integer. Default value of N is " + " ", + ) lowMemory = traits.Bool( - argstr='--low-memory', - desc='Activates low-memory mode. This will run the registration-based ' - 'distortion correction at a lower resolution, which could result in a ' - 'less-accurate correction. This should only be used when no other ' - 'alternative is available. ') + argstr="--low-memory", + desc="Activates low-memory mode. This will run the registration-based " + "distortion correction at a lower resolution, which could result in a " + "less-accurate correction. This should only be used when no other " + "alternative is available. ", + ) ignoreMemory = traits.Bool( - argstr='--ignore-memory', - desc='Deactivates the inbuilt memory checks and forces BDP to run ' - 'registration-based distortion correction at its default resolution even ' - 'on machines with a low amount of memory. This may result in an ' - 'out-of-memory error when BDP cannot allocate sufficient memory. ') + argstr="--ignore-memory", + desc="Deactivates the inbuilt memory checks and forces BDP to run " + "registration-based distortion correction at its default resolution even " + "on machines with a low amount of memory. This may result in an " + "out-of-memory error when BDP cannot allocate sufficient memory. 
", + ) class BDP(CommandLine): @@ -1698,21 +1748,22 @@ class BDP(CommandLine): """ input_spec = BDPInputSpec - _cmd = 'bdp.sh' + _cmd = "bdp.sh" def _format_arg(self, name, spec, value): - if name == 'BVecBValPair': + if name == "BVecBValPair": return spec.argstr % (value[0], value[1]) - if name == 'dataSinkDelay': - return spec.argstr % '' - return super(BDP, self)._format_arg(name, spec, value) + if name == "dataSinkDelay": + return spec.argstr % "" + return super()._format_arg(name, spec, value) class ThicknessPVCInputSpec(CommandLineInputSpec): subjectFilePrefix = traits.Str( - argstr='%s', + argstr="%s", mandatory=True, - desc='Absolute path and filename prefix of the subject data') + desc="Absolute path and filename prefix of the subject data", + ) class ThicknessPVC(CommandLine): @@ -1739,7 +1790,7 @@ class ThicknessPVC(CommandLine): """ input_spec = ThicknessPVCInputSpec - _cmd = 'thicknessPVC.sh' + _cmd = "thicknessPVC.sh" # used to generate file names for outputs @@ -1750,7 +1801,7 @@ def getFileName(inputName, suffix): dotRegex = regex.compile("[^.]+") # extract between last slash and first period inputNoExtension = dotRegex.findall(fullInput)[0] - return os.path.abspath(''.join((inputNoExtension, suffix))) + return os.path.abspath(f"{inputNoExtension}{suffix}") def l_outputs(self): diff --git a/nipype/interfaces/brainsuite/tests/__init__.py b/nipype/interfaces/brainsuite/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/brainsuite/tests/__init__.py +++ b/nipype/interfaces/brainsuite/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/brainsuite/tests/test_auto_BDP.py b/nipype/interfaces/brainsuite/tests/test_auto_BDP.py index ff705edfeb..0da29c372d 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_BDP.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_BDP.py @@ -1,96 +1,182 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import BDP def test_BDP_inputs(): input_map = dict( BVecBValPair=dict( - argstr='--bvec %s --bval %s', + argstr="--bvec %s --bval %s", mandatory=True, position=-1, - xor=['bMatrixFile'], + xor=["bMatrixFile"], + ), + args=dict( + argstr="%s", ), - args=dict(argstr='%s', ), bMatrixFile=dict( - argstr='--bmat %s', + argstr="--bmat %s", + extensions=None, mandatory=True, position=-1, - xor=['BVecBValPair'], + xor=["BVecBValPair"], + ), + bValRatioThreshold=dict( + argstr="--bval-ratio-threshold %f", ), - bValRatioThreshold=dict(argstr='--bval-ratio-threshold %f', ), bfcFile=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, - xor=['noStructuralRegistration'], - ), - customDiffusionLabel=dict(argstr='--custom-diffusion-label %s', ), - customLabelXML=dict(argstr='--custom-label-xml %s', ), - customT1Label=dict(argstr='--custom-t1-label %s', ), - dataSinkDelay=dict(argstr='%s', ), - dcorrRegMeasure=dict(argstr='--dcorr-reg-method %s', ), - dcorrWeight=dict(argstr='--dcorr-regularization-wt %f', ), - dwiMask=dict(argstr='--dwi-mask %s', ), - echoSpacing=dict(argstr='--echo-spacing=%f', ), + xor=["noStructuralRegistration"], + ), + customDiffusionLabel=dict( + argstr="--custom-diffusion-label %s", + extensions=None, + ), + customLabelXML=dict( + argstr="--custom-label-xml %s", + extensions=None, + ), + customT1Label=dict( + argstr="--custom-t1-label %s", + extensions=None, + ), + dataSinkDelay=dict( + argstr="%s", + ), + dcorrRegMeasure=dict( + argstr="--dcorr-reg-method %s", + ), + 
dcorrWeight=dict( + argstr="--dcorr-regularization-wt %f", + ), + dwiMask=dict( + argstr="--dwi-mask %s", + extensions=None, + ), + echoSpacing=dict( + argstr="--echo-spacing=%f", + ), environ=dict( nohash=True, usedefault=True, ), - estimateODF_3DShore=dict(argstr='--3dshore --diffusion_time_ms %f', ), - estimateODF_FRACT=dict(argstr='--FRACT', ), - estimateODF_FRT=dict(argstr='--FRT', ), - estimateTensors=dict(argstr='--tensors', ), + estimateODF_3DShore=dict( + argstr="--3dshore --diffusion_time_ms %f", + ), + estimateODF_FRACT=dict( + argstr="--FRACT", + ), + estimateODF_FRT=dict( + argstr="--FRT", + ), + estimateTensors=dict( + argstr="--tensors", + ), fieldmapCorrection=dict( - argstr='--fieldmap-correction %s', - requires=['echoSpacing'], + argstr="--fieldmap-correction %s", + extensions=None, + requires=["echoSpacing"], ), fieldmapCorrectionMethod=dict( - argstr='--fieldmap-correction-method %s', - xor=['skipIntensityCorr'], - ), - fieldmapSmooth=dict(argstr='--fieldmap-smooth3=%f', ), - flagConfigFile=dict(argstr='--flag-conf-file %s', ), - forcePartialROIStats=dict(argstr='--force-partial-roi-stats', ), - generateStats=dict(argstr='--generate-stats', ), - ignoreFieldmapFOV=dict(argstr='--ignore-fieldmap-fov', ), - ignoreMemory=dict(argstr='--ignore-memory', ), + argstr="--fieldmap-correction-method %s", + xor=["skipIntensityCorr"], + ), + fieldmapSmooth=dict( + argstr="--fieldmap-smooth3=%f", + ), + flagConfigFile=dict( + argstr="--flag-conf-file %s", + extensions=None, + ), + forcePartialROIStats=dict( + argstr="--force-partial-roi-stats", + ), + generateStats=dict( + argstr="--generate-stats", + ), + ignoreFieldmapFOV=dict( + argstr="--ignore-fieldmap-fov", + ), + ignoreMemory=dict( + argstr="--ignore-memory", + ), inputDiffusionData=dict( - argstr='--nii %s', + argstr="--nii %s", + extensions=None, mandatory=True, position=-2, ), - lowMemory=dict(argstr='--low-memory', ), + lowMemory=dict( + argstr="--low-memory", + ), noStructuralRegistration=dict( - argstr='--no-structural-registration', + argstr="--no-structural-registration", mandatory=True, position=0, - xor=['bfcFile'], + xor=["bfcFile"], + ), + odfLambta=dict( + argstr="--odf-lambda ", + ), + onlyStats=dict( + argstr="--generate-only-stats", + ), + outPrefix=dict( + argstr="--output-fileprefix %s", ), - odfLambta=dict(argstr='--odf-lambda ', ), - onlyStats=dict(argstr='--generate-only-stats', ), - outPrefix=dict(argstr='--output-fileprefix %s', ), outputDiffusionCoordinates=dict( - argstr='--output-diffusion-coordinate', ), - outputSubdir=dict(argstr='--output-subdir %s', ), - phaseEncodingDirection=dict(argstr='--dir=%s', ), - rigidRegMeasure=dict(argstr='--rigid-reg-measure %s', ), - skipDistortionCorr=dict(argstr='--no-distortion-correction', ), + argstr="--output-diffusion-coordinate", + ), + outputSubdir=dict( + argstr="--output-subdir %s", + ), + phaseEncodingDirection=dict( + argstr="--dir=%s", + ), + rigidRegMeasure=dict( + argstr="--rigid-reg-measure %s", + ), + skipDistortionCorr=dict( + argstr="--no-distortion-correction", + ), skipIntensityCorr=dict( - argstr='--no-intensity-correction', - xor=['fieldmapCorrectionMethod'], + argstr="--no-intensity-correction", + xor=["fieldmapCorrectionMethod"], + ), + skipNonuniformityCorr=dict( + argstr="--no-nonuniformity-correction", + ), + t1Mask=dict( + argstr="--t1-mask %s", + extensions=None, + ), + threads=dict( + argstr="--threads=%d", + ), + transformDataOnly=dict( + argstr="--transform-data-only", ), - 
skipNonuniformityCorr=dict(argstr='--no-nonuniformity-correction', ), - t1Mask=dict(argstr='--t1-mask %s', ), - threads=dict(argstr='--threads=%d', ), - transformDataOnly=dict(argstr='--transform-data-only', ), transformDiffusionSurface=dict( - argstr='--transform-diffusion-surface %s', ), + argstr="--transform-diffusion-surface %s", + extensions=None, + ), transformDiffusionVolume=dict( - argstr='--transform-diffusion-volume %s', ), - transformInterpolation=dict(argstr='--transform-interpolation %s', ), - transformT1Surface=dict(argstr='--transform-t1-surface %s', ), - transformT1Volume=dict(argstr='--transform-t1-volume %s', ), + argstr="--transform-diffusion-volume %s", + extensions=None, + ), + transformInterpolation=dict( + argstr="--transform-interpolation %s", + ), + transformT1Surface=dict( + argstr="--transform-t1-surface %s", + extensions=None, + ), + transformT1Volume=dict( + argstr="--transform-t1-volume %s", + extensions=None, + ), ) inputs = BDP.input_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Bfc.py b/nipype/interfaces/brainsuite/tests/test_auto_Bfc.py index 01200e50fc..dbb1f3d839 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Bfc.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Bfc.py @@ -1,72 +1,121 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Bfc def test_Bfc_inputs(): input_map = dict( - args=dict(argstr='%s', ), - biasEstimateConvergenceThreshold=dict(argstr='--beps %f', ), - biasEstimateSpacing=dict(argstr='-s %d', ), - biasFieldEstimatesOutputPrefix=dict(argstr='--biasprefix %s', ), - biasRange=dict(argstr='%s', ), - controlPointSpacing=dict(argstr='-c %d', ), - convergenceThreshold=dict(argstr='--eps %f', ), - correctWholeVolume=dict(argstr='--extrapolate', ), - correctedImagesOutputPrefix=dict(argstr='--prefix %s', ), - correctionScheduleFile=dict(argstr='--schedule %s', ), + args=dict( + argstr="%s", + ), + biasEstimateConvergenceThreshold=dict( + argstr="--beps %f", + ), + biasEstimateSpacing=dict( + argstr="-s %d", + ), + biasFieldEstimatesOutputPrefix=dict( + argstr="--biasprefix %s", + ), + biasRange=dict( + argstr="%s", + ), + controlPointSpacing=dict( + argstr="-c %d", + ), + convergenceThreshold=dict( + argstr="--eps %f", + ), + correctWholeVolume=dict( + argstr="--extrapolate", + ), + correctedImagesOutputPrefix=dict( + argstr="--prefix %s", + ), + correctionScheduleFile=dict( + argstr="--schedule %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), - histogramRadius=dict(argstr='-r %d', ), - histogramType=dict(argstr='%s', ), + histogramRadius=dict( + argstr="-r %d", + ), + histogramType=dict( + argstr="%s", + ), inputMRIFile=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), inputMaskFile=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, hash_files=False, ), - intermediate_file_type=dict(argstr='%s', ), - iterativeMode=dict(argstr='--iterate', ), + intermediate_file_type=dict( + argstr="%s", + ), + iterativeMode=dict( + argstr="--iterate", + ), maxBias=dict( - argstr='-U %f', + argstr="-U %f", usedefault=True, ), minBias=dict( - argstr='-L %f', + argstr="-L %f", usedefault=True, ), outputBiasField=dict( - argstr='--bias %s', + argstr="--bias %s", + extensions=None, hash_files=False, ), outputMRIVolume=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, hash_files=False, ), outputMaskedBiasField=dict( - argstr='--maskedbias %s', + argstr="--maskedbias %s", + 
extensions=None, hash_files=False, ), - splineLambda=dict(argstr='-w %f', ), - timer=dict(argstr='--timer', ), - verbosityLevel=dict(argstr='-v %d', ), + splineLambda=dict( + argstr="-w %f", + ), + timer=dict( + argstr="--timer", + ), + verbosityLevel=dict( + argstr="-v %d", + ), ) inputs = Bfc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Bfc_outputs(): output_map = dict( - correctionScheduleFile=dict(), - outputBiasField=dict(), - outputMRIVolume=dict(), - outputMaskedBiasField=dict(), + correctionScheduleFile=dict( + extensions=None, + ), + outputBiasField=dict( + extensions=None, + ), + outputMRIVolume=dict( + extensions=None, + ), + outputMaskedBiasField=dict( + extensions=None, + ), ) outputs = Bfc.output_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Bse.py b/nipype/interfaces/brainsuite/tests/test_auto_Bse.py index bbd154bc24..748defcc00 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Bse.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Bse.py @@ -1,25 +1,26 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Bse def test_Bse_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), diffusionConstant=dict( - argstr='-d %f', + argstr="-d %f", usedefault=True, ), diffusionIterations=dict( - argstr='-n %d', + argstr="-n %d", usedefault=True, ), dilateFinalMask=dict( - argstr='-p', + argstr="-p", usedefault=True, ), edgeDetectionConstant=dict( - argstr='-s %f', + argstr="-s %f", usedefault=True, ), environ=dict( @@ -27,47 +28,58 @@ def test_Bse_inputs(): usedefault=True, ), inputMRIFile=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), - noRotate=dict(argstr='--norotate', ), + noRotate=dict( + argstr="--norotate", + ), outputCortexFile=dict( - argstr='--cortex %s', + argstr="--cortex %s", + extensions=None, hash_files=False, ), outputDetailedBrainMask=dict( - argstr='--hires %s', + argstr="--hires %s", + extensions=None, hash_files=False, ), outputDiffusionFilter=dict( - argstr='--adf %s', + argstr="--adf %s", + extensions=None, hash_files=False, ), outputEdgeMap=dict( - argstr='--edge %s', + argstr="--edge %s", + extensions=None, hash_files=False, ), outputMRIVolume=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, hash_files=False, ), outputMaskFile=dict( - argstr='--mask %s', + argstr="--mask %s", + extensions=None, genfile=True, hash_files=False, ), radius=dict( - argstr='-r %f', + argstr="-r %f", usedefault=True, ), - timer=dict(argstr='--timer', ), + timer=dict( + argstr="--timer", + ), trim=dict( - argstr='--trim', + argstr="--trim", usedefault=True, ), verbosityLevel=dict( - argstr='-v %f', + argstr="-v %f", usedefault=True, ), ) @@ -76,14 +88,28 @@ def test_Bse_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Bse_outputs(): output_map = dict( - outputCortexFile=dict(), - outputDetailedBrainMask=dict(), - outputDiffusionFilter=dict(), - outputEdgeMap=dict(), - outputMRIVolume=dict(), - outputMaskFile=dict(), + outputCortexFile=dict( + extensions=None, + ), + outputDetailedBrainMask=dict( + extensions=None, + ), + outputDiffusionFilter=dict( + extensions=None, + ), + outputEdgeMap=dict( + extensions=None, + ), + outputMRIVolume=dict( + 
extensions=None, + ), + outputMaskFile=dict( + extensions=None, + ), ) outputs = Bse.output_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Cerebro.py b/nipype/interfaces/brainsuite/tests/test_auto_Cerebro.py index 808e4347c3..a924d7ce0f 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Cerebro.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Cerebro.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Cerebro def test_Cerebro_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), costFunction=dict( - argstr='-c %d', + argstr="-c %d", usedefault=True, ), environ=dict( @@ -15,54 +16,90 @@ def test_Cerebro_inputs(): usedefault=True, ), inputAtlasLabelFile=dict( - argstr='--atlaslabels %s', + argstr="--atlaslabels %s", + extensions=None, mandatory=True, ), inputAtlasMRIFile=dict( - argstr='--atlas %s', + argstr="--atlas %s", + extensions=None, mandatory=True, ), - inputBrainMaskFile=dict(argstr='-m %s', ), + inputBrainMaskFile=dict( + argstr="-m %s", + extensions=None, + ), inputMRIFile=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), - keepTempFiles=dict(argstr='--keep', ), - linearConvergence=dict(argstr='--linconv %f', ), + keepTempFiles=dict( + argstr="--keep", + ), + linearConvergence=dict( + argstr="--linconv %f", + ), outputAffineTransformFile=dict( - argstr='--air %s', + argstr="--air %s", + extensions=None, genfile=True, ), outputCerebrumMaskFile=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, ), outputLabelVolumeFile=dict( - argstr='-l %s', + argstr="-l %s", + extensions=None, genfile=True, ), outputWarpTransformFile=dict( - argstr='--warp %s', + argstr="--warp %s", + extensions=None, genfile=True, ), - tempDirectory=dict(argstr='--tempdir %s', ), - tempDirectoryBase=dict(argstr='--tempdirbase %s', ), - useCentroids=dict(argstr='--centroids', ), - verbosity=dict(argstr='-v %d', ), - warpConvergence=dict(argstr='--warpconv %f', ), - warpLabel=dict(argstr='--warplevel %d', ), + tempDirectory=dict( + argstr="--tempdir %s", + ), + tempDirectoryBase=dict( + argstr="--tempdirbase %s", + ), + useCentroids=dict( + argstr="--centroids", + ), + verbosity=dict( + argstr="-v %d", + ), + warpConvergence=dict( + argstr="--warpconv %f", + ), + warpLabel=dict( + argstr="--warplevel %d", + ), ) inputs = Cerebro.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Cerebro_outputs(): output_map = dict( - outputAffineTransformFile=dict(), - outputCerebrumMaskFile=dict(), - outputLabelVolumeFile=dict(), - outputWarpTransformFile=dict(), + outputAffineTransformFile=dict( + extensions=None, + ), + outputCerebrumMaskFile=dict( + extensions=None, + ), + outputLabelVolumeFile=dict( + extensions=None, + ), + outputWarpTransformFile=dict( + extensions=None, + ), ) outputs = Cerebro.output_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Cortex.py b/nipype/interfaces/brainsuite/tests/test_auto_Cortex.py index 536cb158f2..30287edf90 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Cortex.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Cortex.py @@ -1,14 +1,17 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Cortex def test_Cortex_inputs(): input_map = dict( - args=dict(argstr='%s', ), 
- computeGCBoundary=dict(argstr='-g', ), + args=dict( + argstr="%s", + ), + computeGCBoundary=dict( + argstr="-g", + ), computeWGBoundary=dict( - argstr='-w', + argstr="-w", usedefault=True, ), environ=dict( @@ -16,35 +19,48 @@ def test_Cortex_inputs(): usedefault=True, ), includeAllSubcorticalAreas=dict( - argstr='-a', + argstr="-a", usedefault=True, ), inputHemisphereLabelFile=dict( - argstr='-h %s', + argstr="-h %s", + extensions=None, mandatory=True, ), inputTissueFractionFile=dict( - argstr='-f %s', + argstr="-f %s", + extensions=None, mandatory=True, ), outputCerebrumMask=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, ), - timer=dict(argstr='--timer', ), + timer=dict( + argstr="--timer", + ), tissueFractionThreshold=dict( - argstr='-p %f', + argstr="-p %f", usedefault=True, ), - verbosity=dict(argstr='-v %d', ), + verbosity=dict( + argstr="-v %d", + ), ) inputs = Cortex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Cortex_outputs(): - output_map = dict(outputCerebrumMask=dict(), ) + output_map = dict( + outputCerebrumMask=dict( + extensions=None, + ), + ) outputs = Cortex.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Dewisp.py b/nipype/interfaces/brainsuite/tests/test_auto_Dewisp.py index ba430fb1fd..4d986e22f3 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Dewisp.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Dewisp.py @@ -1,35 +1,52 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Dewisp def test_Dewisp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputMaskFile=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), - maximumIterations=dict(argstr='-n %d', ), + maximumIterations=dict( + argstr="-n %d", + ), outputMaskFile=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, ), - sizeThreshold=dict(argstr='-t %d', ), - timer=dict(argstr='--timer', ), - verbosity=dict(argstr='-v %d', ), + sizeThreshold=dict( + argstr="-t %d", + ), + timer=dict( + argstr="--timer", + ), + verbosity=dict( + argstr="-v %d", + ), ) inputs = Dewisp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dewisp_outputs(): - output_map = dict(outputMaskFile=dict(), ) + output_map = dict( + outputMaskFile=dict( + extensions=None, + ), + ) outputs = Dewisp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py b/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py index c69232fd01..3122791cf5 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py @@ -1,61 +1,87 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Dfs def test_Dfs_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), curvatureWeighting=dict( - argstr='-w %f', + argstr="-w %f", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), - inputShadingVolume=dict(argstr='-c %s', ), + inputShadingVolume=dict( + argstr="-c %s", + 
extensions=None, + ), inputVolumeFile=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), - noNormalsFlag=dict(argstr='--nonormals', ), + noNormalsFlag=dict( + argstr="--nonormals", + ), nonZeroTessellation=dict( - argstr='-nz', - xor=('nonZeroTessellation', 'specialTessellation'), + argstr="-nz", + xor=("nonZeroTessellation", "specialTessellation"), ), outputSurfaceFile=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, ), - postSmoothFlag=dict(argstr='--postsmooth', ), - scalingPercentile=dict(argstr='-f %f', ), + postSmoothFlag=dict( + argstr="--postsmooth", + ), + scalingPercentile=dict( + argstr="-f %f", + ), smoothingConstant=dict( - argstr='-a %f', + argstr="-a %f", usedefault=True, ), smoothingIterations=dict( - argstr='-n %d', + argstr="-n %d", usedefault=True, ), specialTessellation=dict( - argstr='%s', + argstr="%s", position=-1, - requires=['tessellationThreshold'], - xor=('nonZeroTessellation', 'specialTessellation'), + requires=["tessellationThreshold"], + xor=("nonZeroTessellation", "specialTessellation"), + ), + tessellationThreshold=dict( + argstr="%f", + ), + timer=dict( + argstr="--timer", + ), + verbosity=dict( + argstr="-v %d", + ), + zeroPadFlag=dict( + argstr="-z", ), - tessellationThreshold=dict(argstr='%f', ), - timer=dict(argstr='--timer', ), - verbosity=dict(argstr='-v %d', ), - zeroPadFlag=dict(argstr='-z', ), ) inputs = Dfs.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dfs_outputs(): - output_map = dict(outputSurfaceFile=dict(), ) + output_map = dict( + outputSurfaceFile=dict( + extensions=None, + ), + ) outputs = Dfs.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Hemisplit.py b/nipype/interfaces/brainsuite/tests/test_auto_Hemisplit.py index 9e3db80dd9..0696f11992 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Hemisplit.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Hemisplit.py @@ -1,54 +1,78 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Hemisplit def test_Hemisplit_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputHemisphereLabelFile=dict( - argstr='-l %s', + argstr="-l %s", + extensions=None, mandatory=True, ), inputSurfaceFile=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), outputLeftHemisphere=dict( - argstr='--left %s', + argstr="--left %s", + extensions=None, genfile=True, ), outputLeftPialHemisphere=dict( - argstr='-pl %s', + argstr="-pl %s", + extensions=None, genfile=True, ), outputRightHemisphere=dict( - argstr='--right %s', + argstr="--right %s", + extensions=None, genfile=True, ), outputRightPialHemisphere=dict( - argstr='-pr %s', + argstr="-pr %s", + extensions=None, genfile=True, ), - pialSurfaceFile=dict(argstr='-p %s', ), - timer=dict(argstr='--timer', ), - verbosity=dict(argstr='-v %d', ), + pialSurfaceFile=dict( + argstr="-p %s", + extensions=None, + ), + timer=dict( + argstr="--timer", + ), + verbosity=dict( + argstr="-v %d", + ), ) inputs = Hemisplit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Hemisplit_outputs(): output_map = dict( - 
outputLeftHemisphere=dict(), - outputLeftPialHemisphere=dict(), - outputRightHemisphere=dict(), - outputRightPialHemisphere=dict(), + outputLeftHemisphere=dict( + extensions=None, + ), + outputLeftPialHemisphere=dict( + extensions=None, + ), + outputRightHemisphere=dict( + extensions=None, + ), + outputRightPialHemisphere=dict( + extensions=None, + ), ) outputs = Hemisplit.output_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Pialmesh.py b/nipype/interfaces/brainsuite/tests/test_auto_Pialmesh.py index afc621a56e..f5ba0725df 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Pialmesh.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Pialmesh.py @@ -1,76 +1,97 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Pialmesh def test_Pialmesh_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - exportPrefix=dict(argstr='--prefix %s', ), + exportPrefix=dict( + argstr="--prefix %s", + ), inputMaskFile=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, mandatory=True, ), inputSurfaceFile=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), inputTissueFractionFile=dict( - argstr='-f %s', + argstr="-f %s", + extensions=None, mandatory=True, ), laplacianSmoothing=dict( - argstr='--smooth %f', + argstr="--smooth %f", usedefault=True, ), maxThickness=dict( - argstr='--max %f', + argstr="--max %f", usedefault=True, ), normalSmoother=dict( - argstr='--nc %f', + argstr="--nc %f", usedefault=True, ), numIterations=dict( - argstr='-n %d', + argstr="-n %d", usedefault=True, ), outputInterval=dict( - argstr='--interval %d', + argstr="--interval %d", usedefault=True, ), outputSurfaceFile=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, ), - recomputeNormals=dict(argstr='--norm', ), + recomputeNormals=dict( + argstr="--norm", + ), searchRadius=dict( - argstr='-r %f', + argstr="-r %f", usedefault=True, ), stepSize=dict( - argstr='-s %f', + argstr="-s %f", usedefault=True, ), - tangentSmoother=dict(argstr='--tc %f', ), - timer=dict(argstr='--timer', ), + tangentSmoother=dict( + argstr="--tc %f", + ), + timer=dict( + argstr="--timer", + ), tissueThreshold=dict( - argstr='-t %f', + argstr="-t %f", usedefault=True, ), - verbosity=dict(argstr='-v %d', ), + verbosity=dict( + argstr="-v %d", + ), ) inputs = Pialmesh.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Pialmesh_outputs(): - output_map = dict(outputSurfaceFile=dict(), ) + output_map = dict( + outputSurfaceFile=dict( + extensions=None, + ), + ) outputs = Pialmesh.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Pvc.py b/nipype/interfaces/brainsuite/tests/test_auto_Pvc.py index d425d4ddac..a6f52a26a7 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Pvc.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Pvc.py @@ -1,42 +1,63 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Pvc def test_Pvc_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputMRIFile=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), - inputMaskFile=dict(argstr='-m 
%s', ), + inputMaskFile=dict( + argstr="-m %s", + extensions=None, + ), outputLabelFile=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, ), outputTissueFractionFile=dict( - argstr='-f %s', + argstr="-f %s", + extensions=None, genfile=True, ), - spatialPrior=dict(argstr='-l %f', ), - threeClassFlag=dict(argstr='-3', ), - timer=dict(argstr='--timer', ), - verbosity=dict(argstr='-v %d', ), + spatialPrior=dict( + argstr="-l %f", + ), + threeClassFlag=dict( + argstr="-3", + ), + timer=dict( + argstr="--timer", + ), + verbosity=dict( + argstr="-v %d", + ), ) inputs = Pvc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Pvc_outputs(): output_map = dict( - outputLabelFile=dict(), - outputTissueFractionFile=dict(), + outputLabelFile=dict( + extensions=None, + ), + outputTissueFractionFile=dict( + extensions=None, + ), ) outputs = Pvc.output_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_SVReg.py b/nipype/interfaces/brainsuite/tests/test_auto_SVReg.py index 7449e1488a..4c29c2bfda 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_SVReg.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_SVReg.py @@ -1,51 +1,84 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import SVReg def test_SVReg_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), atlasFilePrefix=dict( argstr="'%s'", position=1, ), - curveMatchingInstructions=dict(argstr="'-cur %s'", ), - dataSinkDelay=dict(argstr='%s', ), - displayModuleName=dict(argstr="'-m'", ), - displayTimestamps=dict(argstr="'-t'", ), + curveMatchingInstructions=dict( + argstr="'-cur %s'", + ), + dataSinkDelay=dict( + argstr="%s", + ), + displayModuleName=dict( + argstr="'-m'", + ), + displayTimestamps=dict( + argstr="'-t'", + ), environ=dict( nohash=True, usedefault=True, ), - iterations=dict(argstr="'-H %d'", ), - keepIntermediates=dict(argstr="'-k'", ), - pialSurfaceMaskDilation=dict(argstr="'-D %d'", ), - refineOutputs=dict(argstr="'-r'", ), - shortMessages=dict(argstr="'-gui'", ), - skipToIntensityReg=dict(argstr="'-p'", ), - skipToVolumeReg=dict(argstr="'-s'", ), - skipVolumetricProcessing=dict(argstr="'-S'", ), + iterations=dict( + argstr="'-H %d'", + ), + keepIntermediates=dict( + argstr="'-k'", + ), + pialSurfaceMaskDilation=dict( + argstr="'-D %d'", + ), + refineOutputs=dict( + argstr="'-r'", + ), + shortMessages=dict( + argstr="'-gui'", + ), + skipToIntensityReg=dict( + argstr="'-p'", + ), + skipToVolumeReg=dict( + argstr="'-s'", + ), + skipVolumetricProcessing=dict( + argstr="'-S'", + ), subjectFilePrefix=dict( argstr="'%s'", mandatory=True, position=0, ), - useCerebrumMask=dict(argstr="'-C'", ), - useManualMaskFile=dict(argstr="'-cbm'", ), - useMultiThreading=dict(argstr="'-P'", ), - useSingleThreading=dict(argstr="'-U'", ), + useCerebrumMask=dict( + argstr="'-C'", + ), + useManualMaskFile=dict( + argstr="'-cbm'", + ), + useMultiThreading=dict( + argstr="'-P'", + ), + useSingleThreading=dict( + argstr="'-U'", + ), verbosity0=dict( argstr="'-v0'", - xor=('verbosity0', 'verbosity1', 'verbosity2'), + xor=("verbosity0", "verbosity1", "verbosity2"), ), verbosity1=dict( argstr="'-v1'", - xor=('verbosity0', 'verbosity1', 'verbosity2'), + xor=("verbosity0", "verbosity1", "verbosity2"), ), verbosity2=dict( argstr="'v2'", - xor=('verbosity0', 'verbosity1', 'verbosity2'), + xor=("verbosity0", 
"verbosity1", "verbosity2"), ), ) inputs = SVReg.input_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Scrubmask.py b/nipype/interfaces/brainsuite/tests/test_auto_Scrubmask.py index eb672d12b7..97094db018 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Scrubmask.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Scrubmask.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Scrubmask def test_Scrubmask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), backgroundFillThreshold=dict( - argstr='-b %d', + argstr="-b %d", usedefault=True, ), environ=dict( @@ -15,28 +16,42 @@ def test_Scrubmask_inputs(): usedefault=True, ), foregroundTrimThreshold=dict( - argstr='-f %d', + argstr="-f %d", usedefault=True, ), inputMaskFile=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), - numberIterations=dict(argstr='-n %d', ), + numberIterations=dict( + argstr="-n %d", + ), outputMaskFile=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, ), - timer=dict(argstr='--timer', ), - verbosity=dict(argstr='-v %d', ), + timer=dict( + argstr="--timer", + ), + verbosity=dict( + argstr="-v %d", + ), ) inputs = Scrubmask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Scrubmask_outputs(): - output_map = dict(outputMaskFile=dict(), ) + output_map = dict( + outputMaskFile=dict( + extensions=None, + ), + ) outputs = Scrubmask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Skullfinder.py b/nipype/interfaces/brainsuite/tests/test_auto_Skullfinder.py index 2191f7b133..3120f00184 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Skullfinder.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Skullfinder.py @@ -1,45 +1,75 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Skullfinder def test_Skullfinder_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bgLabelValue=dict(argstr='--bglabel %d', ), - brainLabelValue=dict(argstr='--brainlabel %d', ), + args=dict( + argstr="%s", + ), + bgLabelValue=dict( + argstr="--bglabel %d", + ), + brainLabelValue=dict( + argstr="--brainlabel %d", + ), environ=dict( nohash=True, usedefault=True, ), inputMRIFile=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), inputMaskFile=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, mandatory=True, ), - lowerThreshold=dict(argstr='-l %d', ), + lowerThreshold=dict( + argstr="-l %d", + ), outputLabelFile=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, ), - performFinalOpening=dict(argstr='--finalOpening', ), - scalpLabelValue=dict(argstr='--scalplabel %d', ), - skullLabelValue=dict(argstr='--skulllabel %d', ), - spaceLabelValue=dict(argstr='--spacelabel %d', ), - surfaceFilePrefix=dict(argstr='-s %s', ), - upperThreshold=dict(argstr='-u %d', ), - verbosity=dict(argstr='-v %d', ), + performFinalOpening=dict( + argstr="--finalOpening", + ), + scalpLabelValue=dict( + argstr="--scalplabel %d", + ), + skullLabelValue=dict( + argstr="--skulllabel %d", + ), + spaceLabelValue=dict( + argstr="--spacelabel %d", + ), + surfaceFilePrefix=dict( + argstr="-s %s", + ), + upperThreshold=dict( + argstr="-u %d", + ), + 
verbosity=dict( + argstr="-v %d", + ), ) inputs = Skullfinder.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Skullfinder_outputs(): - output_map = dict(outputLabelFile=dict(), ) + output_map = dict( + outputLabelFile=dict( + extensions=None, + ), + ) outputs = Skullfinder.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Tca.py b/nipype/interfaces/brainsuite/tests/test_auto_Tca.py index ec2886b42f..eaba6a1d5f 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Tca.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Tca.py @@ -1,42 +1,57 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import Tca def test_Tca_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), foregroundDelta=dict( - argstr='--delta %d', + argstr="--delta %d", usedefault=True, ), inputMaskFile=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), - maxCorrectionSize=dict(argstr='-n %d', ), + maxCorrectionSize=dict( + argstr="-n %d", + ), minCorrectionSize=dict( - argstr='-m %d', + argstr="-m %d", usedefault=True, ), outputMaskFile=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, ), - timer=dict(argstr='--timer', ), - verbosity=dict(argstr='-v %d', ), + timer=dict( + argstr="--timer", + ), + verbosity=dict( + argstr="-v %d", + ), ) inputs = Tca.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tca_outputs(): - output_map = dict(outputMaskFile=dict(), ) + output_map = dict( + outputMaskFile=dict( + extensions=None, + ), + ) outputs = Tca.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_ThicknessPVC.py b/nipype/interfaces/brainsuite/tests/test_auto_ThicknessPVC.py index d614ce335a..8b043c63c7 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_ThicknessPVC.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_ThicknessPVC.py @@ -1,17 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsuite import ThicknessPVC def test_ThicknessPVC_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), subjectFilePrefix=dict( - argstr='%s', + argstr="%s", mandatory=True, ), ) diff --git a/nipype/interfaces/bru2nii.py b/nipype/interfaces/bru2nii.py index 9c6a5c6acf..746af18f1a 100644 --- a/nipype/interfaces/bru2nii.py +++ b/nipype/interfaces/bru2nii.py @@ -1,36 +1,39 @@ -# -*- coding: utf-8 -*- """The bru2nii module provides basic functions for dicom conversion """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os -from .base import (CommandLine, CommandLineInputSpec, traits, TraitedSpec, - isdefined, File, Directory) +from .base import ( + CommandLine, + CommandLineInputSpec, + traits, + TraitedSpec, + isdefined, + File, + Directory, +) class Bru2InputSpec(CommandLineInputSpec): input_dir = Directory( - desc="Input Directory", - exists=True, - mandatory=True, - position=-1, - argstr="%s") + desc="Input Directory", exists=True, mandatory=True, position=-1, 
argstr="%s" + ) actual_size = traits.Bool( - argstr='-a', - desc="Keep actual size - otherwise x10 scale so animals match human.") + argstr="-a", + desc="Keep actual size - otherwise x10 scale so animals match human.", + ) force_conversion = traits.Bool( - argstr='-f', - desc="Force conversion of localizers images (multiple slice " - "orientations).") - compress = traits.Bool( - argstr='-z', desc='gz compress images (".nii.gz").') + argstr="-f", + desc="Force conversion of localizers images (multiple slice orientations).", + ) + compress = traits.Bool(argstr="-z", desc='gz compress images (".nii.gz").') append_protocol_name = traits.Bool( - argstr='-p', desc="Append protocol name to output filename.") + argstr="-p", desc="Append protocol name to output filename." + ) output_filename = traits.Str( argstr="-o %s", desc='Output filename (".nii" will be appended, or ".nii.gz" if the "-z" compress option is selected)', - genfile=True) + genfile=True, + ) class Bru2OutputSpec(TraitedSpec): @@ -49,6 +52,7 @@ class Bru2(CommandLine): >>> converter.cmdline # doctest: +ELLIPSIS 'Bru2 -o .../data/brukerdir brukerdir' """ + input_spec = Bru2InputSpec output_spec = Bru2OutputSpec _cmd = "Bru2" @@ -58,7 +62,7 @@ def _list_outputs(self): if isdefined(self.inputs.output_filename): output_filename1 = os.path.abspath(self.inputs.output_filename) else: - output_filename1 = self._gen_filename('output_filename') + output_filename1 = self._gen_filename("output_filename") if self.inputs.compress: outputs["nii_file"] = output_filename1 + ".nii.gz" else: @@ -66,8 +70,8 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name == 'output_filename': + if name == "output_filename": outfile = os.path.join( - os.getcwd(), - os.path.basename(os.path.normpath(self.inputs.input_dir))) + os.getcwd(), os.path.basename(os.path.normpath(self.inputs.input_dir)) + ) return outfile diff --git a/nipype/interfaces/c3.py b/nipype/interfaces/c3.py index 115804cc3f..3871120d2c 100644 --- a/nipype/interfaces/c3.py +++ b/nipype/interfaces/c3.py @@ -1,15 +1,19 @@ -# -*- coding: utf-8 -*- -"""The ants module provides basic functions for interfacing with ants - functions. -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) +"""Convert3D is a command-line tool for converting 3D images between common file formats.""" + import os from glob import glob -from .base import (CommandLineInputSpec, traits, TraitedSpec, File, - SEMLikeCommandLine, InputMultiPath, OutputMultiPath, - CommandLine, isdefined) +from .base import ( + CommandLineInputSpec, + traits, + TraitedSpec, + File, + SEMLikeCommandLine, + InputMultiPath, + OutputMultiPath, + CommandLine, + isdefined, +) from ..utils.filemanip import split_filename from .. 
import logging @@ -18,16 +22,17 @@ class C3dAffineToolInputSpec(CommandLineInputSpec): reference_file = File(exists=True, argstr="-ref %s", position=1) - source_file = File(exists=True, argstr='-src %s', position=2) - transform_file = File(exists=True, argstr='%s', position=3) + source_file = File(exists=True, argstr="-src %s", position=2) + transform_file = File(exists=True, argstr="%s", position=3) itk_transform = traits.Either( traits.Bool, File(), hash_files=False, desc="Export ITK transform.", argstr="-oitk %s", - position=5) - fsl2ras = traits.Bool(argstr='-fsl2ras', position=4) + position=5, + ) + fsl2ras = traits.Bool(argstr="-fsl2ras", position=4) class C3dAffineToolOutputSpec(TraitedSpec): @@ -48,11 +53,12 @@ class C3dAffineTool(SEMLikeCommandLine): >>> c3.cmdline 'c3d_affine_tool -src cmatrix.mat -fsl2ras -oitk affine.txt' """ + input_spec = C3dAffineToolInputSpec output_spec = C3dAffineToolOutputSpec - _cmd = 'c3d_affine_tool' - _outputs_filenames = {'itk_transform': 'affine.txt'} + _cmd = "c3d_affine_tool" + _outputs_filenames = {"itk_transform": "affine.txt"} class C3dInputSpec(CommandLineInputSpec): @@ -61,62 +67,97 @@ class C3dInputSpec(CommandLineInputSpec): position=1, argstr="%s", mandatory=True, - desc="Input file (wildcard and multiple are supported).") + desc="Input file (wildcard and multiple are supported).", + ) out_file = File( exists=False, argstr="-o %s", position=-1, xor=["out_files"], - desc="Output file of last image on the stack.") + desc="Output file of last image on the stack.", + ) out_files = InputMultiPath( File(), argstr="-oo %s", xor=["out_file"], position=-1, - desc=("Write all images on the convert3d stack as multiple files." - " Supports both list of output files or a pattern for the output" - " filenames (using %d substituion).")) + desc=( + "Write all images on the convert3d stack as multiple files." + " Supports both list of output files or a pattern for the output" + " filenames (using %d substitution)." + ), + ) pix_type = traits.Enum( - "float", "char", "uchar", "short", "ushort", "int", "uint", "double", + "float", + "char", + "uchar", + "short", + "ushort", + "int", + "uint", + "double", argstr="-type %s", - desc=("Specifies the pixel type for the output image. By default," - " images are written in floating point (float) format")) + desc=( + "Specifies the pixel type for the output image. By default," + " images are written in floating point (float) format" + ), + ) scale = traits.Either( - traits.Int(), traits.Float(), + traits.Int(), + traits.Float(), argstr="-scale %s", - desc=("Multiplies the intensity of each voxel in the last image on the" - " stack by the given factor.")) + desc=( + "Multiplies the intensity of each voxel in the last image on the" + " stack by the given factor." + ), + ) shift = traits.Either( - traits.Int(), traits.Float(), + traits.Int(), + traits.Float(), argstr="-shift %s", - desc='Adds the given constant to every voxel.') + desc="Adds the given constant to every voxel.", + ) interp = traits.Enum( - "Linear", "NearestNeighbor", "Cubic", "Sinc", "Gaussian", + "Linear", + "NearestNeighbor", + "Cubic", + "Sinc", + "Gaussian", argstr="-interpolation %s", - desc=("Specifies the interpolation used with -resample and other" - " commands. Default is Linear.")) + desc=( + "Specifies the interpolation used with -resample and other" + " commands. Default is Linear." 
+ ), + ) resample = traits.Str( argstr="-resample %s", - desc=("Resamples the image, keeping the bounding box the same, but" - " changing the number of voxels in the image. The dimensions can be" - " specified as a percentage, for example to double the number of voxels" - " in each direction. The -interpolation flag affects how sampling is" - " performed.")) + desc=( + "Resamples the image, keeping the bounding box the same, but" + " changing the number of voxels in the image. The dimensions can be" + " specified as a percentage, for example to double the number of voxels" + " in each direction. The -interpolation flag affects how sampling is" + " performed." + ), + ) smooth = traits.Str( argstr="-smooth %s", - desc=("Applies Gaussian smoothing to the image. The parameter vector" - " specifies the standard deviation of the Gaussian kernel.")) + desc=( + "Applies Gaussian smoothing to the image. The parameter vector" + " specifies the standard deviation of the Gaussian kernel." + ), + ) multicomp_split = traits.Bool( False, usedefault=True, argstr="-mcr", position=0, - desc="Enable reading of multi-component images.") + desc="Enable reading of multi-component images.", + ) is_4d = traits.Bool( False, usedefault=True, - desc=("Changes command to support 4D file operations (default is" - " false).")) + desc=("Changes command to support 4D file operations (default is false)."), + ) class C3dOutputSpec(TraitedSpec): @@ -149,13 +190,14 @@ class C3d(CommandLine): >>> c3.cmdline 'c4d epi.nii -type short -o epi.img' """ + input_spec = C3dInputSpec output_spec = C3dOutputSpec _cmd = "c3d" def __init__(self, **inputs): - super(C3d, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change(self._is_4d, "is_4d") if self.inputs.is_4d: self._is_4d() @@ -165,25 +207,25 @@ def _is_4d(self): def _run_interface(self, runtime): cmd = self._cmd - if (not isdefined(self.inputs.out_file) - and not isdefined(self.inputs.out_files)): + if not isdefined(self.inputs.out_file) and not isdefined(self.inputs.out_files): # Convert3d does not want to override file, by default # so we define a new output file self._gen_outfile() - runtime = super(C3d, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) self._cmd = cmd return runtime def _gen_outfile(self): # if many infiles, raise exception if (len(self.inputs.in_file) > 1) or ("*" in self.inputs.in_file[0]): - raise AttributeError("Multiple in_files found - specify either" - " `out_file` or `out_files`.") + raise AttributeError( + "Multiple in_files found - specify either `out_file` or `out_files`." 
+ ) _, fn, ext = split_filename(self.inputs.in_file[0]) self.inputs.out_file = fn + "_generated" + ext # if generated file will overwrite, raise error if os.path.exists(os.path.abspath(self.inputs.out_file)): - raise IOError("File already found - to overwrite, use `out_file`.") + raise OSError("File already found - to overwrite, use `out_file`.") iflogger.info("Generating `out_file`.") def _list_outputs(self): @@ -194,9 +236,11 @@ def _list_outputs(self): if len(self.inputs.out_files) == 1: _out_files = glob(os.path.abspath(self.inputs.out_files[0])) else: - _out_files = [os.path.abspath(f) for f in self.inputs.out_files - if os.path.exists(os.path.abspath(f))] + _out_files = [ + os.path.abspath(f) + for f in self.inputs.out_files + if os.path.exists(os.path.abspath(f)) + ] outputs["out_files"] = _out_files return outputs - diff --git a/nipype/interfaces/camino/__init__.py b/nipype/interfaces/camino/__init__.py index 0120732ef6..766fa9c906 100644 --- a/nipype/interfaces/camino/__init__.py +++ b/nipype/interfaces/camino/__init__.py @@ -1,18 +1,39 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Camino top level namespace """ from .connectivity import Conmat -from .convert import (Image2Voxel, FSL2Scheme, VtkStreamlines, ProcStreamlines, - TractShredder, DT2NIfTI, NIfTIDT2Camino, AnalyzeHeader, - Shredder) -from .dti import (DTIFit, ModelFit, DTLUTGen, PicoPDFs, Track, TrackPICo, - TrackBayesDirac, TrackDT, TrackBallStick, TrackBootstrap, - TrackBedpostxDeter, TrackBedpostxProba, - ComputeFractionalAnisotropy, ComputeMeanDiffusivity, - ComputeTensorTrace, ComputeEigensystem, DTMetric) -from .calib import (SFPICOCalibData, SFLUTGen) -from .odf import (QBallMX, LinRecon, SFPeaks, MESD) +from .convert import ( + Image2Voxel, + FSL2Scheme, + VtkStreamlines, + ProcStreamlines, + TractShredder, + DT2NIfTI, + NIfTIDT2Camino, + AnalyzeHeader, + Shredder, +) +from .dti import ( + DTIFit, + ModelFit, + DTLUTGen, + PicoPDFs, + Track, + TrackPICo, + TrackBayesDirac, + TrackDT, + TrackBallStick, + TrackBootstrap, + TrackBedpostxDeter, + TrackBedpostxProba, + ComputeFractionalAnisotropy, + ComputeMeanDiffusivity, + ComputeTensorTrace, + ComputeEigensystem, + DTMetric, +) +from .calib import SFPICOCalibData, SFLUTGen +from .odf import QBallMX, LinRecon, SFPeaks, MESD from .utils import ImageStats diff --git a/nipype/interfaces/camino/calib.py b/nipype/interfaces/camino/calib.py index 1921f62651..6345e01cdb 100644 --- a/nipype/interfaces/camino/calib.py +++ b/nipype/interfaces/camino/calib.py @@ -1,99 +1,122 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - import os from ...utils.filemanip import split_filename -from ..base import (traits, TraitedSpec, File, StdOutCommandLine, - StdOutCommandLineInputSpec) +from ..base import ( + traits, + TraitedSpec, + File, + StdOutCommandLine, + StdOutCommandLineInputSpec, +) class SFPICOCalibDataInputSpec(StdOutCommandLineInputSpec): snr = traits.Float( - argstr='-snr %f', - units='NA', - desc=('Specifies the signal-to-noise ratio of the ' - 'non-diffusion-weighted measurements to use in simulations.')) + argstr="-snr %f", + units="NA", + desc=( + "Specifies the signal-to-noise ratio of the " + "non-diffusion-weighted measurements to use in simulations." 
+ ), + ) scheme_file = File( exists=True, - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, - desc='Specifies the scheme file for the diffusion MRI data') + desc="Specifies the scheme file for the diffusion MRI data", + ) info_file = File( - desc='The name to be given to the information output filename.', - argstr='-infooutputfile %s', + desc="The name to be given to the information output filename.", + argstr="-infooutputfile %s", mandatory=True, genfile=True, - hash_files=False) # Genfile and hash_files? + hash_files=False, + ) # Genfile and hash_files? trace = traits.Float( - argstr='-trace %f', - units='NA', - desc='Trace of the diffusion tensor(s) used in the test function.') + argstr="-trace %f", + units="NA", + desc="Trace of the diffusion tensor(s) used in the test function.", + ) onedtfarange = traits.List( traits.Float, - argstr='-onedtfarange %s', + argstr="-onedtfarange %s", minlen=2, maxlen=2, - units='NA', - desc=('Minimum and maximum FA for the single tensor ' - 'synthetic data.')) + units="NA", + desc=("Minimum and maximum FA for the single tensor synthetic data."), + ) onedtfastep = traits.Float( - argstr='-onedtfastep %f', - units='NA', - desc=('FA step size controlling how many steps there are ' - 'between the minimum and maximum FA settings.')) + argstr="-onedtfastep %f", + units="NA", + desc=( + "FA step size controlling how many steps there are " + "between the minimum and maximum FA settings." + ), + ) twodtfarange = traits.List( traits.Float, - argstr='-twodtfarange %s', + argstr="-twodtfarange %s", minlen=2, maxlen=2, - units='NA', - desc=('Minimum and maximum FA for the two tensor ' - 'synthetic data. FA is varied for both tensors ' - 'to give all the different permutations.')) + units="NA", + desc=( + "Minimum and maximum FA for the two tensor " + "synthetic data. FA is varied for both tensors " + "to give all the different permutations." + ), + ) twodtfastep = traits.Float( - argstr='-twodtfastep %f', - units='NA', - desc=('FA step size controlling how many steps there are ' - 'between the minimum and maximum FA settings ' - 'for the two tensor cases.')) + argstr="-twodtfastep %f", + units="NA", + desc=( + "FA step size controlling how many steps there are " + "between the minimum and maximum FA settings " + "for the two tensor cases." + ), + ) twodtanglerange = traits.List( traits.Float, - argstr='-twodtanglerange %s', + argstr="-twodtanglerange %s", minlen=2, maxlen=2, - units='NA', - desc=('Minimum and maximum crossing angles ' - 'between the two fibres.')) + units="NA", + desc=("Minimum and maximum crossing angles between the two fibres."), + ) twodtanglestep = traits.Float( - argstr='-twodtanglestep %f', - units='NA', - desc=('Angle step size controlling how many steps there are ' - 'between the minimum and maximum crossing angles for ' - 'the two tensor cases.')) + argstr="-twodtanglestep %f", + units="NA", + desc=( + "Angle step size controlling how many steps there are " + "between the minimum and maximum crossing angles for " + "the two tensor cases." + ), + ) twodtmixmax = traits.Float( - argstr='-twodtmixmax %f', - units='NA', - desc= - ('Mixing parameter controlling the proportion of one fibre population ' - 'to the other. The minimum mixing parameter is (1 - twodtmixmax).')) + argstr="-twodtmixmax %f", + units="NA", + desc=( + "Mixing parameter controlling the proportion of one fibre population " + "to the other. The minimum mixing parameter is (1 - twodtmixmax)." 
+ ), + ) twodtmixstep = traits.Float( - argstr='-twodtmixstep %f', - units='NA', - desc=('Mixing parameter step size for the two tensor cases. ' - 'Specify how many mixing parameter increments to use.')) + argstr="-twodtmixstep %f", + units="NA", + desc=( + "Mixing parameter step size for the two tensor cases. " + "Specify how many mixing parameter increments to use." + ), + ) seed = traits.Float( - argstr='-seed %f', - units='NA', - desc= - 'Specifies the random seed to use for noise generation in simulation trials.' + argstr="-seed %f", + units="NA", + desc="Specifies the random seed to use for noise generation in simulation trials.", ) class SFPICOCalibDataOutputSpec(TraitedSpec): - PICOCalib = File(exists=True, desc='Calibration dataset') - calib_info = File(exists=True, desc='Calibration dataset') + PICOCalib = File(exists=True, desc="Calibration dataset") + calib_info = File(exists=True, desc="Calibration dataset") class SFPICOCalibData(StdOutCommandLine): @@ -109,8 +132,8 @@ class SFPICOCalibData(StdOutCommandLine): which stores information about the datafile, is generated along with the datafile. - Example 1 - --------- + Examples + -------- To create a calibration dataset using the default settings >>> import nipype.interfaces.camino as cam @@ -126,8 +149,6 @@ class SFPICOCalibData(StdOutCommandLine): data produced can be varied by specifying the ranges and steps of the parameters for both the one and two fibre datasets used. - Example 2 - --------- To create a custom calibration dataset >>> import nipype.interfaces.camino as cam @@ -147,89 +168,107 @@ class SFPICOCalibData(StdOutCommandLine): simulate the one fibre cases and 72,912 voxels simulate the various two fibre cases. However, care should be taken to ensure that enough data is generated for calculating the LUT. # doctest: +SKIP + """ - _cmd = 'sfpicocalibdata' + + _cmd = "sfpicocalibdata" input_spec = SFPICOCalibDataInputSpec output_spec = SFPICOCalibDataOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['PICOCalib'] = os.path.abspath(self._gen_outfilename()) - outputs['calib_info'] = os.path.abspath(self.inputs.info_file) + outputs["PICOCalib"] = os.path.abspath(self._gen_outfilename()) + outputs["calib_info"] = os.path.abspath(self.inputs.info_file) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) - return name + '_PICOCalib.Bfloat' + return name + "_PICOCalib.Bfloat" class SFLUTGenInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, - desc='Voxel-order data of the spherical functions peaks.') + desc="Voxel-order data of the spherical functions peaks.", + ) info_file = File( - argstr='-infofile %s', + argstr="-infofile %s", mandatory=True, - desc=('The Info file that corresponds to the calibration ' - 'datafile used in the reconstruction.')) + desc=( + "The Info file that corresponds to the calibration " + "datafile used in the reconstruction." + ), + ) outputstem = traits.Str( - 'LUT', - argstr='-outputstem %s', - desc= - ('Define the name of the generated luts. The form of the filenames will be ' - '[outputstem]_oneFibreSurfaceCoeffs.Bdouble and ' - '[outputstem]_twoFibreSurfaceCoeffs.Bdouble'), - usedefault=True) + "LUT", + argstr="-outputstem %s", + desc=( + "Define the name of the generated luts. 
The form of the filenames will be " + "[outputstem]_oneFibreSurfaceCoeffs.Bdouble and " + "[outputstem]_twoFibreSurfaceCoeffs.Bdouble" + ), + usedefault=True, + ) pdf = traits.Enum( - 'bingham', - 'watson', - argstr='-pdf %s', - desc= - ('Sets the distribution to use for the calibration. The default is the Bingham ' - 'distribution, which allows elliptical probability density contours. ' - 'Currently supported options are: ' - ' bingham - The Bingham distribution, which allows elliptical probability ' - ' density contours. ' - ' watson - The Watson distribution. This distribution is rotationally symmetric.' - ), - usedefault=True) + "bingham", + "watson", + argstr="-pdf %s", + desc="""\ +Sets the distribution to use for the calibration. The default is the Bingham +distribution, which allows elliptical probability density contours. +Currently supported options are: + + * bingham -- The Bingham distribution, which allows elliptical probability + density contours. + * watson -- The Watson distribution. This distribution is rotationally symmetric. + +""", + usedefault=True, + ) binincsize = traits.Int( - argstr='-binincsize %d', - units='NA', - desc= - ('Sets the size of the bins. In the case of 2D histograms such as the ' - 'Bingham, the bins are always square. Default is 1.')) + argstr="-binincsize %d", + units="NA", + desc=( + "Sets the size of the bins. In the case of 2D histograms such as the " + "Bingham, the bins are always square. Default is 1." + ), + ) minvectsperbin = traits.Int( - argstr='-minvectsperbin %d', - units='NA', - desc= - ('Specifies the minimum number of fibre-orientation estimates a bin ' - 'must contain before it is used in the lut line/surface generation. ' - 'Default is 50. If you get the error "no fibre-orientation estimates ' - 'in histogram!", the calibration data set is too small to get enough ' - 'samples in any of the histogram bins. You can decrease the minimum ' - 'number per bin to get things running in quick tests, but the sta- ' - 'tistics will not be reliable and for serious applications, you need ' - 'to increase the size of the calibration data set until the error goes.' - )) + argstr="-minvectsperbin %d", + units="NA", + desc=( + "Specifies the minimum number of fibre-orientation estimates a bin " + "must contain before it is used in the lut line/surface generation. " + 'Default is 50. If you get the error "no fibre-orientation estimates ' + 'in histogram!", the calibration data set is too small to get enough ' + "samples in any of the histogram bins. You can decrease the minimum " + "number per bin to get things running in quick tests, but the sta- " + "tistics will not be reliable and for serious applications, you need " + "to increase the size of the calibration data set until the error goes." + ), + ) directmap = traits.Bool( - argstr='-directmap', - desc= - ('Use direct mapping between the eigenvalues and the distribution parameters ' - 'instead of the log of the eigenvalues.')) + argstr="-directmap", + desc=( + "Use direct mapping between the eigenvalues and the distribution parameters " + "instead of the log of the eigenvalues." + ), + ) order = traits.Int( - argstr='-order %d', - units='NA', - desc= - ('The order of the polynomial fitting the surface. Order 1 is linear. ' - 'Order 2 (default) is quadratic.')) + argstr="-order %d", + units="NA", + desc=( + "The order of the polynomial fitting the surface. Order 1 is linear. " + "Order 2 (default) is quadratic." 
+ ), + ) class SFLUTGenOutputSpec(TraitedSpec): - lut_one_fibre = File(exists=True, desc='PICo lut for one-fibre model') - lut_two_fibres = File(exists=True, desc='PICo lut for two-fibre model') + lut_one_fibre = File(exists=True, desc="PICo lut for one-fibre model") + lut_two_fibres = File(exists=True, desc="PICo lut for two-fibre model") class SFLUTGen(StdOutCommandLine): @@ -246,9 +285,9 @@ class SFLUTGen(StdOutCommandLine): This utility uses calibration data generated from SFPICOCalibData and peak information created by SFPeaks. - The utility outputs two lut's, *_oneFibreSurfaceCoeffs.Bdouble and - *_twoFibreSurfaceCoeffs.Bdouble. Each of these files contains big- - endian doubles as standard. The format of the output is: :: + The utility outputs two lut's, ``*_oneFibreSurfaceCoeffs.Bdouble`` and + ``*_twoFibreSurfaceCoeffs.Bdouble``. Each of these files contains big-endian doubles + as standard. The format of the output is:: dimensions (1 for Watson, 2 for Bingham) order (the order of the polynomial) @@ -258,12 +297,12 @@ class SFLUTGen(StdOutCommandLine): coefficient_N In the case of the Watson, there is a single set of coefficients, - which are ordered: :: + which are ordered:: constant, x, x^2, ..., x^order. In the case of the Bingham, there are two sets of coefficients (one - for each surface), ordered so that: :: + for each surface), ordered so that:: for j = 1 to order for k = 1 to order @@ -271,7 +310,7 @@ class SFLUTGen(StdOutCommandLine): where j+k < order Example - --------- + ------- To create a calibration dataset using the default settings >>> import nipype.interfaces.camino as cam @@ -279,18 +318,22 @@ class SFLUTGen(StdOutCommandLine): >>> lutgen.inputs.in_file = 'QSH_peaks.Bdouble' >>> lutgen.inputs.info_file = 'PICO_calib.info' >>> lutgen.run() # doctest: +SKIP + """ - _cmd = 'sflutgen' + + _cmd = "sflutgen" input_spec = SFLUTGenInputSpec output_spec = SFLUTGenOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs[ - 'lut_one_fibre'] = self.inputs.outputstem + '_oneFibreSurfaceCoeffs.Bdouble' - outputs[ - 'lut_two_fibres'] = self.inputs.outputstem + '_twoFibreSurfaceCoeffs.Bdouble' + outputs["lut_one_fibre"] = ( + self.inputs.outputstem + "_oneFibreSurfaceCoeffs.Bdouble" + ) + outputs["lut_two_fibres"] = ( + self.inputs.outputstem + "_twoFibreSurfaceCoeffs.Bdouble" + ) return outputs def _gen_outfilename(self): - return '/dev/null' + return "/dev/null" diff --git a/nipype/interfaces/camino/connectivity.py b/nipype/interfaces/camino/connectivity.py index 97e400e0f5..3421afced2 100644 --- a/nipype/interfaces/camino/connectivity.py +++ b/nipype/interfaces/camino/connectivity.py @@ -1,46 +1,55 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os from ...utils.filemanip import split_filename -from ..base import (traits, TraitedSpec, File, CommandLine, - CommandLineInputSpec, isdefined) +from ..base import ( + traits, + TraitedSpec, + File, + CommandLine, + CommandLineInputSpec, + isdefined, +) class ConmatInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, - desc='Streamlines as generated by the Track interface') + desc="Streamlines as generated by the Track interface", + ) target_file = File( exists=True, - argstr='-targetfile %s', + argstr="-targetfile %s", mandatory=True, - desc= - 'An image containing targets, as used in ProcStreamlines interface.') + desc="An image containing targets, as used 
in ProcStreamlines interface.", + ) scalar_file = File( exists=True, - argstr='-scalarfile %s', - desc=('Optional scalar file for computing tract-based statistics. ' - 'Must be in the same space as the target file.'), - requires=['tract_stat']) + argstr="-scalarfile %s", + desc=( + "Optional scalar file for computing tract-based statistics. " + "Must be in the same space as the target file." + ), + requires=["tract_stat"], + ) targetname_file = File( exists=True, - argstr='-targetnamefile %s', - desc= - ('Optional names of targets. This file should contain one entry per line, ' - 'with the target intensity followed by the name, separated by white space. ' - 'For example: ' - ' 1 some_brain_region ' - ' 2 some_other_region ' - 'These names will be used in the output. The names themselves should not ' - 'contain spaces or commas. The labels may be in any order but the output ' - 'matrices will be ordered by label intensity.')) + argstr="-targetnamefile %s", + desc=( + "Optional names of targets. This file should contain one entry per line, " + "with the target intensity followed by the name, separated by white space. " + "For example: " + " 1 some_brain_region " + " 2 some_other_region " + "These names will be used in the output. The names themselves should not " + "contain spaces or commas. The labels may be in any order but the output " + "matrices will be ordered by label intensity." + ), + ) tract_stat = traits.Enum( "mean", @@ -49,31 +58,38 @@ class ConmatInputSpec(CommandLineInputSpec): "sum", "median", "var", - argstr='-tractstat %s', - units='NA', + argstr="-tractstat %s", + units="NA", desc=("Tract statistic to use. See TractStats for other options."), - requires=['scalar_file'], - xor=['tract_prop']) + requires=["scalar_file"], + xor=["tract_prop"], + ) tract_prop = traits.Enum( "length", "endpointsep", - argstr='-tractstat %s', - units='NA', - xor=['tract_stat'], - desc=('Tract property average to compute in the connectivity matrix. ' - 'See TractStats for details.')) + argstr="-tractstat %s", + units="NA", + xor=["tract_stat"], + desc=( + "Tract property average to compute in the connectivity matrix. " + "See TractStats for details." + ), + ) output_root = File( - argstr='-outputroot %s', + argstr="-outputroot %s", genfile=True, - desc=('filename root prepended onto the names of the output files. ' - 'The extension will be determined from the input.')) + desc=( + "filename root prepended onto the names of the output files. " + "The extension will be determined from the input." + ), + ) class ConmatOutputSpec(TraitedSpec): - conmat_sc = File(exists=True, desc='Connectivity matrix in CSV file.') - conmat_ts = File(desc='Tract statistics in CSV file.') + conmat_sc = File(exists=True, desc="Connectivity matrix in CSV file.") + conmat_ts = File(desc="Tract statistics in CSV file.") class Conmat(CommandLine): @@ -116,7 +132,7 @@ class Conmat(CommandLine): Such fibers will add to the diagonal elements of the matrix. To remove these entries, run procstreamlines with -endpointfile before running conmat. - If the seed point is inside a labled region, it counts as one end of the + If the seed point is inside a labeled region, it counts as one end of the connection. So :: ----[SEED inside A]---------B @@ -129,8 +145,8 @@ class Conmat(CommandLine): In all cases, distance to the seed point is defined along the streamline path. - Example 1 - --------- + Examples + -------- To create a standard connectivity matrix based on streamline counts. 
>>> import nipype.interfaces.camino as cam @@ -139,8 +155,6 @@ class Conmat(CommandLine): >>> conmat.inputs.target_file = 'atlas.nii.gz' >>> conmat.run() # doctest: +SKIP - Example 1 - --------- To create a standard connectivity matrix and mean tractwise FA statistics. >>> import nipype.interfaces.camino as cam @@ -150,16 +164,18 @@ class Conmat(CommandLine): >>> conmat.inputs.scalar_file = 'fa.nii.gz' >>> conmat.tract_stat = 'mean' >>> conmat.run() # doctest: +SKIP + """ - _cmd = 'conmat' + + _cmd = "conmat" input_spec = ConmatInputSpec output_spec = ConmatOutputSpec def _list_outputs(self): outputs = self.output_spec().get() output_root = self._gen_outputroot() - outputs['conmat_sc'] = os.path.abspath(output_root + "sc.csv") - outputs['conmat_ts'] = os.path.abspath(output_root + "ts.csv") + outputs["conmat_sc"] = os.path.abspath(output_root + "sc.csv") + outputs["conmat_ts"] = os.path.abspath(output_root + "ts.csv") return outputs def _gen_outfilename(self): @@ -168,11 +184,11 @@ def _gen_outfilename(self): def _gen_outputroot(self): output_root = self.inputs.output_root if not isdefined(output_root): - output_root = self._gen_filename('output_root') + output_root = self._gen_filename("output_root") return output_root def _gen_filename(self, name): - if name == 'output_root': + if name == "output_root": _, filename, _ = split_filename(self.inputs.in_file) filename = filename + "_" return filename diff --git a/nipype/interfaces/camino/convert.py b/nipype/interfaces/camino/convert.py index ee2ae2eb82..4dfd65375e 100644 --- a/nipype/interfaces/camino/convert.py +++ b/nipype/interfaces/camino/convert.py @@ -1,23 +1,28 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - import os import glob from ...utils.filemanip import split_filename -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File, StdOutCommandLine, OutputMultiPath, - StdOutCommandLineInputSpec, isdefined) +from ..base import ( + CommandLineInputSpec, + CommandLine, + traits, + TraitedSpec, + File, + StdOutCommandLine, + OutputMultiPath, + StdOutCommandLineInputSpec, + isdefined, +) class Image2VoxelInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='-4dimage %s', + argstr="-4dimage %s", mandatory=True, position=1, - desc='4d image file') + desc="4d image file", + ) # TODO convert list of files on the fly # imagelist = File(exists=True, argstr='-imagelist %s', # mandatory=True, position=1, @@ -33,16 +38,15 @@ class Image2VoxelInputSpec(StdOutCommandLineInputSpec): "int", "long", "double", - argstr='-outputdatatype %s', + argstr="-outputdatatype %s", position=2, - desc= - '"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', - usedefault=True) + desc='"i.e. Bfloat". 
Can be "char", "short", "int", "long", "float" or "double"', + usedefault=True, + ) class Image2VoxelOutputSpec(TraitedSpec): - voxel_order = File( - exists=True, desc='path/name of 4D volume in voxel order') + voxel_order = File(exists=True, desc="path/name of 4D volume in voxel order") class Image2Voxel(StdOutCommandLine): @@ -61,71 +65,76 @@ class Image2Voxel(StdOutCommandLine): >>> img2vox.inputs.in_file = '4d_dwi.nii' >>> img2vox.run() # doctest: +SKIP """ - _cmd = 'image2voxel' + + _cmd = "image2voxel" input_spec = Image2VoxelInputSpec output_spec = Image2VoxelOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['voxel_order'] = os.path.abspath(self._gen_outfilename()) + outputs["voxel_order"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '.B' + self.inputs.out_type + return name + ".B" + self.inputs.out_type class FSL2SchemeInputSpec(StdOutCommandLineInputSpec): bvec_file = File( exists=True, - argstr='-bvecfile %s', + argstr="-bvecfile %s", mandatory=True, position=1, - desc='b vector file') + desc="b vector file", + ) bval_file = File( exists=True, - argstr='-bvalfile %s', + argstr="-bvalfile %s", mandatory=True, position=2, - desc='b value file') + desc="b value file", + ) numscans = traits.Int( - argstr='-numscans %d', - units='NA', - desc= - "Output all measurements numerous (n) times, used when combining multiple scans from the same imaging session." + argstr="-numscans %d", + units="NA", + desc="Output all measurements numerous (n) times, used when combining multiple scans from the same imaging session.", ) interleave = traits.Bool( - argstr='-interleave', - desc="Interleave repeated scans. Only used with -numscans.") + argstr="-interleave", + desc="Interleave repeated scans. Only used with -numscans.", + ) bscale = traits.Float( - argstr='-bscale %d', - units='NA', - desc= - "Scaling factor to convert the b-values into different units. Default is 10^6." + argstr="-bscale %d", + units="NA", + desc="Scaling factor to convert the b-values into different units. Default is 10^6.", ) diffusiontime = traits.Float( - argstr='-diffusiontime %f', units='NA', desc="Diffusion time") + argstr="-diffusiontime %f", units="NA", desc="Diffusion time" + ) flipx = traits.Bool( - argstr='-flipx', desc="Negate the x component of all the vectors.") + argstr="-flipx", desc="Negate the x component of all the vectors." + ) flipy = traits.Bool( - argstr='-flipy', desc="Negate the y component of all the vectors.") + argstr="-flipy", desc="Negate the y component of all the vectors." + ) flipz = traits.Bool( - argstr='-flipz', desc="Negate the z component of all the vectors.") + argstr="-flipz", desc="Negate the z component of all the vectors." + ) usegradmod = traits.Bool( - argstr='-usegradmod', - desc= - "Use the gradient magnitude to scale b. This option has no effect if your gradient directions have unit magnitude." + argstr="-usegradmod", + desc="Use the gradient magnitude to scale b. 
This option has no effect if your gradient directions have unit magnitude.", ) class FSL2SchemeOutputSpec(TraitedSpec): - scheme = File(exists=True, desc='Scheme file') + scheme = File(exists=True, desc="Scheme file") class FSL2Scheme(StdOutCommandLine): @@ -142,80 +151,81 @@ class FSL2Scheme(StdOutCommandLine): >>> makescheme.run() # doctest: +SKIP """ - _cmd = 'fsl2scheme' + + _cmd = "fsl2scheme" input_spec = FSL2SchemeInputSpec output_spec = FSL2SchemeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['scheme'] = os.path.abspath(self._gen_outfilename()) + outputs["scheme"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.bvec_file) - return name + '.scheme' + return name + ".scheme" class VtkStreamlinesInputSpec(StdOutCommandLineInputSpec): inputmodel = traits.Enum( - 'raw', - 'voxels', - argstr='-inputmodel %s', - desc='input model type (raw or voxels)', - usedefault=True) + "raw", + "voxels", + argstr="-inputmodel %s", + desc="input model type (raw or voxels)", + usedefault=True, + ) in_file = File( - exists=True, - argstr=' < %s', - mandatory=True, - position=-2, - desc='data file') + exists=True, argstr=" < %s", mandatory=True, position=-2, desc="data file" + ) voxeldims = traits.List( traits.Int, - desc='voxel dimensions in mm', - argstr='-voxeldims %s', + desc="voxel dimensions in mm", + argstr="-voxeldims %s", minlen=3, maxlen=3, position=4, - units='mm') + units="mm", + ) seed_file = File( exists=False, - argstr='-seedfile %s', + argstr="-seedfile %s", position=1, - desc='image containing seed points') + desc="image containing seed points", + ) target_file = File( exists=False, - argstr='-targetfile %s', + argstr="-targetfile %s", position=2, - desc='image containing integer-valued target regions') + desc="image containing integer-valued target regions", + ) scalar_file = File( exists=False, - argstr='-scalarfile %s', + argstr="-scalarfile %s", position=3, - desc='image that is in the same physical space as the tracts') + desc="image that is in the same physical space as the tracts", + ) colourorient = traits.Bool( - argstr='-colourorient', - desc= - "Each point on the streamline is coloured by the local orientation.") + argstr="-colourorient", + desc="Each point on the streamline is coloured by the local orientation.", + ) interpolatescalars = traits.Bool( - argstr='-interpolatescalars', - desc= - "the scalar value at each point on the streamline is calculated by trilinear interpolation" + argstr="-interpolatescalars", + desc="the scalar value at each point on the streamline is calculated by trilinear interpolation", ) interpolate = traits.Bool( - argstr='-interpolate', - desc= - "the scalar value at each point on the streamline is calculated by trilinear interpolation" + argstr="-interpolate", + desc="the scalar value at each point on the streamline is calculated by trilinear interpolation", ) class VtkStreamlinesOutputSpec(TraitedSpec): - vtk = File(exists=True, desc='Streamlines in VTK format') + vtk = File(exists=True, desc="Streamlines in VTK format") class VtkStreamlines(StdOutCommandLine): @@ -231,189 +241,183 @@ class VtkStreamlines(StdOutCommandLine): >>> vtk.inputs.voxeldims = [1,1,1] >>> vtk.run() # doctest: +SKIP """ - _cmd = 'vtkstreamlines' + + _cmd = "vtkstreamlines" input_spec = VtkStreamlinesInputSpec output_spec = VtkStreamlinesOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['vtk'] = 
os.path.abspath(self._gen_outfilename()) + outputs["vtk"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '.vtk' + return name + ".vtk" class ProcStreamlinesInputSpec(StdOutCommandLineInputSpec): inputmodel = traits.Enum( - 'raw', - 'voxels', - argstr='-inputmodel %s', - desc='input model type (raw or voxels)', - usedefault=True) + "raw", + "voxels", + argstr="-inputmodel %s", + desc="input model type (raw or voxels)", + usedefault=True, + ) in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, position=1, - desc='data file') + desc="data file", + ) maxtractpoints = traits.Int( - argstr='-maxtractpoints %d', - units='NA', - desc="maximum number of tract points") + argstr="-maxtractpoints %d", units="NA", desc="maximum number of tract points" + ) mintractpoints = traits.Int( - argstr='-mintractpoints %d', - units='NA', - desc="minimum number of tract points") + argstr="-mintractpoints %d", units="NA", desc="minimum number of tract points" + ) maxtractlength = traits.Int( - argstr='-maxtractlength %d', - units='mm', - desc="maximum length of tracts") + argstr="-maxtractlength %d", units="mm", desc="maximum length of tracts" + ) mintractlength = traits.Int( - argstr='-mintractlength %d', - units='mm', - desc="minimum length of tracts") + argstr="-mintractlength %d", units="mm", desc="minimum length of tracts" + ) datadims = traits.List( traits.Int, - desc='data dimensions in voxels', - argstr='-datadims %s', + desc="data dimensions in voxels", + argstr="-datadims %s", minlen=3, maxlen=3, - units='voxels') + units="voxels", + ) voxeldims = traits.List( traits.Int, - desc='voxel dimensions in mm', - argstr='-voxeldims %s', + desc="voxel dimensions in mm", + argstr="-voxeldims %s", minlen=3, maxlen=3, - units='mm') + units="mm", + ) seedpointmm = traits.List( traits.Int, - desc='The coordinates of a single seed point for tractography in mm', - argstr='-seedpointmm %s', + desc="The coordinates of a single seed point for tractography in mm", + argstr="-seedpointmm %s", minlen=3, maxlen=3, - units='mm') + units="mm", + ) seedpointvox = traits.List( traits.Int, - desc= - 'The coordinates of a single seed point for tractography in voxels', - argstr='-seedpointvox %s', + desc="The coordinates of a single seed point for tractography in voxels", + argstr="-seedpointvox %s", minlen=3, maxlen=3, - units='voxels') + units="voxels", + ) seedfile = File( - exists=False, - argstr='-seedfile %s', - desc='Image Containing Seed Points') + exists=False, argstr="-seedfile %s", desc="Image Containing Seed Points" + ) regionindex = traits.Int( - argstr='-regionindex %d', - units='mm', - desc="index of specific region to process") + argstr="-regionindex %d", units="mm", desc="index of specific region to process" + ) iterations = traits.Float( - argstr='-iterations %d', - units='NA', - desc= - "Number of streamlines generated for each seed. Not required when outputting streamlines, but needed to create PICo images. The default is 1 if the output is streamlines, and 5000 if the output is connection probability images." + argstr="-iterations %d", + units="NA", + desc="Number of streamlines generated for each seed. Not required when outputting streamlines, but needed to create PICo images. 
The default is 1 if the output is streamlines, and 5000 if the output is connection probability images.", ) targetfile = File( - exists=False, - argstr='-targetfile %s', - desc='Image containing target volumes.') + exists=False, argstr="-targetfile %s", desc="Image containing target volumes." + ) allowmultitargets = traits.Bool( - argstr='-allowmultitargets', - desc="Allows streamlines to connect to multiple target volumes.") + argstr="-allowmultitargets", + desc="Allows streamlines to connect to multiple target volumes.", + ) directional = traits.List( traits.Int, - desc= - 'Splits the streamlines at the seed point and computes separate connection probabilities for each segment. Streamline segments are grouped according to their dot product with the vector (X, Y, Z). The ideal vector will be tangential to the streamline trajectory at the seed, such that the streamline projects from the seed along (X, Y, Z) and -(X, Y, Z). However, it is only necessary for the streamline trajectory to not be orthogonal to (X, Y, Z).', - argstr='-directional %s', + desc="Splits the streamlines at the seed point and computes separate connection probabilities for each segment. Streamline segments are grouped according to their dot product with the vector (X, Y, Z). The ideal vector will be tangential to the streamline trajectory at the seed, such that the streamline projects from the seed along (X, Y, Z) and -(X, Y, Z). However, it is only necessary for the streamline trajectory to not be orthogonal to (X, Y, Z).", + argstr="-directional %s", minlen=3, maxlen=3, - units='NA') + units="NA", + ) waypointfile = File( exists=False, - argstr='-waypointfile %s', - desc= - 'Image containing waypoints. Waypoints are defined as regions of the image with the same intensity, where 0 is background and any value > 0 is a waypoint.' + argstr="-waypointfile %s", + desc="Image containing waypoints. Waypoints are defined as regions of the image with the same intensity, where 0 is background and any value > 0 is a waypoint.", ) truncateloops = traits.Bool( - argstr='-truncateloops', - desc= - "This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, it is truncated upon a second entry to the waypoint." + argstr="-truncateloops", + desc="This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, it is truncated upon a second entry to the waypoint.", ) discardloops = traits.Bool( - argstr='-discardloops', - desc= - "This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, the entire streamline is discarded upon a second entry to the waypoint." + argstr="-discardloops", + desc="This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, the entire streamline is discarded upon a second entry to the waypoint.", ) exclusionfile = File( exists=False, - argstr='-exclusionfile %s', - desc= - 'Image containing exclusion ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.' + argstr="-exclusionfile %s", + desc="Image containing exclusion ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.", ) truncateinexclusion = traits.Bool( - argstr='-truncateinexclusion', - desc="Retain segments of a streamline before entry to an exclusion ROI." 
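# Rough pure-Python sketch of the -truncateloops / -discardloops behaviour
# described above (an illustration, not Camino's implementation): a streamline
# may enter the waypoint region once; on a second entry it is either truncated
# at that point or discarded entirely.
def handle_waypoint_loops(points, in_waypoint, mode="truncate"):
    entries, inside = 0, False
    for i, point in enumerate(points):
        now_inside = in_waypoint(point)
        if now_inside and not inside:
            entries += 1
            if entries == 2:
                return points[:i] if mode == "truncate" else []
        inside = now_inside
    return points

# Second entry into the waypoint at index 4: truncation keeps the first four points.
track = [(0, 0, 0), (1, 0, 0), (2, 0, 0), (3, 0, 0), (1, 0, 0)]
assert handle_waypoint_loops(track, lambda p: p[0] == 1) == track[:4]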
+ argstr="-truncateinexclusion", + desc="Retain segments of a streamline before entry to an exclusion ROI.", ) endpointfile = File( exists=False, - argstr='-endpointfile %s', - desc= - 'Image containing endpoint ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.' + argstr="-endpointfile %s", + desc="Image containing endpoint ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.", ) resamplestepsize = traits.Float( - argstr='-resamplestepsize %d', - units='NA', - desc= - "Each point on a streamline is tested for entry into target, exclusion or waypoint volumes. If the length between points on a tract is not much smaller than the voxel length, then streamlines may pass through part of a voxel without being counted. To avoid this, the program resamples streamlines such that the step size is one tenth of the smallest voxel dimension in the image. This increases the size of raw or oogl streamline output and incurs some performance penalty. The resample resolution can be controlled with this option or disabled altogether by passing a negative step size or by passing the -noresample option." + argstr="-resamplestepsize %d", + units="NA", + desc="Each point on a streamline is tested for entry into target, exclusion or waypoint volumes. If the length between points on a tract is not much smaller than the voxel length, then streamlines may pass through part of a voxel without being counted. To avoid this, the program resamples streamlines such that the step size is one tenth of the smallest voxel dimension in the image. This increases the size of raw or oogl streamline output and incurs some performance penalty. The resample resolution can be controlled with this option or disabled altogether by passing a negative step size or by passing the -noresample option.", ) noresample = traits.Bool( - argstr='-noresample', - desc= - "Disables resampling of input streamlines. Resampling is automatically disabled if the input model is voxels." + argstr="-noresample", + desc="Disables resampling of input streamlines. Resampling is automatically disabled if the input model is voxels.", ) outputtracts = traits.Bool( - argstr='-outputtracts', - desc="Output streamlines in raw binary format.") + argstr="-outputtracts", desc="Output streamlines in raw binary format." 
+ ) outputroot = File( exists=False, - argstr='-outputroot %s', - desc='Prepended onto all output file names.') + argstr="-outputroot %s", + desc="Prepended onto all output file names.", + ) - gzip = traits.Bool( - argstr='-gzip', desc="save the output image in gzip format") + gzip = traits.Bool(argstr="-gzip", desc="save the output image in gzip format") outputcp = traits.Bool( - argstr='-outputcp', + argstr="-outputcp", desc="output the connection probability map (Analyze image, float)", - requires=['outputroot', 'seedfile']) + requires=["outputroot", "seedfile"], + ) outputsc = traits.Bool( - argstr='-outputsc', + argstr="-outputsc", desc="output the connection probability map (raw streamlines, int)", - requires=['outputroot', 'seedfile']) + requires=["outputroot", "seedfile"], + ) outputacm = traits.Bool( - argstr='-outputacm', - desc= - "output all tracts in a single connection probability map (Analyze image)", - requires=['outputroot', 'seedfile']) + argstr="-outputacm", + desc="output all tracts in a single connection probability map (Analyze image)", + requires=["outputroot", "seedfile"], + ) outputcbs = traits.Bool( - argstr='-outputcbs', - desc= - "outputs connectivity-based segmentation maps; requires target outputfile", - requires=['outputroot', 'targetfile', 'seedfile']) + argstr="-outputcbs", + desc="outputs connectivity-based segmentation maps; requires target outputfile", + requires=["outputroot", "targetfile", "seedfile"], + ) class ProcStreamlinesOutputSpec(TraitedSpec): - proc = File(exists=True, desc='Processed Streamlines') + proc = File(exists=True, desc="Processed Streamlines") outputroot_files = OutputMultiPath(File(exists=True)) @@ -432,14 +436,19 @@ class ProcStreamlines(StdOutCommandLine): >>> proc.inputs.in_file = 'tract_data.Bfloat' >>> proc.run() # doctest: +SKIP """ - _cmd = 'procstreamlines' + + _cmd = "procstreamlines" input_spec = ProcStreamlinesInputSpec output_spec = ProcStreamlinesOutputSpec def _format_arg(self, name, spec, value): - if name == 'outputroot': + if name == "outputroot": return spec.argstr % self._get_actual_outputroot(value) - return super(ProcStreamlines, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.outputroot_files = [] def _run_interface(self, runtime): outputroot = self.inputs.outputroot @@ -448,55 +457,51 @@ def _run_interface(self, runtime): base, filename, ext = split_filename(actual_outputroot) if not os.path.exists(base): os.makedirs(base) - new_runtime = super(ProcStreamlines, self)._run_interface(runtime) + new_runtime = super()._run_interface(runtime) self.outputroot_files = glob.glob( - os.path.join(os.getcwd(), actual_outputroot + '*')) + os.path.join(os.getcwd(), actual_outputroot + "*") + ) return new_runtime else: - new_runtime = super(ProcStreamlines, self)._run_interface(runtime) + new_runtime = super()._run_interface(runtime) return new_runtime def _get_actual_outputroot(self, outputroot): - actual_outputroot = os.path.join('procstream_outfiles', outputroot) + actual_outputroot = os.path.join("procstream_outfiles", outputroot) return actual_outputroot def _list_outputs(self): outputs = self.output_spec().get() - outputs['proc'] = os.path.abspath(self._gen_outfilename()) - outputs['outputroot_files'] = self.outputroot_files + outputs["proc"] = os.path.abspath(self._gen_outfilename()) + outputs["outputroot_files"] = self.outputroot_files return outputs def _gen_outfilename(self): _, name, _ = 
split_filename(self.inputs.in_file) - return name + '_proc' + return name + "_proc" class TractShredderInputSpec(StdOutCommandLineInputSpec): in_file = File( - exists=True, - argstr='< %s', - mandatory=True, - position=-2, - desc='tract file') + exists=True, argstr="< %s", mandatory=True, position=-2, desc="tract file" + ) offset = traits.Int( - argstr='%d', - units='NA', - desc='initial offset of offset tracts', - position=1) + argstr="%d", units="NA", desc="initial offset of offset tracts", position=1 + ) bunchsize = traits.Int( - argstr='%d', - units='NA', - desc='reads and outputs a group of bunchsize tracts', - position=2) + argstr="%d", + units="NA", + desc="reads and outputs a group of bunchsize tracts", + position=2, + ) - space = traits.Int( - argstr='%d', units='NA', desc='skips space tracts', position=3) + space = traits.Int(argstr="%d", units="NA", desc="skips space tracts", position=3) class TractShredderOutputSpec(TraitedSpec): - shredded = File(exists=True, desc='Shredded tract file') + shredded = File(exists=True, desc="Shredded tract file") class TractShredder(StdOutCommandLine): @@ -520,13 +525,14 @@ class TractShredder(StdOutCommandLine): >>> shred.inputs.space = 2 >>> shred.run() # doctest: +SKIP """ - _cmd = 'tractshredder' + + _cmd = "tractshredder" input_spec = TractShredderInputSpec output_spec = TractShredderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['shredded'] = os.path.abspath(self._gen_outfilename()) + outputs["shredded"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): @@ -537,35 +543,38 @@ def _gen_outfilename(self): class DT2NIfTIInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, position=1, - desc='tract file') + desc="tract file", + ) output_root = File( - argstr='-outputroot %s', + argstr="-outputroot %s", position=2, genfile=True, - desc='filename root prepended onto the names of three output files.') + desc="filename root prepended onto the names of three output files.", + ) header_file = File( exists=True, - argstr='-header %s', + argstr="-header %s", mandatory=True, position=3, - desc=' A Nifti .nii or .hdr file containing the header information') + desc=" A Nifti .nii or .hdr file containing the header information", + ) class DT2NIfTIOutputSpec(TraitedSpec): - dt = File(exists=True, desc='diffusion tensors in NIfTI format') + dt = File(exists=True, desc="diffusion tensors in NIfTI format") exitcode = File( - exists=True, - desc='exit codes from Camino reconstruction in NIfTI format') + exists=True, desc="exit codes from Camino reconstruction in NIfTI format" + ) lns0 = File( - exists=True, - desc='estimated lns0 from Camino reconstruction in NIfTI format') + exists=True, desc="estimated lns0 from Camino reconstruction in NIfTI format" + ) class DT2NIfTI(CommandLine): @@ -574,7 +583,8 @@ class DT2NIfTI(CommandLine): Reads Camino diffusion tensors, and converts them to NIFTI format as three .nii files. 
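# A rough illustration (not Camino's code) of the offset / bunchsize / space
# pattern that tractshredder's inputs above describe: skip `offset` tracts,
# then alternately keep `bunchsize` tracts and skip `space` tracts.
def shred(tracts, offset, bunchsize, space):
    kept, i = [], offset
    while i < len(tracts):
        kept.extend(tracts[i:i + bunchsize])
        i += bunchsize + space
    return kept

assert shred(list(range(10)), offset=1, bunchsize=3, space=2) == [1, 2, 3, 6, 7, 8]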
""" - _cmd = 'dt2nii' + + _cmd = "dt2nii" input_spec = DT2NIfTIInputSpec output_spec = DT2NIfTIOutputSpec @@ -592,11 +602,11 @@ def _gen_outfilename(self): def _gen_outputroot(self): output_root = self.inputs.output_root if not isdefined(output_root): - output_root = self._gen_filename('output_root') + output_root = self._gen_filename("output_root") return output_root def _gen_filename(self, name): - if name == 'output_root': + if name == "output_root": _, filename, _ = split_filename(self.inputs.in_file) filename = filename + "_" return filename @@ -605,56 +615,55 @@ def _gen_filename(self, name): class NIfTIDT2CaminoInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, position=1, - desc= - 'A NIFTI-1 dataset containing diffusion tensors. The tensors are assumed to be ' - 'in lower-triangular order as specified by the NIFTI standard for the storage of ' - 'symmetric matrices. This file should be either a .nii or a .hdr file.' + desc="A NIFTI-1 dataset containing diffusion tensors. The tensors are assumed to be " + "in lower-triangular order as specified by the NIFTI standard for the storage of " + "symmetric matrices. This file should be either a .nii or a .hdr file.", ) s0_file = File( - argstr='-s0 %s', + argstr="-s0 %s", exists=True, - desc= - 'File containing the unweighted signal for each voxel, may be a raw binary ' - 'file (specify type with -inputdatatype) or a supported image file.') + desc="File containing the unweighted signal for each voxel, may be a raw binary " + "file (specify type with -inputdatatype) or a supported image file.", + ) lns0_file = File( - argstr='-lns0 %s', + argstr="-lns0 %s", exists=True, - desc= - 'File containing the log of the unweighted signal for each voxel, may be a ' - 'raw binary file (specify type with -inputdatatype) or a supported image file.' + desc="File containing the log of the unweighted signal for each voxel, may be a " + "raw binary file (specify type with -inputdatatype) or a supported image file.", ) bgmask = File( - argstr='-bgmask %s', + argstr="-bgmask %s", exists=True, - desc= - 'Binary valued brain / background segmentation, may be a raw binary file ' - '(specify type with -maskdatatype) or a supported image file.') + desc="Binary valued brain / background segmentation, may be a raw binary file " + "(specify type with -maskdatatype) or a supported image file.", + ) scaleslope = traits.Float( - argstr='-scaleslope %s', - desc='A value v in the diffusion tensor is scaled to v * s + i. This is ' - 'applied after any scaling specified by the input image. Default is 1.0.' + argstr="-scaleslope %s", + desc="A value v in the diffusion tensor is scaled to v * s + i. This is " + "applied after any scaling specified by the input image. Default is 1.0.", ) scaleinter = traits.Float( - argstr='-scaleinter %s', - desc='A value v in the diffusion tensor is scaled to v * s + i. This is ' - 'applied after any scaling specified by the input image. Default is 0.0.' + argstr="-scaleinter %s", + desc="A value v in the diffusion tensor is scaled to v * s + i. This is " + "applied after any scaling specified by the input image. 
Default is 0.0.", ) uppertriangular = traits.Bool( - argstr='-uppertriangular %s', - desc='Specifies input in upper-triangular (VTK style) order.') + argstr="-uppertriangular %s", + desc="Specifies input in upper-triangular (VTK style) order.", + ) class NIfTIDT2CaminoOutputSpec(TraitedSpec): - out_file = File(desc='diffusion tensors data in Camino format') + out_file = File(desc="diffusion tensors data in Camino format") class NIfTIDT2Camino(CommandLine): @@ -678,17 +687,18 @@ class NIfTIDT2Camino(CommandLine): to use the -uppertriangular option to convert these correctly. """ - _cmd = 'niftidt2camino' + + _cmd = "niftidt2camino" input_spec = NIfTIDT2CaminoInputSpec output_spec = NIfTIDT2CaminoOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs["out_file"] = self._gen_filename('out_file') + outputs["out_file"] = self._gen_filename("out_file") return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": _, filename, _ = split_filename(self.inputs.in_file) return filename @@ -696,33 +706,37 @@ def _gen_filename(self, name): class AnalyzeHeaderInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc="Tensor-fitted data filename", + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", position=2, - desc=('Camino scheme file (b values / vectors, ' - 'see camino.fsl2scheme)')) + desc=("Camino scheme file (b values / vectors, see camino.fsl2scheme)"), + ) readheader = File( exists=True, - argstr='-readheader %s', + argstr="-readheader %s", position=3, - desc=('Reads header information from file and prints to ' - 'stdout. If this option is not specified, then the ' - 'program writes a header based on the other ' - 'arguments.')) + desc=( + "Reads header information from file and prints to " + "stdout. If this option is not specified, then the " + "program writes a header based on the other " + "arguments." + ), + ) printimagedims = File( exists=True, - argstr='-printimagedims %s', + argstr="-printimagedims %s", position=3, - desc=('Prints image data and voxel dimensions as ' - 'Camino arguments and exits.')) + desc=("Prints image data and voxel dimensions as Camino arguments and exits."), + ) # How do we implement both file and enum (for the program) in one argument? # Is this option useful anyway? @@ -732,143 +746,170 @@ class AnalyzeHeaderInputSpec(StdOutCommandLineInputSpec): # vcthreshselect, pdview, track. printprogargs = File( exists=True, - argstr='-printprogargs %s', + argstr="-printprogargs %s", position=3, - desc=('Prints data dimension (and type, if relevant) ' - 'arguments for a specific Camino program, where ' - 'prog is one of shredder, scanner2voxel, ' - 'vcthreshselect, pdview, track.')) + desc=( + "Prints data dimension (and type, if relevant) " + "arguments for a specific Camino program, where " + "prog is one of shredder, scanner2voxel, " + "vcthreshselect, pdview, track." 
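# The -scaleslope / -scaleinter semantics quoted above, made concrete: a stored
# tensor value v is rescaled to v * s + i, after any scaling applied by the
# input image itself.
def rescale(v, scaleslope=1.0, scaleinter=0.0):
    return v * scaleslope + scaleinter

assert rescale(2.0, scaleslope=0.5, scaleinter=1.0) == 2.0  # 2.0 * 0.5 + 1.0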
+ ), + ) printintelbyteorder = File( exists=True, - argstr='-printintelbyteorder %s', + argstr="-printintelbyteorder %s", position=3, - desc=('Prints 1 if the header is little-endian, ' - '0 otherwise.')) + desc=("Prints 1 if the header is little-endian, 0 otherwise."), + ) printbigendian = File( exists=True, - argstr='-printbigendian %s', + argstr="-printbigendian %s", position=3, - desc=('Prints 1 if the header is big-endian, 0 ' - 'otherwise.')) + desc=("Prints 1 if the header is big-endian, 0 otherwise."), + ) initfromheader = File( exists=True, - argstr='-initfromheader %s', + argstr="-initfromheader %s", position=3, - desc=('Reads header information from file and ' - 'intializes a new header with the values read ' - 'from the file. You may replace any ' - 'combination of fields in the new header by ' - 'specifying subsequent options.')) + desc=( + "Reads header information from file and " + "initializes a new header with the values read " + "from the file. You may replace any " + "combination of fields in the new header by " + "specifying subsequent options." + ), + ) data_dims = traits.List( traits.Int, - desc='data dimensions in voxels', - argstr='-datadims %s', + desc="data dimensions in voxels", + argstr="-datadims %s", minlen=3, maxlen=3, - units='voxels') + units="voxels", + ) voxel_dims = traits.List( traits.Float, - desc='voxel dimensions in mm', - argstr='-voxeldims %s', + desc="voxel dimensions in mm", + argstr="-voxeldims %s", minlen=3, maxlen=3, - units='mm') + units="mm", + ) centre = traits.List( traits.Int, - argstr='-centre %s', + argstr="-centre %s", minlen=3, maxlen=3, - units='mm', - desc=('Voxel specifying origin of Talairach ' - 'coordinate system for SPM, default [0 0 0].')) + units="mm", + desc=( + "Voxel specifying origin of Talairach " + "coordinate system for SPM, default [0 0 0]." + ), + ) picoseed = traits.List( traits.Int, - argstr='-picoseed %s', + argstr="-picoseed %s", minlen=3, maxlen=3, - desc=('Voxel specifying the seed (for PICo maps), ' - 'default [0 0 0].'), - units='mm') + desc=("Voxel specifying the seed (for PICo maps), default [0 0 0]."), + units="mm", + ) nimages = traits.Int( - argstr='-nimages %d', - units='NA', - desc="Number of images in the img file. Default 1.") + argstr="-nimages %d", + units="NA", + desc="Number of images in the img file. Default 1.", + ) datatype = traits.Enum( - 'byte', - 'char', - '[u]short', - '[u]int', - 'float', - 'complex', - 'double', - argstr='-datatype %s', - desc=('The char datatype is 8 bit (not the 16 bit ' - 'char of Java), as specified by the Analyze ' - '7.5 standard. The byte, ushort and uint ' - 'types are not part of the Analyze ' - 'specification but are supported by SPM.'), - mandatory=True) + "byte", + "char", + "[u]short", + "[u]int", + "float", + "complex", + "double", + argstr="-datatype %s", + desc=( + "The char datatype is 8 bit (not the 16 bit " + "char of Java), as specified by the Analyze " + "7.5 standard. The byte, ushort and uint " + "types are not part of the Analyze " + "specification but are supported by SPM." + ), + mandatory=True, + ) offset = traits.Int( - argstr='-offset %d', - units='NA', - desc=('According to the Analyze 7.5 standard, this is ' - 'the byte offset in the .img file at which ' - 'voxels start. 
This value can be negative to ' - 'specify that the absolute value is applied for ' - 'every image in the file.')) + argstr="-offset %d", + units="NA", + desc=( + "According to the Analyze 7.5 standard, this is " + "the byte offset in the .img file at which " + "voxels start. This value can be negative to " + "specify that the absolute value is applied for " + "every image in the file." + ), + ) greylevels = traits.List( traits.Int, - argstr='-gl %s', + argstr="-gl %s", minlen=2, maxlen=2, - desc=('Minimum and maximum greylevels. Stored as ' - 'shorts in the header.'), - units='NA') + desc=("Minimum and maximum greylevels. Stored as shorts in the header."), + units="NA", + ) scaleslope = traits.Float( - argstr='-scaleslope %d', - units='NA', - desc=('Intensities in the image are scaled by ' - 'this factor by SPM and MRICro. Default is ' - '1.0.')) + argstr="-scaleslope %d", + units="NA", + desc=( + "Intensities in the image are scaled by " + "this factor by SPM and MRICro. Default is " + "1.0." + ), + ) scaleinter = traits.Float( - argstr='-scaleinter %d', - units='NA', - desc=('Constant to add to the image intensities. ' - 'Used by SPM and MRIcro.')) + argstr="-scaleinter %d", + units="NA", + desc=("Constant to add to the image intensities. Used by SPM and MRIcro."), + ) description = traits.String( - argstr='-description %s', - desc=('Short description - No spaces, max ' - 'length 79 bytes. Will be null ' - 'terminated automatically.')) + argstr="-description %s", + desc=( + "Short description - No spaces, max " + "length 79 bytes. Will be null " + "terminated automatically." + ), + ) intelbyteorder = traits.Bool( - argstr='-intelbyteorder', - desc=("Write header in intel byte order " - "(little-endian).")) + argstr="-intelbyteorder", + desc=("Write header in intel byte order (little-endian)."), + ) networkbyteorder = traits.Bool( - argstr='-networkbyteorder', - desc=("Write header in network byte order " - "(big-endian). This is the default " - "for new headers.")) + argstr="-networkbyteorder", + desc=( + "Write header in network byte order " + "(big-endian). This is the default " + "for new headers." 
+ ), + ) class AnalyzeHeaderOutputSpec(TraitedSpec): - header = File(exists=True, desc='Analyze header') + header = File(exists=True, desc="Analyze header") class AnalyzeHeader(StdOutCommandLine): @@ -895,13 +936,14 @@ class AnalyzeHeader(StdOutCommandLine): >>> hdr.inputs.voxel_dims = [1,1,1] >>> hdr.run() # doctest: +SKIP """ - _cmd = 'analyzeheader' + + _cmd = "analyzeheader" input_spec = AnalyzeHeaderInputSpec output_spec = AnalyzeHeaderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['header'] = os.path.abspath(self._gen_outfilename()) + outputs["header"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): @@ -912,29 +954,28 @@ class ShredderInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=-2, - desc='raw binary data file') + desc="raw binary data file", + ) offset = traits.Int( - argstr='%d', - units='NA', - desc='initial offset of offset bytes', - position=1) + argstr="%d", units="NA", desc="initial offset of offset bytes", position=1 + ) chunksize = traits.Int( - argstr='%d', - units='NA', - desc='reads and outputs a chunk of chunksize bytes', - position=2) + argstr="%d", + units="NA", + desc="reads and outputs a chunk of chunksize bytes", + position=2, + ) - space = traits.Int( - argstr='%d', units='NA', desc='skips space bytes', position=3) + space = traits.Int(argstr="%d", units="NA", desc="skips space bytes", position=3) class ShredderOutputSpec(TraitedSpec): - shredded = File(exists=True, desc='Shredded binary data file') + shredded = File(exists=True, desc="Shredded binary data file") class Shredder(StdOutCommandLine): @@ -960,13 +1001,14 @@ >>> shred.inputs.space = 2 >>> shred.run() # doctest: +SKIP """ - _cmd = 'shredder' + + _cmd = "shredder" input_spec = ShredderInputSpec output_spec = ShredderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['shredded_file'] = os.path.abspath(self._gen_outfilename()) + outputs["shredded"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py index b32b9dc528..ba2131b8ac 100644 --- a/nipype/interfaces/camino/dti.py +++ b/nipype/interfaces/camino/dti.py @@ -1,162 +1,176 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - import os from ...utils.filemanip import split_filename -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File, Directory, StdOutCommandLine, - StdOutCommandLineInputSpec, isdefined, InputMultiPath) +from ..base import ( + CommandLineInputSpec, + CommandLine, + traits, + TraitedSpec, + File, + Directory, + StdOutCommandLine, + StdOutCommandLineInputSpec, + isdefined, + InputMultiPath, +) class DTIFitInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=1, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) bgmask = File( - argstr='-bgmask %s', + argstr="-bgmask %s", exists=True, - desc= - ('Provides the name of a file containing a background mask computed using, ' - 'for example, FSL bet2 program. 
The mask file contains zero in background ' - 'voxels and non-zero in foreground.')) + desc=( + "Provides the name of a file containing a background mask computed using, " + "for example, FSL bet2 program. The mask file contains zero in background " + "voxels and non-zero in foreground." + ), + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", + ) non_linear = traits.Bool( - argstr='-nonlinear', + argstr="-nonlinear", position=3, - desc= - "Use non-linear fitting instead of the default linear regression to the log measurements. " + desc="Use non-linear fitting instead of the default linear regression " + "to the log measurements. ", ) class DTIFitOutputSpec(TraitedSpec): - tensor_fitted = File( - exists=True, desc='path/name of 4D volume in voxel order') + tensor_fitted = File(exists=True, desc="path/name of 4D volume in voxel order") class DTIFit(StdOutCommandLine): """ - Reads diffusion MRI data, acquired using the acquisition scheme detailed in the scheme file, from the data file. + Reads diffusion MRI data, acquired using the acquisition scheme detailed in the scheme file, + from the data file. Use non-linear fitting instead of the default linear regression to the log measurements. - The data file stores the diffusion MRI data in voxel order with the measurements stored in big-endian format and ordered as in the scheme file. - The default input data type is four-byte float. The default output data type is eight-byte double. + The data file stores the diffusion MRI data in voxel order with the measurements stored + in big-endian format and ordered as in the scheme file. + The default input data type is four-byte float. + The default output data type is eight-byte double. See modelfit and camino for the format of the data file and scheme file. The program fits the diffusion tensor to each voxel and outputs the results, in voxel order and as big-endian eight-byte doubles, to the standard output. - The program outputs eight values in each voxel: [exit code, ln(S(0)), D_xx, D_xy, D_xz, D_yy, D_yz, D_zz]. - An exit code of zero indicates no problems. For a list of other exit codes, see modelfit(1). The entry S(0) is an estimate of the signal at q=0. + The program outputs eight values in each voxel: + [exit code, ln(S(0)), D_xx, D_xy, D_xz, D_yy, D_yz, D_zz]. + An exit code of zero indicates no problems. + For a list of other exit codes, see modelfit(1). + The entry S(0) is an estimate of the signal at q=0. 
Example ------- - >>> import nipype.interfaces.camino as cmon >>> fit = cmon.DTIFit() >>> fit.inputs.scheme_file = 'A.scheme' >>> fit.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> fit.run() # doctest: +SKIP + """ - _cmd = 'dtfit' + + _cmd = "dtfit" input_spec = DTIFitInputSpec output_spec = DTIFitOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['tensor_fitted'] = os.path.abspath(self._gen_outfilename()) + outputs["tensor_fitted"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_DT.Bdouble' + return name + "_DT.Bdouble" class DTMetricInputSpec(CommandLineInputSpec): eigen_data = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) metric = traits.Enum( - 'fa', - 'md', - 'rd', - 'l1', - 'l2', - 'l3', - 'tr', - 'ra', - '2dfa', - 'cl', - 'cp', - 'cs', - argstr='-stat %s', + "fa", + "md", + "rd", + "l1", + "l2", + "l3", + "tr", + "ra", + "2dfa", + "cl", + "cp", + "cs", + argstr="-stat %s", mandatory=True, - desc= - ('Specifies the metric to compute. Possible choices are: ' - '"fa", "md", "rd", "l1", "l2", "l3", "tr", "ra", "2dfa", "cl", "cp" or "cs".' - )) + desc="Specifies the metric to compute.", + ) inputdatatype = traits.Enum( - 'double', - 'float', - 'long', - 'int', - 'short', - 'char', - argstr='-inputdatatype %s', + "double", + "float", + "long", + "int", + "short", + "char", + argstr="-inputdatatype %s", usedefault=True, - desc=('Specifies the data type of the input data. ' - 'The data type can be any of the following strings: ' - '"char", "short", "int", "long", "float" or "double".' - 'Default is double data type')) + desc="Specifies the data type of the input data.", + ) outputdatatype = traits.Enum( - 'double', - 'float', - 'long', - 'int', - 'short', - 'char', - argstr='-outputdatatype %s', + "double", + "float", + "long", + "int", + "short", + "char", + argstr="-outputdatatype %s", usedefault=True, - desc=('Specifies the data type of the output data. ' - 'The data type can be any of the following strings: ' - '"char", "short", "int", "long", "float" or "double".' - 'Default is double data type')) + desc="Specifies the data type of the output data.", + ) data_header = File( - argstr='-header %s', + argstr="-header %s", exists=True, desc=( - 'A Nifti .nii or .nii.gz file containing the header information. ' - 'Usually this will be the header of the raw data file from which ' - 'the diffusion tensors were reconstructed.')) + "A Nifti .nii or .nii.gz file containing the header information. " + "Usually this will be the header of the raw data file from which " + "the diffusion tensors were reconstructed." + ), + ) outputfile = File( - argstr='-outputfile %s', + argstr="-outputfile %s", genfile=True, - desc= - ('Output name. Output will be a .nii.gz file if data_header is provided and' - 'in voxel order with outputdatatype datatype (default: double) otherwise.' - )) + desc=( + "Output name. Output will be a .nii.gz file if data_header is provided and " + "in voxel order with outputdatatype datatype (default: double) otherwise." 
+ ), + ) class DTMetricOutputSpec(TraitedSpec): metric_stats = File( - exists=True, desc='Diffusion Tensor statistics of the chosen metric') + exists=True, desc="Diffusion Tensor statistics of the chosen metric" + ) class DTMetric(CommandLine): @@ -193,14 +207,16 @@ class DTMetric(CommandLine): >>> dtmetric.inputs.metric = 'cp' >>> dtmetric.inputs.outputdatatype = 'float' >>> dtmetric.run() # doctest: +SKIP + """ - _cmd = 'dtshape' + + _cmd = "dtshape" input_spec = DTMetricInputSpec output_spec = DTMetricOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['metric_stats'] = os.path.abspath(self._gen_outfilename()) + outputs["metric_stats"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): @@ -209,18 +225,18 @@ def _gen_outfilename(self): def _gen_outputfile(self): outputfile = self.inputs.outputfile if not isdefined(outputfile): - outputfile = self._gen_filename('outputfile') + outputfile = self._gen_filename("outputfile") return outputfile def _gen_filename(self, name): - if name == 'outputfile': + if name == "outputfile": _, name, _ = split_filename(self.inputs.eigen_data) metric = self.inputs.metric datatype = self.inputs.outputdatatype if isdefined(self.inputs.data_header): - filename = name + '_' + metric + '.nii.gz' + filename = name + "_" + metric + ".nii.gz" else: - filename = name + '_' + metric + '.B' + datatype + filename = name + "_" + metric + ".B" + datatype return filename @@ -230,132 +246,151 @@ def _gen_model_options(): # @NoSelf Generate all possible permutations of < multi - tensor > < single - tensor > options """ - single_tensor = [ - 'dt', 'restore', 'algdt', 'nldt_pos', 'nldt', 'ldt_wtd' - ] + single_tensor = ["dt", "restore", "algdt", "nldt_pos", "nldt", "ldt_wtd"] multi_tensor = [ - 'cylcyl', 'cylcyl_eq', 'pospos', 'pospos_eq', 'poscyl', - 'poscyl_eq', 'cylcylcyl', 'cylcylcyl_eq', 'pospospos', - 'pospospos_eq', 'posposcyl', 'posposcyl_eq', 'poscylcyl', - 'poscylcyl_eq' + "cylcyl", + "cylcyl_eq", + "pospos", + "pospos_eq", + "poscyl", + "poscyl_eq", + "cylcylcyl", + "cylcylcyl_eq", + "pospospos", + "pospospos_eq", + "posposcyl", + "posposcyl_eq", + "poscylcyl", + "poscylcyl_eq", ] - other = ['adc', 'ball_stick'] + other = ["adc", "ball_stick"] model_list = single_tensor model_list.extend(other) - model_list.extend([ - multi + ' ' + single for multi in multi_tensor - for single in single_tensor - ]) + model_list.extend( + [multi + " " + single for multi in multi_tensor for single in single_tensor] + ) return model_list model = traits.Enum( _gen_model_options(), - argstr='-model %s', + argstr="-model %s", mandatory=True, - desc='Specifies the model to be fit to the data.') + desc="Specifies the model to be fit to the data.", + ) in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) inputdatatype = traits.Enum( - 'float', - 'char', - 'short', - 'int', - 'long', - 'double', - argstr='-inputdatatype %s', - desc= - 'Specifies the data type of the input file: "char", "short", "int", "long", "float" or "double". The input file must have BIG-ENDIAN ordering. By default, the input type is "float".' + "float", + "char", + "short", + "int", + "long", + "double", + argstr="-inputdatatype %s", + desc="Specifies the data type of the input file. " + "The input file must have BIG-ENDIAN ordering. 
" + "By default, the input type is ``float``.", ) scheme_file = File( exists=True, - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", + ) - outputfile = File( - argstr='-outputfile %s', desc='Filename of the output file.') + outputfile = File(argstr="-outputfile %s", desc="Filename of the output file.") outlier = File( - argstr='-outliermap %s', + argstr="-outliermap %s", exists=True, - desc= - 'Specifies the name of the file to contain the outlier map generated by the RESTORE algorithm.' + desc="Specifies the name of the file to contain the outlier map generated by " + "the RESTORE algorithm.", ) noisemap = File( - argstr='-noisemap %s', + argstr="-noisemap %s", exists=True, - desc= - 'Specifies the name of the file to contain the estimated noise variance on the diffusion-weighted signal, generated by a weighted tensor fit. The data type of this file is big-endian double.' + desc="Specifies the name of the file to contain the estimated noise variance on the " + "diffusion-weighted signal, generated by a weighted tensor fit. " + "The data type of this file is big-endian double.", ) residualmap = File( - argstr='-residualmap %s', + argstr="-residualmap %s", exists=True, - desc= - 'Specifies the name of the file to contain the weighted residual errors after computing a weighted linear tensor fit. One value is produced per measurement, in voxel order.The data type of this file is big-endian double. Images of the residuals for each measurement can be extracted with shredder.' + desc="Specifies the name of the file to contain the weighted residual errors after " + "computing a weighted linear tensor fit. " + "One value is produced per measurement, in voxel order. " + "The data type of this file is big-endian double. " + "Images of the residuals for each measurement can be extracted with shredder.", ) sigma = traits.Float( - argstr='-sigma %G', - desc= - 'Specifies the standard deviation of the noise in the data. Required by the RESTORE algorithm.' + argstr="-sigma %G", + desc="Specifies the standard deviation of the noise in the data. " + "Required by the RESTORE algorithm.", ) bgthresh = traits.Float( - argstr='-bgthresh %G', - desc= - 'Sets a threshold on the average q=0 measurement to separate foreground and background. The program does not process background voxels, but outputs the same number of values in background voxels and foreground voxels. Each value is zero in background voxels apart from the exit code which is -1.' + argstr="-bgthresh %G", + desc="Sets a threshold on the average q=0 measurement to separate " + "foreground and background. The program does not process background voxels, " + "but outputs the same number of values in background voxels and foreground voxels. " + "Each value is zero in background voxels apart from the exit code which is -1.", ) bgmask = File( - argstr='-bgmask %s', + argstr="-bgmask %s", exists=True, - desc= - 'Provides the name of a file containing a background mask computed using, for example, FSL\'s bet2 program. The mask file contains zero in background voxels and non-zero in foreground.' + desc="Provides the name of a file containing a background mask computed using, " + "for example, FSL's bet2 program. 
The mask file contains zero in background voxels " + "and non-zero in foreground.", ) cfthresh = traits.Float( - argstr='-csfthresh %G', - desc= - 'Sets a threshold on the average q=0 measurement to determine which voxels are CSF. This program does not treat CSF voxels any different to other voxels.' + argstr="-csfthresh %G", + desc="Sets a threshold on the average q=0 measurement to determine which voxels " + "are CSF. This program does not treat CSF voxels any different to other voxels.", ) fixedmodq = traits.List( traits.Float, - argstr='-fixedmod %s', + argstr="-fixedmod %s", minlen=4, maxlen=4, - desc= - 'Specifies a spherical acquisition scheme with M measurements with q=0 and N measurements with |q|=Q and diffusion time tau. The N measurements with |q|=Q have unique directions. The program reads in the directions from the files in directory PointSets.' + desc="Specifies a spherical acquisition scheme with M measurements " + "with q=0 and N measurements with :math:`|q|=Q` and diffusion time tau. " + "The N measurements with :math:`|q|=Q` have unique directions. The program reads in " + "the directions from the files in directory PointSets.", ) fixedbvalue = traits.List( traits.Float, - argstr='-fixedbvalue %s', + argstr="-fixedbvalue %s", minlen=3, maxlen=3, - desc= - 'As above, but specifies . The resulting scheme is the same whether you specify b directly or indirectly using -fixedmodq.' + desc="As above, but specifies . The resulting scheme is the same whether " + "you specify b directly or indirectly using -fixedmodq.", ) tau = traits.Float( - argstr='-tau %G', - desc= - 'Sets the diffusion time separately. This overrides the diffusion time specified in a scheme file or by a scheme index for both the acquisition scheme and in the data synthesis.' + argstr="-tau %G", + desc="Sets the diffusion time separately. This overrides the diffusion time " + "specified in a scheme file or by a scheme index for both the acquisition scheme " + "and in the data synthesis.", ) class ModelFitOutputSpec(TraitedSpec): - fitted_data = File( - exists=True, desc='output file of 4D volume in voxel order') + fitted_data = File(exists=True, desc="output file of 4D volume in voxel order") class ModelFit(StdOutCommandLine): @@ -370,106 +405,111 @@ class ModelFit(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> fit = cmon.ModelFit() >>> fit.model = 'dt' >>> fit.inputs.scheme_file = 'A.scheme' >>> fit.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> fit.run() # doctest: +SKIP + """ - _cmd = 'modelfit' + + _cmd = "modelfit" input_spec = ModelFitInputSpec output_spec = ModelFitOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['fitted_data'] = os.path.abspath(self._gen_outfilename()) + outputs["fitted_data"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_fit.Bdouble' + return name + "_fit.Bdouble" class DTLUTGenInputSpec(StdOutCommandLineInputSpec): lrange = traits.List( traits.Float, - desc='Index to one-tensor LUTs. This is the ratio L1/L3 and L2 / L3.' - 'The LUT is square, with half the values calculated (because L2 / L3 cannot be less than L1 / L3 by definition).' - 'The minimum must be >= 1. For comparison, a ratio L1 / L3 = 10 with L2 / L3 = 1 corresponds to an FA of 0.891, ' - 'and L1 / L3 = 15 with L2 / L3 = 1 corresponds to an FA of 0.929. The default range is 1 to 10.', - argstr='-lrange %s', + desc="Index to one-tensor LUTs. 
This is the ratio L1/L3 and L2 / L3." + "The LUT is square, with half the values calculated (because L2 / L3 cannot be " + "less than L1 / L3 by definition)." + "The minimum must be >= 1. For comparison, a ratio L1 / L3 = 10 with L2 / L3 = 1 " + "corresponds to an FA of 0.891, and L1 / L3 = 15 with L2 / L3 = 1 corresponds " + "to an FA of 0.929. The default range is 1 to 10.", + argstr="-lrange %s", minlen=2, maxlen=2, position=1, - units='NA') + units="NA", + ) frange = traits.List( traits.Float, - desc='Index to two-tensor LUTs. This is the fractional anisotropy' - ' of the two tensors. The default is 0.3 to 0.94', - argstr='-frange %s', + desc="Index to two-tensor LUTs. This is the fractional anisotropy" + " of the two tensors. The default is 0.3 to 0.94", + argstr="-frange %s", minlen=2, maxlen=2, position=1, - units='NA') + units="NA", + ) step = traits.Float( - argstr='-step %f', - units='NA', - desc='Distance between points in the LUT.' - 'For example, if lrange is 1 to 10 and the step is 0.1, LUT entries will be computed ' - 'at L1 / L3 = 1, 1.1, 1.2 ... 10.0 and at L2 / L3 = 1.0, 1.1 ... L1 / L3.' - 'For single tensor LUTs, the default step is 0.2, for two-tensor LUTs it is 0.02.' + argstr="-step %f", + units="NA", + desc="Distance between points in the LUT." + "For example, if lrange is 1 to 10 and the step is 0.1, LUT entries will be computed " + "at L1 / L3 = 1, 1.1, 1.2 ... 10.0 and at L2 / L3 = 1.0, 1.1 ... L1 / L3." + "For single tensor LUTs, the default step is 0.2, for two-tensor LUTs it is 0.02.", ) samples = traits.Int( - argstr='-samples %d', - units='NA', - desc= - 'The number of synthetic measurements to generate at each point in the LUT. The default is 2000.' + argstr="-samples %d", + units="NA", + desc="The number of synthetic measurements to generate at each point in the LUT. " + "The default is 2000.", ) snr = traits.Float( - argstr='-snr %f', - units='NA', - desc='The signal to noise ratio of the unweighted (q = 0) measurements.' - 'This should match the SNR (in white matter) of the images that the LUTs are used with.' + argstr="-snr %f", + units="NA", + desc="The signal to noise ratio of the unweighted (q = 0) measurements." + "This should match the SNR (in white matter) of the images that the LUTs are used with.", ) bingham = traits.Bool( - argstr='-bingham', - desc="Compute a LUT for the Bingham PDF. This is the default.") + argstr="-bingham", + desc="Compute a LUT for the Bingham PDF. This is the default.", + ) - acg = traits.Bool(argstr='-acg', desc="Compute a LUT for the ACG PDF.") + acg = traits.Bool(argstr="-acg", desc="Compute a LUT for the ACG PDF.") - watson = traits.Bool( - argstr='-watson', desc="Compute a LUT for the Watson PDF.") + watson = traits.Bool(argstr="-watson", desc="Compute a LUT for the Watson PDF.") inversion = traits.Int( - argstr='-inversion %d', - units='NA', - desc= - 'Index of the inversion to use. The default is 1 (linear single tensor inversion).' + argstr="-inversion %d", + units="NA", + desc="Index of the inversion to use. The default is 1 (linear single tensor inversion).", ) trace = traits.Float( - argstr='-trace %G', - units='NA', - desc= - 'Trace of the diffusion tensor(s) used in the test function in the LUT generation. The default is 2100E-12 m^2 s^-1.' + argstr="-trace %G", + units="NA", + desc="Trace of the diffusion tensor(s) used in the test function in the LUT generation. 
" + "The default is 2100E-12 m^2 s^-1.", ) scheme_file = File( - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, position=2, - desc='The scheme file of the images to be processed using this LUT.') + desc="The scheme file of the images to be processed using this LUT.", + ) class DTLUTGenOutputSpec(TraitedSpec): - dtLUT = File(exists=True, desc='Lookup Table') + dtLUT = File(exists=True, desc="Lookup Table") class DTLUTGen(StdOutCommandLine): @@ -477,9 +517,11 @@ class DTLUTGen(StdOutCommandLine): Calibrates the PDFs for PICo probabilistic tractography. This program needs to be run once for every acquisition scheme. - It outputs a lookup table that is used by the dtpicoparams program to find PICo PDF parameters for an image. - The default single tensor LUT contains parameters of the Bingham distribution and is generated by supplying - a scheme file and an estimated signal to noise in white matter regions of the (q=0) image. + It outputs a lookup table that is used by the dtpicoparams program to find PICo PDF + parameters for an image. + The default single tensor LUT contains parameters of the Bingham distribution and is + generated by supplying a scheme file and an estimated signal to noise in white matter + regions of the (q=0) image. The default inversion is linear (inversion index 1). Advanced users can control several options, including the extent and resolution of the LUT, @@ -487,91 +529,103 @@ class DTLUTGen(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> dtl = cmon.DTLUTGen() >>> dtl.inputs.snr = 16 >>> dtl.inputs.scheme_file = 'A.scheme' >>> dtl.run() # doctest: +SKIP + """ - _cmd = 'dtlutgen' + + _cmd = "dtlutgen" input_spec = DTLUTGenInputSpec output_spec = DTLUTGenOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['dtLUT'] = os.path.abspath(self._gen_outfilename()) + outputs["dtLUT"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) - return name + '.dat' + return name + ".dat" class PicoPDFsInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) inputmodel = traits.Enum( - 'dt', - 'multitensor', - 'pds', - argstr='-inputmodel %s', + "dt", + "multitensor", + "pds", + argstr="-inputmodel %s", position=2, - desc='input model type', - usedefault=True) + desc="input model type", + usedefault=True, + ) luts = InputMultiPath( File(exists=True), - argstr='-luts %s', + argstr="-luts %s", mandatory=True, - desc='Files containing the lookup tables.' - 'For tensor data, one lut must be specified for each type of inversion used in the image (one-tensor, two-tensor, three-tensor).' - 'For pds, the number of LUTs must match -numpds (it is acceptable to use the same LUT several times - see example, above).' - 'These LUTs may be generated with dtlutgen.') + desc="Files containing the lookup tables." + "For tensor data, one lut must be specified for each type of inversion used in the " + "image (one-tensor, two-tensor, three-tensor)." + "For pds, the number of LUTs must match -numpds (it is acceptable to use the same " + "LUT several times - see example, above)." 
+ "These LUTs may be generated with dtlutgen.", + ) pdf = traits.Enum( - 'bingham', - 'watson', - 'acg', - argstr='-pdf %s', + "bingham", + "watson", + "acg", + argstr="-pdf %s", position=4, - desc=' Specifies the PDF to use. There are three choices:' - 'watson - The Watson distribution. This distribution is rotationally symmetric.' - 'bingham - The Bingham distributionn, which allows elliptical probability density contours.' - 'acg - The Angular Central Gaussian distribution, which also allows elliptical probability density contours', - usedefault=True) + desc="""\ +Specifies the PDF to use. There are three choices: + + * watson - The Watson distribution. This distribution is rotationally symmetric. + * bingham - The Bingham distributionn, which allows elliptical probability density contours. + * acg - The Angular Central Gaussian distribution, which also allows elliptical probability + density contours. + +""", + usedefault=True, + ) directmap = traits.Bool( - argstr='-directmap', - desc= - "Only applicable when using pds as the inputmodel. Use direct mapping between the eigenvalues and the distribution parameters instead of the log of the eigenvalues." + argstr="-directmap", + desc="Only applicable when using pds as the inputmodel. Use direct mapping between " + "the eigenvalues and the distribution parameters instead of the log of the eigenvalues.", ) maxcomponents = traits.Int( - argstr='-maxcomponents %d', - units='NA', - desc= - 'The maximum number of tensor components in a voxel (default 2) for multitensor data.' - 'Currently, only the default is supported, but future releases may allow the input of three-tensor data using this option.' + argstr="-maxcomponents %d", + units="NA", + desc="The maximum number of tensor components in a voxel (default 2) for multitensor data." + "Currently, only the default is supported, but future releases may allow the input " + "of three-tensor data using this option.", ) numpds = traits.Int( - argstr='-numpds %d', - units='NA', - desc='The maximum number of PDs in a voxel (default 3) for PD data.' - 'This option determines the size of the input and output voxels.' - 'This means that the data file may be large enough to accomodate three or more PDs,' - 'but does not mean that any of the voxels are classified as containing three or more PDs.' + argstr="-numpds %d", + units="NA", + desc="The maximum number of PDs in a voxel (default 3) for PD data." + "This option determines the size of the input and output voxels." 
+ "This means that the data file may be large enough to accommodate three or more PDs," + "but does not mean that any of the voxels are classified as containing three or more PDs.", ) class PicoPDFsOutputSpec(TraitedSpec): - pdfs = File(exists=True, desc='path/name of 4D volume in voxel order') + pdfs = File(exists=True, desc="path/name of 4D volume in voxel order") class PicoPDFs(StdOutCommandLine): @@ -580,218 +634,245 @@ class PicoPDFs(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> pdf = cmon.PicoPDFs() >>> pdf.inputs.inputmodel = 'dt' >>> pdf.inputs.luts = ['lut_file'] >>> pdf.inputs.in_file = 'voxel-order_data.Bfloat' >>> pdf.run() # doctest: +SKIP + """ - _cmd = 'picopdfs' + + _cmd = "picopdfs" input_spec = PicoPDFsInputSpec output_spec = PicoPDFsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['pdfs'] = os.path.abspath(self._gen_outfilename()) + outputs["pdfs"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_pdfs.Bdouble' + return name + "_pdfs.Bdouble" class TrackInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='-inputfile %s', - position=1, - desc='input data file') + exists=True, argstr="-inputfile %s", position=1, desc="input data file" + ) - seed_file = File( - exists=True, argstr='-seedfile %s', position=2, desc='seed file') + seed_file = File(exists=True, argstr="-seedfile %s", position=2, desc="seed file") inputmodel = traits.Enum( - 'dt', - 'multitensor', - 'sfpeak', - 'pico', - 'repbs_dt', - 'repbs_multitensor', - 'ballstick', - 'wildbs_dt', - 'bayesdirac', - 'bayesdirac_dt', - 'bedpostx_dyad', - 'bedpostx', - argstr='-inputmodel %s', - desc='input model type', - usedefault=True) + "dt", + "multitensor", + "sfpeak", + "pico", + "repbs_dt", + "repbs_multitensor", + "ballstick", + "wildbs_dt", + "bayesdirac", + "bayesdirac_dt", + "bedpostx_dyad", + "bedpostx", + argstr="-inputmodel %s", + desc="input model type", + usedefault=True, + ) tracker = traits.Enum( - 'fact', - 'euler', - 'rk4', - argstr='-tracker %s', - desc=("The tracking algorithm controls streamlines are " - "generated from the data. The choices are: " - "- FACT, which follows the local fibre orientation " - "in each voxel. No interpolation is used." - "- EULER, which uses a fixed step size along the " - "local fibre orientation. With nearest-neighbour " - "interpolation, this method may be very similar to " - "FACT, except that the step size is fixed, whereas " - "FACT steps extend to the boundary of the next voxel " - "(distance variable depending on the entry and exit " - "points to the voxel)." - "- RK4: Fourth-order Runge-Kutta method. The step " - "size is fixed, however the eventual direction of " - "the step is determined by taking and averaging a " - "series of partial steps."), - usedefault=True) + "fact", + "euler", + "rk4", + argstr="-tracker %s", + desc=( + "The tracking algorithm controls streamlines are " + "generated from the data. The choices are: " + "- FACT, which follows the local fibre orientation " + "in each voxel. No interpolation is used." + "- EULER, which uses a fixed step size along the " + "local fibre orientation. With nearest-neighbour " + "interpolation, this method may be very similar to " + "FACT, except that the step size is fixed, whereas " + "FACT steps extend to the boundary of the next voxel " + "(distance variable depending on the entry and exit " + "points to the voxel)." 
+ "- RK4: Fourth-order Runge-Kutta method. The step " + "size is fixed, however the eventual direction of " + "the step is determined by taking and averaging a " + "series of partial steps." + ), + usedefault=True, + ) interpolator = traits.Enum( - 'nn', - 'prob_nn', - 'linear', - argstr='-interpolator %s', - desc=("The interpolation algorithm determines how " - "the fiber orientation(s) are defined at a given " - "continuous point within the input image. " - "Interpolators are only used when the tracking " - "algorithm is not FACT. The choices are: " - "- NN: Nearest-neighbour interpolation, just " - "uses the local voxel data directly." - "- PROB_NN: Probabilistic nearest-neighbor " - "interpolation, similar to the method pro- " - "posed by Behrens et al [Magnetic Resonance " - "in Medicine, 50:1077-1088, 2003]. The data " - "is not interpolated, but at each point we " - "randomly choose one of the 8 voxels sur- " - "rounding a point. The probability of choosing " - "a particular voxel is based on how close the " - "point is to the centre of that voxel." - "- LINEAR: Linear interpolation of the vector " - "field containing the principal directions at " - "each point.")) + "nn", + "prob_nn", + "linear", + argstr="-interpolator %s", + desc=( + "The interpolation algorithm determines how " + "the fiber orientation(s) are defined at a given " + "continuous point within the input image. " + "Interpolators are only used when the tracking " + "algorithm is not FACT. The choices are: " + "- NN: Nearest-neighbour interpolation, just " + "uses the local voxel data directly." + "- PROB_NN: Probabilistic nearest-neighbor " + "interpolation, similar to the method pro- " + "posed by Behrens et al [Magnetic Resonance " + "in Medicine, 50:1077-1088, 2003]. The data " + "is not interpolated, but at each point we " + "randomly choose one of the 8 voxels sur- " + "rounding a point. The probability of choosing " + "a particular voxel is based on how close the " + "point is to the centre of that voxel." + "- LINEAR: Linear interpolation of the vector " + "field containing the principal directions at " + "each point." + ), + ) stepsize = traits.Float( - argstr='-stepsize %f', - requires=['tracker'], - desc=('Step size for EULER and RK4 tracking. ' - 'The default is 1mm.')) + argstr="-stepsize %f", + requires=["tracker"], + desc=("Step size for EULER and RK4 tracking. The default is 1mm."), + ) inputdatatype = traits.Enum( - 'float', 'double', argstr='-inputdatatype %s', desc='input file type') + "float", "double", argstr="-inputdatatype %s", desc="input file type" + ) - gzip = traits.Bool( - argstr='-gzip', desc="save the output image in gzip format") + gzip = traits.Bool(argstr="-gzip", desc="save the output image in gzip format") maxcomponents = traits.Int( - argstr='-maxcomponents %d', - units='NA', - desc=("The maximum number of tensor components in a " - "voxel. This determines the size of the input " - "file and does not say anything about the " - "voxel classification. The default is 2 if " - "the input model is multitensor and 1 if the " - "input model is dt.")) + argstr="-maxcomponents %d", + units="NA", + desc=( + "The maximum number of tensor components in a " + "voxel. This determines the size of the input " + "file and does not say anything about the " + "voxel classification. The default is 2 if " + "the input model is multitensor and 1 if the " + "input model is dt." 
+ ), + ) numpds = traits.Int( - argstr='-numpds %d', - units='NA', - desc=("The maximum number of PDs in a voxel for input " "models sfpeak and pico. The default is 3 for input " "model sfpeak and 1 for input model pico. This option " "determines the size of the voxels in the input file " "and does not affect tracking. For tensor data, use " "the -maxcomponents option.")) + argstr="-numpds %d", + units="NA", + desc=( + "The maximum number of PDs in a voxel for input " + "models sfpeak and pico. The default is 3 for input " + "model sfpeak and 1 for input model pico. This option " + "determines the size of the voxels in the input file " + "and does not affect tracking. For tensor data, use " + "the -maxcomponents option." + ), + ) data_dims = traits.List( traits.Int, - desc='data dimensions in voxels', - argstr='-datadims %s', + desc="data dimensions in voxels", + argstr="-datadims %s", minlen=3, maxlen=3, - units='voxels') + units="voxels", + ) voxel_dims = traits.List( traits.Float, - desc='voxel dimensions in mm', - argstr='-voxeldims %s', + desc="voxel dimensions in mm", + argstr="-voxeldims %s", minlen=3, maxlen=3, - units='mm') + units="mm", + ) ipthresh = traits.Float( - argstr='-ipthresh %f', - desc=('Curvature threshold for tracking, expressed as ' 'the minimum dot product between two streamline ' 'orientations calculated over the length of a ' 'voxel. If the dot product between the previous ' 'and current directions is less than this ' 'threshold, then the streamline terminates. The ' 'default setting will terminate fibres that curve ' 'by more than 80 degrees. Set this to -1.0 to ' 'disable curvature checking completely.')) + argstr="-ipthresh %f", + desc=( + "Curvature threshold for tracking, expressed as " + "the minimum dot product between two streamline " + "orientations calculated over the length of a " + "voxel. If the dot product between the previous " + "and current directions is less than this " + "threshold, then the streamline terminates. The " + "default setting will terminate fibres that curve " + "by more than 80 degrees. Set this to -1.0 to " + "disable curvature checking completely." + ), + ) curvethresh = traits.Float( - argstr='-curvethresh %f', - desc=('Curvature threshold for tracking, expressed ' 'as the maximum angle (in degrees) between ' 'between two streamline orientations ' 'calculated over the length of a voxel. If ' 'the angle is greater than this, then the ' 'streamline terminates.')) + argstr="-curvethresh %f", + desc=( + "Curvature threshold for tracking, expressed " + "as the maximum angle (in degrees) " + "between two streamline orientations " + "calculated over the length of a voxel. If " + "the angle is greater than this, then the " + "streamline terminates." + ), + ) curveinterval = traits.Float( - argstr='-curveinterval %f', - requires=['curvethresh'], - desc=('Interval over which the curvature threshold ' 'should be evaluated, in mm. The default is ' '5mm. When using the default curvature ' 'threshold of 90 degrees, this means that ' 'streamlines will terminate if they curve by ' 'more than 90 degrees over a path length ' 'of 5mm.')) + argstr="-curveinterval %f", + requires=["curvethresh"], + desc=( + "Interval over which the curvature threshold " + "should be evaluated, in mm. The default is " + "5mm. When using the default curvature " + "threshold of 90 degrees, this means that " + "streamlines will terminate if they curve by " + "more than 90 degrees over a path length " + "of 5mm."
+ ), + ) anisthresh = traits.Float( - argstr='-anisthresh %f', - desc=('Terminate fibres that enter a voxel with lower ' 'anisotropy than the threshold.')) + argstr="-anisthresh %f", + desc=( + "Terminate fibres that enter a voxel with lower " + "anisotropy than the threshold." + ), + ) anisfile = File( - argstr='-anisfile %s', + argstr="-anisfile %s", exists=True, - desc=('File containing the anisotropy map. This is required to ' 'apply an anisotropy threshold with non tensor data. If ' 'the map issupplied it is always used, even in tensor ' 'data.')) + desc=( + "File containing the anisotropy map. This is required to " + "apply an anisotropy threshold with non-tensor data. If " + "the map is supplied it is always used, even in tensor " + "data." + ), + ) outputtracts = traits.Enum( - 'float', - 'double', - 'oogl', - argstr='-outputtracts %s', - desc='output tract file type') + "float", + "double", + "oogl", + argstr="-outputtracts %s", + desc="output tract file type", + ) out_file = File( - argstr='-outputfile %s', - position=-1, - genfile=True, - desc='output data file') + argstr="-outputfile %s", position=-1, genfile=True, desc="output data file" + ) output_root = File( exists=False, - argstr='-outputroot %s', + argstr="-outputroot %s", position=-1, - desc='root directory for output') + desc="root directory for output", + ) class TrackOutputSpec(TraitedSpec): - tracked = File( - exists=True, desc='output file containing reconstructed tracts') + tracked = File(exists=True, desc="output file containing reconstructed tracts") class Track(CommandLine): @@ -801,16 +882,16 @@ class Track(CommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> track = cmon.Track() >>> track.inputs.inputmodel = 'dt' >>> track.inputs.in_file = 'data.Bfloat' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ - _cmd = 'track' + _cmd = "track" input_spec = TrackInputSpec output_spec = TrackOutputSpec @@ -821,11 +902,11 @@ def _list_outputs(self): out_file_path = os.path.abspath(self.inputs.out_file) else: out_file_path = os.path.abspath(self._gen_outfilename()) - outputs['tracked'] = out_file_path + outputs["tracked"] = out_file_path return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None @@ -833,10 +914,10 @@ def _gen_filename(self, name): def _gen_outfilename(self): # Currently in_file is only undefined for bedpostx input if not isdefined(self.inputs.in_file): - name = 'bedpostx' + name = "bedpostx" else: _, name, _ = split_filename(self.inputs.in_file) - return name + '_tracked' + return name + "_tracked" class TrackDT(Track): @@ -845,70 +926,73 @@ class TrackDT(Track): Example ------- - >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackDT() >>> track.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "dt" - return super(TrackDT, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class TrackPICoInputSpec(TrackInputSpec): pdf = traits.Enum( - 'bingham', - 'watson', - 'acg', - argstr='-pdf %s', - desc='Specifies the model for PICo parameters. The default is "bingham.' + "bingham", + "watson", + "acg", + argstr="-pdf %s", + desc='Specifies the model for PICo parameters. 
The default is "bingham.', ) iterations = traits.Int( - argstr='-iterations %d', - units='NA', - desc= - "Number of streamlines to generate at each seed point. The default is 5000." + argstr="-iterations %d", + units="NA", + desc="Number of streamlines to generate at each seed point. The default is 5000.", ) class TrackPICo(Track): """ - Performs streamline tractography using the Probabilistic Index of Connectivity (PICo) algorithm + Performs streamline tractography using Probabilistic Index of Connectivity (PICo). Example ------- - >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackPICo() >>> track.inputs.in_file = 'pdfs.Bfloat' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ input_spec = TrackPICoInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "pico" - return super(TrackPICo, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class TrackBedpostxDeterInputSpec(TrackInputSpec): bedpostxdir = Directory( - argstr='-bedpostxdir %s', + argstr="-bedpostxdir %s", mandatory=True, exists=True, - desc=('Directory containing bedpostx output')) + desc=("Directory containing bedpostx output"), + ) min_vol_frac = traits.Float( - argstr='-bedpostxminf %d', - units='NA', - desc=("Zeros out compartments in bedpostx data " - "with a mean volume fraction f of less than " - "min_vol_frac. The default is 0.01.")) + argstr="-bedpostxminf %d", + units="NA", + desc=( + "Zeros out compartments in bedpostx data " + "with a mean volume fraction f of less than " + "min_vol_frac. The default is 0.01." + ), + ) class TrackBedpostxDeter(Track): @@ -927,40 +1011,44 @@ class TrackBedpostxDeter(Track): Example ------- - >>> import nipype.interfaces.camino as cam >>> track = cam.TrackBedpostxDeter() >>> track.inputs.bedpostxdir = 'bedpostxout' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ input_spec = TrackBedpostxDeterInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "bedpostx_dyad" - return super(TrackBedpostxDeter, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class TrackBedpostxProbaInputSpec(TrackInputSpec): bedpostxdir = Directory( - argstr='-bedpostxdir %s', + argstr="-bedpostxdir %s", mandatory=True, exists=True, - desc=('Directory containing bedpostx output')) + desc=("Directory containing bedpostx output"), + ) min_vol_frac = traits.Float( - argstr='-bedpostxminf %d', - units='NA', - desc=("Zeros out compartments in bedpostx data " - "with a mean volume fraction f of less than " - "min_vol_frac. The default is 0.01.")) + argstr="-bedpostxminf %d", + units="NA", + desc=( + "Zeros out compartments in bedpostx data " + "with a mean volume fraction f of less than " + "min_vol_frac. The default is 0.01." + ), + ) iterations = traits.Int( - argstr='-iterations %d', - units='NA', - desc=("Number of streamlines to generate at each " - "seed point. The default is 1.")) + argstr="-iterations %d", + units="NA", + desc="Number of streamlines to generate at each seed point. 
The default is 1.", + ) class TrackBedpostxProba(Track): @@ -983,88 +1071,103 @@ class TrackBedpostxProba(Track): Example ------- - >>> import nipype.interfaces.camino as cam >>> track = cam.TrackBedpostxProba() >>> track.inputs.bedpostxdir = 'bedpostxout' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.inputs.iterations = 100 >>> track.run() # doctest: +SKIP + """ input_spec = TrackBedpostxProbaInputSpec def __init__(self, command=None, **inputs): - inputs["inputmodel"] = "bedpostx_dyad" - return super(TrackBedpostxProba, self).__init__(command, **inputs) + inputs["inputmodel"] = "bedpostx" + return super().__init__(command, **inputs) class TrackBayesDiracInputSpec(TrackInputSpec): scheme_file = File( - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, exists=True, - desc=('The scheme file corresponding to the data being ' - 'processed.')) + desc=("The scheme file corresponding to the data being processed."), + ) iterations = traits.Int( - argstr='-iterations %d', - units='NA', - desc=("Number of streamlines to generate at each " - "seed point. The default is 5000.")) + argstr="-iterations %d", + units="NA", + desc=( + "Number of streamlines to generate at each " + "seed point. The default is 5000." + ), + ) pdf = traits.Enum( - 'bingham', - 'watson', - 'acg', - argstr='-pdf %s', - desc= - 'Specifies the model for PICo priors (not the curvature priors). The default is "bingham".' + "bingham", + "watson", + "acg", + argstr="-pdf %s", + desc="Specifies the model for PICo priors (not the curvature priors). " + "The default is 'bingham'.", ) pointset = traits.Int( - argstr='-pointset %s', - desc= - 'Index to the point set to use for Bayesian likelihood calculation. The index specifies a set of evenly distributed points on the unit sphere, where each point x defines two possible step directions (x or -x) for the streamline path. A larger number indexes a larger point set, which gives higher angular resolution at the expense of computation time. The default is index 1, which gives 1922 points, index 0 gives 1082 points, index 2 gives 3002 points.' + argstr="-pointset %s", + desc="""\ +Index to the point set to use for Bayesian likelihood calculation. The index +specifies a set of evenly distributed points on the unit sphere, where each point x +defines two possible step directions (x or -x) for the streamline path. A larger +number indexes a larger point set, which gives higher angular resolution at the +expense of computation time. The default is index 1, which gives 1922 points, index 0 +gives 1082 points, index 2 gives 3002 points.""", ) datamodel = traits.Enum( - 'cylsymmdt', - 'ballstick', - argstr='-datamodel %s', - desc= - 'Model of the data for Bayesian tracking. The default model is "cylsymmdt", a diffusion tensor with cylindrical symmetry about e_1, ie L1 >= L_2 = L_3. The other model is "ballstick", the partial volume model (see ballstickfit).' + "cylsymmdt", + "ballstick", + argstr="-datamodel %s", + desc="""\ +Model of the data for Bayesian tracking. The default model is "cylsymmdt", a diffusion +tensor with cylindrical symmetry about e_1, ie L1 >= L_2 = L_3. The other model is +"ballstick", the partial volume model (see ballstickfit).""", ) curvepriork = traits.Float( - argstr='-curvepriork %G', - desc= - 'Concentration parameter for the prior distribution on fibre orientations given the fibre orientation at the previous step. Larger values of k make curvature less likely.' 
+ argstr="-curvepriork %G", + desc="""\ +Concentration parameter for the prior distribution on fibre orientations given the fibre +orientation at the previous step. Larger values of k make curvature less likely.""", ) curvepriorg = traits.Float( - argstr='-curvepriorg %G', - desc= - 'Concentration parameter for the prior distribution on fibre orientations given the fibre orientation at the previous step. Larger values of g make curvature less likely.' + argstr="-curvepriorg %G", + desc="""\ +Concentration parameter for the prior distribution on fibre orientations given +the fibre orientation at the previous step. Larger values of g make curvature less likely.""", ) extpriorfile = File( exists=True, - argstr='-extpriorfile %s', - desc= - 'Path to a PICo image produced by picopdfs. The PDF in each voxel is used as a prior for the fibre orientation in Bayesian tracking. The prior image must be in the same space as the diffusion data.' + argstr="-extpriorfile %s", + desc="""\ +Path to a PICo image produced by picopdfs. The PDF in each voxel is used as a prior for +the fibre orientation in Bayesian tracking. The prior image must be in the same space +as the diffusion data.""", ) extpriordatatype = traits.Enum( - 'float', - 'double', - argstr='-extpriordatatype %s', - desc='Datatype of the prior image. The default is "double".') + "float", + "double", + argstr="-extpriordatatype %s", + desc='Datatype of the prior image. The default is "double".', + ) class TrackBayesDirac(Track): """ - Performs streamline tractography using a Bayesian tracking with Dirac priors + Perform streamline tractography using a Bayesian tracking with Dirac priors. Example ------- @@ -1075,13 +1178,14 @@ class TrackBayesDirac(Track): >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.inputs.scheme_file = 'bvecs.scheme' >>> track.run() # doctest: +SKIP + """ input_spec = TrackBayesDiracInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "bayesdirac" - return super(TrackBayesDirac, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class TrackBallStick(Track): @@ -1090,60 +1194,65 @@ class TrackBallStick(Track): Example ------- - >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackBallStick() >>> track.inputs.in_file = 'ballstickfit_data.Bfloat' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "ballstick" - return super(TrackBallStick, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class TrackBootstrapInputSpec(TrackInputSpec): scheme_file = File( - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, exists=True, - desc='The scheme file corresponding to the data being processed.') + desc="The scheme file corresponding to the data being processed.", + ) iterations = traits.Int( - argstr='-iterations %d', - units='NA', - desc="Number of streamlines to generate at each seed point.") + argstr="-iterations %d", + units="NA", + desc="Number of streamlines to generate at each seed point.", + ) inversion = traits.Int( - argstr='-inversion %s', - desc= - 'Tensor reconstruction algorithm for repetition bootstrapping. Default is 1 (linear reconstruction, single tensor).' + argstr="-inversion %s", + desc="""\ +Tensor reconstruction algorithm for repetition bootstrapping. 
+Default is 1 (linear reconstruction, single tensor).""", ) bsdatafiles = traits.List( File(exists=True), mandatory=True, - argstr='-bsdatafile %s', - desc= - 'Specifies files containing raw data for repetition bootstrapping. Use -inputfile for wild bootstrap data.' + argstr="-bsdatafile %s", + desc="""\ +Specifies files containing raw data for repetition bootstrapping. +Use -inputfile for wild bootstrap data.""", ) bgmask = File( - argstr='-bgmask %s', + argstr="-bgmask %s", exists=True, - desc= - 'Provides the name of a file containing a background mask computed using, for example, FSL\'s bet2 program. The mask file contains zero in background voxels and non-zero in foreground.' + desc="""\ +Provides the name of a file containing a background mask computed using, for example, +FSL's bet2 program. +The mask file contains zero in background voxels and non-zero in foreground.""", ) class TrackBootstrap(Track): """ - Performs bootstrap streamline tractography using mulitple scans of the same subject + Performs bootstrap streamline tractography using multiple scans of the same subject Example ------- - >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackBootstrap() >>> track.inputs.inputmodel='repbs_dt' @@ -1151,70 +1260,69 @@ class TrackBootstrap(Track): >>> track.inputs.bsdatafiles = ['fitted_data1.Bfloat', 'fitted_data2.Bfloat'] >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ input_spec = TrackBootstrapInputSpec def __init__(self, command=None, **inputs): - return super(TrackBootstrap, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class ComputeMeanDiffusivityInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc="Tensor-fitted data filename", + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", + ) out_file = File(argstr="> %s", position=-1, genfile=True) inputmodel = traits.Enum( - 'dt', - 'twotensor', - 'threetensor', - argstr='-inputmodel %s', - desc= - 'Specifies the model that the input tensor data contains parameters for.' - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' - '"threetensor" (three-tensor data). By default, the program assumes that the input data ' - 'contains a single diffusion tensor in each voxel.') + "dt", + "twotensor", + "threetensor", + argstr="-inputmodel %s", + desc="""\ +Specifies the model that the input tensor data contains parameters for. +By default, the program assumes that the input data +contains a single diffusion tensor in each voxel.""", + ) inputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-inputdatatype %s', - desc= - 'Specifies the data type of the input file. The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' + "char", + "short", + "int", + "long", + "float", + "double", + argstr="-inputdatatype %s", + desc="Specifies the data type of the input file.", ) outputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-outputdatatype %s', - desc= - 'Specifies the data type of the output data. 
The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' + "char", + "short", + "int", + "long", + "float", + "double", + argstr="-outputdatatype %s", + desc="Specifies the data type of the output data.", ) class ComputeMeanDiffusivityOutputSpec(TraitedSpec): - md = File(exists=True, desc='Mean Diffusivity Map') + md = File(exists=True, desc="Mean Diffusivity Map") class ComputeMeanDiffusivity(StdOutCommandLine): @@ -1223,14 +1331,15 @@ class ComputeMeanDiffusivity(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> md = cmon.ComputeMeanDiffusivity() >>> md.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> md.inputs.scheme_file = 'A.scheme' >>> md.run() # doctest: +SKIP + """ - _cmd = 'md' + + _cmd = "md" input_spec = ComputeMeanDiffusivityInputSpec output_spec = ComputeMeanDiffusivityOutputSpec @@ -1247,58 +1356,56 @@ def _gen_outfilename(self): class ComputeFractionalAnisotropyInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc="Tensor-fitted data filename", + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", + ) inputmodel = traits.Enum( - 'dt', - 'twotensor', - 'threetensor', - 'multitensor', - argstr='-inputmodel %s', - desc= - 'Specifies the model that the input tensor data contains parameters for.' - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' - '"threetensor" (three-tensor data). By default, the program assumes that the input data ' - 'contains a single diffusion tensor in each voxel.') + "dt", + "twotensor", + "threetensor", + "multitensor", + argstr="-inputmodel %s", + desc="""\ +Specifies the model that the input tensor data contains parameters for. +By default, the program assumes that the input data +contains a single diffusion tensor in each voxel.""", + ) inputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-inputdatatype %s', - desc= - 'Specifies the data type of the input file. The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' + "char", + "short", + "int", + "long", + "float", + "double", + argstr="-inputdatatype %s", + desc="Specifies the data type of the input file.", ) outputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-outputdatatype %s', - desc= - 'Specifies the data type of the output data. The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' 
+ "char", + "short", + "int", + "long", + "float", + "double", + argstr="-outputdatatype %s", + desc="Specifies the data type of the output data.", ) class ComputeFractionalAnisotropyOutputSpec(TraitedSpec): - fa = File(exists=True, desc='Fractional Anisotropy Map') + fa = File(exists=True, desc="Fractional Anisotropy Map") class ComputeFractionalAnisotropy(StdOutCommandLine): @@ -1313,82 +1420,81 @@ class ComputeFractionalAnisotropy(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> fa = cmon.ComputeFractionalAnisotropy() >>> fa.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> fa.inputs.scheme_file = 'A.scheme' >>> fa.run() # doctest: +SKIP + """ - _cmd = 'fa' + + _cmd = "fa" input_spec = ComputeFractionalAnisotropyInputSpec output_spec = ComputeFractionalAnisotropyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['fa'] = os.path.abspath(self._gen_outfilename()) + outputs["fa"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_FA.Bdouble' # Need to change to self.inputs.outputdatatype + return name + "_FA.Bdouble" # Need to change to self.inputs.outputdatatype class ComputeTensorTraceInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc="Tensor-fitted data filename", + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", + ) inputmodel = traits.Enum( - 'dt', - 'twotensor', - 'threetensor', - 'multitensor', - argstr='-inputmodel %s', - desc= - 'Specifies the model that the input tensor data contains parameters for.' - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' - '"threetensor" (three-tensor data). By default, the program assumes that the input data ' - 'contains a single diffusion tensor in each voxel.') + "dt", + "twotensor", + "threetensor", + "multitensor", + argstr="-inputmodel %s", + desc="""\ +Specifies the model that the input tensor data contains parameters for. +By default, the program assumes that the input data +contains a single diffusion tensor in each voxel.""", + ) inputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-inputdatatype %s', - desc= - 'Specifies the data type of the input file. The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' + "char", + "short", + "int", + "long", + "float", + "double", + argstr="-inputdatatype %s", + desc="Specifies the data type of the input file.", ) outputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-outputdatatype %s', - desc= - 'Specifies the data type of the output data. The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' 
+ "char", + "short", + "int", + "long", + "float", + "double", + argstr="-outputdatatype %s", + desc="Specifies the data type of the output data.", ) class ComputeTensorTraceOutputSpec(TraitedSpec): - trace = File(exists=True, desc='Trace of the diffusion tensor') + trace = File(exists=True, desc="Trace of the diffusion tensor") class ComputeTensorTrace(StdOutCommandLine): @@ -1405,80 +1511,81 @@ class ComputeTensorTrace(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> trace = cmon.ComputeTensorTrace() >>> trace.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> trace.inputs.scheme_file = 'A.scheme' >>> trace.run() # doctest: +SKIP + """ - _cmd = 'trd' + + _cmd = "trd" input_spec = ComputeTensorTraceInputSpec output_spec = ComputeTensorTraceOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['trace'] = os.path.abspath(self._gen_outfilename()) + outputs["trace"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_TrD.img' # Need to change to self.inputs.outputdatatype + return name + "_TrD.img" # Need to change to self.inputs.outputdatatype class ComputeEigensystemInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc="Tensor-fitted data filename", + ) inputmodel = traits.Enum( - 'dt', - 'multitensor', - argstr='-inputmodel %s', - desc= - 'Specifies the model that the input data contains parameters for. Possible model types are: "dt" (diffusion-tensor data) and "multitensor"' + "dt", + "multitensor", + argstr="-inputmodel %s", + desc="Specifies the model that the input data contains parameters for", ) maxcomponents = traits.Int( - argstr='-maxcomponents %d', - desc= - 'The maximum number of tensor components in a voxel of the input data.' + argstr="-maxcomponents %d", + desc="The maximum number of tensor components in a voxel of the input data.", ) inputdatatype = traits.Enum( - 'double', - 'float', - 'long', - 'int', - 'short', - 'char', - argstr='-inputdatatype %s', + "double", + "float", + "long", + "int", + "short", + "char", + argstr="-inputdatatype %s", usedefault=True, - desc=('Specifies the data type of the input data. ' - 'The data type can be any of the following strings: ' - '"char", "short", "int", "long", "float" or "double".' - 'Default is double data type')) + desc=( + "Specifies the data type of the input data. " + "The data type can be any of the following strings: " + '"char", "short", "int", "long", "float" or "double".' + "Default is double data type" + ), + ) outputdatatype = traits.Enum( - 'double', - 'float', - 'long', - 'int', - 'short', - 'char', - argstr='-outputdatatype %s', + "double", + "float", + "long", + "int", + "short", + "char", + argstr="-outputdatatype %s", usedefault=True, - desc=('Specifies the data type of the output data. ' - 'The data type can be any of the following strings: ' - '"char", "short", "int", "long", "float" or "double".' 
- 'Default is double data type')) + desc="Specifies the data type of the output data.", + ) class ComputeEigensystemOutputSpec(TraitedSpec): - eigen = File(exists=True, desc='Trace of the diffusion tensor') + eigen = File(exists=True, desc="Trace of the diffusion tensor") class ComputeEigensystem(StdOutCommandLine): @@ -1501,7 +1608,8 @@ class ComputeEigensystem(StdOutCommandLine): >>> dteig.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> dteig.run() # doctest: +SKIP """ - _cmd = 'dteig' + + _cmd = "dteig" input_spec = ComputeEigensystemInputSpec output_spec = ComputeEigensystemOutputSpec @@ -1513,4 +1621,4 @@ def _list_outputs(self): def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) datatype = self.inputs.outputdatatype - return name + '_eig.B' + datatype + return name + "_eig.B" + datatype diff --git a/nipype/interfaces/camino/odf.py b/nipype/interfaces/camino/odf.py index 6fea6fdcfd..90d72f114a 100644 --- a/nipype/interfaces/camino/odf.py +++ b/nipype/interfaces/camino/odf.py @@ -1,56 +1,69 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - import os from ...utils.filemanip import split_filename -from ..base import (traits, TraitedSpec, File, StdOutCommandLine, - StdOutCommandLineInputSpec) +from ..base import ( + traits, + TraitedSpec, + File, + StdOutCommandLine, + StdOutCommandLineInputSpec, +) class QBallMXInputSpec(StdOutCommandLineInputSpec): basistype = traits.Enum( - 'rbf', - 'sh', - argstr='-basistype %s', - desc=('Basis function type. "rbf" to use radial basis functions ' - '"sh" to use spherical harmonics'), - usedefault=True) + "rbf", + "sh", + argstr="-basistype %s", + desc=( + 'Basis function type. "rbf" to use radial basis functions ' + '"sh" to use spherical harmonics' + ), + usedefault=True, + ) scheme_file = File( exists=True, - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, - desc='Specifies the scheme file for the diffusion MRI data') + desc="Specifies the scheme file for the diffusion MRI data", + ) order = traits.Int( - argstr='-order %d', - units='NA', + argstr="-order %d", + units="NA", desc=( - 'Specific to sh. Maximum order of the spherical harmonic series. ' - 'Default is 4.')) + "Specific to sh. Maximum order of the spherical harmonic series. " + "Default is 4." + ), + ) rbfpointset = traits.Int( - argstr='-rbfpointset %d', - units='NA', + argstr="-rbfpointset %d", + units="NA", desc=( - 'Specific to rbf. Sets the number of radial basis functions to use. ' - 'The value specified must be present in the Pointsets directory. ' - 'The default value is 246.')) + "Specific to rbf. Sets the number of radial basis functions to use. " + "The value specified must be present in the Pointsets directory. " + "The default value is 246." + ), + ) rbfsigma = traits.Float( - argstr='-rbfsigma %f', - units='NA', - desc= - ('Specific to rbf. Sets the width of the interpolating basis functions. ' - 'The default value is 0.2618 (15 degrees).')) + argstr="-rbfsigma %f", + units="NA", + desc=( + "Specific to rbf. Sets the width of the interpolating basis functions. " + "The default value is 0.2618 (15 degrees)." + ), + ) smoothingsigma = traits.Float( - argstr='-smoothingsigma %f', - units='NA', + argstr="-smoothingsigma %f", + units="NA", desc=( - 'Specific to rbf. Sets the width of the smoothing basis functions. ' - 'The default value is 0.1309 (7.5 degrees).')) + "Specific to rbf. Sets the width of the smoothing basis functions. 
" + "The default value is 0.1309 (7.5 degrees)." + ), + ) class QBallMXOutputSpec(TraitedSpec): - qmat = File(exists=True, desc='Q-Ball reconstruction matrix') + qmat = File(exists=True, desc="Q-Ball reconstruction matrix") class QBallMX(StdOutCommandLine): @@ -58,8 +71,8 @@ class QBallMX(StdOutCommandLine): Generates a reconstruction matrix for Q-Ball. Used in LinRecon with the same scheme file to reconstruct data. - Example 1 - --------- + Examples + -------- To create a linear transform matrix using Spherical Harmonics (sh). >>> import nipype.interfaces.camino as cam @@ -69,8 +82,6 @@ class QBallMX(StdOutCommandLine): >>> qballmx.inputs.order = 6 >>> qballmx.run() # doctest: +SKIP - Example 2 - --------- To create a linear transform matrix using Radial Basis Functions (rbf). This command uses the default setting of rbf sigma = 0.2618 (15 degrees), data smoothing sigma = 0.1309 (7.5 degrees), rbf @@ -91,53 +102,61 @@ class QBallMX(StdOutCommandLine): >>> qballcoeffs.inputs.normalize = True >>> qballcoeffs.inputs.bgmask = 'brain_mask.nii' >>> qballcoeffs.run() # doctest: +SKIP + """ - _cmd = 'qballmx' + + _cmd = "qballmx" input_spec = QBallMXInputSpec output_spec = QBallMXOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['qmat'] = os.path.abspath(self._gen_outfilename()) + outputs["qmat"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) - return name + '_qmat.Bdouble' + return name + "_qmat.Bdouble" class LinReconInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=1, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=2, - desc='Specifies the scheme file for the diffusion MRI data') + desc="Specifies the scheme file for the diffusion MRI data", + ) qball_mat = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=3, - desc='Linear transformation matrix.') + desc="Linear transformation matrix.", + ) normalize = traits.Bool( - argstr='-normalize', - desc=('Normalize the measurements and discard ' - 'the zero measurements before the linear transform.')) + argstr="-normalize", + desc=( + "Normalize the measurements and discard " + "the zero measurements before the linear transform." + ), + ) log = traits.Bool( - argstr='-log', - desc=('Transform the log measurements rather than the ' - 'measurements themselves')) - bgmask = File(exists=True, argstr='-bgmask %s', desc='background mask') + argstr="-log", + desc=("Transform the log measurements rather than the measurements themselves"), + ) + bgmask = File(exists=True, argstr="-bgmask %s", desc="background mask") class LinReconOutputSpec(TraitedSpec): - recon_data = File(exists=True, desc='Transformed data') + recon_data = File(exists=True, desc="Transformed data") class LinRecon(StdOutCommandLine): @@ -147,7 +166,7 @@ class LinRecon(StdOutCommandLine): Reads a linear transformation from the matrix file assuming the imaging scheme specified in the scheme file. Performs the linear transformation on the data in every voxel and outputs the result to - the standard output. The ouput in every voxel is actually: :: + the standard output. The output in every voxel is actually: :: [exit code, ln(S(0)), p1, ..., pR] @@ -163,7 +182,7 @@ class LinRecon(StdOutCommandLine): are stored row by row. 
Example - --------- + ------- First run QBallMX and create a linear transform matrix using Spherical Harmonics (sh). @@ -182,85 +201,103 @@ class LinRecon(StdOutCommandLine): >>> qballcoeffs.inputs.qball_mat = 'A_qmat.Bdouble' >>> qballcoeffs.inputs.normalize = True >>> qballcoeffs.run() # doctest: +SKIP + """ - _cmd = 'linrecon' + + _cmd = "linrecon" input_spec = LinReconInputSpec output_spec = LinReconOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['recon_data'] = os.path.abspath(self._gen_outfilename()) + outputs["recon_data"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) - return name + '_recondata.Bdouble' + return name + "_recondata.Bdouble" class MESDInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, position=1, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) inverter = traits.Enum( - 'SPIKE', - 'PAS', - argstr='-filter %s', + "SPIKE", + "PAS", + argstr="-filter %s", position=2, mandatory=True, - desc= - ('The inversion index specifies the type of inversion to perform on the data.' 'The currently available choices are:' 'Inverter name | Inverter parameters' '---------------|------------------' 'SPIKE | bd (b-value x diffusivity along the fibre.)' 'PAS | r')) + desc=""" +The inversion index specifies the type of inversion to perform on the data. +The currently available choices are: + + +----------------+---------------------------------------------+ + | Inverter name | Inverter parameters | + +================+=============================================+ + | SPIKE | bd (b-value x diffusivity along the fibre.) | + +----------------+---------------------------------------------+ + | PAS | r | + +----------------+---------------------------------------------+ + +""", + ) inverter_param = traits.Float( - argstr='%f', - units='NA', + argstr="%f", + units="NA", position=3, mandatory=True, - desc= - ('Parameter associated with the inverter. Cf. inverter description for' 'more information.')) + desc=( + "Parameter associated with the inverter. Cf. inverter description for " + "more information." + ), + ) fastmesd = traits.Bool( - argstr='-fastmesd', - requires=['mepointset'], - desc= - ('Turns off numerical integration checks and fixes the integration point set size at that of' 'the index specified by -basepointset..')) + argstr="-fastmesd", + requires=["mepointset"], + desc=( + "Turns off numerical integration checks and fixes the integration point set size at that of " + "the index specified by -basepointset." + ), + ) mepointset = traits.Int( - argstr='-mepointset %d', - units='NA', - desc= - ('Use a set of directions other than those in the scheme file for the deconvolution kernel.' 'The number refers to the number of directions on the unit sphere. For example, ' '"-mepointset 54" uses the directions in "camino/PointSets/Elec054.txt".')) + argstr="-mepointset %d", + units="NA", + desc=( + "Use a set of directions other than those in the scheme file for the deconvolution kernel. " + "The number refers to the number of directions on the unit sphere. For example, " + '"-mepointset 54" uses the directions in "camino/PointSets/Elec054.txt".' 
+ ), + ) scheme_file = File( exists=True, - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, - desc='Specifies the scheme file for the diffusion MRI data') - bgmask = File(exists=True, argstr='-bgmask %s', desc='background mask') + desc="Specifies the scheme file for the diffusion MRI data", + ) + bgmask = File(exists=True, argstr="-bgmask %s", desc="background mask") inputdatatype = traits.Enum( - 'float', - 'char', - 'short', - 'int', - 'long', - 'double', - argstr='-inputdatatype %s', - desc= - ('Specifies the data type of the input file: "char", "short", "int", "long",' '"float" or "double". The input file must have BIG-ENDIAN ordering.' 'By default, the input type is "float".')) + "float", + "char", + "short", + "int", + "long", + "double", + argstr="-inputdatatype %s", + desc=( + 'Specifies the data type of the input file: "char", "short", "int", "long", ' + '"float" or "double". The input file must have BIG-ENDIAN ordering. ' + 'By default, the input type is "float".' + ), + ) class MESDOutputSpec(TraitedSpec): - mesd_data = File(exists=True, desc='MESD data') + mesd_data = File(exists=True, desc="MESD data") class MESD(StdOutCommandLine): @@ -329,7 +366,7 @@ class MESD(StdOutCommandLine): Sweet and Alexander "Reduced Encoding Persistent Angular Structure" 572 ISMRM 2010. Example - --------- + ------- Run MESD on every voxel of the data file SubjectA.Bfloat using the PASMRI kernel. >>> import nipype.interfaces.camino as cam >>> mesd = cam.MESD() >>> mesd.inputs.in_file = 'SubjectA.Bfloat' >>> mesd.inputs.scheme_file = 'A.scheme' >>> mesd.inputs.inverter = 'PAS' >>> mesd.inputs.inverter_param = 1.4 >>> mesd.run() # doctest: +SKIP + """ - _cmd = 'mesd' + + _cmd = "mesd" input_spec = MESDInputSpec output_spec = MESDOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['mesd_data'] = os.path.abspath(self._gen_outfilename()) + outputs["mesd_data"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) - return name + '_MESD.Bdouble' + return name + "_MESD.Bdouble" class SFPeaksInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, - desc='Voxel-order data of spherical functions') + desc="Voxel-order data of spherical functions", + ) inputmodel = traits.Enum( - 'sh', - 'maxent', - 'rbf', - argstr='-inputmodel %s', + "sh", + "maxent", + "rbf", + argstr="-inputmodel %s", mandatory=True, - desc= - ('Type of functions input via in_file. Currently supported options are: ' ' sh - Spherical harmonic series. Specify the maximum order of the SH series ' ' with the "order" attribute if different from the default of 4. ' ' maxent - Maximum entropy representations output by MESD. The reconstruction ' ' directions input to MESD must be specified. By default this is the ' ' same set of gradient directions (excluding zero gradients) in the ' ' scheme file, so specify the "schemefile" attribute unless the ' ' "mepointset" attribute was set in MESD. ' ' rbf - Sums of radial basis functions. Specify the pointset with the attribute ' ' "rbfpointset" if different from the default. See QBallMX.')) + desc=( + "Type of functions input via in_file. Currently supported options are: " + " sh - Spherical harmonic series. Specify the maximum order of the SH series " + ' with the "order" attribute if different from the default of 4. ' + " maxent - Maximum entropy representations output by MESD. 
The reconstruction " + " directions input to MESD must be specified. By default this is the " + " same set of gradient directions (excluding zero gradients) in the " + ' scheme file, so specify the "schemefile" attribute unless the ' + ' "mepointset" attribute was set in MESD. ' + " rbf - Sums of radial basis functions. Specify the pointset with the attribute " + ' "rbfpointset" if different from the default. See QBallMX.' + ), + ) order = traits.Int( - argstr='-order %d', - units='NA', - desc='Specific to sh. Maximum order of the spherical harmonic series.') + argstr="-order %d", + units="NA", + desc="Specific to sh. Maximum order of the spherical harmonic series.", + ) scheme_file = File( - exists=True, - argstr='%s', - desc='Specific to maxent. Specifies the scheme file.') + exists=True, argstr="%s", desc="Specific to maxent. Specifies the scheme file." + ) rbfpointset = traits.Int( - argstr='-rbfpointset %d', - units='NA', + argstr="-rbfpointset %d", + units="NA", desc=( - 'Specific to rbf. Sets the number of radial basis functions to use. ' - 'The value specified must be present in the Pointsets directory. ' - 'The default value is 246.')) + "Specific to rbf. Sets the number of radial basis functions to use. " + "The value specified must be present in the Pointsets directory. " + "The default value is 246." + ), + ) mepointset = traits.Int( - argstr='-mepointset %d', - units='NA', - desc= - ('Use a set of directions other than those in the scheme file for the deconvolution ' - 'kernel. The number refers to the number of directions on the unit sphere. ' - 'For example, "mepointset = 54" uses the directions in "camino/PointSets/Elec054.txt" ' - 'Use this option only if you told MESD to use a custom set of directions with the same ' - 'option. Otherwise, specify the scheme file with the "schemefile" attribute.' - )) + argstr="-mepointset %d", + units="NA", + desc=( + "Use a set of directions other than those in the scheme file for the deconvolution " + "kernel. The number refers to the number of directions on the unit sphere. " + 'For example, "mepointset = 54" uses the directions in "camino/PointSets/Elec054.txt" ' + "Use this option only if you told MESD to use a custom set of directions with the same " + 'option. Otherwise, specify the scheme file with the "schemefile" attribute.' + ), + ) numpds = traits.Int( - argstr='-numpds %d', - units='NA', - desc='The largest number of peak directions to output in each voxel.') + argstr="-numpds %d", + units="NA", + desc="The largest number of peak directions to output in each voxel.", + ) noconsistencycheck = traits.Bool( - argstr='-noconsistencycheck', - desc= - 'Turns off the consistency check. The output shows all consistencies as true.' + argstr="-noconsistencycheck", + desc="Turns off the consistency check. The output shows all consistencies as true.", ) searchradius = traits.Float( - argstr='-searchradius %f', - units='NA', - desc= - 'The search radius in the peak finding algorithm. The default is 0.4 (cf. "density")' + argstr="-searchradius %f", + units="NA", + desc='The search radius in the peak finding algorithm. The default is 0.4 (cf. "density")', ) density = traits.Int( - argstr='-density %d', - units='NA', - desc= - ('The number of randomly rotated icosahedra to use in constructing the set of points for ' - 'random sampling in the peak finding algorithm. Default is 1000, which works well for very ' - 'spiky maxent functions. 
For other types of function, it is reasonable to set the density ' - 'much lower and increase the search radius slightly, which speeds up the computation.' - )) + argstr="-density %d", + units="NA", + desc=( + "The number of randomly rotated icosahedra to use in constructing the set of points for " + "random sampling in the peak finding algorithm. Default is 1000, which works well for very " + "spiky maxent functions. For other types of function, it is reasonable to set the density " + "much lower and increase the search radius slightly, which speeds up the computation." + ), + ) pointset = traits.Int( - argstr='-pointset %d', - units='NA', - desc= - ('To sample using an evenly distributed set of points instead. The integer can be ' - '0, 1, ..., 7. Index 0 gives 1082 points, 1 gives 1922, 2 gives 3002, 3 gives 4322, ' - '4 gives 5882, 5 gives 8672, 6 gives 12002, 7 gives 15872.')) + argstr="-pointset %d", + units="NA", + desc=( + "To sample using an evenly distributed set of points instead. The integer can be " + "0, 1, ..., 7. Index 0 gives 1082 points, 1 gives 1922, 2 gives 3002, 3 gives 4322, " + "4 gives 5882, 5 gives 8672, 6 gives 12002, 7 gives 15872." + ), + ) pdthresh = traits.Float( - argstr='-pdthresh %f', - units='NA', - desc= - ('Base threshold on the actual peak direction strength divided by the mean of the ' - 'function. The default is 1.0 (the peak must be equal or greater than the mean).' - )) + argstr="-pdthresh %f", + units="NA", + desc=( + "Base threshold on the actual peak direction strength divided by the mean of the " + "function. The default is 1.0 (the peak must be equal or greater than the mean)." + ), + ) stdsfrommean = traits.Float( - argstr='-stdsfrommean %f', - units='NA', - desc= - ('This is the number of standard deviations of the function to be added to the ' - '"pdthresh" attribute in the peak directions pruning.')) + argstr="-stdsfrommean %f", + units="NA", + desc=( + "This is the number of standard deviations of the function to be added to the " + '"pdthresh" attribute in the peak directions pruning.' + ), + ) class SFPeaksOutputSpec(TraitedSpec): - peaks = File(exists=True, desc='Peaks of the spherical functions.') + peaks = File(exists=True, desc="Peaks of the spherical functions.") class SFPeaks(StdOutCommandLine): @@ -517,7 +567,7 @@ class SFPeaks(StdOutCommandLine): Example - --------- + ------- First run QBallMX and create a linear transform matrix using Spherical Harmonics (sh). 
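A hedged sketch of chaining these interfaces into a nipype pipeline, mirroring the doctests in this file; the filenames are illustrative, and the wiring relies on the qmat and recon_data output names defined earlier in this module:

import nipype.pipeline.engine as pe
import nipype.interfaces.camino as cam

# QBallMX builds the reconstruction matrix, LinRecon applies it voxelwise,
# and SFPeaks extracts peak directions from the resulting SH coefficients.
qballmx = pe.Node(cam.QBallMX(basistype="sh", order=6, scheme_file="A.scheme"), name="qballmx")
linrecon = pe.Node(
    cam.LinRecon(in_file="SubjectA.Bfloat", scheme_file="A.scheme", normalize=True),
    name="linrecon",
)
sfpeaks = pe.Node(
    cam.SFPeaks(inputmodel="sh", order=6, density=100, searchradius=1.0),
    name="sfpeaks",
)

wf = pe.Workflow(name="qball_peaks")
wf.connect([
    (qballmx, linrecon, [("qmat", "qball_mat")]),
    (linrecon, sfpeaks, [("recon_data", "in_file")]),
])
# wf.run()  # left unexecuted, in the spirit of the doctest +SKIP markers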
@@ -529,16 +579,18 @@ class SFPeaks(StdOutCommandLine): >>> sf_peaks.inputs.density = 100 >>> sf_peaks.inputs.searchradius = 1.0 >>> sf_peaks.run() # doctest: +SKIP + """ - _cmd = 'sfpeaks' + + _cmd = "sfpeaks" input_spec = SFPeaksInputSpec output_spec = SFPeaksOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['peaks'] = os.path.abspath(self._gen_outfilename()) + outputs["peaks"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_peaks.Bdouble' + return name + "_peaks.Bdouble" diff --git a/nipype/interfaces/camino/tests/__init__.py b/nipype/interfaces/camino/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/interfaces/camino/tests/__init__.py +++ b/nipype/interfaces/camino/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/camino/tests/test_auto_AnalyzeHeader.py b/nipype/interfaces/camino/tests/test_auto_AnalyzeHeader.py index 5d796126cc..9b6110d30d 100644 --- a/nipype/interfaces/camino/tests/test_auto_AnalyzeHeader.py +++ b/nipype/interfaces/camino/tests/test_auto_AnalyzeHeader.py @@ -1,95 +1,111 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import AnalyzeHeader def test_AnalyzeHeader_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), centre=dict( - argstr='-centre %s', - units='mm', + argstr="-centre %s", + units="mm", ), data_dims=dict( - argstr='-datadims %s', - units='voxels', + argstr="-datadims %s", + units="voxels", ), datatype=dict( - argstr='-datatype %s', + argstr="-datatype %s", mandatory=True, ), - description=dict(argstr='-description %s', ), + description=dict( + argstr="-description %s", + ), environ=dict( nohash=True, usedefault=True, ), greylevels=dict( - argstr='-gl %s', - units='NA', + argstr="-gl %s", + units="NA", ), in_file=dict( - argstr='< %s', + argstr="< %s", + extensions=None, mandatory=True, position=1, ), initfromheader=dict( - argstr='-initfromheader %s', + argstr="-initfromheader %s", + extensions=None, position=3, ), - intelbyteorder=dict(argstr='-intelbyteorder', ), - networkbyteorder=dict(argstr='-networkbyteorder', ), + intelbyteorder=dict( + argstr="-intelbyteorder", + ), + networkbyteorder=dict( + argstr="-networkbyteorder", + ), nimages=dict( - argstr='-nimages %d', - units='NA', + argstr="-nimages %d", + units="NA", ), offset=dict( - argstr='-offset %d', - units='NA', + argstr="-offset %d", + units="NA", ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), picoseed=dict( - argstr='-picoseed %s', - units='mm', + argstr="-picoseed %s", + units="mm", ), printbigendian=dict( - argstr='-printbigendian %s', + argstr="-printbigendian %s", + extensions=None, position=3, ), printimagedims=dict( - argstr='-printimagedims %s', + argstr="-printimagedims %s", + extensions=None, position=3, ), printintelbyteorder=dict( - argstr='-printintelbyteorder %s', + argstr="-printintelbyteorder %s", + extensions=None, position=3, ), printprogargs=dict( - argstr='-printprogargs %s', + argstr="-printprogargs %s", + extensions=None, position=3, ), readheader=dict( - argstr='-readheader %s', + argstr="-readheader %s", + extensions=None, position=3, ), scaleinter=dict( - argstr='-scaleinter %d', - units='NA', + argstr="-scaleinter 
%d", + units="NA", ), scaleslope=dict( - argstr='-scaleslope %d', - units='NA', + argstr="-scaleslope %d", + units="NA", ), scheme_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, ), voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', + argstr="-voxeldims %s", + units="mm", ), ) inputs = AnalyzeHeader.input_spec() @@ -97,8 +113,14 @@ def test_AnalyzeHeader_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AnalyzeHeader_outputs(): - output_map = dict(header=dict(), ) + output_map = dict( + header=dict( + extensions=None, + ), + ) outputs = AnalyzeHeader.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ComputeEigensystem.py b/nipype/interfaces/camino/tests/test_auto_ComputeEigensystem.py index caee9e68cf..6181cf7541 100644 --- a/nipype/interfaces/camino/tests/test_auto_ComputeEigensystem.py +++ b/nipype/interfaces/camino/tests/test_auto_ComputeEigensystem.py @@ -1,33 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import ComputeEigensystem def test_ComputeEigensystem_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='< %s', + argstr="< %s", + extensions=None, mandatory=True, position=1, ), inputdatatype=dict( - argstr='-inputdatatype %s', + argstr="-inputdatatype %s", usedefault=True, ), - inputmodel=dict(argstr='-inputmodel %s', ), - maxcomponents=dict(argstr='-maxcomponents %d', ), + inputmodel=dict( + argstr="-inputmodel %s", + ), + maxcomponents=dict( + argstr="-maxcomponents %d", + ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), outputdatatype=dict( - argstr='-outputdatatype %s', + argstr="-outputdatatype %s", usedefault=True, ), ) @@ -36,8 +43,14 @@ def test_ComputeEigensystem_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeEigensystem_outputs(): - output_map = dict(eigen=dict(), ) + output_map = dict( + eigen=dict( + extensions=None, + ), + ) outputs = ComputeEigensystem.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ComputeFractionalAnisotropy.py b/nipype/interfaces/camino/tests/test_auto_ComputeFractionalAnisotropy.py index 49cfdfd96a..0a13ac4f64 100644 --- a/nipype/interfaces/camino/tests/test_auto_ComputeFractionalAnisotropy.py +++ b/nipype/interfaces/camino/tests/test_auto_ComputeFractionalAnisotropy.py @@ -1,30 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import ComputeFractionalAnisotropy def test_ComputeFractionalAnisotropy_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='< %s', + argstr="< %s", + extensions=None, mandatory=True, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict(argstr='-inputmodel %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), + inputmodel=dict( + argstr="-inputmodel %s", + ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), - 
outputdatatype=dict(argstr='-outputdatatype %s', ), + outputdatatype=dict( + argstr="-outputdatatype %s", + ), scheme_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, ), ) @@ -33,8 +43,14 @@ def test_ComputeFractionalAnisotropy_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeFractionalAnisotropy_outputs(): - output_map = dict(fa=dict(), ) + output_map = dict( + fa=dict( + extensions=None, + ), + ) outputs = ComputeFractionalAnisotropy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ComputeMeanDiffusivity.py b/nipype/interfaces/camino/tests/test_auto_ComputeMeanDiffusivity.py index 212477d149..822bd0306e 100644 --- a/nipype/interfaces/camino/tests/test_auto_ComputeMeanDiffusivity.py +++ b/nipype/interfaces/camino/tests/test_auto_ComputeMeanDiffusivity.py @@ -1,30 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import ComputeMeanDiffusivity def test_ComputeMeanDiffusivity_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='< %s', + argstr="< %s", + extensions=None, mandatory=True, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict(argstr='-inputmodel %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), + inputmodel=dict( + argstr="-inputmodel %s", + ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), - outputdatatype=dict(argstr='-outputdatatype %s', ), + outputdatatype=dict( + argstr="-outputdatatype %s", + ), scheme_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, ), ) @@ -33,8 +43,14 @@ def test_ComputeMeanDiffusivity_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeMeanDiffusivity_outputs(): - output_map = dict(md=dict(), ) + output_map = dict( + md=dict( + extensions=None, + ), + ) outputs = ComputeMeanDiffusivity.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ComputeTensorTrace.py b/nipype/interfaces/camino/tests/test_auto_ComputeTensorTrace.py index 1112c7c743..8a912685ae 100644 --- a/nipype/interfaces/camino/tests/test_auto_ComputeTensorTrace.py +++ b/nipype/interfaces/camino/tests/test_auto_ComputeTensorTrace.py @@ -1,30 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import ComputeTensorTrace def test_ComputeTensorTrace_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='< %s', + argstr="< %s", + extensions=None, mandatory=True, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict(argstr='-inputmodel %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), + inputmodel=dict( + argstr="-inputmodel %s", + ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), - outputdatatype=dict(argstr='-outputdatatype %s', ), + outputdatatype=dict( + argstr="-outputdatatype %s", + ), scheme_file=dict( - argstr='%s', + argstr="%s", + 
extensions=None, position=2, ), ) @@ -33,8 +43,14 @@ def test_ComputeTensorTrace_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeTensorTrace_outputs(): - output_map = dict(trace=dict(), ) + output_map = dict( + trace=dict( + extensions=None, + ), + ) outputs = ComputeTensorTrace.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_Conmat.py b/nipype/interfaces/camino/tests/test_auto_Conmat.py index 06a76fdf2b..a9ea16865d 100644 --- a/nipype/interfaces/camino/tests/test_auto_Conmat.py +++ b/nipype/interfaces/camino/tests/test_auto_Conmat.py @@ -1,42 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..connectivity import Conmat def test_Conmat_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, mandatory=True, ), output_root=dict( - argstr='-outputroot %s', + argstr="-outputroot %s", + extensions=None, genfile=True, ), scalar_file=dict( - argstr='-scalarfile %s', - requires=['tract_stat'], + argstr="-scalarfile %s", + extensions=None, + requires=["tract_stat"], ), target_file=dict( - argstr='-targetfile %s', + argstr="-targetfile %s", + extensions=None, mandatory=True, ), - targetname_file=dict(argstr='-targetnamefile %s', ), + targetname_file=dict( + argstr="-targetnamefile %s", + extensions=None, + ), tract_prop=dict( - argstr='-tractstat %s', - units='NA', - xor=['tract_stat'], + argstr="-tractstat %s", + units="NA", + xor=["tract_stat"], ), tract_stat=dict( - argstr='-tractstat %s', - requires=['scalar_file'], - units='NA', - xor=['tract_prop'], + argstr="-tractstat %s", + requires=["scalar_file"], + units="NA", + xor=["tract_prop"], ), ) inputs = Conmat.input_spec() @@ -44,10 +52,16 @@ def test_Conmat_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Conmat_outputs(): output_map = dict( - conmat_sc=dict(), - conmat_ts=dict(), + conmat_sc=dict( + extensions=None, + ), + conmat_ts=dict( + extensions=None, + ), ) outputs = Conmat.output_spec() diff --git a/nipype/interfaces/camino/tests/test_auto_DT2NIfTI.py b/nipype/interfaces/camino/tests/test_auto_DT2NIfTI.py index b20dcbf3bf..b88fe01ba9 100644 --- a/nipype/interfaces/camino/tests/test_auto_DT2NIfTI.py +++ b/nipype/interfaces/camino/tests/test_auto_DT2NIfTI.py @@ -1,27 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import DT2NIfTI def test_DT2NIfTI_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), header_file=dict( - argstr='-header %s', + argstr="-header %s", + extensions=None, mandatory=True, position=3, ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, mandatory=True, position=1, ), output_root=dict( - argstr='-outputroot %s', + argstr="-outputroot %s", + extensions=None, genfile=True, position=2, ), @@ -31,11 +35,19 @@ def test_DT2NIfTI_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_DT2NIfTI_outputs(): output_map = dict( - dt=dict(), - exitcode=dict(), - lns0=dict(), + dt=dict( + extensions=None, + ), + exitcode=dict( + extensions=None, + ), + lns0=dict( + extensions=None, + ), ) outputs = DT2NIfTI.output_spec() diff --git a/nipype/interfaces/camino/tests/test_auto_DTIFit.py b/nipype/interfaces/camino/tests/test_auto_DTIFit.py index a23da89552..757f870fe3 100644 --- a/nipype/interfaces/camino/tests/test_auto_DTIFit.py +++ b/nipype/interfaces/camino/tests/test_auto_DTIFit.py @@ -1,32 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import DTIFit def test_DTIFit_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bgmask=dict(argstr='-bgmask %s', ), + args=dict( + argstr="%s", + ), + bgmask=dict( + argstr="-bgmask %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), non_linear=dict( - argstr='-nonlinear', + argstr="-nonlinear", position=3, ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), scheme_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), @@ -36,8 +43,14 @@ def test_DTIFit_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTIFit_outputs(): - output_map = dict(tensor_fitted=dict(), ) + output_map = dict( + tensor_fitted=dict( + extensions=None, + ), + ) outputs = DTIFit.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_DTLUTGen.py b/nipype/interfaces/camino/tests/test_auto_DTLUTGen.py index 95e8e0bea5..0ee1ffea8f 100644 --- a/nipype/interfaces/camino/tests/test_auto_DTLUTGen.py +++ b/nipype/interfaces/camino/tests/test_auto_DTLUTGen.py @@ -1,66 +1,81 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import DTLUTGen def test_DTLUTGen_inputs(): input_map = dict( - acg=dict(argstr='-acg', ), - args=dict(argstr='%s', ), - bingham=dict(argstr='-bingham', ), + acg=dict( + argstr="-acg", + ), + args=dict( + argstr="%s", + ), + bingham=dict( + argstr="-bingham", + ), environ=dict( nohash=True, usedefault=True, ), frange=dict( - argstr='-frange %s', + argstr="-frange %s", position=1, - units='NA', + units="NA", ), inversion=dict( - argstr='-inversion %d', - units='NA', + argstr="-inversion %d", + units="NA", ), lrange=dict( - argstr='-lrange %s', + argstr="-lrange %s", position=1, - units='NA', + units="NA", ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), samples=dict( - argstr='-samples %d', - units='NA', + argstr="-samples %d", + units="NA", ), scheme_file=dict( - argstr='-schemefile %s', + argstr="-schemefile %s", + extensions=None, mandatory=True, position=2, ), snr=dict( - argstr='-snr %f', - units='NA', + argstr="-snr %f", + units="NA", ), step=dict( - argstr='-step %f', - units='NA', + argstr="-step %f", + units="NA", ), trace=dict( - argstr='-trace %G', - units='NA', + argstr="-trace %G", + units="NA", + ), + watson=dict( + argstr="-watson", ), - watson=dict(argstr='-watson', ), ) inputs = DTLUTGen.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTLUTGen_outputs(): - 
output_map = dict(dtLUT=dict(), ) + output_map = dict( + dtLUT=dict( + extensions=None, + ), + ) outputs = DTLUTGen.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_DTMetric.py b/nipype/interfaces/camino/tests/test_auto_DTMetric.py index 12256a50d8..11e971b28b 100644 --- a/nipype/interfaces/camino/tests/test_auto_DTMetric.py +++ b/nipype/interfaces/camino/tests/test_auto_DTMetric.py @@ -1,14 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import DTMetric def test_DTMetric_inputs(): input_map = dict( - args=dict(argstr='%s', ), - data_header=dict(argstr='-header %s', ), + args=dict( + argstr="%s", + ), + data_header=dict( + argstr="-header %s", + extensions=None, + ), eigen_data=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, mandatory=True, ), environ=dict( @@ -16,19 +21,20 @@ def test_DTMetric_inputs(): usedefault=True, ), inputdatatype=dict( - argstr='-inputdatatype %s', + argstr="-inputdatatype %s", usedefault=True, ), metric=dict( - argstr='-stat %s', + argstr="-stat %s", mandatory=True, ), outputdatatype=dict( - argstr='-outputdatatype %s', + argstr="-outputdatatype %s", usedefault=True, ), outputfile=dict( - argstr='-outputfile %s', + argstr="-outputfile %s", + extensions=None, genfile=True, ), ) @@ -37,8 +43,14 @@ def test_DTMetric_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTMetric_outputs(): - output_map = dict(metric_stats=dict(), ) + output_map = dict( + metric_stats=dict( + extensions=None, + ), + ) outputs = DTMetric.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_FSL2Scheme.py b/nipype/interfaces/camino/tests/test_auto_FSL2Scheme.py index a2ebfbf0a6..376fa1bf3e 100644 --- a/nipype/interfaces/camino/tests/test_auto_FSL2Scheme.py +++ b/nipype/interfaces/camino/tests/test_auto_FSL2Scheme.py @@ -1,55 +1,75 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import FSL2Scheme def test_FSL2Scheme_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), bscale=dict( - argstr='-bscale %d', - units='NA', + argstr="-bscale %d", + units="NA", ), bval_file=dict( - argstr='-bvalfile %s', + argstr="-bvalfile %s", + extensions=None, mandatory=True, position=2, ), bvec_file=dict( - argstr='-bvecfile %s', + argstr="-bvecfile %s", + extensions=None, mandatory=True, position=1, ), diffusiontime=dict( - argstr='-diffusiontime %f', - units='NA', + argstr="-diffusiontime %f", + units="NA", ), environ=dict( nohash=True, usedefault=True, ), - flipx=dict(argstr='-flipx', ), - flipy=dict(argstr='-flipy', ), - flipz=dict(argstr='-flipz', ), - interleave=dict(argstr='-interleave', ), + flipx=dict( + argstr="-flipx", + ), + flipy=dict( + argstr="-flipy", + ), + flipz=dict( + argstr="-flipz", + ), + interleave=dict( + argstr="-interleave", + ), numscans=dict( - argstr='-numscans %d', - units='NA', + argstr="-numscans %d", + units="NA", ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), - usegradmod=dict(argstr='-usegradmod', ), + usegradmod=dict( + argstr="-usegradmod", + ), ) inputs = FSL2Scheme.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_FSL2Scheme_outputs(): - output_map = dict(scheme=dict(), ) + output_map = dict( + scheme=dict( + extensions=None, + ), + ) outputs = FSL2Scheme.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_Image2Voxel.py b/nipype/interfaces/camino/tests/test_auto_Image2Voxel.py index adae10f0bb..ae49936d02 100644 --- a/nipype/interfaces/camino/tests/test_auto_Image2Voxel.py +++ b/nipype/interfaces/camino/tests/test_auto_Image2Voxel.py @@ -1,27 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import Image2Voxel def test_Image2Voxel_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-4dimage %s', + argstr="-4dimage %s", + extensions=None, mandatory=True, position=1, ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), out_type=dict( - argstr='-outputdatatype %s', + argstr="-outputdatatype %s", position=2, usedefault=True, ), @@ -31,8 +34,14 @@ def test_Image2Voxel_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Image2Voxel_outputs(): - output_map = dict(voxel_order=dict(), ) + output_map = dict( + voxel_order=dict( + extensions=None, + ), + ) outputs = Image2Voxel.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ImageStats.py b/nipype/interfaces/camino/tests/test_auto_ImageStats.py index 3813051025..4bc6aa941b 100644 --- a/nipype/interfaces/camino/tests/test_auto_ImageStats.py +++ b/nipype/interfaces/camino/tests/test_auto_ImageStats.py @@ -1,32 +1,34 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ImageStats def test_ImageStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( - argstr='-images %s', + argstr="-images %s", mandatory=True, position=-1, ), out_type=dict( - argstr='-outputdatatype %s', + argstr="-outputdatatype %s", usedefault=True, ), output_root=dict( - argstr='-outputroot %s', + argstr="-outputroot %s", + extensions=None, mandatory=True, ), stat=dict( - argstr='-stat %s', + argstr="-stat %s", mandatory=True, - units='NA', + units="NA", ), ) inputs = ImageStats.input_spec() @@ -34,8 +36,14 @@ def test_ImageStats_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageStats_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ImageStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_LinRecon.py b/nipype/interfaces/camino/tests/test_auto_LinRecon.py index 8998f42602..1ed5bbbe6b 100644 --- a/nipype/interfaces/camino/tests/test_auto_LinRecon.py +++ b/nipype/interfaces/camino/tests/test_auto_LinRecon.py @@ -1,35 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..odf import LinRecon def test_LinRecon_inputs(): input_map = dict( - args=dict(argstr='%s', ), - 
bgmask=dict(argstr='-bgmask %s', ), + args=dict( + argstr="%s", + ), + bgmask=dict( + argstr="-bgmask %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), - log=dict(argstr='-log', ), - normalize=dict(argstr='-normalize', ), + log=dict( + argstr="-log", + ), + normalize=dict( + argstr="-normalize", + ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), qball_mat=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=3, ), scheme_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), @@ -39,8 +51,14 @@ def test_LinRecon_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LinRecon_outputs(): - output_map = dict(recon_data=dict(), ) + output_map = dict( + recon_data=dict( + extensions=None, + ), + ) outputs = LinRecon.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_MESD.py b/nipype/interfaces/camino/tests/test_auto_MESD.py index 73089a3395..189dd2e2d4 100644 --- a/nipype/interfaces/camino/tests/test_auto_MESD.py +++ b/nipype/interfaces/camino/tests/test_auto_MESD.py @@ -1,48 +1,57 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..odf import MESD def test_MESD_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bgmask=dict(argstr='-bgmask %s', ), + args=dict( + argstr="%s", + ), + bgmask=dict( + argstr="-bgmask %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), fastmesd=dict( - argstr='-fastmesd', - requires=['mepointset'], + argstr="-fastmesd", + requires=["mepointset"], ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, mandatory=True, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), inverter=dict( - argstr='-filter %s', + argstr="-filter %s", mandatory=True, position=2, ), inverter_param=dict( - argstr='%f', + argstr="%f", mandatory=True, position=3, - units='NA', + units="NA", ), mepointset=dict( - argstr='-mepointset %d', - units='NA', + argstr="-mepointset %d", + units="NA", ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), scheme_file=dict( - argstr='-schemefile %s', + argstr="-schemefile %s", + extensions=None, mandatory=True, ), ) @@ -51,8 +60,14 @@ def test_MESD_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MESD_outputs(): - output_map = dict(mesd_data=dict(), ) + output_map = dict( + mesd_data=dict( + extensions=None, + ), + ) outputs = MESD.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ModelFit.py b/nipype/interfaces/camino/tests/test_auto_ModelFit.py index 04d22bbe59..82bd1a8400 100644 --- a/nipype/interfaces/camino/tests/test_auto_ModelFit.py +++ b/nipype/interfaces/camino/tests/test_auto_ModelFit.py @@ -1,52 +1,91 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import ModelFit def test_ModelFit_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bgmask=dict(argstr='-bgmask 
%s', ), - bgthresh=dict(argstr='-bgthresh %G', ), - cfthresh=dict(argstr='-csfthresh %G', ), + args=dict( + argstr="%s", + ), + bgmask=dict( + argstr="-bgmask %s", + extensions=None, + ), + bgthresh=dict( + argstr="-bgthresh %G", + ), + cfthresh=dict( + argstr="-csfthresh %G", + ), environ=dict( nohash=True, usedefault=True, ), - fixedbvalue=dict(argstr='-fixedbvalue %s', ), - fixedmodq=dict(argstr='-fixedmod %s', ), + fixedbvalue=dict( + argstr="-fixedbvalue %s", + ), + fixedmodq=dict( + argstr="-fixedmod %s", + ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, mandatory=True, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), model=dict( - argstr='-model %s', + argstr="-model %s", mandatory=True, ), - noisemap=dict(argstr='-noisemap %s', ), + noisemap=dict( + argstr="-noisemap %s", + extensions=None, + ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), - outlier=dict(argstr='-outliermap %s', ), - outputfile=dict(argstr='-outputfile %s', ), - residualmap=dict(argstr='-residualmap %s', ), + outlier=dict( + argstr="-outliermap %s", + extensions=None, + ), + outputfile=dict( + argstr="-outputfile %s", + extensions=None, + ), + residualmap=dict( + argstr="-residualmap %s", + extensions=None, + ), scheme_file=dict( - argstr='-schemefile %s', + argstr="-schemefile %s", + extensions=None, mandatory=True, ), - sigma=dict(argstr='-sigma %G', ), - tau=dict(argstr='-tau %G', ), + sigma=dict( + argstr="-sigma %G", + ), + tau=dict( + argstr="-tau %G", + ), ) inputs = ModelFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ModelFit_outputs(): - output_map = dict(fitted_data=dict(), ) + output_map = dict( + fitted_data=dict( + extensions=None, + ), + ) outputs = ModelFit.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_NIfTIDT2Camino.py b/nipype/interfaces/camino/tests/test_auto_NIfTIDT2Camino.py index 9a4867a73c..82b4276a0f 100644 --- a/nipype/interfaces/camino/tests/test_auto_NIfTIDT2Camino.py +++ b/nipype/interfaces/camino/tests/test_auto_NIfTIDT2Camino.py @@ -1,39 +1,63 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import NIfTIDT2Camino def test_NIfTIDT2Camino_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bgmask=dict(argstr='-bgmask %s', ), + args=dict( + argstr="%s", + ), + bgmask=dict( + argstr="-bgmask %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, mandatory=True, position=1, ), - lns0_file=dict(argstr='-lns0 %s', ), + lns0_file=dict( + argstr="-lns0 %s", + extensions=None, + ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), - s0_file=dict(argstr='-s0 %s', ), - scaleinter=dict(argstr='-scaleinter %s', ), - scaleslope=dict(argstr='-scaleslope %s', ), - uppertriangular=dict(argstr='-uppertriangular %s', ), + s0_file=dict( + argstr="-s0 %s", + extensions=None, + ), + scaleinter=dict( + argstr="-scaleinter %s", + ), + scaleslope=dict( + argstr="-scaleslope %s", + ), + uppertriangular=dict( + argstr="-uppertriangular %s", + ), ) inputs = NIfTIDT2Camino.input_spec() for key, metadata in list(input_map.items()): for metakey, 
value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NIfTIDT2Camino_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = NIfTIDT2Camino.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_PicoPDFs.py b/nipype/interfaces/camino/tests/test_auto_PicoPDFs.py index f2b59666a2..09f3a93cac 100644 --- a/nipype/interfaces/camino/tests/test_auto_PicoPDFs.py +++ b/nipype/interfaces/camino/tests/test_auto_PicoPDFs.py @@ -1,45 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import PicoPDFs def test_PicoPDFs_inputs(): input_map = dict( - args=dict(argstr='%s', ), - directmap=dict(argstr='-directmap', ), + args=dict( + argstr="%s", + ), + directmap=dict( + argstr="-directmap", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='< %s', + argstr="< %s", + extensions=None, mandatory=True, position=1, ), inputmodel=dict( - argstr='-inputmodel %s', + argstr="-inputmodel %s", position=2, usedefault=True, ), luts=dict( - argstr='-luts %s', + argstr="-luts %s", mandatory=True, ), maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', + argstr="-maxcomponents %d", + units="NA", ), numpds=dict( - argstr='-numpds %d', - units='NA', + argstr="-numpds %d", + units="NA", ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), pdf=dict( - argstr='-pdf %s', + argstr="-pdf %s", position=4, usedefault=True, ), @@ -49,8 +54,14 @@ def test_PicoPDFs_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PicoPDFs_outputs(): - output_map = dict(pdfs=dict(), ) + output_map = dict( + pdfs=dict( + extensions=None, + ), + ) outputs = PicoPDFs.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py b/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py index 8838bb72cf..b1b9fda588 100644 --- a/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py +++ b/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py @@ -1,116 +1,155 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import ProcStreamlines def test_ProcStreamlines_inputs(): input_map = dict( - allowmultitargets=dict(argstr='-allowmultitargets', ), - args=dict(argstr='%s', ), + allowmultitargets=dict( + argstr="-allowmultitargets", + ), + args=dict( + argstr="%s", + ), datadims=dict( - argstr='-datadims %s', - units='voxels', + argstr="-datadims %s", + units="voxels", ), directional=dict( - argstr='-directional %s', - units='NA', + argstr="-directional %s", + units="NA", + ), + discardloops=dict( + argstr="-discardloops", + ), + endpointfile=dict( + argstr="-endpointfile %s", + extensions=None, ), - discardloops=dict(argstr='-discardloops', ), - endpointfile=dict(argstr='-endpointfile %s', ), environ=dict( nohash=True, usedefault=True, ), - exclusionfile=dict(argstr='-exclusionfile %s', ), - gzip=dict(argstr='-gzip', ), + exclusionfile=dict( + argstr="-exclusionfile %s", + extensions=None, + ), + gzip=dict( + argstr="-gzip", + ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, mandatory=True, position=1, ), inputmodel=dict( - argstr='-inputmodel %s', 
+ argstr="-inputmodel %s", usedefault=True, ), iterations=dict( - argstr='-iterations %d', - units='NA', + argstr="-iterations %d", + units="NA", ), maxtractlength=dict( - argstr='-maxtractlength %d', - units='mm', + argstr="-maxtractlength %d", + units="mm", ), maxtractpoints=dict( - argstr='-maxtractpoints %d', - units='NA', + argstr="-maxtractpoints %d", + units="NA", ), mintractlength=dict( - argstr='-mintractlength %d', - units='mm', + argstr="-mintractlength %d", + units="mm", ), mintractpoints=dict( - argstr='-mintractpoints %d', - units='NA', + argstr="-mintractpoints %d", + units="NA", + ), + noresample=dict( + argstr="-noresample", ), - noresample=dict(argstr='-noresample', ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), outputacm=dict( - argstr='-outputacm', - requires=['outputroot', 'seedfile'], + argstr="-outputacm", + requires=["outputroot", "seedfile"], ), outputcbs=dict( - argstr='-outputcbs', - requires=['outputroot', 'targetfile', 'seedfile'], + argstr="-outputcbs", + requires=["outputroot", "targetfile", "seedfile"], ), outputcp=dict( - argstr='-outputcp', - requires=['outputroot', 'seedfile'], + argstr="-outputcp", + requires=["outputroot", "seedfile"], + ), + outputroot=dict( + argstr="-outputroot %s", + extensions=None, ), - outputroot=dict(argstr='-outputroot %s', ), outputsc=dict( - argstr='-outputsc', - requires=['outputroot', 'seedfile'], + argstr="-outputsc", + requires=["outputroot", "seedfile"], + ), + outputtracts=dict( + argstr="-outputtracts", ), - outputtracts=dict(argstr='-outputtracts', ), regionindex=dict( - argstr='-regionindex %d', - units='mm', + argstr="-regionindex %d", + units="mm", ), resamplestepsize=dict( - argstr='-resamplestepsize %d', - units='NA', + argstr="-resamplestepsize %d", + units="NA", + ), + seedfile=dict( + argstr="-seedfile %s", + extensions=None, ), - seedfile=dict(argstr='-seedfile %s', ), seedpointmm=dict( - argstr='-seedpointmm %s', - units='mm', + argstr="-seedpointmm %s", + units="mm", ), seedpointvox=dict( - argstr='-seedpointvox %s', - units='voxels', + argstr="-seedpointvox %s", + units="voxels", + ), + targetfile=dict( + argstr="-targetfile %s", + extensions=None, + ), + truncateinexclusion=dict( + argstr="-truncateinexclusion", + ), + truncateloops=dict( + argstr="-truncateloops", ), - targetfile=dict(argstr='-targetfile %s', ), - truncateinexclusion=dict(argstr='-truncateinexclusion', ), - truncateloops=dict(argstr='-truncateloops', ), voxeldims=dict( - argstr='-voxeldims %s', - units='mm', + argstr="-voxeldims %s", + units="mm", + ), + waypointfile=dict( + argstr="-waypointfile %s", + extensions=None, ), - waypointfile=dict(argstr='-waypointfile %s', ), ) inputs = ProcStreamlines.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProcStreamlines_outputs(): output_map = dict( outputroot_files=dict(), - proc=dict(), + proc=dict( + extensions=None, + ), ) outputs = ProcStreamlines.output_spec() diff --git a/nipype/interfaces/camino/tests/test_auto_QBallMX.py b/nipype/interfaces/camino/tests/test_auto_QBallMX.py index e965cb606a..30fda3a483 100644 --- a/nipype/interfaces/camino/tests/test_auto_QBallMX.py +++ b/nipype/interfaces/camino/tests/test_auto_QBallMX.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..odf import QBallMX def test_QBallMX_inputs(): input_map = dict( - 
args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), basistype=dict( - argstr='-basistype %s', + argstr="-basistype %s", usedefault=True, ), environ=dict( @@ -15,29 +16,31 @@ def test_QBallMX_inputs(): usedefault=True, ), order=dict( - argstr='-order %d', - units='NA', + argstr="-order %d", + units="NA", ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), rbfpointset=dict( - argstr='-rbfpointset %d', - units='NA', + argstr="-rbfpointset %d", + units="NA", ), rbfsigma=dict( - argstr='-rbfsigma %f', - units='NA', + argstr="-rbfsigma %f", + units="NA", ), scheme_file=dict( - argstr='-schemefile %s', + argstr="-schemefile %s", + extensions=None, mandatory=True, ), smoothingsigma=dict( - argstr='-smoothingsigma %f', - units='NA', + argstr="-smoothingsigma %f", + units="NA", ), ) inputs = QBallMX.input_spec() @@ -45,8 +48,14 @@ def test_QBallMX_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_QBallMX_outputs(): - output_map = dict(qmat=dict(), ) + output_map = dict( + qmat=dict( + extensions=None, + ), + ) outputs = QBallMX.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_SFLUTGen.py b/nipype/interfaces/camino/tests/test_auto_SFLUTGen.py index c52ca04929..fc58b2f2e9 100644 --- a/nipype/interfaces/camino/tests/test_auto_SFLUTGen.py +++ b/nipype/interfaces/camino/tests/test_auto_SFLUTGen.py @@ -1,47 +1,53 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..calib import SFLUTGen def test_SFLUTGen_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), binincsize=dict( - argstr='-binincsize %d', - units='NA', + argstr="-binincsize %d", + units="NA", + ), + directmap=dict( + argstr="-directmap", ), - directmap=dict(argstr='-directmap', ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, mandatory=True, ), info_file=dict( - argstr='-infofile %s', + argstr="-infofile %s", + extensions=None, mandatory=True, ), minvectsperbin=dict( - argstr='-minvectsperbin %d', - units='NA', + argstr="-minvectsperbin %d", + units="NA", ), order=dict( - argstr='-order %d', - units='NA', + argstr="-order %d", + units="NA", ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), outputstem=dict( - argstr='-outputstem %s', + argstr="-outputstem %s", usedefault=True, ), pdf=dict( - argstr='-pdf %s', + argstr="-pdf %s", usedefault=True, ), ) @@ -50,10 +56,16 @@ def test_SFLUTGen_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SFLUTGen_outputs(): output_map = dict( - lut_one_fibre=dict(), - lut_two_fibres=dict(), + lut_one_fibre=dict( + extensions=None, + ), + lut_two_fibres=dict( + extensions=None, + ), ) outputs = SFLUTGen.output_spec() diff --git a/nipype/interfaces/camino/tests/test_auto_SFPICOCalibData.py b/nipype/interfaces/camino/tests/test_auto_SFPICOCalibData.py index e83a01e2c3..5c20399cbc 100644 --- a/nipype/interfaces/camino/tests/test_auto_SFPICOCalibData.py +++ b/nipype/interfaces/camino/tests/test_auto_SFPICOCalibData.py @@ -1,73 +1,77 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..calib 
import SFPICOCalibData def test_SFPICOCalibData_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), info_file=dict( - argstr='-infooutputfile %s', + argstr="-infooutputfile %s", + extensions=None, genfile=True, hash_files=False, mandatory=True, ), onedtfarange=dict( - argstr='-onedtfarange %s', - units='NA', + argstr="-onedtfarange %s", + units="NA", ), onedtfastep=dict( - argstr='-onedtfastep %f', - units='NA', + argstr="-onedtfastep %f", + units="NA", ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), scheme_file=dict( - argstr='-schemefile %s', + argstr="-schemefile %s", + extensions=None, mandatory=True, ), seed=dict( - argstr='-seed %f', - units='NA', + argstr="-seed %f", + units="NA", ), snr=dict( - argstr='-snr %f', - units='NA', + argstr="-snr %f", + units="NA", ), trace=dict( - argstr='-trace %f', - units='NA', + argstr="-trace %f", + units="NA", ), twodtanglerange=dict( - argstr='-twodtanglerange %s', - units='NA', + argstr="-twodtanglerange %s", + units="NA", ), twodtanglestep=dict( - argstr='-twodtanglestep %f', - units='NA', + argstr="-twodtanglestep %f", + units="NA", ), twodtfarange=dict( - argstr='-twodtfarange %s', - units='NA', + argstr="-twodtfarange %s", + units="NA", ), twodtfastep=dict( - argstr='-twodtfastep %f', - units='NA', + argstr="-twodtfastep %f", + units="NA", ), twodtmixmax=dict( - argstr='-twodtmixmax %f', - units='NA', + argstr="-twodtmixmax %f", + units="NA", ), twodtmixstep=dict( - argstr='-twodtmixstep %f', - units='NA', + argstr="-twodtmixstep %f", + units="NA", ), ) inputs = SFPICOCalibData.input_spec() @@ -75,10 +79,16 @@ def test_SFPICOCalibData_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SFPICOCalibData_outputs(): output_map = dict( - PICOCalib=dict(), - calib_info=dict(), + PICOCalib=dict( + extensions=None, + ), + calib_info=dict( + extensions=None, + ), ) outputs = SFPICOCalibData.output_spec() diff --git a/nipype/interfaces/camino/tests/test_auto_SFPeaks.py b/nipype/interfaces/camino/tests/test_auto_SFPeaks.py index 959545d042..775a9061e6 100644 --- a/nipype/interfaces/camino/tests/test_auto_SFPeaks.py +++ b/nipype/interfaces/camino/tests/test_auto_SFPeaks.py @@ -1,65 +1,73 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..odf import SFPeaks def test_SFPeaks_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), density=dict( - argstr='-density %d', - units='NA', + argstr="-density %d", + units="NA", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, mandatory=True, ), inputmodel=dict( - argstr='-inputmodel %s', + argstr="-inputmodel %s", mandatory=True, ), mepointset=dict( - argstr='-mepointset %d', - units='NA', + argstr="-mepointset %d", + units="NA", + ), + noconsistencycheck=dict( + argstr="-noconsistencycheck", ), - noconsistencycheck=dict(argstr='-noconsistencycheck', ), numpds=dict( - argstr='-numpds %d', - units='NA', + argstr="-numpds %d", + units="NA", ), order=dict( - argstr='-order %d', - units='NA', + argstr="-order %d", + units="NA", ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), pdthresh=dict( - argstr='-pdthresh %f', - units='NA', + argstr="-pdthresh 
%f", + units="NA", ), pointset=dict( - argstr='-pointset %d', - units='NA', + argstr="-pointset %d", + units="NA", ), rbfpointset=dict( - argstr='-rbfpointset %d', - units='NA', + argstr="-rbfpointset %d", + units="NA", + ), + scheme_file=dict( + argstr="%s", + extensions=None, ), - scheme_file=dict(argstr='%s', ), searchradius=dict( - argstr='-searchradius %f', - units='NA', + argstr="-searchradius %f", + units="NA", ), stdsfrommean=dict( - argstr='-stdsfrommean %f', - units='NA', + argstr="-stdsfrommean %f", + units="NA", ), ) inputs = SFPeaks.input_spec() @@ -67,8 +75,14 @@ def test_SFPeaks_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SFPeaks_outputs(): - output_map = dict(peaks=dict(), ) + output_map = dict( + peaks=dict( + extensions=None, + ), + ) outputs = SFPeaks.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_Shredder.py b/nipype/interfaces/camino/tests/test_auto_Shredder.py index 1099693874..bf0f9dc9dc 100644 --- a/nipype/interfaces/camino/tests/test_auto_Shredder.py +++ b/nipype/interfaces/camino/tests/test_auto_Shredder.py @@ -1,39 +1,42 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import Shredder def test_Shredder_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), chunksize=dict( - argstr='%d', + argstr="%d", position=2, - units='NA', + units="NA", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='< %s', + argstr="< %s", + extensions=None, mandatory=True, position=-2, ), offset=dict( - argstr='%d', + argstr="%d", position=1, - units='NA', + units="NA", ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), space=dict( - argstr='%d', + argstr="%d", position=3, - units='NA', + units="NA", ), ) inputs = Shredder.input_spec() @@ -41,8 +44,14 @@ def test_Shredder_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Shredder_outputs(): - output_map = dict(shredded=dict(), ) + output_map = dict( + shredded=dict( + extensions=None, + ), + ) outputs = Shredder.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_Track.py b/nipype/interfaces/camino/tests/test_auto_Track.py index ad8d3ff2bb..697b2d5275 100644 --- a/nipype/interfaces/camino/tests/test_auto_Track.py +++ b/nipype/interfaces/camino/tests/test_auto_Track.py @@ -1,71 +1,93 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import Track def test_Track_inputs(): input_map = dict( - anisfile=dict(argstr='-anisfile %s', ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), + anisfile=dict( + argstr="-anisfile %s", + extensions=None, + ), + anisthresh=dict( + argstr="-anisthresh %f", + ), + args=dict( + argstr="%s", + ), curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], + argstr="-curveinterval %f", + requires=["curvethresh"], + ), + curvethresh=dict( + argstr="-curvethresh %f", ), - curvethresh=dict(argstr='-curvethresh %f', ), data_dims=dict( - argstr='-datadims %s', - units='voxels', + argstr="-datadims %s", + units="voxels", ), environ=dict( nohash=True, usedefault=True, ), - 
gzip=dict(argstr='-gzip', ), + gzip=dict( + argstr="-gzip", + ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), inputmodel=dict( - argstr='-inputmodel %s', + argstr="-inputmodel %s", usedefault=True, ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), + interpolator=dict( + argstr="-interpolator %s", + ), + ipthresh=dict( + argstr="-ipthresh %f", + ), maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', + argstr="-maxcomponents %d", + units="NA", ), numpds=dict( - argstr='-numpds %d', - units='NA', + argstr="-numpds %d", + units="NA", ), out_file=dict( - argstr='-outputfile %s', + argstr="-outputfile %s", + extensions=None, genfile=True, position=-1, ), output_root=dict( - argstr='-outputroot %s', + argstr="-outputroot %s", + extensions=None, position=-1, ), - outputtracts=dict(argstr='-outputtracts %s', ), + outputtracts=dict( + argstr="-outputtracts %s", + ), seed_file=dict( - argstr='-seedfile %s', + argstr="-seedfile %s", + extensions=None, position=2, ), stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], + argstr="-stepsize %f", + requires=["tracker"], ), tracker=dict( - argstr='-tracker %s', + argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', + argstr="-voxeldims %s", + units="mm", ), ) inputs = Track.input_spec() @@ -73,8 +95,14 @@ def test_Track_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Track_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = Track.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py b/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py index 89515cef55..a117d5d782 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py @@ -1,71 +1,93 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import TrackBallStick def test_TrackBallStick_inputs(): input_map = dict( - anisfile=dict(argstr='-anisfile %s', ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), + anisfile=dict( + argstr="-anisfile %s", + extensions=None, + ), + anisthresh=dict( + argstr="-anisthresh %f", + ), + args=dict( + argstr="%s", + ), curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], + argstr="-curveinterval %f", + requires=["curvethresh"], + ), + curvethresh=dict( + argstr="-curvethresh %f", ), - curvethresh=dict(argstr='-curvethresh %f', ), data_dims=dict( - argstr='-datadims %s', - units='voxels', + argstr="-datadims %s", + units="voxels", ), environ=dict( nohash=True, usedefault=True, ), - gzip=dict(argstr='-gzip', ), + gzip=dict( + argstr="-gzip", + ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), inputmodel=dict( - argstr='-inputmodel %s', + argstr="-inputmodel %s", usedefault=True, ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), + interpolator=dict( 
+ argstr="-interpolator %s", + ), + ipthresh=dict( + argstr="-ipthresh %f", + ), maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', + argstr="-maxcomponents %d", + units="NA", ), numpds=dict( - argstr='-numpds %d', - units='NA', + argstr="-numpds %d", + units="NA", ), out_file=dict( - argstr='-outputfile %s', + argstr="-outputfile %s", + extensions=None, genfile=True, position=-1, ), output_root=dict( - argstr='-outputroot %s', + argstr="-outputroot %s", + extensions=None, position=-1, ), - outputtracts=dict(argstr='-outputtracts %s', ), + outputtracts=dict( + argstr="-outputtracts %s", + ), seed_file=dict( - argstr='-seedfile %s', + argstr="-seedfile %s", + extensions=None, position=2, ), stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], + argstr="-stepsize %f", + requires=["tracker"], ), tracker=dict( - argstr='-tracker %s', + argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', + argstr="-voxeldims %s", + units="mm", ), ) inputs = TrackBallStick.input_spec() @@ -73,8 +95,14 @@ def test_TrackBallStick_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackBallStick_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = TrackBallStick.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py b/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py index 2815ef18ee..56ca8ece97 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py @@ -1,86 +1,124 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import TrackBayesDirac def test_TrackBayesDirac_inputs(): input_map = dict( - anisfile=dict(argstr='-anisfile %s', ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), + anisfile=dict( + argstr="-anisfile %s", + extensions=None, + ), + anisthresh=dict( + argstr="-anisthresh %f", + ), + args=dict( + argstr="%s", + ), curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], + argstr="-curveinterval %f", + requires=["curvethresh"], + ), + curvepriorg=dict( + argstr="-curvepriorg %G", + ), + curvepriork=dict( + argstr="-curvepriork %G", + ), + curvethresh=dict( + argstr="-curvethresh %f", ), - curvepriorg=dict(argstr='-curvepriorg %G', ), - curvepriork=dict(argstr='-curvepriork %G', ), - curvethresh=dict(argstr='-curvethresh %f', ), data_dims=dict( - argstr='-datadims %s', - units='voxels', + argstr="-datadims %s", + units="voxels", + ), + datamodel=dict( + argstr="-datamodel %s", ), - datamodel=dict(argstr='-datamodel %s', ), environ=dict( nohash=True, usedefault=True, ), - extpriordatatype=dict(argstr='-extpriordatatype %s', ), - extpriorfile=dict(argstr='-extpriorfile %s', ), - gzip=dict(argstr='-gzip', ), + extpriordatatype=dict( + argstr="-extpriordatatype %s", + ), + extpriorfile=dict( + argstr="-extpriorfile %s", + extensions=None, + ), + gzip=dict( + argstr="-gzip", + ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), inputmodel=dict( - argstr='-inputmodel %s', + argstr="-inputmodel %s", usedefault=True, ), - 
interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), + interpolator=dict( + argstr="-interpolator %s", + ), + ipthresh=dict( + argstr="-ipthresh %f", + ), iterations=dict( - argstr='-iterations %d', - units='NA', + argstr="-iterations %d", + units="NA", ), maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', + argstr="-maxcomponents %d", + units="NA", ), numpds=dict( - argstr='-numpds %d', - units='NA', + argstr="-numpds %d", + units="NA", ), out_file=dict( - argstr='-outputfile %s', + argstr="-outputfile %s", + extensions=None, genfile=True, position=-1, ), output_root=dict( - argstr='-outputroot %s', + argstr="-outputroot %s", + extensions=None, position=-1, ), - outputtracts=dict(argstr='-outputtracts %s', ), - pdf=dict(argstr='-pdf %s', ), - pointset=dict(argstr='-pointset %s', ), + outputtracts=dict( + argstr="-outputtracts %s", + ), + pdf=dict( + argstr="-pdf %s", + ), + pointset=dict( + argstr="-pointset %s", + ), scheme_file=dict( - argstr='-schemefile %s', + argstr="-schemefile %s", + extensions=None, mandatory=True, ), seed_file=dict( - argstr='-seedfile %s', + argstr="-seedfile %s", + extensions=None, position=2, ), stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], + argstr="-stepsize %f", + requires=["tracker"], ), tracker=dict( - argstr='-tracker %s', + argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', + argstr="-voxeldims %s", + units="mm", ), ) inputs = TrackBayesDirac.input_spec() @@ -88,8 +126,14 @@ def test_TrackBayesDirac_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackBayesDirac_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = TrackBayesDirac.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py index 827dbb2f27..dbd8f89478 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py @@ -1,79 +1,101 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import TrackBedpostxDeter def test_TrackBedpostxDeter_inputs(): input_map = dict( - anisfile=dict(argstr='-anisfile %s', ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), + anisfile=dict( + argstr="-anisfile %s", + extensions=None, + ), + anisthresh=dict( + argstr="-anisthresh %f", + ), + args=dict( + argstr="%s", + ), bedpostxdir=dict( - argstr='-bedpostxdir %s', + argstr="-bedpostxdir %s", mandatory=True, ), curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], + argstr="-curveinterval %f", + requires=["curvethresh"], + ), + curvethresh=dict( + argstr="-curvethresh %f", ), - curvethresh=dict(argstr='-curvethresh %f', ), data_dims=dict( - argstr='-datadims %s', - units='voxels', + argstr="-datadims %s", + units="voxels", ), environ=dict( nohash=True, usedefault=True, ), - gzip=dict(argstr='-gzip', ), + gzip=dict( + argstr="-gzip", + ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), inputmodel=dict( - argstr='-inputmodel 
%s', + argstr="-inputmodel %s", usedefault=True, ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), + interpolator=dict( + argstr="-interpolator %s", + ), + ipthresh=dict( + argstr="-ipthresh %f", + ), maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', + argstr="-maxcomponents %d", + units="NA", ), min_vol_frac=dict( - argstr='-bedpostxminf %d', - units='NA', + argstr="-bedpostxminf %d", + units="NA", ), numpds=dict( - argstr='-numpds %d', - units='NA', + argstr="-numpds %d", + units="NA", ), out_file=dict( - argstr='-outputfile %s', + argstr="-outputfile %s", + extensions=None, genfile=True, position=-1, ), output_root=dict( - argstr='-outputroot %s', + argstr="-outputroot %s", + extensions=None, position=-1, ), - outputtracts=dict(argstr='-outputtracts %s', ), + outputtracts=dict( + argstr="-outputtracts %s", + ), seed_file=dict( - argstr='-seedfile %s', + argstr="-seedfile %s", + extensions=None, position=2, ), stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], + argstr="-stepsize %f", + requires=["tracker"], ), tracker=dict( - argstr='-tracker %s', + argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', + argstr="-voxeldims %s", + units="mm", ), ) inputs = TrackBedpostxDeter.input_spec() @@ -81,8 +103,14 @@ def test_TrackBedpostxDeter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackBedpostxDeter_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = TrackBedpostxDeter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py index e250ec9c40..7d1baa0e43 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py @@ -1,83 +1,105 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import TrackBedpostxProba def test_TrackBedpostxProba_inputs(): input_map = dict( - anisfile=dict(argstr='-anisfile %s', ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), + anisfile=dict( + argstr="-anisfile %s", + extensions=None, + ), + anisthresh=dict( + argstr="-anisthresh %f", + ), + args=dict( + argstr="%s", + ), bedpostxdir=dict( - argstr='-bedpostxdir %s', + argstr="-bedpostxdir %s", mandatory=True, ), curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], + argstr="-curveinterval %f", + requires=["curvethresh"], + ), + curvethresh=dict( + argstr="-curvethresh %f", ), - curvethresh=dict(argstr='-curvethresh %f', ), data_dims=dict( - argstr='-datadims %s', - units='voxels', + argstr="-datadims %s", + units="voxels", ), environ=dict( nohash=True, usedefault=True, ), - gzip=dict(argstr='-gzip', ), + gzip=dict( + argstr="-gzip", + ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), inputmodel=dict( - argstr='-inputmodel %s', + argstr="-inputmodel %s", usedefault=True, ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), + interpolator=dict( + argstr="-interpolator %s", + ), 
+ ipthresh=dict( + argstr="-ipthresh %f", + ), iterations=dict( - argstr='-iterations %d', - units='NA', + argstr="-iterations %d", + units="NA", ), maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', + argstr="-maxcomponents %d", + units="NA", ), min_vol_frac=dict( - argstr='-bedpostxminf %d', - units='NA', + argstr="-bedpostxminf %d", + units="NA", ), numpds=dict( - argstr='-numpds %d', - units='NA', + argstr="-numpds %d", + units="NA", ), out_file=dict( - argstr='-outputfile %s', + argstr="-outputfile %s", + extensions=None, genfile=True, position=-1, ), output_root=dict( - argstr='-outputroot %s', + argstr="-outputroot %s", + extensions=None, position=-1, ), - outputtracts=dict(argstr='-outputtracts %s', ), + outputtracts=dict( + argstr="-outputtracts %s", + ), seed_file=dict( - argstr='-seedfile %s', + argstr="-seedfile %s", + extensions=None, position=2, ), stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], + argstr="-stepsize %f", + requires=["tracker"], ), tracker=dict( - argstr='-tracker %s', + argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', + argstr="-voxeldims %s", + units="mm", ), ) inputs = TrackBedpostxProba.input_spec() @@ -85,8 +107,14 @@ def test_TrackBedpostxProba_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackBedpostxProba_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = TrackBedpostxProba.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py b/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py index 90ffd05a2d..75cd2e3d11 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py @@ -1,85 +1,113 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import TrackBootstrap def test_TrackBootstrap_inputs(): input_map = dict( - anisfile=dict(argstr='-anisfile %s', ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), - bgmask=dict(argstr='-bgmask %s', ), + anisfile=dict( + argstr="-anisfile %s", + extensions=None, + ), + anisthresh=dict( + argstr="-anisthresh %f", + ), + args=dict( + argstr="%s", + ), + bgmask=dict( + argstr="-bgmask %s", + extensions=None, + ), bsdatafiles=dict( - argstr='-bsdatafile %s', + argstr="-bsdatafile %s", mandatory=True, ), curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], + argstr="-curveinterval %f", + requires=["curvethresh"], + ), + curvethresh=dict( + argstr="-curvethresh %f", ), - curvethresh=dict(argstr='-curvethresh %f', ), data_dims=dict( - argstr='-datadims %s', - units='voxels', + argstr="-datadims %s", + units="voxels", ), environ=dict( nohash=True, usedefault=True, ), - gzip=dict(argstr='-gzip', ), + gzip=dict( + argstr="-gzip", + ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), inputmodel=dict( - argstr='-inputmodel %s', + argstr="-inputmodel %s", usedefault=True, ), - interpolator=dict(argstr='-interpolator %s', ), - inversion=dict(argstr='-inversion %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), + interpolator=dict( + 
argstr="-interpolator %s", + ), + inversion=dict( + argstr="-inversion %s", + ), + ipthresh=dict( + argstr="-ipthresh %f", + ), iterations=dict( - argstr='-iterations %d', - units='NA', + argstr="-iterations %d", + units="NA", ), maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', + argstr="-maxcomponents %d", + units="NA", ), numpds=dict( - argstr='-numpds %d', - units='NA', + argstr="-numpds %d", + units="NA", ), out_file=dict( - argstr='-outputfile %s', + argstr="-outputfile %s", + extensions=None, genfile=True, position=-1, ), output_root=dict( - argstr='-outputroot %s', + argstr="-outputroot %s", + extensions=None, position=-1, ), - outputtracts=dict(argstr='-outputtracts %s', ), + outputtracts=dict( + argstr="-outputtracts %s", + ), scheme_file=dict( - argstr='-schemefile %s', + argstr="-schemefile %s", + extensions=None, mandatory=True, ), seed_file=dict( - argstr='-seedfile %s', + argstr="-seedfile %s", + extensions=None, position=2, ), stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], + argstr="-stepsize %f", + requires=["tracker"], ), tracker=dict( - argstr='-tracker %s', + argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', + argstr="-voxeldims %s", + units="mm", ), ) inputs = TrackBootstrap.input_spec() @@ -87,8 +115,14 @@ def test_TrackBootstrap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackBootstrap_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = TrackBootstrap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackDT.py b/nipype/interfaces/camino/tests/test_auto_TrackDT.py index 7ab3772468..c60ba7b5f5 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackDT.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackDT.py @@ -1,71 +1,93 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import TrackDT def test_TrackDT_inputs(): input_map = dict( - anisfile=dict(argstr='-anisfile %s', ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), + anisfile=dict( + argstr="-anisfile %s", + extensions=None, + ), + anisthresh=dict( + argstr="-anisthresh %f", + ), + args=dict( + argstr="%s", + ), curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], + argstr="-curveinterval %f", + requires=["curvethresh"], + ), + curvethresh=dict( + argstr="-curvethresh %f", ), - curvethresh=dict(argstr='-curvethresh %f', ), data_dims=dict( - argstr='-datadims %s', - units='voxels', + argstr="-datadims %s", + units="voxels", ), environ=dict( nohash=True, usedefault=True, ), - gzip=dict(argstr='-gzip', ), + gzip=dict( + argstr="-gzip", + ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), inputmodel=dict( - argstr='-inputmodel %s', + argstr="-inputmodel %s", usedefault=True, ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), + interpolator=dict( + argstr="-interpolator %s", + ), + ipthresh=dict( + argstr="-ipthresh %f", + ), maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', + argstr="-maxcomponents %d", + units="NA", ), numpds=dict( - argstr='-numpds 
%d', - units='NA', + argstr="-numpds %d", + units="NA", ), out_file=dict( - argstr='-outputfile %s', + argstr="-outputfile %s", + extensions=None, genfile=True, position=-1, ), output_root=dict( - argstr='-outputroot %s', + argstr="-outputroot %s", + extensions=None, position=-1, ), - outputtracts=dict(argstr='-outputtracts %s', ), + outputtracts=dict( + argstr="-outputtracts %s", + ), seed_file=dict( - argstr='-seedfile %s', + argstr="-seedfile %s", + extensions=None, position=2, ), stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], + argstr="-stepsize %f", + requires=["tracker"], ), tracker=dict( - argstr='-tracker %s', + argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', + argstr="-voxeldims %s", + units="mm", ), ) inputs = TrackDT.input_spec() @@ -73,8 +95,14 @@ def test_TrackDT_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackDT_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = TrackDT.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackPICo.py b/nipype/interfaces/camino/tests/test_auto_TrackPICo.py index 30ff12db9f..1d3647e151 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackPICo.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackPICo.py @@ -1,76 +1,100 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import TrackPICo def test_TrackPICo_inputs(): input_map = dict( - anisfile=dict(argstr='-anisfile %s', ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), + anisfile=dict( + argstr="-anisfile %s", + extensions=None, + ), + anisthresh=dict( + argstr="-anisthresh %f", + ), + args=dict( + argstr="%s", + ), curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], + argstr="-curveinterval %f", + requires=["curvethresh"], + ), + curvethresh=dict( + argstr="-curvethresh %f", ), - curvethresh=dict(argstr='-curvethresh %f', ), data_dims=dict( - argstr='-datadims %s', - units='voxels', + argstr="-datadims %s", + units="voxels", ), environ=dict( nohash=True, usedefault=True, ), - gzip=dict(argstr='-gzip', ), + gzip=dict( + argstr="-gzip", + ), in_file=dict( - argstr='-inputfile %s', + argstr="-inputfile %s", + extensions=None, position=1, ), - inputdatatype=dict(argstr='-inputdatatype %s', ), + inputdatatype=dict( + argstr="-inputdatatype %s", + ), inputmodel=dict( - argstr='-inputmodel %s', + argstr="-inputmodel %s", usedefault=True, ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), + interpolator=dict( + argstr="-interpolator %s", + ), + ipthresh=dict( + argstr="-ipthresh %f", + ), iterations=dict( - argstr='-iterations %d', - units='NA', + argstr="-iterations %d", + units="NA", ), maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', + argstr="-maxcomponents %d", + units="NA", ), numpds=dict( - argstr='-numpds %d', - units='NA', + argstr="-numpds %d", + units="NA", ), out_file=dict( - argstr='-outputfile %s', + argstr="-outputfile %s", + extensions=None, genfile=True, position=-1, ), output_root=dict( - argstr='-outputroot %s', + argstr="-outputroot %s", + extensions=None, position=-1, ), - outputtracts=dict(argstr='-outputtracts %s', ), - pdf=dict(argstr='-pdf %s', ), + outputtracts=dict( + 
argstr="-outputtracts %s", + ), + pdf=dict( + argstr="-pdf %s", + ), seed_file=dict( - argstr='-seedfile %s', + argstr="-seedfile %s", + extensions=None, position=2, ), stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], + argstr="-stepsize %f", + requires=["tracker"], ), tracker=dict( - argstr='-tracker %s', + argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', + argstr="-voxeldims %s", + units="mm", ), ) inputs = TrackPICo.input_spec() @@ -78,8 +102,14 @@ def test_TrackPICo_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackPICo_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = TrackPICo.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TractShredder.py b/nipype/interfaces/camino/tests/test_auto_TractShredder.py index c9cf40e1c3..07678c5d64 100644 --- a/nipype/interfaces/camino/tests/test_auto_TractShredder.py +++ b/nipype/interfaces/camino/tests/test_auto_TractShredder.py @@ -1,39 +1,42 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import TractShredder def test_TractShredder_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), bunchsize=dict( - argstr='%d', + argstr="%d", position=2, - units='NA', + units="NA", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='< %s', + argstr="< %s", + extensions=None, mandatory=True, position=-2, ), offset=dict( - argstr='%d', + argstr="%d", position=1, - units='NA', + units="NA", ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), space=dict( - argstr='%d', + argstr="%d", position=3, - units='NA', + units="NA", ), ) inputs = TractShredder.input_spec() @@ -41,8 +44,14 @@ def test_TractShredder_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TractShredder_outputs(): - output_map = dict(shredded=dict(), ) + output_map = dict( + shredded=dict( + extensions=None, + ), + ) outputs = TractShredder.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_VtkStreamlines.py b/nipype/interfaces/camino/tests/test_auto_VtkStreamlines.py index 8dc8bd03e9..194f233cc1 100644 --- a/nipype/interfaces/camino/tests/test_auto_VtkStreamlines.py +++ b/nipype/interfaces/camino/tests/test_auto_VtkStreamlines.py @@ -1,48 +1,60 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import VtkStreamlines def test_VtkStreamlines_inputs(): input_map = dict( - args=dict(argstr='%s', ), - colourorient=dict(argstr='-colourorient', ), + args=dict( + argstr="%s", + ), + colourorient=dict( + argstr="-colourorient", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr=' < %s', + argstr=" < %s", + extensions=None, mandatory=True, position=-2, ), inputmodel=dict( - argstr='-inputmodel %s', + argstr="-inputmodel %s", usedefault=True, ), - interpolate=dict(argstr='-interpolate', ), - interpolatescalars=dict(argstr='-interpolatescalars', ), + interpolate=dict( + argstr="-interpolate", + ), + interpolatescalars=dict( + argstr="-interpolatescalars", + 
), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), scalar_file=dict( - argstr='-scalarfile %s', + argstr="-scalarfile %s", + extensions=None, position=3, ), seed_file=dict( - argstr='-seedfile %s', + argstr="-seedfile %s", + extensions=None, position=1, ), target_file=dict( - argstr='-targetfile %s', + argstr="-targetfile %s", + extensions=None, position=2, ), voxeldims=dict( - argstr='-voxeldims %s', + argstr="-voxeldims %s", position=4, - units='mm', + units="mm", ), ) inputs = VtkStreamlines.input_spec() @@ -50,8 +62,14 @@ def test_VtkStreamlines_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VtkStreamlines_outputs(): - output_map = dict(vtk=dict(), ) + output_map = dict( + vtk=dict( + extensions=None, + ), + ) outputs = VtkStreamlines.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/utils.py b/nipype/interfaces/camino/utils.py index 57fcd58d9a..93bd4fe5d4 100644 --- a/nipype/interfaces/camino/utils.py +++ b/nipype/interfaces/camino/utils.py @@ -1,22 +1,28 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os -from ..base import (traits, TraitedSpec, File, CommandLine, - CommandLineInputSpec, InputMultiPath) +from ..base import ( + traits, + TraitedSpec, + File, + CommandLine, + CommandLineInputSpec, + InputMultiPath, +) from ...utils.filemanip import split_filename class ImageStatsInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), - argstr='-images %s', + argstr="-images %s", mandatory=True, position=-1, - desc=('List of images to process. They must ' - 'be in the same space and have the same ' - 'dimensions.')) + desc=( + "List of images to process. They must " + "be in the same space and have the same " + "dimensions." + ), + ) stat = traits.Enum( "min", "max", @@ -25,10 +31,11 @@ class ImageStatsInputSpec(CommandLineInputSpec): "sum", "std", "var", - argstr='-stat %s', - units='NA', + argstr="-stat %s", + units="NA", mandatory=True, - desc="The statistic to compute.") + desc="The statistic to compute.", + ) out_type = traits.Enum( "float", @@ -37,21 +44,24 @@ class ImageStatsInputSpec(CommandLineInputSpec): "int", "long", "double", - argstr='-outputdatatype %s', + argstr="-outputdatatype %s", usedefault=True, - desc=('A Camino data type string, default is "float". ' - 'Type must be signed.')) + desc=('A Camino data type string, default is "float". Type must be signed.'), + ) output_root = File( - argstr='-outputroot %s', + argstr="-outputroot %s", mandatory=True, - desc=('Filename root prepended onto the names of the output ' - ' files. The extension will be determined from the input.')) + desc=( + "Filename root prepended onto the names of the output " + " files. The extension will be determined from the input." 
+ ), + ) class ImageStatsOutputSpec(TraitedSpec): out_file = File( - exists=True, - desc='Path of the file computed with the statistic chosen') + exists=True, desc="Path of the file computed with the statistic chosen" + ) class ImageStats(CommandLine): @@ -69,13 +79,14 @@ class ImageStats(CommandLine): >>> imstats.inputs.stat = 'max' >>> imstats.run() # doctest: +SKIP """ - _cmd = 'imagestats' + + _cmd = "imagestats" input_spec = ImageStatsInputSpec output_spec = ImageStatsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self._gen_outfilename()) + outputs["out_file"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): diff --git a/nipype/interfaces/camino2trackvis/__init__.py b/nipype/interfaces/camino2trackvis/__init__.py index 94d3e458a7..ce31d60610 100644 --- a/nipype/interfaces/camino2trackvis/__init__.py +++ b/nipype/interfaces/camino2trackvis/__init__.py @@ -1,7 +1,4 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Camino2Trackvis top level namespace -""" - +"""Camino-Trackvis allows interoperability between Camino and TrackVis.""" from .convert import Camino2Trackvis, Trackvis2Camino diff --git a/nipype/interfaces/camino2trackvis/convert.py b/nipype/interfaces/camino2trackvis/convert.py index 573ddffe2d..8d1db28b95 100644 --- a/nipype/interfaces/camino2trackvis/convert.py +++ b/nipype/interfaces/camino2trackvis/convert.py @@ -1,9 +1,4 @@ -# -*- coding: utf-8 -*- -""" -Provides interfaces to various commands provided by Camino-Trackvis -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) +"""Provides interfaces to various commands provided by Camino-Trackvis.""" import os @@ -14,74 +9,78 @@ class Camino2TrackvisInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='-i %s', + argstr="-i %s", mandatory=True, position=1, - desc='The input .Bfloat (camino) file.') + desc="The input .Bfloat (camino) file.", + ) out_file = File( - argstr='-o %s', + argstr="-o %s", genfile=True, position=2, - desc='The filename to which to write the .trk (trackvis) file.') + desc="The filename to which to write the .trk (trackvis) file.", + ) min_length = traits.Float( - argstr='-l %d', + argstr="-l %d", position=3, - units='mm', - desc='The minimum length of tracts to output') + units="mm", + desc="The minimum length of tracts to output", + ) data_dims = traits.List( traits.Int, - argstr='-d %s', - sep=',', + argstr="-d %s", + sep=",", mandatory=True, position=4, minlen=3, maxlen=3, - desc= - 'Three comma-separated integers giving the number of voxels along each dimension of the source scans.' + desc="Three comma-separated integers giving the number of voxels along each dimension of the source scans.", ) voxel_dims = traits.List( traits.Float, - argstr='-x %s', - sep=',', + argstr="-x %s", + sep=",", mandatory=True, position=5, minlen=3, maxlen=3, - desc= - 'Three comma-separated numbers giving the size of each voxel in mm.') + desc="Three comma-separated numbers giving the size of each voxel in mm.", + ) # Change to enum with all combinations? i.e. LAS, LPI, RAS, etc.. 
voxel_order = File( - argstr='--voxel-order %s', + argstr="--voxel-order %s", mandatory=True, position=6, - desc='Set the order in which various directions were stored.\ + desc="Set the order in which various directions were stored.\ Specify with three letters consisting of one each \ from the pairs LR, AP, and SI. These stand for Left-Right, \ Anterior-Posterior, and Superior-Inferior. \ Whichever is specified in each position will \ be the direction of increasing order. \ - Read coordinate system from a NIfTI file.') + Read coordinate system from a NIfTI file.", + ) nifti_file = File( - argstr='--nifti %s', + argstr="--nifti %s", exists=True, position=7, - desc='Read coordinate system from a NIfTI file.') + desc="Read coordinate system from a NIfTI file.", + ) class Camino2TrackvisOutputSpec(TraitedSpec): trackvis = File( - exists=True, - desc='The filename to which to write the .trk (trackvis) file.') + exists=True, desc="The filename to which to write the .trk (trackvis) file." + ) class Camino2Trackvis(CommandLine): - """ Wraps camino_to_trackvis from Camino-Trackvis + """Wraps camino_to_trackvis from Camino-Trackvis Convert files from camino .Bfloat format to trackvis .trk format. @@ -99,28 +98,28 @@ class Camino2Trackvis(CommandLine): >>> c2t.run() # doctest: +SKIP """ - _cmd = 'camino_to_trackvis' + _cmd = "camino_to_trackvis" input_spec = Camino2TrackvisInputSpec output_spec = Camino2TrackvisOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['trackvis'] = os.path.abspath(self._gen_outfilename()) + outputs["trackvis"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '.trk' + return name + ".trk" class Trackvis2CaminoInputSpec(CommandLineInputSpec): - """ Wraps trackvis_to_camino from Camino-Trackvis + """Wraps trackvis_to_camino from Camino-Trackvis Convert files from camino .Bfloat format to trackvis .trk format. @@ -136,46 +135,49 @@ class Trackvis2CaminoInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='-i %s', + argstr="-i %s", mandatory=True, position=1, - desc='The input .trk (trackvis) file.') + desc="The input .trk (trackvis) file.", + ) out_file = File( - argstr='-o %s', + argstr="-o %s", genfile=True, position=2, - desc='The filename to which to write the .Bfloat (camino).') + desc="The filename to which to write the .Bfloat (camino).", + ) append_file = File( exists=True, - argstr='-a %s', + argstr="-a %s", position=2, - desc='A file to which the append the .Bfloat data. ') + desc="A file to which the append the .Bfloat data. ", + ) class Trackvis2CaminoOutputSpec(TraitedSpec): camino = File( - exists=True, - desc='The filename to which to write the .Bfloat (camino).') + exists=True, desc="The filename to which to write the .Bfloat (camino)." 
+ ) class Trackvis2Camino(CommandLine): - _cmd = 'trackvis_to_camino' + _cmd = "trackvis_to_camino" input_spec = Trackvis2CaminoInputSpec output_spec = Trackvis2CaminoOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['camino'] = os.path.abspath(self._gen_outfilename()) + outputs["camino"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '.Bfloat' + return name + ".Bfloat" diff --git a/nipype/interfaces/camino2trackvis/tests/__init__.py b/nipype/interfaces/camino2trackvis/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/camino2trackvis/tests/__init__.py +++ b/nipype/interfaces/camino2trackvis/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/camino2trackvis/tests/test_auto_Camino2Trackvis.py b/nipype/interfaces/camino2trackvis/tests/test_auto_Camino2Trackvis.py index b4d5092dab..fb076c1107 100644 --- a/nipype/interfaces/camino2trackvis/tests/test_auto_Camino2Trackvis.py +++ b/nipype/interfaces/camino2trackvis/tests/test_auto_Camino2Trackvis.py @@ -1,48 +1,53 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import Camino2Trackvis def test_Camino2Trackvis_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), data_dims=dict( - argstr='-d %s', + argstr="-d %s", mandatory=True, position=4, - sep=',', + sep=",", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, position=1, ), min_length=dict( - argstr='-l %d', + argstr="-l %d", position=3, - units='mm', + units="mm", ), nifti_file=dict( - argstr='--nifti %s', + argstr="--nifti %s", + extensions=None, position=7, ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, position=2, ), voxel_dims=dict( - argstr='-x %s', + argstr="-x %s", mandatory=True, position=5, - sep=',', + sep=",", ), voxel_order=dict( - argstr='--voxel-order %s', + argstr="--voxel-order %s", + extensions=None, mandatory=True, position=6, ), @@ -52,8 +57,14 @@ def test_Camino2Trackvis_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Camino2Trackvis_outputs(): - output_map = dict(trackvis=dict(), ) + output_map = dict( + trackvis=dict( + extensions=None, + ), + ) outputs = Camino2Trackvis.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino2trackvis/tests/test_auto_Trackvis2Camino.py b/nipype/interfaces/camino2trackvis/tests/test_auto_Trackvis2Camino.py index 978b2439a2..ec7ed22d0c 100644 --- a/nipype/interfaces/camino2trackvis/tests/test_auto_Trackvis2Camino.py +++ b/nipype/interfaces/camino2trackvis/tests/test_auto_Trackvis2Camino.py @@ -1,26 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import Trackvis2Camino def test_Trackvis2Camino_inputs(): input_map = dict( append_file=dict( - argstr='-a %s', + argstr="-a %s", + extensions=None, position=2, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + 
extensions=None, mandatory=True, position=1, ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, position=2, ), @@ -30,8 +34,14 @@ def test_Trackvis2Camino_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Trackvis2Camino_outputs(): - output_map = dict(camino=dict(), ) + output_map = dict( + camino=dict( + extensions=None, + ), + ) outputs = Trackvis2Camino.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/cat12/__init__.py b/nipype/interfaces/cat12/__init__.py new file mode 100644 index 0000000000..40059b23e1 --- /dev/null +++ b/nipype/interfaces/cat12/__init__.py @@ -0,0 +1,5 @@ +from .preprocess import CAT12Segment, CAT12SANLMDenoising +from .surface import ( + ExtractAdditionalSurfaceParameters, + ExtractROIBasedSurfaceMeasures, +) diff --git a/nipype/interfaces/cat12/base.py b/nipype/interfaces/cat12/base.py new file mode 100644 index 0000000000..03091f1fb9 --- /dev/null +++ b/nipype/interfaces/cat12/base.py @@ -0,0 +1,15 @@ +class Cell: + def __init__(self, arg): + self.arg = arg + + def to_string(self): + if isinstance(self.arg, list): + v = "\n".join([f"'{el}'" for el in self.arg]) + else: + v = self.arg + return v + + +class NestedCell(Cell): + def __str__(self): + return "{{%s}}" % self.to_string() diff --git a/nipype/interfaces/cat12/preprocess.py b/nipype/interfaces/cat12/preprocess.py new file mode 100644 index 0000000000..0153c556fd --- /dev/null +++ b/nipype/interfaces/cat12/preprocess.py @@ -0,0 +1,746 @@ +import os +from pathlib import Path + +from nipype.interfaces.base import ( + InputMultiPath, + TraitedSpec, + traits, + Tuple, + isdefined, + File, + Str, +) +from nipype.interfaces.cat12.base import Cell + +from nipype.interfaces.spm import SPMCommand +from nipype.interfaces.spm.base import ( + SPMCommandInputSpec, + ImageFileSPM, + scans_for_fnames, + scans_for_fname, +) +from nipype.utils.filemanip import split_filename, fname_presuffix + + +class CAT12SegmentInputSpec(SPMCommandInputSpec): + in_files = InputMultiPath( + ImageFileSPM(exists=True), + field="data", + desc="file to segment", + mandatory=True, + copyfile=False, + ) + + _help_tpm = ( + "Tissue Probability Map. Select the tissue probability image that includes 6 tissue probability " + "classes for (1) grey matter, (2) white matter, (3) cerebrospinal fluid, (4) bone, (5) non-brain " + "soft tissue, and (6) the background. CAT uses the TPM only for the initial SPM segmentation." + ) + tpm = InputMultiPath( + ImageFileSPM(exists=True), + field="tpm", + desc=_help_tpm, + mandatory=False, + copyfile=False, + ) + + _help_shoots_tpm = ( + "Shooting Template %d. The Shooting template must be in multi-volume nifti format and should contain GM," + " WM, and background segmentations and have to be saved with at least 16 bit. 
" + ) + + shooting_tpm = ImageFileSPM( + exists=True, + field="extopts.registration.shooting.shootingtpm", + desc=_help_shoots_tpm % 0, + mandatory=False, + copyfile=False, + ) + + shooting_tpm_template_1 = ImageFileSPM( + exists=True, desc=_help_shoots_tpm % 1, mandatory=False, copyfile=False + ) + shooting_tpm_template_2 = ImageFileSPM( + exists=True, desc=_help_shoots_tpm % 2, mandatory=False, copyfile=False + ) + shooting_tpm_template_3 = ImageFileSPM( + exists=True, desc=_help_shoots_tpm % 3, mandatory=False, copyfile=False + ) + shooting_tpm_template_4 = ImageFileSPM( + exists=True, desc=_help_shoots_tpm % 4, mandatory=False, copyfile=False + ) + + n_jobs = traits.Int( + 1, usedefault=True, mandatory=True, field="nproc", desc="Number of threads" + ) + + _help_affine_reg = ( + "Affine Regularization. The procedure is a local optimisation, so it needs reasonable initial " + "starting estimates. Images should be placed in approximate alignment using the Display " + "function of SPM before beginning. A Mutual Information affine registration with the tissue " + "probability maps (D" + "Agostino et al, 2004) is used to achieve approximate alignment." + ) + affine_regularization = Str( + default_value="mni", field="opts.affreg", usedefault=True, desc=_help_affine_reg + ) + + _help_bias_acc = ( + "Strength of the SPM inhomogeneity (bias) correction that simultaneously controls the SPM " + "biasreg, biasfwhm, samp (resolution), and tol (iteration) parameter." + ) + power_spm_inhomogeneity_correction = traits.Float( + default_value=0.5, field="opts.biasacc", usedefault=True, desc=_help_bias_acc + ) + # Extended options for CAT12 preprocessing + _help_app = ( + "Affine registration and SPM preprocessing can fail in some subjects with deviating anatomy (e.g. " + "other species/neonates) or in images with strong signal inhomogeneities, or untypical intensities " + "(e.g. synthetic images). An initial bias correction can help to reduce such problems (see details " + 'below). Recommended are the "default" and "full" option.' + ) + affine_preprocessing = traits.Int( + 1070, field="extopts.APP", desc=_help_app, usedefault=True + ) + + _help_initial_seg = ( + "In rare cases the Unified Segmentation can fail in highly abnormal brains, where e.g. the " + "cerebrospinal fluid of superlarge ventricles (hydrocephalus) were classified as white " + "matter. However, if the affine registration is correct, the AMAP segmentation with an " + "prior-independent k-means initialization can be used to replace the SPM brain tissue " + "classification. Moreover, if the default Dartel and Shooting registrations will fail then " + 'rhe "Optimized Shooting - superlarge ventricles" option for "Spatial registration" is ! ' + "required Values: \nnone: 0;\nlight: 1;\nfull: 2;\ndefault: 1070." + ) + initial_segmentation = traits.Int( + 0, field="extopts.spm_kamap", desc=_help_initial_seg, usedefault=True + ) + + _help_las = ( + "Additionally to WM-inhomogeneities, GM intensity can vary across different regions such as the motor" + " cortex, the basal ganglia, or the occipital lobe. These changes have an anatomical background " + "(e.g. iron content, myelinization), but are dependent on the MR-protocol and often lead to " + "underestimation of GM at higher intensities and overestimation of CSF at lower intensities. " + "Therefore, a local intensity transformation of all tissue classes is used to reduce these effects in" + " the image. This local adaptive segmentation (LAS) is applied before the final AMAP segmentation." 
+ "Possible Values: \nSPM Unified Segmentation: 0 \nk-means AMAP: 2" + ) + local_adaptive_seg = traits.Float( + 0.5, field="extopts.LASstr", usedefault=True, desc=_help_las + ) + + _help_gcutstr = ( + "Method of initial skull-stripping before AMAP segmentation. The SPM approach works quite stable " + "for the majority of data. However, in some rare cases parts of GM (i.e. in frontal lobe) might " + "be cut. If this happens the GCUT approach is a good alternative. GCUT is a graph-cut/region-" + "growing approach starting from the WM area. APRG (adaptive probability region-growing) is a new" + " method that refines the probability maps of the SPM approach by region-growing techniques of " + "the gcut approach with a final surface-based optimization strategy. This is currently the method" + " with the most accurate and reliable results. If you use already skull-stripped data you can " + "turn off skull-stripping although this is automatically detected in most cases. Please note that " + "the choice of the skull-stripping method will also influence the estimation of TIV, because the" + " methods mainly differ in the handling of the outer CSF around the cortical surface. " + "\nPossible Values:\n - none (already skull-stripped): -1;\n - SPM approach: 0; " + "\n - GCUT approach: 0.50; \n - APRG approach: 2" + ) + skull_strip = traits.Float( + 2, field="extopts.gcutstr", desc=_help_gcutstr, usedefault=True + ) + + _help_wmhc = ( + "WARNING: Please note that the detection of WM hyperintensies is still under development and does " + "not have the same accuracy as approaches that additionally consider FLAIR images (e.g. Lesion " + "Segmentation Toolbox)! In aging or (neurodegenerative) diseases WM intensity can be reduced " + "locally in T1 or increased in T2/PD images. These so-called WM hyperintensies (WMHs) can lead to " + "preprocessing errors. Large GM areas next to the ventricle can cause normalization problems. " + "Therefore, a temporary correction for normalization is useful if WMHs are expected. CAT allows " + "different ways to handle WMHs: " + "\n0) No Correction (handled as GM). \n1) Temporary (internal) correction as WM for spatial " + "normalization and estimation of cortical thickness. \n2) Permanent correction to WM. " + ) + wm_hyper_intensity_correction = traits.Int( + 1, field="extopts.WMHC", desc=_help_wmhc, usedefault=True + ) + + _help_vox = ( + "The (isotropic) voxel sizes of any spatially normalised written images. A non-finite value will be " + "replaced by the average voxel size of the tissue probability maps used by the segmentation." + ) + voxel_size = traits.Float(1.5, field="extopts.vox", desc=_help_vox, usedefault=True) + + _help_resampling = ( + "Internal resampling for preprocessing.\n The default fixed image resolution offers a good " + "trade-off between optimal quality and preprocessing time and memory demands. Standard " + "structural data with a voxel resolution around 1mm or even data with high in-plane resolution" + " and large slice thickness (e.g. 0.5x0.5x1.5 mm) will benefit from this setting. If you have" + ' higher native resolutions the highres option "Fixed 0.8 mm" will sometimes offer slightly' + " better preprocessing quality with an increase of preprocessing time and memory demands. In" + " case of even higher resolutions and high signal-to-noise ratio (e.g. for 7 T data) the " + '"Best native" option will process the data on the highest native resolution. A resolution' + " of 0.4x0.7x1.0 mm will be interpolated to 0.4x0.4x0.4 mm. 
+ + _help_resampling = ( + "Internal resampling for preprocessing.\n The default fixed image resolution offers a good " + "trade-off between optimal quality and preprocessing time and memory demands. Standard " + "structural data with a voxel resolution around 1mm or even data with high in-plane resolution" + " and large slice thickness (e.g. 0.5x0.5x1.5 mm) will benefit from this setting. If you have" + ' higher native resolutions the highres option "Fixed 0.8 mm" will sometimes offer slightly' + " better preprocessing quality with an increase of preprocessing time and memory demands. In" + " case of even higher resolutions and high signal-to-noise ratio (e.g. for 7 T data) the " + '"Best native" option will process the data on the highest native resolution. A resolution' + " of 0.4x0.7x1.0 mm will be interpolated to 0.4x0.4x0.4 mm. A tolerance range of 0.1 mm is " + "used to avoid interpolation artifacts, i.e. a resolution of 0.95x1.01x1.08 mm will not be " + 'interpolated in case of the "Fixed 1.0 mm"! This "optimal" option prefers an isotropic voxel ' + "size with at least 1.1 mm that is controlled by the median voxel size and a volume term that " + "penalizes highly anisotropic voxels." + "\nValues:\nOptimal: [1.0 0.1]\nFixed 1.0 mm: [1.0 0.1];\nFixed 0.8 mm: [0.8 0.1]" + "\nBest native: [0.5 0.1]" + ) + internal_resampling_process = Tuple( + traits.Float(1), + traits.Float(0.1), + minlen=2, + maxlen=2, + usedefault=True, + field="extopts.restypes.optimal", + desc=_help_resampling, + ) + _errors_help = ( + "Error handling.\nTry to catch preprocessing errors and continue with the next data set or ignore " + "all warnings (e.g., bad intensities) and use an experimental pipeline which is still in " + "development. In case of errors, CAT continues with the next subject if this option is enabled. If " + "the experimental option with backup functions is selected and warnings occur, CAT will try to use" + " backup routines and skip some processing steps which require good T1 contrasts (e.g., LAS). If " + "you want to avoid processing of critical data and ensure that only the main pipeline is used, then" + ' select the option "Ignore errors (continue with the next subject)". It is strongly recommended ' + "to check for preprocessing problems, especially with non-T1 contrasts. " + "\nValues:\nnone: 0,\ndefault: 1,\ndetails: 2." + ) + ignore_errors = traits.Int( + 1, field="extopts.ignoreErrors", desc=_errors_help, usedefault=True + )
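# Sketch: the resampling presets named in _help_resampling expressed as
# (resolution, tolerance) tuples for internal_resampling_process (the RESTYPES dict
# is illustrative only, not part of the interface; filename hypothetical).
from nipype.interfaces.cat12 import CAT12Segment

RESTYPES = {
    "optimal": (1.0, 0.1),
    "fixed_1.0_mm": (1.0, 0.1),
    "fixed_0.8_mm": (0.8, 0.1),
    "best_native": (0.5, 0.1),
}
seg = CAT12Segment(in_files="structural.nii")
seg.inputs.internal_resampling_process = RESTYPES["fixed_0.8_mm"]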
" + ) + surface_and_thickness_estimation = traits.Int( + 1, field="surface", desc=_help_surf, usedefault=True + ) + surface_measures = traits.Int( + 1, + field="output.surf_measures", + usedefault=True, + desc="Extract surface measures", + ) + + # Templates + neuromorphometrics = traits.Bool( + True, + field="output.ROImenu.atlases.neuromorphometrics", + usedefault=True, + desc="Extract brain measures for Neuromorphometrics template", + ) + lpba40 = traits.Bool( + True, + field="output.ROImenu.atlases.lpba40", + usedefault=True, + desc="Extract brain measures for LPBA40 template", + ) + cobra = traits.Bool( + True, + field="output.ROImenu.atlases.hammers", + usedefault=True, + desc="Extract brain measures for COBRA template", + ) + hammers = traits.Bool( + True, + field="output.ROImenu.atlases.cobra", + usedefault=True, + desc="Extract brain measures for Hammers template", + ) + own_atlas = InputMultiPath( + ImageFileSPM(exists=True), + field="output.ROImenu.atlases.ownatlas", + desc="Extract brain measures for a given template", + mandatory=False, + copyfile=False, + ) + + # Grey matter + gm_output_native = traits.Bool( + False, + field="output.GM.native", + usedefault=True, + desc="Save modulated grey matter images.", + ) + gm_output_modulated = traits.Bool( + True, + field="output.GM.mod", + usedefault=True, + desc="Save native grey matter images.", + ) + gm_output_dartel = traits.Bool( + False, + field="output.GM.dartel", + usedefault=True, + desc="Save dartel grey matter images.", + ) + + # White matter + _wm_desc = "Options to save white matter images." + wm_output_native = traits.Bool( + False, + field="output.WM.native", + usedefault=True, + desc="Save dartel white matter images.", + ) + wm_output_modulated = traits.Bool( + True, + field="output.WM.mod", + usedefault=True, + desc="Save dartel white matter images.", + ) + wm_output_dartel = traits.Bool( + False, + field="output.WM.dartel", + usedefault=True, + desc="Save dartel white matter images.", + ) + + # CSF matter + _csf_desc = "Options to save CSF images." + csf_output_native = traits.Bool( + False, + field="output.CSF.native", + usedefault=True, + desc="Save dartel CSF images.", + ) + csf_output_modulated = traits.Bool( + True, field="output.CSF.mod", usedefault=True, desc="Save dartel CSF images." + ) + csf_output_dartel = traits.Bool( + False, + field="output.CSF.dartel", + usedefault=True, + desc="Save dartel CSF images.", + ) + + # Labels + _help_label_desc = ( + "This is the option to save a labeled version of your segmentations in the %s space for fast visual " + "comparison. Labels are saved as Partial Volume Estimation (PVE) values with different mix " + "classes for GM-WM (2.5) and GM-CSF (1.5). 
+ + # Labels + _help_label_desc = ( + "This is the option to save a labeled version of your segmentations in the %s space for fast visual " + "comparison. Labels are saved as Partial Volume Estimation (PVE) values with different mix " + "classes for GM-WM (2.5) and GM-CSF (1.5). BG=0, CSF=1, GM=2, WM=3, WMH=4 (if WMHC=3), " + "SL=1.5 (if SLC)" + ) + label_native = traits.Bool( + False, + field="output.label.native", + usedefault=True, + desc=_help_label_desc % "native", + ) + label_warped = traits.Bool( + True, + field="output.label.warped", + usedefault=True, + desc=_help_label_desc % "warped", + ) + label_dartel = traits.Bool( + False, + field="output.label.dartel", + usedefault=True, + desc=_help_label_desc % "dartel", + ) + output_labelnative = traits.Bool( + False, + field="output.labelnative", + usedefault=True, + desc=_help_label_desc % "native", + ) + + # Bias + save_bias_corrected = traits.Bool( + True, + field="output.bias.warped", + usedefault=True, + desc="Save bias corrected image", + ) + + # LAS + _las_desc = ( + "This is the option to save a bias, noise, and local intensity corrected version of the original T1" + " image in the %s space. MR images are usually corrupted by a smooth, spatially varying artifact that modulates the" + " intensity of the image (bias). These artifacts, although not usually a problem for visual " + "inspection, can impede automated processing of the images. The bias corrected version should have " + "more uniform intensities within the different types of tissues and can be saved in native space " + "and/or normalised. Noise is corrected by an adaptive non-local mean (NLM) filter (Manjon 2008, " + "Medical Image Analysis 12)." + ) + las_native = traits.Bool( + False, field="output.las.native", usedefault=True, desc=_las_desc % "native" + ) + las_warped = traits.Bool( + True, field="output.las.warped", usedefault=True, desc=_las_desc % "warped" + ) + las_dartel = traits.Bool( + False, field="output.las.dartel", usedefault=True, desc=_las_desc % "dartel" + ) + + # Jacobian Warped + _help_jacobian = ( + "This is the option to save the Jacobian determinant, which expresses local volume changes. This" + " image can be used in a pure deformation based morphometry (DBM) design. Please note that the" + " affine part of the deformation field is ignored. Thus, there is no need for any additional" + " correction for different brain sizes using ICV." + ) + jacobianwarped = traits.Bool( + True, field="output.jacobianwarped", usedefault=True, desc=_help_jacobian + )
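# Sketch: decoding the PVE label values documented in _help_label_desc with nibabel
# (the nibabel calls are real API; the label filename is hypothetical, only the value
# encoding comes from the help text above).
import nibabel as nib
import numpy as np

label_img = nib.load("mri/p0structural.nii")  # hypothetical PVE label volume
data = np.asarray(label_img.dataobj)
gm_mask = (data >= 1.5) & (data <= 2.5)  # GM (2) plus its GM-CSF/GM-WM mixtures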
+ "\nValues: No:[0 0];\nImage->Template (forward): [1 0];\nTemplate->Image (inverse): [0 1]; " + "\ninverse + forward: [1 1]" + ) + warps = Tuple( + traits.Int(1), + traits.Int(0), + minlen=2, + maxlen=2, + field="output.warps", + usedefault=True, + desc=_help_warp, + ) + + +class CAT12SegmentOutputSpec(TraitedSpec): + ########################################## + # Label XML files + ########################################## + label_files = traits.List( + File(exists=True), desc="Files with the measures extracted for OI ands ROIs" + ) + + label_rois = File(exists=True, desc="Files with thickness values of ROIs.") + label_roi = File(exists=True, desc="Files with thickness values of ROI.") + + ########################################## + # MRI .nii files + ########################################## + + mri_images = traits.List(File(exists=True), desc="Different segmented images.") + + # Grey Matter + gm_modulated_image = File(exists=True, desc="Grey matter modulated image.") + gm_dartel_image = File(exists=True, desc="Grey matter dartel image.") + gm_native_image = File(exists=True, desc="Grey matter native space.") + + # White Matter + wm_modulated_image = File(exists=True, desc="White matter modulated image.") + wm_dartel_image = File(exists=True, desc="White matter dartel image.") + wm_native_image = File(exists=True, desc="White matter in native space.") + + # CSF + csf_modulated_image = File(exists=True, desc="CSF modulated image.") + csf_dartel_image = File(exists=True, desc="CSF dartel image.") + csf_native_image = File(exists=True, desc="CSF in native space.") + + bias_corrected_image = File(exists=True, desc="Bias corrected image") + ########################################## + # Surface files + ########################################## + + surface_files = traits.List(File(exists=True), desc="Surface files") + + # Right hemisphere + rh_central_surface = File(exists=True, desc="Central right hemisphere files") + rh_sphere_surface = File(exists=True, desc="Sphere right hemisphere files") + + # Left hemisphere + lh_central_surface = File(exists=True, desc="Central left hemisphere files") + lh_sphere_surface = File(exists=True, desc="Sphere left hemisphere files") + + # Report files + report_files = traits.List(File(exists=True), desc="Report files.") + report = File(exists=True, desc="Report file.") + + +class CAT12Segment(SPMCommand): + """ + CAT12: Segmentation + + This toolbox is an extension to the default segmentation in SPM12, but uses a completely different segmentation + approach. + The segmentation approach is based on an Adaptive Maximum A Posterior (MAP) technique without the need for a priori + information about tissue probabilities. That is, the Tissue Probability Maps (TPM) are not used constantly in the + sense of the classical Unified Segmentation approach (Ashburner et. al. 2005), but just for spatial normalization. + The following AMAP estimation is adaptive in the sense that local variations of the parameters (i.e., means and + variance) are modeled as slowly varying spatial functions (Rajapakse et al. 1997). This not only accounts for + intensity inhomogeneities but also for other local variations of intensity. + Additionally, the segmentation approach uses a Partial Volume Estimation (PVE) with a simplified mixed model of at + most two tissue types (Tohka et al. 2004). We start with an initial segmentation into three pure classes: gray + matter (GM), white matter (WM), and cerebrospinal fluid (CSF) based on the above described AMAP estimation. 
+ + +class CAT12Segment(SPMCommand): + """ + CAT12: Segmentation + + This toolbox is an extension to the default segmentation in SPM12, but uses a completely different segmentation + approach. + The segmentation approach is based on an Adaptive Maximum A Posterior (MAP) technique without the need for a priori + information about tissue probabilities. That is, the Tissue Probability Maps (TPM) are not used constantly in the + sense of the classical Unified Segmentation approach (Ashburner et al. 2005), but just for spatial normalization. + The following AMAP estimation is adaptive in the sense that local variations of the parameters (i.e., means and + variance) are modeled as slowly varying spatial functions (Rajapakse et al. 1997). This not only accounts for + intensity inhomogeneities but also for other local variations of intensity. + Additionally, the segmentation approach uses a Partial Volume Estimation (PVE) with a simplified mixed model of at + most two tissue types (Tohka et al. 2004). We start with an initial segmentation into three pure classes: gray + matter (GM), white matter (WM), and cerebrospinal fluid (CSF) based on the above described AMAP estimation. The + initial segmentation is followed by a PVE of two additional mixed classes: GM-WM and GM-CSF. This results in an + estimation of the amount (or fraction) of each pure tissue type present in every voxel (as single voxels - given by + their size - probably do not contain only one tissue type). + Another important extension to the SPM12 segmentation is the integration of the Dartel or Geodesic Shooting + registration into the toolbox by an already existing Dartel/Shooting template in MNI space. This template was + derived from 555 healthy control subjects of the IXI-database (http://www.brain-development.org) and provides the + templates for the several Dartel or Shooting iterations. Thus, for the majority of studies the creation of + sample-specific templates is not necessary anymore and is mainly recommended for children data. + + http://www.neuro.uni-jena.de/cat12/CAT12-Manual.pdf#page=15 + + Examples + -------- + >>> path_mr = 'structural.nii' + >>> cat = CAT12Segment(in_files=path_mr) + >>> cat.run() # doctest: +SKIP + """ + + input_spec = CAT12SegmentInputSpec + output_spec = CAT12SegmentOutputSpec + + def __init__(self, **inputs): + _local_version = SPMCommand().version + if _local_version and "12." in _local_version: + self._jobtype = "tools" + self._jobname = "cat.estwrite" + + SPMCommand.__init__(self, **inputs) + + def _format_arg(self, opt, spec, val): + """Convert input to appropriate format for spm""" + if opt == "in_files": + if isinstance(val, list): + return scans_for_fnames(val) + else: + return scans_for_fname(val) + elif opt in ["tpm", "shooting_tpm"]: + return Cell2Str(val) + + return super()._format_arg(opt, spec, val) + + def _list_outputs(self): + outputs = self._outputs().get() + f = self.inputs.in_files[0] + pth, base, ext = split_filename(f) + + outputs["mri_images"] = [ + str(mri) for mri in Path(pth).glob("mri/*") if mri.is_file() + ] + + for tidx, tissue in enumerate(["gm", "wm", "csf"]): + for image, prefix in [("modulated", "mw"), ("dartel", "r"), ("native", "")]: + outtype = f"{tissue}_output_{image}" + if isdefined(getattr(self.inputs, outtype)) and getattr( + self.inputs, outtype + ): + outfield = f"{tissue}_{image}_image" + prefix = os.path.join("mri", f"{prefix}p{tidx + 1}") + if image != "dartel": + outputs[outfield] = fname_presuffix(f, prefix=prefix) + else: + outputs[outfield] = fname_presuffix( + f, prefix=prefix, suffix="_rigid" + ) + + if self.inputs.save_bias_corrected: + outputs["bias_corrected_image"] = fname_presuffix( + f, prefix=os.path.join("mri", "wmi") + ) + + outputs["surface_files"] = [ + str(surf) for surf in Path(pth).glob("surf/*") if surf.is_file() + ] + + for hemisphere in ["rh", "lh"]: + for suffix in ["central", "sphere"]: + outfield = f"{hemisphere}_{suffix}_surface" + outputs[outfield] = fname_presuffix( + f, + prefix=os.path.join("surf", f"{hemisphere}.{suffix}."), + suffix=".gii", + use_ext=False, + ) + + outputs["report_files"] = [ + str(report) for report in Path(pth).glob("report/*") if report.is_file() + ] + + outputs["report"] = fname_presuffix( + f, prefix=os.path.join("report", "cat_"), suffix=".xml", use_ext=False + ) + + outputs["label_files"] = [ + str(label) for label in Path(pth).glob("label/*") if label.is_file() + ] + + outputs["label_rois"] = fname_presuffix( + f, prefix=os.path.join("label", "catROIs_"), suffix=".xml", use_ext=False + ) + outputs["label_roi"] = fname_presuffix( + f, prefix=os.path.join("label", "catROI_"), suffix=".xml", use_ext=False + ) + + return outputs
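# Quick check of the naming rule _list_outputs relies on: fname_presuffix joins a
# relative prefix onto the input's directory (POSIX paths shown; pure string
# manipulation, runnable without CAT12 installed).
from nipype.utils.filemanip import fname_presuffix

assert (
    fname_presuffix("/data/structural.nii", prefix="mri/mwp1")
    == "/data/mri/mwp1structural.nii"
)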
+ + +class CAT12SANLMDenoisingInputSpec(SPMCommandInputSpec): + in_files = InputMultiPath( + ImageFileSPM(exists=True), + field="data", + desc="Images for filtering.", + mandatory=True, + copyfile=False, + ) + + spm_type = traits.Enum( + "float32", + "uint16", + "uint8", + "same", + field="spm_type", + usedefault=True, + desc="Data type of the output images. 'same' matches the input image type.", + ) + + intlim = traits.Int( + field="intlim", + default_value=100, + usedefault=True, + desc="intensity limitation (default = 100)", + ) + + filename_prefix = traits.Str( + field="prefix", + default_value="sanlm_", + usedefault=True, + desc="Filename prefix. Specify the string to be prepended to the filenames of the filtered image file(s).", + ) + + filename_suffix = traits.Str( + field="suffix", + default_value="", + usedefault=True, + desc="Filename suffix. Specify the string to be appended to the filenames of the filtered image file(s).", + ) + + addnoise = traits.Float( + default_value=0.5, + usedefault=True, + field="addnoise", + desc="""Strength of additional noise in noise-free regions. + Add minimal amount of noise in regions without any noise to avoid image segmentation problems. + This parameter defines the strength of additional noise as percentage of the average signal intensity.""", + ) + + rician = traits.Bool( + True, + field="rician", + usedefault=True, + desc="""Rician noise + MRIs can have Gaussian or Rician distributed noise with uniform or nonuniform variance across the image. + If SNR is high enough (>3) noise can be well approximated by Gaussian noise in the foreground. However, for + SENSE reconstruction or DTI data a Rician distribution is expected. Please note that the Rician noise estimation + is sensitive for large signals in the neighbourhood and can lead to artefacts, e.g. cortex can be affected by + very high values in the scalp or in blood vessels.""", + ) + + replace_nan_and_inf = traits.Bool( + True, + field="replaceNANandINF", + usedefault=True, + desc="Replace NAN by 0, -INF by the minimum and INF by the maximum of the image.", + ) + + noisecorr_strength = traits.Enum( + "-Inf", + 2, + 4, + field="nlmfilter.optimized.NCstr", + usedefault=True, + desc="""Strength of Noise Corrections + Strength of the (sub-resolution) spatial adaptive non local means (SANLM) noise correction. Please note + that the filter strength is automatically estimated. Change this parameter only for specific conditions. The + "light" option applies half of the filter strength of the adaptive "medium" cases, whereas the "strong" + option uses the full filter strength, forces sub-resolution filtering and applies an additional iteration. + Sub-resolution filtering is only used in case of high image resolution below 0.8 mm or in case of the + "strong" option. light = 2, medium = -Inf, strong = 4""", + ) + + +class CAT12SANLMDenoisingOutputSpec(TraitedSpec): + out_file = File(desc="out file") + + +class CAT12SANLMDenoising(SPMCommand): + """ + Spatially adaptive non-local means (SANLM) denoising filter + + This function applies a spatially adaptive (sub-resolution) non-local means denoising filter + to the data. This filter will remove noise while preserving edges. The filter strength is + automatically estimated based on the standard deviation of the noise. + + This filter is internally used in the segmentation procedure anyway. Thus, it is not + necessary (and not recommended) to apply the filter before segmentation.
+ ______________________________________________________________________ + Christian Gaser, Robert Dahnke + Structural Brain Mapping Group (http://www.neuro.uni-jena.de) + Departments of Neurology and Psychiatry + Jena University Hospital + ______________________________________________________________________ + + Examples + -------- + >>> from nipype.interfaces import cat12 + >>> c = cat12.CAT12SANLMDenoising() + >>> c.inputs.in_files = 'anatomical.nii' + >>> c.run() # doctest: +SKIP + """ + + input_spec = CAT12SANLMDenoisingInputSpec + output_spec = CAT12SANLMDenoisingOutputSpec + + def __init__(self, **inputs): + _local_version = SPMCommand().version + if _local_version and "12." in _local_version: + self._jobtype = "tools" + self._jobname = "cat.tools.sanlm" + + SPMCommand.__init__(self, **inputs) + + def _format_arg(self, opt, spec, val): + """Convert input to appropriate format for spm""" + if opt == "in_files": + if isinstance(val, list): + return scans_for_fnames(val) + else: + return scans_for_fname(val) + if opt == "spm_type": + type_map = {"same": 0, "uint8": 2, "uint16": 512, "float32": 16} + val = type_map[val] + return super()._format_arg(opt, spec, val) + + def _list_outputs(self): + outputs = self._outputs().get() + outputs["out_file"] = fname_presuffix( + self.inputs.in_files[0], + newpath=os.getcwd(), + prefix=self.inputs.filename_prefix, + suffix=self.inputs.filename_suffix, + ) + return outputs + + +class Cell2Str(Cell): + def __str__(self): + """Convert input to appropriate format for cat12""" + return "{'%s'}" % self.to_string()
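# Sketch: the spm_type enum is translated to SPM's numeric datatype codes in
# _format_arg above (same -> 0, uint8 -> 2, uint16 -> 512, float32 -> 16), so
# selecting "uint16" writes spm_type = 512 into the generated batch. The filename is
# hypothetical; running requires CAT12/SPM.
from nipype.interfaces.cat12 import CAT12SANLMDenoising

denoise = CAT12SANLMDenoising()
denoise.inputs.in_files = "anatomical.nii"
denoise.inputs.spm_type = "uint16"  # emitted as 512
denoise.run()  # doctest: +SKIP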
diff --git a/nipype/interfaces/cat12/surface.py b/nipype/interfaces/cat12/surface.py new file mode 100644 index 0000000000..4186bb899e --- /dev/null +++ b/nipype/interfaces/cat12/surface.py @@ -0,0 +1,274 @@ +import os +from pathlib import Path + +from nipype.interfaces.base import File, InputMultiPath, TraitedSpec, traits, isdefined +from nipype.interfaces.cat12.base import NestedCell, Cell +from nipype.interfaces.spm import SPMCommand +from nipype.interfaces.spm.base import SPMCommandInputSpec +from nipype.utils.filemanip import split_filename + + +class ExtractAdditionalSurfaceParametersInputSpec(SPMCommandInputSpec): + left_central_surfaces = InputMultiPath( + File(exists=True), + field="data_surf", + desc="Left hemisphere central surface files", + mandatory=True, + copyfile=False, + ) + surface_files = InputMultiPath( + File(exists=True), desc="All surface files", mandatory=False, copyfile=False + ) + + gyrification = traits.Bool( + True, + field="GI", + usedefault=True, + desc="Extract gyrification index (GI) based on absolute mean curvature. The" + " method is described in Luders et al. Neuroimage, 29:1224-1230, 2006", + ) + gmv = traits.Bool(True, field="gmv", usedefault=True, desc="Extract volume") + area = traits.Bool(True, field="area", usedefault=True, desc="Extract surface area") + depth = traits.Bool( + False, + field="SD", + usedefault=True, + desc="Extract sulcus depth based on euclidean distance between the central " + "surface and its convex hull.", + ) + fractal_dimension = traits.Bool( + False, + field="FD", + usedefault=True, + desc="Extract cortical complexity (fractal dimension) which is " + "described in Yotter et al. Neuroimage, 56(3): 961-973, 2011", + ) + + +class ExtractAdditionalSurfaceParametersOutputSpec(TraitedSpec): + lh_extracted_files = traits.List( + File(exists=True), desc="Files of left Hemisphere extracted measures" + ) + rh_extracted_files = traits.List( + File(exists=True), desc="Files of right Hemisphere extracted measures" + ) + + lh_gyrification = traits.List( + File(exists=True), desc="Gyrification of left Hemisphere" + ) + rh_gyrification = traits.List( + File(exists=True), desc="Gyrification of right Hemisphere" + ) + + lh_gmv = traits.List( + File(exists=True), desc="Grey matter volume of left Hemisphere" + ) + rh_gmv = traits.List( + File(exists=True), desc="Grey matter volume of right Hemisphere" + ) + + lh_area = traits.List(File(exists=True), desc="Area of left Hemisphere") + rh_area = traits.List(File(exists=True), desc="Area of right Hemisphere") + + lh_depth = traits.List(File(exists=True), desc="Depth of left Hemisphere") + rh_depth = traits.List(File(exists=True), desc="Depth of right Hemisphere") + + lh_fractaldimension = traits.List( + File(exists=True), desc="Fractal Dimension of left Hemisphere" + ) + rh_fractaldimension = traits.List( + File(exists=True), desc="Fractal Dimension of right Hemisphere" + ) + + +class ExtractAdditionalSurfaceParameters(SPMCommand): + """ + Additional surface parameters can be extracted that can be used for statistical analysis, such as: + + * Central surfaces + * Surface area + * Surface GM volume + * Gyrification Index + * Sulcus depth + * Toro's gyrification index + * Schaer's local gyrification index + * Laplacian gyrification indices + * Additional surfaces + * Measure normalization + * Lazy processing + + http://www.neuro.uni-jena.de/cat12/CAT12-Manual.pdf#page=53 + + Examples + -------- + >>> # Set the left surface files; both hemispheres will be processed + >>> lh_path_central = 'lh.central.structural.gii' + >>> # Put here all surface files generated by CAT12 Segment; these are only required when this step is run on its own + >>> surf_files = ['lh.sphere.reg.structural.gii', 'rh.sphere.reg.structural.gii', 'lh.sphere.structural.gii', 'rh.sphere.structural.gii', 'rh.central.structural.gii', 'lh.pbt.structural', 'rh.pbt.structural'] + >>> extract_additional_measures = ExtractAdditionalSurfaceParameters(left_central_surfaces=lh_path_central, surface_files=surf_files) + >>> extract_additional_measures.run() # doctest: +SKIP + + """ + + input_spec = ExtractAdditionalSurfaceParametersInputSpec + output_spec = ExtractAdditionalSurfaceParametersOutputSpec
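# Quick check of the per-hemisphere naming rule used by _list_outputs below: each
# enabled measure produces "<hemi>.<measure>.<original>" next to the input central
# surface (pure string manipulation, runnable without CAT12).
from nipype.utils.filemanip import split_filename

_, base, _ = split_filename("lh.central.structural.gii")
original = base.split(".", 2)[-1]  # -> "structural"
assert f"lh.gyrification.{original}" == "lh.gyrification.structural"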
+
+
+class ExtractROIBasedSurfaceMeasuresInputSpec(SPMCommandInputSpec):
+    # Only these files are given as input, yet the right hemisphere (rh) files should also be in the processing
+    # directory.
+
+    surface_files = InputMultiPath(
+        File(exists=True),
+        desc="Surface data files. This variable should be a list with all surface files.",
+        mandatory=False,
+        copyfile=False,
+    )
+    lh_roi_atlas = InputMultiPath(
+        File(exists=True),
+        field="rdata",
+        desc="(Left) ROI atlas. These are the ROI files.",
+        mandatory=True,
+        copyfile=False,
+    )
+
+    rh_roi_atlas = InputMultiPath(
+        File(exists=True),
+        desc="(Right) ROI atlas. These are the ROI files.",
+        mandatory=False,
+        copyfile=False,
+    )
+
+    lh_surface_measure = InputMultiPath(
+        File(exists=True),
+        field="cdata",
+        desc="(Left) Surface data files.",
+        mandatory=True,
+        copyfile=False,
+    )
+    rh_surface_measure = InputMultiPath(
+        File(exists=True),
+        desc="(Right) Surface data files.",
+        mandatory=False,
+        copyfile=False,
+    )
+
+
+class ExtractROIBasedSurfaceMeasuresOutputSpec(TraitedSpec):
+    label_files = traits.List(
+        File(exists=True), desc="Files with the measures extracted for ROIs."
+    )
+
+
+class ExtractROIBasedSurfaceMeasures(SPMCommand):
+    """
+    Extract ROI-based surface values
+    While ROI-based values for VBM (volume) data are automatically saved in the ``label`` folder as an XML file, it is
+    necessary to additionally extract these values for surface data (except for thickness, which is automatically
+    extracted during segmentation). This has to be done after preprocessing the data and creating cortical surfaces.
+
+    You can extract ROI-based values for cortical thickness, but also for any other surface parameter that was
+    extracted using Extract Additional Surface Parameters, such as volume, area, depth, gyrification and fractal dimension.
+ + + http://www.neuro.uni-jena.de/cat12/CAT12-Manual.pdf#page=53 + + Examples + -------- + >>> # Template surface files + >>> lh_atlas = 'lh.aparc_a2009s.freesurfer.annot' + >>> rh_atlas = 'rh.aparc_a2009s.freesurfer.annot' + >>> surf_files = ['lh.sphere.reg.structural.gii', 'rh.sphere.reg.structural.gii', 'lh.sphere.structural.gii', 'rh.sphere.structural.gii', 'lh.central.structural.gii', 'rh.central.structural.gii', 'lh.pbt.structural', 'rh.pbt.structural'] + >>> lh_measure = 'lh.area.structural' + >>> extract_additional_measures = ExtractROIBasedSurfaceMeasures(surface_files=surf_files, lh_surface_measure=lh_measure, lh_roi_atlas=lh_atlas, rh_roi_atlas=rh_atlas) + >>> extract_additional_measures.run() # doctest: +SKIP + + + """ + + input_spec = ExtractROIBasedSurfaceMeasuresInputSpec + output_spec = ExtractROIBasedSurfaceMeasuresOutputSpec + + def __init__(self, **inputs): + _local_version = SPMCommand().version + if _local_version and "12." in _local_version: + self._jobtype = "tools" + self._jobname = "cat.stools.surf2roi" + + SPMCommand.__init__(self, **inputs) + + def _format_arg(self, opt, spec, val): + if opt == "lh_surface_measure": + return NestedCell(val) + elif opt == "lh_roi_atlas": + return Cell2Str(val) + + return super()._format_arg(opt, spec, val) + + def _list_outputs(self): + outputs = self._outputs().get() + + pth, base, ext = split_filename(self.inputs.lh_surface_measure[0]) + + outputs["label_files"] = [ + str(label) for label in Path(pth).glob("label/*") if label.is_file() + ] + return outputs + + +class Cell2Str(Cell): + def __str__(self): + """Convert input to appropriate format for cat12""" + return "{%s}" % self.to_string() diff --git a/nipype/workflows/rsfmri/fsl/tests/__init__.py b/nipype/interfaces/cat12/tests/__init__.py similarity index 100% rename from nipype/workflows/rsfmri/fsl/tests/__init__.py rename to nipype/interfaces/cat12/tests/__init__.py diff --git a/nipype/interfaces/cat12/tests/test_auto_CAT12SANLMDenoising.py b/nipype/interfaces/cat12/tests/test_auto_CAT12SANLMDenoising.py new file mode 100644 index 0000000000..43c0d5e4ea --- /dev/null +++ b/nipype/interfaces/cat12/tests/test_auto_CAT12SANLMDenoising.py @@ -0,0 +1,72 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..preprocess import CAT12SANLMDenoising + + +def test_CAT12SANLMDenoising_inputs(): + input_map = dict( + addnoise=dict( + field="addnoise", + usedefault=True, + ), + filename_prefix=dict( + field="prefix", + usedefault=True, + ), + filename_suffix=dict( + field="suffix", + usedefault=True, + ), + in_files=dict( + copyfile=False, + field="data", + mandatory=True, + ), + intlim=dict( + field="intlim", + usedefault=True, + ), + matlab_cmd=dict(), + mfile=dict( + usedefault=True, + ), + noisecorr_strength=dict( + field="nlmfilter.optimized.NCstr", + usedefault=True, + ), + paths=dict(), + replace_nan_and_inf=dict( + field="replaceNANandINF", + usedefault=True, + ), + rician=dict( + field="rician", + usedefault=True, + ), + spm_type=dict( + field="spm_type", + usedefault=True, + ), + use_mcr=dict(), + use_v8struct=dict( + min_ver="8", + usedefault=True, + ), + ) + inputs = CAT12SANLMDenoising.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_CAT12SANLMDenoising_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = CAT12SANLMDenoising.output_spec() + + for key, metadata in list(output_map.items()): + for 
metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/cat12/tests/test_auto_CAT12Segment.py b/nipype/interfaces/cat12/tests/test_auto_CAT12Segment.py new file mode 100644 index 0000000000..979b3afa6b --- /dev/null +++ b/nipype/interfaces/cat12/tests/test_auto_CAT12Segment.py @@ -0,0 +1,277 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..preprocess import CAT12Segment + + +def test_CAT12Segment_inputs(): + input_map = dict( + affine_preprocessing=dict( + field="extopts.APP", + usedefault=True, + ), + affine_regularization=dict( + field="opts.affreg", + usedefault=True, + ), + cobra=dict( + field="output.ROImenu.atlases.hammers", + usedefault=True, + ), + csf_output_dartel=dict( + field="output.CSF.dartel", + usedefault=True, + ), + csf_output_modulated=dict( + field="output.CSF.mod", + usedefault=True, + ), + csf_output_native=dict( + field="output.CSF.native", + usedefault=True, + ), + gm_output_dartel=dict( + field="output.GM.dartel", + usedefault=True, + ), + gm_output_modulated=dict( + field="output.GM.mod", + usedefault=True, + ), + gm_output_native=dict( + field="output.GM.native", + usedefault=True, + ), + hammers=dict( + field="output.ROImenu.atlases.cobra", + usedefault=True, + ), + ignore_errors=dict( + field="extopts.ignoreErrors", + usedefault=True, + ), + in_files=dict( + copyfile=False, + field="data", + mandatory=True, + ), + initial_segmentation=dict( + field="extopts.spm_kamap", + usedefault=True, + ), + internal_resampling_process=dict( + field="extopts.restypes.optimal", + maxlen=2, + minlen=2, + usedefault=True, + ), + jacobianwarped=dict( + field="output.jacobianwarped", + usedefault=True, + ), + label_dartel=dict( + field="output.label.dartel", + usedefault=True, + ), + label_native=dict( + field="output.label.native", + usedefault=True, + ), + label_warped=dict( + field="output.label.warped", + usedefault=True, + ), + las_dartel=dict( + field="output.las.dartel", + usedefault=True, + ), + las_native=dict( + field="output.las.native", + usedefault=True, + ), + las_warped=dict( + field="output.las.warped", + usedefault=True, + ), + local_adaptive_seg=dict( + field="extopts.LASstr", + usedefault=True, + ), + lpba40=dict( + field="output.ROImenu.atlases.lpba40", + usedefault=True, + ), + matlab_cmd=dict(), + mfile=dict( + usedefault=True, + ), + n_jobs=dict( + field="nproc", + mandatory=True, + usedefault=True, + ), + neuromorphometrics=dict( + field="output.ROImenu.atlases.neuromorphometrics", + usedefault=True, + ), + output_labelnative=dict( + field="output.labelnative", + usedefault=True, + ), + own_atlas=dict( + copyfile=False, + field="output.ROImenu.atlases.ownatlas", + mandatory=False, + ), + paths=dict(), + power_spm_inhomogeneity_correction=dict( + field="opts.biasacc", + usedefault=True, + ), + save_bias_corrected=dict( + field="output.bias.warped", + usedefault=True, + ), + shooting_tpm=dict( + copyfile=False, + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="extopts.registration.shooting.shootingtpm", + mandatory=False, + ), + shooting_tpm_template_1=dict( + copyfile=False, + extensions=[".hdr", ".img", ".img.gz", ".nii"], + mandatory=False, + ), + shooting_tpm_template_2=dict( + copyfile=False, + extensions=[".hdr", ".img", ".img.gz", ".nii"], + mandatory=False, + ), + shooting_tpm_template_3=dict( + copyfile=False, + extensions=[".hdr", ".img", ".img.gz", ".nii"], + mandatory=False, + ), + shooting_tpm_template_4=dict( + copyfile=False, + 
extensions=[".hdr", ".img", ".img.gz", ".nii"], + mandatory=False, + ), + skull_strip=dict( + field="extopts.gcutstr", + usedefault=True, + ), + surface_and_thickness_estimation=dict( + field="surface", + usedefault=True, + ), + surface_measures=dict( + field="output.surf_measures", + usedefault=True, + ), + tpm=dict( + copyfile=False, + field="tpm", + mandatory=False, + ), + use_mcr=dict(), + use_v8struct=dict( + min_ver="8", + usedefault=True, + ), + voxel_size=dict( + field="extopts.vox", + usedefault=True, + ), + warps=dict( + field="output.warps", + maxlen=2, + minlen=2, + usedefault=True, + ), + wm_hyper_intensity_correction=dict( + field="extopts.WMHC", + usedefault=True, + ), + wm_output_dartel=dict( + field="output.WM.dartel", + usedefault=True, + ), + wm_output_modulated=dict( + field="output.WM.mod", + usedefault=True, + ), + wm_output_native=dict( + field="output.WM.native", + usedefault=True, + ), + ) + inputs = CAT12Segment.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_CAT12Segment_outputs(): + output_map = dict( + bias_corrected_image=dict( + extensions=None, + ), + csf_dartel_image=dict( + extensions=None, + ), + csf_modulated_image=dict( + extensions=None, + ), + csf_native_image=dict( + extensions=None, + ), + gm_dartel_image=dict( + extensions=None, + ), + gm_modulated_image=dict( + extensions=None, + ), + gm_native_image=dict( + extensions=None, + ), + label_files=dict(), + label_roi=dict( + extensions=None, + ), + label_rois=dict( + extensions=None, + ), + lh_central_surface=dict( + extensions=None, + ), + lh_sphere_surface=dict( + extensions=None, + ), + mri_images=dict(), + report=dict( + extensions=None, + ), + report_files=dict(), + rh_central_surface=dict( + extensions=None, + ), + rh_sphere_surface=dict( + extensions=None, + ), + surface_files=dict(), + wm_dartel_image=dict( + extensions=None, + ), + wm_modulated_image=dict( + extensions=None, + ), + wm_native_image=dict( + extensions=None, + ), + ) + outputs = CAT12Segment.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/cat12/tests/test_auto_ExtractAdditionalSurfaceParameters.py b/nipype/interfaces/cat12/tests/test_auto_ExtractAdditionalSurfaceParameters.py new file mode 100644 index 0000000000..cde7f2057e --- /dev/null +++ b/nipype/interfaces/cat12/tests/test_auto_ExtractAdditionalSurfaceParameters.py @@ -0,0 +1,73 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..surface import ExtractAdditionalSurfaceParameters + + +def test_ExtractAdditionalSurfaceParameters_inputs(): + input_map = dict( + area=dict( + field="area", + usedefault=True, + ), + depth=dict( + field="SD", + usedefault=True, + ), + fractal_dimension=dict( + field="FD", + usedefault=True, + ), + gmv=dict( + field="gmv", + usedefault=True, + ), + gyrification=dict( + field="GI", + usedefault=True, + ), + left_central_surfaces=dict( + copyfile=False, + field="data_surf", + mandatory=True, + ), + matlab_cmd=dict(), + mfile=dict( + usedefault=True, + ), + paths=dict(), + surface_files=dict( + copyfile=False, + mandatory=False, + ), + use_mcr=dict(), + use_v8struct=dict( + min_ver="8", + usedefault=True, + ), + ) + inputs = ExtractAdditionalSurfaceParameters.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in 
list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_ExtractAdditionalSurfaceParameters_outputs(): + output_map = dict( + lh_area=dict(), + lh_depth=dict(), + lh_extracted_files=dict(), + lh_fractaldimension=dict(), + lh_gmv=dict(), + lh_gyrification=dict(), + rh_area=dict(), + rh_depth=dict(), + rh_extracted_files=dict(), + rh_fractaldimension=dict(), + rh_gmv=dict(), + rh_gyrification=dict(), + ) + outputs = ExtractAdditionalSurfaceParameters.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/cat12/tests/test_auto_ExtractROIBasedSurfaceMeasures.py b/nipype/interfaces/cat12/tests/test_auto_ExtractROIBasedSurfaceMeasures.py new file mode 100644 index 0000000000..ffc18324aa --- /dev/null +++ b/nipype/interfaces/cat12/tests/test_auto_ExtractROIBasedSurfaceMeasures.py @@ -0,0 +1,55 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..surface import ExtractROIBasedSurfaceMeasures + + +def test_ExtractROIBasedSurfaceMeasures_inputs(): + input_map = dict( + lh_roi_atlas=dict( + copyfile=False, + field="rdata", + mandatory=True, + ), + lh_surface_measure=dict( + copyfile=False, + field="cdata", + mandatory=True, + ), + matlab_cmd=dict(), + mfile=dict( + usedefault=True, + ), + paths=dict(), + rh_roi_atlas=dict( + copyfile=False, + mandatory=False, + ), + rh_surface_measure=dict( + copyfile=False, + mandatory=False, + ), + surface_files=dict( + copyfile=False, + mandatory=False, + ), + use_mcr=dict(), + use_v8struct=dict( + min_ver="8", + usedefault=True, + ), + ) + inputs = ExtractROIBasedSurfaceMeasures.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_ExtractROIBasedSurfaceMeasures_outputs(): + output_map = dict( + label_files=dict(), + ) + outputs = ExtractROIBasedSurfaceMeasures.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/cmtk/__init__.py b/nipype/interfaces/cmtk/__init__.py index 60c7d636d5..d71ac76e2c 100644 --- a/nipype/interfaces/cmtk/__init__.py +++ b/nipype/interfaces/cmtk/__init__.py @@ -1,4 +1,5 @@ -# -*- coding: utf-8 -*- +"""CMP implements a full processing pipeline for creating connectomes with dMRI data.""" + from .cmtk import ROIGen, CreateMatrix, CreateNodes from .nx import NetworkXMetrics, AverageNetworks from .parcellation import Parcellate diff --git a/nipype/interfaces/cmtk/base.py b/nipype/interfaces/cmtk/base.py index 4aedd56bdb..d0c226dc49 100644 --- a/nipype/interfaces/cmtk/base.py +++ b/nipype/interfaces/cmtk/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Base interface for cmtk """ @@ -8,7 +7,7 @@ class CFFBaseInterface(LibraryBaseInterface): - _pkg = 'cfflib' + _pkg = "cfflib" # Originally set in convert, nbs, nx, parcellation @@ -16,18 +15,18 @@ class CFFBaseInterface(LibraryBaseInterface): # Remove in 2.0 have_cmp = True try: - package_check('cmp') + package_check("cmp") except ImportError: have_cmp = False have_cfflib = True try: - package_check('cfflib') + package_check("cfflib") except ImportError: have_cfflib = False have_cv = True try: - 
package_check('cviewer') + package_check("cviewer") except ImportError: have_cv = False diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index 20293ab630..50902d5d1c 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -1,10 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, open - import pickle import os.path as op @@ -14,11 +9,19 @@ from ... import logging from ...utils.filemanip import split_filename -from ...utils import NUMPY_MMAP -from ..base import (BaseInterface, BaseInterfaceInputSpec, traits, File, - TraitedSpec, Directory, OutputMultiPath, isdefined) -iflogger = logging.getLogger('nipype.interface') +from ..base import ( + BaseInterface, + BaseInterfaceInputSpec, + traits, + File, + TraitedSpec, + Directory, + OutputMultiPath, + isdefined, +) + +iflogger = logging.getLogger("nipype.interface") def length(xyz, along=False): @@ -60,7 +63,7 @@ def length(xyz, along=False): if along: return np.array([0]) return 0 - dists = np.sqrt((np.diff(xyz, axis=0)**2).sum(axis=1)) + dists = np.sqrt((np.diff(xyz, axis=0) ** 2).sum(axis=1)) if along: return np.cumsum(dists) return np.sum(dists) @@ -69,15 +72,16 @@ def length(xyz, along=False): def get_rois_crossed(pointsmm, roiData, voxelSize): n_points = len(pointsmm) rois_crossed = [] - for j in range(0, n_points): + for j in range(n_points): # store point x = int(pointsmm[j, 0] / float(voxelSize[0])) y = int(pointsmm[j, 1] / float(voxelSize[1])) z = int(pointsmm[j, 2] / float(voxelSize[2])) - if not roiData[x, y, z] == 0: + if roiData[x, y, z] != 0: rois_crossed.append(roiData[x, y, z]) rois_crossed = list( - dict.fromkeys(rois_crossed).keys()) # Removed duplicates from the list + dict.fromkeys(rois_crossed).keys() + ) # Removed duplicates from the list return rois_crossed @@ -87,15 +91,14 @@ def get_connectivity_matrix(n_rois, list_of_roi_crossed_lists): for idx_i, roi_i in enumerate(rois_crossed): for idx_j, roi_j in enumerate(rois_crossed): if idx_i > idx_j: - if not roi_i == roi_j: + if roi_i != roi_j: connectivity_matrix[roi_i - 1, roi_j - 1] += 1 connectivity_matrix = connectivity_matrix + connectivity_matrix.T return connectivity_matrix def create_allpoints_cmat(streamlines, roiData, voxelSize, n_rois): - """ Create the intersection arrays for each fiber - """ + """Create the intersection arrays for each fiber""" n_fib = len(streamlines) pc = -1 # Computation for each fiber @@ -105,38 +108,46 @@ def create_allpoints_cmat(streamlines, roiData, voxelSize, n_rois): pcN = int(round(float(100 * i) / n_fib)) if pcN > pc and pcN % 1 == 0: pc = pcN - print('%4.0f%%' % (pc)) + print("%4.0f%%" % (pc)) rois_crossed = get_rois_crossed(fiber[0], roiData, voxelSize) if len(rois_crossed) > 0: list_of_roi_crossed_lists.append(list(rois_crossed)) final_fiber_ids.append(i) - connectivity_matrix = get_connectivity_matrix(n_rois, - list_of_roi_crossed_lists) + connectivity_matrix = get_connectivity_matrix(n_rois, list_of_roi_crossed_lists) dis = n_fib - len(final_fiber_ids) iflogger.info( - 'Found %i (%f percent out of %i fibers) fibers that start or ' - 'terminate in a voxel which is not labeled. 
(orphans)', dis, - dis * 100.0 / n_fib, n_fib) - iflogger.info('Valid fibers: %i (%f percent)', n_fib - dis, - 100 - dis * 100.0 / n_fib) - iflogger.info('Returning the intersecting point connectivity matrix') + "Found %i (%f percent out of %i fibers) fibers that start or " + "terminate in a voxel which is not labeled. (orphans)", + dis, + dis * 100.0 / n_fib, + n_fib, + ) + iflogger.info( + "Valid fibers: %i (%f percent)", n_fib - dis, 100 - dis * 100.0 / n_fib + ) + iflogger.info("Returning the intersecting point connectivity matrix") return connectivity_matrix, final_fiber_ids def create_endpoints_array(fib, voxelSize): - """ Create the endpoints arrays for each fiber + """Create the endpoints arrays for each fiber. + Parameters ---------- - fib: the fibers data - voxelSize: 3-tuple containing the voxel size of the ROI image + fib : array-like + the fibers data + voxelSize : tuple + 3-tuple containing the voxel size of the ROI image + Returns ------- - (endpoints: matrix of size [#fibers, 2, 3] containing for each fiber the - index of its first and last point in the voxelSize volume - endpointsmm) : endpoints in milimeter coordinates - """ + endpoints : ndarray of size [#fibers, 2, 3] + containing for each fiber the index of its first and last point in the voxelSize volume + endpointsmm : ndarray of size [#fibers, 2, 3] + endpoints in millimeter coordinates + """ # Init n = len(fib) endpoints = np.zeros((n, 2, 3)) @@ -165,43 +176,46 @@ def create_endpoints_array(fib, voxelSize): endpoints[i, 1, 2] = int(endpoints[i, 1, 2] / float(voxelSize[2])) # Return the matrices - iflogger.info('Returning the endpoint matrix') + iflogger.info("Returning the endpoint matrix") return (endpoints, endpointsmm) -def cmat(track_file, - roi_file, - resolution_network_file, - matrix_name, - matrix_mat_name, - endpoint_name, - intersections=False): - """ Create the connection matrix for each resolution using fibers and ROIs. 
""" +def cmat( + track_file, + roi_file, + resolution_network_file, + matrix_name, + matrix_mat_name, + endpoint_name, + intersections=False, +): + """Create the connection matrix for each resolution using fibers and ROIs.""" import scipy.io as sio stats = {} - iflogger.info('Running cmat function') + iflogger.info("Running cmat function") # Identify the endpoints of each fiber - en_fname = op.abspath(endpoint_name + '_endpoints.npy') - en_fnamemm = op.abspath(endpoint_name + '_endpointsmm.npy') + en_fname = op.abspath(endpoint_name + "_endpoints.npy") + en_fnamemm = op.abspath(endpoint_name + "_endpointsmm.npy") - iflogger.info('Reading Trackvis file %s', track_file) + iflogger.info("Reading Trackvis file %s", track_file) fib, hdr = nb.trackvis.read(track_file, False) - stats['orig_n_fib'] = len(fib) + stats["orig_n_fib"] = len(fib) - roi = nb.load(roi_file, mmap=NUMPY_MMAP) - roiData = roi.get_data() + roi = nb.load(roi_file) + # Preserve on-disk type unless scaled + roiData = np.asanyarray(roi.dataobj) roiVoxelSize = roi.header.get_zooms() (endpoints, endpointsmm) = create_endpoints_array(fib, roiVoxelSize) # Output endpoint arrays - iflogger.info('Saving endpoint array: %s', en_fname) + iflogger.info("Saving endpoint array: %s", en_fname) np.save(en_fname, endpoints) - iflogger.info('Saving endpoint array in mm: %s', en_fnamemm) + iflogger.info("Saving endpoint array in mm: %s", en_fnamemm) np.save(en_fnamemm, endpointsmm) n = len(fib) - iflogger.info('Number of fibers: %i', n) + iflogger.info("Number of fibers: %i", n) # Create empty fiber label array fiberlabels = np.zeros((n, 2)) @@ -210,16 +224,17 @@ def cmat(track_file, # Add node information from specified parcellation scheme path, name, ext = split_filename(resolution_network_file) - if ext == '.pck': - gp = nx.read_gpickle(resolution_network_file) - elif ext == '.graphml': + if ext == ".pck": + with open(resolution_network_file, 'rb') as f: + gp = pickle.load(f) + elif ext == ".graphml": gp = nx.read_graphml(resolution_network_file) else: raise TypeError("Unable to read file:", resolution_network_file) nROIs = len(gp.nodes()) # add node information from parcellation - if 'dn_position' in gp.nodes[list(gp.nodes())[0]]: + if "dn_position" in gp.nodes[list(gp.nodes())[0]]: G = gp.copy() else: G = nx.Graph() @@ -229,39 +244,47 @@ def cmat(track_file, # ROI in voxel coordinates (segmentation volume ) xyz = tuple( np.mean( - np.where( - np.flipud(roiData) == int(d["dn_correspondence_id"])), - axis=1)) - G.nodes[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]]) + np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), + axis=1, + ) + ) + G.nodes[int(u)]["dn_position"] = (xyz[0], xyz[2], -xyz[1]) if intersections: iflogger.info("Filtering tractography from intersections") intersection_matrix, final_fiber_ids = create_allpoints_cmat( - fib, roiData, roiVoxelSize, nROIs) + fib, roiData, roiVoxelSize, nROIs + ) finalfibers_fname = op.abspath( - endpoint_name + '_intersections_streamline_final.trk') - stats['intersections_n_fib'] = save_fibers(hdr, fib, finalfibers_fname, - final_fiber_ids) + endpoint_name + "_intersections_streamline_final.trk" + ) + stats["intersections_n_fib"] = save_fibers( + hdr, fib, finalfibers_fname, final_fiber_ids + ) intersection_matrix = np.matrix(intersection_matrix) I = G.copy() - H = nx.from_numpy_matrix(np.matrix(intersection_matrix)) - H = nx.relabel_nodes( - H, lambda x: x + 1) # relabel nodes so they start at 1 + H = nx.from_numpy_array(np.matrix(intersection_matrix)) + H = 
nx.relabel_nodes(H, lambda x: x + 1) # relabel nodes so they start at 1 I.add_weighted_edges_from( - ((u, v, d['weight']) for u, v, d in H.edges(data=True))) + ((u, v, d["weight"]) for u, v, d in H.edges(data=True)) + ) dis = 0 for i in range(endpoints.shape[0]): - # ROI start => ROI end try: - startROI = int(roiData[endpoints[i, 0, 0], endpoints[i, 0, 1], - endpoints[i, 0, 2]]) - endROI = int(roiData[endpoints[i, 1, 0], endpoints[i, 1, 1], - endpoints[i, 1, 2]]) + startROI = int( + roiData[endpoints[i, 0, 0], endpoints[i, 0, 1], endpoints[i, 0, 2]] + ) + endROI = int( + roiData[endpoints[i, 1, 0], endpoints[i, 1, 1], endpoints[i, 1, 2]] + ) except IndexError: - iflogger.error('AN INDEXERROR EXCEPTION OCCURED FOR FIBER %s. ' - 'PLEASE CHECK ENDPOINT GENERATION', i) + iflogger.error( + "AN INDEXERROR EXCEPTION OCCURRED FOR FIBER %s. " + "PLEASE CHECK ENDPOINT GENERATION", + i, + ) break # Filter @@ -274,8 +297,7 @@ def cmat(track_file, iflogger.error( "Start or endpoint of fiber terminate in a voxel which is labeled higher" ) - iflogger.error( - "than is expected by the parcellation node information.") + iflogger.error("than is expected by the parcellation node information.") iflogger.error("Start ROI: %i, End ROI: %i", startROI, endROI) iflogger.error("This needs bugfixing!") continue @@ -294,9 +316,8 @@ def cmat(track_file, final_fibers_idx.append(i) # Add edge to graph - if G.has_edge(startROI, - endROI) and 'fiblist' in G.edge[startROI][endROI]: - G.edge[startROI][endROI]['fiblist'].append(i) + if G.has_edge(startROI, endROI) and "fiblist" in G.edge[startROI][endROI]: + G.edge[startROI][endROI]["fiblist"].append(i) else: G.add_edge(startROI, endROI, fiblist=[i]) @@ -307,9 +328,11 @@ def cmat(track_file, else: final_fibers_indices = final_fibers_idx - for idx in final_fibers_indices: + finalfiberlength.extend( # compute length of fiber - finalfiberlength.append(length(fib[idx][0])) + length(fib[idx][0]) + for idx in final_fibers_indices + ) # convert to array final_fiberlength_array = np.array(finalfiberlength) @@ -318,10 +341,13 @@ def cmat(track_file, final_fiberlabels_array = np.array(final_fiberlabels, dtype=int) iflogger.info( - 'Found %i (%f percent out of %i fibers) fibers that start or ' - 'terminate in a voxel which is not labeled. (orphans)', dis, - dis * 100.0 / n, n) - iflogger.info('Valid fibers: %i (%f%%)', n - dis, 100 - dis * 100.0 / n) + "Found %i (%f percent out of %i fibers) fibers that start or " + "terminate in a voxel which is not labeled. 
(orphans)", + dis, + dis * 100.0 / n, + n, + ) + iflogger.info("Valid fibers: %i (%f%%)", n - dis, 100 - dis * 100.0 / n) numfib = nx.Graph() numfib.add_nodes_from(G) @@ -331,227 +357,229 @@ def cmat(track_file, for u, v, d in G.edges(data=True): G.remove_edge(u, v) di = {} - if 'fiblist' in d: - di['number_of_fibers'] = len(d['fiblist']) - idx = np.where((final_fiberlabels_array[:, 0] == int(u)) & - (final_fiberlabels_array[:, 1] == int(v)))[0] - di['fiber_length_mean'] = float( - np.mean(final_fiberlength_array[idx])) - di['fiber_length_median'] = float( - np.median(final_fiberlength_array[idx])) - di['fiber_length_std'] = float( - np.std(final_fiberlength_array[idx])) + if "fiblist" in d: + di["number_of_fibers"] = len(d["fiblist"]) + idx = np.where( + (final_fiberlabels_array[:, 0] == int(u)) + & (final_fiberlabels_array[:, 1] == int(v)) + )[0] + di["fiber_length_mean"] = float(np.mean(final_fiberlength_array[idx])) + di["fiber_length_median"] = float(np.median(final_fiberlength_array[idx])) + di["fiber_length_std"] = float(np.std(final_fiberlength_array[idx])) else: - di['number_of_fibers'] = 0 - di['fiber_length_mean'] = 0 - di['fiber_length_median'] = 0 - di['fiber_length_std'] = 0 - if not u == v: # Fix for self loop problem + di["number_of_fibers"] = 0 + di["fiber_length_mean"] = 0 + di["fiber_length_median"] = 0 + di["fiber_length_std"] = 0 + if u != v: # Fix for self loop problem G.add_edge(u, v, **di) - if 'fiblist' in d: - numfib.add_edge(u, v, weight=di['number_of_fibers']) - fibmean.add_edge(u, v, weight=di['fiber_length_mean']) - fibmedian.add_edge(u, v, weight=di['fiber_length_median']) - fibdev.add_edge(u, v, weight=di['fiber_length_std']) - - iflogger.info('Writing network as %s', matrix_name) - nx.write_gpickle(G, op.abspath(matrix_name)) - - numfib_mlab = nx.to_numpy_matrix(numfib, dtype=int) - numfib_dict = {'number_of_fibers': numfib_mlab} - fibmean_mlab = nx.to_numpy_matrix(fibmean, dtype=np.float64) - fibmean_dict = {'mean_fiber_length': fibmean_mlab} - fibmedian_mlab = nx.to_numpy_matrix(fibmedian, dtype=np.float64) - fibmedian_dict = {'median_fiber_length': fibmedian_mlab} - fibdev_mlab = nx.to_numpy_matrix(fibdev, dtype=np.float64) - fibdev_dict = {'fiber_length_std': fibdev_mlab} + if "fiblist" in d: + numfib.add_edge(u, v, weight=di["number_of_fibers"]) + fibmean.add_edge(u, v, weight=di["fiber_length_mean"]) + fibmedian.add_edge(u, v, weight=di["fiber_length_median"]) + fibdev.add_edge(u, v, weight=di["fiber_length_std"]) + + iflogger.info("Writing network as %s", matrix_name) + with open(op.abspath(matrix_name), 'wb') as f: + pickle.dump(G, f, pickle.HIGHEST_PROTOCOL) + + numfib_mlab = nx.to_numpy_array(numfib, dtype=int) + numfib_dict = {"number_of_fibers": numfib_mlab} + fibmean_mlab = nx.to_numpy_array(fibmean, dtype=np.float64) + fibmean_dict = {"mean_fiber_length": fibmean_mlab} + fibmedian_mlab = nx.to_numpy_array(fibmedian, dtype=np.float64) + fibmedian_dict = {"median_fiber_length": fibmedian_mlab} + fibdev_mlab = nx.to_numpy_array(fibdev, dtype=np.float64) + fibdev_dict = {"fiber_length_std": fibdev_mlab} if intersections: path, name, ext = split_filename(matrix_name) - intersection_matrix_name = op.abspath(name + '_intersections') + ext - iflogger.info('Writing intersection network as %s', - intersection_matrix_name) - nx.write_gpickle(I, intersection_matrix_name) + intersection_matrix_name = op.abspath(name + "_intersections") + ext + iflogger.info("Writing intersection network as %s", intersection_matrix_name) + with 
open(intersection_matrix_name, 'wb') as f: + pickle.dump(I, f, pickle.HIGHEST_PROTOCOL) path, name, ext = split_filename(matrix_mat_name) - if not ext == '.mat': - ext = '.mat' + if ext != ".mat": + ext = ".mat" matrix_mat_name = matrix_mat_name + ext - iflogger.info('Writing matlab matrix as %s', matrix_mat_name) + iflogger.info("Writing matlab matrix as %s", matrix_mat_name) sio.savemat(matrix_mat_name, numfib_dict) if intersections: - intersect_dict = {'intersections': intersection_matrix} - intersection_matrix_mat_name = op.abspath(name + '_intersections') + ext - iflogger.info('Writing intersection matrix as %s', - intersection_matrix_mat_name) + intersect_dict = {"intersections": intersection_matrix} + intersection_matrix_mat_name = op.abspath(name + "_intersections") + ext + iflogger.info("Writing intersection matrix as %s", intersection_matrix_mat_name) sio.savemat(intersection_matrix_mat_name, intersect_dict) - mean_fiber_length_matrix_name = op.abspath( - name + '_mean_fiber_length') + ext - iflogger.info('Writing matlab mean fiber length matrix as %s', - mean_fiber_length_matrix_name) + mean_fiber_length_matrix_name = op.abspath(name + "_mean_fiber_length") + ext + iflogger.info( + "Writing matlab mean fiber length matrix as %s", mean_fiber_length_matrix_name + ) sio.savemat(mean_fiber_length_matrix_name, fibmean_dict) - median_fiber_length_matrix_name = op.abspath( - name + '_median_fiber_length') + ext - iflogger.info('Writing matlab median fiber length matrix as %s', - median_fiber_length_matrix_name) + median_fiber_length_matrix_name = op.abspath(name + "_median_fiber_length") + ext + iflogger.info( + "Writing matlab median fiber length matrix as %s", + median_fiber_length_matrix_name, + ) sio.savemat(median_fiber_length_matrix_name, fibmedian_dict) - fiber_length_std_matrix_name = op.abspath(name + '_fiber_length_std') + ext - iflogger.info('Writing matlab fiber length deviation matrix as %s', - fiber_length_std_matrix_name) + fiber_length_std_matrix_name = op.abspath(name + "_fiber_length_std") + ext + iflogger.info( + "Writing matlab fiber length deviation matrix as %s", + fiber_length_std_matrix_name, + ) sio.savemat(fiber_length_std_matrix_name, fibdev_dict) - fiberlengths_fname = op.abspath(endpoint_name + '_final_fiberslength.npy') - iflogger.info('Storing final fiber length array as %s', fiberlengths_fname) + fiberlengths_fname = op.abspath(endpoint_name + "_final_fiberslength.npy") + iflogger.info("Storing final fiber length array as %s", fiberlengths_fname) np.save(fiberlengths_fname, final_fiberlength_array) - fiberlabels_fname = op.abspath(endpoint_name + '_filtered_fiberslabel.npy') - iflogger.info('Storing all fiber labels (with orphans) as %s', - fiberlabels_fname) - np.save( - fiberlabels_fname, - np.array(fiberlabels, dtype=np.int32), - ) + fiberlabels_fname = op.abspath(endpoint_name + "_filtered_fiberslabel.npy") + iflogger.info("Storing all fiber labels (with orphans) as %s", fiberlabels_fname) + np.save(fiberlabels_fname, np.array(fiberlabels, dtype=np.int32)) - fiberlabels_noorphans_fname = op.abspath( - endpoint_name + '_final_fiberslabels.npy') - iflogger.info('Storing final fiber labels (no orphans) as %s', - fiberlabels_noorphans_fname) + fiberlabels_noorphans_fname = op.abspath(endpoint_name + "_final_fiberslabels.npy") + iflogger.info( + "Storing final fiber labels (no orphans) as %s", fiberlabels_noorphans_fname + ) np.save(fiberlabels_noorphans_fname, final_fiberlabels_array) iflogger.info("Filtering tractography - keeping only no orphan 
fibers") - finalfibers_fname = op.abspath(endpoint_name + '_streamline_final.trk') - stats['endpoint_n_fib'] = save_fibers(hdr, fib, finalfibers_fname, - final_fibers_idx) - stats['endpoints_percent'] = float(stats['endpoint_n_fib']) / float( - stats['orig_n_fib']) * 100 - stats['intersections_percent'] = float( - stats['intersections_n_fib']) / float(stats['orig_n_fib']) * 100 - - out_stats_file = op.abspath(endpoint_name + '_statistics.mat') - iflogger.info('Saving matrix creation statistics as %s', out_stats_file) + finalfibers_fname = op.abspath(endpoint_name + "_streamline_final.trk") + stats["endpoint_n_fib"] = save_fibers(hdr, fib, finalfibers_fname, final_fibers_idx) + stats["endpoints_percent"] = ( + float(stats["endpoint_n_fib"]) / float(stats["orig_n_fib"]) * 100 + ) + stats["intersections_percent"] = ( + float(stats["intersections_n_fib"]) / float(stats["orig_n_fib"]) * 100 + ) + + out_stats_file = op.abspath(endpoint_name + "_statistics.mat") + iflogger.info("Saving matrix creation statistics as %s", out_stats_file) sio.savemat(out_stats_file, stats) def save_fibers(oldhdr, oldfib, fname, indices): - """ Stores a new trackvis file fname using only given indices """ + """Stores a new trackvis file fname using only given indices""" hdrnew = oldhdr.copy() - outstreams = [] - for i in indices: - outstreams.append(oldfib[i]) + outstreams = [oldfib[i] for i in indices] n_fib_out = len(outstreams) - hdrnew['n_count'] = n_fib_out - iflogger.info('Writing final non-orphan fibers as %s', fname) + hdrnew["n_count"] = n_fib_out + iflogger.info("Writing final non-orphan fibers as %s", fname) nb.trackvis.write(fname, outstreams, hdrnew) return n_fib_out class CreateMatrixInputSpec(TraitedSpec): - roi_file = File( - exists=True, mandatory=True, desc='Freesurfer aparc+aseg file') - tract_file = File(exists=True, mandatory=True, desc='Trackvis tract file') + roi_file = File(exists=True, mandatory=True, desc="Freesurfer aparc+aseg file") + tract_file = File(exists=True, mandatory=True, desc="Trackvis tract file") resolution_network_file = File( exists=True, mandatory=True, - desc='Parcellation files from Connectome Mapping Toolkit') + desc="Parcellation files from Connectome Mapping Toolkit", + ) count_region_intersections = traits.Bool( False, usedefault=True, - desc= - 'Counts all of the fiber-region traversals in the connectivity matrix (requires significantly more computational time)' + desc="Counts all of the fiber-region traversals in the connectivity matrix (requires significantly more computational time)", ) out_matrix_file = File( - genfile=True, desc='NetworkX graph describing the connectivity') + genfile=True, desc="NetworkX graph describing the connectivity" + ) out_matrix_mat_file = File( - 'cmatrix.mat', - usedefault=True, - desc='Matlab matrix describing the connectivity') + "cmatrix.mat", usedefault=True, desc="Matlab matrix describing the connectivity" + ) out_mean_fiber_length_matrix_mat_file = File( genfile=True, - desc= - 'Matlab matrix describing the mean fiber lengths between each node.') + desc="Matlab matrix describing the mean fiber lengths between each node.", + ) out_median_fiber_length_matrix_mat_file = File( genfile=True, - desc= - 'Matlab matrix describing the mean fiber lengths between each node.') + desc="Matlab matrix describing the mean fiber lengths between each node.", + ) out_fiber_length_std_matrix_mat_file = File( genfile=True, - desc= - 'Matlab matrix describing the deviation in fiber lengths connecting each node.' 
+ desc="Matlab matrix describing the deviation in fiber lengths connecting each node.", ) out_intersection_matrix_mat_file = File( genfile=True, - desc= - 'Matlab connectivity matrix if all region/fiber intersections are counted.' + desc="Matlab connectivity matrix if all region/fiber intersections are counted.", ) out_endpoint_array_name = File( - genfile=True, desc='Name for the generated endpoint arrays') + genfile=True, desc="Name for the generated endpoint arrays" + ) class CreateMatrixOutputSpec(TraitedSpec): - matrix_file = File( - desc='NetworkX graph describing the connectivity', exists=True) + matrix_file = File(desc="NetworkX graph describing the connectivity", exists=True) intersection_matrix_file = File( - desc='NetworkX graph describing the connectivity', exists=True) + desc="NetworkX graph describing the connectivity", exists=True + ) matrix_files = OutputMultiPath( File( - desc='All of the gpickled network files output by this interface', - exists=True)) + desc="All of the gpickled network files output by this interface", + exists=True, + ) + ) matlab_matrix_files = OutputMultiPath( - File( - desc='All of the MATLAB .mat files output by this interface', - exists=True)) + File(desc="All of the MATLAB .mat files output by this interface", exists=True) + ) matrix_mat_file = File( - desc='Matlab matrix describing the connectivity', exists=True) + desc="Matlab matrix describing the connectivity", exists=True + ) intersection_matrix_mat_file = File( - desc= - 'Matlab matrix describing the mean fiber lengths between each node.', - exists=True) + desc="Matlab matrix describing the mean fiber lengths between each node.", + exists=True, + ) mean_fiber_length_matrix_mat_file = File( - desc= - 'Matlab matrix describing the mean fiber lengths between each node.', - exists=True) + desc="Matlab matrix describing the mean fiber lengths between each node.", + exists=True, + ) median_fiber_length_matrix_mat_file = File( - desc= - 'Matlab matrix describing the median fiber lengths between each node.', - exists=True) + desc="Matlab matrix describing the median fiber lengths between each node.", + exists=True, + ) fiber_length_std_matrix_mat_file = File( - desc= - 'Matlab matrix describing the deviation in fiber lengths connecting each node.', - exists=True) + desc="Matlab matrix describing the deviation in fiber lengths connecting each node.", + exists=True, + ) endpoint_file = File( - desc='Saved Numpy array with the endpoints of each fiber', exists=True) + desc="Saved Numpy array with the endpoints of each fiber", exists=True + ) endpoint_file_mm = File( - desc= - 'Saved Numpy array with the endpoints of each fiber (in millimeters)', - exists=True) + desc="Saved Numpy array with the endpoints of each fiber (in millimeters)", + exists=True, + ) fiber_length_file = File( - desc='Saved Numpy array with the lengths of each fiber', exists=True) + desc="Saved Numpy array with the lengths of each fiber", exists=True + ) fiber_label_file = File( - desc='Saved Numpy array with the labels for each fiber', exists=True) + desc="Saved Numpy array with the labels for each fiber", exists=True + ) fiber_labels_noorphans = File( - desc='Saved Numpy array with the labels for each non-orphan fiber', - exists=True) + desc="Saved Numpy array with the labels for each non-orphan fiber", exists=True + ) filtered_tractography = File( - desc= - 'TrackVis file containing only those fibers originate in one and terminate in another region', - exists=True) + desc="TrackVis file containing only those fibers originate in 


 class CreateMatrix(BaseInterface):
@@ -574,36 +602,42 @@ class CreateMatrix(BaseInterface):
     def _run_interface(self, runtime):
         if isdefined(self.inputs.out_matrix_file):
             path, name, _ = split_filename(self.inputs.out_matrix_file)
-            matrix_file = op.abspath(name + '.pck')
+            matrix_file = op.abspath(name + ".pck")
         else:
-            matrix_file = self._gen_outfilename('.pck')
+            matrix_file = self._gen_outfilename(".pck")

         matrix_mat_file = op.abspath(self.inputs.out_matrix_mat_file)
         path, name, ext = split_filename(matrix_mat_file)
-        if not ext == '.mat':
-            ext = '.mat'
+        if ext != ".mat":
+            ext = ".mat"
             matrix_mat_file = matrix_mat_file + ext

         if isdefined(self.inputs.out_mean_fiber_length_matrix_mat_file):
             mean_fiber_length_matrix_mat_file = op.abspath(
-                self.inputs.out_mean_fiber_length_matrix_mat_file)
+                self.inputs.out_mean_fiber_length_matrix_mat_file
+            )
         else:
             mean_fiber_length_matrix_name = op.abspath(
-                self._gen_outfilename('_mean_fiber_length.mat'))
+                self._gen_outfilename("_mean_fiber_length.mat")
+            )

         if isdefined(self.inputs.out_median_fiber_length_matrix_mat_file):
             median_fiber_length_matrix_mat_file = op.abspath(
-                self.inputs.out_median_fiber_length_matrix_mat_file)
+                self.inputs.out_median_fiber_length_matrix_mat_file
+            )
         else:
             median_fiber_length_matrix_name = op.abspath(
-                self._gen_outfilename('_median_fiber_length.mat'))
+                self._gen_outfilename("_median_fiber_length.mat")
+            )

         if isdefined(self.inputs.out_fiber_length_std_matrix_mat_file):
             fiber_length_std_matrix_mat_file = op.abspath(
-                self.inputs.out_fiber_length_std_matrix_mat_file)
+                self.inputs.out_fiber_length_std_matrix_mat_file
+            )
         else:
             fiber_length_std_matrix_name = op.abspath(
-                self._gen_outfilename('_fiber_length_std.mat'))
+                self._gen_outfilename("_fiber_length_std.mat")
+            )

         if not isdefined(self.inputs.out_endpoint_array_name):
             _, endpoint_name, _ = split_filename(self.inputs.tract_file)
@@ -611,118 +645,135 @@ def _run_interface(self, runtime):
         else:
             endpoint_name = op.abspath(self.inputs.out_endpoint_array_name)

-        cmat(self.inputs.tract_file, self.inputs.roi_file,
-             self.inputs.resolution_network_file, matrix_file, matrix_mat_file,
-             endpoint_name, self.inputs.count_region_intersections)
+        cmat(
+            self.inputs.tract_file,
+            self.inputs.roi_file,
+            self.inputs.resolution_network_file,
+            matrix_file,
+            matrix_mat_file,
+            endpoint_name,
+            self.inputs.count_region_intersections,
+        )
         return runtime

     def _list_outputs(self):
         outputs = self.output_spec().get()
         if isdefined(self.inputs.out_matrix_file):
             path, name, _ = split_filename(self.inputs.out_matrix_file)
-            out_matrix_file = op.abspath(name + '.pck')
-            out_intersection_matrix_file = op.abspath(
-                name + '_intersections.pck')
+            out_matrix_file = op.abspath(name + ".pck")
+            
out_intersection_matrix_file = op.abspath(name + "_intersections.pck") else: - out_matrix_file = op.abspath(self._gen_outfilename('.pck')) + out_matrix_file = op.abspath(self._gen_outfilename(".pck")) out_intersection_matrix_file = op.abspath( - self._gen_outfilename('_intersections.pck')) + self._gen_outfilename("_intersections.pck") + ) - outputs['matrix_file'] = out_matrix_file - outputs['intersection_matrix_file'] = out_intersection_matrix_file + outputs["matrix_file"] = out_matrix_file + outputs["intersection_matrix_file"] = out_intersection_matrix_file matrix_mat_file = op.abspath(self.inputs.out_matrix_mat_file) path, name, ext = split_filename(matrix_mat_file) - if not ext == '.mat': - ext = '.mat' + if ext != ".mat": + ext = ".mat" matrix_mat_file = matrix_mat_file + ext - outputs['matrix_mat_file'] = matrix_mat_file + outputs["matrix_mat_file"] = matrix_mat_file if isdefined(self.inputs.out_mean_fiber_length_matrix_mat_file): - outputs['mean_fiber_length_matrix_mat_file'] = op.abspath( - self.inputs.out_mean_fiber_length_matrix_mat_file) + outputs["mean_fiber_length_matrix_mat_file"] = op.abspath( + self.inputs.out_mean_fiber_length_matrix_mat_file + ) else: - outputs['mean_fiber_length_matrix_mat_file'] = op.abspath( - self._gen_outfilename('_mean_fiber_length.mat')) + outputs["mean_fiber_length_matrix_mat_file"] = op.abspath( + self._gen_outfilename("_mean_fiber_length.mat") + ) if isdefined(self.inputs.out_median_fiber_length_matrix_mat_file): - outputs['median_fiber_length_matrix_mat_file'] = op.abspath( - self.inputs.out_median_fiber_length_matrix_mat_file) + outputs["median_fiber_length_matrix_mat_file"] = op.abspath( + self.inputs.out_median_fiber_length_matrix_mat_file + ) else: - outputs['median_fiber_length_matrix_mat_file'] = op.abspath( - self._gen_outfilename('_median_fiber_length.mat')) + outputs["median_fiber_length_matrix_mat_file"] = op.abspath( + self._gen_outfilename("_median_fiber_length.mat") + ) if isdefined(self.inputs.out_fiber_length_std_matrix_mat_file): - outputs['fiber_length_std_matrix_mat_file'] = op.abspath( - self.inputs.out_fiber_length_std_matrix_mat_file) + outputs["fiber_length_std_matrix_mat_file"] = op.abspath( + self.inputs.out_fiber_length_std_matrix_mat_file + ) else: - outputs['fiber_length_std_matrix_mat_file'] = op.abspath( - self._gen_outfilename('_fiber_length_std.mat')) + outputs["fiber_length_std_matrix_mat_file"] = op.abspath( + self._gen_outfilename("_fiber_length_std.mat") + ) if isdefined(self.inputs.out_intersection_matrix_mat_file): - outputs['intersection_matrix_mat_file'] = op.abspath( - self.inputs.out_intersection_matrix_mat_file) + outputs["intersection_matrix_mat_file"] = op.abspath( + self.inputs.out_intersection_matrix_mat_file + ) else: - outputs['intersection_matrix_mat_file'] = op.abspath( - self._gen_outfilename('_intersections.mat')) + outputs["intersection_matrix_mat_file"] = op.abspath( + self._gen_outfilename("_intersections.mat") + ) if isdefined(self.inputs.out_endpoint_array_name): endpoint_name = self.inputs.out_endpoint_array_name - outputs['endpoint_file'] = op.abspath( - self.inputs.out_endpoint_array_name + '_endpoints.npy') - outputs['endpoint_file_mm'] = op.abspath( - self.inputs.out_endpoint_array_name + '_endpointsmm.npy') - outputs['fiber_length_file'] = op.abspath( - self.inputs.out_endpoint_array_name + '_final_fiberslength.npy' + outputs["endpoint_file"] = op.abspath( + self.inputs.out_endpoint_array_name + "_endpoints.npy" + ) + outputs["endpoint_file_mm"] = op.abspath( + 
self.inputs.out_endpoint_array_name + "_endpointsmm.npy" ) - outputs['fiber_label_file'] = op.abspath( - self.inputs.out_endpoint_array_name + - '_filtered_fiberslabel.npy') - outputs['fiber_labels_noorphans'] = op.abspath( - self.inputs.out_endpoint_array_name + '_final_fiberslabels.npy' + outputs["fiber_length_file"] = op.abspath( + self.inputs.out_endpoint_array_name + "_final_fiberslength.npy" + ) + outputs["fiber_label_file"] = op.abspath( + self.inputs.out_endpoint_array_name + "_filtered_fiberslabel.npy" + ) + outputs["fiber_labels_noorphans"] = op.abspath( + self.inputs.out_endpoint_array_name + "_final_fiberslabels.npy" ) else: _, endpoint_name, _ = split_filename(self.inputs.tract_file) - outputs['endpoint_file'] = op.abspath( - endpoint_name + '_endpoints.npy') - outputs['endpoint_file_mm'] = op.abspath( - endpoint_name + '_endpointsmm.npy') - outputs['fiber_length_file'] = op.abspath( - endpoint_name + '_final_fiberslength.npy') - outputs['fiber_label_file'] = op.abspath( - endpoint_name + '_filtered_fiberslabel.npy') - outputs['fiber_labels_noorphans'] = op.abspath( - endpoint_name + '_final_fiberslabels.npy') + outputs["endpoint_file"] = op.abspath(endpoint_name + "_endpoints.npy") + outputs["endpoint_file_mm"] = op.abspath(endpoint_name + "_endpointsmm.npy") + outputs["fiber_length_file"] = op.abspath( + endpoint_name + "_final_fiberslength.npy" + ) + outputs["fiber_label_file"] = op.abspath( + endpoint_name + "_filtered_fiberslabel.npy" + ) + outputs["fiber_labels_noorphans"] = op.abspath( + endpoint_name + "_final_fiberslabels.npy" + ) if self.inputs.count_region_intersections: - outputs['matrix_files'] = [ - out_matrix_file, out_intersection_matrix_file - ] - outputs['matlab_matrix_files'] = [ - outputs['matrix_mat_file'], - outputs['mean_fiber_length_matrix_mat_file'], - outputs['median_fiber_length_matrix_mat_file'], - outputs['fiber_length_std_matrix_mat_file'], - outputs['intersection_matrix_mat_file'] + outputs["matrix_files"] = [out_matrix_file, out_intersection_matrix_file] + outputs["matlab_matrix_files"] = [ + outputs["matrix_mat_file"], + outputs["mean_fiber_length_matrix_mat_file"], + outputs["median_fiber_length_matrix_mat_file"], + outputs["fiber_length_std_matrix_mat_file"], + outputs["intersection_matrix_mat_file"], ] else: - outputs['matrix_files'] = [out_matrix_file] - outputs['matlab_matrix_files'] = [ - outputs['matrix_mat_file'], - outputs['mean_fiber_length_matrix_mat_file'], - outputs['median_fiber_length_matrix_mat_file'], - outputs['fiber_length_std_matrix_mat_file'] + outputs["matrix_files"] = [out_matrix_file] + outputs["matlab_matrix_files"] = [ + outputs["matrix_mat_file"], + outputs["mean_fiber_length_matrix_mat_file"], + outputs["median_fiber_length_matrix_mat_file"], + outputs["fiber_length_std_matrix_mat_file"], ] - outputs['filtered_tractography'] = op.abspath( - endpoint_name + '_streamline_final.trk') - outputs['filtered_tractography_by_intersections'] = op.abspath( - endpoint_name + '_intersections_streamline_final.trk') - outputs['filtered_tractographies'] = [ - outputs['filtered_tractography'], - outputs['filtered_tractography_by_intersections'] + outputs["filtered_tractography"] = op.abspath( + endpoint_name + "_streamline_final.trk" + ) + outputs["filtered_tractography_by_intersections"] = op.abspath( + endpoint_name + "_intersections_streamline_final.trk" + ) + outputs["filtered_tractographies"] = [ + outputs["filtered_tractography"], + outputs["filtered_tractography_by_intersections"], ] - outputs['stats_file'] = 
op.abspath(endpoint_name + '_statistics.mat') + outputs["stats_file"] = op.abspath(endpoint_name + "_statistics.mat") return outputs def _gen_outfilename(self, ext): @@ -737,27 +788,29 @@ def _gen_outfilename(self, ext): class ROIGenInputSpec(BaseInterfaceInputSpec): aparc_aseg_file = File( - exists=True, mandatory=True, desc='Freesurfer aparc+aseg file') + exists=True, mandatory=True, desc="Freesurfer aparc+aseg file" + ) LUT_file = File( exists=True, - xor=['use_freesurfer_LUT'], - desc='Custom lookup table (cf. FreeSurferColorLUT.txt)') + xor=["use_freesurfer_LUT"], + desc="Custom lookup table (cf. FreeSurferColorLUT.txt)", + ) use_freesurfer_LUT = traits.Bool( - xor=['LUT_file'], - desc= - 'Boolean value; Set to True to use default Freesurfer LUT, False for custom LUT' + xor=["LUT_file"], + desc="Boolean value; Set to True to use default Freesurfer LUT, False for custom LUT", ) freesurfer_dir = Directory( - requires=['use_freesurfer_LUT'], desc='Freesurfer main directory') + requires=["use_freesurfer_LUT"], desc="Freesurfer main directory" + ) out_roi_file = File( - genfile=True, desc='Region of Interest file for connectivity mapping') - out_dict_file = File( - genfile=True, desc='Label dictionary saved in Pickle format') + genfile=True, desc="Region of Interest file for connectivity mapping" + ) + out_dict_file = File(genfile=True, desc="Label dictionary saved in Pickle format") class ROIGenOutputSpec(TraitedSpec): - roi_file = File(desc='Region of Interest file for connectivity mapping') - dict_file = File(desc='Label dictionary saved in Pickle format') + roi_file = File(desc="Region of Interest file for connectivity mapping") + dict_file = File(desc="Label dictionary saved in Pickle format") class ROIGen(BaseInterface): @@ -788,104 +841,166 @@ class ROIGen(BaseInterface): def _run_interface(self, runtime): aparc_aseg_file = self.inputs.aparc_aseg_file aparcpath, aparcname, aparcext = split_filename(aparc_aseg_file) - iflogger.info('Using Aparc+Aseg file: %s', aparcname + aparcext) - niiAPARCimg = nb.load(aparc_aseg_file, mmap=NUMPY_MMAP) - niiAPARCdata = niiAPARCimg.get_data() + iflogger.info("Using Aparc+Aseg file: %s", aparcname + aparcext) + niiAPARCimg = nb.load(aparc_aseg_file) + # Preserve on-disk type + niiAPARCdata = np.asanyarray(niiAPARCimg.dataobj) niiDataLabels = np.unique(niiAPARCdata) numDataLabels = np.size(niiDataLabels) - iflogger.info('Number of labels in image: %s', numDataLabels) + iflogger.info("Number of labels in image: %s", numDataLabels) write_dict = True if self.inputs.use_freesurfer_LUT: - self.LUT_file = self.inputs.freesurfer_dir + '/FreeSurferColorLUT.txt' - iflogger.info('Using Freesurfer LUT: %s', self.LUT_file) - prefix = 'fsLUT' - elif not self.inputs.use_freesurfer_LUT and isdefined( - self.inputs.LUT_file): + self.LUT_file = self.inputs.freesurfer_dir + "/FreeSurferColorLUT.txt" + iflogger.info("Using Freesurfer LUT: %s", self.LUT_file) + prefix = "fsLUT" + elif not self.inputs.use_freesurfer_LUT and isdefined(self.inputs.LUT_file): self.LUT_file = op.abspath(self.inputs.LUT_file) lutpath, lutname, lutext = split_filename(self.LUT_file) - iflogger.info('Using Custom LUT file: %s', lutname + lutext) + iflogger.info("Using Custom LUT file: %s", lutname + lutext) prefix = lutname else: - prefix = 'hardcoded' + prefix = "hardcoded" write_dict = False if isdefined(self.inputs.out_roi_file): roi_file = op.abspath(self.inputs.out_roi_file) else: - roi_file = op.abspath(prefix + '_' + aparcname + '.nii') + roi_file = op.abspath(prefix + "_" + aparcname + 
".nii") if isdefined(self.inputs.out_dict_file): dict_file = op.abspath(self.inputs.out_dict_file) else: - dict_file = op.abspath(prefix + '_' + aparcname + '.pck') + dict_file = op.abspath(prefix + "_" + aparcname + ".pck") if write_dict: - iflogger.info('Lookup table: %s', op.abspath(self.LUT_file)) + iflogger.info("Lookup table: %s", op.abspath(self.LUT_file)) LUTlabelsRGBA = np.loadtxt( self.LUT_file, skiprows=4, usecols=[0, 1, 2, 3, 4, 5], - comments='#', + comments="#", dtype={ - 'names': ('index', 'label', 'R', 'G', 'B', 'A'), - 'formats': ('int', '|S30', 'int', 'int', 'int', 'int') - }) + "names": ("index", "label", "R", "G", "B", "A"), + "formats": ("int", "|S30", "int", "int", "int", "int"), + }, + ) numLUTLabels = np.size(LUTlabelsRGBA) if numLUTLabels < numDataLabels: iflogger.error( - 'LUT file provided does not contain all of the regions in the image' + "LUT file provided does not contain all of the regions in the image" ) - iflogger.error('Removing unmapped regions') - iflogger.info('Number of labels in LUT: %s', numLUTLabels) + iflogger.error("Removing unmapped regions") + iflogger.info("Number of labels in LUT: %s", numLUTLabels) LUTlabelDict = {} """ Create dictionary for input LUT table""" - for labels in range(0, numLUTLabels): + for labels in range(numLUTLabels): LUTlabelDict[LUTlabelsRGBA[labels][0]] = [ - LUTlabelsRGBA[labels][1], LUTlabelsRGBA[labels][2], - LUTlabelsRGBA[labels][3], LUTlabelsRGBA[labels][4], - LUTlabelsRGBA[labels][5] + LUTlabelsRGBA[labels][1], + LUTlabelsRGBA[labels][2], + LUTlabelsRGBA[labels][3], + LUTlabelsRGBA[labels][4], + LUTlabelsRGBA[labels][5], ] - iflogger.info('Printing LUT label dictionary') + iflogger.info("Printing LUT label dictionary") iflogger.info(LUTlabelDict) mapDict = {} - MAPPING = [[1, 2012], [2, 2019], [3, 2032], [4, 2014], [5, 2020], [ - 6, 2018 - ], [7, 2027], [8, 2028], [9, 2003], [10, 2024], [11, 2017], [12, 2026], - [13, 2002], [14, 2023], [15, 2010], [16, 2022], [17, 2031], - [18, 2029], [19, 2008], [20, 2025], [21, 2005], [22, 2021], - [23, 2011], [24, 2013], [25, 2007], [26, 2016], [27, 2006], - [28, 2033], [29, 2009], [30, 2015], [31, 2001], [32, 2030], - [33, 2034], [34, 2035], [35, 49], [36, 50], [37, 51], [ - 38, 52 - ], [39, 58], [40, 53], [41, 54], [42, 1012], [43, 1019], [ - 44, 1032 - ], [45, 1014], [46, 1020], [47, 1018], [48, 1027], [ - 49, 1028 - ], [50, 1003], [51, 1024], [52, 1017], [53, 1026], [ - 54, 1002 - ], [55, 1023], [56, 1010], [57, 1022], [58, 1031], [ - 59, 1029 - ], [60, 1008], [61, 1025], [62, 1005], [63, 1021], [ - 64, 1011 - ], [65, 1013], [66, 1007], [67, 1016], [68, 1006], [ - 69, 1033 - ], [70, 1009], [71, 1015], [72, 1001], [73, 1030], [ - 74, 1034 - ], [75, 1035], [76, 10], [77, 11], [78, 12], [79, 13], [ - 80, 26 - ], [81, 17], [82, 18], [83, 16]] + MAPPING = [ + [1, 2012], + [2, 2019], + [3, 2032], + [4, 2014], + [5, 2020], + [6, 2018], + [7, 2027], + [8, 2028], + [9, 2003], + [10, 2024], + [11, 2017], + [12, 2026], + [13, 2002], + [14, 2023], + [15, 2010], + [16, 2022], + [17, 2031], + [18, 2029], + [19, 2008], + [20, 2025], + [21, 2005], + [22, 2021], + [23, 2011], + [24, 2013], + [25, 2007], + [26, 2016], + [27, 2006], + [28, 2033], + [29, 2009], + [30, 2015], + [31, 2001], + [32, 2030], + [33, 2034], + [34, 2035], + [35, 49], + [36, 50], + [37, 51], + [38, 52], + [39, 58], + [40, 53], + [41, 54], + [42, 1012], + [43, 1019], + [44, 1032], + [45, 1014], + [46, 1020], + [47, 1018], + [48, 1027], + [49, 1028], + [50, 1003], + [51, 1024], + [52, 1017], + [53, 1026], + [54, 1002], + 
[55, 1023], + [56, 1010], + [57, 1022], + [58, 1031], + [59, 1029], + [60, 1008], + [61, 1025], + [62, 1005], + [63, 1021], + [64, 1011], + [65, 1013], + [66, 1007], + [67, 1016], + [68, 1006], + [69, 1033], + [70, 1009], + [71, 1015], + [72, 1001], + [73, 1030], + [74, 1034], + [75, 1035], + [76, 10], + [77, 11], + [78, 12], + [79, 13], + [80, 26], + [81, 17], + [82, 18], + [83, 16], + ] """ Create empty grey matter mask, Populate with only those regions defined in the mapping.""" niiGM = np.zeros(niiAPARCdata.shape, dtype=np.uint) for ma in MAPPING: niiGM[niiAPARCdata == ma[1]] = ma[0] mapDict[ma[0]] = ma[1] - iflogger.info('Grey matter mask created') + iflogger.info("Grey matter mask created") greyMaskLabels = np.unique(niiGM) numGMLabels = np.size(greyMaskLabels) - iflogger.info('Number of grey matter labels: %s', numGMLabels) + iflogger.info("Number of grey matter labels: %s", numGMLabels) labelDict = {} GMlabelDict = {} @@ -893,88 +1008,90 @@ def _run_interface(self, runtime): try: mapDict[label] if write_dict: - GMlabelDict['originalID'] = mapDict[label] + GMlabelDict["originalID"] = mapDict[label] except: - iflogger.info('Label %s not in provided mapping', label) + iflogger.info("Label %s not in provided mapping", label) if write_dict: del GMlabelDict GMlabelDict = {} - GMlabelDict['labels'] = LUTlabelDict[label][0] - GMlabelDict['colors'] = [ - LUTlabelDict[label][1], LUTlabelDict[label][2], - LUTlabelDict[label][3] + GMlabelDict["labels"] = LUTlabelDict[label][0] + GMlabelDict["colors"] = [ + LUTlabelDict[label][1], + LUTlabelDict[label][2], + LUTlabelDict[label][3], ] - GMlabelDict['a'] = LUTlabelDict[label][4] + GMlabelDict["a"] = LUTlabelDict[label][4] labelDict[label] = GMlabelDict - roi_image = nb.Nifti1Image(niiGM, niiAPARCimg.affine, - niiAPARCimg.header) - iflogger.info('Saving ROI File to %s', roi_file) + roi_image = nb.Nifti1Image(niiGM, niiAPARCimg.affine, niiAPARCimg.header) + iflogger.info("Saving ROI File to %s", roi_file) nb.save(roi_image, roi_file) if write_dict: - iflogger.info('Saving Dictionary File to %s in Pickle format', - dict_file) - with open(dict_file, 'w') as f: + iflogger.info("Saving Dictionary File to %s in Pickle format", dict_file) + with open(dict_file, "wb") as f: pickle.dump(labelDict, f) return runtime def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.out_roi_file): - outputs['roi_file'] = op.abspath(self.inputs.out_roi_file) + outputs["roi_file"] = op.abspath(self.inputs.out_roi_file) else: - outputs['roi_file'] = op.abspath(self._gen_outfilename('nii')) + outputs["roi_file"] = op.abspath(self._gen_outfilename("nii")) if isdefined(self.inputs.out_dict_file): - outputs['dict_file'] = op.abspath(self.inputs.out_dict_file) + outputs["dict_file"] = op.abspath(self.inputs.out_dict_file) else: - outputs['dict_file'] = op.abspath(self._gen_outfilename('pck')) + outputs["dict_file"] = op.abspath(self._gen_outfilename("pck")) return outputs def _gen_outfilename(self, ext): _, name, _ = split_filename(self.inputs.aparc_aseg_file) if self.inputs.use_freesurfer_LUT: - prefix = 'fsLUT' - elif not self.inputs.use_freesurfer_LUT and isdefined( - self.inputs.LUT_file): + prefix = "fsLUT" + elif not self.inputs.use_freesurfer_LUT and isdefined(self.inputs.LUT_file): lutpath, lutname, lutext = split_filename(self.inputs.LUT_file) prefix = lutname else: - prefix = 'hardcoded' - return prefix + '_' + name + '.' + ext + prefix = "hardcoded" + return prefix + "_" + name + "." 
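
A note on the image-loading change in `ROIGen._run_interface` above (and the matching `# Preserve on-disk type` change in `create_nodes` below): `img.get_data()` was deprecated by nibabel and removed in nibabel 5.0, and `np.asanyarray(img.dataobj)` is the documented replacement. For unscaled images such as `aparc+aseg` it keeps the integer on-disk dtype rather than promoting to float64. A minimal sketch, assuming an illustrative file path:

```python
# Sketch only; the path is an assumption, not taken from the patch.
import nibabel as nb
import numpy as np

img = nb.load("aparc+aseg.nii.gz")       # any labelled NIfTI volume
data = np.asanyarray(img.dataobj)        # replaces the removed img.get_data()
print(data.dtype, np.unique(data).size)  # integer labels stay integers
```
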
+ ext def create_nodes(roi_file, resolution_network_file, out_filename): G = nx.Graph() gp = nx.read_graphml(resolution_network_file) - roi_image = nb.load(roi_file, mmap=NUMPY_MMAP) - roiData = roi_image.get_data() + roi_image = nb.load(roi_file) + # Preserve on-disk type unless scaled + roiData = np.asanyarray(roi_image.dataobj) for u, d in gp.nodes(data=True): G.add_node(int(u), **d) xyz = tuple( np.mean( - np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), - axis=1)) - G.nodes[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]]) - nx.write_gpickle(G, out_filename) + np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1 + ) + ) + G.nodes[int(u)]["dn_position"] = (xyz[0], xyz[2], -xyz[1]) + with open(out_filename, 'wb') as f: + pickle.dump(G, f, pickle.HIGHEST_PROTOCOL) return out_filename class CreateNodesInputSpec(BaseInterfaceInputSpec): - roi_file = File( - exists=True, mandatory=True, desc='Region of interest file') + roi_file = File(exists=True, mandatory=True, desc="Region of interest file") resolution_network_file = File( exists=True, mandatory=True, - desc='Parcellation file from Connectome Mapping Toolkit') + desc="Parcellation file from Connectome Mapping Toolkit", + ) out_filename = File( - 'nodenetwork.pck', + "nodenetwork.pck", usedefault=True, - desc='Output gpickled network with the nodes defined.') + desc="Output gpickled network with the nodes defined.", + ) class CreateNodesOutputSpec(TraitedSpec): - node_network = File(desc='Output gpickled network with the nodes defined.') + node_network = File(desc="Output gpickled network with the nodes defined.") class CreateNodes(BaseInterface): @@ -995,14 +1112,16 @@ class CreateNodes(BaseInterface): output_spec = CreateNodesOutputSpec def _run_interface(self, runtime): - iflogger.info('Creating nodes...') - create_nodes(self.inputs.roi_file, self.inputs.resolution_network_file, - self.inputs.out_filename) - iflogger.info('Saving node network to %s', - op.abspath(self.inputs.out_filename)) + iflogger.info("Creating nodes...") + create_nodes( + self.inputs.roi_file, + self.inputs.resolution_network_file, + self.inputs.out_filename, + ) + iflogger.info("Saving node network to %s", op.abspath(self.inputs.out_filename)) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['node_network'] = op.abspath(self.inputs.out_filename) + outputs["node_network"] = op.abspath(self.inputs.out_filename) return outputs diff --git a/nipype/interfaces/cmtk/convert.py b/nipype/interfaces/cmtk/convert.py index 93802d5eb8..0c38fd3342 100644 --- a/nipype/interfaces/cmtk/convert.py +++ b/nipype/interfaces/cmtk/convert.py @@ -1,62 +1,68 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - import os import os.path as op import datetime import string -import networkx as nx from ...utils.filemanip import split_filename -from ..base import (BaseInterfaceInputSpec, traits, File, - TraitedSpec, InputMultiPath, isdefined) -from .base import CFFBaseInterface, have_cfflib +from ..base import ( + BaseInterfaceInputSpec, + traits, + File, + TraitedSpec, + InputMultiPath, + isdefined, +) +from .base import CFFBaseInterface + + +def _read_pickle(fname): + import pickle + + with open(fname, 'rb') as f: + return pickle.load(f) class CFFConverterInputSpec(BaseInterfaceInputSpec): graphml_networks = InputMultiPath( - File(exists=True), desc='list of graphML networks') + File(exists=True), desc="list of graphML networks" + ) gpickled_networks = 
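
The `_read_pickle` helper added above, together with the explicit `pickle.dump(..., pickle.HIGHEST_PROTOCOL)` calls in `create_nodes`, replaces `nx.read_gpickle`/`nx.write_gpickle`, which NetworkX deprecated in 2.6 and removed in 3.0. A self-contained sketch of the round-trip this patch standardizes on; `_write_pickle` is an illustrative name, not something the diff adds:

```python
import pickle

import networkx as nx


def _write_pickle(graph, fname):
    # Same pattern as the patch: binary mode, highest protocol.
    with open(fname, "wb") as f:
        pickle.dump(graph, f, pickle.HIGHEST_PROTOCOL)


def _read_pickle(fname):
    # Mirrors the helper the patch adds to convert.py, nbs.py, and nx.py.
    with open(fname, "rb") as f:
        return pickle.load(f)


g = nx.Graph()
g.add_edge(1, 2, number_of_fibers=42)
_write_pickle(g, "network.pck")
assert _read_pickle("network.pck").edges[1, 2]["number_of_fibers"] == 42
```
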
InputMultiPath( - File(exists=True), desc='list of gpickled Networkx graphs') + File(exists=True), desc="list of gpickled Networkx graphs" + ) - gifti_surfaces = InputMultiPath( - File(exists=True), desc='list of GIFTI surfaces') - gifti_labels = InputMultiPath( - File(exists=True), desc='list of GIFTI labels') - nifti_volumes = InputMultiPath( - File(exists=True), desc='list of NIFTI volumes') - tract_files = InputMultiPath( - File(exists=True), desc='list of Trackvis fiber files') + gifti_surfaces = InputMultiPath(File(exists=True), desc="list of GIFTI surfaces") + gifti_labels = InputMultiPath(File(exists=True), desc="list of GIFTI labels") + nifti_volumes = InputMultiPath(File(exists=True), desc="list of NIFTI volumes") + tract_files = InputMultiPath(File(exists=True), desc="list of Trackvis fiber files") timeseries_files = InputMultiPath( - File(exists=True), desc='list of HDF5 timeseries files') + File(exists=True), desc="list of HDF5 timeseries files" + ) script_files = InputMultiPath( - File(exists=True), desc='list of script files to include') + File(exists=True), desc="list of script files to include" + ) data_files = InputMultiPath( - File(exists=True), - desc='list of external data files (i.e. Numpy, HD5, XML) ') - - title = traits.Str(desc='Connectome Title') - creator = traits.Str(desc='Creator') - email = traits.Str(desc='Email address') - publisher = traits.Str(desc='Publisher') - license = traits.Str(desc='License') - rights = traits.Str(desc='Rights') - references = traits.Str(desc='References') - relation = traits.Str(desc='Relation') - species = traits.Str('Homo sapiens', desc='Species', usedefault=True) + File(exists=True), desc="list of external data files (i.e. Numpy, HD5, XML) " + ) + + title = traits.Str(desc="Connectome Title") + creator = traits.Str(desc="Creator") + email = traits.Str(desc="Email address") + publisher = traits.Str(desc="Publisher") + license = traits.Str(desc="License") + rights = traits.Str(desc="Rights") + references = traits.Str(desc="References") + relation = traits.Str(desc="Relation") + species = traits.Str("Homo sapiens", desc="Species", usedefault=True) description = traits.Str( - 'Created with the Nipype CFF converter', - desc='Description', - usedefault=True) + "Created with the Nipype CFF converter", desc="Description", usedefault=True + ) - out_file = File( - 'connectome.cff', usedefault=True, desc='Output connectome file') + out_file = File("connectome.cff", usedefault=True, desc="Output connectome file") class CFFConverterOutputSpec(TraitedSpec): - connectome_file = File(exists=True, desc='Output connectome file') + connectome_file = File(exists=True, desc="Output connectome file") class CFFConverter(CFFBaseInterface): @@ -80,6 +86,7 @@ class CFFConverter(CFFBaseInterface): def _run_interface(self, runtime): import cfflib as cf + a = cf.connectome() if isdefined(self.inputs.title): @@ -91,7 +98,7 @@ def _run_interface(self, runtime): a.connectome_meta.set_creator(self.inputs.creator) else: # Probably only works on some OSes... - a.connectome_meta.set_creator(os.getenv('USER')) + a.connectome_meta.set_creator(os.getenv("USER")) if isdefined(self.inputs.email): a.connectome_meta.set_email(self.inputs.email) @@ -124,7 +131,7 @@ def _run_interface(self, runtime): for ntwk in self.inputs.graphml_networks: # There must be a better way to deal with the unique name problem # (i.e. 
tracks and networks can't use the same name, and previously we were pulling them both from the input files) - ntwk_name = 'Network {cnt}'.format(cnt=count) + ntwk_name = f"Network {count}" a.add_connectome_network_from_graphml(ntwk_name, ntwk) count += 1 @@ -132,7 +139,7 @@ def _run_interface(self, runtime): unpickled = [] for ntwk in self.inputs.gpickled_networks: _, ntwk_name, _ = split_filename(ntwk) - unpickled = nx.read_gpickle(ntwk) + unpickled = _read_pickle(ntwk) cnet = cf.CNetwork(name=ntwk_name) cnet.set_with_nxgraph(unpickled) a.add_connectome_network(cnet) @@ -150,10 +157,11 @@ def _run_interface(self, runtime): if isdefined(self.inputs.gifti_surfaces): for surf in self.inputs.gifti_surfaces: _, surf_name, _ = split_filename(surf) - csurf = cf.CSurface.create_from_gifti("Surface %d - %s" % - (count, surf_name), surf) - csurf.fileformat = 'Gifti' - csurf.dtype = 'Surfaceset' + csurf = cf.CSurface.create_from_gifti( + "Surface %d - %s" % (count, surf_name), surf + ) + csurf.fileformat = "Gifti" + csurf.dtype = "Surfaceset" a.add_connectome_surface(csurf) count += 1 @@ -162,9 +170,10 @@ def _run_interface(self, runtime): for label in self.inputs.gifti_labels: _, label_name, _ = split_filename(label) csurf = cf.CSurface.create_from_gifti( - "Surface Label %d - %s" % (count, label_name), label) - csurf.fileformat = 'Gifti' - csurf.dtype = 'Labels' + "Surface Label %d - %s" % (count, label_name), label + ) + csurf.fileformat = "Gifti" + csurf.dtype = "Labels" a.add_connectome_surface(csurf) count += 1 @@ -183,19 +192,19 @@ def _run_interface(self, runtime): if isdefined(self.inputs.data_files): for data in self.inputs.data_files: _, data_name, _ = split_filename(data) - cda = cf.CData(name=data_name, src=data, fileformat='NumPy') - if not string.find(data_name, 'lengths') == -1: - cda.dtype = 'FinalFiberLengthArray' - if not string.find(data_name, 'endpoints') == -1: - cda.dtype = 'FiberEndpoints' - if not string.find(data_name, 'labels') == -1: - cda.dtype = 'FinalFiberLabels' + cda = cf.CData(name=data_name, src=data, fileformat="NumPy") + if 'lengths' in data_name: + cda.dtype = "FinalFiberLengthArray" + if 'endpoints' in data_name: + cda.dtype = "FiberEndpoints" + if 'labels' in data_name: + cda.dtype = "FinalFiberLabels" a.add_connectome_data(cda) a.print_summary() _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.cff': - ext = '.cff' + if ext != '.cff': + ext = ".cff" cf.save_to_cff(a, op.abspath(name + ext)) return runtime @@ -203,9 +212,9 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.cff': - ext = '.cff' - outputs['connectome_file'] = op.abspath(name + ext) + if ext != '.cff': + ext = ".cff" + outputs["connectome_file"] = op.abspath(name + ext) return outputs @@ -213,20 +222,23 @@ class MergeCNetworksInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, - desc='List of CFF files to extract networks from') + desc="List of CFF files to extract networks from", + ) out_file = File( - 'merged_network_connectome.cff', + "merged_network_connectome.cff", usedefault=True, - desc='Output CFF file with all the networks added') + desc="Output CFF file with all the networks added", + ) class MergeCNetworksOutputSpec(TraitedSpec): connectome_file = File( - exists=True, desc='Output CFF file with all the networks added') + exists=True, desc="Output CFF file with all the networks added" + ) class 
MergeCNetworks(CFFBaseInterface): - """ Merges networks from multiple CFF files into one new CFF file. + """Merges networks from multiple CFF files into one new CFF file. Example ------- @@ -237,11 +249,13 @@ class MergeCNetworks(CFFBaseInterface): >>> mrg.run() # doctest: +SKIP """ + input_spec = MergeCNetworksInputSpec output_spec = MergeCNetworksOutputSpec def _run_interface(self, runtime): import cfflib as cf + extracted_networks = [] for i, con in enumerate(self.inputs.in_files): @@ -252,21 +266,22 @@ def _run_interface(self, runtime): # metadata information ne.load() contitle = mycon.get_connectome_meta().get_title() - ne.set_name(str(i) + ': ' + contitle + ' - ' + ne.get_name()) + ne.set_name(str(i) + ": " + contitle + " - " + ne.get_name()) ne.set_src(ne.get_name()) extracted_networks.append(ne) # Add networks to new connectome newcon = cf.connectome( - title='All CNetworks', connectome_network=extracted_networks) + title="All CNetworks", connectome_network=extracted_networks + ) # Setting additional metadata metadata = newcon.get_connectome_meta() - metadata.set_creator('My Name') - metadata.set_email('My Email') + metadata.set_creator("My Name") + metadata.set_email("My Email") _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.cff': - ext = '.cff' + if ext != '.cff': + ext = ".cff" cf.save_to_cff(newcon, op.abspath(name + ext)) return runtime @@ -274,7 +289,7 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.cff': - ext = '.cff' - outputs['connectome_file'] = op.abspath(name + ext) + if ext != '.cff': + ext = ".cff" + outputs["connectome_file"] = op.abspath(name + ext) return outputs diff --git a/nipype/interfaces/cmtk/nbs.py b/nipype/interfaces/cmtk/nbs.py index f21f2b33ea..b63144cb50 100644 --- a/nipype/interfaces/cmtk/nbs.py +++ b/nipype/interfaces/cmtk/nbs.py @@ -1,37 +1,47 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os.path as op import numpy as np import networkx as nx +import pickle from ... 
import logging -from ..base import (LibraryBaseInterface, BaseInterfaceInputSpec, traits, File, - TraitedSpec, InputMultiPath, OutputMultiPath, isdefined) -from .base import have_cv -iflogger = logging.getLogger('nipype.interface') +from ..base import ( + LibraryBaseInterface, + BaseInterfaceInputSpec, + traits, + File, + TraitedSpec, + InputMultiPath, + OutputMultiPath, + isdefined, +) + +iflogger = logging.getLogger("nipype.interface") + + +def _read_pickle(fname): + with open(fname, 'rb') as f: + return pickle.load(f) def ntwks_to_matrices(in_files, edge_key): - first = nx.read_gpickle(in_files[0]) + first = _read_pickle(in_files[0]) files = len(in_files) nodes = len(first.nodes()) matrix = np.zeros((nodes, nodes, files)) for idx, name in enumerate(in_files): - graph = nx.read_gpickle(name) + graph = _read_pickle(name) for u, v, d in graph.edges(data=True): try: - graph[u][v]['weight'] = d[ - edge_key] # Setting the edge requested edge value as weight value + graph[u][v]["weight"] = d[ + edge_key + ] # Setting the edge requested edge value as weight value except: - raise KeyError( - "the graph edges do not have {} attribute".format( - edge_key)) - matrix[:, :, idx] = nx.to_numpy_matrix(graph) # Retrieve the matrix + raise KeyError(f"the graph edges do not have {edge_key} attribute") + matrix[:, :, idx] = nx.to_numpy_array(graph) # Retrieve the matrix return matrix @@ -39,71 +49,74 @@ class NetworkBasedStatisticInputSpec(BaseInterfaceInputSpec): in_group1 = InputMultiPath( File(exists=True), mandatory=True, - desc='Networks for the first group of subjects') + desc="Networks for the first group of subjects", + ) in_group2 = InputMultiPath( File(exists=True), mandatory=True, - desc='Networks for the second group of subjects') + desc="Networks for the second group of subjects", + ) node_position_network = File( - desc= - 'An optional network used to position the nodes for the output networks' + desc="An optional network used to position the nodes for the output networks" ) number_of_permutations = traits.Int( - 1000, usedefault=True, desc='Number of permutations to perform') - threshold = traits.Float(3, usedefault=True, desc='T-statistic threshold') + 1000, usedefault=True, desc="Number of permutations to perform" + ) + threshold = traits.Float(3, usedefault=True, desc="T-statistic threshold") t_tail = traits.Enum( - 'left', - 'right', - 'both', + "left", + "right", + "both", usedefault=True, - desc='Can be one of "left", "right", or "both"') + desc='Can be one of "left", "right", or "both"', + ) edge_key = traits.Str( - 'number_of_fibers', + "number_of_fibers", usedefault=True, - desc= - 'Usually "number_of_fibers, "fiber_length_mean", "fiber_length_std" for matrices made with CMTK' - 'Sometimes "weight" or "value" for functional networks.') - out_nbs_network = File( - desc='Output network with edges identified by the NBS') + desc='Usually "number_of_fibers", "fiber_length_mean", "fiber_length_std" for matrices made with CMTK. ' + 'Sometimes "weight" or "value" for functional networks.', + ) + out_nbs_network = File(desc="Output network with edges identified by the NBS") out_nbs_pval_network = File( - desc= - 'Output network with p-values to weight the edges identified by the NBS' + desc="Output network with p-values to weight the edges identified by the NBS" ) class NetworkBasedStatisticOutputSpec(TraitedSpec): nbs_network = File( - exists=True, desc='Output network with edges identified by the NBS') + exists=True, desc="Output network with edges identified by the NBS" + ) nbs_pval_network 
= File( exists=True, - desc= - 'Output network with p-values to weight the edges identified by the NBS' + desc="Output network with p-values to weight the edges identified by the NBS", ) network_files = OutputMultiPath( - File(exists=True), - desc='Output network with edges identified by the NBS') + File(exists=True), desc="Output network with edges identified by the NBS" + ) class NetworkBasedStatistic(LibraryBaseInterface): """ Calculates and outputs the average network given a set of input NetworkX gpickle files + See Also + -------- For documentation of Network-based statistic parameters: - - https://github.com/LTS5/connectomeviewer/blob/master/cviewer/libs/pyconto/groupstatistics/nbs/_nbs.py + https://github.com/LTS5/connectomeviewer/blob/master/cviewer/libs/pyconto/groupstatistics/nbs/_nbs.py Example ------- - >>> import nipype.interfaces.cmtk as cmtk >>> nbs = cmtk.NetworkBasedStatistic() >>> nbs.inputs.in_group1 = ['subj1.pck', 'subj2.pck'] # doctest: +SKIP >>> nbs.inputs.in_group2 = ['pat1.pck', 'pat2.pck'] # doctest: +SKIP >>> nbs.run() # doctest: +SKIP + """ + input_spec = NetworkBasedStatisticInputSpec output_spec = NetworkBasedStatisticOutputSpec - _pkg = 'cviewer' + _pkg = "cviewer" def _run_interface(self, runtime): from cviewer.libs.pyconto.groupstatistics import nbs @@ -112,8 +125,16 @@ def _run_interface(self, runtime): K = self.inputs.number_of_permutations TAIL = self.inputs.t_tail edge_key = self.inputs.edge_key - details = edge_key + '-thresh-' + str(THRESH) + '-k-' + str( - K) + '-tail-' + TAIL + '.pck' + details = ( + edge_key + + "-thresh-" + + str(THRESH) + + "-k-" + + str(K) + + "-tail-" + + TAIL + + ".pck" + ) # Fill in the data from the networks X = ntwks_to_matrices(self.inputs.in_group1, edge_key) @@ -121,7 +142,7 @@ def _run_interface(self, runtime): PVAL, ADJ, _ = nbs.compute_nbs(X, Y, THRESH, K, TAIL) - iflogger.info('p-values:') + iflogger.info("p-values:") iflogger.info(PVAL) pADJ = ADJ.copy() @@ -130,8 +151,8 @@ def _run_interface(self, runtime): pADJ[x, y] = PVAL[idx] # Create networkx graphs from the adjacency matrix - nbsgraph = nx.from_numpy_matrix(ADJ) - nbs_pval_graph = nx.from_numpy_matrix(pADJ) + nbsgraph = nx.from_numpy_array(ADJ) + nbs_pval_graph = nx.from_numpy_array(pADJ) # Relabel nodes because they should not start at zero for our convention nbsgraph = nx.relabel_nodes(nbsgraph, lambda x: x + 1) @@ -142,23 +163,26 @@ def _run_interface(self, runtime): else: node_ntwk_name = self.inputs.in_group1[0] - node_network = nx.read_gpickle(node_ntwk_name) - iflogger.info('Populating node dictionaries with attributes from %s', - node_ntwk_name) + node_network = _read_pickle(node_ntwk_name) + iflogger.info( + "Populating node dictionaries with attributes from %s", node_ntwk_name + ) for nid, ndata in node_network.nodes(data=True): nbsgraph.nodes[nid] = ndata nbs_pval_graph.nodes[nid] = ndata - path = op.abspath('NBS_Result_' + details) + path = op.abspath("NBS_Result_" + details) iflogger.info(path) - nx.write_gpickle(nbsgraph, path) - iflogger.info('Saving output NBS edge network as %s', path) + with open(path, 'wb') as f: + pickle.dump(nbsgraph, f, pickle.HIGHEST_PROTOCOL) + iflogger.info("Saving output NBS edge network as %s", path) - pval_path = op.abspath('NBS_P_vals_' + details) + pval_path = op.abspath("NBS_P_vals_" + details) iflogger.info(pval_path) - nx.write_gpickle(nbs_pval_graph, pval_path) - iflogger.info('Saving output p-value network as %s', pval_path) + with open(pval_path, 'wb') as f: + pickle.dump(nbs_pval_graph, f, 
pickle.HIGHEST_PROTOCOL) + iflogger.info("Saving output p-value network as %s", pval_path) return runtime def _list_outputs(self): @@ -168,15 +192,23 @@ def _list_outputs(self): K = self.inputs.number_of_permutations TAIL = self.inputs.t_tail edge_key = self.inputs.edge_key - details = edge_key + '-thresh-' + str(THRESH) + '-k-' + str( - K) + '-tail-' + TAIL + '.pck' - path = op.abspath('NBS_Result_' + details) - pval_path = op.abspath('NBS_P_vals_' + details) - - outputs['nbs_network'] = path - outputs['nbs_pval_network'] = pval_path - outputs['network_files'] = [path, pval_path] + details = ( + edge_key + + "-thresh-" + + str(THRESH) + + "-k-" + + str(K) + + "-tail-" + + TAIL + + ".pck" + ) + path = op.abspath("NBS_Result_" + details) + pval_path = op.abspath("NBS_P_vals_" + details) + + outputs["nbs_network"] = path + outputs["nbs_pval_network"] = pval_path + outputs["network_files"] = [path, pval_path] return outputs def _gen_outfilename(self, name, ext): - return name + '.' + ext + return name + "." + ext diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py index cd6ad4877e..ad72582f3d 100644 --- a/nipype/interfaces/cmtk/nx.py +++ b/nipype/interfaces/cmtk/nx.py @@ -1,10 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, open, range - import os.path as op import pickle @@ -13,19 +8,31 @@ from ... import logging from ...utils.filemanip import split_filename -from ..base import (BaseInterface, BaseInterfaceInputSpec, traits, File, - TraitedSpec, InputMultiPath, OutputMultiPath, isdefined) -from .base import have_cmp +from ..base import ( + BaseInterface, + BaseInterfaceInputSpec, + traits, + File, + TraitedSpec, + InputMultiPath, + OutputMultiPath, + isdefined, +) + +iflogger = logging.getLogger("nipype.interface") -iflogger = logging.getLogger('nipype.interface') + +def _read_pickle(fname): + with open(fname, 'rb') as f: + return pickle.load(f) def read_unknown_ntwk(ntwk): if not isinstance(ntwk, nx.classes.graph.Graph): _, _, ext = split_filename(ntwk) - if ext == '.pck': - ntwk = nx.read_gpickle(ntwk) - elif ext == '.graphml': + if ext == ".pck": + ntwk = _read_pickle(ntwk) + elif ext == ".graphml": ntwk = nx.read_graphml(ntwk) return ntwk @@ -43,34 +50,36 @@ def fix_keys_for_gexf(orig): GEXF Networks can be read in Gephi, however, the keys for the node and edge IDs must be converted to strings """ import networkx as nx + ntwk = nx.Graph() nodes = list(orig.nodes()) edges = list(orig.edges()) for node in nodes: newnodedata = {} newnodedata.update(orig.nodes[node]) - if 'dn_fsname' in orig.nodes[node]: - newnodedata['label'] = orig.nodes[node]['dn_fsname'] + if "dn_fsname" in orig.nodes[node]: + newnodedata["label"] = orig.nodes[node]["dn_fsname"] ntwk.add_node(str(node), **newnodedata) - if 'dn_position' in ntwk.nodes[str( - node)] and 'dn_position' in newnodedata: - ntwk.nodes[str(node)]['dn_position'] = str( - newnodedata['dn_position']) + if "dn_position" in ntwk.nodes[str(node)] and "dn_position" in newnodedata: + ntwk.nodes[str(node)]["dn_position"] = str(newnodedata["dn_position"]) for edge in edges: data = {} data = orig.edge[edge[0]][edge[1]] ntwk.add_edge(str(edge[0]), str(edge[1]), **data) - if 'fiber_length_mean' in ntwk.edge[str(edge[0])][str(edge[1])]: - ntwk.edge[str(edge[0])][str(edge[1])]['fiber_length_mean'] = str( - 
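
`ntwks_to_matrices` and the NBS graph reconstruction above also migrate from `nx.to_numpy_matrix`/`nx.from_numpy_matrix` to the ndarray-based API; the `np.matrix` variants were removed in NetworkX 3.0. A toy round-trip as a sanity sketch of the renamed calls:

```python
import networkx as nx
import numpy as np

g = nx.Graph()
g.add_edge(0, 1, weight=2.0)
adj = nx.to_numpy_array(g)           # plain ndarray, not np.matrix
assert adj[0, 1] == 2.0 and np.allclose(adj, adj.T)  # undirected, symmetric
g2 = nx.from_numpy_array(adj)        # rebuilds weighted edges from the matrix
assert g2.edges[0, 1]["weight"] == 2.0
```
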
data['fiber_length_mean']) - if 'fiber_length_std' in ntwk.edge[str(edge[0])][str(edge[1])]: - ntwk.edge[str(edge[0])][str(edge[1])]['fiber_length_std'] = str( - data['fiber_length_std']) - if 'number_of_fibers' in ntwk.edge[str(edge[0])][str(edge[1])]: - ntwk.edge[str(edge[0])][str(edge[1])]['number_of_fibers'] = str( - data['number_of_fibers']) - if 'value' in ntwk.edge[str(edge[0])][str(edge[1])]: - ntwk.edge[str(edge[0])][str(edge[1])]['value'] = str(data['value']) + if "fiber_length_mean" in ntwk.edge[str(edge[0])][str(edge[1])]: + ntwk.edge[str(edge[0])][str(edge[1])]["fiber_length_mean"] = str( + data["fiber_length_mean"] + ) + if "fiber_length_std" in ntwk.edge[str(edge[0])][str(edge[1])]: + ntwk.edge[str(edge[0])][str(edge[1])]["fiber_length_std"] = str( + data["fiber_length_std"] + ) + if "number_of_fibers" in ntwk.edge[str(edge[0])][str(edge[1])]: + ntwk.edge[str(edge[0])][str(edge[1])]["number_of_fibers"] = str( + data["number_of_fibers"] + ) + if "value" in ntwk.edge[str(edge[0])][str(edge[1])]: + ntwk.edge[str(edge[0])][str(edge[1])]["value"] = str(data["value"]) return ntwk @@ -94,30 +103,34 @@ def average_networks(in_files, ntwk_res_file, group_id): import networkx as nx import os.path as op import scipy.io as sio - iflogger.info('Creating average network for group: %s', group_id) + + iflogger.info("Creating average network for group: %s", group_id) matlab_network_list = [] if len(in_files) == 1: avg_ntwk = read_unknown_ntwk(in_files[0]) else: count_to_keep_edge = np.round(len(in_files) / 2.0) - iflogger.info('Number of networks: %i, an edge must occur in at ' - 'least %i to remain in the average network', - len(in_files), count_to_keep_edge) + iflogger.info( + "Number of networks: %i, an edge must occur in at " + "least %i to remain in the average network", + len(in_files), + count_to_keep_edge, + ) ntwk_res_file = read_unknown_ntwk(ntwk_res_file) - iflogger.info('%i nodes found in network resolution file', - ntwk_res_file.number_of_nodes()) + iflogger.info( + "%i nodes found in network resolution file", ntwk_res_file.number_of_nodes() + ) ntwk = remove_all_edges(ntwk_res_file) counting_ntwk = ntwk.copy() # Sums all the relevant variables - for index, subject in enumerate(in_files): - tmp = nx.read_gpickle(subject) - iflogger.info('File %s has %i edges', subject, - tmp.number_of_edges()) + for subject in in_files: + tmp = _read_pickle(subject) + iflogger.info("File %s has %i edges", subject, tmp.number_of_edges()) edges = list(tmp.edges()) for edge in edges: data = {} data = tmp.edge[edge[0]][edge[1]] - data['count'] = 1 + data["count"] = 1 if ntwk.has_edge(edge[0], edge[1]): current = {} current = ntwk.edge[edge[0]][edge[1]] @@ -127,67 +140,76 @@ def average_networks(in_files, ntwk_res_file, group_id): for node in nodes: data = {} data = ntwk.nodes[node] - if 'value' in tmp.nodes[node]: - data['value'] = data['value'] + tmp.nodes[node]['value'] + if "value" in tmp.nodes[node]: + data["value"] = data["value"] + tmp.nodes[node]["value"] ntwk.add_node(node, **data) # Divides each value by the number of files nodes = list(ntwk.nodes()) edges = list(ntwk.edges()) - iflogger.info('Total network has %i edges', ntwk.number_of_edges()) + iflogger.info("Total network has %i edges", ntwk.number_of_edges()) avg_ntwk = nx.Graph() newdata = {} for node in nodes: data = ntwk.nodes[node] newdata = data - if 'value' in data: - newdata['value'] = data['value'] / len(in_files) - ntwk.nodes[node]['value'] = newdata + if "value" in data: + newdata["value"] = data["value"] / len(in_files) + 
ntwk.nodes[node]["value"] = newdata avg_ntwk.add_node(node, **newdata) edge_dict = {} - edge_dict['count'] = np.zeros((avg_ntwk.number_of_nodes(), - avg_ntwk.number_of_nodes())) + edge_dict["count"] = np.zeros( + (avg_ntwk.number_of_nodes(), avg_ntwk.number_of_nodes()) + ) for edge in edges: data = ntwk.edge[edge[0]][edge[1]] - if ntwk.edge[edge[0]][edge[1]]['count'] >= count_to_keep_edge: - for key in list(data.keys()): - if not key == 'count': + if ntwk.edge[edge[0]][edge[1]]["count"] >= count_to_keep_edge: + for key in data: + if key != "count": data[key] = data[key] / len(in_files) ntwk.edge[edge[0]][edge[1]] = data avg_ntwk.add_edge(edge[0], edge[1], **data) - edge_dict['count'][edge[0] - 1][edge[1] - 1] = ntwk.edge[edge[0]][ - edge[1]]['count'] + edge_dict["count"][edge[0] - 1][edge[1] - 1] = ntwk.edge[edge[0]][edge[1]][ + "count" + ] - iflogger.info('After thresholding, the average network has %i edges', - avg_ntwk.number_of_edges()) + iflogger.info( + "After thresholding, the average network has %i edges", + avg_ntwk.number_of_edges(), + ) avg_edges = avg_ntwk.edges() for edge in avg_edges: data = avg_ntwk.edge[edge[0]][edge[1]] - for key in list(data.keys()): - if not key == 'count': - edge_dict[key] = np.zeros((avg_ntwk.number_of_nodes(), - avg_ntwk.number_of_nodes())) + for key in data: + if key != "count": + edge_dict[key] = np.zeros( + (avg_ntwk.number_of_nodes(), avg_ntwk.number_of_nodes()) + ) edge_dict[key][edge[0] - 1][edge[1] - 1] = data[key] for key in list(edge_dict.keys()): tmp = {} - network_name = group_id + '_' + key + '_average.mat' + network_name = group_id + "_" + key + "_average.mat" matlab_network_list.append(op.abspath(network_name)) tmp[key] = edge_dict[key] sio.savemat(op.abspath(network_name), tmp) - iflogger.info('Saving average network for key: %s as %s', key, - op.abspath(network_name)) + iflogger.info( + "Saving average network for key: %s as %s", + key, + op.abspath(network_name), + ) # Writes the networks and returns the name - network_name = group_id + '_average.pck' - nx.write_gpickle(avg_ntwk, op.abspath(network_name)) - iflogger.info('Saving average network as %s', op.abspath(network_name)) + network_name = group_id + "_average.pck" + with open(op.abspath(network_name), 'wb') as f: + pickle.dump(avg_ntwk, f, pickle.HIGHEST_PROTOCOL) + iflogger.info("Saving average network as %s", op.abspath(network_name)) avg_ntwk = fix_keys_for_gexf(avg_ntwk) - network_name = group_id + '_average.gexf' + network_name = group_id + "_average.gexf" nx.write_gexf(avg_ntwk, op.abspath(network_name)) - iflogger.info('Saving average network as %s', op.abspath(network_name)) + iflogger.info("Saving average network as %s", op.abspath(network_name)) return network_name, matlab_network_list @@ -195,44 +217,46 @@ def compute_node_measures(ntwk, calculate_cliques=False): """ These return node-based measures """ - iflogger.info('Computing node measures:') + iflogger.info("Computing node measures:") measures = {} - iflogger.info('...Computing degree...') - measures['degree'] = np.array(list(ntwk.degree().values())) - iflogger.info('...Computing load centrality...') - measures['load_centrality'] = np.array( - list(nx.load_centrality(ntwk).values())) - iflogger.info('...Computing betweenness centrality...') - measures['betweenness_centrality'] = np.array( - list(nx.betweenness_centrality(ntwk).values())) - iflogger.info('...Computing degree centrality...') - measures['degree_centrality'] = np.array( - list(nx.degree_centrality(ntwk).values())) - iflogger.info('...Computing 
closeness centrality...') - measures['closeness_centrality'] = np.array( - list(nx.closeness_centrality(ntwk).values())) + iflogger.info("...Computing degree...") + measures["degree"] = np.array(list(ntwk.degree().values())) + iflogger.info("...Computing load centrality...") + measures["load_centrality"] = np.array(list(nx.load_centrality(ntwk).values())) + iflogger.info("...Computing betweenness centrality...") + measures["betweenness_centrality"] = np.array( + list(nx.betweenness_centrality(ntwk).values()) + ) + iflogger.info("...Computing degree centrality...") + measures["degree_centrality"] = np.array(list(nx.degree_centrality(ntwk).values())) + iflogger.info("...Computing closeness centrality...") + measures["closeness_centrality"] = np.array( + list(nx.closeness_centrality(ntwk).values()) + ) # iflogger.info('...Computing eigenvector centrality...') # measures['eigenvector_centrality'] = np.array(nx.eigenvector_centrality(ntwk, max_iter=100000).values()) - iflogger.info('...Computing triangles...') - measures['triangles'] = np.array(list(nx.triangles(ntwk).values())) - iflogger.info('...Computing clustering...') - measures['clustering'] = np.array(list(nx.clustering(ntwk).values())) - iflogger.info('...Computing k-core number') - measures['core_number'] = np.array(list(nx.core_number(ntwk).values())) - iflogger.info('...Identifying network isolates...') + iflogger.info("...Computing triangles...") + measures["triangles"] = np.array(list(nx.triangles(ntwk).values())) + iflogger.info("...Computing clustering...") + measures["clustering"] = np.array(list(nx.clustering(ntwk).values())) + iflogger.info("...Computing k-core number") + measures["core_number"] = np.array(list(nx.core_number(ntwk).values())) + iflogger.info("...Identifying network isolates...") isolate_list = nx.isolates(ntwk) binarized = np.zeros((ntwk.number_of_nodes(), 1)) for value in isolate_list: value = value - 1 # Zero indexing binarized[value] = 1 - measures['isolates'] = binarized + measures["isolates"] = binarized if calculate_cliques: - iflogger.info('...Calculating node clique number') - measures['node_clique_number'] = np.array( - list(nx.node_clique_number(ntwk).values())) - iflogger.info('...Computing number of cliques for each node...') - measures['number_of_cliques'] = np.array( - list(nx.number_of_cliques(ntwk).values())) + iflogger.info("...Calculating node clique number") + measures["node_clique_number"] = np.array( + list(nx.node_clique_number(ntwk).values()) + ) + iflogger.info("...Computing number of cliques for each node...") + measures["number_of_cliques"] = np.array( + list(nx.number_of_cliques(ntwk).values()) + ) return measures @@ -240,7 +264,7 @@ def compute_edge_measures(ntwk): """ These return edge-based measures """ - iflogger.info('Computing edge measures:') + iflogger.info("Computing edge measures:") measures = {} # iflogger.info('...Computing google matrix...' 
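
One caveat about `compute_node_measures` as reformatted above: on NetworkX 2.0 and later, `ntwk.degree()` returns a `DegreeView`, which has no `.values()` method, so `list(ntwk.degree().values())` raises `AttributeError` there. This is an observation about the surviving call, not a change the diff makes; wrapping the view in `dict()` is one portable fix:

```python
# DegreeView has no .values() on NetworkX >= 2.0; dict() normalizes it.
import networkx as nx
import numpy as np

g = nx.path_graph(4)
degree = np.array(list(dict(g.degree()).values()))   # [1, 2, 2, 1]
betweenness = np.array(list(nx.betweenness_centrality(g).values()))
print(degree, betweenness)
```
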
#Makes really large networks (500k+ edges)) # measures['google_matrix'] = nx.google_matrix(ntwk) @@ -255,60 +279,50 @@ def compute_dict_measures(ntwk): """ Returns a dictionary """ - iflogger.info('Computing measures which return a dictionary:') + iflogger.info("Computing measures which return a dictionary:") measures = {} - iflogger.info('...Computing rich club coefficient...') - measures['rich_club_coef'] = nx.rich_club_coefficient(ntwk) + iflogger.info("...Computing rich club coefficient...") + measures["rich_club_coef"] = nx.rich_club_coefficient(ntwk) return measures -def compute_singlevalued_measures(ntwk, weighted=True, - calculate_cliques=False): +def compute_singlevalued_measures(ntwk, weighted=True, calculate_cliques=False): """ Returns a single value per network """ - iflogger.info('Computing single valued measures:') + iflogger.info("Computing single valued measures:") measures = {} - iflogger.info('...Computing degree assortativity (pearson number) ...') - try: - measures['degree_pearsonr'] = nx.degree_pearsonr(ntwk) - except AttributeError: # For NetworkX 1.6 - measures[ - 'degree_pearsonr'] = nx.degree_pearson_correlation_coefficient( - ntwk) - iflogger.info('...Computing degree assortativity...') - try: - measures['degree_assortativity'] = nx.degree_assortativity(ntwk) - except AttributeError: - measures['degree_assortativity'] = nx.degree_assortativity_coefficient( - ntwk) - iflogger.info('...Computing transitivity...') - measures['transitivity'] = nx.transitivity(ntwk) - iflogger.info('...Computing number of connected_components...') - measures['number_connected_components'] = nx.number_connected_components( - ntwk) - iflogger.info('...Computing graph density...') - measures['graph_density'] = nx.density(ntwk) - iflogger.info('...Recording number of edges...') - measures['number_of_edges'] = nx.number_of_edges(ntwk) - iflogger.info('...Recording number of nodes...') - measures['number_of_nodes'] = nx.number_of_nodes(ntwk) - iflogger.info('...Computing average clustering...') - measures['average_clustering'] = nx.average_clustering(ntwk) + iflogger.info("...Computing degree assortativity (pearson number) ...") + measures["degree_pearsonr"] = nx.degree_pearson_correlation_coefficient(ntwk) + iflogger.info("...Computing degree assortativity...") + measures["degree_assortativity"] = nx.degree_assortativity_coefficient(ntwk) + iflogger.info("...Computing transitivity...") + measures["transitivity"] = nx.transitivity(ntwk) + iflogger.info("...Computing number of connected_components...") + measures["number_connected_components"] = nx.number_connected_components(ntwk) + iflogger.info("...Computing graph density...") + measures["graph_density"] = nx.density(ntwk) + iflogger.info("...Recording number of edges...") + measures["number_of_edges"] = nx.number_of_edges(ntwk) + iflogger.info("...Recording number of nodes...") + measures["number_of_nodes"] = nx.number_of_nodes(ntwk) + iflogger.info("...Computing average clustering...") + measures["average_clustering"] = nx.average_clustering(ntwk) if nx.is_connected(ntwk): - iflogger.info('...Calculating average shortest path length...') - measures[ - 'average_shortest_path_length'] = nx.average_shortest_path_length( - ntwk, weighted) + iflogger.info("...Calculating average shortest path length...") + measures["average_shortest_path_length"] = nx.average_shortest_path_length( + ntwk, weighted + ) else: - iflogger.info('...Calculating average shortest path length...') - measures[ - 'average_shortest_path_length'] = 
nx.average_shortest_path_length( - nx.connected_component_subgraphs(ntwk)[0], weighted) + iflogger.info("...Calculating average shortest path length...") + measures["average_shortest_path_length"] = nx.average_shortest_path_length( + nx.connected_component_subgraphs(ntwk)[0], weighted + ) if calculate_cliques: - iflogger.info('...Computing graph clique number...') - measures['graph_clique_number'] = nx.graph_clique_number( - ntwk) # out of memory error + iflogger.info("...Computing graph clique number...") + measures["graph_clique_number"] = nx.graph_clique_number( + ntwk + ) # out of memory error return measures @@ -327,8 +341,8 @@ def add_node_data(node_array, ntwk): node_ntwk = nx.Graph() newdata = {} for idx, data in ntwk.nodes(data=True): - if not int(idx) == 0: - newdata['value'] = node_array[int(idx) - 1] + if int(idx) != 0: + newdata["value"] = node_array[int(idx) - 1] data.update(newdata) node_ntwk.add_node(int(idx), **data) return node_ntwk @@ -338,10 +352,10 @@ def add_edge_data(edge_array, ntwk, above=0, below=0): edge_ntwk = ntwk.copy() data = {} for x, row in enumerate(edge_array): - for y in range(0, np.max(np.shape(edge_array[x]))): - if not edge_array[x, y] == 0: - data['value'] = edge_array[x, y] - if data['value'] <= below or data['value'] >= above: + for y in range(np.max(np.shape(edge_array[x]))): + if edge_array[x, y] != 0: + data["value"] = edge_array[x, y] + if data["value"] <= below or data["value"] >= above: if edge_ntwk.has_edge(x + 1, y + 1): old_edge_dict = edge_ntwk.edge[x + 1][y + 1] edge_ntwk.remove_edge(x + 1, y + 1) @@ -351,79 +365,78 @@ def add_edge_data(edge_array, ntwk, above=0, below=0): class NetworkXMetricsInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc='Input network') + in_file = File(exists=True, mandatory=True, desc="Input network") out_k_core = File( - 'k_core', + "k_core", usedefault=True, - desc='Computed k-core network stored as a NetworkX pickle.') + desc="Computed k-core network stored as a NetworkX pickle.", + ) out_k_shell = File( - 'k_shell', + "k_shell", usedefault=True, - desc='Computed k-shell network stored as a NetworkX pickle.') + desc="Computed k-shell network stored as a NetworkX pickle.", + ) out_k_crust = File( - 'k_crust', + "k_crust", usedefault=True, - desc='Computed k-crust network stored as a NetworkX pickle.') + desc="Computed k-crust network stored as a NetworkX pickle.", + ) treat_as_weighted_graph = traits.Bool( True, usedefault=True, - desc= - 'Some network metrics can be calculated while considering only a binarized version of the graph' + desc="Some network metrics can be calculated while considering only a binarized version of the graph", ) compute_clique_related_measures = traits.Bool( False, usedefault=True, - desc= - 'Computing clique-related measures (e.g. node clique number) can be very time consuming' + desc="Computing clique-related measures (e.g. 
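
`compute_singlevalued_measures` now calls `nx.degree_pearson_correlation_coefficient` and `nx.degree_assortativity_coefficient` directly, dropping the try/except fallbacks that only existed for NetworkX 1.6. Note, however, that `nx.connected_component_subgraphs`, still used in the disconnected branch above, was removed in NetworkX 2.4, so on current releases the largest component must be extracted explicitly. A sketch of both points (the modern idiom is a suggestion, not part of the diff):

```python
import networkx as nx

g = nx.karate_club_graph()
r = nx.degree_pearson_correlation_coefficient(g)
a = nx.degree_assortativity_coefficient(g)
print(f"r = {r:.3f}, assortativity = {a:.3f}")  # same quantity, about -0.476

# connected_component_subgraphs() is gone in NetworkX >= 2.4; instead:
largest = g.subgraph(max(nx.connected_components(g), key=len))
print(nx.average_shortest_path_length(largest))
```
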
node clique number) can be very time consuming", ) out_global_metrics_matlab = File( - genfile=True, desc='Output node metrics in MATLAB .mat format') + genfile=True, desc="Output node metrics in MATLAB .mat format" + ) out_node_metrics_matlab = File( - genfile=True, desc='Output node metrics in MATLAB .mat format') + genfile=True, desc="Output node metrics in MATLAB .mat format" + ) out_edge_metrics_matlab = File( - genfile=True, desc='Output edge metrics in MATLAB .mat format') + genfile=True, desc="Output edge metrics in MATLAB .mat format" + ) out_pickled_extra_measures = File( - 'extra_measures', + "extra_measures", usedefault=True, - desc= - 'Network measures for group 1 that return dictionaries stored as a Pickle.' + desc="Network measures for group 1 that return dictionaries stored as a Pickle.", ) class NetworkXMetricsOutputSpec(TraitedSpec): - gpickled_network_files = OutputMultiPath( - File(desc='Output gpickled network files')) + gpickled_network_files = OutputMultiPath(File(desc="Output gpickled network files")) matlab_matrix_files = OutputMultiPath( - File(desc='Output network metrics in MATLAB .mat format')) - global_measures_matlab = File( - desc='Output global metrics in MATLAB .mat format') - node_measures_matlab = File( - desc='Output node metrics in MATLAB .mat format') - edge_measures_matlab = File( - desc='Output edge metrics in MATLAB .mat format') + File(desc="Output network metrics in MATLAB .mat format") + ) + global_measures_matlab = File(desc="Output global metrics in MATLAB .mat format") + node_measures_matlab = File(desc="Output node metrics in MATLAB .mat format") + edge_measures_matlab = File(desc="Output edge metrics in MATLAB .mat format") node_measure_networks = OutputMultiPath( - File(desc='Output gpickled network files for all node-based measures')) + File(desc="Output gpickled network files for all node-based measures") + ) edge_measure_networks = OutputMultiPath( - File(desc='Output gpickled network files for all edge-based measures')) + File(desc="Output gpickled network files for all edge-based measures") + ) k_networks = OutputMultiPath( File( - desc= - 'Output gpickled network files for the k-core, k-shell, and k-crust networks' - )) - k_core = File(desc='Computed k-core network stored as a NetworkX pickle.') - k_shell = File( - desc='Computed k-shell network stored as a NetworkX pickle.') - k_crust = File( - desc='Computed k-crust network stored as a NetworkX pickle.') + desc="Output gpickled network files for the k-core, k-shell, and k-crust networks" + ) + ) + k_core = File(desc="Computed k-core network stored as a NetworkX pickle.") + k_shell = File(desc="Computed k-shell network stored as a NetworkX pickle.") + k_crust = File(desc="Computed k-crust network stored as a NetworkX pickle.") pickled_extra_measures = File( - desc= - 'Network measures for the group that return dictionaries, stored as a Pickle.' + desc="Network measures for the group that return dictionaries, stored as a Pickle." ) matlab_dict_measures = OutputMultiPath( File( - desc= - 'Network measures for the group that return dictionaries, stored as matlab matrices.' - )) + desc="Network measures for the group that return dictionaries, stored as matlab matrices." 
+ ) + ) class NetworkXMetrics(BaseInterface): @@ -432,24 +445,26 @@ class NetworkXMetrics(BaseInterface): Example ------- - >>> import nipype.interfaces.cmtk as cmtk >>> nxmetrics = cmtk.NetworkXMetrics() >>> nxmetrics.inputs.in_file = 'subj1.pck' >>> nxmetrics.run() # doctest: +SKIP + """ + input_spec = NetworkXMetricsInputSpec output_spec = NetworkXMetricsOutputSpec def _run_interface(self, runtime): import scipy.io as sio + global gpickled, nodentwks, edgentwks, kntwks, matlab gpickled = list() nodentwks = list() edgentwks = list() kntwks = list() matlab = list() - ntwk = nx.read_gpickle(self.inputs.in_file) + ntwk = _read_pickle(self.inputs.in_file) # Each block computes, writes, and saves a measure # The names are then added to the output .pck file list @@ -459,70 +474,76 @@ def _run_interface(self, runtime): weighted = self.inputs.treat_as_weighted_graph global_measures = compute_singlevalued_measures( - ntwk, weighted, calculate_cliques) + ntwk, weighted, calculate_cliques + ) if isdefined(self.inputs.out_global_metrics_matlab): global_out_file = op.abspath(self.inputs.out_global_metrics_matlab) else: - global_out_file = op.abspath( - self._gen_outfilename('globalmetrics', 'mat')) - sio.savemat(global_out_file, global_measures, oned_as='column') + global_out_file = op.abspath(self._gen_outfilename("globalmetrics", "mat")) + sio.savemat(global_out_file, global_measures, oned_as="column") matlab.append(global_out_file) node_measures = compute_node_measures(ntwk, calculate_cliques) for key in list(node_measures.keys()): newntwk = add_node_data(node_measures[key], ntwk) - out_file = op.abspath(self._gen_outfilename(key, 'pck')) - nx.write_gpickle(newntwk, out_file) + out_file = op.abspath(self._gen_outfilename(key, "pck")) + with open(out_file, 'wb') as f: + pickle.dump(newntwk, f, pickle.HIGHEST_PROTOCOL) nodentwks.append(out_file) if isdefined(self.inputs.out_node_metrics_matlab): node_out_file = op.abspath(self.inputs.out_node_metrics_matlab) else: - node_out_file = op.abspath( - self._gen_outfilename('nodemetrics', 'mat')) - sio.savemat(node_out_file, node_measures, oned_as='column') + node_out_file = op.abspath(self._gen_outfilename("nodemetrics", "mat")) + sio.savemat(node_out_file, node_measures, oned_as="column") matlab.append(node_out_file) gpickled.extend(nodentwks) edge_measures = compute_edge_measures(ntwk) for key in list(edge_measures.keys()): newntwk = add_edge_data(edge_measures[key], ntwk) - out_file = op.abspath(self._gen_outfilename(key, 'pck')) - nx.write_gpickle(newntwk, out_file) + out_file = op.abspath(self._gen_outfilename(key, "pck")) + with open(out_file, 'wb') as f: + pickle.dump(newntwk, f, pickle.HIGHEST_PROTOCOL) edgentwks.append(out_file) if isdefined(self.inputs.out_edge_metrics_matlab): edge_out_file = op.abspath(self.inputs.out_edge_metrics_matlab) else: - edge_out_file = op.abspath( - self._gen_outfilename('edgemetrics', 'mat')) - sio.savemat(edge_out_file, edge_measures, oned_as='column') + edge_out_file = op.abspath(self._gen_outfilename("edgemetrics", "mat")) + sio.savemat(edge_out_file, edge_measures, oned_as="column") matlab.append(edge_out_file) gpickled.extend(edgentwks) ntwk_measures = compute_network_measures(ntwk) for key in list(ntwk_measures.keys()): - if key == 'k_core': + if key == "k_core": out_file = op.abspath( - self._gen_outfilename(self.inputs.out_k_core, 'pck')) - if key == 'k_shell': + self._gen_outfilename(self.inputs.out_k_core, "pck") + ) + if key == "k_shell": out_file = op.abspath( - 
self._gen_outfilename(self.inputs.out_k_shell, 'pck')) - if key == 'k_crust': + self._gen_outfilename(self.inputs.out_k_shell, "pck") + ) + if key == "k_crust": out_file = op.abspath( - self._gen_outfilename(self.inputs.out_k_crust, 'pck')) - nx.write_gpickle(ntwk_measures[key], out_file) + self._gen_outfilename(self.inputs.out_k_crust, "pck") + ) + with open(out_file, 'wb') as f: + pickle.dump(ntwk_measures[key], f, pickle.HIGHEST_PROTOCOL) kntwks.append(out_file) gpickled.extend(kntwks) out_pickled_extra_measures = op.abspath( - self._gen_outfilename(self.inputs.out_pickled_extra_measures, - 'pck')) + self._gen_outfilename(self.inputs.out_pickled_extra_measures, "pck") + ) dict_measures = compute_dict_measures(ntwk) - iflogger.info('Saving extra measure file to %s in Pickle format', - op.abspath(out_pickled_extra_measures)) - with open(out_pickled_extra_measures, 'w') as fo: - pickle.dump(dict_measures, fo) + iflogger.info( + "Saving extra measure file to %s in Pickle format", + op.abspath(out_pickled_extra_measures), + ) + with open(out_pickled_extra_measures, "wb") as f: + pickle.dump(dict_measures, f) - iflogger.info('Saving MATLAB measures as %s', matlab) + iflogger.info("Saving MATLAB measures as %s", matlab) # Loops through the measures which return a dictionary, # converts the keys and values to a Numpy array, @@ -539,67 +560,72 @@ def _run_interface(self, runtime): values = np.array(dict_measures[key][keyd]) nparrayvalues = np.append(nparrayvalues, values) nparray = np.vstack((nparraykeys, nparrayvalues)) - out_file = op.abspath(self._gen_outfilename(key, 'mat')) + out_file = op.abspath(self._gen_outfilename(key, "mat")) npdict = {} npdict[key] = nparray - sio.savemat(out_file, npdict, oned_as='column') + sio.savemat(out_file, npdict, oned_as="column") dicts.append(out_file) return runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["k_core"] = op.abspath( - self._gen_outfilename(self.inputs.out_k_core, 'pck')) + self._gen_outfilename(self.inputs.out_k_core, "pck") + ) outputs["k_shell"] = op.abspath( - self._gen_outfilename(self.inputs.out_k_shell, 'pck')) + self._gen_outfilename(self.inputs.out_k_shell, "pck") + ) outputs["k_crust"] = op.abspath( - self._gen_outfilename(self.inputs.out_k_crust, 'pck')) + self._gen_outfilename(self.inputs.out_k_crust, "pck") + ) outputs["gpickled_network_files"] = gpickled outputs["k_networks"] = kntwks outputs["node_measure_networks"] = nodentwks outputs["edge_measure_networks"] = edgentwks outputs["matlab_dict_measures"] = dicts outputs["global_measures_matlab"] = op.abspath( - self._gen_outfilename('globalmetrics', 'mat')) + self._gen_outfilename("globalmetrics", "mat") + ) outputs["node_measures_matlab"] = op.abspath( - self._gen_outfilename('nodemetrics', 'mat')) + self._gen_outfilename("nodemetrics", "mat") + ) outputs["edge_measures_matlab"] = op.abspath( - self._gen_outfilename('edgemetrics', 'mat')) + self._gen_outfilename("edgemetrics", "mat") + ) outputs["matlab_matrix_files"] = [ - outputs["global_measures_matlab"], outputs["node_measures_matlab"], - outputs["edge_measures_matlab"] + outputs["global_measures_matlab"], + outputs["node_measures_matlab"], + outputs["edge_measures_matlab"], ] outputs["pickled_extra_measures"] = op.abspath( - self._gen_outfilename(self.inputs.out_pickled_extra_measures, - 'pck')) + self._gen_outfilename(self.inputs.out_pickled_extra_measures, "pck") + ) return outputs def _gen_outfilename(self, name, ext): - return name + '.' + ext + return name + "." 
+ ext class AverageNetworksInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath( - File(exists=True), - mandatory=True, - desc='Networks for a group of subjects') + File(exists=True), mandatory=True, desc="Networks for a group of subjects" + ) resolution_network_file = File( exists=True, - desc= - 'Parcellation files from Connectome Mapping Toolkit. This is not necessary' - ', but if included, the interface will output the statistical maps as networkx graphs.' + desc="Parcellation files from Connectome Mapping Toolkit. This is not necessary" + ", but if included, the interface will output the statistical maps as networkx graphs.", ) - group_id = traits.Str('group1', usedefault=True, desc='ID for group') - out_gpickled_groupavg = File( - desc='Average network saved as a NetworkX .pck') - out_gexf_groupavg = File(desc='Average network saved as a .gexf file') + group_id = traits.Str("group1", usedefault=True, desc="ID for group") + out_gpickled_groupavg = File(desc="Average network saved as a NetworkX .pck") + out_gexf_groupavg = File(desc="Average network saved as a .gexf file") class AverageNetworksOutputSpec(TraitedSpec): - gpickled_groupavg = File(desc='Average network saved as a NetworkX .pck') - gexf_groupavg = File(desc='Average network saved as a .gexf file') + gpickled_groupavg = File(desc="Average network saved as a NetworkX .pck") + gexf_groupavg = File(desc="Average network saved as a .gexf file") matlab_groupavgs = OutputMultiPath( - File(desc='Average network saved as a .gexf file')) + File(desc="Average network saved as a .gexf file") + ) class AverageNetworks(BaseInterface): @@ -611,13 +637,13 @@ class AverageNetworks(BaseInterface): Example ------- - >>> import nipype.interfaces.cmtk as cmtk >>> avg = cmtk.AverageNetworks() >>> avg.inputs.in_files = ['subj1.pck', 'subj2.pck'] >>> avg.run() # doctest: +SKIP """ + input_spec = AverageNetworksInputSpec output_spec = AverageNetworksOutputSpec @@ -629,29 +655,28 @@ def _run_interface(self, runtime): global matlab_network_list network_name, matlab_network_list = average_networks( - self.inputs.in_files, ntwk_res_file, self.inputs.group_id) + self.inputs.in_files, ntwk_res_file, self.inputs.group_id + ) return runtime def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_gpickled_groupavg): outputs["gpickled_groupavg"] = op.abspath( - self._gen_outfilename(self.inputs.group_id + '_average', - 'pck')) + self._gen_outfilename(self.inputs.group_id + "_average", "pck") + ) else: - outputs["gpickled_groupavg"] = op.abspath( - self.inputs.out_gpickled_groupavg) + outputs["gpickled_groupavg"] = op.abspath(self.inputs.out_gpickled_groupavg) if not isdefined(self.inputs.out_gexf_groupavg): outputs["gexf_groupavg"] = op.abspath( - self._gen_outfilename(self.inputs.group_id + '_average', - 'gexf')) + self._gen_outfilename(self.inputs.group_id + "_average", "gexf") + ) else: - outputs["gexf_groupavg"] = op.abspath( - self.inputs.out_gexf_groupavg) + outputs["gexf_groupavg"] = op.abspath(self.inputs.out_gexf_groupavg) outputs["matlab_groupavgs"] = matlab_network_list return outputs def _gen_outfilename(self, name, ext): - return name + '.' + ext + return name + "." 
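# Editorial sketch, not part of the patch: the changes above swap
# nx.read_gpickle/nx.write_gpickle (removed in networkx 3.0) for plain
# pickle calls; _read_pickle is the module helper introduced earlier in this
# patch. A minimal round-trip showing why binary-mode handles are required
# (pickle produces bytes; a text-mode handle raises TypeError):
import pickle
import networkx as nx

def write_pickle(graph, path):  # hypothetical name, mirrors _read_pickle
    with open(path, "wb") as f:
        pickle.dump(graph, f, pickle.HIGHEST_PROTOCOL)

def read_pickle(path):
    with open(path, "rb") as f:
        return pickle.load(f)

g = nx.path_graph(4)
write_pickle(g, "example.pck")
assert nx.is_isomorphic(g, read_pickle("example.pck"))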
+ ext diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py index 0e25e8eb10..65062247d8 100644 --- a/nipype/interfaces/cmtk/parcellation.py +++ b/nipype/interfaces/cmtk/parcellation.py @@ -1,10 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range - import os import os.path as op import shutil @@ -14,172 +9,342 @@ import networkx as nx from ... import logging -from ..base import (BaseInterface, LibraryBaseInterface, - BaseInterfaceInputSpec, traits, File, - TraitedSpec, Directory, isdefined) -from .base import have_cmp -iflogger = logging.getLogger('nipype.interface') +from ..base import ( + LibraryBaseInterface, + BaseInterfaceInputSpec, + traits, + File, + TraitedSpec, + Directory, + isdefined, +) + +iflogger = logging.getLogger("nipype.interface") def create_annot_label(subject_id, subjects_dir, fs_dir, parcellation_name): - import cmp + from cmp.configuration import PipelineConfiguration from cmp.util import runCmd + iflogger.info("Create the cortical labels necessary for our ROIs") iflogger.info("=================================================") - fs_label_dir = op.join(op.join(subjects_dir, subject_id), 'label') + fs_label_dir = op.join(op.join(subjects_dir, subject_id), "label") output_dir = op.abspath(op.curdir) paths = [] - cmp_config = cmp.configuration.PipelineConfiguration() + cmp_config = PipelineConfiguration() cmp_config.parcellation_scheme = "Lausanne2008" - for hemi in ['lh', 'rh']: - spath = cmp_config._get_lausanne_parcellation('Lausanne2008')[ - parcellation_name]['fs_label_subdir_name'] % hemi + for hemi in ["lh", "rh"]: + spath = ( + cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name][ + "fs_label_subdir_name" + ] + % hemi + ) paths.append(spath) for p in paths: try: - os.makedirs(op.join('.', p)) + os.makedirs(op.join(".", p)) except: pass - if '33' in parcellation_name: + if "33" in parcellation_name: comp = [ - ('rh', 'myatlas_36_rh.gcs', 'rh.myaparc_36.annot', - 'regenerated_rh_36', 'myaparc_36'), - ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot', - 'regenerated_rh_60', 'myaparc_60'), - ('lh', 'myatlas_36_lh.gcs', 'lh.myaparc_36.annot', - 'regenerated_lh_36', 'myaparc_36'), - ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot', - 'regenerated_lh_60', 'myaparc_60'), + ( + "rh", + "myatlas_36_rh.gcs", + "rh.myaparc_36.annot", + "regenerated_rh_36", + "myaparc_36", + ), + ( + "rh", + "myatlas_60_rh.gcs", + "rh.myaparc_60.annot", + "regenerated_rh_60", + "myaparc_60", + ), + ( + "lh", + "myatlas_36_lh.gcs", + "lh.myaparc_36.annot", + "regenerated_lh_36", + "myaparc_36", + ), + ( + "lh", + "myatlas_60_lh.gcs", + "lh.myaparc_60.annot", + "regenerated_lh_60", + "myaparc_60", + ), ] - elif '60' in parcellation_name: + elif "60" in parcellation_name: comp = [ - ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot', - 'regenerated_rh_60', 'myaparc_60'), - ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot', - 'regenerated_lh_60', 'myaparc_60'), + ( + "rh", + "myatlas_60_rh.gcs", + "rh.myaparc_60.annot", + "regenerated_rh_60", + "myaparc_60", + ), + ( + "lh", + "myatlas_60_lh.gcs", + "lh.myaparc_60.annot", + "regenerated_lh_60", + "myaparc_60", + ), ] - elif '125' in parcellation_name: + elif "125" in parcellation_name: comp = [ - ('rh', 'myatlas_125_rh.gcs', 'rh.myaparc_125.annot', - 'regenerated_rh_125', 
'myaparc_125'), - ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot', - 'regenerated_rh_60', 'myaparc_60'), - ('lh', 'myatlas_125_lh.gcs', 'lh.myaparc_125.annot', - 'regenerated_lh_125', 'myaparc_125'), - ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot', - 'regenerated_lh_60', 'myaparc_60'), + ( + "rh", + "myatlas_125_rh.gcs", + "rh.myaparc_125.annot", + "regenerated_rh_125", + "myaparc_125", + ), + ( + "rh", + "myatlas_60_rh.gcs", + "rh.myaparc_60.annot", + "regenerated_rh_60", + "myaparc_60", + ), + ( + "lh", + "myatlas_125_lh.gcs", + "lh.myaparc_125.annot", + "regenerated_lh_125", + "myaparc_125", + ), + ( + "lh", + "myatlas_60_lh.gcs", + "lh.myaparc_60.annot", + "regenerated_lh_60", + "myaparc_60", + ), ] - elif '250' in parcellation_name: + elif "250" in parcellation_name: comp = [ - ('rh', 'myatlas_250_rh.gcs', 'rh.myaparc_250.annot', - 'regenerated_rh_250', 'myaparc_250'), - ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot', - 'regenerated_rh_60', 'myaparc_60'), - ('lh', 'myatlas_250_lh.gcs', 'lh.myaparc_250.annot', - 'regenerated_lh_250', 'myaparc_250'), - ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot', - 'regenerated_lh_60', 'myaparc_60'), + ( + "rh", + "myatlas_250_rh.gcs", + "rh.myaparc_250.annot", + "regenerated_rh_250", + "myaparc_250", + ), + ( + "rh", + "myatlas_60_rh.gcs", + "rh.myaparc_60.annot", + "regenerated_rh_60", + "myaparc_60", + ), + ( + "lh", + "myatlas_250_lh.gcs", + "lh.myaparc_250.annot", + "regenerated_lh_250", + "myaparc_250", + ), + ( + "lh", + "myatlas_60_lh.gcs", + "lh.myaparc_60.annot", + "regenerated_lh_60", + "myaparc_60", + ), ] else: comp = [ - ('rh', 'myatlas_36_rh.gcs', 'rh.myaparc_36.annot', - 'regenerated_rh_36', 'myaparc_36'), - ('rh', 'myatlasP1_16_rh.gcs', 'rh.myaparcP1_16.annot', - 'regenerated_rh_500', 'myaparcP1_16'), - ('rh', 'myatlasP17_28_rh.gcs', 'rh.myaparcP17_28.annot', - 'regenerated_rh_500', 'myaparcP17_28'), - ('rh', 'myatlasP29_36_rh.gcs', 'rh.myaparcP29_36.annot', - 'regenerated_rh_500', 'myaparcP29_36'), - ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot', - 'regenerated_rh_60', 'myaparc_60'), - ('rh', 'myatlas_125_rh.gcs', 'rh.myaparc_125.annot', - 'regenerated_rh_125', 'myaparc_125'), - ('rh', 'myatlas_250_rh.gcs', 'rh.myaparc_250.annot', - 'regenerated_rh_250', 'myaparc_250'), - ('lh', 'myatlas_36_lh.gcs', 'lh.myaparc_36.annot', - 'regenerated_lh_36', 'myaparc_36'), - ('lh', 'myatlasP1_16_lh.gcs', 'lh.myaparcP1_16.annot', - 'regenerated_lh_500', 'myaparcP1_16'), - ('lh', 'myatlasP17_28_lh.gcs', 'lh.myaparcP17_28.annot', - 'regenerated_lh_500', 'myaparcP17_28'), - ('lh', 'myatlasP29_36_lh.gcs', 'lh.myaparcP29_36.annot', - 'regenerated_lh_500', 'myaparcP29_36'), - ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot', - 'regenerated_lh_60', 'myaparc_60'), - ('lh', 'myatlas_125_lh.gcs', 'lh.myaparc_125.annot', - 'regenerated_lh_125', 'myaparc_125'), - ('lh', 'myatlas_250_lh.gcs', 'lh.myaparc_250.annot', - 'regenerated_lh_250', 'myaparc_250'), + ( + "rh", + "myatlas_36_rh.gcs", + "rh.myaparc_36.annot", + "regenerated_rh_36", + "myaparc_36", + ), + ( + "rh", + "myatlasP1_16_rh.gcs", + "rh.myaparcP1_16.annot", + "regenerated_rh_500", + "myaparcP1_16", + ), + ( + "rh", + "myatlasP17_28_rh.gcs", + "rh.myaparcP17_28.annot", + "regenerated_rh_500", + "myaparcP17_28", + ), + ( + "rh", + "myatlasP29_36_rh.gcs", + "rh.myaparcP29_36.annot", + "regenerated_rh_500", + "myaparcP29_36", + ), + ( + "rh", + "myatlas_60_rh.gcs", + "rh.myaparc_60.annot", + "regenerated_rh_60", + "myaparc_60", + ), + ( + "rh", + "myatlas_125_rh.gcs", + 
"rh.myaparc_125.annot", + "regenerated_rh_125", + "myaparc_125", + ), + ( + "rh", + "myatlas_250_rh.gcs", + "rh.myaparc_250.annot", + "regenerated_rh_250", + "myaparc_250", + ), + ( + "lh", + "myatlas_36_lh.gcs", + "lh.myaparc_36.annot", + "regenerated_lh_36", + "myaparc_36", + ), + ( + "lh", + "myatlasP1_16_lh.gcs", + "lh.myaparcP1_16.annot", + "regenerated_lh_500", + "myaparcP1_16", + ), + ( + "lh", + "myatlasP17_28_lh.gcs", + "lh.myaparcP17_28.annot", + "regenerated_lh_500", + "myaparcP17_28", + ), + ( + "lh", + "myatlasP29_36_lh.gcs", + "lh.myaparcP29_36.annot", + "regenerated_lh_500", + "myaparcP29_36", + ), + ( + "lh", + "myatlas_60_lh.gcs", + "lh.myaparc_60.annot", + "regenerated_lh_60", + "myaparc_60", + ), + ( + "lh", + "myatlas_125_lh.gcs", + "lh.myaparc_125.annot", + "regenerated_lh_125", + "myaparc_125", + ), + ( + "lh", + "myatlas_250_lh.gcs", + "lh.myaparc_250.annot", + "regenerated_lh_250", + "myaparc_250", + ), ] log = cmp_config.get_logger() for out in comp: - mris_cmd = 'mris_ca_label %s %s "%s/surf/%s.sphere.reg" "%s" "%s" ' % ( - subject_id, out[0], op.join(subjects_dir, subject_id), out[0], + mris_cmd = 'mris_ca_label {} {} "{}/surf/{}.sphere.reg" "{}" "{}" '.format( + subject_id, + out[0], + op.join(subjects_dir, subject_id), + out[0], cmp_config.get_lausanne_atlas(out[1]), - op.join(fs_label_dir, out[2])) + op.join(fs_label_dir, out[2]), + ) runCmd(mris_cmd, log) - iflogger.info('-----------') + iflogger.info("-----------") annot = '--annotation "%s"' % out[4] - mri_an_cmd = 'mri_annotation2label --subject %s --hemi %s --outdir "%s" %s' % ( - subject_id, out[0], op.join(output_dir, out[3]), annot) + mri_an_cmd = ( + 'mri_annotation2label --subject {} --hemi {} --outdir "{}" {}'.format( + subject_id, + out[0], + op.join(output_dir, out[3]), + annot, + ) + ) iflogger.info(mri_an_cmd) runCmd(mri_an_cmd, log) - iflogger.info('-----------') - iflogger.info(os.environ['SUBJECTS_DIR']) + iflogger.info("-----------") + iflogger.info(os.environ["SUBJECTS_DIR"]) # extract cc and unknown to add to tractography mask, we do not want this as a region of interest # in FS 5.0, unknown and corpuscallosum are not available for the 35 scale (why?), # but for the other scales only, take the ones from _60 - rhun = op.join(output_dir, 'rh.unknown.label') - lhun = op.join(output_dir, 'lh.unknown.label') - rhco = op.join(output_dir, 'rh.corpuscallosum.label') - lhco = op.join(output_dir, 'lh.corpuscallosum.label') - shutil.copy( - op.join(output_dir, 'regenerated_rh_60', 'rh.unknown.label'), rhun) + rhun = op.join(output_dir, "rh.unknown.label") + lhun = op.join(output_dir, "lh.unknown.label") + rhco = op.join(output_dir, "rh.corpuscallosum.label") + lhco = op.join(output_dir, "lh.corpuscallosum.label") + shutil.copy(op.join(output_dir, "regenerated_rh_60", "rh.unknown.label"), rhun) + shutil.copy(op.join(output_dir, "regenerated_lh_60", "lh.unknown.label"), lhun) shutil.copy( - op.join(output_dir, 'regenerated_lh_60', 'lh.unknown.label'), lhun) - shutil.copy( - op.join(output_dir, 'regenerated_rh_60', 'rh.corpuscallosum.label'), - rhco) + op.join(output_dir, "regenerated_rh_60", "rh.corpuscallosum.label"), rhco + ) shutil.copy( - op.join(output_dir, 'regenerated_lh_60', 'lh.corpuscallosum.label'), - lhco) + op.join(output_dir, "regenerated_lh_60", "lh.corpuscallosum.label"), lhco + ) - mri_cmd = """mri_label2vol --label "%s" --label "%s" --label "%s" --label "%s" --temp "%s" --o "%s" --identity """ % ( - rhun, lhun, rhco, lhco, - op.join(op.join(subjects_dir, subject_id), 'mri', 
'orig.mgz'), - op.join(fs_label_dir, 'cc_unknown.nii.gz')) + mri_cmd = ( + """mri_label2vol --label "%s" --label "%s" --label "%s" --label "%s" --temp "%s" --o "%s" --identity """ + % ( + rhun, + lhun, + rhco, + lhco, + op.join(op.join(subjects_dir, subject_id), "mri", "orig.mgz"), + op.join(fs_label_dir, "cc_unknown.nii.gz"), + ) + ) runCmd(mri_cmd, log) - runCmd('mris_volmask %s' % subject_id, log) - mri_cmd = 'mri_convert -i "%s/mri/ribbon.mgz" -o "%s/mri/ribbon.nii.gz"' % ( - op.join(subjects_dir, subject_id), op.join(subjects_dir, subject_id)) + runCmd("mris_volmask %s" % subject_id, log) + mri_cmd = 'mri_convert -i "{}/mri/ribbon.mgz" -o "{}/mri/ribbon.nii.gz"'.format( + op.join(subjects_dir, subject_id), + op.join(subjects_dir, subject_id), + ) runCmd(mri_cmd, log) - mri_cmd = 'mri_convert -i "%s/mri/aseg.mgz" -o "%s/mri/aseg.nii.gz"' % ( - op.join(subjects_dir, subject_id), op.join(subjects_dir, subject_id)) + mri_cmd = 'mri_convert -i "{}/mri/aseg.mgz" -o "{}/mri/aseg.nii.gz"'.format( + op.join(subjects_dir, subject_id), + op.join(subjects_dir, subject_id), + ) runCmd(mri_cmd, log) iflogger.info("[ DONE ]") def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): - """ Creates the ROI_%s.nii.gz files using the given parcellation information - from networks. Iteratively create volume. """ - import cmp + """Creates the ROI_%s.nii.gz files using the given parcellation information + from networks. Iteratively create volume.""" + from cmp.configuration import PipelineConfiguration from cmp.util import runCmd + iflogger.info("Create the ROIs:") output_dir = op.abspath(op.curdir) fs_dir = op.join(subjects_dir, subject_id) - cmp_config = cmp.configuration.PipelineConfiguration() + cmp_config = PipelineConfiguration() cmp_config.parcellation_scheme = "Lausanne2008" log = cmp_config.get_logger() - parval = cmp_config._get_lausanne_parcellation('Lausanne2008')[ - parcellation_name] - pgpath = parval['node_information_graphml'] - aseg = nb.load(op.join(fs_dir, 'mri', 'aseg.nii.gz')) - asegd = aseg.get_data() + parval = cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name] + pgpath = parval["node_information_graphml"] + aseg = nb.load(op.join(fs_dir, "mri", "aseg.nii.gz")) + asegd = np.asanyarray(aseg.dataobj) # identify cortical voxels, right (3) and left (42) hemispheres idxr = np.where(asegd == 3) @@ -193,7 +358,7 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): shape = (25, 25, 25) center = np.array(shape) // 2 # dist: distances from the center of the neighbourhood - dist = np.zeros(shape, dtype='float32') + dist = np.zeros(shape, dtype="float32") for x in range(shape[0]): for y in range(shape[1]): for z in range(shape[2]): @@ -202,8 +367,8 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): iflogger.info("Working on parcellation: ") iflogger.info( - cmp_config._get_lausanne_parcellation('Lausanne2008')[ - parcellation_name]) + cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name] + ) iflogger.info("========================") pg = nx.read_graphml(pgpath) # each node represents a brain region @@ -215,52 +380,55 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): count = count + 1 iflogger.info(brv) iflogger.info(brk) - if brv['dn_hemisphere'] == 'left': - hemi = 'lh' - elif brv['dn_hemisphere'] == 'right': - hemi = 'rh' - if brv['dn_region'] == 'subcortical': + if brv["dn_hemisphere"] == "left": + hemi = "lh" + elif brv["dn_hemisphere"] == 
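# Editorial sketch, not part of the patch: the nested x/y/z loops above fill
# the 25**3 Euclidean distance kernel one voxel at a time; numpy can build
# the same kernel vectorized (equivalent values, illustration only):
import numpy as np

shape = (25, 25, 25)
center = np.array(shape) // 2
grid = np.indices(shape)  # (3, 25, 25, 25) array of voxel coordinates
dist = np.sqrt(((grid - center[:, None, None, None]) ** 2).sum(axis=0))
dist = dist.astype("float32")
assert dist[tuple(center)] == 0.0  # zero distance at the kernel centre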
"right": + hemi = "rh" + if brv["dn_region"] == "subcortical": iflogger.info(brv) - iflogger.info('---------------------') - iflogger.info('Work on brain region: %s', brv['dn_region']) - iflogger.info('Freesurfer Name: %s', brv['dn_fsname']) - iflogger.info('Region %s of %s', count, pg.number_of_nodes()) - iflogger.info('---------------------') + iflogger.info("---------------------") + iflogger.info("Work on brain region: %s", brv["dn_region"]) + iflogger.info("Freesurfer Name: %s", brv["dn_fsname"]) + iflogger.info("Region %s of %s", count, pg.number_of_nodes()) + iflogger.info("---------------------") # if it is subcortical, retrieve roi from aseg - idx = np.where(asegd == int(brv['dn_fs_aseg_val'])) - rois[idx] = int(brv['dn_correspondence_id']) + idx = np.where(asegd == int(brv["dn_fs_aseg_val"])) + rois[idx] = int(brv["dn_correspondence_id"]) - elif brv['dn_region'] == 'cortical': + elif brv["dn_region"] == "cortical": iflogger.info(brv) - iflogger.info('---------------------') - iflogger.info('Work on brain region: %s', brv['dn_region']) - iflogger.info('Freesurfer Name: %s', brv['dn_fsname']) - iflogger.info('Region %s of %s', count, pg.number_of_nodes()) - iflogger.info('---------------------') - - labelpath = op.join(output_dir, - parval['fs_label_subdir_name'] % hemi) + iflogger.info("---------------------") + iflogger.info("Work on brain region: %s", brv["dn_region"]) + iflogger.info("Freesurfer Name: %s", brv["dn_fsname"]) + iflogger.info("Region %s of %s", count, pg.number_of_nodes()) + iflogger.info("---------------------") + + labelpath = op.join(output_dir, parval["fs_label_subdir_name"] % hemi) # construct .label file name - fname = '%s.%s.label' % (hemi, brv['dn_fsname']) + fname = "{}.{}.label".format(hemi, brv["dn_fsname"]) # execute fs mri_label2vol to generate volume roi from the label file # store it in temporary file to be overwritten for each region - mri_cmd = 'mri_label2vol --label "%s" --temp "%s" --o "%s" --identity' % ( - op.join(labelpath, fname), op.join(fs_dir, 'mri', 'orig.mgz'), - op.join(output_dir, 'tmp.nii.gz')) + mri_cmd = ( + 'mri_label2vol --label "{}" --temp "{}" --o "{}" --identity'.format( + op.join(labelpath, fname), + op.join(fs_dir, "mri", "orig.mgz"), + op.join(output_dir, "tmp.nii.gz"), + ) + ) runCmd(mri_cmd, log) - tmp = nb.load(op.join(output_dir, 'tmp.nii.gz')) - tmpd = tmp.get_data() + tmp = nb.load(op.join(output_dir, "tmp.nii.gz")) + tmpd = np.asanyarray(tmp.dataobj) # find voxel and set them to intensityvalue in rois idx = np.where(tmpd == 1) - rois[idx] = int(brv['dn_correspondence_id']) + rois[idx] = int(brv["dn_correspondence_id"]) # store volume eg in ROI_scale33.nii.gz - out_roi = op.abspath('ROI_%s.nii.gz' % parcellation_name) + out_roi = op.abspath("ROI_%s.nii.gz" % parcellation_name) # update the header hdr = aseg.header @@ -278,22 +446,20 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): # loop throughout all the voxels belonging to the aseg GM volume for j in range(xx.size): if rois[xx[j], yy[j], zz[j]] == 0: - local = extract( - rois, shape, position=(xx[j], yy[j], zz[j]), fill=0) + local = extract(rois, shape, position=(xx[j], yy[j], zz[j]), fill=0) mask = local.copy() mask[np.nonzero(local > 0)] = 1 thisdist = np.multiply(dist, mask) thisdist[np.nonzero(thisdist == 0)] = np.amax(thisdist) - value = np.int_( - local[np.nonzero(thisdist == np.amin(thisdist))]) + value = np.int_(local[np.nonzero(thisdist == np.amin(thisdist))]) if value.size > 1: counts = np.bincount(value) value = 
np.argmax(counts) rois[xx[j], yy[j], zz[j]] = value # store volume eg in ROIv_scale33.nii.gz - out_roi = op.abspath('ROIv_%s.nii.gz' % parcellation_name) - iflogger.info('Save output image to %s', out_roi) + out_roi = op.abspath("ROIv_%s.nii.gz" % parcellation_name) + iflogger.info("Save output image to %s", out_roi) img = nb.Nifti1Image(rois, aseg.affine, hdr2) nb.save(img, out_roi) @@ -301,17 +467,19 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name): - import cmp + from cmp.configuration import PipelineConfiguration import scipy.ndimage.morphology as nd + iflogger.info("Create white matter mask") fs_dir = op.join(subjects_dir, subject_id) - cmp_config = cmp.configuration.PipelineConfiguration() + cmp_config = PipelineConfiguration() cmp_config.parcellation_scheme = "Lausanne2008" - pgpath = cmp_config._get_lausanne_parcellation('Lausanne2008')[ - parcellation_name]['node_information_graphml'] + pgpath = cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name][ + "node_information_graphml" + ] # load ribbon as basis for white matter mask - fsmask = nb.load(op.join(fs_dir, 'mri', 'ribbon.nii.gz')) - fsmaskd = fsmask.get_data() + fsmask = nb.load(op.join(fs_dir, "mri", "ribbon.nii.gz")) + fsmaskd = np.asanyarray(fsmask.dataobj) wmmask = np.zeros(fsmaskd.shape) # extract right and left white matter @@ -322,8 +490,8 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name): wmmask[idx_rh] = 1 # remove subcortical nuclei from white matter mask - aseg = nb.load(op.join(fs_dir, 'mri', 'aseg.nii.gz')) - asegd = aseg.get_data() + aseg = nb.load(op.join(fs_dir, "mri", "aseg.nii.gz")) + asegd = np.asanyarray(aseg.dataobj) # need binary erosion function imerode = nd.binary_erosion @@ -344,21 +512,36 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name): # lateral ventricles, thalamus proper and caudate # the latter two removed for better erosion, but put back afterwards - idx = np.where((asegd == 4) | (asegd == 43) | (asegd == 11) | (asegd == 50) - | (asegd == 31) | (asegd == 63) | (asegd == 10) - | (asegd == 49)) + idx = np.where( + (asegd == 4) + | (asegd == 43) + | (asegd == 11) + | (asegd == 50) + | (asegd == 31) + | (asegd == 63) + | (asegd == 10) + | (asegd == 49) + ) csfA[idx] = 1 csfA = imerode(imerode(csfA, se1), se) # thalmus proper and cuadate are put back because they are not lateral ventricles - idx = np.where((asegd == 11) | (asegd == 50) | (asegd == 10) - | (asegd == 49)) + idx = np.where((asegd == 11) | (asegd == 50) | (asegd == 10) | (asegd == 49)) csfA[idx] = 0 # REST CSF, IE 3RD AND 4TH VENTRICULE AND EXTRACEREBRAL CSF - idx = np.where((asegd == 5) | (asegd == 14) | (asegd == 15) | (asegd == 24) - | (asegd == 44) | (asegd == 72) | (asegd == 75) - | (asegd == 76) | (asegd == 213) | (asegd == 221)) + idx = np.where( + (asegd == 5) + | (asegd == 14) + | (asegd == 15) + | (asegd == 24) + | (asegd == 44) + | (asegd == 72) + | (asegd == 75) + | (asegd == 76) + | (asegd == 213) + | (asegd == 221) + ) # 43 ??, 4?? 213?, 221? # more to discuss. 
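# Editorial sketch, not part of the patch: each chained
# np.where((asegd == a) | (asegd == b) | ...) above tests membership in a
# fixed set of FreeSurfer aseg labels; np.isin states that intent directly
# (same label lists as above, toy volume for illustration):
import numpy as np

asegd = np.zeros((2, 2, 2), dtype=int)  # stand-in for the loaded aseg data
asegd[0, 0, 0] = 4   # left lateral ventricle
asegd[1, 1, 1] = 14  # third ventricle
ventricles_and_nuclei = [4, 43, 11, 50, 31, 63, 10, 49]
rest_csf = [5, 14, 15, 24, 44, 72, 75, 76, 213, 221]
csfA = np.isin(asegd, ventricles_and_nuclei).astype(np.uint8)
csfB = np.isin(asegd, rest_csf).astype(np.uint8)
assert csfA[0, 0, 0] == 1 and csfB[1, 1, 1] == 1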
for i in [5, 14, 15, 24, 44, 72, 75, 76, 213, 221]: @@ -393,152 +576,177 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name): remaining[idx] = 1 # now remove all the structures from the white matter - idx = np.where((csfA != 0) | (csfB != 0) | (gr_ncl != 0) - | (remaining != 0)) + idx = np.where((csfA != 0) | (csfB != 0) | (gr_ncl != 0) | (remaining != 0)) wmmask[idx] = 0 iflogger.info( "Removing lateral ventricles and eroded grey nuclei and brainstem from white matter mask" ) # ADD voxels from 'cc_unknown.nii.gz' dataset - ccun = nb.load(op.join(fs_dir, 'label', 'cc_unknown.nii.gz')) - ccund = ccun.get_data() + ccun = nb.load(op.join(fs_dir, "label", "cc_unknown.nii.gz")) + ccund = np.asanyarray(ccun.dataobj) idx = np.where(ccund != 0) iflogger.info("Add corpus callosum and unknown to wm mask") wmmask[idx] = 1 # check if we should subtract the cortical rois from this parcellation - iflogger.info('Loading ROI_%s.nii.gz to subtract cortical ROIs from white ' - 'matter mask', parcellation_name) - roi = nb.load(op.join(op.curdir, 'ROI_%s.nii.gz' % parcellation_name)) - roid = roi.get_data() + iflogger.info( + "Loading ROI_%s.nii.gz to subtract cortical ROIs from white matter mask", + parcellation_name, + ) + roi = nb.load(op.join(op.curdir, "ROI_%s.nii.gz" % parcellation_name)) + roid = np.asanyarray(roi.dataobj) assert roid.shape[0] == wmmask.shape[0] pg = nx.read_graphml(pgpath) for brk, brv in pg.nodes(data=True): - if brv['dn_region'] == 'cortical': - iflogger.info('Subtracting region %s with intensity value %s', - brv['dn_region'], brv['dn_correspondence_id']) - idx = np.where(roid == int(brv['dn_correspondence_id'])) + if brv["dn_region"] == "cortical": + iflogger.info( + "Subtracting region %s with intensity value %s", + brv["dn_region"], + brv["dn_correspondence_id"], + ) + idx = np.where(roid == int(brv["dn_correspondence_id"])) wmmask[idx] = 0 # output white matter mask. 
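# Editorial sketch, not part of the patch: the subtraction loop above keys
# off GraphML node attributes (dn_region, dn_correspondence_id). A minimal
# model of that access pattern with an in-memory graph (invented values;
# the real graph comes from nx.read_graphml(pgpath)):
import networkx as nx

pg = nx.Graph()
pg.add_node("n1", dn_region="cortical", dn_correspondence_id="12")
for brk, brv in pg.nodes(data=True):
    if brv["dn_region"] == "cortical":
        assert int(brv["dn_correspondence_id"]) == 12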
crop and move it afterwards - wm_out = op.join(fs_dir, 'mri', 'fsmask_1mm.nii.gz') + wm_out = op.join(fs_dir, "mri", "fsmask_1mm.nii.gz") img = nb.Nifti1Image(wmmask, fsmask.affine, fsmask.header) - iflogger.info('Save white matter mask: %s', wm_out) + iflogger.info("Save white matter mask: %s", wm_out) nb.save(img, wm_out) -def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name, - out_roi_file, dilation): +def crop_and_move_datasets( + subject_id, subjects_dir, fs_dir, parcellation_name, out_roi_file, dilation +): + from cmp.configuration import PipelineConfiguration from cmp.util import runCmd + fs_dir = op.join(subjects_dir, subject_id) - cmp_config = cmp.configuration.PipelineConfiguration() + cmp_config = PipelineConfiguration() cmp_config.parcellation_scheme = "Lausanne2008" log = cmp_config.get_logger() output_dir = op.abspath(op.curdir) - iflogger.info('Cropping and moving datasets to %s', output_dir) - ds = [(op.join(fs_dir, 'mri', 'aseg.nii.gz'), - op.abspath('aseg.nii.gz')), (op.join(fs_dir, 'mri', - 'ribbon.nii.gz'), - op.abspath('ribbon.nii.gz')), - (op.join(fs_dir, 'mri', 'fsmask_1mm.nii.gz'), - op.abspath('fsmask_1mm.nii.gz')), (op.join(fs_dir, 'label', - 'cc_unknown.nii.gz'), - op.abspath('cc_unknown.nii.gz'))] - - ds.append((op.abspath('ROI_%s.nii.gz' % parcellation_name), - op.abspath('ROI_HR_th.nii.gz'))) + iflogger.info("Cropping and moving datasets to %s", output_dir) + ds = [ + (op.join(fs_dir, "mri", "aseg.nii.gz"), op.abspath("aseg.nii.gz")), + (op.join(fs_dir, "mri", "ribbon.nii.gz"), op.abspath("ribbon.nii.gz")), + (op.join(fs_dir, "mri", "fsmask_1mm.nii.gz"), op.abspath("fsmask_1mm.nii.gz")), + ( + op.join(fs_dir, "label", "cc_unknown.nii.gz"), + op.abspath("cc_unknown.nii.gz"), + ), + ] + + ds.append( + ( + op.abspath("ROI_%s.nii.gz" % parcellation_name), + op.abspath("ROI_HR_th.nii.gz"), + ) + ) if dilation is True: - ds.append((op.abspath('ROIv_%s.nii.gz' % parcellation_name), - op.abspath('ROIv_HR_th.nii.gz'))) - orig = op.join(fs_dir, 'mri', 'orig', '001.mgz') + ds.append( + ( + op.abspath("ROIv_%s.nii.gz" % parcellation_name), + op.abspath("ROIv_HR_th.nii.gz"), + ) + ) + orig = op.join(fs_dir, "mri", "orig", "001.mgz") for d in ds: - iflogger.info('Processing %s:', d[0]) + iflogger.info("Processing %s:", d[0]) if not op.exists(d[0]): - raise Exception('File %s does not exist.' % d[0]) + raise Exception("File %s does not exist." 
% d[0]) # reslice to original volume because the roi creation with freesurfer # changed to 256x256x256 resolution - mri_cmd = 'mri_convert -rl "%s" -rt nearest "%s" -nc "%s"' % (orig, - d[0], - d[1]) + mri_cmd = f'mri_convert -rl "{orig}" -rt nearest "{d[0]}" -nc "{d[1]}"' runCmd(mri_cmd, log) def extract(Z, shape, position, fill): - """ Extract voxel neighbourhood -Parameters ----------- -Z: the original data -shape: tuple containing neighbourhood dimensions -position: tuple containing central point indexes -fill: value for the padding of Z -Returns -------- -R: the neighbourhood of the specified point in Z -""" - R = np.ones(shape, dtype=Z.dtype) * \ - fill # initialize output block to the fill value - P = np.array(list(position)).astype( - int) # position coordinates(numpy array) - Rs = np.array(list(R.shape)).astype( - int) # output block dimensions (numpy array) - Zs = np.array(list(Z.shape)).astype( - int) # original volume dimensions (numpy array) + """Extract voxel neighbourhood + + Parameters + ---------- + Z : array-like + the original data + shape : tuple + tuple containing neighbourhood dimensions + position : tuple + tuple containing central point indexes + fill : float + value for the padding of Z + + Returns + ------- + R : ndarray + the neighbourhood of the specified point in Z + + """ + R = ( + np.ones(shape, dtype=Z.dtype) * fill + ) # initialize output block to the fill value + P = np.array(list(position)).astype(int) # position coordinates(numpy array) + Rs = np.array(list(R.shape)).astype(int) # output block dimensions (numpy array) + Zs = np.array(list(Z.shape)).astype(int) # original volume dimensions (numpy array) R_start = np.zeros(len(shape)).astype(int) R_stop = np.array(list(shape)).astype(int) - Z_start = (P - Rs // 2) + Z_start = P - Rs // 2 Z_start_cor = (np.maximum(Z_start, 0)).tolist() # handle borders R_start = R_start + (Z_start_cor - Z_start) Z_stop = (P + Rs // 2) + Rs % 2 Z_stop_cor = (np.minimum(Z_stop, Zs)).tolist() # handle borders R_stop = R_stop - (Z_stop - Z_stop_cor) - R[R_start[0]:R_stop[0], R_start[1]:R_stop[1], R_start[2]:R_stop[ - 2]] = Z[Z_start_cor[0]:Z_stop_cor[0], Z_start_cor[1]:Z_stop_cor[1], - Z_start_cor[2]:Z_stop_cor[2]] + R[R_start[0] : R_stop[0], R_start[1] : R_stop[1], R_start[2] : R_stop[2]] = Z[ + Z_start_cor[0] : Z_stop_cor[0], + Z_start_cor[1] : Z_stop_cor[1], + Z_start_cor[2] : Z_stop_cor[2], + ] return R class ParcellateInputSpec(BaseInterfaceInputSpec): - subject_id = traits.String(mandatory=True, desc='Subject ID') + subject_id = traits.String(mandatory=True, desc="Subject ID") parcellation_name = traits.Enum( - 'scale500', ['scale33', 'scale60', 'scale125', 'scale250', 'scale500'], - usedefault=True) - freesurfer_dir = Directory(exists=True, desc='Freesurfer main directory') - subjects_dir = Directory(exists=True, desc='Freesurfer subjects directory') + "scale500", + ["scale33", "scale60", "scale125", "scale250", "scale500"], + usedefault=True, + ) + freesurfer_dir = Directory(exists=True, desc="Freesurfer main directory") + subjects_dir = Directory(exists=True, desc="Freesurfer subjects directory") out_roi_file = File( - genfile=True, desc='Region of Interest file for connectivity mapping') + genfile=True, desc="Region of Interest file for connectivity mapping" + ) dilation = traits.Bool( False, usedefault=True, - desc='Dilate cortical parcels? Useful for fMRI connectivity') + desc="Dilate cortical parcels? 
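# Editorial example, not part of the patch: a quick check of extract()
# above. At a corner voxel the neighbourhood runs off the volume, and the
# out-of-bounds portion is padded with the fill value:
import numpy as np

Z = np.arange(27).reshape(3, 3, 3)
R = extract(Z, shape=(3, 3, 3), position=(0, 0, 0), fill=-1)
assert R[1, 1, 1] == Z[0, 0, 0]  # block centre holds the requested voxel
assert R[0, 0, 0] == -1          # padded corner outside the volume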
Useful for fMRI connectivity", + ) class ParcellateOutputSpec(TraitedSpec): roi_file = File( - exists=True, desc='Region of Interest file for connectivity mapping') - roiv_file = File( - desc='Region of Interest file for fMRI connectivity mapping') - white_matter_mask_file = File(exists=True, desc='White matter mask file') + exists=True, desc="Region of Interest file for connectivity mapping" + ) + roiv_file = File(desc="Region of Interest file for fMRI connectivity mapping") + white_matter_mask_file = File(exists=True, desc="White matter mask file") cc_unknown_file = File( - desc='Image file with regions labelled as unknown cortical structures', - exists=True) - ribbon_file = File( - desc='Image file detailing the cortical ribbon', exists=True) + desc="Image file with regions labelled as unknown cortical structures", + exists=True, + ) + ribbon_file = File(desc="Image file detailing the cortical ribbon", exists=True) aseg_file = File( - desc= - 'Automated segmentation file converted from Freesurfer "subjects" directory', - exists=True) + desc='Automated segmentation file converted from Freesurfer "subjects" directory', + exists=True, + ) roi_file_in_structural_space = File( - desc= - 'ROI image resliced to the dimensions of the original structural image', - exists=True) + desc="ROI image resliced to the dimensions of the original structural image", + exists=True, + ) dilated_roi_file_in_structural_space = File( - desc= - 'dilated ROI image resliced to the dimensions of the original structural image' + desc="dilated ROI image resliced to the dimensions of the original structural image" ) @@ -564,53 +772,66 @@ class Parcellate(LibraryBaseInterface): input_spec = ParcellateInputSpec output_spec = ParcellateOutputSpec - _pkg = 'cmp' - imports = ('scipy', ) + _pkg = "cmp" + imports = ("scipy",) def _run_interface(self, runtime): if self.inputs.subjects_dir: - os.environ.update({'SUBJECTS_DIR': self.inputs.subjects_dir}) + os.environ.update({"SUBJECTS_DIR": self.inputs.subjects_dir}) if not os.path.exists( - op.join(self.inputs.subjects_dir, self.inputs.subject_id)): + op.join(self.inputs.subjects_dir, self.inputs.subject_id) + ): raise Exception iflogger.info("ROI_HR_th.nii.gz / fsmask_1mm.nii.gz CREATION") iflogger.info("=============================================") - create_annot_label(self.inputs.subject_id, self.inputs.subjects_dir, - self.inputs.freesurfer_dir, - self.inputs.parcellation_name) - create_roi(self.inputs.subject_id, self.inputs.subjects_dir, - self.inputs.freesurfer_dir, self.inputs.parcellation_name, - self.inputs.dilation) - create_wm_mask(self.inputs.subject_id, self.inputs.subjects_dir, - self.inputs.freesurfer_dir, - self.inputs.parcellation_name) + create_annot_label( + self.inputs.subject_id, + self.inputs.subjects_dir, + self.inputs.freesurfer_dir, + self.inputs.parcellation_name, + ) + create_roi( + self.inputs.subject_id, + self.inputs.subjects_dir, + self.inputs.freesurfer_dir, + self.inputs.parcellation_name, + self.inputs.dilation, + ) + create_wm_mask( + self.inputs.subject_id, + self.inputs.subjects_dir, + self.inputs.freesurfer_dir, + self.inputs.parcellation_name, + ) crop_and_move_datasets( - self.inputs.subject_id, self.inputs.subjects_dir, - self.inputs.freesurfer_dir, self.inputs.parcellation_name, - self.inputs.out_roi_file, self.inputs.dilation) + self.inputs.subject_id, + self.inputs.subjects_dir, + self.inputs.freesurfer_dir, + self.inputs.parcellation_name, + self.inputs.out_roi_file, + self.inputs.dilation, + ) return runtime def 
_list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.out_roi_file): - outputs['roi_file'] = op.abspath(self.inputs.out_roi_file) + outputs["roi_file"] = op.abspath(self.inputs.out_roi_file) else: - outputs['roi_file'] = op.abspath( - self._gen_outfilename('nii.gz', 'ROI')) + outputs["roi_file"] = op.abspath(self._gen_outfilename("nii.gz", "ROI")) if self.inputs.dilation is True: - outputs['roiv_file'] = op.abspath( - self._gen_outfilename('nii.gz', 'ROIv')) - outputs['white_matter_mask_file'] = op.abspath('fsmask_1mm.nii.gz') - outputs['cc_unknown_file'] = op.abspath('cc_unknown.nii.gz') - outputs['ribbon_file'] = op.abspath('ribbon.nii.gz') - outputs['aseg_file'] = op.abspath('aseg.nii.gz') - outputs['roi_file_in_structural_space'] = op.abspath( - 'ROI_HR_th.nii.gz') + outputs["roiv_file"] = op.abspath(self._gen_outfilename("nii.gz", "ROIv")) + outputs["white_matter_mask_file"] = op.abspath("fsmask_1mm.nii.gz") + outputs["cc_unknown_file"] = op.abspath("cc_unknown.nii.gz") + outputs["ribbon_file"] = op.abspath("ribbon.nii.gz") + outputs["aseg_file"] = op.abspath("aseg.nii.gz") + outputs["roi_file_in_structural_space"] = op.abspath("ROI_HR_th.nii.gz") if self.inputs.dilation is True: - outputs['dilated_roi_file_in_structural_space'] = op.abspath( - 'ROIv_HR_th.nii.gz') + outputs["dilated_roi_file_in_structural_space"] = op.abspath( + "ROIv_HR_th.nii.gz" + ) return outputs - def _gen_outfilename(self, ext, prefix='ROI'): - return prefix + '_' + self.inputs.parcellation_name + '.' + ext + def _gen_outfilename(self, ext, prefix="ROI"): + return prefix + "_" + self.inputs.parcellation_name + "." + ext diff --git a/nipype/interfaces/cmtk/tests/__init__.py b/nipype/interfaces/cmtk/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/cmtk/tests/__init__.py +++ b/nipype/interfaces/cmtk/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/cmtk/tests/test_auto_AverageNetworks.py b/nipype/interfaces/cmtk/tests/test_auto_AverageNetworks.py index a80bbe757a..e115acad83 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_AverageNetworks.py +++ b/nipype/interfaces/cmtk/tests/test_auto_AverageNetworks.py @@ -1,25 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..nx import AverageNetworks def test_AverageNetworks_inputs(): input_map = dict( - group_id=dict(usedefault=True, ), - in_files=dict(mandatory=True, ), - out_gexf_groupavg=dict(), - out_gpickled_groupavg=dict(), - resolution_network_file=dict(), + group_id=dict( + usedefault=True, + ), + in_files=dict( + mandatory=True, + ), + out_gexf_groupavg=dict( + extensions=None, + ), + out_gpickled_groupavg=dict( + extensions=None, + ), + resolution_network_file=dict( + extensions=None, + ), ) inputs = AverageNetworks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AverageNetworks_outputs(): output_map = dict( - gexf_groupavg=dict(), - gpickled_groupavg=dict(), + gexf_groupavg=dict( + extensions=None, + ), + gpickled_groupavg=dict( + extensions=None, + ), matlab_groupavgs=dict(), ) outputs = AverageNetworks.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_auto_CFFBaseInterface.py b/nipype/interfaces/cmtk/tests/test_auto_CFFBaseInterface.py index 942f477518..449ffe2c8c 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_CFFBaseInterface.py +++ 
b/nipype/interfaces/cmtk/tests/test_auto_CFFBaseInterface.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import CFFBaseInterface diff --git a/nipype/interfaces/cmtk/tests/test_auto_CFFConverter.py b/nipype/interfaces/cmtk/tests/test_auto_CFFConverter.py index 00a1acea98..ee7b0459ef 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_CFFConverter.py +++ b/nipype/interfaces/cmtk/tests/test_auto_CFFConverter.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import CFFConverter @@ -7,7 +6,9 @@ def test_CFFConverter_inputs(): input_map = dict( creator=dict(), data_files=dict(), - description=dict(usedefault=True, ), + description=dict( + usedefault=True, + ), email=dict(), gifti_labels=dict(), gifti_surfaces=dict(), @@ -15,13 +16,18 @@ def test_CFFConverter_inputs(): graphml_networks=dict(), license=dict(), nifti_volumes=dict(), - out_file=dict(usedefault=True, ), + out_file=dict( + extensions=None, + usedefault=True, + ), publisher=dict(), references=dict(), relation=dict(), rights=dict(), script_files=dict(), - species=dict(usedefault=True, ), + species=dict( + usedefault=True, + ), timeseries_files=dict(), title=dict(), tract_files=dict(), @@ -31,8 +37,14 @@ def test_CFFConverter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CFFConverter_outputs(): - output_map = dict(connectome_file=dict(), ) + output_map = dict( + connectome_file=dict( + extensions=None, + ), + ) outputs = CFFConverter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/cmtk/tests/test_auto_CreateMatrix.py b/nipype/interfaces/cmtk/tests/test_auto_CreateMatrix.py index 60e8596f5e..a9466f91be 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_CreateMatrix.py +++ b/nipype/interfaces/cmtk/tests/test_auto_CreateMatrix.py @@ -1,47 +1,110 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..cmtk import CreateMatrix def test_CreateMatrix_inputs(): input_map = dict( - count_region_intersections=dict(usedefault=True, ), - out_endpoint_array_name=dict(genfile=True, ), - out_fiber_length_std_matrix_mat_file=dict(genfile=True, ), - out_intersection_matrix_mat_file=dict(genfile=True, ), - out_matrix_file=dict(genfile=True, ), - out_matrix_mat_file=dict(usedefault=True, ), - out_mean_fiber_length_matrix_mat_file=dict(genfile=True, ), - out_median_fiber_length_matrix_mat_file=dict(genfile=True, ), - resolution_network_file=dict(mandatory=True, ), - roi_file=dict(mandatory=True, ), - tract_file=dict(mandatory=True, ), + count_region_intersections=dict( + usedefault=True, + ), + out_endpoint_array_name=dict( + extensions=None, + genfile=True, + ), + out_fiber_length_std_matrix_mat_file=dict( + extensions=None, + genfile=True, + ), + out_intersection_matrix_mat_file=dict( + extensions=None, + genfile=True, + ), + out_matrix_file=dict( + extensions=None, + genfile=True, + ), + out_matrix_mat_file=dict( + extensions=None, + usedefault=True, + ), + out_mean_fiber_length_matrix_mat_file=dict( + extensions=None, + genfile=True, + ), + out_median_fiber_length_matrix_mat_file=dict( + extensions=None, + genfile=True, + ), + resolution_network_file=dict( + extensions=None, + mandatory=True, + ), + roi_file=dict( + extensions=None, + mandatory=True, + ), + tract_file=dict( + 
extensions=None, + mandatory=True, + ), ) inputs = CreateMatrix.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CreateMatrix_outputs(): output_map = dict( - endpoint_file=dict(), - endpoint_file_mm=dict(), - fiber_label_file=dict(), - fiber_labels_noorphans=dict(), - fiber_length_file=dict(), - fiber_length_std_matrix_mat_file=dict(), + endpoint_file=dict( + extensions=None, + ), + endpoint_file_mm=dict( + extensions=None, + ), + fiber_label_file=dict( + extensions=None, + ), + fiber_labels_noorphans=dict( + extensions=None, + ), + fiber_length_file=dict( + extensions=None, + ), + fiber_length_std_matrix_mat_file=dict( + extensions=None, + ), filtered_tractographies=dict(), - filtered_tractography=dict(), - filtered_tractography_by_intersections=dict(), - intersection_matrix_file=dict(), - intersection_matrix_mat_file=dict(), + filtered_tractography=dict( + extensions=None, + ), + filtered_tractography_by_intersections=dict( + extensions=None, + ), + intersection_matrix_file=dict( + extensions=None, + ), + intersection_matrix_mat_file=dict( + extensions=None, + ), matlab_matrix_files=dict(), - matrix_file=dict(), + matrix_file=dict( + extensions=None, + ), matrix_files=dict(), - matrix_mat_file=dict(), - mean_fiber_length_matrix_mat_file=dict(), - median_fiber_length_matrix_mat_file=dict(), - stats_file=dict(), + matrix_mat_file=dict( + extensions=None, + ), + mean_fiber_length_matrix_mat_file=dict( + extensions=None, + ), + median_fiber_length_matrix_mat_file=dict( + extensions=None, + ), + stats_file=dict( + extensions=None, + ), ) outputs = CreateMatrix.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_auto_CreateNodes.py b/nipype/interfaces/cmtk/tests/test_auto_CreateNodes.py index 3635f21e59..f88950d758 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_CreateNodes.py +++ b/nipype/interfaces/cmtk/tests/test_auto_CreateNodes.py @@ -1,21 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..cmtk import CreateNodes def test_CreateNodes_inputs(): input_map = dict( - out_filename=dict(usedefault=True, ), - resolution_network_file=dict(mandatory=True, ), - roi_file=dict(mandatory=True, ), + out_filename=dict( + extensions=None, + usedefault=True, + ), + resolution_network_file=dict( + extensions=None, + mandatory=True, + ), + roi_file=dict( + extensions=None, + mandatory=True, + ), ) inputs = CreateNodes.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CreateNodes_outputs(): - output_map = dict(node_network=dict(), ) + output_map = dict( + node_network=dict( + extensions=None, + ), + ) outputs = CreateNodes.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/cmtk/tests/test_auto_MergeCNetworks.py b/nipype/interfaces/cmtk/tests/test_auto_MergeCNetworks.py index ceaa6d8dea..17f8990a08 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_MergeCNetworks.py +++ b/nipype/interfaces/cmtk/tests/test_auto_MergeCNetworks.py @@ -1,20 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import MergeCNetworks def test_MergeCNetworks_inputs(): input_map = dict( - in_files=dict(mandatory=True, ), - out_file=dict(usedefault=True, ), + in_files=dict( + mandatory=True, + ), + out_file=dict( + 
extensions=None, + usedefault=True, + ), ) inputs = MergeCNetworks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MergeCNetworks_outputs(): - output_map = dict(connectome_file=dict(), ) + output_map = dict( + connectome_file=dict( + extensions=None, + ), + ) outputs = MergeCNetworks.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/cmtk/tests/test_auto_NetworkBasedStatistic.py b/nipype/interfaces/cmtk/tests/test_auto_NetworkBasedStatistic.py index e3220e4790..975e4741cd 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_NetworkBasedStatistic.py +++ b/nipype/interfaces/cmtk/tests/test_auto_NetworkBasedStatistic.py @@ -1,29 +1,52 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..nbs import NetworkBasedStatistic def test_NetworkBasedStatistic_inputs(): input_map = dict( - edge_key=dict(usedefault=True, ), - in_group1=dict(mandatory=True, ), - in_group2=dict(mandatory=True, ), - node_position_network=dict(), - number_of_permutations=dict(usedefault=True, ), - out_nbs_network=dict(), - out_nbs_pval_network=dict(), - t_tail=dict(usedefault=True, ), - threshold=dict(usedefault=True, ), + edge_key=dict( + usedefault=True, + ), + in_group1=dict( + mandatory=True, + ), + in_group2=dict( + mandatory=True, + ), + node_position_network=dict( + extensions=None, + ), + number_of_permutations=dict( + usedefault=True, + ), + out_nbs_network=dict( + extensions=None, + ), + out_nbs_pval_network=dict( + extensions=None, + ), + t_tail=dict( + usedefault=True, + ), + threshold=dict( + usedefault=True, + ), ) inputs = NetworkBasedStatistic.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NetworkBasedStatistic_outputs(): output_map = dict( - nbs_network=dict(), - nbs_pval_network=dict(), + nbs_network=dict( + extensions=None, + ), + nbs_pval_network=dict( + extensions=None, + ), network_files=dict(), ) outputs = NetworkBasedStatistic.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_auto_NetworkXMetrics.py b/nipype/interfaces/cmtk/tests/test_auto_NetworkXMetrics.py index d9a3f0c740..d171e6ab7a 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_NetworkXMetrics.py +++ b/nipype/interfaces/cmtk/tests/test_auto_NetworkXMetrics.py @@ -1,41 +1,84 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..nx import NetworkXMetrics def test_NetworkXMetrics_inputs(): input_map = dict( - compute_clique_related_measures=dict(usedefault=True, ), - in_file=dict(mandatory=True, ), - out_edge_metrics_matlab=dict(genfile=True, ), - out_global_metrics_matlab=dict(genfile=True, ), - out_k_core=dict(usedefault=True, ), - out_k_crust=dict(usedefault=True, ), - out_k_shell=dict(usedefault=True, ), - out_node_metrics_matlab=dict(genfile=True, ), - out_pickled_extra_measures=dict(usedefault=True, ), - treat_as_weighted_graph=dict(usedefault=True, ), + compute_clique_related_measures=dict( + usedefault=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + out_edge_metrics_matlab=dict( + extensions=None, + genfile=True, + ), + out_global_metrics_matlab=dict( + extensions=None, + genfile=True, + ), + out_k_core=dict( + extensions=None, + usedefault=True, + ), + out_k_crust=dict( + extensions=None, + usedefault=True, + ), + 
out_k_shell=dict( + extensions=None, + usedefault=True, + ), + out_node_metrics_matlab=dict( + extensions=None, + genfile=True, + ), + out_pickled_extra_measures=dict( + extensions=None, + usedefault=True, + ), + treat_as_weighted_graph=dict( + usedefault=True, + ), ) inputs = NetworkXMetrics.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NetworkXMetrics_outputs(): output_map = dict( edge_measure_networks=dict(), - edge_measures_matlab=dict(), - global_measures_matlab=dict(), + edge_measures_matlab=dict( + extensions=None, + ), + global_measures_matlab=dict( + extensions=None, + ), gpickled_network_files=dict(), - k_core=dict(), - k_crust=dict(), + k_core=dict( + extensions=None, + ), + k_crust=dict( + extensions=None, + ), k_networks=dict(), - k_shell=dict(), + k_shell=dict( + extensions=None, + ), matlab_dict_measures=dict(), matlab_matrix_files=dict(), node_measure_networks=dict(), - node_measures_matlab=dict(), - pickled_extra_measures=dict(), + node_measures_matlab=dict( + extensions=None, + ), + pickled_extra_measures=dict( + extensions=None, + ), ) outputs = NetworkXMetrics.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_auto_Parcellate.py b/nipype/interfaces/cmtk/tests/test_auto_Parcellate.py index edcdf2e7a1..800b5b516b 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_Parcellate.py +++ b/nipype/interfaces/cmtk/tests/test_auto_Parcellate.py @@ -1,15 +1,23 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..parcellation import Parcellate def test_Parcellate_inputs(): input_map = dict( - dilation=dict(usedefault=True, ), + dilation=dict( + usedefault=True, + ), freesurfer_dir=dict(), - out_roi_file=dict(genfile=True, ), - parcellation_name=dict(usedefault=True, ), - subject_id=dict(mandatory=True, ), + out_roi_file=dict( + extensions=None, + genfile=True, + ), + parcellation_name=dict( + usedefault=True, + ), + subject_id=dict( + mandatory=True, + ), subjects_dir=dict(), ) inputs = Parcellate.input_spec() @@ -17,16 +25,34 @@ def test_Parcellate_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Parcellate_outputs(): output_map = dict( - aseg_file=dict(), - cc_unknown_file=dict(), - dilated_roi_file_in_structural_space=dict(), - ribbon_file=dict(), - roi_file=dict(), - roi_file_in_structural_space=dict(), - roiv_file=dict(), - white_matter_mask_file=dict(), + aseg_file=dict( + extensions=None, + ), + cc_unknown_file=dict( + extensions=None, + ), + dilated_roi_file_in_structural_space=dict( + extensions=None, + ), + ribbon_file=dict( + extensions=None, + ), + roi_file=dict( + extensions=None, + ), + roi_file_in_structural_space=dict( + extensions=None, + ), + roiv_file=dict( + extensions=None, + ), + white_matter_mask_file=dict( + extensions=None, + ), ) outputs = Parcellate.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py b/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py index dd2ce50aec..54fd9e46e9 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py +++ b/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py @@ -1,26 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..cmtk import ROIGen def test_ROIGen_inputs(): input_map = dict( - LUT_file=dict(xor=['use_freesurfer_LUT'], ), - 
aparc_aseg_file=dict(mandatory=True, ), - freesurfer_dir=dict(requires=['use_freesurfer_LUT'], ), - out_dict_file=dict(genfile=True, ), - out_roi_file=dict(genfile=True, ), - use_freesurfer_LUT=dict(xor=['LUT_file'], ), + LUT_file=dict( + extensions=None, + xor=["use_freesurfer_LUT"], + ), + aparc_aseg_file=dict( + extensions=None, + mandatory=True, + ), + freesurfer_dir=dict( + requires=["use_freesurfer_LUT"], + ), + out_dict_file=dict( + extensions=None, + genfile=True, + ), + out_roi_file=dict( + extensions=None, + genfile=True, + ), + use_freesurfer_LUT=dict( + xor=["LUT_file"], + ), ) inputs = ROIGen.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ROIGen_outputs(): output_map = dict( - dict_file=dict(), - roi_file=dict(), + dict_file=dict( + extensions=None, + ), + roi_file=dict( + extensions=None, + ), ) outputs = ROIGen.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_nbs.py b/nipype/interfaces/cmtk/tests/test_nbs.py index be3008fb09..efc8aed678 100644 --- a/nipype/interfaces/cmtk/tests/test_nbs.py +++ b/nipype/interfaces/cmtk/tests/test_nbs.py @@ -1,14 +1,14 @@ -from __future__ import unicode_literals from ..nbs import NetworkBasedStatistic from ....utils.misc import package_check import numpy as np import networkx as nx +import pickle import pytest have_cv = True try: - package_check('cviewer') -except Exception as e: + package_check("cviewer") +except Exception: have_cv = False @@ -16,18 +16,18 @@ def creating_graphs(tmpdir): graphlist = [] graphnames = ["name" + str(i) for i in range(6)] - for idx, name in enumerate(graphnames): + for idx in range(len(graphnames)): graph = np.random.rand(10, 10) - G = nx.from_numpy_matrix(graph) - out_file = tmpdir.strpath + graphnames[idx] + '.pck' + G = nx.from_numpy_array(graph) + out_file = tmpdir.strpath + graphnames[idx] + ".pck" # Save as pck file - nx.write_gpickle(G, out_file) + with open(out_file, 'wb') as f: + pickle.dump(G, f, pickle.HIGHEST_PROTOCOL) graphlist.append(out_file) return graphlist -@pytest.mark.skipif( - have_cv, reason="tests for import error, cviewer available") +@pytest.mark.skipif(have_cv, reason="tests for import error, cviewer available") def test_importerror(creating_graphs, tmpdir): tmpdir.chdir() graphlist = creating_graphs @@ -39,7 +39,7 @@ def test_importerror(creating_graphs, tmpdir): nbs.inputs.in_group2 = group2 nbs.inputs.edge_key = "weight" - with pytest.raises(ImportError) as e: + with pytest.raises(ImportError): nbs.run() diff --git a/nipype/interfaces/dcm2nii.py b/nipype/interfaces/dcm2nii.py index 65771873a5..baeb21c1e8 100644 --- a/nipype/interfaces/dcm2nii.py +++ b/nipype/interfaces/dcm2nii.py @@ -1,27 +1,35 @@ -# -*- coding: utf-8 -*- -"""The dcm2nii module provides basic functions for dicom conversion -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, open +"""dcm2nii converts images from the proprietary scanner DICOM format to NIfTI.""" + import os import re from copy import deepcopy +import itertools as it +import glob +from glob import iglob from ..utils.filemanip import split_filename -from .base import (CommandLine, CommandLineInputSpec, InputMultiPath, traits, - TraitedSpec, OutputMultiPath, isdefined, File, Directory, - PackageInfo) +from .base import ( + CommandLine, + CommandLineInputSpec, + InputMultiPath, + traits, + TraitedSpec, + OutputMultiPath, + isdefined, + File, + Directory, + 
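# Editorial note on the test fixture change above: networkx 3.0 removed
# from_numpy_matrix, and from_numpy_array is the drop-in replacement; array
# entries become 'weight' edge attributes (illustration):
import numpy as np
import networkx as nx

A = np.array([[0.0, 2.5], [2.5, 0.0]])
G = nx.from_numpy_array(A)
assert G[0][1]["weight"] == 2.5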
PackageInfo, +) class Info(PackageInfo): """Handle dcm2niix version information""" - version_cmd = 'dcm2niix' + version_cmd = "dcm2niix" @staticmethod def parse_version(raw_info): - m = re.search(r'version (\S+)', raw_info) + m = re.search(r"version (\S+)", raw_info) return m.groups()[0] if m else None @@ -32,64 +40,63 @@ class Dcm2niiInputSpec(CommandLineInputSpec): position=-1, copyfile=False, mandatory=True, - xor=['source_dir']) + xor=["source_dir"], + ) source_dir = Directory( - exists=True, - argstr="%s", - position=-1, - mandatory=True, - xor=['source_names']) + exists=True, argstr="%s", position=-1, mandatory=True, xor=["source_names"] + ) anonymize = traits.Bool( - True, - argstr='-a', - usedefault=True, - desc="Remove identifying information") + True, argstr="-a", usedefault=True, desc="Remove identifying information" + ) config_file = File( exists=True, argstr="-b %s", genfile=True, - desc="Load settings from specified inifile") + desc="Load settings from specified inifile", + ) collapse_folders = traits.Bool( - True, argstr='-c', usedefault=True, desc="Collapse input folders") + True, argstr="-c", usedefault=True, desc="Collapse input folders" + ) date_in_filename = traits.Bool( - True, argstr='-d', usedefault=True, desc="Date in filename") + True, argstr="-d", usedefault=True, desc="Date in filename" + ) events_in_filename = traits.Bool( - True, - argstr='-e', - usedefault=True, - desc="Events (series/acq) in filename") + True, argstr="-e", usedefault=True, desc="Events (series/acq) in filename" + ) source_in_filename = traits.Bool( - False, argstr='-f', usedefault=True, desc="Source filename") + False, argstr="-f", usedefault=True, desc="Source filename" + ) gzip_output = traits.Bool( - False, argstr='-g', usedefault=True, desc="Gzip output (.gz)") + False, argstr="-g", usedefault=True, desc="Gzip output (.gz)" + ) id_in_filename = traits.Bool( - False, argstr='-i', usedefault=True, desc="ID in filename") + False, argstr="-i", usedefault=True, desc="ID in filename" + ) nii_output = traits.Bool( True, - argstr='-n', + argstr="-n", usedefault=True, - desc="Save as .nii - if no, create .hdr/.img pair") + desc="Save as .nii - if no, create .hdr/.img pair", + ) output_dir = Directory( exists=True, - argstr='-o %s', + argstr="-o %s", genfile=True, - desc="Output dir - if unspecified, source directory is used") + desc="Output dir - if unspecified, source directory is used", + ) protocol_in_filename = traits.Bool( - True, argstr='-p', usedefault=True, desc="Protocol in filename") - reorient = traits.Bool( - argstr='-r', desc="Reorient image to nearest orthogonal") + True, argstr="-p", usedefault=True, desc="Protocol in filename" + ) + reorient = traits.Bool(argstr="-r", desc="Reorient image to nearest orthogonal") spm_analyze = traits.Bool( - argstr='-s', xor=['nii_output'], desc="SPM2/Analyze not SPM5/NIfTI") + argstr="-s", xor=["nii_output"], desc="SPM2/Analyze not SPM5/NIfTI" + ) convert_all_pars = traits.Bool( - True, - argstr='-v', - usedefault=True, - desc="Convert every image in directory") + True, argstr="-v", usedefault=True, desc="Convert every image in directory" + ) reorient_and_crop = traits.Bool( - False, - argstr='-x', - usedefault=True, - desc="Reorient and crop 3D images") + False, argstr="-x", usedefault=True, desc="Reorient and crop 3D images" + ) class Dcm2niiOutputSpec(TraitedSpec): @@ -113,38 +120,50 @@ class Dcm2nii(CommandLine): >>> converter.inputs.output_dir = '.' 
>>> converter.cmdline # doctest: +ELLIPSIS 'dcm2nii -a y -c y -b config.ini -v y -d y -e y -g y -i n -n y -o . -p y -x n -f n functional_1.dcm' -""" + """ input_spec = Dcm2niiInputSpec output_spec = Dcm2niiOutputSpec - _cmd = 'dcm2nii' + _cmd = "dcm2nii" def _format_arg(self, opt, spec, val): if opt in [ - 'anonymize', 'collapse_folders', 'date_in_filename', - 'events_in_filename', 'source_in_filename', 'gzip_output', - 'id_in_filename', 'nii_output', 'protocol_in_filename', - 'reorient', 'spm_analyze', 'convert_all_pars', - 'reorient_and_crop' + "anonymize", + "collapse_folders", + "date_in_filename", + "events_in_filename", + "source_in_filename", + "gzip_output", + "id_in_filename", + "nii_output", + "protocol_in_filename", + "reorient", + "spm_analyze", + "convert_all_pars", + "reorient_and_crop", ]: spec = deepcopy(spec) if val: - spec.argstr += ' y' + spec.argstr += " y" else: - spec.argstr += ' n' + spec.argstr += " n" val = True - if opt == 'source_names': + if opt == "source_names": return spec.argstr % val[0] - return super(Dcm2nii, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _run_interface(self, runtime): self._config_created = False - new_runtime = super(Dcm2nii, self)._run_interface(runtime) - (self.output_files, self.reoriented_files, - self.reoriented_and_cropped_files, self.bvecs, - self.bvals) = self._parse_stdout(new_runtime.stdout) + new_runtime = super()._run_interface(runtime) + ( + self.output_files, + self.reoriented_files, + self.reoriented_and_cropped_files, + self.bvecs, + self.bvals, + ) = self._parse_stdout(new_runtime.stdout) if self._config_created: - os.remove('config.ini') + os.remove("config.ini") return new_runtime def _parse_stdout(self, stdout): @@ -159,12 +178,11 @@ def _parse_stdout(self, stdout): if not skip: out_file = None if line.startswith("Saving "): - out_file = line[len("Saving "):] + out_file = line[len("Saving ") :] elif line.startswith("GZip..."): # for gzipped output files are not absolute - fname = line[len("GZip..."):] - if len(files) and os.path.basename( - files[-1]) == fname[:-3]: + fname = line[len("GZip...") :] + if len(files) and os.path.basename(files[-1]) == fname[:-3]: # we are seeing a previously reported conversion # as being saved in gzipped form -- remove the # obsolete, uncompressed file @@ -172,7 +190,7 @@ def _parse_stdout(self, stdout): if isdefined(self.inputs.output_dir): output_dir = self.inputs.output_dir else: - output_dir = self._gen_filename('output_dir') + output_dir = self._gen_filename("output_dir") out_file = os.path.abspath(os.path.join(output_dir, fname)) elif line.startswith("Number of diffusion directions "): if last_added_file: @@ -184,15 +202,15 @@ def _parse_stdout(self, stdout): # just above for l in (bvecs, bvals): l[-1] = os.path.join( - os.path.dirname(l[-1]), - 'x%s' % (os.path.basename(l[-1]), )) - elif re.search('.*->(.*)', line): - val = re.search('.*->(.*)', line) + os.path.dirname(l[-1]), f"x{os.path.basename(l[-1])}" + ) + elif re.search(".*->(.*)", line): + val = re.search(".*->(.*)", line) val = val.groups()[0] if isdefined(self.inputs.output_dir): output_dir = self.inputs.output_dir else: - output_dir = self._gen_filename('output_dir') + output_dir = self._gen_filename("output_dir") val = os.path.join(output_dir, val) if os.path.exists(val): out_file = val @@ -204,18 +222,22 @@ def _parse_stdout(self, stdout): continue if line.startswith("Reorienting as "): - reoriented_files.append(line[len("Reorienting as "):]) + 
reoriented_files.append(line[len("Reorienting as ") :]) skip = True continue elif line.startswith("Cropping NIfTI/Analyze image "): base, filename = os.path.split( - line[len("Cropping NIfTI/Analyze image "):]) + line[len("Cropping NIfTI/Analyze image ") :] + ) filename = "c" + filename - if os.path.exists(os.path.join( - base, filename)) or self.inputs.reorient_and_crop: + if ( + os.path.exists(os.path.join(base, filename)) + or self.inputs.reorient_and_crop + ): # if reorient&crop is true but the file doesn't exist, this errors when setting outputs reoriented_and_cropped_files.append( - os.path.join(base, filename)) + os.path.join(base, filename) + ) skip = True continue @@ -224,18 +246,17 @@ def _parse_stdout(self, stdout): def _list_outputs(self): outputs = self.output_spec().get() - outputs['converted_files'] = self.output_files - outputs['reoriented_files'] = self.reoriented_files - outputs[ - 'reoriented_and_cropped_files'] = self.reoriented_and_cropped_files - outputs['bvecs'] = self.bvecs - outputs['bvals'] = self.bvals + outputs["converted_files"] = self.output_files + outputs["reoriented_files"] = self.reoriented_files + outputs["reoriented_and_cropped_files"] = self.reoriented_and_cropped_files + outputs["bvecs"] = self.bvecs + outputs["bvals"] = self.bvals return outputs def _gen_filename(self, name): - if name == 'output_dir': + if name == "output_dir": return os.getcwd() - elif name == 'config_file': + elif name == "config_file": self._config_created = True config_file = "config.ini" with open(config_file, "w") as f: @@ -252,105 +273,115 @@ class Dcm2niixInputSpec(CommandLineInputSpec): position=-1, copyfile=False, mandatory=True, - desc=('A set of filenames to be converted. Note that the current ' - 'version (1.0.20180328) of dcm2niix converts any files in the ' - 'directory. To only convert specific files they should be in an ' - 'isolated directory'), - xor=['source_dir']) + desc=( + "A set of filenames to be converted. Note that the current " + "version (1.0.20180328) of dcm2niix converts any files in the " + "directory. 
To only convert specific files they should be in an " + "isolated directory" + ), + xor=["source_dir"], + ) source_dir = Directory( exists=True, argstr="%s", position=-1, mandatory=True, - desc='A directory containing dicom files to be converted', - xor=['source_names']) + desc="A directory containing dicom files to be converted", + xor=["source_names"], + ) out_filename = traits.Str( argstr="-f %s", desc="Output filename template (" - "%a=antenna (coil) number, " - "%c=comments, " - "%d=description, " - "%e=echo number, " - "%f=folder name, " - "%i=ID of patient, " - "%j=seriesInstanceUID, " - "%k=studyInstanceUID, " - "%m=manufacturer, " - "%n=name of patient, " - "%p=protocol, " - "%s=series number, " - "%t=time, " - "%u=acquisition number, " - "%v=vendor, " - "%x=study ID; " - "%z=sequence name)") + "%a=antenna (coil) number, " + "%c=comments, " + "%d=description, " + "%e=echo number, " + "%f=folder name, " + "%i=ID of patient, " + "%j=seriesInstanceUID, " + "%k=studyInstanceUID, " + "%m=manufacturer, " + "%n=name of patient, " + "%p=protocol, " + "%s=series number, " + "%t=time, " + "%u=acquisition number, " + "%v=vendor, " + "%x=study ID; " + "%z=sequence name)", + ) output_dir = Directory( - ".", - usedefault=True, - exists=True, - argstr='-o %s', - desc="Output directory") + ".", usedefault=True, exists=True, argstr="-o %s", desc="Output directory" + ) bids_format = traits.Bool( - True, - argstr='-b', - usedefault=True, - desc="Create a BIDS sidecar file") + True, argstr="-b", usedefault=True, desc="Create a BIDS sidecar file" + ) anon_bids = traits.Bool( - argstr='-ba', - requires=["bids_format"], - desc="Anonymize BIDS") + argstr="-ba", requires=["bids_format"], desc="Anonymize BIDS" + ) compress = traits.Enum( - 'y', 'i', 'n', '3', - argstr='-z %s', + "y", + "i", + "n", + "3", + argstr="-z %s", usedefault=True, - desc="Gzip compress images - [y=pigz, i=internal, n=no, 3=no,3D]") - merge_imgs = traits.Bool( - False, - argstr='-m', + desc="Gzip compress images - [y=pigz, i=internal, n=no, 3=no,3D]", + ) + merge_imgs = traits.Enum( + 0, + 1, + 2, + default=0, usedefault=True, - desc="merge 2D slices from same series") + argstr="-m %d", + desc="merge 2D slices from same series regardless of echo, exposure, etc. 
- [0=no, 1=yes, 2=auto]", + ) single_file = traits.Bool( - False, - argstr='-s', - usedefault=True, - desc="Single file mode") - verbose = traits.Bool( - False, - argstr='-v', - usedefault=True, - desc="Verbose output") + False, argstr="-s", usedefault=True, desc="Single file mode" + ) + verbose = traits.Bool(False, argstr="-v", usedefault=True, desc="Verbose output") crop = traits.Bool( - False, - argstr='-x', - usedefault=True, - desc="Crop 3D T1 acquisitions") + False, argstr="-x", usedefault=True, desc="Crop 3D T1 acquisitions" + ) has_private = traits.Bool( False, - argstr='-t', + argstr="-t", usedefault=True, - desc="Flag if text notes include private patient details") + desc="Text notes including private patient details", + ) compression = traits.Enum( - 1, 2, 3, 4, 5, 6, 7, 8, 9, - argstr='-%d', - desc="Gz compression level (1=fastest, 9=smallest)") - comment = traits.Str( - argstr='-c %s', - desc="Comment stored as NIfTI aux_file") + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + argstr="-%d", + desc="Gz compression level (1=fastest, 9=smallest)", + ) + comment = traits.Str(argstr="-c %s", desc="Comment stored as NIfTI aux_file") ignore_deriv = traits.Bool( - argstr='-i', - desc="Ignore derived, localizer and 2D images") + argstr="-i", desc="Ignore derived, localizer and 2D images" + ) series_numbers = InputMultiPath( traits.Str(), - argstr='-n %s...', - desc="Selectively convert by series number - can be used up to 16 times") + argstr="-n %s...", + desc="Selectively convert by series number - can be used up to 16 times", + ) philips_float = traits.Bool( - argstr='-p', - desc="Philips precise float (not display) scaling") + argstr="-p", desc="Philips precise float (not display) scaling" + ) + to_nrrd = traits.Bool(argstr="-e", desc="Export as NRRD instead of NIfTI") class Dcm2niixOutputSpec(TraitedSpec): converted_files = OutputMultiPath(File(exists=True)) bvecs = OutputMultiPath(File(exists=True)) + mvecs = OutputMultiPath(File(exists=True)) bvals = OutputMultiPath(File(exists=True)) bids = OutputMultiPath(File(exists=True)) @@ -367,95 +398,120 @@ class Dcm2niix(CommandLine): >>> converter.inputs.compression = 5 >>> converter.inputs.output_dir = 'ds005' >>> converter.cmdline - 'dcm2niix -b y -z y -5 -x n -t n -m n -o ds005 -s n -v n dicomdir' + 'dcm2niix -b y -z y -5 -x n -t n -m 0 -o ds005 -s n -v n dicomdir' >>> converter.run() # doctest: +SKIP In the example below, we note that the current version of dcm2niix converts any files in the directory containing the files in the list. We also do not support nested filenames with this option. **Thus all files must have a common root directory.** - + >>> converter = Dcm2niix() >>> converter.inputs.source_names = ['functional_1.dcm', 'functional_2.dcm'] >>> converter.inputs.compression = 5 >>> converter.inputs.output_dir = 'ds005' >>> converter.cmdline - 'dcm2niix -b y -z y -5 -x n -t n -m n -o ds005 -s n -v n .' + 'dcm2niix -b y -z y -5 -x n -t n -m 0 -o ds005 -s n -v n .' 
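+
+    The NRRD export flag added in this version can be toggled the same way;
+    a minimal sketch (the resulting command line is not reproduced here):
+
+    >>> converter.inputs.to_nrrd = True  # doctest: +SKIP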
>>> converter.run() # doctest: +SKIP
    """

    input_spec = Dcm2niixInputSpec
    output_spec = Dcm2niixOutputSpec
-    _cmd = 'dcm2niix'
+    _cmd = "dcm2niix"

    @property
    def version(self):
        return Info.version()

    def _format_arg(self, opt, spec, val):
-        bools = ['bids_format', 'merge_imgs', 'single_file', 'verbose', 'crop',
-                 'has_private', 'anon_bids', 'ignore_deriv', 'philips_float']
+        bools = [
+            "bids_format",
+            "single_file",
+            "verbose",
+            "crop",
+            "has_private",
+            "anon_bids",
+            "ignore_deriv",
+            "philips_float",
+            "to_nrrd",
+        ]
        if opt in bools:
            spec = deepcopy(spec)
            if val:
-                spec.argstr += ' y'
+                spec.argstr += " y"
            else:
-                spec.argstr += ' n'
+                spec.argstr += " n"
                val = True
-        if opt == 'source_names':
-            return spec.argstr % (os.path.dirname(val[0]) or '.')
-        return super(Dcm2niix, self)._format_arg(opt, spec, val)
+        if opt == "source_names":
+            return spec.argstr % (os.path.dirname(val[0]) or ".")
+        return super()._format_arg(opt, spec, val)

    def _run_interface(self, runtime):
        # may use return code 1 despite conversion
-        runtime = super(Dcm2niix, self)._run_interface(
-            runtime, correct_return_codes=(0, 1, ))
-        if self.inputs.bids_format:
-            (self.output_files, self.bvecs, self.bvals,
-             self.bids) = self._parse_stdout(runtime.stdout)
-        else:
-            (self.output_files, self.bvecs, self.bvals) = self._parse_stdout(
-                runtime.stdout)
+        runtime = super()._run_interface(runtime, correct_return_codes=(0, 1))
+        self._parse_files(self._parse_stdout(runtime.stdout))
        return runtime

    def _parse_stdout(self, stdout):
-        files = []
-        bvecs = []
-        bvals = []
-        bids = []
-        skip = False
-        find_b = False
+        filenames = []
        for line in stdout.split("\n"):
-            if not skip:
-                out_file = None
-                if line.startswith("Convert "):  # output
-                    fname = str(re.search('\S+/\S+', line).group(0))
-                    out_file = os.path.abspath(fname)
-                    # extract bvals
-                    if find_b:
-                        bvecs.append(out_file + ".bvec")
-                        bvals.append(out_file + ".bval")
-                        find_b = False
-                # next scan will have bvals/bvecs
-                elif 'DTI gradients' in line or 'DTI gradient directions' in line or 'DTI vectors' in line:
-                    find_b = True
-                if out_file:
-                    ext = '.nii' if self.inputs.compress == 'n' else '.nii.gz'
-                    files.append(out_file + ext)
-                    if self.inputs.bids_format:
-                        bids.append(out_file + ".json")
-            skip = False
-        # just return what was done
-        if not bids:
-            return files, bvecs, bvals
+            if line.startswith("Convert "):  # output
+                fname = str(re.search(r"\S+/\S+", line).group(0))
+                filenames.append(os.path.abspath(fname))
+        return filenames
+
+    def _parse_files(self, filenames):
+        outfiles, bvals, bvecs, mvecs, bids = [], [], [], [], []
+        outtypes = [".bval", ".bvec", ".mvec", ".json", ".txt"]
+        if self.inputs.to_nrrd:
+            outtypes += [".nrrd", ".nhdr", ".raw.gz"]
        else:
-            return files, bvecs, bvals, bids
+            outtypes += [".nii", ".nii.gz"]
+
+        for filename in filenames:
+            # search for relevant files and sort them accordingly
+            for fl in search_files(filename, outtypes, self.inputs.crop):
+                if fl.endswith((".nii", ".gz", ".nrrd", ".nhdr")):
+                    outfiles.append(fl)
+                elif fl.endswith(".bval"):
+                    bvals.append(fl)
+                elif fl.endswith(".bvec"):
+                    bvecs.append(fl)
+                elif fl.endswith(".mvec"):
+                    mvecs.append(fl)
+                elif fl.endswith((".json", ".txt")):
+                    bids.append(fl)
+
+        # In Siemens mosaic conversions, nipype misreads the dcm2niix output and
+        # generates a duplicate list of results; the next line removes the
+        # duplicates from the output file list.
+        outfiles = list(dict.fromkeys(outfiles))
+
+        self.output_files = outfiles
+        self.bvecs = bvecs
+        self.mvecs = mvecs
+        self.bvals = bvals
+        self.bids = bids

    def 
_list_outputs(self): outputs = self.output_spec().get() - outputs['converted_files'] = self.output_files - outputs['bvecs'] = self.bvecs - outputs['bvals'] = self.bvals - if self.inputs.bids_format: - outputs['bids'] = self.bids + outputs["converted_files"] = self.output_files + outputs["bvecs"] = self.bvecs + outputs["bvals"] = self.bvals + outputs["mvecs"] = self.mvecs + outputs["bids"] = self.bids return outputs + + +# https://stackoverflow.com/a/4829130 +def search_files(prefix, outtypes, search_crop): + found = it.chain.from_iterable( + iglob(glob.escape(prefix + outtype)) for outtype in outtypes + ) + if search_crop: + found = it.chain( + it.chain.from_iterable( + iglob(glob.escape(prefix) + "_Crop_*" + outtype) for outtype in outtypes + ), + found, + ) + return found diff --git a/nipype/interfaces/dcmstack.py b/nipype/interfaces/dcmstack.py index 626cabe6cf..7664097c58 100644 --- a/nipype/interfaces/dcmstack.py +++ b/nipype/interfaces/dcmstack.py @@ -1,8 +1,4 @@ -# -*- coding: utf-8 -*- -"""Provides interfaces to various commands provided by dcmstack -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) +"""dcmstack allows series of DICOM images to be stacked into multi-dimensional arrays.""" import os from os import path as op @@ -11,15 +7,23 @@ from glob import glob import nibabel as nb -import imghdr - -from .base import (TraitedSpec, DynamicTraitedSpec, InputMultiPath, File, - Directory, traits, BaseInterface, isdefined, Undefined) -from ..utils import NUMPY_MMAP +import puremagic + +from .base import ( + TraitedSpec, + DynamicTraitedSpec, + InputMultiPath, + File, + Directory, + traits, + BaseInterface, + isdefined, + Undefined, +) have_dcmstack = True try: - import dicom + import pydicom import dcmstack from dcmstack.dcmmeta import NiftiWrapper except ImportError: @@ -29,28 +33,28 @@ def sanitize_path_comp(path_comp): result = [] for char in path_comp: - if char not in string.letters + string.digits + '-_.': - result.append('_') + if char not in string.ascii_letters + string.digits + "-_.": + result.append("_") else: result.append(char) - return ''.join(result) + return "".join(result) class NiftiGeneratorBaseInputSpec(TraitedSpec): - out_format = traits.Str(desc="String which can be formatted with " - "meta data to create the output filename(s)") - out_ext = traits.Str( - '.nii.gz', usedefault=True, desc="Determines output file type") - out_path = Directory( - desc='output path, current working directory if not set') + out_format = traits.Str( + desc="String which can be formatted with " + "meta data to create the output filename(s)" + ) + out_ext = traits.Str(".nii.gz", usedefault=True, desc="Determines output file type") + out_path = Directory(desc="output path, current working directory if not set") class NiftiGeneratorBase(BaseInterface): - '''Base class for interfaces that produce Nifti files, potentially with - embedded meta data.''' + """Base class for interfaces that produce Nifti files, potentially with + embedded meta data.""" def _get_out_path(self, meta, idx=None): - '''Return the output path for the gernerated Nifti.''' + """Return the output path for the generated Nifti.""" if self.inputs.out_format: out_fmt = self.inputs.out_format else: @@ -58,16 +62,16 @@ def _get_out_path(self, meta, idx=None): # with the provided meta data. 
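+            # For illustration (hypothetical values): meta like
+            # {'SeriesNumber': 3, 'ProtocolName': 'T1w'} makes out_fmt
+            # '%(SeriesNumber)03d-%(ProtocolName)s', i.e. '003-T1w', to which
+            # out_ext (default '.nii.gz') is appended afterwards.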
out_fmt = [] if idx is not None: - out_fmt.append('%03d' % idx) - if 'SeriesNumber' in meta: - out_fmt.append('%(SeriesNumber)03d') - if 'ProtocolName' in meta: - out_fmt.append('%(ProtocolName)s') - elif 'SeriesDescription' in meta: - out_fmt.append('%(SeriesDescription)s') + out_fmt.append("%03d" % idx) + if "SeriesNumber" in meta: + out_fmt.append("%(SeriesNumber)03d") + if "ProtocolName" in meta: + out_fmt.append("%(ProtocolName)s") + elif "SeriesDescription" in meta: + out_fmt.append("%(SeriesDescription)s") else: - out_fmt.append('sequence') - out_fmt = '-'.join(out_fmt) + out_fmt.append("sequence") + out_fmt = "-".join(out_fmt) out_fn = (out_fmt % meta) + self.inputs.out_ext out_fn = sanitize_path_comp(out_fn) @@ -92,16 +96,18 @@ class DcmStackInputSpec(NiftiGeneratorBaseInputSpec): InputMultiPath(File(exists=True)), Directory(exists=True), traits.Str(), - mandatory=True) + mandatory=True, + ) embed_meta = traits.Bool(desc="Embed DICOM meta data into result") - exclude_regexes = traits.List(desc="Meta data to exclude, suplementing " - "any default exclude filters") - include_regexes = traits.List(desc="Meta data to include, overriding any " - "exclude filters") + exclude_regexes = traits.List( + desc="Meta data to exclude, supplementing any default exclude filters" + ) + include_regexes = traits.List( + desc="Meta data to include, overriding any exclude filters" + ) force_read = traits.Bool( - True, - usedefault=True, - desc=('Force reading files without DICM marker')) + True, usedefault=True, desc=("Force reading files without DICM marker") + ) class DcmStackOutputSpec(TraitedSpec): @@ -109,7 +115,7 @@ class DcmStackOutputSpec(TraitedSpec): class DcmStack(NiftiGeneratorBase): - '''Create one Nifti file from a set of DICOM files. Can optionally embed + """Create one Nifti file from a set of DICOM files. Can optionally embed meta data. Example @@ -121,14 +127,15 @@ class DcmStack(NiftiGeneratorBase): >>> stacker.run() # doctest: +SKIP >>> result.outputs.out_file # doctest: +SKIP '/path/to/cwd/sequence.nii.gz' - ''' + """ + input_spec = DcmStackInputSpec output_spec = DcmStackOutputSpec def _get_filelist(self, trait_input): if isinstance(trait_input, (str, bytes)): if op.isdir(trait_input): - return glob(op.join(trait_input, '*.dcm')) + return glob(op.join(trait_input, "*.dcm")) else: return glob(trait_input) @@ -142,18 +149,17 @@ def _run_interface(self, runtime): exclude_regexes = dcmstack.default_key_excl_res if isdefined(self.inputs.exclude_regexes): exclude_regexes += self.inputs.exclude_regexes - meta_filter = dcmstack.make_key_regex_filter(exclude_regexes, - include_regexes) + meta_filter = dcmstack.make_key_regex_filter(exclude_regexes, include_regexes) stack = dcmstack.DicomStack(meta_filter=meta_filter) for src_path in src_paths: - if not imghdr.what(src_path) == "gif": - src_dcm = dicom.read_file( - src_path, force=self.inputs.force_read) + if puremagic.what(src_path) != "gif": + src_dcm = pydicom.dcmread(src_path, force=self.inputs.force_read) stack.add_dcm(src_dcm) nii = stack.to_nifti(embed_meta=True) nw = NiftiWrapper(nii) - self.out_path = \ - self._get_out_path(nw.meta_ext.get_class_dict(('global', 'const'))) + self.out_path = self._get_out_path( + nw.meta_ext.get_class_dict(("global", "const")) + ) if not self.inputs.embed_meta: nw.remove_extension() nb.save(nii, self.out_path) @@ -170,8 +176,8 @@ class GroupAndStackOutputSpec(TraitedSpec): class GroupAndStack(DcmStack): - '''Create (potentially) multiple Nifti files for a set of DICOM files. 
- ''' + """Create (potentially) multiple Nifti files for a set of DICOM files.""" + input_spec = DcmStackInputSpec output_spec = GroupAndStackOutputSpec @@ -182,7 +188,7 @@ def _run_interface(self, runtime): self.out_list = [] for key, stack in list(stacks.items()): nw = NiftiWrapper(stack.to_nifti(embed_meta=True)) - const_meta = nw.meta_ext.get_class_dict(('global', 'const')) + const_meta = nw.meta_ext.get_class_dict(("global", "const")) out_path = self._get_out_path(const_meta) if not self.inputs.embed_meta: nw.remove_extension() @@ -198,19 +204,22 @@ def _list_outputs(self): class LookupMetaInputSpec(TraitedSpec): - in_file = File(mandatory=True, exists=True, desc='The input Nifti file') + in_file = File(mandatory=True, exists=True, desc="The input Nifti file") meta_keys = traits.Either( traits.List(), traits.Dict(), mandatory=True, - desc=("List of meta data keys to lookup, or a " - "dict where keys specify the meta data " - "keys to lookup and the values specify " - "the output names")) + desc=( + "List of meta data keys to lookup, or a " + "dict where keys specify the meta data " + "keys to lookup and the values specify " + "the output names" + ), + ) class LookupMeta(BaseInterface): - '''Lookup meta data values from a Nifti with embedded meta data. + """Lookup meta data values from a Nifti with embedded meta data. Example ------- @@ -225,7 +234,8 @@ class LookupMeta(BaseInterface): 9500.0 >>> result.outputs.TE # doctest: +SKIP 95.0 - ''' + """ + input_spec = LookupMetaInputSpec output_spec = DynamicTraitedSpec @@ -239,7 +249,7 @@ def _make_name_map(self): def _outputs(self): self._make_name_map() - outputs = super(LookupMeta, self)._outputs() + outputs = super()._outputs() undefined_traits = {} for out_name in list(self._meta_keys.values()): outputs.add_trait(out_name, traits.Any) @@ -251,7 +261,7 @@ def _outputs(self): return outputs def _run_interface(self, runtime): - # If the 'meta_keys' input is a list, covert it to a dict + # If the 'meta_keys' input is a list, convert it to a dict self._make_name_map() nw = NiftiWrapper.from_filename(self.inputs.in_file) self.result = {} @@ -269,11 +279,12 @@ def _list_outputs(self): class CopyMetaInputSpec(TraitedSpec): src_file = File(mandatory=True, exists=True) dest_file = File(mandatory=True, exists=True) - include_classes = traits.List(desc="List of specific meta data " - "classifications to include. If not " - "specified include everything.") - exclude_classes = traits.List(desc="List of meta data " - "classifications to exclude") + include_classes = traits.List( + desc="List of specific meta data " + "classifications to include. If not " + "specified include everything." + ) + exclude_classes = traits.List(desc="List of meta data classifications to exclude") class CopyMetaOutputSpec(TraitedSpec): @@ -281,8 +292,9 @@ class CopyMetaOutputSpec(TraitedSpec): class CopyMeta(BaseInterface): - '''Copy meta data from one Nifti file to another. Useful for preserving - meta data after some processing steps.''' + """Copy meta data from one Nifti file to another. 
Useful for preserving + meta data after some processing steps.""" + input_spec = CopyMetaInputSpec output_spec = CopyMetaOutputSpec @@ -293,14 +305,9 @@ def _run_interface(self, runtime): dest = NiftiWrapper(dest_nii, make_empty=True) classes = src.meta_ext.get_valid_classes() if self.inputs.include_classes: - classes = [ - cls for cls in classes if cls in self.inputs.include_classes - ] + classes = [cls for cls in classes if cls in self.inputs.include_classes] if self.inputs.exclude_classes: - classes = [ - cls for cls in classes - if cls not in self.inputs.exclude_classes - ] + classes = [cls for cls in classes if cls not in self.inputs.exclude_classes] for cls in classes: src_dict = src.meta_ext.get_class_dict(cls) @@ -311,15 +318,14 @@ def _run_interface(self, runtime): dest.meta_ext.slice_dim = src.meta_ext.slice_dim dest.meta_ext.shape = src.meta_ext.shape - self.out_path = op.join(os.getcwd(), op.basename( - self.inputs.dest_file)) + self.out_path = op.join(os.getcwd(), op.basename(self.inputs.dest_file)) dest.to_filename(self.out_path) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['dest_file'] = self.out_path + outputs["dest_file"] = self.out_path return outputs @@ -328,11 +334,13 @@ class MergeNiftiInputSpec(NiftiGeneratorBaseInputSpec): sort_order = traits.Either( traits.Str(), traits.List(), - desc="One or more meta data keys to " - "sort files by.") - merge_dim = traits.Int(desc="Dimension to merge along. If not " - "specified, the last singular or " - "non-existant dimension is used.") + desc="One or more meta data keys to sort files by.", + ) + merge_dim = traits.Int( + desc="Dimension to merge along. If not " + "specified, the last singular or " + "non-existent dimension is used." + ) class MergeNiftiOutputSpec(TraitedSpec): @@ -348,13 +356,14 @@ def key_func(src_nii): class MergeNifti(NiftiGeneratorBase): - '''Merge multiple Nifti files into one. Merges together meta data - extensions as well.''' + """Merge multiple Nifti files into one. Merges together meta data + extensions as well.""" + input_spec = MergeNiftiInputSpec output_spec = MergeNiftiOutputSpec def _run_interface(self, runtime): - niis = [nb.load(fn, mmap=NUMPY_MMAP) for fn in self.inputs.in_files] + niis = [nb.load(fn) for fn in self.inputs.in_files] nws = [NiftiWrapper(nii, make_empty=True) for nii in niis] if self.inputs.sort_order: sort_order = self.inputs.sort_order @@ -366,21 +375,23 @@ def _run_interface(self, runtime): else: merge_dim = self.inputs.merge_dim merged = NiftiWrapper.from_sequence(nws, merge_dim) - const_meta = merged.meta_ext.get_class_dict(('global', 'const')) + const_meta = merged.meta_ext.get_class_dict(("global", "const")) self.out_path = self._get_out_path(const_meta) nb.save(merged.nii_img, self.out_path) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self.out_path + outputs["out_file"] = self.out_path return outputs class SplitNiftiInputSpec(NiftiGeneratorBaseInputSpec): in_file = File(exists=True, mandatory=True, desc="Nifti file to split") - split_dim = traits.Int(desc="Dimension to split along. If not " - "specified, the last dimension is used.") + split_dim = traits.Int( + desc="Dimension to split along. If not " + "specified, the last dimension is used." + ) class SplitNiftiOutputSpec(TraitedSpec): @@ -388,10 +399,11 @@ class SplitNiftiOutputSpec(TraitedSpec): class SplitNifti(NiftiGeneratorBase): - ''' + """ Split one Nifti file into many along the specified dimension. 
Each result has an updated meta data extension as well. - ''' + """ + input_spec = SplitNiftiInputSpec output_spec = SplitNiftiOutputSpec @@ -405,7 +417,7 @@ def _run_interface(self, runtime): else: split_dim = self.inputs.split_dim for split_idx, split_nw in enumerate(nw.split(split_dim)): - const_meta = split_nw.meta_ext.get_class_dict(('global', 'const')) + const_meta = split_nw.meta_ext.get_class_dict(("global", "const")) out_path = self._get_out_path(const_meta, idx=split_idx) nb.save(split_nw.nii_img, out_path) self.out_list.append(out_path) @@ -414,5 +426,5 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['out_list'] = self.out_list + outputs["out_list"] = self.out_list return outputs diff --git a/nipype/interfaces/diffusion_toolkit/__init__.py b/nipype/interfaces/diffusion_toolkit/__init__.py index cef13227c4..89b3d059ef 100644 --- a/nipype/interfaces/diffusion_toolkit/__init__.py +++ b/nipype/interfaces/diffusion_toolkit/__init__.py @@ -1,4 +1,5 @@ -# -*- coding: utf-8 -*- +"""Diffusion Toolkit performs data reconstruction and fiber tracking on diffusion MR images.""" + from .base import Info from .postproc import SplineFilter, TrackMerge from .dti import DTIRecon, DTITracker diff --git a/nipype/interfaces/diffusion_toolkit/base.py b/nipype/interfaces/diffusion_toolkit/base.py index c8e3a17c61..051d80e0f1 100644 --- a/nipype/interfaces/diffusion_toolkit/base.py +++ b/nipype/interfaces/diffusion_toolkit/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The dtk module provides basic functions for interfacing with @@ -13,17 +12,14 @@ See the docstrings for the individual classes for 'working' examples. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import object import re from ..base import CommandLine -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" -class Info(object): - """ Handle dtk output type and version information. +class Info: + """Handle dtk output type and version information. 
Examples -------- @@ -48,13 +44,12 @@ def version(): Version number as string or None if FSL not found """ - clout = CommandLine( - command='dti_recon', terminal_output='allatonce').run() + clout = CommandLine(command="dti_recon", terminal_output="allatonce").run() - if clout.runtime.returncode is not 0: + if clout.runtime.returncode != 0: return None dtirecon = clout.runtime.stdout - result = re.search('dti_recon (.*)\n', dtirecon) + result = re.search("dti_recon (.*)\n", dtirecon) version = result.group(0).split()[1] return version diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index 570ae55df5..fa031799e3 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -1,70 +1,74 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, open - import os import re from ...utils.filemanip import fname_presuffix, split_filename, copyfile -from ..base import (TraitedSpec, File, traits, CommandLine, - CommandLineInputSpec, isdefined) +from ..base import ( + TraitedSpec, + File, + traits, + CommandLine, + CommandLineInputSpec, + isdefined, +) -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class DTIReconInputSpec(CommandLineInputSpec): DWI = File( - desc='Input diffusion volume', - argstr='%s', + desc="Input diffusion volume", + argstr="%s", exists=True, mandatory=True, - position=1) + position=1, + ) out_prefix = traits.Str( - "dti", - desc='Output file prefix', - argstr='%s', - usedefault=True, - position=2) + "dti", desc="Output file prefix", argstr="%s", usedefault=True, position=2 + ) output_type = traits.Enum( - 'nii', - 'analyze', - 'ni1', - 'nii.gz', - argstr='-ot %s', - desc='output file type', - usedefault=True) - bvecs = File( - exists=True, desc='b vectors file', argstr='-gm %s', mandatory=True) - bvals = File(exists=True, desc='b values file', mandatory=True) - n_averages = traits.Int(desc='Number of averages', argstr='-nex %s') + "nii", + "analyze", + "ni1", + "nii.gz", + argstr="-ot %s", + desc="output file type", + usedefault=True, + ) + bvecs = File(exists=True, desc="b vectors file", argstr="-gm %s", mandatory=True) + bvals = File(exists=True, desc="b values file", mandatory=True) + n_averages = traits.Int(desc="Number of averages", argstr="-nex %s") image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, - desc="""specify image orientation vectors. if just one argument given, - will treat it as filename and read the orientation vectors from - the file. if 6 arguments are given, will treat them as 6 float - numbers and construct the 1st and 2nd vector and calculate the 3rd - one automatically. - this information will be used to determine image orientation, - as well as to adjust gradient vectors with oblique angle when""", - argstr="-iop %f") + desc="""\ +Specify image orientation vectors. if just one argument given, +will treat it as filename and read the orientation vectors from +the file. If 6 arguments are given, will treat them as 6 float +numbers and construct the 1st and 2nd vector and calculate the 3rd +one automatically. 
+This information will be used to determine image orientation, +as well as to adjust gradient vectors with oblique angle when.""", + argstr="-iop %f", + ) oblique_correction = traits.Bool( - desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not - adjust gradient accordingly, thus it requires adjustment for correct - diffusion tensor calculation""", - argstr="-oc") + desc="""\ +When oblique angle(s) applied, some SIEMENS DTI protocols do not +adjust gradient accordingly, thus it requires adjustment for correct +diffusion tensor calculation""", + argstr="-oc", + ) b0_threshold = traits.Float( - desc= - """program will use b0 image with the given threshold to mask out high - background of fa/adc maps. by default it will calculate threshold - automatically. but if it failed, you need to set it manually.""", - argstr="-b0_th") + desc="""\ +Program will use b0 image with the given threshold to mask out high +background of fa/adc maps. by default it will calculate threshold +automatically. but if it failed, you need to set it manually.""", + argstr="-b0_th", + ) class DTIReconOutputSpec(TraitedSpec): @@ -83,78 +87,76 @@ class DTIReconOutputSpec(TraitedSpec): class DTIRecon(CommandLine): - """Use dti_recon to generate tensors and other maps - """ + """Use dti_recon to generate tensors and other maps""" input_spec = DTIReconInputSpec output_spec = DTIReconOutputSpec - _cmd = 'dti_recon' + _cmd = "dti_recon" def _create_gradient_matrix(self, bvecs_file, bvals_file): - _gradient_matrix_file = 'gradient_matrix.txt' + _gradient_matrix_file = "gradient_matrix.txt" with open(bvals_file) as fbvals: - bvals = [val for val in re.split('\s+', fbvals.readline().strip())] + bvals = fbvals.readline().strip().split() with open(bvecs_file) as fbvecs: - bvecs_x = fbvecs.readline().split() - bvecs_y = fbvecs.readline().split() - bvecs_z = fbvecs.readline().split() + bvecs_x = fbvecs.readline().split() + bvecs_y = fbvecs.readline().split() + bvecs_z = fbvecs.readline().split() - with open(_gradient_matrix_file, 'w') as gradient_matrix_f: + with open(_gradient_matrix_file, "w") as gradient_matrix_f: for i in range(len(bvals)): - gradient_matrix_f.write("%s, %s, %s, %s\n" % - (bvecs_x[i], bvecs_y[i], bvecs_z[i], - bvals[i])) + gradient_matrix_f.write( + f"{bvecs_x[i]}, {bvecs_y[i]}, {bvecs_z[i]}, {bvals[i]}\n" + ) return _gradient_matrix_file def _format_arg(self, name, spec, value): if name == "bvecs": - new_val = self._create_gradient_matrix(self.inputs.bvecs, - self.inputs.bvals) - return super(DTIRecon, self)._format_arg("bvecs", spec, new_val) - return super(DTIRecon, self)._format_arg(name, spec, value) + new_val = self._create_gradient_matrix(self.inputs.bvecs, self.inputs.bvals) + return super()._format_arg("bvecs", spec, new_val) + return super()._format_arg(name, spec, value) def _list_outputs(self): out_prefix = self.inputs.out_prefix output_type = self.inputs.output_type outputs = self.output_spec().get() - outputs['ADC'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_adc.' + output_type)) - outputs['B0'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_b0.' + output_type)) - outputs['L1'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_e1.' + output_type)) - outputs['L2'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_e2.' + output_type)) - outputs['L3'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_e3.' 
+ output_type)) - outputs['exp'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_exp.' + output_type)) - outputs['FA'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_fa.' + output_type)) - outputs['FA_color'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_fa_color.' + output_type)) - outputs['tensor'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_tensor.' + output_type)) - outputs['V1'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_v1.' + output_type)) - outputs['V2'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_v2.' + output_type)) - outputs['V3'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_v3.' + output_type)) + outputs["ADC"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_adc." + output_type) + ) + outputs["B0"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_b0." + output_type) + ) + outputs["L1"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_e1." + output_type) + ) + outputs["L2"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_e2." + output_type) + ) + outputs["L3"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_e3." + output_type) + ) + outputs["exp"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_exp." + output_type) + ) + outputs["FA"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_fa." + output_type) + ) + outputs["FA_color"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_fa_color." + output_type) + ) + outputs["tensor"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_tensor." + output_type) + ) + outputs["V1"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_v1." + output_type) + ) + outputs["V2"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_v2." + output_type) + ) + outputs["V3"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_v3." + output_type) + ) return outputs @@ -162,89 +164,97 @@ def _list_outputs(self): class DTITrackerInputSpec(CommandLineInputSpec): tensor_file = File(exists=True, desc="reconstructed tensor file") input_type = traits.Enum( - 'nii', - 'analyze', - 'ni1', - 'nii.gz', - desc="""input and output file type. accepted values are: - analyze -> analyze format 7.5 - ni1 -> nifti format saved in seperate .hdr and .img file - nii -> nifti format with one .nii file - nii.gz -> nifti format with compression - default type is 'nii'""", - argstr="-it %s") + "nii", + "analyze", + "ni1", + "nii.gz", + desc="""\ +Input and output file type. Accepted values are: + +* analyze -> analyze format 7.5 +* ni1 -> nifti format saved in separate .hdr and .img file +* nii -> nifti format with one .nii file +* nii.gz -> nifti format with compression + +Default type is 'nii' +""", + argstr="-it %s", + ) tracking_method = traits.Enum( - 'fact', - 'rk2', - 'tl', - 'sl', - desc="""fact -> use FACT method for tracking. this is the default method. - rk2 -> use 2nd order runge-kutta method for tracking. - tl -> use tensorline method for tracking. - sl -> use interpolated streamline method with fixed step-length""", - argstr="-%s") + "fact", + "rk2", + "tl", + "sl", + desc="""\ +Tracking algorithm. + +* fact -> use FACT method for tracking. This is the default method. +* rk2 -> use 2nd order Runge-Kutta method for tracking. 
+* tl -> use tensorline method for tracking. +* sl -> use interpolated streamline method with fixed step-length + +""", + argstr="-%s", + ) step_length = traits.Float( - desc="""set step length, in the unit of minimum voxel size. - default value is 0.5 for interpolated streamline method - and 0.1 for other methods""", - argstr="-l %f") + desc="""\ +Step length, in the unit of minimum voxel size. +default value is 0.5 for interpolated streamline method +and 0.1 for other methods""", + argstr="-l %f", + ) angle_threshold = traits.Float( - desc="set angle threshold. default value is 35 degree", - argstr="-at %f") + desc="set angle threshold. default value is 35 degree", argstr="-at %f" + ) angle_threshold_weight = traits.Float( - desc= - "set angle threshold weighting factor. weighting will be be applied \ - on top of the angle_threshold", - argstr="-atw %f") + desc="set angle threshold weighting factor. weighting will be applied " + "on top of the angle_threshold", + argstr="-atw %f", + ) random_seed = traits.Int( - desc="use random location in a voxel instead of the center of the voxel \ - to seed. can also define number of seed per voxel. default is 1", - argstr="-rseed %d") - invert_x = traits.Bool( - desc="invert x component of the vector", argstr="-ix") - invert_y = traits.Bool( - desc="invert y component of the vector", argstr="-iy") - invert_z = traits.Bool( - desc="invert z component of the vector", argstr="-iz") - swap_xy = traits.Bool( - desc="swap x & y vectors while tracking", argstr="-sxy") - swap_yz = traits.Bool( - desc="swap y & z vectors while tracking", argstr="-syz") - swap_zx = traits.Bool( - desc="swap x & z vectors while tracking", argstr="-szx") + desc="use random location in a voxel instead of the center of the voxel " + "to seed. can also define number of seed per voxel. 
default is 1", + argstr="-rseed %d", + ) + invert_x = traits.Bool(desc="invert x component of the vector", argstr="-ix") + invert_y = traits.Bool(desc="invert y component of the vector", argstr="-iy") + invert_z = traits.Bool(desc="invert z component of the vector", argstr="-iz") + swap_xy = traits.Bool(desc="swap x & y vectors while tracking", argstr="-sxy") + swap_yz = traits.Bool(desc="swap y & z vectors while tracking", argstr="-syz") + swap_zx = traits.Bool(desc="swap x & z vectors while tracking", argstr="-szx") mask1_file = File( - desc="first mask image", mandatory=True, argstr="-m %s", position=2) + desc="first mask image", mandatory=True, argstr="-m %s", position=2 + ) mask1_threshold = traits.Float( - desc= - "threshold value for the first mask image, if not given, the program will \ - try automatically find the threshold", - position=3) + desc="threshold value for the first mask image, if not given, the program will " + "try automatically find the threshold", + position=3, + ) mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4) mask2_threshold = traits.Float( - desc= - "threshold value for the second mask image, if not given, the program will \ - try automatically find the threshold", - position=5) + desc="threshold value for the second mask image, if not given, the program will " + "try automatically find the threshold", + position=5, + ) input_data_prefix = traits.Str( "dti", desc="for internal naming use only", position=0, argstr="%s", - usedefault=True) + usedefault=True, + ) output_file = File( - "tracks.trk", - "file containing tracks", - argstr="%s", - position=1, - usedefault=True) + "tracks.trk", "file containing tracks", argstr="%s", position=1, usedefault=True + ) output_mask = File( - desc="output a binary mask file in analyze format", argstr="-om %s") + desc="output a binary mask file in analyze format", argstr="-om %s" + ) primary_vector = traits.Enum( - 'v2', - 'v3', - desc= - "which vector to use for fibre tracking: v2 or v3. If not set use v1", - argstr="-%s") + "v2", + "v3", + desc="which vector to use for fibre tracking: v2 or v3. 
If not set use v1", + argstr="-%s", + ) class DTITrackerOutputSpec(TraitedSpec): @@ -256,21 +266,22 @@ class DTITracker(CommandLine): input_spec = DTITrackerInputSpec output_spec = DTITrackerOutputSpec - _cmd = 'dti_tracker' + _cmd = "dti_tracker" def _run_interface(self, runtime): _, _, ext = split_filename(self.inputs.tensor_file) copyfile( self.inputs.tensor_file, os.path.abspath(self.inputs.input_data_prefix + "_tensor" + ext), - copy=False) + copy=False, + ) - return super(DTITracker, self)._run_interface(runtime) + return super()._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() - outputs['track_file'] = os.path.abspath(self.inputs.output_file) + outputs["track_file"] = os.path.abspath(self.inputs.output_file) if isdefined(self.inputs.output_mask) and self.inputs.output_mask: - outputs['mask_file'] = os.path.abspath(self.inputs.output_mask) + outputs["mask_file"] = os.path.abspath(self.inputs.output_mask) return outputs diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index cf4eb683a2..00f86a322c 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -1,198 +1,202 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range - import os import re from ...utils.filemanip import fname_presuffix, split_filename, copyfile -from ..base import (TraitedSpec, File, traits, CommandLine, - CommandLineInputSpec, isdefined) +from ..base import ( + TraitedSpec, + File, + traits, + CommandLine, + CommandLineInputSpec, + isdefined, +) -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class HARDIMatInputSpec(CommandLineInputSpec): bvecs = File( - exists=True, - desc='b vectors file', - argstr='%s', - position=1, - mandatory=True) - bvals = File(exists=True, desc='b values file', mandatory=True) + exists=True, desc="b vectors file", argstr="%s", position=1, mandatory=True + ) + bvals = File(exists=True, desc="b values file", mandatory=True) out_file = File( "recon_mat.dat", - desc='output matrix file', - argstr='%s', + desc="output matrix file", + argstr="%s", usedefault=True, - position=2) + position=2, + ) order = traits.Int( - argstr='-order %s', - desc= - """maximum order of spherical harmonics. must be even number. default - is 4""") + argstr="-order %s", + desc="""maximum order of spherical harmonics. must be even number. default is 4""", + ) odf_file = File( exists=True, - argstr='-odf %s', - desc= - """filename that contains the reconstruction points on a HEMI-sphere. - use the pre-set 181 points by default""") + argstr="-odf %s", + desc="""\ +Filename that contains the reconstruction points on a HEMI-sphere. +Use the pre-set 181 points by default""", + ) reference_file = File( exists=True, - argstr='-ref %s', - desc= - """provide a dicom or nifti image as the reference for the program to - figure out the image orientation information. if no such info was - found in the given image header, the next 5 options -info, etc., - will be used if provided. 
if image orientation info can be found - in the given reference, all other 5 image orientation options will - be IGNORED""") + argstr="-ref %s", + desc="""\ +Provide a dicom or nifti image as the reference for the program to +figure out the image orientation information. if no such info was +found in the given image header, the next 5 options -info, etc., +will be used if provided. if image orientation info can be found +in the given reference, all other 5 image orientation options will +be IGNORED""", + ) image_info = File( exists=True, - argstr='-info %s', - desc="""specify image information file. the image info file is generated - from original dicom image by diff_unpack program and contains image - orientation and other information needed for reconstruction and - tracking. by default will look into the image folder for .info file""") + argstr="-info %s", + desc="""\ +specify image information file. the image info file is generated +from original dicom image by diff_unpack program and contains image +orientation and other information needed for reconstruction and +tracking. by default will look into the image folder for .info file""", + ) image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, - desc="""specify image orientation vectors. if just one argument given, - will treat it as filename and read the orientation vectors from - the file. if 6 arguments are given, will treat them as 6 float - numbers and construct the 1st and 2nd vector and calculate the 3rd - one automatically. - this information will be used to determine image orientation, - as well as to adjust gradient vectors with oblique angle when""", - argstr="-iop %f") + desc="""\ +specify image orientation vectors. if just one argument given, +will treat it as filename and read the orientation vectors from +the file. if 6 arguments are given, will treat them as 6 float +numbers and construct the 1st and 2nd vector and calculate the 3rd +one automatically. 
+this information will be used to determine image orientation, +as well as to adjust gradient vectors with oblique angle when""", + argstr="-iop %f", + ) oblique_correction = traits.Bool( - desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not - adjust gradient accordingly, thus it requires adjustment for correct - diffusion tensor calculation""", - argstr="-oc") + desc="""\ +when oblique angle(s) applied, some SIEMENS dti protocols do not +adjust gradient accordingly, thus it requires adjustment for correct +diffusion tensor calculation""", + argstr="-oc", + ) class HARDIMatOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output matrix file') + out_file = File(exists=True, desc="output matrix file") class HARDIMat(CommandLine): - """Use hardi_mat to calculate a reconstruction matrix from a gradient table - """ + """Use hardi_mat to calculate a reconstruction matrix from a gradient table""" + input_spec = HARDIMatInputSpec output_spec = HARDIMatOutputSpec - _cmd = 'hardi_mat' + _cmd = "hardi_mat" def _create_gradient_matrix(self, bvecs_file, bvals_file): - _gradient_matrix_file = 'gradient_matrix.txt' - bvals = [ - val for val in re.split('\s+', - open(bvals_file).readline().strip()) - ] - bvecs_f = open(bvecs_file) - bvecs_x = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_y = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_z = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_f.close() - gradient_matrix_f = open(_gradient_matrix_file, 'w') + _gradient_matrix_file = "gradient_matrix.txt" + with open(bvals_file) as bvals_f: + bvals = bvals_f.readline().strip().split() + with open(bvecs_file) as bvecs_f: + bvecs_x = bvecs_f.readline().strip().split() + bvecs_y = bvecs_f.readline().strip().split() + bvecs_z = bvecs_f.readline().strip().split() + gradient_matrix_f = open(_gradient_matrix_file, "w") for i in range(len(bvals)): if int(bvals[i]) == 0: continue - gradient_matrix_f.write("%s %s %s\n" % (bvecs_x[i], bvecs_y[i], - bvecs_z[i])) + gradient_matrix_f.write(f"{bvecs_x[i]} {bvecs_y[i]} {bvecs_z[i]}\n") gradient_matrix_f.close() return _gradient_matrix_file def _format_arg(self, name, spec, value): if name == "bvecs": - new_val = self._create_gradient_matrix(self.inputs.bvecs, - self.inputs.bvals) - return super(HARDIMat, self)._format_arg("bvecs", spec, new_val) - return super(HARDIMat, self)._format_arg(name, spec, value) + new_val = self._create_gradient_matrix(self.inputs.bvecs, self.inputs.bvals) + return super()._format_arg("bvecs", spec, new_val) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class ODFReconInputSpec(CommandLineInputSpec): DWI = File( - desc='Input raw data', - argstr='%s', - exists=True, - mandatory=True, - position=1) + desc="Input raw data", argstr="%s", exists=True, mandatory=True, position=1 + ) n_directions = traits.Int( - desc='Number of directions', argstr='%s', mandatory=True, position=2) + desc="Number of directions", argstr="%s", mandatory=True, position=2 + ) n_output_directions = traits.Int( - desc='Number of output directions', - argstr='%s', - mandatory=True, - position=3) + desc="Number of output directions", argstr="%s", mandatory=True, position=3 + ) out_prefix = traits.Str( - "odf", - desc='Output file prefix', - argstr='%s', - usedefault=True, - position=4) + 
"odf", desc="Output file prefix", argstr="%s", usedefault=True, position=4 + ) matrix = File( - argstr='-mat %s', + argstr="-mat %s", exists=True, desc="""use given file as reconstruction matrix.""", - mandatory=True) + mandatory=True, + ) n_b0 = traits.Int( - argstr='-b0 %s', - desc="""number of b0 scans. by default the program gets this information - from the number of directions and number of volumes in - the raw data. useful when dealing with incomplete raw - data set or only using part of raw data set to reconstruct""", - mandatory=True) + argstr="-b0 %s", + desc="""\ +number of b0 scans. by default the program gets this information +from the number of directions and number of volumes in +the raw data. useful when dealing with incomplete raw +data set or only using part of raw data set to reconstruct""", + mandatory=True, + ) output_type = traits.Enum( - 'nii', - 'analyze', - 'ni1', - 'nii.gz', - argstr='-ot %s', - desc='output file type', - usedefault=True) + "nii", + "analyze", + "ni1", + "nii.gz", + argstr="-ot %s", + desc="output file type", + usedefault=True, + ) sharpness = traits.Float( - desc="""smooth or sharpen the raw data. factor > 0 is smoothing. - factor < 0 is sharpening. default value is 0 - NOTE: this option applies to DSI study only""", - argstr='-s %f') + desc="""\ +smooth or sharpen the raw data. factor > 0 is smoothing. +factor < 0 is sharpening. default value is 0 +NOTE: this option applies to DSI study only""", + argstr="-s %f", + ) filter = traits.Bool( - desc="""apply a filter (e.g. high pass) to the raw image""", - argstr='-f') + desc="""apply a filter (e.g. high pass) to the raw image""", argstr="-f" + ) subtract_background = traits.Bool( - desc="""subtract the background value before reconstruction""", - argstr='-bg') - dsi = traits.Bool(desc="""indicates that the data is dsi""", argstr='-dsi') - output_entropy = traits.Bool(desc="""output entropy map""", argstr='-oe') + desc="""subtract the background value before reconstruction""", argstr="-bg" + ) + dsi = traits.Bool(desc="""indicates that the data is dsi""", argstr="-dsi") + output_entropy = traits.Bool(desc="""output entropy map""", argstr="-oe") image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, - desc="""specify image orientation vectors. if just one argument given, - will treat it as filename and read the orientation vectors from - the file. if 6 arguments are given, will treat them as 6 float - numbers and construct the 1st and 2nd vector and calculate the 3rd - one automatically. - this information will be used to determine image orientation, - as well as to adjust gradient vectors with oblique angle when""", - argstr="-iop %f") + desc="""\ +specify image orientation vectors. if just one argument given, +will treat it as filename and read the orientation vectors from +the file. if 6 arguments are given, will treat them as 6 float +numbers and construct the 1st and 2nd vector and calculate the 3rd +one automatically. 
+this information will be used to determine image orientation, +as well as to adjust gradient vectors with oblique angle when""", + argstr="-iop %f", + ) oblique_correction = traits.Bool( - desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not - adjust gradient accordingly, thus it requires adjustment for correct - diffusion tensor calculation""", - argstr="-oc") + desc="""\ +when oblique angle(s) applied, some SIEMENS dti protocols do not +adjust gradient accordingly, thus it requires adjustment for correct +diffusion tensor calculation""", + argstr="-oc", + ) class ODFReconOutputSpec(TraitedSpec): @@ -204,35 +208,34 @@ class ODFReconOutputSpec(TraitedSpec): class ODFRecon(CommandLine): - """Use odf_recon to generate tensors and other maps - """ + """Use odf_recon to generate tensors and other maps""" input_spec = ODFReconInputSpec output_spec = ODFReconOutputSpec - _cmd = 'odf_recon' + _cmd = "odf_recon" def _list_outputs(self): out_prefix = self.inputs.out_prefix output_type = self.inputs.output_type outputs = self.output_spec().get() - outputs['B0'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_b0.' + output_type)) - outputs['DWI'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_dwi.' + output_type)) - outputs['max'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_max.' + output_type)) - outputs['ODF'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_odf.' + output_type)) + outputs["B0"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_b0." + output_type) + ) + outputs["DWI"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_dwi." + output_type) + ) + outputs["max"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_max." + output_type) + ) + outputs["ODF"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_odf." + output_type) + ) if isdefined(self.inputs.output_entropy): - outputs['entropy'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_entropy.' + output_type)) + outputs["entropy"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_entropy." + output_type) + ) return outputs @@ -241,147 +244,152 @@ class ODFTrackerInputSpec(CommandLineInputSpec): max = File(exists=True, mandatory=True) ODF = File(exists=True, mandatory=True) input_data_prefix = traits.Str( - "odf", - desc='recon data prefix', - argstr='%s', - usedefault=True, - position=0) + "odf", desc="recon data prefix", argstr="%s", usedefault=True, position=0 + ) out_file = File( - "tracks.trk", - desc='output track file', - argstr='%s', - usedefault=True, - position=1) + "tracks.trk", desc="output track file", argstr="%s", usedefault=True, position=1 + ) input_output_type = traits.Enum( - 'nii', - 'analyze', - 'ni1', - 'nii.gz', - argstr='-it %s', - desc='input and output file type', - usedefault=True) + "nii", + "analyze", + "ni1", + "nii.gz", + argstr="-it %s", + desc="input and output file type", + usedefault=True, + ) runge_kutta2 = traits.Bool( - argstr='-rk2', - desc="""use 2nd order runge-kutta method for tracking. - default tracking method is non-interpolate streamline""") + argstr="-rk2", + desc="""\ +use 2nd order Runge-Kutta method for tracking. +default tracking method is non-interpolate streamline""", + ) step_length = traits.Float( - argstr='-l %f', - desc="""set step length, in the unit of minimum voxel size. 
- default value is 0.1.""") + argstr="-l %f", + desc="""\ +set step length, in the unit of minimum voxel size. +default value is 0.1.""", + ) angle_threshold = traits.Float( - argstr='-at %f', - desc="""set angle threshold. default value is 35 degree for - default tracking method and 25 for rk2""") + argstr="-at %f", + desc="""\ +set angle threshold. default value is 35 degree for +default tracking method and 25 for rk2""", + ) random_seed = traits.Int( - argstr='-rseed %s', - desc="""use random location in a voxel instead of the center of the voxel - to seed. can also define number of seed per voxel. default is 1""") - invert_x = traits.Bool( - argstr='-ix', desc='invert x component of the vector') - invert_y = traits.Bool( - argstr='-iy', desc='invert y component of the vector') - invert_z = traits.Bool( - argstr='-iz', desc='invert z component of the vector') - swap_xy = traits.Bool( - argstr='-sxy', desc='swap x and y vectors while tracking') - swap_yz = traits.Bool( - argstr='-syz', desc='swap y and z vectors while tracking') - swap_zx = traits.Bool( - argstr='-szx', desc='swap x and z vectors while tracking') - disc = traits.Bool(argstr='-disc', desc='use disc tracking') + argstr="-rseed %s", + desc="""\ +use random location in a voxel instead of the center of the voxel +to seed. can also define number of seed per voxel. default is 1""", + ) + invert_x = traits.Bool(argstr="-ix", desc="invert x component of the vector") + invert_y = traits.Bool(argstr="-iy", desc="invert y component of the vector") + invert_z = traits.Bool(argstr="-iz", desc="invert z component of the vector") + swap_xy = traits.Bool(argstr="-sxy", desc="swap x and y vectors while tracking") + swap_yz = traits.Bool(argstr="-syz", desc="swap y and z vectors while tracking") + swap_zx = traits.Bool(argstr="-szx", desc="swap x and z vectors while tracking") + disc = traits.Bool(argstr="-disc", desc="use disc tracking") mask1_file = File( - desc="first mask image", mandatory=True, argstr="-m %s", position=2) + desc="first mask image", mandatory=True, argstr="-m %s", position=2 + ) mask1_threshold = traits.Float( - desc= - "threshold value for the first mask image, if not given, the program will \ - try automatically find the threshold", - position=3) + desc="threshold value for the first mask image, if not given, the program will " + "try automatically find the threshold", + position=3, + ) mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4) mask2_threshold = traits.Float( - desc= - "threshold value for the second mask image, if not given, the program will \ - try automatically find the threshold", - position=5) + desc="threshold value for the second mask image, if not given, the program will " + "try automatically find the threshold", + position=5, + ) limit = traits.Int( - argstr='-limit %d', - desc="""in some special case, such as heart data, some track may go into - infinite circle and take long time to stop. this option allows - setting a limit for the longest tracking steps (voxels)""") + argstr="-limit %d", + desc="""\ +in some special case, such as heart data, some track may go into +infinite circle and take long time to stop. this option allows +setting a limit for the longest tracking steps (voxels)""", + ) dsi = traits.Bool( - argstr='-dsi', - desc=""" specify the input odf data is dsi. 
because dsi recon uses fixed - pre-calculated matrix, some special orientation patch needs to - be applied to keep dti/dsi/q-ball consistent.""") + argstr="-dsi", + desc="""\ +specify the input odf data is dsi. because dsi recon uses fixed +pre-calculated matrix, some special orientation patch needs to +be applied to keep dti/dsi/q-ball consistent.""", + ) image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, - desc="""specify image orientation vectors. if just one argument given, - will treat it as filename and read the orientation vectors from - the file. if 6 arguments are given, will treat them as 6 float - numbers and construct the 1st and 2nd vector and calculate the 3rd - one automatically. - this information will be used to determine image orientation, - as well as to adjust gradient vectors with oblique angle when""", - argstr="-iop %f") + desc="""\ +specify image orientation vectors. if just one argument given, +will treat it as filename and read the orientation vectors from +the file. if 6 arguments are given, will treat them as 6 float +numbers and construct the 1st and 2nd vector and calculate the 3rd +one automatically. +this information will be used to determine image orientation, +as well as to adjust gradient vectors with oblique angle when""", + argstr="-iop %f", + ) slice_order = traits.Int( - argstr='-sorder %d', - desc= - 'set the slice order. 1 means normal, -1 means reversed. default value is 1' + argstr="-sorder %d", + desc="set the slice order. 1 means normal, -1 means reversed. default value is 1", ) voxel_order = traits.Enum( - 'RAS', - 'RPS', - 'RAI', - 'RPI', - 'LAI', - 'LAS', - 'LPS', - 'LPI', - argstr='-vorder %s', - desc= - """specify the voxel order in RL/AP/IS (human brain) reference. must be - 3 letters with no space in between. - for example, RAS means the voxel row is from L->R, the column - is from P->A and the slice order is from I->S. - by default voxel order is determined by the image orientation - (but NOT guaranteed to be correct because of various standards). - for example, siemens axial image is LPS, coronal image is LIP and - sagittal image is PIL. - this information also is NOT needed for tracking but will be saved - in the track file and is essential for track display to map onto - the right coordinates""") + "RAS", + "RPS", + "RAI", + "RPI", + "LAI", + "LAS", + "LPS", + "LPI", + argstr="-vorder %s", + desc="""\ +specify the voxel order in RL/AP/IS (human brain) reference. must be +3 letters with no space in between. +for example, RAS means the voxel row is from L->R, the column +is from P->A and the slice order is from I->S. +by default voxel order is determined by the image orientation +(but NOT guaranteed to be correct because of various standards). +for example, siemens axial image is LPS, coronal image is LIP and +sagittal image is PIL. 
+this information also is NOT needed for tracking but will be saved +in the track file and is essential for track display to map onto +the right coordinates""", + ) class ODFTrackerOutputSpec(TraitedSpec): - track_file = File(exists=True, desc='output track file') + track_file = File(exists=True, desc="output track file") class ODFTracker(CommandLine): - """Use odf_tracker to generate track file - """ + """Use odf_tracker to generate track file""" input_spec = ODFTrackerInputSpec output_spec = ODFTrackerOutputSpec - _cmd = 'odf_tracker' + _cmd = "odf_tracker" def _run_interface(self, runtime): _, _, ext = split_filename(self.inputs.max) copyfile( self.inputs.max, os.path.abspath(self.inputs.input_data_prefix + "_max" + ext), - copy=False) + copy=False, + ) _, _, ext = split_filename(self.inputs.ODF) copyfile( self.inputs.ODF, os.path.abspath(self.inputs.input_data_prefix + "_odf" + ext), - copy=False) + copy=False, + ) - return super(ODFTracker, self)._run_interface(runtime) + return super()._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() - outputs['track_file'] = os.path.abspath(self.inputs.out_file) + outputs["track_file"] = os.path.abspath(self.inputs.out_file) return outputs diff --git a/nipype/interfaces/diffusion_toolkit/postproc.py b/nipype/interfaces/diffusion_toolkit/postproc.py index 20aaeea927..5190843875 100644 --- a/nipype/interfaces/diffusion_toolkit/postproc.py +++ b/nipype/interfaces/diffusion_toolkit/postproc.py @@ -1,16 +1,19 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os -from ..base import (TraitedSpec, File, traits, CommandLine, InputMultiPath, - CommandLineInputSpec) +from ..base import ( + TraitedSpec, + File, + traits, + CommandLine, + InputMultiPath, + CommandLineInputSpec, +) -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class SplineFilterInputSpec(CommandLineInputSpec): @@ -19,18 +22,21 @@ class SplineFilterInputSpec(CommandLineInputSpec): desc="file containing tracks to be filtered", position=0, argstr="%s", - mandatory=True) + mandatory=True, + ) step_length = traits.Float( desc="in the unit of minimum voxel size", position=1, argstr="%f", - mandatory=True) + mandatory=True, + ) output_file = File( "spline_tracks.trk", desc="target file for smoothed tracks", position=2, argstr="%s", - usedefault=True) + usedefault=True, + ) class SplineFilterOutputSpec(TraitedSpec): @@ -55,6 +61,7 @@ class SplineFilter(CommandLine): >>> filt.inputs.step_length = 0.5 >>> filt.run() # doctest: +SKIP """ + input_spec = SplineFilterInputSpec output_spec = SplineFilterOutputSpec @@ -62,8 +69,7 @@ class SplineFilter(CommandLine): def _list_outputs(self): outputs = self.output_spec().get() - outputs['smoothed_track_file'] = os.path.abspath( - self.inputs.output_file) + outputs["smoothed_track_file"] = os.path.abspath(self.inputs.output_file) return outputs @@ -73,13 +79,15 @@ class TrackMergeInputSpec(CommandLineInputSpec): desc="file containing tracks to be filtered", position=0, argstr="%s...", - mandatory=True) + mandatory=True, + ) output_file = File( "merged_tracks.trk", desc="target file for merged tracks", position=-1, argstr="%s", - usedefault=True) + usedefault=True, + ) class TrackMergeOutputSpec(TraitedSpec): @@ -106,6 +114,7 @@ class 
TrackMerge(CommandLine): >>> mrg.inputs.track_files = ['track1.trk','track2.trk'] >>> mrg.run() # doctest: +SKIP """ + input_spec = TrackMergeInputSpec output_spec = TrackMergeOutputSpec @@ -113,5 +122,5 @@ class TrackMerge(CommandLine): def _list_outputs(self): outputs = self.output_spec().get() - outputs['track_file'] = os.path.abspath(self.inputs.output_file) + outputs["track_file"] = os.path.abspath(self.inputs.output_file) return outputs diff --git a/nipype/interfaces/diffusion_toolkit/tests/__init__.py b/nipype/interfaces/diffusion_toolkit/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/__init__.py +++ b/nipype/interfaces/diffusion_toolkit/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTIRecon.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTIRecon.py index a39dbf6c3b..f7bf46f327 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTIRecon.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTIRecon.py @@ -1,36 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import DTIRecon def test_DTIRecon_inputs(): input_map = dict( DWI=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), - args=dict(argstr='%s', ), - b0_threshold=dict(argstr='-b0_th', ), - bvals=dict(mandatory=True, ), + args=dict( + argstr="%s", + ), + b0_threshold=dict( + argstr="-b0_th", + ), + bvals=dict( + extensions=None, + mandatory=True, + ), bvecs=dict( - argstr='-gm %s', + argstr="-gm %s", + extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), - image_orientation_vectors=dict(argstr='-iop %f', ), - n_averages=dict(argstr='-nex %s', ), - oblique_correction=dict(argstr='-oc', ), + image_orientation_vectors=dict( + argstr="-iop %f", + ), + n_averages=dict( + argstr="-nex %s", + ), + oblique_correction=dict( + argstr="-oc", + ), out_prefix=dict( - argstr='%s', + argstr="%s", position=2, usedefault=True, ), output_type=dict( - argstr='-ot %s', + argstr="-ot %s", usedefault=True, ), ) @@ -39,20 +53,46 @@ def test_DTIRecon_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTIRecon_outputs(): output_map = dict( - ADC=dict(), - B0=dict(), - FA=dict(), - FA_color=dict(), - L1=dict(), - L2=dict(), - L3=dict(), - V1=dict(), - V2=dict(), - V3=dict(), - exp=dict(), - tensor=dict(), + ADC=dict( + extensions=None, + ), + B0=dict( + extensions=None, + ), + FA=dict( + extensions=None, + ), + FA_color=dict( + extensions=None, + ), + L1=dict( + extensions=None, + ), + L2=dict( + extensions=None, + ), + L3=dict( + extensions=None, + ), + V1=dict( + extensions=None, + ), + V2=dict( + extensions=None, + ), + V3=dict( + extensions=None, + ), + exp=dict( + extensions=None, + ), + tensor=dict( + extensions=None, + ), ) outputs = DTIRecon.output_spec() diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTITracker.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTITracker.py index cf483d00d0..e550bc4b27 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTITracker.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTITracker.py @@ -1,61 +1,106 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import DTITracker def test_DTITracker_inputs(): 
input_map = dict( - angle_threshold=dict(argstr='-at %f', ), - angle_threshold_weight=dict(argstr='-atw %f', ), - args=dict(argstr='%s', ), + angle_threshold=dict( + argstr="-at %f", + ), + angle_threshold_weight=dict( + argstr="-atw %f", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), input_data_prefix=dict( - argstr='%s', + argstr="%s", position=0, usedefault=True, ), - input_type=dict(argstr='-it %s', ), - invert_x=dict(argstr='-ix', ), - invert_y=dict(argstr='-iy', ), - invert_z=dict(argstr='-iz', ), + input_type=dict( + argstr="-it %s", + ), + invert_x=dict( + argstr="-ix", + ), + invert_y=dict( + argstr="-iy", + ), + invert_z=dict( + argstr="-iz", + ), mask1_file=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, mandatory=True, position=2, ), - mask1_threshold=dict(position=3, ), + mask1_threshold=dict( + position=3, + ), mask2_file=dict( - argstr='-m2 %s', + argstr="-m2 %s", + extensions=None, position=4, ), - mask2_threshold=dict(position=5, ), + mask2_threshold=dict( + position=5, + ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=1, usedefault=True, ), - output_mask=dict(argstr='-om %s', ), - primary_vector=dict(argstr='-%s', ), - random_seed=dict(argstr='-rseed %d', ), - step_length=dict(argstr='-l %f', ), - swap_xy=dict(argstr='-sxy', ), - swap_yz=dict(argstr='-syz', ), - swap_zx=dict(argstr='-szx', ), - tensor_file=dict(), - tracking_method=dict(argstr='-%s', ), + output_mask=dict( + argstr="-om %s", + extensions=None, + ), + primary_vector=dict( + argstr="-%s", + ), + random_seed=dict( + argstr="-rseed %d", + ), + step_length=dict( + argstr="-l %f", + ), + swap_xy=dict( + argstr="-sxy", + ), + swap_yz=dict( + argstr="-syz", + ), + swap_zx=dict( + argstr="-szx", + ), + tensor_file=dict( + extensions=None, + ), + tracking_method=dict( + argstr="-%s", + ), ) inputs = DTITracker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTITracker_outputs(): output_map = dict( - mask_file=dict(), - track_file=dict(), + mask_file=dict( + extensions=None, + ), + track_file=dict( + extensions=None, + ), ) outputs = DTITracker.output_spec() diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_HARDIMat.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_HARDIMat.py index 59bc8c25a5..a933495672 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_HARDIMat.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_HARDIMat.py @@ -1,14 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..odf import HARDIMat def test_HARDIMat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bvals=dict(mandatory=True, ), + args=dict( + argstr="%s", + ), + bvals=dict( + extensions=None, + mandatory=True, + ), bvecs=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), @@ -16,25 +21,47 @@ def test_HARDIMat_inputs(): nohash=True, usedefault=True, ), - image_info=dict(argstr='-info %s', ), - image_orientation_vectors=dict(argstr='-iop %f', ), - oblique_correction=dict(argstr='-oc', ), - odf_file=dict(argstr='-odf %s', ), - order=dict(argstr='-order %s', ), + image_info=dict( + argstr="-info %s", + extensions=None, + ), + image_orientation_vectors=dict( + argstr="-iop %f", + ), + oblique_correction=dict( + argstr="-oc", + ), + odf_file=dict( + argstr="-odf %s", + extensions=None, + ), + 
order=dict( + argstr="-order %s", + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, usedefault=True, ), - reference_file=dict(argstr='-ref %s', ), + reference_file=dict( + argstr="-ref %s", + extensions=None, + ), ) inputs = HARDIMat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_HARDIMat_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = HARDIMat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFRecon.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFRecon.py index 1e66b93bec..b6a18aaf77 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFRecon.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFRecon.py @@ -1,67 +1,96 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..odf import ODFRecon def test_ODFRecon_inputs(): input_map = dict( DWI=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), - args=dict(argstr='%s', ), - dsi=dict(argstr='-dsi', ), + args=dict( + argstr="%s", + ), + dsi=dict( + argstr="-dsi", + ), environ=dict( nohash=True, usedefault=True, ), - filter=dict(argstr='-f', ), - image_orientation_vectors=dict(argstr='-iop %f', ), + filter=dict( + argstr="-f", + ), + image_orientation_vectors=dict( + argstr="-iop %f", + ), matrix=dict( - argstr='-mat %s', + argstr="-mat %s", + extensions=None, mandatory=True, ), n_b0=dict( - argstr='-b0 %s', + argstr="-b0 %s", mandatory=True, ), n_directions=dict( - argstr='%s', + argstr="%s", mandatory=True, position=2, ), n_output_directions=dict( - argstr='%s', + argstr="%s", mandatory=True, position=3, ), - oblique_correction=dict(argstr='-oc', ), + oblique_correction=dict( + argstr="-oc", + ), out_prefix=dict( - argstr='%s', + argstr="%s", position=4, usedefault=True, ), - output_entropy=dict(argstr='-oe', ), + output_entropy=dict( + argstr="-oe", + ), output_type=dict( - argstr='-ot %s', + argstr="-ot %s", usedefault=True, ), - sharpness=dict(argstr='-s %f', ), - subtract_background=dict(argstr='-bg', ), + sharpness=dict( + argstr="-s %f", + ), + subtract_background=dict( + argstr="-bg", + ), ) inputs = ODFRecon.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ODFRecon_outputs(): output_map = dict( - B0=dict(), - DWI=dict(), - ODF=dict(), - entropy=dict(), - max=dict(), + B0=dict( + extensions=None, + ), + DWI=dict( + extensions=None, + ), + ODF=dict( + extensions=None, + ), + entropy=dict( + extensions=None, + ), + max=dict( + extensions=None, + ), ) outputs = ODFRecon.output_spec() diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFTracker.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFTracker.py index 41b2d530f6..2118745f3f 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFTracker.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFTracker.py @@ -1,66 +1,118 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..odf import ODFTracker def test_ODFTracker_inputs(): input_map = dict( - ODF=dict(mandatory=True, ), - angle_threshold=dict(argstr='-at %f', ), - args=dict(argstr='%s', ), 
- disc=dict(argstr='-disc', ), - dsi=dict(argstr='-dsi', ), + ODF=dict( + extensions=None, + mandatory=True, + ), + angle_threshold=dict( + argstr="-at %f", + ), + args=dict( + argstr="%s", + ), + disc=dict( + argstr="-disc", + ), + dsi=dict( + argstr="-dsi", + ), environ=dict( nohash=True, usedefault=True, ), - image_orientation_vectors=dict(argstr='-iop %f', ), + image_orientation_vectors=dict( + argstr="-iop %f", + ), input_data_prefix=dict( - argstr='%s', + argstr="%s", position=0, usedefault=True, ), input_output_type=dict( - argstr='-it %s', + argstr="-it %s", usedefault=True, ), - invert_x=dict(argstr='-ix', ), - invert_y=dict(argstr='-iy', ), - invert_z=dict(argstr='-iz', ), - limit=dict(argstr='-limit %d', ), + invert_x=dict( + argstr="-ix", + ), + invert_y=dict( + argstr="-iy", + ), + invert_z=dict( + argstr="-iz", + ), + limit=dict( + argstr="-limit %d", + ), mask1_file=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, mandatory=True, position=2, ), - mask1_threshold=dict(position=3, ), + mask1_threshold=dict( + position=3, + ), mask2_file=dict( - argstr='-m2 %s', + argstr="-m2 %s", + extensions=None, position=4, ), - mask2_threshold=dict(position=5, ), - max=dict(mandatory=True, ), + mask2_threshold=dict( + position=5, + ), + max=dict( + extensions=None, + mandatory=True, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=1, usedefault=True, ), - random_seed=dict(argstr='-rseed %s', ), - runge_kutta2=dict(argstr='-rk2', ), - slice_order=dict(argstr='-sorder %d', ), - step_length=dict(argstr='-l %f', ), - swap_xy=dict(argstr='-sxy', ), - swap_yz=dict(argstr='-syz', ), - swap_zx=dict(argstr='-szx', ), - voxel_order=dict(argstr='-vorder %s', ), + random_seed=dict( + argstr="-rseed %s", + ), + runge_kutta2=dict( + argstr="-rk2", + ), + slice_order=dict( + argstr="-sorder %d", + ), + step_length=dict( + argstr="-l %f", + ), + swap_xy=dict( + argstr="-sxy", + ), + swap_yz=dict( + argstr="-syz", + ), + swap_zx=dict( + argstr="-szx", + ), + voxel_order=dict( + argstr="-vorder %s", + ), ) inputs = ODFTracker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ODFTracker_outputs(): - output_map = dict(track_file=dict(), ) + output_map = dict( + track_file=dict( + extensions=None, + ), + ) outputs = ODFTracker.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_SplineFilter.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_SplineFilter.py index 8648a1f1f9..65450952a4 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_SplineFilter.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_SplineFilter.py @@ -1,27 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..postproc import SplineFilter def test_SplineFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, usedefault=True, ), step_length=dict( - argstr='%f', + argstr="%f", mandatory=True, position=1, ), track_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, ), @@ -31,8 +34,14 @@ def test_SplineFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_SplineFilter_outputs(): - output_map = dict(smoothed_track_file=dict(), ) + output_map = dict( + smoothed_track_file=dict( + extensions=None, + ), + ) outputs = SplineFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_TrackMerge.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_TrackMerge.py index b004678175..7f668df568 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_TrackMerge.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_TrackMerge.py @@ -1,22 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..postproc import TrackMerge def test_TrackMerge_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, usedefault=True, ), track_files=dict( - argstr='%s...', + argstr="%s...", mandatory=True, position=0, ), @@ -26,8 +28,14 @@ def test_TrackMerge_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackMerge_outputs(): - output_map = dict(track_file=dict(), ) + output_map = dict( + track_file=dict( + extensions=None, + ), + ) outputs = TrackMerge.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/__init__.py b/nipype/interfaces/dipy/__init__.py index 1bd5dcb217..aa74ee46f8 100644 --- a/nipype/interfaces/dipy/__init__.py +++ b/nipype/interfaces/dipy/__init__.py @@ -1,4 +1,5 @@ -# -*- coding: utf-8 -*- +"""DIPY is a computational neuroimaging tool for diffusion MRI.""" + from .tracks import StreamlineTractography, TrackDensityMap from .tensors import TensorMode, DTI from .preprocess import Resample, Denoise diff --git a/nipype/interfaces/dipy/anisotropic_power.py b/nipype/interfaces/dipy/anisotropic_power.py index e28ae2bd19..c222ea8f6a 100644 --- a/nipype/interfaces/dipy/anisotropic_power.py +++ b/nipype/interfaces/dipy/anisotropic_power.py @@ -1,18 +1,15 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - +import numpy as np import nibabel as nb from ... 
import logging from ..base import TraitedSpec, File, isdefined from .base import DipyDiffusionInterface, DipyBaseInterfaceInputSpec -IFLOGGER = logging.getLogger('nipype.interface') +IFLOGGER = logging.getLogger("nipype.interface") class APMQballInputSpec(DipyBaseInterfaceInputSpec): - mask_file = File(exists=True, desc='An optional brain mask') + mask_file = File(exists=True, desc="An optional brain mask") class APMQballOutputSpec(TraitedSpec): @@ -33,6 +30,7 @@ class APMQball(DipyDiffusionInterface): >>> apm.inputs.in_bval = 'bvals' >>> apm.run() # doctest: +SKIP """ + input_spec = APMQballInputSpec output_spec = APMQballOutputSpec @@ -44,31 +42,32 @@ def _run_interface(self, runtime): gtab = self._get_gradient_table() img = nb.load(self.inputs.in_file) - data = img.get_data() + data = np.asanyarray(img.dataobj) affine = img.affine mask = None if isdefined(self.inputs.mask_file): - mask = nb.load(self.inputs.mask_file).get_data() + mask = np.asanyarray(nb.load(self.inputs.mask_file).dataobj) # Fit it model = shm.QballModel(gtab, 8) - sphere = get_sphere('symmetric724') + sphere = get_sphere("symmetric724") peaks = peaks_from_model( model=model, data=data, - relative_peak_threshold=.5, + relative_peak_threshold=0.5, min_separation_angle=25, sphere=sphere, - mask=mask) + mask=mask, + ) apm = shm.anisotropic_power(peaks.shm_coeff) - out_file = self._gen_filename('apm') + out_file = self._gen_filename("apm") nb.Nifti1Image(apm.astype("float32"), affine).to_filename(out_file) - IFLOGGER.info('APM qball image saved as %s', out_file) + IFLOGGER.info("APM qball image saved as %s", out_file) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self._gen_filename('apm') + outputs["out_file"] = self._gen_filename("apm") return outputs diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index 7a9221e3d1..ec19d1fe7b 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -1,29 +1,37 @@ -# -*- coding: utf-8 -*- """ Base interfaces for dipy """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os.path as op +import inspect import numpy as np -from ... 
import logging -from ..base import (traits, File, isdefined, LibraryBaseInterface, - BaseInterfaceInputSpec) +from ..base import ( + traits, + File, + isdefined, + LibraryBaseInterface, + BaseInterfaceInputSpec, + TraitedSpec, +) + +# List of workflows to ignore +SKIP_WORKFLOWS_LIST = ["Workflow", "CombinedWorkflow"] HAVE_DIPY = True + try: import dipy + from dipy.workflows.base import IntrospectiveArgumentParser except ImportError: HAVE_DIPY = False def no_dipy(): - """ Check if dipy is available """ + """Check if dipy is available.""" global HAVE_DIPY return not HAVE_DIPY def dipy_version(): - """ Check dipy version """ + """Check dipy version.""" if no_dipy(): return None @@ -31,30 +39,29 @@ def dipy_version(): class DipyBaseInterface(LibraryBaseInterface): - """ - A base interface for py:mod:`dipy` computations - """ - _pkg = 'dipy' + """A base interface for py:mod:`dipy` computations.""" + + _pkg = "dipy" class DipyBaseInterfaceInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc=('input diffusion data')) - in_bval = File(exists=True, mandatory=True, desc=('input b-values table')) - in_bvec = File(exists=True, mandatory=True, desc=('input b-vectors table')) - b0_thres = traits.Int(700, usedefault=True, desc=('b0 threshold')) - out_prefix = traits.Str(desc=('output prefix for file names')) + in_file = File(exists=True, mandatory=True, desc=("input diffusion data")) + in_bval = File(exists=True, mandatory=True, desc=("input b-values table")) + in_bvec = File(exists=True, mandatory=True, desc=("input b-vectors table")) + b0_thres = traits.Int(700, usedefault=True, desc=("b0 threshold")) + out_prefix = traits.Str(desc=("output prefix for file names")) class DipyDiffusionInterface(DipyBaseInterface): - """ - A base interface for py:mod:`dipy` computations - """ + """A base interface for py:mod:`dipy` computations.""" + input_spec = DipyBaseInterfaceInputSpec def _get_gradient_table(self): bval = np.loadtxt(self.inputs.in_bval) bvec = np.loadtxt(self.inputs.in_bvec).T from dipy.core.gradients import gradient_table + gtab = gradient_table(bval, bvec) gtab.b0_threshold = self.inputs.b0_thres @@ -62,7 +69,7 @@ def _get_gradient_table(self): def _gen_filename(self, name, ext=None): fname, fext = op.splitext(op.basename(self.inputs.in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext @@ -74,4 +81,205 @@ def _gen_filename(self, name, ext=None): if ext is None: ext = fext - return out_prefix + '_' + name + ext + return out_prefix + "_" + name + ext + + +def get_default_args(func): + """Return optional arguments of a function. 
+ + Parameters + ---------- + func: callable + + Returns + ------- + dict + + """ + signature = inspect.signature(func) + return { + k: v.default + for k, v in signature.parameters.items() + if v.default is not inspect.Parameter.empty + } + + +def convert_to_traits_type(dipy_type, is_file=False): + """Convert DIPY type to Traits type.""" + dipy_type = dipy_type.lower() + is_mandatory = bool("optional" not in dipy_type) + if "variable" in dipy_type and "str" in dipy_type: + return traits.ListStr, is_mandatory + elif "variable" in dipy_type and "int" in dipy_type: + return traits.ListInt, is_mandatory + elif "variable" in dipy_type and "float" in dipy_type: + return traits.ListFloat, is_mandatory + elif "variable" in dipy_type and "bool" in dipy_type: + return traits.ListBool, is_mandatory + elif "variable" in dipy_type and "complex" in dipy_type: + return traits.ListComplex, is_mandatory + elif "str" in dipy_type and not is_file: + return traits.Str, is_mandatory + elif "str" in dipy_type and is_file: + return File, is_mandatory + elif "int" in dipy_type: + return traits.Int, is_mandatory + elif "float" in dipy_type: + return traits.Float, is_mandatory + elif "bool" in dipy_type: + return traits.Bool, is_mandatory + elif "complex" in dipy_type: + return traits.Complex, is_mandatory + else: + msg = f"Error during convert_to_traits_type({dipy_type}). Unknown DIPY type." + raise OSError(msg) + + +def create_interface_specs(class_name, params=None, BaseClass=TraitedSpec): + """Create IN/Out interface specifications dynamically. + + Parameters + ---------- + class_name: str + The future class name(e.g, (MyClassInSpec)) + params: list of tuple + dipy argument list + BaseClass: TraitedSpec object + parent class + + Returns + ------- + newclass: object + new nipype interface specification class + + """ + attr = {} + if params is not None: + for p in params: + name, dipy_type, desc = p[0], p[1], p[2] + is_file = bool("files" in name or "out_" in name) + traits_type, is_mandatory = convert_to_traits_type(dipy_type, is_file) + # print(name, dipy_type, desc, is_file, traits_type, is_mandatory) + if BaseClass.__name__ == BaseInterfaceInputSpec.__name__: + if len(p) > 3 and p[3] is not None: + default_value = p[3] + if isinstance(traits_type, traits.List) and not isinstance( + default_value, list + ): + default_value = [default_value] + attr[name] = traits_type( + default_value, + desc=desc[-1], + usedefault=True, + mandatory=is_mandatory, + ) + else: + attr[name] = traits_type(desc=desc[-1], mandatory=is_mandatory) + else: + attr[name] = traits_type( + p[3], desc=desc[-1], exists=True, usedefault=True + ) + + newclass = type(str(class_name), (BaseClass,), attr) + return newclass + + +def dipy_to_nipype_interface(cls_name, dipy_flow, BaseClass=DipyBaseInterface): + """Construct a class in order to respect nipype interface specifications. + + This convenient class factory convert a DIPY Workflow to a nipype + interface. + + Parameters + ---------- + cls_name: string + new class name + dipy_flow: Workflow class type. 
+ It should be any children class of `dipy.workflows.workflow.Workflow` + BaseClass: object + nipype instance object + + Returns + ------- + newclass: object + new nipype interface specification class + + """ + parser = IntrospectiveArgumentParser() + flow = dipy_flow() + parser.add_workflow(flow) + default_values = list(get_default_args(flow.run).values()) + optional_params = [ + args + (val,) for args, val in zip(parser.optional_parameters, default_values) + ] + start = len(parser.optional_parameters) - len(parser.output_parameters) + + output_parameters = [ + args + (val,) + for args, val in zip(parser.output_parameters, default_values[start:]) + ] + input_parameters = parser.positional_parameters + optional_params + + input_spec = create_interface_specs( + f"{cls_name}InputSpec", + input_parameters, + BaseClass=BaseInterfaceInputSpec, + ) + + output_spec = create_interface_specs( + f"{cls_name}OutputSpec", output_parameters, BaseClass=TraitedSpec + ) + + def _run_interface(self, runtime): + flow = dipy_flow() + args = self.inputs.get() + flow.run(**args) + + def _list_outputs(self): + outputs = self._outputs().get() + out_dir = outputs.get("out_dir", ".") + for key, values in outputs.items(): + outputs[key] = op.join(out_dir, values) + + return outputs + + newclass = type( + str(cls_name), + (BaseClass,), + { + "input_spec": input_spec, + "output_spec": output_spec, + "_run_interface": _run_interface, + "_list_outputs:": _list_outputs, + }, + ) + return newclass + + +def get_dipy_workflows(module): + """Search for DIPY workflow class. + + Parameters + ---------- + module : object + module object + + Returns + ------- + l_wkflw : list of tuple + This a list of tuple containing 2 elements: + Workflow name, Workflow class obj + + Examples + -------- + >>> from dipy.workflows import align # doctest: +SKIP + >>> get_dipy_workflows(align) # doctest: +SKIP + + """ + return [ + (m, obj) + for m, obj in inspect.getmembers(module) + if inspect.isclass(obj) + and issubclass(obj, module.Workflow) + and m not in SKIP_WORKFLOWS_LIST + ] diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index 285464847e..eb44a9bcef 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -1,38 +1,57 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - import os.path as op import nibabel as nb import numpy as np -from ...utils import NUMPY_MMAP - +from looseversion import LooseVersion from ... import logging -from ..base import (traits, TraitedSpec, File, isdefined) -from .base import DipyBaseInterface +from ..base import traits, Tuple, TraitedSpec, File, isdefined +from .base import ( + HAVE_DIPY, + dipy_version, + dipy_to_nipype_interface, + get_dipy_workflows, + DipyBaseInterface, +) + +IFLOGGER = logging.getLogger("nipype.interface") + +if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"): + from dipy.workflows import denoise, mask -IFLOGGER = logging.getLogger('nipype.interface') + l_wkflw = get_dipy_workflows(denoise) + get_dipy_workflows(mask) + for name, obj in l_wkflw: + new_name = name.replace("Flow", "") + globals()[new_name] = dipy_to_nipype_interface(new_name, obj) + del l_wkflw + +else: + IFLOGGER.info( + "We advise you to upgrade DIPY version. 
This upgrade will" + " open access to more function" + ) class ResampleInputSpec(TraitedSpec): in_file = File( - exists=True, - mandatory=True, - desc='The input 4D diffusion-weighted image file') - vox_size = traits.Tuple( + exists=True, mandatory=True, desc="The input 4D diffusion-weighted image file" + ) + vox_size = Tuple( traits.Float, traits.Float, traits.Float, - desc=('specify the new voxel zooms. If no vox_size' - ' is set, then isotropic regridding will ' - 'be performed, with spacing equal to the ' - 'smallest current zoom.')) + desc=( + "specify the new voxel zooms. If no vox_size" + " is set, then isotropic regridding will " + "be performed, with spacing equal to the " + "smallest current zoom." + ), + ) interp = traits.Int( 1, mandatory=True, usedefault=True, - desc=('order of the interpolator (0 = nearest, 1 = linear, etc.')) + desc=("order of the interpolator (0 = nearest, 1 = linear, etc."), + ) class ResampleOutputSpec(TraitedSpec): @@ -53,6 +72,7 @@ class Resample(DipyBaseInterface): >>> reslice.inputs.in_file = 'diffusion.nii' >>> reslice.run() # doctest: +SKIP """ + input_spec = ResampleInputSpec output_spec = ResampleOutputSpec @@ -65,50 +85,47 @@ def _run_interface(self, runtime): out_file = op.abspath(self._gen_outfilename()) resample_proxy( - self.inputs.in_file, - order=order, - new_zooms=vox_size, - out_file=out_file) + self.inputs.in_file, order=order, new_zooms=vox_size, out_file=out_file + ) - IFLOGGER.info('Resliced image saved as %s', out_file) + IFLOGGER.info("Resliced image saved as %s", out_file) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = op.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): fname, fext = op.splitext(op.basename(self.inputs.in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - return op.abspath('%s_reslice%s' % (fname, fext)) + return op.abspath(f"{fname}_reslice{fext}") class DenoiseInputSpec(TraitedSpec): in_file = File( - exists=True, - mandatory=True, - desc='The input 4D diffusion-weighted image file') - in_mask = File(exists=True, desc='brain mask') + exists=True, mandatory=True, desc="The input 4D diffusion-weighted image file" + ) + in_mask = File(exists=True, desc="brain mask") noise_model = traits.Enum( - 'rician', - 'gaussian', + "rician", + "gaussian", mandatory=True, usedefault=True, - desc=('noise distribution model')) + desc=("noise distribution model"), + ) signal_mask = File( - desc=('mask in which the mean signal ' - 'will be computed'), - exists=True) + desc=("mask in which the mean signal will be computed"), exists=True + ) noise_mask = File( - desc=('mask in which the standard deviation of noise ' - 'will be computed'), - exists=True) - patch_radius = traits.Int(1, usedefault=True, desc='patch radius') - block_radius = traits.Int(5, usedefault=True, desc='block_radius') - snr = traits.Float(desc='manually set an SNR') + desc=("mask in which the standard deviation of noise will be computed"), + exists=True, + ) + patch_radius = traits.Int(1, usedefault=True, desc="patch radius") + block_radius = traits.Int(5, usedefault=True, desc="block_radius") + snr = traits.Float(desc="manually set an SNR") class DenoiseOutputSpec(TraitedSpec): @@ -135,23 +152,23 @@ class Denoise(DipyBaseInterface): >>> denoise.inputs.in_file = 'diffusion.nii' >>> denoise.run() # doctest: +SKIP """ + input_spec = DenoiseInputSpec output_spec = DenoiseOutputSpec 
def _run_interface(self, runtime): out_file = op.abspath(self._gen_outfilename()) - settings = dict( - mask=None, rician=(self.inputs.noise_model == 'rician')) + settings = dict(mask=None, rician=(self.inputs.noise_model == "rician")) if isdefined(self.inputs.in_mask): - settings['mask'] = nb.load(self.inputs.in_mask).get_data() + settings["mask"] = np.asanyarray(nb.load(self.inputs.in_mask).dataobj) if isdefined(self.inputs.patch_radius): - settings['patch_radius'] = self.inputs.patch_radius + settings["patch_radius"] = self.inputs.patch_radius if isdefined(self.inputs.block_radius): - settings['block_radius'] = self.inputs.block_radius + settings["block_radius"] = self.inputs.block_radius snr = None if isdefined(self.inputs.snr): @@ -159,10 +176,10 @@ def _run_interface(self, runtime): signal_mask = None if isdefined(self.inputs.signal_mask): - signal_mask = nb.load(self.inputs.signal_mask).get_data() + signal_mask = np.asanyarray(nb.load(self.inputs.signal_mask).dataobj) noise_mask = None if isdefined(self.inputs.noise_mask): - noise_mask = nb.load(self.inputs.noise_mask).get_data() + noise_mask = np.asanyarray(nb.load(self.inputs.noise_mask).dataobj) _, s = nlmeans_proxy( self.inputs.in_file, @@ -170,22 +187,22 @@ def _run_interface(self, runtime): snr=snr, smask=signal_mask, nmask=noise_mask, - out_file=out_file) - IFLOGGER.info('Denoised image saved as %s, estimated SNR=%s', out_file, - str(s)) + out_file=out_file, + ) + IFLOGGER.info("Denoised image saved as %s, estimated SNR=%s", out_file, str(s)) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = op.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): fname, fext = op.splitext(op.basename(self.inputs.in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - return op.abspath('%s_denoise%s' % (fname, fext)) + return op.abspath(f"{fname}_denoise{fext}") def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): @@ -196,20 +213,20 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): if out_file is None: fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - out_file = op.abspath('./%s_reslice%s' % (fname, fext)) + out_file = op.abspath(f"./{fname}_reslice{fext}") - img = nb.load(in_file, mmap=NUMPY_MMAP) + img = nb.load(in_file) hdr = img.header.copy() - data = img.get_data().astype(np.float32) + data = img.get_fdata(dtype=np.float32) affine = img.affine im_zooms = hdr.get_zooms()[:3] if new_zooms is None: minzoom = np.array(im_zooms).min() - new_zooms = tuple(np.ones((3, )) * minzoom) + new_zooms = tuple(np.ones((3,)) * minzoom) if np.all(im_zooms == new_zooms): return in_file @@ -219,18 +236,14 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): tmp_zooms[:3] = new_zooms[0] hdr.set_zooms(tuple(tmp_zooms)) hdr.set_data_shape(data2.shape) - hdr.set_xyzt_units('mm') - nb.Nifti1Image(data2.astype(hdr.get_data_dtype()), affine2, - hdr).to_filename(out_file) + hdr.set_xyzt_units("mm") + nb.Nifti1Image(data2.astype(hdr.get_data_dtype()), affine2, hdr).to_filename( + out_file + ) return out_file, new_zooms -def nlmeans_proxy(in_file, - settings, - snr=None, - smask=None, - nmask=None, - out_file=None): +def nlmeans_proxy(in_file, settings, snr=None, smask=None, nmask=None, out_file=None): """ Uses non-local means to denoise 4D 
datasets """ @@ -240,14 +253,14 @@ def nlmeans_proxy(in_file, if out_file is None: fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - out_file = op.abspath('./%s_denoise%s' % (fname, fext)) + out_file = op.abspath(f"./{fname}_denoise{fext}") - img = nb.load(in_file, mmap=NUMPY_MMAP) + img = nb.load(in_file) hdr = img.header - data = img.get_data() + data = img.get_fdata() aff = img.affine if data.ndim < 4: @@ -256,25 +269,24 @@ def nlmeans_proxy(in_file, data = np.nan_to_num(data) if data.max() < 1.0e-4: - raise RuntimeError('There is no signal in the image') + raise RuntimeError("There is no signal in the image") df = 1.0 if data.max() < 1000.0: - df = 1000. / data.max() + df = 1000.0 / data.max() data *= df b0 = data[..., 0] if smask is None: smask = np.zeros_like(b0) - smask[b0 > np.percentile(b0, 85.)] = 1 + smask[b0 > np.percentile(b0, 85.0)] = 1 - smask = binary_erosion( - smask.astype(np.uint8), iterations=2).astype(np.uint8) + smask = binary_erosion(smask.astype(np.uint8), iterations=2).astype(np.uint8) if nmask is None: nmask = np.ones_like(b0, dtype=np.uint8) - bmask = settings['mask'] + bmask = settings["mask"] if bmask is None: bmask = np.zeros_like(b0) bmask[b0 > np.percentile(b0[b0 > 0], 10)] = 1 @@ -313,6 +325,5 @@ def nlmeans_proxy(in_file, den = np.squeeze(den) den /= df - nb.Nifti1Image(den.astype(hdr.get_data_dtype()), aff, - hdr).to_filename(out_file) + nb.Nifti1Image(den.astype(hdr.get_data_dtype()), aff, hdr).to_filename(out_file) return out_file, snr diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index 7879553609..084fd7c501 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -1,45 +1,57 @@ -# -*- coding: utf-8 -*- """ Interfaces to the reconstruction algorithms in dipy """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from future import standard_library -standard_library.install_aliases() -from builtins import str, open import os.path as op import numpy as np import nibabel as nb +from looseversion import LooseVersion from ... import logging from ..base import TraitedSpec, File, traits, isdefined -from .base import DipyDiffusionInterface, DipyBaseInterfaceInputSpec +from .base import ( + DipyDiffusionInterface, + DipyBaseInterfaceInputSpec, + HAVE_DIPY, + dipy_version, + dipy_to_nipype_interface, + get_dipy_workflows, +) -IFLOGGER = logging.getLogger('nipype.interface') + +IFLOGGER = logging.getLogger("nipype.interface") + +if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"): + from dipy.workflows import reconst + + l_wkflw = get_dipy_workflows(reconst) + for name, obj in l_wkflw: + new_name = name.replace("Flow", "") + globals()[new_name] = dipy_to_nipype_interface(new_name, obj) + del l_wkflw + +else: + IFLOGGER.info( + "We advise you to upgrade DIPY version. 
This upgrade will" + " open access to more models" + ) class RESTOREInputSpec(DipyBaseInterfaceInputSpec): - in_mask = File(exists=True, desc=('input mask in which compute tensors')) - noise_mask = File( - exists=True, desc=('input mask in which compute noise variance')) + in_mask = File(exists=True, desc=("input mask in which compute tensors")) + noise_mask = File(exists=True, desc=("input mask in which compute noise variance")) class RESTOREOutputSpec(TraitedSpec): - fa = File(desc='output fractional anisotropy (FA) map computed from ' - 'the fitted DTI') - md = File(desc='output mean diffusivity (MD) map computed from the ' - 'fitted DTI') - rd = File(desc='output radial diffusivity (RD) map computed from ' - 'the fitted DTI') - mode = File(desc=('output mode (MO) map computed from the fitted DTI')) - trace = File( - desc=('output the tensor trace map computed from the ' - 'fitted DTI')) - evals = File(desc=('output the eigenvalues of the fitted DTI')) - evecs = File(desc=('output the eigenvectors of the fitted DTI')) + fa = File(desc="output fractional anisotropy (FA) map computed from the fitted DTI") + md = File(desc="output mean diffusivity (MD) map computed from the fitted DTI") + rd = File(desc="output radial diffusivity (RD) map computed from the fitted DTI") + mode = File(desc=("output mode (MO) map computed from the fitted DTI")) + trace = File(desc=("output the tensor trace map computed from the fitted DTI")) + evals = File(desc=("output the eigenvalues of the fitted DTI")) + evecs = File(desc=("output the eigenvectors of the fitted DTI")) class RESTORE(DipyDiffusionInterface): @@ -66,34 +78,36 @@ class RESTORE(DipyDiffusionInterface): """ + input_spec = RESTOREInputSpec output_spec = RESTOREOutputSpec def _run_interface(self, runtime): from scipy.special import gamma from dipy.reconst.dti import TensorModel - import gc img = nb.load(self.inputs.in_file) hdr = img.header.copy() affine = img.affine - data = img.get_data() + data = img.get_fdata() gtab = self._get_gradient_table() if isdefined(self.inputs.in_mask): - msk = nb.load(self.inputs.in_mask).get_data().astype(np.uint8) + msk = np.asanyarray(nb.load(self.inputs.in_mask).dataobj).astype(np.uint8) else: msk = np.ones(data.shape[:3], dtype=np.uint8) try_b0 = True if isdefined(self.inputs.noise_mask): - noise_msk = nb.load(self.inputs.noise_mask).get_data().reshape(-1) + noise_msk = ( + nb.load(self.inputs.noise_mask).get_fdata(dtype=np.float32).reshape(-1) + ) noise_msk[noise_msk > 0.5] = 1 noise_msk[noise_msk < 1.0] = 0 noise_msk = noise_msk.astype(np.uint8) try_b0 = False elif np.all(data[msk == 0, 0] == 0): - IFLOGGER.info('Input data are masked.') + IFLOGGER.info("Input data are masked.") noise_msk = msk.reshape(-1).astype(np.uint8) else: noise_msk = (1 - msk).reshape(-1).astype(np.uint8) @@ -102,35 +116,38 @@ def _run_interface(self, runtime): dsample = data.reshape(-1, data.shape[-1]) if try_b0 and (nb0 > 1): - noise_data = dsample.take( - np.where(gtab.b0s_mask), axis=-1)[noise_msk == 0, ...] + noise_data = dsample.take(np.where(gtab.b0s_mask), axis=-1)[ + noise_msk == 0, ... + ] n = nb0 else: nodiff = np.where(~gtab.b0s_mask) nodiffidx = nodiff[0].tolist() - n = 20 if len(nodiffidx) >= 20 else len(nodiffidx) + n = min(20, len(nodiffidx)) idxs = np.random.choice(nodiffidx, size=n, replace=False) noise_data = dsample.take(idxs, axis=-1)[noise_msk == 1, ...] # Estimate sigma required by RESTORE mean_std = np.median(noise_data.std(-1)) try: - bias = (1. - np.sqrt(2. / (n - 1)) * (gamma(n / 2.) 
/ gamma( - (n - 1) / 2.))) + bias = 1.0 - np.sqrt(2.0 / (n - 1)) * ( + gamma(n / 2.0) / gamma((n - 1) / 2.0) + ) except: - bias = .0 - pass + bias = 0.0 sigma = mean_std * (1 + bias) if sigma == 0: - IFLOGGER.warning('Noise std is 0.0, looks like data was masked and ' - 'noise cannot be estimated correctly. Using default ' - 'tensor model instead of RESTORE.') + IFLOGGER.warning( + "Noise std is 0.0, looks like data was masked and " + "noise cannot be estimated correctly. Using default " + "tensor model instead of RESTORE." + ) dti = TensorModel(gtab) else: - IFLOGGER.info('Performing RESTORE with noise std=%.4f.', sigma) - dti = TensorModel(gtab, fit_method='RESTORE', sigma=sigma) + IFLOGGER.info("Performing RESTORE with noise std=%.4f.", sigma) + dti = TensorModel(gtab, fit_method="RESTORE", sigma=sigma) try: fit_restore = dti.fit(data, msk) @@ -139,13 +156,14 @@ def _run_interface(self, runtime): fit_restore = dti.fit(data, msk) hdr.set_data_dtype(np.float32) - hdr['data_type'] = 16 + hdr["data_type"] = 16 for k in self._outputs().get(): scalar = getattr(fit_restore, k) hdr.set_data_shape(np.shape(scalar)) nb.Nifti1Image(scalar.astype(np.float32), affine, hdr).to_filename( - self._gen_filename(k)) + self._gen_filename(k) + ) return runtime @@ -157,25 +175,25 @@ def _list_outputs(self): class EstimateResponseSHInputSpec(DipyBaseInterfaceInputSpec): - in_evals = File( - exists=True, mandatory=True, desc=('input eigenvalues file')) - in_mask = File( - exists=True, desc=('input mask in which we find single fibers')) - fa_thresh = traits.Float(0.7, usedefault=True, desc=('FA threshold')) + in_evals = File(exists=True, mandatory=True, desc=("input eigenvalues file")) + in_mask = File(exists=True, desc=("input mask in which we find single fibers")) + fa_thresh = traits.Float(0.7, usedefault=True, desc=("FA threshold")) roi_radius = traits.Int( - 10, usedefault=True, desc=('ROI radius to be used in auto_response')) + 10, usedefault=True, desc=("ROI radius to be used in auto_response") + ) auto = traits.Bool( - xor=['recursive'], desc='use the auto_response estimator from dipy') + xor=["recursive"], desc="use the auto_response estimator from dipy" + ) recursive = traits.Bool( - xor=['auto'], desc='use the recursive response estimator from dipy') - response = File( - 'response.txt', usedefault=True, desc=('the output response file')) - out_mask = File('wm_mask.nii.gz', usedefault=True, desc='computed wm mask') + xor=["auto"], desc="use the recursive response estimator from dipy" + ) + response = File("response.txt", usedefault=True, desc=("the output response file")) + out_mask = File("wm_mask.nii.gz", usedefault=True, desc="computed wm mask") class EstimateResponseSHOutputSpec(TraitedSpec): - response = File(exists=True, desc=('the response file')) - out_mask = File(exists=True, desc=('output wm mask')) + response = File(exists=True, desc=("the response file")) + out_mask = File(exists=True, desc=("output wm mask")) class EstimateResponseSH(DipyDiffusionInterface): @@ -198,11 +216,11 @@ class EstimateResponseSH(DipyDiffusionInterface): """ + input_spec = EstimateResponseSHInputSpec output_spec = EstimateResponseSHOutputSpec def _run_interface(self, runtime): - from dipy.core.gradients import GradientTable from dipy.reconst.dti import fractional_anisotropy, mean_diffusivity from dipy.reconst.csdeconv import recursive_response, auto_response @@ -211,16 +229,16 @@ def _run_interface(self, runtime): affine = img.affine if isdefined(self.inputs.in_mask): - msk = 
nb.load(self.inputs.in_mask).get_data() + msk = np.asanyarray(nb.load(self.inputs.in_mask).dataobj) msk[msk > 0] = 1 msk[msk < 0] = 0 else: msk = np.ones(imref.shape) - data = img.get_data().astype(np.float32) + data = img.get_fdata(dtype=np.float32) gtab = self._get_gradient_table() - evals = np.nan_to_num(nb.load(self.inputs.in_evals).get_data()) + evals = np.nan_to_num(nb.load(self.inputs.in_evals).dataobj) FA = np.nan_to_num(fractional_anisotropy(evals)) * msk indices = np.where(FA > self.inputs.fa_thresh) S0s = data[indices][:, np.nonzero(gtab.b0s_mask)[0]] @@ -231,13 +249,15 @@ def _run_interface(self, runtime): gtab, data, roi_radius=self.inputs.roi_radius, - fa_thr=self.inputs.fa_thresh) + fa_thr=self.inputs.fa_thresh, + ) response = response[0].tolist() + [S0] elif self.inputs.recursive: MD = np.nan_to_num(mean_diffusivity(evals)) * msk - indices = np.logical_or(FA >= 0.4, - (np.logical_and(FA >= 0.15, MD >= 0.0011))) - data = nb.load(self.inputs.in_file).get_data() + indices = np.logical_or( + FA >= 0.4, (np.logical_and(FA >= 0.15, MD >= 0.0011)) + ) + data = np.asanyarray(nb.load(self.inputs.in_file).dataobj) response = recursive_response( gtab, data, @@ -248,7 +268,8 @@ def _run_interface(self, runtime): init_trace=0.0021, iter=8, convergence=0.001, - parallel=True) + parallel=True, + ) ratio = abs(response[1] / response[0]) else: lambdas = evals[indices] @@ -258,42 +279,44 @@ def _run_interface(self, runtime): ratio = abs(response[1] / response[0]) if ratio > 0.25: - IFLOGGER.warning('Estimated response is not prolate enough. ' - 'Ratio=%0.3f.', ratio) - elif ratio < 1.e-5 or np.any(np.isnan(response)): - response = np.array([1.8e-3, 3.6e-4, 3.6e-4, S0]) IFLOGGER.warning( - 'Estimated response is not valid, using a default one') + "Estimated response is not prolate enough. 
Ratio=%0.3f.", ratio + ) + elif ratio < 1.0e-5 or np.any(np.isnan(response)): + response = np.array([1.8e-3, 3.6e-4, 3.6e-4, S0]) + IFLOGGER.warning("Estimated response is not valid, using a default one") else: - IFLOGGER.info('Estimated response: %s', str(response[:3])) + IFLOGGER.info("Estimated response: %s", str(response[:3])) np.savetxt(op.abspath(self.inputs.response), response) wm_mask = np.zeros_like(FA) wm_mask[indices] = 1 nb.Nifti1Image(wm_mask.astype(np.uint8), affine, None).to_filename( - op.abspath(self.inputs.out_mask)) + op.abspath(self.inputs.out_mask) + ) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['response'] = op.abspath(self.inputs.response) - outputs['out_mask'] = op.abspath(self.inputs.out_mask) + outputs["response"] = op.abspath(self.inputs.response) + outputs["out_mask"] = op.abspath(self.inputs.out_mask) return outputs class CSDInputSpec(DipyBaseInterfaceInputSpec): - in_mask = File(exists=True, desc=('input mask in which compute tensors')) - response = File(exists=True, desc=('single fiber estimated response')) + in_mask = File(exists=True, desc=("input mask in which compute tensors")) + response = File(exists=True, desc=("single fiber estimated response")) sh_order = traits.Int( - 8, usedefault=True, desc=('maximal shperical harmonics order')) - save_fods = traits.Bool(True, usedefault=True, desc=('save fODFs in file')) - out_fods = File(desc=('fODFs output file name')) + 8, usedefault=True, desc=("maximal shperical harmonics order") + ) + save_fods = traits.Bool(True, usedefault=True, desc=("save fODFs in file")) + out_fods = File(desc=("fODFs output file name")) class CSDOutputSpec(TraitedSpec): - model = File(desc='Python pickled object of the CSD model fitted.') - out_fods = File(desc=('fODFs output file name')) + model = File(desc="Python pickled object of the CSD model fitted.") + out_fods = File(desc=("fODFs output file name")) class CSD(DipyDiffusionInterface): @@ -317,12 +340,14 @@ class CSD(DipyDiffusionInterface): >>> csd.inputs.in_bvec = 'bvecs' >>> res = csd.run() # doctest: +SKIP """ + input_spec = CSDInputSpec output_spec = CSDOutputSpec def _run_interface(self, runtime): from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel from dipy.data import get_sphere + # import marshal as pickle import pickle as pickle import gzip @@ -331,11 +356,11 @@ def _run_interface(self, runtime): imref = nb.four_to_three(img)[0] if isdefined(self.inputs.in_mask): - msk = nb.load(self.inputs.in_mask).get_data() + msk = np.asanyarray(nb.load(self.inputs.in_mask).dataobj) else: msk = np.ones(imref.shape) - data = img.get_data().astype(np.float32) + data = img.get_fdata(dtype=np.float32) gtab = self._get_gradient_table() resp_file = np.loadtxt(self.inputs.response) @@ -344,30 +369,33 @@ def _run_interface(self, runtime): ratio = response[0][1] / response[0][0] if abs(ratio - 0.2) > 0.1: - IFLOGGER.warning('Estimated response is not prolate enough. ' - 'Ratio=%0.3f.', ratio) + IFLOGGER.warning( + "Estimated response is not prolate enough. 
Ratio=%0.3f.", ratio + ) csd_model = ConstrainedSphericalDeconvModel( - gtab, response, sh_order=self.inputs.sh_order) + gtab, response, sh_order=self.inputs.sh_order + ) - IFLOGGER.info('Fitting CSD model') + IFLOGGER.info("Fitting CSD model") csd_fit = csd_model.fit(data, msk) - f = gzip.open(self._gen_filename('csdmodel', ext='.pklz'), 'wb') + f = gzip.open(self._gen_filename("csdmodel", ext=".pklz"), "wb") pickle.dump(csd_model, f, -1) f.close() if self.inputs.save_fods: - sphere = get_sphere('symmetric724') + sphere = get_sphere("symmetric724") fods = csd_fit.odf(sphere) - nb.Nifti1Image(fods.astype(np.float32), img.affine, - None).to_filename(self._gen_filename('fods')) + nb.Nifti1Image(fods.astype(np.float32), img.affine, None).to_filename( + self._gen_filename("fods") + ) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['model'] = self._gen_filename('csdmodel', ext='.pklz') + outputs["model"] = self._gen_filename("csdmodel", ext=".pklz") if self.inputs.save_fods: - outputs['out_fods'] = self._gen_filename('fods') + outputs["out_fods"] = self._gen_filename("fods") return outputs diff --git a/nipype/interfaces/dipy/registration.py b/nipype/interfaces/dipy/registration.py new file mode 100644 index 0000000000..b9b818a66a --- /dev/null +++ b/nipype/interfaces/dipy/registration.py @@ -0,0 +1,21 @@ +from looseversion import LooseVersion +from ... import logging +from .base import HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows + +IFLOGGER = logging.getLogger("nipype.interface") + + +if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"): + from dipy.workflows import align + + l_wkflw = get_dipy_workflows(align) + for name, obj in l_wkflw: + new_name = name.replace("Flow", "") + globals()[new_name] = dipy_to_nipype_interface(new_name, obj) + del l_wkflw + +else: + IFLOGGER.info( + "We advise you to upgrade DIPY version. This upgrade will" + " open access to more function" + ) diff --git a/nipype/interfaces/dipy/setup.py b/nipype/interfaces/dipy/setup.py index e6c8f000b2..38a7622894 100644 --- a/nipype/interfaces/dipy/setup.py +++ b/nipype/interfaces/dipy/setup.py @@ -1,19 +1,17 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) -def configuration(parent_package='', top_path=None): +def configuration(parent_package="", top_path=None): from numpy.distutils.misc_util import Configuration - config = Configuration('dipy', parent_package, top_path) + config = Configuration("dipy", parent_package, top_path) # config.add_data_dir('tests') return config -if __name__ == '__main__': +if __name__ == "__main__": from numpy.distutils.core import setup - setup(**configuration(top_path='').todict()) + + setup(**configuration(top_path="").todict()) diff --git a/nipype/interfaces/dipy/simulate.py b/nipype/interfaces/dipy/simulate.py index b50b150f24..6959e0a31d 100644 --- a/nipype/interfaces/dipy/simulate.py +++ b/nipype/interfaces/dipy/simulate.py @@ -1,82 +1,87 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from multiprocessing import (Pool, cpu_count) +from multiprocessing import Pool, cpu_count import os.path as op -from builtins import range import numpy as np import nibabel as nb from ... 
import logging
-from ...utils import NUMPY_MMAP
-from ..base import (traits, TraitedSpec, BaseInterfaceInputSpec, File,
-                    InputMultiPath, isdefined)
+from ..base import (
+    traits,
+    Tuple,
+    TraitedSpec,
+    BaseInterfaceInputSpec,
+    File,
+    InputMultiPath,
+    isdefined,
+)
 from .base import DipyBaseInterface
 
-IFLOGGER = logging.getLogger('nipype.interface')
+
+IFLOGGER = logging.getLogger("nipype.interface")
 
 
 class SimulateMultiTensorInputSpec(BaseInterfaceInputSpec):
     in_dirs = InputMultiPath(
-        File(exists=True),
-        mandatory=True,
-        desc='list of fibers (principal directions)')
+        File(exists=True), mandatory=True, desc="list of fibers (principal directions)"
+    )
     in_frac = InputMultiPath(
-        File(exists=True),
-        mandatory=True,
-        desc=('volume fraction of each fiber'))
+        File(exists=True), mandatory=True, desc=("volume fraction of each fiber")
+    )
     in_vfms = InputMultiPath(
         File(exists=True),
         mandatory=True,
-        desc=('volume fractions of isotropic '
-              'compartiments'))
-    in_mask = File(exists=True, desc='mask to simulate data')
+        desc=("volume fractions of isotropic compartments"),
+    )
+    in_mask = File(exists=True, desc="mask to simulate data")
 
     diff_iso = traits.List(
         [3000e-6, 960e-6, 680e-6],
         traits.Float,
         usedefault=True,
-        desc='Diffusivity of isotropic compartments')
-    diff_sf = traits.Tuple(
+        desc="Diffusivity of isotropic compartments",
+    )
+    diff_sf = Tuple(
         (1700e-6, 200e-6, 200e-6),
         traits.Float,
         traits.Float,
         traits.Float,
         usedefault=True,
-        desc='Single fiber tensor')
-
-    n_proc = traits.Int(0, usedefault=True, desc='number of processes')
-    baseline = File(exists=True, mandatory=True, desc='baseline T2 signal')
-    gradients = File(exists=True, desc='gradients file')
-    in_bvec = File(exists=True, desc='input bvecs file')
-    in_bval = File(exists=True, desc='input bvals file')
+        desc="Single fiber tensor",
+    )
+
+    n_proc = traits.Int(0, usedefault=True, desc="number of processes")
+    baseline = File(exists=True, mandatory=True, desc="baseline T2 signal")
+    gradients = File(exists=True, desc="gradients file")
+    in_bvec = File(exists=True, desc="input bvecs file")
+    in_bval = File(exists=True, desc="input bvals file")
 
     num_dirs = traits.Int(
         32,
         usedefault=True,
-        desc=('number of gradient directions (when table '
-              'is automatically generated)'))
+        desc=("number of gradient directions (when table is automatically generated)"),
+    )
     bvalues = traits.List(
         traits.Int,
         value=[1000, 3000],
         usedefault=True,
-        desc=('list of b-values (when table '
-              'is automatically generated)'))
+        desc=("list of b-values (when table is automatically generated)"),
+    )
     out_file = File(
-        'sim_dwi.nii.gz',
+        "sim_dwi.nii.gz",
         usedefault=True,
-        desc='output file with fractions to be simluated')
+        desc="output file with fractions to be simulated",
+    )
     out_mask = File(
-        'sim_msk.nii.gz', usedefault=True, desc='file with the mask simulated')
-    out_bvec = File('bvec.sim', usedefault=True, desc='simulated b vectors')
-    out_bval = File('bval.sim', usedefault=True, desc='simulated b values')
-    snr = traits.Int(0, usedefault=True, desc='signal-to-noise ratio (dB)')
+        "sim_msk.nii.gz", usedefault=True, desc="file with the mask simulated"
+    )
+    out_bvec = File("bvec.sim", usedefault=True, desc="simulated b vectors")
+    out_bval = File("bval.sim", usedefault=True, desc="simulated b values")
+    snr = traits.Int(0, usedefault=True, desc="signal-to-noise ratio (dB)")
 
 
 class SimulateMultiTensorOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='simulated DWIs')
-    out_mask = File(exists=True, desc='mask file')
-
out_bvec = File(exists=True, desc='simulated b vectors') - out_bval = File(exists=True, desc='simulated b values') + out_file = File(exists=True, desc="simulated DWIs") + out_mask = File(exists=True, desc="mask file") + out_bvec = File(exists=True, desc="simulated b vectors") + out_bval = File(exists=True, desc="simulated b values") class SimulateMultiTensor(DipyBaseInterface): @@ -98,6 +103,7 @@ class SimulateMultiTensor(DipyBaseInterface): >>> sim.inputs.in_bval = 'bvals' >>> sim.run() # doctest: +SKIP """ + input_spec = SimulateMultiTensorInputSpec output_spec = SimulateMultiTensorOutputSpec @@ -111,8 +117,7 @@ def _run_interface(self, runtime): bvecs = np.loadtxt(self.inputs.in_bvec).T gtab = gradient_table(bvals, bvecs) else: - gtab = _generate_gradients(self.inputs.num_dirs, - self.inputs.bvalues) + gtab = _generate_gradients(self.inputs.num_dirs, self.inputs.bvalues) ndirs = len(gtab.bvals) np.savetxt(op.abspath(self.inputs.out_bvec), gtab.bvecs.T) np.savetxt(op.abspath(self.inputs.out_bval), gtab.bvals) @@ -126,22 +131,20 @@ def _run_interface(self, runtime): # Check and load sticks and their volume fractions nsticks = len(self.inputs.in_dirs) if len(self.inputs.in_frac) != nsticks: - raise RuntimeError(('Number of sticks and their volume fractions' - ' must match.')) + raise RuntimeError( + "Number of sticks and their volume fractions must match." + ) # Volume fractions of isotropic compartments nballs = len(self.inputs.in_vfms) - vfs = np.squeeze( - nb.concat_images([ - nb.load(f, mmap=NUMPY_MMAP) for f in self.inputs.in_vfms - ]).get_data()) + vfs = np.squeeze(nb.concat_images(self.inputs.in_vfms).dataobj) if nballs == 1: vfs = vfs[..., np.newaxis] total_vf = np.sum(vfs, axis=3) # Generate a mask if isdefined(self.inputs.in_mask): - msk = nb.load(self.inputs.in_mask).get_data() + msk = np.asanyarray(nb.load(self.inputs.in_mask).dataobj) msk[msk > 0.0] = 1.0 msk[msk < 1.0] = 0.0 else: @@ -152,10 +155,9 @@ def _run_interface(self, runtime): nvox = len(msk[msk > 0]) # Fiber fractions - ffsim = nb.concat_images( - [nb.load(f, mmap=NUMPY_MMAP) for f in self.inputs.in_frac]) - ffs = np.nan_to_num(np.squeeze(ffsim.get_data())) # fiber fractions - ffs = np.clip(ffs, 0., 1.) + ffsim = nb.concat_images(self.inputs.in_frac) + ffs = np.nan_to_num(np.squeeze(ffsim.dataobj)) # fiber fractions + ffs = np.clip(ffs, 0.0, 1.0) if nsticks == 1: ffs = ffs[..., np.newaxis] @@ -175,19 +177,19 @@ def _run_interface(self, runtime): for i in range(vfs.shape[-1]): vfs[..., i] -= total_ff - vfs = np.clip(vfs, 0., 1.) 
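The fraction bookkeeping in this hunk enforces one invariant: per voxel, the stick fractions plus the corrected isotropic fractions partition the volume. A one-voxel illustration with made-up numbers:

    import numpy as np

    ffs = np.array([0.5, 0.3])  # two sticks' volume fractions
    vfs = np.array([1.0])       # isotropic fraction before correction
    vfs = np.clip(vfs - ffs.sum(), 0.0, 1.0)  # subtract the total fiber fraction
    fractions = np.concatenate([ffs, vfs])
    assert np.isclose(fractions.sum(), 1.0)   # compartments partition the voxel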
+ vfs = np.clip(vfs, 0.0, 1.0) fractions = np.concatenate((ffs, vfs), axis=3) - nb.Nifti1Image(fractions, aff, None).to_filename('fractions.nii.gz') - nb.Nifti1Image(np.sum(fractions, axis=3), aff, - None).to_filename('total_vf.nii.gz') + nb.Nifti1Image(fractions, aff, None).to_filename("fractions.nii.gz") + nb.Nifti1Image(np.sum(fractions, axis=3), aff, None).to_filename( + "total_vf.nii.gz" + ) mhdr = hdr.copy() mhdr.set_data_dtype(np.uint8) - mhdr.set_xyzt_units('mm', 'sec') - nb.Nifti1Image(msk, aff, mhdr).to_filename( - op.abspath(self.inputs.out_mask)) + mhdr.set_xyzt_units("mm", "sec") + nb.Nifti1Image(msk, aff, mhdr).to_filename(op.abspath(self.inputs.out_mask)) # Initialize stack of args fracs = fractions[msk > 0] @@ -196,7 +198,7 @@ def _run_interface(self, runtime): dirs = None for i in range(nsticks): f = self.inputs.in_dirs[i] - fd = np.nan_to_num(nb.load(f, mmap=NUMPY_MMAP).get_data()) + fd = np.nan_to_num(nb.load(f).dataobj) w = np.linalg.norm(fd, axis=3)[..., np.newaxis] w[w < np.finfo(float).eps] = 1.0 fd /= w @@ -209,7 +211,7 @@ def _run_interface(self, runtime): for d in range(nballs): fd = np.random.randn(nvox, 3) w = np.linalg.norm(fd, axis=1) - fd[w < np.finfo(float).eps, ...] = np.array([1., 0., 0.]) + fd[w < np.finfo(float).eps, ...] = np.array([1.0, 0.0, 0.0]) w[w < np.finfo(float).eps] = 1.0 fd /= w[..., np.newaxis] dirs = np.hstack((dirs, fd)) @@ -217,26 +219,20 @@ def _run_interface(self, runtime): sf_evals = list(self.inputs.diff_sf) ba_evals = list(self.inputs.diff_iso) - mevals = [sf_evals] * nsticks + \ - [[ba_evals[d]] * 3 for d in range(nballs)] - - b0 = b0_im.get_data()[msk > 0] - args = [] - for i in range(nvox): - args.append({ - 'fractions': - fracs[i, ...].tolist(), - 'sticks': - [tuple(dirs[i, j:j + 3]) for j in range(nsticks + nballs)], - 'gradients': - gtab, - 'mevals': - mevals, - 'S0': - b0[i], - 'snr': - self.inputs.snr - }) + mevals = [sf_evals] * nsticks + [[ba_evals[d]] * 3 for d in range(nballs)] + + b0 = b0_im.get_fdata()[msk > 0] + args = [ + { + "fractions": fracs[i, ...].tolist(), + "sticks": [tuple(dirs[i, j : j + 3]) for j in range(nsticks + nballs)], + "gradients": gtab, + "mevals": mevals, + "S0": b0[i], + "snr": self.inputs.snr, + } + for i in range(nvox) + ] n_proc = self.inputs.n_proc if n_proc == 0: @@ -249,30 +245,32 @@ def _run_interface(self, runtime): # Simulate sticks using dipy IFLOGGER.info( - 'Starting simulation of %d voxels, %d diffusion directions.', - len(args), ndirs) + "Starting simulation of %d voxels, %d diffusion directions.", + len(args), + ndirs, + ) result = np.array(pool.map(_compute_voxel, args)) if np.shape(result)[1] != ndirs: - raise RuntimeError(('Computed directions do not match number' - 'of b-values.')) + raise RuntimeError("Computed directions do not match number of b-values.") signal = np.zeros((shape[0], shape[1], shape[2], ndirs)) signal[msk > 0] = result simhdr = hdr.copy() simhdr.set_data_dtype(np.float32) - simhdr.set_xyzt_units('mm', 'sec') + simhdr.set_xyzt_units("mm", "sec") nb.Nifti1Image(signal.astype(np.float32), aff, simhdr).to_filename( - op.abspath(self.inputs.out_file)) + op.abspath(self.inputs.out_file) + ) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) - outputs['out_mask'] = op.abspath(self.inputs.out_mask) - outputs['out_bvec'] = op.abspath(self.inputs.out_bvec) - outputs['out_bval'] = op.abspath(self.inputs.out_bval) + outputs["out_file"] = op.abspath(self.inputs.out_file) + outputs["out_mask"] = 
op.abspath(self.inputs.out_mask) + outputs["out_bvec"] = op.abspath(self.inputs.out_bvec) + outputs["out_bval"] = op.abspath(self.inputs.out_bval) return outputs @@ -294,24 +292,25 @@ def _compute_voxel(args): """ from dipy.sims.voxel import multi_tensor - ffs = args['fractions'] - gtab = args['gradients'] + ffs = args["fractions"] + gtab = args["gradients"] signal = np.zeros_like(gtab.bvals, dtype=np.float32) # Simulate dwi signal sf_vf = np.sum(ffs) if sf_vf > 0.0: - ffs = ((np.array(ffs) / sf_vf) * 100) - snr = args['snr'] if args['snr'] > 0 else None + ffs = (np.array(ffs) / sf_vf) * 100 + snr = args["snr"] if args["snr"] > 0 else None try: signal, _ = multi_tensor( gtab, - args['mevals'], - S0=args['S0'], - angles=args['sticks'], + args["mevals"], + S0=args["S0"], + angles=args["sticks"], fractions=ffs, - snr=snr) + snr=snr, + ) except Exception: pass @@ -325,7 +324,7 @@ def _generate_gradients(ndirs=64, values=[1000, 3000], nb0s=1): """ import numpy as np - from dipy.core.sphere import (disperse_charges, Sphere, HemiSphere) + from dipy.core.sphere import disperse_charges, HemiSphere from dipy.core.gradients import gradient_table theta = np.pi * np.random.rand(ndirs) @@ -342,7 +341,7 @@ def _generate_gradients(ndirs=64, values=[1000, 3000], nb0s=1): bvecs = np.vstack((bvecs, vertices)) bvals = np.hstack((bvals, v * np.ones(vertices.shape[0]))) - for i in range(0, nb0s): + for i in range(nb0s): bvals = bvals.tolist() bvals.insert(0, 0) diff --git a/nipype/interfaces/dipy/stats.py b/nipype/interfaces/dipy/stats.py new file mode 100644 index 0000000000..f2de24ca33 --- /dev/null +++ b/nipype/interfaces/dipy/stats.py @@ -0,0 +1,20 @@ +from looseversion import LooseVersion +from ... import logging +from .base import HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows + +IFLOGGER = logging.getLogger("nipype.interface") + +if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.16"): + from dipy.workflows import stats + + l_wkflw = get_dipy_workflows(stats) + for name, obj in l_wkflw: + new_name = name.replace("Flow", "") + globals()[new_name] = dipy_to_nipype_interface(new_name, obj) + del l_wkflw + +else: + IFLOGGER.info( + "We advise you to upgrade DIPY version. This upgrade will" + " open access to more function" + ) diff --git a/nipype/interfaces/dipy/tensors.py b/nipype/interfaces/dipy/tensors.py index 33f5a73b08..f8be2b5c70 100644 --- a/nipype/interfaces/dipy/tensors.py +++ b/nipype/interfaces/dipy/tensors.py @@ -1,18 +1,15 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - +import numpy as np import nibabel as nb from ... 
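`_generate_gradients` above synthesizes a table by spreading random points over a hemisphere and stacking one shell per b-value. A usage sketch built from the same DIPY primitives; the parameters are illustrative and the exact `gradient_table` signature may vary between DIPY releases:

    import numpy as np
    from dipy.core.sphere import disperse_charges, HemiSphere
    from dipy.core.gradients import gradient_table

    ndirs = 64
    theta = np.pi * np.random.rand(ndirs)
    phi = 2 * np.pi * np.random.rand(ndirs)
    hsph, _ = disperse_charges(HemiSphere(theta=theta, phi=phi), iters=5000)

    bvals = np.hstack(([0], 1000 * np.ones(ndirs)))   # one b0 plus a single shell
    bvecs = np.vstack((np.zeros(3), hsph.vertices))
    gtab = gradient_table(bvals, bvecs=bvecs)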
import logging from ..base import TraitedSpec, File, isdefined from .base import DipyDiffusionInterface, DipyBaseInterfaceInputSpec -IFLOGGER = logging.getLogger('nipype.interface') +IFLOGGER = logging.getLogger("nipype.interface") class DTIInputSpec(DipyBaseInterfaceInputSpec): - mask_file = File(exists=True, desc='An optional white matter mask') + mask_file = File(exists=True, desc="An optional white matter mask") class DTIOutputSpec(TraitedSpec): @@ -38,51 +35,53 @@ class DTI(DipyDiffusionInterface): >>> dti.inputs.in_bval = 'bvals' >>> dti.run() # doctest: +SKIP """ + input_spec = DTIInputSpec output_spec = DTIOutputSpec def _run_interface(self, runtime): from dipy.reconst import dti from dipy.io.utils import nifti1_symmat + gtab = self._get_gradient_table() img = nb.load(self.inputs.in_file) - data = img.get_data() + data = img.get_fdata() affine = img.affine mask = None if isdefined(self.inputs.mask_file): - mask = nb.load(self.inputs.mask_file).get_data() + mask = np.asanyarray(nb.load(self.inputs.mask_file).dataobj) # Fit it tenmodel = dti.TensorModel(gtab) ten_fit = tenmodel.fit(data, mask) lower_triangular = ten_fit.lower_triangular() img = nifti1_symmat(lower_triangular, affine) - out_file = self._gen_filename('dti') + out_file = self._gen_filename("dti") nb.save(img, out_file) - IFLOGGER.info('DTI parameters image saved as %s', out_file) + IFLOGGER.info("DTI parameters image saved as %s", out_file) # FA MD RD and AD for metric in ["fa", "md", "rd", "ad", "color_fa"]: data = getattr(ten_fit, metric).astype("float32") out_name = self._gen_filename(metric) nb.Nifti1Image(data, affine).to_filename(out_name) - IFLOGGER.info('DTI %s image saved as %s', metric, out_name) + IFLOGGER.info("DTI %s image saved as %s", metric, out_name) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self._gen_filename('dti') + outputs["out_file"] = self._gen_filename("dti") for metric in ["fa", "md", "rd", "ad", "color_fa"]: - outputs["{}_file".format(metric)] = self._gen_filename(metric) + outputs[f"{metric}_file"] = self._gen_filename(metric) return outputs class TensorModeInputSpec(DipyBaseInterfaceInputSpec): - mask_file = File(exists=True, desc='An optional white matter mask') + mask_file = File(exists=True, desc="An optional white matter mask") class TensorModeOutputSpec(TraitedSpec): @@ -93,24 +92,27 @@ class TensorMode(DipyDiffusionInterface): """ Creates a map of the mode of the diffusion tensors given a set of diffusion-weighted images, as well as their associated b-values and - b-vectors. Fits the diffusion tensors and calculates tensor mode + b-vectors [1]_. Fits the diffusion tensors and calculates tensor mode with Dipy. - .. [1] Daniel B. Ennis and G. Kindlmann, "Orthogonal Tensor - Invariants and the Analysis of Diffusion Tensor Magnetic Resonance - Images", Magnetic Resonance in Medicine, vol. 55, no. 1, pp. 136-146, - 2006. - Example ------- - >>> import nipype.interfaces.dipy as dipy >>> mode = dipy.TensorMode() >>> mode.inputs.in_file = 'diffusion.nii' >>> mode.inputs.in_bvec = 'bvecs' >>> mode.inputs.in_bval = 'bvals' >>> mode.run() # doctest: +SKIP + + References + ---------- + .. [1] Daniel B. Ennis and G. Kindlmann, "Orthogonal Tensor + Invariants and the Analysis of Diffusion Tensor Magnetic Resonance + Images", Magnetic Resonance in Medicine, vol. 55, no. 1, pp. 136-146, + 2006. 
+ """ + input_spec = TensorModeInputSpec output_spec = TensorModeOutputSpec @@ -119,7 +121,7 @@ def _run_interface(self, runtime): # Load the 4D image files img = nb.load(self.inputs.in_file) - data = img.get_data() + data = img.get_fdata() affine = img.affine # Load the gradient strengths and directions @@ -138,12 +140,12 @@ def _run_interface(self, runtime): # Write as a 3D Nifti image with the original affine img = nb.Nifti1Image(mode_data, affine) - out_file = self._gen_filename('mode') + out_file = self._gen_filename("mode") nb.save(img, out_file) - IFLOGGER.info('Tensor mode image saved as %s', out_file) + IFLOGGER.info("Tensor mode image saved as %s", out_file) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self._gen_filename('mode') + outputs["out_file"] = self._gen_filename("mode") return outputs diff --git a/nipype/interfaces/dipy/tests/__init__.py b/nipype/interfaces/dipy/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/dipy/tests/__init__.py +++ b/nipype/interfaces/dipy/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/dipy/tests/test_auto_APMQball.py b/nipype/interfaces/dipy/tests/test_auto_APMQball.py index f6f3f2e4c6..81ff397cb8 100644 --- a/nipype/interfaces/dipy/tests/test_auto_APMQball.py +++ b/nipype/interfaces/dipy/tests/test_auto_APMQball.py @@ -1,15 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..anisotropic_power import APMQball def test_APMQball_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict(mandatory=True, ), - in_bvec=dict(mandatory=True, ), - in_file=dict(mandatory=True, ), - mask_file=dict(), + b0_thres=dict( + usedefault=True, + ), + in_bval=dict( + extensions=None, + mandatory=True, + ), + in_bvec=dict( + extensions=None, + mandatory=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + mask_file=dict( + extensions=None, + ), out_prefix=dict(), ) inputs = APMQball.input_spec() @@ -17,8 +29,14 @@ def test_APMQball_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_APMQball_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = APMQball.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/tests/test_auto_CSD.py b/nipype/interfaces/dipy/tests/test_auto_CSD.py index a30efaa3cc..7cdab47f9b 100644 --- a/nipype/interfaces/dipy/tests/test_auto_CSD.py +++ b/nipype/interfaces/dipy/tests/test_auto_CSD.py @@ -1,30 +1,56 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..reconstruction import CSD def test_CSD_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict(mandatory=True, ), - in_bvec=dict(mandatory=True, ), - in_file=dict(mandatory=True, ), - in_mask=dict(), - out_fods=dict(), + b0_thres=dict( + usedefault=True, + ), + in_bval=dict( + extensions=None, + mandatory=True, + ), + in_bvec=dict( + extensions=None, + mandatory=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + in_mask=dict( + extensions=None, + ), + out_fods=dict( + extensions=None, + ), out_prefix=dict(), - response=dict(), - save_fods=dict(usedefault=True, ), - sh_order=dict(usedefault=True, ), + response=dict( + extensions=None, + ), + save_fods=dict( + 
usedefault=True, + ), + sh_order=dict( + usedefault=True, + ), ) inputs = CSD.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CSD_outputs(): output_map = dict( - model=dict(), - out_fods=dict(), + model=dict( + extensions=None, + ), + out_fods=dict( + extensions=None, + ), ) outputs = CSD.output_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_DTI.py b/nipype/interfaces/dipy/tests/test_auto_DTI.py index f543856065..1cea142a36 100644 --- a/nipype/interfaces/dipy/tests/test_auto_DTI.py +++ b/nipype/interfaces/dipy/tests/test_auto_DTI.py @@ -1,15 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tensors import DTI def test_DTI_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict(mandatory=True, ), - in_bvec=dict(mandatory=True, ), - in_file=dict(mandatory=True, ), - mask_file=dict(), + b0_thres=dict( + usedefault=True, + ), + in_bval=dict( + extensions=None, + mandatory=True, + ), + in_bvec=dict( + extensions=None, + mandatory=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + mask_file=dict( + extensions=None, + ), out_prefix=dict(), ) inputs = DTI.input_spec() @@ -17,14 +29,28 @@ def test_DTI_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTI_outputs(): output_map = dict( - ad_file=dict(), - color_fa_file=dict(), - fa_file=dict(), - md_file=dict(), - out_file=dict(), - rd_file=dict(), + ad_file=dict( + extensions=None, + ), + color_fa_file=dict( + extensions=None, + ), + fa_file=dict( + extensions=None, + ), + md_file=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + rd_file=dict( + extensions=None, + ), ) outputs = DTI.output_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_Denoise.py b/nipype/interfaces/dipy/tests/test_auto_Denoise.py index 88a1bc5314..e85d2644c2 100644 --- a/nipype/interfaces/dipy/tests/test_auto_Denoise.py +++ b/nipype/interfaces/dipy/tests/test_auto_Denoise.py @@ -1,20 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Denoise def test_Denoise_inputs(): input_map = dict( - block_radius=dict(usedefault=True, ), - in_file=dict(mandatory=True, ), - in_mask=dict(), - noise_mask=dict(), + block_radius=dict( + usedefault=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + in_mask=dict( + extensions=None, + ), + noise_mask=dict( + extensions=None, + ), noise_model=dict( mandatory=True, usedefault=True, ), - patch_radius=dict(usedefault=True, ), - signal_mask=dict(), + patch_radius=dict( + usedefault=True, + ), + signal_mask=dict( + extensions=None, + ), snr=dict(), ) inputs = Denoise.input_spec() @@ -22,8 +34,14 @@ def test_Denoise_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Denoise_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Denoise.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py b/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py index e133e266c4..d422dc5290 100644 --- 
a/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py +++ b/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import DipyBaseInterface diff --git a/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py b/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py index ee9022ca58..e292135ba0 100644 --- a/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py +++ b/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py @@ -1,14 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import DipyDiffusionInterface def test_DipyDiffusionInterface_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict(mandatory=True, ), - in_bvec=dict(mandatory=True, ), - in_file=dict(mandatory=True, ), + b0_thres=dict( + usedefault=True, + ), + in_bval=dict( + extensions=None, + mandatory=True, + ), + in_bvec=dict( + extensions=None, + mandatory=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), out_prefix=dict(), ) inputs = DipyDiffusionInterface.input_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py index 1270d94b13..00c8c1ba0d 100644 --- a/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py +++ b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py @@ -1,33 +1,68 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..reconstruction import EstimateResponseSH def test_EstimateResponseSH_inputs(): input_map = dict( - auto=dict(xor=['recursive'], ), - b0_thres=dict(usedefault=True, ), - fa_thresh=dict(usedefault=True, ), - in_bval=dict(mandatory=True, ), - in_bvec=dict(mandatory=True, ), - in_evals=dict(mandatory=True, ), - in_file=dict(mandatory=True, ), - in_mask=dict(), - out_mask=dict(usedefault=True, ), + auto=dict( + xor=["recursive"], + ), + b0_thres=dict( + usedefault=True, + ), + fa_thresh=dict( + usedefault=True, + ), + in_bval=dict( + extensions=None, + mandatory=True, + ), + in_bvec=dict( + extensions=None, + mandatory=True, + ), + in_evals=dict( + extensions=None, + mandatory=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + in_mask=dict( + extensions=None, + ), + out_mask=dict( + extensions=None, + usedefault=True, + ), out_prefix=dict(), - recursive=dict(xor=['auto'], ), - response=dict(usedefault=True, ), - roi_radius=dict(usedefault=True, ), + recursive=dict( + xor=["auto"], + ), + response=dict( + extensions=None, + usedefault=True, + ), + roi_radius=dict( + usedefault=True, + ), ) inputs = EstimateResponseSH.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateResponseSH_outputs(): output_map = dict( - out_mask=dict(), - response=dict(), + out_mask=dict( + extensions=None, + ), + response=dict( + extensions=None, + ), ) outputs = EstimateResponseSH.output_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_RESTORE.py b/nipype/interfaces/dipy/tests/test_auto_RESTORE.py index 9f62d05e0c..f25127f9c9 100644 --- a/nipype/interfaces/dipy/tests/test_auto_RESTORE.py +++ b/nipype/interfaces/dipy/tests/test_auto_RESTORE.py @@ -1,16 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import 
unicode_literals from ..reconstruction import RESTORE def test_RESTORE_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict(mandatory=True, ), - in_bvec=dict(mandatory=True, ), - in_file=dict(mandatory=True, ), - in_mask=dict(), - noise_mask=dict(), + b0_thres=dict( + usedefault=True, + ), + in_bval=dict( + extensions=None, + mandatory=True, + ), + in_bvec=dict( + extensions=None, + mandatory=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + in_mask=dict( + extensions=None, + ), + noise_mask=dict( + extensions=None, + ), out_prefix=dict(), ) inputs = RESTORE.input_spec() @@ -18,15 +32,31 @@ def test_RESTORE_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RESTORE_outputs(): output_map = dict( - evals=dict(), - evecs=dict(), - fa=dict(), - md=dict(), - mode=dict(), - rd=dict(), - trace=dict(), + evals=dict( + extensions=None, + ), + evecs=dict( + extensions=None, + ), + fa=dict( + extensions=None, + ), + md=dict( + extensions=None, + ), + mode=dict( + extensions=None, + ), + rd=dict( + extensions=None, + ), + trace=dict( + extensions=None, + ), ) outputs = RESTORE.output_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_Resample.py b/nipype/interfaces/dipy/tests/test_auto_Resample.py index ff51e0efe4..6c765b2fa9 100644 --- a/nipype/interfaces/dipy/tests/test_auto_Resample.py +++ b/nipype/interfaces/dipy/tests/test_auto_Resample.py @@ -1,11 +1,13 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Resample def test_Resample_inputs(): input_map = dict( - in_file=dict(mandatory=True, ), + in_file=dict( + extensions=None, + mandatory=True, + ), interp=dict( mandatory=True, usedefault=True, @@ -17,8 +19,14 @@ def test_Resample_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Resample_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Resample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/tests/test_auto_SimulateMultiTensor.py b/nipype/interfaces/dipy/tests/test_auto_SimulateMultiTensor.py index db46a3b982..16fff2aeff 100644 --- a/nipype/interfaces/dipy/tests/test_auto_SimulateMultiTensor.py +++ b/nipype/interfaces/dipy/tests/test_auto_SimulateMultiTensor.py @@ -1,40 +1,90 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..simulate import SimulateMultiTensor def test_SimulateMultiTensor_inputs(): input_map = dict( - baseline=dict(mandatory=True, ), - bvalues=dict(usedefault=True, ), - diff_iso=dict(usedefault=True, ), - diff_sf=dict(usedefault=True, ), - gradients=dict(), - in_bval=dict(), - in_bvec=dict(), - in_dirs=dict(mandatory=True, ), - in_frac=dict(mandatory=True, ), - in_mask=dict(), - in_vfms=dict(mandatory=True, ), - n_proc=dict(usedefault=True, ), - num_dirs=dict(usedefault=True, ), - out_bval=dict(usedefault=True, ), - out_bvec=dict(usedefault=True, ), - out_file=dict(usedefault=True, ), - out_mask=dict(usedefault=True, ), - snr=dict(usedefault=True, ), + baseline=dict( + extensions=None, + mandatory=True, + ), + bvalues=dict( + usedefault=True, + ), + diff_iso=dict( + usedefault=True, + ), + diff_sf=dict( + usedefault=True, + ), + 
gradients=dict( + extensions=None, + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + extensions=None, + ), + in_dirs=dict( + mandatory=True, + ), + in_frac=dict( + mandatory=True, + ), + in_mask=dict( + extensions=None, + ), + in_vfms=dict( + mandatory=True, + ), + n_proc=dict( + usedefault=True, + ), + num_dirs=dict( + usedefault=True, + ), + out_bval=dict( + extensions=None, + usedefault=True, + ), + out_bvec=dict( + extensions=None, + usedefault=True, + ), + out_file=dict( + extensions=None, + usedefault=True, + ), + out_mask=dict( + extensions=None, + usedefault=True, + ), + snr=dict( + usedefault=True, + ), ) inputs = SimulateMultiTensor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SimulateMultiTensor_outputs(): output_map = dict( - out_bval=dict(), - out_bvec=dict(), - out_file=dict(), - out_mask=dict(), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + out_mask=dict( + extensions=None, + ), ) outputs = SimulateMultiTensor.output_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py b/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py index 1bc1a2ea97..ad97523ce2 100644 --- a/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py +++ b/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tracks import StreamlineTractography @@ -9,9 +8,16 @@ def test_StreamlineTractography_inputs(): mandatory=True, usedefault=True, ), - in_file=dict(mandatory=True, ), - in_model=dict(), - in_peaks=dict(), + in_file=dict( + extensions=None, + mandatory=True, + ), + in_model=dict( + extensions=None, + ), + in_peaks=dict( + extensions=None, + ), min_angle=dict( mandatory=True, usedefault=True, @@ -33,21 +39,37 @@ def test_StreamlineTractography_inputs(): mandatory=True, usedefault=True, ), - seed_coord=dict(), - seed_mask=dict(), - tracking_mask=dict(), + seed_coord=dict( + extensions=None, + ), + seed_mask=dict( + extensions=None, + ), + tracking_mask=dict( + extensions=None, + ), ) inputs = StreamlineTractography.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_StreamlineTractography_outputs(): output_map = dict( - gfa=dict(), - odf_peaks=dict(), - out_seeds=dict(), - tracks=dict(), + gfa=dict( + extensions=None, + ), + odf_peaks=dict( + extensions=None, + ), + out_seeds=dict( + extensions=None, + ), + tracks=dict( + extensions=None, + ), ) outputs = StreamlineTractography.output_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_TensorMode.py b/nipype/interfaces/dipy/tests/test_auto_TensorMode.py index d072af78fc..59b1b30e8b 100644 --- a/nipype/interfaces/dipy/tests/test_auto_TensorMode.py +++ b/nipype/interfaces/dipy/tests/test_auto_TensorMode.py @@ -1,15 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tensors import TensorMode def test_TensorMode_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict(mandatory=True, ), - in_bvec=dict(mandatory=True, ), - in_file=dict(mandatory=True, ), - mask_file=dict(), + b0_thres=dict( + usedefault=True, + ), + in_bval=dict( + extensions=None, + 
mandatory=True, + ), + in_bvec=dict( + extensions=None, + mandatory=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + mask_file=dict( + extensions=None, + ), out_prefix=dict(), ) inputs = TensorMode.input_spec() @@ -17,8 +29,14 @@ def test_TensorMode_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TensorMode_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TensorMode.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py b/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py index 79af3b5940..0c7855c507 100644 --- a/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py +++ b/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py @@ -1,15 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tracks import TrackDensityMap def test_TrackDensityMap_inputs(): input_map = dict( data_dims=dict(), - in_file=dict(mandatory=True, ), - out_filename=dict(usedefault=True, ), - points_space=dict(usedefault=True, ), - reference=dict(), + in_file=dict( + extensions=None, + mandatory=True, + ), + out_filename=dict( + extensions=None, + usedefault=True, + ), + points_space=dict( + usedefault=True, + ), + reference=dict( + extensions=None, + ), voxel_dims=dict(), ) inputs = TrackDensityMap.input_spec() @@ -17,8 +26,14 @@ def test_TrackDensityMap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackDensityMap_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TrackDensityMap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/tests/test_base.py b/nipype/interfaces/dipy/tests/test_base.py new file mode 100644 index 0000000000..d2d81ec005 --- /dev/null +++ b/nipype/interfaces/dipy/tests/test_base.py @@ -0,0 +1,210 @@ +import pytest +from packaging.version import Version +from collections import namedtuple +from ...base import traits, File, TraitedSpec, BaseInterfaceInputSpec +from ..base import ( + convert_to_traits_type, + create_interface_specs, + dipy_to_nipype_interface, + DipyBaseInterface, + no_dipy, + get_dipy_workflows, + get_default_args, + dipy_version, +) + + +def test_convert_to_traits_type(): + Params = namedtuple("Params", "traits_type is_file") + Res = namedtuple("Res", "traits_type is_mandatory") + l_entries = [ + Params("variable string", False), + Params("variable int", False), + Params("variable float", False), + Params("variable bool", False), + Params("variable complex", False), + Params("variable int, optional", False), + Params("variable string, optional", False), + Params("variable float, optional", False), + Params("variable bool, optional", False), + Params("variable complex, optional", False), + Params("string", False), + Params("int", False), + Params("string", True), + Params("float", False), + Params("bool", False), + Params("complex", False), + Params("string, optional", False), + Params("int, optional", False), + Params("string, optional", True), + Params("float, optional", False), + Params("bool, optional", False), + Params("complex, optional", False), + ] + l_expected = [ + 
Res(traits.ListStr, True), + Res(traits.ListInt, True), + Res(traits.ListFloat, True), + Res(traits.ListBool, True), + Res(traits.ListComplex, True), + Res(traits.ListInt, False), + Res(traits.ListStr, False), + Res(traits.ListFloat, False), + Res(traits.ListBool, False), + Res(traits.ListComplex, False), + Res(traits.Str, True), + Res(traits.Int, True), + Res(File, True), + Res(traits.Float, True), + Res(traits.Bool, True), + Res(traits.Complex, True), + Res(traits.Str, False), + Res(traits.Int, False), + Res(File, False), + Res(traits.Float, False), + Res(traits.Bool, False), + Res(traits.Complex, False), + ] + + for entry, res in zip(l_entries, l_expected): + traits_type, is_mandatory = convert_to_traits_type( + entry.traits_type, entry.is_file + ) + assert traits_type == res.traits_type + assert is_mandatory == res.is_mandatory + + with pytest.raises(IOError): + convert_to_traits_type("file, optional") + + +def test_create_interface_specs(): + new_interface = create_interface_specs("MyInterface") + + assert new_interface.__base__ == TraitedSpec + assert isinstance(new_interface(), TraitedSpec) + assert new_interface.__name__ == "MyInterface" + assert not new_interface().get() + + new_interface = create_interface_specs( + "MyInterface", BaseClass=BaseInterfaceInputSpec + ) + assert new_interface.__base__ == BaseInterfaceInputSpec + assert isinstance(new_interface(), BaseInterfaceInputSpec) + assert new_interface.__name__ == "MyInterface" + assert not new_interface().get() + + params = [ + ("params1", "string", ["my description"]), + ("params2_files", "string", ["my description @"]), + ("params3", "int, optional", ["useful option"]), + ("out_params", "string", ["my out description"]), + ] + + new_interface = create_interface_specs( + "MyInterface", params=params, BaseClass=BaseInterfaceInputSpec + ) + + assert new_interface.__base__ == BaseInterfaceInputSpec + assert isinstance(new_interface(), BaseInterfaceInputSpec) + assert new_interface.__name__ == "MyInterface" + current_params = new_interface().get() + assert len(current_params) == 4 + assert "params1" in current_params + assert "params2_files" in current_params + assert "params3" in current_params + assert "out_params" in current_params + + +@pytest.mark.skipif( + no_dipy() or Version(dipy_version()) < Version("1.4"), reason="DIPY >=1.4 required" +) +def test_get_default_args(): + from dipy.utils.deprecator import deprecated_params + + def test(dummy=11, x=3): + return dummy, x + + @deprecated_params('x', None, '0.3', '0.5', alternative='test2.y') + def test2(dummy=11, x=3): + return dummy, x + + @deprecated_params(['dummy', 'x'], None, '0.3', alternative='test2.y') + def test3(dummy=11, x=3): + return dummy, x + + @deprecated_params(['dummy', 'x'], None, '0.3', '0.5', alternative='test2.y') + def test4(dummy=11, x=3): + return dummy, x + + expected_res = {'dummy': 11, 'x': 3} + for func in [test, test2, test3, test4]: + assert get_default_args(func) == expected_res + + +@pytest.mark.skipif(no_dipy(), reason="DIPY is not installed") +def test_dipy_to_nipype_interface(): + from dipy.workflows.workflow import Workflow + + class DummyWorkflow(Workflow): + @classmethod + def get_short_name(cls): + return "dwf1" + + def run(self, in_files, param1=1, out_dir="", out_ref="out1.txt"): + """Workflow used to test basic workflows. 
+
+        Parameters
+        ----------
+        in_files : string
+            fake input string param
+        param1 : int, optional
+            fake positional param (default 1)
+        out_dir : string, optional
+            fake output directory (default '')
+        out_ref : string, optional
+            fake out file (default out1.txt)
+
+        References
+        ----------
+        dummy references
+
+        """
+        return param1
+
+    new_specs = dipy_to_nipype_interface("MyModelSpec", DummyWorkflow)
+    assert new_specs.__base__ == DipyBaseInterface
+    assert isinstance(new_specs(), DipyBaseInterface)
+    assert new_specs.__name__ == "MyModelSpec"
+    assert hasattr(new_specs, "input_spec")
+    assert new_specs().input_spec.__base__ == BaseInterfaceInputSpec
+    assert hasattr(new_specs, "output_spec")
+    assert new_specs().output_spec.__base__ == TraitedSpec
+    assert hasattr(new_specs, "_run_interface")
+    assert hasattr(new_specs, "_list_outputs")
+    params_in = new_specs().inputs.get()
+    params_out = new_specs()._outputs().get()
+    assert len(params_in) == 4
+    assert "in_files" in params_in
+    assert "param1" in params_in
+    assert "out_dir" in params_out
+    assert "out_ref" in params_out
+
+    with pytest.raises(ValueError):
+        new_specs().run()
+
+
+@pytest.mark.skipif(no_dipy(), reason="DIPY is not installed")
+def test_get_dipy_workflows():
+    from dipy.workflows import align
+
+    l_wkflw = get_dipy_workflows(align)
+    for name, obj in l_wkflw:
+        assert name.endswith("Flow")
+        assert issubclass(obj, align.Workflow)
+
+
+if __name__ == "__main__":
+    test_convert_to_traits_type()
+    test_create_interface_specs()
+    test_dipy_to_nipype_interface()
+    test_get_default_args()
diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py
index cd47590f94..0a782b652e 100644
--- a/nipype/interfaces/dipy/tracks.py
+++ b/nipype/interfaces/dipy/tracks.py
@@ -1,42 +1,68 @@
-# -*- coding: utf-8 -*-
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)
-
 import os.path as op
 import numpy as np
 import nibabel as nb
-import nibabel.trackvis as nbt
+from looseversion import LooseVersion
 
 from ... import logging
-from ..base import (TraitedSpec, BaseInterfaceInputSpec, File, isdefined,
-                    traits)
-from .base import DipyBaseInterface
-IFLOGGER = logging.getLogger('nipype.interface')
+from ..base import TraitedSpec, BaseInterfaceInputSpec, File, isdefined, traits
+from .base import (
+    DipyBaseInterface,
+    HAVE_DIPY,
+    dipy_version,
+    dipy_to_nipype_interface,
+    get_dipy_workflows,
+)
+
+IFLOGGER = logging.getLogger("nipype.interface")
+
+
+if HAVE_DIPY and (
+    LooseVersion("0.16") >= LooseVersion(dipy_version()) >= LooseVersion("0.15")
+):
+    try:
+        from dipy.workflows.tracking import LocalFiberTrackingPAMFlow as DetTrackFlow
+    except ImportError:  # different name in 0.15
+        from dipy.workflows.tracking import DetTrackPAMFlow as DetTrackFlow
+
+    DeterministicTracking = dipy_to_nipype_interface(
+        "DeterministicTracking", DetTrackFlow
+    )
+
+if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"):
+    from dipy.workflows import segment, tracking
+
+    l_wkflw = get_dipy_workflows(segment) + get_dipy_workflows(tracking)
+    for name, obj in l_wkflw:
+        new_name = name.replace("Flow", "")
+        globals()[new_name] = dipy_to_nipype_interface(new_name, obj)
+    del l_wkflw
+
+else:
+    IFLOGGER.info(
+        "We advise you to upgrade DIPY version. 
This upgrade will" + " open access to more function" + ) class TrackDensityMapInputSpec(BaseInterfaceInputSpec): - in_file = File( - exists=True, mandatory=True, desc='The input TrackVis track file') + in_file = File(exists=True, mandatory=True, desc="The input TrackVis track file") reference = File( - exists=True, desc='A reference file to define RAS coordinates space') + exists=True, desc="A reference file to define RAS coordinates space" + ) points_space = traits.Enum( - 'rasmm', - 'voxel', - None, - usedefault=True, - desc='coordinates of trk file') + "rasmm", "voxel", None, usedefault=True, desc="coordinates of trk file" + ) voxel_dims = traits.List( - traits.Float, minlen=3, maxlen=3, desc='The size of each voxel in mm.') + traits.Float, minlen=3, maxlen=3, desc="The size of each voxel in mm." + ) data_dims = traits.List( - traits.Int, - minlen=3, - maxlen=3, - desc='The size of the image in voxels.') + traits.Int, minlen=3, maxlen=3, desc="The size of the image in voxels." + ) out_filename = File( - 'tdi.nii', + "tdi.nii", usedefault=True, - desc='The output filename for the tracks in TrackVis ' - '(.trk) format') + desc="The output filename for the tracks in TrackVis (.trk) format", + ) class TrackDensityMapOutputSpec(TraitedSpec): @@ -57,12 +83,14 @@ class TrackDensityMap(DipyBaseInterface): >>> trk2tdi.run() # doctest: +SKIP """ + input_spec = TrackDensityMapInputSpec output_spec = TrackDensityMapOutputSpec def _run_interface(self, runtime): from numpy import min_scalar_type from dipy.tracking.utils import density_map + import nibabel.trackvis as nbt tracks, header = nbt.read(self.inputs.in_file) streams = ((ii[0]) for ii in tracks) @@ -73,19 +101,21 @@ def _run_interface(self, runtime): data_dims = refnii.shape[:3] kwargs = dict(affine=affine) else: - IFLOGGER.warning('voxel_dims and data_dims are deprecated as of dipy ' - '0.7.1. Please use reference input instead') + IFLOGGER.warning( + "voxel_dims and data_dims are deprecated as of dipy " + "0.7.1. 
Please use reference input instead" + ) if not isdefined(self.inputs.data_dims): - data_dims = header['dim'] + data_dims = header["dim"] else: data_dims = self.inputs.data_dims if not isdefined(self.inputs.voxel_dims): - voxel_size = header['voxel_size'] + voxel_size = header["voxel_size"] else: voxel_size = self.inputs.voxel_dims - affine = header['vox_to_ras'] + affine = header["vox_to_ras"] kwargs = dict(voxel_size=voxel_size) data = density_map(streams, data_dims, **kwargs) @@ -94,68 +124,70 @@ def _run_interface(self, runtime): out_file = op.abspath(self.inputs.out_filename) nb.save(img, out_file) - IFLOGGER.info('Track density map saved as %s, size=%s, dimensions=%s', - out_file, img.shape, img.header.get_zooms()) + IFLOGGER.info( + "Track density map saved as %s, size=%s, dimensions=%s", + out_file, + img.shape, + img.header.get_zooms(), + ) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self.inputs.out_filename) + outputs["out_file"] = op.abspath(self.inputs.out_filename) return outputs class StreamlineTractographyInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc=('input diffusion data')) - in_model = File(exists=True, desc=('input f/d-ODF model extracted from.')) - tracking_mask = File( - exists=True, desc=('input mask within which perform tracking')) - seed_mask = File( - exists=True, desc=('input mask within which perform seeding')) - in_peaks = File(exists=True, desc=('peaks computed from the odf')) + in_file = File(exists=True, mandatory=True, desc=("input diffusion data")) + in_model = File(exists=True, desc=("input f/d-ODF model extracted from.")) + tracking_mask = File(exists=True, desc=("input mask within which perform tracking")) + seed_mask = File(exists=True, desc=("input mask within which perform seeding")) + in_peaks = File(exists=True, desc=("peaks computed from the odf")) seed_coord = File( exists=True, - desc=('file containing the list of seed voxel ' - 'coordinates (N,3)')) + desc=("file containing the list of seed voxel coordinates (N,3)"), + ) gfa_thresh = traits.Float( 0.2, mandatory=True, usedefault=True, - desc=('GFA threshold to compute tracking mask')) + desc=("GFA threshold to compute tracking mask"), + ) peak_threshold = traits.Float( 0.5, mandatory=True, usedefault=True, - desc=('threshold to consider peaks from model')) + desc=("threshold to consider peaks from model"), + ) min_angle = traits.Float( - 25.0, - mandatory=True, - usedefault=True, - desc=('minimum separation angle')) + 25.0, mandatory=True, usedefault=True, desc=("minimum separation angle") + ) multiprocess = traits.Bool( - True, mandatory=True, usedefault=True, desc=('use multiprocessing')) + True, mandatory=True, usedefault=True, desc=("use multiprocessing") + ) save_seeds = traits.Bool( - False, - mandatory=True, - usedefault=True, - desc=('save seeding voxels coordinates')) + False, mandatory=True, usedefault=True, desc=("save seeding voxels coordinates") + ) num_seeds = traits.Int( 10000, mandatory=True, usedefault=True, - desc=('desired number of tracks in tractography')) - out_prefix = traits.Str(desc=('output prefix for file names')) + desc=("desired number of tracks in tractography"), + ) + out_prefix = traits.Str(desc=("output prefix for file names")) class StreamlineTractographyOutputSpec(TraitedSpec): - tracks = File(desc='TrackVis file containing extracted streamlines') + tracks = File(desc="TrackVis file containing extracted streamlines") gfa = File( - desc=('The resulting GFA 
(generalized FA) computed using the ' - 'peaks of the ODF')) - odf_peaks = File(desc=('peaks computed from the odf')) + desc=("The resulting GFA (generalized FA) computed using the peaks of the ODF") + ) + odf_peaks = File(desc=("peaks computed from the odf")) out_seeds = File( - desc=('file containing the (N,3) *voxel* coordinates used' - ' in seeding.')) + desc=("file containing the (N,3) *voxel* coordinates used in seeding.") + ) class StreamlineTractography(DipyBaseInterface): @@ -175,6 +207,7 @@ class StreamlineTractography(DipyBaseInterface): >>> track.inputs.tracking_mask = 'dilated_wm_mask.nii' >>> res = track.run() # doctest: +SKIP """ + input_spec = StreamlineTractographyInputSpec output_spec = StreamlineTractographyOutputSpec @@ -182,36 +215,37 @@ def _run_interface(self, runtime): from dipy.reconst.peaks import peaks_from_model from dipy.tracking.eudx import EuDX from dipy.data import get_sphere + # import marshal as pickle import pickle as pickle import gzip - if (not (isdefined(self.inputs.in_model) - or isdefined(self.inputs.in_peaks))): - raise RuntimeError(('At least one of in_model or in_peaks should ' - 'be supplied')) + if not (isdefined(self.inputs.in_model) or isdefined(self.inputs.in_peaks)): + raise RuntimeError( + "At least one of in_model or in_peaks should be supplied" + ) img = nb.load(self.inputs.in_file) imref = nb.four_to_three(img)[0] affine = img.affine - data = img.get_data().astype(np.float32) + data = img.get_fdata(dtype=np.float32) hdr = imref.header.copy() hdr.set_data_dtype(np.float32) - hdr['data_type'] = 16 + hdr["data_type"] = 16 - sphere = get_sphere('symmetric724') + sphere = get_sphere("symmetric724") self._save_peaks = False if isdefined(self.inputs.in_peaks): - IFLOGGER.info('Peaks file found, skipping ODF peaks search...') - f = gzip.open(self.inputs.in_peaks, 'rb') + IFLOGGER.info("Peaks file found, skipping ODF peaks search...") + f = gzip.open(self.inputs.in_peaks, "rb") peaks = pickle.load(f) f.close() else: self._save_peaks = True - IFLOGGER.info('Loading model and computing ODF peaks') - f = gzip.open(self.inputs.in_model, 'rb') + IFLOGGER.info("Loading model and computing ODF peaks") + f = gzip.open(self.inputs.in_model, "rb") odf_model = pickle.load(f) f.close() @@ -221,20 +255,22 @@ def _run_interface(self, runtime): sphere=sphere, relative_peak_threshold=self.inputs.peak_threshold, min_separation_angle=self.inputs.min_angle, - parallel=self.inputs.multiprocess) + parallel=self.inputs.multiprocess, + ) - f = gzip.open(self._gen_filename('peaks', ext='.pklz'), 'wb') + f = gzip.open(self._gen_filename("peaks", ext=".pklz"), "wb") pickle.dump(peaks, f, -1) f.close() hdr.set_data_shape(peaks.gfa.shape) nb.Nifti1Image(peaks.gfa.astype(np.float32), affine, hdr).to_filename( - self._gen_filename('gfa')) + self._gen_filename("gfa") + ) - IFLOGGER.info('Performing tractography') + IFLOGGER.info("Performing tractography") if isdefined(self.inputs.tracking_mask): - msk = nb.load(self.inputs.tracking_mask).get_data() + msk = np.asanyarray(nb.load(self.inputs.tracking_mask).dataobj) msk[msk > 0] = 1 msk[msk < 0] = 0 else: @@ -247,27 +283,32 @@ def _run_interface(self, runtime): seeds = np.loadtxt(self.inputs.seed_coord) elif isdefined(self.inputs.seed_mask): - seedmsk = nb.load(self.inputs.seed_mask).get_data() - assert (seedmsk.shape == data.shape[:3]) + seedmsk = np.asanyarray(nb.load(self.inputs.seed_mask).dataobj) + assert seedmsk.shape == data.shape[:3] seedmsk[seedmsk > 0] = 1 seedmsk[seedmsk < 1] = 0 seedps = np.array(np.where(seedmsk == 
1), dtype=np.float32).T vseeds = seedps.shape[0] nsperv = (seeds // vseeds) + 1 - IFLOGGER.info('Seed mask is provided (%d voxels inside ' - 'mask), computing seeds (%d seeds/voxel).', vseeds, - nsperv) + IFLOGGER.info( + "Seed mask is provided (%d voxels inside " + "mask), computing seeds (%d seeds/voxel).", + vseeds, + nsperv, + ) if nsperv > 1: - IFLOGGER.info('Needed %d seeds per selected voxel (total %d).', - nsperv, vseeds) + IFLOGGER.info( + "Needed %d seeds per selected voxel (total %d).", nsperv, vseeds + ) seedps = np.vstack(np.array([seedps] * nsperv)) voxcoord = seedps + np.random.uniform(-1, 1, size=seedps.shape) nseeds = voxcoord.shape[0] - seeds = affine.dot( - np.vstack((voxcoord.T, np.ones((1, nseeds)))))[:3, :].T + seeds = affine.dot(np.vstack((voxcoord.T, np.ones((1, nseeds)))))[ + :3, : + ].T if self.inputs.save_seeds: - np.savetxt(self._gen_filename('seeds', ext='.txt'), seeds) + np.savetxt(self._gen_filename("seeds", ext=".txt"), seeds) if isdefined(self.inputs.tracking_mask): tmask = msk @@ -282,34 +323,34 @@ def _run_interface(self, runtime): seeds=seeds, affine=affine, odf_vertices=sphere.vertices, - a_low=a_low) + a_low=a_low, + ) ss_mm = [np.array(s) for s in eu] trkfilev = nb.trackvis.TrackvisFile( - [(s, None, None) for s in ss_mm], - points_space='rasmm', - affine=np.eye(4)) - trkfilev.to_file(self._gen_filename('tracked', ext='.trk')) + [(s, None, None) for s in ss_mm], points_space="rasmm", affine=np.eye(4) + ) + trkfilev.to_file(self._gen_filename("tracked", ext=".trk")) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['tracks'] = self._gen_filename('tracked', ext='.trk') - outputs['gfa'] = self._gen_filename('gfa') + outputs["tracks"] = self._gen_filename("tracked", ext=".trk") + outputs["gfa"] = self._gen_filename("gfa") if self._save_peaks: - outputs['odf_peaks'] = self._gen_filename('peaks', ext='.pklz') + outputs["odf_peaks"] = self._gen_filename("peaks", ext=".pklz") if self.inputs.save_seeds: if isdefined(self.inputs.seed_coord): - outputs['out_seeds'] = self.inputs.seed_coord + outputs["out_seeds"] = self.inputs.seed_coord else: - outputs['out_seeds'] = self._gen_filename('seeds', ext='.txt') + outputs["out_seeds"] = self._gen_filename("seeds", ext=".txt") return outputs def _gen_filename(self, name, ext=None): fname, fext = op.splitext(op.basename(self.inputs.in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext @@ -321,4 +362,4 @@ def _gen_filename(self, name, ext=None): if ext is None: ext = fext - return out_prefix + '_' + name + ext + return out_prefix + "_" + name + ext diff --git a/nipype/interfaces/dtitk/__init__.py b/nipype/interfaces/dtitk/__init__.py index a41c09e588..4210c1dd5d 100644 --- a/nipype/interfaces/dtitk/__init__.py +++ b/nipype/interfaces/dtitk/__init__.py @@ -1,13 +1,26 @@ -"""The dtitk module provides classes for interfacing with the `Diffusion -Tensor Imaging Toolkit (DTI-TK) +""" +DTI-TK is a spatial normalization and atlas construction toolkit for DTI. + +Interfaces for the `Diffusion Tensor Imaging Toolkit (DTI-TK) `_ command line tools. -Top-level namespace for dti-tk. 
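The seeding hunk above maps jittered voxel coordinates to world space with a homogeneous-coordinate product; nibabel ships the same transform as `apply_affine`, which makes the intent easy to verify. The affine and coordinates below are made up:

    import numpy as np
    from nibabel.affines import apply_affine

    affine = np.diag([2.0, 2.0, 2.0, 1.0])  # hypothetical 2 mm isotropic affine
    voxcoord = np.random.uniform(0, 10, size=(5, 3))
    seeds = affine.dot(np.vstack((voxcoord.T, np.ones((1, 5)))))[:3, :].T
    assert np.allclose(seeds, apply_affine(affine, voxcoord))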
""" -# from .base import () -from .registration import (Rigid, Affine, Diffeo, - ComposeXfm, DiffeoSymTensor3DVol, AffSymTensor3DVol, - AffScalarVol, DiffeoScalarVol) -from .utils import (TVAdjustVoxSp, SVAdjustVoxSp, TVResample, SVResample, - TVtool, BinThresh) +from .registration import ( + Rigid, + Affine, + Diffeo, + ComposeXfm, + DiffeoSymTensor3DVol, + AffSymTensor3DVol, + AffScalarVol, + DiffeoScalarVol, +) +from .utils import ( + TVAdjustVoxSp, + SVAdjustVoxSp, + TVResample, + SVResample, + TVtool, + BinThresh, +) diff --git a/nipype/interfaces/dtitk/base.py b/nipype/interfaces/dtitk/base.py index 5cfb81d9dd..6f46f8d404 100644 --- a/nipype/interfaces/dtitk/base.py +++ b/nipype/interfaces/dtitk/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The dtitk module provides classes for interfacing with the `DTITK @@ -25,9 +24,6 @@ See the docstrings of the individual classes for examples. """ -from __future__ import print_function, division, unicode_literals, \ - absolute_import - import os from ... import logging @@ -36,33 +32,33 @@ from nipype.interfaces.fsl.base import Info import warnings -LOGGER = logging.getLogger('nipype.interface') +LOGGER = logging.getLogger("nipype.interface") -class DTITKRenameMixin(object): +class DTITKRenameMixin: def __init__(self, *args, **kwargs): classes = [cls.__name__ for cls in self.__class__.mro()] dep_name = classes[0] - rename_idx = classes.index('DTITKRenameMixin') + rename_idx = classes.index("DTITKRenameMixin") new_name = classes[rename_idx + 1] - warnings.warn('The {} interface has been renamed to {}\n' - 'Please see the documentation for DTI-TK ' - 'interfaces, as some inputs have been ' - 'added or renamed for clarity.' - ''.format(dep_name, new_name), - DeprecationWarning) - super(DTITKRenameMixin, self).__init__(*args, **kwargs) + warnings.warn( + "The {} interface has been renamed to {}\n" + "Please see the documentation for DTI-TK " + "interfaces, as some inputs have been " + "added or renamed for clarity." + "".format(dep_name, new_name), + DeprecationWarning, + ) + super().__init__(*args, **kwargs) class CommandLineDtitk(CommandLine): - - def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, - ext=None): + def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None): """Generate a filename based on the given parameters. The filename will take the form: cwd/basename. - If change_ext is True, it will use the extentions specified in - intputs.output_type. + If change_ext is True, it will use the extensions specified in + inputs.output_type. Parameters ---------- @@ -83,9 +79,9 @@ def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, """ - if basename == '': - msg = 'Unable to generate filename for command %s. ' % self.cmd - msg += 'basename is not set!' + if basename == "": + msg = "Unable to generate filename for command %s. " % self.cmd + msg += "basename is not set!" 
raise ValueError(msg) if cwd is None: cwd = os.getcwd() @@ -93,11 +89,10 @@ def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext = Info.output_type_to_ext(self.inputs.output_type) if change_ext: if suffix: - suffix = ''.join((suffix, ext)) + suffix = f"{suffix}{ext}" else: suffix = ext if suffix is None: - suffix = '' - fname = fname_presuffix(basename, suffix=suffix, - use_ext=False, newpath=cwd) + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) return fname diff --git a/nipype/interfaces/dtitk/registration.py b/nipype/interfaces/dtitk/registration.py index 6aa40d4201..f077c37b75 100644 --- a/nipype/interfaces/dtitk/registration.py +++ b/nipype/interfaces/dtitk/registration.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """DTITK registration interfaces @@ -24,30 +23,65 @@ """ -from ..base import TraitedSpec, CommandLineInputSpec, traits, File, isdefined +from ..base import TraitedSpec, CommandLineInputSpec, traits, Tuple, File, isdefined from ...utils.filemanip import fname_presuffix, split_filename from .base import CommandLineDtitk, DTITKRenameMixin import os -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class RigidInputSpec(CommandLineInputSpec): - fixed_file = File(desc="fixed tensor volume", exists=True, - mandatory=True, position=0, argstr="%s", copyfile=False) - moving_file = File(desc="moving tensor volume", exists=True, - mandatory=True, position=1, argstr="%s", copyfile=False) - similarity_metric = traits.Enum('EDS', 'GDS', 'DDS', 'NMI', - mandatory=True, position=2, argstr="%s", - desc="similarity metric", usedefault=True) - sampling_xyz = traits.Tuple((4, 4, 4), mandatory=True, position=3, - argstr="%g %g %g", usedefault=True, - desc="dist between samp points (mm) (x,y,z)") - ftol = traits.Float(mandatory=True, position=4, argstr="%g", - desc="cost function tolerance", default_value=0.01, - usedefault=True) - initialize_xfm = File(copyfile=True, desc="Initialize w/DTITK-FORMAT" - "affine", position=5, argstr="%s", exists=True) + fixed_file = File( + desc="fixed tensor volume", + exists=True, + mandatory=True, + position=0, + argstr="%s", + copyfile=False, + ) + moving_file = File( + desc="moving tensor volume", + exists=True, + mandatory=True, + position=1, + argstr="%s", + copyfile=False, + ) + similarity_metric = traits.Enum( + "EDS", + "GDS", + "DDS", + "NMI", + mandatory=True, + position=2, + argstr="%s", + desc="similarity metric", + usedefault=True, + ) + sampling_xyz = Tuple( + (4, 4, 4), + mandatory=True, + position=3, + argstr="%g %g %g", + usedefault=True, + desc="dist between samp points (mm) (x,y,z)", + ) + ftol = traits.Float( + mandatory=True, + position=4, + argstr="%g", + desc="cost function tolerance", + default_value=0.01, + usedefault=True, + ) + initialize_xfm = File( + copyfile=True, + desc="Initialize w/DTITK-FORMAT affine", + position=5, + argstr="%s", + exists=True, + ) class RigidOutputSpec(TraitedSpec): @@ -72,27 +106,27 @@ class Rigid(CommandLineDtitk): 'dti_rigid_reg im1.nii im2.nii EDS 4 4 4 0.01' >>> node.run() # doctest: +SKIP """ + input_spec = RigidInputSpec output_spec = RigidOutputSpec - _cmd = 'dti_rigid_reg' + _cmd = "dti_rigid_reg" - '''def _format_arg(self, name, spec, value): + """def _format_arg(self, name, spec, value): if name == 'initialize_xfm': value = 1 - return super(Rigid, self)._format_arg(name, spec, value)''' + return 
super(Rigid, self)._format_arg(name, spec, value)""" def _run_interface(self, runtime): - runtime = super(Rigid, self)._run_interface(runtime) - if '''.aff doesn't exist or can't be opened''' in runtime.stderr: + runtime = super()._run_interface(runtime) + if """.aff doesn't exist or can't be opened""" in runtime.stderr: self.raise_exception(runtime) return runtime def _list_outputs(self): outputs = self.output_spec().get() moving = self.inputs.moving_file - outputs['out_file_xfm'] = fname_presuffix(moving, suffix='.aff', - use_ext=False) - outputs['out_file'] = fname_presuffix(moving, suffix='_aff') + outputs["out_file_xfm"] = fname_presuffix(moving, suffix=".aff", use_ext=False) + outputs["out_file"] = fname_presuffix(moving, suffix="_aff") return outputs @@ -114,24 +148,44 @@ class Affine(Rigid): 'dti_affine_reg im1.nii im2.nii EDS 4 4 4 0.01 im_affine.aff' >>> node.run() # doctest: +SKIP """ - _cmd = 'dti_affine_reg' + + _cmd = "dti_affine_reg" class DiffeoInputSpec(CommandLineInputSpec): - fixed_file = File(desc="fixed tensor volume", - exists=True, position=0, argstr="%s") - moving_file = File(desc="moving tensor volume", - exists=True, position=1, argstr="%s", copyfile=False) - mask_file = File(desc="mask", exists=True, position=2, argstr="%s") - legacy = traits.Enum(1, desc="legacy parameter; always set to 1", - usedefault=True, mandatory=True, - position=3, argstr="%d") - n_iters = traits.Int(6, desc="number of iterations", - mandatory=True, - position=4, argstr="%d", usedefault=True) - ftol = traits.Float(0.002, desc="iteration for the optimization to stop", - mandatory=True, position=5, argstr="%g", - usedefault=True) + fixed_file = File(desc="fixed tensor volume", exists=True, position=0, argstr="%s") + moving_file = File( + desc="moving tensor volume", + exists=True, + position=1, + argstr="%s", + copyfile=False, + ) + mask_file = File(desc="mask", exists=True, position=2, argstr="%s") + legacy = traits.Enum( + 1, + desc="legacy parameter; always set to 1", + usedefault=True, + mandatory=True, + position=3, + argstr="%d", + ) + n_iters = traits.Int( + 6, + desc="number of iterations", + mandatory=True, + position=4, + argstr="%d", + usedefault=True, + ) + ftol = traits.Float( + 0.002, + desc="iteration for the optimization to stop", + mandatory=True, + position=5, + argstr="%g", + usedefault=True, + ) class DiffeoOutputSpec(TraitedSpec): @@ -157,25 +211,27 @@ class Diffeo(CommandLineDtitk): 'dti_diffeomorphic_reg im1.nii im2.nii mask.nii 1 6 0.002' >>> node.run() # doctest: +SKIP """ + input_spec = DiffeoInputSpec output_spec = DiffeoOutputSpec - _cmd = 'dti_diffeomorphic_reg' + _cmd = "dti_diffeomorphic_reg" def _list_outputs(self): outputs = self.output_spec().get() moving = self.inputs.moving_file - outputs['out_file_xfm'] = fname_presuffix(moving, suffix='_diffeo.df') - outputs['out_file'] = fname_presuffix(moving, suffix='_diffeo') + outputs["out_file_xfm"] = fname_presuffix(moving, suffix="_diffeo.df") + outputs["out_file"] = fname_presuffix(moving, suffix="_diffeo") return outputs class ComposeXfmInputSpec(CommandLineInputSpec): - in_df = File(desc='diffeomorphic warp file', exists=True, - argstr="-df %s", mandatory=True) - in_aff = File(desc='affine transform file', exists=True, - argstr="-aff %s", mandatory=True) - out_file = File(desc='output path', - argstr="-out %s", genfile=True) + in_df = File( + desc="diffeomorphic warp file", exists=True, argstr="-df %s", mandatory=True + ) + in_aff = File( + desc="affine transform file", exists=True, argstr="-aff %s", 
mandatory=True + ) + out_file = File(desc="output path", argstr="-out %s", genfile=True) class ComposeXfmOutputSpec(TraitedSpec): @@ -198,60 +254,89 @@ class ComposeXfm(CommandLineDtitk): im_warp_affdf.df.nii' >>> node.run() # doctest: +SKIP """ + input_spec = ComposeXfmInputSpec output_spec = ComposeXfmOutputSpec - _cmd = 'dfRightComposeAffine' + _cmd = "dfRightComposeAffine" def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): - out_file = self._gen_filename('out_file') - outputs['out_file'] = os.path.abspath(out_file) + out_file = self._gen_filename("out_file") + outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name != 'out_file': + if name != "out_file": return path, base, ext = split_filename(self.inputs.in_df) - suffix = '_affdf' - if base.endswith('.df'): - suffix += '.df' + suffix = "_affdf" + if base.endswith(".df"): + suffix += ".df" base = base[:-3] return fname_presuffix(base, suffix=suffix + ext, use_ext=False) class AffSymTensor3DVolInputSpec(CommandLineInputSpec): - in_file = File(desc='moving tensor volume', exists=True, - argstr="-in %s", mandatory=True) - out_file = File(desc='output filename', - argstr="-out %s", name_source="in_file", - name_template="%s_affxfmd", keep_extension=True) - transform = File(exists=True, argstr="-trans %s", - xor=['target', 'translation', 'euler', 'deformation'], - desc='transform to apply: specify an input transformation' - ' file; parameters input will be ignored',) - interpolation = traits.Enum('LEI', 'EI', usedefault=True, - argstr="-interp %s", - desc='Log Euclidean/Euclidean Interpolation') - reorient = traits.Enum('PPD', 'NO', 'FS', argstr='-reorient %s', - usedefault=True, desc='Reorientation strategy: ' - 'preservation of principal direction, no ' - 'reorientation, or finite strain') - target = File(exists=True, argstr="-target %s", xor=['transform'], - desc='output volume specification read from the target ' - 'volume if specified') - translation = traits.Tuple((traits.Float(), traits.Float(), - traits.Float()), - desc='translation (x,y,z) in mm', - argstr='-translation %g %g %g', - xor=['transform']) - euler = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='(theta, phi, psi) in degrees', - xor=['transform'], argstr='-euler %g %g %g') - deformation = traits.Tuple((traits.Float(),) * 6, - desc='(xx,yy,zz,xy,yz,xz)', xor=['transform'], - argstr='-deformation %g %g %g %g %g %g') + in_file = File( + desc="moving tensor volume", exists=True, argstr="-in %s", mandatory=True + ) + out_file = File( + desc="output filename", + argstr="-out %s", + name_source="in_file", + name_template="%s_affxfmd", + keep_extension=True, + ) + transform = File( + exists=True, + argstr="-trans %s", + xor=["target", "translation", "euler", "deformation"], + desc="transform to apply: specify an input transformation" + " file; parameters input will be ignored", + ) + interpolation = traits.Enum( + "LEI", + "EI", + usedefault=True, + argstr="-interp %s", + desc="Log Euclidean/Euclidean Interpolation", + ) + reorient = traits.Enum( + "PPD", + "NO", + "FS", + argstr="-reorient %s", + usedefault=True, + desc="Reorientation strategy: " + "preservation of principal direction, no " + "reorientation, or finite strain", + ) + target = File( + exists=True, + argstr="-target %s", + xor=["transform"], + desc="output volume specification read from the target volume if specified", + ) + translation = Tuple( + (traits.Float(), 
traits.Float(), traits.Float()), + desc="translation (x,y,z) in mm", + argstr="-translation %g %g %g", + xor=["transform"], + ) + euler = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="(theta, phi, psi) in degrees", + xor=["transform"], + argstr="-euler %g %g %g", + ) + deformation = Tuple( + (traits.Float(),) * 6, + desc="(xx,yy,zz,xy,yz,xz)", + xor=["transform"], + argstr="-deformation %g %g %g %g %g %g", + ) class AffSymTensor3DVolOutputSpec(TraitedSpec): @@ -274,43 +359,65 @@ class AffSymTensor3DVol(CommandLineDtitk): -reorient PPD -trans im_affine.aff' >>> node.run() # doctest: +SKIP """ + input_spec = AffSymTensor3DVolInputSpec output_spec = AffSymTensor3DVolOutputSpec - _cmd = 'affineSymTensor3DVolume' + _cmd = "affineSymTensor3DVolume" class AffScalarVolInputSpec(CommandLineInputSpec): - in_file = File(desc='moving scalar volume', exists=True, - argstr="-in %s", mandatory=True) - out_file = File(desc='output filename', - argstr="-out %s", name_source="in_file", - name_template="%s_affxfmd", keep_extension=True) - transform = File(exists=True, argstr="-trans %s", - xor=['target', 'translation', 'euler', 'deformation'], - desc='transform to apply: specify an input transformation' - ' file; parameters input will be ignored',) - interpolation = traits.Enum('trilinear', 'NN', - usedefault=True, argstr="-interp %s", - desc='trilinear or nearest neighbor' - ' interpolation') - target = File(exists=True, argstr="-target %s", xor=['transform'], - desc='output volume specification read from the target ' - 'volume if specified') - translation = traits.Tuple((traits.Float(), traits.Float(), - traits.Float()), - desc='translation (x,y,z) in mm', - argstr='-translation %g %g %g', - xor=['transform']) - euler = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='(theta, phi, psi) in degrees', - xor=['transform'], argstr='-euler %g %g %g') - deformation = traits.Tuple((traits.Float(),) * 6, - desc='(xx,yy,zz,xy,yz,xz)', xor=['transform'], - argstr='-deformation %g %g %g %g %g %g') + in_file = File( + desc="moving scalar volume", exists=True, argstr="-in %s", mandatory=True + ) + out_file = File( + desc="output filename", + argstr="-out %s", + name_source="in_file", + name_template="%s_affxfmd", + keep_extension=True, + ) + transform = File( + exists=True, + argstr="-trans %s", + xor=["target", "translation", "euler", "deformation"], + desc="transform to apply: specify an input transformation" + " file; parameters input will be ignored", + ) + interpolation = traits.Enum( + "trilinear", + "NN", + usedefault=True, + argstr="-interp %s", + desc="trilinear or nearest neighbor interpolation", + ) + target = File( + exists=True, + argstr="-target %s", + xor=["transform"], + desc="output volume specification read from the target volume if specified", + ) + translation = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="translation (x,y,z) in mm", + argstr="-translation %g %g %g", + xor=["transform"], + ) + euler = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="(theta, phi, psi) in degrees", + xor=["transform"], + argstr="-euler %g %g %g", + ) + deformation = Tuple( + (traits.Float(),) * 6, + desc="(xx,yy,zz,xy,yz,xz)", + xor=["transform"], + argstr="-deformation %g %g %g %g %g %g", + ) class AffScalarVolOutputSpec(TraitedSpec): - out_file = File(desc='moved volume', exists=True) + out_file = File(desc="moved volume", exists=True) class AffScalarVol(CommandLineDtitk): @@ -329,43 +436,67 @@ class AffScalarVol(CommandLineDtitk): 
im_affine.aff' >>> node.run() # doctest: +SKIP """ + input_spec = AffScalarVolInputSpec output_spec = AffScalarVolOutputSpec - _cmd = 'affineScalarVolume' + _cmd = "affineScalarVolume" def _format_arg(self, name, spec, value): - if name == 'interpolation': - value = {'trilinear': 0, 'NN': 1}[value] - return super(AffScalarVol, self)._format_arg(name, spec, value) + if name == "interpolation": + value = {"trilinear": 0, "NN": 1}[value] + return super()._format_arg(name, spec, value) class DiffeoSymTensor3DVolInputSpec(CommandLineInputSpec): - in_file = File(desc='moving tensor volume', exists=True, - argstr="-in %s", mandatory=True) - out_file = File(desc='output filename', - argstr="-out %s", name_source="in_file", - name_template="%s_diffeoxfmd", keep_extension=True) - transform = File(exists=True, argstr="-trans %s", - mandatory=True, desc='transform to apply') - df = traits.Str('FD', argstr="-df %s", usedefault=True) - interpolation = traits.Enum('LEI', 'EI', usedefault=True, - argstr="-interp %s", - desc='Log Euclidean/Euclidean Interpolation') - reorient = traits.Enum('PPD', 'FS', argstr='-reorient %s', - usedefault=True, desc='Reorientation strategy: ' - 'preservation of principal direction or finite ' - 'strain') - target = File(exists=True, argstr="-target %s", xor=['voxel_size'], - desc='output volume specification read from the target ' - 'volume if specified') - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz voxel size (superseded by target)', - argstr="-vsize %g %g %g", xor=['target']) - flip = traits.Tuple((traits.Int(), traits.Int(), traits.Int()), - argstr="-flip %d %d %d") - resampling_type = traits.Enum('backward', 'forward', - desc='use backward or forward resampling', - argstr="-type %s") + in_file = File( + desc="moving tensor volume", exists=True, argstr="-in %s", mandatory=True + ) + out_file = File( + desc="output filename", + argstr="-out %s", + name_source="in_file", + name_template="%s_diffeoxfmd", + keep_extension=True, + ) + transform = File( + exists=True, argstr="-trans %s", mandatory=True, desc="transform to apply" + ) + df = traits.Str("FD", argstr="-df %s", usedefault=True) + interpolation = traits.Enum( + "LEI", + "EI", + usedefault=True, + argstr="-interp %s", + desc="Log Euclidean/Euclidean Interpolation", + ) + reorient = traits.Enum( + "PPD", + "FS", + argstr="-reorient %s", + usedefault=True, + desc="Reorientation strategy: " + "preservation of principal direction or finite " + "strain", + ) + target = File( + exists=True, + argstr="-target %s", + xor=["voxel_size"], + desc="output volume specification read from the target volume if specified", + ) + voxel_size = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz voxel size (superseded by target)", + argstr="-vsize %g %g %g", + xor=["target"], + ) + flip = Tuple((traits.Int(), traits.Int(), traits.Int()), argstr="-flip %d %d %d") + resampling_type = traits.Enum( + "backward", + "forward", + desc="use backward or forward resampling", + argstr="-type %s", + ) class DiffeoSymTensor3DVolOutputSpec(TraitedSpec): @@ -391,41 +522,58 @@ class DiffeoSymTensor3DVol(CommandLineDtitk): input_spec = DiffeoSymTensor3DVolInputSpec output_spec = DiffeoSymTensor3DVolOutputSpec - _cmd = 'deformationSymTensor3DVolume' + _cmd = "deformationSymTensor3DVolume" def _format_arg(self, name, spec, value): - if name == 'resampling_type': - value = {'forward': 0, 'backward': 1}[value] - return super(DiffeoSymTensor3DVol, self)._format_arg(name, spec, value) + if name 
== "resampling_type": + value = {"forward": 0, "backward": 1}[value] + return super()._format_arg(name, spec, value) class DiffeoScalarVolInputSpec(CommandLineInputSpec): - in_file = File(desc='moving scalar volume', exists=True, - argstr="-in %s", mandatory=True) - out_file = File(desc='output filename', - argstr="-out %s", name_source="in_file", - name_template="%s_diffeoxfmd", keep_extension=True) - transform = File(exists=True, argstr="-trans %s", - mandatory=True, desc='transform to apply') - target = File(exists=True, argstr="-target %s", xor=['voxel_size'], - desc='output volume specification read from the target ' - 'volume if specified') - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz voxel size (superseded by target)', - argstr="-vsize %g %g %g", xor=['target']) - flip = traits.Tuple((traits.Int(), traits.Int(), traits.Int()), - argstr="-flip %d %d %d") - resampling_type = traits.Enum('backward', 'forward', - desc='use backward or forward resampling', - argstr="-type %s") - interpolation = traits.Enum('trilinear', 'NN', - desc='trilinear, or nearest neighbor', - argstr="-interp %s", - usedefault=True) + in_file = File( + desc="moving scalar volume", exists=True, argstr="-in %s", mandatory=True + ) + out_file = File( + desc="output filename", + argstr="-out %s", + name_source="in_file", + name_template="%s_diffeoxfmd", + keep_extension=True, + ) + transform = File( + exists=True, argstr="-trans %s", mandatory=True, desc="transform to apply" + ) + target = File( + exists=True, + argstr="-target %s", + xor=["voxel_size"], + desc="output volume specification read from the target volume if specified", + ) + voxel_size = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz voxel size (superseded by target)", + argstr="-vsize %g %g %g", + xor=["target"], + ) + flip = Tuple((traits.Int(), traits.Int(), traits.Int()), argstr="-flip %d %d %d") + resampling_type = traits.Enum( + "backward", + "forward", + desc="use backward or forward resampling", + argstr="-type %s", + ) + interpolation = traits.Enum( + "trilinear", + "NN", + desc="trilinear, or nearest neighbor", + argstr="-interp %s", + usedefault=True, + ) class DiffeoScalarVolOutputSpec(TraitedSpec): - out_file = File(desc='moved volume', exists=True) + out_file = File(desc="moved volume", exists=True) class DiffeoScalarVol(CommandLineDtitk): @@ -447,14 +595,14 @@ class DiffeoScalarVol(CommandLineDtitk): input_spec = DiffeoScalarVolInputSpec output_spec = DiffeoScalarVolOutputSpec - _cmd = 'deformationScalarVolume' + _cmd = "deformationScalarVolume" def _format_arg(self, name, spec, value): - if name == 'resampling_type': - value = {'forward': 0, 'backward': 1}[value] - elif name == 'interpolation': - value = {'trilinear': 0, 'NN': 1}[value] - return super(DiffeoScalarVol, self)._format_arg(name, spec, value) + if name == "resampling_type": + value = {"forward": 0, "backward": 1}[value] + elif name == "interpolation": + value = {"trilinear": 0, "NN": 1}[value] + return super()._format_arg(name, spec, value) class RigidTask(DTITKRenameMixin, Rigid): diff --git a/nipype/interfaces/dtitk/tests/__init__.py b/nipype/interfaces/dtitk/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/interfaces/dtitk/tests/__init__.py +++ b/nipype/interfaces/dtitk/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git 
a/nipype/interfaces/dtitk/tests/test_auto_AffScalarVol.py b/nipype/interfaces/dtitk/tests/test_auto_AffScalarVol.py index 5f9262d788..2988e44e8d 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_AffScalarVol.py +++ b/nipype/interfaces/dtitk/tests/test_auto_AffScalarVol.py @@ -1,48 +1,53 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import AffScalarVol def test_AffScalarVol_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), deformation=dict( - argstr='-deformation %g %g %g %g %g %g', - xor=['transform'], + argstr="-deformation %g %g %g %g %g %g", + xor=["transform"], ), environ=dict( nohash=True, usedefault=True, ), euler=dict( - argstr='-euler %g %g %g', - xor=['transform'], + argstr="-euler %g %g %g", + xor=["transform"], ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), interpolation=dict( - argstr='-interp %s', + argstr="-interp %s", usedefault=True, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_affxfmd', + name_source="in_file", + name_template="%s_affxfmd", ), target=dict( - argstr='-target %s', - xor=['transform'], + argstr="-target %s", + extensions=None, + xor=["transform"], ), transform=dict( - argstr='-trans %s', - xor=['target', 'translation', 'euler', 'deformation'], + argstr="-trans %s", + extensions=None, + xor=["target", "translation", "euler", "deformation"], ), translation=dict( - argstr='-translation %g %g %g', - xor=['transform'], + argstr="-translation %g %g %g", + xor=["transform"], ), ) inputs = AffScalarVol.input_spec() @@ -50,8 +55,14 @@ def test_AffScalarVol_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AffScalarVol_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = AffScalarVol.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_AffSymTensor3DVol.py b/nipype/interfaces/dtitk/tests/test_auto_AffSymTensor3DVol.py index 6d7abc852a..d1ba18a8ac 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_AffSymTensor3DVol.py +++ b/nipype/interfaces/dtitk/tests/test_auto_AffSymTensor3DVol.py @@ -1,52 +1,57 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import AffSymTensor3DVol def test_AffSymTensor3DVol_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), deformation=dict( - argstr='-deformation %g %g %g %g %g %g', - xor=['transform'], + argstr="-deformation %g %g %g %g %g %g", + xor=["transform"], ), environ=dict( nohash=True, usedefault=True, ), euler=dict( - argstr='-euler %g %g %g', - xor=['transform'], + argstr="-euler %g %g %g", + xor=["transform"], ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), interpolation=dict( - argstr='-interp %s', + argstr="-interp %s", usedefault=True, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_affxfmd', + name_source="in_file", + name_template="%s_affxfmd", ), reorient=dict( - argstr='-reorient %s', + argstr="-reorient %s", usedefault=True, ), target=dict( - argstr='-target %s', - 
xor=['transform'], + argstr="-target %s", + extensions=None, + xor=["transform"], ), transform=dict( - argstr='-trans %s', - xor=['target', 'translation', 'euler', 'deformation'], + argstr="-trans %s", + extensions=None, + xor=["target", "translation", "euler", "deformation"], ), translation=dict( - argstr='-translation %g %g %g', - xor=['transform'], + argstr="-translation %g %g %g", + xor=["transform"], ), ) inputs = AffSymTensor3DVol.input_spec() @@ -54,8 +59,14 @@ def test_AffSymTensor3DVol_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AffSymTensor3DVol_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = AffSymTensor3DVol.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_Affine.py b/nipype/interfaces/dtitk/tests/test_auto_Affine.py index 78d2e6f011..0ff24b788c 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_Affine.py +++ b/nipype/interfaces/dtitk/tests/test_auto_Affine.py @@ -1,46 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import Affine def test_Affine_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=0, ), ftol=dict( - argstr='%g', + argstr="%g", mandatory=True, position=4, usedefault=True, ), initialize_xfm=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, position=5, ), moving_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( - argstr='%g %g %g', + argstr="%g %g %g", mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( - argstr='%s', + argstr="%s", mandatory=True, position=2, usedefault=True, @@ -51,10 +55,16 @@ def test_Affine_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Affine_outputs(): output_map = dict( - out_file=dict(), - out_file_xfm=dict(), + out_file=dict( + extensions=None, + ), + out_file_xfm=dict( + extensions=None, + ), ) outputs = Affine.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_AffineTask.py b/nipype/interfaces/dtitk/tests/test_auto_AffineTask.py index 5f3b43153a..c680c08815 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_AffineTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_AffineTask.py @@ -1,46 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import AffineTask def test_AffineTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=0, ), ftol=dict( - argstr='%g', + argstr="%g", mandatory=True, position=4, usedefault=True, ), initialize_xfm=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, position=5, ), moving_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( - argstr='%g %g %g', + argstr="%g %g %g", 
mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( - argstr='%s', + argstr="%s", mandatory=True, position=2, usedefault=True, @@ -51,10 +55,16 @@ def test_AffineTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AffineTask_outputs(): output_map = dict( - out_file=dict(), - out_file_xfm=dict(), + out_file=dict( + extensions=None, + ), + out_file_xfm=dict( + extensions=None, + ), ) outputs = AffineTask.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_BinThresh.py b/nipype/interfaces/dtitk/tests/test_auto_BinThresh.py index cbd4efccb2..994c8a2b8d 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_BinThresh.py +++ b/nipype/interfaces/dtitk/tests/test_auto_BinThresh.py @@ -1,47 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import BinThresh def test_BinThresh_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, ), inside_value=dict( - argstr='%g', + argstr="%g", mandatory=True, position=4, usedefault=True, ), lower_bound=dict( - argstr='%g', + argstr="%g", mandatory=True, position=2, usedefault=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_thrbin', + name_source="in_file", + name_template="%s_thrbin", position=1, ), outside_value=dict( - argstr='%g', + argstr="%g", mandatory=True, position=5, usedefault=True, ), upper_bound=dict( - argstr='%g', + argstr="%g", mandatory=True, position=3, usedefault=True, @@ -52,8 +55,14 @@ def test_BinThresh_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinThresh_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = BinThresh.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_BinThreshTask.py b/nipype/interfaces/dtitk/tests/test_auto_BinThreshTask.py index dcd396abe2..cccf7eed27 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_BinThreshTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_BinThreshTask.py @@ -1,47 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import BinThreshTask def test_BinThreshTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, ), inside_value=dict( - argstr='%g', + argstr="%g", mandatory=True, position=4, usedefault=True, ), lower_bound=dict( - argstr='%g', + argstr="%g", mandatory=True, position=2, usedefault=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_thrbin', + name_source="in_file", + name_template="%s_thrbin", position=1, ), outside_value=dict( - argstr='%g', + argstr="%g", mandatory=True, position=5, usedefault=True, ), upper_bound=dict( - argstr='%g', + argstr="%g", mandatory=True, position=3, usedefault=True, @@ -52,8 +55,14 @@ def 
test_BinThreshTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinThreshTask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = BinThreshTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_CommandLineDtitk.py b/nipype/interfaces/dtitk/tests/test_auto_CommandLineDtitk.py index b43fcc5e97..03044b6bc4 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_CommandLineDtitk.py +++ b/nipype/interfaces/dtitk/tests/test_auto_CommandLineDtitk.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import CommandLineDtitk def test_CommandLineDtitk_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/dtitk/tests/test_auto_ComposeXfm.py b/nipype/interfaces/dtitk/tests/test_auto_ComposeXfm.py index 09fcdc186e..fa34fdcdb3 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_ComposeXfm.py +++ b/nipype/interfaces/dtitk/tests/test_auto_ComposeXfm.py @@ -1,25 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import ComposeXfm def test_ComposeXfm_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_aff=dict( - argstr='-aff %s', + argstr="-aff %s", + extensions=None, mandatory=True, ), in_df=dict( - argstr='-df %s', + argstr="-df %s", + extensions=None, mandatory=True, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, genfile=True, ), ) @@ -28,8 +32,14 @@ def test_ComposeXfm_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComposeXfm_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ComposeXfm.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_ComposeXfmTask.py b/nipype/interfaces/dtitk/tests/test_auto_ComposeXfmTask.py index 99c6f6d340..b4b2f7509e 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_ComposeXfmTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_ComposeXfmTask.py @@ -1,25 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import ComposeXfmTask def test_ComposeXfmTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_aff=dict( - argstr='-aff %s', + argstr="-aff %s", + extensions=None, mandatory=True, ), in_df=dict( - argstr='-df %s', + argstr="-df %s", + extensions=None, mandatory=True, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, genfile=True, ), ) @@ -28,8 +32,14 @@ def test_ComposeXfmTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComposeXfmTask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = 
ComposeXfmTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_Diffeo.py b/nipype/interfaces/dtitk/tests/test_auto_Diffeo.py index ad532bd631..89a1bdcfc8 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_Diffeo.py +++ b/nipype/interfaces/dtitk/tests/test_auto_Diffeo.py @@ -1,42 +1,46 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import Diffeo def test_Diffeo_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=0, ), ftol=dict( - argstr='%g', + argstr="%g", mandatory=True, position=5, usedefault=True, ), legacy=dict( - argstr='%d', + argstr="%d", mandatory=True, position=3, usedefault=True, ), mask_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, ), moving_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, position=1, ), n_iters=dict( - argstr='%d', + argstr="%d", mandatory=True, position=4, usedefault=True, @@ -47,10 +51,16 @@ def test_Diffeo_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Diffeo_outputs(): output_map = dict( - out_file=dict(), - out_file_xfm=dict(), + out_file=dict( + extensions=None, + ), + out_file_xfm=dict( + extensions=None, + ), ) outputs = Diffeo.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_DiffeoScalarVol.py b/nipype/interfaces/dtitk/tests/test_auto_DiffeoScalarVol.py index 7d1305d384..39255fb5c1 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_DiffeoScalarVol.py +++ b/nipype/interfaces/dtitk/tests/test_auto_DiffeoScalarVol.py @@ -1,42 +1,51 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import DiffeoScalarVol def test_DiffeoScalarVol_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - flip=dict(argstr='-flip %d %d %d', ), + flip=dict( + argstr="-flip %d %d %d", + ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), interpolation=dict( - argstr='-interp %s', + argstr="-interp %s", usedefault=True, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_diffeoxfmd', + name_source="in_file", + name_template="%s_diffeoxfmd", + ), + resampling_type=dict( + argstr="-type %s", ), - resampling_type=dict(argstr='-type %s', ), target=dict( - argstr='-target %s', - xor=['voxel_size'], + argstr="-target %s", + extensions=None, + xor=["voxel_size"], ), transform=dict( - argstr='-trans %s', + argstr="-trans %s", + extensions=None, mandatory=True, ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target'], + argstr="-vsize %g %g %g", + xor=["target"], ), ) inputs = DiffeoScalarVol.input_spec() @@ -44,8 +53,14 @@ def test_DiffeoScalarVol_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DiffeoScalarVol_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = DiffeoScalarVol.output_spec() for key, metadata in 
list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_DiffeoSymTensor3DVol.py b/nipype/interfaces/dtitk/tests/test_auto_DiffeoSymTensor3DVol.py index b934c56d2b..123b741645 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_DiffeoSymTensor3DVol.py +++ b/nipype/interfaces/dtitk/tests/test_auto_DiffeoSymTensor3DVol.py @@ -1,50 +1,59 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import DiffeoSymTensor3DVol def test_DiffeoSymTensor3DVol_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), df=dict( - argstr='-df %s', + argstr="-df %s", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), - flip=dict(argstr='-flip %d %d %d', ), + flip=dict( + argstr="-flip %d %d %d", + ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), interpolation=dict( - argstr='-interp %s', + argstr="-interp %s", usedefault=True, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_diffeoxfmd', + name_source="in_file", + name_template="%s_diffeoxfmd", ), reorient=dict( - argstr='-reorient %s', + argstr="-reorient %s", usedefault=True, ), - resampling_type=dict(argstr='-type %s', ), + resampling_type=dict( + argstr="-type %s", + ), target=dict( - argstr='-target %s', - xor=['voxel_size'], + argstr="-target %s", + extensions=None, + xor=["voxel_size"], ), transform=dict( - argstr='-trans %s', + argstr="-trans %s", + extensions=None, mandatory=True, ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target'], + argstr="-vsize %g %g %g", + xor=["target"], ), ) inputs = DiffeoSymTensor3DVol.input_spec() @@ -52,8 +61,14 @@ def test_DiffeoSymTensor3DVol_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DiffeoSymTensor3DVol_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = DiffeoSymTensor3DVol.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_DiffeoTask.py b/nipype/interfaces/dtitk/tests/test_auto_DiffeoTask.py index 5aea665d4c..f7914ab9cc 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_DiffeoTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_DiffeoTask.py @@ -1,42 +1,46 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import DiffeoTask def test_DiffeoTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=0, ), ftol=dict( - argstr='%g', + argstr="%g", mandatory=True, position=5, usedefault=True, ), legacy=dict( - argstr='%d', + argstr="%d", mandatory=True, position=3, usedefault=True, ), mask_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, ), moving_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, position=1, ), n_iters=dict( - argstr='%d', + argstr="%d", mandatory=True, position=4, usedefault=True, @@ -47,10 +51,16 @@ def test_DiffeoTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_DiffeoTask_outputs(): output_map = dict( - out_file=dict(), - out_file_xfm=dict(), + out_file=dict( + extensions=None, + ), + out_file_xfm=dict( + extensions=None, + ), ) outputs = DiffeoTask.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_Rigid.py b/nipype/interfaces/dtitk/tests/test_auto_Rigid.py index ecb7c2d33b..dbcc6f0fcf 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_Rigid.py +++ b/nipype/interfaces/dtitk/tests/test_auto_Rigid.py @@ -1,46 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import Rigid def test_Rigid_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=0, ), ftol=dict( - argstr='%g', + argstr="%g", mandatory=True, position=4, usedefault=True, ), initialize_xfm=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, position=5, ), moving_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( - argstr='%g %g %g', + argstr="%g %g %g", mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( - argstr='%s', + argstr="%s", mandatory=True, position=2, usedefault=True, @@ -51,10 +55,16 @@ def test_Rigid_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Rigid_outputs(): output_map = dict( - out_file=dict(), - out_file_xfm=dict(), + out_file=dict( + extensions=None, + ), + out_file_xfm=dict( + extensions=None, + ), ) outputs = Rigid.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_RigidTask.py b/nipype/interfaces/dtitk/tests/test_auto_RigidTask.py index c627fda741..6c5236607c 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_RigidTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_RigidTask.py @@ -1,46 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import RigidTask def test_RigidTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=0, ), ftol=dict( - argstr='%g', + argstr="%g", mandatory=True, position=4, usedefault=True, ), initialize_xfm=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, position=5, ), moving_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( - argstr='%g %g %g', + argstr="%g %g %g", mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( - argstr='%s', + argstr="%s", mandatory=True, position=2, usedefault=True, @@ -51,10 +55,16 @@ def test_RigidTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RigidTask_outputs(): output_map = dict( - out_file=dict(), - out_file_xfm=dict(), + out_file=dict( + extensions=None, + ), + out_file_xfm=dict( + extensions=None, + ), ) outputs = RigidTask.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSp.py b/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSp.py index 
3574906455..3d32a314bd 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSp.py +++ b/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSp.py @@ -1,36 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import SVAdjustVoxSp def test_SVAdjustVoxSp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], + argstr="-origin %g %g %g", + xor=["target_file"], ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_avs', + name_source="in_file", + name_template="%s_avs", ), target_file=dict( - argstr='-target %s', - xor=['voxel_size', 'origin'], + argstr="-target %s", + extensions=None, + xor=["voxel_size", "origin"], ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-vsize %g %g %g", + xor=["target_file"], ), ) inputs = SVAdjustVoxSp.input_spec() @@ -38,8 +42,14 @@ def test_SVAdjustVoxSp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SVAdjustVoxSp_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SVAdjustVoxSp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSpTask.py b/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSpTask.py index 40a7592c19..cedc693a24 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSpTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSpTask.py @@ -1,36 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import SVAdjustVoxSpTask def test_SVAdjustVoxSpTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], + argstr="-origin %g %g %g", + xor=["target_file"], ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_avs', + name_source="in_file", + name_template="%s_avs", ), target_file=dict( - argstr='-target %s', - xor=['voxel_size', 'origin'], + argstr="-target %s", + extensions=None, + xor=["voxel_size", "origin"], ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-vsize %g %g %g", + xor=["target_file"], ), ) inputs = SVAdjustVoxSpTask.input_spec() @@ -38,8 +42,14 @@ def test_SVAdjustVoxSpTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SVAdjustVoxSpTask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SVAdjustVoxSpTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_SVResample.py b/nipype/interfaces/dtitk/tests/test_auto_SVResample.py index 91ca638f22..fc880cd3e7 100644 
--- a/nipype/interfaces/dtitk/tests/test_auto_SVResample.py +++ b/nipype/interfaces/dtitk/tests/test_auto_SVResample.py @@ -1,41 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import SVResample def test_SVResample_inputs(): input_map = dict( - align=dict(argstr='-align %s', ), - args=dict(argstr='%s', ), + align=dict( + argstr="-align %s", + ), + args=dict( + argstr="%s", + ), array_size=dict( - argstr='-size %d %d %d', - xor=['target_file'], + argstr="-size %d %d %d", + xor=["target_file"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], + argstr="-origin %g %g %g", + xor=["target_file"], ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_resampled', + name_source="in_file", + name_template="%s_resampled", ), target_file=dict( - argstr='-target %s', - xor=['array_size', 'voxel_size', 'origin'], + argstr="-target %s", + extensions=None, + xor=["array_size", "voxel_size", "origin"], ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-vsize %g %g %g", + xor=["target_file"], ), ) inputs = SVResample.input_spec() @@ -43,8 +49,14 @@ def test_SVResample_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SVResample_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SVResample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_SVResampleTask.py b/nipype/interfaces/dtitk/tests/test_auto_SVResampleTask.py index 8a7574bfd8..88412e68ae 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_SVResampleTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_SVResampleTask.py @@ -1,41 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import SVResampleTask def test_SVResampleTask_inputs(): input_map = dict( - align=dict(argstr='-align %s', ), - args=dict(argstr='%s', ), + align=dict( + argstr="-align %s", + ), + args=dict( + argstr="%s", + ), array_size=dict( - argstr='-size %d %d %d', - xor=['target_file'], + argstr="-size %d %d %d", + xor=["target_file"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], + argstr="-origin %g %g %g", + xor=["target_file"], ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_resampled', + name_source="in_file", + name_template="%s_resampled", ), target_file=dict( - argstr='-target %s', - xor=['array_size', 'voxel_size', 'origin'], + argstr="-target %s", + extensions=None, + xor=["array_size", "voxel_size", "origin"], ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-vsize %g %g %g", + xor=["target_file"], ), ) inputs = SVResampleTask.input_spec() @@ -43,8 +49,14 @@ def test_SVResampleTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + 
+ def test_SVResampleTask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SVResampleTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustOriginTask.py b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustOriginTask.py index bda9128369..333e05628d 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustOriginTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustOriginTask.py @@ -1,36 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TVAdjustOriginTask def test_TVAdjustOriginTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], + argstr="-origin %g %g %g", + xor=["target_file"], ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_avs', + name_source="in_file", + name_template="%s_avs", ), target_file=dict( - argstr='-target %s', - xor=['voxel_size', 'origin'], + argstr="-target %s", + extensions=None, + xor=["voxel_size", "origin"], ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-vsize %g %g %g", + xor=["target_file"], ), ) inputs = TVAdjustOriginTask.input_spec() @@ -38,8 +42,14 @@ def test_TVAdjustOriginTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TVAdjustOriginTask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TVAdjustOriginTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSp.py b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSp.py index b8ce9039d5..9c6596042b 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSp.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSp.py @@ -1,36 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TVAdjustVoxSp def test_TVAdjustVoxSp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], + argstr="-origin %g %g %g", + xor=["target_file"], ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_avs', + name_source="in_file", + name_template="%s_avs", ), target_file=dict( - argstr='-target %s', - xor=['voxel_size', 'origin'], + argstr="-target %s", + extensions=None, + xor=["voxel_size", "origin"], ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-vsize %g %g %g", + xor=["target_file"], ), ) inputs = TVAdjustVoxSp.input_spec() @@ -38,8 +42,14 @@ def test_TVAdjustVoxSp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_TVAdjustVoxSp_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TVAdjustVoxSp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSpTask.py b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSpTask.py index e9ef8137dc..f34a76ae7b 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSpTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSpTask.py @@ -1,36 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TVAdjustVoxSpTask def test_TVAdjustVoxSpTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], + argstr="-origin %g %g %g", + xor=["target_file"], ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_avs', + name_source="in_file", + name_template="%s_avs", ), target_file=dict( - argstr='-target %s', - xor=['voxel_size', 'origin'], + argstr="-target %s", + extensions=None, + xor=["voxel_size", "origin"], ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-vsize %g %g %g", + xor=["target_file"], ), ) inputs = TVAdjustVoxSpTask.input_spec() @@ -38,8 +42,14 @@ def test_TVAdjustVoxSpTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TVAdjustVoxSpTask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TVAdjustVoxSpTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVResample.py b/nipype/interfaces/dtitk/tests/test_auto_TVResample.py index aefafc6430..2ca99176f8 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVResample.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVResample.py @@ -1,42 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TVResample def test_TVResample_inputs(): input_map = dict( - align=dict(argstr='-align %s', ), - args=dict(argstr='%s', ), + align=dict( + argstr="-align %s", + ), + args=dict( + argstr="%s", + ), array_size=dict( - argstr='-size %d %d %d', - xor=['target_file'], + argstr="-size %d %d %d", + xor=["target_file"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), - interpolation=dict(argstr='-interp %s', ), + interpolation=dict( + argstr="-interp %s", + ), origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], + argstr="-origin %g %g %g", + xor=["target_file"], ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_resampled', + name_source="in_file", + name_template="%s_resampled", ), target_file=dict( - argstr='-target %s', - xor=['array_size', 'voxel_size', 'origin'], + argstr="-target %s", + extensions=None, + xor=["array_size", "voxel_size", "origin"], ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + 
argstr="-vsize %g %g %g", + xor=["target_file"], ), ) inputs = TVResample.input_spec() @@ -44,8 +52,14 @@ def test_TVResample_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TVResample_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TVResample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVResampleTask.py b/nipype/interfaces/dtitk/tests/test_auto_TVResampleTask.py index b3c70bb729..d1f908fca8 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVResampleTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVResampleTask.py @@ -1,42 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TVResampleTask def test_TVResampleTask_inputs(): input_map = dict( - align=dict(argstr='-align %s', ), - args=dict(argstr='%s', ), + align=dict( + argstr="-align %s", + ), + args=dict( + argstr="%s", + ), array_size=dict( - argstr='-size %d %d %d', - xor=['target_file'], + argstr="-size %d %d %d", + xor=["target_file"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), - interpolation=dict(argstr='-interp %s', ), + interpolation=dict( + argstr="-interp %s", + ), origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], + argstr="-origin %g %g %g", + xor=["target_file"], ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_resampled', + name_source="in_file", + name_template="%s_resampled", ), target_file=dict( - argstr='-target %s', - xor=['array_size', 'voxel_size', 'origin'], + argstr="-target %s", + extensions=None, + xor=["array_size", "voxel_size", "origin"], ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-vsize %g %g %g", + xor=["target_file"], ), ) inputs = TVResampleTask.input_spec() @@ -44,8 +52,14 @@ def test_TVResampleTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TVResampleTask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TVResampleTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVtool.py b/nipype/interfaces/dtitk/tests/test_auto_TVtool.py index 2267228631..11e2d05acc 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVtool.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVtool.py @@ -1,22 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TVtool def test_TVtool_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), - in_flag=dict(argstr='-%s', ), + in_flag=dict( + argstr="-%s", + ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, genfile=True, ), ) @@ -25,8 +30,14 @@ def test_TVtool_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_TVtool_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TVtool.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVtoolTask.py b/nipype/interfaces/dtitk/tests/test_auto_TVtoolTask.py index 252d7c9d0a..7af7bcb75b 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVtoolTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVtoolTask.py @@ -1,22 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TVtoolTask def test_TVtoolTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), - in_flag=dict(argstr='-%s', ), + in_flag=dict( + argstr="-%s", + ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, genfile=True, ), ) @@ -25,8 +30,14 @@ def test_TVtoolTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TVtoolTask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TVtoolTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_affScalarVolTask.py b/nipype/interfaces/dtitk/tests/test_auto_affScalarVolTask.py index 72330737b6..8d3ebfad98 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_affScalarVolTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_affScalarVolTask.py @@ -1,48 +1,53 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import affScalarVolTask def test_affScalarVolTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), deformation=dict( - argstr='-deformation %g %g %g %g %g %g', - xor=['transform'], + argstr="-deformation %g %g %g %g %g %g", + xor=["transform"], ), environ=dict( nohash=True, usedefault=True, ), euler=dict( - argstr='-euler %g %g %g', - xor=['transform'], + argstr="-euler %g %g %g", + xor=["transform"], ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), interpolation=dict( - argstr='-interp %s', + argstr="-interp %s", usedefault=True, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_affxfmd', + name_source="in_file", + name_template="%s_affxfmd", ), target=dict( - argstr='-target %s', - xor=['transform'], + argstr="-target %s", + extensions=None, + xor=["transform"], ), transform=dict( - argstr='-trans %s', - xor=['target', 'translation', 'euler', 'deformation'], + argstr="-trans %s", + extensions=None, + xor=["target", "translation", "euler", "deformation"], ), translation=dict( - argstr='-translation %g %g %g', - xor=['transform'], + argstr="-translation %g %g %g", + xor=["transform"], ), ) inputs = affScalarVolTask.input_spec() @@ -50,8 +55,14 @@ def test_affScalarVolTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_affScalarVolTask_outputs(): - output_map = dict(out_file=dict(), ) + 
output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = affScalarVolTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_affSymTensor3DVolTask.py b/nipype/interfaces/dtitk/tests/test_auto_affSymTensor3DVolTask.py index da8fae25cc..fbfde68e86 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_affSymTensor3DVolTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_affSymTensor3DVolTask.py @@ -1,52 +1,57 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import affSymTensor3DVolTask def test_affSymTensor3DVolTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), deformation=dict( - argstr='-deformation %g %g %g %g %g %g', - xor=['transform'], + argstr="-deformation %g %g %g %g %g %g", + xor=["transform"], ), environ=dict( nohash=True, usedefault=True, ), euler=dict( - argstr='-euler %g %g %g', - xor=['transform'], + argstr="-euler %g %g %g", + xor=["transform"], ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), interpolation=dict( - argstr='-interp %s', + argstr="-interp %s", usedefault=True, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_affxfmd', + name_source="in_file", + name_template="%s_affxfmd", ), reorient=dict( - argstr='-reorient %s', + argstr="-reorient %s", usedefault=True, ), target=dict( - argstr='-target %s', - xor=['transform'], + argstr="-target %s", + extensions=None, + xor=["transform"], ), transform=dict( - argstr='-trans %s', - xor=['target', 'translation', 'euler', 'deformation'], + argstr="-trans %s", + extensions=None, + xor=["target", "translation", "euler", "deformation"], ), translation=dict( - argstr='-translation %g %g %g', - xor=['transform'], + argstr="-translation %g %g %g", + xor=["transform"], ), ) inputs = affSymTensor3DVolTask.input_spec() @@ -54,8 +59,14 @@ def test_affSymTensor3DVolTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_affSymTensor3DVolTask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = affSymTensor3DVolTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_diffeoScalarVolTask.py b/nipype/interfaces/dtitk/tests/test_auto_diffeoScalarVolTask.py index 10965b7077..4a7ffee6f4 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_diffeoScalarVolTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_diffeoScalarVolTask.py @@ -1,42 +1,51 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import diffeoScalarVolTask def test_diffeoScalarVolTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - flip=dict(argstr='-flip %d %d %d', ), + flip=dict( + argstr="-flip %d %d %d", + ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), interpolation=dict( - argstr='-interp %s', + argstr="-interp %s", usedefault=True, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_diffeoxfmd', + 
name_source="in_file", + name_template="%s_diffeoxfmd", + ), + resampling_type=dict( + argstr="-type %s", ), - resampling_type=dict(argstr='-type %s', ), target=dict( - argstr='-target %s', - xor=['voxel_size'], + argstr="-target %s", + extensions=None, + xor=["voxel_size"], ), transform=dict( - argstr='-trans %s', + argstr="-trans %s", + extensions=None, mandatory=True, ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target'], + argstr="-vsize %g %g %g", + xor=["target"], ), ) inputs = diffeoScalarVolTask.input_spec() @@ -44,8 +53,14 @@ def test_diffeoScalarVolTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_diffeoScalarVolTask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = diffeoScalarVolTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_diffeoSymTensor3DVolTask.py b/nipype/interfaces/dtitk/tests/test_auto_diffeoSymTensor3DVolTask.py index 52112735b1..6724343e69 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_diffeoSymTensor3DVolTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_diffeoSymTensor3DVolTask.py @@ -1,50 +1,59 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import diffeoSymTensor3DVolTask def test_diffeoSymTensor3DVolTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), df=dict( - argstr='-df %s', + argstr="-df %s", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), - flip=dict(argstr='-flip %d %d %d', ), + flip=dict( + argstr="-flip %d %d %d", + ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), interpolation=dict( - argstr='-interp %s', + argstr="-interp %s", usedefault=True, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_diffeoxfmd', + name_source="in_file", + name_template="%s_diffeoxfmd", ), reorient=dict( - argstr='-reorient %s', + argstr="-reorient %s", usedefault=True, ), - resampling_type=dict(argstr='-type %s', ), + resampling_type=dict( + argstr="-type %s", + ), target=dict( - argstr='-target %s', - xor=['voxel_size'], + argstr="-target %s", + extensions=None, + xor=["voxel_size"], ), transform=dict( - argstr='-trans %s', + argstr="-trans %s", + extensions=None, mandatory=True, ), voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target'], + argstr="-vsize %g %g %g", + xor=["target"], ), ) inputs = diffeoSymTensor3DVolTask.input_spec() @@ -52,8 +61,14 @@ def test_diffeoSymTensor3DVolTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_diffeoSymTensor3DVolTask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = diffeoSymTensor3DVolTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/utils.py b/nipype/interfaces/dtitk/utils.py index 3ed6e61395..6f185fe393 100644 --- a/nipype/interfaces/dtitk/utils.py +++ b/nipype/interfaces/dtitk/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 
sw=4 et: """DTITK utility interfaces @@ -24,30 +23,40 @@ """ -from ..base import TraitedSpec, CommandLineInputSpec, File, traits, isdefined +from ..base import TraitedSpec, CommandLineInputSpec, File, traits, Tuple, isdefined from ...utils.filemanip import fname_presuffix from .base import CommandLineDtitk, DTITKRenameMixin import os -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class TVAdjustVoxSpInputSpec(CommandLineInputSpec): - in_file = File(desc="tensor volume to modify", exists=True, - mandatory=True, argstr="-in %s") - out_file = File(desc='output path', - argstr="-out %s", name_source='in_file', - name_template='%s_avs', keep_extension=True) - target_file = File(desc='target volume to match', - argstr="-target %s", - xor=['voxel_size', 'origin']) - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz voxel size (superseded by target)', - argstr="-vsize %g %g %g", xor=['target_file']) - origin = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz origin (superseded by target)', - argstr='-origin %g %g %g', - xor=['target_file']) + in_file = File( + desc="tensor volume to modify", exists=True, mandatory=True, argstr="-in %s" + ) + out_file = File( + desc="output path", + argstr="-out %s", + name_source="in_file", + name_template="%s_avs", + keep_extension=True, + ) + target_file = File( + desc="target volume to match", argstr="-target %s", xor=["voxel_size", "origin"] + ) + voxel_size = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz voxel size (superseded by target)", + argstr="-vsize %g %g %g", + xor=["target_file"], + ) + origin = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz origin (superseded by target)", + argstr="-origin %g %g %g", + xor=["target_file"], + ) class TVAdjustVoxSpOutputSpec(TraitedSpec): @@ -56,11 +65,10 @@ class TVAdjustVoxSpOutputSpec(TraitedSpec): class TVAdjustVoxSp(CommandLineDtitk): """ - Adjusts the voxel space of a tensor volume + Adjusts the voxel space of a tensor volume. 
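
# A short sketch of how the xor metadata in TVAdjustVoxSpInputSpec plays out
# in practice. Assumptions: im1.nii and im2.nii exist (as in the doctests
# below), and nipype rejects mutually exclusive inputs when they are set --
# the exact exception type may vary across nipype versions, so this is
# hedged with a broad except.

from nipype.interfaces import dtitk

node = dtitk.TVAdjustVoxSp()
node.inputs.in_file = "im1.nii"
node.inputs.voxel_size = (1.0, 1.0, 1.0)  # xor=["target_file"]
try:
    node.inputs.target_file = "im2.nii"   # mutually exclusive with voxel_size
except Exception as err:
    print("rejected:", err)

# Note also name_source/name_template in the spec: with in_file='im1.nii'
# and no explicit out_file, the generated command line uses im1_avs.nii
# (name_template="%s_avs" with keep_extension=True).
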
Example ------- - >>> from nipype.interfaces import dtitk >>> node = dtitk.TVAdjustVoxSp() >>> node.inputs.in_file = 'im1.nii' @@ -68,27 +76,40 @@ class TVAdjustVoxSp(CommandLineDtitk): >>> node.cmdline 'TVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' >>> node.run() # doctest: +SKIP + """ + input_spec = TVAdjustVoxSpInputSpec output_spec = TVAdjustVoxSpOutputSpec - _cmd = 'TVAdjustVoxelspace' + _cmd = "TVAdjustVoxelspace" class SVAdjustVoxSpInputSpec(CommandLineInputSpec): - in_file = File(desc="scalar volume to modify", exists=True, - mandatory=True, argstr="-in %s") - out_file = File(desc='output path', argstr="-out %s", - name_source="in_file", name_template='%s_avs', - keep_extension=True) - target_file = File(desc='target volume to match', - argstr="-target %s", xor=['voxel_size', 'origin']) - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz voxel size (superseded by target)', - argstr="-vsize %g %g %g", xor=['target_file']) - origin = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz origin (superseded by target)', - argstr='-origin %g %g %g', - xor=['target_file']) + in_file = File( + desc="scalar volume to modify", exists=True, mandatory=True, argstr="-in %s" + ) + out_file = File( + desc="output path", + argstr="-out %s", + name_source="in_file", + name_template="%s_avs", + keep_extension=True, + ) + target_file = File( + desc="target volume to match", argstr="-target %s", xor=["voxel_size", "origin"] + ) + voxel_size = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz voxel size (superseded by target)", + argstr="-vsize %g %g %g", + xor=["target_file"], + ) + origin = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz origin (superseded by target)", + argstr="-origin %g %g %g", + xor=["target_file"], + ) class SVAdjustVoxSpOutputSpec(TraitedSpec): @@ -97,46 +118,68 @@ class SVAdjustVoxSpOutputSpec(TraitedSpec): class SVAdjustVoxSp(CommandLineDtitk): """ - Adjusts the voxel space of a scalar volume - - Example - ------- - - >>> from nipype.interfaces import dtitk - >>> node = dtitk.SVAdjustVoxSp() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.target_file = 'im2.nii' - >>> node.cmdline - 'SVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' - >>> node.run() # doctest: +SKIP - """ + Adjusts the voxel space of a scalar volume. 
+ + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.SVAdjustVoxSp() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.target_file = 'im2.nii' + >>> node.cmdline + 'SVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' + >>> node.run() # doctest: +SKIP + + """ + input_spec = SVAdjustVoxSpInputSpec output_spec = SVAdjustVoxSpOutputSpec - _cmd = 'SVAdjustVoxelspace' + _cmd = "SVAdjustVoxelspace" class TVResampleInputSpec(CommandLineInputSpec): - in_file = File(desc="tensor volume to resample", exists=True, - mandatory=True, argstr="-in %s") - out_file = File(desc='output path', - name_source="in_file", name_template="%s_resampled", - keep_extension=True, argstr="-out %s") - target_file = File(desc='specs read from the target volume', - argstr="-target %s", - xor=['array_size', 'voxel_size', 'origin']) - align = traits.Enum('center', 'origin', argstr="-align %s", - desc='how to align output volume to input volume') - interpolation = traits.Enum('LEI', 'EI', argstr="-interp %s", - desc='Log Euclidean Euclidean Interpolation') - array_size = traits.Tuple((traits.Int(), traits.Int(), traits.Int()), - desc='resampled array size', xor=['target_file'], - argstr="-size %d %d %d") - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='resampled voxel size', xor=['target_file'], - argstr="-vsize %g %g %g") - origin = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz origin', xor=['target_file'], - argstr='-origin %g %g %g') + in_file = File( + desc="tensor volume to resample", exists=True, mandatory=True, argstr="-in %s" + ) + out_file = File( + desc="output path", + name_source="in_file", + name_template="%s_resampled", + keep_extension=True, + argstr="-out %s", + ) + target_file = File( + desc="specs read from the target volume", + argstr="-target %s", + xor=["array_size", "voxel_size", "origin"], + ) + align = traits.Enum( + "center", + "origin", + argstr="-align %s", + desc="how to align output volume to input volume", + ) + interpolation = traits.Enum( + "LEI", "EI", argstr="-interp %s", desc="Log Euclidean Interpolation" + ) + array_size = Tuple( + (traits.Int(), traits.Int(), traits.Int()), + desc="resampled array size", + xor=["target_file"], + argstr="-size %d %d %d", + ) + voxel_size = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="resampled voxel size", + xor=["target_file"], + argstr="-vsize %g %g %g", + ) + origin = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz origin", + xor=["target_file"], + argstr="-origin %g %g %g", + ) class TVResampleOutputSpec(TraitedSpec): @@ -145,44 +188,65 @@ class TVResampleOutputSpec(TraitedSpec): class TVResample(CommandLineDtitk): """ - Resamples a tensor volume - - Example - ------- - - >>> from nipype.interfaces import dtitk - >>> node = dtitk.TVResample() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.target_file = 'im2.nii' - >>> node.cmdline - 'TVResample -in im1.nii -out im1_resampled.nii -target im2.nii' - >>> node.run() # doctest: +SKIP - """ + Resamples a tensor volume. 
+ + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.TVResample() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.target_file = 'im2.nii' + >>> node.cmdline + 'TVResample -in im1.nii -out im1_resampled.nii -target im2.nii' + >>> node.run() # doctest: +SKIP + + """ + input_spec = TVResampleInputSpec output_spec = TVResampleOutputSpec - _cmd = 'TVResample' + _cmd = "TVResample" class SVResampleInputSpec(CommandLineInputSpec): - in_file = File(desc="image to resample", exists=True, - mandatory=True, argstr="-in %s") - out_file = File(desc='output path', - name_source="in_file", name_template="%s_resampled", - keep_extension=True, argstr="-out %s") - target_file = File(desc='specs read from the target volume', - argstr="-target %s", - xor=['array_size', 'voxel_size', 'origin']) - align = traits.Enum('center', 'origin', argstr="-align %s", - desc='how to align output volume to input volume') - array_size = traits.Tuple((traits.Int(), traits.Int(), traits.Int()), - desc='resampled array size', xor=['target_file'], - argstr="-size %d %d %d") - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='resampled voxel size', xor=['target_file'], - argstr="-vsize %g %g %g") - origin = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz origin', xor=['target_file'], - argstr='-origin %g %g %g') + in_file = File( + desc="image to resample", exists=True, mandatory=True, argstr="-in %s" + ) + out_file = File( + desc="output path", + name_source="in_file", + name_template="%s_resampled", + keep_extension=True, + argstr="-out %s", + ) + target_file = File( + desc="specs read from the target volume", + argstr="-target %s", + xor=["array_size", "voxel_size", "origin"], + ) + align = traits.Enum( + "center", + "origin", + argstr="-align %s", + desc="how to align output volume to input volume", + ) + array_size = Tuple( + (traits.Int(), traits.Int(), traits.Int()), + desc="resampled array size", + xor=["target_file"], + argstr="-size %d %d %d", + ) + voxel_size = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="resampled voxel size", + xor=["target_file"], + argstr="-vsize %g %g %g", + ) + origin = Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz origin", + xor=["target_file"], + argstr="-origin %g %g %g", + ) class SVResampleOutputSpec(TraitedSpec): @@ -191,30 +255,31 @@ class SVResampleOutputSpec(TraitedSpec): class SVResample(CommandLineDtitk): """ - Resamples a scalar volume - - Example - ------- - - >>> from nipype.interfaces import dtitk - >>> node = dtitk.SVResample() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.target_file = 'im2.nii' - >>> node.cmdline - 'SVResample -in im1.nii -out im1_resampled.nii -target im2.nii' - >>> node.run() # doctest: +SKIP - """ + Resamples a scalar volume. 
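
# Note the import change at the top of this file: these specs now take
# Tuple from nipype.interfaces.base rather than traits.Tuple. A minimal
# sketch of the fixed-length tuple input pattern used throughout this
# module (ResampleLikeInputSpec is a hypothetical spec class; the field
# mirrors voxel_size above):

from nipype.interfaces.base import CommandLineInputSpec, Tuple, traits


class ResampleLikeInputSpec(CommandLineInputSpec):
    voxel_size = Tuple(
        (traits.Float(), traits.Float(), traits.Float()),
        argstr="-vsize %g %g %g",  # three floats fill the three %g slots
        desc="resampled voxel size",
    )
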
+ + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.SVResample() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.target_file = 'im2.nii' + >>> node.cmdline + 'SVResample -in im1.nii -out im1_resampled.nii -target im2.nii' + >>> node.run() # doctest: +SKIP + + """ + input_spec = SVResampleInputSpec output_spec = SVResampleOutputSpec - _cmd = 'SVResample' + _cmd = "SVResample" class TVtoolInputSpec(CommandLineInputSpec): - in_file = File(desc="scalar volume to resample", exists=True, - argstr="-in %s", mandatory=True) - '''NOTE: there are a lot more options here; not implementing all of them''' - in_flag = traits.Enum('fa', 'tr', 'ad', 'rd', 'pd', 'rgb', - argstr="-%s", desc='') + in_file = File( + desc="scalar volume to resample", exists=True, argstr="-in %s", mandatory=True + ) + """NOTE: there are a lot more options here; not implementing all of them""" + in_flag = traits.Enum("fa", "tr", "ad", "rd", "pd", "rgb", argstr="-%s", desc="") out_file = File(argstr="-out %s", genfile=True) @@ -224,59 +289,91 @@ class TVtoolOutputSpec(TraitedSpec): class TVtool(CommandLineDtitk): """ - Calculates a tensor metric volume from a tensor volume - - Example - ------- - - >>> from nipype.interfaces import dtitk - >>> node = dtitk.TVtool() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.in_flag = 'fa' - >>> node.cmdline - 'TVtool -in im1.nii -fa -out im1_fa.nii' - >>> node.run() # doctest: +SKIP - """ + Calculates a tensor metric volume from a tensor volume. + + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.TVtool() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.in_flag = 'fa' + >>> node.cmdline + 'TVtool -in im1.nii -fa -out im1_fa.nii' + >>> node.run() # doctest: +SKIP + + """ + input_spec = TVtoolInputSpec output_spec = TVtoolOutputSpec - _cmd = 'TVtool' + _cmd = "TVtool" def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): - out_file = self._gen_filename('out_file') - outputs['out_file'] = os.path.abspath(out_file) + out_file = self._gen_filename("out_file") + outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name != 'out_file': + if name != "out_file": return - return fname_presuffix(os.path.basename(self.inputs.in_file), - suffix='_' + self.inputs.in_flag) + return fname_presuffix( + os.path.basename(self.inputs.in_file), suffix="_" + self.inputs.in_flag + ) -'''Note: SVTool not implemented at this time''' +"""Note: SVTool not implemented at this time""" class BinThreshInputSpec(CommandLineInputSpec): - in_file = File(desc='Image to threshold/binarize', exists=True, - position=0, argstr="%s", mandatory=True) - out_file = File(desc='output path', position=1, argstr="%s", - keep_extension=True, name_source='in_file', - name_template='%s_thrbin') - lower_bound = traits.Float(0.01, usedefault=True, - position=2, argstr="%g", mandatory=True, - desc='lower bound of binarization range') - upper_bound = traits.Float(100, usedefault=True, - position=3, argstr="%g", mandatory=True, - desc='upper bound of binarization range') - inside_value = traits.Float(1, position=4, argstr="%g", usedefault=True, - mandatory=True, desc='value for voxels in ' - 'binarization range') - outside_value = traits.Float(0, position=5, argstr="%g", usedefault=True, - mandatory=True, desc='value for voxels' - 'outside of binarization range') + in_file = File( + desc="Image to threshold/binarize", + exists=True, + position=0, + 
argstr="%s", + mandatory=True, + ) + out_file = File( + desc="output path", + position=1, + argstr="%s", + keep_extension=True, + name_source="in_file", + name_template="%s_thrbin", + ) + lower_bound = traits.Float( + 0.01, + usedefault=True, + position=2, + argstr="%g", + mandatory=True, + desc="lower bound of binarization range", + ) + upper_bound = traits.Float( + 100, + usedefault=True, + position=3, + argstr="%g", + mandatory=True, + desc="upper bound of binarization range", + ) + inside_value = traits.Float( + 1, + position=4, + argstr="%g", + usedefault=True, + mandatory=True, + desc="value for voxels in binarization range", + ) + outside_value = traits.Float( + 0, + position=5, + argstr="%g", + usedefault=True, + mandatory=True, + desc="value for voxels outside of binarization range", + ) class BinThreshOutputSpec(TraitedSpec): @@ -285,11 +382,10 @@ class BinThreshOutputSpec(TraitedSpec): class BinThresh(CommandLineDtitk): """ - Binarizes an image + Binarizes an image. Example ------- - >>> from nipype.interfaces import dtitk >>> node = dtitk.BinThresh() >>> node.inputs.in_file = 'im1.nii' @@ -300,11 +396,12 @@ class BinThresh(CommandLineDtitk): >>> node.cmdline 'BinaryThresholdImageFilter im1.nii im1_thrbin.nii 0 100 1 0' >>> node.run() # doctest: +SKIP + """ input_spec = BinThreshInputSpec output_spec = BinThreshOutputSpec - _cmd = 'BinaryThresholdImageFilter' + _cmd = "BinaryThresholdImageFilter" class BinThreshTask(DTITKRenameMixin, BinThresh): diff --git a/nipype/interfaces/dynamic_slicer.py b/nipype/interfaces/dynamic_slicer.py index 5d3a3c1899..1fede10507 100644 --- a/nipype/interfaces/dynamic_slicer.py +++ b/nipype/interfaces/dynamic_slicer.py @@ -1,35 +1,36 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) - +"""Experimental Slicer wrapper - Work in progress.""" import os import warnings import xml.dom.minidom -from .base import (CommandLine, CommandLineInputSpec, DynamicTraitedSpec, - traits, Undefined, File, isdefined) +from .base import ( + CommandLine, + CommandLineInputSpec, + DynamicTraitedSpec, + traits, + Undefined, + File, + isdefined, +) class SlicerCommandLineInputSpec(DynamicTraitedSpec, CommandLineInputSpec): - module = traits.Str( - desc="name of the Slicer command line module you want to use") + module = traits.Str(desc="name of the Slicer command line module you want to use") class SlicerCommandLine(CommandLine): - """Experimental Slicer wrapper. Work in progress. + """Experimental Slicer wrapper. 
Work in progress.""" - """ _cmd = "Slicer3" input_spec = SlicerCommandLineInputSpec output_spec = DynamicTraitedSpec def _grab_xml(self, module): cmd = CommandLine( - command="Slicer3", - resource_monitor=False, - args="--launch %s --xml" % module) + command="Slicer3", resource_monitor=False, args="--launch %s --xml" % module + ) ret = cmd.run() if ret.runtime.returncode == 0: return xml.dom.minidom.parseString(ret.runtime.stdout) @@ -37,11 +38,11 @@ def _grab_xml(self, module): raise Exception(cmd.cmdline + " failed:\n%s" % ret.runtime.stderr) def _outputs(self): - base = super(SlicerCommandLine, self)._outputs() + base = super()._outputs() undefined_output_traits = {} for key in [ - node.getElementsByTagName('name')[0].firstChild.nodeValue - for node in self._outputs_nodes + node.getElementsByTagName("name")[0].firstChild.nodeValue + for node in self._outputs_nodes ]: base.add_trait(key, File(exists=True)) undefined_output_traits[key] = Undefined @@ -50,9 +51,8 @@ def _outputs(self): return base def __init__(self, module, **inputs): - warnings.warn('slicer is Not fully implemented', RuntimeWarning) - super(SlicerCommandLine, self).__init__( - command="Slicer3 --launch %s " % module, name=module, **inputs) + warnings.warn("slicer is Not fully implemented", RuntimeWarning) + super().__init__(command="Slicer3 --launch %s " % module, name=module, **inputs) dom = self._grab_xml(module) self._outputs_filenames = {} @@ -62,95 +62,89 @@ def __init__(self, module, **inputs): for paramGroup in dom.getElementsByTagName("parameters"): for param in paramGroup.childNodes: - if param.nodeName in [ - 'label', 'description', '#text', '#comment' - ]: + if param.nodeName in ["label", "description", "#text", "#comment"]: continue traitsParams = {} - name = param.getElementsByTagName('name')[ - 0].firstChild.nodeValue + name = param.getElementsByTagName("name")[0].firstChild.nodeValue - longFlagNode = param.getElementsByTagName('longflag') + longFlagNode = param.getElementsByTagName("longflag") if longFlagNode: - traitsParams[ - "argstr"] = "--" + longFlagNode[0].firstChild.nodeValue + " " + traitsParams["argstr"] = ( + "--" + longFlagNode[0].firstChild.nodeValue + " " + ) else: traitsParams["argstr"] = "--" + name + " " argsDict = { - 'file': '%s', - 'integer': "%d", - 'double': "%f", - 'float': "%f", - 'image': "%s", - 'transform': "%s", - 'boolean': '', - 'string-enumeration': '%s', - 'string': "%s" + "file": "%s", + "integer": "%d", + "double": "%f", + "float": "%f", + "image": "%s", + "transform": "%s", + "boolean": "", + "string-enumeration": "%s", + "string": "%s", } - if param.nodeName.endswith('-vector'): + if param.nodeName.endswith("-vector"): traitsParams["argstr"] += argsDict[param.nodeName[:-7]] else: traitsParams["argstr"] += argsDict[param.nodeName] - index = param.getElementsByTagName('index') + index = param.getElementsByTagName("index") if index: traitsParams["position"] = index[0].firstChild.nodeValue - desc = param.getElementsByTagName('description') + desc = param.getElementsByTagName("description") if index: traitsParams["desc"] = desc[0].firstChild.nodeValue - name = param.getElementsByTagName('name')[ - 0].firstChild.nodeValue + name = param.getElementsByTagName("name")[0].firstChild.nodeValue typesDict = { - 'integer': traits.Int, - 'double': traits.Float, - 'float': traits.Float, - 'image': File, - 'transform': File, - 'boolean': traits.Bool, - 'string': traits.Str, - 'file': File + "integer": traits.Int, + "double": traits.Float, + "float": traits.Float, + "image": File, + 
"transform": File, + "boolean": traits.Bool, + "string": traits.Str, + "file": File, } - if param.nodeName == 'string-enumeration': + if param.nodeName == "string-enumeration": type = traits.Enum values = [ el.firstChild.nodeValue - for el in param.getElementsByTagName('element') + for el in param.getElementsByTagName("element") ] - elif param.nodeName.endswith('-vector'): + elif param.nodeName.endswith("-vector"): type = traits.List values = [typesDict[param.nodeName[:-7]]] - traitsParams["sep"] = ',' + traitsParams["sep"] = "," else: values = [] type = typesDict[param.nodeName] - if param.nodeName in [ - 'file', 'directory', 'image', 'transform' - ] and param.getElementsByTagName( - 'channel')[0].firstChild.nodeValue == 'output': - self.inputs.add_trait(name, - traits.Either( - traits.Bool, File, - **traitsParams)) + if ( + param.nodeName in ["file", "directory", "image", "transform"] + and param.getElementsByTagName("channel")[0].firstChild.nodeValue + == "output" + ): + self.inputs.add_trait( + name, traits.Either(traits.Bool, File, **traitsParams) + ) undefined_traits[name] = Undefined # traitsParams["exists"] = True - self._outputs_filenames[ - name] = self._gen_filename_from_param(param) + self._outputs_filenames[name] = self._gen_filename_from_param(param) # undefined_output_traits[name] = Undefined # self._outputs().add_trait(name, File(*values, **traitsParams)) self._outputs_nodes.append(param) else: - if param.nodeName in [ - 'file', 'directory', 'image', 'transform' - ]: + if param.nodeName in ["file", "directory", "image", "transform"]: traitsParams["exists"] = True self.inputs.add_trait(name, type(*values, **traitsParams)) undefined_traits[name] = Undefined @@ -166,23 +160,18 @@ def _gen_filename(self, name): return None def _gen_filename_from_param(self, param): - base = param.getElementsByTagName('name')[0].firstChild.nodeValue + base = param.getElementsByTagName("name")[0].firstChild.nodeValue fileExtensions = param.getAttribute("fileExtensions") if fileExtensions: ext = fileExtensions else: - ext = { - 'image': '.nii', - 'transform': '.txt', - 'file': '' - }[param.nodeName] + ext = {"image": ".nii", "transform": ".txt", "file": ""}[param.nodeName] return base + ext def _list_outputs(self): outputs = self.output_spec().get() for output_node in self._outputs_nodes: - name = output_node.getElementsByTagName('name')[ - 0].firstChild.nodeValue + name = output_node.getElementsByTagName("name")[0].firstChild.nodeValue outputs[name] = getattr(self.inputs, name) if isdefined(outputs[name]) and isinstance(outputs[name], bool): if outputs[name]: @@ -193,15 +182,15 @@ def _list_outputs(self): def _format_arg(self, name, spec, value): if name in [ - output_node.getElementsByTagName('name')[0] - .firstChild.nodeValue for output_node in self._outputs_nodes + output_node.getElementsByTagName("name")[0].firstChild.nodeValue + for output_node in self._outputs_nodes ]: if isinstance(value, bool): fname = self._gen_filename(name) else: fname = value return spec.argstr % fname - return super(SlicerCommandLine, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) # test = SlicerCommandLine(module="BRAINSFit") diff --git a/nipype/interfaces/elastix/__init__.py b/nipype/interfaces/elastix/__init__.py index 9dcdb88c18..1f1116af69 100644 --- a/nipype/interfaces/elastix/__init__.py +++ b/nipype/interfaces/elastix/__init__.py @@ -1,10 +1,5 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set 
ft=python sts=4 ts=4 sw=4 et: -"""Top-level namespace for elastix.""" - -from __future__ import absolute_import - +"""elastix is a toolbox for rigid and nonrigid registration of images.""" from .registration import Registration, ApplyWarp, AnalyzeWarp, PointsWarp from .utils import EditTransform diff --git a/nipype/interfaces/elastix/base.py b/nipype/interfaces/elastix/base.py index b47e1fec17..61fe288ff6 100644 --- a/nipype/interfaces/elastix/base.py +++ b/nipype/interfaces/elastix/base.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The :py:mod:`nipype.interfaces.elastix` provides the interface to @@ -9,24 +7,26 @@ """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) from ... import logging from ..base import CommandLineInputSpec, Directory, traits -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") class ElastixBaseInputSpec(CommandLineInputSpec): output_path = Directory( - './', + "./", exists=True, mandatory=True, usedefault=True, - argstr='-out %s', - desc='output directory') + argstr="-out %s", + desc="output directory", + ) num_threads = traits.Int( - 1, usedefault=True, - argstr='-threads %01d', + 1, + usedefault=True, + argstr="-threads %01d", nohash=True, - desc='set the maximum number of threads of elastix') + desc="set the maximum number of threads of elastix", + ) diff --git a/nipype/interfaces/elastix/registration.py b/nipype/interfaces/elastix/registration.py index 3a815963a2..ead163de0b 100644 --- a/nipype/interfaces/elastix/registration.py +++ b/nipype/interfaces/elastix/registration.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -7,10 +5,6 @@ displacement maps to images and points. 
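
# A hedged sketch of how the interfaces defined below chain together:
# Registration (elastix) writes TransformParameters.N.txt files under
# output_path, and ApplyWarp (transformix) consumes one of them. Assumes
# the elastix binaries are on PATH and that the input files exist; all
# paths here are illustrative placeholders.

from nipype.interfaces.elastix import Registration, ApplyWarp

reg = Registration()
reg.inputs.fixed_image = "fixed.nii"          # -f
reg.inputs.moving_image = "moving.nii"        # -m
reg.inputs.parameters = ["elastix_pars.txt"]  # -p, one or more files
res = reg.run()

warp = ApplyWarp()
warp.inputs.moving_image = "moving.nii"                 # -in
warp.inputs.transform_file = res.outputs.transform[-1]  # -tp, last level
print(warp.run().outputs.warped_file)  # result.nii.gz in the -out directory
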
""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open - import os.path as op import re @@ -18,38 +12,37 @@ from .base import ElastixBaseInputSpec from ..base import CommandLine, TraitedSpec, File, traits, InputMultiPath -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class RegistrationInputSpec(ElastixBaseInputSpec): - fixed_image = File( - exists=True, mandatory=True, argstr='-f %s', desc='fixed image') + fixed_image = File(exists=True, mandatory=True, argstr="-f %s", desc="fixed image") moving_image = File( - exists=True, mandatory=True, argstr='-m %s', desc='moving image') + exists=True, mandatory=True, argstr="-m %s", desc="moving image" + ) parameters = InputMultiPath( File(exists=True), mandatory=True, - argstr='-p %s...', - desc='parameter file, elastix handles 1 or more -p') - fixed_mask = File( - exists=True, argstr='-fMask %s', desc='mask for fixed image') - moving_mask = File( - exists=True, argstr='-mMask %s', desc='mask for moving image') + argstr="-p %s...", + desc="parameter file, elastix handles 1 or more -p", + ) + fixed_mask = File(exists=True, argstr="-fMask %s", desc="mask for fixed image") + moving_mask = File(exists=True, argstr="-mMask %s", desc="mask for moving image") initial_transform = File( - exists=True, - argstr='-t0 %s', - desc='parameter file for initial transform') + exists=True, argstr="-t0 %s", desc="parameter file for initial transform" + ) class RegistrationOutputSpec(TraitedSpec): - transform = InputMultiPath(File(exists=True), desc='output transform') - warped_file = File(desc='input moving image warped to fixed image') + transform = InputMultiPath(File(exists=True), desc="output transform") + warped_file = File(desc="input moving image warped to fixed image") warped_files = InputMultiPath( File(exists=False), - desc=('input moving image warped to fixed image at each level')) + desc=("input moving image warped to fixed image at each level"), + ) warped_files_flags = traits.List( - traits.Bool(False), - desc='flag indicating if warped image was generated') + traits.Bool(False), desc="flag indicating if warped image was generated" + ) class Registration(CommandLine): @@ -70,7 +63,7 @@ class Registration(CommandLine): """ - _cmd = 'elastix' + _cmd = "elastix" input_spec = RegistrationInputSpec output_spec = RegistrationOutputSpec @@ -79,37 +72,39 @@ def _list_outputs(self): out_dir = op.abspath(self.inputs.output_path) - regex = re.compile(r'^\((\w+)\s(.+)\)$') + regex = re.compile(r"^\((\w+)\s(.+)\)$") - outputs['transform'] = [] - outputs['warped_files'] = [] - outputs['warped_files_flags'] = [] + outputs["transform"] = [] + outputs["warped_files"] = [] + outputs["warped_files_flags"] = [] for i, params in enumerate(self.inputs.parameters): config = {} - with open(params, 'r') as f: - for line in f.readlines(): + with open(params) as f: + for line in f: line = line.strip() - if not line.startswith('//') and line: + if not line.startswith("//") and line: m = regex.search(line) if m: value = self._cast(m.group(2).strip()) config[m.group(1).strip()] = value - outputs['transform'].append( - op.join(out_dir, 'TransformParameters.%01d.txt' % i)) + outputs["transform"].append( + op.join(out_dir, "TransformParameters.%01d.txt" % i) + ) warped_file = None - if config['WriteResultImage']: - warped_file = op.join(out_dir, 'result.%01d.%s' % - (i, config['ResultImageFormat'])) + if config["WriteResultImage"]: + warped_file = op.join( + out_dir, 
"result.%01d.%s" % (i, config["ResultImageFormat"]) + ) - outputs['warped_files'].append(warped_file) - outputs['warped_files_flags'].append(config['WriteResultImage']) + outputs["warped_files"].append(warped_file) + outputs["warped_files_flags"].append(config["WriteResultImage"]) - if outputs['warped_files_flags'][-1]: - outputs['warped_file'] = outputs['warped_files'][-1] + if outputs["warped_files_flags"][-1]: + outputs["warped_file"] = outputs["warped_files"][-1] return outputs @@ -135,18 +130,17 @@ class ApplyWarpInputSpec(ElastixBaseInputSpec): transform_file = File( exists=True, mandatory=True, - argstr='-tp %s', - desc='transform-parameter file, only 1') + argstr="-tp %s", + desc="transform-parameter file, only 1", + ) moving_image = File( - exists=True, - argstr='-in %s', - mandatory=True, - desc='input image to deform') + exists=True, argstr="-in %s", mandatory=True, desc="input image to deform" + ) class ApplyWarpOutputSpec(TraitedSpec): - warped_file = File(desc='input moving image warped to fixed image') + warped_file = File(desc="input moving image warped to fixed image") class ApplyWarp(CommandLine): @@ -167,44 +161,47 @@ class ApplyWarp(CommandLine): """ - _cmd = 'transformix' + _cmd = "transformix" input_spec = ApplyWarpInputSpec output_spec = ApplyWarpOutputSpec def _list_outputs(self): outputs = self._outputs().get() out_dir = op.abspath(self.inputs.output_path) - outputs['warped_file'] = op.join(out_dir, 'result.nii.gz') + outputs["warped_file"] = op.join(out_dir, "result.nii.gz") return outputs class AnalyzeWarpInputSpec(ApplyWarpInputSpec): points = traits.Enum( - 'all', + "all", usedefault=True, position=0, - argstr='-def %s', - desc='transform all points from the input-image, which effectively' - ' generates a deformation field.') + argstr="-def %s", + desc="transform all points from the input-image, which effectively" + " generates a deformation field.", + ) jac = traits.Enum( - 'all', + "all", usedefault=True, - argstr='-jac %s', - desc='generate an image with the determinant of the spatial Jacobian') + argstr="-jac %s", + desc="generate an image with the determinant of the spatial Jacobian", + ) jacmat = traits.Enum( - 'all', + "all", usedefault=True, - argstr='-jacmat %s', - desc='generate an image with the spatial Jacobian matrix at each voxel') + argstr="-jacmat %s", + desc="generate an image with the spatial Jacobian matrix at each voxel", + ) moving_image = File( - exists=True, - argstr='-in %s', - desc='input image to deform (not used)') + exists=True, argstr="-in %s", desc="input image to deform (not used)" + ) + class AnalyzeWarpOutputSpec(TraitedSpec): - disp_field = File(desc='displacements field') - jacdet_map = File(desc='det(Jacobian) map') - jacmat_map = File(desc='Jacobian matrix map') + disp_field = File(desc="displacements field") + jacdet_map = File(desc="det(Jacobian) map") + jacmat_map = File(desc="Jacobian matrix map") class AnalyzeWarp(ApplyWarp): @@ -231,27 +228,29 @@ class AnalyzeWarp(ApplyWarp): def _list_outputs(self): outputs = self._outputs().get() out_dir = op.abspath(self.inputs.output_path) - outputs['disp_field'] = op.join(out_dir, 'deformationField.nii.gz') - outputs['jacdet_map'] = op.join(out_dir, 'spatialJacobian.nii.gz') - outputs['jacmat_map'] = op.join(out_dir, 'fullSpatialJacobian.nii.gz') + outputs["disp_field"] = op.join(out_dir, "deformationField.nii.gz") + outputs["jacdet_map"] = op.join(out_dir, "spatialJacobian.nii.gz") + outputs["jacmat_map"] = op.join(out_dir, "fullSpatialJacobian.nii.gz") return outputs class 
PointsWarpInputSpec(ElastixBaseInputSpec): points_file = File( exists=True, - argstr='-def %s', + argstr="-def %s", mandatory=True, - desc='input points (accepts .vtk triangular meshes).') + desc="input points (accepts .vtk triangular meshes).", + ) transform_file = File( exists=True, mandatory=True, - argstr='-tp %s', - desc='transform-parameter file, only 1') + argstr="-tp %s", + desc="transform-parameter file, only 1", + ) class PointsWarpOutputSpec(TraitedSpec): - warped_file = File(desc='input points displaced in fixed image domain') + warped_file = File(desc="input points displaced in fixed image domain") class PointsWarp(CommandLine): @@ -271,7 +270,7 @@ class PointsWarp(CommandLine): """ - _cmd = 'transformix' + _cmd = "transformix" input_spec = PointsWarpInputSpec output_spec = PointsWarpOutputSpec @@ -281,5 +280,5 @@ def _list_outputs(self): fname, ext = op.splitext(op.basename(self.inputs.points_file)) - outputs['warped_file'] = op.join(out_dir, 'outputpoints%s' % ext) + outputs["warped_file"] = op.join(out_dir, "outputpoints%s" % ext) return outputs diff --git a/nipype/interfaces/elastix/tests/__init__.py b/nipype/interfaces/elastix/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/elastix/tests/__init__.py +++ b/nipype/interfaces/elastix/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/elastix/tests/test_auto_AnalyzeWarp.py b/nipype/interfaces/elastix/tests/test_auto_AnalyzeWarp.py index f05821d9c1..b7895e1cf1 100644 --- a/nipype/interfaces/elastix/tests/test_auto_AnalyzeWarp.py +++ b/nipype/interfaces/elastix/tests/test_auto_AnalyzeWarp.py @@ -1,41 +1,46 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import AnalyzeWarp def test_AnalyzeWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), jac=dict( - argstr='-jac %s', + argstr="-jac %s", usedefault=True, ), jacmat=dict( - argstr='-jacmat %s', + argstr="-jacmat %s", usedefault=True, ), - moving_image=dict(argstr='-in %s', ), + moving_image=dict( + argstr="-in %s", + extensions=None, + ), num_threads=dict( - argstr='-threads %01d', + argstr="-threads %01d", nohash=True, usedefault=True, ), output_path=dict( - argstr='-out %s', + argstr="-out %s", mandatory=True, usedefault=True, ), points=dict( - argstr='-def %s', + argstr="-def %s", position=0, usedefault=True, ), transform_file=dict( - argstr='-tp %s', + argstr="-tp %s", + extensions=None, mandatory=True, ), ) @@ -44,11 +49,19 @@ def test_AnalyzeWarp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AnalyzeWarp_outputs(): output_map = dict( - disp_field=dict(), - jacdet_map=dict(), - jacmat_map=dict(), + disp_field=dict( + extensions=None, + ), + jacdet_map=dict( + extensions=None, + ), + jacmat_map=dict( + extensions=None, + ), ) outputs = AnalyzeWarp.output_spec() diff --git a/nipype/interfaces/elastix/tests/test_auto_ApplyWarp.py b/nipype/interfaces/elastix/tests/test_auto_ApplyWarp.py index 16f2b54079..0eebfb0665 100644 --- a/nipype/interfaces/elastix/tests/test_auto_ApplyWarp.py +++ b/nipype/interfaces/elastix/tests/test_auto_ApplyWarp.py @@ -1,31 +1,34 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import ApplyWarp def test_ApplyWarp_inputs(): input_map = dict( - 
args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), moving_image=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), num_threads=dict( - argstr='-threads %01d', + argstr="-threads %01d", nohash=True, usedefault=True, ), output_path=dict( - argstr='-out %s', + argstr="-out %s", mandatory=True, usedefault=True, ), transform_file=dict( - argstr='-tp %s', + argstr="-tp %s", + extensions=None, mandatory=True, ), ) @@ -34,8 +37,14 @@ def test_ApplyWarp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyWarp_outputs(): - output_map = dict(warped_file=dict(), ) + output_map = dict( + warped_file=dict( + extensions=None, + ), + ) outputs = ApplyWarp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/elastix/tests/test_auto_EditTransform.py b/nipype/interfaces/elastix/tests/test_auto_EditTransform.py index ef227da7f8..a8617c4988 100644 --- a/nipype/interfaces/elastix/tests/test_auto_EditTransform.py +++ b/nipype/interfaces/elastix/tests/test_auto_EditTransform.py @@ -1,27 +1,43 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import EditTransform def test_EditTransform_inputs(): input_map = dict( interpolation=dict( - argstr='FinalBSplineInterpolationOrder', + argstr="FinalBSplineInterpolationOrder", usedefault=True, ), - output_file=dict(), - output_format=dict(argstr='ResultImageFormat', ), - output_type=dict(argstr='ResultImagePixelType', ), - reference_image=dict(), - transform_file=dict(mandatory=True, ), + output_file=dict( + extensions=None, + ), + output_format=dict( + argstr="ResultImageFormat", + ), + output_type=dict( + argstr="ResultImagePixelType", + ), + reference_image=dict( + extensions=None, + ), + transform_file=dict( + extensions=None, + mandatory=True, + ), ) inputs = EditTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EditTransform_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = EditTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/elastix/tests/test_auto_PointsWarp.py b/nipype/interfaces/elastix/tests/test_auto_PointsWarp.py index c93a0526c1..bce2a3c662 100644 --- a/nipype/interfaces/elastix/tests/test_auto_PointsWarp.py +++ b/nipype/interfaces/elastix/tests/test_auto_PointsWarp.py @@ -1,31 +1,34 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import PointsWarp def test_PointsWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), num_threads=dict( - argstr='-threads %01d', + argstr="-threads %01d", nohash=True, usedefault=True, ), output_path=dict( - argstr='-out %s', + argstr="-out %s", mandatory=True, usedefault=True, ), points_file=dict( - argstr='-def %s', + argstr="-def %s", + extensions=None, mandatory=True, ), transform_file=dict( - argstr='-tp %s', + argstr="-tp %s", + extensions=None, mandatory=True, ), ) @@ -34,8 +37,14 @@ def test_PointsWarp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): 
assert getattr(inputs.traits()[key], metakey) == value + + def test_PointsWarp_outputs(): - output_map = dict(warped_file=dict(), ) + output_map = dict( + warped_file=dict( + extensions=None, + ), + ) outputs = PointsWarp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/elastix/tests/test_auto_Registration.py b/nipype/interfaces/elastix/tests/test_auto_Registration.py index 4e774ab036..ae6d929950 100644 --- a/nipype/interfaces/elastix/tests/test_auto_Registration.py +++ b/nipype/interfaces/elastix/tests/test_auto_Registration.py @@ -1,38 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import Registration def test_Registration_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fixed_image=dict( - argstr='-f %s', + argstr="-f %s", + extensions=None, mandatory=True, ), - fixed_mask=dict(argstr='-fMask %s', ), - initial_transform=dict(argstr='-t0 %s', ), + fixed_mask=dict( + argstr="-fMask %s", + extensions=None, + ), + initial_transform=dict( + argstr="-t0 %s", + extensions=None, + ), moving_image=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, mandatory=True, ), - moving_mask=dict(argstr='-mMask %s', ), + moving_mask=dict( + argstr="-mMask %s", + extensions=None, + ), num_threads=dict( - argstr='-threads %01d', + argstr="-threads %01d", nohash=True, usedefault=True, ), output_path=dict( - argstr='-out %s', + argstr="-out %s", mandatory=True, usedefault=True, ), parameters=dict( - argstr='-p %s...', + argstr="-p %s...", mandatory=True, ), ) @@ -41,10 +53,14 @@ def test_Registration_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Registration_outputs(): output_map = dict( transform=dict(), - warped_file=dict(), + warped_file=dict( + extensions=None, + ), warped_files=dict(), warped_files_flags=dict(), ) diff --git a/nipype/interfaces/elastix/utils.py b/nipype/interfaces/elastix/utils.py index 505115b05a..912216af9a 100644 --- a/nipype/interfaces/elastix/utils.py +++ b/nipype/interfaces/elastix/utils.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -7,56 +5,63 @@ transform files (to configure warpings) """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open - import os.path as op from ... 
import logging -from ..base import (BaseInterface, BaseInterfaceInputSpec, isdefined, - TraitedSpec, File, traits) -iflogger = logging.getLogger('nipype.interface') +from ..base import ( + BaseInterface, + BaseInterfaceInputSpec, + isdefined, + TraitedSpec, + File, + traits, +) + +iflogger = logging.getLogger("nipype.interface") class EditTransformInputSpec(BaseInterfaceInputSpec): transform_file = File( - exists=True, mandatory=True, desc='transform-parameter file, only 1') + exists=True, mandatory=True, desc="transform-parameter file, only 1" + ) reference_image = File( exists=True, - desc=('set a new reference image to change the ' - 'target coordinate system.')) + desc=("set a new reference image to change the target coordinate system."), + ) interpolation = traits.Enum( - 'cubic', - 'linear', - 'nearest', + "cubic", + "linear", + "nearest", usedefault=True, - argstr='FinalBSplineInterpolationOrder', - desc='set a new interpolator for transformation') + argstr="FinalBSplineInterpolationOrder", + desc="set a new interpolator for transformation", + ) output_type = traits.Enum( - 'float', - 'unsigned char', - 'unsigned short', - 'short', - 'unsigned long', - 'long', - 'double', - argstr='ResultImagePixelType', - desc='set a new output pixel type for resampled images') + "float", + "unsigned char", + "unsigned short", + "short", + "unsigned long", + "long", + "double", + argstr="ResultImagePixelType", + desc="set a new output pixel type for resampled images", + ) output_format = traits.Enum( - 'nii.gz', - 'nii', - 'mhd', - 'hdr', - 'vtk', - argstr='ResultImageFormat', - desc='set a new image format for resampled images') - output_file = File(desc='the filename for the resulting transform file') + "nii.gz", + "nii", + "mhd", + "hdr", + "vtk", + argstr="ResultImageFormat", + desc="set a new image format for resampled images", + ) + output_file = File(desc="the filename for the resulting transform file") class EditTransformOutputSpec(TraitedSpec): - output_file = File(exists=True, desc='output transform file') + output_file = File(exists=True, desc="output transform file") class EditTransform(BaseInterface): @@ -77,38 +82,42 @@ class EditTransform(BaseInterface): input_spec = EditTransformInputSpec output_spec = EditTransformOutputSpec - _out_file = '' - _pattern = '\((?P<entry>%s\s\"?)([-\.\s\w]+)(\"?\))' + _out_file = "" + _pattern = r'\((?P<entry>%s\s"?)([-\.\s\w]+)("?\))' - _interp = {'nearest': 0, 'linear': 1, 'cubic': 3} + _interp = {"nearest": 0, "linear": 1, "cubic": 3} def _run_interface(self, runtime): import re import nibabel as nb import numpy as np - contents = '' + contents = "" - with open(self.inputs.transform_file, 'r') as f: + with open(self.inputs.transform_file) as f: contents = f.read() if isdefined(self.inputs.output_type): - p = re.compile((self._pattern % - 'ResultImagePixelType').decode('string-escape')) - rep = '(\g<entry>%s\g<3>' % self.inputs.output_type + p = re.compile( + (self._pattern % "ResultImagePixelType").decode("string-escape") + ) + rep = r"(\g<entry>%s\g<3>" % self.inputs.output_type contents = p.sub(rep, contents) if isdefined(self.inputs.output_format): p = re.compile( - (self._pattern % 'ResultImageFormat').decode('string-escape')) - rep = '(\g<entry>%s\g<3>' % self.inputs.output_format + (self._pattern % "ResultImageFormat").decode("string-escape") + ) + rep = r"(\g<entry>%s\g<3>" % self.inputs.output_format contents = p.sub(rep, contents) if isdefined(self.inputs.interpolation): p = re.compile( - (self._pattern % - 'FinalBSplineInterpolationOrder').decode('string-escape')) - rep = '(\g<entry>%s\g<3>' % self._interp[self.inputs.interpolation] + (self._pattern % "FinalBSplineInterpolationOrder").decode( + "string-escape" + ) + ) + rep = r"(\g<entry>%s\g<3>" % self._interp[self.inputs.interpolation]
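# In isolation, the substitution above rewrites one (Key "value") entry of an
# elastix transform-parameter file in place. A short sketch with a made-up
# parameter line; note that the .decode("string-escape") calls retained above
# are a Python 2 leftover (str has no .decode() on Python 3), while the raw
# pattern works with re directly:
import re

_pattern = r'\((?P<entry>%s\s"?)([-\.\s\w]+)("?\))'
contents = '(ResultImageFormat "mhd")'
p = re.compile(_pattern % "ResultImageFormat")
print(p.sub(r"(\g<entry>%s\g<3>" % "nii.gz", contents))  # (ResultImageFormat "nii.gz")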
contents = p.sub(rep, contents) if isdefined(self.inputs.reference_image): @@ -117,19 +126,19 @@ def _run_interface(self, runtime): if len(im.header.get_zooms()) == 4: im = nb.func.four_to_three(im)[0] - size = ' '.join(["%01d" % s for s in im.shape]) - p = re.compile((self._pattern % 'Size').decode('string-escape')) - rep = '(\g<entry>%s\g<3>' % size + size = " ".join(["%01d" % s for s in im.shape]) + p = re.compile((self._pattern % "Size").decode("string-escape")) + rep = r"(\g<entry>%s\g<3>" % size contents = p.sub(rep, contents) - index = ' '.join(["0" for s in im.shape]) - p = re.compile((self._pattern % 'Index').decode('string-escape')) - rep = '(\g<entry>%s\g<3>' % index + index = " ".join(["0" for s in im.shape]) + p = re.compile((self._pattern % "Index").decode("string-escape")) + rep = r"(\g<entry>%s\g<3>" % index contents = p.sub(rep, contents) - spacing = ' '.join(["%0.4f" % f for f in im.header.get_zooms()]) - p = re.compile((self._pattern % 'Spacing').decode('string-escape')) - rep = '(\g<entry>%s\g<3>' % spacing + spacing = " ".join(["%0.4f" % f for f in im.header.get_zooms()]) + p = re.compile((self._pattern % "Spacing").decode("string-escape")) + rep = r"(\g<entry>%s\g<3>" % spacing contents = p.sub(rep, contents) itkmat = np.eye(4) @@ -137,37 +146,36 @@ def _run_interface(self, runtime): itkmat[1, 1] = -1 affine = np.dot(itkmat, im.affine) - dirs = ' '.join( - ['%0.4f' % f for f in affine[0:3, 0:3].reshape(-1)]) - orig = ' '.join(['%0.4f' % f for f in affine[0:3, 3].reshape(-1)]) + dirs = " ".join(["%0.4f" % f for f in affine[0:3, 0:3].reshape(-1)]) + orig = " ".join(["%0.4f" % f for f in affine[0:3, 3].reshape(-1)]) # p = re.compile((self._pattern % 'Direction').decode('string-escape')) # rep = '(\g<entry>%s\g<3>' % dirs # contents = p.sub(rep, contents) - p = re.compile((self._pattern % 'Origin').decode('string-escape')) - rep = '(\g<entry>%s\g<3>' % orig + p = re.compile((self._pattern % "Origin").decode("string-escape")) + rep = r"(\g<entry>%s\g<3>" % orig contents = p.sub(rep, contents) - with open(self._get_outfile(), 'w') as of: + with open(self._get_outfile(), "w") as of: of.write(contents) return runtime def _list_outputs(self): outputs = self.output_spec().get() - outputs['output_file'] = getattr(self, '_out_file') + outputs["output_file"] = self._out_file return outputs def _get_outfile(self): - val = getattr(self, '_out_file') - if val is not None and val != '': + val = self._out_file + if val is not None and val != "": return val if isdefined(self.inputs.output_file): - setattr(self, '_out_file', self.inputs.output_file) + self._out_file = self.inputs.output_file return self.inputs.output_file out_file = op.abspath(op.basename(self.inputs.transform_file)) - setattr(self, '_out_file', out_file) + self._out_file = out_file return out_file diff --git a/nipype/interfaces/freesurfer/__init__.py b/nipype/interfaces/freesurfer/__init__.py index 44c939706e..b6863c9ded 100644 --- a/nipype/interfaces/freesurfer/__init__.py +++ b/nipype/interfaces/freesurfer/__init__.py @@ -1,29 +1,98 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Top-level namespace for freesurfer.""" +"""FreeSurfer is an open source software suite for processing and analyzing brain MRI images.""" from .base import Info, FSCommand, no_freesurfer from .preprocess 
import ( - ParseDICOMDir, UnpackSDICOMDir, MRIConvert, Resample, ReconAll, BBRegister, - ApplyVolTransform, Smooth, DICOMConvert, RobustRegister, FitMSParams, - SynthesizeFLASH, MNIBiasCorrection, WatershedSkullStrip, Normalize, - CANormalize, CARegister, CALabel, MRIsCALabel, SegmentCC, SegmentWM, - EditWMwithAseg, ConcatenateLTA) -from .model import (MRISPreproc, MRISPreprocReconAll, GLMFit, OneSampleTTest, - Binarize, Concatenate, SegStats, SegStatsReconAll, - Label2Vol, MS_LDA, Label2Label, Label2Annot, - SphericalAverage) + ParseDICOMDir, + UnpackSDICOMDir, + MRIConvert, + Resample, + ReconAll, + BBRegister, + ApplyVolTransform, + Smooth, + DICOMConvert, + RobustRegister, + FitMSParams, + SynthesizeFLASH, + MNIBiasCorrection, + WatershedSkullStrip, + Normalize, + CANormalize, + CARegister, + CALabel, + MRIsCALabel, + SegmentCC, + SegmentWM, + EditWMwithAseg, + ConcatenateLTA, +) +from .model import ( + MRISPreproc, + MRISPreprocReconAll, + GLMFit, + OneSampleTTest, + Binarize, + Concatenate, + SegStats, + SegStatsReconAll, + Label2Vol, + MS_LDA, + Label2Label, + Label2Annot, + SphericalAverage, +) from .utils import ( - SampleToSurface, SurfaceSmooth, SurfaceTransform, Surface2VolTransform, - SurfaceSnapshots, ApplyMask, MRIsConvert, MRITessellate, MRIPretess, - MRIMarchingCubes, SmoothTessellation, MakeAverageSubject, - ExtractMainComponent, Tkregister2, AddXFormToHeader, - CheckTalairachAlignment, TalairachAVI, TalairachQC, RemoveNeck, MRIFill, - MRIsInflate, Sphere, FixTopology, EulerNumber, RemoveIntersection, - MakeSurfaces, Curvature, CurvatureStats, Jacobian, MRIsCalc, VolumeMask, - ParcellationStats, Contrast, RelabelHypointensities, Aparc2Aseg, Apas2Aseg, - MRIsExpand, MRIsCombine) -from .longitudinal import (RobustTemplate, FuseSegmentations) -from .registration import (MPRtoMNI305, RegisterAVItoTalairach, EMRegister, - Register, Paint, MRICoreg) + SampleToSurface, + SurfaceSmooth, + SurfaceTransform, + Surface2VolTransform, + SurfaceSnapshots, + ApplyMask, + MRIsConvert, + MRITessellate, + MRIPretess, + MRIMarchingCubes, + SmoothTessellation, + MakeAverageSubject, + ExtractMainComponent, + Tkregister2, + AddXFormToHeader, + CheckTalairachAlignment, + TalairachAVI, + TalairachQC, + RemoveNeck, + MRIFill, + MRIsInflate, + Sphere, + FixTopology, + EulerNumber, + RemoveIntersection, + MakeSurfaces, + Curvature, + CurvatureStats, + Jacobian, + MRIsCalc, + VolumeMask, + ParcellationStats, + Contrast, + RelabelHypointensities, + Aparc2Aseg, + Apas2Aseg, + MRIsExpand, + MRIsCombine, +) +from .longitudinal import RobustTemplate, FuseSegmentations +from .registration import ( + MPRtoMNI305, + RegisterAVItoTalairach, + EMRegister, + Register, + Paint, + MRICoreg, +) +from .petsurfer import ( + GTMSeg, + GTMPVC, +) diff --git a/nipype/interfaces/freesurfer/base.py b/nipype/interfaces/freesurfer/base.py index ef54cda040..c84fc78cd5 100644 --- a/nipype/interfaces/freesurfer/base.py +++ b/nipype/interfaces/freesurfer/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The freesurfer module provides basic functions for interfacing with @@ -14,22 +13,27 @@ See the docstrings for the individual classes for 'working' examples. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open, object, str - import os -from ... 
import LooseVersion +from looseversion import LooseVersion + from ...utils.filemanip import fname_presuffix -from ..base import (CommandLine, Directory, CommandLineInputSpec, isdefined, - traits, TraitedSpec, File, PackageInfo) +from ..base import ( + CommandLine, + Directory, + CommandLineInputSpec, + isdefined, + traits, + TraitedSpec, + File, + PackageInfo, +) -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class Info(PackageInfo): - """ Freesurfer subject directory and version information. + """Freesurfer subject directory and version information. Examples -------- @@ -39,9 +43,9 @@ class Info(PackageInfo): >>> Info.subjectsdir() # doctest: +SKIP """ - if os.getenv('FREESURFER_HOME'): - version_file = os.path.join( - os.getenv('FREESURFER_HOME'), 'build-stamp.txt') + + if os.getenv("FREESURFER_HOME"): + version_file = os.path.join(os.getenv("FREESURFER_HOME"), "build-stamp.txt") @staticmethod def parse_version(raw_info): @@ -49,36 +53,39 @@ def parse_version(raw_info): @classmethod def looseversion(cls): - """ Return a comparable version object + """Return a comparable version object If no version found, use LooseVersion('0.0.0') """ ver = cls.version() if ver is None: - return LooseVersion('0.0.0') + return LooseVersion("0.0.0") - vinfo = ver.rstrip().split('-') + vinfo = ver.rstrip().split("-") try: int(vinfo[-1], 16) except ValueError: - githash = '' + githash = "" else: - githash = '.' + vinfo[-1] + githash = "." + vinfo[-1] # As of FreeSurfer v6.0.0, the final component is a githash if githash: - if vinfo[3] == 'dev': + if vinfo[3] == "dev": # This will need updating when v6.0.1 comes out - vstr = '6.0.0-dev' + githash - elif vinfo[5][0] == 'v': + vstr = "6.0.0-dev" + githash + elif vinfo[5][0] == "v": vstr = vinfo[5][1:] + elif len([1 for val in vinfo[3] if val == "."]) == 2: + "version string: freesurfer-linux-centos7_x86_64-7.1.0-20200511-813297b" + vstr = vinfo[3] else: - raise RuntimeError('Unknown version string: ' + ver) + raise RuntimeError("Unknown version string: " + ver) # Retain pre-6.0.0 heuristics - elif 'dev' in ver: - vstr = vinfo[-1] + '-dev' + elif "dev" in ver: + vstr = vinfo[-1] + "-dev" else: - vstr = ver.rstrip().split('-v')[-1] + vstr = ver.rstrip().split("-v")[-1] return LooseVersion(vstr) @@ -100,18 +107,18 @@ def subjectsdir(cls): """ if cls.version(): - return os.environ['SUBJECTS_DIR'] + return os.environ["SUBJECTS_DIR"] return None class FSTraitedSpec(CommandLineInputSpec): - subjects_dir = Directory(exists=True, desc='subjects directory') + subjects_dir = Directory(exists=True, desc="subjects directory") class FSCommand(CommandLine): """General support for FreeSurfer commands. - Every FS command accepts 'subjects_dir' input. + Every FS command accepts 'subjects_dir' input. 
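    A minimal usage sketch (hypothetical path; assigning ``subjects_dir``
    triggers the ``SUBJECTS_DIR`` environment update performed by
    ``_subjects_dir_update`` below):

    >>> cmd = FSCommand(command='mri_info')  # doctest: +SKIP
    >>> cmd.inputs.subjects_dir = '/path/to/subjects'  # doctest: +SKIP
    >>> cmd.inputs.environ['SUBJECTS_DIR']  # doctest: +SKIP
    '/path/to/subjects'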
""" input_spec = FSTraitedSpec @@ -119,8 +126,8 @@ class FSCommand(CommandLine): _subjects_dir = None def __init__(self, **inputs): - super(FSCommand, self).__init__(**inputs) - self.inputs.on_trait_change(self._subjects_dir_update, 'subjects_dir') + super().__init__(**inputs) + self.inputs.on_trait_change(self._subjects_dir_update, "subjects_dir") if not self._subjects_dir: self._subjects_dir = Info.subjectsdir() if not isdefined(self.inputs.subjects_dir) and self._subjects_dir: @@ -129,27 +136,20 @@ def __init__(self, **inputs): def _subjects_dir_update(self): if self.inputs.subjects_dir: - self.inputs.environ.update({ - 'SUBJECTS_DIR': self.inputs.subjects_dir - }) + self.inputs.environ.update({"SUBJECTS_DIR": self.inputs.subjects_dir}) @classmethod def set_default_subjects_dir(cls, subjects_dir): cls._subjects_dir = subjects_dir def run(self, **inputs): - if 'subjects_dir' in inputs: - self.inputs.subjects_dir = inputs['subjects_dir'] + if "subjects_dir" in inputs: + self.inputs.subjects_dir = inputs["subjects_dir"] self._subjects_dir_update() - return super(FSCommand, self).run(**inputs) + return super().run(**inputs) - def _gen_fname(self, - basename, - fname=None, - cwd=None, - suffix='_fs', - use_ext=True): - '''Define a generic mapping for a single outfile + def _gen_fname(self, basename, fname=None, cwd=None, suffix="_fs", use_ext=True): + """Define a generic mapping for a single outfile The filename is potentially autogenerated by suffixing inputs.infile @@ -163,15 +163,14 @@ def _gen_fname(self, prefix paths with cwd, otherwise os.getcwd() suffix : string default suffix - ''' - if basename == '': - msg = 'Unable to generate filename for command %s. ' % self.cmd - msg += 'basename is not set!' + """ + if basename == "": + msg = "Unable to generate filename for command %s. " % self.cmd + msg += "basename is not set!" raise ValueError(msg) if cwd is None: cwd = os.getcwd() - fname = fname_presuffix( - basename, suffix=suffix, use_ext=use_ext, newpath=cwd) + fname = fname_presuffix(basename, suffix=suffix, use_ext=use_ext, newpath=cwd) return fname @property @@ -200,33 +199,34 @@ def _associated_file(in_file, out_name): inspecting the surface data structure. 
""" path, base = os.path.split(out_name) - if path == '': + if path == "": path, in_file = os.path.split(in_file) - hemis = ('lh.', 'rh.') + hemis = ("lh.", "rh.") if in_file[:3] in hemis and base[:3] not in hemis: base = in_file[:3] + base return os.path.join(path, base) class FSScriptCommand(FSCommand): - """ Support for Freesurfer script commands with log terminal_output - """ - _terminal_output = 'file' + """Support for Freesurfer script commands with log terminal_output""" + + _terminal_output = "file" _always_run = False def _list_outputs(self): outputs = self._outputs().get() - outputs['log_file'] = os.path.abspath('output.nipype') + outputs["log_file"] = os.path.abspath("output.nipype") return outputs class FSScriptOutputSpec(TraitedSpec): log_file = File( - 'output.nipype', usedefault=True, exists=True, desc="The output log") + "output.nipype", usedefault=True, exists=True, desc="The output log" + ) class FSTraitedSpecOpenMP(FSTraitedSpec): - num_threads = traits.Int(desc='allows for specifying more threads') + num_threads = traits.Int(desc="allows for specifying more threads") class FSCommandOpenMP(FSCommand): @@ -241,28 +241,27 @@ class FSCommandOpenMP(FSCommand): _num_threads = None def __init__(self, **inputs): - super(FSCommandOpenMP, self).__init__(**inputs) - self.inputs.on_trait_change(self._num_threads_update, 'num_threads') + super().__init__(**inputs) + self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not self._num_threads: - self._num_threads = os.environ.get('OMP_NUM_THREADS', None) + self._num_threads = os.environ.get("OMP_NUM_THREADS", None) if not self._num_threads: - self._num_threads = os.environ.get('NSLOTS', None) + self._num_threads = os.environ.get("NSLOTS", None) if not isdefined(self.inputs.num_threads) and self._num_threads: self.inputs.num_threads = int(self._num_threads) self._num_threads_update() def _num_threads_update(self): if self.inputs.num_threads: - self.inputs.environ.update({ - 'OMP_NUM_THREADS': - str(self.inputs.num_threads) - }) + self.inputs.environ.update( + {"OMP_NUM_THREADS": str(self.inputs.num_threads)} + ) def run(self, **inputs): - if 'num_threads' in inputs: - self.inputs.num_threads = inputs['num_threads'] + if "num_threads" in inputs: + self.inputs.num_threads = inputs["num_threads"] self._num_threads_update() - return super(FSCommandOpenMP, self).run(**inputs) + return super().run(**inputs) def no_freesurfer(): @@ -270,7 +269,4 @@ def no_freesurfer(): used with skipif to skip tests that will fail if FreeSurfer is not installed""" - if Info.version() is None: - return True - else: - return False + return Info.version() is None diff --git a/nipype/interfaces/freesurfer/longitudinal.py b/nipype/interfaces/freesurfer/longitudinal.py index 1d982a7a44..227ea76775 100644 --- a/nipype/interfaces/freesurfer/longitudinal.py +++ b/nipype/interfaces/freesurfer/longitudinal.py @@ -1,21 +1,28 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various longitudinal commands provided by freesurfer """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os from ... 
import logging -from ..base import (TraitedSpec, File, traits, InputMultiPath, OutputMultiPath, - isdefined) -from .base import (FSCommand, FSTraitedSpec, FSCommandOpenMP, - FSTraitedSpecOpenMP) +from ..base import ( + TraitedSpec, + File, + traits, + InputMultiPath, + OutputMultiPath, + isdefined, +) +from .base import ( + FSCommand, + FSTraitedSpec, + FSCommandOpenMP, + FSTraitedSpecOpenMP, +) -__docformat__ = 'restructuredtext' -iflogger = logging.getLogger('nipype.interface') +__docformat__ = "restructuredtext" +iflogger = logging.getLogger("nipype.interface") class RobustTemplateInputSpec(FSTraitedSpecOpenMP): @@ -23,82 +30,96 @@ class RobustTemplateInputSpec(FSTraitedSpecOpenMP): in_files = InputMultiPath( File(exists=True), mandatory=True, - argstr='--mov %s', - desc='input movable volumes to be aligned to common mean/median ' - 'template') + argstr="--mov %s", + desc="input movable volumes to be aligned to common mean/median template", + ) out_file = File( - 'mri_robust_template_out.mgz', + "mri_robust_template_out.mgz", mandatory=True, usedefault=True, - argstr='--template %s', - desc='output template volume (final mean/median image)') + argstr="--template %s", + desc="output template volume (final mean/median image)", + ) auto_detect_sensitivity = traits.Bool( - argstr='--satit', - xor=['outlier_sensitivity'], + argstr="--satit", + xor=["outlier_sensitivity"], mandatory=True, - desc='auto-detect good sensitivity (recommended for head or full ' - 'brain scans)') + desc="auto-detect good sensitivity (recommended for head or full " + "brain scans)", + ) outlier_sensitivity = traits.Float( - argstr='--sat %.4f', - xor=['auto_detect_sensitivity'], + argstr="--sat %.4f", + xor=["auto_detect_sensitivity"], mandatory=True, desc='set outlier sensitivity manually (e.g. "--sat 4.685" ). 
Higher ' - 'values mean less sensitivity.') + "values mean less sensitivity.", + ) # optional transform_outputs = traits.Either( InputMultiPath(File(exists=False)), traits.Bool, - argstr='--lta %s', - desc='output xforms to template (for each input)') + argstr="--lta %s", + desc="output xforms to template (for each input)", + ) intensity_scaling = traits.Bool( default_value=False, - argstr='--iscale', - desc='allow also intensity scaling (default off)') + argstr="--iscale", + desc="allow also intensity scaling (default off)", + ) scaled_intensity_outputs = traits.Either( InputMultiPath(File(exists=False)), traits.Bool, - argstr='--iscaleout %s', - desc='final intensity scales (will activate --iscale)') + argstr="--iscaleout %s", + desc="final intensity scales (will activate --iscale)", + ) subsample_threshold = traits.Int( - argstr='--subsample %d', - desc='subsample if dim > # on all axes (default no subs.)') + argstr="--subsample %d", + desc="subsample if dim > # on all axes (default no subs.)", + ) average_metric = traits.Enum( - 'median', - 'mean', - argstr='--average %d', - desc='construct template from: 0 Mean, 1 Median (default)') + "median", + "mean", + argstr="--average %d", + desc="construct template from: 0 Mean, 1 Median (default)", + ) initial_timepoint = traits.Int( - argstr='--inittp %d', - desc='use TP# for spacial init (default random), 0: no init') + argstr="--inittp %d", + desc="use TP# for special init (default random), 0: no init", + ) fixed_timepoint = traits.Bool( default_value=False, - argstr='--fixtp', - desc='map everthing to init TP# (init TP is not resampled)') + argstr="--fixtp", + desc="map everything to init TP# (init TP is not resampled)", + ) no_iteration = traits.Bool( default_value=False, - argstr='--noit', - desc='do not iterate, just create first template') + argstr="--noit", + desc="do not iterate, just create first template", + ) initial_transforms = InputMultiPath( File(exists=True), - argstr='--ixforms %s', - desc='use initial transforms (lta) on source') + argstr="--ixforms %s", + desc="use initial transforms (lta) on source", + ) in_intensity_scales = InputMultiPath( - File(exists=True), - argstr='--iscalein %s', - desc='use initial intensity scales') + File(exists=True), argstr="--iscalein %s", desc="use initial intensity scales" + ) class RobustTemplateOutputSpec(TraitedSpec): out_file = File( - exists=True, desc='output template volume (final mean/median image)') + exists=True, desc="output template volume (final mean/median image)" + ) transform_outputs = OutputMultiPath( - File(exists=True), desc="output xform files from moving to template") + File(exists=True), desc="output xform files from moving to template" + ) scaled_intensity_outputs = OutputMultiPath( - File(exists=True), desc="output final intensity scales") + File(exists=True), desc="output final intensity scales" + ) class RobustTemplate(FSCommandOpenMP): - """ construct an unbiased robust template for longitudinal volumes + """construct an unbiased robust template for longitudinal volumes Examples -------- @@ -137,76 +158,73 @@ class RobustTemplate(FSCommandOpenMP): """ - _cmd = 'mri_robust_template' + _cmd = "mri_robust_template" input_spec = RobustTemplateInputSpec output_spec = RobustTemplateOutputSpec def _format_arg(self, name, spec, value): - if name == 'average_metric': + if name == "average_metric": # return enumeration value return spec.argstr % {"mean": 0, "median": 1}[value] - if name in ('transform_outputs', 'scaled_intensity_outputs'): + if name in 
("transform_outputs", "scaled_intensity_outputs"): value = self._list_outputs()[name] - return super(RobustTemplate, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) n_files = len(self.inputs.in_files) - fmt = '{}{:02d}.{}' if n_files > 9 else '{}{:d}.{}' + fmt = "{}{:02d}.{}" if n_files > 9 else "{}{:d}.{}" if isdefined(self.inputs.transform_outputs): fnames = self.inputs.transform_outputs if fnames is True: - fnames = [ - fmt.format('tp', i + 1, 'lta') for i in range(n_files) - ] - outputs['transform_outputs'] = [os.path.abspath(x) for x in fnames] + fnames = [fmt.format("tp", i + 1, "lta") for i in range(n_files)] + outputs["transform_outputs"] = [os.path.abspath(x) for x in fnames] if isdefined(self.inputs.scaled_intensity_outputs): fnames = self.inputs.scaled_intensity_outputs if fnames is True: - fnames = [ - fmt.format('is', i + 1, 'txt') for i in range(n_files) - ] - outputs['scaled_intensity_outputs'] = [ - os.path.abspath(x) for x in fnames - ] + fnames = [fmt.format("is", i + 1, "txt") for i in range(n_files)] + outputs["scaled_intensity_outputs"] = [os.path.abspath(x) for x in fnames] return outputs class FuseSegmentationsInputSpec(FSTraitedSpec): # required subject_id = traits.String( - argstr='%s', position=-3, desc="subject_id being processed") + argstr="%s", position=-3, desc="subject_id being processed" + ) timepoints = InputMultiPath( traits.String(), mandatory=True, - argstr='%s', + argstr="%s", position=-2, - desc='subject_ids or timepoints to be processed') + desc="subject_ids or timepoints to be processed", + ) out_file = File( - exists=False, - mandatory=True, - position=-1, - desc="output fused segmentation file") + exists=False, mandatory=True, position=-1, desc="output fused segmentation file" + ) in_segmentations = InputMultiPath( File(exists=True), argstr="-a %s", mandatory=True, desc="name of aseg file to use (default: aseg.mgz) \ - must include the aseg files for all the given timepoints") + must include the aseg files for all the given timepoints", + ) in_segmentations_noCC = InputMultiPath( File(exists=True), argstr="-c %s", mandatory=True, desc="name of aseg file w/o CC labels (default: aseg.auto_noCCseg.mgz) \ - must include the corresponding file for all the given timepoints") + must include the corresponding file for all the given timepoints", + ) in_norms = InputMultiPath( File(exists=True), argstr="-n %s", mandatory=True, desc="-n - name of norm file to use (default: norm.mgs) \ must include the corresponding norm file for all given timepoints \ - as well as for the current subject") + as well as for the current subject", + ) class FuseSegmentationsOutputSpec(TraitedSpec): @@ -214,7 +232,7 @@ class FuseSegmentationsOutputSpec(TraitedSpec): class FuseSegmentations(FSCommand): - """ fuse segmentations together from multiple timepoints + """fuse segmentations together from multiple timepoints Examples -------- @@ -230,17 +248,17 @@ class FuseSegmentations(FSCommand): 'mri_fuse_segmentations -n norm.mgz -a aseg.mgz -c aseg.mgz tp.long.A.template tp1 tp2' """ - _cmd = 'mri_fuse_segmentations' + _cmd = "mri_fuse_segmentations" input_spec = FuseSegmentationsInputSpec output_spec = FuseSegmentationsOutputSpec def _format_arg(self, name, spec, value): - if name in ('in_segmentations', 'in_segmentations_noCC', 'in_norms'): + if name in 
("in_segmentations", "in_segmentations_noCC", "in_norms"): # return enumeration value return spec.argstr % os.path.basename(value[0]) - return super(FuseSegmentations, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index 58d168e2d7..6376c1b971 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -1,93 +1,113 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The freesurfer module provides basic functions for interfacing with freesurfer tools. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os from ...utils.filemanip import fname_presuffix, split_filename -from ..base import (TraitedSpec, File, traits, InputMultiPath, OutputMultiPath, - Directory, isdefined) +from ..base import ( + TraitedSpec, + File, + traits, + Tuple, + InputMultiPath, + OutputMultiPath, + Directory, + isdefined, +) from .base import FSCommand, FSTraitedSpec from .utils import copy2subjdir -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class MRISPreprocInputSpec(FSTraitedSpec): - out_file = File(argstr='--out %s', genfile=True, desc='output filename') + out_file = File(argstr="--out %s", genfile=True, desc="output filename") target = traits.Str( - argstr='--target %s', mandatory=True, desc='target subject name') + argstr="--target %s", mandatory=True, desc="target subject name" + ) hemi = traits.Enum( - 'lh', - 'rh', - argstr='--hemi %s', + "lh", + "rh", + argstr="--hemi %s", mandatory=True, - desc='hemisphere for source and target') + desc="hemisphere for source and target", + ) surf_measure = traits.Str( - argstr='--meas %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), - desc='Use subject/surf/hemi.surf_measure as input') + argstr="--meas %s", + xor=("surf_measure", "surf_measure_file", "surf_area"), + desc="Use subject/surf/hemi.surf_measure as input", + ) surf_area = traits.Str( - argstr='--area %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), - desc= - 'Extract vertex area from subject/surf/hemi.surfname to use as input.') + argstr="--area %s", + xor=("surf_measure", "surf_measure_file", "surf_area"), + desc="Extract vertex area from subject/surf/hemi.surfname to use as input.", + ) subjects = traits.List( - argstr='--s %s...', - xor=('subjects', 'fsgd_file', 'subject_file'), - desc='subjects from who measures are calculated') + argstr="--s %s...", + xor=("subjects", "fsgd_file", "subject_file"), + desc="subjects from who measures are calculated", + ) fsgd_file = File( exists=True, - argstr='--fsgd %s', - xor=('subjects', 'fsgd_file', 'subject_file'), - desc='specify subjects using fsgd file') + argstr="--fsgd %s", + xor=("subjects", "fsgd_file", "subject_file"), + desc="specify subjects using fsgd file", + ) subject_file = File( exists=True, - argstr='--f %s', - xor=('subjects', 'fsgd_file', 'subject_file'), - desc='file specifying subjects separated by white space') + argstr="--f %s", + xor=("subjects", "fsgd_file", "subject_file"), + desc="file specifying subjects separated by white space", + ) surf_measure_file = InputMultiPath( File(exists=True), - 
argstr='--is %s...', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), - desc='file alternative to surfmeas, still requires list of subjects') - source_format = traits.Str(argstr='--srcfmt %s', desc='source format') + argstr="--is %s...", + xor=("surf_measure", "surf_measure_file", "surf_area"), + desc="file alternative to surfmeas, still requires list of subjects", + ) + source_format = traits.Str(argstr="--srcfmt %s", desc="source format") surf_dir = traits.Str( - argstr='--surfdir %s', desc='alternative directory (instead of surf)') + argstr="--surfdir %s", desc="alternative directory (instead of surf)" + ) vol_measure_file = InputMultiPath( - traits.Tuple(File(exists=True), File(exists=True)), - argstr='--iv %s %s...', - desc='list of volume measure and reg file tuples') + Tuple(File(exists=True), File(exists=True)), + argstr="--iv %s %s...", + desc="list of volume measure and reg file tuples", + ) proj_frac = traits.Float( - argstr='--projfrac %s', desc='projection fraction for vol2surf') + argstr="--projfrac %s", desc="projection fraction for vol2surf" + ) fwhm = traits.Float( - argstr='--fwhm %f', - xor=['num_iters'], - desc='smooth by fwhm mm on the target surface') + argstr="--fwhm %f", + xor=["num_iters"], + desc="smooth by fwhm mm on the target surface", + ) num_iters = traits.Int( - argstr='--niters %d', - xor=['fwhm'], - desc='niters : smooth by niters on the target surface') + argstr="--niters %d", + xor=["fwhm"], + desc="niters : smooth by niters on the target surface", + ) fwhm_source = traits.Float( - argstr='--fwhm-src %f', - xor=['num_iters_source'], - desc='smooth by fwhm mm on the source surface') + argstr="--fwhm-src %f", + xor=["num_iters_source"], + desc="smooth by fwhm mm on the source surface", + ) num_iters_source = traits.Int( - argstr='--niterssrc %d', - xor=['fwhm_source'], - desc='niters : smooth by niters on the source surface') + argstr="--niterssrc %d", + xor=["fwhm_source"], + desc="niters : smooth by niters on the source surface", + ) smooth_cortex_only = traits.Bool( - argstr='--smooth-cortex-only', - desc='only smooth cortex (ie, exclude medial wall)') + argstr="--smooth-cortex-only", + desc="only smooth cortex (ie, exclude medial wall)", + ) class MRISPreprocOutputSpec(TraitedSpec): - out_file = File(desc='preprocessed output file') + out_file = File(desc="preprocessed output file") class MRISPreproc(FSCommand): @@ -96,7 +116,6 @@ class MRISPreproc(FSCommand): Examples -------- - >>> preproc = MRISPreproc() >>> preproc.inputs.target = 'fsaverage' >>> preproc.inputs.hemi = 'lh' @@ -108,22 +127,22 @@ class MRISPreproc(FSCommand): """ - _cmd = 'mris_preproc' + _cmd = "mris_preproc" input_spec = MRISPreprocInputSpec output_spec = MRISPreprocOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outfile = self.inputs.out_file - outputs['out_file'] = outfile + outputs["out_file"] = outfile if not isdefined(outfile): - outputs['out_file'] = os.path.join( - os.getcwd(), 'concat_%s_%s.mgz' % (self.inputs.hemi, - self.inputs.target)) + outputs["out_file"] = os.path.join( + os.getcwd(), f"concat_{self.inputs.hemi}_{self.inputs.target}.mgz" + ) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None @@ -131,36 +150,41 @@ def _gen_filename(self, name): class MRISPreprocReconAllInputSpec(MRISPreprocInputSpec): surf_measure_file = File( exists=True, - argstr='--meas %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), - desc='file necessary for 
surfmeas') + argstr="--meas %s", + xor=("surf_measure", "surf_measure_file", "surf_area"), + desc="file necessary for surfmeas", + ) surfreg_files = InputMultiPath( File(exists=True), argstr="--surfreg %s", - requires=['lh_surfreg_target', 'rh_surfreg_target'], - desc="lh and rh input surface registration files") + requires=["lh_surfreg_target", "rh_surfreg_target"], + desc="lh and rh input surface registration files", + ) lh_surfreg_target = File( - desc="Implicit target surface registration file", - requires=['surfreg_files']) + desc="Implicit target surface registration file", requires=["surfreg_files"] + ) rh_surfreg_target = File( - desc="Implicit target surface registration file", - requires=['surfreg_files']) + desc="Implicit target surface registration file", requires=["surfreg_files"] + ) subject_id = traits.String( - 'subject_id', - argstr='--s %s', + "subject_id", + argstr="--s %s", usedefault=True, - xor=('subjects', 'fsgd_file', 'subject_file', 'subject_id'), - desc='subject from whom measures are calculated') + xor=("subjects", "fsgd_file", "subject_file", "subject_id"), + desc="subject from whom measures are calculated", + ) copy_inputs = traits.Bool( - desc="If running as a node, set this to True " + - "this will copy some implicit inputs to the " + "node directory.") + desc="If running as a node, set this to True " + "this will copy some implicit inputs to the " + "node directory." + ) class MRISPreprocReconAll(MRISPreproc): """Extends MRISPreproc to allow it to be used in a recon-all workflow Examples - ======== + -------- >>> preproc = MRISPreprocReconAll() >>> preproc.inputs.target = 'fsaverage' >>> preproc.inputs.hemi = 'lh' @@ -169,6 +193,7 @@ class MRISPreprocReconAll(MRISPreproc): >>> preproc.inputs.out_file = 'concatenated_file.mgz' >>> preproc.cmdline 'mris_preproc --hemi lh --out concatenated_file.mgz --s subject_id --target fsaverage --iv cont1.nii register.dat --iv cont1a.nii register.dat' + """ input_spec = MRISPreprocReconAllInputSpec @@ -176,209 +201,262 @@ class MRISPreprocReconAll(MRISPreproc): def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir if isdefined(self.inputs.surf_dir): folder = self.inputs.surf_dir else: - folder = 'surf' + folder = "surf" if isdefined(self.inputs.surfreg_files): for surfreg in self.inputs.surfreg_files: basename = os.path.basename(surfreg) copy2subjdir(self, surfreg, folder, basename) - if basename.startswith('lh.'): + if basename.startswith("lh."): copy2subjdir( self, self.inputs.lh_surfreg_target, folder, basename, - subject_id=self.inputs.target) + subject_id=self.inputs.target, + ) else: copy2subjdir( self, self.inputs.rh_surfreg_target, folder, basename, - subject_id=self.inputs.target) + subject_id=self.inputs.target, + ) if isdefined(self.inputs.surf_measure_file): copy2subjdir(self, self.inputs.surf_measure_file, folder) - return super(MRISPreprocReconAll, self).run(**inputs) + return super().run(**inputs) def _format_arg(self, name, spec, value): # mris_preproc looks for these files in the surf dir - if name == 'surfreg_files': + if name == "surfreg_files": basename = os.path.basename(value[0]) - return spec.argstr % basename.lstrip('rh.').lstrip('lh.') + return spec.argstr % basename.lstrip("rh.").lstrip("lh.") if name == "surf_measure_file": basename = os.path.basename(value) - return spec.argstr % 
basename.lstrip('rh.').lstrip('lh.') - return super(MRISPreprocReconAll, self)._format_arg(name, spec, value) + return spec.argstr % basename.lstrip("rh.").lstrip("lh.") + return super()._format_arg(name, spec, value) class GLMFitInputSpec(FSTraitedSpec): - glm_dir = traits.Str( - argstr='--glmdir %s', desc='save outputs to dir', genfile=True) + glm_dir = traits.Str(argstr="--glmdir %s", desc="save outputs to dir", genfile=True) in_file = File( - desc='input 4D file', argstr='--y %s', mandatory=True, copyfile=False) - _design_xor = ('fsgd', 'design', 'one_sample') - fsgd = traits.Tuple( + desc="input 4D file", argstr="--y %s", mandatory=True, copyfile=False + ) + _design_xor = ("fsgd", "design", "one_sample") + fsgd = Tuple( File(exists=True), - traits.Enum('doss', 'dods'), - argstr='--fsgd %s %s', + traits.Enum("doss", "dods"), + argstr="--fsgd %s %s", xor=_design_xor, - desc='freesurfer descriptor file') + desc="freesurfer descriptor file", + ) design = File( - exists=True, - argstr='--X %s', - xor=_design_xor, - desc='design matrix file') + exists=True, argstr="--X %s", xor=_design_xor, desc="design matrix file" + ) contrast = InputMultiPath( - File(exists=True), argstr='--C %s...', desc='contrast file') + File(exists=True), argstr="--C %s...", desc="contrast file" + ) one_sample = traits.Bool( - argstr='--osgm', - xor=('one_sample', 'fsgd', 'design', 'contrast'), - desc='construct X and C as a one-sample group mean') + argstr="--osgm", + xor=("one_sample", "fsgd", "design", "contrast"), + desc="construct X and C as a one-sample group mean", + ) no_contrast_ok = traits.Bool( - argstr='--no-contrasts-ok', - desc='do not fail if no contrasts specified') + argstr="--no-contrasts-ok", desc="do not fail if no contrasts specified" + ) per_voxel_reg = InputMultiPath( - File(exists=True), argstr='--pvr %s...', desc='per-voxel regressors') - self_reg = traits.Tuple( + File(exists=True), argstr="--pvr %s...", desc="per-voxel regressors" + ) + self_reg = Tuple( traits.Int, traits.Int, traits.Int, - argstr='--selfreg %d %d %d', - desc='self-regressor from index col row slice') + argstr="--selfreg %d %d %d", + desc="self-regressor from index col row slice", + ) weighted_ls = File( exists=True, - argstr='--wls %s', - xor=('weight_file', 'weight_inv', 'weight_sqrt'), - desc='weighted least squares') + argstr="--wls %s", + xor=("weight_file", "weight_inv", "weight_sqrt"), + desc="weighted least squares", + ) fixed_fx_var = File( - exists=True, argstr='--yffxvar %s', desc='for fixed effects analysis') + exists=True, argstr="--yffxvar %s", desc="for fixed effects analysis" + ) fixed_fx_dof = traits.Int( - argstr='--ffxdof %d', - xor=['fixed_fx_dof_file'], - desc='dof for fixed effects analysis') + argstr="--ffxdof %d", + xor=["fixed_fx_dof_file"], + desc="dof for fixed effects analysis", + ) fixed_fx_dof_file = File( - argstr='--ffxdofdat %d', - xor=['fixed_fx_dof'], - desc='text file with dof for fixed effects analysis') + argstr="--ffxdofdat %d", + xor=["fixed_fx_dof"], + desc="text file with dof for fixed effects analysis", + ) weight_file = File( - exists=True, - xor=['weighted_ls'], - desc='weight for each input at each voxel') + exists=True, xor=["weighted_ls"], desc="weight for each input at each voxel" + ) weight_inv = traits.Bool( - argstr='--w-inv', desc='invert weights', xor=['weighted_ls']) + argstr="--w-inv", desc="invert weights", xor=["weighted_ls"] + ) weight_sqrt = traits.Bool( - argstr='--w-sqrt', desc='sqrt of weights', xor=['weighted_ls']) - fwhm = traits.Range( - low=0.0, 
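# A side note on the lstrip() calls in MRISPreprocReconAll._format_arg above:
# str.lstrip strips a *set of characters*, not a literal prefix, but for the
# "lh."/"rh." hemisphere prefixes the chained calls behave as intended:
print("lh.sphere.reg".lstrip("rh.").lstrip("lh."))  # sphere.reg
print("rh.sphere.reg".lstrip("rh.").lstrip("lh."))  # sphere.reg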
argstr='--fwhm %f', desc='smooth input by fwhm') + argstr="--w-sqrt", desc="sqrt of weights", xor=["weighted_ls"] + ) + fwhm = traits.Range(low=0.0, argstr="--fwhm %f", desc="smooth input by fwhm") var_fwhm = traits.Range( - low=0.0, argstr='--var-fwhm %f', desc='smooth variance by fwhm') + low=0.0, argstr="--var-fwhm %f", desc="smooth variance by fwhm" + ) no_mask_smooth = traits.Bool( - argstr='--no-mask-smooth', desc='do not mask when smoothing') + argstr="--no-mask-smooth", desc="do not mask when smoothing" + ) no_est_fwhm = traits.Bool( - argstr='--no-est-fwhm', desc='turn off FWHM output estimation') - mask_file = File(exists=True, argstr='--mask %s', desc='binary mask') + argstr="--no-est-fwhm", desc="turn off FWHM output estimation" + ) + mask_file = File(exists=True, argstr="--mask %s", desc="binary mask") label_file = File( exists=True, - argstr='--label %s', - xor=['cortex'], - desc='use label as mask, surfaces only') + argstr="--label %s", + xor=["cortex"], + desc="use label as mask, surfaces only", + ) cortex = traits.Bool( - argstr='--cortex', - xor=['label_file'], - desc='use subjects ?h.cortex.label as label') - invert_mask = traits.Bool(argstr='--mask-inv', desc='invert mask') + argstr="--cortex", + xor=["label_file"], + desc="use subjects ?h.cortex.label as label", + ) + invert_mask = traits.Bool(argstr="--mask-inv", desc="invert mask") prune = traits.Bool( - argstr='--prune', - desc= - 'remove voxels that do not have a non-zero value at each frame (def)') + argstr="--prune", + desc="remove voxels that do not have a non-zero value at each frame (def)", + ) no_prune = traits.Bool( - argstr='--no-prune', xor=['prunethresh'], desc='do not prune') + argstr="--no-prune", xor=["prunethresh"], desc="do not prune" + ) prune_thresh = traits.Float( - argstr='--prune_thr %f', - xor=['noprune'], - desc='prune threshold. Default is FLT_MIN') + argstr="--prune_thr %f", + xor=["noprune"], + desc="prune threshold. Default is FLT_MIN", + ) compute_log_y = traits.Bool( - argstr='--logy', desc='compute natural log of y prior to analysis') + argstr="--logy", desc="compute natural log of y prior to analysis" + ) save_estimate = traits.Bool( - argstr='--yhat-save', desc='save signal estimate (yhat)') - save_residual = traits.Bool( - argstr='--eres-save', desc='save residual error (eres)') + argstr="--yhat-save", desc="save signal estimate (yhat)" + ) + save_residual = traits.Bool(argstr="--eres-save", desc="save residual error (eres)") save_res_corr_mtx = traits.Bool( - argstr='--eres-scm', - desc='save residual error spatial correlation matrix (eres.scm). Big!') + argstr="--eres-scm", + desc="save residual error spatial correlation matrix (eres.scm). Big!", + ) surf = traits.Bool( argstr="--surf %s %s %s", requires=["subject_id", "hemi"], - desc="analysis is on a surface mesh") + desc="analysis is on a surface mesh", + ) subject_id = traits.Str(desc="subject id for surface geometry") hemi = traits.Enum("lh", "rh", desc="surface hemisphere") surf_geo = traits.Str( - "white", - usedefault=True, - desc="surface geometry name (e.g. white, pial)") - simulation = traits.Tuple( - traits.Enum('perm', 'mc-full', 'mc-z'), + "white", usedefault=True, desc="surface geometry name (e.g. 
white, pial)" + ) + simulation = Tuple( + traits.Enum("perm", "mc-full", "mc-z"), traits.Int(min=1), traits.Float, traits.Str, - argstr='--sim %s %d %f %s', - desc='nulltype nsim thresh csdbasename') + argstr="--sim %s %d %f %s", + desc="nulltype nsim thresh csdbasename", + ) sim_sign = traits.Enum( - 'abs', 'pos', 'neg', argstr='--sim-sign %s', desc='abs, pos, or neg') - uniform = traits.Tuple( + "abs", "pos", "neg", argstr="--sim-sign %s", desc="abs, pos, or neg" + ) + uniform = Tuple( traits.Float, traits.Float, - argstr='--uniform %f %f', - desc='use uniform distribution instead of gaussian') - pca = traits.Bool( - argstr='--pca', desc='perform pca/svd analysis on residual') + argstr="--uniform %f %f", + desc="use uniform distribution instead of gaussian", + ) + pca = traits.Bool(argstr="--pca", desc="perform pca/svd analysis on residual") calc_AR1 = traits.Bool( - argstr='--tar1', desc='compute and save temporal AR1 of residual') + argstr="--tar1", desc="compute and save temporal AR1 of residual" + ) save_cond = traits.Bool( - argstr='--save-cond', - desc='flag to save design matrix condition at each voxel') - vox_dump = traits.Tuple( + argstr="--save-cond", desc="flag to save design matrix condition at each voxel" + ) + vox_dump = Tuple( traits.Int, traits.Int, traits.Int, - argstr='--voxdump %d %d %d', - desc='dump voxel GLM and exit') - seed = traits.Int(argstr='--seed %d', desc='used for synthesizing noise') - synth = traits.Bool(argstr='--synth', desc='replace input with gaussian') - resynth_test = traits.Int( - argstr='--resynthtest %d', desc='test GLM by resynthsis') - profile = traits.Int(argstr='--profile %d', desc='niters : test speed') + argstr="--voxdump %d %d %d", + desc="dump voxel GLM and exit", + ) + seed = traits.Int(argstr="--seed %d", desc="used for synthesizing noise") + synth = traits.Bool(argstr="--synth", desc="replace input with gaussian") + resynth_test = traits.Int(argstr="--resynthtest %d", desc="test GLM by resynthsis") + profile = traits.Int(argstr="--profile %d", desc="niters : test speed") + mrtm1 = Tuple( + File(exists=True), + File(exists=True), + argstr="--mrtm1 %s %s", + desc="RefTac TimeSec : perform MRTM1 kinetic modeling", + ) + mrtm2 = Tuple( + File(exists=True), + File(exists=True), + traits.Float, + argstr="--mrtm2 %s %s %f", + desc="RefTac TimeSec k2prime : perform MRTM2 kinetic modeling", + ) + logan = Tuple( + File(exists=True), + File(exists=True), + traits.Float, + argstr="--logan %s %s %f", + desc="RefTac TimeSec tstar : perform Logan kinetic modeling", + ) + bp_clip_neg = traits.Bool( + argstr="--bp-clip-neg", + desc="set negative BP voxels to zero", + ) + bp_clip_max = traits.Float( + argstr="--bp-clip-max %f", + desc="set BP voxels above max to max", + ) force_perm = traits.Bool( - argstr='--perm-force', - desc='force perumtation test, even when design matrix is not orthog') - diag = traits.Int(argstr='--diag %d', desc='Gdiag_no : set diagnositc level') + argstr="--perm-force", + desc="force perumtation test, even when design matrix is not orthog", + ) + diag = traits.Int(argstr="--diag %d", desc="Gdiag_no : set diagnostic level") diag_cluster = traits.Bool( - argstr='--diag-cluster', - desc='save sig volume and exit from first sim loop') - debug = traits.Bool(argstr='--debug', desc='turn on debugging') + argstr="--diag-cluster", desc="save sig volume and exit from first sim loop" + ) + debug = traits.Bool(argstr="--debug", desc="turn on debugging") check_opts = traits.Bool( - argstr='--checkopts', - desc="don't run anything, just 
check options and exit") + argstr="--checkopts", desc="don't run anything, just check options and exit" + ) allow_repeated_subjects = traits.Bool( - argstr='--allowsubjrep', - desc= - 'allow subject names to repeat in the fsgd file (must appear before --fsgd' + argstr="--allowsubjrep", + desc="allow subject names to repeat in the fsgd file (must appear before --fsgd", ) allow_ill_cond = traits.Bool( - argstr='--illcond', desc='allow ill-conditioned design matrices') + argstr="--illcond", desc="allow ill-conditioned design matrices" + ) sim_done_file = File( - argstr='--sim-done %s', desc='create file when simulation finished') + argstr="--sim-done %s", desc="create file when simulation finished" + ) + _ext_xor = ['nii', 'nii_gz'] + nii = traits.Bool(argstr='--nii', desc='save outputs as nii', xor=_ext_xor) + nii_gz = traits.Bool(argstr='--nii.gz', desc='save outputs as nii.gz', xor=_ext_xor) class GLMFitOutputSpec(TraitedSpec): - glm_dir = Directory(exists=True, desc="output directory") beta_file = File(exists=True, desc="map of regression coefficients") error_file = File(desc="map of residual error") @@ -387,20 +465,17 @@ class GLMFitOutputSpec(TraitedSpec): estimate_file = File(desc="map of the estimated Y values") mask_file = File(desc="map of the mask used in the analysis") fwhm_file = File(desc="text file with estimated smoothness") - dof_file = File( - desc="text file with effective degrees-of-freedom for the analysis") - gamma_file = OutputMultiPath( - desc="map of contrast of regression coefficients") - gamma_var_file = OutputMultiPath( - desc="map of regression contrast variance") + dof_file = File(desc="text file with effective degrees-of-freedom for the analysis") + gamma_file = OutputMultiPath(desc="map of contrast of regression coefficients") + gamma_var_file = OutputMultiPath(desc="map of regression contrast variance") sig_file = OutputMultiPath(desc="map of F-test significance (in -log10p)") ftest_file = OutputMultiPath(desc="map of test statistic values") - spatial_eigenvectors = File( - desc="map of spatial eigenvectors from residual PCA") - frame_eigenvectors = File( - desc="matrix of frame eigenvectors from residual PCA") + spatial_eigenvectors = File(desc="map of spatial eigenvectors from residual PCA") + frame_eigenvectors = File(desc="matrix of frame eigenvectors from residual PCA") singular_values = File(desc="matrix singular values from residual PCA") svd_stats_file = File(desc="text file summarizing the residual PCA") + k2p_file = File(desc="estimate of k2p parameter") + bp_file = File(desc="Binding potential estimates") class GLMFit(FSCommand): @@ -408,7 +483,6 @@ class GLMFit(FSCommand): Examples -------- - >>> glmfit = GLMFit() >>> glmfit.inputs.in_file = 'functional.nii' >>> glmfit.inputs.one_sample = True @@ -417,7 +491,7 @@ class GLMFit(FSCommand): """ - _cmd = 'mri_glmfit' + _cmd = "mri_glmfit" input_spec = GLMFitInputSpec output_spec = GLMFitOutputSpec @@ -425,7 +499,7 @@ def _format_arg(self, name, spec, value): if name == "surf": _si = self.inputs return spec.argstr % (_si.subject_id, _si.hemi, _si.surf_geo) - return super(GLMFit, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -436,22 +510,33 @@ def _list_outputs(self): glmdir = os.path.abspath(self.inputs.glm_dir) outputs["glm_dir"] = glmdir + if isdefined(self.inputs.nii_gz): + ext = 'nii.gz' + elif isdefined(self.inputs.nii): + ext = 'nii' + else: + ext = 'mgh' + # Assign the output files that always 
get created - outputs["beta_file"] = os.path.join(glmdir, "beta.mgh") - outputs["error_var_file"] = os.path.join(glmdir, "rvar.mgh") - outputs["error_stddev_file"] = os.path.join(glmdir, "rstd.mgh") - outputs["mask_file"] = os.path.join(glmdir, "mask.mgh") + outputs["beta_file"] = os.path.join(glmdir, f"beta.{ext}") + outputs["error_var_file"] = os.path.join(glmdir, f"rvar.{ext}") + outputs["error_stddev_file"] = os.path.join(glmdir, f"rstd.{ext}") + outputs["mask_file"] = os.path.join(glmdir, f"mask.{ext}") outputs["fwhm_file"] = os.path.join(glmdir, "fwhm.dat") outputs["dof_file"] = os.path.join(glmdir, "dof.dat") # Assign the conditional outputs - if isdefined(self.inputs.save_residual) and self.inputs.save_residual: - outputs["error_file"] = os.path.join(glmdir, "eres.mgh") - if isdefined(self.inputs.save_estimate) and self.inputs.save_estimate: - outputs["estimate_file"] = os.path.join(glmdir, "yhat.mgh") + if self.inputs.save_residual: + outputs["error_file"] = os.path.join(glmdir, f"eres.{ext}") + if self.inputs.save_estimate: + outputs["estimate_file"] = os.path.join(glmdir, f"yhat.{ext}") + if any((self.inputs.mrtm1, self.inputs.mrtm2, self.inputs.logan)): + outputs["bp_file"] = os.path.join(glmdir, f"bp.{ext}") + if self.inputs.mrtm1: + outputs["k2p_file"] = os.path.join(glmdir, "k2prime.dat") # Get the contrast directory name(s) + contrasts = [] if isdefined(self.inputs.contrast): - contrasts = [] for c in self.inputs.contrast: if split_filename(c)[2] in [".mat", ".dat", ".mtx", ".con"]: contrasts.append(split_filename(c)[1]) @@ -461,23 +546,19 @@ def _list_outputs(self): contrasts = ["osgm"] # Add in the contrast images - outputs["sig_file"] = [ - os.path.join(glmdir, c, "sig.mgh") for c in contrasts - ] - outputs["ftest_file"] = [ - os.path.join(glmdir, c, "F.mgh") for c in contrasts - ] + outputs["sig_file"] = [os.path.join(glmdir, c, f"sig.{ext}") for c in contrasts] + outputs["ftest_file"] = [os.path.join(glmdir, c, f"F.{ext}") for c in contrasts] outputs["gamma_file"] = [ - os.path.join(glmdir, c, "gamma.mgh") for c in contrasts + os.path.join(glmdir, c, f"gamma.{ext}") for c in contrasts ] outputs["gamma_var_file"] = [ - os.path.join(glmdir, c, "gammavar.mgh") for c in contrasts + os.path.join(glmdir, c, f"gammavar.{ext}") for c in contrasts ] # Add in the PCA results, if relevant if isdefined(self.inputs.pca) and self.inputs.pca: pcadir = os.path.join(glmdir, "pca-eres") - outputs["spatial_eigenvectors"] = os.path.join(pcadir, "v.mgh") + outputs["spatial_eigenvectors"] = os.path.join(pcadir, f"v.{ext}") outputs["frame_eigenvectors"] = os.path.join(pcadir, "u.mtx") outputs["singluar_values"] = os.path.join(pcadir, "sdiag.mat") outputs["svd_stats_file"] = os.path.join(pcadir, "stats.dat") @@ -485,90 +566,89 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name == 'glm_dir': + if name == "glm_dir": return os.getcwd() return None class OneSampleTTest(GLMFit): def __init__(self, **kwargs): - super(OneSampleTTest, self).__init__(**kwargs) + super().__init__(**kwargs) self.inputs.one_sample = True class BinarizeInputSpec(FSTraitedSpec): in_file = File( exists=True, - argstr='--i %s', + argstr="--i %s", mandatory=True, copyfile=False, - desc='input volume') - min = traits.Float( - argstr='--min %f', xor=['wm_ven_csf'], desc='min thresh') - max = traits.Float( - argstr='--max %f', xor=['wm_ven_csf'], desc='max thresh') - rmin = traits.Float( - argstr='--rmin %f', desc='compute min based on rmin*globalmean') - rmax = traits.Float( - argstr='--rmax 
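# How the GLMFit output-extension selection earlier plays out: the new --nii.gz
# and --nii flags switch every fixed volume output away from the default MGH
# format. A sketch (glm_outputs is a hypothetical helper, not nipype API):
import os

def glm_outputs(glmdir, nii=False, nii_gz=False):
    ext = "nii.gz" if nii_gz else ("nii" if nii else "mgh")
    return {n: os.path.join(glmdir, f"{n}.{ext}") for n in ("beta", "rvar", "rstd", "mask")}

print(glm_outputs("glmdir", nii_gz=True)["beta"])  # glmdir/beta.nii.gz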
%f', desc='compute max based on rmax*globalmean') + desc="input volume", + ) + min = traits.Float(argstr="--min %f", xor=["wm_ven_csf"], desc="min thresh") + max = traits.Float(argstr="--max %f", xor=["wm_ven_csf"], desc="max thresh") + rmin = traits.Float(argstr="--rmin %f", desc="compute min based on rmin*globalmean") + rmax = traits.Float(argstr="--rmax %f", desc="compute max based on rmax*globalmean") match = traits.List( - traits.Int, argstr='--match %d...', desc='match instead of threshold') + traits.Int, argstr="--match %d...", desc="match instead of threshold" + ) wm = traits.Bool( - argstr='--wm', - desc='set match vals to 2 and 41 (aseg for cerebral WM)') + argstr="--wm", desc="set match vals to 2 and 41 (aseg for cerebral WM)" + ) ventricles = traits.Bool( - argstr='--ventricles', - desc='set match vals those for aseg ventricles+choroid (not 4th)') + argstr="--ventricles", + desc="set match vals those for aseg ventricles+choroid (not 4th)", + ) wm_ven_csf = traits.Bool( - argstr='--wm+vcsf', - xor=['min', 'max'], - desc='WM and ventricular CSF, including choroid (not 4th)') - binary_file = File( - argstr='--o %s', genfile=True, desc='binary output volume') - out_type = traits.Enum( - 'nii', 'nii.gz', 'mgz', argstr='', desc='output file type') + argstr="--wm+vcsf", + xor=["min", "max"], + desc="WM and ventricular CSF, including choroid (not 4th)", + ) + binary_file = File(argstr="--o %s", genfile=True, desc="binary output volume") + out_type = traits.Enum("nii", "nii.gz", "mgz", argstr="", desc="output file type") count_file = traits.Either( traits.Bool, File, - argstr='--count %s', - desc='save number of hits in ascii file (hits, ntotvox, pct)') + argstr="--count %s", + desc="save number of hits in ascii file (hits, ntotvox, pct)", + ) bin_val = traits.Int( - argstr='--binval %d', - desc='set vox within thresh to val (default is 1)') + argstr="--binval %d", desc="set vox within thresh to val (default is 1)" + ) bin_val_not = traits.Int( - argstr='--binvalnot %d', - desc='set vox outside range to val (default is 0)') - invert = traits.Bool(argstr='--inv', desc='set binval=0, binvalnot=1') + argstr="--binvalnot %d", desc="set vox outside range to val (default is 0)" + ) + invert = traits.Bool(argstr="--inv", desc="set binval=0, binvalnot=1") frame_no = traits.Int( - argstr='--frame %s', desc='use 0-based frame of input (default is 0)') - merge_file = File( - exists=True, argstr='--merge %s', desc='merge with mergevol') - mask_file = File( - exists=True, argstr='--mask maskvol', desc='must be within mask') - mask_thresh = traits.Float( - argstr='--mask-thresh %f', desc='set thresh for mask') + argstr="--frame %s", desc="use 0-based frame of input (default is 0)" + ) + merge_file = File(exists=True, argstr="--merge %s", desc="merge with mergevol") + mask_file = File(exists=True, argstr="--mask maskvol", desc="must be within mask") + mask_thresh = traits.Float(argstr="--mask-thresh %f", desc="set thresh for mask") abs = traits.Bool( - argstr='--abs', desc='take abs of invol first (ie, make unsigned)') + argstr="--abs", desc="take abs of invol first (ie, make unsigned)" + ) bin_col_num = traits.Bool( - argstr='--bincol', - desc='set binarized voxel value to its column number') - zero_edges = traits.Bool( - argstr='--zero-edges', desc='zero the edge voxels') + argstr="--bincol", desc="set binarized voxel value to its column number" + ) + zero_edges = traits.Bool(argstr="--zero-edges", desc="zero the edge voxels") zero_slice_edge = traits.Bool( - argstr='--zero-slice-edges', desc='zero 
the edge slice voxels')
+        argstr="--zero-slice-edges", desc="zero the edge slice voxels"
+    )
-    dilate = traits.Int(
-        argstr='--dilate %d', desc='niters: dilate binarization in 3D')
+    dilate = traits.Int(argstr="--dilate %d", desc="niters: dilate binarization in 3D")
     erode = traits.Int(
-        argstr='--erode %d',
-        desc='nerode: erode binarization in 3D (after any dilation)')
+        argstr="--erode %d",
+        desc="nerode: erode binarization in 3D (after any dilation)",
+    )
     erode2d = traits.Int(
-        argstr='--erode2d %d',
-        desc='nerode2d: erode binarization in 2D (after any 3D erosion)')
+        argstr="--erode2d %d",
+        desc="nerode2d: erode binarization in 2D (after any 3D erosion)",
+    )


 class BinarizeOutputSpec(TraitedSpec):
-    binary_file = File(exists=True, desc='binarized output volume')
-    count_file = File(desc='ascii file containing number of hits')
+    binary_file = File(exists=True, desc="binarized output volume")
+    count_file = File(desc="ascii file containing number of hits")


 class Binarize(FSCommand):
@@ -576,14 +656,13 @@ class Binarize(FSCommand):

     Examples
     --------
-
     >>> binvol = Binarize(in_file='structural.nii', min=10, binary_file='foo_out.nii')
     >>> binvol.cmdline
     'mri_binarize --o foo_out.nii --i structural.nii --min 10.000000'

-    """
+    """

-    _cmd = 'mri_binarize'
+    _cmd = "mri_binarize"
     input_spec = BinarizeInputSpec
     output_spec = BinarizeOutputSpec

@@ -595,38 +674,41 @@ def _list_outputs(self):
             outfile = fname_presuffix(
                 self.inputs.in_file,
                 newpath=os.getcwd(),
-                suffix='.'.join(('_thresh', self.inputs.out_type)),
-                use_ext=False)
+                suffix=f"_thresh.{self.inputs.out_type}",
+                use_ext=False,
+            )
         else:
             outfile = fname_presuffix(
-                self.inputs.in_file, newpath=os.getcwd(), suffix='_thresh')
-        outputs['binary_file'] = os.path.abspath(outfile)
+                self.inputs.in_file, newpath=os.getcwd(), suffix="_thresh"
+            )
+        outputs["binary_file"] = os.path.abspath(outfile)
         value = self.inputs.count_file
         if isdefined(value):
             if isinstance(value, bool):
                 if value:
-                    outputs['count_file'] = fname_presuffix(
+                    outputs["count_file"] = fname_presuffix(
                         self.inputs.in_file,
-                        suffix='_count.txt',
+                        suffix="_count.txt",
                         newpath=os.getcwd(),
-                        use_ext=False)
+                        use_ext=False,
+                    )
             else:
-                outputs['count_file'] = value
+                outputs["count_file"] = value
         return outputs

     def _format_arg(self, name, spec, value):
-        if name == 'count_file':
+        if name == "count_file":
             if isinstance(value, bool):
                 fname = self._list_outputs()[name]
             else:
                 fname = value
             return spec.argstr % fname
-        if name == 'out_type':
-            return ''
-        return super(Binarize, self)._format_arg(name, spec, value)
+        if name == "out_type":
+            return ""
+        return super()._format_arg(name, spec, value)

     def _gen_filename(self, name):
-        if name == 'binary_file':
+        if name == "binary_file":
             return self._list_outputs()[name]
         return None

@@ -634,73 +716,77 @@ def _gen_filename(self, name):
 class ConcatenateInputSpec(FSTraitedSpec):
     in_files = InputMultiPath(
         File(exists=True),
-        desc='Individual volumes to be concatenated',
-        argstr='--i %s...',
-        mandatory=True)
-    concatenated_file = File(
-        desc='Output volume', argstr='--o %s', genfile=True)
+        desc="Individual volumes to be concatenated",
+        argstr="--i %s...",
+        mandatory=True,
+    )
+    concatenated_file = File(desc="Output volume", argstr="--o %s", genfile=True)
     sign = traits.Enum(
-        'abs',
-        'pos',
-        'neg',
-        argstr='--%s',
-        desc='Take only pos or neg voxles from input, or take abs')
+        "abs",
+        "pos",
+        "neg",
+        argstr="--%s",
+        desc="Take only pos or neg voxels from input, or take abs",
+    )
     stats = traits.Enum(
-        'sum',
-        'var',
-
'std', - 'max', - 'min', - 'mean', - argstr='--%s', - desc='Compute the sum, var, std, max, min or mean of the input volumes' + "sum", + "var", + "std", + "max", + "min", + "mean", + argstr="--%s", + desc="Compute the sum, var, std, max, min or mean of the input volumes", ) paired_stats = traits.Enum( - 'sum', - 'avg', - 'diff', - 'diff-norm', - 'diff-norm1', - 'diff-norm2', - argstr='--paired-%s', - desc='Compute paired sum, avg, or diff') + "sum", + "avg", + "diff", + "diff-norm", + "diff-norm1", + "diff-norm2", + argstr="--paired-%s", + desc="Compute paired sum, avg, or diff", + ) gmean = traits.Int( - argstr='--gmean %d', - desc='create matrix to average Ng groups, Nper=Ntot/Ng') + argstr="--gmean %d", desc="create matrix to average Ng groups, Nper=Ntot/Ng" + ) mean_div_n = traits.Bool( - argstr='--mean-div-n', desc='compute mean/nframes (good for var)') + argstr="--mean-div-n", desc="compute mean/nframes (good for var)" + ) multiply_by = traits.Float( - argstr='--mul %f', desc='Multiply input volume by some amount') + argstr="--mul %f", desc="Multiply input volume by some amount" + ) add_val = traits.Float( - argstr='--add %f', desc='Add some amount to the input volume') + argstr="--add %f", desc="Add some amount to the input volume" + ) multiply_matrix_file = File( - exists=True, - argstr='--mtx %s', - desc='Multiply input by an ascii matrix in file') + exists=True, argstr="--mtx %s", desc="Multiply input by an ascii matrix in file" + ) combine = traits.Bool( - argstr='--combine', - desc='Combine non-zero values into single frame volume') + argstr="--combine", desc="Combine non-zero values into single frame volume" + ) keep_dtype = traits.Bool( - argstr='--keep-datatype', - desc='Keep voxelwise precision type (default is float') + argstr="--keep-datatype", desc="Keep voxelwise precision type (default is float" + ) max_bonfcor = traits.Bool( - argstr='--max-bonfcor', - desc='Compute max and bonferroni correct (assumes -log10(ps))') + argstr="--max-bonfcor", + desc="Compute max and bonferroni correct (assumes -log10(ps))", + ) max_index = traits.Bool( - argstr='--max-index', - desc='Compute the index of max voxel in concatenated volumes') - mask_file = File( - exists=True, argstr='--mask %s', desc='Mask input with a volume') + argstr="--max-index", + desc="Compute the index of max voxel in concatenated volumes", + ) + mask_file = File(exists=True, argstr="--mask %s", desc="Mask input with a volume") vote = traits.Bool( - argstr='--vote', - desc='Most frequent value at each voxel and fraction of occurances') - sort = traits.Bool( - argstr='--sort', desc='Sort each voxel by ascending frame value') + argstr="--vote", + desc="Most frequent value at each voxel and fraction of occurrences", + ) + sort = traits.Bool(argstr="--sort", desc="Sort each voxel by ascending frame value") class ConcatenateOutputSpec(TraitedSpec): - concatenated_file = File( - exists=True, desc='Path/name of the output volume') + concatenated_file = File(exists=True, desc="Path/name of the output volume") class Concatenate(FSCommand): @@ -710,7 +796,6 @@ class Concatenate(FSCommand): Examples -------- - Combine two input volumes into one volume with two frames >>> concat = Concatenate() @@ -721,7 +806,7 @@ class Concatenate(FSCommand): """ - _cmd = 'mri_concat' + _cmd = "mri_concat" input_spec = ConcatenateInputSpec output_spec = ConcatenateOutputSpec @@ -730,181 +815,198 @@ def _list_outputs(self): fname = self.inputs.concatenated_file if not isdefined(fname): - fname = 'concat_output.nii.gz' - 
outputs['concatenated_file'] = os.path.join(os.getcwd(), fname) + fname = "concat_output.nii.gz" + outputs["concatenated_file"] = os.path.join(os.getcwd(), fname) return outputs def _gen_filename(self, name): - if name == 'concatenated_file': + if name == "concatenated_file": return self._list_outputs()[name] return None class SegStatsInputSpec(FSTraitedSpec): - _xor_inputs = ('segmentation_file', 'annot', 'surf_label') + _xor_inputs = ("segmentation_file", "annot", "surf_label") segmentation_file = File( exists=True, - argstr='--seg %s', + argstr="--seg %s", xor=_xor_inputs, mandatory=True, - desc='segmentation volume path') - annot = traits.Tuple( + desc="segmentation volume path", + ) + annot = Tuple( traits.Str, - traits.Enum('lh', 'rh'), + traits.Enum("lh", "rh"), traits.Str, - argstr='--annot %s %s %s', + argstr="--annot %s %s %s", xor=_xor_inputs, mandatory=True, - desc='subject hemi parc : use surface parcellation') - surf_label = traits.Tuple( + desc="subject hemi parc : use surface parcellation", + ) + surf_label = Tuple( traits.Str, - traits.Enum('lh', 'rh'), + traits.Enum("lh", "rh"), traits.Str, - argstr='--slabel %s %s %s', + argstr="--slabel %s %s %s", xor=_xor_inputs, mandatory=True, - desc='subject hemi label : use surface label') + desc="subject hemi label : use surface label", + ) summary_file = File( - argstr='--sum %s', + argstr="--sum %s", genfile=True, position=-1, - desc='Segmentation stats summary table file') + desc="Segmentation stats summary table file", + ) partial_volume_file = File( - exists=True, argstr='--pv %s', desc='Compensate for partial voluming') + exists=True, argstr="--pv %s", desc="Compensate for partial voluming" + ) in_file = File( exists=True, - argstr='--i %s', - desc='Use the segmentation to report stats on this volume') + argstr="--i %s", + desc="Use the segmentation to report stats on this volume", + ) frame = traits.Int( - argstr='--frame %d', desc='Report stats on nth frame of input volume') - multiply = traits.Float(argstr='--mul %f', desc='multiply input by val') + argstr="--frame %d", desc="Report stats on nth frame of input volume" + ) + multiply = traits.Float(argstr="--mul %f", desc="multiply input by val") calc_snr = traits.Bool( - argstr='--snr', desc='save mean/std as extra column in output table') + argstr="--snr", desc="save mean/std as extra column in output table" + ) calc_power = traits.Enum( - 'sqr', - 'sqrt', - argstr='--%s', - desc='Compute either the sqr or the sqrt of the input') - _ctab_inputs = ('color_table_file', 'default_color_table', - 'gca_color_table') + "sqr", + "sqrt", + argstr="--%s", + desc="Compute either the sqr or the sqrt of the input", + ) + _ctab_inputs = ("color_table_file", "default_color_table", "gca_color_table") color_table_file = File( exists=True, - argstr='--ctab %s', + argstr="--ctab %s", xor=_ctab_inputs, - desc='color table file with seg id names') + desc="color table file with seg id names", + ) default_color_table = traits.Bool( - argstr='--ctab-default', + argstr="--ctab-default", xor=_ctab_inputs, - desc='use $FREESURFER_HOME/FreeSurferColorLUT.txt') + desc="use $FREESURFER_HOME/FreeSurferColorLUT.txt", + ) gca_color_table = File( exists=True, - argstr='--ctab-gca %s', + argstr="--ctab-gca %s", xor=_ctab_inputs, - desc='get color table from GCA (CMA)') + desc="get color table from GCA (CMA)", + ) segment_id = traits.List( - argstr='--id %s...', desc='Manually specify segmentation ids') - exclude_id = traits.Int( - argstr='--excludeid %d', desc='Exclude seg id from report') + argstr="--id 
%s...", desc="Manually specify segmentation ids" + ) + exclude_id = traits.Int(argstr="--excludeid %d", desc="Exclude seg id from report") exclude_ctx_gm_wm = traits.Bool( - argstr='--excl-ctxgmwm', desc='exclude cortical gray and white matter') + argstr="--excl-ctxgmwm", desc="exclude cortical gray and white matter" + ) wm_vol_from_surf = traits.Bool( - argstr='--surf-wm-vol', desc='Compute wm volume from surf') + argstr="--surf-wm-vol", desc="Compute wm volume from surf" + ) cortex_vol_from_surf = traits.Bool( - argstr='--surf-ctx-vol', desc='Compute cortex volume from surf') + argstr="--surf-ctx-vol", desc="Compute cortex volume from surf" + ) non_empty_only = traits.Bool( - argstr='--nonempty', desc='Only report nonempty segmentations') + argstr="--nonempty", desc="Only report nonempty segmentations" + ) empty = traits.Bool( - argstr="--empty", - desc="Report on segmentations listed in the color table") + argstr="--empty", desc="Report on segmentations listed in the color table" + ) mask_file = File( - exists=True, argstr='--mask %s', desc='Mask volume (same size as seg') + exists=True, argstr="--mask %s", desc="Mask volume (same size as seg" + ) mask_thresh = traits.Float( - argstr='--maskthresh %f', - desc='binarize mask with this threshold <0.5>') + argstr="--maskthresh %f", desc="binarize mask with this threshold <0.5>" + ) mask_sign = traits.Enum( - 'abs', - 'pos', - 'neg', - '--masksign %s', - desc='Sign for mask threshold: pos, neg, or abs') + "abs", + "pos", + "neg", + "--masksign %s", + desc="Sign for mask threshold: pos, neg, or abs", + ) mask_frame = traits.Int( - '--maskframe %d', - requires=['mask_file'], - desc='Mask with this (0 based) frame of the mask volume') + "--maskframe %d", + requires=["mask_file"], + desc="Mask with this (0 based) frame of the mask volume", + ) mask_invert = traits.Bool( - argstr='--maskinvert', desc='Invert binarized mask volume') - mask_erode = traits.Int( - argstr='--maskerode %d', desc='Erode mask by some amount') + argstr="--maskinvert", desc="Invert binarized mask volume" + ) + mask_erode = traits.Int(argstr="--maskerode %d", desc="Erode mask by some amount") brain_vol = traits.Enum( - 'brain-vol-from-seg', - 'brainmask', - argstr='--%s', - desc= - 'Compute brain volume either with ``brainmask`` or ``brain-vol-from-seg``' + "brain-vol-from-seg", + "brainmask", + argstr="--%s", + desc="Compute brain volume either with ``brainmask`` or ``brain-vol-from-seg``", ) brainmask_file = File( argstr="--brainmask %s", exists=True, - desc= - "Load brain mask and compute the volume of the brain as the non-zero voxels in this volume" + desc="Load brain mask and compute the volume of the brain as the non-zero voxels in this volume", ) - etiv = traits.Bool( - argstr='--etiv', desc='Compute ICV from talairach transform') + etiv = traits.Bool(argstr="--etiv", desc="Compute ICV from talairach transform") etiv_only = traits.Enum( - 'etiv', - 'old-etiv', - '--%s-only', - desc='Compute etiv and exit. Use ``etiv`` or ``old-etiv``') + "etiv", + "old-etiv", + "--%s-only", + desc="Compute etiv and exit. 
Use ``etiv`` or ``old-etiv``", + ) avgwf_txt_file = traits.Either( traits.Bool, File, - argstr='--avgwf %s', - desc='Save average waveform into file (bool or filename)') + argstr="--avgwf %s", + desc="Save average waveform into file (bool or filename)", + ) avgwf_file = traits.Either( traits.Bool, File, - argstr='--avgwfvol %s', - desc='Save as binary volume (bool or filename)') + argstr="--avgwfvol %s", + desc="Save as binary volume (bool or filename)", + ) sf_avg_file = traits.Either( - traits.Bool, - File, - argstr='--sfavg %s', - desc='Save mean across space and time') + traits.Bool, File, argstr="--sfavg %s", desc="Save mean across space and time" + ) vox = traits.List( traits.Int, - argstr='--vox %s', - desc='Replace seg with all 0s except at C R S (three int inputs)') - supratent = traits.Bool( - argstr="--supratent", desc="Undocumented input flag") + argstr="--vox %s", + desc="Replace seg with all 0s except at C R S (three int inputs)", + ) + supratent = traits.Bool(argstr="--supratent", desc="Undocumented input flag") subcort_gm = traits.Bool( - argstr="--subcortgray", - desc="Compute volume of subcortical gray matter") + argstr="--subcortgray", desc="Compute volume of subcortical gray matter" + ) total_gray = traits.Bool( - argstr="--totalgray", desc="Compute volume of total gray matter") + argstr="--totalgray", desc="Compute volume of total gray matter" + ) euler = traits.Bool( argstr="--euler", - desc= - "Write out number of defect holes in orig.nofix based on the euler number" + desc="Write out number of defect holes in orig.nofix based on the euler number", ) in_intensity = File( - argstr="--in %s --in-intensity-name %s", - desc="Undocumented input norm.mgz file") + argstr="--in %s --in-intensity-name %s", desc="Undocumented input norm.mgz file" + ) intensity_units = traits.Enum( - 'MR', + "MR", argstr="--in-intensity-units %s", requires=["in_intensity"], - desc="Intensity units") + desc="Intensity units", + ) class SegStatsOutputSpec(TraitedSpec): - summary_file = File( - exists=True, desc='Segmentation summary statistics table') + summary_file = File(exists=True, desc="Segmentation summary statistics table") avgwf_txt_file = File( - desc='Text file with functional statistics averaged over segs') - avgwf_file = File( - desc='Volume with functional statistics averaged over segs') + desc="Text file with functional statistics averaged over segs" + ) + avgwf_file = File(desc="Volume with functional statistics averaged over segs") sf_avg_file = File( - desc='Text file with func statistics averaged over segs and framss') + desc="Text file with func statistics averaged over segs and framss" + ) class SegStats(FSCommand): @@ -912,7 +1014,6 @@ class SegStats(FSCommand): Examples -------- - >>> import nipype.interfaces.freesurfer as fs >>> ss = fs.SegStats() >>> ss.inputs.annot = ('PWS04', 'lh', 'aparc') @@ -925,56 +1026,58 @@ class SegStats(FSCommand): """ - _cmd = 'mri_segstats' + _cmd = "mri_segstats" input_spec = SegStatsInputSpec output_spec = SegStatsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.summary_file): - outputs['summary_file'] = os.path.abspath(self.inputs.summary_file) + outputs["summary_file"] = os.path.abspath(self.inputs.summary_file) else: - outputs['summary_file'] = os.path.join(os.getcwd(), - 'summary.stats') + outputs["summary_file"] = os.path.join(os.getcwd(), "summary.stats") suffices = dict( - avgwf_txt_file='_avgwf.txt', - avgwf_file='_avgwf.nii.gz', - sf_avg_file='sfavg.txt') + avgwf_txt_file="_avgwf.txt", + 
avgwf_file="_avgwf.nii.gz", + sf_avg_file="sfavg.txt", + ) if isdefined(self.inputs.segmentation_file): _, src = os.path.split(self.inputs.segmentation_file) if isdefined(self.inputs.annot): - src = '_'.join(self.inputs.annot) + src = "_".join(self.inputs.annot) if isdefined(self.inputs.surf_label): - src = '_'.join(self.inputs.surf_label) + src = "_".join(self.inputs.surf_label) for name, suffix in list(suffices.items()): value = getattr(self.inputs, name) if isdefined(value): if isinstance(value, bool): outputs[name] = fname_presuffix( - src, suffix=suffix, newpath=os.getcwd(), use_ext=False) + src, suffix=suffix, newpath=os.getcwd(), use_ext=False + ) else: outputs[name] = os.path.abspath(value) return outputs def _format_arg(self, name, spec, value): - if name in ('summary_file', 'avgwf_txt_file'): + if name in ("summary_file", "avgwf_txt_file"): if not isinstance(value, bool): if not os.path.isabs(value): - value = os.path.join('.', value) - if name in ['avgwf_txt_file', 'avgwf_file', 'sf_avg_file']: + value = os.path.join(".", value) + if name in ["avgwf_txt_file", "avgwf_file", "sf_avg_file"]: if isinstance(value, bool): fname = self._list_outputs()[name] else: fname = value return spec.argstr % fname - elif name == 'in_intensity': - intensity_name = os.path.basename( - self.inputs.in_intensity).replace('.mgz', '') + elif name == "in_intensity": + intensity_name = os.path.basename(self.inputs.in_intensity).replace( + ".mgz", "" + ) return spec.argstr % (value, intensity_name) - return super(SegStats, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): - if name == 'summary_file': + if name == "summary_file": return self._list_outputs()[name] return None @@ -982,50 +1085,50 @@ def _gen_filename(self, name): class SegStatsReconAllInputSpec(SegStatsInputSpec): # recon-all input requirements subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, argstr="--subject %s", mandatory=True, - desc="Subject id being processed") + desc="Subject id being processed", + ) # implicit - ribbon = traits.File( - mandatory=True, exists=True, desc="Input file mri/ribbon.mgz") + ribbon = File(mandatory=True, exists=True, desc="Input file mri/ribbon.mgz") presurf_seg = File(exists=True, desc="Input segmentation volume") transform = File(mandatory=True, exists=True, desc="Input transform file") - lh_orig_nofix = File( - mandatory=True, exists=True, desc="Input lh.orig.nofix") - rh_orig_nofix = File( - mandatory=True, exists=True, desc="Input rh.orig.nofix") + lh_orig_nofix = File(mandatory=True, exists=True, desc="Input lh.orig.nofix") + rh_orig_nofix = File(mandatory=True, exists=True, desc="Input rh.orig.nofix") lh_white = File( mandatory=True, exists=True, - desc="Input file must be /surf/lh.white") + desc="Input file must be /surf/lh.white", + ) rh_white = File( mandatory=True, exists=True, - desc="Input file must be /surf/rh.white") + desc="Input file must be /surf/rh.white", + ) lh_pial = File( - mandatory=True, - exists=True, - desc="Input file must be /surf/lh.pial") + mandatory=True, exists=True, desc="Input file must be /surf/lh.pial" + ) rh_pial = File( - mandatory=True, - exists=True, - desc="Input file must be /surf/rh.pial") + mandatory=True, exists=True, desc="Input file must be /surf/rh.pial" + ) aseg = File(exists=True, desc="Mandatory implicit input in 5.3") - copy_inputs = traits.Bool(desc="If running as a node, set this to True " + - "otherwise, this will copy the implicit inputs " - + "to the node 
directory.") + copy_inputs = traits.Bool( + desc="If running as a node, set this to True " + "otherwise, this will copy the implicit inputs " + "to the node directory." + ) class SegStatsReconAll(SegStats): """ This class inherits SegStats and modifies it for use in a recon-all workflow. This implementation mandates implicit inputs that SegStats. - To ensure backwards compatability of SegStats, this class was created. + To ensure backwards compatibility of SegStats, this class was created. Examples - ======== + -------- >>> from nipype.interfaces.freesurfer import SegStatsReconAll >>> segstatsreconall = SegStatsReconAll() >>> segstatsreconall.inputs.annot = ('PWS04', 'lh', 'aparc') @@ -1054,118 +1157,130 @@ class SegStatsReconAll(SegStats): >>> segstatsreconall.inputs.exclude_id = 0 >>> segstatsreconall.cmdline 'mri_segstats --annot PWS04 lh aparc --avgwf ./avgwf.txt --brain-vol-from-seg --surf-ctx-vol --empty --etiv --euler --excl-ctxgmwm --excludeid 0 --subcortgray --subject 10335 --supratent --totalgray --surf-wm-vol --sum ./summary.stats' + """ + input_spec = SegStatsReconAllInputSpec output_spec = SegStatsOutputSpec def _format_arg(self, name, spec, value): - if name == 'brainmask_file': + if name == "brainmask_file": return spec.argstr % os.path.basename(value) - return super(SegStatsReconAll, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir - copy2subjdir(self, self.inputs.lh_orig_nofix, 'surf', - 'lh.orig.nofix') - copy2subjdir(self, self.inputs.rh_orig_nofix, 'surf', - 'rh.orig.nofix') - copy2subjdir(self, self.inputs.lh_white, 'surf', 'lh.white') - copy2subjdir(self, self.inputs.rh_white, 'surf', 'rh.white') - copy2subjdir(self, self.inputs.lh_pial, 'surf', 'lh.pial') - copy2subjdir(self, self.inputs.rh_pial, 'surf', 'rh.pial') - copy2subjdir(self, self.inputs.ribbon, 'mri', 'ribbon.mgz') - copy2subjdir(self, self.inputs.presurf_seg, 'mri', - 'aseg.presurf.mgz') - copy2subjdir(self, self.inputs.aseg, 'mri', 'aseg.mgz') - copy2subjdir(self, self.inputs.transform, - os.path.join('mri', 'transforms'), 'talairach.xfm') - copy2subjdir(self, self.inputs.in_intensity, 'mri') - copy2subjdir(self, self.inputs.brainmask_file, 'mri') - return super(SegStatsReconAll, self).run(**inputs) + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.lh_orig_nofix, "surf", "lh.orig.nofix") + copy2subjdir(self, self.inputs.rh_orig_nofix, "surf", "rh.orig.nofix") + copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white") + copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white") + copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial") + copy2subjdir(self, self.inputs.rh_pial, "surf", "rh.pial") + copy2subjdir(self, self.inputs.ribbon, "mri", "ribbon.mgz") + copy2subjdir(self, self.inputs.presurf_seg, "mri", "aseg.presurf.mgz") + copy2subjdir(self, self.inputs.aseg, "mri", "aseg.mgz") + copy2subjdir( + self, + self.inputs.transform, + os.path.join("mri", "transforms"), + "talairach.xfm", + ) + copy2subjdir(self, self.inputs.in_intensity, "mri") + copy2subjdir(self, self.inputs.brainmask_file, "mri") + return super().run(**inputs) class Label2VolInputSpec(FSTraitedSpec): label_file = InputMultiPath( File(exists=True), - argstr='--label %s...', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + argstr="--label 
%s...", + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), copyfile=False, mandatory=True, - desc='list of label files') + desc="list of label files", + ) annot_file = File( exists=True, - argstr='--annot %s', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), - requires=('subject_id', 'hemi'), + argstr="--annot %s", + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), + requires=("subject_id", "hemi"), mandatory=True, copyfile=False, - desc='surface annotation file') + desc="surface annotation file", + ) seg_file = File( exists=True, - argstr='--seg %s', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + argstr="--seg %s", + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), mandatory=True, copyfile=False, - desc='segmentation file') + desc="segmentation file", + ) aparc_aseg = traits.Bool( - argstr='--aparc+aseg', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + argstr="--aparc+aseg", + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), mandatory=True, - desc='use aparc+aseg.mgz in subjectdir as seg') + desc="use aparc+aseg.mgz in subjectdir as seg", + ) template_file = File( - exists=True, - argstr='--temp %s', - mandatory=True, - desc='output template volume') + exists=True, argstr="--temp %s", mandatory=True, desc="output template volume" + ) reg_file = File( exists=True, - argstr='--reg %s', - xor=('reg_file', 'reg_header', 'identity'), - desc='tkregister style matrix VolXYZ = R*LabelXYZ') + argstr="--reg %s", + xor=("reg_file", "reg_header", "identity"), + desc="tkregister style matrix VolXYZ = R*LabelXYZ", + ) reg_header = File( exists=True, - argstr='--regheader %s', - xor=('reg_file', 'reg_header', 'identity'), - desc='label template volume') + argstr="--regheader %s", + xor=("reg_file", "reg_header", "identity"), + desc="label template volume", + ) identity = traits.Bool( - argstr='--identity', - xor=('reg_file', 'reg_header', 'identity'), - desc='set R=I') + argstr="--identity", xor=("reg_file", "reg_header", "identity"), desc="set R=I" + ) invert_mtx = traits.Bool( - argstr='--invertmtx', desc='Invert the registration matrix') + argstr="--invertmtx", desc="Invert the registration matrix" + ) fill_thresh = traits.Range( - 0., 1., argstr='--fillthresh %g', desc='thresh : between 0 and 1') + 0.0, 1.0, argstr="--fillthresh %g", desc="thresh : between 0 and 1" + ) label_voxel_volume = traits.Float( - argstr='--labvoxvol %f', desc='volume of each label point (def 1mm3)') - proj = traits.Tuple( - traits.Enum('abs', 'frac'), + argstr="--labvoxvol %f", desc="volume of each label point (def 1mm3)" + ) + proj = Tuple( + traits.Enum("abs", "frac"), traits.Float, traits.Float, traits.Float, - argstr='--proj %s %f %f %f', - requires=('subject_id', 'hemi'), - desc='project along surface normal') - subject_id = traits.Str(argstr='--subject %s', desc='subject id') + argstr="--proj %s %f %f %f", + requires=("subject_id", "hemi"), + desc="project along surface normal", + ) + subject_id = traits.Str(argstr="--subject %s", desc="subject id") hemi = traits.Enum( - 'lh', 'rh', argstr='--hemi %s', desc='hemisphere to use lh or rh') - surface = traits.Str( - argstr='--surf %s', desc='use surface instead of white') - vol_label_file = File(argstr='--o %s', genfile=True, desc='output volume') + "lh", "rh", argstr="--hemi %s", desc="hemisphere to use lh or rh" + ) + surface = traits.Str(argstr="--surf %s", desc="use surface instead of white") + vol_label_file = File(argstr="--o %s", genfile=True, desc="output volume") 
label_hit_file = File( - argstr='--hits %s', desc='file with each frame is nhits for a label') + argstr="--hits %s", desc="file with each frame is nhits for a label" + ) map_label_stat = File( - argstr='--label-stat %s', - desc='map the label stats field into the vol') + argstr="--label-stat %s", desc="map the label stats field into the vol" + ) native_vox2ras = traits.Bool( - argstr='--native-vox2ras', - desc='use native vox2ras xform instead of tkregister-style') + argstr="--native-vox2ras", + desc="use native vox2ras xform instead of tkregister-style", + ) class Label2VolOutputSpec(TraitedSpec): - vol_label_file = File(exists=True, desc='output volume') + vol_label_file = File(exists=True, desc="output volume") class Label2Vol(FSCommand): @@ -1173,14 +1288,13 @@ class Label2Vol(FSCommand): Examples -------- - >>> binvol = Label2Vol(label_file='cortex.label', template_file='structural.nii', reg_file='register.dat', fill_thresh=0.5, vol_label_file='foo_out.nii') >>> binvol.cmdline 'mri_label2vol --fillthresh 0.5 --label cortex.label --reg register.dat --temp structural.nii --o foo_out.nii' - """ + """ - _cmd = 'mri_label2vol' + _cmd = "mri_label2vol" input_spec = Label2VolInputSpec output_spec = Label2VolOutputSpec @@ -1188,21 +1302,22 @@ def _list_outputs(self): outputs = self.output_spec().get() outfile = self.inputs.vol_label_file if not isdefined(outfile): - for key in ['label_file', 'annot_file', 'seg_file']: + for key in ["label_file", "annot_file", "seg_file"]: if isdefined(getattr(self.inputs, key)): path = getattr(self.inputs, key) if isinstance(path, list): path = path[0] _, src = os.path.split(path) if isdefined(self.inputs.aparc_aseg): - src = 'aparc+aseg.mgz' + src = "aparc+aseg.mgz" outfile = fname_presuffix( - src, suffix='_vol.nii.gz', newpath=os.getcwd(), use_ext=False) - outputs['vol_label_file'] = outfile + src, suffix="_vol.nii.gz", newpath=os.getcwd(), use_ext=False + ) + outputs["vol_label_file"] = outfile return outputs def _gen_filename(self, name): - if name == 'vol_label_file': + if name == "vol_label_file": return self._list_outputs()[name] return None @@ -1210,51 +1325,53 @@ def _gen_filename(self, name): class MS_LDAInputSpec(FSTraitedSpec): lda_labels = traits.List( traits.Int(), - argstr='-lda %s', + argstr="-lda %s", mandatory=True, minlen=2, maxlen=2, - sep=' ', - desc='pair of class labels to optimize') - weight_file = traits.File( - argstr='-weight %s', + sep=" ", + desc="pair of class labels to optimize", + ) + weight_file = File( + argstr="-weight %s", mandatory=True, - desc='filename for the LDA weights (input or output)') - vol_synth_file = traits.File( + desc="filename for the LDA weights (input or output)", + ) + vol_synth_file = File( exists=False, - argstr='-synth %s', + argstr="-synth %s", mandatory=True, - desc=('filename for the synthesized output ' - 'volume')) - label_file = traits.File( - exists=True, argstr='-label %s', desc='filename of the label volume') - mask_file = traits.File( - exists=True, - argstr='-mask %s', - desc='filename of the brain mask volume') + desc=("filename for the synthesized output volume"), + ) + label_file = File( + exists=True, argstr="-label %s", desc="filename of the label volume" + ) + mask_file = File( + exists=True, argstr="-mask %s", desc="filename of the brain mask volume" + ) shift = traits.Int( - argstr='-shift %d', - desc='shift all values equal to the given value to zero') + argstr="-shift %d", desc="shift all values equal to the given value to zero" + ) conform = traits.Bool( - argstr='-conform', - 
desc=('Conform the input volumes (brain mask ' - 'typically already conformed)')) + argstr="-conform", + desc=("Conform the input volumes (brain mask typically already conformed)"), + ) use_weights = traits.Bool( - argstr='-W', - desc=('Use the weights from a previously ' - 'generated weight file')) + argstr="-W", desc=("Use the weights from a previously generated weight file") + ) images = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, copyfile=False, - desc='list of input FLASH images', - position=-1) + desc="list of input FLASH images", + position=-1, + ) class MS_LDAOutputSpec(TraitedSpec): - weight_file = File(exists=True, desc='') - vol_synth_file = File(exists=True, desc='') + weight_file = File(exists=True, desc="") + vol_synth_file = File(exists=True, desc="") class MS_LDA(FSCommand): @@ -1262,7 +1379,6 @@ class MS_LDA(FSCommand): Examples -------- - >>> grey_label = 2 >>> white_label = 3 >>> zero_value = 1 @@ -1273,38 +1389,37 @@ class MS_LDA(FSCommand): images=['FLASH1.mgz', 'FLASH2.mgz', 'FLASH3.mgz']) >>> optimalWeights.cmdline 'mri_ms_LDA -conform -label label.mgz -lda 2 3 -shift 1 -W -synth synth_out.mgz -weight weights.txt FLASH1.mgz FLASH2.mgz FLASH3.mgz' + """ - _cmd = 'mri_ms_LDA' + _cmd = "mri_ms_LDA" input_spec = MS_LDAInputSpec output_spec = MS_LDAOutputSpec def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.output_synth): - outputs['vol_synth_file'] = os.path.abspath( - self.inputs.output_synth) + outputs["vol_synth_file"] = os.path.abspath(self.inputs.output_synth) else: - outputs['vol_synth_file'] = os.path.abspath( - self.inputs.vol_synth_file) - if not isdefined( - self.inputs.use_weights) or self.inputs.use_weights is False: - outputs['weight_file'] = os.path.abspath(self.inputs.weight_file) + outputs["vol_synth_file"] = os.path.abspath(self.inputs.vol_synth_file) + if not isdefined(self.inputs.use_weights) or self.inputs.use_weights is False: + outputs["weight_file"] = os.path.abspath(self.inputs.weight_file) return outputs def _verify_weights_file_exists(self): if not os.path.exists(os.path.abspath(self.inputs.weight_file)): raise traits.TraitError( - "MS_LDA: use_weights must accompany an existing weights file") + "MS_LDA: use_weights must accompany an existing weights file" + ) def _format_arg(self, name, spec, value): - if name == 'use_weights': + if name == "use_weights": if self.inputs.use_weights is True: self._verify_weights_file_exists() else: - return '' + return "" # TODO: Fix bug when boolean values are set explicitly to false - return super(MS_LDA, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): pass @@ -1312,57 +1427,55 @@ def _gen_filename(self, name): class Label2LabelInputSpec(FSTraitedSpec): hemisphere = traits.Enum( - 'lh', - 'rh', - argstr="--hemi %s", - mandatory=True, - desc="Input hemisphere") + "lh", "rh", argstr="--hemi %s", mandatory=True, desc="Input hemisphere" + ) subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, argstr="--trgsubject %s", mandatory=True, - desc="Target subject") + desc="Target subject", + ) sphere_reg = File( - mandatory=True, - exists=True, - desc="Implicit input .sphere.reg") - white = File( - mandatory=True, exists=True, desc="Implicit input .white") + mandatory=True, exists=True, desc="Implicit input .sphere.reg" + ) + white = File(mandatory=True, exists=True, desc="Implicit input .white") source_sphere_reg = File( - mandatory=True, - exists=True, - 
desc="Implicit input .sphere.reg") + mandatory=True, exists=True, desc="Implicit input .sphere.reg" + ) source_white = File( - mandatory=True, exists=True, desc="Implicit input .white") + mandatory=True, exists=True, desc="Implicit input .white" + ) source_label = File( - argstr="--srclabel %s", - mandatory=True, - exists=True, - desc="Source label") + argstr="--srclabel %s", mandatory=True, exists=True, desc="Source label" + ) source_subject = traits.String( - argstr="--srcsubject %s", mandatory=True, desc="Source subject name") + argstr="--srcsubject %s", mandatory=True, desc="Source subject name" + ) # optional out_file = File( argstr="--trglabel %s", - name_source=['source_label'], - name_template='%s_converted', + name_source=["source_label"], + name_template="%s_converted", hash_files=False, keep_extension=True, - desc="Target label") + desc="Target label", + ) registration_method = traits.Enum( - 'surface', - 'volume', + "surface", + "volume", usedefault=True, argstr="--regmethod %s", - desc="Registration method") + desc="Registration method", + ) copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the input files to the node " + "directory.") + desc="If running as a node, set this to True. " + "This will copy the input files to the node directory." + ) class Label2LabelOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Output label') + out_file = File(exists=True, desc="Output label") class Label2Label(FSCommand): @@ -1393,88 +1506,91 @@ class Label2Label(FSCommand): 'mri_label2label --hemi lh --trglabel lh-pial_converted.stl --regmethod surface --srclabel lh-pial.stl --srcsubject fsaverage --trgsubject 10335' """ - _cmd = 'mri_label2label' + _cmd = "mri_label2label" input_spec = Label2LabelInputSpec output_spec = Label2LabelOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label', - self.inputs.out_file) + outputs["out_file"] = os.path.join( + self.inputs.subjects_dir, + self.inputs.subject_id, + "label", + self.inputs.out_file, + ) return outputs def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir hemi = self.inputs.hemisphere - copy2subjdir(self, self.inputs.sphere_reg, 'surf', - '{0}.sphere.reg'.format(hemi)) - copy2subjdir(self, self.inputs.white, 'surf', - '{0}.white'.format(hemi)) + copy2subjdir(self, self.inputs.sphere_reg, "surf", f"{hemi}.sphere.reg") + copy2subjdir(self, self.inputs.white, "surf", f"{hemi}.white") copy2subjdir( self, self.inputs.source_sphere_reg, - 'surf', - '{0}.sphere.reg'.format(hemi), - subject_id=self.inputs.source_subject) + "surf", + f"{hemi}.sphere.reg", + subject_id=self.inputs.source_subject, + ) copy2subjdir( self, self.inputs.source_white, - 'surf', - '{0}.white'.format(hemi), - subject_id=self.inputs.source_subject) + "surf", + f"{hemi}.white", + subject_id=self.inputs.source_subject, + ) # label dir must exist in order for output file to be written - label_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label') + label_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) if not os.path.isdir(label_dir): os.makedirs(label_dir) - return super(Label2Label, self).run(**inputs) + return super().run(**inputs) class 
Label2AnnotInputSpec(FSTraitedSpec): # required hemisphere = traits.Enum( - 'lh', - 'rh', - argstr="--hemi %s", - mandatory=True, - desc="Input hemisphere") + "lh", "rh", argstr="--hemi %s", mandatory=True, desc="Input hemisphere" + ) subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, argstr="--s %s", mandatory=True, - desc="Subject name/ID") + desc="Subject name/ID", + ) in_labels = traits.List( - argstr="--l %s...", mandatory=True, desc="List of input label files") + argstr="--l %s...", mandatory=True, desc="List of input label files" + ) out_annot = traits.String( - argstr="--a %s", - mandatory=True, - desc="Name of the annotation to create") + argstr="--a %s", mandatory=True, desc="Name of the annotation to create" + ) orig = File(exists=True, mandatory=True, desc="implicit {hemisphere}.orig") # optional keep_max = traits.Bool( - argstr="--maxstatwinner", desc="Keep label with highest 'stat' value") + argstr="--maxstatwinner", desc="Keep label with highest 'stat' value" + ) verbose_off = traits.Bool( - argstr="--noverbose", - desc="Turn off overlap and stat override messages") + argstr="--noverbose", desc="Turn off overlap and stat override messages" + ) color_table = File( argstr="--ctab %s", exists=True, - desc= - "File that defines the structure names, their indices, and their color" + desc="File that defines the structure names, their indices, and their color", ) copy_inputs = traits.Bool( - desc="copy implicit inputs and create a temp subjects_dir") + desc="copy implicit inputs and create a temp subjects_dir" + ) class Label2AnnotOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Output annotation file') + out_file = File(exists=True, desc="Output annotation file") class Label2Annot(FSCommand): @@ -1494,94 +1610,83 @@ class Label2Annot(FSCommand): 'mris_label2annot --hemi lh --l lh.aparc.label --a test --s 10335' """ - _cmd = 'mris_label2annot' + _cmd = "mris_label2annot" input_spec = Label2AnnotInputSpec output_spec = Label2AnnotOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir copy2subjdir( self, self.inputs.orig, - folder='surf', - basename='{0}.orig'.format(self.inputs.hemisphere)) + folder="surf", + basename=f"{self.inputs.hemisphere}.orig", + ) # label dir must exist in order for output file to be written - label_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label') + label_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) if not os.path.isdir(label_dir): os.makedirs(label_dir) - return super(Label2Annot, self).run(**inputs) + return super().run(**inputs) def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.join( - str(self.inputs.subjects_dir), str(self.inputs.subject_id), - 'label', - str(self.inputs.hemisphere) + '.' + str(self.inputs.out_annot) + - '.annot') + str(self.inputs.subjects_dir), + str(self.inputs.subject_id), + "label", + str(self.inputs.hemisphere) + "." 
+ str(self.inputs.out_annot) + ".annot", + ) return outputs class SphericalAverageInputSpec(FSTraitedSpec): out_file = File( - argstr="%s", - genfile=True, - exists=False, - position=-1, - desc="Output filename") - in_average = traits.Directory( - argstr="%s", - exists=True, - genfile=True, - position=-2, - desc="Average subject") + argstr="%s", genfile=True, exists=False, position=-1, desc="Output filename" + ) + in_average = Directory( + argstr="%s", exists=True, genfile=True, position=-2, desc="Average subject" + ) in_surf = File( - argstr="%s", - mandatory=True, - exists=True, - position=-3, - desc="Input surface file") + argstr="%s", mandatory=True, exists=True, position=-3, desc="Input surface file" + ) hemisphere = traits.Enum( - 'lh', - 'rh', - argstr="%s", - mandatory=True, - position=-4, - desc="Input hemisphere") + "lh", "rh", argstr="%s", mandatory=True, position=-4, desc="Input hemisphere" + ) fname = traits.String( argstr="%s", mandatory=True, position=-5, - desc="""Filename from the average subject directory. - Example: to use rh.entorhinal.label as the input label - filename, set fname to 'rh.entorhinal' and which to - 'label'. The program will then search for - '{in_average}/label/rh.entorhinal.label' - """) + desc="""\ +Filename from the average subject directory. +Example: to use rh.entorhinal.label as the input label filename, set fname to 'rh.entorhinal' +and which to 'label'. The program will then search for +``/label/rh.entorhinal.label``""", + ) which = traits.Enum( - 'coords', - 'label', - 'vals', - 'curv', - 'area', + "coords", + "label", + "vals", + "curv", + "area", argstr="%s", mandatory=True, position=-6, - desc="No documentation") - subject_id = traits.String( - argstr="-o %s", mandatory=True, desc="Output subject id") + desc="No documentation", + ) + subject_id = traits.String(argstr="-o %s", mandatory=True, desc="Output subject id") # optional erode = traits.Int(argstr="-erode %d", desc="Undocumented") - in_orig = File( - argstr="-orig %s", exists=True, desc="Original surface filename") + in_orig = File(argstr="-orig %s", exists=True, desc="Original surface filename") threshold = traits.Float(argstr="-t %.1f", desc="Undocumented") class SphericalAverageOutputSpec(TraitedSpec): - out_file = File(exists=False, desc='Output label') + out_file = File(exists=False, desc="Output label") class SphericalAverage(FSCommand): @@ -1603,28 +1708,29 @@ class SphericalAverage(FSCommand): >>> sphericalavg.inputs.threshold = 5 >>> sphericalavg.cmdline 'mris_spherical_average -erode 2 -o 10335 -t 5.0 label lh.entorhinal lh pial . 
test.out'
+
     """

-    _cmd = 'mris_spherical_average'
+    _cmd = "mris_spherical_average"
     input_spec = SphericalAverageInputSpec
     output_spec = SphericalAverageOutputSpec

     def _format_arg(self, name, spec, value):
-        if name == 'in_orig' or name == 'in_surf':
+        if name == "in_orig" or name == "in_surf":
             surf = os.path.basename(value)
-            for item in ['lh.', 'rh.']:
-                surf = surf.replace(item, '')
+            for item in ["lh.", "rh."]:
+                surf = surf.replace(item, "")
             return spec.argstr % surf
-        return super(SphericalAverage, self)._format_arg(name, spec, value)
+        return super()._format_arg(name, spec, value)

     def _gen_filename(self, name):
-        if name == 'in_average':
-            avg_subject = str(self.inputs.hemisphere) + '.EC_average'
+        if name == "in_average":
            avg_subject = str(self.inputs.hemisphere) + ".EC_average"
             avg_directory = os.path.join(self.inputs.subjects_dir, avg_subject)
             if not os.path.isdir(avg_directory):
-                fs_home = os.path.abspath(os.environ.get('FREESURFER_HOME'))
+                fs_home = os.path.abspath(os.environ.get("FREESURFER_HOME"))
             return avg_subject
-        elif name == 'out_file':
+        elif name == "out_file":
             return self._list_outputs()[name]
         else:
             return None
@@ -1632,15 +1738,15 @@ def _gen_filename(self, name):
     def _list_outputs(self):
         outputs = self._outputs().get()
         if isdefined(self.inputs.out_file):
-            outputs['out_file'] = os.path.abspath(self.inputs.out_file)
+            outputs["out_file"] = os.path.abspath(self.inputs.out_file)
         else:
-            out_dir = os.path.join(self.inputs.subjects_dir,
-                                   self.inputs.subject_id, 'label')
+            out_dir = os.path.join(
+                self.inputs.subjects_dir, self.inputs.subject_id, "label"
+            )
             if isdefined(self.inputs.in_average):
                 basename = os.path.basename(self.inputs.in_average)
-                basename = basename.replace('_', '_exvivo_') + '.label'
+                basename = basename.replace("_", "_exvivo_") + ".label"
             else:
-                basename = str(
-                    self.inputs.hemisphere) + '.EC_exvivo_average.label'
-            outputs['out_file'] = os.path.join(out_dir, basename)
+                basename = str(self.inputs.hemisphere) + ".EC_exvivo_average.label"
+            outputs["out_file"] = os.path.join(out_dir, basename)
         return outputs
diff --git a/nipype/interfaces/freesurfer/petsurfer.py b/nipype/interfaces/freesurfer/petsurfer.py
new file mode 100644
index 0000000000..4505985127
--- /dev/null
+++ b/nipype/interfaces/freesurfer/petsurfer.py
@@ -0,0 +1,665 @@
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+"""Provides interfaces to various commands for running PET analyses provided by FreeSurfer
+"""
+
+import os
+
+from ... import logging
+from ..base import (
+    TraitedSpec,
+    File,
+    traits,
+    Tuple,
+    Directory,
+    InputMultiPath,
+    isdefined,
+)
+from .base import FSCommand, FSTraitedSpec
+
+from .model import GLMFitInputSpec, GLMFit
+
+__docformat__ = "restructuredtext"
+iflogger = logging.getLogger("nipype.interface")
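+
+# Note on intended use: GTMSeg builds the anatomical segmentation
+# (gtmseg.mgz by default) that GTMPVC consumes via its `segmentation`
+# input for partial-volume correction, as in the doctests below.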
+
+
+class GTMSegInputSpec(FSTraitedSpec):
+    subject_id = traits.String(argstr="--s %s", desc="subject id", mandatory=True)
+
+    xcerseg = traits.Bool(
+        argstr="--xcerseg",
+        desc="run xcerebralseg on this subject to create apas+head.mgz",
+    )
+
+    out_file = File(
+        "gtmseg.mgz",
+        argstr="--o %s",
+        desc="output volume relative to subject/mri",
+        usedefault=True,
+    )
+
+    upsampling_factor = traits.Int(
+        argstr="--usf %i", desc="upsampling factor (default is 2)"
+    )
+
+    subsegwm = traits.Bool(
+        argstr="--subsegwm", default=True, desc="subsegment WM into lobes (default)"
+    )
+
+    keep_hypo = traits.Bool(
+        argstr="--keep-hypo",
+        desc="do not relabel hypointensities as WM when subsegmenting WM",
+    )
+
+    keep_cc = traits.Bool(
+        argstr="--keep-cc", desc="do not relabel corpus callosum as WM"
+    )
+
+    dmax = traits.Float(
+        argstr="--dmax %f",
+        desc="distance threshold to use when subsegmenting WM (default is 5)",
+    )
+
+    ctx_annot = Tuple(
+        traits.String,
+        traits.Int,
+        traits.Int,
+        argstr="--ctx-annot %s %i %i",
+        desc="annot lhbase rhbase : annotation to use for cortical segmentation (default is aparc 1000 2000)",
+    )
+
+    wm_annot = Tuple(
+        traits.String,
+        traits.Int,
+        traits.Int,
+        argstr="--wm-annot %s %i %i",
+        desc="annot lhbase rhbase : annotation to use for WM segmentation (with --subsegwm, default is lobes 3200 4200)",
+    )
+
+    output_upsampling_factor = traits.Int(
+        argstr="--output-usf %i",
+        desc="set output USF different than USF, mostly for debugging",
+    )
+
+    head = traits.String(
+        argstr="--head %s", desc="use headseg instead of apas+head.mgz"
+    )
+
+    subseg_cblum_wm = traits.Bool(
+        argstr="--subseg-cblum-wm", desc="subsegment cerebellum WM into core and gyri"
+    )
+
+    no_pons = traits.Bool(
+        argstr="--no-pons", desc="do not add pons segmentation when doing --xcerseg"
+    )
+
+    no_vermis = traits.Bool(
+        argstr="--no-vermis",
+        desc="do not add vermis segmentation when doing --xcerseg",
+    )
+
+    colortable = File(exists=True, argstr="--ctab %s", desc="colortable")
+    no_seg_stats = traits.Bool(
+        argstr="--no-seg-stats", desc="do not compute segmentation stats"
+    )
+
+
+class GTMSegOutputSpec(TraitedSpec):
+    out_file = File(desc="GTM segmentation")
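+
+
+# GTMSeg writes out_file relative to the subject's mri/ directory, so
+# _list_outputs below rebuilds the absolute path from subjects_dir,
+# subject_id and out_file rather than using the working directory.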
+class GTMSeg(FSCommand):
+    """create an anatomical segmentation for the geometric transfer matrix (GTM).
+
+    Examples
+    --------
+    >>> gtmseg = GTMSeg()
+    >>> gtmseg.inputs.subject_id = 'subject_id'
+    >>> gtmseg.cmdline
+    'gtmseg --o gtmseg.mgz --s subject_id'
+    """
+
+    _cmd = "gtmseg"
+    input_spec = GTMSegInputSpec
+    output_spec = GTMSegOutputSpec
+
+    def _list_outputs(self):
+        outputs = self.output_spec().get()
+        outputs['out_file'] = os.path.join(
+            self.inputs.subjects_dir,
+            self.inputs.subject_id,
+            'mri',
+            self.inputs.out_file,
+        )
+        return outputs
+
+
+class GTMPVCInputSpec(FSTraitedSpec):
+    in_file = File(
+        exists=True,
+        argstr="--i %s",
+        mandatory=True,
+        copyfile=False,
+        desc="input volume - source data to pvc",
+    )
+
+    frame = traits.Int(
+        argstr="--frame %i", desc="only process 0-based frame F from inputvol"
+    )
+
+    psf = traits.Float(argstr="--psf %f", desc="scanner PSF FWHM in mm")
+
+    segmentation = File(
+        argstr="--seg %s",
+        exists=True,
+        mandatory=True,
+        desc="segfile : anatomical segmentation to define regions for GTM",
+    )
+
+    _reg_xor = ["reg_file", "regheader", "reg_identity"]
+    reg_file = File(
+        exists=True,
+        argstr="--reg %s",
+        mandatory=True,
+        desc="LTA registration file that maps PET to anatomical",
+        xor=_reg_xor,
+    )
+
+    regheader = traits.Bool(
+        argstr="--regheader",
+        mandatory=True,
+        desc="assume input and seg share scanner space",
+        xor=_reg_xor,
+    )
+
+    reg_identity = traits.Bool(
+        argstr="--reg-identity",
+        mandatory=True,
+        desc="assume that input is in anatomical space",
+        xor=_reg_xor,
+    )
+
+    pvc_dir = traits.Str(argstr="--o %s", desc="save outputs to dir", genfile=True)
+
+    mask_file = File(
+        exists=True,
+        argstr="--mask %s",
+        desc="ignore areas outside of the mask (in input vol space)",
+    )
+
+    auto_mask = Tuple(
+        traits.Float,
+        traits.Float,
+        argstr="--auto-mask %f %f",
+        desc="FWHM thresh : automatically compute mask",
+    )
+
+    no_reduce_fov = traits.Bool(
+        argstr="--no-reduce-fov", desc="do not reduce FoV to encompass mask"
+    )
+
+    reduce_fox_eqodd = traits.Bool(
+        argstr="--reduce-fox-eqodd",
+        desc="reduce FoV to encompass mask but force nc=nr and ns to be odd",
+    )
+
+    contrast = InputMultiPath(
+        File(exists=True), argstr="--C %s...", desc="contrast file"
+    )
+
+    default_seg_merge = traits.Bool(
+        argstr="--default-seg-merge", desc="default schema for merging ROIs"
+    )
+
+    merge_hypos = traits.Bool(
+        argstr="--merge-hypos", desc="merge left and right hypointensities into one ROI"
+    )
+
+    merge_cblum_wm_gyri = traits.Bool(
+        argstr="--merge-cblum-wm-gyri",
+        desc="cerebellum WM gyri back into cerebellum WM",
+    )
+
+    tt_reduce = traits.Bool(
+        argstr="--tt-reduce", desc="reduce segmentation to that of a tissue type"
+    )
+
+    replace = Tuple(
+        traits.Int,
+        traits.Int,
+        argstr="--replace %i %i",
+        desc="Id1 Id2 : replace seg Id1 with seg Id2",
+    )
+
+    rescale = traits.List(
+        argstr="--rescale %s...",
+        desc="Id1 : specify reference region(s) used to rescale (default is pons)",
+    )
+
+    no_rescale = traits.Bool(
+        argstr="--no-rescale",
+        desc="do not global rescale such that mean of reference region is scaleref",
+    )
+
+    scale_refval = traits.Float(
+        argstr="--scale-refval %f",
+        desc="refval : scale such that mean in reference region is refval",
+    )
+
+    _ctab_inputs = ("color_table_file", "default_color_table")
+    color_table_file = File(
+        exists=True,
+        argstr="--ctab %s",
+        xor=_ctab_inputs,
+        desc="color table file with seg id names",
+    )
+
+    default_color_table = traits.Bool(
+        argstr="--ctab-default",
+        xor=_ctab_inputs,
+        desc="use $FREESURFER_HOME/FreeSurferColorLUT.txt",
+    )
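+
+    # The tissue-type options (tt_reduce above, tt_update and lat below)
+    # adjust how segmentation IDs are grouped into tissue types before the
+    # GTM is assembled; see `mri_gtmpvc --help` for the exact semantics.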
argstr="--tt-update", + desc="changes tissue type of VentralDC, BrainStem, and Pons to be SubcortGM", + ) + + lat = traits.Bool(argstr="--lat", desc="lateralize tissue types") + + no_tfe = traits.Bool( + argstr="--no-tfe", + desc="do not correct for tissue fraction effect (with --psf 0 turns off PVC entirely)", + ) + + no_pvc = traits.Bool( + argstr="--no-pvc", + desc="turns off PVC entirely (both PSF and TFE)", + ) + + tissue_fraction_resolution = traits.Float( + argstr="--segpvfres %f", + desc="set the tissue fraction resolution parameter (def is 0.5)", + ) + + rbv = traits.Bool( + argstr="--rbv", + requires=["subjects_dir"], + desc="perform Region-based Voxelwise (RBV) PVC", + ) + + rbv_res = traits.Float( + argstr="--rbv-res %f", + desc="voxsize : set RBV voxel resolution (good for when standard res takes too much memory)", + ) + + mg = Tuple( + traits.Float, + traits.List(traits.String), + argstr="--mg %g %s", + desc="gmthresh RefId1 RefId2 ...: perform Mueller-Gaertner PVC, gmthresh is min gm pvf bet 0 and 1", + ) + + mg_ref_cerebral_wm = traits.Bool( + argstr="--mg-ref-cerebral-wm", desc=" set MG RefIds to 2 and 41" + ) + + mg_ref_lobes_wm = traits.Bool( + argstr="--mg-ref-lobes-wm", + desc="set MG RefIds to those for lobes when using wm subseg", + ) + + mgx = traits.Float( + argstr="--mgx %f", + desc="gmxthresh : GLM-based Mueller-Gaertner PVC, gmxthresh is min gm pvf bet 0 and 1", + ) + + km_ref = traits.List( + argstr="--km-ref %s...", + desc="RefId1 RefId2 ... : compute reference TAC for KM as mean of given RefIds", + ) + + km_hb = traits.List( + argstr="--km-hb %s...", + desc="RefId1 RefId2 ... : compute HiBinding TAC for KM as mean of given RefIds", + ) + + steady_state_params = Tuple( + traits.Float, + traits.Float, + traits.Float, + argstr="--ss %f %f %f", + desc="bpc scale dcf : steady-state analysis spec blood plasma concentration, unit scale and decay correction factor. You must also spec --km-ref. 
Turns off rescaling", + ) + + X = traits.Bool( + argstr="--X", desc="save X matrix in matlab4 format as X.mat (it will be big)" + ) + + y = traits.Bool(argstr="--y", desc="save y matrix in matlab4 format as y.mat") + + beta = traits.Bool( + argstr="--beta", desc="save beta matrix in matlab4 format as beta.mat" + ) + + X0 = traits.Bool( + argstr="--X0", + desc="save X0 matrix in matlab4 format as X0.mat (it will be big)", + ) + + save_input = traits.Bool( + argstr="--save-input", desc="saves rescaled input as input.rescaled.nii.gz" + ) + + save_eres = traits.Bool(argstr="--save-eres", desc="saves residual error") + + save_yhat = traits.Bool( + argstr="--save-yhat", + xor=["save_yhat_with_noise"], + desc="save signal estimate (yhat) smoothed with the PSF", + ) + + save_yhat_with_noise = Tuple( + traits.Int, + traits.Int, + argstr="--save-yhat-with-noise %i %i", + xor=["save_yhat"], + desc="seed nreps : save signal estimate (yhat) with noise", + ) + + save_yhat_full_fov = traits.Bool( + argstr="--save-yhat-full-fov", desc="save signal estimate (yhat)" + ) + + save_yhat0 = traits.Bool(argstr="--save-yhat0", desc="save signal estimate (yhat)") + + optimization_schema = traits.Enum( + "3D", + "2D", + "1D", + "3D_MB", + "2D_MB", + "1D_MB", + "MBZ", + "MB3", + argstr="--opt %s", + desc="opt : optimization schema for applying adaptive GTM", + ) + + opt_tol = Tuple( + traits.Int, + traits.Float, + traits.Float, + argstr="--opt-tol %i %f %f", + desc="n_iters_max ftol lin_min_tol : optimization parameters for adaptive gtm using fminsearch", + ) + + opt_brain = traits.Bool(argstr="--opt-brain", desc="apply adaptive GTM") + + opt_seg_merge = traits.Bool( + argstr="--opt-seg-merge", + desc="optimal schema for merging ROIs when applying adaptive GTM", + ) + + num_threads = traits.Int( + argstr="--threads %i", desc="threads : number of threads to use" + ) + + psf_col = traits.Float( + argstr="--psf-col %f", desc="xFWHM : full-width-half-maximum in the x-direction" + ) + + psf_row = traits.Float( + argstr="--psf-row %f", desc="yFWHM : full-width-half-maximum in the y-direction" + ) + + psf_slice = traits.Float( + argstr="--psf-slice %f", + desc="zFWHM : full-width-half-maximum in the z-direction", + ) + + +class GTMPVCOutputSpec(TraitedSpec): + pvc_dir = Directory(desc="output directory") + ref_file = File(desc="Reference TAC in .dat") + hb_nifti = File(desc="High-binding TAC in nifti") + hb_dat = File(desc="High-binding TAC in .dat") + nopvc_file = File(desc="TACs for all regions with no PVC") + gtm_file = File(desc="TACs for all regions with GTM PVC") + gtm_stats = File(desc="Statistics for the GTM PVC") + input_file = File(desc="4D PET file in native volume space") + reg_pet2anat = File(desc="Registration file to go from PET to anat") + reg_anat2pet = File(desc="Registration file to go from anat to PET") + reg_rbvpet2anat = File( + desc="Registration file to go from RBV corrected PET to anat" + ) + reg_anat2rbvpet = File( + desc="Registration file to go from anat to RBV corrected PET" + ) + mgx_ctxgm = File( + desc="Cortical GM voxel-wise values corrected using the extended Muller-Gartner method", + ) + mgx_subctxgm = File( + desc="Subcortical GM voxel-wise values corrected using the extended Muller-Gartner method", + ) + mgx_gm = File( + desc="All GM voxel-wise values corrected using the extended Muller-Gartner method", + ) + rbv = File(desc="All GM voxel-wise values corrected using the RBV method") + opt_params = File( + desc="Optimal parameter estimates for the FWHM using adaptive GTM" + ) + yhat0 = 
File(desc="4D PET file of signal estimate (yhat) after PVC (unsmoothed)") + yhat = File( + desc="4D PET file of signal estimate (yhat) after PVC (smoothed with PSF)", + ) + yhat_full_fov = File( + desc="4D PET file with full FOV of signal estimate (yhat) after PVC (smoothed with PSF)", + ) + yhat_with_noise = File( + desc="4D PET file with full FOV of signal estimate (yhat) with noise after PVC (smoothed with PSF)", + ) + eres = File( + desc="4D PET file of residual error after PVC (smoothed with PSF)", + ) + tissue_fraction = File( + desc="4D PET file of tissue fraction before PVC", + ) + tissue_fraction_psf = File( + desc="4D PET file of tissue fraction after PVC (smoothed with PSF)", + ) + seg = File( + desc="Segmentation file of regions used for PVC", + ) + seg_ctab = File( + desc="Color table file for segmentation file", + ) + + +class GTMPVC(FSCommand): + """Perform Partial Volume Correction (PVC) to PET Data. + + Examples + -------- + >>> gtmpvc = GTMPVC() + >>> gtmpvc.inputs.in_file = 'sub-01_ses-baseline_pet.nii.gz' + >>> gtmpvc.inputs.segmentation = 'gtmseg.mgz' + >>> gtmpvc.inputs.reg_file = 'sub-01_ses-baseline_pet_mean_reg.lta' + >>> gtmpvc.inputs.pvc_dir = 'pvc' + >>> gtmpvc.inputs.psf = 4 + >>> gtmpvc.inputs.default_seg_merge = True + >>> gtmpvc.inputs.auto_mask = (1, 0.1) + >>> gtmpvc.inputs.km_ref = ['8 47'] + >>> gtmpvc.inputs.km_hb = ['11 12 50 51'] + >>> gtmpvc.inputs.no_rescale = True + >>> gtmpvc.inputs.save_input = True + >>> gtmpvc.cmdline # doctest: +NORMALIZE_WHITESPACE + 'mri_gtmpvc --auto-mask 1.000000 0.100000 --default-seg-merge \ + --i sub-01_ses-baseline_pet.nii.gz --km-hb 11 12 50 51 --km-ref 8 47 --no-rescale \ + --psf 4.000000 --o pvc --reg sub-01_ses-baseline_pet_mean_reg.lta --save-input \ + --seg gtmseg.mgz' + + >>> gtmpvc = GTMPVC() + >>> gtmpvc.inputs.in_file = 'sub-01_ses-baseline_pet.nii.gz' + >>> gtmpvc.inputs.segmentation = 'gtmseg.mgz' + >>> gtmpvc.inputs.regheader = True + >>> gtmpvc.inputs.pvc_dir = 'pvc' + >>> gtmpvc.inputs.mg = (0.5, ["ROI1", "ROI2"]) + >>> gtmpvc.cmdline # doctest: +NORMALIZE_WHITESPACE + 'mri_gtmpvc --i sub-01_ses-baseline_pet.nii.gz --mg 0.5 ROI1 ROI2 --o pvc --regheader --seg gtmseg.mgz' + """ + + _cmd = "mri_gtmpvc" + input_spec = GTMPVCInputSpec + output_spec = GTMPVCOutputSpec + + def _format_arg(self, name, spec, val): + # Values taken from + # https://github.com/freesurfer/freesurfer/blob/fs-7.2/mri_gtmpvc/mri_gtmpvc.cpp#L115-L122 + if name == 'optimization_schema': + return ( + spec.argstr + % { + "3D": 1, + "2D": 2, + "1D": 3, + "3D_MB": 4, + "2D_MB": 5, + "1D_MB": 6, + "MBZ": 7, + "MB3": 8, + }[val] + ) + if name == 'mg': + return spec.argstr % (val[0], ' '.join(val[1])) + return super()._format_arg(name, spec, val) + + def _list_outputs(self): + outputs = self.output_spec().get() + # Get the top-level output directory + if not isdefined(self.inputs.pvc_dir): + pvcdir = os.getcwd() + else: + pvcdir = os.path.abspath(self.inputs.pvc_dir) + outputs["pvc_dir"] = pvcdir + + # Assign the output files that always get created + outputs["ref_file"] = os.path.join(pvcdir, "km.ref.tac.dat") + outputs["hb_nifti"] = os.path.join(pvcdir, "km.hb.tac.nii.gz") + outputs["hb_dat"] = os.path.join(pvcdir, "km.hb.tac.dat") + outputs["nopvc_file"] = os.path.join(pvcdir, "nopvc.nii.gz") + outputs["gtm_file"] = os.path.join(pvcdir, "gtm.nii.gz") + outputs["gtm_stats"] = os.path.join(pvcdir, "gtm.stats.dat") + outputs["reg_pet2anat"] = os.path.join(pvcdir, "aux", "bbpet2anat.lta") + outputs["reg_anat2pet"] = os.path.join(pvcdir, "aux", 
"anat2bbpet.lta") + outputs["eres"] = os.path.join(pvcdir, "eres.nii.gz") + outputs["tissue_fraction"] = os.path.join( + pvcdir, "aux", "tissue.fraction.nii.gz" + ) + outputs["tissue_fraction_psf"] = os.path.join( + pvcdir, "aux", "tissue.fraction.psf.nii.gz" + ) + outputs["seg"] = os.path.join(pvcdir, "aux", "seg.nii.gz") + outputs["seg_ctab"] = os.path.join(pvcdir, "aux", "seg.ctab") + + # Assign the conditional outputs + if self.inputs.save_input: + outputs["input_file"] = os.path.join(pvcdir, "input.nii.gz") + if self.inputs.save_yhat0: + outputs["yhat0"] = os.path.join(pvcdir, "yhat0.nii.gz") + if self.inputs.save_yhat: + outputs["yhat"] = os.path.join(pvcdir, "yhat.nii.gz") + if self.inputs.save_yhat_full_fov: + outputs["yhat_full_fov"] = os.path.join(pvcdir, "yhat.fullfov.nii.gz") + if self.inputs.save_yhat_with_noise: + outputs["yhat_with_noise"] = os.path.join(pvcdir, "yhat.nii.gz") + if self.inputs.mgx: + outputs["mgx_ctxgm"] = os.path.join(pvcdir, "mgx.ctxgm.nii.gz") + outputs["mgx_subctxgm"] = os.path.join(pvcdir, "mgx.subctxgm.nii.gz") + outputs["mgx_gm"] = os.path.join(pvcdir, "mgx.gm.nii.gz") + if self.inputs.rbv: + outputs["rbv"] = os.path.join(pvcdir, "rbv.nii.gz") + outputs["reg_rbvpet2anat"] = os.path.join(pvcdir, "aux", "rbv2anat.lta") + outputs["reg_anat2rbvpet"] = os.path.join(pvcdir, "aux", "anat2rbv.lta") + if self.inputs.optimization_schema: + outputs["opt_params"] = os.path.join(pvcdir, "aux", "opt.params.dat") + + return outputs + + +class MRTM1InputSpec(GLMFitInputSpec): + mrtm1 = Tuple( + File(exists=True), + File(exists=True), + mandatory=True, + argstr="--mrtm1 %s %s", + desc="RefTac TimeSec : perform MRTM1 kinetic modeling", + ) + + +class MRTM1(GLMFit): + """Perform MRTM1 kinetic modeling. + + Examples + -------- + >>> mrtm = MRTM1() + >>> mrtm.inputs.in_file = 'tac.nii' + >>> mrtm.inputs.mrtm1 = ('ref_tac.dat', 'timing.dat') + >>> mrtm.inputs.glm_dir = 'mrtm' + >>> mrtm.cmdline + 'mri_glmfit --glmdir mrtm --y tac.nii --mrtm1 ref_tac.dat timing.dat' + """ + + input_spec = MRTM1InputSpec + + +class MRTM2InputSpec(GLMFitInputSpec): + mrtm2 = Tuple( + File(exists=True), + File(exists=True), + traits.Float, + mandatory=True, + argstr="--mrtm2 %s %s %f", + desc="RefTac TimeSec k2prime : perform MRTM2 kinetic modeling", + ) + + +class MRTM2(GLMFit): + """Perform MRTM2 kinetic modeling. + Examples + -------- + >>> mrtm2 = MRTM2() + >>> mrtm2.inputs.in_file = 'tac.nii' + >>> mrtm2.inputs.mrtm2 = ('ref_tac.dat', 'timing.dat', 0.07872) + >>> mrtm2.inputs.glm_dir = 'mrtm2' + >>> mrtm2.cmdline + 'mri_glmfit --glmdir mrtm2 --y tac.nii --mrtm2 ref_tac.dat timing.dat 0.078720' + """ + + input_spec = MRTM2InputSpec + + +class LoganInputSpec(GLMFitInputSpec): + logan = Tuple( + File(exists=True), + File(exists=True), + traits.Float, + mandatory=True, + argstr="--logan %s %s %g", + desc="RefTac TimeSec tstar : perform Logan kinetic modeling", + ) + + +class Logan(GLMFit): + """Perform Logan kinetic modeling. 
+ Examples + -------- + >>> logan = Logan() + >>> logan.inputs.in_file = 'tac.nii' + >>> logan.inputs.logan = ('ref_tac.dat', 'timing.dat', 2600) + >>> logan.inputs.glm_dir = 'logan' + >>> logan.cmdline + 'mri_glmfit --glmdir logan --y tac.nii --logan ref_tac.dat timing.dat 2600' + """ + + input_spec = LoganInputSpec diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index 2941968f85..5b2fd19a0b 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -1,32 +1,38 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by FreeSurfer """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range - import os import os.path as op from glob import glob import shutil +import sys +from looseversion import LooseVersion import numpy as np from nibabel import load -from ... import logging, LooseVersion +from ... import logging from ...utils.filemanip import fname_presuffix, check_depends from ..io import FreeSurferSource -from ..base import (TraitedSpec, File, traits, Directory, InputMultiPath, - OutputMultiPath, CommandLine, CommandLineInputSpec, - isdefined) -from .base import (FSCommand, FSTraitedSpec, FSTraitedSpecOpenMP, - FSCommandOpenMP, Info) +from ..base import ( + TraitedSpec, + File, + traits, + Tuple, + Directory, + InputMultiPath, + OutputMultiPath, + CommandLine, + CommandLineInputSpec, + isdefined, + InputMultiObject, +) +from .base import FSCommand, FSTraitedSpec, FSTraitedSpecOpenMP, FSCommandOpenMP, Info from .utils import copy2subjdir -__docformat__ = 'restructuredtext' -iflogger = logging.getLogger('nipype.interface') +__docformat__ = "restructuredtext" +iflogger = logging.getLogger("nipype.interface") # Keeping this to avoid breaking external programs that depend on it, but # this should not be used internally @@ -36,22 +42,24 @@ class ParseDICOMDirInputSpec(FSTraitedSpec): dicom_dir = Directory( exists=True, - argstr='--d %s', + argstr="--d %s", mandatory=True, - desc='path to siemens dicom directory') + desc="path to siemens dicom directory", + ) dicom_info_file = File( - 'dicominfo.txt', - argstr='--o %s', + "dicominfo.txt", + argstr="--o %s", usedefault=True, - desc='file to which results are written') - sortbyrun = traits.Bool(argstr='--sortbyrun', desc='assign run numbers') + desc="file to which results are written", + ) + sortbyrun = traits.Bool(argstr="--sortbyrun", desc="assign run numbers") summarize = traits.Bool( - argstr='--summarize', desc='only print out info for run leaders') + argstr="--summarize", desc="only print out info for run leaders" + ) class ParseDICOMDirOutputSpec(TraitedSpec): - dicom_info_file = File( - exists=True, desc='text file containing dicom information') + dicom_info_file = File(exists=True, desc="text file containing dicom information") class ParseDICOMDir(FSCommand): @@ -68,68 +76,74 @@ class ParseDICOMDir(FSCommand): >>> dcminfo.cmdline 'mri_parse_sdcmdir --d . 
--o dicominfo.txt --sortbyrun --summarize' - """ + """ - _cmd = 'mri_parse_sdcmdir' + _cmd = "mri_parse_sdcmdir" input_spec = ParseDICOMDirInputSpec output_spec = ParseDICOMDirOutputSpec def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.dicom_info_file): - outputs['dicom_info_file'] = os.path.join( - os.getcwd(), self.inputs.dicom_info_file) + outputs["dicom_info_file"] = os.path.join( + os.getcwd(), self.inputs.dicom_info_file + ) return outputs class UnpackSDICOMDirInputSpec(FSTraitedSpec): source_dir = Directory( exists=True, - argstr='-src %s', + argstr="-src %s", mandatory=True, - desc='directory with the DICOM files') + desc="directory with the DICOM files", + ) output_dir = Directory( - argstr='-targ %s', - desc='top directory into which the files will be unpacked') - run_info = traits.Tuple( + argstr="-targ %s", desc="top directory into which the files will be unpacked" + ) + run_info = Tuple( traits.Int, traits.Str, traits.Str, traits.Str, mandatory=True, - argstr='-run %d %s %s %s', - xor=('run_info', 'config', 'seq_config'), - desc='runno subdir format name : spec unpacking rules on cmdline') + argstr="-run %d %s %s %s", + xor=("run_info", "config", "seq_config"), + desc="runno subdir format name : spec unpacking rules on cmdline", + ) config = File( exists=True, - argstr='-cfg %s', + argstr="-cfg %s", mandatory=True, - xor=('run_info', 'config', 'seq_config'), - desc='specify unpacking rules in file') + xor=("run_info", "config", "seq_config"), + desc="specify unpacking rules in file", + ) seq_config = File( exists=True, - argstr='-seqcfg %s', + argstr="-seqcfg %s", mandatory=True, - xor=('run_info', 'config', 'seq_config'), - desc='specify unpacking rules based on sequence') + xor=("run_info", "config", "seq_config"), + desc="specify unpacking rules based on sequence", + ) dir_structure = traits.Enum( - 'fsfast', - 'generic', - argstr='-%s', - desc='unpack to specified directory structures') - no_info_dump = traits.Bool( - argstr='-noinfodump', desc='do not create infodump file') + "fsfast", + "generic", + argstr="-%s", + desc="unpack to specified directory structures", + ) + no_info_dump = traits.Bool(argstr="-noinfodump", desc="do not create infodump file") scan_only = File( exists=True, - argstr='-scanonly %s', - desc='only scan the directory and put result in file') - log_file = File( - exists=True, argstr='-log %s', desc='explicilty set log file') + argstr="-scanonly %s", + desc="only scan the directory and put result in file", + ) + log_file = File(exists=True, argstr="-log %s", desc="explicitly set log file") spm_zeropad = traits.Int( - argstr='-nspmzeropad %d', - desc='set frame number zero padding width for SPM') + argstr="-nspmzeropad %d", desc="set frame number zero padding width for SPM" + ) no_unpack_err = traits.Bool( - argstr='-no-unpackerr', desc='do not try to unpack runs with errors') + argstr="-no-unpackerr", desc="do not try to unpack runs with errors" + ) class UnpackSDICOMDir(FSCommand): @@ -150,272 +164,344 @@ class UnpackSDICOMDir(FSCommand): >>> unpack.cmdline 'unpacksdcmdir -generic -targ . -run 5 mprage nii struct -src .' 
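+
+    Here ``-run 5 mprage nii struct`` follows the ``run_info`` pattern
+    ``runno subdir format name``: unpack run 5 into the ``mprage``
+    subdirectory as a NIfTI file named ``struct``.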
""" - _cmd = 'unpacksdcmdir' + + _cmd = "unpacksdcmdir" input_spec = UnpackSDICOMDirInputSpec class MRIConvertInputSpec(FSTraitedSpec): - read_only = traits.Bool(argstr='--read_only', desc='read the input volume') - no_write = traits.Bool(argstr='--no_write', desc='do not write output') - in_info = traits.Bool(argstr='--in_info', desc='display input info') - out_info = traits.Bool(argstr='--out_info', desc='display output info') - in_stats = traits.Bool(argstr='--in_stats', desc='display input stats') - out_stats = traits.Bool(argstr='--out_stats', desc='display output stats') - in_matrix = traits.Bool(argstr='--in_matrix', desc='display input matrix') - out_matrix = traits.Bool( - argstr='--out_matrix', desc='display output matrix') - in_i_size = traits.Int(argstr='--in_i_size %d', desc='input i size') - in_j_size = traits.Int(argstr='--in_j_size %d', desc='input j size') - in_k_size = traits.Int(argstr='--in_k_size %d', desc='input k size') + read_only = traits.Bool(argstr="--read_only", desc="read the input volume") + no_write = traits.Bool(argstr="--no_write", desc="do not write output") + in_info = traits.Bool(argstr="--in_info", desc="display input info") + out_info = traits.Bool(argstr="--out_info", desc="display output info") + in_stats = traits.Bool(argstr="--in_stats", desc="display input stats") + out_stats = traits.Bool(argstr="--out_stats", desc="display output stats") + in_matrix = traits.Bool(argstr="--in_matrix", desc="display input matrix") + out_matrix = traits.Bool(argstr="--out_matrix", desc="display output matrix") + in_i_size = traits.Int(argstr="--in_i_size %d", desc="input i size") + in_j_size = traits.Int(argstr="--in_j_size %d", desc="input j size") + in_k_size = traits.Int(argstr="--in_k_size %d", desc="input k size") force_ras = traits.Bool( - argstr='--force_ras_good', - desc='use default when orientation info absent') - in_i_dir = traits.Tuple( + argstr="--force_ras_good", desc="use default when orientation info absent" + ) + in_i_dir = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--in_i_direction %f %f %f', - desc=' ') - in_j_dir = traits.Tuple( + argstr="--in_i_direction %f %f %f", + desc=" ", + ) + in_j_dir = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--in_j_direction %f %f %f', - desc=' ') - in_k_dir = traits.Tuple( + argstr="--in_j_direction %f %f %f", + desc=" ", + ) + in_k_dir = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--in_k_direction %f %f %f', - desc=' ') + argstr="--in_k_direction %f %f %f", + desc=" ", + ) _orientations = [ - 'LAI', 'LIA', 'ALI', 'AIL', 'ILA', 'IAL', 'LAS', 'LSA', 'ALS', 'ASL', - 'SLA', 'SAL', 'LPI', 'LIP', 'PLI', 'PIL', 'ILP', 'IPL', 'LPS', 'LSP', - 'PLS', 'PSL', 'SLP', 'SPL', 'RAI', 'RIA', 'ARI', 'AIR', 'IRA', 'IAR', - 'RAS', 'RSA', 'ARS', 'ASR', 'SRA', 'SAR', 'RPI', 'RIP', 'PRI', 'PIR', - 'IRP', 'IPR', 'RPS', 'RSP', 'PRS', 'PSR', 'SRP', 'SPR' + "LAI", + "LIA", + "ALI", + "AIL", + "ILA", + "IAL", + "LAS", + "LSA", + "ALS", + "ASL", + "SLA", + "SAL", + "LPI", + "LIP", + "PLI", + "PIL", + "ILP", + "IPL", + "LPS", + "LSP", + "PLS", + "PSL", + "SLP", + "SPL", + "RAI", + "RIA", + "ARI", + "AIR", + "IRA", + "IAR", + "RAS", + "RSA", + "ARS", + "ASR", + "SRA", + "SAR", + "RPI", + "RIP", + "PRI", + "PIR", + "IRP", + "IPR", + "RPS", + "RSP", + "PRS", + "PSR", + "SRP", + "SPR", ] # _orientations = [comb for comb in itertools.chain(*[[''.join(c) for c in itertools.permutations(s)] for s in [a+b+c for a in 'LR' for b in 'AP' for c in 'IS']])] in_orientation = traits.Enum( _orientations, - 
argstr='--in_orientation %s', - desc='specify the input orientation') + argstr="--in_orientation %s", + desc="specify the input orientation", + ) in_center = traits.List( traits.Float, maxlen=3, - argstr='--in_center %s', - desc=' ') - sphinx = traits.Bool( - argstr='--sphinx', desc='change orientation info to sphinx') + argstr="--in_center %s", + desc=" ", + ) + sphinx = traits.Bool(argstr="--sphinx", desc="change orientation info to sphinx") out_i_count = traits.Int( - argstr='--out_i_count %d', desc='some count ?? in i direction') + argstr="--out_i_count %d", desc="some count ?? in i direction" + ) out_j_count = traits.Int( - argstr='--out_j_count %d', desc='some count ?? in j direction') + argstr="--out_j_count %d", desc="some count ?? in j direction" + ) out_k_count = traits.Int( - argstr='--out_k_count %d', desc='some count ?? in k direction') - vox_size = traits.Tuple( + argstr="--out_k_count %d", desc="some count ?? in k direction" + ) + vox_size = Tuple( traits.Float, traits.Float, traits.Float, - argstr='-voxsize %f %f %f', - desc= - ' specify the size (mm) - useful for upsampling or downsampling' - ) - out_i_size = traits.Int(argstr='--out_i_size %d', desc='output i size') - out_j_size = traits.Int(argstr='--out_j_size %d', desc='output j size') - out_k_size = traits.Int(argstr='--out_k_size %d', desc='output k size') - out_i_dir = traits.Tuple( + argstr="-voxsize %f %f %f", + desc=" specify the size (mm) - useful for upsampling or downsampling", + ) + out_i_size = traits.Int(argstr="--out_i_size %d", desc="output i size") + out_j_size = traits.Int(argstr="--out_j_size %d", desc="output j size") + out_k_size = traits.Int(argstr="--out_k_size %d", desc="output k size") + out_i_dir = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--out_i_direction %f %f %f', - desc=' ') - out_j_dir = traits.Tuple( + argstr="--out_i_direction %f %f %f", + desc=" ", + ) + out_j_dir = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--out_j_direction %f %f %f', - desc=' ') - out_k_dir = traits.Tuple( + argstr="--out_j_direction %f %f %f", + desc=" ", + ) + out_k_dir = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--out_k_direction %f %f %f', - desc=' ') + argstr="--out_k_direction %f %f %f", + desc=" ", + ) out_orientation = traits.Enum( _orientations, - argstr='--out_orientation %s', - desc='specify the output orientation') - out_center = traits.Tuple( + argstr="--out_orientation %s", + desc="specify the output orientation", + ) + out_center = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--out_center %f %f %f', - desc=' ') + argstr="--out_center %f %f %f", + desc=" ", + ) out_datatype = traits.Enum( - 'uchar', - 'short', - 'int', - 'float', - argstr='--out_data_type %s', - desc='output data type ') + "uchar", + "short", + "int", + "float", + argstr="--out_data_type %s", + desc="output data type ", + ) resample_type = traits.Enum( - 'interpolate', - 'weighted', - 'nearest', - 'sinc', - 'cubic', - argstr='--resample_type %s', - desc= - ' (default is interpolate)') - no_scale = traits.Bool( - argstr='--no_scale 1', desc='dont rescale values for COR') + "interpolate", + "weighted", + "nearest", + "sinc", + "cubic", + argstr="--resample_type %s", + desc=" (default is interpolate)", + ) + no_scale = traits.Bool(argstr="--no_scale 1", desc="dont rescale values for COR") no_change = traits.Bool( - argstr='--nochange', - desc="don't change type of input to that of template") - tr = traits.Int(argstr='-tr %d', desc='TR in msec') - te = traits.Int(argstr='-te 
%d', desc='TE in msec') - ti = traits.Int(argstr='-ti %d', desc='TI in msec (note upper case flag)') + argstr="--nochange", desc="don't change type of input to that of template" + ) + tr = traits.Int(argstr="-tr %d", desc="TR in msec") + te = traits.Int(argstr="-te %d", desc="TE in msec") + ti = traits.Int(argstr="-ti %d", desc="TI in msec (note upper case flag)") autoalign_matrix = File( - exists=True, - argstr='--autoalign %s', - desc='text file with autoalign matrix') + exists=True, argstr="--autoalign %s", desc="text file with autoalign matrix" + ) unwarp_gradient = traits.Bool( - argstr='--unwarp_gradient_nonlinearity', - desc='unwarp gradient nonlinearity') + argstr="--unwarp_gradient_nonlinearity", desc="unwarp gradient nonlinearity" + ) apply_transform = File( - exists=True, argstr='--apply_transform %s', desc='apply xfm file') + exists=True, argstr="--apply_transform %s", desc="apply xfm file" + ) apply_inv_transform = File( exists=True, - argstr='--apply_inverse_transform %s', - desc='apply inverse transformation xfm file') - devolve_transform = traits.Str(argstr='--devolvexfm %s', desc='subject id') - crop_center = traits.Tuple( + argstr="--apply_inverse_transform %s", + desc="apply inverse transformation xfm file", + ) + devolve_transform = traits.Str(argstr="--devolvexfm %s", desc="subject id") + crop_center = Tuple( traits.Int, traits.Int, traits.Int, - argstr='--crop %d %d %d', - desc=' crop to 256 around center (x, y, z)') - crop_size = traits.Tuple( + argstr="--crop %d %d %d", + desc=" crop to 256 around center (x, y, z)", + ) + crop_size = Tuple( traits.Int, traits.Int, traits.Int, - argstr='--cropsize %d %d %d', - desc=' crop to size ') + argstr="--cropsize %d %d %d", + desc=" crop to size ", + ) cut_ends = traits.Int( - argstr='--cutends %d', desc='remove ncut slices from the ends') - slice_crop = traits.Tuple( + argstr="--cutends %d", desc="remove ncut slices from the ends" + ) + slice_crop = Tuple( traits.Int, traits.Int, - argstr='--slice-crop %d %d', - desc='s_start s_end : keep slices s_start to s_end') + argstr="--slice-crop %d %d", + desc="s_start s_end : keep slices s_start to s_end", + ) slice_reverse = traits.Bool( - argstr='--slice-reverse', - desc='reverse order of slices, update vox2ras') + argstr="--slice-reverse", desc="reverse order of slices, update vox2ras" + ) slice_bias = traits.Float( - argstr='--slice-bias %f', desc='apply half-cosine bias field') - fwhm = traits.Float( - argstr='--fwhm %f', desc='smooth input volume by fwhm mm') + argstr="--slice-bias %f", desc="apply half-cosine bias field" + ) + fwhm = traits.Float(argstr="--fwhm %f", desc="smooth input volume by fwhm mm") _filetypes = [ - 'cor', 'mgh', 'mgz', 'minc', 'analyze', 'analyze4d', 'spm', 'afni', - 'brik', 'bshort', 'bfloat', 'sdt', 'outline', 'otl', 'gdf', 'nifti1', - 'nii', 'niigz' - ] - _infiletypes = [ - 'ge', 'gelx', 'lx', 'ximg', 'siemens', 'dicom', 'siemens_dicom' + "cor", + "mgh", + "mgz", + "minc", + "analyze", + "analyze4d", + "spm", + "afni", + "brik", + "bshort", + "bfloat", + "sdt", + "outline", + "otl", + "gdf", + "nifti1", + "nii", + "niigz", ] + _infiletypes = ["ge", "gelx", "lx", "ximg", "siemens", "dicom", "siemens_dicom"] in_type = traits.Enum( - _filetypes + _infiletypes, - argstr='--in_type %s', - desc='input file type') - out_type = traits.Enum( - _filetypes, argstr='--out_type %s', desc='output file type') + _filetypes + _infiletypes, argstr="--in_type %s", desc="input file type" + ) + out_type = traits.Enum(_filetypes, argstr="--out_type %s", desc="output file 
type") ascii = traits.Bool( - argstr='--ascii', desc='save output as ascii col>row>slice>frame') - reorder = traits.Tuple( + argstr="--ascii", desc="save output as ascii col>row>slice>frame" + ) + reorder = Tuple( traits.Int, traits.Int, traits.Int, - argstr='--reorder %d %d %d', - desc='olddim1 olddim2 olddim3') + argstr="--reorder %d %d %d", + desc="olddim1 olddim2 olddim3", + ) invert_contrast = traits.Float( - argstr='--invert_contrast %f', - desc='threshold for inversting contrast') + argstr="--invert_contrast %f", desc="threshold for inversting contrast" + ) in_file = File( exists=True, mandatory=True, position=-2, - argstr='--input_volume %s', - desc='File to read/convert') + argstr="--input_volume %s", + desc="File to read/convert", + ) out_file = File( - argstr='--output_volume %s', + argstr="--output_volume %s", position=-1, genfile=True, - desc='output filename or True to generate one') + desc="output filename or True to generate one", + ) conform = traits.Bool( - argstr='--conform', - desc= - 'conform to 1mm voxel size in coronal slice direction with 256^3 or more' + argstr="--conform", + desc="conform to 1mm voxel size in coronal slice direction with 256^3 or more", ) - conform_min = traits.Bool( - argstr='--conform_min', desc='conform to smallest size') + conform_min = traits.Bool(argstr="--conform_min", desc="conform to smallest size") conform_size = traits.Float( - argstr='--conform_size %s', desc='conform to size_in_mm') - cw256 = traits.Bool( - argstr='--cw256', desc='confrom to dimensions of 256^3') - parse_only = traits.Bool(argstr='--parse_only', desc='parse input only') - subject_name = traits.Str( - argstr='--subject_name %s', desc='subject name ???') + argstr="--conform_size %s", desc="conform to size_in_mm" + ) + cw256 = traits.Bool(argstr="--cw256", desc="confrom to dimensions of 256^3") + parse_only = traits.Bool(argstr="--parse_only", desc="parse input only") + subject_name = traits.Str(argstr="--subject_name %s", desc="subject name ???") reslice_like = File( - exists=True, - argstr='--reslice_like %s', - desc='reslice output to match file') + exists=True, argstr="--reslice_like %s", desc="reslice output to match file" + ) template_type = traits.Enum( _filetypes + _infiletypes, - argstr='--template_type %s', - desc='template file type') + argstr="--template_type %s", + desc="template file type", + ) split = traits.Bool( - argstr='--split', - desc='split output frames into separate output files.') - frame = traits.Int( - argstr='--frame %d', desc='keep only 0-based frame number') - midframe = traits.Bool( - argstr='--mid-frame', desc='keep only the middle frame') - skip_n = traits.Int(argstr='--nskip %d', desc='skip the first n frames') - drop_n = traits.Int(argstr='--ndrop %d', desc='drop the last n frames') - frame_subsample = traits.Tuple( + argstr="--split", desc="split output frames into separate output files." 
+ ) + frame = traits.Int(argstr="--frame %d", desc="keep only 0-based frame number") + midframe = traits.Bool(argstr="--mid-frame", desc="keep only the middle frame") + skip_n = traits.Int(argstr="--nskip %d", desc="skip the first n frames") + drop_n = traits.Int(argstr="--ndrop %d", desc="drop the last n frames") + frame_subsample = Tuple( traits.Int, traits.Int, traits.Int, - argstr='--fsubsample %d %d %d', - desc='start delta end : frame subsampling (end = -1 for end)') - in_scale = traits.Float( - argstr='--scale %f', desc='input intensity scale factor') + argstr="--fsubsample %d %d %d", + desc="start delta end : frame subsampling (end = -1 for end)", + ) + in_scale = traits.Float(argstr="--scale %f", desc="input intensity scale factor") out_scale = traits.Float( - argstr='--out-scale %d', desc='output intensity scale factor') - in_like = File(exists=True, argstr='--in_like %s', desc='input looks like') + argstr="--out-scale %d", desc="output intensity scale factor" + ) + in_like = File(exists=True, argstr="--in_like %s", desc="input looks like") fill_parcellation = traits.Bool( - argstr='--fill_parcellation', desc='fill parcellation') + argstr="--fill_parcellation", desc="fill parcellation" + ) smooth_parcellation = traits.Bool( - argstr='--smooth_parcellation', desc='smooth parcellation') - zero_outlines = traits.Bool(argstr='--zero_outlines', desc='zero outlines') - color_file = File(exists=True, argstr='--color_file %s', desc='color file') - no_translate = traits.Bool(argstr='--no_translate', desc='???') - status_file = File( - argstr='--status %s', desc='status file for DICOM conversion') + argstr="--smooth_parcellation", desc="smooth parcellation" + ) + zero_outlines = traits.Bool(argstr="--zero_outlines", desc="zero outlines") + color_file = File(exists=True, argstr="--color_file %s", desc="color file") + no_translate = traits.Bool(argstr="--no_translate", desc="???") + status_file = File(argstr="--status %s", desc="status file for DICOM conversion") sdcm_list = File( - exists=True, - argstr='--sdcmlist %s', - desc='list of DICOM files for conversion') + exists=True, argstr="--sdcmlist %s", desc="list of DICOM files for conversion" + ) template_info = traits.Bool( - argstr='--template_info', desc='dump info about template') - crop_gdf = traits.Bool(argstr='--crop_gdf', desc='apply GDF cropping') + argstr="--template_info", desc="dump info about template" + ) + crop_gdf = traits.Bool(argstr="--crop_gdf", desc="apply GDF cropping") zero_ge_z_offset = traits.Bool( - argstr='--zero_ge_z_offset', desc='zero ge z offset ???') + argstr="--zero_ge_z_offset", desc="zero ge z offset ???" 
+ ) class MRIConvertOutputSpec(TraitedSpec): - out_file = OutputMultiPath(File(exists=True), desc='converted output file') + out_file = OutputMultiPath(File(exists=True), desc="converted output file") class MRIConvert(FSCommand): @@ -435,44 +521,44 @@ class MRIConvert(FSCommand): 'mri_convert --out_type mgz --input_volume structural.nii --output_volume outfile.mgz' """ - _cmd = 'mri_convert' + + _cmd = "mri_convert" input_spec = MRIConvertInputSpec output_spec = MRIConvertOutputSpec filemap = dict( - cor='cor', - mgh='mgh', - mgz='mgz', - minc='mnc', - afni='brik', - brik='brik', - bshort='bshort', - spm='img', - analyze='img', - analyze4d='img', - bfloat='bfloat', - nifti1='img', - nii='nii', - niigz='nii.gz') + cor="cor", + mgh="mgh", + mgz="mgz", + minc="mnc", + afni="brik", + brik="brik", + bshort="bshort", + spm="img", + analyze="img", + analyze4d="img", + bfloat="bfloat", + nifti1="img", + nii="nii", + niigz="nii.gz", + ) def _format_arg(self, name, spec, value): - if name in ['in_type', 'out_type', 'template_type']: - if value == 'niigz': - return spec.argstr % 'nii' - return super(MRIConvert, self)._format_arg(name, spec, value) + if name in ["in_type", "out_type", "template_type"]: + if value == "niigz": + return spec.argstr % "nii" + return super()._format_arg(name, spec, value) def _get_outfilename(self): outfile = self.inputs.out_file if not isdefined(outfile): if isdefined(self.inputs.out_type): - suffix = '_out.' + self.filemap[self.inputs.out_type] + suffix = "_out." + self.filemap[self.inputs.out_type] else: - suffix = '_out.nii.gz' + suffix = "_out.nii.gz" outfile = fname_presuffix( - self.inputs.in_file, - newpath=os.getcwd(), - suffix=suffix, - use_ext=False) + self.inputs.in_file, newpath=os.getcwd(), suffix=suffix, use_ext=False + ) return os.path.abspath(outfile) def _list_outputs(self): @@ -484,20 +570,20 @@ def _list_outputs(self): tp = 1 else: tp = size[-1] - if outfile.endswith('.mgz'): - stem = outfile.split('.mgz')[0] - ext = '.mgz' - elif outfile.endswith('.nii.gz'): - stem = outfile.split('.nii.gz')[0] - ext = '.nii.gz' + if outfile.endswith(".mgz"): + stem = outfile.split(".mgz")[0] + ext = ".mgz" + elif outfile.endswith(".nii.gz"): + stem = outfile.split(".nii.gz")[0] + ext = ".nii.gz" else: - stem = '.'.join(outfile.split('.')[:-1]) - ext = '.' + outfile.split('.')[-1] + stem = ".".join(outfile.split(".")[:-1]) + ext = "." 
+ outfile.split(".")[-1] outfile = [] - for idx in range(0, tp): - outfile.append(stem + '%04d' % idx + ext) + for idx in range(tp): + outfile.append(stem + "%04d" % idx + ext) if isdefined(self.inputs.out_type): - if self.inputs.out_type in ['spm', 'analyze']: + if self.inputs.out_type in ["spm", "analyze"]: # generate all outputs size = load(self.inputs.in_file).shape if len(size) == 3: @@ -506,19 +592,18 @@ def _list_outputs(self): tp = size[-1] # have to take care of all the frame manipulations raise Exception( - 'Not taking frame manipulations into account- please warn the developers' + "Not taking frame manipulations into account- please warn the developers" ) - outfiles = [] outfile = self._get_outfilename() - for i in range(tp): - outfiles.append( - fname_presuffix(outfile, suffix='%03d' % (i + 1))) + outfiles = [ + fname_presuffix(outfile, suffix="%03d" % (i + 1)) for i in range(tp) + ] outfile = outfiles - outputs['out_file'] = outfile + outputs["out_file"] = outfile return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._get_outfilename() return None @@ -527,31 +612,36 @@ class DICOMConvertInputSpec(FSTraitedSpec): dicom_dir = Directory( exists=True, mandatory=True, - desc='dicom directory from which to convert dicom files') + desc="dicom directory from which to convert dicom files", + ) base_output_dir = Directory( - mandatory=True, - desc='directory in which subject directories are created') + mandatory=True, desc="directory in which subject directories are created" + ) subject_dir_template = traits.Str( - 'S.%04d', usedefault=True, desc='template for subject directory name') - subject_id = traits.Any(desc='subject identifier to insert into template') + "S.%04d", usedefault=True, desc="template for subject directory name" + ) + subject_id = traits.Any(desc="subject identifier to insert into template") file_mapping = traits.List( - traits.Tuple(traits.Str, traits.Str), - desc='defines the output fields of interface') + Tuple(traits.Str, traits.Str), + desc="defines the output fields of interface", + ) out_type = traits.Enum( - 'niigz', + "niigz", MRIConvertInputSpec._filetypes, usedefault=True, - desc='defines the type of output file produced') + desc="defines the type of output file produced", + ) dicom_info = File( - exists=True, - desc='File containing summary information from mri_parse_sdcmdir') + exists=True, desc="File containing summary information from mri_parse_sdcmdir" + ) seq_list = traits.List( traits.Str, - requires=['dicom_info'], - desc='list of pulse sequence names to be converted.') + requires=["dicom_info"], + desc="list of pulse sequence names to be converted.", + ) ignore_single_slice = traits.Bool( - requires=['dicom_info'], - desc='ignore volumes containing a single slice') + requires=["dicom_info"], desc="ignore volumes containing a single slice" + ) class DICOMConvert(FSCommand): @@ -566,27 +656,27 @@ class DICOMConvert(FSCommand): >>> cvt.inputs.file_mapping = [('nifti', '*.nii'), ('info', 'dicom*.txt'), ('dti', '*dti.bv*')] """ - _cmd = 'mri_convert' + + _cmd = "mri_convert" input_spec = DICOMConvertInputSpec def _get_dicomfiles(self): """validate fsl bet options if set to None ignore """ - return glob( - os.path.abspath(os.path.join(self.inputs.dicom_dir, '*-1.dcm'))) + return glob(os.path.abspath(os.path.join(self.inputs.dicom_dir, "*-1.dcm"))) def _get_outdir(self): """returns output directory""" subjid = self.inputs.subject_id if not isdefined(subjid): path, fname = 
os.path.split(self._get_dicomfiles()[0]) - subjid = int(fname.split('-')[0]) + subjid = int(fname.split("-")[0]) if isdefined(self.inputs.subject_dir_template): subjid = self.inputs.subject_dir_template % subjid basedir = self.inputs.base_output_dir if not isdefined(basedir): - basedir = os.path.abspath('.') + basedir = os.path.abspath(".") outdir = os.path.abspath(os.path.join(basedir, subjid)) return outdir @@ -602,11 +692,11 @@ def _get_runs(self): if self.inputs.seq_list: if self.inputs.ignore_single_slice: if (int(s[8]) > 1) and any( - [s[12].startswith(sn) for sn in self.inputs.seq_list]): + s[12].startswith(sn) for sn in self.inputs.seq_list + ): runs.append(int(s[2])) else: - if any( - [s[12].startswith(sn) for sn in self.inputs.seq_list]): + if any(s[12].startswith(sn) for sn in self.inputs.seq_list): runs.append(int(s[2])) else: runs.append(int(s[2])) @@ -618,11 +708,12 @@ def _get_filelist(self, outdir): for f in self._get_dicomfiles(): head, fname = os.path.split(f) fname, ext = os.path.splitext(fname) - fileparts = fname.split('-') + fileparts = fname.split("-") runno = int(fileparts[1]) out_type = MRIConvert.filemap[self.inputs.out_type] - outfile = os.path.join(outdir, '.'.join( - ('%s-%02d' % (fileparts[0], runno), out_type))) + outfile = os.path.join( + outdir, ".".join(("%s-%02d" % (fileparts[0], runno), out_type)) + ) filemap[runno] = (f, outfile) if self.inputs.dicom_info: files = [filemap[r] for r in self._get_runs()] @@ -632,49 +723,57 @@ def _get_filelist(self, outdir): @property def cmdline(self): - """ `command` plus any arguments (args) + """`command` plus any arguments (args) validates arguments and generates command line""" self._check_mandatory_inputs() outdir = self._get_outdir() cmd = [] if not os.path.exists(outdir): - cmdstr = 'python -c "import os; os.makedirs(\'%s\')"' % outdir + cmdstr = "{} -c \"import os; os.makedirs('{}')\"".format( + op.basename(sys.executable), + outdir, + ) cmd.extend([cmdstr]) - infofile = os.path.join(outdir, 'shortinfo.txt') + infofile = os.path.join(outdir, "shortinfo.txt") if not os.path.exists(infofile): - cmdstr = 'dcmdir-info-mgh %s > %s' % (self.inputs.dicom_dir, - infofile) + cmdstr = f"dcmdir-info-mgh {self.inputs.dicom_dir} > {infofile}" cmd.extend([cmdstr]) files = self._get_filelist(outdir) for infile, outfile in files: if not os.path.exists(outfile): - single_cmd = '%s%s %s %s' % (self._cmd_prefix, self.cmd, - infile, os.path.join(outdir, - outfile)) + single_cmd = "{}{} {} {}".format( + self._cmd_prefix, + self.cmd, + infile, + os.path.join(outdir, outfile), + ) cmd.extend([single_cmd]) - return '; '.join(cmd) + return "; ".join(cmd) class ResampleInputSpec(FSTraitedSpec): in_file = File( exists=True, - argstr='-i %s', + argstr="-i %s", mandatory=True, - desc='file to resample', - position=-2) + desc="file to resample", + position=-2, + ) resampled_file = File( - argstr='-o %s', desc='output filename', genfile=True, position=-1) - voxel_size = traits.Tuple( + argstr="-o %s", desc="output filename", genfile=True, position=-1 + ) + voxel_size = Tuple( traits.Float, traits.Float, traits.Float, - argstr='-vs %.2f %.2f %.2f', - desc='triplet of output voxel sizes', - mandatory=True) + argstr="-vs %.2f %.2f %.2f", + desc="triplet of output voxel sizes", + mandatory=True, + ) class ResampleOutputSpec(TraitedSpec): - resampled_file = File(exists=True, desc='output filename') + resampled_file = File(exists=True, desc="output filename") class Resample(FSCommand): @@ -693,7 +792,7 @@ class Resample(FSCommand): """ - _cmd = 
'mri_convert' + _cmd = "mri_convert" input_spec = ResampleInputSpec output_spec = ResampleOutputSpec @@ -702,179 +801,254 @@ def _get_outfilename(self): outfile = self.inputs.resampled_file else: outfile = fname_presuffix( - self.inputs.in_file, newpath=os.getcwd(), suffix='_resample') + self.inputs.in_file, newpath=os.getcwd(), suffix="_resample" + ) return outfile def _list_outputs(self): outputs = self.output_spec().get() - outputs['resampled_file'] = self._get_outfilename() + outputs["resampled_file"] = self._get_outfilename() return outputs def _gen_filename(self, name): - if name == 'resampled_file': + if name == "resampled_file": return self._get_outfilename() return None class ReconAllInputSpec(CommandLineInputSpec): subject_id = traits.Str( - "recon_all", argstr='-subjid %s', desc='subject name', usedefault=True) + "recon_all", + argstr="-subjid %s", + desc="subject name", + xor=["base_template_id", "longitudinal_timepoint_id"], + ) directive = traits.Enum( - 'all', - 'autorecon1', + "all", + "autorecon1", # autorecon2 variants - 'autorecon2', - 'autorecon2-volonly', - 'autorecon2-perhemi', - 'autorecon2-inflate1', - 'autorecon2-cp', - 'autorecon2-wm', + "autorecon2", + "autorecon2-volonly", + "autorecon2-perhemi", + "autorecon2-inflate1", + "autorecon2-cp", + "autorecon2-wm", # autorecon3 variants - 'autorecon3', - 'autorecon3-T2pial', + "autorecon3", + "autorecon3-T2pial", # Mix of autorecon2 and autorecon3 steps - 'autorecon-pial', - 'autorecon-hemi', + "autorecon-pial", + "autorecon-hemi", # Not "multi-stage flags" - 'localGI', - 'qcache', - argstr='-%s', - desc='process directive', + "localGI", + "qcache", + argstr="-%s", + desc="process directive", usedefault=True, - position=0) + position=0, + ) hemi = traits.Enum( - 'lh', 'rh', desc='hemisphere to process', argstr="-hemi %s") + "lh", + "rh", + desc="hemisphere to process", + argstr="-hemi %s", + requires=["subject_id"], + ) T1_files = InputMultiPath( File(exists=True), - argstr='-i %s...', - desc='name of T1 file to process') + argstr="-i %s...", + desc="name of T1 file to process", + requires=["subject_id"], + ) T2_file = File( exists=True, argstr="-T2 %s", - min_ver='5.3.0', - desc='Convert T2 image to orig directory') + min_ver="5.3.0", + desc="Convert T2 image to orig directory", + requires=["subject_id"], + ) FLAIR_file = File( exists=True, argstr="-FLAIR %s", - min_ver='5.3.0', - desc='Convert FLAIR image to orig directory') + min_ver="5.3.0", + desc="Convert FLAIR image to orig directory", + requires=["subject_id"], + ) use_T2 = traits.Bool( argstr="-T2pial", - min_ver='5.3.0', - xor=['use_FLAIR'], - desc='Use T2 image to refine the pial surface') + min_ver="5.3.0", + xor=["use_FLAIR"], + desc="Use T2 image to refine the pial surface", + ) use_FLAIR = traits.Bool( argstr="-FLAIRpial", - min_ver='5.3.0', - xor=['use_T2'], - desc='Use FLAIR image to refine the pial surface') + min_ver="5.3.0", + xor=["use_T2"], + desc="Use FLAIR image to refine the pial surface", + ) openmp = traits.Int( - argstr="-openmp %d", desc="Number of processors to use in parallel") - parallel = traits.Bool( - argstr="-parallel", desc="Enable parallel execution") + argstr="-openmp %d", desc="Number of processors to use in parallel" + ) + parallel = traits.Bool(argstr="-parallel", desc="Enable parallel execution") hires = traits.Bool( argstr="-hires", - min_ver='6.0.0', - desc="Conform to minimum voxel size (for voxels < 1mm)") + min_ver="6.0.0", + desc="Conform to minimum voxel size (for voxels < 1mm)", + ) mprage = traits.Bool( - 
argstr='-mprage', - desc=('Assume scan parameters are MGH MP-RAGE ' - 'protocol, which produces darker gray matter')) + argstr="-mprage", + desc=( + "Assume scan parameters are MGH MP-RAGE " + "protocol, which produces darker gray matter" + ), + requires=["subject_id"], + ) big_ventricles = traits.Bool( - argstr='-bigventricles', - desc=('For use in subjects with enlarged ' - 'ventricles')) + argstr="-bigventricles", + desc=("For use in subjects with enlarged ventricles"), + ) brainstem = traits.Bool( - argstr='-brainstem-structures', desc='Segment brainstem structures') + argstr="-brainstem-structures", + desc="Segment brainstem structures", + requires=["subject_id"], + ) hippocampal_subfields_T1 = traits.Bool( - argstr='-hippocampal-subfields-T1', - min_ver='6.0.0', - desc='segment hippocampal subfields using input T1 scan') - hippocampal_subfields_T2 = traits.Tuple( + argstr="-hippocampal-subfields-T1", + min_ver="6.0.0", + desc="segment hippocampal subfields using input T1 scan", + requires=["subject_id"], + ) + hippocampal_subfields_T2 = Tuple( File(exists=True), traits.Str(), - argstr='-hippocampal-subfields-T2 %s %s', - min_ver='6.0.0', - desc=('segment hippocampal subfields using T2 scan, identified by ' - 'ID (may be combined with hippocampal_subfields_T1)')) + argstr="-hippocampal-subfields-T2 %s %s", + min_ver="6.0.0", + desc=( + "segment hippocampal subfields using T2 scan, identified by " + "ID (may be combined with hippocampal_subfields_T1)" + ), + requires=["subject_id"], + ) expert = File( - exists=True, - argstr='-expert %s', - desc="Set parameters using expert file") + exists=True, argstr="-expert %s", desc="Set parameters using expert file" + ) xopts = traits.Enum( "use", "clean", "overwrite", - argstr='-xopts-%s', - desc="Use, delete or overwrite existing expert options file") + argstr="-xopts-%s", + desc="Use, delete or overwrite existing expert options file", + ) subjects_dir = Directory( exists=True, - argstr='-sd %s', + argstr="-sd %s", hash_files=False, - desc='path to subjects directory', - genfile=True) - flags = InputMultiPath( - traits.Str, argstr='%s', desc='additional parameters') + desc="path to subjects directory", + genfile=True, + ) + flags = InputMultiPath(traits.Str, argstr="%s", desc="additional parameters") + + # Longitudinal runs + base_template_id = traits.Str( + argstr="-base %s", + desc="base template id", + xor=["subject_id", "longitudinal_timepoint_id"], + requires=["base_timepoint_ids"], + ) + base_timepoint_ids = InputMultiObject( + traits.Str(), + argstr="-base-tp %s...", + desc="processed timepoint to use in template", + ) + longitudinal_timepoint_id = traits.Str( + argstr="-long %s", + desc="longitudinal session/timepoint id", + xor=["subject_id", "base_template_id"], + requires=["longitudinal_template_id"], + position=1, + ) + longitudinal_template_id = traits.Str( + argstr="%s", desc="longitudinal base template id", position=2 + ) # Expert options - talairach = traits.Str( - desc="Flags to pass to talairach commands", xor=['expert']) + talairach = traits.Str(desc="Flags to pass to talairach commands", xor=["expert"]) mri_normalize = traits.Str( - desc="Flags to pass to mri_normalize commands", xor=['expert']) + desc="Flags to pass to mri_normalize commands", xor=["expert"] + ) mri_watershed = traits.Str( - desc="Flags to pass to mri_watershed commands", xor=['expert']) + desc="Flags to pass to mri_watershed commands", xor=["expert"] + ) mri_em_register = traits.Str( - desc="Flags to pass to mri_em_register commands", xor=['expert']) + 
desc="Flags to pass to mri_em_register commands", xor=["expert"] + ) mri_ca_normalize = traits.Str( - desc="Flags to pass to mri_ca_normalize commands", xor=['expert']) + desc="Flags to pass to mri_ca_normalize commands", xor=["expert"] + ) mri_ca_register = traits.Str( - desc="Flags to pass to mri_ca_register commands", xor=['expert']) + desc="Flags to pass to mri_ca_register commands", xor=["expert"] + ) mri_remove_neck = traits.Str( - desc="Flags to pass to mri_remove_neck commands", xor=['expert']) + desc="Flags to pass to mri_remove_neck commands", xor=["expert"] + ) mri_ca_label = traits.Str( - desc="Flags to pass to mri_ca_label commands", xor=['expert']) + desc="Flags to pass to mri_ca_label commands", xor=["expert"] + ) mri_segstats = traits.Str( - desc="Flags to pass to mri_segstats commands", xor=['expert']) - mri_mask = traits.Str( - desc="Flags to pass to mri_mask commands", xor=['expert']) + desc="Flags to pass to mri_segstats commands", xor=["expert"] + ) + mri_mask = traits.Str(desc="Flags to pass to mri_mask commands", xor=["expert"]) mri_segment = traits.Str( - desc="Flags to pass to mri_segment commands", xor=['expert']) + desc="Flags to pass to mri_segment commands", xor=["expert"] + ) mri_edit_wm_with_aseg = traits.Str( - desc="Flags to pass to mri_edit_wm_with_aseg commands", xor=['expert']) + desc="Flags to pass to mri_edit_wm_with_aseg commands", xor=["expert"] + ) mri_pretess = traits.Str( - desc="Flags to pass to mri_pretess commands", xor=['expert']) - mri_fill = traits.Str( - desc="Flags to pass to mri_fill commands", xor=['expert']) + desc="Flags to pass to mri_pretess commands", xor=["expert"] + ) + mri_fill = traits.Str(desc="Flags to pass to mri_fill commands", xor=["expert"]) mri_tessellate = traits.Str( - desc="Flags to pass to mri_tessellate commands", xor=['expert']) + desc="Flags to pass to mri_tessellate commands", xor=["expert"] + ) mris_smooth = traits.Str( - desc="Flags to pass to mri_smooth commands", xor=['expert']) + desc="Flags to pass to mri_smooth commands", xor=["expert"] + ) mris_inflate = traits.Str( - desc="Flags to pass to mri_inflate commands", xor=['expert']) + desc="Flags to pass to mri_inflate commands", xor=["expert"] + ) mris_sphere = traits.Str( - desc="Flags to pass to mris_sphere commands", xor=['expert']) + desc="Flags to pass to mris_sphere commands", xor=["expert"] + ) mris_fix_topology = traits.Str( - desc="Flags to pass to mris_fix_topology commands", xor=['expert']) + desc="Flags to pass to mris_fix_topology commands", xor=["expert"] + ) mris_make_surfaces = traits.Str( - desc="Flags to pass to mris_make_surfaces commands", xor=['expert']) + desc="Flags to pass to mris_make_surfaces commands", xor=["expert"] + ) mris_surf2vol = traits.Str( - desc="Flags to pass to mris_surf2vol commands", xor=['expert']) + desc="Flags to pass to mris_surf2vol commands", xor=["expert"] + ) mris_register = traits.Str( - desc="Flags to pass to mris_register commands", xor=['expert']) + desc="Flags to pass to mris_register commands", xor=["expert"] + ) mrisp_paint = traits.Str( - desc="Flags to pass to mrisp_paint commands", xor=['expert']) + desc="Flags to pass to mrisp_paint commands", xor=["expert"] + ) mris_ca_label = traits.Str( - desc="Flags to pass to mris_ca_label commands", xor=['expert']) + desc="Flags to pass to mris_ca_label commands", xor=["expert"] + ) mris_anatomical_stats = traits.Str( - desc="Flags to pass to mris_anatomical_stats commands", xor=['expert']) + desc="Flags to pass to mris_anatomical_stats commands", 
xor=["expert"] + ) mri_aparc2aseg = traits.Str( - desc="Flags to pass to mri_aparc2aseg commands", xor=['expert']) + desc="Flags to pass to mri_aparc2aseg commands", xor=["expert"] + ) class ReconAllOutputSpec(FreeSurferSource.output_spec): - subjects_dir = Directory( - exists=True, desc='Freesurfer subjects directory.') - subject_id = traits.Str(desc='Subject name for whom to retrieve data') + subjects_dir = Directory(exists=True, desc="Freesurfer subjects directory.") + subject_id = traits.Str(desc="Subject name for whom to retrieve data") class ReconAll(CommandLine): @@ -889,7 +1063,7 @@ class ReconAll(CommandLine): >>> reconall.inputs.subject_id = 'foo' >>> reconall.inputs.directive = 'all' >>> reconall.inputs.subjects_dir = '.' - >>> reconall.inputs.T1_files = 'structural.nii' + >>> reconall.inputs.T1_files = ['structural.nii'] >>> reconall.cmdline 'recon-all -all -i structural.nii -subjid foo -sd .' >>> reconall.inputs.flags = "-qcache" @@ -919,7 +1093,7 @@ class ReconAll(CommandLine): >>> reconall_subfields.inputs.subject_id = 'foo' >>> reconall_subfields.inputs.directive = 'all' >>> reconall_subfields.inputs.subjects_dir = '.' - >>> reconall_subfields.inputs.T1_files = 'structural.nii' + >>> reconall_subfields.inputs.T1_files = ['structural.nii'] >>> reconall_subfields.inputs.hippocampal_subfields_T1 = True >>> reconall_subfields.cmdline 'recon-all -all -i structural.nii -hippocampal-subfields-T1 -subjid foo -sd .' @@ -930,10 +1104,28 @@ class ReconAll(CommandLine): >>> reconall_subfields.inputs.hippocampal_subfields_T1 = False >>> reconall_subfields.cmdline 'recon-all -all -i structural.nii -hippocampal-subfields-T2 structural.nii test -subjid foo -sd .' + + Base template creation for longitudinal pipeline: + >>> baserecon = ReconAll() + >>> baserecon.inputs.base_template_id = 'sub-template' + >>> baserecon.inputs.base_timepoint_ids = ['ses-1','ses-2'] + >>> baserecon.inputs.directive = 'all' + >>> baserecon.inputs.subjects_dir = '.' + >>> baserecon.cmdline + 'recon-all -all -base sub-template -base-tp ses-1 -base-tp ses-2 -sd .' + + Longitudinal timepoint run: + >>> longrecon = ReconAll() + >>> longrecon.inputs.longitudinal_timepoint_id = 'ses-1' + >>> longrecon.inputs.longitudinal_template_id = 'sub-template' + >>> longrecon.inputs.directive = 'all' + >>> longrecon.inputs.subjects_dir = '.' + >>> longrecon.cmdline + 'recon-all -all -long ses-1 sub-template -sd .' 
""" - _cmd = 'recon-all' - _additional_metadata = ['loc', 'altkey'] + _cmd = "recon-all" + _additional_metadata = ["loc", "altkey"] input_spec = ReconAllInputSpec output_spec = ReconAllOutputSpec _can_resume = True @@ -951,215 +1143,429 @@ class ReconAll(CommandLine): # # [0] https://surfer.nmr.mgh.harvard.edu/fswiki/ReconAllTableStableV5.3 # [1] https://surfer.nmr.mgh.harvard.edu/fswiki/ReconAllTableStableV6.0 + # [2] https://surfer.nmr.mgh.harvard.edu/fswiki/ReconAllTableStableV6.0#ReconAllTableStable7.1.1 _autorecon1_steps = [ - ('motioncor', ['mri/rawavg.mgz', 'mri/orig.mgz'], []), + ("motioncor", ["mri/rawavg.mgz", "mri/orig.mgz"], []), ( - 'talairach', + "talairach", [ - 'mri/orig_nu.mgz', - 'mri/transforms/talairach.auto.xfm', - 'mri/transforms/talairach.xfm', + "mri/orig_nu.mgz", + "mri/transforms/talairach.auto.xfm", + "mri/transforms/talairach.xfm", # 'mri/transforms/talairach_avi.log', ], - []), - ('nuintensitycor', ['mri/nu.mgz'], []), - ('normalization', ['mri/T1.mgz'], []), - ('skullstrip', [ - 'mri/transforms/talairach_with_skull.lta', - 'mri/brainmask.auto.mgz', 'mri/brainmask.mgz' - ], []), + [], + ), + ("nuintensitycor", ["mri/nu.mgz"], []), + ("normalization", ["mri/T1.mgz"], []), + ( + "skullstrip", + [ + "mri/transforms/talairach_with_skull.lta", + "mri/brainmask.auto.mgz", + "mri/brainmask.mgz", + ], + [], + ), ] if Info.looseversion() < LooseVersion("6.0.0"): _autorecon2_volonly_steps = [ - ('gcareg', ['mri/transforms/talairach.lta'], []), - ('canorm', ['mri/norm.mgz'], []), - ('careg', ['mri/transforms/talairach.m3z'], []), - ('careginv', [ - 'mri/transforms/talairach.m3z.inv.x.mgz', - 'mri/transforms/talairach.m3z.inv.y.mgz', - 'mri/transforms/talairach.m3z.inv.z.mgz', - ], []), - ('rmneck', ['mri/nu_noneck.mgz'], []), - ('skull-lta', ['mri/transforms/talairach_with_skull_2.lta'], []), - ('calabel', [ - 'mri/aseg.auto_noCCseg.mgz', 'mri/aseg.auto.mgz', - 'mri/aseg.mgz' - ], []), - ('normalization2', ['mri/brain.mgz'], []), - ('maskbfs', ['mri/brain.finalsurfs.mgz'], []), - ('segmentation', - ['mri/wm.seg.mgz', 'mri/wm.asegedit.mgz', 'mri/wm.mgz'], []), + ("gcareg", ["mri/transforms/talairach.lta"], []), + ("canorm", ["mri/norm.mgz"], []), + ("careg", ["mri/transforms/talairach.m3z"], []), + ( + "careginv", + [ + "mri/transforms/talairach.m3z.inv.x.mgz", + "mri/transforms/talairach.m3z.inv.y.mgz", + "mri/transforms/talairach.m3z.inv.z.mgz", + ], + [], + ), + ("rmneck", ["mri/nu_noneck.mgz"], []), + ("skull-lta", ["mri/transforms/talairach_with_skull_2.lta"], []), ( - 'fill', + "calabel", + ["mri/aseg.auto_noCCseg.mgz", "mri/aseg.auto.mgz", "mri/aseg.mgz"], + [], + ), + ("normalization2", ["mri/brain.mgz"], []), + ("maskbfs", ["mri/brain.finalsurfs.mgz"], []), + ( + "segmentation", + ["mri/wm.seg.mgz", "mri/wm.asegedit.mgz", "mri/wm.mgz"], + [], + ), + ( + "fill", [ - 'mri/filled.mgz', + "mri/filled.mgz", # 'scripts/ponscc.cut.log', ], - []), + [], + ), ] _autorecon2_lh_steps = [ - ('tessellate', ['surf/lh.orig.nofix'], []), - ('smooth1', ['surf/lh.smoothwm.nofix'], []), - ('inflate1', ['surf/lh.inflated.nofix'], []), - ('qsphere', ['surf/lh.qsphere.nofix'], []), - ('fix', ['surf/lh.orig'], []), - ('white', [ - 'surf/lh.white', 'surf/lh.curv', 'surf/lh.area', - 'label/lh.cortex.label' - ], []), - ('smooth2', ['surf/lh.smoothwm'], []), - ('inflate2', [ - 'surf/lh.inflated', 'surf/lh.sulc', 'surf/lh.inflated.H', - 'surf/lh.inflated.K' - ], []), + ("tessellate", ["surf/lh.orig.nofix"], []), + ("smooth1", ["surf/lh.smoothwm.nofix"], []), + ("inflate1", 
["surf/lh.inflated.nofix"], []), + ("qsphere", ["surf/lh.qsphere.nofix"], []), + ("fix", ["surf/lh.orig"], []), + ( + "white", + [ + "surf/lh.white", + "surf/lh.curv", + "surf/lh.area", + "label/lh.cortex.label", + ], + [], + ), + ("smooth2", ["surf/lh.smoothwm"], []), + ( + "inflate2", + [ + "surf/lh.inflated", + "surf/lh.sulc", + "surf/lh.inflated.H", + "surf/lh.inflated.K", + ], + [], + ), # Undocumented in ReconAllTableStableV5.3 - ('curvstats', ['stats/lh.curv.stats'], []), + ("curvstats", ["stats/lh.curv.stats"], []), ] _autorecon3_lh_steps = [ - ('sphere', ['surf/lh.sphere'], []), - ('surfreg', ['surf/lh.sphere.reg'], []), - ('jacobian_white', ['surf/lh.jacobian_white'], []), - ('avgcurv', ['surf/lh.avg_curv'], []), - ('cortparc', ['label/lh.aparc.annot'], []), - ('pial', [ - 'surf/lh.pial', 'surf/lh.curv.pial', 'surf/lh.area.pial', - 'surf/lh.thickness' - ], []), + ("sphere", ["surf/lh.sphere"], []), + ("surfreg", ["surf/lh.sphere.reg"], []), + ("jacobian_white", ["surf/lh.jacobian_white"], []), + ("avgcurv", ["surf/lh.avg_curv"], []), + ("cortparc", ["label/lh.aparc.annot"], []), + ( + "pial", + [ + "surf/lh.pial", + "surf/lh.curv.pial", + "surf/lh.area.pial", + "surf/lh.thickness", + ], + [], + ), # Misnamed outputs in ReconAllTableStableV5.3: ?h.w-c.pct.mgz - ('pctsurfcon', ['surf/lh.w-g.pct.mgh'], []), - ('parcstats', ['stats/lh.aparc.stats'], []), - ('cortparc2', ['label/lh.aparc.a2009s.annot'], []), - ('parcstats2', ['stats/lh.aparc.a2009s.stats'], []), + ("pctsurfcon", ["surf/lh.w-g.pct.mgh"], []), + ("parcstats", ["stats/lh.aparc.stats"], []), + ("cortparc2", ["label/lh.aparc.a2009s.annot"], []), + ("parcstats2", ["stats/lh.aparc.a2009s.stats"], []), # Undocumented in ReconAllTableStableV5.3 - ('cortparc3', ['label/lh.aparc.DKTatlas40.annot'], []), + ("cortparc3", ["label/lh.aparc.DKTatlas40.annot"], []), # Undocumented in ReconAllTableStableV5.3 - ('parcstats3', ['stats/lh.aparc.a2009s.stats'], []), - ('label-exvivo-ec', ['label/lh.entorhinal_exvivo.label'], []), + ("parcstats3", ["stats/lh.aparc.a2009s.stats"], []), + ("label-exvivo-ec", ["label/lh.entorhinal_exvivo.label"], []), ] _autorecon3_added_steps = [ - ('cortribbon', - ['mri/lh.ribbon.mgz', 'mri/rh.ribbon.mgz', 'mri/ribbon.mgz'], []), - ('segstats', ['stats/aseg.stats'], []), - ('aparc2aseg', ['mri/aparc+aseg.mgz', 'mri/aparc.a2009s+aseg.mgz'], - []), - ('wmparc', ['mri/wmparc.mgz', 'stats/wmparc.stats'], []), - ('balabels', ['label/BA.ctab', 'label/BA.thresh.ctab'], []), + ( + "cortribbon", + ["mri/lh.ribbon.mgz", "mri/rh.ribbon.mgz", "mri/ribbon.mgz"], + [], + ), + ("segstats", ["stats/aseg.stats"], []), + ("aparc2aseg", ["mri/aparc+aseg.mgz", "mri/aparc.a2009s+aseg.mgz"], []), + ("wmparc", ["mri/wmparc.mgz", "stats/wmparc.stats"], []), + ("balabels", ["label/BA.ctab", "label/BA.thresh.ctab"], []), + ] + elif Info.looseversion() < LooseVersion("7.0.0"): + _autorecon2_volonly_steps = [ + ("gcareg", ["mri/transforms/talairach.lta"], []), + ("canorm", ["mri/norm.mgz"], []), + ("careg", ["mri/transforms/talairach.m3z"], []), + ( + "calabel", + ["mri/aseg.auto_noCCseg.mgz", "mri/aseg.auto.mgz", "mri/aseg.mgz"], + [], + ), + ("normalization2", ["mri/brain.mgz"], []), + ("maskbfs", ["mri/brain.finalsurfs.mgz"], []), + ( + "segmentation", + ["mri/wm.seg.mgz", "mri/wm.asegedit.mgz", "mri/wm.mgz"], + [], + ), + ( + "fill", + [ + "mri/filled.mgz", + # 'scripts/ponscc.cut.log', + ], + [], + ), + ] + _autorecon2_lh_steps = [ + ("tessellate", ["surf/lh.orig.nofix"], []), + ("smooth1", ["surf/lh.smoothwm.nofix"], []), + 
("inflate1", ["surf/lh.inflated.nofix"], []), + ("qsphere", ["surf/lh.qsphere.nofix"], []), + ("fix", ["surf/lh.orig"], []), + ( + "white", + [ + "surf/lh.white.preaparc", + "surf/lh.curv", + "surf/lh.area", + "label/lh.cortex.label", + ], + [], + ), + ("smooth2", ["surf/lh.smoothwm"], []), + ("inflate2", ["surf/lh.inflated", "surf/lh.sulc"], []), + ( + "curvHK", + [ + "surf/lh.white.H", + "surf/lh.white.K", + "surf/lh.inflated.H", + "surf/lh.inflated.K", + ], + [], + ), + ("curvstats", ["stats/lh.curv.stats"], []), + ] + _autorecon3_lh_steps = [ + ("sphere", ["surf/lh.sphere"], []), + ("surfreg", ["surf/lh.sphere.reg"], []), + ("jacobian_white", ["surf/lh.jacobian_white"], []), + ("avgcurv", ["surf/lh.avg_curv"], []), + ("cortparc", ["label/lh.aparc.annot"], []), + ( + "pial", + [ + "surf/lh.pial", + "surf/lh.curv.pial", + "surf/lh.area.pial", + "surf/lh.thickness", + "surf/lh.white", + ], + [], + ), + ("parcstats", ["stats/lh.aparc.stats"], []), + ("cortparc2", ["label/lh.aparc.a2009s.annot"], []), + ("parcstats2", ["stats/lh.aparc.a2009s.stats"], []), + ("cortparc3", ["label/lh.aparc.DKTatlas.annot"], []), + ("parcstats3", ["stats/lh.aparc.DKTatlas.stats"], []), + ("pctsurfcon", ["surf/lh.w-g.pct.mgh"], []), + ] + _autorecon3_added_steps = [ + ( + "cortribbon", + ["mri/lh.ribbon.mgz", "mri/rh.ribbon.mgz", "mri/ribbon.mgz"], + [], + ), + ("hyporelabel", ["mri/aseg.presurf.hypos.mgz"], []), + ( + "aparc2aseg", + [ + "mri/aparc+aseg.mgz", + "mri/aparc.a2009s+aseg.mgz", + "mri/aparc.DKTatlas+aseg.mgz", + ], + [], + ), + ("apas2aseg", ["mri/aseg.mgz"], ["mri/aparc+aseg.mgz"]), + ("segstats", ["stats/aseg.stats"], []), + ("wmparc", ["mri/wmparc.mgz", "stats/wmparc.stats"], []), + # Note that this is a very incomplete list; however the ctab + # files are last to be touched, so this should be reasonable + ( + "balabels", + [ + "label/BA_exvivo.ctab", + "label/BA_exvivo.thresh.ctab", + "label/lh.entorhinal_exvivo.label", + "label/rh.entorhinal_exvivo.label", + ], + [], + ), ] else: _autorecon2_volonly_steps = [ - ('gcareg', ['mri/transforms/talairach.lta'], []), - ('canorm', ['mri/norm.mgz'], []), - ('careg', ['mri/transforms/talairach.m3z'], []), - ('calabel', [ - 'mri/aseg.auto_noCCseg.mgz', 'mri/aseg.auto.mgz', - 'mri/aseg.mgz' - ], []), - ('normalization2', ['mri/brain.mgz'], []), - ('maskbfs', ['mri/brain.finalsurfs.mgz'], []), - ('segmentation', - ['mri/wm.seg.mgz', 'mri/wm.asegedit.mgz', 'mri/wm.mgz'], []), + ("gcareg", ["mri/transforms/talairach.lta"], []), + ("canorm", ["mri/norm.mgz"], []), + ("careg", ["mri/transforms/talairach.m3z"], []), ( - 'fill', + "calabel", [ - 'mri/filled.mgz', + "mri/aseg.auto_noCCseg.mgz", + "mri/aseg.auto.mgz", + "mri/aseg.presurf.mgz", + ], + [], + ), + ("normalization2", ["mri/brain.mgz"], []), + ("maskbfs", ["mri/brain.finalsurfs.mgz"], []), + ( + "segmentation", + ["mri/wm.seg.mgz", "mri/wm.asegedit.mgz", "mri/wm.mgz"], + [], + ), + ( + "fill", + [ + "mri/filled.mgz", # 'scripts/ponscc.cut.log', ], - []), + [], + ), ] _autorecon2_lh_steps = [ - ('tessellate', ['surf/lh.orig.nofix'], []), - ('smooth1', ['surf/lh.smoothwm.nofix'], []), - ('inflate1', ['surf/lh.inflated.nofix'], []), - ('qsphere', ['surf/lh.qsphere.nofix'], []), - ('fix', ['surf/lh.orig'], []), - ('white', [ - 'surf/lh.white.preaparc', 'surf/lh.curv', 'surf/lh.area', - 'label/lh.cortex.label' - ], []), - ('smooth2', ['surf/lh.smoothwm'], []), - ('inflate2', ['surf/lh.inflated', 'surf/lh.sulc'], []), - ('curvHK', [ - 'surf/lh.white.H', 'surf/lh.white.K', 'surf/lh.inflated.H', - 
'surf/lh.inflated.K' - ], []), - ('curvstats', ['stats/lh.curv.stats'], []), + ("tessellate", ["surf/lh.orig.nofix"], []), + ("smooth1", ["surf/lh.smoothwm.nofix"], []), + ("inflate1", ["surf/lh.inflated.nofix"], []), + ("qsphere", ["surf/lh.qsphere.nofix"], []), + ("fix", ["surf/lh.inflated", "surf/lh.orig"], []), + ( + "white", + [ + "surf/lh.white.preaparc", + "surf/lh.curv", + "surf/lh.area", + "label/lh.cortex.label", + ], + [], + ), + ("smooth2", ["surf/lh.smoothwm"], []), + ("inflate2", ["surf/lh.inflated", "surf/lh.sulc"], []), + ( + "curvHK", + [ + "surf/lh.white.H", + "surf/lh.white.K", + "surf/lh.inflated.H", + "surf/lh.inflated.K", + ], + [], + ), + ("curvstats", ["stats/lh.curv.stats"], []), ] _autorecon3_lh_steps = [ - ('sphere', ['surf/lh.sphere'], []), - ('surfreg', ['surf/lh.sphere.reg'], []), - ('jacobian_white', ['surf/lh.jacobian_white'], []), - ('avgcurv', ['surf/lh.avg_curv'], []), - ('cortparc', ['label/lh.aparc.annot'], []), - ('pial', [ - 'surf/lh.pial', 'surf/lh.curv.pial', 'surf/lh.area.pial', - 'surf/lh.thickness', 'surf/lh.white' - ], []), - ('parcstats', ['stats/lh.aparc.stats'], []), - ('cortparc2', ['label/lh.aparc.a2009s.annot'], []), - ('parcstats2', ['stats/lh.aparc.a2009s.stats'], []), - ('cortparc3', ['label/lh.aparc.DKTatlas.annot'], []), - ('parcstats3', ['stats/lh.aparc.DKTatlas.stats'], []), - ('pctsurfcon', ['surf/lh.w-g.pct.mgh'], []), + ("sphere", ["surf/lh.sphere"], []), + ("surfreg", ["surf/lh.sphere.reg"], []), + ("jacobian_white", ["surf/lh.jacobian_white"], []), + ("avgcurv", ["surf/lh.avg_curv"], []), + ("cortparc", ["label/lh.aparc.annot"], []), + ( + "pial", + [ + "surf/lh.pial", + "surf/lh.curv.pial", + "surf/lh.area.pial", + "surf/lh.thickness", + "surf/lh.white", + ], + [], + ), + ("parcstats", ["stats/lh.aparc.stats"], []), + ("cortparc2", ["label/lh.aparc.a2009s.annot"], []), + ("parcstats2", ["stats/lh.aparc.a2009s.stats"], []), + ("cortparc3", ["label/lh.aparc.DKTatlas.annot"], []), + ("parcstats3", ["stats/lh.aparc.DKTatlas.stats"], []), + ("pctsurfcon", ["surf/lh.w-g.pct.mgh", "stats/lh.w-g.pct.stats"], []), ] _autorecon3_added_steps = [ - ('cortribbon', - ['mri/lh.ribbon.mgz', 'mri/rh.ribbon.mgz', 'mri/ribbon.mgz'], []), - ('hyporelabel', ['mri/aseg.presurf.hypos.mgz'], []), - ('aparc2aseg', [ - 'mri/aparc+aseg.mgz', 'mri/aparc.a2009s+aseg.mgz', - 'mri/aparc.DKTatlas+aseg.mgz' - ], []), - ('apas2aseg', ['mri/aseg.mgz'], ['mri/aparc+aseg.mgz']), - ('segstats', ['stats/aseg.stats'], []), - ('wmparc', ['mri/wmparc.mgz', 'stats/wmparc.stats'], []), + ( + "cortribbon", + ["mri/lh.ribbon.mgz", "mri/rh.ribbon.mgz", "mri/ribbon.mgz"], + [], + ), + ("hyporelabel", ["mri/aseg.presurf.hypos.mgz"], []), + ( + "aparc2aseg", + [ + "mri/aparc+aseg.mgz", + "mri/aparc.a2009s+aseg.mgz", + "mri/aparc.DKTatlas+aseg.mgz", + ], + [], + ), + ("apas2aseg", ["mri/aseg.mgz"], ["mri/aparc+aseg.mgz"]), + ("segstats", ["stats/aseg.stats"], []), + ("wmparc", ["mri/wmparc.mgz", "stats/wmparc.stats"], []), # Note that this is a very incomplete list; however the ctab # files are last to be touched, so this should be reasonable - ('balabels', [ - 'label/BA_exvivo.ctab', 'label/BA_exvivo.thresh.ctab', - 'label/lh.entorhinal_exvivo.label', - 'label/rh.entorhinal_exvivo.label' - ], []), + ( + "balabels", + [ + "label/BA_exvivo.ctab", + "label/BA_exvivo.thresh.ctab", + "label/lh.entorhinal_exvivo.label", + "label/rh.entorhinal_exvivo.label", + "label/lh.perirhinal_exvivo.label", + "label/rh.perirhinal_exvivo.label", + ], + [], + ), ] # Fill out autorecon2 steps - 
_autorecon2_rh_steps = [(step, [out.replace('lh', 'rh') - for out in outs], ins) - for step, outs, ins in _autorecon2_lh_steps] - _autorecon2_perhemi_steps = [(step, [ - of for out in outs for of in (out, out.replace('lh', 'rh')) - ], ins) for step, outs, ins in _autorecon2_lh_steps] + _autorecon2_rh_steps = [ + (step, [out.replace("lh", "rh") for out in outs], ins) + for step, outs, ins in _autorecon2_lh_steps + ] + _autorecon2_perhemi_steps = [ + (step, [of for out in outs for of in (out, out.replace("lh", "rh"))], ins) + for step, outs, ins in _autorecon2_lh_steps + ] _autorecon2_steps = _autorecon2_volonly_steps + _autorecon2_perhemi_steps # Fill out autorecon3 steps - _autorecon3_rh_steps = [(step, [out.replace('lh', 'rh') - for out in outs], ins) - for step, outs, ins in _autorecon3_lh_steps] - _autorecon3_perhemi_steps = [(step, [ - of for out in outs for of in (out, out.replace('lh', 'rh')) - ], ins) for step, outs, ins in _autorecon3_lh_steps] + _autorecon3_rh_steps = [ + (step, [out.replace("lh", "rh") for out in outs], ins) + for step, outs, ins in _autorecon3_lh_steps + ] + _autorecon3_perhemi_steps = [ + (step, [of for out in outs for of in (out, out.replace("lh", "rh"))], ins) + for step, outs, ins in _autorecon3_lh_steps + ] _autorecon3_steps = _autorecon3_perhemi_steps + _autorecon3_added_steps # Fill out autorecon-hemi lh/rh steps - _autorecon_lh_steps = (_autorecon2_lh_steps + _autorecon3_lh_steps) - _autorecon_rh_steps = (_autorecon2_rh_steps + _autorecon3_rh_steps) + _autorecon_lh_steps = _autorecon2_lh_steps + _autorecon3_lh_steps + _autorecon_rh_steps = _autorecon2_rh_steps + _autorecon3_rh_steps _steps = _autorecon1_steps + _autorecon2_steps + _autorecon3_steps _binaries = [ - 'talairach', 'mri_normalize', 'mri_watershed', 'mri_em_register', - 'mri_ca_normalize', 'mri_ca_register', 'mri_remove_neck', - 'mri_ca_label', 'mri_segstats', 'mri_mask', 'mri_segment', - 'mri_edit_wm_with_aseg', 'mri_pretess', 'mri_fill', 'mri_tessellate', - 'mris_smooth', 'mris_inflate', 'mris_sphere', 'mris_fix_topology', - 'mris_make_surfaces', 'mris_surf2vol', 'mris_register', 'mrisp_paint', - 'mris_ca_label', 'mris_anatomical_stats', 'mri_aparc2aseg' + "talairach", + "mri_normalize", + "mri_watershed", + "mri_em_register", + "mri_ca_normalize", + "mri_ca_register", + "mri_remove_neck", + "mri_ca_label", + "mri_segstats", + "mri_mask", + "mri_segment", + "mri_edit_wm_with_aseg", + "mri_pretess", + "mri_fill", + "mri_tessellate", + "mris_smooth", + "mris_inflate", + "mris_sphere", + "mris_fix_topology", + "mris_make_surfaces", + "mris_surf2vol", + "mris_register", + "mrisp_paint", + "mris_ca_label", + "mris_anatomical_stats", + "mri_aparc2aseg", ] def _gen_subjects_dir(self): return os.getcwd() def _gen_filename(self, name): - if name == 'subjects_dir': + if name == "subjects_dir": return self._gen_subjects_dir() return None @@ -1175,53 +1581,104 @@ def _list_outputs(self): if isdefined(self.inputs.hemi): hemi = self.inputs.hemi else: - hemi = 'both' + hemi = "both" outputs = self._outputs().get() - outputs.update( - FreeSurferSource( - subject_id=self.inputs.subject_id, - subjects_dir=subjects_dir, - hemi=hemi)._list_outputs()) - outputs['subject_id'] = self.inputs.subject_id - outputs['subjects_dir'] = subjects_dir + # If using longitudinal pipeline, update subject id accordingly, + # otherwise use original/default subject_id + if isdefined(self.inputs.base_template_id): + outputs.update( + FreeSurferSource( + subject_id=self.inputs.base_template_id, + subjects_dir=subjects_dir, + 
hemi=hemi, + )._list_outputs() + ) + outputs["subject_id"] = self.inputs.base_template_id + elif isdefined(self.inputs.longitudinal_timepoint_id): + subject_id = f"{self.inputs.longitudinal_timepoint_id}.long.{self.inputs.longitudinal_template_id}" + outputs.update( + FreeSurferSource( + subject_id=subject_id, subjects_dir=subjects_dir, hemi=hemi + )._list_outputs() + ) + outputs["subject_id"] = subject_id + else: + outputs.update( + FreeSurferSource( + subject_id=self.inputs.subject_id, + subjects_dir=subjects_dir, + hemi=hemi, + )._list_outputs() + ) + outputs["subject_id"] = self.inputs.subject_id + + outputs["subjects_dir"] = subjects_dir return outputs def _is_resuming(self): subjects_dir = self.inputs.subjects_dir if not isdefined(subjects_dir): subjects_dir = self._gen_subjects_dir() - if os.path.isdir( - os.path.join(subjects_dir, self.inputs.subject_id, 'mri')): - return True + + # Check for longitudinal pipeline + if not isdefined(self.inputs.subject_id): + if isdefined(self.inputs.base_template_id): + if os.path.isdir( + os.path.join(subjects_dir, self.inputs.base_template_id, "mri") + ): + return True + elif isdefined(self.inputs.longitudinal_template_id): + if os.path.isdir( + os.path.join( + subjects_dir, + f"{self.inputs.longitudinal_timepoint_id}.long.{self.inputs.longitudinal_template_id}", + "mri", + ) + ): + return True + else: + if os.path.isdir(os.path.join(subjects_dir, self.inputs.subject_id, "mri")): + return True return False def _format_arg(self, name, trait_spec, value): - if name == 'T1_files': + if name == "T1_files": if self._is_resuming(): return None - if name == 'hippocampal_subfields_T1' and \ - isdefined(self.inputs.hippocampal_subfields_T2): + if name == "hippocampal_subfields_T1" and isdefined( + self.inputs.hippocampal_subfields_T2 + ): return None - if all((name == 'hippocampal_subfields_T2', + if all( + ( + name == "hippocampal_subfields_T2", isdefined(self.inputs.hippocampal_subfields_T1) - and self.inputs.hippocampal_subfields_T1)): - argstr = trait_spec.argstr.replace('T2', 'T1T2') + and self.inputs.hippocampal_subfields_T1, + ) + ): + argstr = trait_spec.argstr.replace("T2", "T1T2") return argstr % value - if name == 'directive' and value == 'autorecon-hemi': + if name == "directive" and value == "autorecon-hemi": if not isdefined(self.inputs.hemi): - raise ValueError("Directive 'autorecon-hemi' requires hemi " - "input to be set") - value += ' ' + self.inputs.hemi - if all((name == 'hemi', isdefined(self.inputs.directive) - and self.inputs.directive == 'autorecon-hemi')): + raise ValueError( + "Directive 'autorecon-hemi' requires hemi input to be set" + ) + value += " " + self.inputs.hemi + if all( + ( + name == "hemi", + isdefined(self.inputs.directive) + and self.inputs.directive == "autorecon-hemi", + ) + ): return None - return super(ReconAll, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) @property def cmdline(self): - cmd = super(ReconAll, self).cmdline + cmd = super().cmdline # Adds '-expert' flag if expert flags are passed # Mutually exclusive with 'expert' input parameter @@ -1237,28 +1694,26 @@ def cmdline(self): directive = self.inputs.directive if not isdefined(directive): steps = [] - elif directive == 'autorecon1': + elif directive == "autorecon1": steps = self._autorecon1_steps - elif directive == 'autorecon2-volonly': + elif directive == "autorecon2-volonly": steps = self._autorecon2_volonly_steps - elif directive == 'autorecon2-perhemi': + elif directive == "autorecon2-perhemi": 
steps = self._autorecon2_perhemi_steps - elif directive.startswith('autorecon2'): + elif directive.startswith("autorecon2"): if isdefined(self.inputs.hemi): - if self.inputs.hemi == 'lh': - steps = (self._autorecon2_volonly_steps + - self._autorecon2_lh_steps) + if self.inputs.hemi == "lh": + steps = self._autorecon2_volonly_steps + self._autorecon2_lh_steps else: - steps = (self._autorecon2_volonly_steps + - self._autorecon2_rh_steps) + steps = self._autorecon2_volonly_steps + self._autorecon2_rh_steps else: steps = self._autorecon2_steps - elif directive == 'autorecon-hemi': - if self.inputs.hemi == 'lh': + elif directive == "autorecon-hemi": + if self.inputs.hemi == "lh": steps = self._autorecon_lh_steps else: steps = self._autorecon_rh_steps - elif directive == 'autorecon3': + elif directive == "autorecon3": steps = self._autorecon3_steps else: steps = self._steps @@ -1266,8 +1721,8 @@ def cmdline(self): no_run = True flags = [] for step, outfiles, infiles in steps: - flag = '-{}'.format(step) - noflag = '-no{}'.format(step) + flag = f"-{step}" + noflag = f"-no{step}" if noflag in cmd: continue elif flag in cmd: @@ -1275,42 +1730,43 @@ def cmdline(self): continue subj_dir = os.path.join(subjects_dir, self.inputs.subject_id) - if check_depends([os.path.join(subj_dir, f) for f in outfiles], - [os.path.join(subj_dir, f) for f in infiles]): + if check_depends( + [os.path.join(subj_dir, f) for f in outfiles], + [os.path.join(subj_dir, f) for f in infiles], + ): flags.append(noflag) else: no_run = False if no_run and not self.force_run: - iflogger.info('recon-all complete : Not running') + iflogger.info("recon-all complete : Not running") return "echo recon-all: nothing to do" - cmd += ' ' + ' '.join(flags) - iflogger.info('resume recon-all : %s', cmd) + cmd += " " + " ".join(flags) + iflogger.info("resume recon-all : %s", cmd) return cmd def _prep_expert_file(self): if isdefined(self.inputs.expert): - return '' + return "" lines = [] for binary in self._binaries: args = getattr(self.inputs, binary) if isdefined(args): - lines.append('{} {}\n'.format(binary, args)) + lines.append(f"{binary} {args}\n") if lines == []: - return '' + return "" - contents = ''.join(lines) - if not isdefined(self.inputs.xopts) and \ - self._get_expert_file() == contents: - return ' -xopts-use' + contents = "".join(lines) + if not isdefined(self.inputs.xopts) and self._get_expert_file() == contents: + return " -xopts-use" - expert_fname = os.path.abspath('expert.opts') - with open(expert_fname, 'w') as fobj: + expert_fname = os.path.abspath("expert.opts") + with open(expert_fname, "w") as fobj: fobj.write(contents) - return ' -expert {}'.format(expert_fname) + return f" -expert {expert_fname}" def _get_expert_file(self): # Read pre-existing options file, if it exists @@ -1319,11 +1775,12 @@ def _get_expert_file(self): else: subjects_dir = self._gen_subjects_dir() - xopts_file = os.path.join(subjects_dir, self.inputs.subject_id, - 'scripts', 'expert-options') + xopts_file = os.path.join( + subjects_dir, self.inputs.subject_id, "scripts", "expert-options" + ) if not os.path.exists(xopts_file): - return '' - with open(xopts_file, 'r') as fobj: + return "" + with open(xopts_file) as fobj: return fobj.read() @property @@ -1335,110 +1792,123 @@ def version(self): class BBRegisterInputSpec(FSTraitedSpec): subject_id = traits.Str( - argstr='--s %s', desc='freesurfer subject id', mandatory=True) + argstr="--s %s", desc="freesurfer subject id", mandatory=True + ) source_file = File( - argstr='--mov %s', - desc='source file 
to be registered', + argstr="--mov %s", + desc="source file to be registered", mandatory=True, - copyfile=False) + copyfile=False, + ) init = traits.Enum( - 'spm', - 'fsl', - 'header', - argstr='--init-%s', + "spm", + "fsl", + "header", + argstr="--init-%s", mandatory=True, - xor=['init_reg_file'], - desc='initialize registration spm, fsl, header') + xor=["init_reg_file"], + desc="initialize registration spm, fsl, header", + ) init_reg_file = File( exists=True, - argstr='--init-reg %s', - desc='existing registration file', - xor=['init'], - mandatory=True) + argstr="--init-reg %s", + desc="existing registration file", + xor=["init"], + mandatory=True, + ) contrast_type = traits.Enum( - 't1', - 't2', - 'bold', - 'dti', - argstr='--%s', - desc='contrast type of image', - mandatory=True) + "t1", + "t2", + "bold", + "dti", + argstr="--%s", + desc="contrast type of image", + mandatory=True, + ) intermediate_file = File( exists=True, argstr="--int %s", - desc="Intermediate image, e.g. in case of partial FOV") + desc="Intermediate image, e.g. in case of partial FOV", + ) reg_frame = traits.Int( argstr="--frame %d", xor=["reg_middle_frame"], - desc="0-based frame index for 4D source file") + desc="0-based frame index for 4D source file", + ) reg_middle_frame = traits.Bool( argstr="--mid-frame", xor=["reg_frame"], - desc="Register middle frame of 4D source file") + desc="Register middle frame of 4D source file", + ) out_reg_file = File( - argstr='--reg %s', desc='output registration file', genfile=True) + argstr="--reg %s", desc="output registration file", genfile=True + ) spm_nifti = traits.Bool( - argstr="--spm-nii", - desc="force use of nifti rather than analyze with SPM") + argstr="--spm-nii", desc="force use of nifti rather than analyze with SPM" + ) epi_mask = traits.Bool( - argstr="--epi-mask", desc="mask out B0 regions in stages 1 and 2") + argstr="--epi-mask", desc="mask out B0 regions in stages 1 and 2" + ) dof = traits.Enum( - 6, 9, 12, argstr='--%d', desc='number of transform degrees of freedom') + 6, 9, 12, argstr="--%d", desc="number of transform degrees of freedom" + ) fsldof = traits.Int( - argstr='--fsl-dof %d', - desc='degrees of freedom for initial registration (FSL)') + argstr="--fsl-dof %d", desc="degrees of freedom for initial registration (FSL)" + ) out_fsl_file = traits.Either( traits.Bool, File, argstr="--fslmat %s", - desc="write the transformation matrix in FSL FLIRT format") + desc="write the transformation matrix in FSL FLIRT format", + ) out_lta_file = traits.Either( traits.Bool, File, argstr="--lta %s", - min_ver='5.2.0', - desc="write the transformation matrix in LTA format") + min_ver="5.2.0", + desc="write the transformation matrix in LTA format", + ) registered_file = traits.Either( traits.Bool, File, - argstr='--o %s', - desc='output warped sourcefile either True or filename') + argstr="--o %s", + desc="output warped sourcefile either True or filename", + ) init_cost_file = traits.Either( traits.Bool, File, - argstr='--initcost %s', - desc='output initial registration cost file') + argstr="--initcost %s", + desc="output initial registration cost file", + ) class BBRegisterInputSpec6(BBRegisterInputSpec): init = traits.Enum( - 'coreg', - 'rr', - 'spm', - 'fsl', - 'header', - 'best', - argstr='--init-%s', - xor=['init_reg_file'], - desc='initialize registration with mri_coreg, spm, fsl, or header') + "coreg", + "rr", + "spm", + "fsl", + "header", + "best", + argstr="--init-%s", + xor=["init_reg_file"], + desc="initialize registration with mri_coreg, spm, fsl, or 
header", + ) init_reg_file = File( exists=True, - argstr='--init-reg %s', - desc='existing registration file', - xor=['init']) + argstr="--init-reg %s", + desc="existing registration file", + xor=["init"], + ) class BBRegisterOutputSpec(TraitedSpec): - out_reg_file = File(exists=True, desc='Output registration file') - out_fsl_file = File( - exists=True, desc='Output FLIRT-style registration file') - out_lta_file = File(exists=True, desc='Output LTA-style registration file') - min_cost_file = File( - exists=True, desc='Output registration minimum cost file') - init_cost_file = File( - exists=True, desc='Output initial registration cost file') - registered_file = File( - exists=True, desc='Registered and resampled source file') + out_reg_file = File(exists=True, desc="Output registration file") + out_fsl_file = File(exists=True, desc="Output FLIRT-style registration file") + out_lta_file = File(exists=True, desc="Output LTA-style registration file") + min_cost_file = File(exists=True, desc="Output registration minimum cost file") + init_cost_file = File(exists=True, desc="Output initial registration cost file") + registered_file = File(exists=True, desc="Registered and resampled source file") class BBRegister(FSCommand): @@ -1458,69 +1928,74 @@ class BBRegister(FSCommand): """ - _cmd = 'bbregister' - if LooseVersion('0.0.0') < Info.looseversion() < LooseVersion("6.0.0"): + _cmd = "bbregister" + if LooseVersion("0.0.0") < Info.looseversion() < LooseVersion("6.0.0"): input_spec = BBRegisterInputSpec else: input_spec = BBRegisterInputSpec6 output_spec = BBRegisterOutputSpec def _list_outputs(self): - outputs = self.output_spec().get() _in = self.inputs if isdefined(_in.out_reg_file): - outputs['out_reg_file'] = op.abspath(_in.out_reg_file) + outputs["out_reg_file"] = op.abspath(_in.out_reg_file) elif _in.source_file: - suffix = '_bbreg_%s.dat' % _in.subject_id - outputs['out_reg_file'] = fname_presuffix( - _in.source_file, suffix=suffix, use_ext=False) + suffix = "_bbreg_%s.dat" % _in.subject_id + outputs["out_reg_file"] = fname_presuffix( + _in.source_file, suffix=suffix, use_ext=False + ) if isdefined(_in.registered_file): if isinstance(_in.registered_file, bool): - outputs['registered_file'] = fname_presuffix( - _in.source_file, suffix='_bbreg') + outputs["registered_file"] = fname_presuffix( + _in.source_file, suffix="_bbreg" + ) else: - outputs['registered_file'] = op.abspath(_in.registered_file) + outputs["registered_file"] = op.abspath(_in.registered_file) if isdefined(_in.out_lta_file): if isinstance(_in.out_lta_file, bool): - suffix = '_bbreg_%s.lta' % _in.subject_id + suffix = "_bbreg_%s.lta" % _in.subject_id out_lta_file = fname_presuffix( - _in.source_file, suffix=suffix, use_ext=False) - outputs['out_lta_file'] = out_lta_file + _in.source_file, suffix=suffix, use_ext=False + ) + outputs["out_lta_file"] = out_lta_file else: - outputs['out_lta_file'] = op.abspath(_in.out_lta_file) + outputs["out_lta_file"] = op.abspath(_in.out_lta_file) if isdefined(_in.out_fsl_file): if isinstance(_in.out_fsl_file, bool): - suffix = '_bbreg_%s.mat' % _in.subject_id + suffix = "_bbreg_%s.mat" % _in.subject_id out_fsl_file = fname_presuffix( - _in.source_file, suffix=suffix, use_ext=False) - outputs['out_fsl_file'] = out_fsl_file + _in.source_file, suffix=suffix, use_ext=False + ) + outputs["out_fsl_file"] = out_fsl_file else: - outputs['out_fsl_file'] = op.abspath(_in.out_fsl_file) + outputs["out_fsl_file"] = op.abspath(_in.out_fsl_file) if isdefined(_in.init_cost_file): if 
isinstance(_in.out_fsl_file, bool): - outputs[ - 'init_cost_file'] = outputs['out_reg_file'] + '.initcost' + outputs["init_cost_file"] = outputs["out_reg_file"] + ".initcost" else: - outputs['init_cost_file'] = op.abspath(_in.init_cost_file) + outputs["init_cost_file"] = op.abspath(_in.init_cost_file) - outputs['min_cost_file'] = outputs['out_reg_file'] + '.mincost' + outputs["min_cost_file"] = outputs["out_reg_file"] + ".mincost" return outputs def _format_arg(self, name, spec, value): - if name in ('registered_file', 'out_fsl_file', 'out_lta_file', - 'init_cost_file') and isinstance(value, bool): + if name in ( + "registered_file", + "out_fsl_file", + "out_lta_file", + "init_cost_file", + ) and isinstance(value, bool): value = self._list_outputs()[name] - return super(BBRegister, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): - - if name == 'out_reg_file': + if name == "out_reg_file": return self._list_outputs()[name] return None @@ -1528,123 +2003,150 @@ def _gen_filename(self, name): class ApplyVolTransformInputSpec(FSTraitedSpec): source_file = File( exists=True, - argstr='--mov %s', + argstr="--mov %s", copyfile=False, mandatory=True, - desc='Input volume you wish to transform') - transformed_file = File( - desc='Output volume', argstr='--o %s', genfile=True) - _targ_xor = ('target_file', 'tal', 'fs_target') + desc="Input volume you wish to transform", + ) + transformed_file = File(desc="Output volume", argstr="--o %s", genfile=True) + _targ_xor = ("target_file", "tal", "fs_target") target_file = File( exists=True, - argstr='--targ %s', + argstr="--targ %s", xor=_targ_xor, - desc='Output template volume', - mandatory=True) + desc="Output template volume", + mandatory=True, + ) tal = traits.Bool( - argstr='--tal', + argstr="--tal", xor=_targ_xor, mandatory=True, - desc='map to a sub FOV of MNI305 (with --reg only)') + desc="map to a sub FOV of MNI305 (with --reg only)", + ) tal_resolution = traits.Float( - argstr="--talres %.10f", desc="Resolution to sample when using tal") + argstr="--talres %.10f", desc="Resolution to sample when using tal" + ) fs_target = traits.Bool( - argstr='--fstarg', + argstr="--fstarg", xor=_targ_xor, mandatory=True, - requires=['reg_file'], - desc='use orig.mgz from subject in regfile as target') - _reg_xor = ('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject') + requires=["reg_file"], + desc="use orig.mgz from subject in regfile as target", + ) + _reg_xor = ( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ) reg_file = File( exists=True, xor=_reg_xor, - argstr='--reg %s', + argstr="--reg %s", mandatory=True, - desc='tkRAS-to-tkRAS matrix (tkregister2 format)') + desc="tkRAS-to-tkRAS matrix (tkregister2 format)", + ) lta_file = File( exists=True, xor=_reg_xor, - argstr='--lta %s', + argstr="--lta %s", mandatory=True, - desc='Linear Transform Array file') + desc="Linear Transform Array file", + ) lta_inv_file = File( exists=True, xor=_reg_xor, - argstr='--lta-inv %s', + argstr="--lta-inv %s", mandatory=True, - desc='LTA, invert') + desc="LTA, invert", + ) reg_file = File( exists=True, xor=_reg_xor, - argstr='--reg %s', + argstr="--reg %s", mandatory=True, - desc='tkRAS-to-tkRAS matrix (tkregister2 format)') + desc="tkRAS-to-tkRAS matrix (tkregister2 format)", + ) fsl_reg_file = File( exists=True, xor=_reg_xor, - argstr='--fsl %s', + argstr="--fsl 
%s", mandatory=True, - desc='fslRAS-to-fslRAS matrix (FSL format)') + desc="fslRAS-to-fslRAS matrix (FSL format)", + ) xfm_reg_file = File( exists=True, xor=_reg_xor, - argstr='--xfm %s', + argstr="--xfm %s", mandatory=True, - desc='ScannerRAS-to-ScannerRAS matrix (MNI format)') + desc="ScannerRAS-to-ScannerRAS matrix (MNI format)", + ) reg_header = traits.Bool( xor=_reg_xor, - argstr='--regheader', + argstr="--regheader", mandatory=True, - desc='ScannerRAS-to-ScannerRAS matrix = identity') + desc="ScannerRAS-to-ScannerRAS matrix = identity", + ) mni_152_reg = traits.Bool( - xor=_reg_xor, - argstr='--regheader', - mandatory=True, - desc='target MNI152 space') + xor=_reg_xor, argstr="--regheader", mandatory=True, desc="target MNI152 space" + ) subject = traits.Str( xor=_reg_xor, - argstr='--s %s', + argstr="--s %s", mandatory=True, - desc='set matrix = identity and use subject for any templates') - inverse = traits.Bool(desc='sample from target to source', argstr='--inv') + desc="set matrix = identity and use subject for any templates", + ) + inverse = traits.Bool(desc="sample from target to source", argstr="--inv") interp = traits.Enum( - 'trilin', - 'nearest', - 'cubic', - argstr='--interp %s', - desc='Interpolation method ( or nearest)') + "trilin", + "nearest", + "cubic", + argstr="--interp %s", + desc="Interpolation method ( or nearest)", + ) no_resample = traits.Bool( - desc='Do not resample; just change vox2ras matrix', - argstr='--no-resample') + desc="Do not resample; just change vox2ras matrix", argstr="--no-resample" + ) m3z_file = File( argstr="--m3z %s", - desc=('This is the morph to be applied to the volume. ' - 'Unless the morph is in mri/transforms (eg.: for ' - 'talairach.m3z computed by reconall), you will need ' - 'to specify the full path to this morph and use the ' - '--noDefM3zPath flag.')) + desc=( + "This is the morph to be applied to the volume. " + "Unless the morph is in mri/transforms (eg.: for " + "talairach.m3z computed by reconall), you will need " + "to specify the full path to this morph and use the " + "--noDefM3zPath flag." + ), + ) no_ded_m3z_path = traits.Bool( argstr="--noDefM3zPath", - requires=['m3z_file'], - desc=('To be used with the m3z flag. ' - 'Instructs the code not to look for the' - 'm3z morph in the default location ' - '(SUBJECTS_DIR/subj/mri/transforms), ' - 'but instead just use the path ' - 'indicated in --m3z.')) + requires=["m3z_file"], + desc=( + "To be used with the m3z flag. " + "Instructs the code not to look for the" + "m3z morph in the default location " + "(SUBJECTS_DIR/subj/mri/transforms), " + "but instead just use the path " + "indicated in --m3z." + ), + ) invert_morph = traits.Bool( argstr="--inv-morph", - requires=['m3z_file'], - desc=('Compute and use the inverse of the ' - 'non-linear morph to resample the input ' - 'volume. To be used by --m3z.')) + requires=["m3z_file"], + desc=( + "Compute and use the inverse of the " + "non-linear morph to resample the input " + "volume. To be used by --m3z." 
+ ), + ) class ApplyVolTransformOutputSpec(TraitedSpec): - transformed_file = File( - exists=True, desc='Path to output file if used normally') + transformed_file = File(exists=True, desc="Path to output file if used normally") class ApplyVolTransform(FSCommand): @@ -1664,7 +2166,7 @@ class ApplyVolTransform(FSCommand): """ - _cmd = 'mri_vol2vol' + _cmd = "mri_vol2vol" input_spec = ApplyVolTransformInputSpec output_spec = ApplyVolTransformOutputSpec @@ -1673,67 +2175,69 @@ def _get_outfile(self): if not isdefined(outfile): if self.inputs.inverse is True: if self.inputs.fs_target is True: - src = 'orig.mgz' + src = "orig.mgz" else: src = self.inputs.target_file else: src = self.inputs.source_file - outfile = fname_presuffix( - src, newpath=os.getcwd(), suffix='_warped') + outfile = fname_presuffix(src, newpath=os.getcwd(), suffix="_warped") return outfile def _list_outputs(self): outputs = self.output_spec().get() - outputs['transformed_file'] = os.path.abspath(self._get_outfile()) + outputs["transformed_file"] = os.path.abspath(self._get_outfile()) return outputs def _gen_filename(self, name): - if name == 'transformed_file': + if name == "transformed_file": return self._get_outfile() return None class SmoothInputSpec(FSTraitedSpec): - in_file = File( - exists=True, desc='source volume', argstr='--i %s', mandatory=True) + in_file = File(exists=True, desc="source volume", argstr="--i %s", mandatory=True) reg_file = File( - desc='registers volume to surface anatomical ', - argstr='--reg %s', + desc="registers volume to surface anatomical ", + argstr="--reg %s", mandatory=True, - exists=True) - smoothed_file = File(desc='output volume', argstr='--o %s', genfile=True) - proj_frac_avg = traits.Tuple( + exists=True, + ) + smoothed_file = File(desc="output volume", argstr="--o %s", genfile=True) + proj_frac_avg = Tuple( traits.Float, traits.Float, traits.Float, - xor=['proj_frac'], - desc='average a long normal min max delta', - argstr='--projfrac-avg %.2f %.2f %.2f') + xor=["proj_frac"], + desc="average along normal min max delta", + argstr="--projfrac-avg %.2f %.2f %.2f", + ) proj_frac = traits.Float( - desc='project frac of thickness a long surface normal', - xor=['proj_frac_avg'], - argstr='--projfrac %s') + desc="project frac of thickness along surface normal", + xor=["proj_frac_avg"], + argstr="--projfrac %s", + ) surface_fwhm = traits.Range( low=0.0, - requires=['reg_file'], + requires=["reg_file"], mandatory=True, - xor=['num_iters'], - desc='surface FWHM in mm', - argstr='--fwhm %f') + xor=["num_iters"], + desc="surface FWHM in mm", + argstr="--fwhm %f", + ) num_iters = traits.Range( low=1, - xor=['surface_fwhm'], + xor=["surface_fwhm"], mandatory=True, - argstr='--niters %d', - desc='number of iterations instead of fwhm') + argstr="--niters %d", + desc="number of iterations instead of fwhm", + ) vol_fwhm = traits.Range( - low=0.0, - argstr='--vol-fwhm %f', - desc='volume smoothing outside of surface') + low=0.0, argstr="--vol-fwhm %f", desc="volume smoothing outside of surface" + ) class SmoothOutputSpec(TraitedSpec): - smoothed_file = File(exists=True, desc='smoothed input volume') + smoothed_file = File(exists=True, desc="smoothed input volume") class Smooth(FSCommand): @@ -1758,7 +2262,7 @@ class Smooth(FSCommand): """ - _cmd = 'mris_volsmooth' + _cmd = "mris_volsmooth" input_spec = SmoothInputSpec output_spec = SmoothOutputSpec @@ -1766,143 +2270,150 @@ def _list_outputs(self): outputs = self.output_spec().get() outfile = self.inputs.smoothed_file if not isdefined(outfile): - 
outfile = self._gen_fname(self.inputs.in_file, suffix='_smooth') - outputs['smoothed_file'] = outfile + outfile = self._gen_fname(self.inputs.in_file, suffix="_smooth") + outputs["smoothed_file"] = outfile return outputs def _gen_filename(self, name): - if name == 'smoothed_file': + if name == "smoothed_file": return self._list_outputs()[name] return None class RobustRegisterInputSpec(FSTraitedSpec): - source_file = File( - exists=True, - mandatory=True, - argstr='--mov %s', - desc='volume to be registered') + exists=True, mandatory=True, argstr="--mov %s", desc="volume to be registered" + ) target_file = File( exists=True, mandatory=True, - argstr='--dst %s', - desc='target volume for the registration') + argstr="--dst %s", + desc="target volume for the registration", + ) out_reg_file = traits.Either( True, File, default=True, usedefault=True, - argstr='--lta %s', - desc='registration file; either True or filename') + argstr="--lta %s", + desc="registration file; either True or filename", + ) registered_file = traits.Either( traits.Bool, File, - argstr='--warp %s', - desc='registered image; either True or filename') + argstr="--warp %s", + desc="registered image; either True or filename", + ) weights_file = traits.Either( traits.Bool, File, - argstr='--weights %s', - desc='weights image to write; either True or filename') + argstr="--weights %s", + desc="weights image to write; either True or filename", + ) est_int_scale = traits.Bool( - argstr='--iscale', - desc='estimate intensity scale (recommended for unnormalized images)') + argstr="--iscale", + desc="estimate intensity scale (recommended for unnormalized images)", + ) trans_only = traits.Bool( - argstr='--transonly', desc='find 3 parameter translation only') + argstr="--transonly", desc="find 3 parameter translation only" + ) in_xfm_file = File( - exists=True, - argstr='--transform', - desc='use initial transform on source') + exists=True, argstr="--transform", desc="use initial transform on source" + ) half_source = traits.Either( traits.Bool, File, - argstr='--halfmov %s', - desc="write source volume mapped to halfway space") + argstr="--halfmov %s", + desc="write source volume mapped to halfway space", + ) half_targ = traits.Either( traits.Bool, File, argstr="--halfdst %s", - desc="write target volume mapped to halfway space") + desc="write target volume mapped to halfway space", + ) half_weights = traits.Either( traits.Bool, File, argstr="--halfweights %s", - desc="write weights volume mapped to halfway space") + desc="write weights volume mapped to halfway space", + ) half_source_xfm = traits.Either( traits.Bool, File, argstr="--halfmovlta %s", - desc="write transform from source to halfway space") + desc="write transform from source to halfway space", + ) half_targ_xfm = traits.Either( traits.Bool, File, argstr="--halfdstlta %s", - desc="write transform from target to halfway space") + desc="write transform from target to halfway space", + ) auto_sens = traits.Bool( - argstr='--satit', - xor=['outlier_sens'], + argstr="--satit", + xor=["outlier_sens"], mandatory=True, - desc='auto-detect good sensitivity') + desc="auto-detect good sensitivity", + ) outlier_sens = traits.Float( - argstr='--sat %.4f', - xor=['auto_sens'], + argstr="--sat %.4f", + xor=["auto_sens"], mandatory=True, - desc='set outlier sensitivity explicitly') + desc="set outlier sensitivity explicitly", + ) least_squares = traits.Bool( - argstr='--leastsquares', - desc='use least squares instead of robust estimator') - no_init = traits.Bool(argstr='--noinit', 
desc='skip transform init') + argstr="--leastsquares", desc="use least squares instead of robust estimator" + ) + no_init = traits.Bool(argstr="--noinit", desc="skip transform init") init_orient = traits.Bool( - argstr='--initorient', - desc='use moments for initial orient (recommended for stripped brains)' + argstr="--initorient", + desc="use moments for initial orient (recommended for stripped brains)", ) max_iterations = traits.Int( - argstr='--maxit %d', desc='maximum # of times on each resolution') + argstr="--maxit %d", desc="maximum # of times on each resolution" + ) high_iterations = traits.Int( - argstr='--highit %d', desc='max # of times on highest resolution') + argstr="--highit %d", desc="max # of times on highest resolution" + ) iteration_thresh = traits.Float( - argstr='--epsit %.3f', desc='stop iterations when below threshold') + argstr="--epsit %.3f", desc="stop iterations when below threshold" + ) subsample_thresh = traits.Int( - argstr='--subsample %d', - desc='subsample if dimension is above threshold size') + argstr="--subsample %d", desc="subsample if dimension is above threshold size" + ) outlier_limit = traits.Float( - argstr='--wlimit %.3f', desc='set maximal outlier limit in satit') + argstr="--wlimit %.3f", desc="set maximal outlier limit in satit" + ) write_vo2vox = traits.Bool( - argstr='--vox2vox', desc='output vox2vox matrix (default is RAS2RAS)') - no_multi = traits.Bool( - argstr='--nomulti', desc='work on highest resolution') + argstr="--vox2vox", desc="output vox2vox matrix (default is RAS2RAS)" + ) + no_multi = traits.Bool(argstr="--nomulti", desc="work on highest resolution") mask_source = File( - exists=True, - argstr='--maskmov %s', - desc='image to mask source volume with') + exists=True, argstr="--maskmov %s", desc="image to mask source volume with" + ) mask_target = File( - exists=True, - argstr='--maskdst %s', - desc='image to mask target volume with') + exists=True, argstr="--maskdst %s", desc="image to mask target volume with" + ) force_double = traits.Bool( - argstr='--doubleprec', desc='use double-precision intensities') - force_float = traits.Bool( - argstr='--floattype', desc='use float intensities') + argstr="--doubleprec", desc="use double-precision intensities" + ) + force_float = traits.Bool(argstr="--floattype", desc="use float intensities") class RobustRegisterOutputSpec(TraitedSpec): - out_reg_file = File(exists=True, desc="output registration file") - registered_file = File( - exists=True, desc="output image with registration applied") + registered_file = File(exists=True, desc="output image with registration applied") weights_file = File(exists=True, desc="image of weights used") - half_source = File( - exists=True, desc="source image mapped to halfway space") + half_source = File(exists=True, desc="source image mapped to halfway space") half_targ = File(exists=True, desc="target image mapped to halfway space") - half_weights = File( - exists=True, desc="weights image mapped to halfway space") + half_weights = File(exists=True, desc="weights image mapped to halfway space") half_source_xfm = File( - exists=True, - desc="transform file to map source image to halfway space") + exists=True, desc="transform file to map source image to halfway space" + ) half_targ_xfm = File( - exists=True, - desc="transform file to map target image to halfway space") + exists=True, desc="transform file to map target image to halfway space" + ) class RobustRegister(FSCommand): @@ -1927,24 +2438,30 @@ class RobustRegister(FSCommand): """ - _cmd = 
'mri_robust_register' + _cmd = "mri_robust_register" input_spec = RobustRegisterInputSpec output_spec = RobustRegisterOutputSpec def _format_arg(self, name, spec, value): - options = ("out_reg_file", "registered_file", "weights_file", - "half_source", "half_targ", "half_weights", - "half_source_xfm", "half_targ_xfm") + options = ( + "out_reg_file", + "registered_file", + "weights_file", + "half_source", + "half_targ", + "half_weights", + "half_source_xfm", + "half_targ_xfm", + ) if name in options and isinstance(value, bool): value = self._list_outputs()[name] - return super(RobustRegister, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() cwd = os.getcwd() - prefices = dict( - src=self.inputs.source_file, trg=self.inputs.target_file) - suffices = dict( + prefixes = dict(src=self.inputs.source_file, trg=self.inputs.target_file) + suffixes = dict( out_reg_file=("src", "_robustreg.lta", False), registered_file=("src", "_robustreg", True), weights_file=("src", "_robustweights", True), @@ -1952,56 +2469,50 @@ def _list_outputs(self): half_targ=("trg", "_halfway", True), half_weights=("src", "_halfweights", True), half_source_xfm=("src", "_robustxfm.lta", False), - half_targ_xfm=("trg", "_robustxfm.lta", False)) - for name, sufftup in list(suffices.items()): + half_targ_xfm=("trg", "_robustxfm.lta", False), + ) + for name, sufftup in list(suffixes.items()): value = getattr(self.inputs, name) if value: if value is True: outputs[name] = fname_presuffix( - prefices[sufftup[0]], + prefixes[sufftup[0]], suffix=sufftup[1], newpath=cwd, - use_ext=sufftup[2]) + use_ext=sufftup[2], + ) else: outputs[name] = os.path.abspath(value) return outputs class FitMSParamsInputSpec(FSTraitedSpec): - in_files = traits.List( File(exists=True), argstr="%s", position=-2, mandatory=True, - desc="list of FLASH images (must be in mgh format)") - tr_list = traits.List( - traits.Int, desc="list of TRs of the input files (in msec)") - te_list = traits.List( - traits.Float, desc="list of TEs of the input files (in msec)") - flip_list = traits.List( - traits.Int, desc="list of flip angles of the input files") + desc="list of FLASH images (must be in mgh format)", + ) + tr_list = traits.List(traits.Int, desc="list of TRs of the input files (in msec)") + te_list = traits.List(traits.Float, desc="list of TEs of the input files (in msec)") + flip_list = traits.List(traits.Int, desc="list of flip angles of the input files") xfm_list = traits.List( - File(exists=True), - desc="list of transform files to apply to each FLASH image") + File(exists=True), desc="list of transform files to apply to each FLASH image" + ) out_dir = Directory( - argstr="%s", - position=-1, - genfile=True, - desc="directory to store output in") + argstr="%s", position=-1, genfile=True, desc="directory to store output in" + ) class FitMSParamsOutputSpec(TraitedSpec): - - t1_image = File( - exists=True, desc="image of estimated T1 relaxation values") - pd_image = File( - exists=True, desc="image of estimated proton density values") + t1_image = File(exists=True, desc="image of estimated T1 relaxation values") + pd_image = File(exists=True, desc="image of estimated proton density values") t2star_image = File(exists=True, desc="image of estimated T2* values") class FitMSParams(FSCommand): - """Estimate tissue paramaters from a set of FLASH images. + """Estimate tissue parameters from a set of FLASH images. 
Examples -------- @@ -2013,6 +2524,7 @@ class FitMSParams(FSCommand): 'mri_ms_fitparms flash_05.mgz flash_30.mgz flash_parameters' """ + _cmd = "mri_ms_fitparms" input_spec = FitMSParamsInputSpec output_spec = FitMSParamsOutputSpec @@ -2026,13 +2538,12 @@ def _format_arg(self, name, spec, value): if isdefined(self.inputs.te_list): cmd = " ".join((cmd, "-te %.3f" % self.inputs.te_list[i])) if isdefined(self.inputs.flip_list): - cmd = " ".join((cmd, - "-fa %.1f" % self.inputs.flip_list[i])) + cmd = " ".join((cmd, "-fa %.1f" % self.inputs.flip_list[i])) if isdefined(self.inputs.xfm_list): cmd = " ".join((cmd, "-at %s" % self.inputs.xfm_list[i])) - cmd = " ".join((cmd, file)) + cmd = f"{cmd} {file}" return cmd - return super(FitMSParams, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -2052,40 +2563,34 @@ def _gen_filename(self, name): class SynthesizeFLASHInputSpec(FSTraitedSpec): - fixed_weighting = traits.Bool( position=1, argstr="-w", - desc="use a fixed weighting to generate optimal gray/white contrast") + desc="use a fixed weighting to generate optimal gray/white contrast", + ) tr = traits.Float( - mandatory=True, - position=2, - argstr="%.2f", - desc="repetition time (in msec)") + mandatory=True, position=2, argstr="%.2f", desc="repetition time (in msec)" + ) flip_angle = traits.Float( - mandatory=True, - position=3, - argstr="%.2f", - desc="flip angle (in degrees)") + mandatory=True, position=3, argstr="%.2f", desc="flip angle (in degrees)" + ) te = traits.Float( - mandatory=True, position=4, argstr="%.3f", desc="echo time (in msec)") + mandatory=True, position=4, argstr="%.3f", desc="echo time (in msec)" + ) t1_image = File( - exists=True, - mandatory=True, - position=5, - argstr="%s", - desc="image of T1 values") + exists=True, mandatory=True, position=5, argstr="%s", desc="image of T1 values" + ) pd_image = File( exists=True, mandatory=True, position=6, argstr="%s", - desc="image of proton density values") + desc="image of proton density values", + ) out_file = File(genfile=True, argstr="%s", desc="image to write") class SynthesizeFLASHOutputSpec(TraitedSpec): - out_file = File(exists=True, desc="synthesized FLASH acquisition") @@ -2103,6 +2608,7 @@ class SynthesizeFLASH(FSCommand): 'mri_synthesize 20.00 30.00 3.000 T1.mgz PD.mgz flash_30syn.mgz' """ + _cmd = "mri_synthesize" input_spec = SynthesizeFLASHInputSpec output_spec = SynthesizeFLASHOutputSpec @@ -2113,7 +2619,8 @@ def _list_outputs(self): outputs["out_file"] = self.inputs.out_file else: outputs["out_file"] = self._gen_fname( - "synth-flash_%02d.mgz" % self.inputs.flip_angle, suffix="") + "synth-flash_%02d.mgz" % self.inputs.flip_angle, suffix="" + ) return outputs def _gen_filename(self, name): @@ -2128,55 +2635,53 @@ class MNIBiasCorrectionInputSpec(FSTraitedSpec): exists=True, mandatory=True, argstr="--i %s", - desc="input volume. Input can be any format accepted by mri_convert.") + desc="input volume. Input can be any format accepted by mri_convert.", + ) # optional out_file = File( argstr="--o %s", - name_source=['in_file'], - name_template='%s_output', + name_source=["in_file"], + name_template="%s_output", hash_files=False, keep_extension=True, desc="output volume. Output can be any format accepted by mri_convert. 
" - + "If the output format is COR, then the directory must exist.") + "If the output format is COR, then the directory must exist.", + ) iterations = traits.Int( - 4, usedefault=True, + 4, + usedefault=True, argstr="--n %d", - desc= - "Number of iterations to run nu_correct. Default is 4. This is the number of times " - + + desc="Number of iterations to run nu_correct. Default is 4. This is the number of times " "that nu_correct is repeated (ie, using the output from the previous run as the input for " - + - "the next). This is different than the -iterations option to nu_correct." + "the next). This is different than the -iterations option to nu_correct.", ) protocol_iterations = traits.Int( argstr="--proto-iters %d", - desc= - "Passes Np as argument of the -iterations flag of nu_correct. This is different " - + - "than the --n flag above. Default is not to pass nu_correct the -iterations flag." + desc="Passes Np as argument of the -iterations flag of nu_correct. This is different " + "than the --n flag above. Default is not to pass nu_correct the -iterations flag.", ) distance = traits.Int(argstr="--distance %d", desc="N3 -distance option") no_rescale = traits.Bool( argstr="--no-rescale", - desc="do not rescale so that global mean of output == input global mean" + desc="do not rescale so that global mean of output == input global mean", ) mask = File( exists=True, argstr="--mask %s", - desc= - "brainmask volume. Input can be any format accepted by mri_convert.") + desc="brainmask volume. Input can be any format accepted by mri_convert.", + ) transform = File( exists=True, argstr="--uchar %s", - desc="tal.xfm. Use mri_make_uchar instead of conforming") + desc="tal.xfm. Use mri_make_uchar instead of conforming", + ) stop = traits.Float( argstr="--stop %f", - desc= - "Convergence threshold below which iteration stops (suggest 0.01 to 0.0001)" + desc="Convergence threshold below which iteration stops (suggest 0.01 to 0.0001)", ) shrink = traits.Int( - argstr="--shrink %d", - desc="Shrink parameter for finer sampling (default is 4)") + argstr="--shrink %d", desc="Shrink parameter for finer sampling (default is 4)" + ) class MNIBiasCorrectionOutputSpec(TraitedSpec): @@ -2184,7 +2689,7 @@ class MNIBiasCorrectionOutputSpec(TraitedSpec): class MNIBiasCorrection(FSCommand): - """ Wrapper for nu_correct, a program from the Montreal Neurological Insitute (MNI) + """Wrapper for nu_correct, a program from the Montreal Neurological Institute (MNI) used for correcting intensity non-uniformity (ie, bias fields). You must have the MNI software installed on your system to run this. See [www.bic.mni.mcgill.ca/software/N3] for more info. 
@@ -2204,13 +2709,14 @@ class MNIBiasCorrection(FSCommand): >>> correct.cmdline 'mri_nu_correct.mni --distance 50 --i norm.mgz --n 6 --o norm_output.mgz --proto-iters 1000' - References: + References ---------- [http://freesurfer.net/fswiki/mri_nu_correct.mni] [http://www.bic.mni.mcgill.ca/software/N3] [https://github.com/BIC-MNI/N3] """ + _cmd = "mri_nu_correct.mni" input_spec = MNIBiasCorrectionInputSpec output_spec = MNIBiasCorrectionOutputSpec @@ -2219,26 +2725,21 @@ class MNIBiasCorrection(FSCommand): class WatershedSkullStripInputSpec(FSTraitedSpec): # required in_file = File( - argstr="%s", - exists=True, - mandatory=True, - position=-2, - desc="input volume") + argstr="%s", exists=True, mandatory=True, position=-2, desc="input volume" + ) out_file = File( - 'brainmask.auto.mgz', + "brainmask.auto.mgz", argstr="%s", exists=False, mandatory=True, position=-1, usedefault=True, - desc="output volume") + desc="output volume", + ) # optional - t1 = traits.Bool( - argstr="-T1", desc="specify T1 input volume (T1 grey value = 110)") - brain_atlas = File( - argstr="-brain_atlas %s", exists=True, position=-4, desc="") - transform = File( - argstr="%s", exists=False, position=-3, desc="undocumented") + t1 = traits.Bool(argstr="-T1", desc="specify T1 input volume (T1 grey value = 110)") + brain_atlas = File(argstr="-brain_atlas %s", exists=True, position=-4, desc="") + transform = File(argstr="%s", exists=False, position=-3, desc="undocumented") class WatershedSkullStripOutputSpec(TraitedSpec): @@ -2246,10 +2747,10 @@ class WatershedSkullStripOutputSpec(TraitedSpec): class WatershedSkullStrip(FSCommand): - """ This program strips skull and other outer non-brain tissue and + """This program strips skull and other outer non-brain tissue and produces the brain volume from T1 volume or the scanned volume. - The "watershed" segmentation algorithm was used to dertermine the + The "watershed" segmentation algorithm was used to determine the intensity values for white matter, grey matter, and CSF. A force field was then used to fit a spherical surface to the brain. 
The shape of the surface fit was then evaluated against a previously @@ -2270,50 +2771,52 @@ class WatershedSkullStrip(FSCommand): >>> skullstrip.cmdline 'mri_watershed -T1 transforms/talairach_with_skull.lta T1.mgz brainmask.auto.mgz' """ - _cmd = 'mri_watershed' + + _cmd = "mri_watershed" input_spec = WatershedSkullStripInputSpec output_spec = WatershedSkullStripOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class NormalizeInputSpec(FSTraitedSpec): # required in_file = File( - argstr='%s', + argstr="%s", exists=True, mandatory=True, position=-2, - desc="The input file for Normalize") + desc="The input file for Normalize", + ) out_file = File( - argstr='%s', + argstr="%s", position=-1, - name_source=['in_file'], - name_template='%s_norm', + name_source=["in_file"], + name_template="%s_norm", hash_files=False, keep_extension=True, - desc="The output file for Normalize") + desc="The output file for Normalize", + ) # optional gradient = traits.Int( - argstr="-g %d", - desc="use max intensity/mm gradient g (default=1)") + argstr="-g %d", desc="use max intensity/mm gradient g (default=1)" + ) mask = File( - argstr="-mask %s", - exists=True, - desc="The input mask file for Normalize") + argstr="-mask %s", exists=True, desc="The input mask file for Normalize" + ) segmentation = File( - argstr="-aseg %s", - exists=True, - desc="The input segmentation for Normalize") + argstr="-aseg %s", exists=True, desc="The input segmentation for Normalize" + ) transform = File( - exists=True, desc="Tranform file from the header of the input file") + exists=True, desc="Transform file from the header of the input file" + ) class NormalizeOutputSpec(TraitedSpec): - out_file = traits.File(exists=False, desc="The output file for Normalize") + out_file = File(exists=False, desc="The output file for Normalize") class Normalize(FSCommand): @@ -2331,68 +2834,74 @@ class Normalize(FSCommand): >>> normalize.cmdline 'mri_normalize -g 1 T1.mgz T1_norm.mgz' """ + _cmd = "mri_normalize" input_spec = NormalizeInputSpec output_spec = NormalizeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class CANormalizeInputSpec(FSTraitedSpec): in_file = File( - argstr='%s', + argstr="%s", exists=True, mandatory=True, position=-4, - desc="The input file for CANormalize") + desc="The input file for CANormalize", + ) out_file = File( - argstr='%s', + argstr="%s", position=-1, - name_source=['in_file'], - name_template='%s_norm', + name_source=["in_file"], + name_template="%s_norm", hash_files=False, keep_extension=True, - desc="The output file for CANormalize") + desc="The output file for CANormalize", + ) atlas = File( - argstr='%s', + argstr="%s", exists=True, mandatory=True, position=-3, - desc="The atlas file in gca format") + desc="The atlas file in gca format", + ) transform = File( - argstr='%s', + argstr="%s", exists=True, mandatory=True, position=-2, - desc="The tranform file in lta format") + desc="The transform file in lta format", + ) # optional - mask = File( - argstr='-mask %s', exists=True, desc="Specifies volume to use as mask") + mask = File(argstr="-mask %s", exists=True, desc="Specifies volume to use as mask") control_points = File( - argstr='-c %s', desc="File name for the output control points") + 
argstr="-c %s", desc="File name for the output control points" + ) long_file = File( - argstr='-long %s', - desc='undocumented flag used in longitudinal processing') + argstr="-long %s", desc="undocumented flag used in longitudinal processing" + ) class CANormalizeOutputSpec(TraitedSpec): - out_file = traits.File(exists=False, desc="The output file for Normalize") - control_points = File( - exists=False, desc="The output control points for Normalize") + out_file = File(exists=False, desc="The output file for Normalize") + control_points = File(exists=False, desc="The output control points for Normalize") class CANormalize(FSCommand): """This program creates a normalized volume using the brain volume and an input gca file. - For complete details, see the `FS Documentation `_ + See Also + -------- + For complete details, see the `FS Documentation + `__. Examples - ======== - + -------- >>> from nipype.interfaces import freesurfer >>> ca_normalize = freesurfer.CANormalize() >>> ca_normalize.inputs.in_file = "T1.mgz" @@ -2400,100 +2909,103 @@ class CANormalize(FSCommand): >>> ca_normalize.inputs.transform = "trans.mat" # in practice use .lta transforms >>> ca_normalize.cmdline 'mri_ca_normalize T1.mgz atlas.nii.gz trans.mat T1_norm.mgz' + """ + _cmd = "mri_ca_normalize" input_spec = CANormalizeInputSpec output_spec = CANormalizeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) - outputs['control_points'] = os.path.abspath(self.inputs.control_points) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) + outputs["control_points"] = os.path.abspath(self.inputs.control_points) return outputs class CARegisterInputSpec(FSTraitedSpecOpenMP): # required in_file = File( - argstr='%s', + argstr="%s", exists=True, mandatory=True, position=-3, - desc="The input volume for CARegister") + desc="The input volume for CARegister", + ) out_file = File( - argstr='%s', - position=-1, - genfile=True, - desc="The output volume for CARegister") + argstr="%s", position=-1, genfile=True, desc="The output volume for CARegister" + ) template = File( - argstr='%s', - exists=True, - position=-2, - desc="The template file in gca format") + argstr="%s", exists=True, position=-2, desc="The template file in gca format" + ) # optional - mask = File( - argstr='-mask %s', exists=True, desc="Specifies volume to use as mask") + mask = File(argstr="-mask %s", exists=True, desc="Specifies volume to use as mask") invert_and_save = traits.Bool( - argstr='-invert-and-save', + argstr="-invert-and-save", position=-4, - desc= - "Invert and save the .m3z multi-dimensional talaraich transform to x, y, and z .mgz files" + desc="Invert and save the .m3z multi-dimensional talaraich transform to x, y, and z .mgz files", ) - no_big_ventricles = traits.Bool( - argstr='-nobigventricles', desc="No big ventricles") + no_big_ventricles = traits.Bool(argstr="-nobigventricles", desc="No big ventricles") transform = File( - argstr='-T %s', exists=True, desc="Specifies transform in lta format") + argstr="-T %s", exists=True, desc="Specifies transform in lta format" + ) align = traits.String( - argstr='-align-%s', desc="Specifies when to perform alignment") + argstr="-align-%s", desc="Specifies when to perform alignment" + ) levels = traits.Int( - argstr='-levels %d', - desc= - "defines how many surrounding voxels will be used in interpolations, default is 6" + argstr="-levels %d", + desc="defines how many surrounding voxels will be used in interpolations, default 
is 6", ) A = traits.Int( - argstr='-A %d', - desc='undocumented flag used in longitudinal processing') + argstr="-A %d", desc="undocumented flag used in longitudinal processing" + ) l_files = InputMultiPath( File(exists=False), - argstr='-l %s', - desc='undocumented flag used in longitudinal processing') + argstr="-l %s", + desc="undocumented flag used in longitudinal processing", + ) class CARegisterOutputSpec(TraitedSpec): - out_file = traits.File(exists=False, desc="The output file for CARegister") + out_file = File(exists=False, desc="The output file for CARegister") class CARegister(FSCommandOpenMP): """Generates a multi-dimensional talairach transform from a gca file and talairach.lta file - For complete details, see the `FS Documentation `_ + See Also + -------- + For complete details, see the `FS Documentation + `__ Examples - ======== + -------- >>> from nipype.interfaces import freesurfer >>> ca_register = freesurfer.CARegister() >>> ca_register.inputs.in_file = "norm.mgz" >>> ca_register.inputs.out_file = "talairach.m3z" >>> ca_register.cmdline 'mri_ca_register norm.mgz talairach.m3z' + """ + _cmd = "mri_ca_register" input_spec = CARegisterInputSpec output_spec = CARegisterOutputSpec def _format_arg(self, name, spec, value): if name == "l_files" and len(value) == 1: - value.append('identity.nofile') - return super(CARegister, self)._format_arg(name, spec, value) + value.append("identity.nofile") + return super()._format_arg(name, spec, value) def _gen_fname(self, name): - if name == 'out_file': - return os.path.abspath('talairach.m3z') + if name == "out_file": + return os.path.abspath("talairach.m3z") return None def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -2504,53 +3016,58 @@ class CALabelInputSpec(FSTraitedSpecOpenMP): position=-4, mandatory=True, exists=True, - desc="Input volume for CALabel") + desc="Input volume for CALabel", + ) out_file = File( argstr="%s", position=-1, mandatory=True, exists=False, - desc="Output file for CALabel") + desc="Output file for CALabel", + ) transform = File( argstr="%s", position=-3, mandatory=True, exists=True, - desc="Input transform for CALabel") + desc="Input transform for CALabel", + ) template = File( argstr="%s", position=-2, mandatory=True, exists=True, - desc="Input template for CALabel") + desc="Input template for CALabel", + ) # optional in_vol = File(argstr="-r %s", exists=True, desc="set input volume") intensities = File( argstr="-r %s", exists=True, - desc="input label intensities file(used in longitudinal processing)") - no_big_ventricles = traits.Bool( - argstr="-nobigventricles", desc="No big ventricles") + desc="input label intensities file(used in longitudinal processing)", + ) + no_big_ventricles = traits.Bool(argstr="-nobigventricles", desc="No big ventricles") align = traits.Bool(argstr="-align", desc="Align CALabel") prior = traits.Float(argstr="-prior %.1f", desc="Prior for CALabel") - relabel_unlikely = traits.Tuple( + relabel_unlikely = Tuple( traits.Int, traits.Float, argstr="-relabel_unlikely %d %.1f", - desc=("Reclassify voxels at least some std" - " devs from the mean using some size" - " Gaussian window")) - label = traits.File( + desc=( + "Reclassify voxels at least some std" + " devs from the mean using some size" + " Gaussian window" + ), + ) + label = File( argstr="-l %s", exists=True, - desc= - "Undocumented flag. 
Autorecon3 uses ../label/{hemisphere}.cortex.label as input file" + desc="Undocumented flag. Autorecon3 uses ../label/{hemisphere}.cortex.label as input file", ) - aseg = traits.File( + aseg = File( argstr="-aseg %s", exists=True, - desc= - "Undocumented flag. Autorecon3 uses ../mri/aseg.presurf.mgz as input file" + desc="Undocumented flag. Autorecon3 uses ../mri/aseg.presurf.mgz as input file", ) @@ -2559,12 +3076,15 @@ class CALabelOutputSpec(TraitedSpec): class CALabel(FSCommandOpenMP): - """ - For complete details, see the `FS Documentation `_ + """Label subcortical structures based on a GCA model. - Examples - ======== + See Also + -------- + For complete details, see the `FS Documentation + `__ + Examples + -------- >>> from nipype.interfaces import freesurfer >>> ca_label = freesurfer.CALabel() >>> ca_label.inputs.in_file = "norm.mgz" @@ -2573,79 +3093,83 @@ >>> ca_label.inputs.template = "Template_6.nii" # in practice use .gcs extension >>> ca_label.cmdline 'mri_ca_label norm.mgz trans.mat Template_6.nii out.mgz' + """ + _cmd = "mri_ca_label" input_spec = CALabelInputSpec output_spec = CALabelOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MRIsCALabelInputSpec(FSTraitedSpecOpenMP): # required subject_id = traits.String( - 'subject_id', + "subject_id", argstr="%s", position=-5, usedefault=True, mandatory=True, - desc="Subject name or ID") + desc="Subject name or ID", + ) hemisphere = traits.Enum( - 'lh', - 'rh', + "lh", + "rh", argstr="%s", position=-4, mandatory=True, - desc="Hemisphere ('lh' or 'rh')") + desc="Hemisphere ('lh' or 'rh')", + ) canonsurf = File( argstr="%s", position=-3, mandatory=True, exists=True, - desc="Input canonical surface file") + desc="Input canonical surface file", + ) classifier = File( argstr="%s", position=-2, mandatory=True, exists=True, - desc="Classifier array input file") + desc="Classifier array input file", + ) smoothwm = File( - mandatory=True, - exists=True, - desc="implicit input {hemisphere}.smoothwm") - curv = File( - mandatory=True, exists=True, desc="implicit input {hemisphere}.curv") - sulc = File( - mandatory=True, exists=True, desc="implicit input {hemisphere}.sulc") + mandatory=True, exists=True, desc="implicit input {hemisphere}.smoothwm" + ) + curv = File(mandatory=True, exists=True, desc="implicit input {hemisphere}.curv") + sulc = File(mandatory=True, exists=True, desc="implicit input {hemisphere}.sulc") out_file = File( argstr="%s", position=-1, exists=False, - name_source=['hemisphere'], + name_source=["hemisphere"], keep_extension=True, hash_files=False, name_template="%s.aparc.annot", - desc="Annotated surface output file") + desc="Annotated surface output file", + ) # optional - label = traits.File( + label = File( argstr="-l %s", exists=True, - desc= - "Undocumented flag. Autorecon3 uses ../label/{hemisphere}.cortex.label as input file" + desc="Undocumented flag. Autorecon3 uses ../label/{hemisphere}.cortex.label as input file", ) - aseg = traits.File( + aseg = File( argstr="-aseg %s", exists=True, - desc= - "Undocumented flag. Autorecon3 uses ../mri/aseg.presurf.mgz as input file" + desc="Undocumented flag. Autorecon3 uses ../mri/aseg.presurf.mgz as input file", ) seed = traits.Int(argstr="-seed %d", desc="") - copy_inputs = traits.Bool(desc="Copies implicit inputs to node directory " + "and creates a temp subjects_directory. 
" + - "Use this when running as a node") + copy_inputs = traits.Bool( + desc="Copies implicit inputs to node directory " + "and creates a temp subjects_directory. " + "Use this when running as a node" + ) class MRIsCALabelOutputSpec(TraitedSpec): @@ -2678,6 +3202,7 @@ class MRIsCALabel(FSCommandOpenMP): >>> ca_label.cmdline 'mris_ca_label test lh lh.pial im1.nii lh.aparc.annot' """ + _cmd = "mris_ca_label" input_spec = MRIsCALabelInputSpec output_spec = MRIsCALabelOutputSpec @@ -2685,39 +3210,43 @@ class MRIsCALabel(FSCommandOpenMP): def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir - copy2subjdir(self, self.inputs.canonsurf, folder='surf') + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.canonsurf, folder="surf") copy2subjdir( self, self.inputs.smoothwm, - folder='surf', - basename='{0}.smoothwm'.format(self.inputs.hemisphere)) + folder="surf", + basename=f"{self.inputs.hemisphere}.smoothwm", + ) copy2subjdir( self, self.inputs.curv, - folder='surf', - basename='{0}.curv'.format(self.inputs.hemisphere)) + folder="surf", + basename=f"{self.inputs.hemisphere}.curv", + ) copy2subjdir( self, self.inputs.sulc, - folder='surf', - basename='{0}.sulc'.format(self.inputs.hemisphere)) + folder="surf", + basename=f"{self.inputs.hemisphere}.sulc", + ) # The label directory must exist in order for an output to be written - label_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label') + label_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) if not os.path.isdir(label_dir): os.makedirs(label_dir) - return super(MRIsCALabel, self).run(**inputs) + return super().run(**inputs) def _list_outputs(self): outputs = self.output_spec().get() out_basename = os.path.basename(self.inputs.out_file) - outputs['out_file'] = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label', - out_basename) + outputs["out_file"] = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label", out_basename + ) return outputs @@ -2726,39 +3255,44 @@ class SegmentCCInputSpec(FSTraitedSpec): argstr="-aseg %s", mandatory=True, exists=True, - desc="Input aseg file to read from subjects directory") + desc="Input aseg file to read from subjects directory", + ) in_norm = File( mandatory=True, exists=True, - desc="Required undocumented input {subject}/mri/norm.mgz") + desc="Required undocumented input {subject}/mri/norm.mgz", + ) out_file = File( argstr="-o %s", exists=False, - name_source=['in_file'], - name_template='%s.auto.mgz', + name_source=["in_file"], + name_template="%s.auto.mgz", hash_files=False, keep_extension=False, - desc="Filename to write aseg including CC") + desc="Filename to write aseg including CC", + ) out_rotation = File( argstr="-lta %s", mandatory=True, exists=False, - desc="Global filepath for writing rotation lta") + desc="Global filepath for writing rotation lta", + ) subject_id = traits.String( - 'subject_id', + "subject_id", argstr="%s", mandatory=True, position=-1, usedefault=True, - desc="Subject name") + desc="Subject name", + ) copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the input files to the node " + "directory.") + desc="If running as a node, set this to True. " + "This will copy the input files to the node directory." 
+ ) class SegmentCCOutputSpec(TraitedSpec): - out_file = File( - exists=False, desc="Output segmentation uncluding corpus collosum") + out_file = File(exists=False, desc="Output segmentation including corpus callosum") out_rotation = File(exists=False, desc="Output lta rotation file") @@ -2798,42 +3332,41 @@ def _format_arg(self, name, spec, value): # mri_cc can't use abspaths just the basename basename = os.path.basename(value) return spec.argstr % basename - return super(SegmentCC, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) - outputs['out_rotation'] = os.path.abspath(self.inputs.out_rotation) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) + outputs["out_rotation"] = os.path.abspath(self.inputs.out_rotation) return outputs def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir for originalfile in [self.inputs.in_file, self.inputs.in_norm]: - copy2subjdir(self, originalfile, folder='mri') - return super(SegmentCC, self).run(**inputs) + copy2subjdir(self, originalfile, folder="mri") + return super().run(**inputs) def aggregate_outputs(self, runtime=None, needed_outputs=None): # it is necessary to find the output files and move - # them to the correct loacation + # them to the correct location predicted_outputs = self._list_outputs() - for name in ['out_file', 'out_rotation']: + for name in ["out_file", "out_rotation"]: out_file = predicted_outputs[name] if not os.path.isfile(out_file): out_base = os.path.basename(out_file) if isdefined(self.inputs.subjects_dir): - subj_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id) + subj_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id + ) else: - subj_dir = os.path.join(os.getcwd(), - self.inputs.subject_id) - if name == 'out_file': - out_tmp = os.path.join(subj_dir, 'mri', out_base) - elif name == 'out_rotation': - out_tmp = os.path.join(subj_dir, 'mri', 'transforms', - out_base) + subj_dir = os.path.join(os.getcwd(), self.inputs.subject_id) + if name == "out_file": + out_tmp = os.path.join(subj_dir, "mri", out_base) + elif name == "out_rotation": + out_tmp = os.path.join(subj_dir, "mri", "transforms", out_base) else: out_tmp = None # move the file to correct location @@ -2841,8 +3374,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): if not os.path.isdir(os.path.dirname(out_tmp)): os.makedirs(os.path.dirname(out_tmp)) shutil.move(out_tmp, out_file) - return super(SegmentCC, self).aggregate_outputs( - runtime, needed_outputs) + return super().aggregate_outputs(runtime, needed_outputs) class SegmentWMInputSpec(FSTraitedSpec): @@ -2851,13 +3383,15 @@ class SegmentWMInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=-2, - desc="Input file for SegmentWM") + desc="Input file for SegmentWM", + ) out_file = File( argstr="%s", exists=False, mandatory=True, position=-1, - desc="File to be written as output for SegmentWM") + desc="File to be written as output for SegmentWM", + ) class SegmentWMOutputSpec(TraitedSpec): @@ -2887,7 +3421,7 @@ class SegmentWM(FSCommand): def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = 
os.path.abspath(self.inputs.out_file) return outputs @@ -2897,28 +3431,31 @@ class EditWMwithAsegInputSpec(FSTraitedSpec): position=-4, mandatory=True, exists=True, - desc="Input white matter segmentation file") + desc="Input white matter segmentation file", + ) brain_file = File( argstr="%s", position=-3, mandatory=True, exists=True, - desc="Input brain/T1 file") + desc="Input brain/T1 file", + ) seg_file = File( argstr="%s", position=-2, mandatory=True, exists=True, - desc="Input presurf segmentation file") + desc="Input presurf segmentation file", + ) out_file = File( argstr="%s", position=-1, mandatory=True, exists=False, - desc="File to be written as output") + desc="File to be written as output", + ) # optional - keep_in = traits.Bool( - argstr="-keep-in", desc="Keep edits as found in input volume") + keep_in = traits.Bool(argstr="-keep-in", desc="Keep edits as found in input volume") class EditWMwithAsegOutputSpec(TraitedSpec): @@ -2941,13 +3478,14 @@ class EditWMwithAseg(FSCommand): >>> editwm.cmdline 'mri_edit_wm_with_aseg -keep-in T1.mgz norm.mgz aseg.mgz wm.asegedit.mgz' """ - _cmd = 'mri_edit_wm_with_aseg' + + _cmd = "mri_edit_wm_with_aseg" input_spec = EditWMwithAsegInputSpec output_spec = EditWMwithAsegOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -2956,61 +3494,65 @@ class ConcatenateLTAInputSpec(FSTraitedSpec): in_lta1 = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-3, - desc='maps some src1 to dst1') + desc="maps some src1 to dst1", + ) in_lta2 = traits.Either( File(exists=True), - 'identity.nofile', - argstr='%s', + "identity.nofile", + argstr="%s", position=-2, mandatory=True, - desc='maps dst1(src2) to dst2') + desc="maps dst1(src2) to dst2", + ) out_file = File( position=-1, - argstr='%s', + argstr="%s", hash_files=False, - name_source=['in_lta1'], - name_template='%s_concat', + name_source=["in_lta1"], + name_template="%s_concat", keep_extension=True, - desc='the combined LTA maps: src1 to dst2 = LTA2*LTA1') + desc="the combined LTA maps: src1 to dst2 = LTA2*LTA1", + ) # Inversion and transform type - invert_1 = traits.Bool( - argstr='-invert1', desc='invert in_lta1 before applying it') - invert_2 = traits.Bool( - argstr='-invert2', desc='invert in_lta2 before applying it') - invert_out = traits.Bool(argstr='-invertout', desc='invert output LTA') + invert_1 = traits.Bool(argstr="-invert1", desc="invert in_lta1 before applying it") + invert_2 = traits.Bool(argstr="-invert2", desc="invert in_lta2 before applying it") + invert_out = traits.Bool(argstr="-invertout", desc="invert output LTA") out_type = traits.Enum( - 'VOX2VOX', 'RAS2RAS', argstr='-out_type %d', desc='set final LTA type') + "VOX2VOX", "RAS2RAS", argstr="-out_type %d", desc="set final LTA type" + ) # Talairach options - tal_source_file = traits.File( + tal_source_file = File( exists=True, - argstr='-tal %s', + argstr="-tal %s", position=-5, - requires=['tal_template_file'], - desc='if in_lta2 is talairach.xfm, specify source for talairach') - tal_template_file = traits.File( + requires=["tal_template_file"], + desc="if in_lta2 is talairach.xfm, specify source for talairach", + ) + tal_template_file = File( exists=True, - argstr='%s', + argstr="%s", position=-4, - requires=['tal_source_file'], - desc='if in_lta2 is talairach.xfm, specify template for talairach') + requires=["tal_source_file"], + desc="if in_lta2 is 
talairach.xfm, specify template for talairach", + ) - subject = traits.Str( - argstr='-subject %s', desc='set subject in output LTA') + subject = traits.Str(argstr="-subject %s", desc="set subject in output LTA") # Note rmsdiff would be xor out_file, and would be most easily dealt with # in a new interface. -CJM 2017.10.05 class ConcatenateLTAOutputSpec(TraitedSpec): out_file = File( - exists=False, desc='the combined LTA maps: src1 to dst2 = LTA2*LTA1') + exists=False, desc="the combined LTA maps: src1 to dst2 = LTA2*LTA1" + ) class ConcatenateLTA(FSCommand): - """ Concatenates two consecutive LTA transformations into one overall + """Concatenates two consecutive LTA transformations into one overall transformation Out = LTA2*LTA1 @@ -3039,11 +3581,11 @@ class ConcatenateLTA(FSCommand): 'mri_concatenate_lta -invert1 -out_type 1 lta1.lta identity.nofile inv1.lta' """ - _cmd = 'mri_concatenate_lta' + _cmd = "mri_concatenate_lta" input_spec = ConcatenateLTAInputSpec output_spec = ConcatenateLTAOutputSpec def _format_arg(self, name, spec, value): - if name == 'out_type': - value = {'VOX2VOX': 0, 'RAS2RAS': 1}[value] - return super(ConcatenateLTA, self)._format_arg(name, spec, value) + if name == "out_type": + value = {"VOX2VOX": 0, "RAS2RAS": 1}[value] + return super()._format_arg(name, spec, value) diff --git a/nipype/interfaces/freesurfer/registration.py b/nipype/interfaces/freesurfer/registration.py index 99ee7d0179..bc70fc44a6 100644 --- a/nipype/interfaces/freesurfer/registration.py +++ b/nipype/interfaces/freesurfer/registration.py @@ -1,10 +1,7 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various longitudinal commands provided by freesurfer """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os import os.path @@ -12,32 +9,37 @@ from ... 
import logging from ...utils.filemanip import split_filename, copyfile -from .base import (FSCommand, FSTraitedSpec, FSScriptCommand, - FSScriptOutputSpec, FSCommandOpenMP, FSTraitedSpecOpenMP) -from ..base import (isdefined, TraitedSpec, File, traits, Directory) +from .base import ( + FSCommand, + FSTraitedSpec, + FSScriptCommand, + FSScriptOutputSpec, + FSCommandOpenMP, + FSTraitedSpecOpenMP, +) +from ..base import isdefined, TraitedSpec, File, traits, Tuple, Directory -__docformat__ = 'restructuredtext' -iflogger = logging.getLogger('nipype.interface') +__docformat__ = "restructuredtext" +iflogger = logging.getLogger("nipype.interface") class MPRtoMNI305InputSpec(FSTraitedSpec): # environment variables, required # usedefault=True is hack for on_trait_change in __init__ reference_dir = Directory( - "", exists=True, mandatory=True, usedefault=True, desc="TODO") - target = traits.String( - "", mandatory=True, usedefault=True, desc="input atlas file") + "", exists=True, mandatory=True, usedefault=True, desc="TODO" + ) + target = traits.String("", mandatory=True, usedefault=True, desc="input atlas file") # required in_file = File( - argstr='%s', - usedefault=True, - desc="the input file prefix for MPRtoMNI305") + argstr="%s", usedefault=True, desc="the input file prefix for MPRtoMNI305" + ) class MPRtoMNI305OutputSpec(FSScriptOutputSpec): out_file = File( - exists=False, - desc="The output file '<in_file>_to_<target>_t4_vox2vox.txt'") + exists=False, desc="The output file '<in_file>_to_<target>_t4_vox2vox.txt'" + ) class MPRtoMNI305(FSScriptCommand): @@ -45,8 +47,7 @@ class MPRtoMNI305(FSScriptCommand): For complete details, see FreeSurfer documentation Examples - ======== - + -------- >>> from nipype.interfaces.freesurfer import MPRtoMNI305, Info >>> mprtomni305 = MPRtoMNI305() >>> mprtomni305.inputs.target = 'structural.nii' @@ -63,79 +64,74 @@ >>> mprtomni305.run() # doctest: +SKIP """ + _cmd = "mpr2mni305" input_spec = MPRtoMNI305InputSpec output_spec = MPRtoMNI305OutputSpec def __init__(self, **inputs): - super(MPRtoMNI305, self).__init__(**inputs) - self.inputs.on_trait_change(self._environ_update, 'target') - self.inputs.on_trait_change(self._environ_update, 'reference_dir') + super().__init__(**inputs) + self.inputs.on_trait_change(self._environ_update, "target") + self.inputs.on_trait_change(self._environ_update, "reference_dir") def _format_arg(self, opt, spec, val): - if opt in ['target', 'reference_dir']: + if opt in ["target", "reference_dir"]: return "" - elif opt == 'in_file': + elif opt == "in_file": _, retval, ext = split_filename(val) # Need to copy file to working cache directory! 
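# (The mpr2mni305 script receives only the file's stem as its argument, so the
# input is staged into the working directory first; hashmethod="content" should
# keep nipype's content-based hashing, and thus caching, consistent across reruns.)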
copyfile( - val, - os.path.abspath(retval + ext), - copy=True, - hashmethod='content') + val, os.path.abspath(retval + ext), copy=True, hashmethod="content" + ) return retval - return super(MPRtoMNI305, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _environ_update(self): # refdir = os.path.join(Info.home(), val) refdir = self.inputs.reference_dir target = self.inputs.target - self.inputs.environ['MPR2MNI305_TARGET'] = target + self.inputs.environ["MPR2MNI305_TARGET"] = target self.inputs.environ["REFDIR"] = refdir def _get_fname(self, fname): return split_filename(fname)[1] def _list_outputs(self): - outputs = super(MPRtoMNI305, self)._list_outputs() - fullname = "_".join([ - self._get_fname(self.inputs.in_file), "to", self.inputs.target, - "t4", "vox2vox.txt" - ]) - outputs['out_file'] = os.path.abspath(fullname) + outputs = super()._list_outputs() + fullname = "_".join( + [ + self._get_fname(self.inputs.in_file), + "to", + self.inputs.target, + "t4", + "vox2vox.txt", + ] + ) + outputs["out_file"] = os.path.abspath(fullname) return outputs class RegisterAVItoTalairachInputSpec(FSTraitedSpec): in_file = File( - argstr='%s', - exists=True, - mandatory=True, - position=0, - desc="The input file") + argstr="%s", exists=True, mandatory=True, position=0, desc="The input file" + ) target = File( - argstr='%s', - exists=True, - mandatory=True, - position=1, - desc="The target file") + argstr="%s", exists=True, mandatory=True, position=1, desc="The target file" + ) vox2vox = File( - argstr='%s', - exists=True, - mandatory=True, - position=2, - desc="The vox2vox file") + argstr="%s", exists=True, mandatory=True, position=2, desc="The vox2vox file" + ) out_file = File( - 'talairach.auto.xfm', + "talairach.auto.xfm", usedefault=True, - argstr='%s', + argstr="%s", position=3, - desc="The transform output") + desc="The transform output", + ) class RegisterAVItoTalairachOutputSpec(FSScriptOutputSpec): - out_file = traits.File( - exists=False, desc="The output file for RegisterAVItoTalairach") + out_file = File(exists=False, desc="The output file for RegisterAVItoTalairach") class RegisterAVItoTalairach(FSScriptCommand): @@ -170,50 +166,43 @@ class RegisterAVItoTalairach(FSScriptCommand): >>> register.run() # doctest: +SKIP """ + _cmd = "avi2talxfm" input_spec = RegisterAVItoTalairachInputSpec output_spec = RegisterAVItoTalairachOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class EMRegisterInputSpec(FSTraitedSpecOpenMP): # required in_file = File( - argstr="%s", - exists=True, - mandatory=True, - position=-3, - desc="in brain volume") + argstr="%s", exists=True, mandatory=True, position=-3, desc="in brain volume" + ) template = File( - argstr="%s", - exists=True, - mandatory=True, - position=-2, - desc="template gca") + argstr="%s", exists=True, mandatory=True, position=-2, desc="template gca" + ) out_file = File( argstr="%s", exists=False, - name_source=['in_file'], + name_source=["in_file"], name_template="%s_transform.lta", hash_files=False, keep_extension=False, position=-1, - desc="output transform") + desc="output transform", + ) # optional - skull = traits.Bool( - argstr="-skull", desc="align to atlas containing skull (uns=5)") + skull = traits.Bool(argstr="-skull", desc="align to atlas containing skull (uns=5)") mask = File(argstr="-mask %s", exists=True, desc="use volume as a mask") nbrspacing = 
traits.Int( argstr="-uns %d", - desc= - "align to atlas containing skull setting unknown_nbr_spacing = nbrspacing" + desc="align to atlas containing skull setting unknown_nbr_spacing = nbrspacing", ) - transform = File( - argstr="-t %s", exists=True, desc="Previously computed transform") + transform = File(argstr="-t %s", exists=True, desc="Previously computed transform") class EMRegisterOutputSpec(TraitedSpec): @@ -221,7 +210,7 @@ class EMRegisterOutputSpec(TraitedSpec): class EMRegister(FSCommandOpenMP): - """ This program creates a tranform in lta format + """This program creates a transform in lta format Examples ======== @@ -235,13 +224,14 @@ class EMRegister(FSCommandOpenMP): >>> register.cmdline 'mri_em_register -uns 9 -skull norm.mgz aseg.mgz norm_transform.lta' """ - _cmd = 'mri_em_register' + + _cmd = "mri_em_register" input_spec = EMRegisterInputSpec output_spec = EMRegisterOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -253,46 +243,48 @@ class RegisterInputSpec(FSTraitedSpec): mandatory=True, position=-3, copyfile=True, - desc="Surface to register, often {hemi}.sphere") + desc="Surface to register, often {hemi}.sphere", + ) target = File( argstr="%s", exists=True, mandatory=True, position=-2, - desc="The data to register to. In normal recon-all usage, " + - "this is a template file for average surface.") + desc="The data to register to. In normal recon-all usage, " + "this is a template file for average surface.", + ) in_sulc = File( exists=True, mandatory=True, copyfile=True, - desc= - "Undocumented mandatory input file ${SUBJECTS_DIR}/surf/{hemisphere}.sulc " + desc="Undocumented mandatory input file ${SUBJECTS_DIR}/surf/{hemisphere}.sulc ", ) out_file = File( argstr="%s", exists=False, position=-1, genfile=True, - desc="Output surface file to capture registration") + desc="Output surface file to capture registration", + ) # optional curv = traits.Bool( argstr="-curv", - requires=['in_smoothwm'], - desc="Use smoothwm curvature for final alignment") + requires=["in_smoothwm"], + desc="Use smoothwm curvature for final alignment", + ) in_smoothwm = File( exists=True, copyfile=True, - desc= - "Undocumented input file ${SUBJECTS_DIR}/surf/{hemisphere}.smoothwm ") + desc="Undocumented input file ${SUBJECTS_DIR}/surf/{hemisphere}.smoothwm ", + ) class RegisterOutputSpec(TraitedSpec): - out_file = File( - exists=False, desc="Output surface file to capture registration") + out_file = File(exists=False, desc="Output surface file to capture registration") class Register(FSCommand): - """ This program registers a surface to an average surface template. + """This program registers a surface to an average surface template. 
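    Note that ``in_sulc`` (and ``in_smoothwm``, when ``curv`` is used) never
    appears on the command line: mris_register expects to find it alongside
    ``in_surf``, which is why these implicit inputs are declared with
    ``copyfile=True``.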
Examples ======== @@ -308,26 +300,26 @@ class Register(FSCommand): 'mris_register -curv lh.pial aseg.mgz lh.pial.reg' """ - _cmd = 'mris_register' + _cmd = "mris_register" input_spec = RegisterInputSpec output_spec = RegisterOutputSpec def _format_arg(self, opt, spec, val): - if opt == 'curv': + if opt == "curv": return spec.argstr - return super(Register, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) else: - outputs['out_file'] = os.path.abspath(self.inputs.in_surf) + '.reg' + outputs["out_file"] = os.path.abspath(self.inputs.in_surf) + ".reg" return outputs @@ -338,14 +330,12 @@ class PaintInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=-2, - desc="Surface file with grid (vertices) onto which the " + - "template data is to be sampled or 'painted'") + desc="Surface file with grid (vertices) onto which the " + "template data is to be sampled or 'painted'", + ) template = File( - argstr="%s", - exists=True, - mandatory=True, - position=-3, - desc="Template file") + argstr="%s", exists=True, mandatory=True, position=-3, desc="Template file" + ) # optional template_param = traits.Int(desc="Frame number of the input template") averages = traits.Int(argstr="-a %d", desc="Average curvature patterns") @@ -355,17 +345,17 @@ class PaintInputSpec(FSTraitedSpec): position=-1, name_template="%s.avg_curv", hash_files=False, - name_source=['in_surf'], + name_source=["in_surf"], keep_extension=False, - desc="File containing a surface-worth of per-vertex values, " + - "saved in 'curvature' format.") + desc="File containing a surface-worth of per-vertex values, " + "saved in 'curvature' format.", + ) class PaintOutputSpec(TraitedSpec): out_file = File( exists=False, - desc= - "File containing a surface-worth of per-vertex values, saved in 'curvature' format." 
+ desc="File containing a surface-worth of per-vertex values, saved in 'curvature' format.", ) @@ -389,154 +379,170 @@ class Paint(FSCommand): 'mrisp_paint -a 5 aseg.mgz lh.pial lh.avg_curv' """ - _cmd = 'mrisp_paint' + _cmd = "mrisp_paint" input_spec = PaintInputSpec output_spec = PaintOutputSpec def _format_arg(self, opt, spec, val): - if opt == 'template': + if opt == "template": if isdefined(self.inputs.template_param): - return spec.argstr % ( - val + '#' + str(self.inputs.template_param)) - return super(Paint, self)._format_arg(opt, spec, val) + return spec.argstr % (val + "#" + str(self.inputs.template_param)) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MRICoregInputSpec(FSTraitedSpec): source_file = File( - argstr='--mov %s', - desc='source file to be registered', + argstr="--mov %s", + desc="source file to be registered", mandatory=True, - copyfile=False) + copyfile=False, + ) reference_file = File( - argstr='--ref %s', - desc='reference (target) file', + argstr="--ref %s", + desc="reference (target) file", mandatory=True, copyfile=False, - xor=['subject_id']) + xor=["subject_id"], + ) out_lta_file = traits.Either( True, File, - argstr='--lta %s', + argstr="--lta %s", default=True, usedefault=True, - desc='output registration file (LTA format)') + desc="output registration file (LTA format)", + ) out_reg_file = traits.Either( - True, - File, - argstr='--regdat %s', - desc='output registration file (REG format)') + True, File, argstr="--regdat %s", desc="output registration file (REG format)" + ) out_params_file = traits.Either( - True, File, argstr='--params %s', desc='output parameters file') + True, File, argstr="--params %s", desc="output parameters file" + ) subjects_dir = Directory( - exists=True, argstr='--sd %s', desc='FreeSurfer SUBJECTS_DIR') + exists=True, argstr="--sd %s", desc="FreeSurfer SUBJECTS_DIR" + ) subject_id = traits.Str( - argstr='--s %s', + argstr="--s %s", position=1, mandatory=True, - xor=['reference_file'], - requires=['subjects_dir'], - desc='freesurfer subject ID (implies ``reference_mask == ' - 'aparc+aseg.mgz`` unless otherwise specified)') + xor=["reference_file"], + requires=["subjects_dir"], + desc="freesurfer subject ID (implies ``reference_mask == " + "aparc+aseg.mgz`` unless otherwise specified)", + ) dof = traits.Enum( - 6, - 9, - 12, - argstr='--dof %d', - desc='number of transform degrees of freedom') + 6, 9, 12, argstr="--dof %d", desc="number of transform degrees of freedom" + ) reference_mask = traits.Either( False, traits.Str, - argstr='--ref-mask %s', + argstr="--ref-mask %s", position=2, - desc='mask reference volume with given mask, or None if ``False``') + desc="mask reference volume with given mask, or None if ``False``", + ) source_mask = traits.Str( - argstr='--mov-mask', desc='mask source file with given mask') - num_threads = traits.Int( - argstr='--threads %d', desc='number of OpenMP threads') + argstr="--mov-mask", desc="mask source file with given mask" + ) + num_threads = traits.Int(argstr="--threads %d", desc="number of OpenMP threads") no_coord_dithering = traits.Bool( - argstr='--no-coord-dither', desc='turn off coordinate dithering') + argstr="--no-coord-dither", desc="turn off coordinate dithering" + ) no_intensity_dithering = traits.Bool( - argstr='--no-intensity-dither', desc='turn off intensity dithering') + 
argstr="--no-intensity-dither", desc="turn off intensity dithering" + ) sep = traits.List( - argstr='--sep %s...', + argstr="--sep %s...", minlen=1, maxlen=2, - desc='set spatial scales, in voxels (default [2, 4])') - initial_translation = traits.Tuple( + desc="set spatial scales, in voxels (default [2, 4])", + ) + initial_translation = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--trans %g %g %g', - desc='initial translation in mm (implies no_cras0)') - initial_rotation = traits.Tuple( + argstr="--trans %g %g %g", + desc="initial translation in mm (implies no_cras0)", + ) + initial_rotation = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--rot %g %g %g', - desc='initial rotation in degrees') - initial_scale = traits.Tuple( + argstr="--rot %g %g %g", + desc="initial rotation in degrees", + ) + initial_scale = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--scale %g %g %g', - desc='initial scale') - initial_shear = traits.Tuple( + argstr="--scale %g %g %g", + desc="initial scale", + ) + initial_shear = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--shear %g %g %g', - desc='initial shear (Hxy, Hxz, Hyz)') + argstr="--shear %g %g %g", + desc="initial shear (Hxy, Hxz, Hyz)", + ) no_cras0 = traits.Bool( - argstr='--no-cras0', - desc='do not set translation parameters to align ' - 'centers of source and reference files') + argstr="--no-cras0", + desc="do not set translation parameters to align " + "centers of source and reference files", + ) max_iters = traits.Range( - low=1, argstr='--nitersmax %d', desc='maximum iterations (default: 4)') + low=1, argstr="--nitersmax %d", desc="maximum iterations (default: 4)" + ) ftol = traits.Float( - argstr='--ftol %e', desc='floating-point tolerance (default=1e-7)') - linmintol = traits.Float(argstr='--linmintol %e') + argstr="--ftol %e", desc="floating-point tolerance (default=1e-7)" + ) + linmintol = traits.Float(argstr="--linmintol %e") saturation_threshold = traits.Range( low=0.0, high=100.0, - argstr='--sat %g', - desc='saturation threshold (default=9.999)') + argstr="--sat %g", + desc="saturation threshold (default=9.999)", + ) conform_reference = traits.Bool( - argstr='--conf-ref', desc='conform reference without rescaling') - no_brute_force = traits.Bool( - argstr='--no-bf', desc='do not brute force search') + argstr="--conf-ref", desc="conform reference without rescaling" + ) + no_brute_force = traits.Bool(argstr="--no-bf", desc="do not brute force search") brute_force_limit = traits.Float( - argstr='--bf-lim %g', - xor=['no_brute_force'], - desc='constrain brute force search to +/- lim') + argstr="--bf-lim %g", + xor=["no_brute_force"], + desc="constrain brute force search to +/- lim", + ) brute_force_samples = traits.Int( - argstr='--bf-nsamp %d', - xor=['no_brute_force'], - desc='number of samples in brute force search') + argstr="--bf-nsamp %d", + xor=["no_brute_force"], + desc="number of samples in brute force search", + ) no_smooth = traits.Bool( - argstr='--no-smooth', - desc='do not apply smoothing to either reference or source file') + argstr="--no-smooth", + desc="do not apply smoothing to either reference or source file", + ) ref_fwhm = traits.Float( - argstr='--ref-fwhm', desc='apply smoothing to reference file') + argstr="--ref-fwhm", desc="apply smoothing to reference file" + ) source_oob = traits.Bool( - argstr='--mov-oob', - desc='count source voxels that are out-of-bounds as 0') + argstr="--mov-oob", desc="count source voxels that are out-of-bounds as 0" + ) # Skipping mat2par 
class MRICoregOutputSpec(TraitedSpec): - out_reg_file = File(exists=True, desc='output registration file') - out_lta_file = File(exists=True, desc='output LTA-style registration file') - out_params_file = File(exists=True, desc='output parameters file') + out_reg_file = File(exists=True, desc="output registration file") + out_lta_file = File(exists=True, desc="output LTA-style registration file") + out_params_file = File(exists=True, desc="output parameters file") class MRICoreg(FSCommand): - """ This program registers one volume to another + """This program registers one volume to another mri_coreg is a C reimplementation of spm_coreg in FreeSurfer @@ -571,17 +577,16 @@ class MRICoreg(FSCommand): 'mri_coreg --s fsaverage --no-ref-mask --lta .../registration.lta --sep 4 --sep 5 --mov moving1.nii --sd .' """ - _cmd = 'mri_coreg' + _cmd = "mri_coreg" input_spec = MRICoregInputSpec output_spec = MRICoregOutputSpec def _format_arg(self, opt, spec, val): - if opt in ('out_reg_file', 'out_lta_file', - 'out_params_file') and val is True: + if opt in ("out_reg_file", "out_lta_file", "out_params_file") and val is True: val = self._list_outputs()[opt] - elif opt == 'reference_mask' and val is False: - return '--no-ref-mask' - return super(MRICoreg, self)._format_arg(opt, spec, val) + elif opt == "reference_mask" and val is False: + return "--no-ref-mask" + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self.output_spec().get() @@ -589,19 +594,19 @@ def _list_outputs(self): out_lta_file = self.inputs.out_lta_file if isdefined(out_lta_file): if out_lta_file is True: - out_lta_file = 'registration.lta' - outputs['out_lta_file'] = os.path.abspath(out_lta_file) + out_lta_file = "registration.lta" + outputs["out_lta_file"] = os.path.abspath(out_lta_file) out_reg_file = self.inputs.out_reg_file if isdefined(out_reg_file): if out_reg_file is True: - out_reg_file = 'registration.dat' - outputs['out_reg_file'] = os.path.abspath(out_reg_file) + out_reg_file = "registration.dat" + outputs["out_reg_file"] = os.path.abspath(out_reg_file) out_params_file = self.inputs.out_params_file if isdefined(out_params_file): if out_params_file is True: - out_params_file = 'registration.par' - outputs['out_params_file'] = os.path.abspath(out_params_file) + out_params_file = "registration.par" + outputs["out_params_file"] = os.path.abspath(out_params_file) return outputs diff --git a/nipype/interfaces/freesurfer/tests/__init__.py b/nipype/interfaces/freesurfer/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/freesurfer/tests/__init__.py +++ b/nipype/interfaces/freesurfer/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/freesurfer/tests/test_BBRegister.py b/nipype/interfaces/freesurfer/tests/test_BBRegister.py index 3e80ad4e9c..1b3496c516 100644 --- a/nipype/interfaces/freesurfer/tests/test_BBRegister.py +++ b/nipype/interfaces/freesurfer/tests/test_BBRegister.py @@ -1,114 +1,49 @@ -from __future__ import unicode_literals from ..preprocess import BBRegister, BBRegisterInputSpec6 def test_BBRegister_inputs(): input_map_5_3 = dict( - args=dict(argstr='%s', ), - contrast_type=dict( - argstr='--%s', - mandatory=True, - ), - dof=dict(argstr='--%d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - epi_mask=dict(argstr='--epi-mask', ), - fsldof=dict(argstr='--fsl-dof %d', ), - init=dict( - argstr='--init-%s', - mandatory=True, - xor=['init_reg_file'], - ), - init_cost_file=dict(argstr='--initcost %s', ), - init_reg_file=dict( 
- argstr='--init-reg %s', - mandatory=True, - xor=['init'], - ), - intermediate_file=dict(argstr='--int %s', ), - out_fsl_file=dict(argstr='--fslmat %s', ), - out_lta_file=dict( - argstr='--lta %s', - min_ver='5.2.0', - ), - out_reg_file=dict( - argstr='--reg %s', - genfile=True, - ), - reg_frame=dict( - argstr='--frame %d', - xor=['reg_middle_frame'], - ), - reg_middle_frame=dict( - argstr='--mid-frame', - xor=['reg_frame'], - ), - registered_file=dict(argstr='--o %s', ), - source_file=dict( - argstr='--mov %s', - copyfile=False, - mandatory=True, - ), - spm_nifti=dict(argstr='--spm-nii', ), - subject_id=dict( - argstr='--s %s', - mandatory=True, - ), + args=dict(argstr="%s"), + contrast_type=dict(argstr="--%s", mandatory=True), + dof=dict(argstr="--%d"), + environ=dict(nohash=True, usedefault=True), + epi_mask=dict(argstr="--epi-mask"), + fsldof=dict(argstr="--fsl-dof %d"), + init=dict(argstr="--init-%s", mandatory=True, xor=["init_reg_file"]), + init_cost_file=dict(argstr="--initcost %s"), + init_reg_file=dict(argstr="--init-reg %s", mandatory=True, xor=["init"]), + intermediate_file=dict(argstr="--int %s"), + out_fsl_file=dict(argstr="--fslmat %s"), + out_lta_file=dict(argstr="--lta %s", min_ver="5.2.0"), + out_reg_file=dict(argstr="--reg %s", genfile=True), + reg_frame=dict(argstr="--frame %d", xor=["reg_middle_frame"]), + reg_middle_frame=dict(argstr="--mid-frame", xor=["reg_frame"]), + registered_file=dict(argstr="--o %s"), + source_file=dict(argstr="--mov %s", copyfile=False, mandatory=True), + spm_nifti=dict(argstr="--spm-nii"), + subject_id=dict(argstr="--s %s", mandatory=True), subjects_dir=dict(), ) input_map_6_0 = dict( - args=dict(argstr='%s', ), - contrast_type=dict( - argstr='--%s', - mandatory=True, - ), - dof=dict(argstr='--%d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - epi_mask=dict(argstr='--epi-mask', ), - fsldof=dict(argstr='--fsl-dof %d', ), - init=dict( - argstr='--init-%s', - xor=['init_reg_file'], - ), - init_reg_file=dict( - argstr='--init-reg %s', - xor=['init'], - ), - init_cost_file=dict(argstr='--initcost %s', ), - intermediate_file=dict(argstr='--int %s', ), - out_fsl_file=dict(argstr='--fslmat %s', ), - out_lta_file=dict( - argstr='--lta %s', - min_ver='5.2.0', - ), - out_reg_file=dict( - argstr='--reg %s', - genfile=True, - ), - reg_frame=dict( - argstr='--frame %d', - xor=['reg_middle_frame'], - ), - reg_middle_frame=dict( - argstr='--mid-frame', - xor=['reg_frame'], - ), - registered_file=dict(argstr='--o %s', ), - source_file=dict( - argstr='--mov %s', - copyfile=False, - mandatory=True, - ), - spm_nifti=dict(argstr='--spm-nii', ), - subject_id=dict( - argstr='--s %s', - mandatory=True, - ), + args=dict(argstr="%s"), + contrast_type=dict(argstr="--%s", mandatory=True), + dof=dict(argstr="--%d"), + environ=dict(nohash=True, usedefault=True), + epi_mask=dict(argstr="--epi-mask"), + fsldof=dict(argstr="--fsl-dof %d"), + init=dict(argstr="--init-%s", xor=["init_reg_file"]), + init_reg_file=dict(argstr="--init-reg %s", xor=["init"]), + init_cost_file=dict(argstr="--initcost %s"), + intermediate_file=dict(argstr="--int %s"), + out_fsl_file=dict(argstr="--fslmat %s"), + out_lta_file=dict(argstr="--lta %s", min_ver="5.2.0"), + out_reg_file=dict(argstr="--reg %s", genfile=True), + reg_frame=dict(argstr="--frame %d", xor=["reg_middle_frame"]), + reg_middle_frame=dict(argstr="--mid-frame", xor=["reg_frame"]), + registered_file=dict(argstr="--o %s"), + source_file=dict(argstr="--mov %s", copyfile=False, mandatory=True), + 
spm_nifti=dict(argstr="--spm-nii"), + subject_id=dict(argstr="--s %s", mandatory=True), subjects_dir=dict(), ) diff --git a/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py b/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py index 704dfeaaf3..cdc2cc3131 100644 --- a/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py +++ b/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py @@ -1,5 +1,4 @@ # Modified 2017.04.21 by Chris Markiewicz -from __future__ import unicode_literals import pytest from ..base import FSSurfaceCommand @@ -9,7 +8,7 @@ def test_FSSurfaceCommand_inputs(): input_map = dict( - args=dict(argstr='%s'), + args=dict(argstr="%s"), environ=dict(nohash=True, usedefault=True), subjects_dir=dict(), ) @@ -23,21 +22,24 @@ def test_FSSurfaceCommand_inputs(): @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_associated_file(tmpdir): fssrc = FreeSurferSource( - subjects_dir=fs.Info.subjectsdir(), subject_id='fsaverage', hemi='lh') + subjects_dir=fs.Info.subjectsdir(), subject_id="fsaverage", hemi="lh" + ) fssrc.base_dir = tmpdir.strpath fssrc.resource_monitor = False fsavginfo = fssrc.run().outputs.get() # Pairs of white/pial files in the same directories - for white, pial in [('lh.white', 'lh.pial'), ('./lh.white', './lh.pial'), - (fsavginfo['white'], fsavginfo['pial'])]: - + for white, pial in [ + ("lh.white", "lh.pial"), + ("./lh.white", "./lh.pial"), + (fsavginfo["white"], fsavginfo["pial"]), + ]: # Unspecified paths, possibly with missing hemisphere information, # are equivalent to using the same directory and hemisphere - for name in ('pial', 'lh.pial', pial): + for name in ("pial", "lh.pial", pial): assert FSSurfaceCommand._associated_file(white, name) == pial # With path information, no changes are made - for name in ('./pial', './lh.pial', fsavginfo['pial']): + for name in ("./pial", "./lh.pial", fsavginfo["pial"]): assert FSSurfaceCommand._associated_file(white, name) == name diff --git a/nipype/interfaces/freesurfer/tests/test_auto_AddXFormToHeader.py b/nipype/interfaces/freesurfer/tests/test_auto_AddXFormToHeader.py index ed32693df5..23618b2aa8 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_AddXFormToHeader.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_AddXFormToHeader.py @@ -1,41 +1,55 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import AddXFormToHeader def test_AddXFormToHeader_inputs(): input_map = dict( - args=dict(argstr='%s', ), - copy_name=dict(argstr='-c', ), + args=dict( + argstr="%s", + ), + copy_name=dict( + argstr="-c", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, usedefault=True, ), subjects_dir=dict(), transform=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), - verbose=dict(argstr='-v', ), + verbose=dict( + argstr="-v", + ), ) inputs = AddXFormToHeader.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AddXFormToHeader_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = AddXFormToHeader.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py b/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py index fc5bc39b6f..bc65ee096c 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py @@ -1,51 +1,105 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Aparc2Aseg def test_Aparc2Aseg_inputs(): input_map = dict( - a2009s=dict(argstr='--a2009s', ), - args=dict(argstr='%s', ), - aseg=dict(argstr='--aseg %s', ), + a2009s=dict( + argstr="--a2009s", + ), + args=dict( + argstr="%s", + ), + aseg=dict( + argstr="--aseg %s", + extensions=None, + ), copy_inputs=dict(), - ctxseg=dict(argstr='--ctxseg %s', ), + ctxseg=dict( + argstr="--ctxseg %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), - filled=dict(), - hypo_wm=dict(argstr='--hypo-as-wm', ), - label_wm=dict(argstr='--labelwm', ), - lh_annotation=dict(mandatory=True, ), - lh_pial=dict(mandatory=True, ), - lh_ribbon=dict(mandatory=True, ), - lh_white=dict(mandatory=True, ), + filled=dict( + extensions=None, + ), + hypo_wm=dict( + argstr="--hypo-as-wm", + ), + label_wm=dict( + argstr="--labelwm", + ), + lh_annotation=dict( + extensions=None, + mandatory=True, + ), + lh_pial=dict( + extensions=None, + mandatory=True, + ), + lh_ribbon=dict( + extensions=None, + mandatory=True, + ), + lh_white=dict( + extensions=None, + mandatory=True, + ), out_file=dict( - argstr='--o %s', + argstr="--o %s", + extensions=None, + mandatory=True, + ), + rh_annotation=dict( + extensions=None, + mandatory=True, + ), + rh_pial=dict( + extensions=None, + mandatory=True, + ), + rh_ribbon=dict( + extensions=None, + mandatory=True, + ), + rh_white=dict( + extensions=None, + mandatory=True, + ), + ribbon=dict( + extensions=None, mandatory=True, ), - rh_annotation=dict(mandatory=True, ), - rh_pial=dict(mandatory=True, ), - rh_ribbon=dict(mandatory=True, ), - rh_white=dict(mandatory=True, ), - ribbon=dict(mandatory=True, ), - rip_unknown=dict(argstr='--rip-unknown', ), + rip_unknown=dict( + argstr="--rip-unknown", + ), subject_id=dict( - argstr='--s %s', + argstr="--s %s", mandatory=True, usedefault=True, ), subjects_dir=dict(), - volmask=dict(argstr='--volmask', ), + volmask=dict( + argstr="--volmask", + ), ) inputs = Aparc2Aseg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Aparc2Aseg_outputs(): - output_map = dict(out_file=dict(argstr='%s', ), ) + output_map = dict( + out_file=dict( + argstr="%s", + extensions=None, + ), + ) outputs = Aparc2Aseg.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Apas2Aseg.py b/nipype/interfaces/freesurfer/tests/test_auto_Apas2Aseg.py index 7044fdde2f..e2738ac746 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Apas2Aseg.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Apas2Aseg.py @@ -1,21 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Apas2Aseg def test_Apas2Aseg_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='--i %s', + argstr="--i %s", + extensions=None, mandatory=True, ), out_file=dict( - argstr='--o %s', + argstr="--o %s", + extensions=None, mandatory=True, 
), subjects_dir=dict(), @@ -25,8 +28,15 @@ def test_Apas2Aseg_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Apas2Aseg_outputs(): - output_map = dict(out_file=dict(argstr='%s', ), ) + output_map = dict( + out_file=dict( + argstr="%s", + extensions=None, + ), + ) outputs = Apas2Aseg.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py b/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py index 69f56d7dde..b3a8e80806 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py @@ -1,50 +1,79 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ApplyMask def test_ApplyMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), - invert_xfm=dict(argstr='-invert', ), - keep_mask_deletion_edits=dict(argstr='-keep_mask_deletion_edits', ), + invert_xfm=dict( + argstr="-invert", + ), + keep_mask_deletion_edits=dict( + argstr="-keep_mask_deletion_edits", + ), mask_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - mask_thresh=dict(argstr='-T %.4f', ), + mask_thresh=dict( + argstr="-T %.4f", + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=True, keep_extension=True, - name_source=['in_file'], - name_template='%s_masked', + name_source=["in_file"], + name_template="%s_masked", position=-1, ), subjects_dir=dict(), - transfer=dict(argstr='-transfer %d', ), - use_abs=dict(argstr='-abs', ), - xfm_file=dict(argstr='-xform %s', ), - xfm_source=dict(argstr='-lta_src %s', ), - xfm_target=dict(argstr='-lta_dst %s', ), + transfer=dict( + argstr="-transfer %d", + ), + use_abs=dict( + argstr="-abs", + ), + xfm_file=dict( + argstr="-xform %s", + extensions=None, + ), + xfm_source=dict( + argstr="-lta_src %s", + extensions=None, + ), + xfm_target=dict( + argstr="-lta_dst %s", + extensions=None, + ), ) inputs = ApplyMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyMask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ApplyMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py b/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py index c9e8f85904..2aa4ae59f8 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py @@ -1,101 +1,185 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ApplyVolTransform def test_ApplyVolTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fs_target=dict( - argstr='--fstarg', + argstr="--fstarg", mandatory=True, - requires=['reg_file'], - xor=('target_file', 'tal', 'fs_target'), + requires=["reg_file"], + xor=("target_file", "tal", 
"fs_target"), ), fsl_reg_file=dict( - argstr='--fsl %s', + argstr="--fsl %s", + extensions=None, mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), + ), + interp=dict( + argstr="--interp %s", + ), + inverse=dict( + argstr="--inv", ), - interp=dict(argstr='--interp %s', ), - inverse=dict(argstr='--inv', ), invert_morph=dict( - argstr='--inv-morph', - requires=['m3z_file'], + argstr="--inv-morph", + requires=["m3z_file"], ), lta_file=dict( - argstr='--lta %s', + argstr="--lta %s", + extensions=None, mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), ), lta_inv_file=dict( - argstr='--lta-inv %s', + argstr="--lta-inv %s", + extensions=None, mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), + ), + m3z_file=dict( + argstr="--m3z %s", + extensions=None, ), - m3z_file=dict(argstr='--m3z %s', ), mni_152_reg=dict( - argstr='--regheader', + argstr="--regheader", mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), ), no_ded_m3z_path=dict( - argstr='--noDefM3zPath', - requires=['m3z_file'], + argstr="--noDefM3zPath", + requires=["m3z_file"], + ), + no_resample=dict( + argstr="--no-resample", ), - no_resample=dict(argstr='--no-resample', ), reg_file=dict( - argstr='--reg %s', + argstr="--reg %s", + extensions=None, mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), ), reg_header=dict( - argstr='--regheader', + argstr="--regheader", mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), ), source_file=dict( - argstr='--mov %s', + argstr="--mov %s", copyfile=False, + extensions=None, mandatory=True, ), subject=dict( - argstr='--s %s', + argstr="--s %s", mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), ), subjects_dir=dict(), tal=dict( - argstr='--tal', + argstr="--tal", mandatory=True, - xor=('target_file', 'tal', 'fs_target'), + xor=("target_file", "tal", "fs_target"), + ), + tal_resolution=dict( + argstr="--talres %.10f", ), - tal_resolution=dict(argstr='--talres %.10f', ), target_file=dict( - argstr='--targ %s', + argstr="--targ %s", + 
extensions=None, mandatory=True, - xor=('target_file', 'tal', 'fs_target'), + xor=("target_file", "tal", "fs_target"), ), transformed_file=dict( - argstr='--o %s', + argstr="--o %s", + extensions=None, genfile=True, ), xfm_reg_file=dict( - argstr='--xfm %s', + argstr="--xfm %s", + extensions=None, mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), ), ) inputs = ApplyVolTransform.input_spec() @@ -103,8 +187,14 @@ def test_ApplyVolTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyVolTransform_outputs(): - output_map = dict(transformed_file=dict(), ) + output_map = dict( + transformed_file=dict( + extensions=None, + ), + ) outputs = ApplyVolTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py b/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py index 93db55cd30..e7a95d1a3d 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py @@ -1,68 +1,121 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Binarize def test_Binarize_inputs(): input_map = dict( - abs=dict(argstr='--abs', ), - args=dict(argstr='%s', ), - bin_col_num=dict(argstr='--bincol', ), - bin_val=dict(argstr='--binval %d', ), - bin_val_not=dict(argstr='--binvalnot %d', ), + abs=dict( + argstr="--abs", + ), + args=dict( + argstr="%s", + ), + bin_col_num=dict( + argstr="--bincol", + ), + bin_val=dict( + argstr="--binval %d", + ), + bin_val_not=dict( + argstr="--binvalnot %d", + ), binary_file=dict( - argstr='--o %s', + argstr="--o %s", + extensions=None, genfile=True, ), - count_file=dict(argstr='--count %s', ), - dilate=dict(argstr='--dilate %d', ), + count_file=dict( + argstr="--count %s", + ), + dilate=dict( + argstr="--dilate %d", + ), environ=dict( nohash=True, usedefault=True, ), - erode=dict(argstr='--erode %d', ), - erode2d=dict(argstr='--erode2d %d', ), - frame_no=dict(argstr='--frame %s', ), + erode=dict( + argstr="--erode %d", + ), + erode2d=dict( + argstr="--erode2d %d", + ), + frame_no=dict( + argstr="--frame %s", + ), in_file=dict( - argstr='--i %s', + argstr="--i %s", copyfile=False, + extensions=None, mandatory=True, ), - invert=dict(argstr='--inv', ), - mask_file=dict(argstr='--mask maskvol', ), - mask_thresh=dict(argstr='--mask-thresh %f', ), - match=dict(argstr='--match %d...', ), + invert=dict( + argstr="--inv", + ), + mask_file=dict( + argstr="--mask maskvol", + extensions=None, + ), + mask_thresh=dict( + argstr="--mask-thresh %f", + ), + match=dict( + argstr="--match %d...", + ), max=dict( - argstr='--max %f', - xor=['wm_ven_csf'], + argstr="--max %f", + xor=["wm_ven_csf"], + ), + merge_file=dict( + argstr="--merge %s", + extensions=None, ), - merge_file=dict(argstr='--merge %s', ), min=dict( - argstr='--min %f', - xor=['wm_ven_csf'], + argstr="--min %f", + xor=["wm_ven_csf"], + ), + out_type=dict( + argstr="", + ), + rmax=dict( + argstr="--rmax %f", + ), + rmin=dict( + argstr="--rmin %f", ), - out_type=dict(argstr='', ), - rmax=dict(argstr='--rmax %f', ), - rmin=dict(argstr='--rmin %f', ), subjects_dir=dict(), - 
ventricles=dict(argstr='--ventricles', ), - wm=dict(argstr='--wm', ), + ventricles=dict( + argstr="--ventricles", + ), + wm=dict( + argstr="--wm", + ), wm_ven_csf=dict( - argstr='--wm+vcsf', - xor=['min', 'max'], + argstr="--wm+vcsf", + xor=["min", "max"], + ), + zero_edges=dict( + argstr="--zero-edges", + ), + zero_slice_edge=dict( + argstr="--zero-slice-edges", ), - zero_edges=dict(argstr='--zero-edges', ), - zero_slice_edge=dict(argstr='--zero-slice-edges', ), ) inputs = Binarize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Binarize_outputs(): output_map = dict( - binary_file=dict(), - count_file=dict(), + binary_file=dict( + extensions=None, + ), + count_file=dict( + extensions=None, + ), ) outputs = Binarize.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CALabel.py b/nipype/interfaces/freesurfer/tests/test_auto_CALabel.py index 1cf35fcedb..d89e51841b 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CALabel.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CALabel.py @@ -1,42 +1,67 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import CALabel def test_CALabel_inputs(): input_map = dict( - align=dict(argstr='-align', ), - args=dict(argstr='%s', ), - aseg=dict(argstr='-aseg %s', ), + align=dict( + argstr="-align", + ), + args=dict( + argstr="%s", + ), + aseg=dict( + argstr="-aseg %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-4, ), - in_vol=dict(argstr='-r %s', ), - intensities=dict(argstr='-r %s', ), - label=dict(argstr='-l %s', ), - no_big_ventricles=dict(argstr='-nobigventricles', ), + in_vol=dict( + argstr="-r %s", + extensions=None, + ), + intensities=dict( + argstr="-r %s", + extensions=None, + ), + label=dict( + argstr="-l %s", + extensions=None, + ), + no_big_ventricles=dict( + argstr="-nobigventricles", + ), num_threads=dict(), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), - prior=dict(argstr='-prior %.1f', ), - relabel_unlikely=dict(argstr='-relabel_unlikely %d %.1f', ), + prior=dict( + argstr="-prior %.1f", + ), + relabel_unlikely=dict( + argstr="-relabel_unlikely %d %.1f", + ), subjects_dir=dict(), template=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), transform=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), @@ -46,8 +71,14 @@ def test_CALabel_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CALabel_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = CALabel.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py b/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py index 0e8ec025ec..58788da3a4 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py @@ -1,39 +1,53 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import CANormalize def test_CANormalize_inputs(): input_map = dict( 
- args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), atlas=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), - control_points=dict(argstr='-c %s', ), + control_points=dict( + argstr="-c %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-4, ), - long_file=dict(argstr='-long %s', ), - mask=dict(argstr='-mask %s', ), + long_file=dict( + argstr="-long %s", + extensions=None, + ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, keep_extension=True, - name_source=['in_file'], - name_template='%s_norm', + name_source=["in_file"], + name_template="%s_norm", position=-1, ), subjects_dir=dict(), transform=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), @@ -43,10 +57,16 @@ def test_CANormalize_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CANormalize_outputs(): output_map = dict( - control_points=dict(), - out_file=dict(), + control_points=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), ) outputs = CANormalize.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CARegister.py b/nipype/interfaces/freesurfer/tests/test_auto_CARegister.py index e3cbf52f10..3f410524f0 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CARegister.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CARegister.py @@ -1,50 +1,76 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import CARegister def test_CARegister_inputs(): input_map = dict( - A=dict(argstr='-A %d', ), - align=dict(argstr='-align-%s', ), - args=dict(argstr='%s', ), + A=dict( + argstr="-A %d", + ), + align=dict( + argstr="-align-%s", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), invert_and_save=dict( - argstr='-invert-and-save', + argstr="-invert-and-save", position=-4, ), - l_files=dict(argstr='-l %s', ), - levels=dict(argstr='-levels %d', ), - mask=dict(argstr='-mask %s', ), - no_big_ventricles=dict(argstr='-nobigventricles', ), + l_files=dict( + argstr="-l %s", + ), + levels=dict( + argstr="-levels %d", + ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), + no_big_ventricles=dict( + argstr="-nobigventricles", + ), num_threads=dict(), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), subjects_dir=dict(), template=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - transform=dict(argstr='-T %s', ), + transform=dict( + argstr="-T %s", + extensions=None, + ), ) inputs = CARegister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CARegister_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = CARegister.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py b/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py index 
a7d3ad3bb0..d14e203079 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py @@ -1,30 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import CheckTalairachAlignment def test_CheckTalairachAlignment_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-xfm %s', + argstr="-xfm %s", + extensions=None, mandatory=True, position=-1, - xor=['subject'], + xor=["subject"], ), subject=dict( - argstr='-subj %s', + argstr="-subj %s", mandatory=True, position=-1, - xor=['in_file'], + xor=["in_file"], ), subjects_dir=dict(), threshold=dict( - argstr='-T %.3f', + argstr="-T %.3f", usedefault=True, ), ) @@ -33,8 +35,14 @@ def test_CheckTalairachAlignment_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CheckTalairachAlignment_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = CheckTalairachAlignment.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Concatenate.py b/nipype/interfaces/freesurfer/tests/test_auto_Concatenate.py index e4e8efb718..992f3e308c 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Concatenate.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Concatenate.py @@ -1,47 +1,87 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Concatenate def test_Concatenate_inputs(): input_map = dict( - add_val=dict(argstr='--add %f', ), - args=dict(argstr='%s', ), - combine=dict(argstr='--combine', ), + add_val=dict( + argstr="--add %f", + ), + args=dict( + argstr="%s", + ), + combine=dict( + argstr="--combine", + ), concatenated_file=dict( - argstr='--o %s', + argstr="--o %s", + extensions=None, genfile=True, ), environ=dict( nohash=True, usedefault=True, ), - gmean=dict(argstr='--gmean %d', ), + gmean=dict( + argstr="--gmean %d", + ), in_files=dict( - argstr='--i %s...', + argstr="--i %s...", mandatory=True, ), - keep_dtype=dict(argstr='--keep-datatype', ), - mask_file=dict(argstr='--mask %s', ), - max_bonfcor=dict(argstr='--max-bonfcor', ), - max_index=dict(argstr='--max-index', ), - mean_div_n=dict(argstr='--mean-div-n', ), - multiply_by=dict(argstr='--mul %f', ), - multiply_matrix_file=dict(argstr='--mtx %s', ), - paired_stats=dict(argstr='--paired-%s', ), - sign=dict(argstr='--%s', ), - sort=dict(argstr='--sort', ), - stats=dict(argstr='--%s', ), + keep_dtype=dict( + argstr="--keep-datatype", + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + ), + max_bonfcor=dict( + argstr="--max-bonfcor", + ), + max_index=dict( + argstr="--max-index", + ), + mean_div_n=dict( + argstr="--mean-div-n", + ), + multiply_by=dict( + argstr="--mul %f", + ), + multiply_matrix_file=dict( + argstr="--mtx %s", + extensions=None, + ), + paired_stats=dict( + argstr="--paired-%s", + ), + sign=dict( + argstr="--%s", + ), + sort=dict( + argstr="--sort", + ), + stats=dict( + argstr="--%s", + ), subjects_dir=dict(), - vote=dict(argstr='--vote', ), + vote=dict( + argstr="--vote", + ), ) inputs = Concatenate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Concatenate_outputs(): - output_map = dict(concatenated_file=dict(), ) + output_map = dict( + concatenated_file=dict( + extensions=None, + ), + ) outputs = Concatenate.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py b/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py index 40a465b249..dad221b734 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py @@ -1,48 +1,63 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ConcatenateLTA def test_ConcatenateLTA_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_lta1=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), in_lta2=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), - invert_1=dict(argstr='-invert1', ), - invert_2=dict(argstr='-invert2', ), - invert_out=dict(argstr='-invertout', ), + invert_1=dict( + argstr="-invert1", + ), + invert_2=dict( + argstr="-invert2", + ), + invert_out=dict( + argstr="-invertout", + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, keep_extension=True, - name_source=['in_lta1'], - name_template='%s_concat', + name_source=["in_lta1"], + name_template="%s_concat", position=-1, ), - out_type=dict(argstr='-out_type %d', ), - subject=dict(argstr='-subject %s', ), + out_type=dict( + argstr="-out_type %d", + ), + subject=dict( + argstr="-subject %s", + ), subjects_dir=dict(), tal_source_file=dict( - argstr='-tal %s', + argstr="-tal %s", + extensions=None, position=-5, - requires=['tal_template_file'], + requires=["tal_template_file"], ), tal_template_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-4, - requires=['tal_source_file'], + requires=["tal_source_file"], ), ) inputs = ConcatenateLTA.input_spec() @@ -50,8 +65,14 @@ def test_ConcatenateLTA_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ConcatenateLTA_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ConcatenateLTA.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Contrast.py b/nipype/interfaces/freesurfer/tests/test_auto_Contrast.py index 7999001813..8409f26757 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Contrast.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Contrast.py @@ -1,43 +1,70 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Contrast def test_Contrast_inputs(): input_map = dict( - annotation=dict(mandatory=True, ), - args=dict(argstr='%s', ), + annotation=dict( + extensions=None, + mandatory=True, + ), + args=dict( + argstr="%s", + ), copy_inputs=dict(), - cortex=dict(mandatory=True, ), + cortex=dict( + extensions=None, + mandatory=True, + ), environ=dict( nohash=True, usedefault=True, ), hemisphere=dict( - argstr='--%s-only', + argstr="--%s-only", + mandatory=True, + ), + orig=dict( + extensions=None, + mandatory=True, + ), + rawavg=dict( + extensions=None, 
mandatory=True, ), - orig=dict(mandatory=True, ), - rawavg=dict(mandatory=True, ), subject_id=dict( - argstr='--s %s', + argstr="--s %s", mandatory=True, usedefault=True, ), subjects_dir=dict(), - thickness=dict(mandatory=True, ), - white=dict(mandatory=True, ), + thickness=dict( + extensions=None, + mandatory=True, + ), + white=dict( + extensions=None, + mandatory=True, + ), ) inputs = Contrast.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Contrast_outputs(): output_map = dict( - out_contrast=dict(), - out_log=dict(), - out_stats=dict(), + out_contrast=dict( + extensions=None, + ), + out_log=dict( + extensions=None, + ), + out_stats=dict( + extensions=None, + ), ) outputs = Contrast.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py b/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py index f29b76df29..c230edb8ba 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py @@ -1,38 +1,56 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Curvature def test_Curvature_inputs(): input_map = dict( - args=dict(argstr='%s', ), - averages=dict(argstr='-a %d', ), + args=dict( + argstr="%s", + ), + averages=dict( + argstr="-a %d", + ), copy_input=dict(), - distances=dict(argstr='-distances %d %d', ), + distances=dict( + argstr="-distances %d %d", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, mandatory=True, position=-2, ), - n=dict(argstr='-n', ), - save=dict(argstr='-w', ), + n=dict( + argstr="-n", + ), + save=dict( + argstr="-w", + ), subjects_dir=dict(), - threshold=dict(argstr='-thresh %.3f', ), + threshold=dict( + argstr="-thresh %.3f", + ), ) inputs = Curvature.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Curvature_outputs(): output_map = dict( - out_gauss=dict(), - out_mean=dict(), + out_gauss=dict( + extensions=None, + ), + out_mean=dict( + extensions=None, + ), ) outputs = Curvature.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py b/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py index 19d6a4772e..4e8e3d5bc2 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py @@ -1,19 +1,22 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import CurvatureStats def test_CurvatureStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), copy_inputs=dict(), curvfile1=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), curvfile2=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), @@ -22,35 +25,51 @@ def test_CurvatureStats_inputs(): usedefault=True, ), hemisphere=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-3, ), - min_max=dict(argstr='-m', ), + min_max=dict( + argstr="-m", + ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, hash_files=False, - name_source=['hemisphere'], - name_template='%s.curv.stats', + name_source=["hemisphere"], + 
name_template="%s.curv.stats", ), subject_id=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-4, usedefault=True, ), subjects_dir=dict(), - surface=dict(argstr='-F %s', ), - values=dict(argstr='-G', ), - write=dict(argstr='--writeCurvatureFiles', ), + surface=dict( + argstr="-F %s", + extensions=None, + ), + values=dict( + argstr="-G", + ), + write=dict( + argstr="--writeCurvatureFiles", + ), ) inputs = CurvatureStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CurvatureStats_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = CurvatureStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py b/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py index efd53fb773..bda2620fe1 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py @@ -1,23 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import DICOMConvert def test_DICOMConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), - base_output_dir=dict(mandatory=True, ), - dicom_dir=dict(mandatory=True, ), - dicom_info=dict(), + args=dict( + argstr="%s", + ), + base_output_dir=dict( + mandatory=True, + ), + dicom_dir=dict( + mandatory=True, + ), + dicom_info=dict( + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), file_mapping=dict(), - ignore_single_slice=dict(requires=['dicom_info'], ), - out_type=dict(usedefault=True, ), - seq_list=dict(requires=['dicom_info'], ), - subject_dir_template=dict(usedefault=True, ), + ignore_single_slice=dict( + requires=["dicom_info"], + ), + out_type=dict( + usedefault=True, + ), + seq_list=dict( + requires=["dicom_info"], + ), + subject_dir_template=dict( + usedefault=True, + ), subject_id=dict(), subjects_dir=dict(), ) diff --git a/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py b/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py index e681a03871..7bf1b895f1 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py @@ -1,47 +1,67 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import EMRegister def test_EMRegister_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), - mask=dict(argstr='-mask %s', ), - nbrspacing=dict(argstr='-uns %d', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), + nbrspacing=dict( + argstr="-uns %d", + ), num_threads=dict(), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, keep_extension=False, - name_source=['in_file'], - name_template='%s_transform.lta', + name_source=["in_file"], + name_template="%s_transform.lta", position=-1, ), - skull=dict(argstr='-skull', ), + skull=dict( + argstr="-skull", + ), subjects_dir=dict(), template=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - transform=dict(argstr='-t %s', ), + transform=dict( + argstr="-t %s", + extensions=None, + ), ) inputs 
= EMRegister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EMRegister_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = EMRegister.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_EditWMwithAseg.py b/nipype/interfaces/freesurfer/tests/test_auto_EditWMwithAseg.py index f971f4fab9..5c51ed848d 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_EditWMwithAseg.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_EditWMwithAseg.py @@ -1,13 +1,15 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import EditWMwithAseg def test_EditWMwithAseg_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), brain_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), @@ -16,18 +18,23 @@ def test_EditWMwithAseg_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-4, ), - keep_in=dict(argstr='-keep-in', ), + keep_in=dict( + argstr="-keep-in", + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), seg_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), @@ -38,8 +45,14 @@ def test_EditWMwithAseg_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EditWMwithAseg_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = EditWMwithAseg.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_EulerNumber.py b/nipype/interfaces/freesurfer/tests/test_auto_EulerNumber.py index 996d079b48..9d05019824 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_EulerNumber.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_EulerNumber.py @@ -1,17 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import EulerNumber def test_EulerNumber_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), @@ -22,8 +24,13 @@ def test_EulerNumber_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EulerNumber_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + defects=dict(), + euler=dict(), + ) outputs = EulerNumber.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ExtractMainComponent.py b/nipype/interfaces/freesurfer/tests/test_auto_ExtractMainComponent.py index f3f4896a75..59997ad5b4 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ExtractMainComponent.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ExtractMainComponent.py @@ -1,24 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ExtractMainComponent def 
test_ExtractMainComponent_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), out_file=dict( - argstr='%s', - name_source='in_file', - name_template='%s.maincmp', + argstr="%s", + extensions=None, + name_source="in_file", + name_template="%s.maincmp", position=2, ), ) @@ -27,8 +30,14 @@ def test_ExtractMainComponent_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ExtractMainComponent_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ExtractMainComponent.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FSCommand.py b/nipype/interfaces/freesurfer/tests/test_auto_FSCommand.py index 0546d99006..aa53727cc8 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FSCommand.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FSCommand.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import FSCommand def test_FSCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FSCommandOpenMP.py b/nipype/interfaces/freesurfer/tests/test_auto_FSCommandOpenMP.py index 5b8bca1484..f61b52c1ea 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FSCommandOpenMP.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FSCommandOpenMP.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import FSCommandOpenMP def test_FSCommandOpenMP_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FSScriptCommand.py b/nipype/interfaces/freesurfer/tests/test_auto_FSScriptCommand.py index e791bb5976..03cf55eb69 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FSScriptCommand.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FSScriptCommand.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import FSScriptCommand def test_FSScriptCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FitMSParams.py b/nipype/interfaces/freesurfer/tests/test_auto_FitMSParams.py index 9e6b1cbc8b..7842c5333a 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FitMSParams.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FitMSParams.py @@ -1,23 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import FitMSParams def test_FitMSParams_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), flip_list=dict(), in_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), out_dir=dict( - argstr='%s', + argstr="%s", genfile=True, position=-1, ), @@ -31,11 +32,19 @@ def 
test_FitMSParams_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitMSParams_outputs(): output_map = dict( - pd_image=dict(), - t1_image=dict(), - t2star_image=dict(), + pd_image=dict( + extensions=None, + ), + t1_image=dict( + extensions=None, + ), + t2star_image=dict( + extensions=None, + ), ) outputs = FitMSParams.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FixTopology.py b/nipype/interfaces/freesurfer/tests/test_auto_FixTopology.py index 8b61823b42..0037c02270 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FixTopology.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FixTopology.py @@ -1,31 +1,55 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import FixTopology def test_FixTopology_inputs(): input_map = dict( - args=dict(argstr='%s', ), - copy_inputs=dict(mandatory=True, ), + args=dict( + argstr="%s", + ), + copy_inputs=dict( + mandatory=True, + ), environ=dict( nohash=True, usedefault=True, ), - ga=dict(argstr='-ga', ), + ga=dict( + argstr="-ga", + ), hemisphere=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-1, ), - in_brain=dict(mandatory=True, ), - in_inflated=dict(mandatory=True, ), - in_orig=dict(mandatory=True, ), - in_wm=dict(mandatory=True, ), - mgz=dict(argstr='-mgz', ), - seed=dict(argstr='-seed %d', ), - sphere=dict(argstr='-sphere %s', ), + in_brain=dict( + extensions=None, + mandatory=True, + ), + in_inflated=dict( + extensions=None, + mandatory=True, + ), + in_orig=dict( + extensions=None, + mandatory=True, + ), + in_wm=dict( + extensions=None, + mandatory=True, + ), + mgz=dict( + argstr="-mgz", + ), + seed=dict( + argstr="-seed %d", + ), + sphere=dict( + argstr="-sphere %s", + extensions=None, + ), subject_id=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, usedefault=True, @@ -37,8 +61,14 @@ def test_FixTopology_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FixTopology_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = FixTopology.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FuseSegmentations.py b/nipype/interfaces/freesurfer/tests/test_auto_FuseSegmentations.py index 8608444fc8..12550be8b3 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FuseSegmentations.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FuseSegmentations.py @@ -1,38 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..longitudinal import FuseSegmentations def test_FuseSegmentations_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_norms=dict( - argstr='-n %s', + argstr="-n %s", mandatory=True, ), in_segmentations=dict( - argstr='-a %s', + argstr="-a %s", mandatory=True, ), in_segmentations_noCC=dict( - argstr='-c %s', + argstr="-c %s", mandatory=True, ), out_file=dict( + extensions=None, mandatory=True, position=-1, ), subject_id=dict( - argstr='%s', + argstr="%s", position=-3, ), subjects_dir=dict(), timepoints=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), @@ -42,8 +44,14 @@ def 
test_FuseSegmentations_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FuseSegmentations_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = FuseSegmentations.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py b/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py index 828bbfab03..4d62a03be6 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py @@ -1,115 +1,222 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import GLMFit def test_GLMFit_inputs(): input_map = dict( - allow_ill_cond=dict(argstr='--illcond', ), - allow_repeated_subjects=dict(argstr='--allowsubjrep', ), - args=dict(argstr='%s', ), - calc_AR1=dict(argstr='--tar1', ), - check_opts=dict(argstr='--checkopts', ), - compute_log_y=dict(argstr='--logy', ), - contrast=dict(argstr='--C %s...', ), + allow_ill_cond=dict( + argstr="--illcond", + ), + allow_repeated_subjects=dict( + argstr="--allowsubjrep", + ), + args=dict( + argstr="%s", + ), + bp_clip_max=dict( + argstr="--bp-clip-max %f", + ), + bp_clip_neg=dict( + argstr="--bp-clip-neg", + ), + calc_AR1=dict( + argstr="--tar1", + ), + check_opts=dict( + argstr="--checkopts", + ), + compute_log_y=dict( + argstr="--logy", + ), + contrast=dict( + argstr="--C %s...", + ), cortex=dict( - argstr='--cortex', - xor=['label_file'], + argstr="--cortex", + xor=["label_file"], + ), + debug=dict( + argstr="--debug", ), - debug=dict(argstr='--debug', ), design=dict( - argstr='--X %s', - xor=('fsgd', 'design', 'one_sample'), + argstr="--X %s", + extensions=None, + xor=("fsgd", "design", "one_sample"), + ), + diag=dict( + argstr="--diag %d", + ), + diag_cluster=dict( + argstr="--diag-cluster", ), - diag=dict(argstr='--diag %d', ), - diag_cluster=dict(argstr='--diag-cluster', ), environ=dict( nohash=True, usedefault=True, ), fixed_fx_dof=dict( - argstr='--ffxdof %d', - xor=['fixed_fx_dof_file'], + argstr="--ffxdof %d", + xor=["fixed_fx_dof_file"], ), fixed_fx_dof_file=dict( - argstr='--ffxdofdat %d', - xor=['fixed_fx_dof'], + argstr="--ffxdofdat %d", + extensions=None, + xor=["fixed_fx_dof"], + ), + fixed_fx_var=dict( + argstr="--yffxvar %s", + extensions=None, + ), + force_perm=dict( + argstr="--perm-force", ), - fixed_fx_var=dict(argstr='--yffxvar %s', ), - force_perm=dict(argstr='--perm-force', ), fsgd=dict( - argstr='--fsgd %s %s', - xor=('fsgd', 'design', 'one_sample'), + argstr="--fsgd %s %s", + xor=("fsgd", "design", "one_sample"), + ), + fwhm=dict( + argstr="--fwhm %f", ), - fwhm=dict(argstr='--fwhm %f', ), glm_dir=dict( - argstr='--glmdir %s', + argstr="--glmdir %s", genfile=True, ), hemi=dict(), in_file=dict( - argstr='--y %s', + argstr="--y %s", copyfile=False, + extensions=None, mandatory=True, ), - invert_mask=dict(argstr='--mask-inv', ), + invert_mask=dict( + argstr="--mask-inv", + ), label_file=dict( - argstr='--label %s', - xor=['cortex'], + argstr="--label %s", + extensions=None, + xor=["cortex"], + ), + logan=dict( + argstr="--logan %s %s %f", + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + ), + mrtm1=dict( + argstr="--mrtm1 %s %s", + ), + mrtm2=dict( + argstr="--mrtm2 %s %s %f", + ), + nii=dict( + argstr="--nii", + xor=["nii", "nii_gz"], + ), + nii_gz=dict( 
+ argstr="--nii.gz", + xor=["nii", "nii_gz"], + ), + no_contrast_ok=dict( + argstr="--no-contrasts-ok", + ), + no_est_fwhm=dict( + argstr="--no-est-fwhm", + ), + no_mask_smooth=dict( + argstr="--no-mask-smooth", ), - mask_file=dict(argstr='--mask %s', ), - no_contrast_ok=dict(argstr='--no-contrasts-ok', ), - no_est_fwhm=dict(argstr='--no-est-fwhm', ), - no_mask_smooth=dict(argstr='--no-mask-smooth', ), no_prune=dict( - argstr='--no-prune', - xor=['prunethresh'], + argstr="--no-prune", + xor=["prunethresh"], ), one_sample=dict( - argstr='--osgm', - xor=('one_sample', 'fsgd', 'design', 'contrast'), + argstr="--osgm", + xor=("one_sample", "fsgd", "design", "contrast"), + ), + pca=dict( + argstr="--pca", + ), + per_voxel_reg=dict( + argstr="--pvr %s...", + ), + profile=dict( + argstr="--profile %d", + ), + prune=dict( + argstr="--prune", ), - pca=dict(argstr='--pca', ), - per_voxel_reg=dict(argstr='--pvr %s...', ), - profile=dict(argstr='--profile %d', ), - prune=dict(argstr='--prune', ), prune_thresh=dict( - argstr='--prune_thr %f', - xor=['noprune'], - ), - resynth_test=dict(argstr='--resynthtest %d', ), - save_cond=dict(argstr='--save-cond', ), - save_estimate=dict(argstr='--yhat-save', ), - save_res_corr_mtx=dict(argstr='--eres-scm', ), - save_residual=dict(argstr='--eres-save', ), - seed=dict(argstr='--seed %d', ), - self_reg=dict(argstr='--selfreg %d %d %d', ), - sim_done_file=dict(argstr='--sim-done %s', ), - sim_sign=dict(argstr='--sim-sign %s', ), - simulation=dict(argstr='--sim %s %d %f %s', ), + argstr="--prune_thr %f", + xor=["noprune"], + ), + resynth_test=dict( + argstr="--resynthtest %d", + ), + save_cond=dict( + argstr="--save-cond", + ), + save_estimate=dict( + argstr="--yhat-save", + ), + save_res_corr_mtx=dict( + argstr="--eres-scm", + ), + save_residual=dict( + argstr="--eres-save", + ), + seed=dict( + argstr="--seed %d", + ), + self_reg=dict( + argstr="--selfreg %d %d %d", + ), + sim_done_file=dict( + argstr="--sim-done %s", + extensions=None, + ), + sim_sign=dict( + argstr="--sim-sign %s", + ), + simulation=dict( + argstr="--sim %s %d %f %s", + ), subject_id=dict(), subjects_dir=dict(), surf=dict( - argstr='--surf %s %s %s', - requires=['subject_id', 'hemi'], - ), - surf_geo=dict(usedefault=True, ), - synth=dict(argstr='--synth', ), - uniform=dict(argstr='--uniform %f %f', ), - var_fwhm=dict(argstr='--var-fwhm %f', ), - vox_dump=dict(argstr='--voxdump %d %d %d', ), - weight_file=dict(xor=['weighted_ls'], ), + argstr="--surf %s %s %s", + requires=["subject_id", "hemi"], + ), + surf_geo=dict( + usedefault=True, + ), + synth=dict( + argstr="--synth", + ), + uniform=dict( + argstr="--uniform %f %f", + ), + var_fwhm=dict( + argstr="--var-fwhm %f", + ), + vox_dump=dict( + argstr="--voxdump %d %d %d", + ), + weight_file=dict( + extensions=None, + xor=["weighted_ls"], + ), weight_inv=dict( - argstr='--w-inv', - xor=['weighted_ls'], + argstr="--w-inv", + xor=["weighted_ls"], ), weight_sqrt=dict( - argstr='--w-sqrt', - xor=['weighted_ls'], + argstr="--w-sqrt", + xor=["weighted_ls"], ), weighted_ls=dict( - argstr='--wls %s', - xor=('weight_file', 'weight_inv', 'weight_sqrt'), + argstr="--wls %s", + extensions=None, + xor=("weight_file", "weight_inv", "weight_sqrt"), ), ) inputs = GLMFit.input_spec() @@ -117,25 +224,57 @@ def test_GLMFit_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GLMFit_outputs(): output_map = dict( - beta_file=dict(), - dof_file=dict(), - 
error_file=dict(), - error_stddev_file=dict(), - error_var_file=dict(), - estimate_file=dict(), - frame_eigenvectors=dict(), + beta_file=dict( + extensions=None, + ), + bp_file=dict( + extensions=None, + ), + dof_file=dict( + extensions=None, + ), + error_file=dict( + extensions=None, + ), + error_stddev_file=dict( + extensions=None, + ), + error_var_file=dict( + extensions=None, + ), + estimate_file=dict( + extensions=None, + ), + frame_eigenvectors=dict( + extensions=None, + ), ftest_file=dict(), - fwhm_file=dict(), + fwhm_file=dict( + extensions=None, + ), gamma_file=dict(), gamma_var_file=dict(), glm_dir=dict(), - mask_file=dict(), + k2p_file=dict( + extensions=None, + ), + mask_file=dict( + extensions=None, + ), sig_file=dict(), - singular_values=dict(), - spatial_eigenvectors=dict(), - svd_stats_file=dict(), + singular_values=dict( + extensions=None, + ), + spatial_eigenvectors=dict( + extensions=None, + ), + svd_stats_file=dict( + extensions=None, + ), ) outputs = GLMFit.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_GTMPVC.py b/nipype/interfaces/freesurfer/tests/test_auto_GTMPVC.py new file mode 100644 index 0000000000..99c0002be4 --- /dev/null +++ b/nipype/interfaces/freesurfer/tests/test_auto_GTMPVC.py @@ -0,0 +1,291 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..petsurfer import GTMPVC + + +def test_GTMPVC_inputs(): + input_map = dict( + X=dict( + argstr="--X", + ), + X0=dict( + argstr="--X0", + ), + args=dict( + argstr="%s", + ), + auto_mask=dict( + argstr="--auto-mask %f %f", + ), + beta=dict( + argstr="--beta", + ), + color_table_file=dict( + argstr="--ctab %s", + extensions=None, + xor=("color_table_file", "default_color_table"), + ), + contrast=dict( + argstr="--C %s...", + ), + default_color_table=dict( + argstr="--ctab-default", + xor=("color_table_file", "default_color_table"), + ), + default_seg_merge=dict( + argstr="--default-seg-merge", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + frame=dict( + argstr="--frame %i", + ), + in_file=dict( + argstr="--i %s", + copyfile=False, + extensions=None, + mandatory=True, + ), + km_hb=dict( + argstr="--km-hb %s...", + ), + km_ref=dict( + argstr="--km-ref %s...", + ), + lat=dict( + argstr="--lat", + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + ), + merge_cblum_wm_gyri=dict( + argstr="--merge-cblum-wm-gyri", + ), + merge_hypos=dict( + argstr="--merge-hypos", + ), + mg=dict( + argstr="--mg %g %s", + ), + mg_ref_cerebral_wm=dict( + argstr="--mg-ref-cerebral-wm", + ), + mg_ref_lobes_wm=dict( + argstr="--mg-ref-lobes-wm", + ), + mgx=dict( + argstr="--mgx %f", + ), + no_pvc=dict( + argstr="--no-pvc", + ), + no_reduce_fov=dict( + argstr="--no-reduce-fov", + ), + no_rescale=dict( + argstr="--no-rescale", + ), + no_tfe=dict( + argstr="--no-tfe", + ), + num_threads=dict( + argstr="--threads %i", + ), + opt_brain=dict( + argstr="--opt-brain", + ), + opt_seg_merge=dict( + argstr="--opt-seg-merge", + ), + opt_tol=dict( + argstr="--opt-tol %i %f %f", + ), + optimization_schema=dict( + argstr="--opt %s", + ), + psf=dict( + argstr="--psf %f", + ), + psf_col=dict( + argstr="--psf-col %f", + ), + psf_row=dict( + argstr="--psf-row %f", + ), + psf_slice=dict( + argstr="--psf-slice %f", + ), + pvc_dir=dict( + argstr="--o %s", + genfile=True, + ), + rbv=dict( + argstr="--rbv", + requires=["subjects_dir"], + ), + rbv_res=dict( + argstr="--rbv-res %f", + ), + reduce_fox_eqodd=dict( + argstr="--reduce-fox-eqodd", + ), + reg_file=dict( + argstr="--reg %s", + extensions=None, + 
mandatory=True, + xor=["reg_file", "regheader", "reg_identity"], + ), + reg_identity=dict( + argstr="--reg-identity", + mandatory=True, + xor=["reg_file", "regheader", "reg_identity"], + ), + regheader=dict( + argstr="--regheader", + mandatory=True, + xor=["reg_file", "regheader", "reg_identity"], + ), + replace=dict( + argstr="--replace %i %i", + ), + rescale=dict( + argstr="--rescale %s...", + ), + save_eres=dict( + argstr="--save-eres", + ), + save_input=dict( + argstr="--save-input", + ), + save_yhat=dict( + argstr="--save-yhat", + xor=["save_yhat_with_noise"], + ), + save_yhat0=dict( + argstr="--save-yhat0", + ), + save_yhat_full_fov=dict( + argstr="--save-yhat-full-fov", + ), + save_yhat_with_noise=dict( + argstr="--save-yhat-with-noise %i %i", + xor=["save_yhat"], + ), + scale_refval=dict( + argstr="--scale-refval %f", + ), + segmentation=dict( + argstr="--seg %s", + extensions=None, + mandatory=True, + ), + steady_state_params=dict( + argstr="--ss %f %f %f", + ), + subjects_dir=dict(), + tissue_fraction_resolution=dict( + argstr="--segpvfres %f", + ), + tt_reduce=dict( + argstr="--tt-reduce", + ), + tt_update=dict( + argstr="--tt-update", + ), + y=dict( + argstr="--y", + ), + ) + inputs = GTMPVC.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_GTMPVC_outputs(): + output_map = dict( + eres=dict( + extensions=None, + ), + gtm_file=dict( + extensions=None, + ), + gtm_stats=dict( + extensions=None, + ), + hb_dat=dict( + extensions=None, + ), + hb_nifti=dict( + extensions=None, + ), + input_file=dict( + extensions=None, + ), + mgx_ctxgm=dict( + extensions=None, + ), + mgx_gm=dict( + extensions=None, + ), + mgx_subctxgm=dict( + extensions=None, + ), + nopvc_file=dict( + extensions=None, + ), + opt_params=dict( + extensions=None, + ), + pvc_dir=dict(), + rbv=dict( + extensions=None, + ), + ref_file=dict( + extensions=None, + ), + reg_anat2pet=dict( + extensions=None, + ), + reg_anat2rbvpet=dict( + extensions=None, + ), + reg_pet2anat=dict( + extensions=None, + ), + reg_rbvpet2anat=dict( + extensions=None, + ), + seg=dict( + extensions=None, + ), + seg_ctab=dict( + extensions=None, + ), + tissue_fraction=dict( + extensions=None, + ), + tissue_fraction_psf=dict( + extensions=None, + ), + yhat=dict( + extensions=None, + ), + yhat0=dict( + extensions=None, + ), + yhat_full_fov=dict( + extensions=None, + ), + yhat_with_noise=dict( + extensions=None, + ), + ) + outputs = GTMPVC.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/freesurfer/tests/test_auto_GTMSeg.py b/nipype/interfaces/freesurfer/tests/test_auto_GTMSeg.py new file mode 100644 index 0000000000..026cc33b77 --- /dev/null +++ b/nipype/interfaces/freesurfer/tests/test_auto_GTMSeg.py @@ -0,0 +1,88 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..petsurfer import GTMSeg + + +def test_GTMSeg_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + colortable=dict( + argstr="--ctab %s", + extensions=None, + ), + ctx_annot=dict( + argstr="--ctx-annot %s %i %i", + ), + dmax=dict( + argstr="--dmax %f", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + head=dict( + argstr="--head %s", + ), + keep_cc=dict( + argstr="--keep-cc", + ), + keep_hypo=dict( + argstr="--keep-hypo", + ), + no_pons=dict( + argstr="--no-pons", + ), + 
no_seg_stats=dict( + argstr="--no-seg-stats", + ), + no_vermis=dict( + argstr="--no-vermis", + ), + out_file=dict( + argstr="--o %s", + extensions=None, + usedefault=True, + ), + output_upsampling_factor=dict( + argstr="--output-usf %i", + ), + subject_id=dict( + argstr="--s %s", + mandatory=True, + ), + subjects_dir=dict(), + subseg_cblum_wm=dict( + argstr="--subseg-cblum-wm", + ), + subsegwm=dict( + argstr="--subsegwm", + ), + upsampling_factor=dict( + argstr="--usf %i", + ), + wm_annot=dict( + argstr="--wm-annot %s %i %i", + ), + xcerseg=dict( + argstr="--xcerseg", + ), + ) + inputs = GTMSeg.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_GTMSeg_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = GTMSeg.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ImageInfo.py b/nipype/interfaces/freesurfer/tests/test_auto_ImageInfo.py index 2fa225e87e..aa6d5d302e 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ImageInfo.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ImageInfo.py @@ -1,17 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ImageInfo def test_ImageInfo_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=1, ), subjects_dir=dict(), @@ -21,6 +23,8 @@ def test_ImageInfo_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageInfo_outputs(): output_map = dict( TE=dict(), @@ -31,7 +35,9 @@ def test_ImageInfo_outputs(): file_format=dict(), info=dict(), orientation=dict(), - out_file=dict(), + out_file=dict( + extensions=None, + ), ph_enc_dir=dict(), vox_sizes=dict(), ) diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py b/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py index 49f5e6b48f..f62c085839 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py @@ -1,31 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Jacobian def test_Jacobian_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_mappedsurf=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), in_origsurf=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, keep_extension=False, - name_source=['in_origsurf'], - name_template='%s.jacobian', + name_source=["in_origsurf"], + name_template="%s.jacobian", position=-1, ), subjects_dir=dict(), @@ -35,8 +39,14 @@ def test_Jacobian_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Jacobian_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + 
out_file=dict( + extensions=None, + ), + ) outputs = Jacobian.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_LTAConvert.py b/nipype/interfaces/freesurfer/tests/test_auto_LTAConvert.py index bcbf971d01..b70bd34c45 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_LTAConvert.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_LTAConvert.py @@ -1,77 +1,109 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import LTAConvert def test_LTAConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_fsl=dict( - argstr='--infsl %s', + argstr="--infsl %s", + extensions=None, mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_itk=dict( - argstr='--initk %s', + argstr="--initk %s", + extensions=None, mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_lta=dict( - argstr='--inlta %s', + argstr="--inlta %s", mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_mni=dict( - argstr='--inmni %s', + argstr="--inmni %s", + extensions=None, mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_niftyreg=dict( - argstr='--inniftyreg %s', + argstr="--inniftyreg %s", + extensions=None, mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_reg=dict( - argstr='--inreg %s', + argstr="--inreg %s", + extensions=None, mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), + ), + invert=dict( + argstr="--invert", ), - invert=dict(argstr='--invert', ), ltavox2vox=dict( - argstr='--ltavox2vox', - requires=['out_lta'], - ), - out_fsl=dict(argstr='--outfsl %s', ), - out_itk=dict(argstr='--outitk %s', ), - out_lta=dict(argstr='--outlta %s', ), - out_mni=dict(argstr='--outmni %s', ), - out_reg=dict(argstr='--outreg %s', ), - source_file=dict(argstr='--src %s', ), - target_conform=dict(argstr='--trgconform', ), - target_file=dict(argstr='--trg %s', ), + argstr="--ltavox2vox", + requires=["out_lta"], + ), + out_fsl=dict( + argstr="--outfsl %s", + ), + out_itk=dict( + argstr="--outitk %s", + ), + out_lta=dict( + argstr="--outlta %s", + ), + out_mni=dict( + argstr="--outmni %s", + ), + out_reg=dict( + argstr="--outreg %s", + ), + source_file=dict( + argstr="--src %s", + extensions=None, + ), + target_conform=dict( + argstr="--trgconform", + ), + target_file=dict( + argstr="--trg %s", + extensions=None, + ), ) inputs = LTAConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LTAConvert_outputs(): output_map = dict( - out_fsl=dict(), - out_itk=dict(), - out_lta=dict(), - out_mni=dict(), - out_reg=dict(), + out_fsl=dict( + extensions=None, + ), + out_itk=dict( + extensions=None, + ), + out_lta=dict( + 
extensions=None, + ), + out_mni=dict( + extensions=None, + ), + out_reg=dict( + extensions=None, + ), ) outputs = LTAConvert.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Label2Annot.py b/nipype/interfaces/freesurfer/tests/test_auto_Label2Annot.py index 2d7761006b..7e1caf88cc 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Label2Annot.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Label2Annot.py @@ -1,46 +1,63 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Label2Annot def test_Label2Annot_inputs(): input_map = dict( - args=dict(argstr='%s', ), - color_table=dict(argstr='--ctab %s', ), + args=dict( + argstr="%s", + ), + color_table=dict( + argstr="--ctab %s", + extensions=None, + ), copy_inputs=dict(), environ=dict( nohash=True, usedefault=True, ), hemisphere=dict( - argstr='--hemi %s', + argstr="--hemi %s", mandatory=True, ), in_labels=dict( - argstr='--l %s...', + argstr="--l %s...", + mandatory=True, + ), + keep_max=dict( + argstr="--maxstatwinner", + ), + orig=dict( + extensions=None, mandatory=True, ), - keep_max=dict(argstr='--maxstatwinner', ), - orig=dict(mandatory=True, ), out_annot=dict( - argstr='--a %s', + argstr="--a %s", mandatory=True, ), subject_id=dict( - argstr='--s %s', + argstr="--s %s", mandatory=True, usedefault=True, ), subjects_dir=dict(), - verbose_off=dict(argstr='--noverbose', ), + verbose_off=dict( + argstr="--noverbose", + ), ) inputs = Label2Annot.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Label2Annot_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Label2Annot.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py b/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py index 7511cd0dbb..34f99e1a24 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py @@ -1,57 +1,78 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Label2Label def test_Label2Label_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), copy_inputs=dict(), environ=dict( nohash=True, usedefault=True, ), hemisphere=dict( - argstr='--hemi %s', + argstr="--hemi %s", mandatory=True, ), out_file=dict( - argstr='--trglabel %s', + argstr="--trglabel %s", + extensions=None, hash_files=False, keep_extension=True, - name_source=['source_label'], - name_template='%s_converted', + name_source=["source_label"], + name_template="%s_converted", ), registration_method=dict( - argstr='--regmethod %s', + argstr="--regmethod %s", usedefault=True, ), source_label=dict( - argstr='--srclabel %s', + argstr="--srclabel %s", + extensions=None, + mandatory=True, + ), + source_sphere_reg=dict( + extensions=None, mandatory=True, ), - source_sphere_reg=dict(mandatory=True, ), source_subject=dict( - argstr='--srcsubject %s', + argstr="--srcsubject %s", + mandatory=True, + ), + source_white=dict( + extensions=None, + mandatory=True, + ), + sphere_reg=dict( + extensions=None, mandatory=True, ), - source_white=dict(mandatory=True, ), - sphere_reg=dict(mandatory=True, ), subject_id=dict( - argstr='--trgsubject %s', + argstr="--trgsubject %s", 
mandatory=True, usedefault=True, ), subjects_dir=dict(), - white=dict(mandatory=True, ), + white=dict( + extensions=None, + mandatory=True, + ), ) inputs = Label2Label.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Label2Label_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Label2Label.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py b/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py index fb2726635f..aa1b19f564 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py @@ -1,71 +1,98 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Label2Vol def test_Label2Vol_inputs(): input_map = dict( annot_file=dict( - argstr='--annot %s', + argstr="--annot %s", copyfile=False, + extensions=None, mandatory=True, - requires=('subject_id', 'hemi'), - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + requires=("subject_id", "hemi"), + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), ), aparc_aseg=dict( - argstr='--aparc+aseg', + argstr="--aparc+aseg", mandatory=True, - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), + ), + args=dict( + argstr="%s", ), - args=dict(argstr='%s', ), environ=dict( nohash=True, usedefault=True, ), - fill_thresh=dict(argstr='--fillthresh %g', ), - hemi=dict(argstr='--hemi %s', ), + fill_thresh=dict( + argstr="--fillthresh %g", + ), + hemi=dict( + argstr="--hemi %s", + ), identity=dict( - argstr='--identity', - xor=('reg_file', 'reg_header', 'identity'), + argstr="--identity", + xor=("reg_file", "reg_header", "identity"), + ), + invert_mtx=dict( + argstr="--invertmtx", ), - invert_mtx=dict(argstr='--invertmtx', ), label_file=dict( - argstr='--label %s...', + argstr="--label %s...", copyfile=False, mandatory=True, - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), + ), + label_hit_file=dict( + argstr="--hits %s", + extensions=None, + ), + label_voxel_volume=dict( + argstr="--labvoxvol %f", + ), + map_label_stat=dict( + argstr="--label-stat %s", + extensions=None, + ), + native_vox2ras=dict( + argstr="--native-vox2ras", ), - label_hit_file=dict(argstr='--hits %s', ), - label_voxel_volume=dict(argstr='--labvoxvol %f', ), - map_label_stat=dict(argstr='--label-stat %s', ), - native_vox2ras=dict(argstr='--native-vox2ras', ), proj=dict( - argstr='--proj %s %f %f %f', - requires=('subject_id', 'hemi'), + argstr="--proj %s %f %f %f", + requires=("subject_id", "hemi"), ), reg_file=dict( - argstr='--reg %s', - xor=('reg_file', 'reg_header', 'identity'), + argstr="--reg %s", + extensions=None, + xor=("reg_file", "reg_header", "identity"), ), reg_header=dict( - argstr='--regheader %s', - xor=('reg_file', 'reg_header', 'identity'), + argstr="--regheader %s", + extensions=None, + xor=("reg_file", "reg_header", "identity"), ), seg_file=dict( - argstr='--seg %s', + argstr="--seg %s", copyfile=False, + extensions=None, mandatory=True, - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), + ), + subject_id=dict( + 
argstr="--subject %s", ), - subject_id=dict(argstr='--subject %s', ), subjects_dir=dict(), - surface=dict(argstr='--surf %s', ), + surface=dict( + argstr="--surf %s", + ), template_file=dict( - argstr='--temp %s', + argstr="--temp %s", + extensions=None, mandatory=True, ), vol_label_file=dict( - argstr='--o %s', + argstr="--o %s", + extensions=None, genfile=True, ), ) @@ -74,8 +101,14 @@ def test_Label2Vol_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Label2Vol_outputs(): - output_map = dict(vol_label_file=dict(), ) + output_map = dict( + vol_label_file=dict( + extensions=None, + ), + ) outputs = Label2Vol.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Logan.py b/nipype/interfaces/freesurfer/tests/test_auto_Logan.py new file mode 100644 index 0000000000..34c6dfa6c7 --- /dev/null +++ b/nipype/interfaces/freesurfer/tests/test_auto_Logan.py @@ -0,0 +1,284 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..petsurfer import Logan + + +def test_Logan_inputs(): + input_map = dict( + allow_ill_cond=dict( + argstr="--illcond", + ), + allow_repeated_subjects=dict( + argstr="--allowsubjrep", + ), + args=dict( + argstr="%s", + ), + bp_clip_max=dict( + argstr="--bp-clip-max %f", + ), + bp_clip_neg=dict( + argstr="--bp-clip-neg", + ), + calc_AR1=dict( + argstr="--tar1", + ), + check_opts=dict( + argstr="--checkopts", + ), + compute_log_y=dict( + argstr="--logy", + ), + contrast=dict( + argstr="--C %s...", + ), + cortex=dict( + argstr="--cortex", + xor=["label_file"], + ), + debug=dict( + argstr="--debug", + ), + design=dict( + argstr="--X %s", + extensions=None, + xor=("fsgd", "design", "one_sample"), + ), + diag=dict( + argstr="--diag %d", + ), + diag_cluster=dict( + argstr="--diag-cluster", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + fixed_fx_dof=dict( + argstr="--ffxdof %d", + xor=["fixed_fx_dof_file"], + ), + fixed_fx_dof_file=dict( + argstr="--ffxdofdat %d", + extensions=None, + xor=["fixed_fx_dof"], + ), + fixed_fx_var=dict( + argstr="--yffxvar %s", + extensions=None, + ), + force_perm=dict( + argstr="--perm-force", + ), + fsgd=dict( + argstr="--fsgd %s %s", + xor=("fsgd", "design", "one_sample"), + ), + fwhm=dict( + argstr="--fwhm %f", + ), + glm_dir=dict( + argstr="--glmdir %s", + genfile=True, + ), + hemi=dict(), + in_file=dict( + argstr="--y %s", + copyfile=False, + extensions=None, + mandatory=True, + ), + invert_mask=dict( + argstr="--mask-inv", + ), + label_file=dict( + argstr="--label %s", + extensions=None, + xor=["cortex"], + ), + logan=dict( + argstr="--logan %s %s %g", + mandatory=True, + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + ), + mrtm1=dict( + argstr="--mrtm1 %s %s", + ), + mrtm2=dict( + argstr="--mrtm2 %s %s %f", + ), + nii=dict( + argstr="--nii", + xor=["nii", "nii_gz"], + ), + nii_gz=dict( + argstr="--nii.gz", + xor=["nii", "nii_gz"], + ), + no_contrast_ok=dict( + argstr="--no-contrasts-ok", + ), + no_est_fwhm=dict( + argstr="--no-est-fwhm", + ), + no_mask_smooth=dict( + argstr="--no-mask-smooth", + ), + no_prune=dict( + argstr="--no-prune", + xor=["prunethresh"], + ), + one_sample=dict( + argstr="--osgm", + xor=("one_sample", "fsgd", "design", "contrast"), + ), + pca=dict( + argstr="--pca", + ), + per_voxel_reg=dict( + argstr="--pvr %s...", + ), + profile=dict( + argstr="--profile %d", + ), + prune=dict( + argstr="--prune", + ), + 
prune_thresh=dict( + argstr="--prune_thr %f", + xor=["noprune"], + ), + resynth_test=dict( + argstr="--resynthtest %d", + ), + save_cond=dict( + argstr="--save-cond", + ), + save_estimate=dict( + argstr="--yhat-save", + ), + save_res_corr_mtx=dict( + argstr="--eres-scm", + ), + save_residual=dict( + argstr="--eres-save", + ), + seed=dict( + argstr="--seed %d", + ), + self_reg=dict( + argstr="--selfreg %d %d %d", + ), + sim_done_file=dict( + argstr="--sim-done %s", + extensions=None, + ), + sim_sign=dict( + argstr="--sim-sign %s", + ), + simulation=dict( + argstr="--sim %s %d %f %s", + ), + subject_id=dict(), + subjects_dir=dict(), + surf=dict( + argstr="--surf %s %s %s", + requires=["subject_id", "hemi"], + ), + surf_geo=dict( + usedefault=True, + ), + synth=dict( + argstr="--synth", + ), + uniform=dict( + argstr="--uniform %f %f", + ), + var_fwhm=dict( + argstr="--var-fwhm %f", + ), + vox_dump=dict( + argstr="--voxdump %d %d %d", + ), + weight_file=dict( + extensions=None, + xor=["weighted_ls"], + ), + weight_inv=dict( + argstr="--w-inv", + xor=["weighted_ls"], + ), + weight_sqrt=dict( + argstr="--w-sqrt", + xor=["weighted_ls"], + ), + weighted_ls=dict( + argstr="--wls %s", + extensions=None, + xor=("weight_file", "weight_inv", "weight_sqrt"), + ), + ) + inputs = Logan.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_Logan_outputs(): + output_map = dict( + beta_file=dict( + extensions=None, + ), + bp_file=dict( + extensions=None, + ), + dof_file=dict( + extensions=None, + ), + error_file=dict( + extensions=None, + ), + error_stddev_file=dict( + extensions=None, + ), + error_var_file=dict( + extensions=None, + ), + estimate_file=dict( + extensions=None, + ), + frame_eigenvectors=dict( + extensions=None, + ), + ftest_file=dict(), + fwhm_file=dict( + extensions=None, + ), + gamma_file=dict(), + gamma_var_file=dict(), + glm_dir=dict(), + k2p_file=dict( + extensions=None, + ), + mask_file=dict( + extensions=None, + ), + sig_file=dict(), + singular_values=dict( + extensions=None, + ), + spatial_eigenvectors=dict( + extensions=None, + ), + svd_stats_file=dict( + extensions=None, + ), + ) + outputs = Logan.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py b/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py index 7b6ae4a945..3b3c2f0852 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py @@ -1,46 +1,71 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import MNIBiasCorrection def test_MNIBiasCorrection_inputs(): input_map = dict( - args=dict(argstr='%s', ), - distance=dict(argstr='--distance %d', ), + args=dict( + argstr="%s", + ), + distance=dict( + argstr="--distance %d", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='--i %s', + argstr="--i %s", + extensions=None, mandatory=True, ), iterations=dict( - argstr='--n %d', + argstr="--n %d", usedefault=True, ), - mask=dict(argstr='--mask %s', ), - no_rescale=dict(argstr='--no-rescale', ), + mask=dict( + argstr="--mask %s", + extensions=None, + ), + no_rescale=dict( + argstr="--no-rescale", + ), out_file=dict( - 
argstr='--o %s', + argstr="--o %s", + extensions=None, hash_files=False, keep_extension=True, - name_source=['in_file'], - name_template='%s_output', + name_source=["in_file"], + name_template="%s_output", + ), + protocol_iterations=dict( + argstr="--proto-iters %d", + ), + shrink=dict( + argstr="--shrink %d", + ), + stop=dict( + argstr="--stop %f", ), - protocol_iterations=dict(argstr='--proto-iters %d', ), - shrink=dict(argstr='--shrink %d', ), - stop=dict(argstr='--stop %f', ), subjects_dir=dict(), - transform=dict(argstr='--uchar %s', ), + transform=dict( + argstr="--uchar %s", + extensions=None, + ), ) inputs = MNIBiasCorrection.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MNIBiasCorrection_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MNIBiasCorrection.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MPRtoMNI305.py b/nipype/interfaces/freesurfer/tests/test_auto_MPRtoMNI305.py index b4b7436120..4f21cc2f61 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MPRtoMNI305.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MPRtoMNI305.py @@ -1,17 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import MPRtoMNI305 def test_MPRtoMNI305_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, usedefault=True, ), reference_dir=dict( @@ -29,10 +31,17 @@ def test_MPRtoMNI305_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MPRtoMNI305_outputs(): output_map = dict( - log_file=dict(usedefault=True, ), - out_file=dict(), + log_file=dict( + extensions=None, + usedefault=True, + ), + out_file=dict( + extensions=None, + ), ) outputs = MPRtoMNI305.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIConvert.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIConvert.py index b44cf7a308..9e229078ef 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIConvert.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIConvert.py @@ -1,115 +1,292 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import MRIConvert def test_MRIConvert_inputs(): input_map = dict( - apply_inv_transform=dict(argstr='--apply_inverse_transform %s', ), - apply_transform=dict(argstr='--apply_transform %s', ), - args=dict(argstr='%s', ), - ascii=dict(argstr='--ascii', ), - autoalign_matrix=dict(argstr='--autoalign %s', ), - color_file=dict(argstr='--color_file %s', ), - conform=dict(argstr='--conform', ), - conform_min=dict(argstr='--conform_min', ), - conform_size=dict(argstr='--conform_size %s', ), - crop_center=dict(argstr='--crop %d %d %d', ), - crop_gdf=dict(argstr='--crop_gdf', ), - crop_size=dict(argstr='--cropsize %d %d %d', ), - cut_ends=dict(argstr='--cutends %d', ), - cw256=dict(argstr='--cw256', ), - devolve_transform=dict(argstr='--devolvexfm %s', ), - drop_n=dict(argstr='--ndrop %d', ), + apply_inv_transform=dict( + argstr="--apply_inverse_transform %s", + extensions=None, + ), + apply_transform=dict( + 
argstr="--apply_transform %s", + extensions=None, + ), + args=dict( + argstr="%s", + ), + ascii=dict( + argstr="--ascii", + ), + autoalign_matrix=dict( + argstr="--autoalign %s", + extensions=None, + ), + color_file=dict( + argstr="--color_file %s", + extensions=None, + ), + conform=dict( + argstr="--conform", + ), + conform_min=dict( + argstr="--conform_min", + ), + conform_size=dict( + argstr="--conform_size %s", + ), + crop_center=dict( + argstr="--crop %d %d %d", + ), + crop_gdf=dict( + argstr="--crop_gdf", + ), + crop_size=dict( + argstr="--cropsize %d %d %d", + ), + cut_ends=dict( + argstr="--cutends %d", + ), + cw256=dict( + argstr="--cw256", + ), + devolve_transform=dict( + argstr="--devolvexfm %s", + ), + drop_n=dict( + argstr="--ndrop %d", + ), environ=dict( nohash=True, usedefault=True, ), - fill_parcellation=dict(argstr='--fill_parcellation', ), - force_ras=dict(argstr='--force_ras_good', ), - frame=dict(argstr='--frame %d', ), - frame_subsample=dict(argstr='--fsubsample %d %d %d', ), - fwhm=dict(argstr='--fwhm %f', ), - in_center=dict(argstr='--in_center %s', ), + fill_parcellation=dict( + argstr="--fill_parcellation", + ), + force_ras=dict( + argstr="--force_ras_good", + ), + frame=dict( + argstr="--frame %d", + ), + frame_subsample=dict( + argstr="--fsubsample %d %d %d", + ), + fwhm=dict( + argstr="--fwhm %f", + ), + in_center=dict( + argstr="--in_center %s", + ), in_file=dict( - argstr='--input_volume %s', + argstr="--input_volume %s", + extensions=None, mandatory=True, position=-2, ), - in_i_dir=dict(argstr='--in_i_direction %f %f %f', ), - in_i_size=dict(argstr='--in_i_size %d', ), - in_info=dict(argstr='--in_info', ), - in_j_dir=dict(argstr='--in_j_direction %f %f %f', ), - in_j_size=dict(argstr='--in_j_size %d', ), - in_k_dir=dict(argstr='--in_k_direction %f %f %f', ), - in_k_size=dict(argstr='--in_k_size %d', ), - in_like=dict(argstr='--in_like %s', ), - in_matrix=dict(argstr='--in_matrix', ), - in_orientation=dict(argstr='--in_orientation %s', ), - in_scale=dict(argstr='--scale %f', ), - in_stats=dict(argstr='--in_stats', ), - in_type=dict(argstr='--in_type %s', ), - invert_contrast=dict(argstr='--invert_contrast %f', ), - midframe=dict(argstr='--mid-frame', ), - no_change=dict(argstr='--nochange', ), - no_scale=dict(argstr='--no_scale 1', ), - no_translate=dict(argstr='--no_translate', ), - no_write=dict(argstr='--no_write', ), - out_center=dict(argstr='--out_center %f %f %f', ), - out_datatype=dict(argstr='--out_data_type %s', ), + in_i_dir=dict( + argstr="--in_i_direction %f %f %f", + ), + in_i_size=dict( + argstr="--in_i_size %d", + ), + in_info=dict( + argstr="--in_info", + ), + in_j_dir=dict( + argstr="--in_j_direction %f %f %f", + ), + in_j_size=dict( + argstr="--in_j_size %d", + ), + in_k_dir=dict( + argstr="--in_k_direction %f %f %f", + ), + in_k_size=dict( + argstr="--in_k_size %d", + ), + in_like=dict( + argstr="--in_like %s", + extensions=None, + ), + in_matrix=dict( + argstr="--in_matrix", + ), + in_orientation=dict( + argstr="--in_orientation %s", + ), + in_scale=dict( + argstr="--scale %f", + ), + in_stats=dict( + argstr="--in_stats", + ), + in_type=dict( + argstr="--in_type %s", + ), + invert_contrast=dict( + argstr="--invert_contrast %f", + ), + midframe=dict( + argstr="--mid-frame", + ), + no_change=dict( + argstr="--nochange", + ), + no_scale=dict( + argstr="--no_scale 1", + ), + no_translate=dict( + argstr="--no_translate", + ), + no_write=dict( + argstr="--no_write", + ), + out_center=dict( + argstr="--out_center %f %f %f", + ), + 
out_datatype=dict( + argstr="--out_data_type %s", + ), out_file=dict( - argstr='--output_volume %s', + argstr="--output_volume %s", + extensions=None, genfile=True, position=-1, ), - out_i_count=dict(argstr='--out_i_count %d', ), - out_i_dir=dict(argstr='--out_i_direction %f %f %f', ), - out_i_size=dict(argstr='--out_i_size %d', ), - out_info=dict(argstr='--out_info', ), - out_j_count=dict(argstr='--out_j_count %d', ), - out_j_dir=dict(argstr='--out_j_direction %f %f %f', ), - out_j_size=dict(argstr='--out_j_size %d', ), - out_k_count=dict(argstr='--out_k_count %d', ), - out_k_dir=dict(argstr='--out_k_direction %f %f %f', ), - out_k_size=dict(argstr='--out_k_size %d', ), - out_matrix=dict(argstr='--out_matrix', ), - out_orientation=dict(argstr='--out_orientation %s', ), - out_scale=dict(argstr='--out-scale %d', ), - out_stats=dict(argstr='--out_stats', ), - out_type=dict(argstr='--out_type %s', ), - parse_only=dict(argstr='--parse_only', ), - read_only=dict(argstr='--read_only', ), - reorder=dict(argstr='--reorder %d %d %d', ), - resample_type=dict(argstr='--resample_type %s', ), - reslice_like=dict(argstr='--reslice_like %s', ), - sdcm_list=dict(argstr='--sdcmlist %s', ), - skip_n=dict(argstr='--nskip %d', ), - slice_bias=dict(argstr='--slice-bias %f', ), - slice_crop=dict(argstr='--slice-crop %d %d', ), - slice_reverse=dict(argstr='--slice-reverse', ), - smooth_parcellation=dict(argstr='--smooth_parcellation', ), - sphinx=dict(argstr='--sphinx', ), - split=dict(argstr='--split', ), - status_file=dict(argstr='--status %s', ), - subject_name=dict(argstr='--subject_name %s', ), + out_i_count=dict( + argstr="--out_i_count %d", + ), + out_i_dir=dict( + argstr="--out_i_direction %f %f %f", + ), + out_i_size=dict( + argstr="--out_i_size %d", + ), + out_info=dict( + argstr="--out_info", + ), + out_j_count=dict( + argstr="--out_j_count %d", + ), + out_j_dir=dict( + argstr="--out_j_direction %f %f %f", + ), + out_j_size=dict( + argstr="--out_j_size %d", + ), + out_k_count=dict( + argstr="--out_k_count %d", + ), + out_k_dir=dict( + argstr="--out_k_direction %f %f %f", + ), + out_k_size=dict( + argstr="--out_k_size %d", + ), + out_matrix=dict( + argstr="--out_matrix", + ), + out_orientation=dict( + argstr="--out_orientation %s", + ), + out_scale=dict( + argstr="--out-scale %d", + ), + out_stats=dict( + argstr="--out_stats", + ), + out_type=dict( + argstr="--out_type %s", + ), + parse_only=dict( + argstr="--parse_only", + ), + read_only=dict( + argstr="--read_only", + ), + reorder=dict( + argstr="--reorder %d %d %d", + ), + resample_type=dict( + argstr="--resample_type %s", + ), + reslice_like=dict( + argstr="--reslice_like %s", + extensions=None, + ), + sdcm_list=dict( + argstr="--sdcmlist %s", + extensions=None, + ), + skip_n=dict( + argstr="--nskip %d", + ), + slice_bias=dict( + argstr="--slice-bias %f", + ), + slice_crop=dict( + argstr="--slice-crop %d %d", + ), + slice_reverse=dict( + argstr="--slice-reverse", + ), + smooth_parcellation=dict( + argstr="--smooth_parcellation", + ), + sphinx=dict( + argstr="--sphinx", + ), + split=dict( + argstr="--split", + ), + status_file=dict( + argstr="--status %s", + extensions=None, + ), + subject_name=dict( + argstr="--subject_name %s", + ), subjects_dir=dict(), - te=dict(argstr='-te %d', ), - template_info=dict(argstr='--template_info', ), - template_type=dict(argstr='--template_type %s', ), - ti=dict(argstr='-ti %d', ), - tr=dict(argstr='-tr %d', ), - unwarp_gradient=dict(argstr='--unwarp_gradient_nonlinearity', ), - vox_size=dict(argstr='-voxsize %f %f 
%f', ), - zero_ge_z_offset=dict(argstr='--zero_ge_z_offset', ), - zero_outlines=dict(argstr='--zero_outlines', ), + te=dict( + argstr="-te %d", + ), + template_info=dict( + argstr="--template_info", + ), + template_type=dict( + argstr="--template_type %s", + ), + ti=dict( + argstr="-ti %d", + ), + tr=dict( + argstr="-tr %d", + ), + unwarp_gradient=dict( + argstr="--unwarp_gradient_nonlinearity", + ), + vox_size=dict( + argstr="-voxsize %f %f %f", + ), + zero_ge_z_offset=dict( + argstr="--zero_ge_z_offset", + ), + zero_outlines=dict( + argstr="--zero_outlines", + ), ) inputs = MRIConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRIConvert_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict(), + ) outputs = MRIConvert.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRICoreg.py b/nipype/interfaces/freesurfer/tests/test_auto_MRICoreg.py index 6c12cabdc2..3d85129f3d 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRICoreg.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRICoreg.py @@ -1,83 +1,140 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import MRICoreg def test_MRICoreg_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), brute_force_limit=dict( - argstr='--bf-lim %g', - xor=['no_brute_force'], + argstr="--bf-lim %g", + xor=["no_brute_force"], ), brute_force_samples=dict( - argstr='--bf-nsamp %d', - xor=['no_brute_force'], + argstr="--bf-nsamp %d", + xor=["no_brute_force"], + ), + conform_reference=dict( + argstr="--conf-ref", + ), + dof=dict( + argstr="--dof %d", ), - conform_reference=dict(argstr='--conf-ref', ), - dof=dict(argstr='--dof %d', ), environ=dict( nohash=True, usedefault=True, ), - ftol=dict(argstr='--ftol %e', ), - initial_rotation=dict(argstr='--rot %g %g %g', ), - initial_scale=dict(argstr='--scale %g %g %g', ), - initial_shear=dict(argstr='--shear %g %g %g', ), - initial_translation=dict(argstr='--trans %g %g %g', ), - linmintol=dict(argstr='--linmintol %e', ), - max_iters=dict(argstr='--nitersmax %d', ), - no_brute_force=dict(argstr='--no-bf', ), - no_coord_dithering=dict(argstr='--no-coord-dither', ), - no_cras0=dict(argstr='--no-cras0', ), - no_intensity_dithering=dict(argstr='--no-intensity-dither', ), - no_smooth=dict(argstr='--no-smooth', ), - num_threads=dict(argstr='--threads %d', ), + ftol=dict( + argstr="--ftol %e", + ), + initial_rotation=dict( + argstr="--rot %g %g %g", + ), + initial_scale=dict( + argstr="--scale %g %g %g", + ), + initial_shear=dict( + argstr="--shear %g %g %g", + ), + initial_translation=dict( + argstr="--trans %g %g %g", + ), + linmintol=dict( + argstr="--linmintol %e", + ), + max_iters=dict( + argstr="--nitersmax %d", + ), + no_brute_force=dict( + argstr="--no-bf", + ), + no_coord_dithering=dict( + argstr="--no-coord-dither", + ), + no_cras0=dict( + argstr="--no-cras0", + ), + no_intensity_dithering=dict( + argstr="--no-intensity-dither", + ), + no_smooth=dict( + argstr="--no-smooth", + ), + num_threads=dict( + argstr="--threads %d", + ), out_lta_file=dict( - argstr='--lta %s', + argstr="--lta %s", usedefault=True, ), - out_params_file=dict(argstr='--params %s', ), - out_reg_file=dict(argstr='--regdat %s', ), - ref_fwhm=dict(argstr='--ref-fwhm', ), + out_params_file=dict( + 
argstr="--params %s", + ), + out_reg_file=dict( + argstr="--regdat %s", + ), + ref_fwhm=dict( + argstr="--ref-fwhm", + ), reference_file=dict( - argstr='--ref %s', + argstr="--ref %s", copyfile=False, + extensions=None, mandatory=True, - xor=['subject_id'], + xor=["subject_id"], ), reference_mask=dict( - argstr='--ref-mask %s', + argstr="--ref-mask %s", position=2, ), - saturation_threshold=dict(argstr='--sat %g', ), - sep=dict(argstr='--sep %s...', ), + saturation_threshold=dict( + argstr="--sat %g", + ), + sep=dict( + argstr="--sep %s...", + ), source_file=dict( - argstr='--mov %s', + argstr="--mov %s", copyfile=False, + extensions=None, mandatory=True, ), - source_mask=dict(argstr='--mov-mask', ), - source_oob=dict(argstr='--mov-oob', ), + source_mask=dict( + argstr="--mov-mask", + ), + source_oob=dict( + argstr="--mov-oob", + ), subject_id=dict( - argstr='--s %s', + argstr="--s %s", mandatory=True, position=1, - requires=['subjects_dir'], - xor=['reference_file'], + requires=["subjects_dir"], + xor=["reference_file"], + ), + subjects_dir=dict( + argstr="--sd %s", ), - subjects_dir=dict(argstr='--sd %s', ), ) inputs = MRICoreg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRICoreg_outputs(): output_map = dict( - out_lta_file=dict(), - out_params_file=dict(), - out_reg_file=dict(), + out_lta_file=dict( + extensions=None, + ), + out_params_file=dict( + extensions=None, + ), + out_reg_file=dict( + extensions=None, + ), ) outputs = MRICoreg.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIFill.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIFill.py index 719986961d..bf359364ba 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIFill.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIFill.py @@ -1,39 +1,57 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MRIFill def test_MRIFill_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - log_file=dict(argstr='-a %s', ), + log_file=dict( + argstr="-a %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), - segmentation=dict(argstr='-segmentation %s', ), + segmentation=dict( + argstr="-segmentation %s", + extensions=None, + ), subjects_dir=dict(), - transform=dict(argstr='-xform %s', ), + transform=dict( + argstr="-xform %s", + extensions=None, + ), ) inputs = MRIFill.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRIFill_outputs(): output_map = dict( - log_file=dict(), - out_file=dict(), + log_file=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), ) outputs = MRIFill.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIMarchingCubes.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIMarchingCubes.py index f43d217256..ccb2ab4388 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIMarchingCubes.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIMarchingCubes.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils 
import MRIMarchingCubes def test_MRIMarchingCubes_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), connectivity_value=dict( - argstr='%d', + argstr="%d", position=-1, usedefault=True, ), @@ -16,17 +17,19 @@ def test_MRIMarchingCubes_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), label_value=dict( - argstr='%d', + argstr="%d", mandatory=True, position=2, ), out_file=dict( - argstr='./%s', + argstr="./%s", + extensions=None, genfile=True, position=-2, ), @@ -37,8 +40,14 @@ def test_MRIMarchingCubes_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRIMarchingCubes_outputs(): - output_map = dict(surface=dict(), ) + output_map = dict( + surface=dict( + extensions=None, + ), + ) outputs = MRIMarchingCubes.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py index 2e7c7d2ea5..e6a239fbd5 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py @@ -1,50 +1,66 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MRIPretess def test_MRIPretess_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_filled=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-4, ), in_norm=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - keep=dict(argstr='-keep', ), + keep=dict( + argstr="-keep", + ), label=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-3, usedefault=True, ), - nocorners=dict(argstr='-nocorners', ), + nocorners=dict( + argstr="-nocorners", + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, keep_extension=True, - name_source=['in_filled'], - name_template='%s_pretesswm', + name_source=["in_filled"], + name_template="%s_pretesswm", position=-1, ), subjects_dir=dict(), - test=dict(argstr='-test', ), + test=dict( + argstr="-test", + ), ) inputs = MRIPretess.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRIPretess_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MRIPretess.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py index c92b4fea15..845e6c6c3c 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py @@ -1,81 +1,101 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import MRISPreproc def test_MRISPreproc_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fsgd_file=dict( - argstr='--fsgd %s', - xor=('subjects', 'fsgd_file', 'subject_file'), + argstr="--fsgd %s", + extensions=None, + xor=("subjects", "fsgd_file", "subject_file"), ), 
fwhm=dict( - argstr='--fwhm %f', - xor=['num_iters'], + argstr="--fwhm %f", + xor=["num_iters"], ), fwhm_source=dict( - argstr='--fwhm-src %f', - xor=['num_iters_source'], + argstr="--fwhm-src %f", + xor=["num_iters_source"], ), hemi=dict( - argstr='--hemi %s', + argstr="--hemi %s", mandatory=True, ), num_iters=dict( - argstr='--niters %d', - xor=['fwhm'], + argstr="--niters %d", + xor=["fwhm"], ), num_iters_source=dict( - argstr='--niterssrc %d', - xor=['fwhm_source'], + argstr="--niterssrc %d", + xor=["fwhm_source"], ), out_file=dict( - argstr='--out %s', + argstr="--out %s", + extensions=None, genfile=True, ), - proj_frac=dict(argstr='--projfrac %s', ), - smooth_cortex_only=dict(argstr='--smooth-cortex-only', ), - source_format=dict(argstr='--srcfmt %s', ), + proj_frac=dict( + argstr="--projfrac %s", + ), + smooth_cortex_only=dict( + argstr="--smooth-cortex-only", + ), + source_format=dict( + argstr="--srcfmt %s", + ), subject_file=dict( - argstr='--f %s', - xor=('subjects', 'fsgd_file', 'subject_file'), + argstr="--f %s", + extensions=None, + xor=("subjects", "fsgd_file", "subject_file"), ), subjects=dict( - argstr='--s %s...', - xor=('subjects', 'fsgd_file', 'subject_file'), + argstr="--s %s...", + xor=("subjects", "fsgd_file", "subject_file"), ), subjects_dir=dict(), surf_area=dict( - argstr='--area %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + argstr="--area %s", + xor=("surf_measure", "surf_measure_file", "surf_area"), + ), + surf_dir=dict( + argstr="--surfdir %s", ), - surf_dir=dict(argstr='--surfdir %s', ), surf_measure=dict( - argstr='--meas %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + argstr="--meas %s", + xor=("surf_measure", "surf_measure_file", "surf_area"), ), surf_measure_file=dict( - argstr='--is %s...', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + argstr="--is %s...", + xor=("surf_measure", "surf_measure_file", "surf_area"), ), target=dict( - argstr='--target %s', + argstr="--target %s", mandatory=True, ), - vol_measure_file=dict(argstr='--iv %s %s...', ), + vol_measure_file=dict( + argstr="--iv %s %s...", + ), ) inputs = MRISPreproc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRISPreproc_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MRISPreproc.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py index e72d8adb9e..5bdb0614e5 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py @@ -1,93 +1,120 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import MRISPreprocReconAll def test_MRISPreprocReconAll_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), copy_inputs=dict(), environ=dict( nohash=True, usedefault=True, ), fsgd_file=dict( - argstr='--fsgd %s', - xor=('subjects', 'fsgd_file', 'subject_file'), + argstr="--fsgd %s", + extensions=None, + xor=("subjects", "fsgd_file", "subject_file"), ), fwhm=dict( - argstr='--fwhm %f', - xor=['num_iters'], + argstr="--fwhm %f", + xor=["num_iters"], ), fwhm_source=dict( - argstr='--fwhm-src %f', - xor=['num_iters_source'], + 
argstr="--fwhm-src %f", + xor=["num_iters_source"], ), hemi=dict( - argstr='--hemi %s', + argstr="--hemi %s", mandatory=True, ), - lh_surfreg_target=dict(requires=['surfreg_files'], ), + lh_surfreg_target=dict( + extensions=None, + requires=["surfreg_files"], + ), num_iters=dict( - argstr='--niters %d', - xor=['fwhm'], + argstr="--niters %d", + xor=["fwhm"], ), num_iters_source=dict( - argstr='--niterssrc %d', - xor=['fwhm_source'], + argstr="--niterssrc %d", + xor=["fwhm_source"], ), out_file=dict( - argstr='--out %s', + argstr="--out %s", + extensions=None, genfile=True, ), - proj_frac=dict(argstr='--projfrac %s', ), - rh_surfreg_target=dict(requires=['surfreg_files'], ), - smooth_cortex_only=dict(argstr='--smooth-cortex-only', ), - source_format=dict(argstr='--srcfmt %s', ), + proj_frac=dict( + argstr="--projfrac %s", + ), + rh_surfreg_target=dict( + extensions=None, + requires=["surfreg_files"], + ), + smooth_cortex_only=dict( + argstr="--smooth-cortex-only", + ), + source_format=dict( + argstr="--srcfmt %s", + ), subject_file=dict( - argstr='--f %s', - xor=('subjects', 'fsgd_file', 'subject_file'), + argstr="--f %s", + extensions=None, + xor=("subjects", "fsgd_file", "subject_file"), ), subject_id=dict( - argstr='--s %s', + argstr="--s %s", usedefault=True, - xor=('subjects', 'fsgd_file', 'subject_file', 'subject_id'), + xor=("subjects", "fsgd_file", "subject_file", "subject_id"), ), subjects=dict( - argstr='--s %s...', - xor=('subjects', 'fsgd_file', 'subject_file'), + argstr="--s %s...", + xor=("subjects", "fsgd_file", "subject_file"), ), subjects_dir=dict(), surf_area=dict( - argstr='--area %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + argstr="--area %s", + xor=("surf_measure", "surf_measure_file", "surf_area"), + ), + surf_dir=dict( + argstr="--surfdir %s", ), - surf_dir=dict(argstr='--surfdir %s', ), surf_measure=dict( - argstr='--meas %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + argstr="--meas %s", + xor=("surf_measure", "surf_measure_file", "surf_area"), ), surf_measure_file=dict( - argstr='--meas %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + argstr="--meas %s", + extensions=None, + xor=("surf_measure", "surf_measure_file", "surf_area"), ), surfreg_files=dict( - argstr='--surfreg %s', - requires=['lh_surfreg_target', 'rh_surfreg_target'], + argstr="--surfreg %s", + requires=["lh_surfreg_target", "rh_surfreg_target"], ), target=dict( - argstr='--target %s', + argstr="--target %s", mandatory=True, ), - vol_measure_file=dict(argstr='--iv %s %s...', ), + vol_measure_file=dict( + argstr="--iv %s %s...", + ), ) inputs = MRISPreprocReconAll.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRISPreprocReconAll_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MRISPreprocReconAll.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRITessellate.py b/nipype/interfaces/freesurfer/tests/test_auto_MRITessellate.py index a5899cfe70..8aa7210d0e 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRITessellate.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRITessellate.py @@ -1,41 +1,54 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MRITessellate def test_MRITessellate_inputs(): input_map = 
dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), label_value=dict( - argstr='%d', + argstr="%d", mandatory=True, position=-2, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), subjects_dir=dict(), - tesselate_all_voxels=dict(argstr='-a', ), - use_real_RAS_coordinates=dict(argstr='-n', ), + tesselate_all_voxels=dict( + argstr="-a", + ), + use_real_RAS_coordinates=dict( + argstr="-n", + ), ) inputs = MRITessellate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRITessellate_outputs(): - output_map = dict(surface=dict(), ) + output_map = dict( + surface=dict( + extensions=None, + ), + ) outputs = MRITessellate.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py index 441be3e0a4..e37cf0723a 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py @@ -1,61 +1,88 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import MRIsCALabel def test_MRIsCALabel_inputs(): input_map = dict( - args=dict(argstr='%s', ), - aseg=dict(argstr='-aseg %s', ), + args=dict( + argstr="%s", + ), + aseg=dict( + argstr="-aseg %s", + extensions=None, + ), canonsurf=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), classifier=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), copy_inputs=dict(), - curv=dict(mandatory=True, ), + curv=dict( + extensions=None, + mandatory=True, + ), environ=dict( nohash=True, usedefault=True, ), hemisphere=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-4, ), - label=dict(argstr='-l %s', ), + label=dict( + argstr="-l %s", + extensions=None, + ), num_threads=dict(), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, keep_extension=True, - name_source=['hemisphere'], - name_template='%s.aparc.annot', + name_source=["hemisphere"], + name_template="%s.aparc.annot", position=-1, ), - seed=dict(argstr='-seed %d', ), - smoothwm=dict(mandatory=True, ), + seed=dict( + argstr="-seed %d", + ), + smoothwm=dict( + extensions=None, + mandatory=True, + ), subject_id=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-5, usedefault=True, ), subjects_dir=dict(), - sulc=dict(mandatory=True, ), + sulc=dict( + extensions=None, + mandatory=True, + ), ) inputs = MRIsCALabel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRIsCALabel_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MRIsCALabel.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py index cbd68c8457..1ef9c95c46 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py @@ -1,42 +1,46 @@ # AUTO-GENERATED by 
tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MRIsCalc def test_MRIsCalc_inputs(): input_map = dict( action=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file1=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), in_file2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, - xor=['in_float', 'in_int'], + xor=["in_float", "in_int"], ), in_float=dict( - argstr='%f', + argstr="%f", position=-1, - xor=['in_file2', 'in_int'], + xor=["in_file2", "in_int"], ), in_int=dict( - argstr='%d', + argstr="%d", position=-1, - xor=['in_file2', 'in_float'], + xor=["in_file2", "in_float"], ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, mandatory=True, ), subjects_dir=dict(), @@ -46,8 +50,14 @@ def test_MRIsCalc_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRIsCalc_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MRIsCalc.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCombine.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCombine.py index 657292c5c7..01aef41a01 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCombine.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCombine.py @@ -1,22 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MRIsCombine def test_MRIsCombine_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( - argstr='--combinesurfs %s', + argstr="--combinesurfs %s", mandatory=True, position=1, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, mandatory=True, position=-1, @@ -28,8 +30,14 @@ def test_MRIsCombine_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRIsCombine_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MRIsCombine.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py index 21622b2449..daf4462ff8 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py @@ -1,57 +1,106 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MRIsConvert def test_MRIsConvert_inputs(): input_map = dict( - annot_file=dict(argstr='--annot %s', ), - args=dict(argstr='%s', ), - dataarray_num=dict(argstr='--da_num %d', ), + annot_file=dict( + argstr="--annot %s", + extensions=None, + ), + args=dict( + argstr="%s", + ), + dataarray_num=dict( + argstr="--da_num %d", + ), environ=dict( nohash=True, usedefault=True, ), - functional_file=dict(argstr='-f %s', ), + functional_file=dict( + argstr="-f %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", + 
extensions=None, mandatory=True, position=-2, ), - label_file=dict(argstr='--label %s', ), - labelstats_outfile=dict(argstr='--labelstats %s', ), - normal=dict(argstr='-n', ), - origname=dict(argstr='-o %s', ), + label_file=dict( + argstr="--label %s", + extensions=None, + ), + labelstats_outfile=dict( + argstr="--labelstats %s", + extensions=None, + ), + normal=dict( + argstr="-n", + ), + origname=dict( + argstr="-o %s", + ), out_datatype=dict( mandatory=True, - xor=['out_file'], + xor=["out_file"], ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, mandatory=True, position=-1, - xor=['out_datatype'], + xor=["out_datatype"], + ), + parcstats_file=dict( + argstr="--parcstats %s", + extensions=None, + ), + patch=dict( + argstr="-p", + ), + rescale=dict( + argstr="-r", + ), + scalarcurv_file=dict( + argstr="-c %s", + extensions=None, + ), + scale=dict( + argstr="-s %.3f", ), - parcstats_file=dict(argstr='--parcstats %s', ), - patch=dict(argstr='-p', ), - rescale=dict(argstr='-r', ), - scalarcurv_file=dict(argstr='-c %s', ), - scale=dict(argstr='-s %.3f', ), subjects_dir=dict(), - talairachxfm_subjid=dict(argstr='-t %s', ), - to_scanner=dict(argstr='--to-scanner', ), - to_tkr=dict(argstr='--to-tkr', ), - vertex=dict(argstr='-v', ), - xyz_ascii=dict(argstr='-a', ), + talairachxfm_subjid=dict( + argstr="-t %s", + ), + to_scanner=dict( + argstr="--to-scanner", + ), + to_tkr=dict( + argstr="--to-tkr", + ), + vertex=dict( + argstr="-v", + ), + xyz_ascii=dict( + argstr="-a", + ), ) inputs = MRIsConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRIsConvert_outputs(): - output_map = dict(converted=dict(), ) + output_map = dict( + converted=dict( + extensions=None, + ), + ) outputs = MRIsConvert.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsExpand.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsExpand.py index 8639c1e7b6..05e34a29b5 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsExpand.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsExpand.py @@ -1,58 +1,78 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MRIsExpand def test_MRIsExpand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), distance=dict( - argstr='%g', + argstr="%g", mandatory=True, position=-2, ), - dt=dict(argstr='-T %g', ), + dt=dict( + argstr="-T %g", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=-3, ), - nsurfaces=dict(argstr='-N %d', ), + nsurfaces=dict( + argstr="-N %d", + ), out_name=dict( - argstr='%s', + argstr="%s", position=-1, usedefault=True, ), pial=dict( - argstr='-pial %s', + argstr="-pial %s", copyfile=False, ), - smooth_averages=dict(argstr='-A %d', ), + smooth_averages=dict( + argstr="-A %d", + ), sphere=dict( copyfile=False, usedefault=True, ), - spring=dict(argstr='-S %g', ), + spring=dict( + argstr="-S %g", + ), subjects_dir=dict(), - thickness=dict(argstr='-thickness', ), + thickness=dict( + argstr="-thickness", + ), thickness_name=dict( - argstr='-thickness_name %s', + argstr="-thickness_name %s", copyfile=False, ), - write_iterations=dict(argstr='-W %d', ), + write_iterations=dict( + argstr="-W %d", + ), ) inputs = 
MRIsExpand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRIsExpand_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MRIsExpand.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py index 73e48a2521..9cc45189a0 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py @@ -1,34 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MRIsInflate def test_MRIsInflate_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, mandatory=True, position=-2, ), no_save_sulc=dict( - argstr='-no-save-sulc', - xor=['out_sulc'], + argstr="-no-save-sulc", + xor=["out_sulc"], ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, keep_extension=True, - name_source=['in_file'], - name_template='%s.inflated', + name_source=["in_file"], + name_template="%s.inflated", position=-1, ), - out_sulc=dict(xor=['no_save_sulc'], ), + out_sulc=dict( + extensions=None, + xor=["no_save_sulc"], + ), subjects_dir=dict(), ) inputs = MRIsInflate.input_spec() @@ -36,10 +42,16 @@ def test_MRIsInflate_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRIsInflate_outputs(): output_map = dict( - out_file=dict(), - out_sulc=dict(), + out_file=dict( + extensions=None, + ), + out_sulc=dict( + extensions=None, + ), ) outputs = MRIsInflate.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRTM1.py b/nipype/interfaces/freesurfer/tests/test_auto_MRTM1.py new file mode 100644 index 0000000000..1637214b9e --- /dev/null +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRTM1.py @@ -0,0 +1,284 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..petsurfer import MRTM1 + + +def test_MRTM1_inputs(): + input_map = dict( + allow_ill_cond=dict( + argstr="--illcond", + ), + allow_repeated_subjects=dict( + argstr="--allowsubjrep", + ), + args=dict( + argstr="%s", + ), + bp_clip_max=dict( + argstr="--bp-clip-max %f", + ), + bp_clip_neg=dict( + argstr="--bp-clip-neg", + ), + calc_AR1=dict( + argstr="--tar1", + ), + check_opts=dict( + argstr="--checkopts", + ), + compute_log_y=dict( + argstr="--logy", + ), + contrast=dict( + argstr="--C %s...", + ), + cortex=dict( + argstr="--cortex", + xor=["label_file"], + ), + debug=dict( + argstr="--debug", + ), + design=dict( + argstr="--X %s", + extensions=None, + xor=("fsgd", "design", "one_sample"), + ), + diag=dict( + argstr="--diag %d", + ), + diag_cluster=dict( + argstr="--diag-cluster", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + fixed_fx_dof=dict( + argstr="--ffxdof %d", + xor=["fixed_fx_dof_file"], + ), + fixed_fx_dof_file=dict( + argstr="--ffxdofdat %d", + extensions=None, + xor=["fixed_fx_dof"], + ), + fixed_fx_var=dict( + argstr="--yffxvar %s", + extensions=None, + ), + force_perm=dict( + argstr="--perm-force", + ), + fsgd=dict( + argstr="--fsgd %s %s", + 
xor=("fsgd", "design", "one_sample"), + ), + fwhm=dict( + argstr="--fwhm %f", + ), + glm_dir=dict( + argstr="--glmdir %s", + genfile=True, + ), + hemi=dict(), + in_file=dict( + argstr="--y %s", + copyfile=False, + extensions=None, + mandatory=True, + ), + invert_mask=dict( + argstr="--mask-inv", + ), + label_file=dict( + argstr="--label %s", + extensions=None, + xor=["cortex"], + ), + logan=dict( + argstr="--logan %s %s %f", + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + ), + mrtm1=dict( + argstr="--mrtm1 %s %s", + mandatory=True, + ), + mrtm2=dict( + argstr="--mrtm2 %s %s %f", + ), + nii=dict( + argstr="--nii", + xor=["nii", "nii_gz"], + ), + nii_gz=dict( + argstr="--nii.gz", + xor=["nii", "nii_gz"], + ), + no_contrast_ok=dict( + argstr="--no-contrasts-ok", + ), + no_est_fwhm=dict( + argstr="--no-est-fwhm", + ), + no_mask_smooth=dict( + argstr="--no-mask-smooth", + ), + no_prune=dict( + argstr="--no-prune", + xor=["prunethresh"], + ), + one_sample=dict( + argstr="--osgm", + xor=("one_sample", "fsgd", "design", "contrast"), + ), + pca=dict( + argstr="--pca", + ), + per_voxel_reg=dict( + argstr="--pvr %s...", + ), + profile=dict( + argstr="--profile %d", + ), + prune=dict( + argstr="--prune", + ), + prune_thresh=dict( + argstr="--prune_thr %f", + xor=["noprune"], + ), + resynth_test=dict( + argstr="--resynthtest %d", + ), + save_cond=dict( + argstr="--save-cond", + ), + save_estimate=dict( + argstr="--yhat-save", + ), + save_res_corr_mtx=dict( + argstr="--eres-scm", + ), + save_residual=dict( + argstr="--eres-save", + ), + seed=dict( + argstr="--seed %d", + ), + self_reg=dict( + argstr="--selfreg %d %d %d", + ), + sim_done_file=dict( + argstr="--sim-done %s", + extensions=None, + ), + sim_sign=dict( + argstr="--sim-sign %s", + ), + simulation=dict( + argstr="--sim %s %d %f %s", + ), + subject_id=dict(), + subjects_dir=dict(), + surf=dict( + argstr="--surf %s %s %s", + requires=["subject_id", "hemi"], + ), + surf_geo=dict( + usedefault=True, + ), + synth=dict( + argstr="--synth", + ), + uniform=dict( + argstr="--uniform %f %f", + ), + var_fwhm=dict( + argstr="--var-fwhm %f", + ), + vox_dump=dict( + argstr="--voxdump %d %d %d", + ), + weight_file=dict( + extensions=None, + xor=["weighted_ls"], + ), + weight_inv=dict( + argstr="--w-inv", + xor=["weighted_ls"], + ), + weight_sqrt=dict( + argstr="--w-sqrt", + xor=["weighted_ls"], + ), + weighted_ls=dict( + argstr="--wls %s", + extensions=None, + xor=("weight_file", "weight_inv", "weight_sqrt"), + ), + ) + inputs = MRTM1.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_MRTM1_outputs(): + output_map = dict( + beta_file=dict( + extensions=None, + ), + bp_file=dict( + extensions=None, + ), + dof_file=dict( + extensions=None, + ), + error_file=dict( + extensions=None, + ), + error_stddev_file=dict( + extensions=None, + ), + error_var_file=dict( + extensions=None, + ), + estimate_file=dict( + extensions=None, + ), + frame_eigenvectors=dict( + extensions=None, + ), + ftest_file=dict(), + fwhm_file=dict( + extensions=None, + ), + gamma_file=dict(), + gamma_var_file=dict(), + glm_dir=dict(), + k2p_file=dict( + extensions=None, + ), + mask_file=dict( + extensions=None, + ), + sig_file=dict(), + singular_values=dict( + extensions=None, + ), + spatial_eigenvectors=dict( + extensions=None, + ), + svd_stats_file=dict( + extensions=None, + ), + ) + outputs = MRTM1.output_spec() + + for key, metadata in 
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRTM1.py b/nipype/interfaces/freesurfer/tests/test_auto_MRTM1.py
new file mode 100644
index 0000000000..1637214b9e
--- /dev/null
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRTM1.py
@@ -0,0 +1,284 @@
+# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
+from ..petsurfer import MRTM1
+
+
+def test_MRTM1_inputs():
+    input_map = dict(
+        allow_ill_cond=dict(
+            argstr="--illcond",
+        ),
+        allow_repeated_subjects=dict(
+            argstr="--allowsubjrep",
+        ),
+        args=dict(
+            argstr="%s",
+        ),
+        bp_clip_max=dict(
+            argstr="--bp-clip-max %f",
+        ),
+        bp_clip_neg=dict(
+            argstr="--bp-clip-neg",
+        ),
+        calc_AR1=dict(
+            argstr="--tar1",
+        ),
+        check_opts=dict(
+            argstr="--checkopts",
+        ),
+        compute_log_y=dict(
+            argstr="--logy",
+        ),
+        contrast=dict(
+            argstr="--C %s...",
+        ),
+        cortex=dict(
+            argstr="--cortex",
+            xor=["label_file"],
+        ),
+        debug=dict(
+            argstr="--debug",
+        ),
+        design=dict(
+            argstr="--X %s",
+            extensions=None,
+            xor=("fsgd", "design", "one_sample"),
+        ),
+        diag=dict(
+            argstr="--diag %d",
+        ),
+        diag_cluster=dict(
+            argstr="--diag-cluster",
+        ),
+        environ=dict(
+            nohash=True,
+            usedefault=True,
+        ),
+        fixed_fx_dof=dict(
+            argstr="--ffxdof %d",
+            xor=["fixed_fx_dof_file"],
+        ),
+        fixed_fx_dof_file=dict(
+            argstr="--ffxdofdat %d",
+            extensions=None,
+            xor=["fixed_fx_dof"],
+        ),
+        fixed_fx_var=dict(
+            argstr="--yffxvar %s",
+            extensions=None,
+        ),
+        force_perm=dict(
+            argstr="--perm-force",
+        ),
+        fsgd=dict(
+            argstr="--fsgd %s %s",
+            xor=("fsgd", "design", "one_sample"),
+        ),
+        fwhm=dict(
+            argstr="--fwhm %f",
+        ),
+        glm_dir=dict(
+            argstr="--glmdir %s",
+            genfile=True,
+        ),
+        hemi=dict(),
+        in_file=dict(
+            argstr="--y %s",
+            copyfile=False,
+            extensions=None,
+            mandatory=True,
+        ),
+        invert_mask=dict(
+            argstr="--mask-inv",
+        ),
+        label_file=dict(
+            argstr="--label %s",
+            extensions=None,
+            xor=["cortex"],
+        ),
+        logan=dict(
+            argstr="--logan %s %s %f",
+        ),
+        mask_file=dict(
+            argstr="--mask %s",
+            extensions=None,
+        ),
+        mrtm1=dict(
+            argstr="--mrtm1 %s %s",
+            mandatory=True,
+        ),
+        mrtm2=dict(
+            argstr="--mrtm2 %s %s %f",
+        ),
+        nii=dict(
+            argstr="--nii",
+            xor=["nii", "nii_gz"],
+        ),
+        nii_gz=dict(
+            argstr="--nii.gz",
+            xor=["nii", "nii_gz"],
+        ),
+        no_contrast_ok=dict(
+            argstr="--no-contrasts-ok",
+        ),
+        no_est_fwhm=dict(
+            argstr="--no-est-fwhm",
+        ),
+        no_mask_smooth=dict(
+            argstr="--no-mask-smooth",
+        ),
+        no_prune=dict(
+            argstr="--no-prune",
+            xor=["prunethresh"],
+        ),
+        one_sample=dict(
+            argstr="--osgm",
+            xor=("one_sample", "fsgd", "design", "contrast"),
+        ),
+        pca=dict(
+            argstr="--pca",
+        ),
+        per_voxel_reg=dict(
+            argstr="--pvr %s...",
+        ),
+        profile=dict(
+            argstr="--profile %d",
+        ),
+        prune=dict(
+            argstr="--prune",
+        ),
+        prune_thresh=dict(
+            argstr="--prune_thr %f",
+            xor=["noprune"],
+        ),
+        resynth_test=dict(
+            argstr="--resynthtest %d",
+        ),
+        save_cond=dict(
+            argstr="--save-cond",
+        ),
+        save_estimate=dict(
+            argstr="--yhat-save",
+        ),
+        save_res_corr_mtx=dict(
+            argstr="--eres-scm",
+        ),
+        save_residual=dict(
+            argstr="--eres-save",
+        ),
+        seed=dict(
+            argstr="--seed %d",
+        ),
+        self_reg=dict(
+            argstr="--selfreg %d %d %d",
+        ),
+        sim_done_file=dict(
+            argstr="--sim-done %s",
+            extensions=None,
+        ),
+        sim_sign=dict(
+            argstr="--sim-sign %s",
+        ),
+        simulation=dict(
+            argstr="--sim %s %d %f %s",
+        ),
+        subject_id=dict(),
+        subjects_dir=dict(),
+        surf=dict(
+            argstr="--surf %s %s %s",
+            requires=["subject_id", "hemi"],
+        ),
+        surf_geo=dict(
+            usedefault=True,
+        ),
+        synth=dict(
+            argstr="--synth",
+        ),
+        uniform=dict(
+            argstr="--uniform %f %f",
+        ),
+        var_fwhm=dict(
+            argstr="--var-fwhm %f",
+        ),
+        vox_dump=dict(
+            argstr="--voxdump %d %d %d",
+        ),
+        weight_file=dict(
+            extensions=None,
+            xor=["weighted_ls"],
+        ),
+        weight_inv=dict(
+            argstr="--w-inv",
+            xor=["weighted_ls"],
+        ),
+        weight_sqrt=dict(
+            argstr="--w-sqrt",
+            xor=["weighted_ls"],
+        ),
+        weighted_ls=dict(
+            argstr="--wls %s",
+            extensions=None,
+            xor=("weight_file", "weight_inv", "weight_sqrt"),
+        ),
+    )
+    inputs = MRTM1.input_spec()
+
+    for key, metadata in list(input_map.items()):
+        for metakey, value in list(metadata.items()):
+            assert getattr(inputs.traits()[key], metakey) == value
+
+
+def test_MRTM1_outputs():
+    output_map = dict(
+        beta_file=dict(
+            extensions=None,
+        ),
+        bp_file=dict(
+            extensions=None,
+        ),
+        dof_file=dict(
+            extensions=None,
+        ),
+        error_file=dict(
+            extensions=None,
+        ),
+        error_stddev_file=dict(
+            extensions=None,
+        ),
+        error_var_file=dict(
+            extensions=None,
+        ),
+        estimate_file=dict(
+            extensions=None,
+        ),
+        frame_eigenvectors=dict(
+            extensions=None,
+        ),
+        ftest_file=dict(),
+        fwhm_file=dict(
+            extensions=None,
+        ),
+        gamma_file=dict(),
+        gamma_var_file=dict(),
+        glm_dir=dict(),
+        k2p_file=dict(
+            extensions=None,
+        ),
+        mask_file=dict(
+            extensions=None,
+        ),
+        sig_file=dict(),
+        singular_values=dict(
+            extensions=None,
+        ),
+        spatial_eigenvectors=dict(
+            extensions=None,
+        ),
+        svd_stats_file=dict(
+            extensions=None,
+        ),
+    )
+    outputs = MRTM1.output_spec()
+
+    for key, metadata in list(output_map.items()):
+        for metakey, value in list(metadata.items()):
+            assert getattr(outputs.traits()[key], metakey) == value
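MRTM1 above (and MRTM2 below) are the new PETsurfer kinetic-modeling wrappers around mri_glmfit. Going only by the trait metadata asserted in the test, a call might be wired up roughly as follows; the import path mirrors the test's relative import, the file names are placeholders created only so the File traits validate, and this is a sketch rather than a verified recipe:

from pathlib import Path

from nipype.interfaces.freesurfer.petsurfer import MRTM1

# Placeholder inputs so that File(exists=True) traits accept them; these are
# empty stand-ins, not real PET time-activity data.
for name in ("tac.nii.gz", "ref_tac.dat", "timing.dat"):
    Path(name).touch()

mrtm1 = MRTM1()
mrtm1.inputs.in_file = "tac.nii.gz"                 # --y %s (mandatory)
mrtm1.inputs.mrtm1 = ("ref_tac.dat", "timing.dat")  # --mrtm1 %s %s (mandatory)
mrtm1.inputs.glm_dir = "mrtm1"                      # --glmdir %s
print(mrtm1.cmdline)  # inspect the assembled mri_glmfit call without running it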
argstr="--surf %s %s %s", + requires=["subject_id", "hemi"], + ), + surf_geo=dict( + usedefault=True, + ), + synth=dict( + argstr="--synth", + ), + uniform=dict( + argstr="--uniform %f %f", + ), + var_fwhm=dict( + argstr="--var-fwhm %f", + ), + vox_dump=dict( + argstr="--voxdump %d %d %d", + ), + weight_file=dict( + extensions=None, + xor=["weighted_ls"], + ), + weight_inv=dict( + argstr="--w-inv", + xor=["weighted_ls"], + ), + weight_sqrt=dict( + argstr="--w-sqrt", + xor=["weighted_ls"], + ), + weighted_ls=dict( + argstr="--wls %s", + extensions=None, + xor=("weight_file", "weight_inv", "weight_sqrt"), + ), + ) + inputs = MRTM2.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_MRTM2_outputs(): + output_map = dict( + beta_file=dict( + extensions=None, + ), + bp_file=dict( + extensions=None, + ), + dof_file=dict( + extensions=None, + ), + error_file=dict( + extensions=None, + ), + error_stddev_file=dict( + extensions=None, + ), + error_var_file=dict( + extensions=None, + ), + estimate_file=dict( + extensions=None, + ), + frame_eigenvectors=dict( + extensions=None, + ), + ftest_file=dict(), + fwhm_file=dict( + extensions=None, + ), + gamma_file=dict(), + gamma_var_file=dict(), + glm_dir=dict(), + k2p_file=dict( + extensions=None, + ), + mask_file=dict( + extensions=None, + ), + sig_file=dict(), + singular_values=dict( + extensions=None, + ), + spatial_eigenvectors=dict( + extensions=None, + ), + svd_stats_file=dict( + extensions=None, + ), + ) + outputs = MRTM2.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py b/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py index f3453818c5..093dd3d9b8 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py @@ -1,38 +1,53 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import MS_LDA def test_MS_LDA_inputs(): input_map = dict( - args=dict(argstr='%s', ), - conform=dict(argstr='-conform', ), + args=dict( + argstr="%s", + ), + conform=dict( + argstr="-conform", + ), environ=dict( nohash=True, usedefault=True, ), images=dict( - argstr='%s', + argstr="%s", copyfile=False, mandatory=True, position=-1, ), - label_file=dict(argstr='-label %s', ), + label_file=dict( + argstr="-label %s", + extensions=None, + ), lda_labels=dict( - argstr='-lda %s', + argstr="-lda %s", mandatory=True, - sep=' ', + sep=" ", + ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), + shift=dict( + argstr="-shift %d", ), - mask_file=dict(argstr='-mask %s', ), - shift=dict(argstr='-shift %d', ), subjects_dir=dict(), - use_weights=dict(argstr='-W', ), + use_weights=dict( + argstr="-W", + ), vol_synth_file=dict( - argstr='-synth %s', + argstr="-synth %s", + extensions=None, mandatory=True, ), weight_file=dict( - argstr='-weight %s', + argstr="-weight %s", + extensions=None, mandatory=True, ), ) @@ -41,10 +56,16 @@ def test_MS_LDA_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MS_LDA_outputs(): output_map = dict( - vol_synth_file=dict(), - weight_file=dict(), + vol_synth_file=dict( + extensions=None, + ), 
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py b/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py
index f3453818c5..093dd3d9b8 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py
@@ -1,38 +1,53 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..model import MS_LDA


 def test_MS_LDA_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        conform=dict(argstr='-conform', ),
+        args=dict(
+            argstr="%s",
+        ),
+        conform=dict(
+            argstr="-conform",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         images=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=False,
             mandatory=True,
             position=-1,
         ),
-        label_file=dict(argstr='-label %s', ),
+        label_file=dict(
+            argstr="-label %s",
+            extensions=None,
+        ),
         lda_labels=dict(
-            argstr='-lda %s',
+            argstr="-lda %s",
             mandatory=True,
-            sep=' ',
+            sep=" ",
+        ),
+        mask_file=dict(
+            argstr="-mask %s",
+            extensions=None,
+        ),
+        shift=dict(
+            argstr="-shift %d",
         ),
-        mask_file=dict(argstr='-mask %s', ),
-        shift=dict(argstr='-shift %d', ),
         subjects_dir=dict(),
-        use_weights=dict(argstr='-W', ),
+        use_weights=dict(
+            argstr="-W",
+        ),
         vol_synth_file=dict(
-            argstr='-synth %s',
+            argstr="-synth %s",
+            extensions=None,
             mandatory=True,
         ),
         weight_file=dict(
-            argstr='-weight %s',
+            argstr="-weight %s",
+            extensions=None,
             mandatory=True,
         ),
     )
@@ -41,10 +56,16 @@ def test_MS_LDA_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MS_LDA_outputs():
     output_map = dict(
-        vol_synth_file=dict(),
-        weight_file=dict(),
+        vol_synth_file=dict(
+            extensions=None,
+        ),
+        weight_file=dict(
+            extensions=None,
+        ),
     )
     outputs = MS_LDA.output_spec()
argstr="%s", mandatory=True, position=-2, usedefault=True, ), subjects_dir=dict(), - white=dict(argstr='-white %s', ), - white_only=dict(argstr='-whiteonly', ), + white=dict( + argstr="-white %s", + ), + white_only=dict( + argstr="-whiteonly", + ), ) inputs = MakeSurfaces.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MakeSurfaces_outputs(): output_map = dict( - out_area=dict(), - out_cortex=dict(), - out_curv=dict(), - out_pial=dict(), - out_thickness=dict(), - out_white=dict(), + out_area=dict( + extensions=None, + ), + out_cortex=dict( + extensions=None, + ), + out_curv=dict( + extensions=None, + ), + out_pial=dict( + extensions=None, + ), + out_thickness=dict( + extensions=None, + ), + out_white=dict( + extensions=None, + ), ) outputs = MakeSurfaces.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py b/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py index 1923b5ceed..271f0bb328 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py @@ -1,41 +1,60 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Normalize def test_Normalize_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - gradient=dict(argstr='-g %d', ), + gradient=dict( + argstr="-g %d", + ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - mask=dict(argstr='-mask %s', ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, keep_extension=True, - name_source=['in_file'], - name_template='%s_norm', + name_source=["in_file"], + name_template="%s_norm", position=-1, ), - segmentation=dict(argstr='-aseg %s', ), + segmentation=dict( + argstr="-aseg %s", + extensions=None, + ), subjects_dir=dict(), - transform=dict(), + transform=dict( + extensions=None, + ), ) inputs = Normalize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Normalize_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Normalize.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py index 1091bdbdc3..51b2f2cd0b 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py @@ -1,115 +1,222 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import OneSampleTTest def test_OneSampleTTest_inputs(): input_map = dict( - allow_ill_cond=dict(argstr='--illcond', ), - allow_repeated_subjects=dict(argstr='--allowsubjrep', ), - args=dict(argstr='%s', ), - calc_AR1=dict(argstr='--tar1', ), - check_opts=dict(argstr='--checkopts', ), - compute_log_y=dict(argstr='--logy', ), - contrast=dict(argstr='--C %s...', ), + allow_ill_cond=dict( + argstr="--illcond", + ), + allow_repeated_subjects=dict( + argstr="--allowsubjrep", + ), + args=dict( + argstr="%s", + ), + 
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py b/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py
index 7ff18eeba6..06316d071a 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py
@@ -1,68 +1,119 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import MakeSurfaces


 def test_MakeSurfaces_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         copy_inputs=dict(),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        fix_mtl=dict(argstr='-fix_mtl', ),
+        fix_mtl=dict(
+            argstr="-fix_mtl",
+        ),
         hemisphere=dict(
-            argstr='%s',
+            argstr="%s",
             mandatory=True,
             position=-1,
         ),
-        in_T1=dict(argstr='-T1 %s', ),
-        in_aseg=dict(argstr='-aseg %s', ),
-        in_filled=dict(mandatory=True, ),
-        in_label=dict(xor=['noaparc'], ),
+        in_T1=dict(
+            argstr="-T1 %s",
+            extensions=None,
+        ),
+        in_aseg=dict(
+            argstr="-aseg %s",
+            extensions=None,
+        ),
+        in_filled=dict(
+            extensions=None,
+            mandatory=True,
+        ),
+        in_label=dict(
+            extensions=None,
+            xor=["noaparc"],
+        ),
         in_orig=dict(
-            argstr='-orig %s',
+            argstr="-orig %s",
+            extensions=None,
             mandatory=True,
         ),
-        in_white=dict(),
-        in_wm=dict(mandatory=True, ),
-        longitudinal=dict(argstr='-long', ),
-        maximum=dict(argstr='-max %.1f', ),
-        mgz=dict(argstr='-mgz', ),
-        no_white=dict(argstr='-nowhite', ),
+        in_white=dict(
+            extensions=None,
+        ),
+        in_wm=dict(
+            extensions=None,
+            mandatory=True,
+        ),
+        longitudinal=dict(
+            argstr="-long",
+        ),
+        maximum=dict(
+            argstr="-max %.1f",
+        ),
+        mgz=dict(
+            argstr="-mgz",
+        ),
+        no_white=dict(
+            argstr="-nowhite",
+        ),
         noaparc=dict(
-            argstr='-noaparc',
-            xor=['in_label'],
+            argstr="-noaparc",
+            xor=["in_label"],
         ),
         orig_pial=dict(
-            argstr='-orig_pial %s',
-            requires=['in_label'],
+            argstr="-orig_pial %s",
+            extensions=None,
+            requires=["in_label"],
+        ),
+        orig_white=dict(
+            argstr="-orig_white %s",
+            extensions=None,
         ),
-        orig_white=dict(argstr='-orig_white %s', ),
         subject_id=dict(
-            argstr='%s',
+            argstr="%s",
             mandatory=True,
             position=-2,
             usedefault=True,
         ),
         subjects_dir=dict(),
-        white=dict(argstr='-white %s', ),
-        white_only=dict(argstr='-whiteonly', ),
+        white=dict(
+            argstr="-white %s",
+        ),
+        white_only=dict(
+            argstr="-whiteonly",
+        ),
     )
     inputs = MakeSurfaces.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MakeSurfaces_outputs():
     output_map = dict(
-        out_area=dict(),
-        out_cortex=dict(),
-        out_curv=dict(),
-        out_pial=dict(),
-        out_thickness=dict(),
-        out_white=dict(),
+        out_area=dict(
+            extensions=None,
+        ),
+        out_cortex=dict(
+            extensions=None,
+        ),
+        out_curv=dict(
+            extensions=None,
+        ),
+        out_pial=dict(
+            extensions=None,
+        ),
+        out_thickness=dict(
+            extensions=None,
+        ),
+        out_white=dict(
+            extensions=None,
+        ),
     )
     outputs = MakeSurfaces.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py b/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py
index 1923b5ceed..271f0bb328 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py
@@ -1,41 +1,60 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import Normalize


 def test_Normalize_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        gradient=dict(argstr='-g %d', ),
+        gradient=dict(
+            argstr="-g %d",
+        ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-2,
         ),
-        mask=dict(argstr='-mask %s', ),
+        mask=dict(
+            argstr="-mask %s",
+            extensions=None,
+        ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             hash_files=False,
             keep_extension=True,
-            name_source=['in_file'],
-            name_template='%s_norm',
+            name_source=["in_file"],
+            name_template="%s_norm",
             position=-1,
         ),
-        segmentation=dict(argstr='-aseg %s', ),
+        segmentation=dict(
+            argstr="-aseg %s",
+            extensions=None,
+        ),
         subjects_dir=dict(),
-        transform=dict(),
+        transform=dict(
+            extensions=None,
+        ),
     )
     inputs = Normalize.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Normalize_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Normalize.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py
index 1091bdbdc3..51b2f2cd0b 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py
@@ -1,115 +1,222 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..model import OneSampleTTest


 def test_OneSampleTTest_inputs():
     input_map = dict(
-        allow_ill_cond=dict(argstr='--illcond', ),
-        allow_repeated_subjects=dict(argstr='--allowsubjrep', ),
-        args=dict(argstr='%s', ),
-        calc_AR1=dict(argstr='--tar1', ),
-        check_opts=dict(argstr='--checkopts', ),
-        compute_log_y=dict(argstr='--logy', ),
-        contrast=dict(argstr='--C %s...', ),
+        allow_ill_cond=dict(
+            argstr="--illcond",
+        ),
+        allow_repeated_subjects=dict(
+            argstr="--allowsubjrep",
+        ),
+        args=dict(
+            argstr="%s",
+        ),
+        bp_clip_max=dict(
+            argstr="--bp-clip-max %f",
+        ),
+        bp_clip_neg=dict(
+            argstr="--bp-clip-neg",
+        ),
+        calc_AR1=dict(
+            argstr="--tar1",
+        ),
+        check_opts=dict(
+            argstr="--checkopts",
+        ),
+        compute_log_y=dict(
+            argstr="--logy",
+        ),
+        contrast=dict(
+            argstr="--C %s...",
+        ),
         cortex=dict(
-            argstr='--cortex',
-            xor=['label_file'],
+            argstr="--cortex",
+            xor=["label_file"],
+        ),
+        debug=dict(
+            argstr="--debug",
         ),
-        debug=dict(argstr='--debug', ),
         design=dict(
-            argstr='--X %s',
-            xor=('fsgd', 'design', 'one_sample'),
+            argstr="--X %s",
+            extensions=None,
+            xor=("fsgd", "design", "one_sample"),
+        ),
+        diag=dict(
+            argstr="--diag %d",
+        ),
+        diag_cluster=dict(
+            argstr="--diag-cluster",
         ),
-        diag=dict(argstr='--diag %d', ),
-        diag_cluster=dict(argstr='--diag-cluster', ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         fixed_fx_dof=dict(
-            argstr='--ffxdof %d',
-            xor=['fixed_fx_dof_file'],
+            argstr="--ffxdof %d",
+            xor=["fixed_fx_dof_file"],
         ),
         fixed_fx_dof_file=dict(
-            argstr='--ffxdofdat %d',
-            xor=['fixed_fx_dof'],
+            argstr="--ffxdofdat %d",
+            extensions=None,
+            xor=["fixed_fx_dof"],
+        ),
+        fixed_fx_var=dict(
+            argstr="--yffxvar %s",
+            extensions=None,
+        ),
+        force_perm=dict(
+            argstr="--perm-force",
         ),
-        fixed_fx_var=dict(argstr='--yffxvar %s', ),
-        force_perm=dict(argstr='--perm-force', ),
         fsgd=dict(
-            argstr='--fsgd %s %s',
-            xor=('fsgd', 'design', 'one_sample'),
+            argstr="--fsgd %s %s",
+            xor=("fsgd", "design", "one_sample"),
+        ),
+        fwhm=dict(
+            argstr="--fwhm %f",
         ),
-        fwhm=dict(argstr='--fwhm %f', ),
         glm_dir=dict(
-            argstr='--glmdir %s',
+            argstr="--glmdir %s",
             genfile=True,
         ),
         hemi=dict(),
         in_file=dict(
-            argstr='--y %s',
+            argstr="--y %s",
             copyfile=False,
+            extensions=None,
             mandatory=True,
         ),
-        invert_mask=dict(argstr='--mask-inv', ),
+        invert_mask=dict(
+            argstr="--mask-inv",
+        ),
         label_file=dict(
-            argstr='--label %s',
-            xor=['cortex'],
+            argstr="--label %s",
+            extensions=None,
+            xor=["cortex"],
+        ),
+        logan=dict(
+            argstr="--logan %s %s %f",
+        ),
+        mask_file=dict(
+            argstr="--mask %s",
+            extensions=None,
+        ),
+        mrtm1=dict(
+            argstr="--mrtm1 %s %s",
+        ),
+        mrtm2=dict(
+            argstr="--mrtm2 %s %s %f",
+        ),
+        nii=dict(
+            argstr="--nii",
+            xor=["nii", "nii_gz"],
+        ),
+        nii_gz=dict(
+            argstr="--nii.gz",
+            xor=["nii", "nii_gz"],
+        ),
+        no_contrast_ok=dict(
+            argstr="--no-contrasts-ok",
+        ),
+        no_est_fwhm=dict(
+            argstr="--no-est-fwhm",
+        ),
+        no_mask_smooth=dict(
+            argstr="--no-mask-smooth",
         ),
-        mask_file=dict(argstr='--mask %s', ),
-        no_contrast_ok=dict(argstr='--no-contrasts-ok', ),
-        no_est_fwhm=dict(argstr='--no-est-fwhm', ),
-        no_mask_smooth=dict(argstr='--no-mask-smooth', ),
         no_prune=dict(
-            argstr='--no-prune',
-            xor=['prunethresh'],
+            argstr="--no-prune",
+            xor=["prunethresh"],
         ),
         one_sample=dict(
-            argstr='--osgm',
-            xor=('one_sample', 'fsgd', 'design', 'contrast'),
+            argstr="--osgm",
+            xor=("one_sample", "fsgd", "design", "contrast"),
+        ),
+        pca=dict(
+            argstr="--pca",
+        ),
+        per_voxel_reg=dict(
+            argstr="--pvr %s...",
+        ),
+        profile=dict(
+            argstr="--profile %d",
+        ),
+        prune=dict(
+            argstr="--prune",
         ),
-        pca=dict(argstr='--pca', ),
-        per_voxel_reg=dict(argstr='--pvr %s...', ),
-        profile=dict(argstr='--profile %d', ),
-        prune=dict(argstr='--prune', ),
         prune_thresh=dict(
-            argstr='--prune_thr %f',
-            xor=['noprune'],
-        ),
-        resynth_test=dict(argstr='--resynthtest %d', ),
-        save_cond=dict(argstr='--save-cond', ),
-        save_estimate=dict(argstr='--yhat-save', ),
-        save_res_corr_mtx=dict(argstr='--eres-scm', ),
-        save_residual=dict(argstr='--eres-save', ),
-        seed=dict(argstr='--seed %d', ),
-        self_reg=dict(argstr='--selfreg %d %d %d', ),
-        sim_done_file=dict(argstr='--sim-done %s', ),
-        sim_sign=dict(argstr='--sim-sign %s', ),
-        simulation=dict(argstr='--sim %s %d %f %s', ),
+            argstr="--prune_thr %f",
+            xor=["noprune"],
+        ),
+        resynth_test=dict(
+            argstr="--resynthtest %d",
+        ),
+        save_cond=dict(
+            argstr="--save-cond",
+        ),
+        save_estimate=dict(
+            argstr="--yhat-save",
+        ),
+        save_res_corr_mtx=dict(
+            argstr="--eres-scm",
+        ),
+        save_residual=dict(
+            argstr="--eres-save",
+        ),
+        seed=dict(
+            argstr="--seed %d",
+        ),
+        self_reg=dict(
+            argstr="--selfreg %d %d %d",
+        ),
+        sim_done_file=dict(
+            argstr="--sim-done %s",
+            extensions=None,
+        ),
+        sim_sign=dict(
+            argstr="--sim-sign %s",
+        ),
+        simulation=dict(
+            argstr="--sim %s %d %f %s",
+        ),
         subject_id=dict(),
         subjects_dir=dict(),
         surf=dict(
-            argstr='--surf %s %s %s',
-            requires=['subject_id', 'hemi'],
-        ),
-        surf_geo=dict(usedefault=True, ),
-        synth=dict(argstr='--synth', ),
-        uniform=dict(argstr='--uniform %f %f', ),
-        var_fwhm=dict(argstr='--var-fwhm %f', ),
-        vox_dump=dict(argstr='--voxdump %d %d %d', ),
-        weight_file=dict(xor=['weighted_ls'], ),
+            argstr="--surf %s %s %s",
+            requires=["subject_id", "hemi"],
+        ),
+        surf_geo=dict(
+            usedefault=True,
+        ),
+        synth=dict(
+            argstr="--synth",
+        ),
+        uniform=dict(
+            argstr="--uniform %f %f",
+        ),
+        var_fwhm=dict(
+            argstr="--var-fwhm %f",
+        ),
+        vox_dump=dict(
+            argstr="--voxdump %d %d %d",
+        ),
+        weight_file=dict(
+            extensions=None,
+            xor=["weighted_ls"],
+        ),
         weight_inv=dict(
-            argstr='--w-inv',
-            xor=['weighted_ls'],
+            argstr="--w-inv",
+            xor=["weighted_ls"],
         ),
         weight_sqrt=dict(
-            argstr='--w-sqrt',
-            xor=['weighted_ls'],
+            argstr="--w-sqrt",
+            xor=["weighted_ls"],
         ),
         weighted_ls=dict(
-            argstr='--wls %s',
-            xor=('weight_file', 'weight_inv', 'weight_sqrt'),
+            argstr="--wls %s",
+            extensions=None,
+            xor=("weight_file", "weight_inv", "weight_sqrt"),
         ),
     )
     inputs = OneSampleTTest.input_spec()
@@ -117,25 +224,57 @@ def test_OneSampleTTest_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_OneSampleTTest_outputs():
     output_map = dict(
-        beta_file=dict(),
-        dof_file=dict(),
-        error_file=dict(),
-        error_stddev_file=dict(),
-        error_var_file=dict(),
-        estimate_file=dict(),
-        frame_eigenvectors=dict(),
+        beta_file=dict(
+            extensions=None,
+        ),
+        bp_file=dict(
+            extensions=None,
+        ),
+        dof_file=dict(
+            extensions=None,
+        ),
+        error_file=dict(
+            extensions=None,
+        ),
+        error_stddev_file=dict(
+            extensions=None,
+        ),
+        error_var_file=dict(
+            extensions=None,
+        ),
+        estimate_file=dict(
+            extensions=None,
+        ),
+        frame_eigenvectors=dict(
+            extensions=None,
+        ),
         ftest_file=dict(),
-        fwhm_file=dict(),
+        fwhm_file=dict(
+            extensions=None,
+        ),
         gamma_file=dict(),
         gamma_var_file=dict(),
         glm_dir=dict(),
-        mask_file=dict(),
+        k2p_file=dict(
+            extensions=None,
+        ),
+        mask_file=dict(
+            extensions=None,
+        ),
         sig_file=dict(),
-        singular_values=dict(),
-        spatial_eigenvectors=dict(),
-        svd_stats_file=dict(),
+        singular_values=dict(
+            extensions=None,
+        ),
+        spatial_eigenvectors=dict(
+            extensions=None,
+        ),
+        svd_stats_file=dict(
+            extensions=None,
+        ),
     )
     outputs = OneSampleTTest.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Paint.py b/nipype/interfaces/freesurfer/tests/test_auto_Paint.py
index c452594e55..d95c4c9fa3 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_Paint.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_Paint.py
@@ -1,32 +1,38 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..registration import Paint


 def test_Paint_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        averages=dict(argstr='-a %d', ),
+        args=dict(
+            argstr="%s",
+        ),
+        averages=dict(
+            argstr="-a %d",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_surf=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-2,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             hash_files=False,
             keep_extension=False,
-            name_source=['in_surf'],
-            name_template='%s.avg_curv',
+            name_source=["in_surf"],
+            name_template="%s.avg_curv",
             position=-1,
         ),
         subjects_dir=dict(),
         template=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-3,
         ),
@@ -37,8 +43,14 @@ def test_Paint_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Paint_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Paint.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py b/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py
index 81ac276b2f..e1632020b5 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py
@@ -1,78 +1,128 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import ParcellationStats


 def test_ParcellationStats_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        aseg=dict(mandatory=True, ),
-        brainmask=dict(mandatory=True, ),
+        args=dict(
+            argstr="%s",
+        ),
+        aseg=dict(
+            extensions=None,
+            mandatory=True,
+        ),
+        brainmask=dict(
+            extensions=None,
+            mandatory=True,
+        ),
         copy_inputs=dict(),
-        cortex_label=dict(),
+        cortex_label=dict(
+            extensions=None,
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         hemisphere=dict(
-            argstr='%s',
+            argstr="%s",
             mandatory=True,
             position=-2,
         ),
         in_annotation=dict(
-            argstr='-a %s',
-            xor=['in_label'],
+            argstr="-a %s",
+            extensions=None,
+            xor=["in_label"],
+        ),
+        in_cortex=dict(
+            argstr="-cortex %s",
+            extensions=None,
         ),
-        in_cortex=dict(argstr='-cortex %s', ),
         in_label=dict(
-            argstr='-l %s',
-            xor=['in_annotatoin', 'out_color'],
+            argstr="-l %s",
+            extensions=None,
+            xor=["in_annotatoin", "out_color"],
+        ),
+        lh_pial=dict(
+            extensions=None,
+            mandatory=True,
+        ),
+        lh_white=dict(
+            extensions=None,
+            mandatory=True,
+        ),
+        mgz=dict(
+            argstr="-mgz",
         ),
-        lh_pial=dict(mandatory=True, ),
-        lh_white=dict(mandatory=True, ),
-        mgz=dict(argstr='-mgz', ),
         out_color=dict(
-            argstr='-c %s',
+            argstr="-c %s",
+            extensions=None,
             genfile=True,
-            xor=['in_label'],
+            xor=["in_label"],
         ),
         out_table=dict(
-            argstr='-f %s',
+            argstr="-f %s",
+            extensions=None,
             genfile=True,
-            requires=['tabular_output'],
+            requires=["tabular_output"],
+        ),
+        rh_pial=dict(
+            extensions=None,
+            mandatory=True,
+        ),
+        rh_white=dict(
+            extensions=None,
+            mandatory=True,
+        ),
+        ribbon=dict(
+            extensions=None,
+            mandatory=True,
         ),
-        rh_pial=dict(mandatory=True, ),
-        rh_white=dict(mandatory=True, ),
-        ribbon=dict(mandatory=True, ),
         subject_id=dict(
-            argstr='%s',
+            argstr="%s",
             mandatory=True,
             position=-3,
             usedefault=True,
         ),
         subjects_dir=dict(),
         surface=dict(
-            argstr='%s',
+            argstr="%s",
             position=-1,
         ),
-        tabular_output=dict(argstr='-b', ),
+        tabular_output=dict(
+            argstr="-b",
+        ),
         th3=dict(
-            argstr='-th3',
-            requires=['cortex_label'],
+            argstr="-th3",
+            requires=["cortex_label"],
+        ),
+        thickness=dict(
+            extensions=None,
+            mandatory=True,
+        ),
+        transform=dict(
+            extensions=None,
+            mandatory=True,
+        ),
+        wm=dict(
+            extensions=None,
+            mandatory=True,
         ),
-        thickness=dict(mandatory=True, ),
-        transform=dict(mandatory=True, ),
-        wm=dict(mandatory=True, ),
     )
     inputs = ParcellationStats.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ParcellationStats_outputs():
     output_map = dict(
-        out_color=dict(),
-        out_table=dict(),
+        out_color=dict(
+            extensions=None,
+        ),
+        out_table=dict(
+            extensions=None,
+        ),
     )
     outputs = ParcellationStats.output_spec()
xor=["expert"], + ), + mris_surf2vol=dict( + xor=["expert"], + ), + mrisp_paint=dict( + xor=["expert"], + ), + openmp=dict( + argstr="-openmp %d", + ), + parallel=dict( + argstr="-parallel", + ), subject_id=dict( - argstr='-subjid %s', - usedefault=True, + argstr="-subjid %s", + xor=["base_template_id", "longitudinal_timepoint_id"], ), subjects_dir=dict( - argstr='-sd %s', + argstr="-sd %s", genfile=True, hash_files=False, ), - talairach=dict(xor=['expert'], ), + talairach=dict( + xor=["expert"], + ), use_FLAIR=dict( - argstr='-FLAIRpial', - min_ver='5.3.0', - xor=['use_T2'], + argstr="-FLAIRpial", + min_ver="5.3.0", + xor=["use_T2"], ), use_T2=dict( - argstr='-T2pial', - min_ver='5.3.0', - xor=['use_FLAIR'], + argstr="-T2pial", + min_ver="5.3.0", + xor=["use_FLAIR"], + ), + xopts=dict( + argstr="-xopts-%s", ), - xopts=dict(argstr='-xopts-%s', ), ) inputs = ReconAll.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ReconAll_outputs(): output_map = dict( BA_stats=dict( - altkey='BA', - loc='stats', + altkey="BA", + loc="stats", + ), + T1=dict( + extensions=None, + loc="mri", ), - T1=dict(loc='mri', ), annot=dict( - altkey='*annot', - loc='label', + altkey="*annot", + loc="label", ), aparc_a2009s_stats=dict( - altkey='aparc.a2009s', - loc='stats', + altkey="aparc.a2009s", + loc="stats", ), aparc_aseg=dict( - altkey='aparc*aseg', - loc='mri', + altkey="aparc*aseg", + loc="mri", ), aparc_stats=dict( - altkey='aparc', - loc='stats', + altkey="aparc", + loc="stats", ), area_pial=dict( - altkey='area.pial', - loc='surf', + altkey="area.pial", + loc="surf", + ), + aseg=dict( + extensions=None, + loc="mri", ), - aseg=dict(loc='mri', ), aseg_stats=dict( - altkey='aseg', - loc='stats', + altkey="aseg", + loc="stats", + ), + avg_curv=dict( + loc="surf", + ), + brain=dict( + extensions=None, + loc="mri", + ), + brainmask=dict( + extensions=None, + loc="mri", + ), + curv=dict( + loc="surf", ), - avg_curv=dict(loc='surf', ), - brain=dict(loc='mri', ), - brainmask=dict(loc='mri', ), - curv=dict(loc='surf', ), curv_pial=dict( - altkey='curv.pial', - loc='surf', + altkey="curv.pial", + loc="surf", ), curv_stats=dict( - altkey='curv', - loc='stats', + altkey="curv", + loc="stats", ), entorhinal_exvivo_stats=dict( - altkey='entorhinal_exvivo', - loc='stats', + altkey="entorhinal_exvivo", + loc="stats", + ), + filled=dict( + extensions=None, + loc="mri", ), - filled=dict(loc='mri', ), graymid=dict( - altkey=['graymid', 'midthickness'], - loc='surf', + altkey=["graymid", "midthickness"], + loc="surf", + ), + inflated=dict( + loc="surf", + ), + jacobian_white=dict( + loc="surf", ), - inflated=dict(loc='surf', ), - jacobian_white=dict(loc='surf', ), label=dict( - altkey='*label', - loc='label', - ), - norm=dict(loc='mri', ), - nu=dict(loc='mri', ), - orig=dict(loc='mri', ), - pial=dict(loc='surf', ), - rawavg=dict(loc='mri', ), + altkey="*label", + loc="label", + ), + norm=dict( + extensions=None, + loc="mri", + ), + nu=dict( + extensions=None, + loc="mri", + ), + orig=dict( + extensions=None, + loc="mri", + ), + pial=dict( + loc="surf", + ), + rawavg=dict( + extensions=None, + loc="mri", + ), ribbon=dict( - altkey='*ribbon', - loc='mri', + altkey="*ribbon", + loc="mri", + ), + smoothwm=dict( + loc="surf", + ), + sphere=dict( + loc="surf", ), - smoothwm=dict(loc='surf', ), - sphere=dict(loc='surf', ), sphere_reg=dict( - altkey='sphere.reg', - loc='surf', + altkey="sphere.reg", + loc="surf", 
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ReconAll.py b/nipype/interfaces/freesurfer/tests/test_auto_ReconAll.py
index 39f6a8c942..f31bdb89f4 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_ReconAll.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_ReconAll.py
@@ -1,24 +1,45 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import ReconAll


 def test_ReconAll_inputs():
     input_map = dict(
         FLAIR_file=dict(
-            argstr='-FLAIR %s',
-            min_ver='5.3.0',
+            argstr="-FLAIR %s",
+            extensions=None,
+            min_ver="5.3.0",
+            requires=["subject_id"],
+        ),
+        T1_files=dict(
+            argstr="-i %s...",
+            requires=["subject_id"],
         ),
-        T1_files=dict(argstr='-i %s...', ),
         T2_file=dict(
-            argstr='-T2 %s',
-            min_ver='5.3.0',
+            argstr="-T2 %s",
+            extensions=None,
+            min_ver="5.3.0",
+            requires=["subject_id"],
+        ),
+        args=dict(
+            argstr="%s",
+        ),
+        base_template_id=dict(
+            argstr="-base %s",
+            requires=["base_timepoint_ids"],
+            xor=["subject_id", "longitudinal_timepoint_id"],
+        ),
+        base_timepoint_ids=dict(
+            argstr="-base-tp %s...",
+        ),
+        big_ventricles=dict(
+            argstr="-bigventricles",
+        ),
+        brainstem=dict(
+            argstr="-brainstem-structures",
+            requires=["subject_id"],
         ),
-        args=dict(argstr='%s', ),
-        big_ventricles=dict(argstr='-bigventricles', ),
-        brainstem=dict(argstr='-brainstem-structures', ),
         directive=dict(
-            argstr='-%s',
+            argstr="-%s",
             position=0,
             usedefault=True,
         ),
@@ -26,161 +47,299 @@ def test_ReconAll_inputs():
             nohash=True,
             usedefault=True,
         ),
-        expert=dict(argstr='-expert %s', ),
-        flags=dict(argstr='%s', ),
-        hemi=dict(argstr='-hemi %s', ),
+        expert=dict(
+            argstr="-expert %s",
+            extensions=None,
+        ),
+        flags=dict(
+            argstr="%s",
+        ),
+        hemi=dict(
+            argstr="-hemi %s",
+            requires=["subject_id"],
+        ),
         hippocampal_subfields_T1=dict(
-            argstr='-hippocampal-subfields-T1',
-            min_ver='6.0.0',
+            argstr="-hippocampal-subfields-T1",
+            min_ver="6.0.0",
+            requires=["subject_id"],
         ),
         hippocampal_subfields_T2=dict(
-            argstr='-hippocampal-subfields-T2 %s %s',
-            min_ver='6.0.0',
+            argstr="-hippocampal-subfields-T2 %s %s",
+            min_ver="6.0.0",
+            requires=["subject_id"],
         ),
         hires=dict(
-            argstr='-hires',
-            min_ver='6.0.0',
-        ),
-        mprage=dict(argstr='-mprage', ),
-        mri_aparc2aseg=dict(xor=['expert'], ),
-        mri_ca_label=dict(xor=['expert'], ),
-        mri_ca_normalize=dict(xor=['expert'], ),
-        mri_ca_register=dict(xor=['expert'], ),
-        mri_edit_wm_with_aseg=dict(xor=['expert'], ),
-        mri_em_register=dict(xor=['expert'], ),
-        mri_fill=dict(xor=['expert'], ),
-        mri_mask=dict(xor=['expert'], ),
-        mri_normalize=dict(xor=['expert'], ),
-        mri_pretess=dict(xor=['expert'], ),
-        mri_remove_neck=dict(xor=['expert'], ),
-        mri_segment=dict(xor=['expert'], ),
-        mri_segstats=dict(xor=['expert'], ),
-        mri_tessellate=dict(xor=['expert'], ),
-        mri_watershed=dict(xor=['expert'], ),
-        mris_anatomical_stats=dict(xor=['expert'], ),
-        mris_ca_label=dict(xor=['expert'], ),
-        mris_fix_topology=dict(xor=['expert'], ),
-        mris_inflate=dict(xor=['expert'], ),
-        mris_make_surfaces=dict(xor=['expert'], ),
-        mris_register=dict(xor=['expert'], ),
-        mris_smooth=dict(xor=['expert'], ),
-        mris_sphere=dict(xor=['expert'], ),
-        mris_surf2vol=dict(xor=['expert'], ),
-        mrisp_paint=dict(xor=['expert'], ),
-        openmp=dict(argstr='-openmp %d', ),
-        parallel=dict(argstr='-parallel', ),
+            argstr="-hires",
+            min_ver="6.0.0",
+        ),
+        longitudinal_template_id=dict(
+            argstr="%s",
+            position=2,
+        ),
+        longitudinal_timepoint_id=dict(
+            argstr="-long %s",
+            position=1,
+            requires=["longitudinal_template_id"],
+            xor=["subject_id", "base_template_id"],
+        ),
+        mprage=dict(
+            argstr="-mprage",
+            requires=["subject_id"],
+        ),
+        mri_aparc2aseg=dict(
+            xor=["expert"],
+        ),
+        mri_ca_label=dict(
+            xor=["expert"],
+        ),
+        mri_ca_normalize=dict(
+            xor=["expert"],
+        ),
+        mri_ca_register=dict(
+            xor=["expert"],
+        ),
+        mri_edit_wm_with_aseg=dict(
+            xor=["expert"],
+        ),
+        mri_em_register=dict(
+            xor=["expert"],
+        ),
+        mri_fill=dict(
+            xor=["expert"],
+        ),
+        mri_mask=dict(
+            xor=["expert"],
+        ),
+        mri_normalize=dict(
+            xor=["expert"],
+        ),
+        mri_pretess=dict(
+            xor=["expert"],
+        ),
+        mri_remove_neck=dict(
+            xor=["expert"],
+        ),
+        mri_segment=dict(
+            xor=["expert"],
+        ),
+        mri_segstats=dict(
+            xor=["expert"],
+        ),
+        mri_tessellate=dict(
+            xor=["expert"],
+        ),
+        mri_watershed=dict(
+            xor=["expert"],
+        ),
+        mris_anatomical_stats=dict(
+            xor=["expert"],
+        ),
+        mris_ca_label=dict(
+            xor=["expert"],
+        ),
+        mris_fix_topology=dict(
+            xor=["expert"],
+        ),
+        mris_inflate=dict(
+            xor=["expert"],
+        ),
+        mris_make_surfaces=dict(
+            xor=["expert"],
+        ),
+        mris_register=dict(
+            xor=["expert"],
+        ),
+        mris_smooth=dict(
+            xor=["expert"],
+        ),
+        mris_sphere=dict(
+            xor=["expert"],
+        ),
+        mris_surf2vol=dict(
+            xor=["expert"],
+        ),
+        mrisp_paint=dict(
+            xor=["expert"],
+        ),
+        openmp=dict(
+            argstr="-openmp %d",
+        ),
+        parallel=dict(
+            argstr="-parallel",
+        ),
         subject_id=dict(
-            argstr='-subjid %s',
-            usedefault=True,
+            argstr="-subjid %s",
+            xor=["base_template_id", "longitudinal_timepoint_id"],
         ),
         subjects_dir=dict(
-            argstr='-sd %s',
+            argstr="-sd %s",
             genfile=True,
             hash_files=False,
         ),
-        talairach=dict(xor=['expert'], ),
+        talairach=dict(
+            xor=["expert"],
+        ),
         use_FLAIR=dict(
-            argstr='-FLAIRpial',
-            min_ver='5.3.0',
-            xor=['use_T2'],
+            argstr="-FLAIRpial",
+            min_ver="5.3.0",
+            xor=["use_T2"],
         ),
         use_T2=dict(
-            argstr='-T2pial',
-            min_ver='5.3.0',
-            xor=['use_FLAIR'],
+            argstr="-T2pial",
+            min_ver="5.3.0",
+            xor=["use_FLAIR"],
+        ),
+        xopts=dict(
+            argstr="-xopts-%s",
         ),
-        xopts=dict(argstr='-xopts-%s', ),
     )
     inputs = ReconAll.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ReconAll_outputs():
     output_map = dict(
         BA_stats=dict(
-            altkey='BA',
-            loc='stats',
+            altkey="BA",
+            loc="stats",
+        ),
+        T1=dict(
+            extensions=None,
+            loc="mri",
         ),
-        T1=dict(loc='mri', ),
         annot=dict(
-            altkey='*annot',
-            loc='label',
+            altkey="*annot",
+            loc="label",
         ),
         aparc_a2009s_stats=dict(
-            altkey='aparc.a2009s',
-            loc='stats',
+            altkey="aparc.a2009s",
+            loc="stats",
         ),
         aparc_aseg=dict(
-            altkey='aparc*aseg',
-            loc='mri',
+            altkey="aparc*aseg",
+            loc="mri",
         ),
         aparc_stats=dict(
-            altkey='aparc',
-            loc='stats',
+            altkey="aparc",
+            loc="stats",
         ),
         area_pial=dict(
-            altkey='area.pial',
-            loc='surf',
+            altkey="area.pial",
+            loc="surf",
+        ),
+        aseg=dict(
+            extensions=None,
+            loc="mri",
         ),
-        aseg=dict(loc='mri', ),
         aseg_stats=dict(
-            altkey='aseg',
-            loc='stats',
+            altkey="aseg",
+            loc="stats",
+        ),
+        avg_curv=dict(
+            loc="surf",
+        ),
+        brain=dict(
+            extensions=None,
+            loc="mri",
+        ),
+        brainmask=dict(
+            extensions=None,
+            loc="mri",
+        ),
+        curv=dict(
+            loc="surf",
         ),
-        avg_curv=dict(loc='surf', ),
-        brain=dict(loc='mri', ),
-        brainmask=dict(loc='mri', ),
-        curv=dict(loc='surf', ),
         curv_pial=dict(
-            altkey='curv.pial',
-            loc='surf',
+            altkey="curv.pial",
+            loc="surf",
         ),
         curv_stats=dict(
-            altkey='curv',
-            loc='stats',
+            altkey="curv",
+            loc="stats",
         ),
         entorhinal_exvivo_stats=dict(
-            altkey='entorhinal_exvivo',
-            loc='stats',
+            altkey="entorhinal_exvivo",
+            loc="stats",
+        ),
+        filled=dict(
+            extensions=None,
+            loc="mri",
         ),
-        filled=dict(loc='mri', ),
         graymid=dict(
-            altkey=['graymid', 'midthickness'],
-            loc='surf',
+            altkey=["graymid", "midthickness"],
+            loc="surf",
+        ),
+        inflated=dict(
+            loc="surf",
+        ),
+        jacobian_white=dict(
+            loc="surf",
         ),
-        inflated=dict(loc='surf', ),
-        jacobian_white=dict(loc='surf', ),
         label=dict(
-            altkey='*label',
-            loc='label',
-        ),
-        norm=dict(loc='mri', ),
-        nu=dict(loc='mri', ),
-        orig=dict(loc='mri', ),
-        pial=dict(loc='surf', ),
-        rawavg=dict(loc='mri', ),
+            altkey="*label",
+            loc="label",
+        ),
+        norm=dict(
+            extensions=None,
+            loc="mri",
+        ),
+        nu=dict(
+            extensions=None,
+            loc="mri",
+        ),
+        orig=dict(
+            extensions=None,
+            loc="mri",
+        ),
+        pial=dict(
+            loc="surf",
+        ),
+        rawavg=dict(
+            extensions=None,
+            loc="mri",
+        ),
         ribbon=dict(
-            altkey='*ribbon',
-            loc='mri',
+            altkey="*ribbon",
+            loc="mri",
+        ),
+        smoothwm=dict(
+            loc="surf",
+        ),
+        sphere=dict(
+            loc="surf",
         ),
-        smoothwm=dict(loc='surf', ),
-        sphere=dict(loc='surf', ),
         sphere_reg=dict(
-            altkey='sphere.reg',
-            loc='surf',
+            altkey="sphere.reg",
+            loc="surf",
         ),
         subject_id=dict(),
         subjects_dir=dict(),
-        sulc=dict(loc='surf', ),
-        thickness=dict(loc='surf', ),
-        volume=dict(loc='surf', ),
-        white=dict(loc='surf', ),
-        wm=dict(loc='mri', ),
-        wmparc=dict(loc='mri', ),
+        sulc=dict(
+            loc="surf",
+        ),
+        thickness=dict(
+            loc="surf",
+        ),
+        volume=dict(
+            loc="surf",
+        ),
+        white=dict(
+            loc="surf",
+        ),
+        wm=dict(
+            extensions=None,
+            loc="mri",
+        ),
+        wmparc=dict(
+            extensions=None,
+            loc="mri",
+        ),
         wmparc_stats=dict(
-            altkey='wmparc',
-            loc='stats',
+            altkey="wmparc",
+            loc="stats",
         ),
     )
     outputs = ReconAll.output_spec()
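The ReconAll spec above is where this patch adds first-class longitudinal support: -base/-base-tp build the within-subject template, -long runs a single timepoint against that template, and the new xor/requires metadata keeps the three modes mutually exclusive. A sketch of how those modes might be driven, with invented subject and template IDs and only the assembled command line inspected, not executed:

from nipype.interfaces.freesurfer import ReconAll

# Cross-sectional: subject_id (-subjid) now xor's with the longitudinal options.
cross = ReconAll(subject_id="tp1", directive="all", subjects_dir=".")

# Template creation: -base <template> plus one -base-tp per timepoint.
base = ReconAll(
    base_template_id="long_template",
    base_timepoint_ids=["tp1", "tp2"],
    directive="all",
    subjects_dir=".",
)

# Longitudinal timepoint: -long <tp> <template>, which requires the template id.
long_tp = ReconAll(
    longitudinal_timepoint_id="tp1",
    longitudinal_template_id="long_template",
    directive="all",
    subjects_dir=".",
)

for node in (cross, base, long_tp):
    print(node.cmdline)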
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Register.py b/nipype/interfaces/freesurfer/tests/test_auto_Register.py
index b4eff5133c..c10daabd58 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_Register.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_Register.py
@@ -1,38 +1,46 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..registration import Register


 def test_Register_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         curv=dict(
-            argstr='-curv',
-            requires=['in_smoothwm'],
+            argstr="-curv",
+            requires=["in_smoothwm"],
         ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        in_smoothwm=dict(copyfile=True, ),
+        in_smoothwm=dict(
+            copyfile=True,
+            extensions=None,
+        ),
         in_sulc=dict(
             copyfile=True,
+            extensions=None,
             mandatory=True,
         ),
         in_surf=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=True,
+            extensions=None,
             mandatory=True,
             position=-3,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             genfile=True,
             position=-1,
         ),
         subjects_dir=dict(),
         target=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-2,
         ),
@@ -42,8 +50,14 @@ def test_Register_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Register_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Register.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RegisterAVItoTalairach.py b/nipype/interfaces/freesurfer/tests/test_auto_RegisterAVItoTalairach.py
index 0bc88bf935..f66ac1bda7 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_RegisterAVItoTalairach.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_RegisterAVItoTalairach.py
@@ -1,33 +1,38 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..registration import RegisterAVItoTalairach


 def test_RegisterAVItoTalairach_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=0,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             position=3,
             usedefault=True,
         ),
         subjects_dir=dict(),
         target=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=1,
         ),
         vox2vox=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=2,
         ),
@@ -37,10 +42,17 @@ def test_RegisterAVItoTalairach_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RegisterAVItoTalairach_outputs():
     output_map = dict(
-        log_file=dict(usedefault=True, ),
-        out_file=dict(),
+        log_file=dict(
+            extensions=None,
+            usedefault=True,
+        ),
+        out_file=dict(
+            extensions=None,
+        ),
     )
     outputs = RegisterAVItoTalairach.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py b/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py
index 859aff0820..eeac74f722 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py
@@ -1,13 +1,15 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import RelabelHypointensities


 def test_RelabelHypointensities_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         aseg=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-3,
         ),
@@ -17,23 +19,26 @@ def test_RelabelHypointensities_inputs():
         ),
         lh_white=dict(
             copyfile=True,
+            extensions=None,
             mandatory=True,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             hash_files=False,
             keep_extension=False,
-            name_source=['aseg'],
-            name_template='%s.hypos.mgz',
+            name_source=["aseg"],
+            name_template="%s.hypos.mgz",
             position=-1,
         ),
         rh_white=dict(
             copyfile=True,
+            extensions=None,
             mandatory=True,
         ),
         subjects_dir=dict(),
         surf_directory=dict(
-            argstr='%s',
+            argstr="%s",
             position=-2,
             usedefault=True,
         ),
@@ -43,8 +48,15 @@ def test_RelabelHypointensities_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RelabelHypointensities_outputs():
-    output_map = dict(out_file=dict(argstr='%s', ), )
+    output_map = dict(
+        out_file=dict(
+            argstr="%s",
+            extensions=None,
+        ),
+    )
     outputs = RelabelHypointensities.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py b/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py
index 69e1d453a4..735ea7b84a 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py
@@ -1,27 +1,30 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import RemoveIntersection


 def test_RemoveIntersection_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=True,
+            extensions=None,
             mandatory=True,
             position=-2,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             hash_files=False,
             keep_extension=True,
-            name_source=['in_file'],
-            name_template='%s',
+            name_source=["in_file"],
+            name_template="%s",
             position=-1,
         ),
         subjects_dir=dict(),
@@ -31,8 +34,14 @@ def test_RemoveIntersection_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RemoveIntersection_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = RemoveIntersection.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py b/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py
index 9e095ddba0..3d2ce30cbd 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py
@@ -1,37 +1,44 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import RemoveNeck


 def test_RemoveNeck_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-4,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             hash_files=False,
             keep_extension=True,
-            name_source=['in_file'],
-            name_template='%s_noneck',
+            name_source=["in_file"],
+            name_template="%s_noneck",
             position=-1,
         ),
-        radius=dict(argstr='-radius %d', ),
+        radius=dict(
+            argstr="-radius %d",
+        ),
         subjects_dir=dict(),
         template=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-2,
         ),
         transform=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-3,
         ),
@@ -41,8 +48,14 @@ def test_RemoveNeck_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RemoveNeck_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = RemoveNeck.output_spec()

     for key, metadata in list(output_map.items()):
SampleToSurface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SampleToSurface_outputs(): output_map = dict( - hits_file=dict(), - out_file=dict(), - vox_file=dict(), + hits_file=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + vox_file=dict( + extensions=None, + ), ) outputs = SampleToSurface.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py b/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py index 8feb61d9d8..dfb82e8b85 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py @@ -1,96 +1,176 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import SegStats def test_SegStats_inputs(): input_map = dict( annot=dict( - argstr='--annot %s %s %s', + argstr="--annot %s %s %s", mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), - ), - args=dict(argstr='%s', ), - avgwf_file=dict(argstr='--avgwfvol %s', ), - avgwf_txt_file=dict(argstr='--avgwf %s', ), - brain_vol=dict(argstr='--%s', ), - brainmask_file=dict(argstr='--brainmask %s', ), - calc_power=dict(argstr='--%s', ), - calc_snr=dict(argstr='--snr', ), + xor=("segmentation_file", "annot", "surf_label"), + ), + args=dict( + argstr="%s", + ), + avgwf_file=dict( + argstr="--avgwfvol %s", + ), + avgwf_txt_file=dict( + argstr="--avgwf %s", + ), + brain_vol=dict( + argstr="--%s", + ), + brainmask_file=dict( + argstr="--brainmask %s", + extensions=None, + ), + calc_power=dict( + argstr="--%s", + ), + calc_snr=dict( + argstr="--snr", + ), color_table_file=dict( - argstr='--ctab %s', - xor=('color_table_file', 'default_color_table', 'gca_color_table'), + argstr="--ctab %s", + extensions=None, + xor=("color_table_file", "default_color_table", "gca_color_table"), + ), + cortex_vol_from_surf=dict( + argstr="--surf-ctx-vol", ), - cortex_vol_from_surf=dict(argstr='--surf-ctx-vol', ), default_color_table=dict( - argstr='--ctab-default', - xor=('color_table_file', 'default_color_table', 'gca_color_table'), + argstr="--ctab-default", + xor=("color_table_file", "default_color_table", "gca_color_table"), + ), + empty=dict( + argstr="--empty", ), - empty=dict(argstr='--empty', ), environ=dict( nohash=True, usedefault=True, ), - etiv=dict(argstr='--etiv', ), + etiv=dict( + argstr="--etiv", + ), etiv_only=dict(), - euler=dict(argstr='--euler', ), - exclude_ctx_gm_wm=dict(argstr='--excl-ctxgmwm', ), - exclude_id=dict(argstr='--excludeid %d', ), - frame=dict(argstr='--frame %d', ), + euler=dict( + argstr="--euler", + ), + exclude_ctx_gm_wm=dict( + argstr="--excl-ctxgmwm", + ), + exclude_id=dict( + argstr="--excludeid %d", + ), + frame=dict( + argstr="--frame %d", + ), gca_color_table=dict( - argstr='--ctab-gca %s', - xor=('color_table_file', 'default_color_table', 'gca_color_table'), + argstr="--ctab-gca %s", + extensions=None, + xor=("color_table_file", "default_color_table", "gca_color_table"), + ), + in_file=dict( + argstr="--i %s", + extensions=None, + ), + in_intensity=dict( + argstr="--in %s --in-intensity-name %s", + extensions=None, ), - in_file=dict(argstr='--i %s', ), - in_intensity=dict(argstr='--in %s --in-intensity-name %s', ), intensity_units=dict( - argstr='--in-intensity-units %s', - requires=['in_intensity'], + argstr="--in-intensity-units %s", + requires=["in_intensity"], + ), + mask_erode=dict( + 
argstr="--maskerode %d", + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + ), + mask_frame=dict( + requires=["mask_file"], + ), + mask_invert=dict( + argstr="--maskinvert", ), - mask_erode=dict(argstr='--maskerode %d', ), - mask_file=dict(argstr='--mask %s', ), - mask_frame=dict(requires=['mask_file'], ), - mask_invert=dict(argstr='--maskinvert', ), mask_sign=dict(), - mask_thresh=dict(argstr='--maskthresh %f', ), - multiply=dict(argstr='--mul %f', ), - non_empty_only=dict(argstr='--nonempty', ), - partial_volume_file=dict(argstr='--pv %s', ), - segment_id=dict(argstr='--id %s...', ), + mask_thresh=dict( + argstr="--maskthresh %f", + ), + multiply=dict( + argstr="--mul %f", + ), + non_empty_only=dict( + argstr="--nonempty", + ), + partial_volume_file=dict( + argstr="--pv %s", + extensions=None, + ), + segment_id=dict( + argstr="--id %s...", + ), segmentation_file=dict( - argstr='--seg %s', + argstr="--seg %s", + extensions=None, mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), + xor=("segmentation_file", "annot", "surf_label"), + ), + sf_avg_file=dict( + argstr="--sfavg %s", + ), + subcort_gm=dict( + argstr="--subcortgray", ), - sf_avg_file=dict(argstr='--sfavg %s', ), - subcort_gm=dict(argstr='--subcortgray', ), subjects_dir=dict(), summary_file=dict( - argstr='--sum %s', + argstr="--sum %s", + extensions=None, genfile=True, position=-1, ), - supratent=dict(argstr='--supratent', ), + supratent=dict( + argstr="--supratent", + ), surf_label=dict( - argstr='--slabel %s %s %s', + argstr="--slabel %s %s %s", mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), + xor=("segmentation_file", "annot", "surf_label"), + ), + total_gray=dict( + argstr="--totalgray", + ), + vox=dict( + argstr="--vox %s", + ), + wm_vol_from_surf=dict( + argstr="--surf-wm-vol", ), - total_gray=dict(argstr='--totalgray', ), - vox=dict(argstr='--vox %s', ), - wm_vol_from_surf=dict(argstr='--surf-wm-vol', ), ) inputs = SegStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SegStats_outputs(): output_map = dict( - avgwf_file=dict(), - avgwf_txt_file=dict(), - sf_avg_file=dict(), - summary_file=dict(), + avgwf_file=dict( + extensions=None, + ), + avgwf_txt_file=dict( + extensions=None, + ), + sf_avg_file=dict( + extensions=None, + ), + summary_file=dict( + extensions=None, + ), ) outputs = SegStats.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py b/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py index e65dc82e3b..0121dd7d9e 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py @@ -1,112 +1,220 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import SegStatsReconAll def test_SegStatsReconAll_inputs(): input_map = dict( annot=dict( - argstr='--annot %s %s %s', + argstr="--annot %s %s %s", mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), - ), - args=dict(argstr='%s', ), - aseg=dict(), - avgwf_file=dict(argstr='--avgwfvol %s', ), - avgwf_txt_file=dict(argstr='--avgwf %s', ), - brain_vol=dict(argstr='--%s', ), - brainmask_file=dict(argstr='--brainmask %s', ), - calc_power=dict(argstr='--%s', ), - calc_snr=dict(argstr='--snr', ), + xor=("segmentation_file", "annot", "surf_label"), + ), + args=dict( + argstr="%s", + ), + 
aseg=dict( + extensions=None, + ), + avgwf_file=dict( + argstr="--avgwfvol %s", + ), + avgwf_txt_file=dict( + argstr="--avgwf %s", + ), + brain_vol=dict( + argstr="--%s", + ), + brainmask_file=dict( + argstr="--brainmask %s", + extensions=None, + ), + calc_power=dict( + argstr="--%s", + ), + calc_snr=dict( + argstr="--snr", + ), color_table_file=dict( - argstr='--ctab %s', - xor=('color_table_file', 'default_color_table', 'gca_color_table'), + argstr="--ctab %s", + extensions=None, + xor=("color_table_file", "default_color_table", "gca_color_table"), ), copy_inputs=dict(), - cortex_vol_from_surf=dict(argstr='--surf-ctx-vol', ), + cortex_vol_from_surf=dict( + argstr="--surf-ctx-vol", + ), default_color_table=dict( - argstr='--ctab-default', - xor=('color_table_file', 'default_color_table', 'gca_color_table'), + argstr="--ctab-default", + xor=("color_table_file", "default_color_table", "gca_color_table"), + ), + empty=dict( + argstr="--empty", ), - empty=dict(argstr='--empty', ), environ=dict( nohash=True, usedefault=True, ), - etiv=dict(argstr='--etiv', ), + etiv=dict( + argstr="--etiv", + ), etiv_only=dict(), - euler=dict(argstr='--euler', ), - exclude_ctx_gm_wm=dict(argstr='--excl-ctxgmwm', ), - exclude_id=dict(argstr='--excludeid %d', ), - frame=dict(argstr='--frame %d', ), + euler=dict( + argstr="--euler", + ), + exclude_ctx_gm_wm=dict( + argstr="--excl-ctxgmwm", + ), + exclude_id=dict( + argstr="--excludeid %d", + ), + frame=dict( + argstr="--frame %d", + ), gca_color_table=dict( - argstr='--ctab-gca %s', - xor=('color_table_file', 'default_color_table', 'gca_color_table'), + argstr="--ctab-gca %s", + extensions=None, + xor=("color_table_file", "default_color_table", "gca_color_table"), + ), + in_file=dict( + argstr="--i %s", + extensions=None, + ), + in_intensity=dict( + argstr="--in %s --in-intensity-name %s", + extensions=None, ), - in_file=dict(argstr='--i %s', ), - in_intensity=dict(argstr='--in %s --in-intensity-name %s', ), intensity_units=dict( - argstr='--in-intensity-units %s', - requires=['in_intensity'], - ), - lh_orig_nofix=dict(mandatory=True, ), - lh_pial=dict(mandatory=True, ), - lh_white=dict(mandatory=True, ), - mask_erode=dict(argstr='--maskerode %d', ), - mask_file=dict(argstr='--mask %s', ), - mask_frame=dict(requires=['mask_file'], ), - mask_invert=dict(argstr='--maskinvert', ), + argstr="--in-intensity-units %s", + requires=["in_intensity"], + ), + lh_orig_nofix=dict( + extensions=None, + mandatory=True, + ), + lh_pial=dict( + extensions=None, + mandatory=True, + ), + lh_white=dict( + extensions=None, + mandatory=True, + ), + mask_erode=dict( + argstr="--maskerode %d", + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + ), + mask_frame=dict( + requires=["mask_file"], + ), + mask_invert=dict( + argstr="--maskinvert", + ), mask_sign=dict(), - mask_thresh=dict(argstr='--maskthresh %f', ), - multiply=dict(argstr='--mul %f', ), - non_empty_only=dict(argstr='--nonempty', ), - partial_volume_file=dict(argstr='--pv %s', ), - presurf_seg=dict(), - rh_orig_nofix=dict(mandatory=True, ), - rh_pial=dict(mandatory=True, ), - rh_white=dict(mandatory=True, ), - ribbon=dict(mandatory=True, ), - segment_id=dict(argstr='--id %s...', ), + mask_thresh=dict( + argstr="--maskthresh %f", + ), + multiply=dict( + argstr="--mul %f", + ), + non_empty_only=dict( + argstr="--nonempty", + ), + partial_volume_file=dict( + argstr="--pv %s", + extensions=None, + ), + presurf_seg=dict( + extensions=None, + ), + rh_orig_nofix=dict( + extensions=None, + mandatory=True, + ), + 
rh_pial=dict( + extensions=None, + mandatory=True, + ), + rh_white=dict( + extensions=None, + mandatory=True, + ), + ribbon=dict( + extensions=None, + mandatory=True, + ), + segment_id=dict( + argstr="--id %s...", + ), segmentation_file=dict( - argstr='--seg %s', + argstr="--seg %s", + extensions=None, mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), + xor=("segmentation_file", "annot", "surf_label"), + ), + sf_avg_file=dict( + argstr="--sfavg %s", + ), + subcort_gm=dict( + argstr="--subcortgray", ), - sf_avg_file=dict(argstr='--sfavg %s', ), - subcort_gm=dict(argstr='--subcortgray', ), subject_id=dict( - argstr='--subject %s', + argstr="--subject %s", mandatory=True, usedefault=True, ), subjects_dir=dict(), summary_file=dict( - argstr='--sum %s', + argstr="--sum %s", + extensions=None, genfile=True, position=-1, ), - supratent=dict(argstr='--supratent', ), + supratent=dict( + argstr="--supratent", + ), surf_label=dict( - argstr='--slabel %s %s %s', + argstr="--slabel %s %s %s", mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), + xor=("segmentation_file", "annot", "surf_label"), + ), + total_gray=dict( + argstr="--totalgray", + ), + transform=dict( + extensions=None, + mandatory=True, + ), + vox=dict( + argstr="--vox %s", + ), + wm_vol_from_surf=dict( + argstr="--surf-wm-vol", ), - total_gray=dict(argstr='--totalgray', ), - transform=dict(mandatory=True, ), - vox=dict(argstr='--vox %s', ), - wm_vol_from_surf=dict(argstr='--surf-wm-vol', ), ) inputs = SegStatsReconAll.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SegStatsReconAll_outputs(): output_map = dict( - avgwf_file=dict(), - avgwf_txt_file=dict(), - sf_avg_file=dict(), - summary_file=dict(), + avgwf_file=dict( + extensions=None, + ), + avgwf_txt_file=dict( + extensions=None, + ), + sf_avg_file=dict( + extensions=None, + ), + summary_file=dict( + extensions=None, + ), ) outputs = SegStatsReconAll.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py b/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py index e6b5fb2679..7c16a1f476 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py @@ -1,34 +1,41 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import SegmentCC def test_SegmentCC_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), copy_inputs=dict(), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-aseg %s', + argstr="-aseg %s", + extensions=None, + mandatory=True, + ), + in_norm=dict( + extensions=None, mandatory=True, ), - in_norm=dict(mandatory=True, ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, hash_files=False, keep_extension=False, - name_source=['in_file'], - name_template='%s.auto.mgz', + name_source=["in_file"], + name_template="%s.auto.mgz", ), out_rotation=dict( - argstr='-lta %s', + argstr="-lta %s", + extensions=None, mandatory=True, ), subject_id=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-1, usedefault=True, @@ -40,10 +47,16 @@ def test_SegmentCC_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SegmentCC_outputs(): output_map = dict( - out_file=dict(), - 
out_rotation=dict(), + out_file=dict( + extensions=None, + ), + out_rotation=dict( + extensions=None, + ), ) outputs = SegmentCC.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegmentWM.py b/nipype/interfaces/freesurfer/tests/test_auto_SegmentWM.py index aa742e8fea..8aac066c26 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SegmentWM.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SegmentWM.py @@ -1,22 +1,25 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import SegmentWM def test_SegmentWM_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), @@ -27,8 +30,14 @@ def test_SegmentWM_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SegmentWM_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SegmentWM.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py b/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py index fe4581dee0..e20de1c795 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py @@ -1,56 +1,68 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Smooth def test_Smooth_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='--i %s', + argstr="--i %s", + extensions=None, mandatory=True, ), num_iters=dict( - argstr='--niters %d', + argstr="--niters %d", mandatory=True, - xor=['surface_fwhm'], + xor=["surface_fwhm"], ), proj_frac=dict( - argstr='--projfrac %s', - xor=['proj_frac_avg'], + argstr="--projfrac %s", + xor=["proj_frac_avg"], ), proj_frac_avg=dict( - argstr='--projfrac-avg %.2f %.2f %.2f', - xor=['proj_frac'], + argstr="--projfrac-avg %.2f %.2f %.2f", + xor=["proj_frac"], ), reg_file=dict( - argstr='--reg %s', + argstr="--reg %s", + extensions=None, mandatory=True, ), smoothed_file=dict( - argstr='--o %s', + argstr="--o %s", + extensions=None, genfile=True, ), subjects_dir=dict(), surface_fwhm=dict( - argstr='--fwhm %f', + argstr="--fwhm %f", mandatory=True, - requires=['reg_file'], - xor=['num_iters'], + requires=["reg_file"], + xor=["num_iters"], + ), + vol_fwhm=dict( + argstr="--vol-fwhm %f", ), - vol_fwhm=dict(argstr='--vol-fwhm %f', ), ) inputs = Smooth.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Smooth_outputs(): - output_map = dict(smoothed_file=dict(), ) + output_map = dict( + smoothed_file=dict( + extensions=None, + ), + ) outputs = Smooth.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py b/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py index 8ce4dce075..5f97cc281b 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py 
+++ b/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py @@ -1,47 +1,82 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import SmoothTessellation def test_SmoothTessellation_inputs(): input_map = dict( - args=dict(argstr='%s', ), - curvature_averaging_iterations=dict(argstr='-a %d', ), - disable_estimates=dict(argstr='-nw', ), + args=dict( + argstr="%s", + ), + curvature_averaging_iterations=dict( + argstr="-a %d", + ), + disable_estimates=dict( + argstr="-nw", + ), environ=dict( nohash=True, usedefault=True, ), - gaussian_curvature_norm_steps=dict(argstr='%d ', ), - gaussian_curvature_smoothing_steps=dict(argstr='%d', ), + gaussian_curvature_norm_steps=dict( + argstr="%d", + ), + gaussian_curvature_smoothing_steps=dict( + argstr=" %d", + ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, mandatory=True, position=-2, ), - normalize_area=dict(argstr='-area', ), - out_area_file=dict(argstr='-b %s', ), - out_curvature_file=dict(argstr='-c %s', ), + normalize_area=dict( + argstr="-area", + ), + out_area_file=dict( + argstr="-b %s", + extensions=None, + ), + out_curvature_file=dict( + argstr="-c %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), - seed=dict(argstr='-seed %d', ), - smoothing_iterations=dict(argstr='-n %d', ), - snapshot_writing_iterations=dict(argstr='-w %d', ), + seed=dict( + argstr="-seed %d", + ), + smoothing_iterations=dict( + argstr="-n %d", + ), + snapshot_writing_iterations=dict( + argstr="-w %d", + ), subjects_dir=dict(), - use_gaussian_curvature_smoothing=dict(argstr='-g', ), - use_momentum=dict(argstr='-m', ), + use_gaussian_curvature_smoothing=dict( + argstr="-g", + ), + use_momentum=dict( + argstr="-m", + ), ) inputs = SmoothTessellation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SmoothTessellation_outputs(): - output_map = dict(surface=dict(), ) + output_map = dict( + surface=dict( + extensions=None, + ), + ) outputs = SmoothTessellation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py b/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py index 461398e6a8..84673e2951 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py @@ -1,32 +1,42 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Sphere def test_Sphere_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, mandatory=True, position=-2, ), - in_smoothwm=dict(copyfile=True, ), - magic=dict(argstr='-q', ), + in_smoothwm=dict( + copyfile=True, + extensions=None, + ), + magic=dict( + argstr="-q", + ), num_threads=dict(), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s.sphere', + name_source=["in_file"], + name_template="%s.sphere", position=-1, ), - seed=dict(argstr='-seed %d', ), + seed=dict( + argstr="-seed %d", + ), subjects_dir=dict(), ) inputs = Sphere.input_spec() @@ -34,8 +44,14 @@ def test_Sphere_inputs(): for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Sphere_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Sphere.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SphericalAverage.py b/nipype/interfaces/freesurfer/tests/test_auto_SphericalAverage.py index efdc032787..73f5b3efd7 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SphericalAverage.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SphericalAverage.py @@ -1,50 +1,60 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import SphericalAverage def test_SphericalAverage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - erode=dict(argstr='-erode %d', ), + erode=dict( + argstr="-erode %d", + ), fname=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-5, ), hemisphere=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-4, ), in_average=dict( - argstr='%s', + argstr="%s", genfile=True, position=-2, ), - in_orig=dict(argstr='-orig %s', ), + in_orig=dict( + argstr="-orig %s", + extensions=None, + ), in_surf=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), subject_id=dict( - argstr='-o %s', + argstr="-o %s", mandatory=True, ), subjects_dir=dict(), - threshold=dict(argstr='-t %.1f', ), + threshold=dict( + argstr="-t %.1f", + ), which=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-6, ), @@ -54,8 +64,14 @@ def test_SphericalAverage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SphericalAverage_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SphericalAverage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py b/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py index ca3f96c42b..01dc354710 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py @@ -1,53 +1,67 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Surface2VolTransform def test_Surface2VolTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), hemi=dict( - argstr='--hemi %s', + argstr="--hemi %s", mandatory=True, ), mkmask=dict( - argstr='--mkmask', - xor=['source_file'], + argstr="--mkmask", + xor=["source_file"], + ), + projfrac=dict( + argstr="--projfrac %s", ), - projfrac=dict(argstr='--projfrac %s', ), reg_file=dict( - argstr='--volreg %s', + argstr="--volreg %s", + extensions=None, mandatory=True, - xor=['subject_id'], + xor=["subject_id"], ), source_file=dict( - argstr='--surfval %s', + argstr="--surfval %s", copyfile=False, + extensions=None, mandatory=True, - xor=['mkmask'], + xor=["mkmask"], ), subject_id=dict( - argstr='--identity %s', - xor=['reg_file'], + 
argstr="--identity %s", + xor=["reg_file"], + ), + subjects_dir=dict( + argstr="--sd %s", + ), + surf_name=dict( + argstr="--surf %s", + ), + template_file=dict( + argstr="--template %s", + extensions=None, ), - subjects_dir=dict(argstr='--sd %s', ), - surf_name=dict(argstr='--surf %s', ), - template_file=dict(argstr='--template %s', ), transformed_file=dict( - argstr='--outvol %s', + argstr="--outvol %s", + extensions=None, hash_files=False, - name_source=['source_file'], - name_template='%s_asVol.nii', + name_source=["source_file"], + name_template="%s_asVol.nii", ), vertexvol_file=dict( - argstr='--vtxvol %s', + argstr="--vtxvol %s", + extensions=None, hash_files=False, - name_source=['source_file'], - name_template='%s_asVol_vertex.nii', + name_source=["source_file"], + name_template="%s_asVol_vertex.nii", ), ) inputs = Surface2VolTransform.input_spec() @@ -55,10 +69,16 @@ def test_Surface2VolTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Surface2VolTransform_outputs(): output_map = dict( - transformed_file=dict(), - vertexvol_file=dict(), + transformed_file=dict( + extensions=None, + ), + vertexvol_file=dict( + extensions=None, + ), ) outputs = Surface2VolTransform.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py index 84bef6ed7a..7876dfa1cc 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import SurfaceSmooth def test_SurfaceSmooth_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), cortex=dict( - argstr='--cortex', + argstr="--cortex", usedefault=True, ), environ=dict( @@ -15,28 +16,32 @@ def test_SurfaceSmooth_inputs(): usedefault=True, ), fwhm=dict( - argstr='--fwhm %.4f', - xor=['smooth_iters'], + argstr="--fwhm %.4f", + xor=["smooth_iters"], ), hemi=dict( - argstr='--hemi %s', + argstr="--hemi %s", mandatory=True, ), in_file=dict( - argstr='--sval %s', + argstr="--sval %s", + extensions=None, mandatory=True, ), out_file=dict( - argstr='--tval %s', + argstr="--tval %s", + extensions=None, genfile=True, ), - reshape=dict(argstr='--reshape', ), + reshape=dict( + argstr="--reshape", + ), smooth_iters=dict( - argstr='--smooth %d', - xor=['fwhm'], + argstr="--smooth %d", + xor=["fwhm"], ), subject_id=dict( - argstr='--s %s', + argstr="--s %s", mandatory=True, ), subjects_dir=dict(), @@ -46,8 +51,14 @@ def test_SurfaceSmooth_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SurfaceSmooth_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SurfaceSmooth.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py index a413d410b1..c778bcc959 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py @@ -1,100 +1,142 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from 
__future__ import unicode_literals from ..utils import SurfaceSnapshots def test_SurfaceSnapshots_inputs(): input_map = dict( annot_file=dict( - argstr='-annotation %s', - xor=['annot_name'], + argstr="-annotation %s", + extensions=None, + xor=["annot_name"], ), annot_name=dict( - argstr='-annotation %s', - xor=['annot_file'], + argstr="-annotation %s", + xor=["annot_file"], + ), + args=dict( + argstr="%s", + ), + colortable=dict( + argstr="-colortable %s", + extensions=None, + ), + demean_overlay=dict( + argstr="-zm", ), - args=dict(argstr='%s', ), - colortable=dict(argstr='-colortable %s', ), - demean_overlay=dict(argstr='-zm', ), environ=dict( nohash=True, usedefault=True, ), hemi=dict( - argstr='%s', + argstr="%s", mandatory=True, position=2, ), identity_reg=dict( - argstr='-overlay-reg-identity', - xor=['overlay_reg', 'identity_reg', 'mni152_reg'], + argstr="-overlay-reg-identity", + xor=["overlay_reg", "identity_reg", "mni152_reg"], + ), + invert_overlay=dict( + argstr="-invphaseflag 1", ), - invert_overlay=dict(argstr='-invphaseflag 1', ), label_file=dict( - argstr='-label %s', - xor=['label_name'], + argstr="-label %s", + extensions=None, + xor=["label_name"], ), label_name=dict( - argstr='-label %s', - xor=['label_file'], + argstr="-label %s", + xor=["label_file"], + ), + label_outline=dict( + argstr="-label-outline", + ), + label_under=dict( + argstr="-labels-under", ), - label_outline=dict(argstr='-label-outline', ), - label_under=dict(argstr='-labels-under', ), mni152_reg=dict( - argstr='-mni152reg', - xor=['overlay_reg', 'identity_reg', 'mni152_reg'], + argstr="-mni152reg", + xor=["overlay_reg", "identity_reg", "mni152_reg"], + ), + orig_suffix=dict( + argstr="-orig %s", ), - orig_suffix=dict(argstr='-orig %s', ), overlay=dict( - argstr='-overlay %s', - requires=['overlay_range'], + argstr="-overlay %s", + extensions=None, + requires=["overlay_range"], + ), + overlay_range=dict( + argstr="%s", + ), + overlay_range_offset=dict( + argstr="-foffset %.3f", ), - overlay_range=dict(argstr='%s', ), - overlay_range_offset=dict(argstr='-foffset %.3f', ), overlay_reg=dict( - argstr='-overlay-reg %s', - xor=['overlay_reg', 'identity_reg', 'mni152_reg'], + argstr="-overlay-reg %s", + extensions=None, + xor=["overlay_reg", "identity_reg", "mni152_reg"], + ), + patch_file=dict( + argstr="-patch %s", + extensions=None, + ), + reverse_overlay=dict( + argstr="-revphaseflag 1", ), - patch_file=dict(argstr='-patch %s', ), - reverse_overlay=dict(argstr='-revphaseflag 1', ), screenshot_stem=dict(), - show_color_scale=dict(argstr='-colscalebarflag 1', ), - show_color_text=dict(argstr='-colscaletext 1', ), + show_color_scale=dict( + argstr="-colscalebarflag 1", + ), + show_color_text=dict( + argstr="-colscaletext 1", + ), show_curv=dict( - argstr='-curv', - xor=['show_gray_curv'], + argstr="-curv", + xor=["show_gray_curv"], ), show_gray_curv=dict( - argstr='-gray', - xor=['show_curv'], + argstr="-gray", + xor=["show_curv"], ), six_images=dict(), - sphere_suffix=dict(argstr='-sphere %s', ), - stem_template_args=dict(requires=['screenshot_stem'], ), + sphere_suffix=dict( + argstr="-sphere %s", + ), + stem_template_args=dict( + requires=["screenshot_stem"], + ), subject_id=dict( - argstr='%s', + argstr="%s", mandatory=True, position=1, ), subjects_dir=dict(), surface=dict( - argstr='%s', + argstr="%s", mandatory=True, position=3, ), tcl_script=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, ), - truncate_overlay=dict(argstr='-truncphaseflag 1', ), + truncate_overlay=dict( + 
argstr="-truncphaseflag 1", + ), ) inputs = SurfaceSnapshots.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SurfaceSnapshots_outputs(): - output_map = dict(snapshots=dict(), ) + output_map = dict( + snapshots=dict(), + ) outputs = SurfaceSnapshots.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py index 0546a275dc..88923befd4 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py @@ -1,58 +1,76 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import SurfaceTransform def test_SurfaceTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), hemi=dict( - argstr='--hemi %s', + argstr="--hemi %s", mandatory=True, ), out_file=dict( - argstr='--tval %s', + argstr="--tval %s", + extensions=None, genfile=True, ), - reshape=dict(argstr='--reshape', ), - reshape_factor=dict(argstr='--reshape-factor', ), + reshape=dict( + argstr="--reshape", + ), + reshape_factor=dict( + argstr="--reshape-factor", + ), source_annot_file=dict( - argstr='--sval-annot %s', + argstr="--sval-annot %s", + extensions=None, mandatory=True, - xor=['source_file'], + xor=["source_file"], ), source_file=dict( - argstr='--sval %s', + argstr="--sval %s", + extensions=None, mandatory=True, - xor=['source_annot_file'], + xor=["source_annot_file"], ), source_subject=dict( - argstr='--srcsubject %s', + argstr="--srcsubject %s", mandatory=True, ), source_type=dict( - argstr='--sfmt %s', - requires=['source_file'], + argstr="--sfmt %s", + requires=["source_file"], ), subjects_dir=dict(), - target_ico_order=dict(argstr='--trgicoorder %d', ), + target_ico_order=dict( + argstr="--trgicoorder %d", + ), target_subject=dict( - argstr='--trgsubject %s', + argstr="--trgsubject %s", mandatory=True, ), - target_type=dict(argstr='--tfmt %s', ), + target_type=dict( + argstr="--tfmt %s", + ), ) inputs = SurfaceTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SurfaceTransform_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SurfaceTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SynthesizeFLASH.py b/nipype/interfaces/freesurfer/tests/test_auto_SynthesizeFLASH.py index ea121d877e..7d52c994bc 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SynthesizeFLASH.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SynthesizeFLASH.py @@ -1,46 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import SynthesizeFLASH def test_SynthesizeFLASH_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fixed_weighting=dict( - argstr='-w', + argstr="-w", position=1, ), flip_angle=dict( - argstr='%.2f', + argstr="%.2f", mandatory=True, position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + 
extensions=None, genfile=True, ), pd_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=6, ), subjects_dir=dict(), t1_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=5, ), te=dict( - argstr='%.3f', + argstr="%.3f", mandatory=True, position=4, ), tr=dict( - argstr='%.2f', + argstr="%.2f", mandatory=True, position=2, ), @@ -50,8 +54,14 @@ def test_SynthesizeFLASH_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SynthesizeFLASH_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SynthesizeFLASH.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_TalairachAVI.py b/nipype/interfaces/freesurfer/tests/test_auto_TalairachAVI.py index 8ce925fcc7..384f44edd2 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_TalairachAVI.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_TalairachAVI.py @@ -1,22 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TalairachAVI def test_TalairachAVI_inputs(): input_map = dict( - args=dict(argstr='%s', ), - atlas=dict(argstr='--atlas %s', ), + args=dict( + argstr="%s", + ), + atlas=dict( + argstr="--atlas %s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='--i %s', + argstr="--i %s", + extensions=None, mandatory=True, ), out_file=dict( - argstr='--xfm %s', + argstr="--xfm %s", + extensions=None, mandatory=True, ), subjects_dir=dict(), @@ -26,11 +31,19 @@ def test_TalairachAVI_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TalairachAVI_outputs(): output_map = dict( - out_file=dict(), - out_log=dict(), - out_txt=dict(), + out_file=dict( + extensions=None, + ), + out_log=dict( + extensions=None, + ), + out_txt=dict( + extensions=None, + ), ) outputs = TalairachAVI.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_TalairachQC.py b/nipype/interfaces/freesurfer/tests/test_auto_TalairachQC.py index d07e572365..c6536186aa 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_TalairachQC.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_TalairachQC.py @@ -1,17 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TalairachQC def test_TalairachQC_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), log_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, ), @@ -22,8 +24,15 @@ def test_TalairachQC_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TalairachQC_outputs(): - output_map = dict(log_file=dict(usedefault=True, ), ) + output_map = dict( + log_file=dict( + extensions=None, + usedefault=True, + ), + ) outputs = TalairachQC.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py b/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py index 17f8e53a1f..31cdedb679 100644 --- 
a/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py @@ -1,65 +1,98 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Tkregister2 def test_Tkregister2_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - fsl_in_matrix=dict(argstr='--fsl %s', ), - fsl_out=dict(argstr='--fslregout %s', ), + fsl_in_matrix=dict( + argstr="--fsl %s", + extensions=None, + ), + fsl_out=dict( + argstr="--fslregout %s", + ), fstal=dict( - argstr='--fstal', - xor=['target_image', 'moving_image', 'reg_file'], + argstr="--fstal", + xor=["target_image", "moving_image", "reg_file"], ), fstarg=dict( - argstr='--fstarg', - xor=['target_image'], + argstr="--fstarg", + xor=["target_image"], + ), + invert_lta_in=dict( + requires=["lta_in"], ), - invert_lta_in=dict(requires=['lta_in'], ), invert_lta_out=dict( - argstr='--ltaout-inv', - requires=['lta_in'], + argstr="--ltaout-inv", + requires=["lta_in"], + ), + lta_in=dict( + argstr="--lta %s", + extensions=None, + ), + lta_out=dict( + argstr="--ltaout %s", ), - lta_in=dict(argstr='--lta %s', ), - lta_out=dict(argstr='--ltaout %s', ), moving_image=dict( - argstr='--mov %s', + argstr="--mov %s", + extensions=None, mandatory=True, ), - movscale=dict(argstr='--movscale %f', ), + movscale=dict( + argstr="--movscale %f", + ), noedit=dict( - argstr='--noedit', + argstr="--noedit", usedefault=True, ), reg_file=dict( - argstr='--reg %s', + argstr="--reg %s", + extensions=None, mandatory=True, usedefault=True, ), - reg_header=dict(argstr='--regheader', ), - subject_id=dict(argstr='--s %s', ), + reg_header=dict( + argstr="--regheader", + ), + subject_id=dict( + argstr="--s %s", + ), subjects_dir=dict(), target_image=dict( - argstr='--targ %s', - xor=['fstarg'], + argstr="--targ %s", + extensions=None, + xor=["fstarg"], + ), + xfm=dict( + argstr="--xfm %s", + extensions=None, ), - xfm=dict(argstr='--xfm %s', ), ) inputs = Tkregister2.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tkregister2_outputs(): output_map = dict( - fsl_file=dict(), - lta_file=dict(), - reg_file=dict(), + fsl_file=dict( + extensions=None, + ), + lta_file=dict( + extensions=None, + ), + reg_file=dict( + extensions=None, + ), ) outputs = Tkregister2.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py b/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py index 9427e60940..b6b27e67b8 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py @@ -1,41 +1,60 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import UnpackSDICOMDir def test_UnpackSDICOMDir_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), config=dict( - argstr='-cfg %s', + argstr="-cfg %s", + extensions=None, mandatory=True, - xor=('run_info', 'config', 'seq_config'), + xor=("run_info", "config", "seq_config"), + ), + dir_structure=dict( + argstr="-%s", ), - dir_structure=dict(argstr='-%s', ), environ=dict( nohash=True, usedefault=True, ), - log_file=dict(argstr='-log %s', ), - no_info_dump=dict(argstr='-noinfodump', ), - no_unpack_err=dict(argstr='-no-unpackerr', ), - 
output_dir=dict(argstr='-targ %s', ), + log_file=dict( + argstr="-log %s", + extensions=None, + ), + no_info_dump=dict( + argstr="-noinfodump", + ), + no_unpack_err=dict( + argstr="-no-unpackerr", + ), + output_dir=dict( + argstr="-targ %s", + ), run_info=dict( - argstr='-run %d %s %s %s', + argstr="-run %d %s %s %s", mandatory=True, - xor=('run_info', 'config', 'seq_config'), + xor=("run_info", "config", "seq_config"), + ), + scan_only=dict( + argstr="-scanonly %s", + extensions=None, ), - scan_only=dict(argstr='-scanonly %s', ), seq_config=dict( - argstr='-seqcfg %s', + argstr="-seqcfg %s", + extensions=None, mandatory=True, - xor=('run_info', 'config', 'seq_config'), + xor=("run_info", "config", "seq_config"), ), source_dir=dict( - argstr='-src %s', + argstr="-src %s", mandatory=True, ), - spm_zeropad=dict(argstr='-nspmzeropad %d', ), + spm_zeropad=dict( + argstr="-nspmzeropad %d", + ), subjects_dir=dict(), ) inputs = UnpackSDICOMDir.input_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py b/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py index 3e898a81f7..152f03eaa8 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py @@ -1,44 +1,63 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import VolumeMask def test_VolumeMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - aseg=dict(xor=['in_aseg'], ), + args=dict( + argstr="%s", + ), + aseg=dict( + extensions=None, + xor=["in_aseg"], + ), copy_inputs=dict(), environ=dict( nohash=True, usedefault=True, ), in_aseg=dict( - argstr='--aseg_name %s', - xor=['aseg'], + argstr="--aseg_name %s", + extensions=None, + xor=["aseg"], ), left_ribbonlabel=dict( - argstr='--label_left_ribbon %d', + argstr="--label_left_ribbon %d", mandatory=True, ), left_whitelabel=dict( - argstr='--label_left_white %d', + argstr="--label_left_white %d", + mandatory=True, + ), + lh_pial=dict( + extensions=None, + mandatory=True, + ), + lh_white=dict( + extensions=None, + mandatory=True, + ), + rh_pial=dict( + extensions=None, + mandatory=True, + ), + rh_white=dict( + extensions=None, mandatory=True, ), - lh_pial=dict(mandatory=True, ), - lh_white=dict(mandatory=True, ), - rh_pial=dict(mandatory=True, ), - rh_white=dict(mandatory=True, ), right_ribbonlabel=dict( - argstr='--label_right_ribbon %d', + argstr="--label_right_ribbon %d", mandatory=True, ), right_whitelabel=dict( - argstr='--label_right_white %d', + argstr="--label_right_white %d", mandatory=True, ), - save_ribbon=dict(argstr='--save_ribbon', ), + save_ribbon=dict( + argstr="--save_ribbon", + ), subject_id=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-1, usedefault=True, @@ -50,11 +69,19 @@ def test_VolumeMask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VolumeMask_outputs(): output_map = dict( - lh_ribbon=dict(), - out_ribbon=dict(), - rh_ribbon=dict(), + lh_ribbon=dict( + extensions=None, + ), + out_ribbon=dict( + extensions=None, + ), + rh_ribbon=dict( + extensions=None, + ), ) outputs = VolumeMask.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_WatershedSkullStrip.py b/nipype/interfaces/freesurfer/tests/test_auto_WatershedSkullStrip.py index 3586e7d234..5e8609c4c6 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_WatershedSkullStrip.py +++ 
b/nipype/interfaces/freesurfer/tests/test_auto_WatershedSkullStrip.py @@ -1,13 +1,15 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import WatershedSkullStrip def test_WatershedSkullStrip_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), brain_atlas=dict( - argstr='-brain_atlas %s', + argstr="-brain_atlas %s", + extensions=None, position=-4, ), environ=dict( @@ -15,20 +17,25 @@ def test_WatershedSkullStrip_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, usedefault=True, ), subjects_dir=dict(), - t1=dict(argstr='-T1', ), + t1=dict( + argstr="-T1", + ), transform=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), ) @@ -37,8 +44,14 @@ def test_WatershedSkullStrip_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WatershedSkullStrip_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = WatershedSkullStrip.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_model.py b/nipype/interfaces/freesurfer/tests/test_model.py index cd8d129690..f300edf3a1 100644 --- a/nipype/interfaces/freesurfer/tests/test_model.py +++ b/nipype/interfaces/freesurfer/tests/test_model.py @@ -1,14 +1,11 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -import os import numpy as np import nibabel as nb import pytest -from nipype.utils import NUMPY_MMAP from nipype.interfaces.freesurfer import model, no_freesurfer import nipype.pipeline.engine as pe @@ -17,9 +14,9 @@ def test_concatenate(tmpdir): tmpdir.chdir() - in1 = tmpdir.join('cont1.nii').strpath - in2 = tmpdir.join('cont2.nii').strpath - out = 'bar.nii' + in1 = tmpdir.join("cont1.nii").strpath + in2 = tmpdir.join("cont2.nii").strpath + out = "bar.nii" data1 = np.zeros((3, 3, 3, 1), dtype=np.float32) data2 = np.ones((3, 3, 3, 5), dtype=np.float32) @@ -31,27 +28,28 @@ def test_concatenate(tmpdir): # Test default behavior res = model.Concatenate(in_files=[in1, in2]).run() - assert res.outputs.concatenated_file == tmpdir.join( - 'concat_output.nii.gz').strpath - assert np.allclose(nb.load('concat_output.nii.gz').get_data(), out_data) + assert res.outputs.concatenated_file == tmpdir.join("concat_output.nii.gz").strpath + assert np.allclose(nb.load("concat_output.nii.gz").get_fdata(), out_data) # Test specified concatenated_file res = model.Concatenate(in_files=[in1, in2], concatenated_file=out).run() assert res.outputs.concatenated_file == tmpdir.join(out).strpath - assert np.allclose(nb.load(out, mmap=NUMPY_MMAP).get_data(), out_data) + assert np.allclose(nb.load(out).get_fdata(), out_data) # Test in workflow - wf = pe.Workflow('test_concatenate', base_dir=tmpdir.strpath) + wf = pe.Workflow("test_concatenate", base_dir=tmpdir.strpath) concat = pe.Node( - model.Concatenate(in_files=[in1, in2], concatenated_file=out), - name='concat') + model.Concatenate(in_files=[in1, in2], concatenated_file=out), name="concat" + ) wf.add_nodes([concat]) wf.run() assert np.allclose( - nb.load(tmpdir.join('test_concatenate', 'concat', - 
out).strpath).get_data(), out_data) + nb.load(tmpdir.join("test_concatenate", "concat", out).strpath).get_fdata(), + out_data, + ) # Test a simple statistic res = model.Concatenate( - in_files=[in1, in2], concatenated_file=out, stats='mean').run() - assert np.allclose(nb.load(out, mmap=NUMPY_MMAP).get_data(), mean_data) + in_files=[in1, in2], concatenated_file=out, stats="mean" + ).run() + assert np.allclose(nb.load(out).get_fdata(), mean_data) diff --git a/nipype/interfaces/freesurfer/tests/test_preprocess.py b/nipype/interfaces/freesurfer/tests/test_preprocess.py index f9fc09515a..a6e2c3cbf9 100644 --- a/nipype/interfaces/freesurfer/tests/test_preprocess.py +++ b/nipype/interfaces/freesurfer/tests/test_preprocess.py @@ -1,19 +1,16 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from builtins import str import os import pytest -from nipype.testing.fixtures import create_files_in_directory +from looseversion import LooseVersion +from nipype.testing.fixtures import create_files_in_directory from nipype.interfaces import freesurfer from nipype.interfaces.freesurfer import Info -from nipype import LooseVersion -@pytest.mark.skipif( - freesurfer.no_freesurfer(), reason="freesurfer is not installed") +@pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_robustregister(create_files_in_directory): filelist, outdir = create_files_in_directory @@ -21,7 +18,7 @@ def test_robustregister(create_files_in_directory): cwd = os.getcwd() # make sure command gets called - assert reg.cmd == 'mri_robust_register' + assert reg.cmd == "mri_robust_register" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -31,32 +28,35 @@ def test_robustregister(create_files_in_directory): reg.inputs.source_file = filelist[0] reg.inputs.target_file = filelist[1] reg.inputs.auto_sens = True - assert reg.cmdline == ('mri_robust_register --satit --lta ' - '%s/%s_robustreg.lta --mov %s --dst %s' % - (cwd, filelist[0][:-4], filelist[0], filelist[1])) + assert reg.cmdline == ( + "mri_robust_register --satit --lta " + "%s/%s_robustreg.lta --mov %s --dst %s" + % (cwd, filelist[0][:-4], filelist[0], filelist[1]) + ) # constructor based parameter setting reg2 = freesurfer.RobustRegister( source_file=filelist[0], target_file=filelist[1], outlier_sens=3.0, - out_reg_file='foo.lta', - half_targ=True) + out_reg_file="foo.lta", + half_targ=True, + ) assert reg2.cmdline == ( - 'mri_robust_register --halfdst %s_halfway.nii --lta foo.lta ' - '--sat 3.0000 --mov %s --dst %s' % - (os.path.join(outdir, filelist[1][:-4]), filelist[0], filelist[1])) + "mri_robust_register --halfdst %s_halfway.nii --lta foo.lta " + "--sat 3.0000 --mov %s --dst %s" + % (os.path.join(outdir, filelist[1][:-4]), filelist[0], filelist[1]) + ) -@pytest.mark.skipif( - freesurfer.no_freesurfer(), reason="freesurfer is not installed") +@pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_fitmsparams(create_files_in_directory): filelist, outdir = create_files_in_directory fit = freesurfer.FitMSParams() # make sure command gets called - assert fit.cmd == 'mri_ms_fitparms' + assert fit.cmd == "mri_ms_fitparms" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -65,29 +65,30 @@ def test_fitmsparams(create_files_in_directory): # .inputs based parameters setting fit.inputs.in_files = filelist fit.inputs.out_dir = outdir - assert fit.cmdline == 
'mri_ms_fitparms %s %s %s' % (filelist[0], - filelist[1], outdir) + assert fit.cmdline == "mri_ms_fitparms {} {} {}".format( + filelist[0], + filelist[1], + outdir, + ) # constructor based parameter setting fit2 = freesurfer.FitMSParams( - in_files=filelist, - te_list=[1.5, 3.5], - flip_list=[20, 30], - out_dir=outdir) + in_files=filelist, te_list=[1.5, 3.5], flip_list=[20, 30], out_dir=outdir + ) assert fit2.cmdline == ( - 'mri_ms_fitparms -te %.3f -fa %.1f %s -te %.3f -fa %.1f %s %s' % - (1.500, 20.0, filelist[0], 3.500, 30.0, filelist[1], outdir)) + "mri_ms_fitparms -te %.3f -fa %.1f %s -te %.3f -fa %.1f %s %s" + % (1.500, 20.0, filelist[0], 3.500, 30.0, filelist[1], outdir) + ) -@pytest.mark.skipif( - freesurfer.no_freesurfer(), reason="freesurfer is not installed") +@pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_synthesizeflash(create_files_in_directory): filelist, outdir = create_files_in_directory syn = freesurfer.SynthesizeFLASH() # make sure command gets called - assert syn.cmd == 'mri_synthesize' + assert syn.cmd == "mri_synthesize" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -100,20 +101,22 @@ def test_synthesizeflash(create_files_in_directory): syn.inputs.te = 4.5 syn.inputs.tr = 20 - assert syn.cmdline == ('mri_synthesize 20.00 30.00 4.500 %s %s %s' % - (filelist[0], filelist[1], - os.path.join(outdir, 'synth-flash_30.mgz'))) + assert syn.cmdline == ( + "mri_synthesize 20.00 30.00 4.500 %s %s %s" + % (filelist[0], filelist[1], os.path.join(outdir, "synth-flash_30.mgz")) + ) # constructor based parameters setting syn2 = freesurfer.SynthesizeFLASH( - t1_image=filelist[0], pd_image=filelist[1], flip_angle=20, te=5, tr=25) - assert syn2.cmdline == ('mri_synthesize 25.00 20.00 5.000 %s %s %s' % - (filelist[0], filelist[1], - os.path.join(outdir, 'synth-flash_20.mgz'))) + t1_image=filelist[0], pd_image=filelist[1], flip_angle=20, te=5, tr=25 + ) + assert syn2.cmdline == ( + "mri_synthesize 25.00 20.00 5.000 %s %s %s" + % (filelist[0], filelist[1], os.path.join(outdir, "synth-flash_20.mgz")) + ) -@pytest.mark.skipif( - freesurfer.no_freesurfer(), reason="freesurfer is not installed") +@pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_mandatory_outvol(create_files_in_directory): filelist, outdir = create_files_in_directory mni = freesurfer.MNIBiasCorrection() @@ -128,28 +131,30 @@ def test_mandatory_outvol(create_files_in_directory): # test with minimal args mni.inputs.in_file = filelist[0] base, ext = os.path.splitext(os.path.basename(filelist[0])) - if ext == '.gz': + if ext == ".gz": base, ext2 = os.path.splitext(base) ext = ext2 + ext - assert mni.cmdline == ('mri_nu_correct.mni --i %s --n 4 --o %s_output%s' % - (filelist[0], base, ext)) + assert mni.cmdline == ( + f"mri_nu_correct.mni --i {filelist[0]} --n 4 --o {base}_output{ext}" + ) # test with custom outfile - mni.inputs.out_file = 'new_corrected_file.mgz' + mni.inputs.out_file = "new_corrected_file.mgz" assert mni.cmdline == ( - 'mri_nu_correct.mni --i %s --n 4 --o new_corrected_file.mgz' % (filelist[0])) + "mri_nu_correct.mni --i %s --n 4 --o new_corrected_file.mgz" % (filelist[0]) + ) # constructor based tests mni2 = freesurfer.MNIBiasCorrection( - in_file=filelist[0], out_file='bias_corrected_output', iterations=2) + in_file=filelist[0], out_file="bias_corrected_output", iterations=2 + ) assert mni2.cmdline == ( - 'mri_nu_correct.mni --i %s --n 2 --o bias_corrected_output' % - filelist[0]) + 
"mri_nu_correct.mni --i %s --n 2 --o bias_corrected_output" % filelist[0] + ) -@pytest.mark.skipif( - freesurfer.no_freesurfer(), reason="freesurfer is not installed") +@pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_bbregister(create_files_in_directory): filelist, outdir = create_files_in_directory bbr = freesurfer.BBRegister() @@ -161,9 +166,9 @@ def test_bbregister(create_files_in_directory): with pytest.raises(ValueError): bbr.cmdline - bbr.inputs.subject_id = 'fsaverage' + bbr.inputs.subject_id = "fsaverage" bbr.inputs.source_file = filelist[0] - bbr.inputs.contrast_type = 't2' + bbr.inputs.contrast_type = "t2" # Check that 'init' is mandatory in FS < 6, but not in 6+ if Info.looseversion() < LooseVersion("6.0.0"): @@ -172,20 +177,20 @@ def test_bbregister(create_files_in_directory): else: bbr.cmdline - bbr.inputs.init = 'fsl' + bbr.inputs.init = "fsl" base, ext = os.path.splitext(os.path.basename(filelist[0])) - if ext == '.gz': + if ext == ".gz": base, _ = os.path.splitext(base) - assert bbr.cmdline == ('bbregister --t2 --init-fsl ' - '--reg {base}_bbreg_fsaverage.dat ' - '--mov {full} --s fsaverage'.format( - full=filelist[0], base=base)) + assert bbr.cmdline == ( + "bbregister --t2 --init-fsl " + "--reg {base}_bbreg_fsaverage.dat " + "--mov {full} --s fsaverage".format(full=filelist[0], base=base) + ) def test_FSVersion(): - """Check that FSVersion is a string that can be compared with LooseVersion - """ + """Check that FSVersion is a string that can be compared with LooseVersion""" assert isinstance(freesurfer.preprocess.FSVersion, str) assert LooseVersion(freesurfer.preprocess.FSVersion) >= LooseVersion("0") diff --git a/nipype/interfaces/freesurfer/tests/test_utils.py b/nipype/interfaces/freesurfer/tests/test_utils.py index f3ff8fd5ee..323c04166d 100644 --- a/nipype/interfaces/freesurfer/tests/test_utils.py +++ b/nipype/interfaces/freesurfer/tests/test_utils.py @@ -1,14 +1,12 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open import os import os.path as op import pytest -from nipype.testing.fixtures import (create_files_in_directory_plus_dummy_file, - create_surf_file_in_directory) +from nipype.testing.fixtures import ( + create_files_in_directory_plus_dummy_file, + create_surf_file_in_directory, +) from nipype.pipeline import engine as pe from nipype.interfaces import freesurfer as fs @@ -18,10 +16,9 @@ @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_sample2surf(create_files_in_directory_plus_dummy_file): - s2s = fs.SampleToSurface() # Test underlying command - assert s2s.cmd == 'mri_vol2surf' + assert s2s.cmd == "mri_vol2surf" # Test mandatory args exception with pytest.raises(ValueError): @@ -35,29 +32,30 @@ def test_sample2surf(create_files_in_directory_plus_dummy_file): s2s.inputs.reference_file = files[1] s2s.inputs.hemi = "lh" s2s.inputs.reg_file = files[2] - s2s.inputs.sampling_range = .5 + s2s.inputs.sampling_range = 0.5 s2s.inputs.sampling_units = "frac" s2s.inputs.sampling_method = "point" # Test a basic command line assert s2s.cmdline == ( "mri_vol2surf " - "--hemi lh --o %s --ref %s --reg reg.dat --projfrac 0.500 --mov %s" % - (os.path.join(cwd, "lh.a.mgz"), files[1], files[0])) + "--hemi lh --o %s --ref %s --reg reg.dat --projfrac 0.500 --mov %s" + % (os.path.join(cwd, 
"lh.a.mgz"), files[1], files[0]) + ) # Test identity s2sish = fs.SampleToSurface( - source_file=files[1], reference_file=files[0], hemi="rh") + source_file=files[1], reference_file=files[0], hemi="rh" + ) assert s2s != s2sish # Test hits file name creation s2s.inputs.hits_file = True - assert s2s._get_outfilename("hits_file") == os.path.join( - cwd, "lh.a_hits.mgz") + assert s2s._get_outfilename("hits_file") == os.path.join(cwd, "lh.a_hits.mgz") # Test that a 2-tuple range raises an error def set_illegal_range(): - s2s.inputs.sampling_range = (.2, .5) + s2s.inputs.sampling_range = (0.2, 0.5) with pytest.raises(TraitError): set_illegal_range() @@ -65,7 +63,6 @@ def set_illegal_range(): @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_surfsmooth(create_surf_file_in_directory): - smooth = fs.SurfaceSmooth() # Test underlying command @@ -86,9 +83,10 @@ def test_surfsmooth(create_surf_file_in_directory): smooth.inputs.hemi = "lh" # Test the command line - assert smooth.cmdline == \ - ("mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval %s --tval %s/lh.a_smooth%d.nii --s fsaverage" % - (surf, cwd, fwhm)) + assert smooth.cmdline == ( + "mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval %s --tval %s/lh.a_smooth%d.nii --s fsaverage" + % (surf, cwd, fwhm) + ) # Test identity shmooth = fs.SurfaceSmooth( @@ -96,13 +94,13 @@ def test_surfsmooth(create_surf_file_in_directory): fwhm=6, in_file=surf, hemi="lh", - out_file="lh.a_smooth.nii") + out_file="lh.a_smooth.nii", + ) assert smooth != shmooth @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_surfxfm(create_surf_file_in_directory): - xfm = fs.SurfaceTransform() # Test underlying command @@ -122,22 +120,23 @@ def test_surfxfm(create_surf_file_in_directory): xfm.inputs.hemi = "lh" # Test the command line - assert xfm.cmdline == \ - ("mri_surf2surf --hemi lh --tval %s/lh.a.fsaverage.nii --sval %s --srcsubject my_subject --trgsubject fsaverage" % - (cwd, surf)) + assert xfm.cmdline == ( + "mri_surf2surf --hemi lh --tval %s/lh.a.fsaverage.nii --sval %s --srcsubject my_subject --trgsubject fsaverage" + % (cwd, surf) + ) # Test identity xfmish = fs.SurfaceTransform( source_subject="fsaverage", target_subject="my_subject", source_file=surf, - hemi="lh") + hemi="lh", + ) assert xfm != xfmish @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_surfshots(create_files_in_directory_plus_dummy_file): - fotos = fs.SurfaceSnapshots() # Test underlying command @@ -150,7 +149,7 @@ def test_surfshots(create_files_in_directory_plus_dummy_file): # Create testing files files, cwd = create_files_in_directory_plus_dummy_file - # Test input settins + # Test input settings fotos.inputs.subject_id = "fsaverage" fotos.inputs.hemi = "lh" fotos.inputs.surface = "pial" @@ -159,8 +158,7 @@ def test_surfshots(create_files_in_directory_plus_dummy_file): assert fotos.cmdline == "tksurfer fsaverage lh pial -tcl snapshots.tcl" # Test identity - schmotos = fs.SurfaceSnapshots( - subject_id="mysubject", hemi="rh", surface="white") + schmotos = fs.SurfaceSnapshots(subject_id="mysubject", hemi="rh", surface="white") assert fotos != schmotos # Test that the tcl script gets written @@ -186,25 +184,25 @@ def test_surfshots(create_files_in_directory_plus_dummy_file): @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_mrisexpand(tmpdir): fssrc = FreeSurferSource( - subjects_dir=fs.Info.subjectsdir(), subject_id='fsaverage', hemi='lh') + 
subjects_dir=fs.Info.subjectsdir(), subject_id="fsaverage", hemi="lh" + ) fsavginfo = fssrc.run().outputs.get() # dt=60 to ensure very short runtime expand_if = fs.MRIsExpand( - in_file=fsavginfo['smoothwm'], out_name='expandtmp', distance=1, dt=60) + in_file=fsavginfo["smoothwm"], out_name="expandtmp", distance=1, dt=60 + ) expand_nd = pe.Node( fs.MRIsExpand( - in_file=fsavginfo['smoothwm'], - out_name='expandtmp', - distance=1, - dt=60), - name='expand_node') + in_file=fsavginfo["smoothwm"], out_name="expandtmp", distance=1, dt=60 + ), + name="expand_node", + ) # Interfaces should have same command line at instantiation - orig_cmdline = 'mris_expand -T 60 {} 1 expandtmp'.format( - fsavginfo['smoothwm']) + orig_cmdline = "mris_expand -T 60 {} 1 expandtmp".format(fsavginfo["smoothwm"]) assert expand_if.cmdline == orig_cmdline assert expand_nd.interface.cmdline == orig_cmdline @@ -212,16 +210,34 @@ def test_mrisexpand(tmpdir): nd_res = expand_nd.run() # Commandlines differ - node_cmdline = 'mris_expand -T 60 -pial {cwd}/lh.pial {cwd}/lh.smoothwm ' \ - '1 expandtmp'.format(cwd=nd_res.runtime.cwd) + node_cmdline = ( + "mris_expand -T 60 -pial {cwd}/lh.pial {cwd}/lh.smoothwm " + "1 expandtmp".format(cwd=nd_res.runtime.cwd) + ) assert nd_res.runtime.cmdline == node_cmdline # Check output - if_out_file = expand_if._list_outputs()['out_file'] - nd_out_file = nd_res.outputs.get()['out_file'] + if_out_file = expand_if._list_outputs()["out_file"] + nd_out_file = nd_res.outputs.get()["out_file"] # Same filename assert op.basename(if_out_file) == op.basename(nd_out_file) # Interface places output in source directory - assert op.dirname(if_out_file) == op.dirname(fsavginfo['smoothwm']) + assert op.dirname(if_out_file) == op.dirname(fsavginfo["smoothwm"]) # Node places output in working directory assert op.dirname(nd_out_file) == nd_res.runtime.cwd + + +@pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") +def test_eulernumber(tmpdir): + # grab a surface from fsaverage + fssrc = FreeSurferSource( + subjects_dir=fs.Info.subjectsdir(), subject_id="fsaverage", hemi="lh" + ) + pial = fssrc.run().outputs.pial + assert isinstance(pial, str), "Problem when fetching surface file" + + eu = fs.EulerNumber() + eu.inputs.in_file = pial + res = eu.run() + assert res.outputs.defects == 0 + assert res.outputs.euler == 2 diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index 55e38576bb..777f42f019 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -1,49 +1,77 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Interfaces to assorted Freesurfer utility programs. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, open - import os import re import shutil from ... 
import logging from ...utils.filemanip import fname_presuffix, split_filename -from ..base import (TraitedSpec, File, traits, OutputMultiPath, isdefined, - CommandLine, CommandLineInputSpec) -from .base import (FSCommand, FSTraitedSpec, FSSurfaceCommand, FSScriptCommand, - FSScriptOutputSpec, FSTraitedSpecOpenMP, FSCommandOpenMP) -__docformat__ = 'restructuredtext' +from ..base import ( + TraitedSpec, + Directory, + File, + traits, + Tuple, + OutputMultiPath, + isdefined, + CommandLine, + CommandLineInputSpec, +) +from .base import ( + FSCommand, + FSTraitedSpec, + FSSurfaceCommand, + FSScriptCommand, + FSScriptOutputSpec, + FSTraitedSpecOpenMP, + FSCommandOpenMP, +) + +__docformat__ = "restructuredtext" filemap = dict( - cor='cor', - mgh='mgh', - mgz='mgz', - minc='mnc', - afni='brik', - brik='brik', - bshort='bshort', - spm='img', - analyze='img', - analyze4d='img', - bfloat='bfloat', - nifti1='img', - nii='nii', - niigz='nii.gz', - gii='gii') + cor="cor", + mgh="mgh", + mgz="mgz", + minc="mnc", + afni="brik", + brik="brik", + bshort="bshort", + spm="img", + analyze="img", + analyze4d="img", + bfloat="bfloat", + nifti1="img", + nii="nii", + niigz="nii.gz", + gii="gii", +) filetypes = [ - 'cor', 'mgh', 'mgz', 'minc', 'analyze', 'analyze4d', 'spm', 'afni', 'brik', - 'bshort', 'bfloat', 'sdt', 'outline', 'otl', 'gdf', 'nifti1', 'nii', - 'niigz' + "cor", + "mgh", + "mgz", + "minc", + "analyze", + "analyze4d", + "spm", + "afni", + "brik", + "bshort", + "bfloat", + "sdt", + "outline", + "otl", + "gdf", + "nifti1", + "nii", + "niigz", ] -implicit_filetypes = ['gii'] +implicit_filetypes = ["gii"] -logger = logging.getLogger('nipype.interface') +logger = logging.getLogger("nipype.interface") def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None): @@ -61,7 +89,7 @@ def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None): if isdefined(cls.inputs.subject_id): subject_id = cls.inputs.subject_id else: - subject_id = 'subject_id' # default + subject_id = "subject_id" # default # check for basename if basename is None: basename = os.path.basename(in_file) @@ -88,25 +116,22 @@ def createoutputdirs(outputs): class SampleToSurfaceInputSpec(FSTraitedSpec): - source_file = File( exists=True, mandatory=True, argstr="--mov %s", - desc="volume to sample values from") + desc="volume to sample values from", + ) reference_file = File( - exists=True, - argstr="--ref %s", - desc="reference volume (default is orig.mgz)") + exists=True, argstr="--ref %s", desc="reference volume (default is orig.mgz)" + ) hemi = traits.Enum( - "lh", - "rh", - mandatory=True, - argstr="--hemi %s", - desc="target hemisphere") + "lh", "rh", mandatory=True, argstr="--hemi %s", desc="target hemisphere" + ) surface = traits.String( - argstr="--surf %s", desc="target surface (default is white)") + argstr="--surf %s", desc="target surface (default is white)" + ) reg_xors = ["reg_file", "reg_header", "mni152reg"] reg_file = File( @@ -114,35 +139,41 @@ class SampleToSurfaceInputSpec(FSTraitedSpec): argstr="--reg %s", mandatory=True, xor=reg_xors, - desc="source-to-reference registration file") + desc="source-to-reference registration file", + ) reg_header = traits.Bool( argstr="--regheader %s", requires=["subject_id"], mandatory=True, xor=reg_xors, - desc="register based on header geometry") + desc="register based on header geometry", + ) mni152reg = traits.Bool( argstr="--mni152reg", mandatory=True, xor=reg_xors, - desc="source volume is in MNI152 space") + desc="source volume is in MNI152 space", + ) - 
apply_rot = traits.Tuple( + apply_rot = Tuple( traits.Float, traits.Float, traits.Float, argstr="--rot %.3f %.3f %.3f", - desc="rotation angles (in degrees) to apply to reg matrix") - apply_trans = traits.Tuple( + desc="rotation angles (in degrees) to apply to reg matrix", + ) + apply_trans = Tuple( traits.Float, traits.Float, traits.Float, argstr="--trans %.3f %.3f %.3f", - desc="translation (in mm) to apply to reg matrix") + desc="translation (in mm) to apply to reg matrix", + ) override_reg_subj = traits.Bool( argstr="--srcsubject %s", requires=["subject_id"], - desc="override the subject in the reg file header") + desc="override the subject in the reg file header", + ) sampling_method = traits.Enum( "point", @@ -152,106 +183,116 @@ class SampleToSurfaceInputSpec(FSTraitedSpec): argstr="%s", xor=["projection_stem"], requires=["sampling_range", "sampling_units"], - desc="how to sample -- at a point or at the max or average over a range" + desc="how to sample -- at a point or at the max or average over a range", ) sampling_range = traits.Either( traits.Float, - traits.Tuple(traits.Float, traits.Float, traits.Float), - desc="sampling range - a point or a tuple of (min, max, step)") + Tuple(traits.Float, traits.Float, traits.Float), + desc="sampling range - a point or a tuple of (min, max, step)", + ) sampling_units = traits.Enum( - "mm", "frac", desc="sampling range type -- either 'mm' or 'frac'") + "mm", "frac", desc="sampling range type -- either 'mm' or 'frac'" + ) projection_stem = traits.String( mandatory=True, xor=["sampling_method"], - desc="stem for precomputed linear estimates and volume fractions") + desc="stem for precomputed linear estimates and volume fractions", + ) smooth_vol = traits.Float( - argstr="--fwhm %.3f", desc="smooth input volume (mm fwhm)") + argstr="--fwhm %.3f", desc="smooth input volume (mm fwhm)" + ) smooth_surf = traits.Float( - argstr="--surf-fwhm %.3f", desc="smooth output surface (mm fwhm)") + argstr="--surf-fwhm %.3f", desc="smooth output surface (mm fwhm)" + ) interp_method = traits.Enum( - "nearest", - "trilinear", - argstr="--interp %s", - desc="interpolation method") + "nearest", "trilinear", argstr="--interp %s", desc="interpolation method" + ) cortex_mask = traits.Bool( argstr="--cortex", xor=["mask_label"], - desc="mask the target surface with hemi.cortex.label") + desc="mask the target surface with hemi.cortex.label", + ) mask_label = File( exists=True, argstr="--mask %s", xor=["cortex_mask"], - desc="label file to mask output with") + desc="label file to mask output with", + ) float2int_method = traits.Enum( "round", "tkregister", argstr="--float2int %s", - desc="method to convert reg matrix values (default is round)") + desc="method to convert reg matrix values (default is round)", + ) fix_tk_reg = traits.Bool( - argstr="--fixtkreg", desc="make reg matrix round-compatible") + argstr="--fixtkreg", desc="make reg matrix round-compatible" + ) subject_id = traits.String(desc="subject id") target_subject = traits.String( argstr="--trgsubject %s", - desc="sample to surface of different subject than source") + desc="sample to surface of different subject than source", + ) surf_reg = traits.Either( traits.Bool, traits.Str(), argstr="--surfreg %s", requires=["target_subject"], - desc="use surface registration to target subject") + desc="use surface registration to target subject", + ) ico_order = traits.Int( argstr="--icoorder %d", requires=["target_subject"], - desc="icosahedron order when target_subject is 'ico'") + desc="icosahedron order when 
target_subject is 'ico'", + ) reshape = traits.Bool( argstr="--reshape", xor=["no_reshape"], - desc="reshape surface vector to fit in non-mgh format") + desc="reshape surface vector to fit in non-mgh format", + ) no_reshape = traits.Bool( argstr="--noreshape", xor=["reshape"], - desc="do not reshape surface vector (default)") + desc="do not reshape surface vector (default)", + ) reshape_slices = traits.Int( - argstr="--rf %d", desc="number of 'slices' for reshaping") + argstr="--rf %d", desc="number of 'slices' for reshaping" + ) scale_input = traits.Float( - argstr="--scale %.3f", desc="multiple all intensities by scale factor") - frame = traits.Int( - argstr="--frame %d", desc="save only one frame (0-based)") + argstr="--scale %.3f", desc="multiply all intensities by scale factor" + ) + frame = traits.Int(argstr="--frame %d", desc="save only one frame (0-based)") - out_file = File( - argstr="--o %s", genfile=True, desc="surface file to write") + out_file = File(argstr="--o %s", genfile=True, desc="surface file to write") out_type = traits.Enum( - filetypes + implicit_filetypes, - argstr="--out_type %s", - desc="output file type") + filetypes + implicit_filetypes, argstr="--out_type %s", desc="output file type" + ) hits_file = traits.Either( traits.Bool, File(exists=True), argstr="--srchit %s", - desc="save image with number of hits at each voxel") + desc="save image with number of hits at each voxel", + ) - hits_type = traits.Enum( - filetypes, argstr="--srchit_type", desc="hits file type") + hits_type = traits.Enum(filetypes, argstr="--srchit_type", desc="hits file type") vox_file = traits.Either( traits.Bool, File, argstr="--nvox %s", - desc="text file with the number of voxels intersecting the surface") + desc="text file with the number of voxels intersecting the surface", + ) class SampleToSurfaceOutputSpec(TraitedSpec): - out_file = File(exists=True, desc="surface file") - hits_file = File( - exists=True, desc="image with number of hits at each voxel") + hits_file = File(exists=True, desc="image with number of hits at each voxel") vox_file = File( - exists=True, - desc="text file with the number of voxels intersecting the surface") + exists=True, desc="text file with the number of voxels intersecting the surface" + ) class SampleToSurface(FSCommand): @@ -284,6 +325,7 @@ class SampleToSurface(FSCommand): >>> res = sampler.run() # doctest: +SKIP """ + _cmd = "mri_vol2surf" input_spec = SampleToSurfaceInputSpec output_spec = SampleToSurfaceOutputSpec @@ -299,7 +341,7 @@ def _format_arg(self, name, spec, value): else: range = "%.3f" % range method = dict(point="", max="-max", average="-avg")[value] - return "--proj%s%s %s" % (units, method, range) + return f"--proj{units}{method} {range}" if name == "reg_header": return spec.argstr % self.inputs.subject_id @@ -314,37 +356,44 @@ def _format_arg(self, name, spec, value): if ext in filemap.values(): raise ValueError( "Cannot create {} file with extension " - "{}".format(value, ext)) + "{}".format(value, ext) + ) else: - logger.warning('Creating %s file with extension %s: %s%s', - value, ext, base, ext) + logger.warning( + "Creating %s file with extension %s: %s%s", + value, + ext, + base, + ext, + ) if value in implicit_filetypes: return "" - if name == 'surf_reg': + if name == "surf_reg": if value is True: - return spec.argstr % 'sphere.reg' + return spec.argstr % "sphere.reg" - return super(SampleToSurface, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _get_outfilename(self, opt="out_file"):
outfile = getattr(self.inputs, opt) if not isdefined(outfile) or isinstance(outfile, bool): if isdefined(self.inputs.out_type): if opt == "hits_file": - suffix = '_hits.' + filemap[self.inputs.out_type] + suffix = "_hits." + filemap[self.inputs.out_type] else: - suffix = '.' + filemap[self.inputs.out_type] + suffix = "." + filemap[self.inputs.out_type] elif opt == "hits_file": suffix = "_hits.mgz" else: - suffix = '.mgz' + suffix = ".mgz" outfile = fname_presuffix( self.inputs.source_file, newpath=os.getcwd(), prefix=self.inputs.hemi + ".", suffix=suffix, - use_ext=False) + use_ext=False, + ) return outfile def _list_outputs(self): @@ -363,7 +412,8 @@ def _list_outputs(self): newpath=os.getcwd(), prefix=self.inputs.hemi + ".", suffix="_vox.txt", - use_ext=False) + use_ext=False, + ) outputs["vox_file"] = voxfile return outputs @@ -374,59 +424,53 @@ def _gen_filename(self, name): class SurfaceSmoothInputSpec(FSTraitedSpec): - - in_file = File( - mandatory=True, argstr="--sval %s", desc="source surface file") + in_file = File(mandatory=True, argstr="--sval %s", desc="source surface file") subject_id = traits.String( - mandatory=True, argstr="--s %s", desc="subject id of surface file") + mandatory=True, argstr="--s %s", desc="subject id of surface file" + ) hemi = traits.Enum( - "lh", - "rh", - argstr="--hemi %s", - mandatory=True, - desc="hemisphere to operate on") + "lh", "rh", argstr="--hemi %s", mandatory=True, desc="hemisphere to operate on" + ) fwhm = traits.Float( argstr="--fwhm %.4f", xor=["smooth_iters"], - desc="effective FWHM of the smoothing process") + desc="effective FWHM of the smoothing process", + ) smooth_iters = traits.Int( - argstr="--smooth %d", - xor=["fwhm"], - desc="iterations of the smoothing process") + argstr="--smooth %d", xor=["fwhm"], desc="iterations of the smoothing process" + ) cortex = traits.Bool( True, argstr="--cortex", usedefault=True, - desc="only smooth within $hemi.cortex.label") + desc="only smooth within ``$hemi.cortex.label``", + ) reshape = traits.Bool( - argstr="--reshape", - desc="reshape surface vector to fit in non-mgh format") - out_file = File( - argstr="--tval %s", genfile=True, desc="surface file to write") + argstr="--reshape", desc="reshape surface vector to fit in non-mgh format" + ) + out_file = File(argstr="--tval %s", genfile=True, desc="surface file to write") class SurfaceSmoothOutputSpec(TraitedSpec): - out_file = File(exists=True, desc="smoothed surface file") class SurfaceSmooth(FSCommand): """Smooth a surface image with mri_surf2surf. - The surface is smoothed by an interative process of averaging the + The surface is smoothed by an iterative process of averaging the value at each vertex with those of its adjacent neighbors. You may supply either the number of iterations to run or a desired effective FWHM of the smoothing process. If the latter, the underlying program will calculate the correct number of iterations internally. - .. seealso:: - - SmoothTessellation() Interface - For smoothing a tessellated surface (e.g. in gifti or .stl) + See Also + -------- + `nipype.interfaces.freesurfer.utils.SmoothTessellation`_ interface for + smoothing a tessellated surface (e.g. 
in gifti or .stl) Examples -------- - >>> import nipype.interfaces.freesurfer as fs >>> smoother = fs.SurfaceSmooth() >>> smoother.inputs.in_file = "lh.cope1.mgz" @@ -438,6 +482,7 @@ class SurfaceSmooth(FSCommand): >>> smoother.run() # doctest: +SKIP """ + _cmd = "mri_surf2surf" input_spec = SurfaceSmoothInputSpec output_spec = SurfaceSmoothOutputSpec @@ -452,7 +497,8 @@ def _list_outputs(self): else: kernel = self.inputs.smooth_iters outputs["out_file"] = fname_presuffix( - in_file, suffix="_smooth%d" % kernel, newpath=os.getcwd()) + in_file, suffix="_smooth%d" % kernel, newpath=os.getcwd() + ) return outputs def _gen_filename(self, name): @@ -466,28 +512,25 @@ class SurfaceTransformInputSpec(FSTraitedSpec): exists=True, mandatory=True, argstr="--sval %s", - xor=['source_annot_file'], - desc="surface file with source values") + xor=["source_annot_file"], + desc="surface file with source values", + ) source_annot_file = File( exists=True, mandatory=True, argstr="--sval-annot %s", - xor=['source_file'], - desc="surface annotation file") + xor=["source_file"], + desc="surface annotation file", + ) source_subject = traits.String( - mandatory=True, - argstr="--srcsubject %s", - desc="subject id for source surface") + mandatory=True, argstr="--srcsubject %s", desc="subject id for source surface" + ) hemi = traits.Enum( - "lh", - "rh", - argstr="--hemi %s", - mandatory=True, - desc="hemisphere to transform") + "lh", "rh", argstr="--hemi %s", mandatory=True, desc="hemisphere to transform" + ) target_subject = traits.String( - mandatory=True, - argstr="--trgsubject %s", - desc="subject id of target surface") + mandatory=True, argstr="--trgsubject %s", desc="subject id of target surface" + ) target_ico_order = traits.Enum( 1, 2, @@ -497,24 +540,24 @@ class SurfaceTransformInputSpec(FSTraitedSpec): 6, 7, argstr="--trgicoorder %d", - desc=("order of the icosahedron if " - "target_subject is 'ico'")) + desc=("order of the icosahedron if target_subject is 'ico'"), + ) source_type = traits.Enum( filetypes, - argstr='--sfmt %s', - requires=['source_file'], - desc="source file format") + argstr="--sfmt %s", + requires=["source_file"], + desc="source file format", + ) target_type = traits.Enum( - filetypes + implicit_filetypes, - argstr='--tfmt %s', - desc="output format") + filetypes + implicit_filetypes, argstr="--tfmt %s", desc="output format" + ) reshape = traits.Bool( - argstr="--reshape", - desc="reshape output surface to conform with Nifti") + argstr="--reshape", desc="reshape output surface to conform with Nifti" + ) reshape_factor = traits.Int( - argstr="--reshape-factor", desc="number of slices in reshaped image") - out_file = File( - argstr="--tval %s", genfile=True, desc="surface file to write") + argstr="--reshape-factor", desc="number of slices in reshaped image" + ) + out_file = File(argstr="--tval %s", genfile=True, desc="surface file to write") class SurfaceTransformOutputSpec(TraitedSpec): @@ -540,6 +583,7 @@ class SurfaceTransform(FSCommand): >>> sxfm.run() # doctest: +SKIP """ + _cmd = "mri_surf2surf" input_spec = SurfaceTransformInputSpec output_spec = SurfaceTransformOutputSpec @@ -547,18 +591,24 @@ class SurfaceTransform(FSCommand): def _format_arg(self, name, spec, value): if name == "target_type": if isdefined(self.inputs.out_file): - _, base, ext = split_filename(self._list_outputs()['out_file']) + _, base, ext = split_filename(self._list_outputs()["out_file"]) if ext != filemap[value]: if ext in filemap.values(): raise ValueError( "Cannot create {} file with extension " - 
"{}".format(value, ext)) + "{}".format(value, ext) + ) else: - logger.warning('Creating %s file with extension %s: %s%s', - value, ext, base, ext) + logger.warning( + "Creating %s file with extension %s: %s%s", + value, + ext, + base, + ext, + ) if value in implicit_filetypes: return "" - return super(SurfaceTransform, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -572,10 +622,24 @@ def _list_outputs(self): # Some recon-all files don't have a proper extension (e.g. "lh.thickness") # so we have to account for that here bad_extensions = [ - ".%s" % e for e in [ - "area", "mid", "pial", "avg_curv", "curv", "inflated", - "jacobian_white", "orig", "nofix", "smoothwm", "crv", - "sphere", "sulc", "thickness", "volume", "white" + ".%s" % e + for e in [ + "area", + "mid", + "pial", + "avg_curv", + "curv", + "inflated", + "jacobian_white", + "orig", + "nofix", + "smoothwm", + "crv", + "sphere", + "sulc", + "thickness", + "volume", + "white", ] ] use_ext = True @@ -588,9 +652,10 @@ def _list_outputs(self): use_ext = False outputs["out_file"] = fname_presuffix( source, - suffix=".%s%s" % (self.inputs.target_subject, ext), + suffix=f".{self.inputs.target_subject}{ext}", newpath=os.getcwd(), - use_ext=use_ext) + use_ext=use_ext, + ) else: outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -604,55 +669,59 @@ def _gen_filename(self, name): class Surface2VolTransformInputSpec(FSTraitedSpec): source_file = File( exists=True, - argstr='--surfval %s', + argstr="--surfval %s", copyfile=False, mandatory=True, - xor=['mkmask'], - desc='This is the source of the surface values') - hemi = traits.Str( - argstr='--hemi %s', mandatory=True, desc='hemisphere of data') + xor=["mkmask"], + desc="This is the source of the surface values", + ) + hemi = traits.Str(argstr="--hemi %s", mandatory=True, desc="hemisphere of data") transformed_file = File( name_template="%s_asVol.nii", - desc='Output volume', - argstr='--outvol %s', - name_source=['source_file'], - hash_files=False) + desc="Output volume", + argstr="--outvol %s", + name_source=["source_file"], + hash_files=False, + ) reg_file = File( exists=True, - argstr='--volreg %s', + argstr="--volreg %s", mandatory=True, - desc='tkRAS-to-tkRAS matrix (tkregister2 format)', - xor=['subject_id']) + desc="tkRAS-to-tkRAS matrix (tkregister2 format)", + xor=["subject_id"], + ) template_file = File( - exists=True, argstr='--template %s', desc='Output template volume') + exists=True, argstr="--template %s", desc="Output template volume" + ) mkmask = traits.Bool( - desc='make a mask instead of loading surface values', - argstr='--mkmask', - xor=['source_file']) + desc="make a mask instead of loading surface values", + argstr="--mkmask", + xor=["source_file"], + ) vertexvol_file = File( name_template="%s_asVol_vertex.nii", - desc=('Path name of the vertex output volume, which ' - 'is the same as output volume except that the ' - 'value of each voxel is the vertex-id that is ' - 'mapped to that voxel.'), - argstr='--vtxvol %s', - name_source=['source_file'], - hash_files=False) - surf_name = traits.Str( - argstr='--surf %s', desc='surfname (default is white)') - projfrac = traits.Float(argstr='--projfrac %s', desc='thickness fraction') + desc=( + "Path name of the vertex output volume, which " + "is the same as output volume except that the " + "value of each voxel is the vertex-id that is " + "mapped to that voxel." 
+ ), + argstr="--vtxvol %s", + name_source=["source_file"], + hash_files=False, + ) + surf_name = traits.Str(argstr="--surf %s", desc="surfname (default is white)") + projfrac = traits.Float(argstr="--projfrac %s", desc="thickness fraction") subjects_dir = traits.Str( - argstr='--sd %s', - desc=('freesurfer subjects directory defaults to ' - '$SUBJECTS_DIR')) - subject_id = traits.Str( - argstr='--identity %s', desc='subject id', xor=['reg_file']) + argstr="--sd %s", + desc=("freesurfer subjects directory defaults to $SUBJECTS_DIR"), + ) + subject_id = traits.Str(argstr="--identity %s", desc="subject id", xor=["reg_file"]) class Surface2VolTransformOutputSpec(TraitedSpec): - transformed_file = File( - exists=True, desc='Path to output file if used normally') - vertexvol_file = File(desc='vertex map volume path id. Optional') + transformed_file = File(exists=True, desc="Path to output file if used normally") + vertexvol_file = File(desc="vertex map volume path id. Optional") class Surface2VolTransform(FSCommand): @@ -674,60 +743,61 @@ class Surface2VolTransform(FSCommand): """ - _cmd = 'mri_surf2vol' + _cmd = "mri_surf2vol" input_spec = Surface2VolTransformInputSpec output_spec = Surface2VolTransformOutputSpec class ApplyMaskInputSpec(FSTraitedSpec): - in_file = File( exists=True, mandatory=True, position=-3, argstr="%s", - desc="input image (will be masked)") + desc="input image (will be masked)", + ) mask_file = File( exists=True, mandatory=True, position=-2, argstr="%s", - desc="image defining mask space") + desc="image defining mask space", + ) out_file = File( - name_source=['in_file'], - name_template='%s_masked', + name_source=["in_file"], + name_template="%s_masked", hash_files=True, keep_extension=True, position=-1, argstr="%s", - desc="final image to write") + desc="final image to write", + ) xfm_file = File( exists=True, argstr="-xform %s", - desc="LTA-format transformation matrix to align mask with input") + desc="LTA-format transformation matrix to align mask with input", + ) invert_xfm = traits.Bool(argstr="-invert", desc="invert transformation") xfm_source = File( - exists=True, - argstr="-lta_src %s", - desc="image defining transform source space") + exists=True, argstr="-lta_src %s", desc="image defining transform source space" + ) xfm_target = File( - exists=True, - argstr="-lta_dst %s", - desc="image defining transform target space") + exists=True, argstr="-lta_dst %s", desc="image defining transform target space" + ) use_abs = traits.Bool( - argstr="-abs", desc="take absolute value of mask before applying") - mask_thresh = traits.Float( - argstr="-T %.4f", desc="threshold mask before applying") + argstr="-abs", desc="take absolute value of mask before applying" + ) + mask_thresh = traits.Float(argstr="-T %.4f", desc="threshold mask before applying") keep_mask_deletion_edits = traits.Bool( argstr="-keep_mask_deletion_edits", - desc="transfer voxel-deletion edits (voxels=1) from mask to out vol") + desc="transfer voxel-deletion edits (voxels=1) from mask to out vol", + ) transfer = traits.Int( - argstr="-transfer %d", - desc="transfer only voxel value # from mask to out") + argstr="-transfer %d", desc="transfer only voxel value # from mask to out" + ) class ApplyMaskOutputSpec(TraitedSpec): - out_file = File(exists=True, desc="masked image") @@ -739,128 +809,149 @@ class ApplyMask(FSCommand): space with an LTA matrix. 
""" + _cmd = "mri_mask" input_spec = ApplyMaskInputSpec output_spec = ApplyMaskOutputSpec class SurfaceSnapshotsInputSpec(FSTraitedSpec): - subject_id = traits.String( - position=1, argstr="%s", mandatory=True, desc="subject to visualize") + position=1, argstr="%s", mandatory=True, desc="subject to visualize" + ) hemi = traits.Enum( "lh", "rh", position=2, argstr="%s", mandatory=True, - desc="hemisphere to visualize") + desc="hemisphere to visualize", + ) surface = traits.String( - position=3, argstr="%s", mandatory=True, desc="surface to visualize") + position=3, argstr="%s", mandatory=True, desc="surface to visualize" + ) show_curv = traits.Bool( - argstr="-curv", desc="show curvature", xor=["show_gray_curv"]) + argstr="-curv", desc="show curvature", xor=["show_gray_curv"] + ) show_gray_curv = traits.Bool( - argstr="-gray", desc="show curvature in gray", xor=["show_curv"]) + argstr="-gray", desc="show curvature in gray", xor=["show_curv"] + ) overlay = File( exists=True, argstr="-overlay %s", desc="load an overlay volume/surface", - requires=["overlay_range"]) + requires=["overlay_range"], + ) reg_xors = ["overlay_reg", "identity_reg", "mni152_reg"] - overlay_reg = traits.File( + overlay_reg = File( exists=True, argstr="-overlay-reg %s", xor=reg_xors, - desc="registration matrix file to register overlay to surface") + desc="registration matrix file to register overlay to surface", + ) identity_reg = traits.Bool( argstr="-overlay-reg-identity", xor=reg_xors, - desc="use the identity matrix to register the overlay to the surface") + desc="use the identity matrix to register the overlay to the surface", + ) mni152_reg = traits.Bool( argstr="-mni152reg", xor=reg_xors, - desc="use to display a volume in MNI152 space on the average subject") + desc="use to display a volume in MNI152 space on the average subject", + ) overlay_range = traits.Either( traits.Float, - traits.Tuple(traits.Float, traits.Float), - traits.Tuple(traits.Float, traits.Float, traits.Float), + Tuple(traits.Float, traits.Float), + Tuple(traits.Float, traits.Float, traits.Float), desc="overlay range--either min, (min, max) or (min, mid, max)", - argstr="%s") + argstr="%s", + ) overlay_range_offset = traits.Float( argstr="-foffset %.3f", - desc="overlay range will be symettric around offset value") + desc="overlay range will be symmetric around offset value", + ) truncate_overlay = traits.Bool( - argstr="-truncphaseflag 1", desc="truncate the overlay display") + argstr="-truncphaseflag 1", desc="truncate the overlay display" + ) reverse_overlay = traits.Bool( - argstr="-revphaseflag 1", desc="reverse the overlay display") + argstr="-revphaseflag 1", desc="reverse the overlay display" + ) invert_overlay = traits.Bool( - argstr="-invphaseflag 1", desc="invert the overlay display") + argstr="-invphaseflag 1", desc="invert the overlay display" + ) demean_overlay = traits.Bool(argstr="-zm", desc="remove mean from overlay") annot_file = File( exists=True, argstr="-annotation %s", xor=["annot_name"], - desc="path to annotation file to display") + desc="path to annotation file to display", + ) annot_name = traits.String( argstr="-annotation %s", xor=["annot_file"], - desc= - "name of annotation to display (must be in $subject/label directory") + desc="name of annotation to display (must be in $subject/label directory", + ) label_file = File( exists=True, argstr="-label %s", xor=["label_name"], - desc="path to label file to display") + desc="path to label file to display", + ) label_name = traits.String( argstr="-label %s", 
xor=["label_file"], - desc="name of label to display (must be in $subject/label directory") + desc="name of label to display (must be in $subject/label directory", + ) - colortable = File( - exists=True, argstr="-colortable %s", desc="load colortable file") + colortable = File(exists=True, argstr="-colortable %s", desc="load colortable file") label_under = traits.Bool( - argstr="-labels-under", desc="draw label/annotation under overlay") + argstr="-labels-under", desc="draw label/annotation under overlay" + ) label_outline = traits.Bool( - argstr="-label-outline", desc="draw label/annotation as outline") + argstr="-label-outline", desc="draw label/annotation as outline" + ) patch_file = File(exists=True, argstr="-patch %s", desc="load a patch") orig_suffix = traits.String( - argstr="-orig %s", desc="set the orig surface suffix string") + argstr="-orig %s", desc="set the orig surface suffix string" + ) sphere_suffix = traits.String( - argstr="-sphere %s", desc="set the sphere.reg suffix string") + argstr="-sphere %s", desc="set the sphere.reg suffix string" + ) show_color_scale = traits.Bool( - argstr="-colscalebarflag 1", desc="display the color scale bar") + argstr="-colscalebarflag 1", desc="display the color scale bar" + ) show_color_text = traits.Bool( - argstr="-colscaletext 1", desc="display text in the color scale bar") + argstr="-colscaletext 1", desc="display text in the color scale bar" + ) six_images = traits.Bool(desc="also take anterior and posterior snapshots") - screenshot_stem = traits.String( - desc="stem to use for screenshot file names") + screenshot_stem = traits.String(desc="stem to use for screenshot file names") stem_template_args = traits.List( traits.String, requires=["screenshot_stem"], - desc= - "input names to use as arguments for a string-formated stem template") + desc="input names to use as arguments for a string-formated stem template", + ) tcl_script = File( exists=True, argstr="%s", genfile=True, - desc="override default screenshot script") + desc="override default screenshot script", + ) class SurfaceSnapshotsOutputSpec(TraitedSpec): - snapshots = OutputMultiPath( - File(exists=True), - desc="tiff images of the surface from different perspectives") + File(exists=True), desc="tiff images of the surface from different perspectives" + ) class SurfaceSnapshots(FSCommand): @@ -889,6 +980,7 @@ class SurfaceSnapshots(FSCommand): >>> res = shots.run() # doctest: +SKIP """ + _cmd = "tksurfer" input_spec = SurfaceSnapshotsInputSpec output_spec = SurfaceSnapshotsOutputSpec @@ -906,42 +998,46 @@ def _format_arg(self, name, spec, value): if len(value) == 2: return "-fminmax %.3f %.3f" % value else: - return "-fminmax %.3f %.3f -fmid %.3f" % (value[0], - value[2], - value[1]) + return "-fminmax {:.3f} {:.3f} -fmid {:.3f}".format( + value[0], + value[2], + value[1], + ) elif name == "annot_name" and isdefined(value): # Matching annot by name needs to strip the leading hemi and trailing # extension strings if value.endswith(".annot"): value = value[:-6] - if re.match("%s[\.\-_]" % self.inputs.hemi, value[:3]): + if re.match(r"%s[\.\-_]" % self.inputs.hemi, value[:3]): value = value[3:] return "-annotation %s" % value - return super(SurfaceSnapshots, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _run_interface(self, runtime): if not isdefined(self.inputs.screenshot_stem): - stem = "%s_%s_%s" % (self.inputs.subject_id, self.inputs.hemi, - self.inputs.surface) + stem = "{}_{}_{}".format( + self.inputs.subject_id, + 
self.inputs.hemi, + self.inputs.surface, + ) else: stem = self.inputs.screenshot_stem stem_args = self.inputs.stem_template_args if isdefined(stem_args): - args = tuple([getattr(self.inputs, arg) for arg in stem_args]) + args = tuple(getattr(self.inputs, arg) for arg in stem_args) stem = stem % args # Check if the DISPLAY variable is set -- should avoid crashes (might not?) if "DISPLAY" not in os.environ: - raise RuntimeError( - "Graphics are not enabled -- cannot run tksurfer") + raise RuntimeError("Graphics are not enabled -- cannot run tksurfer") runtime.environ["_SNAPSHOT_STEM"] = stem self._write_tcl_script() - runtime = super(SurfaceSnapshots, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) # If a display window can't be opened, this will crash on # aggregate_outputs. Let's try to parse stderr and raise a # better exception here if that happened. errors = [ "surfer: failed, no suitable display found", - "Fatal Error in tksurfer.bin: could not open display" + "Fatal Error in tksurfer.bin: could not open display", ] for err in errors: if err in runtime.stderr: @@ -954,21 +1050,33 @@ def _run_interface(self, runtime): def _write_tcl_script(self): fid = open("snapshots.tcl", "w") script = [ - "save_tiff $env(_SNAPSHOT_STEM)-lat.tif", "make_lateral_view", - "rotate_brain_y 180", "redraw", - "save_tiff $env(_SNAPSHOT_STEM)-med.tif", "make_lateral_view", - "rotate_brain_x 90", "redraw", - "save_tiff $env(_SNAPSHOT_STEM)-ven.tif", "make_lateral_view", - "rotate_brain_x -90", "redraw", - "save_tiff $env(_SNAPSHOT_STEM)-dor.tif" + "save_tiff $env(_SNAPSHOT_STEM)-lat.tif", + "make_lateral_view", + "rotate_brain_y 180", + "redraw", + "save_tiff $env(_SNAPSHOT_STEM)-med.tif", + "make_lateral_view", + "rotate_brain_x 90", + "redraw", + "save_tiff $env(_SNAPSHOT_STEM)-ven.tif", + "make_lateral_view", + "rotate_brain_x -90", + "redraw", + "save_tiff $env(_SNAPSHOT_STEM)-dor.tif", ] if isdefined(self.inputs.six_images) and self.inputs.six_images: - script.extend([ - "make_lateral_view", "rotate_brain_y 90", "redraw", - "save_tiff $env(_SNAPSHOT_STEM)-pos.tif", "make_lateral_view", - "rotate_brain_y -90", "redraw", - "save_tiff $env(_SNAPSHOT_STEM)-ant.tif" - ]) + script.extend( + [ + "make_lateral_view", + "rotate_brain_y 90", + "redraw", + "save_tiff $env(_SNAPSHOT_STEM)-pos.tif", + "make_lateral_view", + "rotate_brain_y -90", + "redraw", + "save_tiff $env(_SNAPSHOT_STEM)-ant.tif", + ] + ) script.append("exit") fid.write("\n".join(script)) @@ -977,13 +1085,16 @@ def _write_tcl_script(self): def _list_outputs(self): outputs = self._outputs().get() if not isdefined(self.inputs.screenshot_stem): - stem = "%s_%s_%s" % (self.inputs.subject_id, self.inputs.hemi, - self.inputs.surface) + stem = "{}_{}_{}".format( + self.inputs.subject_id, + self.inputs.hemi, + self.inputs.surface, + ) else: stem = self.inputs.screenshot_stem stem_args = self.inputs.stem_template_args if isdefined(stem_args): - args = tuple([getattr(self.inputs, arg) for arg in stem_args]) + args = tuple(getattr(self.inputs, arg) for arg in stem_args) stem = stem % args snapshots = ["%s-lat.tif", "%s-med.tif", "%s-dor.tif", "%s-ven.tif"] if self.inputs.six_images: @@ -999,12 +1110,10 @@ def _gen_filename(self, name): class ImageInfoInputSpec(FSTraitedSpec): - in_file = File(exists=True, position=1, argstr="%s", desc="image to query") class ImageInfoOutputSpec(TraitedSpec): - info = traits.Any(desc="output of mri_info") out_file = File(exists=True, desc="text file with image information") data_type = 
traits.String(desc="image data type") @@ -1012,20 +1121,19 @@ class ImageInfoOutputSpec(TraitedSpec): TE = traits.String(desc="echo time (msec)") TR = traits.String(desc="repetition time(msec)") TI = traits.String(desc="inversion time (msec)") - dimensions = traits.Tuple(desc="image dimensions (voxels)") - vox_sizes = traits.Tuple(desc="voxel sizes (mm)") + dimensions = Tuple(desc="image dimensions (voxels)") + vox_sizes = Tuple(desc="voxel sizes (mm)") orientation = traits.String(desc="image orientation") ph_enc_dir = traits.String(desc="phase encode direction") class ImageInfo(FSCommand): - _cmd = "mri_info" input_spec = ImageInfoInputSpec output_spec = ImageInfoOutputSpec def info_regexp(self, info, field, delim="\n"): - m = re.search("%s\s*:\s+(.+?)%s" % (field, delim), info) + m = re.search(fr"{field}\s*:\s+(.+?){delim}", info) if m: return m.group(1) else: @@ -1048,14 +1156,14 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): vox = tuple(vox.split(", ")) outputs.vox_sizes = vox dim = self.info_regexp(info, "dimensions") - dim = tuple([int(d) for d in dim.split(" x ")]) + dim = tuple(int(d) for d in dim.split(" x ")) outputs.dimensions = dim outputs.orientation = self.info_regexp(info, "Orientation") outputs.ph_enc_dir = self.info_regexp(info, "PhEncDir") # File format and datatype are both keyed by "type" - ftype, dtype = re.findall("%s\s*:\s+(.+?)\n" % "type", info) + ftype, dtype = re.findall(r"%s\s*:\s+(.+?)\n" % "type", info) outputs.file_format = ftype outputs.data_type = dtype @@ -1066,73 +1174,77 @@ class MRIsConvertInputSpec(FSTraitedSpec): """ Uses Freesurfer's mris_convert to convert surface files to various formats """ + annot_file = File( - exists=True, - argstr="--annot %s", - desc="input is annotation or gifti label data") + exists=True, argstr="--annot %s", desc="input is annotation or gifti label data" + ) parcstats_file = File( exists=True, argstr="--parcstats %s", - desc="infile is name of text file containing label/val pairs") + desc="infile is name of text file containing label/val pairs", + ) label_file = File( exists=True, argstr="--label %s", - desc="infile is .label file, label is name of this label") + desc="infile is .label file, label is name of this label", + ) scalarcurv_file = File( exists=True, argstr="-c %s", - desc="input is scalar curv overlay file (must still specify surface)") + desc="input is scalar curv overlay file (must still specify surface)", + ) functional_file = File( exists=True, argstr="-f %s", - desc= - "input is functional time-series or other multi-frame data (must specify surface)" + desc="input is functional time-series or other multi-frame data (must specify surface)", ) labelstats_outfile = File( exists=False, argstr="--labelstats %s", - desc= - "outfile is name of gifti file to which label stats will be written") + desc="outfile is name of gifti file to which label stats will be written", + ) - patch = traits.Bool( - argstr="-p", desc="input is a patch, not a full surface") + patch = traits.Bool(argstr="-p", desc="input is a patch, not a full surface") rescale = traits.Bool( - argstr="-r", - desc="rescale vertex xyz so total area is same as group average") - normal = traits.Bool( - argstr="-n", desc="output is an ascii file where vertex data") - xyz_ascii = traits.Bool( - argstr="-a", desc="Print only surface xyz to ascii file") + argstr="-r", desc="rescale vertex xyz so total area is same as group average" + ) + normal = traits.Bool(argstr="-n", desc="output is an ascii file where vertex data") + xyz_ascii = 
traits.Bool(argstr="-a", desc="Print only surface xyz to ascii file") vertex = traits.Bool( - argstr="-v", desc="Writes out neighbors of a vertex in each row") + argstr="-v", desc="Writes out neighbors of a vertex in each row" + ) scale = traits.Float(argstr="-s %.3f", desc="scale vertex xyz by scale") dataarray_num = traits.Int( argstr="--da_num %d", - desc="if input is gifti, 'num' specifies which data array to use") + desc="if input is gifti, 'num' specifies which data array to use", + ) talairachxfm_subjid = traits.String( - argstr="-t %s", desc="apply talairach xfm of subject to vertex xyz") + argstr="-t %s", desc="apply talairach xfm of subject to vertex xyz" + ) origname = traits.String(argstr="-o %s", desc="read orig positions") in_file = File( exists=True, mandatory=True, position=-2, - argstr='%s', - desc='File to read/convert') + argstr="%s", + desc="File to read/convert", + ) out_file = File( - argstr='%s', + argstr="%s", position=-1, genfile=True, - xor=['out_datatype'], + xor=["out_datatype"], mandatory=True, - desc='output filename or True to generate one') + desc="output filename or True to generate one", + ) out_datatype = traits.Enum( "asc", @@ -1143,18 +1255,18 @@ class MRIsConvertInputSpec(FSTraitedSpec): "gii", "mgh", "mgz", - xor=['out_file'], + xor=["out_file"], mandatory=True, desc="These file formats are supported: ASCII: .asc" - "ICO: .ico, .tri GEO: .geo STL: .stl VTK: .vtk GIFTI: .gii MGH surface-encoded 'volume': .mgh, .mgz" + "ICO: .ico, .tri GEO: .geo STL: .stl VTK: .vtk GIFTI: .gii MGH surface-encoded 'volume': .mgh, .mgz", ) to_scanner = traits.Bool( argstr="--to-scanner", - desc="convert coordinates from native FS (tkr) coords to scanner coords" + desc="convert coordinates from native FS (tkr) coords to scanner coords", ) to_tkr = traits.Bool( argstr="--to-tkr", - desc="convert coordinates from scanner coords to native FS (tkr) coords" + desc="convert coordinates from scanner coords to native FS (tkr) coords", ) @@ -1162,7 +1274,8 @@ class MRIsConvertOutputSpec(TraitedSpec): """ Uses Freesurfer's mris_convert to convert surface files to various formats """ - converted = File(exists=True, desc='converted output surface') + + converted = File(exists=True, desc="converted output surface") class MRIsConvert(FSCommand): @@ -1178,14 +1291,15 @@ class MRIsConvert(FSCommand): >>> mris.inputs.out_datatype = 'gii' >>> mris.run() # doctest: +SKIP """ - _cmd = 'mris_convert' + + _cmd = "mris_convert" input_spec = MRIsConvertInputSpec output_spec = MRIsConvertOutputSpec def _format_arg(self, name, spec, value): if name == "out_file" and not os.path.isabs(value): value = os.path.abspath(value) - return super(MRIsConvert, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -1193,7 +1307,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return os.path.abspath(self._gen_outfilename()) else: return None @@ -1221,30 +1335,33 @@ class MRIsCombineInputSpec(FSTraitedSpec): """ Uses Freesurfer's mris_convert to combine two surface files into one. """ + in_files = traits.List( File(Exists=True), maxlen=2, minlen=2, mandatory=True, position=1, - argstr='--combinesurfs %s', - desc='Two surfaces to be combined.') + argstr="--combinesurfs %s", + desc="Two surfaces to be combined.", + ) out_file = File( - argstr='%s', + argstr="%s", position=-1, genfile=True, mandatory=True, - desc='Output filename. 
Combined surfaces from in_files.') + desc="Output filename. Combined surfaces from in_files.", + ) class MRIsCombineOutputSpec(TraitedSpec): """ Uses Freesurfer's mris_convert to combine two surface files into one. """ + out_file = File( - exists=True, - desc='Output filename. Combined surfaces from ' - 'in_files.') + exists=True, desc="Output filename. Combined surfaces from in_files." + ) class MRIsCombine(FSSurfaceCommand): @@ -1272,7 +1389,8 @@ class MRIsCombine(FSSurfaceCommand): 'mris_convert --combinesurfs lh.pial rh.pial bh.pial' >>> mris.run() # doctest: +SKIP """ - _cmd = 'mris_convert' + + _cmd = "mris_convert" input_spec = MRIsCombineInputSpec output_spec = MRIsCombineOutputSpec @@ -1283,9 +1401,9 @@ def _list_outputs(self): # regardless of input file names, except when path info is # specified path, base = os.path.split(self.inputs.out_file) - if path == '' and base[:3] not in ('lh.', 'rh.'): - base = 'lh.' + base - outputs['out_file'] = os.path.abspath(os.path.join(path, base)) + if path == "" and base[:3] not in ("lh.", "rh."): + base = "lh." + base + outputs["out_file"] = os.path.abspath(os.path.join(path, base)) return outputs @@ -1308,33 +1426,35 @@ class MRITessellateInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=-3, - argstr='%s', - desc='Input volume to tesselate voxels from.') + argstr="%s", + desc="Input volume to tessellate voxels from.", + ) label_value = traits.Int( position=-2, - argstr='%d', + argstr="%d", mandatory=True, - desc= - 'Label value which to tesselate from the input volume. (integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)' + desc='Label value which to tessellate from the input volume. (integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)', ) out_file = File( - argstr='%s', + argstr="%s", position=-1, genfile=True, - desc='output filename or True to generate one') + desc="output filename or True to generate one", + ) tesselate_all_voxels = traits.Bool( - argstr='-a', - desc='Tessellate the surface of all voxels with different labels') + argstr="-a", desc="Tessellate the surface of all voxels with different labels" + ) use_real_RAS_coordinates = traits.Bool( - argstr='-n', - desc='Saves surface with real RAS coordinates where c_(r,a,s) != 0') + argstr="-n", desc="Saves surface with real RAS coordinates where c_(r,a,s) != 0" + ) class MRITessellateOutputSpec(TraitedSpec): """ Uses Freesurfer's mri_tessellate to create surfaces by tessellating a given input volume """ - surface = File(exists=True, desc='binary surface of the tessellation ') + + surface = File(exists=True, desc="binary surface of the tessellation ") class MRITessellate(FSCommand): @@ -1351,17 +1471,18 @@ class MRITessellate(FSCommand): >>> tess.inputs.out_file = 'lh.hippocampus' >>> tess.run() # doctest: +SKIP """ - _cmd = 'mri_tessellate' + + _cmd = "mri_tessellate" input_spec = MRITessellateInputSpec output_spec = MRITessellateOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['surface'] = os.path.abspath(self._gen_outfilename()) + outputs["surface"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None @@ -1371,7 +1492,7 @@ def _gen_outfilename(self): return self.inputs.out_file else: _, name, ext = split_filename(self.inputs.in_file) - return name + ext + '_' + str(self.inputs.label_value) + return name + ext + "_" + str(self.inputs.label_value) class 
MRIPretessInputSpec(FSTraitedSpec): @@ -1379,65 +1500,69 @@ class MRIPretessInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=-4, - argstr='%s', - desc=('filled volume, usually wm.mgz')) + argstr="%s", + desc=("filled volume, usually wm.mgz"), + ) label = traits.Either( - traits.Str('wm'), + traits.Str("wm"), traits.Int(1), - argstr='%s', - default='wm', + argstr="%s", + default="wm", mandatory=True, usedefault=True, position=-3, - desc=('label to be picked up, can be a Freesurfer\'s string like ' - '\'wm\' or a label value (e.g. 127 for rh or 255 for lh)')) + desc=( + "label to be picked up, can be a Freesurfer's string like " + "'wm' or a label value (e.g. 127 for rh or 255 for lh)" + ), + ) in_norm = File( exists=True, mandatory=True, position=-2, - argstr='%s', - desc=('the normalized, brain-extracted T1w image. Usually norm.mgz')) + argstr="%s", + desc=("the normalized, brain-extracted T1w image. Usually norm.mgz"), + ) out_file = File( position=-1, - argstr='%s', - name_source=['in_filled'], - name_template='%s_pretesswm', + argstr="%s", + name_source=["in_filled"], + name_template="%s_pretesswm", keep_extension=True, - desc='the output file after mri_pretess.') + desc="the output file after mri_pretess.", + ) nocorners = traits.Bool( False, - argstr='-nocorners', - desc=('do not remove corner configurations' - ' in addition to edge ones.')) - keep = traits.Bool(False, argstr='-keep', desc=('keep WM edits')) + argstr="-nocorners", + desc=("do not remove corner configurations in addition to edge ones."), + ) + keep = traits.Bool(False, argstr="-keep", desc=("keep WM edits")) test = traits.Bool( False, - argstr='-test', - desc= - ('adds a voxel that should be removed by ' - 'mri_pretess. The value of the voxel is set to that of an ON-edited WM, ' - 'so it should be kept with -keep. The output will NOT be saved.')) + argstr="-test", + desc=( + "adds a voxel that should be removed by " + "mri_pretess. The value of the voxel is set to that of an ON-edited WM, " + "so it should be kept with -keep. The output will NOT be saved." + ), + ) class MRIPretessOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output file after mri_pretess') + out_file = File(exists=True, desc="output file after mri_pretess") class MRIPretess(FSCommand): """ Uses Freesurfer's mri_pretess to prepare volumes to be tessellated. - Description - ----------- - Changes white matter (WM) segmentation so that the neighbors of all voxels labeled as WM have a face in common - no edges or corners allowed. Example ------- - >>> import nipype.interfaces.freesurfer as fs >>> pretess = fs.MRIPretess() >>> pretess.inputs.in_filled = 'wm.mgz' @@ -1448,7 +1573,8 @@ class MRIPretess(FSCommand): >>> pretess.run() # doctest: +SKIP """ - _cmd = 'mri_pretess' + + _cmd = "mri_pretess" input_spec = MRIPretessInputSpec output_spec = MRIPretessOutputSpec @@ -1462,35 +1588,36 @@ class MRIMarchingCubesInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=1, - argstr='%s', - desc='Input volume to tesselate voxels from.') + argstr="%s", + desc="Input volume to tessellate voxels from.", + ) label_value = traits.Int( position=2, - argstr='%d', + argstr="%d", mandatory=True, - desc= - 'Label value which to tesselate from the input volume. (integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)' + desc='Label value which to tessellate from the input volume. 
(integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)', ) connectivity_value = traits.Int( 1, position=-1, - argstr='%d', + argstr="%d", usedefault=True, - desc= - 'Alter the marching cubes connectivity: 1=6+,2=18,3=6,4=26 (default=1)' + desc="Alter the marching cubes connectivity: 1=6+,2=18,3=6,4=26 (default=1)", ) out_file = File( - argstr='./%s', + argstr="./%s", position=-2, genfile=True, - desc='output filename or True to generate one') + desc="output filename or True to generate one", + ) class MRIMarchingCubesOutputSpec(TraitedSpec): """ Uses Freesurfer's mri_mc to create surfaces by tessellating a given input volume """ - surface = File(exists=True, desc='binary surface of the tessellation ') + + surface = File(exists=True, desc="binary surface of the tessellation ") class MRIMarchingCubes(FSCommand): @@ -1507,17 +1634,18 @@ class MRIMarchingCubes(FSCommand): >>> mc.inputs.out_file = 'lh.hippocampus' >>> mc.run() # doctest: +SKIP """ - _cmd = 'mri_mc' + + _cmd = "mri_mc" input_spec = MRIMarchingCubesInputSpec output_spec = MRIMarchingCubesOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['surface'] = self._gen_outfilename() + outputs["surface"] = self._gen_outfilename() return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None @@ -1527,92 +1655,100 @@ def _gen_outfilename(self): return os.path.abspath(self.inputs.out_file) else: _, name, ext = split_filename(self.inputs.in_file) - return os.path.abspath( - name + ext + '_' + str(self.inputs.label_value)) + return os.path.abspath(name + ext + "_" + str(self.inputs.label_value)) class SmoothTessellationInputSpec(FSTraitedSpec): - """ - This program smooths the tessellation of a surface using 'mris_smooth' - """ - in_file = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, copyfile=True, - desc='Input volume to tesselate voxels from.') + desc="Input volume to tessellate voxels from.", + ) curvature_averaging_iterations = traits.Int( - argstr='-a %d', - desc='Number of curvature averaging iterations (default=10)') + argstr="-a %d", desc="Number of curvature averaging iterations (default=10)" + ) smoothing_iterations = traits.Int( - argstr='-n %d', desc='Number of smoothing iterations (default=10)') + argstr="-n %d", desc="Number of smoothing iterations (default=10)" + ) snapshot_writing_iterations = traits.Int( - argstr='-w %d', desc='Write snapshot every "n" iterations') + argstr="-w %d", desc="Write snapshot every *n* iterations" + ) use_gaussian_curvature_smoothing = traits.Bool( - argstr='-g', desc='Use Gaussian curvature smoothing') + argstr="-g", desc="Use Gaussian curvature smoothing" + ) gaussian_curvature_norm_steps = traits.Int( - argstr='%d ', desc='Use Gaussian curvature smoothing') + argstr="%d", desc="Use Gaussian curvature smoothing" + ) gaussian_curvature_smoothing_steps = traits.Int( - argstr='%d', desc='Use Gaussian curvature smoothing') + argstr=" %d", desc="Use Gaussian curvature smoothing" + ) disable_estimates = traits.Bool( - argstr='-nw', - desc='Disables the writing of curvature and area estimates') + argstr="-nw", desc="Disables the writing of curvature and area estimates" + ) normalize_area = traits.Bool( - argstr='-area', desc='Normalizes the area after smoothing') - use_momentum = traits.Bool(argstr='-m', desc='Uses momentum') + argstr="-area", desc="Normalizes the area after smoothing" + ) + use_momentum = traits.Bool(argstr="-m", desc="Uses momentum") 
out_file = File( - argstr='%s', + argstr="%s", position=-1, genfile=True, - desc='output filename or True to generate one') + desc="output filename or True to generate one", + ) out_curvature_file = File( - argstr='-c %s', desc='Write curvature to ?h.curvname (default "curv")') + argstr="-c %s", desc='Write curvature to ``?h.curvname`` (default "curv")' + ) out_area_file = File( - argstr='-b %s', desc='Write area to ?h.areaname (default "area")') + argstr="-b %s", desc='Write area to ``?h.areaname`` (default "area")' + ) seed = traits.Int( - argstr="-seed %d", desc="Seed for setting random number generator") + argstr="-seed %d", desc="Seed for setting random number generator" + ) class SmoothTessellationOutputSpec(TraitedSpec): """ This program smooths the tessellation of a surface using 'mris_smooth' """ - surface = File(exists=True, desc='Smoothed surface file ') + + surface = File(exists=True, desc="Smoothed surface file.") class SmoothTessellation(FSCommand): """ - This program smooths the tessellation of a surface using 'mris_smooth' + Smooth a tessellated surface. - .. seealso:: - - SurfaceSmooth() Interface - For smoothing a scalar field along a surface manifold + See Also + -------- + `nipype.interfaces.freesurfer.utils.SurfaceSmooth`_ interface for smoothing a scalar field + along a surface manifold Example ------- - >>> import nipype.interfaces.freesurfer as fs >>> smooth = fs.SmoothTessellation() >>> smooth.inputs.in_file = 'lh.hippocampus.stl' >>> smooth.run() # doctest: +SKIP + """ - _cmd = 'mris_smooth' + + _cmd = "mris_smooth" input_spec = SmoothTessellationInputSpec output_spec = SmoothTessellationOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['surface'] = self._gen_outfilename() + outputs["surface"] = self._gen_outfilename() return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None @@ -1622,13 +1758,13 @@ def _gen_outfilename(self): return os.path.abspath(self.inputs.out_file) else: _, name, ext = split_filename(self.inputs.in_file) - return os.path.abspath(name + '_smoothed' + ext) + return os.path.abspath(name + "_smoothed" + ext) def _run_interface(self, runtime): # The returncode is meaningless in BET. So check the output # in stderr and if it's set, then update the returncode # accordingly. 
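+ # ("BET" in the comment above appears to be a leftover from an FSL wrapper this
+ # check was adapted from; here the stderr inspection guards mris_smooth.)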
- runtime = super(SmoothTessellation, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if "failed" in runtime.stderr: self.raise_exception(runtime) return runtime @@ -1637,19 +1773,21 @@ def _run_interface(self, runtime): class MakeAverageSubjectInputSpec(FSTraitedSpec): subjects_ids = traits.List( traits.Str(), - argstr='--subjects %s', - desc='freesurfer subjects ids to average', + argstr="--subjects %s", + desc="freesurfer subjects ids to average", mandatory=True, - sep=' ') + sep=" ", + ) out_name = File( - 'average', - argstr='--out %s', - desc='name for the average subject', - usedefault=True) + "average", + argstr="--out %s", + desc="name for the average subject", + usedefault=True, + ) class MakeAverageSubjectOutputSpec(TraitedSpec): - average_subject_name = traits.Str(desc='Output registration file') + average_subject_name = traits.Str(desc="Name of the average subject") class MakeAverageSubject(FSCommand): @@ -1665,37 +1803,35 @@ class MakeAverageSubject(FSCommand): """ - _cmd = 'make_average_subject' + _cmd = "make_average_subject" input_spec = MakeAverageSubjectInputSpec output_spec = MakeAverageSubjectOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['average_subject_name'] = self.inputs.out_name + outputs["average_subject_name"] = self.inputs.out_name return outputs class ExtractMainComponentInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - mandatory=True, - argstr='%s', - position=1, - desc='input surface file') + exists=True, mandatory=True, argstr="%s", position=1, desc="input surface file" + ) out_file = File( - name_template='%s.maincmp', - name_source='in_file', - argstr='%s', + name_template="%s.maincmp", + name_source="in_file", + argstr="%s", position=2, - desc='surface containing main component') + desc="surface containing main component", + ) class ExtractMainComponentOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='surface containing main component') + out_file = File(exists=True, desc="surface containing main component") class ExtractMainComponent(CommandLine): - """Extract the main component of a tesselated surface + """Extract the main component of a tessellated surface Examples -------- @@ -1707,80 +1843,87 @@ class ExtractMainComponent(CommandLine): """ - _cmd = 'mris_extract_main_component' + _cmd = "mris_extract_main_component" input_spec = ExtractMainComponentInputSpec output_spec = ExtractMainComponentOutputSpec class Tkregister2InputSpec(FSTraitedSpec): target_image = File( - exists=True, argstr="--targ %s", xor=['fstarg'], desc='target volume') + exists=True, argstr="--targ %s", xor=["fstarg"], desc="target volume" + ) fstarg = traits.Bool( False, - argstr='--fstarg', - xor=['target_image'], - desc='use subject\'s T1 as reference') + argstr="--fstarg", + xor=["target_image"], + desc="use subject's T1 as reference", + ) moving_image = File( - exists=True, mandatory=True, argstr="--mov %s", desc='moving volume') + exists=True, mandatory=True, argstr="--mov %s", desc="moving volume" + ) # Input registration file options fsl_in_matrix = File( - exists=True, - argstr="--fsl %s", - desc='fsl-style registration input matrix') + exists=True, argstr="--fsl %s", desc="fsl-style registration input matrix" + ) xfm = File( exists=True, - argstr='--xfm %s', - desc='use a matrix in MNI coordinates as initial registration') + argstr="--xfm %s", + desc="use a matrix in MNI coordinates as initial registration", + ) lta_in = File( exists=True, - argstr='--lta %s', - desc='use a matrix in MNI 
coordinates as initial registration') + argstr="--lta %s", + desc="use a matrix in MNI coordinates as initial registration", + ) invert_lta_in = traits.Bool( - requires=['lta_in'], desc='Invert input LTA before applying') + requires=["lta_in"], desc="Invert input LTA before applying" + ) # Output registration file options fsl_out = traits.Either( True, File, - argstr='--fslregout %s', - desc='compute an FSL-compatible resgitration matrix') + argstr="--fslregout %s", + desc="compute an FSL-compatible registration matrix", + ) lta_out = traits.Either( - True, - File, - argstr='--ltaout %s', - desc='output registration file (LTA format)') + True, File, argstr="--ltaout %s", desc="output registration file (LTA format)" + ) invert_lta_out = traits.Bool( - argstr='--ltaout-inv', - requires=['lta_in'], - desc='Invert input LTA before applying') + argstr="--ltaout-inv", + requires=["lta_in"], + desc="Invert output LTA before writing", + ) - subject_id = traits.String(argstr="--s %s", desc='freesurfer subject ID') + subject_id = traits.String(argstr="--s %s", desc="freesurfer subject ID") noedit = traits.Bool( - True, - argstr="--noedit", - usedefault=True, - desc='do not open edit window (exit)') + True, argstr="--noedit", usedefault=True, desc="do not open edit window (exit)" + ) reg_file = File( - 'register.dat', + "register.dat", usedefault=True, mandatory=True, - argstr='--reg %s', - desc='freesurfer-style registration file') + argstr="--reg %s", + desc="freesurfer-style registration file", + ) reg_header = traits.Bool( - False, argstr='--regheader', desc='compute regstration from headers') + False, argstr="--regheader", desc="compute registration from headers" + ) fstal = traits.Bool( False, - argstr='--fstal', - xor=['target_image', 'moving_image', 'reg_file'], - desc='set mov to be tal and reg to be tal xfm') + argstr="--fstal", + xor=["target_image", "moving_image", "reg_file"], + desc="set mov to be tal and reg to be tal xfm", + ) movscale = traits.Float( - argstr='--movscale %f', desc='adjust registration matrix to scale mov') + argstr="--movscale %f", desc="adjust registration matrix to scale mov" + ) class Tkregister2OutputSpec(TraitedSpec): - reg_file = File(exists=True, desc='freesurfer-style registration file') - fsl_file = File(desc='FSL-style registration file') - lta_file = File(desc='LTA-style registration file') + reg_file = File(exists=True, desc="freesurfer-style registration file") + fsl_file = File(desc="FSL-style registration file") + lta_file = File(desc="LTA-style registration file") class Tkregister2(FSCommand): @@ -1788,11 +1931,10 @@ class Tkregister2(FSCommand): Examples -------- - Get transform matrix between orig (*tkRAS*) and native (*scannerRAS*) coordinates in Freesurfer. Implements the first step of mapping surfaces to native space in `this guide - `_. + `__. 
>>> from nipype.interfaces.freesurfer import Tkregister2 >>> tk2 = Tkregister2(reg_file='T1_to_native.dat') @@ -1816,38 +1958,41 @@ class Tkregister2(FSCommand): 'tkregister2 --fsl flirt.mat --mov epi.nii --noedit --reg register.dat' >>> tk2.run() # doctest: +SKIP """ + _cmd = "tkregister2" input_spec = Tkregister2InputSpec output_spec = Tkregister2OutputSpec def _format_arg(self, name, spec, value): - if name == 'lta_in' and self.inputs.invert_lta_in: - spec = '--lta-inv %s' - if name in ('fsl_out', 'lta_out') and value is True: - value = self._list_outputs()[name] - return super(Tkregister2, self)._format_arg(name, spec, value) + if name == "lta_in" and self.inputs.invert_lta_in: + spec = "--lta-inv %s" + if name in ("fsl_out", "lta_out") and value is True: + value = self._list_outputs()[f"{name[:3]}_file"] + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() reg_file = os.path.abspath(self.inputs.reg_file) - outputs['reg_file'] = reg_file + outputs["reg_file"] = reg_file cwd = os.getcwd() fsl_out = self.inputs.fsl_out if isdefined(fsl_out): if fsl_out is True: - outputs['fsl_file'] = fname_presuffix( - reg_file, suffix='.mat', newpath=cwd, use_ext=False) + outputs["fsl_file"] = fname_presuffix( + reg_file, suffix=".mat", newpath=cwd, use_ext=False + ) else: - outputs['fsl_file'] = os.path.abspath(self.inputs.fsl_out) + outputs["fsl_file"] = os.path.abspath(self.inputs.fsl_out) lta_out = self.inputs.lta_out if isdefined(lta_out): if lta_out is True: - outputs['lta_file'] = fname_presuffix( - reg_file, suffix='.lta', newpath=cwd, use_ext=False) + outputs["lta_file"] = fname_presuffix( + reg_file, suffix=".lta", newpath=cwd, use_ext=False + ) else: - outputs['lta_file'] = os.path.abspath(self.inputs.lta_out) + outputs["lta_file"] = os.path.abspath(self.inputs.lta_out) return outputs def _gen_outfilename(self): @@ -1855,48 +2000,43 @@ def _gen_outfilename(self): return os.path.abspath(self.inputs.out_file) else: _, name, ext = split_filename(self.inputs.in_file) - return os.path.abspath(name + '_smoothed' + ext) + return os.path.abspath(name + "_smoothed" + ext) class AddXFormToHeaderInputSpec(FSTraitedSpec): - # required in_file = File( - exists=True, - mandatory=True, - position=-2, - argstr="%s", - desc="input volume") - # transform file does NOT need to exist at the time if using copy_name - transform = File( - exists=False, - mandatory=True, - position=-3, - argstr="%s", - desc="xfm file") + exists=True, mandatory=True, position=-2, argstr="%s", desc="input volume" + ) + # transform file does NOT need to exist at the time if using copy_name + transform = File( + exists=False, mandatory=True, position=-3, argstr="%s", desc="xfm file" + ) out_file = File( - 'output.mgz', - position=-1, - argstr="%s", - usedefault=True, - desc="output volume") + "output.mgz", position=-1, argstr="%s", usedefault=True, desc="output volume" + ) # optional copy_name = traits.Bool( - argstr="-c", desc="do not try to load the xfmfile, just copy name") + argstr="-c", desc="do not try to load the xfmfile, just copy name" + ) verbose = traits.Bool(argstr="-v", desc="be verbose") class AddXFormToHeaderOutputSpec(TraitedSpec): - out_file = File(exists=True, desc="output volume") class AddXFormToHeader(FSCommand): - """ Just adds specified xform to the volume header + """ + Just adds specified xform to the volume header. - (!) 
WARNING: transform input **MUST** be an absolute path to a DataSink'ed transform or - the output will reference a transform in the workflow cache directory! + .. danger :: + Input transform **MUST** be an absolute path to a DataSink'ed transform or + the output will reference a transform in the workflow cache directory! + + Examples + -------- >>> from nipype.interfaces.freesurfer import AddXFormToHeader >>> adder = AddXFormToHeader() >>> adder.inputs.in_file = 'norm.mgz' @@ -1907,24 +2047,24 @@ class AddXFormToHeader(FSCommand): >>> adder.inputs.copy_name = True >>> adder.cmdline 'mri_add_xform_to_header -c trans.mat norm.mgz output.mgz' - >>> adder.run() # doctest: +SKIP - References: + References ---------- [https://surfer.nmr.mgh.harvard.edu/fswiki/mri_add_xform_to_header] """ + _cmd = "mri_add_xform_to_header" input_spec = AddXFormToHeaderInputSpec output_spec = AddXFormToHeaderOutputSpec def _format_arg(self, name, spec, value): - if name == 'transform': + if name == "transform": return value # os.path.abspath(value) # if name == 'copy_name' and value: # self.input_spec.transform - return super(AddXFormToHeader, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -1934,30 +2074,32 @@ def _list_outputs(self): class CheckTalairachAlignmentInputSpec(FSTraitedSpec): in_file = File( - argstr='-xfm %s', - xor=['subject'], + argstr="-xfm %s", + xor=["subject"], exists=True, mandatory=True, position=-1, - desc="specify the talairach.xfm file to check") + desc="specify the talairach.xfm file to check", + ) subject = traits.String( - argstr='-subj %s', - xor=['in_file'], + argstr="-subj %s", + xor=["in_file"], mandatory=True, position=-1, - desc="specify subject's name") + desc="specify subject's name", + ) # optional threshold = traits.Float( default_value=0.010, usedefault=True, - argstr='-T %.3f', - desc="Talairach transforms for subjects with p-values <= T " + - "are considered as very unlikely default=0.010") + argstr="-T %.3f", + desc="Talairach transforms for subjects with p-values <= T " + "are considered as very unlikely (default=0.010)", + ) class CheckTalairachAlignmentOutputSpec(TraitedSpec): - out_file = traits.File( - exists=True, desc="The input file for CheckTalairachAlignment") + out_file = File(exists=True, desc="The input file for CheckTalairachAlignment") class CheckTalairachAlignment(FSCommand): @@ -1977,37 +2119,32 @@ class CheckTalairachAlignment(FSCommand): >>> checker.run() # doctest: +SKIP """ + _cmd = "talairach_afd" input_spec = CheckTalairachAlignmentInputSpec output_spec = CheckTalairachAlignmentOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self.inputs.in_file + outputs["out_file"] = self.inputs.in_file return outputs class TalairachAVIInputSpec(FSTraitedSpec): - in_file = File( - argstr='--i %s', exists=True, mandatory=True, desc="input volume") + in_file = File(argstr="--i %s", exists=True, mandatory=True, desc="input volume") out_file = File( - argstr='--xfm %s', - mandatory=True, - exists=False, - desc="output xfm file") + argstr="--xfm %s", mandatory=True, exists=False, desc="output xfm file" + ) # optional atlas = traits.String( - argstr='--atlas %s', - desc="alternate target atlas (in freesurfer/average dir)") + argstr="--atlas %s", desc="alternate target atlas (in freesurfer/average dir)" + ) class TalairachAVIOutputSpec(TraitedSpec): - out_file = traits.File( - exists=False, desc="The output transform for TalairachAVI") 
- out_log = traits.File( - exists=False, desc="The output log file for TalairachAVI") - out_txt = traits.File( - exists=False, desc="The output text file for TaliarachAVI") + out_file = File(exists=False, desc="The output transform for TalairachAVI") + out_log = File(exists=False, desc="The output log file for TalairachAVI") + out_txt = File(exists=False, desc="The output text file for TalairachAVI") class TalairachAVI(FSCommand): @@ -2030,27 +2167,30 @@ class TalairachAVI(FSCommand): >>> example.run() # doctest: +SKIP """ + _cmd = "talairach_avi" input_spec = TalairachAVIInputSpec output_spec = TalairachAVIOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) - outputs['out_log'] = os.path.abspath('talairach_avi.log') - outputs['out_txt'] = os.path.join( + outputs["out_file"] = os.path.abspath(self.inputs.out_file) + outputs["out_log"] = os.path.abspath("talairach_avi.log") + outputs["out_txt"] = os.path.join( os.path.dirname(self.inputs.out_file), - 'talsrcimg_to_' + str(self.inputs.atlas) + 't4_vox2vox.txt') + "talsrcimg_to_" + str(self.inputs.atlas) + "t4_vox2vox.txt", + ) return outputs class TalairachQCInputSpec(FSTraitedSpec): log_file = File( - argstr='%s', + argstr="%s", mandatory=True, exists=True, position=0, - desc="The log file for TalairachQC") + desc="The log file for TalairachQC", + ) class TalairachQC(FSScriptCommand): @@ -2064,6 +2204,7 @@ class TalairachQC(FSScriptCommand): >>> qc.cmdline 'tal_QC_AZS dirs.txt' """ + _cmd = "tal_QC_AZS" input_spec = TalairachQCInputSpec output_spec = FSScriptOutputSpec @@ -2075,28 +2216,32 @@ class RemoveNeckInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=-4, - desc="Input file for RemoveNeck") + desc="Input file for RemoveNeck", + ) out_file = File( argstr="%s", exists=False, - name_source=['in_file'], + name_source=["in_file"], name_template="%s_noneck", hash_files=False, keep_extension=True, position=-1, - desc="Output file for RemoveNeck") + desc="Output file for RemoveNeck", + ) transform = File( argstr="%s", exists=True, mandatory=True, position=-3, - desc="Input transform file for RemoveNeck") + desc="Input transform file for RemoveNeck", + ) template = File( argstr="%s", exists=True, mandatory=True, position=-2, - desc="Input template file for RemoveNeck") + desc="Input template file for RemoveNeck", + ) # optional radius = traits.Int(argstr="-radius %d", desc="Radius") @@ -2120,13 +2265,14 @@ class RemoveNeck(FSCommand): >>> remove_neck.cmdline 'mri_remove_neck norm.mgz trans.mat trans.mat norm_noneck.mgz' """ + _cmd = "mri_remove_neck" input_spec = RemoveNeckInputSpec output_spec = RemoveNeckOutputSpec def _gen_fname(self, name): - if name == 'out_file': - return os.path.abspath('nu_noneck.mgz') + if name == "out_file": + return os.path.abspath("nu_noneck.mgz") return None def _list_outputs(self): @@ -2141,22 +2287,24 @@ class MRIFillInputSpec(FSTraitedSpec): mandatory=True, exists=True, position=-2, - desc="Input white matter file") + desc="Input white matter file", + ) out_file = File( argstr="%s", mandatory=True, exists=False, position=-1, - desc="Output filled volume file name for MRIFill") + desc="Output filled volume file name for MRIFill", + ) # optional segmentation = File( argstr="-segmentation %s", exists=True, - desc="Input segmentation file for MRIFill") + desc="Input segmentation file for MRIFill", + ) transform = File( - argstr="-xform %s", - exists=True, - desc="Input transform file for MRIFill") + argstr="-xform %s", 
exists=True, desc="Input transform file for MRIFill" + ) log_file = File(argstr="-a %s", desc="Output log file for MRIFill") @@ -2168,7 +2316,7 @@ class MRIFillOutputSpec(TraitedSpec): class MRIFill(FSCommand): """ This program creates hemispheric cutting planes and fills white matter - with specific values for subsequent surface tesselation. + with specific values for subsequent surface tessellation. Examples ======== @@ -2199,23 +2347,23 @@ class MRIsInflateInputSpec(FSTraitedSpec): mandatory=True, exists=True, copyfile=True, - desc="Input file for MRIsInflate") + desc="Input file for MRIsInflate", + ) out_file = File( argstr="%s", position=-1, exists=False, - name_source=['in_file'], + name_source=["in_file"], name_template="%s.inflated", hash_files=False, keep_extension=True, - desc="Output file for MRIsInflate") + desc="Output file for MRIsInflate", + ) # optional - out_sulc = File( - exists=False, xor=['no_save_sulc'], desc="Output sulc file") + out_sulc = File(exists=False, xor=["no_save_sulc"], desc="Output sulc file") no_save_sulc = traits.Bool( - argstr='-no-save-sulc', - xor=['out_sulc'], - desc="Do not save sulc file as output") + argstr="-no-save-sulc", xor=["out_sulc"], desc="Do not save sulc file as output" + ) class MRIsInflateOutputSpec(TraitedSpec): @@ -2237,7 +2385,7 @@ class MRIsInflate(FSCommand): 'mris_inflate -no-save-sulc lh.pial lh.inflated' """ - _cmd = 'mris_inflate' + _cmd = "mris_inflate" input_spec = MRIsInflateInputSpec output_spec = MRIsInflateOutputSpec @@ -2257,27 +2405,30 @@ class SphereInputSpec(FSTraitedSpecOpenMP): copyfile=True, mandatory=True, exists=True, - desc="Input file for Sphere") + desc="Input file for Sphere", + ) out_file = File( argstr="%s", position=-1, exists=False, - name_source=['in_file'], + name_source=["in_file"], hash_files=False, - name_template='%s.sphere', - desc="Output file for Sphere") + name_template="%s.sphere", + desc="Output file for Sphere", + ) # optional seed = traits.Int( - argstr="-seed %d", desc="Seed for setting random number generator") + argstr="-seed %d", desc="Seed for setting random number generator" + ) magic = traits.Bool( argstr="-q", - desc= - "No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu" + desc="No documentation. 
Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) in_smoothwm = File( exists=True, copyfile=True, - desc="Input surface required when -q flag is not selected") + desc="Input surface required when -q flag is not selected", + ) class SphereOutputSpec(TraitedSpec): @@ -2296,7 +2447,8 @@ class Sphere(FSCommandOpenMP): >>> sphere.cmdline 'mris_sphere lh.pial lh.sphere' """ - _cmd = 'mris_sphere' + + _cmd = "mris_sphere" input_spec = SphereInputSpec output_spec = SphereOutputSpec @@ -2308,47 +2460,45 @@ def _list_outputs(self): class FixTopologyInputSpec(FSTraitedSpec): in_orig = File( - exists=True, - mandatory=True, - desc="Undocumented input file <hemisphere>.orig") + exists=True, mandatory=True, desc="Undocumented input file <hemisphere>.orig" + ) in_inflated = File( exists=True, mandatory=True, - desc="Undocumented input file <hemisphere>.inflated") + desc="Undocumented input file <hemisphere>.inflated", + ) - in_brain = File( - exists=True, mandatory=True, desc="Implicit input brain.mgz") + in_brain = File(exists=True, mandatory=True, desc="Implicit input brain.mgz") in_wm = File(exists=True, mandatory=True, desc="Implicit input wm.mgz") hemisphere = traits.String( - position=-1, - argstr="%s", - mandatory=True, - desc="Hemisphere being processed") + position=-1, argstr="%s", mandatory=True, desc="Hemisphere being processed" + ) subject_id = traits.String( - 'subject_id', + "subject_id", position=-2, argstr="%s", mandatory=True, usedefault=True, - desc="Subject being processed") + desc="Subject being processed", + ) copy_inputs = traits.Bool( mandatory=True, - desc="If running as a node, set this to True " + - "otherwise, the topology fixing will be done " + "in place.") + desc="If running as a node, set this to True; " + "otherwise, the topology fixing will be done in place.", + ) # optional seed = traits.Int( - argstr="-seed %d", desc="Seed for setting random number generator") + argstr="-seed %d", desc="Seed for setting random number generator" + ) ga = traits.Bool( argstr="-ga", - desc= - "No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu" + desc="No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) mgz = traits.Bool( argstr="-mgz", - desc= - "No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu" + desc="No documentation. 
Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) - sphere = traits.File(argstr="-sphere %s", desc="Sphere input file") + sphere = File(argstr="-sphere %s", desc="Sphere input file") class FixTopologyOutputSpec(TraitedSpec): @@ -2376,40 +2526,40 @@ class FixTopology(FSCommand): 'mris_fix_topology -ga -mgz -sphere qsphere.nofix 10335 lh' """ - _cmd = 'mris_fix_topology' + _cmd = "mris_fix_topology" input_spec = FixTopologyInputSpec output_spec = FixTopologyOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir hemi = self.inputs.hemisphere - copy2subjdir(self, self.inputs.sphere, folder='surf') + copy2subjdir(self, self.inputs.sphere, folder="surf") # the orig file is edited in place self.inputs.in_orig = copy2subjdir( self, self.inputs.in_orig, - folder='surf', - basename='{0}.orig'.format(hemi)) + folder="surf", + basename=f"{hemi}.orig", + ) copy2subjdir( self, self.inputs.in_inflated, - folder='surf', - basename='{0}.inflated'.format(hemi)) - copy2subjdir( - self, self.inputs.in_brain, folder='mri', basename='brain.mgz') - copy2subjdir( - self, self.inputs.in_wm, folder='mri', basename='wm.mgz') - return super(FixTopology, self).run(**inputs) + folder="surf", + basename=f"{hemi}.inflated", + ) + copy2subjdir(self, self.inputs.in_brain, folder="mri", basename="brain.mgz") + copy2subjdir(self, self.inputs.in_wm, folder="mri", basename="wm.mgz") + return super().run(**inputs) def _format_arg(self, name, spec, value): - if name == 'sphere': + if name == "sphere": # get the basename and take out the hemisphere - suffix = os.path.basename(value).split('.', 1)[1] + suffix = os.path.basename(value).split(".", 1)[1] return spec.argstr % suffix - return super(FixTopology, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -2423,11 +2573,16 @@ class EulerNumberInputSpec(FSTraitedSpec): position=-1, mandatory=True, exists=True, - desc="Input file for EulerNumber") + desc="Input file for EulerNumber", + ) class EulerNumberOutputSpec(TraitedSpec): - out_file = File(exists=False, desc="Output file for EulerNumber") + euler = traits.Int( + desc="Euler number of cortical surface. 
A value of 2 signals a " + "topologically correct surface model with no holes" + ) + defects = traits.Int(desc="Number of defects") class EulerNumber(FSCommand): @@ -2442,13 +2597,27 @@ class EulerNumber(FSCommand): >>> ft.cmdline 'mris_euler_number lh.pial' """ - _cmd = 'mris_euler_number' + + _cmd = "mris_euler_number" input_spec = EulerNumberInputSpec output_spec = EulerNumberOutputSpec + def _run_interface(self, runtime): + runtime = super()._run_interface(runtime) + self._parse_output(runtime.stdout, runtime.stderr) + return runtime + + def _parse_output(self, stdout, stderr): + """Parse stdout / stderr and extract defects""" + m = re.search(r"(?<=total defect index = )\d+", stdout or stderr) + if m is None: + raise RuntimeError("Could not fetch defect index") + self._defects = int(m.group()) + def _list_outputs(self): outputs = self._outputs().get() - outputs["out_file"] = os.path.abspath(self.inputs.in_file) + outputs["defects"] = self._defects + outputs["euler"] = 2 - (2 * self._defects) return outputs @@ -2459,16 +2628,18 @@ class RemoveIntersectionInputSpec(FSTraitedSpec): mandatory=True, exists=True, copyfile=True, - desc="Input file for RemoveIntersection") + desc="Input file for RemoveIntersection", + ) out_file = File( argstr="%s", position=-1, exists=False, - name_source=['in_file'], - name_template='%s', + name_source=["in_file"], + name_template="%s", hash_files=False, keep_extension=True, - desc="Output file for RemoveIntersection") + desc="Output file for RemoveIntersection", + ) class RemoveIntersectionOutputSpec(TraitedSpec): @@ -2488,7 +2659,7 @@ class RemoveIntersection(FSCommand): 'mris_remove_intersection lh.pial lh.pial' """ - _cmd = 'mris_remove_intersection' + _cmd = "mris_remove_intersection" input_spec = RemoveIntersectionInputSpec output_spec = RemoveIntersectionOutputSpec @@ -2501,82 +2672,82 @@ def _list_outputs(self): class MakeSurfacesInputSpec(FSTraitedSpec): # required hemisphere = traits.Enum( - 'lh', - 'rh', + "lh", + "rh", position=-1, argstr="%s", mandatory=True, - desc="Hemisphere being processed") + desc="Hemisphere being processed", + ) subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, position=-2, argstr="%s", mandatory=True, - desc="Subject being processed") + desc="Subject being processed", + ) # implicit in_orig = File( exists=True, mandatory=True, - argstr='-orig %s', - desc="Implicit input file <hemisphere>.orig") - in_wm = File( - exists=True, mandatory=True, desc="Implicit input file wm.mgz") - in_filled = File( - exists=True, mandatory=True, desc="Implicit input file filled.mgz") + argstr="-orig %s", + desc="Implicit input file <hemisphere>.orig", + ) + in_wm = File(exists=True, mandatory=True, desc="Implicit input file wm.mgz") + in_filled = File(exists=True, mandatory=True, desc="Implicit input file filled.mgz") # optional in_white = File(exists=True, desc="Implicit input that is sometimes used") in_label = File( exists=True, - xor=['noaparc'], - desc="Implicit input label/<hemisphere>.aparc.annot") + xor=["noaparc"], + desc="Implicit input label/<hemisphere>.aparc.annot", + ) orig_white = File( argstr="-orig_white %s", exists=True, - desc="Specify a white surface to start with") + desc="Specify a white surface to start with", + ) orig_pial = File( argstr="-orig_pial %s", exists=True, - requires=['in_label'], - desc="Specify a pial surface to start with") + requires=["in_label"], + desc="Specify a pial surface to start with", + ) fix_mtl = traits.Bool(argstr="-fix_mtl", desc="Undocumented flag") no_white = traits.Bool(argstr="-nowhite", desc="Undocumented 
flag") - white_only = traits.Bool(argstr="-whiteonly", desc="Undocumented flage") - in_aseg = File( - argstr="-aseg %s", exists=True, desc="Input segmentation file") + white_only = traits.Bool(argstr="-whiteonly", desc="Undocumented flag") + in_aseg = File(argstr="-aseg %s", exists=True, desc="Input segmentation file") in_T1 = File(argstr="-T1 %s", exists=True, desc="Input brain or T1 file") mgz = traits.Bool( argstr="-mgz", - desc= - "No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu" + desc="No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) noaparc = traits.Bool( argstr="-noaparc", - xor=['in_label'], - desc= - "No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu" + xor=["in_label"], + desc="No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) maximum = traits.Float( - argstr="-max %.1f", - desc="No documentation (used for longitudinal processing)") + argstr="-max %.1f", desc="No documentation (used for longitudinal processing)" + ) longitudinal = traits.Bool( - argstr="-long", - desc="No documentation (used for longitudinal processing)") + argstr="-long", desc="No documentation (used for longitudinal processing)" + ) white = traits.String(argstr="-white %s", desc="White surface name") copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the input files to the node " + "directory.") + desc="If running as a node, set this to True. " + "This will copy the input files to the node directory." + ) class MakeSurfacesOutputSpec(TraitedSpec): - out_white = File( - exists=False, desc="Output white matter hemisphere surface") + out_white = File(exists=False, desc="Output white matter hemisphere surface") out_curv = File(exists=False, desc="Output curv file for MakeSurfaces") out_area = File(exists=False, desc="Output area file for MakeSurfaces") out_cortex = File(exists=False, desc="Output cortex file for MakeSurfaces") out_pial = File(exists=False, desc="Output pial surface for MakeSurfaces") - out_thickness = File( - exists=False, desc="Output thickness file for MakeSurfaces") + out_thickness = File(exists=False, desc="Output thickness file for MakeSurfaces") class MakeSurfaces(FSCommand): @@ -2603,106 +2774,116 @@ class MakeSurfaces(FSCommand): 'mris_make_surfaces -T1 T1.mgz -orig pial -orig_pial pial 10335 lh' """ - _cmd = 'mris_make_surfaces' + _cmd = "mris_make_surfaces" input_spec = MakeSurfacesInputSpec output_spec = MakeSurfacesOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.in_wm, folder="mri", basename="wm.mgz") copy2subjdir( - self, self.inputs.in_wm, folder='mri', basename='wm.mgz') + self, self.inputs.in_filled, folder="mri", basename="filled.mgz" + ) copy2subjdir( self, - self.inputs.in_filled, - folder='mri', - basename='filled.mgz') - copy2subjdir(self, self.inputs.in_white, 'surf', - '{0}.white'.format(self.inputs.hemisphere)) + self.inputs.in_white, + "surf", + f"{self.inputs.hemisphere}.white", + ) for originalfile in [self.inputs.in_aseg, self.inputs.in_T1]: - copy2subjdir(self, originalfile, folder='mri') + copy2subjdir(self, originalfile, folder="mri") for originalfile in [ - self.inputs.orig_white, self.inputs.orig_pial, - self.inputs.in_orig + self.inputs.orig_white, + self.inputs.orig_pial, 
+ self.inputs.in_orig, ]: - copy2subjdir(self, originalfile, folder='surf') + copy2subjdir(self, originalfile, folder="surf") if isdefined(self.inputs.in_label): - copy2subjdir(self, self.inputs.in_label, 'label', - '{0}.aparc.annot'.format(self.inputs.hemisphere)) + copy2subjdir( + self, + self.inputs.in_label, + "label", + f"{self.inputs.hemisphere}.aparc.annot", + ) else: os.makedirs( - os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label')) - return super(MakeSurfaces, self).run(**inputs) + os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) + ) + return super().run(**inputs) def _format_arg(self, name, spec, value): - if name in ['in_T1', 'in_aseg']: + if name in ["in_T1", "in_aseg"]: # These inputs do not take full paths as inputs or even basenames basename = os.path.basename(value) - # whent the -mgz flag is specified, it assumes the mgz extension + # when the -mgz flag is specified, it assumes the mgz extension if self.inputs.mgz: prefix = os.path.splitext(basename)[0] else: prefix = basename - if prefix == 'aseg': + if prefix == "aseg": return # aseg is already the default return spec.argstr % prefix - elif name in ['orig_white', 'orig_pial']: + elif name in ["orig_white", "orig_pial"]: # these inputs do take full file paths or even basenames basename = os.path.basename(value) - suffix = basename.split('.')[1] + suffix = basename.split(".")[1] return spec.argstr % suffix - elif name == 'in_orig': - if value.endswith('lh.orig') or value.endswith('rh.orig'): - # {lh,rh}.orig inputs are not sepcified on command line + elif name == "in_orig": + if value.endswith(("lh.orig", "rh.orig")): + # {lh,rh}.orig inputs are not specified on command line return else: # if the input orig file is different than lh.orig or rh.orig # these inputs do take full file paths or even basenames basename = os.path.basename(value) - suffix = basename.split('.')[1] + suffix = basename.split(".")[1] return spec.argstr % suffix - return super(MakeSurfaces, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() # Outputs are saved in the surf directory - dest_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'surf') + dest_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "surf" + ) # labels are saved in the label directory - label_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label') + label_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) if not self.inputs.no_white: outputs["out_white"] = os.path.join( - dest_dir, - str(self.inputs.hemisphere) + '.white') + dest_dir, str(self.inputs.hemisphere) + ".white" + ) # The curv and area files must have the hemisphere names as a prefix outputs["out_curv"] = os.path.join( - dest_dir, - str(self.inputs.hemisphere) + '.curv') + dest_dir, str(self.inputs.hemisphere) + ".curv" + ) outputs["out_area"] = os.path.join( - dest_dir, - str(self.inputs.hemisphere) + '.area') + dest_dir, str(self.inputs.hemisphere) + ".area" + ) # Something determines when a pial surface and thickness file is generated # but documentation doesn't say what. 
# The orig_pial input is just a guess - if isdefined(self.inputs.orig_pial) or self.inputs.white == 'NOWRITE': + if isdefined(self.inputs.orig_pial) or self.inputs.white == "NOWRITE": outputs["out_curv"] = outputs["out_curv"] + ".pial" outputs["out_area"] = outputs["out_area"] + ".pial" outputs["out_pial"] = os.path.join( - dest_dir, - str(self.inputs.hemisphere) + '.pial') + dest_dir, str(self.inputs.hemisphere) + ".pial" + ) outputs["out_thickness"] = os.path.join( - dest_dir, - str(self.inputs.hemisphere) + '.thickness') + dest_dir, str(self.inputs.hemisphere) + ".thickness" + ) else: # when a pial surface is generated, the cortex label file is not # generated outputs["out_cortex"] = os.path.join( - label_dir, - str(self.inputs.hemisphere) + '.cortex.label') + label_dir, str(self.inputs.hemisphere) + ".cortex.label" + ) return outputs @@ -2713,26 +2894,25 @@ class CurvatureInputSpec(FSTraitedSpec): mandatory=True, exists=True, copyfile=True, - desc="Input file for Curvature") + desc="Input file for Curvature", + ) # optional - threshold = traits.Float( - argstr="-thresh %.3f", desc="Undocumented input threshold") + threshold = traits.Float(argstr="-thresh %.3f", desc="Undocumented input threshold") n = traits.Bool(argstr="-n", desc="Undocumented boolean flag") averages = traits.Int( argstr="-a %d", - desc= - "Perform this number iterative averages of curvature measure before saving" + desc="Perform this number of iterative averages of curvature measure before saving", ) save = traits.Bool( argstr="-w", - desc= - "Save curvature files (will only generate screen output without this option)" + desc="Save curvature files (will only generate screen output without this option)", ) - distances = traits.Tuple( + distances = Tuple( traits.Int, traits.Int, argstr="-distances %d %d", - desc="Undocumented input integer distances") + desc="Undocumented input integer distances", + ) copy_input = traits.Bool(desc="Copy input file to current directory") @@ -2757,16 +2937,16 @@ class Curvature(FSCommand): 'mris_curvature -w lh.pial' """ - _cmd = 'mris_curvature' + _cmd = "mris_curvature" input_spec = CurvatureInputSpec output_spec = CurvatureOutputSpec def _format_arg(self, name, spec, value): if self.inputs.copy_input: - if name == 'in_file': + if name == "in_file": basename = os.path.basename(value) return spec.argstr % basename - return super(Curvature, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -2774,60 +2954,65 @@ def _list_outputs(self): in_file = os.path.basename(self.inputs.in_file) else: in_file = self.inputs.in_file - outputs["out_mean"] = os.path.abspath(in_file) + '.H' - outputs["out_gauss"] = os.path.abspath(in_file) + '.K' + outputs["out_mean"] = os.path.abspath(in_file) + ".H" + outputs["out_gauss"] = os.path.abspath(in_file) + ".K" return outputs class CurvatureStatsInputSpec(FSTraitedSpec): surface = File( - argstr="-F %s", - exists=True, - desc="Specify surface file for CurvatureStats") + argstr="-F %s", exists=True, desc="Specify surface file for CurvatureStats" + ) curvfile1 = File( argstr="%s", position=-2, mandatory=True, exists=True, - desc="Input file for CurvatureStats") + desc="Input file for CurvatureStats", + ) curvfile2 = File( argstr="%s", position=-1, mandatory=True, exists=True, - desc="Input file for CurvatureStats") + desc="Input file for CurvatureStats", + ) hemisphere = traits.Enum( - 'lh', - 'rh', + "lh", + "rh", position=-3, argstr="%s", mandatory=True, - 
desc="Hemisphere being processed") + desc="Hemisphere being processed", + ) subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, position=-4, argstr="%s", mandatory=True, - desc="Subject being processed") + desc="Subject being processed", + ) out_file = File( argstr="-o %s", exists=False, - name_source=['hemisphere'], - name_template='%s.curv.stats', + name_source=["hemisphere"], + name_template="%s.curv.stats", hash_files=False, - desc="Output curvature stats file") + desc="Output curvature stats file", + ) # optional min_max = traits.Bool( - argstr="-m", - desc="Output min / max information for the processed curvature.") + argstr="-m", desc="Output min / max information for the processed curvature." + ) values = traits.Bool( - argstr="-G", desc="Triggers a series of derived curvature values") - write = traits.Bool( - argstr="--writeCurvatureFiles", desc="Write curvature files") + argstr="-G", desc="Triggers a series of derived curvature values" + ) + write = traits.Bool(argstr="--writeCurvatureFiles", desc="Write curvature files") copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the input files to the node " + "directory.") + desc="If running as a node, set this to True. " + "This will copy the input files to the node directory." + ) class CurvatureStatsOutputSpec(TraitedSpec): @@ -2873,15 +3058,15 @@ class CurvatureStats(FSCommand): 'mris_curvature_stats -m -o lh.curv.stats -F pial -G --writeCurvatureFiles subject_id lh pial pial' """ - _cmd = 'mris_curvature_stats' + _cmd = "mris_curvature_stats" input_spec = CurvatureStatsInputSpec output_spec = CurvatureStatsOutputSpec def _format_arg(self, name, spec, value): - if name in ['surface', 'curvfile1', 'curvfile2']: - prefix = os.path.basename(value).split('.')[1] + if name in ["surface", "curvfile1", "curvfile2"]: + prefix = os.path.basename(value).split(".")[1] return spec.argstr % prefix - return super(CurvatureStats, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -2891,43 +3076,37 @@ def _list_outputs(self): def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir - copy2subjdir(self, self.inputs.surface, 'surf') - copy2subjdir(self, self.inputs.curvfile1, 'surf') - copy2subjdir(self, self.inputs.curvfile2, 'surf') - return super(CurvatureStats, self).run(**inputs) + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.surface, "surf") + copy2subjdir(self, self.inputs.curvfile1, "surf") + copy2subjdir(self, self.inputs.curvfile2, "surf") + return super().run(**inputs) class JacobianInputSpec(FSTraitedSpec): # required in_origsurf = File( - argstr="%s", - position=-3, - mandatory=True, - exists=True, - desc="Original surface") + argstr="%s", position=-3, mandatory=True, exists=True, desc="Original surface" + ) in_mappedsurf = File( - argstr="%s", - position=-2, - mandatory=True, - exists=True, - desc="Mapped surface") + argstr="%s", position=-2, mandatory=True, exists=True, desc="Mapped surface" + ) # optional out_file = File( argstr="%s", exists=False, position=-1, - name_source=['in_origsurf'], + name_source=["in_origsurf"], hash_files=False, - name_template='%s.jacobian', + name_template="%s.jacobian", keep_extension=False, - desc="Output Jacobian of the surface mapping") + desc="Output 
Jacobian of the surface mapping", + ) class JacobianOutputSpec(TraitedSpec): - out_file = File( - exists=False, desc="Output Jacobian of the surface mapping") + out_file = File(exists=False, desc="Output Jacobian of the surface mapping") class Jacobian(FSCommand): @@ -2944,49 +3123,45 @@ class Jacobian(FSCommand): 'mris_jacobian lh.pial lh.pial lh.jacobian' """ - _cmd = 'mris_jacobian' + _cmd = "mris_jacobian" input_spec = JacobianInputSpec output_spec = JacobianOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MRIsCalcInputSpec(FSTraitedSpec): # required in_file1 = File( - argstr="%s", - position=-3, - mandatory=True, - exists=True, - desc="Input file 1") + argstr="%s", position=-3, mandatory=True, exists=True, desc="Input file 1" + ) action = traits.String( argstr="%s", position=-2, mandatory=True, - desc="Action to perform on input file(s)") + desc="Action to perform on input file(s)", + ) out_file = File( - argstr="-o %s", mandatory=True, desc="Output file after calculation") + argstr="-o %s", mandatory=True, desc="Output file after calculation" + ) # optional in_file2 = File( argstr="%s", exists=True, position=-1, - xor=['in_float', 'in_int'], - desc="Input file 2") + xor=["in_float", "in_int"], + desc="Input file 2", + ) in_float = traits.Float( - argstr="%f", - position=-1, - xor=['in_file2', 'in_int'], - desc="Input float") + argstr="%f", position=-1, xor=["in_file2", "in_int"], desc="Input float" + ) in_int = traits.Int( - argstr="%d", - position=-1, - xor=['in_file2', 'in_float'], - desc="Input integer") + argstr="%d", position=-1, xor=["in_file2", "in_float"], desc="Input integer" + ) class MRIsCalcOutputSpec(TraitedSpec): @@ -3018,70 +3193,73 @@ class MRIsCalc(FSCommand): 'mris_calc -o lh.area.mid lh.area add lh.area.pial' """ - _cmd = 'mris_calc' + _cmd = "mris_calc" input_spec = MRIsCalcInputSpec output_spec = MRIsCalcOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class VolumeMaskInputSpec(FSTraitedSpec): left_whitelabel = traits.Int( - argstr="--label_left_white %d", - mandatory=True, - desc="Left white matter label") + argstr="--label_left_white %d", mandatory=True, desc="Left white matter label" + ) left_ribbonlabel = traits.Int( argstr="--label_left_ribbon %d", mandatory=True, - desc="Left cortical ribbon label") + desc="Left cortical ribbon label", + ) right_whitelabel = traits.Int( - argstr="--label_right_white %d", - mandatory=True, - desc="Right white matter label") + argstr="--label_right_white %d", mandatory=True, desc="Right white matter label" + ) right_ribbonlabel = traits.Int( argstr="--label_right_ribbon %d", mandatory=True, - desc="Right cortical ribbon label") - lh_pial = File( - mandatory=True, exists=True, desc="Implicit input left pial surface") + desc="Right cortical ribbon label", + ) + lh_pial = File(mandatory=True, exists=True, desc="Implicit input left pial surface") rh_pial = File( - mandatory=True, exists=True, desc="Implicit input right pial surface") + mandatory=True, exists=True, desc="Implicit input right pial surface" + ) lh_white = File( - mandatory=True, - exists=True, - desc="Implicit input left white matter surface") + mandatory=True, exists=True, desc="Implicit input left white matter surface" + ) rh_white = File( - 
mandatory=True, - exists=True, - desc="Implicit input right white matter surface") + mandatory=True, exists=True, desc="Implicit input right white matter surface" + ) aseg = File( exists=True, - xor=['in_aseg'], - desc="Implicit aseg.mgz segmentation. " + - "Specify a different aseg by using the 'in_aseg' input.") + xor=["in_aseg"], + desc="Implicit aseg.mgz segmentation. " + "Specify a different aseg by using the 'in_aseg' input.", + ) subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, position=-1, argstr="%s", mandatory=True, - desc="Subject being processed") + desc="Subject being processed", + ) # optional in_aseg = File( argstr="--aseg_name %s", exists=True, - xor=['aseg'], - desc="Input aseg file for VolumeMask") + xor=["aseg"], + desc="Input aseg file for VolumeMask", + ) save_ribbon = traits.Bool( argstr="--save_ribbon", - desc="option to save just the ribbon for the " + - "hemispheres in the format ?h.ribbon.mgz") + desc="option to save just the ribbon for the " + "hemispheres in the format ?h.ribbon.mgz", + ) copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the implicit input files to the " + "node directory.") + desc="If running as a node, set this to True. " + "This will copy the implicit input files to the node directory." + ) class VolumeMaskOutputSpec(TraitedSpec): @@ -3118,142 +3296,152 @@ class VolumeMask(FSCommand): 'mris_volmask --label_left_ribbon 3 --label_left_white 2 --label_right_ribbon 42 --label_right_white 41 --save_ribbon 10335' """ - _cmd = 'mris_volmask' + _cmd = "mris_volmask" input_spec = VolumeMaskInputSpec output_spec = VolumeMaskOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir - copy2subjdir(self, self.inputs.lh_pial, 'surf', 'lh.pial') - copy2subjdir(self, self.inputs.rh_pial, 'surf', 'rh.pial') - copy2subjdir(self, self.inputs.lh_white, 'surf', 'lh.white') - copy2subjdir(self, self.inputs.rh_white, 'surf', 'rh.white') - copy2subjdir(self, self.inputs.in_aseg, 'mri') - copy2subjdir(self, self.inputs.aseg, 'mri', 'aseg.mgz') + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial") + copy2subjdir(self, self.inputs.rh_pial, "surf", "rh.pial") + copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white") + copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white") + copy2subjdir(self, self.inputs.in_aseg, "mri") + copy2subjdir(self, self.inputs.aseg, "mri", "aseg.mgz") - return super(VolumeMask, self).run(**inputs) + return super().run(**inputs) def _format_arg(self, name, spec, value): - if name == 'in_aseg': - return spec.argstr % os.path.basename(value).rstrip('.mgz') - return super(VolumeMask, self)._format_arg(name, spec, value) + if name == "in_aseg": + return spec.argstr % os.path.basename(value).rstrip(".mgz") + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() - out_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'mri') - outputs["out_ribbon"] = os.path.join(out_dir, 'ribbon.mgz') + out_dir = os.path.join(self.inputs.subjects_dir, self.inputs.subject_id, "mri") + outputs["out_ribbon"] = os.path.join(out_dir, "ribbon.mgz") if self.inputs.save_ribbon: - outputs["rh_ribbon"] = os.path.join(out_dir, 'rh.ribbon.mgz') - outputs["lh_ribbon"] = os.path.join(out_dir, 'lh.ribbon.mgz') + 
outputs["rh_ribbon"] = os.path.join(out_dir, "rh.ribbon.mgz") + outputs["lh_ribbon"] = os.path.join(out_dir, "lh.ribbon.mgz") return outputs class ParcellationStatsInputSpec(FSTraitedSpec): # required subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, position=-3, argstr="%s", mandatory=True, - desc="Subject being processed") + desc="Subject being processed", + ) hemisphere = traits.Enum( - 'lh', - 'rh', + "lh", + "rh", position=-2, argstr="%s", mandatory=True, - desc="Hemisphere being processed") + desc="Hemisphere being processed", + ) # implicit wm = File( - mandatory=True, - exists=True, - desc="Input file must be /mri/wm.mgz") + mandatory=True, exists=True, desc="Input file must be /mri/wm.mgz" + ) lh_white = File( mandatory=True, exists=True, - desc="Input file must be /surf/lh.white") + desc="Input file must be /surf/lh.white", + ) rh_white = File( mandatory=True, exists=True, - desc="Input file must be /surf/rh.white") + desc="Input file must be /surf/rh.white", + ) lh_pial = File( - mandatory=True, - exists=True, - desc="Input file must be /surf/lh.pial") + mandatory=True, exists=True, desc="Input file must be /surf/lh.pial" + ) rh_pial = File( - mandatory=True, - exists=True, - desc="Input file must be /surf/rh.pial") + mandatory=True, exists=True, desc="Input file must be /surf/rh.pial" + ) transform = File( mandatory=True, exists=True, - desc="Input file must be /mri/transforms/talairach.xfm") + desc="Input file must be /mri/transforms/talairach.xfm", + ) thickness = File( mandatory=True, exists=True, - desc="Input file must be /surf/?h.thickness") + desc="Input file must be /surf/?h.thickness", + ) brainmask = File( mandatory=True, exists=True, - desc="Input file must be /mri/brainmask.mgz") + desc="Input file must be /mri/brainmask.mgz", + ) aseg = File( mandatory=True, exists=True, - desc="Input file must be /mri/aseg.presurf.mgz") + desc="Input file must be /mri/aseg.presurf.mgz", + ) ribbon = File( mandatory=True, exists=True, - desc="Input file must be /mri/ribbon.mgz") - cortex_label = File( - exists=True, desc="implicit input file {hemi}.cortex.label") + desc="Input file must be /mri/ribbon.mgz", + ) + cortex_label = File(exists=True, desc="implicit input file {hemi}.cortex.label") # optional surface = traits.String( - position=-1, argstr="%s", desc="Input surface (e.g. 'white')") + position=-1, argstr="%s", desc="Input surface (e.g. 
'white')" + ) mgz = traits.Bool(argstr="-mgz", desc="Look for mgz files") - in_cortex = traits.File( - argstr="-cortex %s", exists=True, desc="Input cortex label") - in_annotation = traits.File( + in_cortex = File(argstr="-cortex %s", exists=True, desc="Input cortex label") + in_annotation = File( argstr="-a %s", exists=True, - xor=['in_label'], - desc= - "compute properties for each label in the annotation file separately") - in_label = traits.File( + xor=["in_label"], + desc="compute properties for each label in the annotation file separately", + ) + in_label = File( argstr="-l %s", exists=True, - xor=['in_annotatoin', 'out_color'], - desc="limit calculations to specified label") + xor=["in_annotatoin", "out_color"], + desc="limit calculations to specified label", + ) tabular_output = traits.Bool(argstr="-b", desc="Tabular output") - out_table = traits.File( + out_table = File( argstr="-f %s", exists=False, genfile=True, - requires=['tabular_output'], - desc="Table output to tablefile") - out_color = traits.File( + requires=["tabular_output"], + desc="Table output to tablefile", + ) + out_color = File( argstr="-c %s", exists=False, genfile=True, - xor=['in_label'], - desc="Output annotation files's colortable to text file") + xor=["in_label"], + desc="Output annotation files's colortable to text file", + ) copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the input files to the node " + "directory.") + desc="If running as a node, set this to True. " + "This will copy the input files to the node directory." + ) th3 = traits.Bool( argstr="-th3", requires=["cortex_label"], - desc="turns on new vertex-wise volume calc for mris_anat_stats") + desc="turns on new vertex-wise volume calc for mris_anat_stats", + ) class ParcellationStatsOutputSpec(TraitedSpec): out_table = File(exists=False, desc="Table output to tablefile") out_color = File( - exists=False, desc="Output annotation files's colortable to text file") + exists=False, desc="Output annotation files's colortable to text file" + ) class ParcellationStats(FSCommand): @@ -3284,35 +3472,47 @@ class ParcellationStats(FSCommand): 'mris_anatomical_stats -c test.ctab -f lh.test.stats 10335 lh white' """ - _cmd = 'mris_anatomical_stats' + _cmd = "mris_anatomical_stats" input_spec = ParcellationStatsInputSpec output_spec = ParcellationStatsOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir - copy2subjdir(self, self.inputs.lh_white, 'surf', 'lh.white') - copy2subjdir(self, self.inputs.lh_pial, 'surf', 'lh.pial') - copy2subjdir(self, self.inputs.rh_white, 'surf', 'rh.white') - copy2subjdir(self, self.inputs.rh_pial, 'surf', 'rh.pial') - copy2subjdir(self, self.inputs.wm, 'mri', 'wm.mgz') - copy2subjdir(self, self.inputs.transform, - os.path.join('mri', 'transforms'), 'talairach.xfm') - copy2subjdir(self, self.inputs.brainmask, 'mri', 'brainmask.mgz') - copy2subjdir(self, self.inputs.aseg, 'mri', 'aseg.presurf.mgz') - copy2subjdir(self, self.inputs.ribbon, 'mri', 'ribbon.mgz') - copy2subjdir(self, self.inputs.thickness, 'surf', - '{0}.thickness'.format(self.inputs.hemisphere)) + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white") + copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial") + copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white") + copy2subjdir(self, 
self.inputs.rh_pial, "surf", "rh.pial")
+            copy2subjdir(self, self.inputs.wm, "mri", "wm.mgz")
+            copy2subjdir(
+                self,
+                self.inputs.transform,
+                os.path.join("mri", "transforms"),
+                "talairach.xfm",
+            )
+            copy2subjdir(self, self.inputs.brainmask, "mri", "brainmask.mgz")
+            copy2subjdir(self, self.inputs.aseg, "mri", "aseg.presurf.mgz")
+            copy2subjdir(self, self.inputs.ribbon, "mri", "ribbon.mgz")
+            copy2subjdir(
+                self,
+                self.inputs.thickness,
+                "surf",
+                f"{self.inputs.hemisphere}.thickness",
+            )
             if isdefined(self.inputs.cortex_label):
-                copy2subjdir(self, self.inputs.cortex_label, 'label',
-                             '{0}.cortex.label'.format(self.inputs.hemisphere))
+                copy2subjdir(
+                    self,
+                    self.inputs.cortex_label,
+                    "label",
+                    f"{self.inputs.hemisphere}.cortex.label",
+                )
         createoutputdirs(self._list_outputs())
-        return super(ParcellationStats, self).run(**inputs)
+        return super().run(**inputs)
 
     def _gen_filename(self, name):
-        if name in ['out_table', 'out_color']:
+        if name in ["out_table", "out_color"]:
             return self._list_outputs()[name]
         return None
 
@@ -3322,103 +3522,109 @@ def _list_outputs(self):
             outputs["out_table"] = os.path.abspath(self.inputs.out_table)
         else:
             # subject stats directory
-            stats_dir = os.path.join(self.inputs.subjects_dir,
-                                     self.inputs.subject_id, 'stats')
+            stats_dir = os.path.join(
+                self.inputs.subjects_dir, self.inputs.subject_id, "stats"
+            )
             if isdefined(self.inputs.in_annotation):
                 # if out_table is not defined just tag .stats on the end
                 # instead of .annot
-                if self.inputs.surface == 'pial':
-                    basename = os.path.basename(
-                        self.inputs.in_annotation).replace(
-                            '.annot', '.pial.stats')
+                if self.inputs.surface == "pial":
+                    basename = os.path.basename(self.inputs.in_annotation).replace(
+                        ".annot", ".pial.stats"
+                    )
                 else:
-                    basename = os.path.basename(
-                        self.inputs.in_annotation).replace('.annot', '.stats')
+                    basename = os.path.basename(self.inputs.in_annotation).replace(
+                        ".annot", ".stats"
+                    )
             elif isdefined(self.inputs.in_label):
                 # if out_table is not defined just tag .stats on the end
                 # instead of .label
-                if self.inputs.surface == 'pial':
+                if self.inputs.surface == "pial":
                     basename = os.path.basename(self.inputs.in_label).replace(
-                        '.label', '.pial.stats')
+                        ".label", ".pial.stats"
+                    )
                 else:
                     basename = os.path.basename(self.inputs.in_label).replace(
-                        '.label', '.stats')
+                        ".label", ".stats"
+                    )
             else:
-                basename = str(self.inputs.hemisphere) + '.aparc.annot.stats'
+                basename = str(self.inputs.hemisphere) + ".aparc.annot.stats"
             outputs["out_table"] = os.path.join(stats_dir, basename)
 
         if isdefined(self.inputs.out_color):
             outputs["out_color"] = os.path.abspath(self.inputs.out_color)
         else:
            # subject label directory
-            out_dir = os.path.join(self.inputs.subjects_dir,
-                                   self.inputs.subject_id, 'label')
+            out_dir = os.path.join(
+                self.inputs.subjects_dir, self.inputs.subject_id, "label"
+            )
             if isdefined(self.inputs.in_annotation):
                 # find the annotation name (if it exists)
                 basename = os.path.basename(self.inputs.in_annotation)
-                for item in ['lh.', 'rh.', 'aparc.', 'annot']:
-                    basename = basename.replace(item, '')
+                for item in ["lh.", "rh.", "aparc.", "annot"]:
+                    basename = basename.replace(item, "")
                 annot = basename
                 # if the out_color table is not defined, one with the annotation
                 # name will be created
-                if 'BA' in annot:
-                    outputs["out_color"] = os.path.join(
-                        out_dir, annot + 'ctab')
+                if "BA" in annot:
+                    outputs["out_color"] = os.path.join(out_dir, annot + "ctab")
                 else:
                     outputs["out_color"] = os.path.join(
-                        out_dir, 'aparc.annot.' + annot + 'ctab')
+                        out_dir, "aparc.annot." + annot + "ctab"
+                    )
             else:
-                outputs["out_color"] = os.path.join(out_dir,
-                                                    'aparc.annot.ctab')
+                outputs["out_color"] = os.path.join(out_dir, "aparc.annot.ctab")
         return outputs
 
 
 class ContrastInputSpec(FSTraitedSpec):
     # required
     subject_id = traits.String(
-        'subject_id',
+        "subject_id",
         argstr="--s %s",
         usedefault=True,
         mandatory=True,
-        desc="Subject being processed")
+        desc="Subject being processed",
+    )
     hemisphere = traits.Enum(
-        'lh',
-        'rh',
+        "lh",
+        "rh",
         argstr="--%s-only",
         mandatory=True,
-        desc="Hemisphere being processed")
+        desc="Hemisphere being processed",
+    )
     # implicit
     thickness = File(
         mandatory=True,
         exists=True,
-        desc="Input file must be /surf/?h.thickness")
+        desc="Input file must be /surf/?h.thickness",
+    )
     white = File(
         mandatory=True,
         exists=True,
-        desc="Input file must be /surf/.white")
-    annotation = traits.File(
+        desc="Input file must be /surf/.white",
+    )
+    annotation = File(
         mandatory=True,
         exists=True,
-        desc=
-        "Input annotation file must be /label/.aparc.annot"
+        desc="Input annotation file must be /label/.aparc.annot",
     )
-    cortex = traits.File(
+    cortex = File(
         mandatory=True,
         exists=True,
-        desc=
-        "Input cortex label must be /label/.cortex.label"
+        desc="Input cortex label must be /label/.cortex.label",
     )
-    orig = File(
-        exists=True, mandatory=True, desc="Implicit input file mri/orig.mgz")
+    orig = File(exists=True, mandatory=True, desc="Implicit input file mri/orig.mgz")
     rawavg = File(
-        exists=True, mandatory=True, desc="Implicit input file mri/rawavg.mgz")
+        exists=True, mandatory=True, desc="Implicit input file mri/rawavg.mgz"
+    )
     copy_inputs = traits.Bool(
-        desc="If running as a node, set this to True." +
-        "This will copy the input files to the node " + "directory.")
+        desc="If running as a node, set this to True. "
+        "This will copy the input files to the node directory."
+    )
 
 
 class ContrastOutputSpec(TraitedSpec):
-    out_contrast = File(
-        exists=False, desc="Output contrast file from Contrast")
+    out_contrast = File(exists=False, desc="Output contrast file from Contrast")
     out_stats = File(exists=False, desc="Output stats file from Contrast")
     out_log = File(exists=True, desc="Output log from Contrast")
 
@@ -3443,42 +3649,36 @@ class Contrast(FSCommand):
     'pctsurfcon --lh-only --s 10335'
     """
 
-    _cmd = 'pctsurfcon'
+    _cmd = "pctsurfcon"
     input_spec = ContrastInputSpec
     output_spec = ContrastOutputSpec
 
     def run(self, **inputs):
         if self.inputs.copy_inputs:
             self.inputs.subjects_dir = os.getcwd()
-            if 'subjects_dir' in inputs:
-                inputs['subjects_dir'] = self.inputs.subjects_dir
+            if "subjects_dir" in inputs:
+                inputs["subjects_dir"] = self.inputs.subjects_dir
             hemi = self.inputs.hemisphere
-            copy2subjdir(self, self.inputs.annotation, 'label',
-                         '{0}.aparc.annot'.format(hemi))
-            copy2subjdir(self, self.inputs.cortex, 'label',
-                         '{0}.cortex.label'.format(hemi))
-            copy2subjdir(self, self.inputs.white, 'surf',
-                         '{0}.white'.format(hemi))
-            copy2subjdir(self, self.inputs.thickness, 'surf',
-                         '{0}.thickness'.format(hemi))
-            copy2subjdir(self, self.inputs.orig, 'mri', 'orig.mgz')
-            copy2subjdir(self, self.inputs.rawavg, 'mri', 'rawavg.mgz')
+            copy2subjdir(self, self.inputs.annotation, "label", f"{hemi}.aparc.annot")
+            copy2subjdir(self, self.inputs.cortex, "label", f"{hemi}.cortex.label")
+            copy2subjdir(self, self.inputs.white, "surf", f"{hemi}.white")
+            copy2subjdir(self, self.inputs.thickness, "surf", f"{hemi}.thickness")
+            copy2subjdir(self, self.inputs.orig, "mri", "orig.mgz")
+            copy2subjdir(self, self.inputs.rawavg, "mri", "rawavg.mgz")
         # need to create output directories
         createoutputdirs(self._list_outputs())
-        return super(Contrast, self).run(**inputs)
+        return super().run(**inputs)
 
     def _list_outputs(self):
         outputs = self._outputs().get()
-        subject_dir = os.path.join(self.inputs.subjects_dir,
-                                   self.inputs.subject_id)
+        subject_dir = os.path.join(self.inputs.subjects_dir, self.inputs.subject_id)
         outputs["out_contrast"] = os.path.join(
-            subject_dir, 'surf',
-            str(self.inputs.hemisphere) + '.w-g.pct.mgh')
+            subject_dir, "surf", str(self.inputs.hemisphere) + ".w-g.pct.mgh"
+        )
         outputs["out_stats"] = os.path.join(
-            subject_dir, 'stats',
-            str(self.inputs.hemisphere) + '.w-g.pct.stats')
-        outputs["out_log"] = os.path.join(subject_dir, 'scripts',
-                                          'pctsurfcon.log')
+            subject_dir, "stats", str(self.inputs.hemisphere) + ".w-g.pct.stats"
+        )
+        outputs["out_log"] = os.path.join(subject_dir, "scripts", "pctsurfcon.log")
         return outputs
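The ``copy_inputs`` convention above recurs throughout these FreeSurfer interfaces: when set, ``run()`` re-roots ``subjects_dir`` at the working directory and stages each implicit input into the layout the command expects. A minimal usage sketch (not part of the patch; file names are placeholders and must exist when actually run):

    # Hypothetical illustration of the copy_inputs pattern refactored above.
    from nipype.interfaces.freesurfer import Contrast

    con = Contrast()
    con.inputs.subject_id = "10335"
    con.inputs.hemisphere = "lh"
    con.inputs.copy_inputs = True              # stage inputs under os.getcwd()
    con.inputs.annotation = "lh.aparc.annot"   # copied to label/lh.aparc.annot
    con.inputs.cortex = "lh.cortex.label"      # copied to label/lh.cortex.label
    con.inputs.white = "lh.white"              # copied to surf/lh.white
    con.inputs.thickness = "lh.thickness"      # copied to surf/lh.thickness
    con.inputs.orig = "orig.mgz"               # copied to mri/orig.mgz
    con.inputs.rawavg = "rawavg.mgz"           # copied to mri/rawavg.mgz
    # con.run() would then invoke: pctsurfcon --lh-only --s 10335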
@@ -3488,34 +3688,35 @@ class RelabelHypointensitiesInputSpec(FSTraitedSpec):
         mandatory=True,
         exists=True,
         copyfile=True,
-        desc="Implicit input file must be lh.white")
+        desc="Implicit input file must be lh.white",
+    )
     rh_white = File(
         mandatory=True,
         exists=True,
         copyfile=True,
-        desc="Implicit input file must be rh.white")
+        desc="Implicit input file must be rh.white",
+    )
     aseg = File(
-        argstr="%s",
-        position=-3,
-        mandatory=True,
-        exists=True,
-        desc="Input aseg file")
-    surf_directory = traits.Directory(
-        '.',
+        argstr="%s", position=-3, mandatory=True, exists=True, desc="Input aseg file"
+    )
+    surf_directory = Directory(
+        ".",
         argstr="%s",
         position=-2,
         exists=True,
         usedefault=True,
-        desc="Directory containing lh.white and rh.white")
+        desc="Directory containing lh.white and rh.white",
+    )
     out_file = File(
         argstr="%s",
         position=-1,
         exists=False,
-        name_source=['aseg'],
-        name_template='%s.hypos.mgz',
+        name_source=["aseg"],
+        name_template="%s.hypos.mgz",
         hash_files=False,
         keep_extension=False,
-        desc="Output aseg file")
+        desc="Output aseg file",
+    )
 
 
 class RelabelHypointensitiesOutputSpec(TraitedSpec):
@@ -3538,7 +3739,7 @@ class RelabelHypointensities(FSCommand):
     'mri_relabel_hypointensities aseg.mgz . aseg.hypos.mgz'
     """
 
-    _cmd = 'mri_relabel_hypointensities'
+    _cmd = "mri_relabel_hypointensities"
     input_spec = RelabelHypointensitiesInputSpec
     output_spec = RelabelHypointensitiesOutputSpec
 
@@ -3551,76 +3752,84 @@ def _list_outputs(self):
 
 
 class Aparc2AsegInputSpec(FSTraitedSpec):
     # required
     subject_id = traits.String(
-        'subject_id',
+        "subject_id",
         argstr="--s %s",
         usedefault=True,
         mandatory=True,
-        desc="Subject being processed")
+        desc="Subject being processed",
+    )
     out_file = File(
         argstr="--o %s",
         exists=False,
         mandatory=True,
-        desc="Full path of file to save the output segmentation in")
+        desc="Full path of file to save the output segmentation in",
+    )
     # implicit
     lh_white = File(
         mandatory=True,
         exists=True,
-        desc="Input file must be /surf/lh.white")
+        desc="Input file must be /surf/lh.white",
+    )
     rh_white = File(
         mandatory=True,
         exists=True,
-        desc="Input file must be /surf/rh.white")
+        desc="Input file must be /surf/rh.white",
+    )
     lh_pial = File(
-        mandatory=True,
-        exists=True,
-        desc="Input file must be /surf/lh.pial")
+        mandatory=True, exists=True, desc="Input file must be /surf/lh.pial"
+    )
     rh_pial = File(
-        mandatory=True,
-        exists=True,
-        desc="Input file must be /surf/rh.pial")
+        mandatory=True, exists=True, desc="Input file must be /surf/rh.pial"
+    )
     lh_ribbon = File(
         mandatory=True,
         exists=True,
-        desc="Input file must be /mri/lh.ribbon.mgz")
+        desc="Input file must be /mri/lh.ribbon.mgz",
+    )
     rh_ribbon = File(
         mandatory=True,
         exists=True,
-        desc="Input file must be /mri/rh.ribbon.mgz")
+        desc="Input file must be /mri/rh.ribbon.mgz",
+    )
     ribbon = File(
         mandatory=True,
         exists=True,
-        desc="Input file must be /mri/ribbon.mgz")
+        desc="Input file must be /mri/ribbon.mgz",
+    )
     lh_annotation = File(
         mandatory=True,
         exists=True,
-        desc="Input file must be /label/lh.aparc.annot")
+        desc="Input file must be /label/lh.aparc.annot",
+    )
     rh_annotation = File(
         mandatory=True,
         exists=True,
-        desc="Input file must be /label/rh.aparc.annot")
+        desc="Input file must be /label/rh.aparc.annot",
+    )
     # optional
     filled = File(
-        exists=True,
-        desc="Implicit input filled file. Only required with FS v5.3.")
+        exists=True, desc="Implicit input filled file. Only required with FS v5.3."
+    )
     aseg = File(argstr="--aseg %s", exists=True, desc="Input aseg file")
     volmask = traits.Bool(argstr="--volmask", desc="Volume mask flag")
     ctxseg = File(argstr="--ctxseg %s", exists=True, desc="")
     label_wm = traits.Bool(
         argstr="--labelwm",
-        desc="""
-        For each voxel labeled as white matter in the aseg, re-assign
-        its label to be that of the closest cortical point if its
-        distance is less than dmaxctx
-        """)
-    hypo_wm = traits.Bool(
-        argstr="--hypo-as-wm", desc="Label hypointensities as WM")
+        desc="""\
+For each voxel labeled as white matter in the aseg, re-assign
+its label to be that of the closest cortical point if its
+distance is less than dmaxctx.""",
+    )
+    hypo_wm = traits.Bool(argstr="--hypo-as-wm", desc="Label hypointensities as WM")
     rip_unknown = traits.Bool(
-        argstr="--rip-unknown",
-        desc="Do not label WM based on 'unknown' corical label")
+        argstr="--rip-unknown", desc="Do not label WM based on 'unknown' cortical label"
+    )
     a2009s = traits.Bool(argstr="--a2009s", desc="Using the a2009s atlas")
     copy_inputs = traits.Bool(
-        desc="If running as a node, set this to True." +
-        "This will copy the input files to the node " + "directory.")
+        desc="If running as a node, set this to True. "
+        "This will copy the input files to the node directory."
+    )
 
 
 class Aparc2AsegOutputSpec(TraitedSpec):
@@ -3635,17 +3844,17 @@ class Aparc2Aseg(FSCommand):
     labeled as cortex (3 and 42) and assign it the label of the closest
     cortical vertex. If the voxel is not in the ribbon (as defined by mri/
     lh.ribbon and rh.ribbon), then the voxel is marked as unknown (0).
-    This can be turned off with --noribbon. The cortical parcellation is
+    This can be turned off with ``--noribbon``. The cortical parcellation is
     obtained from subject/label/hemi.aparc.annot which should be based on the
     curvature.buckner40.filled.desikan_killiany.gcs atlas. The aseg is
     obtained from subject/mri/aseg.mgz and should be based on the
     RB40_talairach_2005-07-20.gca atlas. If these atlases are used, then the
     segmentations can be viewed with tkmedit and the
-    FreeSurferColorLUT.txt color table found in $FREESURFER_HOME. These
-    are the default atlases used by recon-all.
+    FreeSurferColorLUT.txt color table found in ``$FREESURFER_HOME``. These
+    are the default atlases used by ``recon-all``.
 
     Examples
-    ========
+    --------
     >>> from nipype.interfaces.freesurfer import Aparc2Aseg
     >>> aparc2aseg = Aparc2Aseg()
     >>> aparc2aseg.inputs.lh_white = 'lh.pial'
@@ -3662,40 +3871,41 @@ class Aparc2Aseg(FSCommand):
     >>> aparc2aseg.inputs.rip_unknown = True
     >>> aparc2aseg.cmdline # doctest: +SKIP
     'mri_aparc2aseg --labelwm --o aparc+aseg.mgz --rip-unknown --s subject_id'
+
     """
 
-    _cmd = 'mri_aparc2aseg'
+    _cmd = "mri_aparc2aseg"
     input_spec = Aparc2AsegInputSpec
     output_spec = Aparc2AsegOutputSpec
 
     def run(self, **inputs):
         if self.inputs.copy_inputs:
             self.inputs.subjects_dir = os.getcwd()
-            if 'subjects_dir' in inputs:
-                inputs['subjects_dir'] = self.inputs.subjects_dir
-            copy2subjdir(self, self.inputs.lh_white, 'surf', 'lh.white')
-            copy2subjdir(self, self.inputs.lh_pial, 'surf', 'lh.pial')
-            copy2subjdir(self, self.inputs.rh_white, 'surf', 'rh.white')
-            copy2subjdir(self, self.inputs.rh_pial, 'surf', 'rh.pial')
-            copy2subjdir(self, self.inputs.lh_ribbon, 'mri', 'lh.ribbon.mgz')
-            copy2subjdir(self, self.inputs.rh_ribbon, 'mri', 'rh.ribbon.mgz')
-            copy2subjdir(self, self.inputs.ribbon, 'mri', 'ribbon.mgz')
-            copy2subjdir(self, self.inputs.aseg, 'mri')
-            copy2subjdir(self, self.inputs.filled, 'mri', 'filled.mgz')
-            copy2subjdir(self, self.inputs.lh_annotation, 'label')
-            copy2subjdir(self, self.inputs.rh_annotation, 'label')
-
-        return super(Aparc2Aseg, self).run(**inputs)
+            if "subjects_dir" in inputs:
+                inputs["subjects_dir"] = self.inputs.subjects_dir
+            copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white")
+            copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial")
+            copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white")
+            copy2subjdir(self, self.inputs.rh_pial, "surf", "rh.pial")
+            copy2subjdir(self, self.inputs.lh_ribbon, "mri", "lh.ribbon.mgz")
+            copy2subjdir(self, self.inputs.rh_ribbon, "mri", "rh.ribbon.mgz")
+            copy2subjdir(self, self.inputs.ribbon, "mri", "ribbon.mgz")
+            copy2subjdir(self, self.inputs.aseg, "mri")
+            copy2subjdir(self, self.inputs.filled, "mri", "filled.mgz")
+            copy2subjdir(self, self.inputs.lh_annotation, "label")
+            copy2subjdir(self, self.inputs.rh_annotation, "label")
+
+        return super().run(**inputs)
 
     def _format_arg(self, name, spec, value):
-        if name == 'aseg':
+        if name == "aseg":
             # aseg does not take a full filename
-            basename = os.path.basename(value).replace('.mgz', '')
+            basename = os.path.basename(value).replace(".mgz", "")
             return spec.argstr % basename
-        elif name == 'out_file':
+        elif name == "out_file":
             return spec.argstr % os.path.abspath(value)
 
-        return super(Aparc2Aseg, self)._format_arg(name, spec, value)
+        return super()._format_arg(name, spec, value)
 
     def _list_outputs(self):
         outputs = self._outputs().get()
@@ -3706,10 +3916,8 @@ def _list_outputs(self):
 
 class Apas2AsegInputSpec(FSTraitedSpec):
     # required
     in_file = File(
-        argstr="--i %s",
-        mandatory=True,
-        exists=True,
-        desc="Input aparc+aseg.mgz")
+        argstr="--i %s", mandatory=True, exists=True, desc="Input aparc+aseg.mgz"
+    )
     out_file = File(argstr="--o %s", mandatory=True, desc="Output aseg file")
 
 
@@ -3725,16 +3933,17 @@ class Apas2Aseg(FSCommand):
     actual surface (this is not the case with aseg.mgz).
 
     Examples
-    ========
+    --------
     >>> from nipype.interfaces.freesurfer import Apas2Aseg
     >>> apas2aseg = Apas2Aseg()
     >>> apas2aseg.inputs.in_file = 'aseg.mgz'
     >>> apas2aseg.inputs.out_file = 'output.mgz'
     >>> apas2aseg.cmdline
     'apas2aseg --i aseg.mgz --o output.mgz'
+
     """
 
-    _cmd = 'apas2aseg'
+    _cmd = "apas2aseg"
     input_spec = Apas2AsegInputSpec
     output_spec = Apas2AsegOutputSpec
 
@@ -3750,67 +3959,81 @@ class MRIsExpandInputSpec(FSTraitedSpec):
     in_file = File(
         exists=True,
         mandatory=True,
-        argstr='%s',
+        argstr="%s",
         position=-3,
         copyfile=False,
-        desc='Surface to expand')
+        desc="Surface to expand",
+    )
     distance = traits.Float(
         mandatory=True,
-        argstr='%g',
+        argstr="%g",
         position=-2,
-        desc='Distance in mm or fraction of cortical thickness')
+        desc="Distance in mm or fraction of cortical thickness",
+    )
     out_name = traits.Str(
-        'expanded',
-        argstr='%s',
+        "expanded",
+        argstr="%s",
         position=-1,
         usedefault=True,
-        desc=('Output surface file\n'
-              'If no path, uses directory of `in_file`\n'
-              'If no path AND missing "lh." or "rh.", derive from `in_file`'))
+        desc=(
+            "Output surface file. "
+            "If no path, uses directory of ``in_file``. "
+            'If no path AND missing "lh." or "rh.", derive from ``in_file``'
+        ),
+    )
     thickness = traits.Bool(
-        argstr='-thickness',
-        desc='Expand by fraction of cortical thickness, not mm')
+        argstr="-thickness", desc="Expand by fraction of cortical thickness, not mm"
+    )
    thickness_name = traits.Str(
         argstr="-thickness_name %s",
         copyfile=False,
-        desc=('Name of thickness file (implicit: "thickness")\n'
-              'If no path, uses directory of `in_file`\n'
-              'If no path AND missing "lh." or "rh.", derive from `in_file`'))
+        desc=(
+            'Name of thickness file (implicit: "thickness")\n'
+            "If no path, uses directory of ``in_file``\n"
+            'If no path AND missing "lh." or "rh.", derive from ``in_file``'
+        ),
+    )
     pial = traits.Str(
-        argstr='-pial %s',
+        argstr="-pial %s",
         copyfile=False,
-        desc=('Name of pial file (implicit: "pial")\n'
-              'If no path, uses directory of `in_file`\n'
-              'If no path AND missing "lh." or "rh.", derive from `in_file`'))
+        desc=(
+            'Name of pial file (implicit: "pial")\n'
+            "If no path, uses directory of ``in_file``\n"
+            'If no path AND missing "lh." or "rh.", derive from ``in_file``'
+        ),
+    )
     sphere = traits.Str(
-        'sphere',
+        "sphere",
         copyfile=False,
         usedefault=True,
-        desc='WARNING: Do not change this trait')
-    spring = traits.Float(argstr='-S %g', desc="Spring term (implicit: 0.05)")
-    dt = traits.Float(argstr='-T %g', desc='dt (implicit: 0.25)')
+        desc="WARNING: Do not change this trait",
+    )
+    spring = traits.Float(argstr="-S %g", desc="Spring term (implicit: 0.05)")
+    dt = traits.Float(argstr="-T %g", desc="dt (implicit: 0.25)")
     write_iterations = traits.Int(
-        argstr='-W %d', desc='Write snapshots of expansion every N iterations')
+        argstr="-W %d", desc="Write snapshots of expansion every N iterations"
+    )
     smooth_averages = traits.Int(
-        argstr='-A %d',
-        desc='Smooth surface with N iterations after expansion')
+        argstr="-A %d", desc="Smooth surface with N iterations after expansion"
+    )
     nsurfaces = traits.Int(
-        argstr='-N %d', desc='Number of surfacces to write during expansion')
+        argstr="-N %d", desc="Number of surfaces to write during expansion"
+    )
     # # Requires dev version - Re-add when min_ver/max_ver support this
     # # https://github.com/freesurfer/freesurfer/blob/9730cb9/mris_expand/mris_expand.c
-    # navgs = traits.Tuple(
+    # navgs = Tuple(
     #     traits.Int, traits.Int,
     #     argstr='-navgs %d %d',
     #     desc=('Tuple of (n_averages, min_averages) parameters '
     #           '(implicit: (16, 0))'))
-    # target_intensity = traits.Tuple(
-    #     traits.Float, traits.File(exists=True),
+    # target_intensity = Tuple(
+    #     traits.Float, File(exists=True),
    #     argstr='-intensity %g %s',
    #     desc='Tuple of intensity and brain volume to crop to target intensity')
 
 
 class MRIsExpandOutputSpec(TraitedSpec):
-    out_file = File(desc='Output surface file')
+    out_file = File(desc="Output surface file")
 
 
 class MRIsExpand(FSSurfaceCommand):
@@ -3829,14 +4052,16 @@ class MRIsExpand(FSSurfaceCommand):
     >>> mris_expand.cmdline
     'mris_expand -thickness lh.white 0.5 graymid'
     """
-    _cmd = 'mris_expand'
+
+    _cmd = "mris_expand"
     input_spec = MRIsExpandInputSpec
     output_spec = MRIsExpandOutputSpec
 
     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['out_file'] = self._associated_file(self.inputs.in_file,
-                                                    self.inputs.out_name)
+        outputs["out_file"] = self._associated_file(
+            self.inputs.in_file, self.inputs.out_name
+        )
         return outputs
 
     def normalize_filenames(self):
@@ -3849,100 +4074,103 @@ def normalize_filenames(self):
 
         pial = self.inputs.pial
         if not isdefined(pial):
-            pial = 'pial'
+            pial = "pial"
         self.inputs.pial = self._associated_file(in_file, pial)
 
         if isdefined(self.inputs.thickness) and self.inputs.thickness:
             thickness_name = self.inputs.thickness_name
             if not isdefined(thickness_name):
-                thickness_name = 'thickness'
-            self.inputs.thickness_name = self._associated_file(
-                in_file, thickness_name)
+                thickness_name = "thickness"
+            self.inputs.thickness_name = self._associated_file(in_file, thickness_name)
 
         self.inputs.sphere = self._associated_file(in_file, self.inputs.sphere)
desc="input transform of FSL type", + ) in_mni = File( exists=True, - argstr='--inmni %s', + argstr="--inmni %s", mandatory=True, xor=_in_xor, - desc='input transform of MNI/XFM type') + desc="input transform of MNI/XFM type", + ) in_reg = File( exists=True, - argstr='--inreg %s', + argstr="--inreg %s", mandatory=True, xor=_in_xor, - desc='input transform of TK REG type (deprecated format)') + desc="input transform of TK REG type (deprecated format)", + ) in_niftyreg = File( exists=True, - argstr='--inniftyreg %s', + argstr="--inniftyreg %s", mandatory=True, xor=_in_xor, - desc='input transform of Nifty Reg type (inverse RAS2RAS)') + desc="input transform of Nifty Reg type (inverse RAS2RAS)", + ) in_itk = File( exists=True, - argstr='--initk %s', + argstr="--initk %s", mandatory=True, xor=_in_xor, - desc='input transform of ITK type') + desc="input transform of ITK type", + ) # Outputs out_lta = traits.Either( traits.Bool, File, - argstr='--outlta %s', - desc='output linear transform (LTA Freesurfer format)') + argstr="--outlta %s", + desc="output linear transform (LTA Freesurfer format)", + ) out_fsl = traits.Either( - traits.Bool, - File, - argstr='--outfsl %s', - desc='output transform in FSL format') + traits.Bool, File, argstr="--outfsl %s", desc="output transform in FSL format" + ) out_mni = traits.Either( traits.Bool, File, - argstr='--outmni %s', - desc='output transform in MNI/XFM format') + argstr="--outmni %s", + desc="output transform in MNI/XFM format", + ) out_reg = traits.Either( traits.Bool, File, - argstr='--outreg %s', - desc='output transform in reg dat format') + argstr="--outreg %s", + desc="output transform in reg dat format", + ) out_itk = traits.Either( - traits.Bool, - File, - argstr='--outitk %s', - desc='output transform in ITK format') + traits.Bool, File, argstr="--outitk %s", desc="output transform in ITK format" + ) # Optional flags - invert = traits.Bool(argstr='--invert') - ltavox2vox = traits.Bool(argstr='--ltavox2vox', requires=['out_lta']) - source_file = File(exists=True, argstr='--src %s') - target_file = File(exists=True, argstr='--trg %s') - target_conform = traits.Bool(argstr='--trgconform') + invert = traits.Bool(argstr="--invert") + ltavox2vox = traits.Bool(argstr="--ltavox2vox", requires=["out_lta"]) + source_file = File(exists=True, argstr="--src %s") + target_file = File(exists=True, argstr="--trg %s") + target_conform = traits.Bool(argstr="--trgconform") class LTAConvertOutputSpec(TraitedSpec): - out_lta = File( - exists=True, desc='output linear transform (LTA Freesurfer format)') - out_fsl = File(exists=True, desc='output transform in FSL format') - out_mni = File(exists=True, desc='output transform in MNI/XFM format') - out_reg = File(exists=True, desc='output transform in reg dat format') - out_itk = File(exists=True, desc='output transform in ITK format') + out_lta = File(exists=True, desc="output linear transform (LTA Freesurfer format)") + out_fsl = File(exists=True, desc="output transform in FSL format") + out_mni = File(exists=True, desc="output transform in MNI/XFM format") + out_reg = File(exists=True, desc="output transform in reg dat format") + out_itk = File(exists=True, desc="output transform in ITK format") class LTAConvert(CommandLine): @@ -3953,20 +4181,25 @@ class LTAConvert(CommandLine): For complete details, see the `lta_convert documentation. 
`_ """ + input_spec = LTAConvertInputSpec output_spec = LTAConvertOutputSpec - _cmd = 'lta_convert' + _cmd = "lta_convert" def _format_arg(self, name, spec, value): - if name.startswith('out_') and value is True: + if name.startswith("out_") and value is True: value = self._list_outputs()[name] - return super(LTAConvert, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - for name, default in (('out_lta', 'out.lta'), ('out_fsl', 'out.mat'), - ('out_mni', 'out.xfm'), ('out_reg', 'out.dat'), - ('out_itk', 'out.txt')): + for name, default in ( + ("out_lta", "out.lta"), + ("out_fsl", "out.mat"), + ("out_mni", "out.xfm"), + ("out_reg", "out.dat"), + ("out_itk", "out.txt"), + ): attr = getattr(self.inputs, name) if attr: fname = default if attr is True else attr diff --git a/nipype/interfaces/fsl/__init__.py b/nipype/interfaces/fsl/__init__.py index e8f192f4f2..ffeb9b39fc 100644 --- a/nipype/interfaces/fsl/__init__.py +++ b/nipype/interfaces/fsl/__init__.py @@ -1,36 +1,129 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""The fsl module provides classes for interfacing with the `FSL -`_ command line tools. +""" +FSL is a comprehensive library of analysis tools for fMRI, MRI and DTI brain imaging data. -Top-level namespace for fsl. +The fsl module provides classes for interfacing with the `FSL +`_ command line tools. """ -from .base import (FSLCommand, Info, check_fsl, no_fsl, no_fsl_course_data) -from .preprocess import (FAST, FLIRT, ApplyXFM, BET, MCFLIRT, FNIRT, ApplyWarp, - SliceTimer, SUSAN, PRELUDE, FUGUE, FIRST) -from .model import (Level1Design, FEAT, FEATModel, FILMGLS, FEATRegister, - FLAMEO, ContrastMgr, MultipleRegressDesign, L2Model, SMM, - MELODIC, SmoothEstimate, Cluster, Randomise, GLM) +from .base import FSLCommand, Info, check_fsl, no_fsl, no_fsl_course_data +from .preprocess import ( + FAST, + FLIRT, + ApplyXFM, + BET, + MCFLIRT, + FNIRT, + ApplyWarp, + SliceTimer, + SUSAN, + PRELUDE, + FUGUE, + FIRST, +) +from .model import ( + Level1Design, + FEAT, + FEATModel, + FILMGLS, + FEATRegister, + FLAMEO, + ContrastMgr, + MultipleRegressDesign, + L2Model, + SMM, + MELODIC, + SmoothEstimate, + Cluster, + Randomise, + GLM, +) from .utils import ( - AvScale, Smooth, Slice, Merge, ExtractROI, Split, ImageMaths, ImageMeants, - ImageStats, FilterRegressor, Overlay, Slicer, PlotTimeSeries, - PlotMotionParams, ConvertXFM, SwapDimensions, PowerSpectrum, Reorient2Std, - Complex, InvWarp, WarpUtils, ConvertWarp, WarpPoints, WarpPointsToStd, - WarpPointsFromStd, RobustFOV, CopyGeom, MotionOutliers) + AvScale, + Smooth, + Slice, + Merge, + ExtractROI, + Split, + ImageMaths, + ImageMeants, + ImageStats, + FilterRegressor, + Overlay, + Slicer, + PlotTimeSeries, + PlotMotionParams, + ConvertXFM, + SwapDimensions, + PowerSpectrum, + Reorient2Std, + Complex, + InvWarp, + WarpUtils, + ConvertWarp, + WarpPoints, + WarpPointsToStd, + WarpPointsFromStd, + RobustFOV, + CopyGeom, + MotionOutliers, + Text2Vest, + Vest2Text, +) -from .epi import (PrepareFieldmap, TOPUP, ApplyTOPUP, Eddy, EPIDeWarp, SigLoss, - EddyCorrect, EpiReg) -from .dti import (BEDPOSTX, XFibres, DTIFit, ProbTrackX, ProbTrackX2, VecReg, - ProjThresh, FindTheBiggest, DistanceMap, TractSkeleton, - MakeDyadicVectors, BEDPOSTX5, XFibres5) -from .maths import (ChangeDataType, Threshold, MeanImage, ApplyMask, - IsotropicSmooth, TemporalFilter, DilateImage, 
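LTAConvert has no doctest in this diff, so a minimal usage sketch may help (not part of the patch; ``affine.mat`` is a placeholder that would have to exist, since ``in_fsl`` is declared with ``exists=True``). Passing ``True`` for an ``out_*`` trait makes ``_list_outputs`` substitute the default name seen in the tuple above (``out.lta``, ``out.mat``, ...):

    from nipype.interfaces.freesurfer import LTAConvert

    lta = LTAConvert(in_fsl="affine.mat", out_lta=True)
    # _format_arg resolves out_lta=True via _list_outputs, so the
    # assembled command line is roughly:
    #   lta_convert --infsl affine.mat --outlta out.lta
    print(lta.cmdline)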
diff --git a/nipype/interfaces/fsl/__init__.py b/nipype/interfaces/fsl/__init__.py
index e8f192f4f2..ffeb9b39fc 100644
--- a/nipype/interfaces/fsl/__init__.py
+++ b/nipype/interfaces/fsl/__init__.py
@@ -1,36 +1,129 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-"""The fsl module provides classes for interfacing with the `FSL
-`_ command line tools.
+"""
+FSL is a comprehensive library of analysis tools for fMRI, MRI and DTI brain imaging data.
 
-Top-level namespace for fsl.
+The fsl module provides classes for interfacing with the `FSL
+`_ command line tools.
 """
 
-from .base import (FSLCommand, Info, check_fsl, no_fsl, no_fsl_course_data)
-from .preprocess import (FAST, FLIRT, ApplyXFM, BET, MCFLIRT, FNIRT, ApplyWarp,
-                         SliceTimer, SUSAN, PRELUDE, FUGUE, FIRST)
-from .model import (Level1Design, FEAT, FEATModel, FILMGLS, FEATRegister,
-                    FLAMEO, ContrastMgr, MultipleRegressDesign, L2Model, SMM,
-                    MELODIC, SmoothEstimate, Cluster, Randomise, GLM)
+from .base import FSLCommand, Info, check_fsl, no_fsl, no_fsl_course_data
+from .preprocess import (
+    FAST,
+    FLIRT,
+    ApplyXFM,
+    BET,
+    MCFLIRT,
+    FNIRT,
+    ApplyWarp,
+    SliceTimer,
+    SUSAN,
+    PRELUDE,
+    FUGUE,
+    FIRST,
+)
+from .model import (
+    Level1Design,
+    FEAT,
+    FEATModel,
+    FILMGLS,
+    FEATRegister,
+    FLAMEO,
+    ContrastMgr,
+    MultipleRegressDesign,
+    L2Model,
+    SMM,
+    MELODIC,
+    SmoothEstimate,
+    Cluster,
+    Randomise,
+    GLM,
+)
 from .utils import (
-    AvScale, Smooth, Slice, Merge, ExtractROI, Split, ImageMaths, ImageMeants,
-    ImageStats, FilterRegressor, Overlay, Slicer, PlotTimeSeries,
-    PlotMotionParams, ConvertXFM, SwapDimensions, PowerSpectrum, Reorient2Std,
-    Complex, InvWarp, WarpUtils, ConvertWarp, WarpPoints, WarpPointsToStd,
-    WarpPointsFromStd, RobustFOV, CopyGeom, MotionOutliers)
+    AvScale,
+    Smooth,
+    Slice,
+    Merge,
+    ExtractROI,
+    Split,
+    ImageMaths,
+    ImageMeants,
+    ImageStats,
+    FilterRegressor,
+    Overlay,
+    Slicer,
+    PlotTimeSeries,
+    PlotMotionParams,
+    ConvertXFM,
+    SwapDimensions,
+    PowerSpectrum,
+    Reorient2Std,
+    Complex,
+    InvWarp,
+    WarpUtils,
+    ConvertWarp,
+    WarpPoints,
+    WarpPointsToStd,
+    WarpPointsFromStd,
+    RobustFOV,
+    CopyGeom,
+    MotionOutliers,
+    Text2Vest,
+    Vest2Text,
+)
 
-from .epi import (PrepareFieldmap, TOPUP, ApplyTOPUP, Eddy, EPIDeWarp, SigLoss,
-                  EddyCorrect, EpiReg)
-from .dti import (BEDPOSTX, XFibres, DTIFit, ProbTrackX, ProbTrackX2, VecReg,
-                  ProjThresh, FindTheBiggest, DistanceMap, TractSkeleton,
-                  MakeDyadicVectors, BEDPOSTX5, XFibres5)
-from .maths import (ChangeDataType, Threshold, MeanImage, ApplyMask,
-                    IsotropicSmooth, TemporalFilter, DilateImage, ErodeImage,
-                    SpatialFilter, UnaryMaths, BinaryMaths, MultiImageMaths,
-                    MaxnImage, MinImage, MedianImage, PercentileImage,
-                    AR1Image)
+from .epi import (
+    PrepareFieldmap,
+    TOPUP,
+    ApplyTOPUP,
+    Eddy,
+    EPIDeWarp,
+    SigLoss,
+    EddyCorrect,
+    EpiReg,
+    EddyQuad,
+)
+from .dti import (
+    BEDPOSTX,
+    XFibres,
+    DTIFit,
+    ProbTrackX,
+    ProbTrackX2,
+    VecReg,
+    ProjThresh,
+    FindTheBiggest,
+    DistanceMap,
+    TractSkeleton,
+    MakeDyadicVectors,
+    BEDPOSTX5,
+    XFibres5,
+)
+from .maths import (
+    ChangeDataType,
+    Threshold,
+    MeanImage,
+    ApplyMask,
+    IsotropicSmooth,
+    TemporalFilter,
+    DilateImage,
+    ErodeImage,
+    SpatialFilter,
+    UnaryMaths,
+    BinaryMaths,
+    MultiImageMaths,
+    MaxnImage,
+    MinImage,
+    MedianImage,
+    PercentileImage,
+    AR1Image,
+)
 from .possum import B0Calc
-from .fix import (AccuracyTester, Classifier, Cleaner, FeatureExtractor,
-                  Training, TrainingSetCreator)
+from .fix import (
+    AccuracyTester,
+    Classifier,
+    Cleaner,
+    FeatureExtractor,
+    Training,
+    TrainingSetCreator,
+)
 from .aroma import ICA_AROMA
diff --git a/nipype/interfaces/fsl/aroma.py b/nipype/interfaces/fsl/aroma.py
index a86763e3e5..b699cd47e2 100644
--- a/nipype/interfaces/fsl/aroma.py
+++ b/nipype/interfaces/fsl/aroma.py
@@ -1,14 +1,17 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """This commandline module provides classes for interfacing with the
-`ICA-AROMA.py`_ command line tool.
+`ICA-AROMA.py `__ command line tool.
 """
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)
-from ..base import (TraitedSpec, CommandLineInputSpec, CommandLine, File,
-                    Directory, traits, isdefined)
+from ..base import (
+    TraitedSpec,
+    CommandLineInputSpec,
+    CommandLine,
+    File,
+    Directory,
+    traits,
+)
 import os
 
 
@@ -16,83 +19,93 @@ class ICA_AROMAInputSpec(CommandLineInputSpec):
     feat_dir = Directory(
         exists=True,
         mandatory=True,
-        argstr='-feat %s',
-        xor=['in_file', 'mat_file', 'fnirt_warp_file', 'motion_parameters'],
-        desc='If a feat directory exists and temporal filtering '
-        'has not been run yet, ICA_AROMA can use the files in '
-        'this directory.')
+        argstr="-feat %s",
+        xor=["in_file", "mat_file", "fnirt_warp_file", "motion_parameters"],
+        desc="If a feat directory exists and temporal filtering "
+        "has not been run yet, ICA_AROMA can use the files in "
+        "this directory.",
+    )
     in_file = File(
         exists=True,
         mandatory=True,
-        argstr='-i %s',
-        xor=['feat_dir'],
-        desc='volume to be denoised')
+        argstr="-i %s",
+        xor=["feat_dir"],
+        desc="volume to be denoised",
+    )
     out_dir = Directory(
-        'out', usedefault=True, mandatory=True,
-        argstr='-o %s', desc='output directory')
+        "out", usedefault=True, mandatory=True, argstr="-o %s", desc="output directory"
+    )
     mask = File(
-        exists=True,
-        argstr='-m %s',
-        xor=['feat_dir'],
-        desc='path/name volume mask')
+        exists=True, argstr="-m %s", xor=["feat_dir"], desc="path/name volume mask"
+    )
     dim = traits.Int(
-        argstr='-dim %d',
-        desc='Dimensionality reduction when running '
-        'MELODIC (defualt is automatic estimation)')
+        argstr="-dim %d",
+        desc="Dimensionality reduction when running "
+        "MELODIC (default is automatic estimation)",
+    )
     TR = traits.Float(
-        argstr='-tr %.3f',
-        desc='TR in seconds. If this is not specified '
-        'the TR will be extracted from the '
-        'header of the fMRI nifti file.')
+        argstr="-tr %.3f",
+        desc="TR in seconds. If this is not specified "
+        "the TR will be extracted from the "
+        "header of the fMRI nifti file.",
+    )
     melodic_dir = Directory(
         exists=True,
-        argstr='-meldir %s',
-        desc='path to MELODIC directory if MELODIC has already been run')
+        argstr="-meldir %s",
+        desc="path to MELODIC directory if MELODIC has already been run",
+    )
     mat_file = File(
         exists=True,
-        argstr='-affmat %s',
-        xor=['feat_dir'],
-        desc='path/name of the mat-file describing the '
-        'affine registration (e.g. FSL FLIRT) of the '
-        'functional data to structural space (.mat file)')
+        argstr="-affmat %s",
+        xor=["feat_dir"],
+        desc="path/name of the mat-file describing the "
+        "affine registration (e.g. FSL FLIRT) of the "
+        "functional data to structural space (.mat file)",
+    )
     fnirt_warp_file = File(
         exists=True,
-        argstr='-warp %s',
-        xor=['feat_dir'],
-        desc='File name of the warp-file describing '
-        'the non-linear registration (e.g. FSL FNIRT) '
-        'of the structural data to MNI152 space (.nii.gz)')
+        argstr="-warp %s",
+        xor=["feat_dir"],
+        desc="File name of the warp-file describing "
+        "the non-linear registration (e.g. FSL FNIRT) "
+        "of the structural data to MNI152 space (.nii.gz)",
+    )
     motion_parameters = File(
         exists=True,
         mandatory=True,
-        argstr='-mc %s',
-        xor=['feat_dir'],
-        desc='motion parameters file')
+        argstr="-mc %s",
+        xor=["feat_dir"],
+        desc="motion parameters file",
+    )
     denoise_type = traits.Enum(
-        'nonaggr',
-        'aggr',
-        'both',
-        'no',
+        "nonaggr",
+        "aggr",
+        "both",
+        "no",
         usedefault=True,
         mandatory=True,
-        argstr='-den %s',
-        desc='Type of denoising strategy:\n'
-        '-no: only classification, no denoising\n'
-        '-nonaggr (default): non-aggresssive denoising, i.e. partial component regression\n'
-        '-aggr: aggressive denoising, i.e. full component regression\n'
-        '-both: both aggressive and non-aggressive denoising (two outputs)')
+        argstr="-den %s",
+        desc="Type of denoising strategy:\n"
+        "-no: only classification, no denoising\n"
+        "-nonaggr (default): non-aggressive denoising, i.e. partial component regression\n"
+        "-aggr: aggressive denoising, i.e. full component regression\n"
+        "-both: both aggressive and non-aggressive denoising (two outputs)",
+    )
 
 
 class ICA_AROMAOutputSpec(TraitedSpec):
     aggr_denoised_file = File(
-        exists=True, desc='if generated: aggressively denoised volume')
+        exists=True, desc="if generated: aggressively denoised volume"
+    )
     nonaggr_denoised_file = File(
-        exists=True, desc='if generated: non aggressively denoised volume')
+        exists=True, desc="if generated: non aggressively denoised volume"
+    )
     out_dir = Directory(
         exists=True,
-        desc='directory contains (in addition to the denoised files): '
-        'melodic.ica + classified_motion_components + '
-        'classification_overview + feature_scores + melodic_ic_mni)')
+        desc="directory contains (in addition to the denoised files): "
+        "melodic.ica + classified_motion_components + "
+        "classification_overview + feature_scores + melodic_ic_mni)",
+    )
 
 
 class ICA_AROMA(CommandLine):
@@ -123,24 +136,27 @@ class ICA_AROMA(CommandLine):
     >>> AROMA_obj.cmdline  # doctest: +ELLIPSIS
     'ICA_AROMA.py -den both -warp warpfield.nii -i functional.nii -m mask.nii.gz -affmat func_to_struct.mat -mc fsl_mcflirt_movpar.txt -o .../ICA_testout'
     """
-    _cmd = 'ICA_AROMA.py'
+
+    _cmd = "ICA_AROMA.py"
     input_spec = ICA_AROMAInputSpec
     output_spec = ICA_AROMAOutputSpec
 
     def _format_arg(self, name, trait_spec, value):
-        if name == 'out_dir':
+        if name == "out_dir":
             return trait_spec.argstr % os.path.abspath(value)
-        return super(ICA_AROMA, self)._format_arg(name, trait_spec, value)
+        return super()._format_arg(name, trait_spec, value)
 
     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_dir'] = os.path.abspath(self.inputs.out_dir)
-        out_dir = outputs['out_dir']
+        outputs["out_dir"] = os.path.abspath(self.inputs.out_dir)
+        out_dir = outputs["out_dir"]
 
-        if self.inputs.denoise_type in ('aggr', 'both'):
-            outputs['aggr_denoised_file'] = os.path.join(
-                out_dir, 'denoised_func_data_aggr.nii.gz')
-        if self.inputs.denoise_type in ('nonaggr', 'both'):
-            outputs['nonaggr_denoised_file'] = os.path.join(
-                out_dir, 'denoised_func_data_nonaggr.nii.gz')
+        if self.inputs.denoise_type in ("aggr", "both"):
+            outputs["aggr_denoised_file"] = os.path.join(
+                out_dir, "denoised_func_data_aggr.nii.gz"
+            )
+        if self.inputs.denoise_type in ("nonaggr", "both"):
+            outputs["nonaggr_denoised_file"] = os.path.join(
+                out_dir, "denoised_func_data_nonaggr.nii.gz"
+            )
         return outputs
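A short sketch of how ``denoise_type`` drives which outputs exist, per the ``_list_outputs`` logic just above (file names are the fixed ones ICA-AROMA writes inside ``out_dir``; the input paths are placeholders from the doctest):

    from nipype.interfaces.fsl import ICA_AROMA

    aroma = ICA_AROMA()
    aroma.inputs.in_file = "functional.nii"
    aroma.inputs.mat_file = "func_to_struct.mat"
    aroma.inputs.fnirt_warp_file = "warpfield.nii"
    aroma.inputs.motion_parameters = "fsl_mcflirt_movpar.txt"
    aroma.inputs.denoise_type = "both"
    # 'both' -> out_dir/denoised_func_data_aggr.nii.gz
    #       and out_dir/denoised_func_data_nonaggr.nii.gz
    # 'no'   -> classification only; neither denoised file is set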
diff --git a/nipype/interfaces/fsl/base.py b/nipype/interfaces/fsl/base.py
index d16560c824..61010ee9a0 100644
--- a/nipype/interfaces/fsl/base.py
+++ b/nipype/interfaces/fsl/base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """The fsl module provides classes for interfacing with the `FSL
@@ -25,19 +24,16 @@
 See the docstrings of the individual classes for examples.
 
 """
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)
 
 from glob import glob
 import os
 
 from ... import logging
 from ...utils.filemanip import fname_presuffix
-from ..base import (traits, isdefined, CommandLine, CommandLineInputSpec,
-                    PackageInfo)
+from ..base import traits, isdefined, CommandLine, CommandLineInputSpec, PackageInfo
 from ...external.due import BibTeX
 
-IFLOGGER = logging.getLogger('nipype.interface')
+IFLOGGER = logging.getLogger("nipype.interface")
 
 
 class Info(PackageInfo):
@@ -54,18 +50,18 @@ class Info(PackageInfo):
     >>> Info.version()  # doctest: +SKIP
     >>> Info.output_type()  # doctest: +SKIP
 
-
     """
 
     ftypes = {
-        'NIFTI': '.nii',
-        'NIFTI_PAIR': '.img',
-        'NIFTI_GZ': '.nii.gz',
-        'NIFTI_PAIR_GZ': '.img.gz'
+        "NIFTI": ".nii",
+        "NIFTI_PAIR": ".img",
+        "NIFTI_GZ": ".nii.gz",
+        "NIFTI_PAIR_GZ": ".img.gz",
+        "GIFTI": ".func.gii",
     }
 
-    if os.getenv('FSLDIR'):
-        version_file = os.path.join(os.getenv('FSLDIR'), 'etc', 'fslversion')
+    if os.getenv("FSLDIR"):
+        version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion")
 
     @staticmethod
     def parse_version(raw_info):
@@ -77,8 +73,8 @@ def output_type_to_ext(cls, output_type):
 
         Parameters
         ----------
-        output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'}
-            String specifying the output type.
+        output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ', 'GIFTI'}
+            String specifying the output type. Note: limited GIFTI support.
 
         Returns
         -------
@@ -89,7 +85,7 @@ def output_type_to_ext(cls, output_type):
         try:
             return cls.ftypes[output_type]
         except KeyError:
-            msg = 'Invalid FSLOUTPUTTYPE: ', output_type
+            msg = "Invalid FSLOUTPUTTYPE: ", output_type
             raise KeyError(msg)
 
     @classmethod
@@ -105,28 +101,30 @@ def output_type(cls):
             Represents the current environment setting of FSLOUTPUTTYPE
         """
         try:
-            return os.environ['FSLOUTPUTTYPE']
+            return os.environ["FSLOUTPUTTYPE"]
         except KeyError:
-            IFLOGGER.warning('FSLOUTPUTTYPE environment variable is not set. '
-                             'Setting FSLOUTPUTTYPE=NIFTI')
-            return 'NIFTI'
+            IFLOGGER.warning(
+                "FSLOUTPUTTYPE environment variable is not set. "
+                "Setting FSLOUTPUTTYPE=NIFTI"
+            )
+            return "NIFTI"
 
     @staticmethod
     def standard_image(img_name=None):
-        '''Grab an image from the standard location.
+        """Grab an image from the standard location.
 
         Returns a list of standard images if called without arguments.
 
-        Could be made more fancy to allow for more relocatability'''
+        Could be made more fancy to allow for more relocatability"""
         try:
-            fsldir = os.environ['FSLDIR']
+            fsldir = os.environ["FSLDIR"]
         except KeyError:
-            raise Exception('FSL environment variables not set')
-        stdpath = os.path.join(fsldir, 'data', 'standard')
+            raise Exception("FSL environment variables not set")
+        stdpath = os.path.join(fsldir, "data", "standard")
         if img_name is None:
             return [
-                filename.replace(stdpath + '/', '')
-                for filename in glob(os.path.join(stdpath, '*nii*'))
+                filename.replace(stdpath + "/", "")
+                for filename in glob(os.path.join(stdpath, "*nii*"))
             ]
         return os.path.join(stdpath, img_name)
 
@@ -142,35 +140,36 @@ class FSLCommandInputSpec(CommandLineInputSpec):
     -------
     fsl.ExtractRoi(tmin=42, tsize=1, output_type='NIFTI')
     """
-    output_type = traits.Enum(
-        'NIFTI', list(Info.ftypes.keys()), desc='FSL output type')
+
+    output_type = traits.Enum("NIFTI", list(Info.ftypes.keys()), desc="FSL output type")
 
 
-class FSLCommand(CommandLine):
-    """Base support for FSL commands.
-    """
+class FSLCommand(CommandLine):
+    """Base support for FSL commands."""
+
     input_spec = FSLCommandInputSpec
     _output_type = None
 
-    references_ = [{
-        'entry':
-        BibTeX('@article{JenkinsonBeckmannBehrensWoolrichSmith2012,'
-               'author={M. Jenkinson, C.F. Beckmann, T.E. Behrens, '
-               'M.W. Woolrich, and S.M. Smith},'
-               'title={FSL},'
-               'journal={NeuroImage},'
-               'volume={62},'
-               'pages={782-790},'
-               'year={2012},'
-               '}'),
-        'tags': ['implementation'],
-    }]
+    _references = [
+        {
+            "entry": BibTeX(
+                "@article{JenkinsonBeckmannBehrensWoolrichSmith2012,"
+                "author={M. Jenkinson, C.F. Beckmann, T.E. Behrens, "
+                "M.W. Woolrich, and S.M. Smith},"
+                "title={FSL},"
+                "journal={NeuroImage},"
+                "volume={62},"
+                "pages={782-790},"
+                "year={2012},"
+                "}"
+            ),
+            "tags": ["implementation"],
+        }
+    ]
 
     def __init__(self, **inputs):
-        super(FSLCommand, self).__init__(**inputs)
-        self.inputs.on_trait_change(self._output_update, 'output_type')
+        super().__init__(**inputs)
+        self.inputs.on_trait_change(self._output_update, "output_type")
 
         if self._output_type is None:
             self._output_type = Info.output_type()
@@ -182,7 +181,7 @@ def __init__(self, **inputs):
 
     def _output_update(self):
         self._output_type = self.inputs.output_type
-        self.inputs.environ.update({'FSLOUTPUTTYPE': self.inputs.output_type})
+        self.inputs.environ.update({"FSLOUTPUTTYPE": self.inputs.output_type})
 
     @classmethod
     def set_default_output_type(cls, output_type):
@@ -197,23 +196,18 @@ def set_default_output_type(cls, output_type):
         if output_type in Info.ftypes:
             cls._output_type = output_type
         else:
-            raise AttributeError('Invalid FSL output_type: %s' % output_type)
+            raise AttributeError("Invalid FSL output_type: %s" % output_type)
 
     @property
     def version(self):
         return Info.version()
 
-    def _gen_fname(self,
-                   basename,
-                   cwd=None,
-                   suffix=None,
-                   change_ext=True,
-                   ext=None):
+    def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None):
         """Generate a filename based on the given parameters.
 
         The filename will take the form: cwd/basename.
-        If change_ext is True, it will use the extentions specified in
-        intputs.output_type.
+        If change_ext is True, it will use the extensions specified in
+        inputs.output_type.
 
         Parameters
         ----------
@@ -234,9 +228,9 @@ def _gen_fname(self,
 
         """
 
-        if basename == '':
-            msg = 'Unable to generate filename for command %s. ' % self.cmd
-            msg += 'basename is not set!'
+        if basename == "":
+            msg = "Unable to generate filename for command %s. " % self.cmd
+            msg += "basename is not set!"
             raise ValueError(msg)
         if cwd is None:
             cwd = os.getcwd()
@@ -244,13 +238,12 @@ def _gen_fname(self,
             ext = Info.output_type_to_ext(self.inputs.output_type)
         if change_ext:
             if suffix:
-                suffix = ''.join((suffix, ext))
+                suffix = f"{suffix}{ext}"
             else:
                 suffix = ext
         if suffix is None:
-            suffix = ''
-        fname = fname_presuffix(
-            basename, suffix=suffix, use_ext=False, newpath=cwd)
+            suffix = ""
+        fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd)
         return fname
 
     def _overload_extension(self, value, name=None):
@@ -270,13 +263,12 @@ def no_fsl():
     used with skipif to skip tests that will fail if FSL is not installed"""
 
-    if Info.version() is None:
-        return True
-    else:
-        return False
+    return Info.version() is None
 
 
 def no_fsl_course_data():
     """check if fsl_course data is present"""
 
-    return not ('FSL_COURSE_DATA' in os.environ and os.path.isdir(
-        os.path.abspath(os.environ['FSL_COURSE_DATA'])))
+    return not (
+        "FSL_COURSE_DATA" in os.environ
+        and os.path.isdir(os.path.abspath(os.environ["FSL_COURSE_DATA"]))
+    )
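The FSLOUTPUTTYPE-to-extension mapping that ``Info`` and ``FSLCommand._gen_fname`` implement above can be exercised directly; ``GIFTI`` is the entry this patch adds:

    from nipype.interfaces.fsl.base import Info

    print(Info.output_type_to_ext("NIFTI_GZ"))  # .nii.gz
    print(Info.output_type_to_ext("GIFTI"))     # .func.gii (new in this diff)
    # An unknown key raises KeyError('Invalid FSLOUTPUTTYPE: ', ...)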
traits.Int(argstr="-y %d", desc="min y") + max_y = traits.Int(argstr="-Y %d", desc="max y") + min_x = traits.Int(argstr="-x %d", desc="min x") + max_x = traits.Int(argstr="-X %d", desc="max x") save_tensor = traits.Bool( - desc='save the elements of the tensor', argstr='--save_tensor') - sse = traits.Bool(desc='output sum of squared errors', argstr='--sse') - cni = File( - exists=True, desc='input counfound regressors', argstr='--cni=%s') + desc="save the elements of the tensor", argstr="--save_tensor" + ) + sse = traits.Bool(desc="output sum of squared errors", argstr="--sse") + cni = File(exists=True, desc="input counfound regressors", argstr="--cni=%s") little_bit = traits.Bool( - desc='only process small area of brain', argstr='--littlebit') + desc="only process small area of brain", argstr="--littlebit" + ) gradnonlin = File( - exists=True, argstr='--gradnonlin=%s', desc='gradient non linearities') + exists=True, argstr="--gradnonlin=%s", desc="gradient non linearities" + ) class DTIFitOutputSpec(TraitedSpec): - V1 = File(exists=True, desc='path/name of file with the 1st eigenvector') - V2 = File(exists=True, desc='path/name of file with the 2nd eigenvector') - V3 = File(exists=True, desc='path/name of file with the 3rd eigenvector') - L1 = File(exists=True, desc='path/name of file with the 1st eigenvalue') - L2 = File(exists=True, desc='path/name of file with the 2nd eigenvalue') - L3 = File(exists=True, desc='path/name of file with the 3rd eigenvalue') - MD = File(exists=True, desc='path/name of file with the mean diffusivity') - FA = File( - exists=True, desc='path/name of file with the fractional anisotropy') - MO = File( - exists=True, desc='path/name of file with the mode of anisotropy') + V1 = File(exists=True, desc="path/name of file with the 1st eigenvector") + V2 = File(exists=True, desc="path/name of file with the 2nd eigenvector") + V3 = File(exists=True, desc="path/name of file with the 3rd eigenvector") + L1 = File(exists=True, desc="path/name of file with the 1st eigenvalue") + L2 = File(exists=True, desc="path/name of file with the 2nd eigenvalue") + L3 = File(exists=True, desc="path/name of file with the 3rd eigenvalue") + MD = File(exists=True, desc="path/name of file with the mean diffusivity") + FA = File(exists=True, desc="path/name of file with the fractional anisotropy") + MO = File(exists=True, desc="path/name of file with the mode of anisotropy") S0 = File( exists=True, - desc=('path/name of file with the raw T2 signal with no ' - 'diffusion weighting')) - tensor = File( - exists=True, desc='path/name of file with the 4D tensor volume') - sse = File( - exists=True, desc='path/name of file with the summed squared error') + desc=("path/name of file with the raw T2 signal with no diffusion weighting"), + ) + tensor = File(exists=True, desc="path/name of file with the 4D tensor volume") + sse = File(exists=True, desc="path/name of file with the summed squared error") class DTIFit(FSLCommand): - """ Use FSL dtifit command for fitting a diffusion tensor model at each + """Use FSL dtifit command for fitting a diffusion tensor model at each voxel Example @@ -108,15 +104,14 @@ class DTIFit(FSLCommand): """ - _cmd = 'dtifit' + _cmd = "dtifit" input_spec = DTIFitInputSpec output_spec = DTIFitOutputSpec def _list_outputs(self): - keys_to_ignore = {'outputtype', 'environ', 'args'} + keys_to_ignore = {"outputtype", "environ", "args"} # Optional output: Map output name to input flag - opt_output = {'tensor': self.inputs.save_tensor, - 'sse': self.inputs.sse} + opt_output = 
{"tensor": self.inputs.save_tensor, "sse": self.inputs.sse} # Ignore optional output, whose corresponding input-flag is not defined # or set to False for output, input_flag in opt_output.items(): @@ -127,161 +122,172 @@ def _list_outputs(self): outputs = self.output_spec().get() for k in set(outputs.keys()) - keys_to_ignore: - outputs[k] = self._gen_fname(self.inputs.base_name, suffix='_' + k) + outputs[k] = self._gen_fname(self.inputs.base_name, suffix="_" + k) return outputs class FSLXCommandInputSpec(FSLCommandInputSpec): dwi = File( exists=True, - argstr='--data=%s', + argstr="--data=%s", mandatory=True, - desc='diffusion weighted image data file') + desc="diffusion weighted image data file", + ) mask = File( exists=True, - argstr='--mask=%s', + argstr="--mask=%s", mandatory=True, - desc='brain binary mask file (i.e. from BET)') + desc="brain binary mask file (i.e. from BET)", + ) bvecs = File( - exists=True, - argstr='--bvecs=%s', - mandatory=True, - desc='b vectors file') - bvals = File( - exists=True, argstr='--bvals=%s', mandatory=True, desc='b values file') + exists=True, argstr="--bvecs=%s", mandatory=True, desc="b vectors file" + ) + bvals = File(exists=True, argstr="--bvals=%s", mandatory=True, desc="b values file") - logdir = Directory('.', argstr='--logdir=%s', usedefault=True) + logdir = Directory(".", argstr="--logdir=%s", usedefault=True) n_fibres = traits.Range( usedefault=True, low=1, value=2, - argstr='--nfibres=%d', - desc=('Maximum number of fibres to fit in each voxel'), - mandatory=True) + argstr="--nfibres=%d", + desc=("Maximum number of fibres to fit in each voxel"), + mandatory=True, + ) model = traits.Enum( 1, 2, 3, - argstr='--model=%d', - desc=('use monoexponential (1, default, required for ' - 'single-shell) or multiexponential (2, multi-' - 'shell) model')) - fudge = traits.Int(argstr='--fudge=%d', desc='ARD fudge factor') + argstr="--model=%d", + desc=( + "use monoexponential (1, default, required for " + "single-shell) or multiexponential (2, multi-" + "shell) model" + ), + ) + fudge = traits.Int(argstr="--fudge=%d", desc="ARD fudge factor") n_jumps = traits.Int( - 5000, usedefault=True, - argstr='--njumps=%d', desc='Num of jumps to be made by MCMC') + 5000, + usedefault=True, + argstr="--njumps=%d", + desc="Num of jumps to be made by MCMC", + ) burn_in = traits.Range( low=0, value=0, usedefault=True, - argstr='--burnin=%d', - desc=('Total num of jumps at start of MCMC to be ' - 'discarded')) + argstr="--burnin=%d", + desc=("Total num of jumps at start of MCMC to be discarded"), + ) burn_in_no_ard = traits.Range( low=0, value=0, usedefault=True, - argstr='--burnin_noard=%d', - desc=('num of burnin jumps before the ard is' - ' imposed')) + argstr="--burnin_noard=%d", + desc=("num of burnin jumps before the ard is imposed"), + ) sample_every = traits.Range( low=0, value=1, usedefault=True, - argstr='--sampleevery=%d', - desc='Num of jumps for each sample (MCMC)') + argstr="--sampleevery=%d", + desc="Num of jumps for each sample (MCMC)", + ) update_proposal_every = traits.Range( low=1, value=40, usedefault=True, - argstr='--updateproposalevery=%d', - desc=('Num of jumps for each update ' - 'to the proposal density std ' - '(MCMC)')) + argstr="--updateproposalevery=%d", + desc=("Num of jumps for each update to the proposal density std (MCMC)"), + ) seed = traits.Int( - argstr='--seed=%d', desc='seed for pseudo random number generator') + argstr="--seed=%d", desc="seed for pseudo random number generator" + ) - _xor_inputs1 = ('no_ard', 'all_ard') + _xor_inputs1 
= ("no_ard", "all_ard") no_ard = traits.Bool( - argstr='--noard', xor=_xor_inputs1, desc='Turn ARD off on all fibres') + argstr="--noard", xor=_xor_inputs1, desc="Turn ARD off on all fibres" + ) all_ard = traits.Bool( - argstr='--allard', xor=_xor_inputs1, desc='Turn ARD on on all fibres') + argstr="--allard", xor=_xor_inputs1, desc="Turn ARD on on all fibres" + ) - _xor_inputs2 = ('no_spat', 'non_linear', 'cnlinear') + _xor_inputs2 = ("no_spat", "non_linear", "cnlinear") no_spat = traits.Bool( - argstr='--nospat', + argstr="--nospat", xor=_xor_inputs2, - desc='Initialise with tensor, not spatially') + desc="Initialise with tensor, not spatially", + ) non_linear = traits.Bool( - argstr='--nonlinear', - xor=_xor_inputs2, - desc='Initialise with nonlinear fitting') + argstr="--nonlinear", xor=_xor_inputs2, desc="Initialise with nonlinear fitting" + ) cnlinear = traits.Bool( - argstr='--cnonlinear', + argstr="--cnonlinear", xor=_xor_inputs2, - desc=('Initialise with constrained nonlinear ' - 'fitting')) - rician = traits.Bool(argstr='--rician', desc=('use Rician noise modeling')) + desc=("Initialise with constrained nonlinear fitting"), + ) + rician = traits.Bool(argstr="--rician", desc=("use Rician noise modeling")) - _xor_inputs3 = ['f0_noard', 'f0_ard'] + _xor_inputs3 = ["f0_noard", "f0_ard"] f0_noard = traits.Bool( - argstr='--f0', + argstr="--f0", xor=_xor_inputs3, - desc=('Noise floor model: add to the model an ' - 'unattenuated signal compartment f0')) + desc=( + "Noise floor model: add to the model an " + "unattenuated signal compartment f0" + ), + ) f0_ard = traits.Bool( - argstr='--f0 --ardf0', - xor=_xor_inputs3 + ['all_ard'], - desc=('Noise floor model: add to the model an ' - 'unattenuated signal compartment f0')) + argstr="--f0 --ardf0", + xor=_xor_inputs3 + ["all_ard"], + desc=( + "Noise floor model: add to the model an " + "unattenuated signal compartment f0" + ), + ) force_dir = traits.Bool( True, - argstr='--forcedir', + argstr="--forcedir", usedefault=True, - desc=('use the actual directory name given ' - '(do not add + to make a new directory)')) + desc=( + "use the actual directory name given " + "(do not add + to make a new directory)" + ), + ) class FSLXCommandOutputSpec(TraitedSpec): dyads = OutputMultiPath( - File(exists=True), - desc=('Mean of PDD distribution' - ' in vector form.')) + File(exists=True), desc=("Mean of PDD distribution in vector form.") + ) fsamples = OutputMultiPath( - File(exists=True), - desc=('Samples from the ' - 'distribution on f ' - 'anisotropy')) - mean_dsamples = File( - exists=True, desc='Mean of distribution on diffusivity d') + File(exists=True), desc=("Samples from the distribution on f anisotropy") + ) + mean_dsamples = File(exists=True, desc="Mean of distribution on diffusivity d") mean_fsamples = OutputMultiPath( - File(exists=True), desc=('Mean of distribution on f ' - 'anisotropy')) + File(exists=True), desc=("Mean of distribution on f anisotropy") + ) mean_S0samples = File( - exists=True, - desc=('Mean of distribution on T2w' - 'baseline signal intensity S0')) + exists=True, desc=("Mean of distribution on T2w baseline signal intensity S0") + ) mean_tausamples = File( exists=True, - desc=('Mean of distribution on ' - 'tau samples (only with rician ' - 'noise)')) - phsamples = OutputMultiPath( - File(exists=True), desc=('phi samples, per fiber')) - thsamples = OutputMultiPath( - File(exists=True), desc=('theta samples, per fiber')) + desc=("Mean of distribution on tau samples (only with rician noise)"), + ) + phsamples = 
OutputMultiPath(File(exists=True), desc=("phi samples, per fiber")) + thsamples = OutputMultiPath(File(exists=True), desc=("theta samples, per fiber")) class FSLXCommand(FSLCommand): """ Base support for ``xfibres`` and ``bedpostx`` """ + input_spec = FSLXCommandInputSpec output_spec = FSLXCommandOutputSpec def _run_interface(self, runtime): self._out_dir = os.getcwd() - runtime = super(FSLXCommand, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime @@ -293,123 +299,120 @@ def _list_outputs(self, out_dir=None): if isdefined(self.inputs.logdir): out_dir = os.path.abspath(self.inputs.logdir) else: - out_dir = os.path.abspath('logdir') + out_dir = os.path.abspath("logdir") - multi_out = [ - 'dyads', 'fsamples', 'mean_fsamples', 'phsamples', 'thsamples' - ] - single_out = ['mean_dsamples', 'mean_S0samples'] + multi_out = ["dyads", "fsamples", "mean_fsamples", "phsamples", "thsamples"] + single_out = ["mean_dsamples", "mean_S0samples"] for k in single_out: outputs[k] = self._gen_fname(k, cwd=out_dir) if isdefined(self.inputs.rician) and self.inputs.rician: - outputs['mean_tausamples'] = self._gen_fname( - 'mean_tausamples', cwd=out_dir) + outputs["mean_tausamples"] = self._gen_fname("mean_tausamples", cwd=out_dir) for k in multi_out: outputs[k] = [] for i in range(1, n_fibres + 1): - outputs['fsamples'].append( - self._gen_fname('f%dsamples' % i, cwd=out_dir)) - outputs['mean_fsamples'].append( - self._gen_fname('mean_f%dsamples' % i, cwd=out_dir)) + outputs["fsamples"].append(self._gen_fname("f%dsamples" % i, cwd=out_dir)) + outputs["mean_fsamples"].append( + self._gen_fname("mean_f%dsamples" % i, cwd=out_dir) + ) for i in range(1, n_fibres + 1): - outputs['dyads'].append( - self._gen_fname('dyads%d' % i, cwd=out_dir)) - outputs['phsamples'].append( - self._gen_fname('ph%dsamples' % i, cwd=out_dir)) - outputs['thsamples'].append( - self._gen_fname('th%dsamples' % i, cwd=out_dir)) + outputs["dyads"].append(self._gen_fname("dyads%d" % i, cwd=out_dir)) + outputs["phsamples"].append(self._gen_fname("ph%dsamples" % i, cwd=out_dir)) + outputs["thsamples"].append(self._gen_fname("th%dsamples" % i, cwd=out_dir)) return outputs class BEDPOSTX5InputSpec(FSLXCommandInputSpec): - dwi = File( - exists=True, desc='diffusion weighted image data file', mandatory=True) - mask = File(exists=True, desc='bet binary mask file', mandatory=True) - bvecs = File(exists=True, desc='b vectors file', mandatory=True) - bvals = File(exists=True, desc='b values file', mandatory=True) - logdir = Directory(argstr='--logdir=%s') + dwi = File(exists=True, desc="diffusion weighted image data file", mandatory=True) + mask = File(exists=True, desc="bet binary mask file", mandatory=True) + bvecs = File(exists=True, desc="b vectors file", mandatory=True) + bvals = File(exists=True, desc="b values file", mandatory=True) + logdir = Directory(argstr="--logdir=%s") n_fibres = traits.Range( usedefault=True, low=1, value=2, - argstr='-n %d', - desc=('Maximum number of fibres to fit in each voxel'), - mandatory=True) + argstr="-n %d", + desc=("Maximum number of fibres to fit in each voxel"), + mandatory=True, + ) model = traits.Enum( 1, 2, 3, - argstr='-model %d', - desc=('use monoexponential (1, default, required for ' - 'single-shell) or multiexponential (2, multi-' - 'shell) model')) - fudge = traits.Int(argstr='-w %d', desc='ARD fudge factor') + argstr="-model %d", + desc=( + "use monoexponential (1, default, required for " + "single-shell) or 
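A rough sketch of the per-fibre file names the ``_list_outputs`` logic above assembles for ``n_fibres=2`` (the actual extension depends on FSLOUTPUTTYPE):

    n_fibres = 2
    single = ["mean_dsamples", "mean_S0samples"]
    per_fibre = [
        template % i
        for i in range(1, n_fibres + 1)
        for template in ("f%dsamples", "mean_f%dsamples", "dyads%d",
                         "ph%dsamples", "th%dsamples")
    ]
    print(single + per_fibre)
    # ['mean_dsamples', 'mean_S0samples', 'f1samples', 'mean_f1samples',
    #  'dyads1', 'ph1samples', 'th1samples', 'f2samples', ...]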
multiexponential (2, multi-" + "shell) model" + ), + ) + fudge = traits.Int(argstr="-w %d", desc="ARD fudge factor") n_jumps = traits.Int( - 5000, usedefault=True, - argstr='-j %d', desc='Num of jumps to be made by MCMC') + 5000, usedefault=True, argstr="-j %d", desc="Num of jumps to be made by MCMC" + ) burn_in = traits.Range( low=0, value=0, usedefault=True, - argstr='-b %d', - desc=('Total num of jumps at start of MCMC to be ' - 'discarded')) + argstr="-b %d", + desc=("Total num of jumps at start of MCMC to be discarded"), + ) sample_every = traits.Range( low=0, value=1, usedefault=True, - argstr='-s %d', - desc='Num of jumps for each sample (MCMC)') + argstr="-s %d", + desc="Num of jumps for each sample (MCMC)", + ) out_dir = Directory( - 'bedpostx', + "bedpostx", mandatory=True, - desc='output directory', + desc="output directory", usedefault=True, position=1, - argstr='%s') + argstr="%s", + ) gradnonlin = traits.Bool( - False, - argstr='-g', - desc=('consider gradient nonlinearities, ' - 'default off')) - grad_dev = File( - exists=True, desc='grad_dev file, if gradnonlin, -g is True') - use_gpu = traits.Bool(False, desc='Use the GPU version of bedpostx') + False, argstr="-g", desc=("consider gradient nonlinearities, default off") + ) + grad_dev = File(exists=True, desc="grad_dev file, if gradnonlin, -g is True") + use_gpu = traits.Bool(False, desc="Use the GPU version of bedpostx") class BEDPOSTX5OutputSpec(TraitedSpec): - mean_dsamples = File( - exists=True, desc='Mean of distribution on diffusivity d') + mean_dsamples = File(exists=True, desc="Mean of distribution on diffusivity d") mean_fsamples = OutputMultiPath( - File(exists=True), desc=('Mean of distribution on f ' - 'anisotropy')) + File(exists=True), desc=("Mean of distribution on f anisotropy") + ) mean_S0samples = File( - exists=True, - desc=('Mean of distribution on T2w' - 'baseline signal intensity S0')) + exists=True, desc=("Mean of distribution on T2w baseline signal intensity S0") + ) mean_phsamples = OutputMultiPath( - File(exists=True), desc='Mean of distribution on phi') + File(exists=True), desc="Mean of distribution on phi" + ) mean_thsamples = OutputMultiPath( - File(exists=True), desc='Mean of distribution on theta') + File(exists=True), desc="Mean of distribution on theta" + ) merged_thsamples = OutputMultiPath( - File(exists=True), desc=('Samples from the distribution ' - 'on theta')) + File(exists=True), desc=("Samples from the distribution on theta") + ) merged_phsamples = OutputMultiPath( - File(exists=True), desc=('Samples from the distribution ' - 'on phi')) + File(exists=True), desc=("Samples from the distribution on phi") + ) merged_fsamples = OutputMultiPath( File(exists=True), - desc=('Samples from the distribution on ' - 'anisotropic volume fraction')) + desc=("Samples from the distribution on anisotropic volume fraction"), + ) dyads = OutputMultiPath( - File(exists=True), desc='Mean of PDD distribution in vector form.') - dyads_dispersion = OutputMultiPath(File(exists=True), desc=('Dispersion')) + File(exists=True), desc="Mean of PDD distribution in vector form." + ) + dyads_dispersion = OutputMultiPath(File(exists=True), desc=("Dispersion")) class BEDPOSTX5(FSLXCommand): @@ -424,7 +427,7 @@ class BEDPOSTX5(FSLXCommand): .. note:: Consider using - :func:`nipype.workflows.fsl.dmri.create_bedpostx_pipeline` instead. + :func:`niflow.nipype1.workflows.fsl.dmri.create_bedpostx_pipeline` instead. 
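    Setting the ``use_gpu`` input swaps the underlying command for
    ``bedpostx_gpu`` (see ``_cuda_update`` below); a minimal sketch, setting
    the trait after construction since the change handler is attached in
    ``__init__``:

    >>> from nipype.interfaces import fsl
    >>> bedp = fsl.BEDPOSTX5()
    >>> bedp.inputs.use_gpu = True
    >>> bedp.cmd  # doctest: +SKIP
    'bedpostx_gpu'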
Example @@ -439,42 +442,39 @@ class BEDPOSTX5(FSLXCommand): """ - _cmd = 'bedpostx' + _cmd = "bedpostx" _default_cmd = _cmd input_spec = BEDPOSTX5InputSpec output_spec = BEDPOSTX5OutputSpec _can_resume = True def __init__(self, **inputs): - super(BEDPOSTX5, self).__init__(**inputs) - self.inputs.on_trait_change(self._cuda_update, 'use_gpu') + super().__init__(**inputs) + self.inputs.on_trait_change(self._cuda_update, "use_gpu") def _cuda_update(self): if isdefined(self.inputs.use_gpu) and self.inputs.use_gpu: - self._cmd = 'bedpostx_gpu' + self._cmd = "bedpostx_gpu" else: self._cmd = self._default_cmd def _run_interface(self, runtime): - subjectdir = os.path.abspath(self.inputs.out_dir) if not os.path.exists(subjectdir): os.makedirs(subjectdir) _, _, ext = split_filename(self.inputs.mask) - copyfile(self.inputs.mask, - os.path.join(subjectdir, 'nodif_brain_mask' + ext)) + copyfile(self.inputs.mask, os.path.join(subjectdir, "nodif_brain_mask" + ext)) _, _, ext = split_filename(self.inputs.dwi) - copyfile(self.inputs.dwi, os.path.join(subjectdir, 'data' + ext)) - copyfile(self.inputs.bvals, os.path.join(subjectdir, 'bvals')) - copyfile(self.inputs.bvecs, os.path.join(subjectdir, 'bvecs')) + copyfile(self.inputs.dwi, os.path.join(subjectdir, "data" + ext)) + copyfile(self.inputs.bvals, os.path.join(subjectdir, "bvals")) + copyfile(self.inputs.bvecs, os.path.join(subjectdir, "bvecs")) if isdefined(self.inputs.grad_dev): _, _, ext = split_filename(self.inputs.grad_dev) - copyfile(self.inputs.grad_dev, - os.path.join(subjectdir, 'grad_dev' + ext)) + copyfile(self.inputs.grad_dev, os.path.join(subjectdir, "grad_dev" + ext)) - retval = super(BEDPOSTX5, self)._run_interface(runtime) + retval = super()._run_interface(runtime) - self._out_dir = subjectdir + '.bedpostX' + self._out_dir = subjectdir + ".bedpostX" return retval def _list_outputs(self): @@ -482,12 +482,17 @@ def _list_outputs(self): n_fibres = self.inputs.n_fibres multi_out = [ - 'merged_thsamples', 'merged_fsamples', 'merged_phsamples', - 'mean_phsamples', 'mean_thsamples', 'mean_fsamples', - 'dyads_dispersion', 'dyads' + "merged_thsamples", + "merged_fsamples", + "merged_phsamples", + "mean_phsamples", + "mean_thsamples", + "mean_fsamples", + "dyads_dispersion", + "dyads", ] - single_out = ['mean_dsamples', 'mean_S0samples'] + single_out = ["mean_dsamples", "mean_S0samples"] for k in single_out: outputs[k] = self._gen_fname(k, cwd=self._out_dir) @@ -496,30 +501,37 @@ def _list_outputs(self): outputs[k] = [] for i in range(1, n_fibres + 1): - outputs['merged_thsamples'].append( - self._gen_fname('merged_th%dsamples' % i, cwd=self._out_dir)) - outputs['merged_fsamples'].append( - self._gen_fname('merged_f%dsamples' % i, cwd=self._out_dir)) - outputs['merged_phsamples'].append( - self._gen_fname('merged_ph%dsamples' % i, cwd=self._out_dir)) - outputs['mean_thsamples'].append( - self._gen_fname('mean_th%dsamples' % i, cwd=self._out_dir)) - outputs['mean_phsamples'].append( - self._gen_fname('mean_ph%dsamples' % i, cwd=self._out_dir)) - outputs['mean_fsamples'].append( - self._gen_fname('mean_f%dsamples' % i, cwd=self._out_dir)) - outputs['dyads'].append( - self._gen_fname('dyads%d' % i, cwd=self._out_dir)) - outputs['dyads_dispersion'].append( - self._gen_fname('dyads%d_dispersion' % i, cwd=self._out_dir)) + outputs["merged_thsamples"].append( + self._gen_fname("merged_th%dsamples" % i, cwd=self._out_dir) + ) + outputs["merged_fsamples"].append( + self._gen_fname("merged_f%dsamples" % i, cwd=self._out_dir) + ) + 
outputs["merged_phsamples"].append( + self._gen_fname("merged_ph%dsamples" % i, cwd=self._out_dir) + ) + outputs["mean_thsamples"].append( + self._gen_fname("mean_th%dsamples" % i, cwd=self._out_dir) + ) + outputs["mean_phsamples"].append( + self._gen_fname("mean_ph%dsamples" % i, cwd=self._out_dir) + ) + outputs["mean_fsamples"].append( + self._gen_fname("mean_f%dsamples" % i, cwd=self._out_dir) + ) + outputs["dyads"].append(self._gen_fname("dyads%d" % i, cwd=self._out_dir)) + outputs["dyads_dispersion"].append( + self._gen_fname("dyads%d_dispersion" % i, cwd=self._out_dir) + ) return outputs class XFibres5InputSpec(FSLXCommandInputSpec): gradnonlin = File( exists=True, - argstr='--gradnonlin=%s', - desc='gradient file corresponding to slice') + argstr="--gradnonlin=%s", + desc="gradient file corresponding to slice", + ) class XFibres5(FSLXCommand): @@ -527,7 +539,8 @@ class XFibres5(FSLXCommand): Perform model parameters estimation for local (voxelwise) diffusion parameters """ - _cmd = 'xfibres' + + _cmd = "xfibres" input_spec = XFibres5InputSpec output_spec = FSLXCommandOutputSpec @@ -542,143 +555,176 @@ class ProbTrackXBaseInputSpec(FSLCommandInputSpec): fsamples = InputMultiPath(File(exists=True), mandatory=True) samples_base_name = traits.Str( "merged", - desc=('the rootname/base_name for samples ' - 'files'), - argstr='--samples=%s', - usedefault=True) + desc=("the rootname/base_name for samples files"), + argstr="--samples=%s", + usedefault=True, + ) mask = File( exists=True, - desc='bet binary mask file in diffusion space', - argstr='-m %s', - mandatory=True) + desc="bet binary mask file in diffusion space", + argstr="-m %s", + mandatory=True, + ) seed = traits.Either( File(exists=True), traits.List(File(exists=True)), traits.List(traits.List(traits.Int(), minlen=3, maxlen=3)), - desc=('seed volume(s), or voxel(s) or freesurfer ' - 'label file'), - argstr='--seed=%s', - mandatory=True) + desc=("seed volume(s), or voxel(s) or freesurfer label file"), + argstr="--seed=%s", + mandatory=True, + ) target_masks = InputMultiPath( File(exits=True), - desc=('list of target masks - required for ' - 'seeds_to_targets classification'), - argstr='--targetmasks=%s') + desc=("list of target masks - required for seeds_to_targets classification"), + argstr="--targetmasks=%s", + ) waypoints = File( exists=True, - desc=('waypoint mask or ascii list of waypoint masks - ' - 'only keep paths going through ALL the masks'), - argstr='--waypoints=%s') + desc=( + "waypoint mask or ascii list of waypoint masks - " + "only keep paths going through ALL the masks" + ), + argstr="--waypoints=%s", + ) network = traits.Bool( - desc=('activate network mode - only keep paths ' - 'going through at least one seed mask ' - '(required if multiple seed masks)'), - argstr='--network') + desc=( + "activate network mode - only keep paths " + "going through at least one seed mask " + "(required if multiple seed masks)" + ), + argstr="--network", + ) seed_ref = File( exists=True, - desc=('reference vol to define seed space in simple mode ' - '- diffusion space assumed if absent'), - argstr='--seedref=%s') + desc=( + "reference vol to define seed space in simple mode " + "- diffusion space assumed if absent" + ), + argstr="--seedref=%s", + ) out_dir = Directory( exists=True, - argstr='--dir=%s', - desc='directory to put the final volumes in', - genfile=True) + argstr="--dir=%s", + desc="directory to put the final volumes in", + genfile=True, + ) force_dir = traits.Bool( True, - desc=('use the actual directory name given - 
i.e. ' - 'do not add + to make a new directory'), - argstr='--forcedir', - usedefault=True) + desc=( + "use the actual directory name given - i.e. " + "do not add + to make a new directory" + ), + argstr="--forcedir", + usedefault=True, + ) opd = traits.Bool( - True, - desc='outputs path distributions', - argstr='--opd', - usedefault=True) + True, desc="outputs path distributions", argstr="--opd", usedefault=True + ) correct_path_distribution = traits.Bool( - desc=('correct path distribution ' - 'for the length of the ' - 'pathways'), - argstr='--pd') - os2t = traits.Bool(desc='Outputs seeds to targets', argstr='--os2t') + desc=("correct path distribution for the length of the pathways"), + argstr="--pd", + ) + os2t = traits.Bool(desc="Outputs seeds to targets", argstr="--os2t") # paths_file = File('nipype_fdtpaths', usedefault=True, argstr='--out=%s', # desc='produces an output file (default is fdt_paths)') avoid_mp = File( exists=True, - desc=('reject pathways passing through locations given by ' - 'this mask'), - argstr='--avoid=%s') + desc=("reject pathways passing through locations given by this mask"), + argstr="--avoid=%s", + ) stop_mask = File( exists=True, - argstr='--stop=%s', - desc='stop tracking at locations given by this mask file') + argstr="--stop=%s", + desc="stop tracking at locations given by this mask file", + ) xfm = File( exists=True, - argstr='--xfm=%s', - desc=('transformation matrix taking seed space to DTI space ' - '(either FLIRT matrix or FNIRT warp_field) - default is ' - 'identity')) + argstr="--xfm=%s", + desc=( + "transformation matrix taking seed space to DTI space " + "(either FLIRT matrix or FNIRT warp_field) - default is " + "identity" + ), + ) inv_xfm = File( - argstr='--invxfm=%s', - desc=('transformation matrix taking DTI space to seed ' - 'space (compulsory when using a warp_field for ' - 'seeds_to_dti)')) + argstr="--invxfm=%s", + desc=( + "transformation matrix taking DTI space to seed " + "space (compulsory when using a warp_field for " + "seeds_to_dti)" + ), + ) n_samples = traits.Int( 5000, - argstr='--nsamples=%d', - desc='number of samples - default=5000', - usedefault=True) + argstr="--nsamples=%d", + desc="number of samples - default=5000", + usedefault=True, + ) n_steps = traits.Int( - argstr='--nsteps=%d', desc='number of steps per sample - default=2000') + argstr="--nsteps=%d", desc="number of steps per sample - default=2000" + ) dist_thresh = traits.Float( - argstr='--distthresh=%.3f', - desc=('discards samples shorter than this ' - 'threshold (in mm - default=0)')) + argstr="--distthresh=%.3f", + desc=("discards samples shorter than this threshold (in mm - default=0)"), + ) c_thresh = traits.Float( - argstr='--cthr=%.3f', desc='curvature threshold - default=0.2') - sample_random_points = traits.Bool( - argstr='--sampvox', - desc=('sample random points within ' - 'seed voxels')) + argstr="--cthr=%.3f", desc="curvature threshold - default=0.2" + ) + sample_random_points = traits.Float( + argstr="--sampvox=%.3f", desc=("sample random points within seed voxels") + ) step_length = traits.Float( - argstr='--steplength=%.3f', desc='step_length in mm - default=0.5') + argstr="--steplength=%.3f", desc="step_length in mm - default=0.5" + ) loop_check = traits.Bool( - argstr='--loopcheck', - desc=('perform loop_checks on paths - slower, ' - 'but allows lower curvature threshold')) + argstr="--loopcheck", + desc=( + "perform loop_checks on paths - slower, " + "but allows lower curvature threshold" + ), + ) use_anisotropy = traits.Bool( - 
argstr='--usef', desc='use anisotropy to constrain tracking') + argstr="--usef", desc="use anisotropy to constrain tracking" + ) rand_fib = traits.Enum( 0, 1, 2, 3, - argstr='--randfib=%d', - desc=('options: 0 - default, 1 - to randomly ' - 'sample initial fibres (with f > fibthresh), ' - '2 - to sample in proportion fibres (with ' - 'f>fibthresh) to f, 3 - to sample ALL ' - 'populations at random (even if ' - 'f<fibthresh)')) + argstr="--randfib=%d", + desc=( + "options: 0 - default, 1 - to randomly " + "sample initial fibres (with f > fibthresh), " + "2 - to sample in proportion fibres (with " + "f>fibthresh) to f, 3 - to sample ALL " + "populations at random (even if " + "f<fibthresh)" + ), + ) >>> pbx2.cmdline 'probtrackx2 --forcedir -m nodif_brain_mask.nii.gz --nsamples=3 --nsteps=10 --opd --dir=. --samples=merged --seed=seed_source.nii.gz' """ - _cmd = 'probtrackx2' + + _cmd = "probtrackx2" input_spec = ProbTrackX2InputSpec output_spec = ProbTrackX2OutputSpec def _list_outputs(self): - outputs = super(ProbTrackX2, self)._list_outputs() + outputs = super()._list_outputs() if not isdefined(self.inputs.out_dir): out_dir = os.getcwd() else: out_dir = self.inputs.out_dir - outputs['way_total'] = os.path.abspath( os.path.join(out_dir, 'waytotal')) + outputs["way_total"] = os.path.abspath(os.path.join(out_dir, "waytotal")) if isdefined(self.inputs.omatrix1): - outputs['network_matrix'] = os.path.abspath( - os.path.join(out_dir, 'matrix_seeds_to_all_targets')) - outputs['matrix1_dot'] = os.path.abspath( - os.path.join(out_dir, 'fdt_matrix1.dot')) + outputs["network_matrix"] = os.path.abspath( + os.path.join(out_dir, "matrix_seeds_to_all_targets") + ) + outputs["matrix1_dot"] = os.path.abspath( + os.path.join(out_dir, "fdt_matrix1.dot") + ) if isdefined(self.inputs.omatrix2): - outputs['lookup_tractspace'] = os.path.abspath( - os.path.join(out_dir, 'lookup_tractspace_fdt_matrix2.nii.gz')) - outputs['matrix2_dot'] = os.path.abspath( - os.path.join(out_dir, 'fdt_matrix2.dot')) + outputs["lookup_tractspace"] = os.path.abspath( + os.path.join(out_dir, "lookup_tractspace_fdt_matrix2.nii.gz") + ) + outputs["matrix2_dot"] = os.path.abspath( + os.path.join(out_dir, "fdt_matrix2.dot") + ) if isdefined(self.inputs.omatrix3): - outputs['matrix3_dot'] = os.path.abspath( - os.path.join(out_dir, 'fdt_matrix3.dot')) + outputs["matrix3_dot"] = os.path.abspath( + os.path.join(out_dir, "fdt_matrix3.dot") + ) return outputs class VecRegInputSpec(FSLCommandInputSpec): in_file = File( exists=True, - argstr='-i %s', - desc='filename for input vector or tensor field', - mandatory=True) + argstr="-i %s", + desc="filename for input vector or tensor field", + mandatory=True, + ) out_file = File( - argstr='-o %s', - desc=('filename for output registered vector or tensor ' 'field'), + argstr="-o %s", + desc=("filename for output registered vector or tensor field"), genfile=True, - hash_files=False) + hash_files=False, + ) ref_vol = File( exists=True, - argstr='-r %s', - desc='filename for reference (target) volume', - mandatory=True) + argstr="-r %s", + desc="filename for reference (target) volume", + mandatory=True, + ) affine_mat = File( - exists=True, - argstr='-t %s', - desc='filename for affine transformation matrix') + exists=True, argstr="-t %s", desc="filename for affine transformation matrix" + ) warp_field = File( exists=True, - argstr='-w %s', - desc=('filename for 4D warp field for nonlinear ' 'registration')) + argstr="-w %s", + desc=("filename for 4D warp field for nonlinear registration"), + ) rotation_mat = File( exists=True, - argstr='--rotmat=%s', - desc=('filename for secondary affine matrix if set, ' 'this will be used for the rotation of 
the ' - 'vector/tensor field')) + argstr="--rotmat=%s", + desc=( + "filename for secondary affine matrix if set, " + "this will be used for the rotation of the " + "vector/tensor field" + ), + ) rotation_warp = File( exists=True, - argstr='--rotwarp=%s', - desc=('filename for secondary warp field if set, ' - 'this will be used for the rotation of the ' - 'vector/tensor field')) + argstr="--rotwarp=%s", + desc=( + "filename for secondary warp field if set, " + "this will be used for the rotation of the " + "vector/tensor field" + ), + ) interpolation = traits.Enum( "nearestneighbour", "trilinear", "sinc", "spline", - argstr='--interp=%s', - desc=('interpolation method : ' - 'nearestneighbour, trilinear (default), ' - 'sinc or spline')) - mask = File(exists=True, argstr='-m %s', desc='brain mask in input space') + argstr="--interp=%s", + desc=( + "interpolation method : " + "nearestneighbour, trilinear (default), " + "sinc or spline" + ), + ) + mask = File(exists=True, argstr="-m %s", desc="brain mask in input space") ref_mask = File( exists=True, - argstr='--refmask=%s', - desc=('brain mask in output space (useful for speed up of ' - 'nonlinear reg)')) + argstr="--refmask=%s", + desc=("brain mask in output space (useful for speed up of nonlinear reg)"), + ) class VecRegOutputSpec(TraitedSpec): out_file = File( exists=True, - desc=('path/name of filename for the registered vector or ' - 'tensor field')) + desc=("path/name of filename for the registered vector or tensor field"), + ) class VecReg(FSLCommand): @@ -1085,7 +1185,7 @@ class VecReg(FSLCommand): """ - _cmd = 'vecreg' + _cmd = "vecreg" input_spec = VecRegInputSpec output_spec = VecRegOutputSpec @@ -1093,22 +1193,23 @@ def _run_interface(self, runtime): if not isdefined(self.inputs.out_file): pth, base_name = os.path.split(self.inputs.in_file) self.inputs.out_file = self._gen_fname( - base_name, cwd=os.path.abspath(pth), suffix='_vreg') - return super(VecReg, self)._run_interface(runtime) + base_name, cwd=os.path.abspath(pth), suffix="_vreg" + ) + return super()._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if (not isdefined(outputs['out_file']) - and isdefined(self.inputs.in_file)): + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]) and isdefined(self.inputs.in_file): pth, base_name = os.path.split(self.inputs.in_file) - outputs['out_file'] = self._gen_fname( - base_name, cwd=os.path.abspath(pth), suffix='_vreg') - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = self._gen_fname( + base_name, cwd=os.path.abspath(pth), suffix="_vreg" + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] else: return None @@ -1117,23 +1218,26 @@ def _gen_filename(self, name): class ProjThreshInputSpec(FSLCommandInputSpec): in_files = traits.List( File(exists=True), - argstr='%s', - desc='a list of input volumes', + argstr="%s", + desc="a list of input volumes", mandatory=True, - position=0) + position=0, + ) threshold = traits.Int( - argstr='%d', - desc=('threshold indicating minimum number of seed ' - 'voxels entering this mask region'), + argstr="%d", + desc=( + "threshold indicating minimum number of seed " + "voxels entering this mask region" + ), mandatory=True, - position=1) + position=1, + ) class ProjThreshOuputSpec(TraitedSpec): out_files = traits.List( - 
File(exists=True), - desc=('path/name of output volume after ' - 'thresholding')) + File(exists=True), desc=("path/name of output volume after thresholding") + ) class ProjThresh(FSLCommand): @@ -1152,44 +1256,46 @@ class ProjThresh(FSLCommand): """ - _cmd = 'proj_thresh' + _cmd = "proj_thresh" input_spec = ProjThreshInputSpec output_spec = ProjThreshOuputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_files'] = [] + outputs["out_files"] = [] for name in self.inputs.in_files: cwd, base_name = os.path.split(name) - outputs['out_files'].append( + outputs["out_files"].append( self._gen_fname( base_name, cwd=cwd, - suffix='_proj_seg_thr_{}'.format(self.inputs.threshold))) + suffix=f"_proj_seg_thr_{self.inputs.threshold}", + ) + ) return outputs class FindTheBiggestInputSpec(FSLCommandInputSpec): in_files = traits.List( File(exists=True), - argstr='%s', - desc=('a list of input volumes or a ' - 'singleMatrixFile'), + argstr="%s", + desc=("a list of input volumes or a singleMatrixFile"), position=0, - mandatory=True) + mandatory=True, + ) out_file = File( - argstr='%s', - desc='file with the resulting segmentation', + argstr="%s", + desc="file with the resulting segmentation", position=2, genfile=True, - hash_files=False) + hash_files=False, + ) class FindTheBiggestOutputSpec(TraitedSpec): out_file = File( - exists=True, - argstr='%s', - desc='output file indexed in order of input files') + exists=True, argstr="%s", desc="output file indexed in order of input files" + ) class FindTheBiggest(FSLCommand): @@ -1209,71 +1315,69 @@ class FindTheBiggest(FSLCommand): 'find_the_biggest seeds_to_M1.nii seeds_to_M2.nii biggestSegmentation' """ - _cmd = 'find_the_biggest' + + _cmd = "find_the_biggest" input_spec = FindTheBiggestInputSpec output_spec = FindTheBiggestOutputSpec def _run_interface(self, runtime): if not isdefined(self.inputs.out_file): - self.inputs.out_file = self._gen_fname( - 'biggestSegmentation', suffix='') - return super(FindTheBiggest, self)._run_interface(runtime) + self.inputs.out_file = self._gen_fname("biggestSegmentation", suffix="") + return super()._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']): - outputs['out_file'] = self._gen_fname( - 'biggestSegmentation', suffix='') - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): + outputs["out_file"] = self._gen_fname("biggestSegmentation", suffix="") + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] else: return None class TractSkeletonInputSpec(FSLCommandInputSpec): - in_file = File( exists=True, mandatory=True, argstr="-i %s", - desc="input image (typcially mean FA volume)") + desc="input image (typically mean FA volume)", + ) _proj_inputs = ["threshold", "distance_map", "data_file"] project_data = traits.Bool( argstr="-p %.3f %s %s %s %s", requires=_proj_inputs, - desc="project data onto skeleton") + desc="project data onto skeleton", + ) threshold = traits.Float(desc="skeleton threshold value") distance_map = File(exists=True, desc="distance map image") search_mask_file = File( exists=True, xor=["use_cingulum_mask"], - desc="mask in which to use alternate search rule") + desc="mask in which to use alternate search rule", + ) use_cingulum_mask = 
traits.Bool( True, usedefault=True, xor=["search_mask_file"], - desc=("perform alternate search using " - "built-in cingulum mask")) - data_file = File( - exists=True, desc="4D data to project onto skeleton (usually FA)") + desc=("perform alternate search using built-in cingulum mask"), + ) + data_file = File(exists=True, desc="4D data to project onto skeleton (usually FA)") alt_data_file = File( - exists=True, - argstr="-a %s", - desc="4D non-FA data to project onto skeleton") - alt_skeleton = File( - exists=True, argstr="-s %s", desc="alternate skeleton to use") + exists=True, argstr="-a %s", desc="4D non-FA data to project onto skeleton" + ) + alt_skeleton = File(exists=True, argstr="-s %s", desc="alternate skeleton to use") projected_data = File(desc="input data projected onto skeleton") skeleton_file = traits.Either( - traits.Bool, File, argstr="-o %s", desc="write out skeleton image") + traits.Bool, File, argstr="-o %s", desc="write out skeleton image" + ) class TractSkeletonOutputSpec(TraitedSpec): - projected_data = File(desc="input data projected onto skeleton") skeleton_file = File(desc="tract skeleton image") @@ -1290,7 +1394,7 @@ class TractSkeleton(FSLCommand): ``search_mask_file`` and ``use_cingulum_mask`` inputs are also used in data projection, but ``use_cingulum_mask`` is set to True by default. This mask controls where the projection algorithm searches within a circular space - around a tract, rather than in a single perpindicular direction. + around a tract, rather than in a single perpendicular direction. Example ------- @@ -1319,14 +1423,19 @@ def _format_arg(self, name, spec, value): proj_file = self._list_outputs()["projected_data"] else: proj_file = _si.projected_data - return spec.argstr % (_si.threshold, _si.distance_map, - mask_file, _si.data_file, proj_file) + return spec.argstr % ( + _si.threshold, + _si.distance_map, + mask_file, + _si.data_file, + proj_file, + ) elif name == "skeleton_file": if isinstance(value, bool): return spec.argstr % self._list_outputs()["skeleton_file"] else: return spec.argstr % value - return super(TractSkeleton, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -1339,50 +1448,42 @@ def _list_outputs(self): if isdefined(_si.alt_data_file): stem = _si.alt_data_file outputs["projected_data"] = fname_presuffix( - stem, - suffix="_skeletonised", - newpath=os.getcwd(), - use_ext=True) + stem, suffix="_skeletonised", newpath=os.getcwd(), use_ext=True + ) if isdefined(_si.skeleton_file) and _si.skeleton_file: outputs["skeleton_file"] = _si.skeleton_file if isinstance(_si.skeleton_file, bool): outputs["skeleton_file"] = fname_presuffix( - _si.in_file, - suffix="_skeleton", - newpath=os.getcwd(), - use_ext=True) + _si.in_file, suffix="_skeleton", newpath=os.getcwd(), use_ext=True + ) return outputs class DistanceMapInputSpec(FSLCommandInputSpec): - in_file = File( exists=True, mandatory=True, argstr="--in=%s", - desc="image to calculate distance values for") + desc="image to calculate distance values for", + ) mask_file = File( - exists=True, - argstr="--mask=%s", - desc="binary mask to contrain calculations") + exists=True, argstr="--mask=%s", desc="binary mask to constrain calculations" + ) invert_input = traits.Bool(argstr="--invert", desc="invert input image") local_max_file = traits.Either( traits.Bool, File, argstr="--localmax=%s", desc="write an image of the local maxima", - hash_files=False) + hash_files=False, + ) distance_map = File( - 
genfile=True, - argstr="--out=%s", - desc="distance map to write", - hash_files=False) + genfile=True, argstr="--out=%s", desc="distance map to write", hash_files=False + ) class DistanceMapOutputSpec(TraitedSpec): - - distance_map = File( - exists=True, desc="value is distance to nearest nonzero voxels") + distance_map = File(exists=True, desc="value is distance to nearest nonzero voxels") local_max_file = File(desc="image of local maxima") @@ -1408,7 +1509,7 @@ def _format_arg(self, name, spec, value): if name == "local_max_file": if isinstance(value, bool): return spec.argstr % self._list_outputs()["local_max_file"] - return super(DistanceMap, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -1416,21 +1517,16 @@ def _list_outputs(self): outputs["distance_map"] = _si.distance_map if not isdefined(_si.distance_map): outputs["distance_map"] = fname_presuffix( - _si.in_file, - suffix="_dstmap", - use_ext=True, - newpath=os.getcwd()) + _si.in_file, suffix="_dstmap", use_ext=True, newpath=os.getcwd() + ) outputs["distance_map"] = os.path.abspath(outputs["distance_map"]) if isdefined(_si.local_max_file): outputs["local_max_file"] = _si.local_max_file if isinstance(_si.local_max_file, bool): outputs["local_max_file"] = fname_presuffix( - _si.in_file, - suffix="_lclmax", - use_ext=True, - newpath=os.getcwd()) - outputs["local_max_file"] = os.path.abspath( - outputs["local_max_file"]) + _si.in_file, suffix="_lclmax", use_ext=True, newpath=os.getcwd() + ) + outputs["local_max_file"] = os.path.abspath(outputs["local_max_file"]) return outputs def _gen_filename(self, name): @@ -1443,13 +1539,15 @@ class MakeDyadicVectorsInputSpec(FSLCommandInputSpec): theta_vol = File(exists=True, mandatory=True, position=0, argstr="%s") phi_vol = File(exists=True, mandatory=True, position=1, argstr="%s") mask = File(exists=True, position=2, argstr="%s") - output = File( - "dyads", position=3, usedefault=True, argstr="%s", hash_files=False) + output = File("dyads", position=3, usedefault=True, argstr="%s", hash_files=False) perc = traits.Float( - desc=("the {perc}% angle of the output cone of " - "uncertainty (output will be in degrees)"), + desc=( + "the {perc}% angle of the output cone of " + "uncertainty (output will be in degrees)" + ), position=4, - argstr="%f") + argstr="%f", + ) class MakeDyadicVectorsOutputSpec(TraitedSpec): @@ -1469,6 +1567,7 @@ def _list_outputs(self): outputs = self.output_spec().get() outputs["dyads"] = self._gen_fname(self.inputs.output) outputs["dispersion"] = self._gen_fname( - self.inputs.output, suffix="_dispersion") + self.inputs.output, suffix="_dispersion" + ) return outputs diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py index a13da0e0dc..09daacb17f 100644 --- a/nipype/interfaces/fsl/epi.py +++ b/nipype/interfaces/fsl/epi.py @@ -1,68 +1,64 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL `_ command line tools. This was written to work with FSL version 5.0.4. 
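A typical entry point, sketched with hypothetical filenames (the encoding-file
machinery is described under the ``TOPUP`` interface below):

>>> from nipype.interfaces import fsl
>>> topup = fsl.TOPUP(in_file='b0_volumes.nii',
...                   encoding_direction=['y', 'y-'],
...                   readout_times=[0.05, 0.05])  # doctest: +SKIP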
""" -from __future__ import print_function, division, unicode_literals, \ - absolute_import -from builtins import str - import os import numpy as np import nibabel as nb import warnings -from ...utils.filemanip import split_filename -from ...utils import NUMPY_MMAP +from ...utils.filemanip import split_filename, fname_presuffix -from ..base import (traits, TraitedSpec, InputMultiPath, File, isdefined) +from ..base import traits, TraitedSpec, InputMultiPath, File, isdefined from .base import FSLCommand, FSLCommandInputSpec, Info class PrepareFieldmapInputSpec(FSLCommandInputSpec): scanner = traits.String( - 'SIEMENS', - argstr='%s', - position=1, - desc='must be SIEMENS', - usedefault=True) + "SIEMENS", argstr="%s", position=1, desc="must be SIEMENS", usedefault=True + ) in_phase = File( exists=True, - argstr='%s', + argstr="%s", position=2, mandatory=True, - desc=('Phase difference map, in SIEMENS format range from ' - '0-4096 or 0-8192)')) + desc=("Phase difference map, in SIEMENS format range from 0-4096 or 0-8192)"), + ) in_magnitude = File( exists=True, - argstr='%s', + argstr="%s", position=3, mandatory=True, - desc='Magnitude difference map, brain extracted') + desc="Magnitude difference map, brain extracted", + ) delta_TE = traits.Float( 2.46, usedefault=True, mandatory=True, - argstr='%f', + argstr="%f", position=-2, - desc=('echo time difference of the ' - 'fieldmap sequence in ms. (usually 2.46ms in' - ' Siemens)')) + desc=( + "echo time difference of the " + "fieldmap sequence in ms. (usually 2.46ms in" + " Siemens)" + ), + ) nocheck = traits.Bool( False, position=-1, - argstr='--nocheck', + argstr="--nocheck", usedefault=True, - desc=('do not perform sanity checks for image ' - 'size/range/dimensions')) + desc=("do not perform sanity checks for image size/range/dimensions"), + ) out_fieldmap = File( - argstr='%s', position=4, desc='output name for prepared fieldmap') + argstr="%s", position=4, desc="output name for prepared fieldmap" + ) class PrepareFieldmapOutputSpec(TraitedSpec): - out_fieldmap = File(exists=True, desc='output name for prepared fieldmap') + out_fieldmap = File(exists=True, desc="output name for prepared fieldmap") class PrepareFieldmap(FSLCommand): @@ -89,7 +85,8 @@ class PrepareFieldmap(FSLCommand): """ - _cmd = 'fsl_prepare_fieldmap' + + _cmd = "fsl_prepare_fieldmap" input_spec = PrepareFieldmapInputSpec output_spec = PrepareFieldmapOutputSpec @@ -99,24 +96,25 @@ def _parse_inputs(self, skip=None): if not isdefined(self.inputs.out_fieldmap): self.inputs.out_fieldmap = self._gen_fname( - self.inputs.in_phase, suffix='_fslprepared') + self.inputs.in_phase, suffix="_fslprepared" + ) if not isdefined(self.inputs.nocheck) or not self.inputs.nocheck: - skip += ['nocheck'] + skip += ["nocheck"] - return super(PrepareFieldmap, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_fieldmap'] = self.inputs.out_fieldmap + outputs["out_fieldmap"] = self.inputs.out_fieldmap return outputs def _run_interface(self, runtime): - runtime = super(PrepareFieldmap, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.returncode == 0: out_file = self.inputs.out_fieldmap - im = nb.load(out_file, mmap=NUMPY_MMAP) + im = nb.load(out_file) dumb_img = nb.Nifti1Image(np.zeros(im.shape), im.affine, im.header) out_nii = nb.funcs.concat_images((im, dumb_img)) nb.save(out_nii, out_file) @@ -128,171 +126,190 @@ class TOPUPInputSpec(FSLCommandInputSpec): in_file = 
File( exists=True, mandatory=True, - desc='name of 4D file with images', - argstr='--imain=%s') + desc="name of 4D file with images", + argstr="--imain=%s", + ) encoding_file = File( exists=True, mandatory=True, - xor=['encoding_direction'], - desc='name of text file with PE directions/times', - argstr='--datain=%s') + xor=["encoding_direction"], + desc="name of text file with PE directions/times", + argstr="--datain=%s", + ) encoding_direction = traits.List( - traits.Enum('y', 'x', 'z', 'x-', 'y-', 'z-'), + traits.Enum("y", "x", "z", "x-", "y-", "z-"), mandatory=True, - xor=['encoding_file'], - requires=['readout_times'], - argstr='--datain=%s', - desc=('encoding direction for automatic ' - 'generation of encoding_file')) + xor=["encoding_file"], + requires=["readout_times"], + argstr="--datain=%s", + desc=("encoding direction for automatic generation of encoding_file"), + ) readout_times = InputMultiPath( traits.Float, - requires=['encoding_direction'], - xor=['encoding_file'], + requires=["encoding_direction"], + xor=["encoding_file"], mandatory=True, - desc=('readout times (dwell times by # ' - 'phase-encode steps minus 1)')) + desc=("readout times (dwell times by # phase-encode steps minus 1)"), + ) out_base = File( - desc=('base-name of output files (spline ' - 'coefficients (Hz) and movement parameters)'), - name_source=['in_file'], - name_template='%s_base', - argstr='--out=%s', - hash_files=False) + desc=( + "base-name of output files (spline " + "coefficients (Hz) and movement parameters)" + ), + name_source=["in_file"], + name_template="%s_base", + argstr="--out=%s", + hash_files=False, + ) out_field = File( - argstr='--fout=%s', + argstr="--fout=%s", hash_files=False, - name_source=['in_file'], - name_template='%s_field', - desc='name of image file with field (Hz)') + name_source=["in_file"], + name_template="%s_field", + desc="name of image file with field (Hz)", + ) out_warp_prefix = traits.Str( "warpfield", - argstr='--dfout=%s', + argstr="--dfout=%s", hash_files=False, - desc='prefix for the warpfield images (in mm)', - usedefault=True) + desc="prefix for the warpfield images (in mm)", + usedefault=True, + ) out_mat_prefix = traits.Str( "xfm", - argstr='--rbmout=%s', + argstr="--rbmout=%s", hash_files=False, - desc='prefix for the realignment matrices', - usedefault=True) + desc="prefix for the realignment matrices", + usedefault=True, + ) out_jac_prefix = traits.Str( "jac", - argstr='--jacout=%s', + argstr="--jacout=%s", hash_files=False, - desc='prefix for the warpfield images', - usedefault=True) + desc="prefix for the warpfield images", + usedefault=True, + ) out_corrected = File( - argstr='--iout=%s', + argstr="--iout=%s", hash_files=False, - name_source=['in_file'], - name_template='%s_corrected', - desc='name of 4D image file with unwarped images') + name_source=["in_file"], + name_template="%s_corrected", + desc="name of 4D image file with unwarped images", + ) out_logfile = File( - argstr='--logout=%s', - desc='name of log-file', - name_source=['in_file'], - name_template='%s_topup.log', + argstr="--logout=%s", + desc="name of log-file", + name_source=["in_file"], + name_template="%s_topup.log", keep_extension=True, - hash_files=False) + hash_files=False, + ) # TODO: the following traits admit values separated by commas, one value # per registration level inside topup. 
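    # For reference, FSL's shipped default config (b02b0.cnf) supplies one
    # value per sub-sampling level, e.g. --warpres=20,16,14,12,10,6,4,4,4
    # alongside --subsamp=2,2,2,2,2,1,1,1,1 (illustrative values quoted from
    # that file).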
warp_res = traits.Float( - argstr='--warpres=%f', - desc=('(approximate) resolution (in mm) of warp ' - 'basis for the different sub-sampling levels')) - subsamp = traits.Int(argstr='--subsamp=%d', - desc='sub-sampling scheme') + argstr="--warpres=%f", + desc=( + "(approximate) resolution (in mm) of warp " + "basis for the different sub-sampling levels" + ), + ) + subsamp = traits.Int(argstr="--subsamp=%d", desc="sub-sampling scheme") fwhm = traits.Float( - argstr='--fwhm=%f', - desc='FWHM (in mm) of gaussian smoothing kernel') + argstr="--fwhm=%f", desc="FWHM (in mm) of gaussian smoothing kernel" + ) config = traits.String( - 'b02b0.cnf', - argstr='--config=%s', + "b02b0.cnf", + argstr="--config=%s", usedefault=True, - desc=('Name of config file specifying command line ' - 'arguments')) - max_iter = traits.Int( - argstr='--miter=%d', - desc='max # of non-linear iterations') + desc=("Name of config file specifying command line arguments"), + ) + max_iter = traits.Int(argstr="--miter=%d", desc="max # of non-linear iterations") reg_lambda = traits.Float( - argstr='--lambda=%0.f', - desc=('Weight of regularisation, default ' - 'depending on --ssqlambda and --regmod switches.')) + argstr="--lambda=%0.f", + desc=( + "Weight of regularisation, default " + "depending on --ssqlambda and --regmod switches." + ), + ) ssqlambda = traits.Enum( 1, 0, - argstr='--ssqlambda=%d', - desc=('Weight lambda by the current value of the ' - 'ssd. If used (=1), the effective weight of ' - 'regularisation term becomes higher for the ' - 'initial iterations, therefore initial steps' - ' are a little smoother than they would ' - 'without weighting. This reduces the ' - 'risk of finding a local minimum.')) + argstr="--ssqlambda=%d", + desc=( + "Weight lambda by the current value of the " + "ssd. If used (=1), the effective weight of " + "regularisation term becomes higher for the " + "initial iterations, therefore initial steps" + " are a little smoother than they would " + "without weighting. This reduces the " + "risk of finding a local minimum." + ), + ) regmod = traits.Enum( - 'bending_energy', - 'membrane_energy', - argstr='--regmod=%s', - desc=('Regularisation term implementation. Defaults ' - 'to bending_energy. Note that the two functions' - ' have vastly different scales. The membrane ' - 'energy is based on the first derivatives and ' - 'the bending energy on the second derivatives. ' - 'The second derivatives will typically be much ' - 'smaller than the first derivatives, so input ' - 'lambda will have to be larger for ' - 'bending_energy to yield approximately the same' - ' level of regularisation.')) - estmov = traits.Enum( - 1, 0, argstr='--estmov=%d', desc='estimate movements if set') + "bending_energy", + "membrane_energy", + argstr="--regmod=%s", + desc=( + "Regularisation term implementation. Defaults " + "to bending_energy. Note that the two functions" + " have vastly different scales. The membrane " + "energy is based on the first derivatives and " + "the bending energy on the second derivatives. " + "The second derivatives will typically be much " + "smaller than the first derivatives, so input " + "lambda will have to be larger for " + "bending_energy to yield approximately the same" + " level of regularisation." 
+ ), + ) + estmov = traits.Enum(1, 0, argstr="--estmov=%d", desc="estimate movements if set") minmet = traits.Enum( 0, 1, - argstr='--minmet=%d', - desc=('Minimisation method 0=Levenberg-Marquardt, ' - '1=Scaled Conjugate Gradient')) + argstr="--minmet=%d", + desc=("Minimisation method 0=Levenberg-Marquardt, 1=Scaled Conjugate Gradient"), + ) splineorder = traits.Int( - argstr='--splineorder=%d', - desc=('order of spline, 2->Qadratic spline, ' - '3->Cubic spline')) + argstr="--splineorder=%d", + desc=("order of spline, 2->Quadratic spline, 3->Cubic spline"), + ) numprec = traits.Enum( - 'double', - 'float', - argstr='--numprec=%s', - desc=('Precision for representing Hessian, double ' - 'or float.')) + "double", + "float", + argstr="--numprec=%s", + desc=("Precision for representing Hessian, double or float."), + ) interp = traits.Enum( - 'spline', - 'linear', - argstr='--interp=%s', - desc='Image interpolation model, linear or spline.') + "spline", + "linear", + argstr="--interp=%s", + desc="Image interpolation model, linear or spline.", + ) scale = traits.Enum( 0, 1, - argstr='--scale=%d', - desc=('If set (=1), the images are individually scaled' ' to a common mean')) + argstr="--scale=%d", + desc=("If set (=1), the images are individually scaled to a common mean"), + ) regrid = traits.Enum( 1, 0, - argstr='--regrid=%d', - desc=('If set (=1), the calculations are done in a ' 'different grid')) + argstr="--regrid=%d", + desc=("If set (=1), the calculations are done in a different grid"), + ) class TOPUPOutputSpec(TraitedSpec): - out_fieldcoef = File( - exists=True, desc='file containing the field coefficients') - out_movpar = File(exists=True, desc='movpar.txt output file') - out_enc_file = File(desc='encoding directions file output for applytopup') - out_field = File(desc='name of image file with field (Hz)') - out_warps = traits.List(File(exists=True), desc='warpfield images') - out_jacs = traits.List(File(exists=True), desc='Jacobian images') - out_mats = traits.List(File(exists=True), desc='realignment matrices') - out_corrected = File(desc='name of 4D image file with unwarped images') - out_logfile = File(desc='name of log-file') + out_fieldcoef = File(exists=True, desc="file containing the field coefficients") + out_movpar = File(exists=True, desc="movpar.txt output file") + out_enc_file = File(desc="encoding directions file output for applytopup") + out_field = File(desc="name of image file with field (Hz)") + out_warps = traits.List(File(exists=True), desc="warpfield images") + out_jacs = traits.List(File(exists=True), desc="Jacobian images") + out_mats = traits.List(File(exists=True), desc="realignment matrices") + out_corrected = File(desc="name of 4D image file with unwarped images") + out_logfile = File(desc="name of log-file") class TOPUP(FSLCommand): @@ -321,150 +338,157 @@ class TOPUP(FSLCommand): >>> res = topup.run() # doctest: +SKIP """ - _cmd = 'topup' + + _cmd = "topup" input_spec = TOPUPInputSpec output_spec = TOPUPOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'encoding_direction': + if name == "encoding_direction": return trait_spec.argstr % self._generate_encfile() - if name == 'out_base': + if name == "out_base": path, name, ext = split_filename(value) - if path != '': + if path != "": if not os.path.exists(path): - raise ValueError('out_base path must exist if provided') - return super(TOPUP, self)._format_arg(name, trait_spec, value) + raise ValueError("out_base path must exist if provided") + return super()._format_arg(name, 
trait_spec, value) def _list_outputs(self): - outputs = super(TOPUP, self)._list_outputs() - del outputs['out_base'] + outputs = super()._list_outputs() + del outputs["out_base"] base_path = None if isdefined(self.inputs.out_base): base_path, base, _ = split_filename(self.inputs.out_base) - if base_path == '': + if base_path == "": base_path = None else: - base = split_filename(self.inputs.in_file)[1] + '_base' - outputs['out_fieldcoef'] = self._gen_fname( - base, suffix='_fieldcoef', cwd=base_path) - outputs['out_movpar'] = self._gen_fname( - base, suffix='_movpar', ext='.txt', cwd=base_path) + base = split_filename(self.inputs.in_file)[1] + "_base" + outputs["out_fieldcoef"] = self._gen_fname( + base, suffix="_fieldcoef", cwd=base_path + ) + outputs["out_movpar"] = self._gen_fname( + base, suffix="_movpar", ext=".txt", cwd=base_path + ) n_vols = nb.load(self.inputs.in_file).shape[-1] ext = Info.output_type_to_ext(self.inputs.output_type) - fmt = os.path.abspath('{prefix}_{i:02d}{ext}').format - outputs['out_warps'] = [ + fmt = os.path.abspath("{prefix}_{i:02d}{ext}").format + outputs["out_warps"] = [ fmt(prefix=self.inputs.out_warp_prefix, i=i, ext=ext) for i in range(1, n_vols + 1) ] - outputs['out_jacs'] = [ + outputs["out_jacs"] = [ fmt(prefix=self.inputs.out_jac_prefix, i=i, ext=ext) for i in range(1, n_vols + 1) ] - outputs['out_mats'] = [ + outputs["out_mats"] = [ fmt(prefix=self.inputs.out_mat_prefix, i=i, ext=".mat") for i in range(1, n_vols + 1) ] if isdefined(self.inputs.encoding_direction): - outputs['out_enc_file'] = self._get_encfilename() + outputs["out_enc_file"] = self._get_encfilename() return outputs def _get_encfilename(self): out_file = os.path.join( - os.getcwd(), - ('%s_encfile.txt' % split_filename(self.inputs.in_file)[1])) + os.getcwd(), ("%s_encfile.txt" % split_filename(self.inputs.in_file)[1]) + ) return out_file def _generate_encfile(self): - """Generate a topup compatible encoding file based on given directions """ + """Generate a topup compatible encoding file based on given directions""" out_file = self._get_encfilename() durations = self.inputs.readout_times if len(self.inputs.encoding_direction) != len(durations): if len(self.inputs.readout_times) != 1: - raise ValueError(('Readout time must be a float or match the' - 'length of encoding directions')) + raise ValueError( + "Readout time must be a float or match the " + "length of encoding directions" + ) durations = durations * len(self.inputs.encoding_direction) lines = [] for idx, encdir in enumerate(self.inputs.encoding_direction): direction = 1.0 - if encdir.endswith('-'): + if encdir.endswith("-"): direction = -1.0 line = [ - float(val[0] == encdir[0]) * direction - for val in ['x', 'y', 'z'] ] + [durations[idx]] + float(val[0] == encdir[0]) * direction for val in ["x", "y", "z"] ] + [durations[idx]] lines.append(line) - np.savetxt(out_file, np.array(lines), fmt=b'%d %d %d %.8f') + np.savetxt(out_file, np.array(lines), fmt=b"%d %d %d %.8f") return out_file def _overload_extension(self, value, name=None): - if name == 'out_base': + if name == "out_base": return value - return super(TOPUP, self)._overload_extension(value, name) + return super()._overload_extension(value, name) class ApplyTOPUPInputSpec(FSLCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, - desc='name of file with images', - argstr='--imain=%s', - sep=',') + desc="name of file with images", + argstr="--imain=%s", + sep=",", + ) encoding_file = File( exists=True, mandatory=True, - desc='name of text file with PE 
directions/times', - argstr='--datain=%s') + desc="name of text file with PE directions/times", + argstr="--datain=%s", + ) in_index = traits.List( traits.Int, - argstr='--inindex=%s', - sep=',', - desc='comma separated list of indices corresponding to --datain') + argstr="--inindex=%s", + sep=",", + desc="comma separated list of indices corresponding to --datain", + ) in_topup_fieldcoef = File( exists=True, argstr="--topup=%s", copyfile=False, - requires=['in_topup_movpar'], - desc=('topup file containing the field ' - 'coefficients')) + requires=["in_topup_movpar"], + desc=("topup file containing the field coefficients"), + ) in_topup_movpar = File( exists=True, - requires=['in_topup_fieldcoef'], + requires=["in_topup_fieldcoef"], copyfile=False, - desc='topup movpar.txt file') + desc="topup movpar.txt file", + ) out_corrected = File( - desc='output (warped) image', - name_source=['in_files'], - name_template='%s_corrected', - argstr='--out=%s') + desc="output (warped) image", + name_source=["in_files"], + name_template="%s_corrected", + argstr="--out=%s", + ) method = traits.Enum( - 'jac', - 'lsr', - argstr='--method=%s', - desc=('use jacobian modulation (jac) or least-squares' - ' resampling (lsr)')) + "jac", + "lsr", + argstr="--method=%s", + desc=("use jacobian modulation (jac) or least-squares resampling (lsr)"), + ) interp = traits.Enum( - 'trilinear', - 'spline', - argstr='--interp=%s', - desc='interpolation method') + "trilinear", "spline", argstr="--interp=%s", desc="interpolation method" + ) datatype = traits.Enum( - 'char', - 'short', - 'int', - 'float', - 'double', - argstr='-d=%s', - desc='force output data type') + "char", + "short", + "int", + "float", + "double", + argstr="-d=%s", + desc="force output data type", + ) class ApplyTOPUPOutputSpec(TraitedSpec): out_corrected = File( - exists=True, desc=('name of 4D image file with ' - 'unwarped images')) + exists=True, desc=("name of 4D image file with unwarped images") + ) class ApplyTOPUP(FSLCommand): @@ -493,7 +517,8 @@ class ApplyTOPUP(FSLCommand): >>> res = applytopup.run() # doctest: +SKIP """ - _cmd = 'applytopup' + + _cmd = "applytopup" input_spec = ApplyTOPUPInputSpec output_spec = ApplyTOPUPOutputSpec @@ -504,209 +529,374 @@ def _parse_inputs(self, skip=None): # If not defined, assume index are the first N entries in the # parameters file, for N input images. 
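        # e.g. three entries in in_files with in_index left undefined yields
        # in_index == [1, 2, 3], i.e. rows 1-3 of the --datain text file.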
if not isdefined(self.inputs.in_index): - self.inputs.in_index = list( - range(1, - len(self.inputs.in_files) + 1)) + self.inputs.in_index = list(range(1, len(self.inputs.in_files) + 1)) - return super(ApplyTOPUP, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _format_arg(self, name, spec, value): - if name == 'in_topup_fieldcoef': - return spec.argstr % value.split('_fieldcoef')[0] - return super(ApplyTOPUP, self)._format_arg(name, spec, value) + if name == "in_topup_fieldcoef": + return spec.argstr % value.split("_fieldcoef")[0] + return super()._format_arg(name, spec, value) class EddyInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, - argstr='--imain=%s', - desc=('File containing all the images to estimate ' - 'distortions for')) + argstr="--imain=%s", + desc="File containing all the images to estimate distortions for", + ) in_mask = File( - exists=True, - mandatory=True, - argstr='--mask=%s', - desc='Mask to indicate brain') + exists=True, mandatory=True, argstr="--mask=%s", desc="Mask to indicate brain" + ) in_index = File( exists=True, mandatory=True, - argstr='--index=%s', - desc=('File containing indices for all volumes in --imain ' - 'into --acqp and --topup')) + argstr="--index=%s", + desc="File containing indices for all volumes in --imain " + "into --acqp and --topup", + ) in_acqp = File( exists=True, mandatory=True, - argstr='--acqp=%s', - desc='File containing acquisition parameters') + argstr="--acqp=%s", + desc="File containing acquisition parameters", + ) in_bvec = File( exists=True, mandatory=True, - argstr='--bvecs=%s', - desc=('File containing the b-vectors for all volumes in ' - '--imain')) + argstr="--bvecs=%s", + desc="File containing the b-vectors for all volumes in --imain", + ) in_bval = File( exists=True, mandatory=True, - argstr='--bvals=%s', - desc=('File containing the b-values for all volumes in ' - '--imain')) + argstr="--bvals=%s", + desc="File containing the b-values for all volumes in --imain", + ) out_base = traits.Str( - 'eddy_corrected', - argstr='--out=%s', + default_value="eddy_corrected", usedefault=True, - desc=('basename for output (warped) image')) + argstr="--out=%s", + desc="Basename for output image", + ) session = File( exists=True, - argstr='--session=%s', - desc=('File containing session indices for all volumes in ' - '--imain')) + argstr="--session=%s", + desc="File containing session indices for all volumes in --imain", + ) in_topup_fieldcoef = File( exists=True, argstr="--topup=%s", - requires=['in_topup_movpar'], - desc=('topup file containing the field ' - 'coefficients')) + requires=["in_topup_movpar"], + desc="Topup results file containing the field coefficients", + ) in_topup_movpar = File( exists=True, - requires=['in_topup_fieldcoef'], - desc='topup movpar.txt file') + requires=["in_topup_fieldcoef"], + desc="Topup results file containing the movement parameters (movpar.txt)", + ) + field = File( + exists=True, argstr="--field=%s", desc="Non-topup derived fieldmap scaled in Hz" + ) + field_mat = File( + exists=True, + argstr="--field_mat=%s", + desc="Matrix specifying the relative positions of the fieldmap, " + "--field, and the first volume of the input file, --imain", + ) flm = traits.Enum( - 'linear', - 'quadratic', - 'cubic', - argstr='--flm=%s', - desc='First level EC model') - + "quadratic", + "linear", + "cubic", + usedefault=True, + argstr="--flm=%s", + desc="First level EC model", + ) slm = traits.Enum( - 'none', - 'linear', - 'quadratic', - argstr='--slm=%s', - 
desc='Second level EC model') - + "none", + "linear", + "quadratic", + usedefault=True, + argstr="--slm=%s", + desc="Second level EC model", + ) fep = traits.Bool( - False, argstr='--fep', desc='Fill empty planes in x- or y-directions') - + False, argstr="--fep", desc="Fill empty planes in x- or y-directions" + ) + initrand = traits.Bool( + False, + argstr="--initrand", + desc="Resets rand for when selecting voxels", + min_ver="5.0.10", + ) interp = traits.Enum( - 'spline', - 'trilinear', - argstr='--interp=%s', - desc='Interpolation model for estimation step') - + "spline", + "trilinear", + usedefault=True, + argstr="--interp=%s", + desc="Interpolation model for estimation step", + ) nvoxhp = traits.Int( - 1000, usedefault=True, - argstr='--nvoxhp=%s', - desc=('# of voxels used to estimate the ' - 'hyperparameters')) - + default_value=1000, + usedefault=True, + argstr="--nvoxhp=%s", + desc="# of voxels used to estimate the hyperparameters", + ) fudge_factor = traits.Float( - 10.0, usedefault=True, - argstr='--ff=%s', - desc=('Fudge factor for hyperparameter ' - 'error variance')) - + default_value=10.0, + usedefault=True, + argstr="--ff=%s", + desc="Fudge factor for hyperparameter error variance", + ) dont_sep_offs_move = traits.Bool( False, - argstr='--dont_sep_offs_move', - desc=('Do NOT attempt to separate ' - 'field offset from subject ' - 'movement')) - + argstr="--dont_sep_offs_move", + desc="Do NOT attempt to separate field offset from subject movement", + ) dont_peas = traits.Bool( False, - argstr='--dont_peas', - desc="Do NOT perform a post-eddy alignment of " - "shells") - + argstr="--dont_peas", + desc="Do NOT perform a post-eddy alignment of shells", + ) fwhm = traits.Float( - desc=('FWHM for conditioning filter when estimating ' - 'the parameters'), - argstr='--fwhm=%s') - - niter = traits.Int(5, usedefault=True, - argstr='--niter=%s', desc='Number of iterations') - + desc="FWHM for conditioning filter when estimating the parameters", + argstr="--fwhm=%s", + ) + niter = traits.Int( + 5, usedefault=True, argstr="--niter=%s", desc="Number of iterations" + ) method = traits.Enum( - 'jac', - 'lsr', - argstr='--resamp=%s', - desc=('Final resampling method (jacobian/least ' - 'squares)')) + "jac", + "lsr", + usedefault=True, + argstr="--resamp=%s", + desc="Final resampling method (jacobian/least squares)", + ) + repol = traits.Bool( - False, argstr='--repol', desc='Detect and replace outlier slices') - num_threads = traits.Int( + False, argstr="--repol", desc="Detect and replace outlier slices" + ) + outlier_nstd = traits.Int( + argstr="--ol_nstd", + desc="Number of std off to qualify as outlier", + requires=["repol"], + min_ver="5.0.10", + ) + outlier_nvox = traits.Int( + argstr="--ol_nvox", + desc="Min # of voxels in a slice for inclusion in outlier detection", + requires=["repol"], + min_ver="5.0.10", + ) + outlier_type = traits.Enum( + "sw", + "gw", + "both", + argstr="--ol_type", + desc="Type of outliers, slicewise (sw), groupwise (gw) or both (both)", + requires=["repol"], + min_ver="5.0.10", + ) + outlier_pos = traits.Bool( + False, + argstr="--ol_pos", + desc="Consider both positive and negative outliers if set", + requires=["repol"], + min_ver="5.0.10", + ) + outlier_sqr = traits.Bool( + False, + argstr="--ol_sqr", + desc="Consider outliers among sums-of-squared differences if set", + requires=["repol"], + min_ver="5.0.10", + ) + multiband_factor = traits.Int( + argstr="--mb=%s", desc="Multi-band factor", min_ver="5.0.10" + ) + multiband_offset = traits.Enum( + 0, 1, - 
-        usedefault=True,
-        nohash=True,
-        desc="Number of openmp threads to use")
+        -1,
+        argstr="--mb_offs=%d",
+        desc="Multi-band offset (-1 if bottom slice removed, 1 if top slice removed)",
+        requires=["multiband_factor"],
+        min_ver="5.0.10",
+    )
+
+    mporder = traits.Int(
+        argstr="--mporder=%s",
+        desc="Order of slice-to-vol movement model",
+        requires=["use_cuda"],
+        min_ver="5.0.11",
+    )
+    slice2vol_niter = traits.Int(
+        argstr="--s2v_niter=%d",
+        desc="Number of iterations for slice-to-vol",
+        requires=["mporder"],
+        min_ver="5.0.11",
+    )
+    slice2vol_lambda = traits.Int(
+        argstr="--s2v_lambda=%d",
+        desc="Regularisation weight for slice-to-vol movement (reasonable range 1-10)",
+        requires=["mporder"],
+        min_ver="5.0.11",
+    )
+    slice2vol_interp = traits.Enum(
+        "trilinear",
+        "spline",
+        argstr="--s2v_interp=%s",
+        desc="Slice-to-vol interpolation model for estimation step",
+        requires=["mporder"],
+        min_ver="5.0.11",
+    )
+    slice_order = traits.File(
+        exists=True,
+        argstr="--slspec=%s",
+        desc="Name of text file completely specifying slice/group acquisition",
+        requires=["mporder"],
+        xor=["json"],
+        min_ver="5.0.11",
+    )
+    json = traits.File(
+        exists=True,
+        argstr="--json=%s",
+        desc="Name of .json text file with information about slice timing",
+        requires=["mporder"],
+        xor=["slice_order"],
+        min_ver="6.0.1",
+    )
+
+    estimate_move_by_susceptibility = traits.Bool(
+        False,
+        argstr="--estimate_move_by_susceptibility",
+        desc="Estimate how susceptibility field changes with subject movement",
+        min_ver="6.0.1",
+    )
+    mbs_niter = traits.Int(
+        argstr="--mbs_niter=%s",
+        desc="Number of iterations for MBS estimation",
+        requires=["estimate_move_by_susceptibility"],
+        min_ver="6.0.1",
+    )
+    mbs_lambda = traits.Int(
+        argstr="--mbs_lambda=%s",
+        desc="Weighting of regularisation for MBS estimation",
+        requires=["estimate_move_by_susceptibility"],
+        min_ver="6.0.1",
+    )
+    mbs_ksp = traits.Int(
+        argstr="--mbs_ksp=%smm",
+        desc="Knot-spacing for MBS field estimation",
+        requires=["estimate_move_by_susceptibility"],
+        min_ver="6.0.1",
+    )
+
+    num_threads = traits.Int(
+        1, usedefault=True, nohash=True, desc="Number of openmp threads to use"
+    )
     is_shelled = traits.Bool(
         False,
-        argstr='--data_is_shelled',
-        desc="Override internal check to ensure that "
-        "date are acquired on a set of b-value "
-        "shells")
-    field = traits.Str(
-        argstr='--field=%s',
-        desc="NonTOPUP fieldmap scaled in Hz - filename has "
-        "to be provided without an extension.
TOPUP is "
-        "strongly recommended")
-    field_mat = File(
-        exists=True,
-        argstr='--field_mat=%s',
-        desc="Matrix that specifies the relative locations of "
-        "the field specified by --field and first volume "
-        "in file --imain")
+        argstr="--data_is_shelled",
+        desc="Override internal check to ensure that data are acquired "
+        "on a set of b-value shells",
+    )
+    use_cuda = traits.Bool(False, desc="Run eddy using cuda gpu")
     cnr_maps = traits.Bool(
-        False, desc='Output CNR-Maps', argstr='--cnr_maps', min_ver='5.0.10')
+        False, desc="Output CNR-Maps", argstr="--cnr_maps", min_ver="5.0.10"
+    )
     residuals = traits.Bool(
-        False, desc='Output Residuals', argstr='--residuals', min_ver='5.0.10')
+        False, desc="Output Residuals", argstr="--residuals", min_ver="5.0.10"
+    )


 class EddyOutputSpec(TraitedSpec):
     out_corrected = File(
-        exists=True, desc='4D image file containing all the corrected volumes')
+        exists=True, desc="4D image file containing all the corrected volumes"
+    )
     out_parameter = File(
         exists=True,
-        desc=('text file with parameters definining the field and'
-              'movement for each scan'))
+        desc="Text file with parameters defining the field and movement for each scan",
+    )
     out_rotated_bvecs = File(
-        exists=True, desc='File containing rotated b-values for all volumes')
+        exists=True, desc="File containing rotated b-vectors for all volumes"
+    )
     out_movement_rms = File(
-        exists=True, desc='Summary of the "total movement" in each volume')
+        exists=True, desc="Summary of the 'total movement' in each volume"
+    )
     out_restricted_movement_rms = File(
         exists=True,
-        desc=('Summary of the "total movement" in each volume '
-              'disregarding translation in the PE direction'))
+        desc="Summary of the 'total movement' in each volume "
+        "disregarding translation in the PE direction",
+    )
     out_shell_alignment_parameters = File(
         exists=True,
-        desc=('File containing rigid body movement parameters '
-              'between the different shells as estimated by a '
-              'post-hoc mutual information based registration'))
+        desc="Text file containing rigid body movement parameters "
+        "between the different shells as estimated by a "
+        "post-hoc mutual information based registration",
+    )
+    out_shell_pe_translation_parameters = File(
+        exists=True,
+        desc="Text file containing translation along the PE-direction "
+        "between the different shells as estimated by a "
+        "post-hoc mutual information based registration",
+    )
+    out_outlier_map = File(
+        exists=True,
+        desc="Matrix where rows represent volumes and columns represent "
+        'slices. "0" indicates that scan-slice is not an outlier '
+        'and "1" indicates that it is',
+    )
+    out_outlier_n_stdev_map = File(
+        exists=True,
+        desc="Matrix where rows represent volumes and columns represent "
+        "slices. Values indicate number of standard deviations off the "
+        "mean difference between observation and prediction is",
+    )
+    out_outlier_n_sqr_stdev_map = File(
+        exists=True,
+        desc="Matrix where rows represent volumes and columns represent "
+        "slices.
Values indicate number of standard deviations off the "
+        "square root of the mean squared difference between observation "
+        "and prediction is",
+    )
     out_outlier_report = File(
         exists=True,
-        desc=('Text-file with a plain language report on what '
-              'outlier slices eddy has found'))
-    out_cnr_maps = File(
-        exists=True, desc='path/name of file with the cnr_maps')
-    out_residuals = File(
-        exists=True, desc='path/name of file with the residuals')
+        desc="Text file with a plain language report on what "
+        "outlier slices eddy has found",
+    )
+    out_outlier_free = File(
+        exists=True,
+        desc="4D image file not corrected for susceptibility or eddy-"
+        "current distortions or subject movement but with outlier "
+        "slices replaced",
+    )
+    out_movement_over_time = File(
+        exists=True,
+        desc="Text file containing translations (mm) and rotations "
+        "(radians) for each excitation",
+    )
+    out_cnr_maps = File(exists=True, desc="path/name of file with the cnr_maps")
+    out_residuals = File(exists=True, desc="path/name of file with the residuals")


 class Eddy(FSLCommand):
     """
     Interface for FSL eddy, a tool for estimating and correcting eddy
     currents induced distortions. `User guide
-    `_ and
+    `__ and
     `more info regarding acqp file
-    `_.
+    `_.

     Examples
     --------

     >>> from nipype.interfaces.fsl import Eddy
+
+    Running eddy on a CPU using OpenMP:
     >>> eddy = Eddy()
     >>> eddy.inputs.in_file = 'epi.nii'
     >>> eddy.inputs.in_mask = 'epi_mask.nii'
@@ -714,128 +904,191 @@ class Eddy(FSLCommand):
     >>> eddy.inputs.in_acqp = 'epi_acqp.txt'
     >>> eddy.inputs.in_bvec = 'bvecs.scheme'
     >>> eddy.inputs.in_bval = 'bvals.scheme'
+    >>> eddy.cmdline # doctest: +ELLIPSIS
+    'eddy_openmp --flm=quadratic --ff=10.0 \
+--acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme \
+--imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii \
+--interp=spline --resamp=jac --niter=5 --nvoxhp=1000 \
+--out=.../eddy_corrected --slm=none'
+
+    Running eddy on an Nvidia GPU using cuda:
     >>> eddy.inputs.use_cuda = True
     >>> eddy.cmdline # doctest: +ELLIPSIS
-    'eddy_cuda --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme \
---bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt \
---mask=epi_mask.nii --niter=5 --nvoxhp=1000 --out=.../eddy_corrected'
-    >>> eddy.inputs.use_cuda = False
-    >>> eddy.cmdline # doctest: +ELLIPSIS
-    'eddy_openmp --ff=10.0 --acqp=epi_acqp.txt --bvals=bvals.scheme \
---bvecs=bvecs.scheme --imain=epi.nii --index=epi_index.txt \
---mask=epi_mask.nii --niter=5 --nvoxhp=1000 --out=.../eddy_corrected'
-    >>> res = eddy.run() # doctest: +SKIP
+    'eddy_cuda --flm=quadratic --ff=10.0 \
+--acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme \
+--imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii \
+--interp=spline --resamp=jac --niter=5 --nvoxhp=1000 \
+--out=.../eddy_corrected --slm=none'
+
+    Running eddy with slice-to-volume motion correction:
+    >>> eddy.inputs.mporder = 6
+    >>> eddy.inputs.slice2vol_niter = 5
+    >>> eddy.inputs.slice2vol_lambda = 1
+    >>> eddy.inputs.slice2vol_interp = 'trilinear'
+    >>> eddy.inputs.slice_order = 'epi_slspec.txt'
+    >>> eddy.cmdline # doctest: +ELLIPSIS
+    'eddy_cuda --flm=quadratic --ff=10.0 \
+--acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme \
+--imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii \
+--interp=spline --resamp=jac --mporder=6 --niter=5 --nvoxhp=1000 \
+--out=.../eddy_corrected --s2v_interp=trilinear --s2v_lambda=1 \
+--s2v_niter=5 --slspec=epi_slspec.txt --slm=none'
+    >>> res = eddy.run() # doctest: +SKIP

     """
-    _cmd = 'eddy_openmp'
+
+    _cmd =
"eddy_openmp" input_spec = EddyInputSpec output_spec = EddyOutputSpec _num_threads = 1 def __init__(self, **inputs): - super(Eddy, self).__init__(**inputs) - self.inputs.on_trait_change(self._num_threads_update, 'num_threads') + super().__init__(**inputs) + self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not isdefined(self.inputs.num_threads): self.inputs.num_threads = self._num_threads else: self._num_threads_update() - self.inputs.on_trait_change(self._use_cuda, 'use_cuda') + self.inputs.on_trait_change(self._use_cuda, "use_cuda") if isdefined(self.inputs.use_cuda): self._use_cuda() def _num_threads_update(self): self._num_threads = self.inputs.num_threads if not isdefined(self.inputs.num_threads): - if 'OMP_NUM_THREADS' in self.inputs.environ: - del self.inputs.environ['OMP_NUM_THREADS'] + if "OMP_NUM_THREADS" in self.inputs.environ: + del self.inputs.environ["OMP_NUM_THREADS"] else: - self.inputs.environ['OMP_NUM_THREADS'] = str( - self.inputs.num_threads) + self.inputs.environ["OMP_NUM_THREADS"] = str(self.inputs.num_threads) def _use_cuda(self): - self._cmd = 'eddy_cuda' if self.inputs.use_cuda else 'eddy_openmp' + self._cmd = "eddy_cuda" if self.inputs.use_cuda else "eddy_openmp" def _run_interface(self, runtime): # If 'eddy_openmp' is missing, use 'eddy' - FSLDIR = os.getenv('FSLDIR', '') + FSLDIR = os.getenv("FSLDIR", "") cmd = self._cmd - if all((FSLDIR != '', cmd == 'eddy_openmp', - not os.path.exists(os.path.join(FSLDIR, 'bin', cmd)))): - self._cmd = 'eddy' - runtime = super(Eddy, self)._run_interface(runtime) + if all( + ( + FSLDIR != "", + cmd == "eddy_openmp", + not os.path.exists(os.path.join(FSLDIR, "bin", cmd)), + ) + ): + self._cmd = "eddy" + runtime = super()._run_interface(runtime) # Restore command to avoid side-effects self._cmd = cmd return runtime def _format_arg(self, name, spec, value): - if name == 'in_topup_fieldcoef': - return spec.argstr % value.split('_fieldcoef')[0] - if name == 'out_base': + if name == "in_topup_fieldcoef": + return spec.argstr % value.split("_fieldcoef")[0] + if name == "field": + return spec.argstr % fname_presuffix(value, use_ext=False) + if name == "out_base": return spec.argstr % os.path.abspath(value) - return super(Eddy, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_corrected'] = os.path.abspath( - '%s.nii.gz' % self.inputs.out_base) - outputs['out_parameter'] = os.path.abspath( - '%s.eddy_parameters' % self.inputs.out_base) + outputs["out_corrected"] = os.path.abspath("%s.nii.gz" % self.inputs.out_base) + outputs["out_parameter"] = os.path.abspath( + "%s.eddy_parameters" % self.inputs.out_base + ) # File generation might depend on the version of EDDY out_rotated_bvecs = os.path.abspath( - '%s.eddy_rotated_bvecs' % self.inputs.out_base) + "%s.eddy_rotated_bvecs" % self.inputs.out_base + ) out_movement_rms = os.path.abspath( - '%s.eddy_movement_rms' % self.inputs.out_base) + "%s.eddy_movement_rms" % self.inputs.out_base + ) out_restricted_movement_rms = os.path.abspath( - '%s.eddy_restricted_movement_rms' % self.inputs.out_base) + "%s.eddy_restricted_movement_rms" % self.inputs.out_base + ) out_shell_alignment_parameters = os.path.abspath( - '%s.eddy_post_eddy_shell_alignment_parameters' % - self.inputs.out_base) + "%s.eddy_post_eddy_shell_alignment_parameters" % self.inputs.out_base + ) + out_shell_pe_translation_parameters = os.path.abspath( + "%s.eddy_post_eddy_shell_PE_translation_parameters" % 
self.inputs.out_base
+        )
+        out_outlier_map = os.path.abspath("%s.eddy_outlier_map" % self.inputs.out_base)
+        out_outlier_n_stdev_map = os.path.abspath(
+            "%s.eddy_outlier_n_stdev_map" % self.inputs.out_base
+        )
+        out_outlier_n_sqr_stdev_map = os.path.abspath(
+            "%s.eddy_outlier_n_sqr_stdev_map" % self.inputs.out_base
+        )
         out_outlier_report = os.path.abspath(
-            '%s.eddy_outlier_report' % self.inputs.out_base)
+            "%s.eddy_outlier_report" % self.inputs.out_base
+        )
+        if isdefined(self.inputs.repol) and self.inputs.repol:
+            out_outlier_free = os.path.abspath(
+                "%s.eddy_outlier_free_data" % self.inputs.out_base
+            )
+            if os.path.exists(out_outlier_free):
+                outputs["out_outlier_free"] = out_outlier_free
+        if isdefined(self.inputs.mporder) and self.inputs.mporder > 0:
+            out_movement_over_time = os.path.abspath(
+                "%s.eddy_movement_over_time" % self.inputs.out_base
+            )
+            if os.path.exists(out_movement_over_time):
+                outputs["out_movement_over_time"] = out_movement_over_time
         if isdefined(self.inputs.cnr_maps) and self.inputs.cnr_maps:
             out_cnr_maps = os.path.abspath(
-                '%s.eddy_cnr_maps.nii.gz' % self.inputs.out_base)
+                "%s.eddy_cnr_maps.nii.gz" % self.inputs.out_base
+            )
             if os.path.exists(out_cnr_maps):
-                outputs['out_cnr_maps'] = out_cnr_maps
+                outputs["out_cnr_maps"] = out_cnr_maps
         if isdefined(self.inputs.residuals) and self.inputs.residuals:
             out_residuals = os.path.abspath(
-                '%s.eddy_residuals.nii.gz' % self.inputs.out_base)
+                "%s.eddy_residuals.nii.gz" % self.inputs.out_base
+            )
             if os.path.exists(out_residuals):
-                outputs['out_residuals'] = out_residuals
+                outputs["out_residuals"] = out_residuals

         if os.path.exists(out_rotated_bvecs):
-            outputs['out_rotated_bvecs'] = out_rotated_bvecs
+            outputs["out_rotated_bvecs"] = out_rotated_bvecs
         if os.path.exists(out_movement_rms):
-            outputs['out_movement_rms'] = out_movement_rms
+            outputs["out_movement_rms"] = out_movement_rms
         if os.path.exists(out_restricted_movement_rms):
-            outputs['out_restricted_movement_rms'] = \
-                out_restricted_movement_rms
+            outputs["out_restricted_movement_rms"] = out_restricted_movement_rms
         if os.path.exists(out_shell_alignment_parameters):
-            outputs['out_shell_alignment_parameters'] = \
-                out_shell_alignment_parameters
+            outputs["out_shell_alignment_parameters"] = out_shell_alignment_parameters
+        if os.path.exists(out_shell_pe_translation_parameters):
+            outputs["out_shell_pe_translation_parameters"] = (
+                out_shell_pe_translation_parameters
+            )
+        if os.path.exists(out_outlier_map):
+            outputs["out_outlier_map"] = out_outlier_map
+        if os.path.exists(out_outlier_n_stdev_map):
+            outputs["out_outlier_n_stdev_map"] = out_outlier_n_stdev_map
+        if os.path.exists(out_outlier_n_sqr_stdev_map):
+            outputs["out_outlier_n_sqr_stdev_map"] = out_outlier_n_sqr_stdev_map
         if os.path.exists(out_outlier_report):
-            outputs['out_outlier_report'] = out_outlier_report
+            outputs["out_outlier_report"] = out_outlier_report

         return outputs


 class SigLossInputSpec(FSLCommandInputSpec):
-    in_file = File(
-        mandatory=True, exists=True, argstr='-i %s', desc='b0 fieldmap file')
+    in_file = File(mandatory=True, exists=True, argstr="-i %s", desc="b0 fieldmap file")
     out_file = File(
-        argstr='-s %s', desc='output signal loss estimate file', genfile=True)
+        argstr="-s %s", desc="output signal loss estimate file", genfile=True
+    )

-    mask_file = File(exists=True, argstr='-m %s', desc='brain mask file')
-    echo_time = traits.Float(argstr='--te=%f', desc='echo time in seconds')
+    mask_file = File(exists=True, argstr="-m %s", desc="brain mask file")
+    echo_time =
traits.Float(argstr="--te=%f", desc="echo time in seconds")
     slice_direction = traits.Enum(
-        'x', 'y', 'z', argstr='-d %s', desc='slicing direction')
+        "x", "y", "z", argstr="-d %s", desc="slicing direction"
+    )


 class SigLossOuputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='signal loss estimate file')
+    out_file = File(exists=True, desc="signal loss estimate file")


 class SigLoss(FSLCommand):
@@ -856,118 +1109,118 @@ class SigLoss(FSLCommand):

     """
+
     input_spec = SigLossInputSpec
     output_spec = SigLossOuputSpec
-    _cmd = 'sigloss'
+    _cmd = "sigloss"

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_file'] = self.inputs.out_file
-        if ((not isdefined(outputs['out_file']))
-                and (isdefined(self.inputs.in_file))):
-            outputs['out_file'] = self._gen_fname(
-                self.inputs.in_file, suffix='_sigloss')
+        outputs["out_file"] = self.inputs.out_file
+        if (not isdefined(outputs["out_file"])) and (isdefined(self.inputs.in_file)):
+            outputs["out_file"] = self._gen_fname(
+                self.inputs.in_file, suffix="_sigloss"
+            )
         return outputs

     def _gen_filename(self, name):
-        if name == 'out_file':
-            return self._list_outputs()['out_file']
+        if name == "out_file":
+            return self._list_outputs()["out_file"]
         return None


 class EpiRegInputSpec(FSLCommandInputSpec):
     epi = File(
-        exists=True,
-        argstr='--epi=%s',
-        mandatory=True,
-        position=-4,
-        desc='EPI image')
+        exists=True, argstr="--epi=%s", mandatory=True, position=-4, desc="EPI image"
+    )
     t1_head = File(
         exists=True,
-        argstr='--t1=%s',
+        argstr="--t1=%s",
         mandatory=True,
         position=-3,
-        desc='wholehead T1 image')
+        desc="wholehead T1 image",
+    )
     t1_brain = File(
         exists=True,
-        argstr='--t1brain=%s',
+        argstr="--t1brain=%s",
         mandatory=True,
         position=-2,
-        desc='brain extracted T1 image')
+        desc="brain extracted T1 image",
+    )
     out_base = traits.String(
         "epi2struct",
-        desc='output base name',
-        argstr='--out=%s',
+        desc="output base name",
+        argstr="--out=%s",
         position=-1,
-        usedefault=True)
-    fmap = File(
-        exists=True, argstr='--fmap=%s', desc='fieldmap image (in rad/s)')
+        usedefault=True,
+    )
+    fmap = File(exists=True, argstr="--fmap=%s", desc="fieldmap image (in rad/s)")
     fmapmag = File(
-        exists=True,
-        argstr='--fmapmag=%s',
-        desc='fieldmap magnitude image - wholehead')
+        exists=True, argstr="--fmapmag=%s", desc="fieldmap magnitude image - wholehead"
+    )
     fmapmagbrain = File(
         exists=True,
-        argstr='--fmapmagbrain=%s',
-        desc='fieldmap magnitude image - brain extracted')
+        argstr="--fmapmagbrain=%s",
+        desc="fieldmap magnitude image - brain extracted",
+    )
     wmseg = File(
         exists=True,
-        argstr='--wmseg=%s',
-        desc='white matter segmentation of T1 image, has to be named \
-        like the t1brain and end on _wmseg')
+        argstr="--wmseg=%s",
+        desc="white matter segmentation of T1 image, has to be named \
+        like the t1brain and end on _wmseg",
+    )
     echospacing = traits.Float(
-        argstr='--echospacing=%f',
-        desc='Effective EPI echo spacing \
-        (sometimes called dwell time) - in seconds')
+        argstr="--echospacing=%f",
+        desc="Effective EPI echo spacing \
+        (sometimes called dwell time) - in seconds",
+    )
     pedir = traits.Enum(
-        'x',
-        'y',
-        'z',
-        '-x',
-        '-y',
-        '-z',
-        argstr='--pedir=%s',
-        desc='phase encoding direction, dir = x/y/z/-x/-y/-z')
+        "x",
+        "y",
+        "z",
+        "-x",
+        "-y",
+        "-z",
+        argstr="--pedir=%s",
+        desc="phase encoding direction, dir = x/y/z/-x/-y/-z",
+    )
     weight_image = File(
-        exists=True,
-        argstr='--weight=%s',
-        desc='weighting image (in T1 space)')
+        exists=True, argstr="--weight=%s", desc="weighting image (in T1 space)"
+    )
     no_fmapreg = traits.Bool(
         False,
-        argstr='--nofmapreg',
-        desc='do not perform registration of fmap to T1 \
-        (use if fmap already registered)')
+        argstr="--nofmapreg",
+        desc="do not perform registration of fmap to T1 \
+        (use if fmap already registered)",
+    )
     no_clean = traits.Bool(
         True,
-        argstr='--noclean',
+        argstr="--noclean",
         usedefault=True,
-        desc='do not clean up intermediate files')
+        desc="do not clean up intermediate files",
+    )


 class EpiRegOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='unwarped and coregistered epi input')
-    out_1vol = File(
-        exists=True, desc='unwarped and coregistered single volume')
-    fmap2str_mat = File(
-        exists=True, desc='rigid fieldmap-to-structural transform')
-    fmap2epi_mat = File(exists=True, desc='rigid fieldmap-to-epi transform')
-    fmap_epi = File(exists=True, desc='fieldmap in epi space')
-    fmap_str = File(exists=True, desc='fieldmap in structural space')
-    fmapmag_str = File(
-        exists=True, desc='fieldmap magnitude image in structural space')
-    epi2str_inv = File(exists=True, desc='rigid structural-to-epi transform')
-    epi2str_mat = File(exists=True, desc='rigid epi-to-structural transform')
-    shiftmap = File(exists=True, desc='shiftmap in epi space')
+    out_file = File(exists=True, desc="unwarped and coregistered epi input")
+    out_1vol = File(exists=True, desc="unwarped and coregistered single volume")
+    fmap2str_mat = File(exists=True, desc="rigid fieldmap-to-structural transform")
+    fmap2epi_mat = File(exists=True, desc="rigid fieldmap-to-epi transform")
+    fmap_epi = File(exists=True, desc="fieldmap in epi space")
+    fmap_str = File(exists=True, desc="fieldmap in structural space")
+    fmapmag_str = File(exists=True, desc="fieldmap magnitude image in structural space")
+    epi2str_inv = File(exists=True, desc="rigid structural-to-epi transform")
+    epi2str_mat = File(exists=True, desc="rigid epi-to-structural transform")
+    shiftmap = File(exists=True, desc="shiftmap in epi space")
     fullwarp = File(
         exists=True,
-        desc='warpfield to unwarp epi and transform into \
-        structural space')
-    wmseg = File(
-        exists=True, desc='white matter segmentation used in flirt bbr')
-    seg = File(
-        exists=True, desc='white matter, gray matter, csf segmentation')
-    wmedge = File(exists=True, desc='white matter edges for visualization')
+        desc="warpfield to unwarp epi and transform into \
+        structural space",
+    )
+    wmseg = File(exists=True, desc="white matter segmentation used in flirt bbr")
+    seg = File(exists=True, desc="white matter, gray matter, csf segmentation")
+    wmedge = File(exists=True, desc="white matter edges for visualization")


 class EpiReg(FSLCommand):
@@ -997,45 +1250,59 @@ class EpiReg(FSLCommand):
     >>> epireg.run() # doctest: +SKIP

     """
-    _cmd = 'epi_reg'
+
+    _cmd = "epi_reg"
     input_spec = EpiRegInputSpec
     output_spec = EpiRegOutputSpec

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_file'] = os.path.join(os.getcwd(),
-                                           self.inputs.out_base + '.nii.gz')
-        if (not (isdefined(self.inputs.no_fmapreg) and self.inputs.no_fmapreg)
-                and isdefined(self.inputs.fmap)):
-            outputs['out_1vol'] = os.path.join(
-                os.getcwd(), self.inputs.out_base + '_1vol.nii.gz')
-            outputs['fmap2str_mat'] = os.path.join(
-                os.getcwd(), self.inputs.out_base + '_fieldmap2str.mat')
-            outputs['fmap2epi_mat'] = os.path.join(
-                os.getcwd(), self.inputs.out_base + '_fieldmaprads2epi.mat')
-            outputs['fmap_epi'] = os.path.join(
-                os.getcwd(), self.inputs.out_base + '_fieldmaprads2epi.nii.gz')
-            outputs['fmap_str'] = os.path.join(
-                os.getcwd(),
self.inputs.out_base + '_fieldmaprads2str.nii.gz')
-            outputs['fmapmag_str'] = os.path.join(
-                os.getcwd(), self.inputs.out_base + '_fieldmap2str.nii.gz')
-            outputs['shiftmap'] = os.path.join(
-                os.getcwd(),
-                self.inputs.out_base + '_fieldmaprads2epi_shift.nii.gz')
-            outputs['fullwarp'] = os.path.join(
-                os.getcwd(), self.inputs.out_base + '_warp.nii.gz')
-            outputs['epi2str_inv'] = os.path.join(
-                os.getcwd(), self.inputs.out_base + '_inv.mat')
-
-        outputs['epi2str_mat'] = os.path.join(os.getcwd(),
-                                              self.inputs.out_base + '.mat')
-        outputs['wmedge'] = os.path.join(
-            os.getcwd(), self.inputs.out_base + '_fast_wmedge.nii.gz')
-        outputs['wmseg'] = os.path.join(
-            os.getcwd(), self.inputs.out_base + '_fast_wmseg.nii.gz')
-        outputs['seg'] = os.path.join(
-            os.getcwd(), self.inputs.out_base + '_fast_seg.nii.gz')
-
+        outputs["out_file"] = os.path.join(
+            os.getcwd(), self.inputs.out_base + ".nii.gz"
+        )
+        if not (
+            isdefined(self.inputs.no_fmapreg) and self.inputs.no_fmapreg
+        ) and isdefined(self.inputs.fmap):
+            outputs["out_1vol"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_1vol.nii.gz"
+            )
+            outputs["fmap2str_mat"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_fieldmap2str.mat"
+            )
+            outputs["fmap2epi_mat"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_fieldmaprads2epi.mat"
+            )
+            outputs["fmap_epi"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_fieldmaprads2epi.nii.gz"
+            )
+            outputs["fmap_str"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_fieldmaprads2str.nii.gz"
+            )
+            outputs["fmapmag_str"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_fieldmap2str.nii.gz"
+            )
+            outputs["shiftmap"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_fieldmaprads2epi_shift.nii.gz"
+            )
+            outputs["fullwarp"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_warp.nii.gz"
+            )
+            outputs["epi2str_inv"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_inv.mat"
+            )
+        if not isdefined(self.inputs.wmseg):
+            outputs["wmedge"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_fast_wmedge.nii.gz"
+            )
+            outputs["wmseg"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_fast_wmseg.nii.gz"
+            )
+            outputs["seg"] = os.path.join(
+                os.getcwd(), self.inputs.out_base + "_fast_seg.nii.gz"
+            )
+        outputs["epi2str_mat"] = os.path.join(
+            os.getcwd(), self.inputs.out_base + ".mat"
+        )
         return outputs

@@ -1047,44 +1314,49 @@ def _list_outputs(self):

 class EPIDeWarpInputSpec(FSLCommandInputSpec):
     mag_file = File(
         exists=True,
-        desc='Magnitude file',
-        argstr='--mag %s',
+        desc="Magnitude file",
+        argstr="--mag %s",
         position=0,
-        mandatory=True)
+        mandatory=True,
+    )
     dph_file = File(
         exists=True,
-        desc='Phase file assumed to be scaled from 0 to 4095',
-        argstr='--dph %s',
-        mandatory=True)
+        desc="Phase file assumed to be scaled from 0 to 4095",
+        argstr="--dph %s",
+        mandatory=True,
+    )
     exf_file = File(
-        exists=True,
-        desc='example func volume (or use epi)',
-        argstr='--exf %s')
-    epi_file = File(
-        exists=True, desc='EPI volume to unwarp', argstr='--epi %s')
+        exists=True, desc="example func volume (or use epi)", argstr="--exf %s"
+    )
+    epi_file = File(exists=True, desc="EPI volume to unwarp", argstr="--epi %s")
     tediff = traits.Float(
         2.46,
         usedefault=True,
-        desc='difference in B0 field map TEs',
-        argstr='--tediff %s')
+        desc="difference in B0 field map TEs",
+        argstr="--tediff %s",
+    )
     esp = traits.Float(
-        0.58, desc='EPI echo spacing', argstr='--esp %s', usedefault=True)
+        0.58, desc="EPI echo spacing", argstr="--esp %s",
usedefault=True
+    )
     sigma = traits.Int(
         2,
         usedefault=True,
-        argstr='--sigma %s',
+        argstr="--sigma %s",
         desc="2D spatial gaussian smoothing \
-        stdev (default = 2mm)")
-    vsm = traits.String(
-        genfile=True, desc='voxel shift map', argstr='--vsm %s')
+        stdev (default = 2mm)",
+    )
+    vsm = traits.String(genfile=True, desc="voxel shift map", argstr="--vsm %s")
     exfdw = traits.String(
-        desc='dewarped example func volume', genfile=True, argstr='--exfdw %s')
+        desc="dewarped example func volume", genfile=True, argstr="--exfdw %s"
+    )
     epidw = traits.String(
-        desc='dewarped epi volume', genfile=False, argstr='--epidw %s')
-    tmpdir = traits.String(genfile=True, desc='tmpdir', argstr='--tmpdir %s')
+        desc="dewarped epi volume", genfile=False, argstr="--epidw %s"
+    )
+    tmpdir = traits.String(genfile=True, desc="tmpdir", argstr="--tmpdir %s")
     nocleanup = traits.Bool(
-        True, usedefault=True, desc='no cleanup', argstr='--nocleanup')
-    cleanup = traits.Bool(desc='cleanup', argstr='--cleanup')
+        True, usedefault=True, desc="no cleanup", argstr="--nocleanup"
+    )
+    cleanup = traits.Bool(desc="cleanup", argstr="--cleanup")


 class EPIDeWarpOutputSpec(TraitedSpec):
@@ -1100,7 +1372,7 @@ class EPIDeWarp(FSLCommand):
     `_.

     .. warning:: deprecated in FSL, please use
-                 :func:`nipype.workflows.dmri.preprocess.epi.sdc_fmb` instead.
+                 :func:`niflow.nipype1.workflows.dmri.preprocess.epi.sdc_fmb` instead.

     Examples
     --------
@@ -1119,87 +1391,94 @@ class EPIDeWarp(FSLCommand):

     """
-    _cmd = 'epidewarp.fsl'
+
+    _cmd = "epidewarp.fsl"
     input_spec = EPIDeWarpInputSpec
     output_spec = EPIDeWarpOutputSpec

     def __init__(self, **inputs):
-        warnings.warn(("Deprecated: Please use "
-                       "nipype.workflows.dmri.preprocess.epi.sdc_fmb instead"),
-                      DeprecationWarning)
-        return super(EPIDeWarp, self).__init__(**inputs)
+        warnings.warn(
+            (
+                "Deprecated: Please use "
+                "niflow.nipype1.workflows.dmri.preprocess.epi.sdc_fmb instead"
+            ),
+            DeprecationWarning,
+        )
+        return super().__init__(**inputs)

     def _run_interface(self, runtime):
-        runtime = super(EPIDeWarp, self)._run_interface(runtime)
+        runtime = super()._run_interface(runtime)
         if runtime.stderr:
             self.raise_exception(runtime)
         return runtime

     def _gen_filename(self, name):
-        if name == 'exfdw':
+        if name == "exfdw":
             if isdefined(self.inputs.exf_file):
                 return self._gen_fname(self.inputs.exf_file, suffix="_exfdw")
             else:
                 return self._gen_fname("exfdw")
-        if name == 'epidw':
+        if name == "epidw":
             if isdefined(self.inputs.epi_file):
                 return self._gen_fname(self.inputs.epi_file, suffix="_epidw")
-        if name == 'vsm':
-            return self._gen_fname('vsm')
-        if name == 'tmpdir':
-            return os.path.join(os.getcwd(), 'temp')
+        if name == "vsm":
+            return self._gen_fname("vsm")
+        if name == "tmpdir":
+            return os.path.join(os.getcwd(), "temp")
         return None

     def _list_outputs(self):
         outputs = self.output_spec().get()
         if not isdefined(self.inputs.exfdw):
-            outputs['exfdw'] = self._gen_filename('exfdw')
+            outputs["exfdw"] = self._gen_filename("exfdw")
         else:
-            outputs['exfdw'] = self.inputs.exfdw
+            outputs["exfdw"] = self.inputs.exfdw
         if isdefined(self.inputs.epi_file):
             if isdefined(self.inputs.epidw):
-                outputs['unwarped_file'] = self.inputs.epidw
+                outputs["unwarped_file"] = self.inputs.epidw
             else:
-                outputs['unwarped_file'] = self._gen_filename('epidw')
+                outputs["unwarped_file"] = self._gen_filename("epidw")
         if not isdefined(self.inputs.vsm):
-            outputs['vsm_file'] = self._gen_filename('vsm')
+            outputs["vsm_file"] = self._gen_filename("vsm")
         else:
-            outputs['vsm_file'] = self._gen_fname(self.inputs.vsm)
+
outputs["vsm_file"] = self._gen_fname(self.inputs.vsm) if not isdefined(self.inputs.tmpdir): - outputs['exf_mask'] = self._gen_fname( - cwd=self._gen_filename('tmpdir'), basename='maskexf') + outputs["exf_mask"] = self._gen_fname( + cwd=self._gen_filename("tmpdir"), basename="maskexf" + ) else: - outputs['exf_mask'] = self._gen_fname( - cwd=self.inputs.tmpdir, basename='maskexf') + outputs["exf_mask"] = self._gen_fname( + cwd=self.inputs.tmpdir, basename="maskexf" + ) return outputs class EddyCorrectInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - desc='4D input file', - argstr='%s', - position=0, - mandatory=True) + exists=True, desc="4D input file", argstr="%s", position=0, mandatory=True + ) out_file = File( - desc='4D output file', - argstr='%s', + desc="4D output file", + argstr="%s", position=1, - name_source=['in_file'], - name_template='%s_edc', - output_name='eddy_corrected') + name_source=["in_file"], + name_template="%s_edc", + output_name="eddy_corrected", + ) ref_num = traits.Int( 0, - argstr='%d', + argstr="%d", position=2, - desc='reference number', + desc="reference number", mandatory=True, - usedefault=True) + usedefault=True, + ) class EddyCorrectOutputSpec(TraitedSpec): eddy_corrected = File( - exists=True, desc='path/name of 4D eddy corrected output file') + exists=True, desc="path/name of 4D eddy corrected output file" + ) class EddyCorrect(FSLCommand): @@ -1218,17 +1497,207 @@ class EddyCorrect(FSLCommand): 'eddy_correct diffusion.nii diffusion_edc.nii 0' """ - _cmd = 'eddy_correct' + + _cmd = "eddy_correct" input_spec = EddyCorrectInputSpec output_spec = EddyCorrectOutputSpec def __init__(self, **inputs): - warnings.warn(("Deprecated: Please use nipype.interfaces.fsl.epi.Eddy " - "instead"), DeprecationWarning) - return super(EddyCorrect, self).__init__(**inputs) + warnings.warn( + ("Deprecated: Please use nipype.interfaces.fsl.epi.Eddy instead"), + DeprecationWarning, + ) + return super().__init__(**inputs) def _run_interface(self, runtime): - runtime = super(EddyCorrect, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime + + +class EddyQuadInputSpec(FSLCommandInputSpec): + base_name = traits.Str( + "eddy_corrected", + usedefault=True, + argstr="%s", + desc=( + "Basename (including path) for EDDY output files, i.e., " + "corrected images and QC files" + ), + position=0, + ) + idx_file = File( + exists=True, + mandatory=True, + argstr="--eddyIdx %s", + desc=("File containing indices for all volumes into acquisition parameters"), + ) + param_file = File( + exists=True, + mandatory=True, + argstr="--eddyParams %s", + desc="File containing acquisition parameters", + ) + mask_file = File( + exists=True, mandatory=True, argstr="--mask %s", desc="Binary mask file" + ) + bval_file = File( + exists=True, mandatory=True, argstr="--bvals %s", desc="b-values file" + ) + bvec_file = File( + exists=True, + argstr="--bvecs %s", + desc=( + "b-vectors file - only used when .eddy_residuals " + "file is present" + ), + ) + output_dir = traits.Str( + name_template="%s.qc", + name_source=["base_name"], + argstr="--output-dir %s", + desc="Output directory - default = '.qc'", + ) + field = File(exists=True, argstr="--field %s", desc="TOPUP estimated field (in Hz)") + slice_spec = File( + exists=True, + argstr="--slspec %s", + desc="Text file specifying slice/group acquisition", + ) + verbose = traits.Bool(argstr="--verbose", desc="Display debug messages") + + +class 
EddyQuadOutputSpec(TraitedSpec):
+    qc_json = File(
+        exists=True,
+        desc=("Single subject database containing quality metrics and data info."),
+    )
+    qc_pdf = File(exists=True, desc="Single subject QC report.")
+    avg_b_png = traits.List(
+        File(exists=True),
+        desc=(
+            "Image showing mid-sagittal, -coronal and -axial slices of "
+            "each averaged b-shell volume."
+        ),
+    )
+    avg_b0_pe_png = traits.List(
+        File(exists=True),
+        desc=(
+            "Image showing mid-sagittal, -coronal and -axial slices of "
+            "each averaged pe-direction b0 volume. Generated when using "
+            "the -f option."
+        ),
+    )
+    cnr_png = traits.List(
+        File(exists=True),
+        desc=(
+            "Image showing mid-sagittal, -coronal and -axial slices of "
+            "each b-shell CNR volume. Generated when CNR maps are "
+            "available."
+        ),
+    )
+    vdm_png = File(
+        exists=True,
+        desc=(
+            "Image showing mid-sagittal, -coronal and -axial slices of "
+            "the voxel displacement map. Generated when using the -f "
+            "option."
+        ),
+    )
+    residuals = File(
+        exists=True,
+        desc=(
+            "Text file containing the volume-wise mask-averaged squared "
+            "residuals. Generated when residual maps are available."
+        ),
+    )
+    clean_volumes = File(
+        exists=True,
+        desc=(
+            "Text file containing a list of clean volumes, based on "
+            "the eddy squared residuals. To generate a version of the "
+            "pre-processed dataset without outlier volumes, use: "
+            "`fslselectvols -i <eddy_corrected_data> -o "
+            "eddy_corrected_data_clean --vols=vols_no_outliers.txt`"
+        ),
+    )
+
+
+class EddyQuad(FSLCommand):
+    """
+    Interface for FSL eddy_quad, a tool for generating single subject reports
+    and storing the quality assessment indices for each subject.
+    `User guide `__
+
+    Examples
+    --------
+
+    >>> from nipype.interfaces.fsl import EddyQuad
+    >>> quad = EddyQuad()
+    >>> quad.inputs.base_name = 'eddy_corrected'
+    >>> quad.inputs.idx_file = 'epi_index.txt'
+    >>> quad.inputs.param_file = 'epi_acqp.txt'
+    >>> quad.inputs.mask_file = 'epi_mask.nii'
+    >>> quad.inputs.bval_file = 'bvals.scheme'
+    >>> quad.inputs.bvec_file = 'bvecs.scheme'
+    >>> quad.inputs.output_dir = 'eddy_corrected.qc'
+    >>> quad.inputs.field = 'fieldmap_phase_fslprepared.nii'
+    >>> quad.inputs.verbose = True
+    >>> quad.cmdline
+    'eddy_quad eddy_corrected --bvals bvals.scheme --bvecs bvecs.scheme \
+--field fieldmap_phase_fslprepared.nii --eddyIdx epi_index.txt \
+--mask epi_mask.nii --output-dir eddy_corrected.qc --eddyParams epi_acqp.txt \
+--verbose'
+    >>> res = quad.run() # doctest: +SKIP
+
+    """
+
+    _cmd = "eddy_quad"
+    input_spec = EddyQuadInputSpec
+    output_spec = EddyQuadOutputSpec
+
+    def _list_outputs(self):
+        from glob import glob
+
+        outputs = self.output_spec().get()
+
+        # If the output directory isn't defined, the interface seems to use
+        # the default but not set its value in `self.inputs.output_dir`
+        if not isdefined(self.inputs.output_dir):
+            out_dir = os.path.abspath(os.path.basename(self.inputs.base_name) + ".qc")
+        else:
+            out_dir = os.path.abspath(self.inputs.output_dir)
+
+        outputs["qc_json"] = os.path.join(out_dir, "qc.json")
+        outputs["qc_pdf"] = os.path.join(out_dir, "qc.pdf")
+
+        # Grab all b* files here. This will also grab the b0_pe* files
+        # as well, but only if the field input was provided. So we'll remove
+        # them later in the next conditional.
+        outputs["avg_b_png"] = sorted(glob(os.path.join(out_dir, "avg_b*.png")))
+
+        if isdefined(self.inputs.field):
+            outputs["avg_b0_pe_png"] = sorted(
+                glob(os.path.join(out_dir, "avg_b0_pe*.png"))
+            )
+
+            # The previous glob for `avg_b_png` also grabbed the
+            # `avg_b0_pe_png` files so we have to remove them
+            # from `avg_b_png`.
+            for fname in outputs["avg_b0_pe_png"]:
+                outputs["avg_b_png"].remove(fname)
+
+            outputs["vdm_png"] = os.path.join(out_dir, "vdm.png")
+
+        outputs["cnr_png"] = sorted(glob(os.path.join(out_dir, "cnr*.png")))
+
+        residuals = os.path.join(out_dir, "eddy_msr.txt")
+        if os.path.isfile(residuals):
+            outputs["residuals"] = residuals
+
+        clean_volumes = os.path.join(out_dir, "vols_no_outliers.txt")
+        if os.path.isfile(clean_volumes):
+            outputs["clean_volumes"] = clean_volumes
+
+        return outputs
diff --git a/nipype/interfaces/fsl/fix.py b/nipype/interfaces/fsl/fix.py
index ebe986eb79..2799c53104 100644
--- a/nipype/interfaces/fsl/fix.py
+++ b/nipype/interfaces/fsl/fix.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """The fix module provides classes for interfacing with the `FSL FIX
@@ -54,12 +53,20 @@
     outgraph = fix_pipeline.run()
 """
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)

-from ..base import (TraitedSpec, CommandLineInputSpec, CommandLine,
-                    InputMultiPath, OutputMultiPath, BaseInterface,
-                    BaseInterfaceInputSpec, traits, Directory, File, isdefined)
+from ..base import (
+    TraitedSpec,
+    CommandLineInputSpec,
+    CommandLine,
+    InputMultiPath,
+    OutputMultiPath,
+    BaseInterface,
+    BaseInterfaceInputSpec,
+    traits,
+    Directory,
+    File,
+    isdefined,
+)
 import os


@@ -67,53 +74,58 @@ class TrainingSetCreatorInputSpec(BaseInterfaceInputSpec):
     mel_icas_in = InputMultiPath(
         Directory(exists=True),
         copyfile=False,
-        desc='Melodic output directories',
-        argstr='%s',
-        position=-1)
+        desc="Melodic output directories",
+        argstr="%s",
+        position=-1,
+    )


 class TrainingSetCreatorOutputSpec(TraitedSpec):
     mel_icas_out = OutputMultiPath(
         Directory(exists=True),
         copyfile=False,
-        desc='Hand labels for noise vs signal',
-        argstr='%s',
-        position=-1)
+        desc="Hand labels for noise vs signal",
+        argstr="%s",
+        position=-1,
+    )


 class TrainingSetCreator(BaseInterface):
-    '''Goes through set of provided melodic output directories, to find all
+    """Goes through set of provided melodic output directories, to find all
     the ones that have a hand_labels_noise.txt file in them.

     This is outsourced as a separate class, so that the pipeline is
-    rerun everytime a handlabeled file has been changed, or a new one
+    rerun every time a hand-labeled file has been changed, or a new one
     created.
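+
+    A minimal usage sketch (the directory names below are hypothetical
+    placeholders, not test data shipped with nipype, and must exist on
+    disk for the inputs to validate):
+
+    >>> from nipype.interfaces.fsl.fix import TrainingSetCreator
+    >>> trainset = TrainingSetCreator()
+    >>> trainset.inputs.mel_icas_in = ['run1.ica', 'run2.ica']
+    >>> res = trainset.run()  # doctest: +SKIP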
-    '''
+    """
+
     input_spec = TrainingSetCreatorInputSpec
     output_spec = TrainingSetCreatorOutputSpec
     _always_run = True

     def _run_interface(self, runtime):
-        mel_icas = []
-        for item in self.inputs.mel_icas_in:
-            if os.path.exists(os.path.join(item, 'hand_labels_noise.txt')):
-                mel_icas.append(item)
-
+        mel_icas = [
+            item
+            for item in self.inputs.mel_icas_in
+            if os.path.exists(os.path.join(item, "hand_labels_noise.txt"))
+        ]
         if len(mel_icas) == 0:
             raise Exception(
-                '%s did not find any hand_labels_noise.txt files in the following directories: %s'
-                % (self.__class__.__name__, mel_icas))
+                "%s did not find any hand_labels_noise.txt files in the following directories: %s"
+                % (self.__class__.__name__, mel_icas)
+            )

         return runtime

     def _list_outputs(self):
-        mel_icas = []
-        for item in self.inputs.mel_icas_in:
-            if os.path.exists(os.path.join(item, 'hand_labels_noise.txt')):
-                mel_icas.append(item)
+        mel_icas = [
+            item
+            for item in self.inputs.mel_icas_in
+            if os.path.exists(os.path.join(item, "hand_labels_noise.txt"))
+        ]
         outputs = self._outputs().get()
-        outputs['mel_icas_out'] = mel_icas
+        outputs["mel_icas_out"] = mel_icas
         return outputs


@@ -121,31 +133,34 @@ class FeatureExtractorInputSpec(CommandLineInputSpec):
     mel_ica = Directory(
         exists=True,
         copyfile=False,
-        desc='Melodic output directory or directories',
-        argstr='%s',
-        position=-1)
+        desc="Melodic output directory or directories",
+        argstr="%s",
+        position=-1,
+    )


 class FeatureExtractorOutputSpec(TraitedSpec):
     mel_ica = Directory(
         exists=True,
         copyfile=False,
-        desc='Melodic output directory or directories',
-        argstr='%s',
-        position=-1)
+        desc="Melodic output directory or directories",
+        argstr="%s",
+        position=-1,
+    )


 class FeatureExtractor(CommandLine):
-    '''
+    """
     Extract features (for later training and/or classifying)
-    '''
+    """
+
     input_spec = FeatureExtractorInputSpec
     output_spec = FeatureExtractorOutputSpec
-    cmd = 'fix -f'
+    cmd = "fix -f"

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['mel_ica'] = self.inputs.mel_ica
+        outputs["mel_ica"] = self.inputs.mel_ica
         return outputs


@@ -153,42 +168,43 @@ class TrainingInputSpec(CommandLineInputSpec):
     mel_icas = InputMultiPath(
         Directory(exists=True),
         copyfile=False,
-        desc='Melodic output directories',
-        argstr='%s',
-        position=-1)
+        desc="Melodic output directories",
+        argstr="%s",
+        position=-1,
+    )
     trained_wts_filestem = traits.Str(
-        desc=
-        'trained-weights filestem, used for trained_wts_file and output directories',
-        argstr='%s',
-        position=1)
+        desc="trained-weights filestem, used for trained_wts_file and output directories",
+        argstr="%s",
+        position=1,
+    )
     loo = traits.Bool(
-        argstr='-l',
-        desc='full leave-one-out test with classifier training',
-        position=2)
+        argstr="-l", desc="full leave-one-out test with classifier training", position=2
+    )


 class TrainingOutputSpec(TraitedSpec):
-    trained_wts_file = File(exists=True, desc='Trained-weights file')
+    trained_wts_file = File(exists=True, desc="Trained-weights file")


 class Training(CommandLine):
-    '''
+    """
     Train the classifier based on your own FEAT/MELODIC output directory.
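+
+    A minimal usage sketch (the paths are hypothetical and must point at
+    existing Melodic output directories containing hand_labels_noise.txt):
+
+    >>> from nipype.interfaces.fsl.fix import Training
+    >>> train = Training()
+    >>> train.inputs.mel_icas = ['run1.ica', 'run2.ica']
+    >>> train.inputs.trained_wts_filestem = 'study_wts'
+    >>> res = train.run()  # doctest: +SKIP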
-    '''
+    """
+
     input_spec = TrainingInputSpec
     output_spec = TrainingOutputSpec
-    cmd = 'fix -t'
+    cmd = "fix -t"

     def _list_outputs(self):
         outputs = self.output_spec().get()
         if isdefined(self.inputs.trained_wts_filestem):
-            outputs['trained_wts_file'] = os.path.abspath(
-                self.inputs.trained_wts_filestem + '.RData')
+            outputs["trained_wts_file"] = os.path.abspath(
+                self.inputs.trained_wts_filestem + ".RData"
+            )
         else:
-            outputs['trained_wts_file'] = os.path.abspath(
-                'trained_wts_file.RData')
+            outputs["trained_wts_file"] = os.path.abspath("trained_wts_file.RData")
         return outputs


@@ -196,47 +212,50 @@ class AccuracyTesterInputSpec(CommandLineInputSpec):
     mel_icas = InputMultiPath(
         Directory(exists=True),
        copyfile=False,
-        desc='Melodic output directories',
-        argstr='%s',
+        desc="Melodic output directories",
+        argstr="%s",
         position=3,
-        mandatory=True)
+        mandatory=True,
+    )
     trained_wts_file = File(
-        desc='trained-weights file', argstr='%s', position=1, mandatory=True)
+        desc="trained-weights file", argstr="%s", position=1, mandatory=True
+    )
     output_directory = Directory(
-        desc=
-        'Path to folder in which to store the results of the accuracy test.',
-        argstr='%s',
+        desc="Path to folder in which to store the results of the accuracy test.",
+        argstr="%s",
         position=2,
-        mandatory=True)
+        mandatory=True,
+    )


 class AccuracyTesterOutputSpec(TraitedSpec):
     output_directory = Directory(
-        desc=
-        'Path to folder in which to store the results of the accuracy test.',
-        argstr='%s',
-        position=1)
+        desc="Path to folder in which to store the results of the accuracy test.",
+        argstr="%s",
+        position=1,
+    )


 class AccuracyTester(CommandLine):
-    '''
+    """
     Test the accuracy of an existing training dataset on a set of
     hand-labelled subjects.

     Note: This may or may not be working. Could not presently confirm
     because fix fails on this (even outside of nipype) without leaving
     an error msg.
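+
+    A tentative usage sketch (untested, per the note above; all paths
+    are hypothetical):
+
+    >>> from nipype.interfaces.fsl.fix import AccuracyTester
+    >>> acctest = AccuracyTester()
+    >>> acctest.inputs.trained_wts_file = 'study_wts.RData'
+    >>> acctest.inputs.output_directory = 'accuracy_test'
+    >>> acctest.inputs.mel_icas = ['run1.ica', 'run2.ica']
+    >>> res = acctest.run()  # doctest: +SKIP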
-    '''
+    """
+
     input_spec = AccuracyTesterInputSpec
     output_spec = AccuracyTesterOutputSpec
-    cmd = 'fix -C'
+    cmd = "fix -C"

     def _list_outputs(self):
         outputs = self.output_spec().get()
         if isdefined(self.inputs.output_directory):
-            outputs['output_directory'] = Directory(
-                exists=False, value=self.inputs.output_directory)
+            outputs["output_directory"] = Directory(
+                exists=False, value=self.inputs.output_directory
+            )
         else:
-            outputs['output_directory'] = Directory(
-                exists=False, value='accuracy_test')
+            outputs["output_directory"] = Directory(exists=False, value="accuracy_test")
         return outputs


@@ -244,58 +263,57 @@ class ClassifierInputSpec(CommandLineInputSpec):
     mel_ica = Directory(
         exists=True,
         copyfile=False,
-        desc='Melodic output directory or directories',
-        argstr='%s',
-        position=1)
+        desc="Melodic output directory or directories",
+        argstr="%s",
+        position=1,
+    )
     trained_wts_file = File(
         exists=True,
-        desc='trained-weights file',
-        argstr='%s',
+        desc="trained-weights file",
+        argstr="%s",
         position=2,
         mandatory=True,
-        copyfile=False)
+        copyfile=False,
+    )
     thresh = traits.Int(
-        argstr='%d',
-        desc='Threshold for cleanup.',
-        position=-1,
-        mandatory=True)
+        argstr="%d", desc="Threshold for cleanup.", position=-1, mandatory=True
+    )
     artifacts_list_file = File(
-        desc=
-        'Text file listing which ICs are artifacts; can be the output from classification or can be created manually'
+        desc="Text file listing which ICs are artifacts; can be the output from classification or can be created manually"
     )


 class ClassifierOutputSpec(TraitedSpec):
     artifacts_list_file = File(
-        desc=
-        'Text file listing which ICs are artifacts; can be the output from classification or can be created manually'
+        desc="Text file listing which ICs are artifacts; can be the output from classification or can be created manually"
     )


 class Classifier(CommandLine):
-    '''
+    """
     Classify ICA components using a specific training dataset
     (<thresh> is in the range 0-100, typically 5-20).
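+
+    A minimal usage sketch (file and directory names are hypothetical):
+
+    >>> from nipype.interfaces.fsl.fix import Classifier
+    >>> clas = Classifier()
+    >>> clas.inputs.mel_ica = 'run1.ica'
+    >>> clas.inputs.trained_wts_file = 'study_wts.RData'
+    >>> clas.inputs.thresh = 10
+    >>> res = clas.run()  # doctest: +SKIP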
-    '''
+    """
+
     input_spec = ClassifierInputSpec
     output_spec = ClassifierOutputSpec
-    cmd = 'fix -c'
+    cmd = "fix -c"

     def _gen_artifacts_list_file(self, mel_ica, thresh):
         _, trained_wts_file = os.path.split(self.inputs.trained_wts_file)
-        trained_wts_filestem = trained_wts_file.split('.')[0]
-        filestem = 'fix4melview_' + trained_wts_filestem + '_thr'
+        trained_wts_filestem = trained_wts_file.split(".")[0]
+        filestem = "fix4melview_" + trained_wts_filestem + "_thr"

-        fname = os.path.join(mel_ica, filestem + str(thresh) + '.txt')
+        fname = os.path.join(mel_ica, filestem + str(thresh) + ".txt")
         return fname

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['artifacts_list_file'] = self._gen_artifacts_list_file(
-            self.inputs.mel_ica, self.inputs.thresh)
+        outputs["artifacts_list_file"] = self._gen_artifacts_list_file(
+            self.inputs.mel_ica, self.inputs.thresh
+        )

         return outputs


@@ -303,68 +321,73 @@ def _list_outputs(self):
 class CleanerInputSpec(CommandLineInputSpec):
     artifacts_list_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         position=1,
         mandatory=True,
-        desc=
-        'Text file listing which ICs are artifacts; can be the output from classification or can be created manually'
+        desc="Text file listing which ICs are artifacts; can be the output from classification or can be created manually",
     )
     cleanup_motion = traits.Bool(
-        argstr='-m',
-        desc=
-        'cleanup motion confounds, looks for design.fsf for highpass filter cut-off',
-        position=2)
+        argstr="-m",
+        desc="cleanup motion confounds, looks for design.fsf for highpass filter cut-off",
+        position=2,
+    )
     highpass = traits.Float(
         100,
-        argstr='-m -h %f',
+        argstr="-m -h %f",
         usedefault=True,
-        desc='cleanup motion confounds',
-        position=2)
+        desc="cleanup motion confounds",
+        position=2,
+    )
     aggressive = traits.Bool(
-        argstr='-A',
-        desc=
-        'Apply aggressive (full variance) cleanup, instead of the default less-aggressive (unique variance) cleanup.',
-        position=3)
+        argstr="-A",
+        desc="Apply aggressive (full variance) cleanup, instead of the default less-aggressive (unique variance) cleanup.",
+        position=3,
+    )

-    confound_file = traits.File(
-        argstr='-x %s', desc='Include additional confound file.', position=4)
+    confound_file = File(
+        argstr="-x %s", desc="Include additional confound file.", position=4
+    )

-    confound_file_1 = traits.File(
-        argstr='-x %s', desc='Include additional confound file.', position=5)
+    confound_file_1 = File(
+        argstr="-x %s", desc="Include additional confound file.", position=5
+    )

-    confound_file_2 = traits.File(
-        argstr='-x %s', desc='Include additional confound file.', position=6)
+    confound_file_2 = File(
+        argstr="-x %s", desc="Include additional confound file.", position=6
+    )


 class CleanerOutputSpec(TraitedSpec):
-    cleaned_functional_file = File(exists=True, desc='Cleaned session data')
+    cleaned_functional_file = File(exists=True, desc="Cleaned session data")


 class Cleaner(CommandLine):
-    '''
+    """
     Extract features (for later training and/or classifying)
-    '''
+    """
+
     input_spec = CleanerInputSpec
     output_spec = CleanerOutputSpec
-    cmd = 'fix -a'
+    cmd = "fix -a"

     def _get_cleaned_functional_filename(self, artifacts_list_filename):
-        ''' extract the proper filename from the first line of the artifacts file '''
-        artifacts_list_file = open(artifacts_list_filename, 'r')
-        functional_filename, extension = artifacts_list_file.readline().split(
-            '.')
+        """extract the proper filename from the first line of the artifacts file"""
+        artifacts_list_file = open(artifacts_list_filename)
+
functional_filename, extension = artifacts_list_file.readline().split(".")
         artifacts_list_file_path, artifacts_list_filename = os.path.split(
-            artifacts_list_filename)
+            artifacts_list_filename
+        )

-        return (os.path.join(artifacts_list_file_path,
-                             functional_filename + '_clean.nii.gz'))
+        return os.path.join(
+            artifacts_list_file_path, functional_filename + "_clean.nii.gz"
+        )

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs[
-            'cleaned_functional_file'] = self._get_cleaned_functional_filename(
-                self.inputs.artifacts_list_file)
+        outputs["cleaned_functional_file"] = self._get_cleaned_functional_filename(
+            self.inputs.artifacts_list_file
+        )
         return outputs
diff --git a/nipype/interfaces/fsl/maths.py b/nipype/interfaces/fsl/maths.py
index 3862cea8c7..7640cf930b 100644
--- a/nipype/interfaces/fsl/maths.py
+++ b/nipype/interfaces/fsl/maths.py
@@ -1,60 +1,47 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """
 The maths module provides higher-level interfaces to some of the operations
 that can be performed with the fslmaths command-line program.
 """
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)
 import os
 import numpy as np

-from ..base import (TraitedSpec, File, traits, InputMultiPath, isdefined)
+from ..base import TraitedSpec, File, traits, InputMultiPath, isdefined
 from .base import FSLCommand, FSLCommandInputSpec


 class MathsInput(FSLCommandInputSpec):
-
     in_file = File(
-        position=2,
-        argstr="%s",
-        exists=True,
-        mandatory=True,
-        desc="image to operate on")
+        position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on"
+    )
     out_file = File(
-        genfile=True,
-        position=-2,
-        argstr="%s",
-        desc="image to write",
-        hash_files=False)
+        genfile=True, position=-2, argstr="%s", desc="image to write", hash_files=False
+    )
     _dtypes = ["float", "char", "int", "short", "double", "input"]
     internal_datatype = traits.Enum(
         *_dtypes,
         position=1,
         argstr="-dt %s",
-        desc=("datatype to use for calculations "
-              "(default is float)"))
+        desc=("datatype to use for calculations (default is float)")
+    )
     output_datatype = traits.Enum(
         *_dtypes,
         position=-1,
         argstr="-odt %s",
-        desc=("datatype to use for output (default "
-              "uses input type)"))
+        desc=("datatype to use for output (default uses input type)")
+    )

     nan2zeros = traits.Bool(
-        position=3,
-        argstr='-nan',
-        desc='change NaNs to zeros before doing anything')
+        position=3, argstr="-nan", desc="change NaNs to zeros before doing anything"
+    )


 class MathsOutput(TraitedSpec):
-
-    out_file = File(exists=True, desc="image written after calculations")
+    out_file = File(desc="image written after calculations")


 class MathsCommand(FSLCommand):
-
     _cmd = "fslmaths"
     input_spec = MathsInput
     output_spec = MathsOutput
@@ -65,7 +52,8 @@ def _list_outputs(self):
         outputs["out_file"] = self.inputs.out_file
         if not isdefined(self.inputs.out_file):
             outputs["out_file"] = self._gen_fname(
-                self.inputs.in_file, suffix=self._suffix)
+                self.inputs.in_file, suffix=self._suffix
+            )
         outputs["out_file"] = os.path.abspath(outputs["out_file"])
         return outputs

@@ -76,44 +64,41 @@ def _gen_filename(self, name):


 class ChangeDataTypeInput(MathsInput):
-
     _dtypes = ["float", "char", "int", "short", "double", "input"]
     output_datatype = traits.Enum(
-        *_dtypes,
-        position=-1,
-        argstr="-odt %s",
-        mandatory=True,
-        desc="output data
type" + ) class ChangeDataType(MathsCommand): - """Use fslmaths to change the datatype of an image. + """Use fslmaths to change the datatype of an image.""" - """ input_spec = ChangeDataTypeInput _suffix = "_chdt" class ThresholdInputSpec(MathsInput): - thresh = traits.Float( - mandatory=True, position=4, argstr="%s", desc="threshold value") + mandatory=True, position=4, argstr="%s", desc="threshold value" + ) direction = traits.Enum( "below", "above", usedefault=True, - desc="zero-out either below or above thresh value") + desc="zero-out either below or above thresh value", + ) use_robust_range = traits.Bool( - desc="interpret thresh as percentage (0-100) of robust range") + desc="interpret thresh as percentage (0-100) of robust range" + ) use_nonzero_voxels = traits.Bool( desc="use nonzero voxels to calculate robust range", - requires=["use_robust_range"]) + requires=["use_robust_range"], + ) class Threshold(MathsCommand): - """Use fslmaths to apply a threshold to an image in a variety of ways. + """Use fslmaths to apply a threshold to an image in a variety of ways.""" - """ input_spec = ThresholdInputSpec _suffix = "_thresh" @@ -125,18 +110,16 @@ def _format_arg(self, name, spec, value): arg += "u" arg += "thr" if isdefined(_si.use_robust_range) and _si.use_robust_range: - if (isdefined(_si.use_nonzero_voxels) - and _si.use_nonzero_voxels): + if isdefined(_si.use_nonzero_voxels) and _si.use_nonzero_voxels: arg += "P" else: arg += "p" arg += " %.10f" % value return arg - return super(Threshold, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class StdImageInput(MathsInput): - dimension = traits.Enum( "T", "X", @@ -145,19 +128,20 @@ class StdImageInput(MathsInput): usedefault=True, argstr="-%sstd", position=4, - desc="dimension to standard deviate across") + desc="dimension to standard deviate across", + ) class StdImage(MathsCommand): """Use fslmaths to generate a standard deviation in an image across a given dimension. """ + input_spec = StdImageInput _suffix = "_std" class MeanImageInput(MathsInput): - dimension = traits.Enum( "T", "X", @@ -166,19 +150,18 @@ class MeanImageInput(MathsInput): usedefault=True, argstr="-%smean", position=4, - desc="dimension to mean across") + desc="dimension to mean across", + ) class MeanImage(MathsCommand): - """Use fslmaths to generate a mean image across a given dimension. 
+ """Use fslmaths to generate a mean image across a given dimension.""" - """ input_spec = MeanImageInput _suffix = "_mean" class MaxImageInput(MathsInput): - dimension = traits.Enum( "T", "X", @@ -187,7 +170,8 @@ class MaxImageInput(MathsInput): usedefault=True, argstr="-%smax", position=4, - desc="dimension to max across") + desc="dimension to max across", + ) class MaxImage(MathsCommand): @@ -203,12 +187,12 @@ class MaxImage(MathsCommand): 'fslmaths functional.nii -Tmax functional_max.nii' """ + input_spec = MaxImageInput _suffix = "_max" class PercentileImageInput(MathsInput): - dimension = traits.Enum( "T", "X", @@ -217,14 +201,15 @@ class PercentileImageInput(MathsInput): usedefault=True, argstr="-%sperc", position=4, - desc="dimension to percentile across") + desc="dimension to percentile across", + ) perc = traits.Range( low=0, high=100, argstr="%f", position=5, - desc=("nth percentile (0-100) of FULL RANGE " - "across dimension")) + desc=("nth percentile (0-100) of FULL RANGE across dimension"), + ) class PercentileImage(MathsCommand): @@ -241,12 +226,12 @@ class PercentileImage(MathsCommand): 'fslmaths functional.nii -Tperc 90 functional_perc.nii' """ + input_spec = PercentileImageInput _suffix = "_perc" class MaxnImageInput(MathsInput): - dimension = traits.Enum( "T", "X", @@ -255,7 +240,8 @@ class MaxnImageInput(MathsInput): usedefault=True, argstr="-%smaxn", position=4, - desc="dimension to index max across") + desc="dimension to index max across", + ) class MaxnImage(MathsCommand): @@ -263,12 +249,12 @@ class MaxnImage(MathsCommand): a given dimension. """ + input_spec = MaxnImageInput _suffix = "_maxn" class MinImageInput(MathsInput): - dimension = traits.Enum( "T", "X", @@ -277,19 +263,18 @@ class MinImageInput(MathsInput): usedefault=True, argstr="-%smin", position=4, - desc="dimension to min across") + desc="dimension to min across", + ) class MinImage(MathsCommand): - """Use fslmaths to generate a minimum image across a given dimension. + """Use fslmaths to generate a minimum image across a given dimension.""" - """ input_spec = MinImageInput _suffix = "_min" class MedianImageInput(MathsInput): - dimension = traits.Enum( "T", "X", @@ -298,19 +283,18 @@ class MedianImageInput(MathsInput): usedefault=True, argstr="-%smedian", position=4, - desc="dimension to median across") + desc="dimension to median across", + ) class MedianImage(MathsCommand): - """Use fslmaths to generate a median image across a given dimension. + """Use fslmaths to generate a median image across a given dimension.""" - """ input_spec = MedianImageInput _suffix = "_median" class AR1ImageInput(MathsInput): - dimension = traits.Enum( "T", "X", @@ -319,8 +303,8 @@ class AR1ImageInput(MathsInput): usedefault=True, argstr="-%sar1", position=4, - desc=("dimension to find AR(1) coefficient" - "across")) + desc=("dimension to find AR(1) coefficient across"), + ) class AR1Image(MathsCommand): @@ -328,30 +312,31 @@ class AR1Image(MathsCommand): given dimension. 
(Should use -odt float and probably demean first) """ + input_spec = AR1ImageInput _suffix = "_ar1" class IsotropicSmoothInput(MathsInput): - fwhm = traits.Float( mandatory=True, xor=["sigma"], position=4, argstr="-s %.5f", - desc="fwhm of smoothing kernel [mm]") + desc="fwhm of smoothing kernel [mm]", + ) sigma = traits.Float( mandatory=True, xor=["fwhm"], position=4, argstr="-s %.5f", - desc="sigma of smoothing kernel [mm]") + desc="sigma of smoothing kernel [mm]", + ) class IsotropicSmooth(MathsCommand): - """Use fslmaths to spatially smooth an image with a gaussian kernel. + """Use fslmaths to spatially smooth an image with a gaussian kernel.""" - """ input_spec = IsotropicSmoothInput _suffix = "_smooth" @@ -359,29 +344,27 @@ def _format_arg(self, name, spec, value): if name == "fwhm": sigma = float(value) / np.sqrt(8 * np.log(2)) return spec.argstr % sigma - return super(IsotropicSmooth, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class ApplyMaskInput(MathsInput): - mask_file = File( exists=True, mandatory=True, argstr="-mas %s", position=4, - desc="binary image defining mask space") + desc="binary image defining mask space", + ) class ApplyMask(MathsCommand): - """Use fslmaths to apply a binary mask to another image. + """Use fslmaths to apply a binary mask to another image.""" - """ input_spec = ApplyMaskInput _suffix = "_masked" class KernelInput(MathsInput): - kernel_shape = traits.Enum( "3D", "2D", @@ -392,23 +375,24 @@ class KernelInput(MathsInput): "file", argstr="-kernel %s", position=4, - desc="kernel shape to use") + desc="kernel shape to use", + ) kernel_size = traits.Float( argstr="%.4f", position=5, xor=["kernel_file"], - desc=("kernel size - voxels for box/boxv, mm " - "for sphere, mm sigma for gauss")) + desc=("kernel size - voxels for box/boxv, mm for sphere, mm sigma for gauss"), + ) kernel_file = File( exists=True, argstr="%s", position=5, xor=["kernel_size"], - desc="use external file for kernel") + desc="use external file for kernel", + ) class DilateInput(KernelInput): - operation = traits.Enum( "mean", "modal", @@ -416,37 +400,35 @@ class DilateInput(KernelInput): argstr="-dil%s", position=6, mandatory=True, - desc="filtering operation to perfoem in dilation") + desc="filtering operation to perform in dilation", + ) class DilateImage(MathsCommand): - """Use fslmaths to perform a spatial dilation of an image. + """Use fslmaths to perform a spatial dilation of an image.""" - """ input_spec = DilateInput _suffix = "_dil" def _format_arg(self, name, spec, value): if name == "operation": return spec.argstr % dict(mean="M", modal="D", max="F")[value] - return super(DilateImage, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class ErodeInput(KernelInput): - minimum_filter = traits.Bool( argstr="%s", position=6, usedefault=True, default_value=False, - desc=("if true, minimum filter rather than " - "erosion by zeroing-out")) + desc=("if true, minimum filter rather than erosion by zeroing-out"), + ) class ErodeImage(MathsCommand): - """Use fslmaths to perform a spatial erosion of an image. 
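# A small sketch of the FWHM-to-sigma conversion applied by
# IsotropicSmooth._format_arg above: for a Gaussian kernel,
# FWHM = sigma * sqrt(8 * ln 2) ~= 2.3548 * sigma, so the user-facing fwhm
# input is divided by that constant before being passed to fslmaths as
# "-s <sigma>". The helper name is illustrative.
import numpy as np

def fwhm_to_sigma(fwhm):
    return float(fwhm) / np.sqrt(8 * np.log(2))

# fwhm_to_sigma(6.0) ~= 2.54797, so fwhm=6 is rendered as "-s 2.54797"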
+ """Use fslmaths to perform a spatial erosion of an image.""" - """ input_spec = ErodeInput _suffix = "_ero" @@ -455,11 +437,10 @@ def _format_arg(self, name, spec, value): if value: return "-eroF" return "-ero" - return super(ErodeImage, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class SpatialFilterInput(KernelInput): - operation = traits.Enum( "mean", "median", @@ -467,19 +448,18 @@ class SpatialFilterInput(KernelInput): argstr="-f%s", position=6, mandatory=True, - desc="operation to filter with") + desc="operation to filter with", + ) class SpatialFilter(MathsCommand): - """Use fslmaths to spatially filter an image. + """Use fslmaths to spatially filter an image.""" - """ input_spec = SpatialFilterInput _suffix = "_filt" class UnaryMathsInput(MathsInput): - operation = traits.Enum( "exp", "log", @@ -507,22 +487,21 @@ class UnaryMathsInput(MathsInput): argstr="-%s", position=4, mandatory=True, - desc="operation to perform") + desc="operation to perform", + ) class UnaryMaths(MathsCommand): - """Use fslmaths to perorm a variety of mathematical operations on an image. + """Use fslmaths to perorm a variety of mathematical operations on an image.""" - """ input_spec = UnaryMathsInput def _list_outputs(self): self._suffix = "_" + self.inputs.operation - return super(UnaryMaths, self)._list_outputs() + return super()._list_outputs() class BinaryMathsInput(MathsInput): - operation = traits.Enum( "add", "sub", @@ -534,20 +513,23 @@ class BinaryMathsInput(MathsInput): mandatory=True, argstr="-%s", position=4, - desc="operation to perform") + desc="operation to perform", + ) operand_file = File( exists=True, argstr="%s", mandatory=True, position=5, xor=["operand_value"], - desc="second image to perform operation with") + desc="second image to perform operation with", + ) operand_value = traits.Float( argstr="%.8f", mandatory=True, position=5, xor=["operand_file"], - desc="value to perform operation with") + desc="value to perform operation with", + ) class BinaryMaths(MathsCommand): @@ -555,22 +537,22 @@ class BinaryMaths(MathsCommand): a numeric value. """ + input_spec = BinaryMathsInput class MultiImageMathsInput(MathsInput): - op_string = traits.String( position=4, argstr="%s", mandatory=True, - desc=("python formatted string of operations " - "to perform")) + desc=("python formatted string of operations to perform"), + ) operand_files = InputMultiPath( File(exists=True), mandatory=True, - desc=("list of file names to plug into op " - "string")) + desc=("list of file names to plug into op string"), + ) class MultiImageMaths(MathsCommand): @@ -588,28 +570,30 @@ class MultiImageMaths(MathsCommand): 'fslmaths functional.nii -add functional2.nii -mul -1 -div functional3.nii functional4.nii' """ + input_spec = MultiImageMathsInput def _format_arg(self, name, spec, value): if name == "op_string": return value % tuple(self.inputs.operand_files) - return super(MultiImageMaths, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class TemporalFilterInput(MathsInput): - lowpass_sigma = traits.Float( -1, argstr="%.6f", position=5, usedefault=True, - desc="lowpass filter sigma (in volumes)") + desc="lowpass filter sigma (in volumes)", + ) highpass_sigma = traits.Float( -1, argstr="-bptf %.6f", position=4, usedefault=True, - desc="highpass filter sigma (in volumes)") + desc="highpass filter sigma (in volumes)", + ) class TemporalFilter(MathsCommand): @@ -617,5 +601,6 @@ class TemporalFilter(MathsCommand): timeseries. 
""" + input_spec = TemporalFilterInput _suffix = "_filt" diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py index 113f785120..2a148025f5 100644 --- a/nipype/interfaces/fsl/model.py +++ b/nipype/interfaces/fsl/model.py @@ -1,96 +1,127 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL `_ command line tools. This was written to work with FSL version 4.1.4. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, open - import os from glob import glob from shutil import rmtree from string import Template import numpy as np +from looseversion import LooseVersion from nibabel import load -from ... import LooseVersion from ...utils.filemanip import simplify_list, ensure_list from ...utils.misc import human_order_sorted from ...external.due import BibTeX -from ..base import (File, traits, isdefined, TraitedSpec, BaseInterface, - Directory, InputMultiPath, OutputMultiPath, - BaseInterfaceInputSpec) +from ..base import ( + File, + traits, + Tuple, + isdefined, + TraitedSpec, + BaseInterface, + Directory, + InputMultiPath, + OutputMultiPath, + BaseInterfaceInputSpec, +) from .base import FSLCommand, FSLCommandInputSpec, Info class Level1DesignInputSpec(BaseInterfaceInputSpec): interscan_interval = traits.Float( - mandatory=True, desc='Interscan interval (in secs)') + mandatory=True, desc="Interscan interval (in secs)" + ) session_info = traits.Any( mandatory=True, - desc=('Session specific information generated ' - 'by ``modelgen.SpecifyModel``')) + desc=("Session specific information generated by ``modelgen.SpecifyModel``"), + ) bases = traits.Either( traits.Dict( - traits.Enum('dgamma'), - traits.Dict(traits.Enum('derivs'), traits.Bool)), + traits.Enum("dgamma"), traits.Dict(traits.Enum("derivs"), traits.Bool) + ), traits.Dict( - traits.Enum('gamma'), - traits.Dict(traits.Enum('derivs', 'gammasigma', 'gammadelay'))), + traits.Enum("gamma"), + traits.Dict(traits.Enum("derivs", "gammasigma", "gammadelay")), + ), traits.Dict( - traits.Enum('custom'), - traits.Dict(traits.Enum('bfcustompath'), traits.Str)), - traits.Dict(traits.Enum('none'), traits.Dict()), - traits.Dict(traits.Enum('none'), traits.Enum(None)), + traits.Enum("custom"), traits.Dict(traits.Enum("bfcustompath"), traits.Str) + ), + traits.Dict(traits.Enum("none"), traits.Dict()), + traits.Dict(traits.Enum("none"), traits.Enum(None)), mandatory=True, - desc=("name of basis function and options e.g., " - "{'dgamma': {'derivs': True}}"), + desc=("name of basis function and options e.g., {'dgamma': {'derivs': True}}"), ) orthogonalization = traits.Dict( traits.Int, traits.Dict(traits.Int, traits.Either(traits.Bool, traits.Int)), - desc=("which regressors to make orthogonal e.g., " - "{1: {0:0,1:0,2:0}, 2: {0:1,1:1,2:0}} to make the second " - "regressor in a 2-regressor model orthogonal to the first."), - usedefault=True) + desc=( + "which regressors to make orthogonal e.g., " + "{1: {0:0,1:0,2:0}, 2: {0:1,1:1,2:0}} to make the second " + "regressor in a 2-regressor model orthogonal to the first." + ), + usedefault=True, + ) model_serial_correlations = traits.Bool( desc="Option to model serial correlations using an \ autoregressive estimator (order 1). Setting this option is only \ useful in the context of the fsf file. 
If you set this to False, you need to \ repeat this option for FILMGLS by setting autocorr_noestimate to True", - mandatory=True) + mandatory=True, + ) contrasts = traits.List( traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float), traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('F'), - traits.List( - traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float), - traits.List(traits.Float)))))), + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + Tuple( + traits.Str, + traits.Enum("F"), + traits.List( + traits.Either( + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + ) + ), + ), + ), desc="List of contrasts with each contrast being a list of the form - \ [('name', 'stat', [condition list], [weight list], [session list])]. if \ session list is None or not provided, all sessions are used. For F \ contrasts, the condition list should contain previously defined \ -T-contrasts.") +T-contrasts.", + ) class Level1DesignOutputSpec(TraitedSpec): - fsf_files = OutputMultiPath( - File(exists=True), desc='FSL feat specification files') + fsf_files = OutputMultiPath(File(exists=True), desc="FSL feat specification files") ev_files = OutputMultiPath( - traits.List(File(exists=True)), desc='condition information files') + traits.List(File(exists=True)), desc="condition information files" + ) class Level1Design(BaseInterface): @@ -111,34 +142,41 @@ class Level1Design(BaseInterface): output_spec = Level1DesignOutputSpec def _create_ev_file(self, evfname, evinfo): - f = open(evfname, 'wt') - for i in evinfo: - if len(i) == 3: - f.write('%f %f %f\n' % (i[0], i[1], i[2])) - else: - f.write('%f\n' % i[0]) - f.close() - - def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, - orthogonalization, contrasts, do_tempfilter, - basis_key): + with open(evfname, "w") as f: + for i in evinfo: + if len(i) == 3: + f.write(f"{i[0]:f} {i[1]:f} {i[2]:f}\n") + else: + f.write("%f\n" % i[0]) + + def _create_ev_files( + self, + cwd, + runinfo, + runidx, + ev_parameters, + orthogonalization, + contrasts, + do_tempfilter, + basis_key, + ): """Creates EV files from condition and regressor information. - Parameters: - ----------- - - runinfo : dict - Generated by `SpecifyModel` and contains information - about events and other regressors. - runidx : int - Index to run number - ev_parameters : dict - A dictionary containing the model parameters for the - given design type. - orthogonalization : dict - A dictionary of dictionaries specifying orthogonal EVs. - contrasts : list of lists - Information on contrasts to be evaluated + Parameters: + ----------- + + runinfo : dict + Generated by `SpecifyModel` and contains information + about events and other regressors. + runidx : int + Index to run number + ev_parameters : dict + A dictionary containing the model parameters for the + given design type. 
+ orthogonalization : dict + A dictionary of dictionaries specifying orthogonal EVs. + contrasts : list of lists + Information on contrasts to be evaluated """ conds = {} evname = [] @@ -146,99 +184,98 @@ def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, basis_key = "hrf" elif basis_key == "gamma": try: - _ = ev_parameters['gammasigma'] + _ = ev_parameters["gammasigma"] except KeyError: - ev_parameters['gammasigma'] = 3 + ev_parameters["gammasigma"] = 3 try: - _ = ev_parameters['gammadelay'] + _ = ev_parameters["gammadelay"] except KeyError: - ev_parameters['gammadelay'] = 6 - ev_template = load_template('feat_ev_' + basis_key + '.tcl') - ev_none = load_template('feat_ev_none.tcl') - ev_ortho = load_template('feat_ev_ortho.tcl') - ev_txt = '' + ev_parameters["gammadelay"] = 6 + ev_template = load_template("feat_ev_" + basis_key + ".tcl") + ev_none = load_template("feat_ev_none.tcl") + ev_ortho = load_template("feat_ev_ortho.tcl") + ev_txt = "" # generate sections for conditions and other nuisance # regressors num_evs = [0, 0] - for field in ['cond', 'regress']: + for field in ["cond", "regress"]: for i, cond in enumerate(runinfo[field]): - name = cond['name'] + name = cond["name"] evname.append(name) - evfname = os.path.join(cwd, 'ev_%s_%d_%d.txt' % (name, runidx, - len(evname))) + evfname = os.path.join( + cwd, "ev_%s_%d_%d.txt" % (name, runidx, len(evname)) + ) evinfo = [] num_evs[0] += 1 num_evs[1] += 1 - if field == 'cond': - for j, onset in enumerate(cond['onset']): + if field == "cond": + for j, onset in enumerate(cond["onset"]): try: - amplitudes = cond['amplitudes'] + amplitudes = cond["amplitudes"] if len(amplitudes) > 1: amp = amplitudes[j] else: amp = amplitudes[0] except KeyError: amp = 1 - if len(cond['duration']) > 1: - evinfo.insert(j, [onset, cond['duration'][j], amp]) + if len(cond["duration"]) > 1: + evinfo.insert(j, [onset, cond["duration"][j], amp]) else: - evinfo.insert(j, [onset, cond['duration'][0], amp]) - ev_parameters['cond_file'] = evfname - ev_parameters['ev_num'] = num_evs[0] - ev_parameters['ev_name'] = name - ev_parameters['tempfilt_yn'] = do_tempfilter - if 'basisorth' not in ev_parameters: - ev_parameters['basisorth'] = 1 - if 'basisfnum' not in ev_parameters: - ev_parameters['basisfnum'] = 1 + evinfo.insert(j, [onset, cond["duration"][0], amp]) + ev_parameters["cond_file"] = evfname + ev_parameters["ev_num"] = num_evs[0] + ev_parameters["ev_name"] = name + ev_parameters["tempfilt_yn"] = do_tempfilter + if "basisorth" not in ev_parameters: + ev_parameters["basisorth"] = 1 + if "basisfnum" not in ev_parameters: + ev_parameters["basisfnum"] = 1 try: - ev_parameters['fsldir'] = os.environ['FSLDIR'] + ev_parameters["fsldir"] = os.environ["FSLDIR"] except KeyError: - if basis_key == 'flobs': - raise Exception( - 'FSL environment variables not set') + if basis_key == "flobs": + raise Exception("FSL environment variables not set") else: - ev_parameters['fsldir'] = '/usr/share/fsl' - ev_parameters['temporalderiv'] = int( - bool(ev_parameters.get('derivs', False))) - if ev_parameters['temporalderiv']: - evname.append(name + 'TD') + ev_parameters["fsldir"] = "/usr/share/fsl" + ev_parameters["temporalderiv"] = int( + bool(ev_parameters.get("derivs", False)) + ) + if ev_parameters["temporalderiv"]: + evname.append(name + "TD") num_evs[1] += 1 ev_txt += ev_template.substitute(ev_parameters) - elif field == 'regress': - evinfo = [[j] for j in cond['val']] + elif field == "regress": + evinfo = [[j] for j in cond["val"]] ev_txt += ev_none.substitute( 
ev_num=num_evs[0], ev_name=name, tempfilt_yn=do_tempfilter, - cond_file=evfname) + cond_file=evfname, + ) ev_txt += "\n" conds[name] = evfname self._create_ev_file(evfname, evinfo) # add ev orthogonalization for i in range(1, num_evs[0] + 1): initial = ev_ortho.substitute(c0=i, c1=0, orthogonal=1) - for j in range(0, num_evs[0] + 1): + for j in range(num_evs[0] + 1): try: orthogonal = int(orthogonalization[i][j]) except (KeyError, TypeError, ValueError, IndexError): orthogonal = 0 if orthogonal == 1 and initial not in ev_txt: ev_txt += initial + "\n" - ev_txt += ev_ortho.substitute(c0=i, c1=j, - orthogonal=orthogonal) + ev_txt += ev_ortho.substitute(c0=i, c1=j, orthogonal=orthogonal) ev_txt += "\n" # add contrast info to fsf file if isdefined(contrasts): - contrast_header = load_template('feat_contrast_header.tcl') - contrast_prolog = load_template('feat_contrast_prolog.tcl') - contrast_element = load_template('feat_contrast_element.tcl') - contrast_ftest_element = load_template( - 'feat_contrast_ftest_element.tcl') - contrastmask_header = load_template('feat_contrastmask_header.tcl') - contrastmask_footer = load_template('feat_contrastmask_footer.tcl') - contrastmask_element = load_template( - 'feat_contrastmask_element.tcl') + contrast_header = load_template("feat_contrast_header.tcl") + contrast_prolog = load_template("feat_contrast_prolog.tcl") + contrast_element = load_template("feat_contrast_element.tcl") + contrast_ftest_element = load_template("feat_contrast_ftest_element.tcl") + contrastmask_header = load_template("feat_contrastmask_header.tcl") + contrastmask_footer = load_template("feat_contrastmask_footer.tcl") + contrastmask_element = load_template("feat_contrastmask_element.tcl") # add t/f contrast info ev_txt += contrast_header.substitute() con_names = [] @@ -248,7 +285,7 @@ def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, ftest_idx = [] ttest_idx = [] for j, con in enumerate(contrasts): - if con[1] == 'F': + if con[1] == "F": ftest_idx.append(j) for c in con[2]: if c[0] not in list(con_map.keys()): @@ -257,16 +294,17 @@ def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, else: ttest_idx.append(j) - for ctype in ['real', 'orig']: + for ctype in ["real", "orig"]: for j, con in enumerate(contrasts): - if con[1] == 'F': + if con[1] == "F": continue tidx = ttest_idx.index(j) + 1 ev_txt += contrast_prolog.substitute( - cnum=tidx, ctype=ctype, cname=con[0]) + cnum=tidx, ctype=ctype, cname=con[0] + ) count = 0 for c in range(1, len(evname) + 1): - if evname[c - 1].endswith('TD') and ctype == 'orig': + if evname[c - 1].endswith("TD") and ctype == "orig": continue count = count + 1 if evname[c - 1] in con[2]: @@ -274,19 +312,20 @@ def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, else: val = 0.0 ev_txt += contrast_element.substitute( - cnum=tidx, element=count, ctype=ctype, val=val) + cnum=tidx, element=count, ctype=ctype, val=val + ) ev_txt += "\n" for fconidx in ftest_idx: fval = 0 - if (con[0] in con_map.keys() - and fconidx in con_map[con[0]]): + if con[0] in con_map and fconidx in con_map[con[0]]: fval = 1 ev_txt += contrast_ftest_element.substitute( cnum=ftest_idx.index(fconidx) + 1, element=tidx, ctype=ctype, - val=fval) + val=fval, + ) ev_txt += "\n" # add contrast mask info @@ -294,8 +333,7 @@ def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, for j, _ in enumerate(contrasts): for k, _ in enumerate(contrasts): if j != k: - ev_txt += contrastmask_element.substitute( - c1=j + 1, c2=k + 1) + ev_txt += 
contrastmask_element.substitute(c1=j + 1, c2=k + 1) ev_txt += contrastmask_footer.substitute() return num_evs, ev_txt @@ -305,17 +343,16 @@ def _format_session_info(self, session_info): return session_info def _get_func_files(self, session_info): - """Returns functional files in the order of runs - """ + """Returns functional files in the order of runs""" func_files = [] for i, info in enumerate(session_info): - func_files.insert(i, info['scans']) + func_files.insert(i, info["scans"]) return func_files def _run_interface(self, runtime): cwd = os.getcwd() - fsf_header = load_template('feat_header_l1.tcl') - fsf_postscript = load_template('feat_nongui.tcl') + fsf_header = load_template("feat_header_l1.tcl") + fsf_postscript = load_template("feat_nongui.tcl") prewhiten = 0 if isdefined(self.inputs.model_serial_correlations): @@ -328,18 +365,25 @@ def _run_interface(self, runtime): n_fcon = 0 if isdefined(self.inputs.contrasts): for i, c in enumerate(self.inputs.contrasts): - if c[1] == 'T': + if c[1] == "T": n_tcon += 1 - elif c[1] == 'F': + elif c[1] == "F": n_fcon += 1 for i, info in enumerate(session_info): do_tempfilter = 1 - if info['hpf'] == np.inf: + if info["hpf"] == np.inf: do_tempfilter = 0 num_evs, cond_txt = self._create_ev_files( - cwd, info, i, ev_parameters, self.inputs.orthogonalization, - self.inputs.contrasts, do_tempfilter, basis_key) + cwd, + info, + i, + ev_parameters, + self.inputs.orthogonalization, + self.inputs.contrasts, + do_tempfilter, + basis_key, + ) nim = load(func_files[i]) (_, _, _, timepoints) = nim.shape fsf_txt = fsf_header.substitute( @@ -351,44 +395,45 @@ def _run_interface(self, runtime): num_evs_real=num_evs[1], num_tcon=n_tcon, num_fcon=n_fcon, - high_pass_filter_cutoff=info['hpf'], + high_pass_filter_cutoff=info["hpf"], temphp_yn=do_tempfilter, - func_file=func_files[i]) + func_file=func_files[i], + ) fsf_txt += cond_txt fsf_txt += fsf_postscript.substitute(overwrite=1) - f = open(os.path.join(cwd, 'run%d.fsf' % i), 'w') - f.write(fsf_txt) - f.close() + with open(os.path.join(cwd, "run%d.fsf" % i), "w") as f: + f.write(fsf_txt) return runtime def _list_outputs(self): outputs = self.output_spec().get() cwd = os.getcwd() - outputs['fsf_files'] = [] - outputs['ev_files'] = [] + outputs["fsf_files"] = [] + outputs["ev_files"] = [] basis_key = list(self.inputs.bases.keys())[0] ev_parameters = dict(self.inputs.bases[basis_key]) for runno, runinfo in enumerate( - self._format_session_info(self.inputs.session_info)): - outputs['fsf_files'].append(os.path.join(cwd, 'run%d.fsf' % runno)) - outputs['ev_files'].insert(runno, []) + self._format_session_info(self.inputs.session_info) + ): + outputs["fsf_files"].append(os.path.join(cwd, "run%d.fsf" % runno)) + outputs["ev_files"].insert(runno, []) evname = [] - for field in ['cond', 'regress']: + for field in ["cond", "regress"]: for i, cond in enumerate(runinfo[field]): - name = cond['name'] + name = cond["name"] evname.append(name) - evfname = os.path.join(cwd, - 'ev_%s_%d_%d.txt' % (name, runno, - len(evname))) - if field == 'cond': - ev_parameters['temporalderiv'] = int( - bool(ev_parameters.get('derivs', False))) - if ev_parameters['temporalderiv']: - evname.append(name + 'TD') - outputs['ev_files'][runno].append( - os.path.join(cwd, evfname)) + evfname = os.path.join( + cwd, "ev_%s_%d_%d.txt" % (name, runno, len(evname)) + ) + if field == "cond": + ev_parameters["temporalderiv"] = int( + bool(ev_parameters.get("derivs", False)) + ) + if ev_parameters["temporalderiv"]: + evname.append(name + "TD") + 
outputs["ev_files"][runno].append(os.path.join(cwd, evfname)) return outputs @@ -398,7 +443,8 @@ class FEATInputSpec(FSLCommandInputSpec): mandatory=True, argstr="%s", position=0, - desc="File specifying the feat design spec file") + desc="File specifying the feat design spec file", + ) class FEATOutputSpec(TraitedSpec): @@ -406,37 +452,34 @@ class FEATOutputSpec(TraitedSpec): class FEAT(FSLCommand): - """Uses FSL feat to calculate first level stats - """ - _cmd = 'feat' + """Uses FSL feat to calculate first level stats""" + + _cmd = "feat" input_spec = FEATInputSpec output_spec = FEATOutputSpec def _list_outputs(self): outputs = self._outputs().get() is_ica = False - outputs['feat_dir'] = None - with open(self.inputs.fsf_file, 'rt') as fp: + outputs["feat_dir"] = None + with open(self.inputs.fsf_file) as fp: text = fp.read() if "set fmri(inmelodic) 1" in text: is_ica = True - for line in text.split('\n'): + for line in text.split("\n"): if line.find("set fmri(outputdir)") > -1: try: outputdir_spec = line.split('"')[-2] if os.path.exists(outputdir_spec): - outputs['feat_dir'] = outputdir_spec + outputs["feat_dir"] = outputdir_spec except: pass - if not outputs['feat_dir']: + if not outputs["feat_dir"]: if is_ica: - outputs['feat_dir'] = glob(os.path.join(os.getcwd(), - '*ica'))[0] + outputs["feat_dir"] = glob(os.path.join(os.getcwd(), "*ica"))[0] else: - outputs['feat_dir'] = glob(os.path.join(os.getcwd(), - '*feat'))[0] - print('Outputs from FEATmodel:', outputs) + outputs["feat_dir"] = glob(os.path.join(os.getcwd(), "*feat"))[0] return outputs @@ -447,136 +490,138 @@ class FEATModelInputSpec(FSLCommandInputSpec): argstr="%s", position=0, desc="File specifying the feat design spec file", - copyfile=False) + copyfile=False, + ) ev_files = traits.List( File(exists=True), mandatory=True, argstr="%s", desc="Event spec files generated by level1design", position=1, - copyfile=False) + copyfile=False, + ) class FEATModelOutpuSpec(TraitedSpec): - design_file = File( - exists=True, desc='Mat file containing ascii matrix for design') - design_image = File( - exists=True, desc='Graphical representation of design matrix') - design_cov = File( - exists=True, desc='Graphical representation of design covariance') - con_file = File( - exists=True, desc='Contrast file containing contrast vectors') - fcon_file = File(desc='Contrast file containing contrast vectors') + design_file = File(exists=True, desc="Mat file containing ascii matrix for design") + design_image = File(exists=True, desc="Graphical representation of design matrix") + design_cov = File(exists=True, desc="Graphical representation of design covariance") + con_file = File(exists=True, desc="Contrast file containing contrast vectors") + fcon_file = File(desc="Contrast file containing contrast vectors") class FEATModel(FSLCommand): - """Uses FSL feat_model to generate design.mat files - """ - _cmd = 'feat_model' + """Uses FSL feat_model to generate design.mat files""" + + _cmd = "feat_model" input_spec = FEATModelInputSpec output_spec = FEATModelOutpuSpec def _format_arg(self, name, trait_spec, value): - if name == 'fsf_file': - return super(FEATModel, - self)._format_arg(name, trait_spec, - self._get_design_root(value)) - elif name == 'ev_files': - return '' + if name == "fsf_file": + return super()._format_arg(name, trait_spec, self._get_design_root(value)) + elif name == "ev_files": + return "" else: - return super(FEATModel, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def 
_get_design_root(self, infile): _, fname = os.path.split(infile) - return fname.split('.')[0] + return fname.split(".")[0] def _list_outputs(self): # TODO: figure out file names and get rid off the globs outputs = self._outputs().get() root = self._get_design_root(simplify_list(self.inputs.fsf_file)) - design_file = glob(os.path.join(os.getcwd(), '%s*.mat' % root)) - assert len(design_file) == 1, 'No mat file generated by FEAT Model' - outputs['design_file'] = design_file[0] - design_image = glob(os.path.join(os.getcwd(), '%s.png' % root)) - assert len( - design_image) == 1, 'No design image generated by FEAT Model' - outputs['design_image'] = design_image[0] - design_cov = glob(os.path.join(os.getcwd(), '%s_cov.png' % root)) - assert len( - design_cov) == 1, 'No covariance image generated by FEAT Model' - outputs['design_cov'] = design_cov[0] - con_file = glob(os.path.join(os.getcwd(), '%s*.con' % root)) - assert len(con_file) == 1, 'No con file generated by FEAT Model' - outputs['con_file'] = con_file[0] - fcon_file = glob(os.path.join(os.getcwd(), '%s*.fts' % root)) + design_file = glob(os.path.join(os.getcwd(), "%s*.mat" % root)) + assert len(design_file) == 1, "No mat file generated by FEAT Model" + outputs["design_file"] = design_file[0] + design_image = glob(os.path.join(os.getcwd(), "%s.png" % root)) + assert len(design_image) == 1, "No design image generated by FEAT Model" + outputs["design_image"] = design_image[0] + design_cov = glob(os.path.join(os.getcwd(), "%s_cov.png" % root)) + assert len(design_cov) == 1, "No covariance image generated by FEAT Model" + outputs["design_cov"] = design_cov[0] + con_file = glob(os.path.join(os.getcwd(), "%s*.con" % root)) + assert len(con_file) == 1, "No con file generated by FEAT Model" + outputs["con_file"] = con_file[0] + fcon_file = glob(os.path.join(os.getcwd(), "%s*.fts" % root)) if fcon_file: - assert len(fcon_file) == 1, 'No fts file generated by FEAT Model' - outputs['fcon_file'] = fcon_file[0] + assert len(fcon_file) == 1, "No fts file generated by FEAT Model" + outputs["fcon_file"] = fcon_file[0] return outputs class FILMGLSInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - mandatory=True, - position=-3, - argstr='%s', - desc='input data file') - design_file = File( - exists=True, position=-2, argstr='%s', desc='design matrix file') + exists=True, mandatory=True, position=-3, argstr="%s", desc="input data file" + ) + design_file = File(exists=True, position=-2, argstr="%s", desc="design matrix file") threshold = traits.Range( - value=1000., + value=1000.0, low=0.0, - argstr='%f', + argstr="%f", position=-1, usedefault=True, - desc='threshold') - smooth_autocorr = traits.Bool( - argstr='-sa', desc='Smooth auto corr estimates') - mask_size = traits.Int(argstr='-ms %d', desc="susan mask size") + desc="threshold", + ) + smooth_autocorr = traits.Bool(argstr="-sa", desc="Smooth auto corr estimates") + mask_size = traits.Int(argstr="-ms %d", desc="susan mask size") brightness_threshold = traits.Range( low=0, - argstr='-epith %d', - desc=('susan brightness threshold, ' - 'otherwise it is estimated')) - full_data = traits.Bool(argstr='-v', desc='output full data') + argstr="-epith %d", + desc=("susan brightness threshold, otherwise it is estimated"), + ) + full_data = traits.Bool(argstr="-v", desc="output full data") _estimate_xor = [ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + 
"multitaper_product", + "use_pava", + "autocorr_noestimate", ] autocorr_estimate_only = traits.Bool( - argstr='-ac', + argstr="-ac", xor=_estimate_xor, - desc=('perform autocorrelation ' - 'estimatation only')) + desc=("perform autocorrelation estimatation only"), + ) fit_armodel = traits.Bool( - argstr='-ar', + argstr="-ar", xor=_estimate_xor, - desc=('fits autoregressive model - default is ' - 'to use tukey with M=sqrt(numvols)')) + desc=( + "fits autoregressive model - default is " + "to use tukey with M=sqrt(numvols)" + ), + ) tukey_window = traits.Int( - argstr='-tukey %d', + argstr="-tukey %d", xor=_estimate_xor, - desc='tukey window size to estimate autocorr') + desc="tukey window size to estimate autocorr", + ) multitaper_product = traits.Int( - argstr='-mt %d', + argstr="-mt %d", xor=_estimate_xor, - desc=('multitapering with slepian tapers ' - 'and num is the time-bandwidth ' - 'product')) - use_pava = traits.Bool( - argstr='-pava', desc='estimates autocorr using PAVA') + desc=( + "multitapering with slepian tapers " + "and num is the time-bandwidth " + "product" + ), + ) + use_pava = traits.Bool(argstr="-pava", desc="estimates autocorr using PAVA") autocorr_noestimate = traits.Bool( - argstr='-noest', xor=_estimate_xor, desc='do not estimate autocorrs') + argstr="-noest", xor=_estimate_xor, desc="do not estimate autocorrs" + ) output_pwdata = traits.Bool( - argstr='-output_pwdata', - desc=('output prewhitened data and average ' - 'design matrix')) + argstr="-output_pwdata", + desc=("output prewhitened data and average design matrix"), + ) results_dir = Directory( - 'results', - argstr='-rn %s', + "results", + argstr="-rn %s", usedefault=True, - desc='directory to store results in') + desc="directory to store results in", + ) class FILMGLSInputSpec505(FSLCommandInputSpec): @@ -584,142 +629,154 @@ class FILMGLSInputSpec505(FSLCommandInputSpec): exists=True, mandatory=True, position=-3, - argstr='--in=%s', - desc='input data file') + argstr="--in=%s", + desc="input data file", + ) design_file = File( - exists=True, position=-2, argstr='--pd=%s', desc='design matrix file') + exists=True, position=-2, argstr="--pd=%s", desc="design matrix file" + ) threshold = traits.Range( - value=1000., + value=1000.0, low=0.0, - argstr='--thr=%f', + argstr="--thr=%f", position=-1, usedefault=True, - desc='threshold') - smooth_autocorr = traits.Bool( - argstr='--sa', desc='Smooth auto corr estimates') - mask_size = traits.Int(argstr='--ms=%d', desc="susan mask size") + desc="threshold", + ) + smooth_autocorr = traits.Bool(argstr="--sa", desc="Smooth auto corr estimates") + mask_size = traits.Int(argstr="--ms=%d", desc="susan mask size") brightness_threshold = traits.Range( low=0, - argstr='--epith=%d', - desc=('susan brightness threshold, ' - 'otherwise it is estimated')) - full_data = traits.Bool(argstr='-v', desc='output full data') + argstr="--epith=%d", + desc=("susan brightness threshold, otherwise it is estimated"), + ) + full_data = traits.Bool(argstr="-v", desc="output full data") _estimate_xor = [ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ] autocorr_estimate_only = traits.Bool( - argstr='--ac', + argstr="--ac", xor=_estimate_xor, - desc=('perform autocorrelation ' - 'estimation only')) + desc=("perform autocorrelation estimation only"), + ) fit_armodel = traits.Bool( - argstr='--ar', + 
argstr="--ar", xor=_estimate_xor, - desc=('fits autoregressive model - default is ' - 'to use tukey with M=sqrt(numvols)')) + desc=( + "fits autoregressive model - default is " + "to use tukey with M=sqrt(numvols)" + ), + ) tukey_window = traits.Int( - argstr='--tukey=%d', + argstr="--tukey=%d", xor=_estimate_xor, - desc='tukey window size to estimate autocorr') + desc="tukey window size to estimate autocorr", + ) multitaper_product = traits.Int( - argstr='--mt=%d', + argstr="--mt=%d", xor=_estimate_xor, - desc=('multitapering with slepian tapers ' - 'and num is the time-bandwidth ' - 'product')) - use_pava = traits.Bool( - argstr='--pava', desc='estimates autocorr using PAVA') + desc=( + "multitapering with slepian tapers " + "and num is the time-bandwidth " + "product" + ), + ) + use_pava = traits.Bool(argstr="--pava", desc="estimates autocorr using PAVA") autocorr_noestimate = traits.Bool( - argstr='--noest', xor=_estimate_xor, desc='do not estimate autocorrs') + argstr="--noest", xor=_estimate_xor, desc="do not estimate autocorrs" + ) output_pwdata = traits.Bool( - argstr='--outputPWdata', - desc=('output prewhitened data and average ' - 'design matrix')) + argstr="--outputPWdata", + desc=("output prewhitened data and average design matrix"), + ) results_dir = Directory( - 'results', - argstr='--rn=%s', + "results", + argstr="--rn=%s", usedefault=True, - desc='directory to store results in') + desc="directory to store results in", + ) class FILMGLSInputSpec507(FILMGLSInputSpec505): threshold = traits.Float( - default_value=-1000., - argstr='--thr=%f', + default_value=-1000.0, + argstr="--thr=%f", position=-1, usedefault=True, - desc='threshold') + desc="threshold", + ) tcon_file = File( - exists=True, - argstr='--con=%s', - desc='contrast file containing T-contrasts') + exists=True, argstr="--con=%s", desc="contrast file containing T-contrasts" + ) fcon_file = File( - exists=True, - argstr='--fcon=%s', - desc='contrast file containing F-contrasts') + exists=True, argstr="--fcon=%s", desc="contrast file containing F-contrasts" + ) mode = traits.Enum( - 'volumetric', - 'surface', - argstr="--mode=%s", - desc="Type of analysis to be done") + "volumetric", "surface", argstr="--mode=%s", desc="Type of analysis to be done" + ) surface = File( exists=True, argstr="--in2=%s", - desc=("input surface for autocorr smoothing in " - "surface-based analyses")) + desc=("input surface for autocorr smoothing in surface-based analyses"), + ) class FILMGLSOutputSpec(TraitedSpec): param_estimates = OutputMultiPath( File(exists=True), - desc=('Parameter estimates for each ' - 'column of the design matrix')) + desc=("Parameter estimates for each column of the design matrix"), + ) residual4d = File( exists=True, - desc=('Model fit residual mean-squared error for each ' - 'time point')) - dof_file = File(exists=True, desc='degrees of freedom') + desc=("Model fit residual mean-squared error for each time point"), + ) + dof_file = File(exists=True, desc="degrees of freedom") sigmasquareds = File( - exists=True, desc='summary of residuals, See Woolrich, et. al., 2001') + exists=True, desc="summary of residuals, See Woolrich, et. 
al., 2001" + ) results_dir = Directory( - exists=True, desc='directory storing model estimation output') + exists=True, desc="directory storing model estimation output" + ) corrections = File( - exists=True, - desc=('statistical corrections used within FILM ' - 'modeling')) - thresholdac = File(exists=True, desc='The FILM autocorrelation parameters') - logfile = File(exists=True, desc='FILM run logfile') + exists=True, desc=("statistical corrections used within FILM modeling") + ) + thresholdac = File(exists=True, desc="The FILM autocorrelation parameters") + logfile = File(exists=True, desc="FILM run logfile") class FILMGLSOutputSpec507(TraitedSpec): param_estimates = OutputMultiPath( File(exists=True), - desc=('Parameter estimates for each ' - 'column of the design matrix')) + desc=("Parameter estimates for each column of the design matrix"), + ) residual4d = File( exists=True, - desc=('Model fit residual mean-squared error for each ' - 'time point')) - dof_file = File(exists=True, desc='degrees of freedom') + desc=("Model fit residual mean-squared error for each time point"), + ) + dof_file = File(exists=True, desc="degrees of freedom") sigmasquareds = File( - exists=True, desc='summary of residuals, See Woolrich, et. al., 2001') + exists=True, desc="summary of residuals, See Woolrich, et. al., 2001" + ) results_dir = Directory( - exists=True, desc='directory storing model estimation output') - thresholdac = File(exists=True, desc='The FILM autocorrelation parameters') - logfile = File(exists=True, desc='FILM run logfile') + exists=True, desc="directory storing model estimation output" + ) + thresholdac = File(exists=True, desc="The FILM autocorrelation parameters") + logfile = File(exists=True, desc="FILM run logfile") copes = OutputMultiPath( - File(exists=True), desc='Contrast estimates for each contrast') + File(exists=True), desc="Contrast estimates for each contrast" + ) varcopes = OutputMultiPath( - File(exists=True), desc='Variance estimates for each contrast') - zstats = OutputMultiPath( - File(exists=True), desc='z-stat file for each contrast') - tstats = OutputMultiPath( - File(exists=True), desc='t-stat file for each contrast') - fstats = OutputMultiPath( - File(exists=True), desc='f-stat file for each contrast') - zfstats = OutputMultiPath( - File(exists=True), desc='z-stat file for each F contrast') + File(exists=True), desc="Variance estimates for each contrast" + ) + zstats = OutputMultiPath(File(exists=True), desc="z-stat file for each contrast") + tstats = OutputMultiPath(File(exists=True), desc="t-stat file for each contrast") + fstats = OutputMultiPath(File(exists=True), desc="f-stat file for each contrast") + zfstats = OutputMultiPath(File(exists=True), desc="z-stat file for each F contrast") class FILMGLS(FSLCommand): @@ -752,73 +809,73 @@ class FILMGLS(FSLCommand): """ - _cmd = 'film_gls' + _cmd = "film_gls" input_spec = FILMGLSInputSpec output_spec = FILMGLSOutputSpec - if Info.version() and LooseVersion(Info.version()) > LooseVersion('5.0.6'): + + if Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.6"): input_spec = FILMGLSInputSpec507 output_spec = FILMGLSOutputSpec507 - elif (Info.version() - and LooseVersion(Info.version()) > LooseVersion('5.0.4')): + elif Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.4"): input_spec = FILMGLSInputSpec505 + def __init__(self, **inputs): + super(FILMGLS, self).__init__(**inputs) + if Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.6"): + if 'output_type' not in 
inputs: + if isdefined(self.inputs.mode) and self.inputs.mode == "surface": + self.inputs.output_type = "GIFTI" + def _get_pe_files(self, cwd): files = None if isdefined(self.inputs.design_file): - fp = open(self.inputs.design_file, 'rt') - for line in fp.readlines(): - if line.startswith('/NumWaves'): - numpes = int(line.split()[-1]) - files = [] - for i in range(numpes): - files.append( - self._gen_fname('pe%d.nii' % (i + 1), cwd=cwd)) - break - fp.close() + with open(self.inputs.design_file) as fp: + for line in fp: + if line.startswith("/NumWaves"): + numpes = int(line.split()[-1]) + files = [ + self._gen_fname(f"pe{i + 1}.nii", cwd=cwd) + for i in range(numpes) + ] + break return files def _get_numcons(self): numtcons = 0 numfcons = 0 if isdefined(self.inputs.tcon_file): - fp = open(self.inputs.tcon_file, 'rt') - for line in fp.readlines(): - if line.startswith('/NumContrasts'): - numtcons = int(line.split()[-1]) - break - fp.close() + with open(self.inputs.tcon_file) as fp: + for line in fp: + if line.startswith("/NumContrasts"): + numtcons = int(line.split()[-1]) + break if isdefined(self.inputs.fcon_file): - fp = open(self.inputs.fcon_file, 'rt') - for line in fp.readlines(): - if line.startswith('/NumContrasts'): - numfcons = int(line.split()[-1]) - break - fp.close() + with open(self.inputs.fcon_file) as fp: + for line in fp: + if line.startswith("/NumContrasts"): + numfcons = int(line.split()[-1]) + break return numtcons, numfcons def _list_outputs(self): outputs = self._outputs().get() cwd = os.getcwd() results_dir = os.path.join(cwd, self.inputs.results_dir) - outputs['results_dir'] = results_dir + outputs["results_dir"] = results_dir pe_files = self._get_pe_files(results_dir) if pe_files: - outputs['param_estimates'] = pe_files - outputs['residual4d'] = self._gen_fname('res4d.nii', cwd=results_dir) - outputs['dof_file'] = os.path.join(results_dir, 'dof') - outputs['sigmasquareds'] = self._gen_fname( - 'sigmasquareds.nii', cwd=results_dir) - outputs['thresholdac'] = self._gen_fname( - 'threshac1.nii', cwd=results_dir) - if (Info.version() - and LooseVersion(Info.version()) < LooseVersion('5.0.7')): - outputs['corrections'] = self._gen_fname( - 'corrections.nii', cwd=results_dir) - outputs['logfile'] = self._gen_fname( - 'logfile', change_ext=False, cwd=results_dir) - - if (Info.version() - and LooseVersion(Info.version()) > LooseVersion('5.0.6')): + outputs["param_estimates"] = pe_files + outputs["residual4d"] = self._gen_fname("res4d.nii", cwd=results_dir) + outputs["dof_file"] = os.path.join(results_dir, "dof") + outputs["sigmasquareds"] = self._gen_fname("sigmasquareds.nii", cwd=results_dir) + outputs["thresholdac"] = self._gen_fname("threshac1.nii", cwd=results_dir) + if Info.version() and LooseVersion(Info.version()) < LooseVersion("5.0.7"): + outputs["corrections"] = self._gen_fname("corrections.nii", cwd=results_dir) + outputs["logfile"] = self._gen_fname( + "logfile", change_ext=False, cwd=results_dir + ) + + if Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.6"): pth = results_dir numtcons, numfcons = self._get_numcons() base_contrast = 1 @@ -828,46 +885,47 @@ def _list_outputs(self): tstats = [] for i in range(numtcons): copes.append( - self._gen_fname( - 'cope%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("cope%d.nii" % (base_contrast + i), cwd=pth) + ) varcopes.append( - self._gen_fname( - 'varcope%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("varcope%d.nii" % (base_contrast + i), cwd=pth) + ) zstats.append( - 
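# A hypothetical helper distilling the header parsing used by _get_pe_files
# and _get_numcons above: FEAT's ascii matrix files begin with lines such as
# "/NumWaves 4" or "/NumContrasts 3", and the count is the last
# whitespace-separated token on that line. Function and file names are
# illustrative, not nipype API.
def count_from_header(path, key):
    with open(path) as fp:
        for line in fp:
            if line.startswith(key):
                return int(line.split()[-1])
    return 0

# count_from_header("design.mat", "/NumWaves") -> number of pe<N>.nii files to expect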
self._gen_fname( - 'zstat%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("zstat%d.nii" % (base_contrast + i), cwd=pth) + ) tstats.append( - self._gen_fname( - 'tstat%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("tstat%d.nii" % (base_contrast + i), cwd=pth) + ) if copes: - outputs['copes'] = copes - outputs['varcopes'] = varcopes - outputs['zstats'] = zstats - outputs['tstats'] = tstats + outputs["copes"] = copes + outputs["varcopes"] = varcopes + outputs["zstats"] = zstats + outputs["tstats"] = tstats fstats = [] zfstats = [] for i in range(numfcons): fstats.append( - self._gen_fname( - 'fstat%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("fstat%d.nii" % (base_contrast + i), cwd=pth) + ) zfstats.append( - self._gen_fname( - 'zfstat%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("zfstat%d.nii" % (base_contrast + i), cwd=pth) + ) if fstats: - outputs['fstats'] = fstats - outputs['zfstats'] = zfstats + outputs["fstats"] = fstats + outputs["zfstats"] = zfstats return outputs class FEATRegisterInputSpec(BaseInterfaceInputSpec): feat_dirs = InputMultiPath( - Directory(exists=True), desc="Lower level feat dirs", mandatory=True) + Directory(exists=True), desc="Lower level feat dirs", mandatory=True + ) reg_image = File( exists=True, desc="image to register to (will be treated as standard)", - mandatory=True) - reg_dof = traits.Int( - 12, desc="registration degrees of freedom", usedefault=True) + mandatory=True, + ) + reg_dof = traits.Int(12, desc="registration degrees of freedom", usedefault=True) class FEATRegisterOutputSpec(TraitedSpec): @@ -875,138 +933,143 @@ class FEATRegisterOutputSpec(TraitedSpec): class FEATRegister(BaseInterface): - """Register feat directories to a specific standard - """ + """Register feat directories to a specific standard""" + input_spec = FEATRegisterInputSpec output_spec = FEATRegisterOutputSpec def _run_interface(self, runtime): - fsf_header = load_template('featreg_header.tcl') - fsf_footer = load_template('feat_nongui.tcl') - fsf_dirs = load_template('feat_fe_featdirs.tcl') + fsf_header = load_template("featreg_header.tcl") + fsf_footer = load_template("feat_nongui.tcl") + fsf_dirs = load_template("feat_fe_featdirs.tcl") num_runs = len(self.inputs.feat_dirs) fsf_txt = fsf_header.substitute( num_runs=num_runs, regimage=self.inputs.reg_image, - regdof=self.inputs.reg_dof) + regdof=self.inputs.reg_dof, + ) for i, rundir in enumerate(ensure_list(self.inputs.feat_dirs)): - fsf_txt += fsf_dirs.substitute( - runno=i + 1, rundir=os.path.abspath(rundir)) + fsf_txt += fsf_dirs.substitute(runno=i + 1, rundir=os.path.abspath(rundir)) fsf_txt += fsf_footer.substitute() - f = open(os.path.join(os.getcwd(), 'register.fsf'), 'wt') - f.write(fsf_txt) - f.close() + with open(os.path.join(os.getcwd(), "register.fsf"), "w") as f: + f.write(fsf_txt) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['fsf_file'] = os.path.abspath( - os.path.join(os.getcwd(), 'register.fsf')) + outputs["fsf_file"] = os.path.abspath(os.path.join(os.getcwd(), "register.fsf")) return outputs class FLAMEOInputSpec(FSLCommandInputSpec): cope_file = File( exists=True, - argstr='--copefile=%s', + argstr="--copefile=%s", mandatory=True, - desc='cope regressor data file') + desc="cope regressor data file", + ) var_cope_file = File( - exists=True, - argstr='--varcopefile=%s', - desc='varcope weightings data file') + exists=True, argstr="--varcopefile=%s", desc="varcope weightings data file" + ) dof_var_cope_file = File( - exists=True, 
- argstr='--dofvarcopefile=%s', - desc='dof data file for varcope data') + exists=True, argstr="--dofvarcopefile=%s", desc="dof data file for varcope data" + ) mask_file = File( - exists=True, argstr='--maskfile=%s', mandatory=True, desc='mask file') + exists=True, argstr="--maskfile=%s", mandatory=True, desc="mask file" + ) design_file = File( - exists=True, - argstr='--designfile=%s', - mandatory=True, - desc='design matrix file') + exists=True, argstr="--designfile=%s", mandatory=True, desc="design matrix file" + ) t_con_file = File( exists=True, - argstr='--tcontrastsfile=%s', + argstr="--tcontrastsfile=%s", mandatory=True, - desc='ascii matrix specifying t-contrasts') + desc="ascii matrix specifying t-contrasts", + ) f_con_file = File( exists=True, - argstr='--fcontrastsfile=%s', - desc='ascii matrix specifying f-contrasts') + argstr="--fcontrastsfile=%s", + desc="ascii matrix specifying f-contrasts", + ) cov_split_file = File( exists=True, - argstr='--covsplitfile=%s', + argstr="--covsplitfile=%s", mandatory=True, - desc='ascii matrix specifying the groups the covariance is split into') + desc="ascii matrix specifying the groups the covariance is split into", + ) run_mode = traits.Enum( - 'fe', - 'ols', - 'flame1', - 'flame12', - argstr='--runmode=%s', + "fe", + "ols", + "flame1", + "flame12", + argstr="--runmode=%s", mandatory=True, - desc='inference to perform') - n_jumps = traits.Int( - argstr='--njumps=%d', desc='number of jumps made by mcmc') + desc="inference to perform", + ) + n_jumps = traits.Int(argstr="--njumps=%d", desc="number of jumps made by mcmc") burnin = traits.Int( - argstr='--burnin=%d', - desc=('number of jumps at start of mcmc to be ' - 'discarded')) + argstr="--burnin=%d", + desc=("number of jumps at start of mcmc to be discarded"), + ) sample_every = traits.Int( - argstr='--sampleevery=%d', desc='number of jumps for each sample') - fix_mean = traits.Bool(argstr='--fixmean', desc='fix mean for tfit') + argstr="--sampleevery=%d", desc="number of jumps for each sample" + ) + fix_mean = traits.Bool(argstr="--fixmean", desc="fix mean for tfit") infer_outliers = traits.Bool( - argstr='--inferoutliers', desc='infer outliers - not for fe') - no_pe_outputs = traits.Bool( - argstr='--nopeoutput', desc='do not output pe files') + argstr="--inferoutliers", desc="infer outliers - not for fe" + ) + no_pe_outputs = traits.Bool(argstr="--nopeoutput", desc="do not output pe files") sigma_dofs = traits.Int( - argstr='--sigma_dofs=%d', - desc=('sigma (in mm) to use for Gaussian ' - 'smoothing the DOFs in FLAME 2. Default is ' - '1mm, -1 indicates no smoothing')) + argstr="--sigma_dofs=%d", + desc=( + "sigma (in mm) to use for Gaussian " + "smoothing the DOFs in FLAME 2. Default is " + "1mm, -1 indicates no smoothing" + ), + ) outlier_iter = traits.Int( - argstr='--ioni=%d', - desc=('Number of max iterations to use when ' - 'inferring outliers. Default is 12.')) - log_dir = Directory("stats", argstr='--ld=%s', usedefault=True) # ohinds + argstr="--ioni=%d", + desc=( + "Number of max iterations to use when inferring outliers. Default is 12." 
+ ), + ) + log_dir = Directory("stats", argstr="--ld=%s", usedefault=True) # ohinds # no support for ven, vef class FLAMEOOutputSpec(TraitedSpec): pes = OutputMultiPath( File(exists=True), - desc=("Parameter estimates for each column of the " - "design matrix for each voxel")) + desc=( + "Parameter estimates for each column of the design matrix for each voxel" + ), + ) res4d = OutputMultiPath( File(exists=True), - desc=("Model fit residual mean-squared error for " - "each time point")) + desc=("Model fit residual mean-squared error for each time point"), + ) copes = OutputMultiPath( - File(exists=True), desc="Contrast estimates for each contrast") + File(exists=True), desc="Contrast estimates for each contrast" + ) var_copes = OutputMultiPath( - File(exists=True), desc="Variance estimates for each contrast") - zstats = OutputMultiPath( - File(exists=True), desc="z-stat file for each contrast") - tstats = OutputMultiPath( - File(exists=True), desc="t-stat file for each contrast") - zfstats = OutputMultiPath( - File(exists=True), desc="z stat file for each f contrast") - fstats = OutputMultiPath( - File(exists=True), desc="f-stat file for each contrast") + File(exists=True), desc="Variance estimates for each contrast" + ) + zstats = OutputMultiPath(File(exists=True), desc="z-stat file for each contrast") + tstats = OutputMultiPath(File(exists=True), desc="t-stat file for each contrast") + zfstats = OutputMultiPath(File(exists=True), desc="z stat file for each f contrast") + fstats = OutputMultiPath(File(exists=True), desc="f-stat file for each contrast") mrefvars = OutputMultiPath( - File(exists=True), - desc=("mean random effect variances for each " - "contrast")) + File(exists=True), desc=("mean random effect variances for each contrast") + ) tdof = OutputMultiPath( - File(exists=True), desc="temporal dof file for each contrast") - weights = OutputMultiPath( - File(exists=True), desc="weights file for each contrast") + File(exists=True), desc="temporal dof file for each contrast" + ) + weights = OutputMultiPath(File(exists=True), desc="weights file for each contrast") stats_dir = Directory( - File(exists=True), desc="directory storing model estimation output") + File(exists=True), desc="directory storing model estimation output" + ) class FLAMEO(FSLCommand): @@ -1031,36 +1094,39 @@ class FLAMEO(FSLCommand): """ - _cmd = 'flameo' + _cmd = "flameo" input_spec = FLAMEOInputSpec output_spec = FLAMEOOutputSpec - references_ = [{ - 'entry': - BibTeX( - '@article{BeckmannJenkinsonSmith2003,' - 'author={C.F. Beckmann, M. Jenkinson, and S.M. Smith},' - 'title={General multilevel linear modeling for group analysis in FMRI.},' - 'journal={NeuroImage},' - 'volume={20},' - 'pages={1052-1063},' - 'year={2003},' - '}'), - 'tags': ['method'], - }, { - 'entry': - BibTeX( - '@article{WoolrichBehrensBeckmannJenkinsonSmith2004,' - 'author={M.W. Woolrich, T.E. Behrens, ' - 'C.F. Beckmann, M. Jenkinson, and S.M. Smith},' - 'title={Multilevel linear modelling for FMRI group analysis using Bayesian inference.},' - 'journal={NeuroImage},' - 'volume={21},' - 'pages={1732-1747},' - 'year={2004},' - '}'), - 'tags': ['method'], - }] + _references = [ + { + "entry": BibTeX( + "@article{BeckmannJenkinsonSmith2003," + "author={C.F. Beckmann, M. Jenkinson, and S.M. 
Smith}," + "title={General multilevel linear modeling for group analysis in FMRI.}," + "journal={NeuroImage}," + "volume={20}," + "pages={1052-1063}," + "year={2003}," + "}" + ), + "tags": ["method"], + }, + { + "entry": BibTeX( + "@article{WoolrichBehrensBeckmannJenkinsonSmith2004," + "author={M.W. Woolrich, T.E. Behrens, " + "C.F. Beckmann, M. Jenkinson, and S.M. Smith}," + "title={Multilevel linear modelling for FMRI group analysis using Bayesian inference.}," + "journal={NeuroImage}," + "volume={21}," + "pages={1732-1747}," + "year={2004}," + "}" + ), + "tags": ["method"], + }, + ] # ohinds: 2010-04-06 def _run_interface(self, runtime): @@ -1069,7 +1135,7 @@ def _run_interface(self, runtime): if os.access(os.path.join(cwd, log_dir), os.F_OK): rmtree(os.path.join(cwd, log_dir)) - return super(FLAMEO, self)._run_interface(runtime) + return super()._run_interface(runtime) # ohinds: 2010-04-06 # made these compatible with flameo @@ -1077,61 +1143,54 @@ def _list_outputs(self): outputs = self._outputs().get() pth = os.path.join(os.getcwd(), self.inputs.log_dir) - pes = human_order_sorted(glob(os.path.join(pth, 'pe[0-9]*.*'))) - assert len(pes) >= 1, 'No pe volumes generated by FSL Estimate' - outputs['pes'] = pes + pes = human_order_sorted(glob(os.path.join(pth, "pe[0-9]*.*"))) + assert len(pes) >= 1, "No pe volumes generated by FSL Estimate" + outputs["pes"] = pes - res4d = human_order_sorted(glob(os.path.join(pth, 'res4d.*'))) - assert len(res4d) == 1, 'No residual volume generated by FSL Estimate' - outputs['res4d'] = res4d[0] + res4d = human_order_sorted(glob(os.path.join(pth, "res4d.*"))) + assert len(res4d) == 1, "No residual volume generated by FSL Estimate" + outputs["res4d"] = res4d[0] - copes = human_order_sorted(glob(os.path.join(pth, 'cope[0-9]*.*'))) - assert len(copes) >= 1, 'No cope volumes generated by FSL CEstimate' - outputs['copes'] = copes + copes = human_order_sorted(glob(os.path.join(pth, "cope[0-9]*.*"))) + assert len(copes) >= 1, "No cope volumes generated by FSL CEstimate" + outputs["copes"] = copes - var_copes = human_order_sorted( - glob(os.path.join(pth, 'varcope[0-9]*.*'))) - assert len( - var_copes) >= 1, 'No varcope volumes generated by FSL CEstimate' - outputs['var_copes'] = var_copes + var_copes = human_order_sorted(glob(os.path.join(pth, "varcope[0-9]*.*"))) + assert len(var_copes) >= 1, "No varcope volumes generated by FSL CEstimate" + outputs["var_copes"] = var_copes - zstats = human_order_sorted(glob(os.path.join(pth, 'zstat[0-9]*.*'))) - assert len(zstats) >= 1, 'No zstat volumes generated by FSL CEstimate' - outputs['zstats'] = zstats + zstats = human_order_sorted(glob(os.path.join(pth, "zstat[0-9]*.*"))) + assert len(zstats) >= 1, "No zstat volumes generated by FSL CEstimate" + outputs["zstats"] = zstats if isdefined(self.inputs.f_con_file): - zfstats = human_order_sorted( - glob(os.path.join(pth, 'zfstat[0-9]*.*'))) - assert len( - zfstats) >= 1, 'No zfstat volumes generated by FSL CEstimate' - outputs['zfstats'] = zfstats - - fstats = human_order_sorted( - glob(os.path.join(pth, 'fstat[0-9]*.*'))) - assert len( - fstats) >= 1, 'No fstat volumes generated by FSL CEstimate' - outputs['fstats'] = fstats - - tstats = human_order_sorted(glob(os.path.join(pth, 'tstat[0-9]*.*'))) - assert len(tstats) >= 1, 'No tstat volumes generated by FSL CEstimate' - outputs['tstats'] = tstats + zfstats = human_order_sorted(glob(os.path.join(pth, "zfstat[0-9]*.*"))) + assert len(zfstats) >= 1, "No zfstat volumes generated by FSL CEstimate" + outputs["zfstats"] = zfstats 
+ + fstats = human_order_sorted(glob(os.path.join(pth, "fstat[0-9]*.*"))) + assert len(fstats) >= 1, "No fstat volumes generated by FSL CEstimate" + outputs["fstats"] = fstats + + tstats = human_order_sorted(glob(os.path.join(pth, "tstat[0-9]*.*"))) + assert len(tstats) >= 1, "No tstat volumes generated by FSL CEstimate" + outputs["tstats"] = tstats mrefs = human_order_sorted( - glob(os.path.join(pth, 'mean_random_effects_var[0-9]*.*'))) - assert len( - mrefs) >= 1, 'No mean random effects volumes generated by FLAMEO' - outputs['mrefvars'] = mrefs + glob(os.path.join(pth, "mean_random_effects_var[0-9]*.*")) + ) + assert len(mrefs) >= 1, "No mean random effects volumes generated by FLAMEO" + outputs["mrefvars"] = mrefs - tdof = human_order_sorted(glob(os.path.join(pth, 'tdof_t[0-9]*.*'))) - assert len(tdof) >= 1, 'No T dof volumes generated by FLAMEO' - outputs['tdof'] = tdof + tdof = human_order_sorted(glob(os.path.join(pth, "tdof_t[0-9]*.*"))) + assert len(tdof) >= 1, "No T dof volumes generated by FLAMEO" + outputs["tdof"] = tdof - weights = human_order_sorted( - glob(os.path.join(pth, 'weights[0-9]*.*'))) - assert len(weights) >= 1, 'No weight volumes generated by FLAMEO' - outputs['weights'] = weights + weights = human_order_sorted(glob(os.path.join(pth, "weights[0-9]*.*"))) + assert len(weights) >= 1, "No weight volumes generated by FLAMEO" + outputs["weights"] = weights - outputs['stats_dir'] = pth + outputs["stats_dir"] = pth return outputs @@ -1140,66 +1199,66 @@ class ContrastMgrInputSpec(FSLCommandInputSpec): tcon_file = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-1, - desc='contrast file containing T-contrasts') + desc="contrast file containing T-contrasts", + ) fcon_file = File( - exists=True, - argstr='-f %s', - desc='contrast file containing F-contrasts') + exists=True, argstr="-f %s", desc="contrast file containing F-contrasts" + ) param_estimates = InputMultiPath( File(exists=True), - argstr='', + argstr="", copyfile=False, mandatory=True, - desc=('Parameter estimates for each ' - 'column of the design matrix')) + desc=("Parameter estimates for each column of the design matrix"), + ) corrections = File( exists=True, copyfile=False, mandatory=True, - desc='statistical corrections used within FILM modelling') + desc="statistical corrections used within FILM modelling", + ) dof_file = File( exists=True, - argstr='', + argstr="", copyfile=False, mandatory=True, - desc='degrees of freedom') + desc="degrees of freedom", + ) sigmasquareds = File( exists=True, - argstr='', + argstr="", position=-2, copyfile=False, mandatory=True, - desc=('summary of residuals, See Woolrich, et. al., ' - '2001')) + desc=("summary of residuals, See Woolrich, et. 
al., 2001"), + ) contrast_num = traits.Range( - low=1, - argstr='-cope', - desc=('contrast number to start labeling ' - 'copes from')) + low=1, argstr="-cope", desc=("contrast number to start labeling copes from") + ) suffix = traits.Str( - argstr='-suffix %s', - desc=('suffix to put on the end of the cope filename ' - 'before the contrast number, default is ' - 'nothing')) + argstr="-suffix %s", + desc=( + "suffix to put on the end of the cope filename " + "before the contrast number, default is " + "nothing" + ), + ) class ContrastMgrOutputSpec(TraitedSpec): copes = OutputMultiPath( - File(exists=True), desc='Contrast estimates for each contrast') + File(exists=True), desc="Contrast estimates for each contrast" + ) varcopes = OutputMultiPath( - File(exists=True), desc='Variance estimates for each contrast') - zstats = OutputMultiPath( - File(exists=True), desc='z-stat file for each contrast') - tstats = OutputMultiPath( - File(exists=True), desc='t-stat file for each contrast') - fstats = OutputMultiPath( - File(exists=True), desc='f-stat file for each contrast') - zfstats = OutputMultiPath( - File(exists=True), desc='z-stat file for each F contrast') - neffs = OutputMultiPath( - File(exists=True), desc='neff file ?? for each contrast') + File(exists=True), desc="Variance estimates for each contrast" + ) + zstats = OutputMultiPath(File(exists=True), desc="z-stat file for each contrast") + tstats = OutputMultiPath(File(exists=True), desc="t-stat file for each contrast") + fstats = OutputMultiPath(File(exists=True), desc="f-stat file for each contrast") + zfstats = OutputMultiPath(File(exists=True), desc="z-stat file for each F contrast") + neffs = OutputMultiPath(File(exists=True), desc="neff file ?? for each contrast") class ContrastMgr(FSLCommand): @@ -1209,10 +1268,10 @@ class ContrastMgr(FSLCommand): same location. This has deprecated for FSL versions 5.0.7+ as the necessary corrections file is no longer generated by FILMGLS. """ - if Info.version() and LooseVersion( - Info.version()) >= LooseVersion("5.0.7"): + + if Info.version() and LooseVersion(Info.version()) >= LooseVersion("5.0.7"): DeprecationWarning("ContrastMgr is deprecated in FSL 5.0.7+") - _cmd = 'contrast_mgr' + _cmd = "contrast_mgr" input_spec = ContrastMgrInputSpec output_spec = ContrastMgrOutputSpec @@ -1220,42 +1279,39 @@ def _run_interface(self, runtime): # The returncode is meaningless in ContrastMgr. So check the output # in stderr and if it's set, then update the returncode # accordingly. 
- runtime = super(ContrastMgr, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime def _format_arg(self, name, trait_spec, value): - if name in ['param_estimates', 'corrections', 'dof_file']: - return '' - elif name in ['sigmasquareds']: + if name in ["param_estimates", "corrections", "dof_file"]: + return "" + elif name in ["sigmasquareds"]: path, _ = os.path.split(value) return path else: - return super(ContrastMgr, self)._format_arg( - name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _get_design_root(self, infile): _, fname = os.path.split(infile) - return fname.split('.')[0] + return fname.split(".")[0] def _get_numcons(self): numtcons = 0 numfcons = 0 if isdefined(self.inputs.tcon_file): - fp = open(self.inputs.tcon_file, 'rt') - for line in fp.readlines(): - if line.startswith('/NumContrasts'): - numtcons = int(line.split()[-1]) - break - fp.close() + with open(self.inputs.tcon_file) as fp: + for line in fp: + if line.startswith("/NumContrasts"): + numtcons = int(line.split()[-1]) + break if isdefined(self.inputs.fcon_file): - fp = open(self.inputs.fcon_file, 'rt') - for line in fp.readlines(): - if line.startswith('/NumContrasts'): - numfcons = int(line.split()[-1]) - break - fp.close() + with open(self.inputs.fcon_file) as fp: + for line in fp: + if line.startswith("/NumContrasts"): + numfcons = int(line.split()[-1]) + break return numtcons, numfcons def _list_outputs(self): @@ -1271,45 +1327,42 @@ def _list_outputs(self): tstats = [] neffs = [] for i in range(numtcons): - copes.append( - self._gen_fname('cope%d.nii' % (base_contrast + i), cwd=pth)) + copes.append(self._gen_fname("cope%d.nii" % (base_contrast + i), cwd=pth)) varcopes.append( - self._gen_fname( - 'varcope%d.nii' % (base_contrast + i), cwd=pth)) - zstats.append( - self._gen_fname('zstat%d.nii' % (base_contrast + i), cwd=pth)) - tstats.append( - self._gen_fname('tstat%d.nii' % (base_contrast + i), cwd=pth)) - neffs.append( - self._gen_fname('neff%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("varcope%d.nii" % (base_contrast + i), cwd=pth) + ) + zstats.append(self._gen_fname("zstat%d.nii" % (base_contrast + i), cwd=pth)) + tstats.append(self._gen_fname("tstat%d.nii" % (base_contrast + i), cwd=pth)) + neffs.append(self._gen_fname("neff%d.nii" % (base_contrast + i), cwd=pth)) if copes: - outputs['copes'] = copes - outputs['varcopes'] = varcopes - outputs['zstats'] = zstats - outputs['tstats'] = tstats - outputs['neffs'] = neffs + outputs["copes"] = copes + outputs["varcopes"] = varcopes + outputs["zstats"] = zstats + outputs["tstats"] = tstats + outputs["neffs"] = neffs fstats = [] zfstats = [] for i in range(numfcons): - fstats.append( - self._gen_fname('fstat%d.nii' % (base_contrast + i), cwd=pth)) + fstats.append(self._gen_fname("fstat%d.nii" % (base_contrast + i), cwd=pth)) zfstats.append( - self._gen_fname('zfstat%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("zfstat%d.nii" % (base_contrast + i), cwd=pth) + ) if fstats: - outputs['fstats'] = fstats - outputs['zfstats'] = zfstats + outputs["fstats"] = fstats + outputs["zfstats"] = zfstats return outputs class L2ModelInputSpec(BaseInterfaceInputSpec): num_copes = traits.Range( - low=1, mandatory=True, desc='number of copes to be combined') + low=1, mandatory=True, desc="number of copes to be combined" + ) class L2ModelOutputSpec(TraitedSpec): - design_mat = File(exists=True, desc='design matrix file') - design_con = 
File(exists=True, desc='design contrast file') - design_grp = File(exists=True, desc='design group file') + design_mat = File(exists=True, desc="design matrix file") + design_con = File(exists=True, desc="design contrast file") + design_grp = File(exists=True, desc="design group file") class L2Model(BaseInterface): @@ -1329,88 +1382,100 @@ class L2Model(BaseInterface): def _run_interface(self, runtime): cwd = os.getcwd() mat_txt = [ - '/NumWaves 1', '/NumPoints {:d}'.format(self.inputs.num_copes), - '/PPheights 1', '', '/Matrix' + "/NumWaves 1", + f"/NumPoints {self.inputs.num_copes:d}", + "/PPheights 1", + "", + "/Matrix", ] for i in range(self.inputs.num_copes): - mat_txt += ['1'] - mat_txt = '\n'.join(mat_txt) + mat_txt += ["1"] + mat_txt = "\n".join(mat_txt) con_txt = [ - '/ContrastName1 group mean', - '/NumWaves 1', - '/NumContrasts 1', - '/PPheights 1', - '/RequiredEffect 100', # XX where does this + "/ContrastName1 group mean", + "/NumWaves 1", + "/NumContrasts 1", + "/PPheights 1", + "/RequiredEffect 100", # XX where does this # number come from - '', - '/Matrix', - '1' + "", + "/Matrix", + "1", ] - con_txt = '\n'.join(con_txt) + con_txt = "\n".join(con_txt) grp_txt = [ - '/NumWaves 1', '/NumPoints {:d}'.format(self.inputs.num_copes), - '', '/Matrix' + "/NumWaves 1", + f"/NumPoints {self.inputs.num_copes:d}", + "", + "/Matrix", ] for i in range(self.inputs.num_copes): - grp_txt += ['1'] - grp_txt = '\n'.join(grp_txt) + grp_txt += ["1"] + grp_txt = "\n".join(grp_txt) - txt = { - 'design.mat': mat_txt, - 'design.con': con_txt, - 'design.grp': grp_txt - } + txt = {"design.mat": mat_txt, "design.con": con_txt, "design.grp": grp_txt} # write design files - for i, name in enumerate(['design.mat', 'design.con', 'design.grp']): - f = open(os.path.join(cwd, name), 'wt') - f.write(txt[name]) - f.close() + for i, name in enumerate(["design.mat", "design.con", "design.grp"]): + with open(os.path.join(cwd, name), "w") as f: + f.write(txt[name]) return runtime def _list_outputs(self): outputs = self._outputs().get() for field in list(outputs.keys()): - outputs[field] = os.path.join(os.getcwd(), field.replace('_', '.')) + outputs[field] = os.path.join(os.getcwd(), field.replace("_", ".")) return outputs class MultipleRegressDesignInputSpec(BaseInterfaceInputSpec): contrasts = traits.List( traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('F'), - traits.List( - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float)), ))), + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + Tuple( + traits.Str, + traits.Enum("F"), + traits.List( + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ) + ), + ), + ), mandatory=True, desc="List of contrasts with each contrast being a list of the form - \ [('name', 'stat', [condition list], [weight list])]. if \ session list is None or not provided, all sessions are used. 
For F \ contrasts, the condition list should contain previously defined \ -T-contrasts without any weight list.") +T-contrasts without any weight list.", + ) regressors = traits.Dict( traits.Str, traits.List(traits.Float), mandatory=True, - desc=('dictionary containing named lists of ' - 'regressors')) + desc=("dictionary containing named lists of regressors"), + ) groups = traits.List( - traits.Int, - desc=('list of group identifiers (defaults to single ' - 'group)')) + traits.Int, desc=("list of group identifiers (defaults to single group)") + ) class MultipleRegressDesignOutputSpec(TraitedSpec): - design_mat = File(exists=True, desc='design matrix file') - design_con = File(exists=True, desc='design t-contrast file') - design_fts = File(exists=True, desc='design f-contrast file') - design_grp = File(exists=True, desc='design group file') + design_mat = File(exists=True, desc="design matrix file") + design_con = File(exists=True, desc="design t-contrast file") + design_fts = File(exists=True, desc="design f-contrast file") + design_grp = File(exists=True, desc="design group file") class MultipleRegressDesign(BaseInterface): @@ -1442,13 +1507,10 @@ def _run_interface(self, runtime): regs = sorted(self.inputs.regressors.keys()) nwaves = len(regs) npoints = len(self.inputs.regressors[regs[0]]) - ntcons = sum([1 for con in self.inputs.contrasts if con[1] == 'T']) - nfcons = sum([1 for con in self.inputs.contrasts if con[1] == 'F']) + ntcons = sum(1 for con in self.inputs.contrasts if con[1] == "T") + nfcons = sum(1 for con in self.inputs.contrasts if con[1] == "F") # write mat file - mat_txt = [ - '/NumWaves %d' % nwaves, - '/NumPoints %d' % npoints - ] + mat_txt = ["/NumWaves %d" % nwaves, "/NumPoints %d" % npoints] ppheights = [] for reg in regs: maxreg = np.max(self.inputs.regressors[reg]) @@ -1457,89 +1519,86 @@ def _run_interface(self, runtime): regheight = max([abs(minreg), abs(maxreg)]) else: regheight = abs(maxreg - minreg) - ppheights.append('%e' % regheight) - mat_txt += ['/PPheights ' + ' '.join(ppheights)] - mat_txt += ['', '/Matrix'] + ppheights.append("%e" % regheight) + mat_txt += ["/PPheights " + " ".join(ppheights)] + mat_txt += ["", "/Matrix"] for cidx in range(npoints): - mat_txt.append(' '.join( - ['%e' % self.inputs.regressors[key][cidx] for key in regs])) - mat_txt = '\n'.join(mat_txt) + '\n' + mat_txt.append( + " ".join(["%e" % self.inputs.regressors[key][cidx] for key in regs]) + ) + mat_txt = "\n".join(mat_txt) + "\n" # write t-con file con_txt = [] counter = 0 tconmap = {} for conidx, con in enumerate(self.inputs.contrasts): - if con[1] == 'T': + if con[1] == "T": tconmap[conidx] = counter counter += 1 - con_txt += ['/ContrastName%d %s' % (counter, con[0])] + con_txt += ["/ContrastName%d %s" % (counter, con[0])] con_txt += [ - '/NumWaves %d' % nwaves, - '/NumContrasts %d' % ntcons, - '/PPheights %s' % ' '.join( - ['%e' % 1 for i in range(counter)]), - '/RequiredEffect %s' % ' '.join( - ['%.3f' % 100 for i in range(counter)]), '', '/Matrix' + "/NumWaves %d" % nwaves, + "/NumContrasts %d" % ntcons, + "/PPheights %s" % " ".join(["%e" % 1 for i in range(counter)]), + "/RequiredEffect %s" % " ".join(["%.3f" % 100 for i in range(counter)]), + "", + "/Matrix", ] for idx in sorted(tconmap.keys()): convals = np.zeros((nwaves, 1)) for regidx, reg in enumerate(self.inputs.contrasts[idx][2]): - convals[regs.index(reg)] = self.inputs.contrasts[idx][3][ - regidx] - con_txt.append(' '.join(['%e' % val for val in convals])) - con_txt = '\n'.join(con_txt) + '\n' + 
convals[regs.index(reg)] = self.inputs.contrasts[idx][3][regidx] + con_txt.append(" ".join(["%e" % val for val in convals])) + con_txt = "\n".join(con_txt) + "\n" # write f-con file - fcon_txt = '' + fcon_txt = "" if nfcons: fcon_txt = [ - '/NumWaves %d' % ntcons, - '/NumContrasts %d' % nfcons, '', '/Matrix' + "/NumWaves %d" % ntcons, + "/NumContrasts %d" % nfcons, + "", + "/Matrix", ] for conidx, con in enumerate(self.inputs.contrasts): - if con[1] == 'F': + if con[1] == "F": convals = np.zeros((ntcons, 1)) for tcon in con[2]: convals[tconmap[self.inputs.contrasts.index(tcon)]] = 1 - fcon_txt.append(' '.join(['%d' % val for val in convals])) - fcon_txt = '\n'.join(fcon_txt) - fcon_txt += '\n' + fcon_txt.append(" ".join(["%d" % val for val in convals])) + fcon_txt = "\n".join(fcon_txt) + "\n" # write group file - grp_txt = [ - '/NumWaves 1', - '/NumPoints %d' % npoints, '', '/Matrix' - ] + grp_txt = ["/NumWaves 1", "/NumPoints %d" % npoints, "", "/Matrix"] for i in range(npoints): if isdefined(self.inputs.groups): - grp_txt += ['%d' % self.inputs.groups[i]] + grp_txt += ["%d" % self.inputs.groups[i]] else: - grp_txt += ['1'] - grp_txt = '\n'.join(grp_txt) + '\n' + grp_txt += ["1"] + grp_txt = "\n".join(grp_txt) + "\n" txt = { - 'design.mat': mat_txt, - 'design.con': con_txt, - 'design.fts': fcon_txt, - 'design.grp': grp_txt + "design.mat": mat_txt, + "design.con": con_txt, + "design.fts": fcon_txt, + "design.grp": grp_txt, } # write design files for key, val in list(txt.items()): - if ('fts' in key) and (nfcons == 0): + if ("fts" in key) and (nfcons == 0): continue - filename = key.replace('_', '.') - f = open(os.path.join(cwd, filename), 'wt') - f.write(val) - f.close() + filename = key.replace("_", ".") + with open(os.path.join(cwd, filename), "w") as f: + f.write(val) return runtime def _list_outputs(self): outputs = self._outputs().get() - nfcons = sum([1 for con in self.inputs.contrasts if con[1] == 'F']) + nfcons = sum(1 for con in self.inputs.contrasts if con[1] == "F") for field in list(outputs.keys()): - if ('fts' in field) and (nfcons == 0): + if ("fts" in field) and (nfcons == 0): continue - outputs[field] = os.path.join(os.getcwd(), field.replace('_', '.')) + outputs[field] = os.path.join(os.getcwd(), field.replace("_", ".")) return outputs @@ -1550,18 +1609,19 @@ class SMMInputSpec(FSLCommandInputSpec): argstr='--sdf="%s"', mandatory=True, desc="statistics spatial map", - copyfile=False) + copyfile=False, + ) mask = File( exists=True, position=1, argstr='--mask="%s"', mandatory=True, desc="mask file", - copyfile=False) + copyfile=False, + ) no_deactivation_class = traits.Bool( - position=2, - argstr="--zfstatmode", - desc="enforces no deactivation class") + position=2, argstr="--zfstatmode", desc="enforces no deactivation class" + ) class SMMOutputSpec(TraitedSpec): @@ -1571,27 +1631,29 @@ class SMMOutputSpec(TraitedSpec): class SMM(FSLCommand): - ''' + """ Spatial Mixture Modelling. For more detail on the spatial mixture modelling see Mixture Models with Adaptive Spatial Regularisation for Segmentation with an Application to FMRI Data; Woolrich, M., Behrens, T., Beckmann, C., and Smith, S.; IEEE Trans. Medical Imaging, 24(1):1-11, 2005. 
- ''' - _cmd = 'mm --ld=logdir' + """ + + _cmd = "mm --ld=logdir" input_spec = SMMInputSpec output_spec = SMMOutputSpec def _list_outputs(self): outputs = self._outputs().get() # TODO get the true logdir from the stdout - outputs['null_p_map'] = self._gen_fname( - basename="w1_mean", cwd="logdir") - outputs['activation_p_map'] = self._gen_fname( - basename="w2_mean", cwd="logdir") - if (not isdefined(self.inputs.no_deactivation_class) - or not self.inputs.no_deactivation_class): - outputs['deactivation_p_map'] = self._gen_fname( - basename="w3_mean", cwd="logdir") + outputs["null_p_map"] = self._gen_fname(basename="w1_mean", cwd="logdir") + outputs["activation_p_map"] = self._gen_fname(basename="w2_mean", cwd="logdir") + if ( + not isdefined(self.inputs.no_deactivation_class) + or not self.inputs.no_deactivation_class + ): + outputs["deactivation_p_map"] = self._gen_fname( + basename="w3_mean", cwd="logdir" + ) return outputs @@ -1602,128 +1664,144 @@ class MELODICInputSpec(FSLCommandInputSpec): mandatory=True, position=0, desc="input file names (either single file name or a list)", - sep=",") - out_dir = Directory( - argstr="-o %s", desc="output directory name", genfile=True) - mask = File( - exists=True, argstr="-m %s", desc="file name of mask for thresholding") + sep=",", + ) + out_dir = Directory(argstr="-o %s", desc="output directory name", genfile=True) + mask = File(exists=True, argstr="-m %s", desc="file name of mask for thresholding") no_mask = traits.Bool(argstr="--nomask", desc="switch off masking") - update_mask = traits.Bool( - argstr="--update_mask", desc="switch off mask updating") + update_mask = traits.Bool(argstr="--update_mask", desc="switch off mask updating") no_bet = traits.Bool(argstr="--nobet", desc="switch off BET") bg_threshold = traits.Float( argstr="--bgthreshold=%f", - desc=("brain/non-brain threshold used to mask non-brain voxels, as a " - "percentage (only if --nobet selected)")) + desc=( + "brain/non-brain threshold used to mask non-brain voxels, as a " + "percentage (only if --nobet selected)" + ), + ) dim = traits.Int( argstr="-d %d", - desc=("dimensionality reduction into #num dimensions (default: " - "automatic estimation)")) + desc=( + "dimensionality reduction into #num dimensions (default: " + "automatic estimation)" + ), + ) dim_est = traits.Str( argstr="--dimest=%s", - desc=("use specific dim. estimation technique: lap, " - "bic, mdl, aic, mean (default: lap)")) - sep_whiten = traits.Bool( - argstr="--sep_whiten", desc="switch on separate whitening") + desc=( + "use specific dim. 
estimation technique: lap, " + "bic, mdl, aic, mean (default: lap)" + ), + ) + sep_whiten = traits.Bool(argstr="--sep_whiten", desc="switch on separate whitening") sep_vn = traits.Bool( - argstr="--sep_vn", desc="switch off joined variance normalization") + argstr="--sep_vn", desc="switch off joined variance normalization" + ) migp = traits.Bool(argstr="--migp", desc="switch on MIGP data reduction") - migpN = traits.Int( - argstr="--migpN %d", desc="number of internal Eigenmaps") + migpN = traits.Int(argstr="--migpN %d", desc="number of internal Eigenmaps") migp_shuffle = traits.Bool( - argstr="--migp_shuffle", - desc="randomise MIGP file order (default: TRUE)") + argstr="--migp_shuffle", desc="randomise MIGP file order (default: TRUE)" + ) migp_factor = traits.Int( argstr="--migp_factor %d", - desc= - "Internal Factor of mem-threshold relative to number of Eigenmaps (default: 2)" + desc="Internal Factor of mem-threshold relative to number of Eigenmaps (default: 2)", ) num_ICs = traits.Int( - argstr="-n %d", - desc="number of IC's to extract (for deflation approach)") + argstr="-n %d", desc="number of IC's to extract (for deflation approach)" + ) approach = traits.Str( argstr="-a %s", desc="approach for decomposition, 2D: defl, symm (default), 3D: tica " - "(default), concat") + "(default), concat", + ) non_linearity = traits.Str( - argstr="--nl=%s", desc="nonlinearity: gauss, tanh, pow3, pow4") - var_norm = traits.Bool( - argstr="--vn", desc="switch off variance normalization") + argstr="--nl=%s", desc="nonlinearity: gauss, tanh, pow3, pow4" + ) + var_norm = traits.Bool(argstr="--vn", desc="switch off variance normalization") pbsc = traits.Bool( - argstr="--pbsc", - desc="switch off conversion to percent BOLD signal change") + argstr="--pbsc", desc="switch off conversion to percent BOLD signal change" + ) cov_weight = traits.Float( argstr="--covarweight=%f", - desc=("voxel-wise weights for the covariance matrix (e.g. " - "segmentation information)")) + desc=( + "voxel-wise weights for the covariance matrix (e.g. 
" + "segmentation information)" + ), + ) epsilon = traits.Float(argstr="--eps=%f", desc="minimum error change") epsilonS = traits.Float( - argstr="--epsS=%f", - desc="minimum error change for rank-1 approximation in TICA") + argstr="--epsS=%f", desc="minimum error change for rank-1 approximation in TICA" + ) maxit = traits.Int( - argstr="--maxit=%d", - desc="maximum number of iterations before restart") + argstr="--maxit=%d", desc="maximum number of iterations before restart" + ) max_restart = traits.Int( - argstr="--maxrestart=%d", desc="maximum number of restarts") + argstr="--maxrestart=%d", desc="maximum number of restarts" + ) mm_thresh = traits.Float( - argstr="--mmthresh=%f", - desc="threshold for Mixture Model based inference") + argstr="--mmthresh=%f", desc="threshold for Mixture Model based inference" + ) no_mm = traits.Bool( - argstr="--no_mm", desc="switch off mixture modelling on IC maps") + argstr="--no_mm", desc="switch off mixture modelling on IC maps" + ) ICs = File( exists=True, argstr="--ICs=%s", - desc="filename of the IC components file for mixture modelling") + desc="filename of the IC components file for mixture modelling", + ) mix = File( exists=True, argstr="--mix=%s", - desc="mixing matrix for mixture modelling / filtering") + desc="mixing matrix for mixture modelling / filtering", + ) smode = File( exists=True, argstr="--smode=%s", - desc="matrix of session modes for report generation") + desc="matrix of session modes for report generation", + ) rem_cmp = traits.List( - traits.Int, argstr="-f %d", desc="component numbers to remove") + traits.Int, argstr="-f %d", desc="component numbers to remove" + ) report = traits.Bool(argstr="--report", desc="generate Melodic web report") bg_image = File( exists=True, argstr="--bgimage=%s", - desc="specify background image for report (default: mean image)") + desc="specify background image for report (default: mean image)", + ) tr_sec = traits.Float(argstr="--tr=%f", desc="TR in seconds") log_power = traits.Bool( - argstr="--logPower", - desc="calculate log of power for frequency spectrum") + argstr="--logPower", desc="calculate log of power for frequency spectrum" + ) t_des = File( - exists=True, - argstr="--Tdes=%s", - desc="design matrix across time-domain") + exists=True, argstr="--Tdes=%s", desc="design matrix across time-domain" + ) t_con = File( - exists=True, - argstr="--Tcon=%s", - desc="t-contrast matrix across time-domain") + exists=True, argstr="--Tcon=%s", desc="t-contrast matrix across time-domain" + ) s_des = File( - exists=True, - argstr="--Sdes=%s", - desc="design matrix across subject-domain") + exists=True, argstr="--Sdes=%s", desc="design matrix across subject-domain" + ) s_con = File( - exists=True, - argstr="--Scon=%s", - desc="t-contrast matrix across subject-domain") + exists=True, argstr="--Scon=%s", desc="t-contrast matrix across subject-domain" + ) out_all = traits.Bool(argstr="--Oall", desc="output everything") out_unmix = traits.Bool(argstr="--Ounmix", desc="output unmixing matrix") out_stats = traits.Bool( - argstr="--Ostats", desc="output thresholded maps and probability maps") + argstr="--Ostats", desc="output thresholded maps and probability maps" + ) out_pca = traits.Bool(argstr="--Opca", desc="output PCA results") out_white = traits.Bool( - argstr="--Owhite", desc="output whitening/dewhitening matrices") + argstr="--Owhite", desc="output whitening/dewhitening matrices" + ) out_orig = traits.Bool(argstr="--Oorig", desc="output the original ICs") out_mean = traits.Bool(argstr="--Omean", 
desc="output mean volume") report_maps = traits.Str( argstr="--report_maps=%s", - desc="control string for spatial map images (see slicer)") + desc="control string for spatial map images (see slicer)", + ) remove_deriv = traits.Bool( argstr="--remove_deriv", - desc="removes every second entry in paradigm file (EV derivatives)") + desc="removes every second entry in paradigm file (EV derivatives)", + ) class MELODICOutputSpec(TraitedSpec): @@ -1757,18 +1835,19 @@ class MELODIC(FSLCommand): """ + input_spec = MELODICInputSpec output_spec = MELODICOutputSpec - _cmd = 'melodic' + _cmd = "melodic" def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_dir): - outputs['out_dir'] = os.path.abspath(self.inputs.out_dir) + outputs["out_dir"] = os.path.abspath(self.inputs.out_dir) else: - outputs['out_dir'] = self._gen_filename("out_dir") + outputs["out_dir"] = self._gen_filename("out_dir") if isdefined(self.inputs.report) and self.inputs.report: - outputs['report_dir'] = os.path.join(outputs['out_dir'], "report") + outputs["report_dir"] = os.path.join(outputs["out_dir"], "report") return outputs def _gen_filename(self, name): @@ -1778,32 +1857,32 @@ def _gen_filename(self, name): class SmoothEstimateInputSpec(FSLCommandInputSpec): dof = traits.Int( - argstr='--dof=%d', + argstr="--dof=%d", mandatory=True, - xor=['zstat_file'], - desc='number of degrees of freedom') + xor=["zstat_file"], + desc="number of degrees of freedom", + ) mask_file = File( - argstr='--mask=%s', - exists=True, - mandatory=True, - desc='brain mask volume') + argstr="--mask=%s", exists=True, mandatory=True, desc="brain mask volume" + ) residual_fit_file = File( - argstr='--res=%s', - exists=True, - requires=['dof'], - desc='residual-fit image file') + argstr="--res=%s", exists=True, requires=["dof"], desc="residual-fit image file" + ) zstat_file = File( - argstr='--zstat=%s', exists=True, xor=['dof'], desc='zstat image file') + argstr="--zstat=%s", exists=True, xor=["dof"], desc="zstat image file" + ) class SmoothEstimateOutputSpec(TraitedSpec): - dlh = traits.Float(desc='smoothness estimate sqrt(det(Lambda))') - volume = traits.Int(desc='number of voxels in mask') - resels = traits.Float(desc='number of resels') + dlh = traits.Float(desc="smoothness estimate sqrt(det(Lambda))") + volume = traits.Int(desc="number of voxels in mask") + resels = traits.Float( + desc="volume of resel, in voxels, defined as FWHM_x * FWHM_y * FWHM_z" + ) class SmoothEstimate(FSLCommand): - """ Estimates the smoothness of an image + """Estimates the smoothness of an image Examples -------- @@ -1818,11 +1897,11 @@ class SmoothEstimate(FSLCommand): input_spec = SmoothEstimateInputSpec output_spec = SmoothEstimateOutputSpec - _cmd = 'smoothest' + _cmd = "smoothest" def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() - stdout = runtime.stdout.split('\n') + stdout = runtime.stdout.split("\n") outputs.dlh = float(stdout[0].split()[1]) outputs.volume = int(stdout[1].split()[1]) outputs.resels = float(stdout[2].split()[1]) @@ -1830,125 +1909,134 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): class ClusterInputSpec(FSLCommandInputSpec): - in_file = File( - argstr='--in=%s', mandatory=True, exists=True, desc='input volume') + in_file = File(argstr="--in=%s", mandatory=True, exists=True, desc="input volume") threshold = traits.Float( - argstr='--thresh=%.10f', - mandatory=True, - desc='threshold for input volume') + argstr="--thresh=%.10f", mandatory=True, desc="threshold 
for input volume" + ) out_index_file = traits.Either( traits.Bool, File, - argstr='--oindex=%s', - desc='output of cluster index (in size order)', - hash_files=False) + argstr="--oindex=%s", + desc="output of cluster index (in size order)", + hash_files=False, + ) out_threshold_file = traits.Either( traits.Bool, File, - argstr='--othresh=%s', - desc='thresholded image', - hash_files=False) + argstr="--othresh=%s", + desc="thresholded image", + hash_files=False, + ) out_localmax_txt_file = traits.Either( traits.Bool, File, - argstr='--olmax=%s', - desc='local maxima text file', - hash_files=False) + argstr="--olmax=%s", + desc="local maxima text file", + hash_files=False, + ) out_localmax_vol_file = traits.Either( traits.Bool, File, - argstr='--olmaxim=%s', - desc='output of local maxima volume', - hash_files=False) + argstr="--olmaxim=%s", + desc="output of local maxima volume", + hash_files=False, + ) out_size_file = traits.Either( traits.Bool, File, - argstr='--osize=%s', - desc='filename for output of size image', - hash_files=False) + argstr="--osize=%s", + desc="filename for output of size image", + hash_files=False, + ) out_max_file = traits.Either( traits.Bool, File, - argstr='--omax=%s', - desc='filename for output of max image', - hash_files=False) + argstr="--omax=%s", + desc="filename for output of max image", + hash_files=False, + ) out_mean_file = traits.Either( traits.Bool, File, - argstr='--omean=%s', - desc='filename for output of mean image', - hash_files=False) + argstr="--omean=%s", + desc="filename for output of mean image", + hash_files=False, + ) out_pval_file = traits.Either( traits.Bool, File, - argstr='--opvals=%s', - desc='filename for image output of log pvals', - hash_files=False) + argstr="--opvals=%s", + desc="filename for image output of log pvals", + hash_files=False, + ) pthreshold = traits.Float( - argstr='--pthresh=%.10f', - requires=['dlh', 'volume'], - desc='p-threshold for clusters') + argstr="--pthresh=%.10f", + requires=["dlh", "volume"], + desc="p-threshold for clusters", + ) peak_distance = traits.Float( - argstr='--peakdist=%.10f', - desc='minimum distance between local maxima/minima, in mm (default 0)') - cope_file = traits.File(argstr='--cope=%s', desc='cope volume') - volume = traits.Int( - argstr='--volume=%d', desc='number of voxels in the mask') + argstr="--peakdist=%.10f", + desc="minimum distance between local maxima/minima, in mm (default 0)", + ) + cope_file = File(argstr="--cope=%s", desc="cope volume") + volume = traits.Int(argstr="--volume=%d", desc="number of voxels in the mask") dlh = traits.Float( - argstr='--dlh=%.10f', desc='smoothness estimate = sqrt(det(Lambda))') + argstr="--dlh=%.10f", desc="smoothness estimate = sqrt(det(Lambda))" + ) fractional = traits.Bool( False, usedefault=True, - argstr='--fractional', - desc='interprets the threshold as a fraction of the robust range') + argstr="--fractional", + desc="interprets the threshold as a fraction of the robust range", + ) connectivity = traits.Int( - argstr='--connectivity=%d', - desc='the connectivity of voxels (default 26)') + argstr="--connectivity=%d", desc="the connectivity of voxels (default 26)" + ) use_mm = traits.Bool( - False, - usedefault=True, - argstr='--mm', - desc='use mm, not voxel, coordinates') + False, usedefault=True, argstr="--mm", desc="use mm, not voxel, coordinates" + ) find_min = traits.Bool( - False, - usedefault=True, - argstr='--min', - desc='find minima instead of maxima') + False, usedefault=True, argstr="--min", desc="find minima instead of 
maxima" + ) no_table = traits.Bool( False, usedefault=True, - argstr='--no_table', - desc='suppresses printing of the table info') + argstr="--no_table", + desc="suppresses printing of the table info", + ) minclustersize = traits.Bool( False, usedefault=True, - argstr='--minclustersize', - desc='prints out minimum significant cluster size') + argstr="--minclustersize", + desc="prints out minimum significant cluster size", + ) xfm_file = File( - argstr='--xfm=%s', - desc=('filename for Linear: input->standard-space ' - 'transform. Non-linear: input->highres transform')) + argstr="--xfm=%s", + desc=( + "filename for Linear: input->standard-space " + "transform. Non-linear: input->highres transform" + ), + ) std_space_file = File( - argstr='--stdvol=%s', desc='filename for standard-space volume') - num_maxima = traits.Int( - argstr='--num=%d', desc='no of local maxima to report') - warpfield_file = File( - argstr='--warpvol=%s', desc='file contining warpfield') + argstr="--stdvol=%s", desc="filename for standard-space volume" + ) + num_maxima = traits.Int(argstr="--num=%d", desc="no of local maxima to report") + warpfield_file = File(argstr="--warpvol=%s", desc="file containing warpfield") class ClusterOutputSpec(TraitedSpec): - index_file = File(desc='output of cluster index (in size order)') - threshold_file = File(desc='thresholded image') - localmax_txt_file = File(desc='local maxima text file') - localmax_vol_file = File(desc='output of local maxima volume') - size_file = File(desc='filename for output of size image') - max_file = File(desc='filename for output of max image') - mean_file = File(desc='filename for output of mean image') - pval_file = File(desc='filename for image output of log pvals') + index_file = File(desc="output of cluster index (in size order)") + threshold_file = File(desc="thresholded image") + localmax_txt_file = File(desc="local maxima text file") + localmax_vol_file = File(desc="output of local maxima volume") + size_file = File(desc="filename for output of size image") + max_file = File(desc="filename for output of max image") + mean_file = File(desc="filename for output of mean image") + pval_file = File(desc="filename for image output of log pvals") class Cluster(FSLCommand): - """ Uses FSL cluster to perform clustering on statistical output + """Uses FSL cluster to perform clustering on statistical output Examples -------- @@ -1962,19 +2050,20 @@ class Cluster(FSLCommand): 'cluster --in=zstat1.nii.gz --olmax=stats.txt --thresh=2.3000000000 --mm' """ + input_spec = ClusterInputSpec output_spec = ClusterOutputSpec - _cmd = 'cluster' + _cmd = "cluster" filemap = { - 'out_index_file': 'index', - 'out_threshold_file': 'threshold', - 'out_localmax_txt_file': 'localmax.txt', - 'out_localmax_vol_file': 'localmax', - 'out_size_file': 'size', - 'out_max_file': 'max', - 'out_mean_file': 'mean', - 'out_pval_file': 'pval' + "out_index_file": "index", + "out_threshold_file": "threshold", + "out_localmax_txt_file": "localmax.txt", + "out_localmax_vol_file": "localmax", + "out_size_file": "size", + "out_max_file": "max", + "out_mean_file": "mean", + "out_pval_file": "pval", } def _list_outputs(self): @@ -1986,12 +2075,13 @@ def _list_outputs(self): if isinstance(inval, bool): if inval: change_ext = True - if suffix.endswith('.txt'): + if suffix.endswith(".txt"): change_ext = False outputs[outkey] = self._gen_fname( self.inputs.in_file, - suffix='_' + suffix, - change_ext=change_ext) + suffix="_" + suffix, + change_ext=change_ext, + ) else: outputs[outkey] = 
os.path.abspath(inval) return outputs @@ -2003,7 +2093,7 @@ def _format_arg(self, name, spec, value): else: fname = value return spec.argstr % fname - return super(Cluster, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class DualRegressionInputSpec(FSLCommandInputSpec): @@ -2021,44 +2111,48 @@ class DualRegressionInputSpec(FSLCommandInputSpec): mandatory=True, position=1, desc="4D image containing spatial IC maps (melodic_IC) from the " - "whole-group ICA analysis") + "whole-group ICA analysis", + ) des_norm = traits.Bool( True, argstr="%i", position=2, usedefault=True, desc="Whether to variance-normalise the timecourses used as the " - "stage-2 regressors; True is default and recommended") + "stage-2 regressors; True is default and recommended", + ) one_sample_group_mean = traits.Bool( argstr="-1", position=3, - desc="perform 1-sample group-mean test instead of generic " - "permutation test") + desc="perform 1-sample group-mean test instead of generic permutation test", + ) design_file = File( exists=True, argstr="%s", position=3, - desc="Design matrix for final cross-subject modelling with " - "randomise") + desc="Design matrix for final cross-subject modelling with randomise", + ) con_file = File( exists=True, argstr="%s", position=4, - desc="Design contrasts for final cross-subject modelling with " - "randomise") + desc="Design contrasts for final cross-subject modelling with randomise", + ) n_perm = traits.Int( argstr="%i", mandatory=True, position=5, desc="Number of permutations for randomise; set to 1 for just raw " - "tstat output, set to 0 to not run randomise at all.") + "tstat output, set to 0 to not run randomise at all.", + ) out_dir = Directory( "output", argstr="%s", usedefault=True, position=6, desc="This directory will be created to hold all output and logfiles", - genfile=True) + genfile=True, + ) class DualRegressionOutputSpec(TraitedSpec): @@ -2083,16 +2177,17 @@ class DualRegression(FSLCommand): >>> dual_regression.run() # doctest: +SKIP """ + input_spec = DualRegressionInputSpec output_spec = DualRegressionOutputSpec - _cmd = 'dual_regression' + _cmd = "dual_regression" def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_dir): - outputs['out_dir'] = os.path.abspath(self.inputs.out_dir) + outputs["out_dir"] = os.path.abspath(self.inputs.out_dir) else: - outputs['out_dir'] = self._gen_filename("out_dir") + outputs["out_dir"] = self._gen_filename("out_dir") return outputs def _gen_filename(self, name): @@ -2102,92 +2197,101 @@ def _gen_filename(self, name): class RandomiseInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - desc='4D input file', - argstr='-i %s', - position=0, - mandatory=True) + exists=True, desc="4D input file", argstr="-i %s", position=0, mandatory=True + ) base_name = traits.Str( - 'randomise', - desc='the rootname that all generated files will have', + "randomise", + desc="the rootname that all generated files will have", argstr='-o "%s"', position=1, - usedefault=True) + usedefault=True, + ) design_mat = File( - exists=True, desc='design matrix file', argstr='-d %s', position=2) - tcon = File( - exists=True, desc='t contrasts file', argstr='-t %s', position=3) - fcon = File(exists=True, desc='f contrasts file', argstr='-f %s') - mask = File(exists=True, desc='mask image', argstr='-m %s') + exists=True, desc="design matrix file", argstr="-d %s", position=2 + ) + tcon = File(exists=True, desc="t contrasts file", argstr="-t %s", position=3) + fcon = 
File(exists=True, desc="f contrasts file", argstr="-f %s") + mask = File(exists=True, desc="mask image", argstr="-m %s") x_block_labels = File( - exists=True, desc='exchangeability block labels file', argstr='-e %s') + exists=True, desc="exchangeability block labels file", argstr="-e %s" + ) demean = traits.Bool( - desc='demean data temporally before model fitting', argstr='-D') + desc="demean data temporally before model fitting", argstr="-D" + ) one_sample_group_mean = traits.Bool( - desc=('perform 1-sample group-mean test instead of generic ' - 'permutation test'), - argstr='-1') + desc=("perform 1-sample group-mean test instead of generic permutation test"), + argstr="-1", + ) show_total_perms = traits.Bool( - desc=('print out how many unique permutations would be generated ' - 'and exit'), - argstr='-q') + desc=("print out how many unique permutations would be generated and exit"), + argstr="-q", + ) show_info_parallel_mode = traits.Bool( - desc='print out information required for parallel mode and exit', - argstr='-Q') + desc="print out information required for parallel mode and exit", argstr="-Q" + ) vox_p_values = traits.Bool( - desc='output voxelwise (corrected and uncorrected) p-value images', - argstr='-x') - tfce = traits.Bool( - desc='carry out Threshold-Free Cluster Enhancement', argstr='-T') + desc="output voxelwise (corrected and uncorrected) p-value images", argstr="-x" + ) + tfce = traits.Bool(desc="carry out Threshold-Free Cluster Enhancement", argstr="-T") tfce2D = traits.Bool( - desc=('carry out Threshold-Free Cluster Enhancement with 2D ' - 'optimisation'), - argstr='--T2') - f_only = traits.Bool(desc='calculate f-statistics only', argstr='--f_only') + desc=("carry out Threshold-Free Cluster Enhancement with 2D optimisation"), + argstr="--T2", + ) + f_only = traits.Bool(desc="calculate f-statistics only", argstr="--fonly") raw_stats_imgs = traits.Bool( - desc='output raw ( unpermuted ) statistic images', argstr='-R') + desc="output raw ( unpermuted ) statistic images", argstr="-R" + ) p_vec_n_dist_files = traits.Bool( - desc='output permutation vector and null distribution text files', - argstr='-P') + desc="output permutation vector and null distribution text files", argstr="-P" + ) num_perm = traits.Int( - argstr='-n %d', - desc='number of permutations (default 5000, set to 0 for exhaustive)') + argstr="-n %d", + desc="number of permutations (default 5000, set to 0 for exhaustive)", + ) seed = traits.Int( - argstr='--seed=%d', - desc='specific integer seed for random number generator') + argstr="--seed=%d", desc="specific integer seed for random number generator" + ) var_smooth = traits.Int( - argstr='-v %d', desc='use variance smoothing (std is in mm)') + argstr="-v %d", desc="use variance smoothing (std is in mm)" + ) c_thresh = traits.Float( - argstr='-c %.1f', desc='carry out cluster-based thresholding') + argstr="-c %.1f", desc="carry out cluster-based thresholding" + ) cm_thresh = traits.Float( - argstr='-C %.1f', desc='carry out cluster-mass-based thresholding') - f_c_thresh = traits.Float( - argstr='-F %.2f', desc='carry out f cluster thresholding') + argstr="-C %.1f", desc="carry out cluster-mass-based thresholding" + ) + f_c_thresh = traits.Float(argstr="-F %.2f", desc="carry out f cluster thresholding") f_cm_thresh = traits.Float( - argstr='-S %.2f', desc='carry out f cluster-mass thresholding') + argstr="-S %.2f", desc="carry out f cluster-mass thresholding" + ) tfce_H = traits.Float( - argstr='--tfce_H=%.2f', desc='TFCE height parameter (default=2)') + 
argstr="--tfce_H=%.2f", desc="TFCE height parameter (default=2)" + ) tfce_E = traits.Float( - argstr='--tfce_E=%.2f', desc='TFCE extent parameter (default=0.5)') + argstr="--tfce_E=%.2f", desc="TFCE extent parameter (default=0.5)" + ) tfce_C = traits.Float( - argstr='--tfce_C=%.2f', desc='TFCE connectivity (6 or 26; default=6)') + argstr="--tfce_C=%.2f", desc="TFCE connectivity (6 or 26; default=6)" + ) class RandomiseOutputSpec(TraitedSpec): - tstat_files = traits.List( - File(exists=True), desc='t contrast raw statistic') - fstat_files = traits.List( - File(exists=True), desc='f contrast raw statistic') + tstat_files = traits.List(File(exists=True), desc="t contrast raw statistic") + fstat_files = traits.List(File(exists=True), desc="f contrast raw statistic") t_p_files = traits.List( - File(exists=True), desc='f contrast uncorrected p values files') + File(exists=True), desc="f contrast uncorrected p values files" + ) f_p_files = traits.List( - File(exists=True), desc='f contrast uncorrected p values files') + File(exists=True), desc="f contrast uncorrected p values files" + ) t_corrected_p_files = traits.List( File(exists=True), - desc='t contrast FWE (Family-wise error) corrected p values files') + desc="t contrast FWE (Family-wise error) corrected p values files", + ) f_corrected_p_files = traits.List( File(exists=True), - desc='f contrast FWE (Family-wise error) corrected p values files') + desc="f contrast FWE (Family-wise error) corrected p values files", + ) class Randomise(FSLCommand): @@ -2204,153 +2308,174 @@ class Randomise(FSLCommand): """ - _cmd = 'randomise' + _cmd = "randomise" input_spec = RandomiseInputSpec output_spec = RandomiseOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['tstat_files'] = glob( - self._gen_fname('%s_tstat*.nii' % self.inputs.base_name)) - outputs['fstat_files'] = glob( - self._gen_fname('%s_fstat*.nii' % self.inputs.base_name)) + outputs["tstat_files"] = glob( + self._gen_fname("%s_tstat*.nii" % self.inputs.base_name) + ) + outputs["fstat_files"] = glob( + self._gen_fname("%s_fstat*.nii" % self.inputs.base_name) + ) prefix = False if self.inputs.tfce or self.inputs.tfce2D: - prefix = 'tfce' + prefix = "tfce" elif self.inputs.vox_p_values: - prefix = 'vox' + prefix = "vox" elif self.inputs.c_thresh or self.inputs.f_c_thresh: - prefix = 'clustere' + prefix = "clustere" elif self.inputs.cm_thresh or self.inputs.f_cm_thresh: - prefix = 'clusterm' + prefix = "clusterm" if prefix: - outputs['t_p_files'] = glob( - self._gen_fname('%s_%s_p_tstat*' % (self.inputs.base_name, - prefix))) - outputs['t_corrected_p_files'] = glob( - self._gen_fname('%s_%s_corrp_tstat*.nii' % - (self.inputs.base_name, prefix))) - - outputs['f_p_files'] = glob( - self._gen_fname('%s_%s_p_fstat*.nii' % (self.inputs.base_name, - prefix))) - outputs['f_corrected_p_files'] = glob( - self._gen_fname('%s_%s_corrp_fstat*.nii' % - (self.inputs.base_name, prefix))) + outputs["t_p_files"] = glob( + self._gen_fname(f"{self.inputs.base_name}_{prefix}_p_tstat*") + ) + outputs["t_corrected_p_files"] = glob( + self._gen_fname(f"{self.inputs.base_name}_{prefix}_corrp_tstat*.nii") + ) + + outputs["f_p_files"] = glob( + self._gen_fname(f"{self.inputs.base_name}_{prefix}_p_fstat*.nii") + ) + outputs["f_corrected_p_files"] = glob( + self._gen_fname(f"{self.inputs.base_name}_{prefix}_corrp_fstat*.nii") + ) return outputs class GLMInputSpec(FSLCommandInputSpec): in_file = File( exists=True, - argstr='-i %s', + argstr="-i %s", mandatory=True, position=1, - desc='input 
file name (text matrix or 3D/4D image file)') + desc="input file name (text matrix or 3D/4D image file)", + ) out_file = File( name_template="%s_glm", - argstr='-o %s', + argstr="-o %s", position=3, - desc=('filename for GLM parameter estimates' + ' (GLM betas)'), + desc=("filename for GLM parameter estimates" + " (GLM betas)"), name_source="in_file", - keep_extension=True) + keep_extension=True, + ) design = File( exists=True, - argstr='-d %s', + argstr="-d %s", mandatory=True, position=2, - desc=('file name of the GLM design matrix (text time' + - ' courses for temporal regression or an image' + - ' file for spatial regression)')) + desc=( + "file name of the GLM design matrix (text time" + " courses for temporal regression or an image" + " file for spatial regression)" + ), + ) contrasts = File( - exists=True, argstr='-c %s', desc=('matrix of t-statics contrasts')) + exists=True, argstr="-c %s", desc=("matrix of t-statistics contrasts") + ) mask = File( - exists=True, - argstr='-m %s', - desc=('mask image file name if input is image')) + exists=True, argstr="-m %s", desc=("mask image file name if input is image") + ) - dof = traits.Int( - argstr='--dof=%d', desc=('set degrees of freedom' + ' explicitly')) + dof = traits.Int(argstr="--dof=%d", desc=("set degrees of freedom" + " explicitly")) des_norm = traits.Bool( - argstr='--des_norm', - desc=('switch on normalization of the design' + - ' matrix columns to unit std deviation')) + argstr="--des_norm", + desc=( + "switch on normalization of the design" + " matrix columns to unit std deviation" + ), + ) dat_norm = traits.Bool( - argstr='--dat_norm', - desc=('switch on normalization of the data time series to unit std ' - 'deviation')) + argstr="--dat_norm", + desc=("switch on normalization of the data time series to unit std deviation"), + ) var_norm = traits.Bool( - argstr='--vn', desc=('perform MELODIC variance-normalisation on data')) + argstr="--vn", desc=("perform MELODIC variance-normalisation on data") + ) demean = traits.Bool( - argstr='--demean', desc=('switch on demeaining of design and data')) + argstr="--demean", desc=("switch on demeaning of design and data") + ) out_cope = File( - argstr='--out_cope=%s', - desc='output file name for COPE (either as txt or image') + argstr="--out_cope=%s", desc="output file name for COPE (either as txt or image)" + ) out_z_name = File( - argstr='--out_z=%s', - desc='output file name for Z-stats (either as txt or image') + argstr="--out_z=%s", desc="output file name for Z-stats (either as txt or image)" + ) out_t_name = File( - argstr='--out_t=%s', - desc='output file name for t-stats (either as txt or image') + argstr="--out_t=%s", desc="output file name for t-stats (either as txt or image)" + ) out_p_name = File( - argstr='--out_p=%s', - desc=('output file name for p-values of Z-stats (either as text file ' - 'or image)')) + argstr="--out_p=%s", + desc=( + "output file name for p-values of Z-stats (either as text file or image)" + ), + ) out_f_name = File( - argstr='--out_f=%s', - desc='output file name for F-value of full model fit') + argstr="--out_f=%s", desc="output file name for F-value of full model fit" + ) out_pf_name = File( - argstr='--out_pf=%s', - desc='output file name for p-value for full model fit') + argstr="--out_pf=%s", desc="output file name for p-value for full model fit" + ) - out_res_name = File( - argstr='--out_res=%s', desc='output file name for residuals') + out_res_name = File(argstr="--out_res=%s", desc="output file name for residuals") out_varcb_name = File( - 
argstr='--out_varcb=%s', desc='output file name for variance of COPEs') + argstr="--out_varcb=%s", desc="output file name for variance of COPEs" + ) out_sigsq_name = File( - argstr='--out_sigsq=%s', - desc=('output file name for residual noise variance sigma-square')) + argstr="--out_sigsq=%s", + desc=("output file name for residual noise variance sigma-square"), + ) out_data_name = File( - argstr='--out_data=%s', desc='output file name for pre-processed data') + argstr="--out_data=%s", desc="output file name for pre-processed data" + ) out_vnscales_name = File( - argstr='--out_vnscales=%s', - desc=('output file name for scaling factors for variance ' - 'normalisation')) + argstr="--out_vnscales=%s", + desc=("output file name for scaling factors for variance normalisation"), + ) class GLMOutputSpec(TraitedSpec): - out_file = File( - exists=True, desc=('file name of GLM parameters (if generated)')) + out_file = File(exists=True, desc=("file name of GLM parameters (if generated)")) out_cope = OutputMultiPath( File(exists=True), - desc=('output file name for COPEs (either as text file or image)')) + desc=("output file name for COPEs (either as text file or image)"), + ) out_z = OutputMultiPath( File(exists=True), - desc=('output file name for COPEs (either as text file or image)')) + desc=("output file name for Z-stats (either as text file or image)"), + ) out_t = OutputMultiPath( File(exists=True), - desc=('output file name for t-stats (either as text file or image)')) + desc=("output file name for t-stats (either as text file or image)"), + ) out_p = OutputMultiPath( File(exists=True), - desc=('output file name for p-values of Z-stats (either as text file ' - 'or image)')) + desc=( + "output file name for p-values of Z-stats (either as text file or image)" + ), + ) out_f = OutputMultiPath( - File(exists=True), - desc=('output file name for F-value of full model fit')) + File(exists=True), desc=("output file name for F-value of full model fit") + ) out_pf = OutputMultiPath( - File(exists=True), - desc=('output file name for p-value for full model fit')) - out_res = OutputMultiPath( - File(exists=True), desc='output file name for residuals') + File(exists=True), desc=("output file name for p-value for full model fit") + ) + out_res = OutputMultiPath(File(exists=True), desc="output file name for residuals") out_varcb = OutputMultiPath( - File(exists=True), desc='output file name for variance of COPEs') + File(exists=True), desc="output file name for variance of COPEs" + ) out_sigsq = OutputMultiPath( File(exists=True), - desc=('output file name for residual noise variance sigma-square')) + desc=("output file name for residual noise variance sigma-square"), + ) out_data = OutputMultiPath( - File(exists=True), desc='output file for preprocessed data') + File(exists=True), desc="output file for preprocessed data" + ) out_vnscales = OutputMultiPath( File(exists=True), - desc=('output file name for scaling factors for variance ' - 'normalisation')) + desc=("output file name for scaling factors for variance normalisation"), + ) class GLM(FSLCommand): @@ -2365,46 +2490,46 @@ class GLM(FSLCommand): 'fsl_glm -i functional.nii -d maps.nii -o functional_glm.nii' """ - _cmd = 'fsl_glm' + + _cmd = "fsl_glm" input_spec = GLMInputSpec output_spec = GLMOutputSpec def _list_outputs(self): - outputs = super(GLM, self)._list_outputs() + outputs = super()._list_outputs() if isdefined(self.inputs.out_cope): - outputs['out_cope'] = os.path.abspath(self.inputs.out_cope) + outputs["out_cope"] = 
os.path.abspath(self.inputs.out_cope) if isdefined(self.inputs.out_z_name): - outputs['out_z'] = os.path.abspath(self.inputs.out_z_name) + outputs["out_z"] = os.path.abspath(self.inputs.out_z_name) if isdefined(self.inputs.out_t_name): - outputs['out_t'] = os.path.abspath(self.inputs.out_t_name) + outputs["out_t"] = os.path.abspath(self.inputs.out_t_name) if isdefined(self.inputs.out_p_name): - outputs['out_p'] = os.path.abspath(self.inputs.out_p_name) + outputs["out_p"] = os.path.abspath(self.inputs.out_p_name) if isdefined(self.inputs.out_f_name): - outputs['out_f'] = os.path.abspath(self.inputs.out_f_name) + outputs["out_f"] = os.path.abspath(self.inputs.out_f_name) if isdefined(self.inputs.out_pf_name): - outputs['out_pf'] = os.path.abspath(self.inputs.out_pf_name) + outputs["out_pf"] = os.path.abspath(self.inputs.out_pf_name) if isdefined(self.inputs.out_res_name): - outputs['out_res'] = os.path.abspath(self.inputs.out_res_name) + outputs["out_res"] = os.path.abspath(self.inputs.out_res_name) if isdefined(self.inputs.out_varcb_name): - outputs['out_varcb'] = os.path.abspath(self.inputs.out_varcb_name) + outputs["out_varcb"] = os.path.abspath(self.inputs.out_varcb_name) if isdefined(self.inputs.out_sigsq_name): - outputs['out_sigsq'] = os.path.abspath(self.inputs.out_sigsq_name) + outputs["out_sigsq"] = os.path.abspath(self.inputs.out_sigsq_name) if isdefined(self.inputs.out_data_name): - outputs['out_data'] = os.path.abspath(self.inputs.out_data_name) + outputs["out_data"] = os.path.abspath(self.inputs.out_data_name) if isdefined(self.inputs.out_vnscales_name): - outputs['out_vnscales'] = os.path.abspath( - self.inputs.out_vnscales_name) + outputs["out_vnscales"] = os.path.abspath(self.inputs.out_vnscales_name) return outputs @@ -2423,9 +2548,10 @@ def load_template(name): """ from pkg_resources import resource_filename as pkgrf - full_fname = pkgrf('nipype', - os.path.join('interfaces', 'fsl', 'model_templates', - name)) + + full_fname = pkgrf( + "nipype", os.path.join("interfaces", "fsl", "model_templates", name) + ) with open(full_fname) as template_file: template = Template(template_file.read()) diff --git a/nipype/interfaces/fsl/possum.py b/nipype/interfaces/fsl/possum.py index 50b88db185..2b09764003 100644 --- a/nipype/interfaces/fsl/possum.py +++ b/nipype/interfaces/fsl/possum.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -11,83 +10,107 @@ """ from .base import FSLCommand, FSLCommandInputSpec -from ..base import TraitedSpec, File, traits +from ..base import TraitedSpec, File, traits, Tuple class B0CalcInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, - argstr='-i %s', + argstr="-i %s", position=0, - desc='filename of input image (usually a tissue/air segmentation)') + desc="filename of input image (usually a tissue/air segmentation)", + ) out_file = File( - argstr='-o %s', + argstr="-o %s", position=1, - name_source=['in_file'], - name_template='%s_b0field', - output_name='out_file', - desc='filename of B0 output volume') + name_source=["in_file"], + name_template="%s_b0field", + output_name="out_file", + desc="filename of B0 output volume", + ) x_grad = traits.Float( - 0.0, usedefault=True, - argstr='--gx=%0.4f', - desc='Value for zeroth-order x-gradient field (per mm)') + 0.0, + usedefault=True, + argstr="--gx=%0.4f", + desc="Value for zeroth-order x-gradient field (per mm)", + ) y_grad = traits.Float( - 0.0, 
usedefault=True, - argstr='--gy=%0.4f', - desc='Value for zeroth-order y-gradient field (per mm)') + 0.0, + usedefault=True, + argstr="--gy=%0.4f", + desc="Value for zeroth-order y-gradient field (per mm)", + ) z_grad = traits.Float( - 0.0, usedefault=True, - argstr='--gz=%0.4f', - desc='Value for zeroth-order z-gradient field (per mm)') + 0.0, + usedefault=True, + argstr="--gz=%0.4f", + desc="Value for zeroth-order z-gradient field (per mm)", + ) x_b0 = traits.Float( - 0.0, usedefault=True, - argstr='--b0x=%0.2f', - xor=['xyz_b0'], - desc='Value for zeroth-order b0 field (x-component), in Tesla') + 0.0, + usedefault=True, + argstr="--b0x=%0.2f", + xor=["xyz_b0"], + desc="Value for zeroth-order b0 field (x-component), in Tesla", + ) y_b0 = traits.Float( - 0.0, usedefault=True, - argstr='--b0y=%0.2f', - xor=['xyz_b0'], - desc='Value for zeroth-order b0 field (y-component), in Tesla') + 0.0, + usedefault=True, + argstr="--b0y=%0.2f", + xor=["xyz_b0"], + desc="Value for zeroth-order b0 field (y-component), in Tesla", + ) z_b0 = traits.Float( - 1.0, usedefault=True, - argstr='--b0=%0.2f', - xor=['xyz_b0'], - desc='Value for zeroth-order b0 field (z-component), in Tesla') - - xyz_b0 = traits.Tuple( + 1.0, + usedefault=True, + argstr="--b0=%0.2f", + xor=["xyz_b0"], + desc="Value for zeroth-order b0 field (z-component), in Tesla", + ) + + xyz_b0 = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--b0x=%0.2f --b0y=%0.2f --b0=%0.2f', - xor=['x_b0', 'y_b0', 'z_b0'], - desc='Zeroth-order B0 field in Tesla') + argstr="--b0x=%0.2f --b0y=%0.2f --b0=%0.2f", + xor=["x_b0", "y_b0", "z_b0"], + desc="Zeroth-order B0 field in Tesla", + ) delta = traits.Float( - -9.45e-6, usedefault=True, - argstr='-d %e', desc='Delta value (chi_tissue - chi_air)') + -9.45e-6, + usedefault=True, + argstr="-d %e", + desc="Delta value (chi_tissue - chi_air)", + ) chi_air = traits.Float( - 4.0e-7, usedefault=True, - argstr='--chi0=%e', desc='susceptibility of air') + 4.0e-7, usedefault=True, argstr="--chi0=%e", desc="susceptibility of air" + ) compute_xyz = traits.Bool( - False, usedefault=True, - argstr='--xyz', - desc='calculate and save all 3 field components (i.e. x,y,z)') + False, + usedefault=True, + argstr="--xyz", + desc="calculate and save all 3 field components (i.e. 
x,y,z)", + ) extendboundary = traits.Float( - 1.0, usedefault=True, - argstr='--extendboundary=%0.2f', - desc='Relative proportion to extend voxels at boundary') + 1.0, + usedefault=True, + argstr="--extendboundary=%0.2f", + desc="Relative proportion to extend voxels at boundary", + ) directconv = traits.Bool( - False, usedefault=True, - argstr='--directconv', - desc='use direct (image space) convolution, not FFT') + False, + usedefault=True, + argstr="--directconv", + desc="use direct (image space) convolution, not FFT", + ) class B0CalcOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='filename of B0 output volume') + out_file = File(exists=True, desc="filename of B0 output volume") class B0Calc(FSLCommand): @@ -112,6 +135,6 @@ class B0Calc(FSLCommand): """ - _cmd = 'b0calc' + _cmd = "b0calc" input_spec = B0CalcInputSpec output_spec = B0CalcOutputSpec diff --git a/nipype/interfaces/fsl/preprocess.py b/nipype/interfaces/fsl/preprocess.py index da06a5c574..e4abd5ce16 100644 --- a/nipype/interfaces/fsl/preprocess.py +++ b/nipype/interfaces/fsl/preprocess.py @@ -1,24 +1,28 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL `_ command line tools. This was written to work with FSL version 4.1.4. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, open - import os import os.path as op from warnings import warn import numpy as np from nibabel import load +from looseversion import LooseVersion from ...utils.filemanip import split_filename -from ..base import (TraitedSpec, File, InputMultiPath, OutputMultiPath, - Undefined, traits, isdefined) +from ..base import ( + TraitedSpec, + File, + InputMultiPath, + OutputMultiPath, + Undefined, + traits, + Tuple, + isdefined, +) from .base import FSLCommand, FSLCommandInputSpec, Info @@ -27,72 +31,91 @@ class BETInputSpec(FSLCommandInputSpec): # will put something on the end in_file = File( exists=True, - desc='input file to skull strip', - argstr='%s', + desc="input file to skull strip", + argstr="%s", position=0, - mandatory=True) + mandatory=True, + copyfile=False, + ) out_file = File( - desc='name of output skull stripped image', - argstr='%s', + desc="name of output skull stripped image", + argstr="%s", position=1, genfile=True, - hash_files=False) - outline = traits.Bool(desc='create surface outline image', argstr='-o') - mask = traits.Bool(desc='create binary mask image', argstr='-m') - skull = traits.Bool(desc='create skull image', argstr='-s') - no_output = traits.Bool( - argstr='-n', desc="Don't generate segmented output") - frac = traits.Float( - desc='fractional intensity threshold', argstr='-f %.2f') + hash_files=False, + ) + outline = traits.Bool(desc="create surface outline image", argstr="-o") + mask = traits.Bool(desc="create binary mask image", argstr="-m") + skull = traits.Bool(desc="create skull image", argstr="-s") + no_output = traits.Bool(argstr="-n", desc="Don't generate segmented output") + frac = traits.Float(desc="fractional intensity threshold", argstr="-f %.2f") vertical_gradient = traits.Float( - argstr='-g %.2f', - desc='vertical gradient in fractional intensity threshold (-1, 1)') - radius = traits.Int(argstr='-r %d', units='mm', desc="head radius") + argstr="-g %.2f", + desc="vertical gradient in fractional intensity threshold (-1, 1)", + ) + radius = traits.Int(argstr="-r %d", units="mm", 
desc="head radius") center = traits.List( traits.Int, - desc='center of gravity in voxels', - argstr='-c %s', + desc="center of gravity in voxels", + argstr="-c %s", minlen=0, maxlen=3, - units='voxels') + units="voxels", + ) threshold = traits.Bool( - argstr='-t', - desc="apply thresholding to segmented brain image and mask") - mesh = traits.Bool(argstr='-e', desc="generate a vtk mesh brain surface") + argstr="-t", desc="apply thresholding to segmented brain image and mask" + ) + mesh = traits.Bool(argstr="-e", desc="generate a vtk mesh brain surface") # the remaining 'options' are more like modes (mutually exclusive) that # FSL actually implements in a shell script wrapper around the bet binary. # for some combinations of them in specific order a call would not fail, # but in general using more than one of the following is clearly not # supported - _xor_inputs = ('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided') + _xor_inputs = ( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ) robust = traits.Bool( - desc='robust brain centre estimation (iterates BET several times)', - argstr='-R', - xor=_xor_inputs) + desc="robust brain centre estimation (iterates BET several times)", + argstr="-R", + xor=_xor_inputs, + ) padding = traits.Bool( - desc=('improve BET if FOV is very small in Z (by temporarily padding ' - 'end slices)'), - argstr='-Z', - xor=_xor_inputs) + desc=( + "improve BET if FOV is very small in Z (by temporarily padding " + "end slices)" + ), + argstr="-Z", + xor=_xor_inputs, + ) remove_eyes = traits.Bool( - desc='eye & optic nerve cleanup (can be useful in SIENA)', - argstr='-S', - xor=_xor_inputs) + desc="eye & optic nerve cleanup (can be useful in SIENA)", + argstr="-S", + xor=_xor_inputs, + ) surfaces = traits.Bool( - desc=('run bet2 and then betsurf to get additional skull and scalp ' - 'surfaces (includes registrations)'), - argstr='-A', - xor=_xor_inputs) + desc=( + "run bet2 and then betsurf to get additional skull and scalp " + "surfaces (includes registrations)" + ), + argstr="-A", + xor=_xor_inputs, + ) t2_guided = File( - desc='as with creating surfaces, when also feeding in ' - 'non-brain-extracted T2 (includes registrations)', - argstr='-A2 %s', - xor=_xor_inputs) - functional = traits.Bool( - argstr='-F', xor=_xor_inputs, desc="apply to 4D fMRI data") + desc="as with creating surfaces, when also feeding in " + "non-brain-extracted T2 (includes registrations)", + argstr="-A2 %s", + xor=_xor_inputs, + ) + functional = traits.Bool(argstr="-F", xor=_xor_inputs, desc="apply to 4D fMRI data") reduce_bias = traits.Bool( - argstr='-B', xor=_xor_inputs, desc="bias field and neck cleanup") + argstr="-B", xor=_xor_inputs, desc="bias field and neck cleanup" + ) class BETOutputSpec(TraitedSpec): @@ -101,15 +124,13 @@ class BETOutputSpec(TraitedSpec): outline_file = File(desc="path/name of outline file (if generated)") meshfile = File(desc="path/name of vtk mesh file (if generated)") inskull_mask_file = File(desc="path/name of inskull mask (if generated)") - inskull_mesh_file = File( - desc="path/name of inskull mesh outline (if generated)") + inskull_mesh_file = File(desc="path/name of inskull mesh outline (if generated)") outskull_mask_file = File(desc="path/name of outskull mask (if generated)") - outskull_mesh_file = File( - desc="path/name of outskull mesh outline (if generated)") + outskull_mesh_file = File(desc="path/name of outskull mesh outline (if generated)") 
outskin_mask_file = File(desc="path/name of outskin mask (if generated)") - outskin_mesh_file = File( - desc="path/name of outskin mesh outline (if generated)") + outskin_mesh_file = File(desc="path/name of outskin mesh outline (if generated)") skull_mask_file = File(desc="path/name of skull mask (if generated)") + skull_file = File(desc="path/name of skull file (if generated)") class BET(FSLCommand): @@ -131,7 +152,7 @@ class BET(FSLCommand): """ - _cmd = 'bet' + _cmd = "bet" input_spec = BETInputSpec output_spec = BETOutputSpec @@ -139,176 +160,211 @@ def _run_interface(self, runtime): # The returncode is meaningless in BET. So check the output # in stderr and if it's set, then update the returncode # accordingly. - runtime = super(BET, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime + def _format_arg(self, name, spec, value): + formatted = super()._format_arg(name, spec, value) + if name == "in_file": + # Convert to relative path to prevent BET failure + # with long paths. + return op.relpath(formatted, start=os.getcwd()) + return formatted + def _gen_outfilename(self): out_file = self.inputs.out_file + # Generate default output filename if none specified. if not isdefined(out_file) and isdefined(self.inputs.in_file): - out_file = self._gen_fname(self.inputs.in_file, suffix='_brain') - return os.path.abspath(out_file) + out_file = self._gen_fname(self.inputs.in_file, suffix="_brain") + # Convert to relative path to prevent BET failure + # with long paths. + return op.relpath(out_file, start=os.getcwd()) + return out_file def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self._gen_outfilename() - if ((isdefined(self.inputs.mesh) and self.inputs.mesh) - or (isdefined(self.inputs.surfaces) and self.inputs.surfaces)): - outputs['meshfile'] = self._gen_fname( - outputs['out_file'], suffix='_mesh.vtk', change_ext=False) - if (isdefined(self.inputs.mask) and self.inputs.mask) or \ - (isdefined(self.inputs.reduce_bias) and - self.inputs.reduce_bias): - outputs['mask_file'] = self._gen_fname( - outputs['out_file'], suffix='_mask') + outputs["out_file"] = os.path.abspath(self._gen_outfilename()) + + basename = os.path.basename(outputs["out_file"]) + cwd = os.path.dirname(outputs["out_file"]) + kwargs = {"basename": basename, "cwd": cwd} + + if (isdefined(self.inputs.mesh) and self.inputs.mesh) or ( + isdefined(self.inputs.surfaces) and self.inputs.surfaces + ): + outputs["meshfile"] = self._gen_fname( + suffix="_mesh.vtk", change_ext=False, **kwargs + ) + if (isdefined(self.inputs.mask) and self.inputs.mask) or ( + isdefined(self.inputs.reduce_bias) and self.inputs.reduce_bias + ): + outputs["mask_file"] = self._gen_fname(suffix="_mask", **kwargs) if isdefined(self.inputs.outline) and self.inputs.outline: - outputs['outline_file'] = self._gen_fname( - outputs['out_file'], suffix='_overlay') + outputs["outline_file"] = self._gen_fname(suffix="_overlay", **kwargs) if isdefined(self.inputs.surfaces) and self.inputs.surfaces: - outputs['inskull_mask_file'] = self._gen_fname( - outputs['out_file'], suffix='_inskull_mask') - outputs['inskull_mesh_file'] = self._gen_fname( - outputs['out_file'], suffix='_inskull_mesh') - outputs['outskull_mask_file'] = self._gen_fname( - outputs['out_file'], suffix='_outskull_mask') - outputs['outskull_mesh_file'] = self._gen_fname( - outputs['out_file'], suffix='_outskull_mesh') - outputs['outskin_mask_file'] = self._gen_fname( -
outputs['out_file'], suffix='_outskin_mask') - outputs['outskin_mesh_file'] = self._gen_fname( - outputs['out_file'], suffix='_outskin_mesh') - outputs['skull_mask_file'] = self._gen_fname( - outputs['out_file'], suffix='_skull_mask') + outputs["inskull_mask_file"] = self._gen_fname( + suffix="_inskull_mask", **kwargs + ) + outputs["inskull_mesh_file"] = self._gen_fname( + suffix="_inskull_mesh", **kwargs + ) + outputs["outskull_mask_file"] = self._gen_fname( + suffix="_outskull_mask", **kwargs + ) + outputs["outskull_mesh_file"] = self._gen_fname( + suffix="_outskull_mesh", **kwargs + ) + outputs["outskin_mask_file"] = self._gen_fname( + suffix="_outskin_mask", **kwargs + ) + outputs["outskin_mesh_file"] = self._gen_fname( + suffix="_outskin_mesh", **kwargs + ) + outputs["skull_mask_file"] = self._gen_fname(suffix="_skull_mask", **kwargs) + if isdefined(self.inputs.skull) and self.inputs.skull: + outputs["skull_file"] = self._gen_fname(suffix="_skull", **kwargs) if isdefined(self.inputs.no_output) and self.inputs.no_output: - outputs['out_file'] = Undefined + outputs["out_file"] = Undefined return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() return None class FASTInputSpec(FSLCommandInputSpec): - """ Defines inputs (trait classes) for FAST """ + """Defines inputs (trait classes) for FAST""" + in_files = InputMultiPath( File(exists=True), copyfile=False, - desc='image, or multi-channel set of images, ' - 'to be segmented', - argstr='%s', + desc="image, or multi-channel set of images, to be segmented", + argstr="%s", position=-1, - mandatory=True) - out_basename = File(desc='base name of output files', argstr='-o %s') + mandatory=True, + ) + out_basename = File(desc="base name of output files", argstr="-o %s") # ^^ uses in_file name as basename if none given number_classes = traits.Range( - low=1, high=10, argstr='-n %d', desc='number of tissue-type classes') - output_biasfield = traits.Bool( - desc='output estimated bias field', argstr='-b') + low=1, high=10, argstr="-n %d", desc="number of tissue-type classes" + ) + output_biasfield = traits.Bool(desc="output estimated bias field", argstr="-b") output_biascorrected = traits.Bool( - desc='output restored image (bias-corrected image)', argstr='-B') + desc="output restored image (bias-corrected image)", argstr="-B" + ) img_type = traits.Enum( (1, 2, 3), - desc='int specifying type of image: (1 = T1, 2 = T2, 3 = PD)', - argstr='-t %d') + desc="int specifying type of image: (1 = T1, 2 = T2, 3 = PD)", + argstr="-t %d", + ) bias_iters = traits.Range( low=1, high=10, - argstr='-I %d', - desc='number of main-loop iterations during ' - 'bias-field removal') + argstr="-I %d", + desc="number of main-loop iterations during bias-field removal", + ) bias_lowpass = traits.Range( low=4, high=40, - desc='bias field smoothing extent (FWHM) ' - 'in mm', - argstr='-l %d', - units='mm') + desc="bias field smoothing extent (FWHM) in mm", + argstr="-l %d", + units="mm", + ) init_seg_smooth = traits.Range( low=0.0001, high=0.1, - desc='initial segmentation spatial ' - 'smoothness (during bias field ' - 'estimation)', - argstr='-f %.3f') + desc="initial segmentation spatial " + "smoothness (during bias field " + "estimation)", + argstr="-f %.3f", + ) segments = traits.Bool( - desc='outputs a separate binary image for each ' - 'tissue type', - argstr='-g') + desc="outputs a separate binary image for each tissue type", argstr="-g" + ) init_transform = File( exists=True, - desc='<standard2input.mat> initialise
' using priors', - argstr='-a %s') + desc="<standard2input.mat> initialise using priors", + argstr="-a %s", + ) other_priors = InputMultiPath( File(exist=True), - desc='alternative prior images', - argstr='-A %s', + desc="alternative prior images", + argstr="-A %s", minlen=3, - maxlen=3) + maxlen=3, + ) no_pve = traits.Bool( - desc='turn off PVE (partial volume estimation)', argstr='--nopve') - no_bias = traits.Bool(desc='do not remove bias field', argstr='-N') - use_priors = traits.Bool(desc='use priors throughout', argstr='-P') + desc="turn off PVE (partial volume estimation)", argstr="--nopve" + ) + no_bias = traits.Bool(desc="do not remove bias field", argstr="-N") + use_priors = traits.Bool(desc="use priors throughout", argstr="-P") # ^^ Must also set -a!, mutually inclusive?? No, conditional mandatory... need to figure out how to handle with traits. segment_iters = traits.Range( low=1, high=50, - desc='number of segmentation-initialisation' - ' iterations', - argstr='-W %d') + desc="number of segmentation-initialisation iterations", + argstr="-W %d", + ) mixel_smooth = traits.Range( - low=0.0, - high=1.0, - desc='spatial smoothness for mixeltype', - argstr='-R %.2f') + low=0.0, high=1.0, desc="spatial smoothness for mixeltype", argstr="-R %.2f" + ) iters_afterbias = traits.Range( low=1, high=20, - desc='number of main-loop iterations ' - 'after bias-field removal', - argstr='-O %d') + desc="number of main-loop iterations after bias-field removal", + argstr="-O %d", + ) hyper = traits.Range( - low=0.0, - high=1.0, - desc='segmentation spatial smoothness', - argstr='-H %.2f') - verbose = traits.Bool(desc='switch on diagnostic messages', argstr='-v') + low=0.0, high=1.0, desc="segmentation spatial smoothness", argstr="-H %.2f" + ) + verbose = traits.Bool(desc="switch on diagnostic messages", argstr="-v") manual_seg = File( - exists=True, desc='Filename containing intensities', argstr='-s %s') + exists=True, desc="Filename containing intensities", argstr="-s %s" + ) probability_maps = traits.Bool( - desc='outputs individual probability maps', argstr='-p') + desc="outputs individual probability maps", argstr="-p" + ) class FASTOutputSpec(TraitedSpec): """Specify possible outputs from FAST""" + tissue_class_map = File( exists=True, - desc='path/name of binary segmented volume file' - ' one val for each class _seg') + desc="path/name of binary segmented volume file" + " one val for each class _seg", + ) tissue_class_files = OutputMultiPath( File( desc=( - 'path/name of binary segmented volumes one file for each class ' - '_seg_x'))) + "path/name of binary segmented volumes one file for each class " + "_seg_x" + ) + ) + ) restored_image = OutputMultiPath( File( desc=( - 'restored images (one for each input image) named according to ' - 'the input images _restore'))) + "restored images (one for each input image) named according to " + "the input images _restore" + ) + ) + ) mixeltype = File(desc="path/name of mixeltype volume file _mixeltype") - partial_volume_map = File(desc='path/name of partial volume file _pveseg') + partial_volume_map = File(desc="path/name of partial volume file _pveseg") partial_volume_files = OutputMultiPath( - File( - desc='path/name of partial volumes files one for each class, _pve_x' - )) + File(desc="path/name of partial volumes files one for each class, _pve_x") + ) - bias_field = OutputMultiPath(File(desc='Estimated bias field _bias')) + bias_field = OutputMultiPath(File(desc="Estimated bias field _bias")) probability_maps = OutputMultiPath( - File(desc='filenames, one for each
class, for each input, prob_x')) + File(desc="filenames, one for each class, for each input, prob_x") + ) class FAST(FSLCommand): @@ -320,22 +376,23 @@ class FAST(FSLCommand): Examples -------- >>> from nipype.interfaces import fsl - >>> fastr = fsl.FAST() - >>> fastr.inputs.in_files = 'structural.nii' - >>> fastr.inputs.out_basename = 'fast_' - >>> fastr.cmdline + >>> fast = fsl.FAST() + >>> fast.inputs.in_files = 'structural.nii' + >>> fast.inputs.out_basename = 'fast_' + >>> fast.cmdline 'fast -o fast_ -S 1 structural.nii' - >>> out = fastr.run() # doctest: +SKIP + >>> out = fast.run() # doctest: +SKIP """ - _cmd = 'fast' + + _cmd = "fast" input_spec = FASTInputSpec output_spec = FASTOutputSpec def _format_arg(self, name, spec, value): # first do what should be done in general - formatted = super(FAST, self)._format_arg(name, spec, value) - if name == 'in_files': + formatted = super()._format_arg(name, spec, value) + if name == "in_files": # FAST needs the -S parameter value to correspond to the number # of input images, otherwise it will ignore all but the first formatted = "-S %d %s" % (len(value), formatted) @@ -351,287 +408,306 @@ def _list_outputs(self): # input filename _gen_fname_opts = {} if isdefined(self.inputs.out_basename): - _gen_fname_opts['basename'] = self.inputs.out_basename - _gen_fname_opts['cwd'] = os.getcwd() + _gen_fname_opts["basename"] = self.inputs.out_basename + _gen_fname_opts["cwd"] = os.getcwd() else: - _gen_fname_opts['basename'] = self.inputs.in_files[-1] - _gen_fname_opts['cwd'], _, _ = split_filename( - _gen_fname_opts['basename']) + _gen_fname_opts["basename"] = self.inputs.in_files[-1] + _gen_fname_opts["cwd"], _, _ = split_filename(_gen_fname_opts["basename"]) - outputs['tissue_class_map'] = self._gen_fname( - suffix='_seg', **_gen_fname_opts) + outputs["tissue_class_map"] = self._gen_fname(suffix="_seg", **_gen_fname_opts) if self.inputs.segments: - outputs['tissue_class_files'] = [] + outputs["tissue_class_files"] = [] for i in range(nclasses): - outputs['tissue_class_files'].append( - self._gen_fname(suffix='_seg_%d' % i, **_gen_fname_opts)) + outputs["tissue_class_files"].append( + self._gen_fname(suffix="_seg_%d" % i, **_gen_fname_opts) + ) if isdefined(self.inputs.output_biascorrected): - outputs['restored_image'] = [] + outputs["restored_image"] = [] if len(self.inputs.in_files) > 1: # for multi-image segmentation there is one corrected image # per input for val, f in enumerate(self.inputs.in_files): # image numbering is 1-based - outputs['restored_image'].append( + outputs["restored_image"].append( self._gen_fname( - suffix='_restore_%d' % (val + 1), - **_gen_fname_opts)) + suffix="_restore_%d" % (val + 1), **_gen_fname_opts + ) + ) else: # single image segmentation has unnumbered output image - outputs['restored_image'].append( - self._gen_fname(suffix='_restore', **_gen_fname_opts)) + outputs["restored_image"].append( + self._gen_fname(suffix="_restore", **_gen_fname_opts) + ) - outputs['mixeltype'] = self._gen_fname( - suffix='_mixeltype', **_gen_fname_opts) + outputs["mixeltype"] = self._gen_fname(suffix="_mixeltype", **_gen_fname_opts) if not self.inputs.no_pve: - outputs['partial_volume_map'] = self._gen_fname( - suffix='_pveseg', **_gen_fname_opts) - outputs['partial_volume_files'] = [] + outputs["partial_volume_map"] = self._gen_fname( + suffix="_pveseg", **_gen_fname_opts + ) + outputs["partial_volume_files"] = [] for i in range(nclasses): - outputs['partial_volume_files'].append( - self._gen_fname(suffix='_pve_%d' % i, 
**_gen_fname_opts)) + outputs["partial_volume_files"].append( + self._gen_fname(suffix="_pve_%d" % i, **_gen_fname_opts) + ) if self.inputs.output_biasfield: - outputs['bias_field'] = [] + outputs["bias_field"] = [] if len(self.inputs.in_files) > 1: # for multi-image segmentation there is one bias field image # per input for val, f in enumerate(self.inputs.in_files): # image numbering is 1-based - outputs['bias_field'].append( + outputs["bias_field"].append( self._gen_fname( - suffix='_bias_%d' % (val + 1), **_gen_fname_opts)) + suffix="_bias_%d" % (val + 1), **_gen_fname_opts + ) + ) else: # single image segmentation has unnumbered output image - outputs['bias_field'].append( - self._gen_fname(suffix='_bias', **_gen_fname_opts)) + outputs["bias_field"].append( + self._gen_fname(suffix="_bias", **_gen_fname_opts) + ) if self.inputs.probability_maps: - outputs['probability_maps'] = [] + outputs["probability_maps"] = [] for i in range(nclasses): - outputs['probability_maps'].append( - self._gen_fname(suffix='_prob_%d' % i, **_gen_fname_opts)) + outputs["probability_maps"].append( + self._gen_fname(suffix="_prob_%d" % i, **_gen_fname_opts) + ) return outputs class FLIRTInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - argstr='-in %s', - mandatory=True, - position=0, - desc='input file') + exists=True, argstr="-in %s", mandatory=True, position=0, desc="input file" + ) reference = File( - exists=True, - argstr='-ref %s', - mandatory=True, - position=1, - desc='reference file') + exists=True, argstr="-ref %s", mandatory=True, position=1, desc="reference file" + ) out_file = File( - argstr='-out %s', - desc='registered output file', - name_source=['in_file'], - name_template='%s_flirt', + argstr="-out %s", + desc="registered output file", + name_source=["in_file"], + name_template="%s_flirt", position=2, - hash_files=False) + hash_files=False, + ) out_matrix_file = File( - argstr='-omat %s', - name_source=['in_file'], + argstr="-omat %s", + name_source=["in_file"], keep_extension=True, - name_template='%s_flirt.mat', - desc='output affine matrix in 4x4 asciii format', + name_template="%s_flirt.mat", + desc="output affine matrix in 4x4 ascii format", position=3, - hash_files=False) + hash_files=False, + ) out_log = File( - name_source=['in_file'], + name_source=["in_file"], keep_extension=True, - requires=['save_log'], - name_template='%s_flirt.log', - desc='output log') - in_matrix_file = File(argstr='-init %s', desc='input 4x4 affine matrix') + requires=["save_log"], + name_template="%s_flirt.log", + desc="output log", + ) + in_matrix_file = File(argstr="-init %s", desc="input 4x4 affine matrix") apply_xfm = traits.Bool( - argstr='-applyxfm', + argstr="-applyxfm", desc=( - 'apply transformation supplied by in_matrix_file or uses_qform to' - ' use the affine matrix stored in the reference header')) + "apply transformation supplied by in_matrix_file or uses_qform to" + " use the affine matrix stored in the reference header" + ), + ) apply_isoxfm = traits.Float( - argstr='-applyisoxfm %f', - xor=['apply_xfm'], - desc='as applyxfm but forces isotropic resampling') + argstr="-applyisoxfm %f", + xor=["apply_xfm"], + desc="as applyxfm but forces isotropic resampling", + ) datatype = traits.Enum( - 'char', - 'short', - 'int', - 'float', - 'double', - argstr='-datatype %s', - desc='force output data type') + "char", + "short", + "int", + "float", + "double", + argstr="-datatype %s", + desc="force output data type", + ) cost = traits.Enum( - 'mutualinfo', - 'corratio', - 'normcorr', -
'normmi', - 'leastsq', - 'labeldiff', - 'bbr', - argstr='-cost %s', - desc='cost function') + "mutualinfo", + "corratio", + "normcorr", + "normmi", + "leastsq", + "labeldiff", + "bbr", + argstr="-cost %s", + desc="cost function", + ) # XXX What is the difference between 'cost' and 'searchcost'? Are # these both necessary or do they map to the same variable. cost_func = traits.Enum( - 'mutualinfo', - 'corratio', - 'normcorr', - 'normmi', - 'leastsq', - 'labeldiff', - 'bbr', - argstr='-searchcost %s', - desc='cost function') + "mutualinfo", + "corratio", + "normcorr", + "normmi", + "leastsq", + "labeldiff", + "bbr", + argstr="-searchcost %s", + desc="cost function", + ) uses_qform = traits.Bool( - argstr='-usesqform', desc='initialize using sform or qform') - display_init = traits.Bool( - argstr='-displayinit', desc='display initial matrix') + argstr="-usesqform", desc="initialize using sform or qform" + ) + display_init = traits.Bool(argstr="-displayinit", desc="display initial matrix") angle_rep = traits.Enum( - 'quaternion', - 'euler', - argstr='-anglerep %s', - desc='representation of rotation angles') + "quaternion", + "euler", + argstr="-anglerep %s", + desc="representation of rotation angles", + ) interp = traits.Enum( - 'trilinear', - 'nearestneighbour', - 'sinc', - 'spline', - argstr='-interp %s', - desc='final interpolation method used in reslicing') + "trilinear", + "nearestneighbour", + "sinc", + "spline", + argstr="-interp %s", + desc="final interpolation method used in reslicing", + ) sinc_width = traits.Int( - argstr='-sincwidth %d', units='voxels', desc='full-width in voxels') + argstr="-sincwidth %d", units="voxels", desc="full-width in voxels" + ) sinc_window = traits.Enum( - 'rectangular', - 'hanning', - 'blackman', - argstr='-sincwindow %s', - desc='sinc window') # XXX better doc - bins = traits.Int(argstr='-bins %d', desc='number of histogram bins') - dof = traits.Int( - argstr='-dof %d', desc='number of transform degrees of freedom') - no_resample = traits.Bool( - argstr='-noresample', desc='do not change input sampling') + "rectangular", + "hanning", + "blackman", + argstr="-sincwindow %s", + desc="sinc window", + ) # XXX better doc + bins = traits.Int(argstr="-bins %d", desc="number of histogram bins") + dof = traits.Int(argstr="-dof %d", desc="number of transform degrees of freedom") + no_resample = traits.Bool(argstr="-noresample", desc="do not change input sampling") force_scaling = traits.Bool( - argstr='-forcescaling', desc='force rescaling even for low-res images') + argstr="-forcescaling", desc="force rescaling even for low-res images" + ) min_sampling = traits.Float( - argstr='-minsampling %f', - units='mm', - desc='set minimum voxel dimension for sampling') + argstr="-minsampling %f", + units="mm", + desc="set minimum voxel dimension for sampling", + ) padding_size = traits.Int( - argstr='-paddingsize %d', - units='voxels', - desc='for applyxfm: interpolates outside image ' - 'by size') + argstr="-paddingsize %d", + units="voxels", + desc="for applyxfm: interpolates outside image by size", + ) searchr_x = traits.List( traits.Int, minlen=2, maxlen=2, - units='degrees', - argstr='-searchrx %s', - desc='search angles along x-axis, in degrees') + units="degrees", + argstr="-searchrx %s", + desc="search angles along x-axis, in degrees", + ) searchr_y = traits.List( traits.Int, minlen=2, maxlen=2, - units='degrees', - argstr='-searchry %s', - desc='search angles along y-axis, in degrees') + units="degrees", + argstr="-searchry %s", + desc="search angles along 
y-axis, in degrees", + ) searchr_z = traits.List( traits.Int, minlen=2, maxlen=2, - units='degrees', - argstr='-searchrz %s', - desc='search angles along z-axis, in degrees') + units="degrees", + argstr="-searchrz %s", + desc="search angles along z-axis, in degrees", + ) no_search = traits.Bool( - argstr='-nosearch', desc='set all angular searches to ranges 0 to 0') + argstr="-nosearch", desc="set all angular searches to ranges 0 to 0" + ) coarse_search = traits.Int( - argstr='-coarsesearch %d', - units='degrees', - desc='coarse search delta angle') + argstr="-coarsesearch %d", units="degrees", desc="coarse search delta angle" + ) fine_search = traits.Int( - argstr='-finesearch %d', - units='degrees', - desc='fine search delta angle') + argstr="-finesearch %d", units="degrees", desc="fine search delta angle" + ) schedule = File( - exists=True, argstr='-schedule %s', desc='replaces default schedule') + exists=True, argstr="-schedule %s", desc="replaces default schedule" + ) ref_weight = File( - exists=True, - argstr='-refweight %s', - desc='File for reference weighting volume') + exists=True, argstr="-refweight %s", desc="File for reference weighting volume" + ) in_weight = File( - exists=True, - argstr='-inweight %s', - desc='File for input weighting volume') - no_clamp = traits.Bool( - argstr='-noclamp', desc='do not use intensity clamping') + exists=True, argstr="-inweight %s", desc="File for input weighting volume" + ) + no_clamp = traits.Bool(argstr="-noclamp", desc="do not use intensity clamping") no_resample_blur = traits.Bool( - argstr='-noresampblur', desc='do not use blurring on downsampling') - rigid2D = traits.Bool( - argstr='-2D', desc='use 2D rigid body mode - ignores dof') - save_log = traits.Bool(desc='save to log file') - verbose = traits.Int(argstr='-verbose %d', desc='verbose mode, 0 is least') + argstr="-noresampblur", desc="do not use blurring on downsampling" + ) + rigid2D = traits.Bool(argstr="-2D", desc="use 2D rigid body mode - ignores dof") + save_log = traits.Bool(desc="save to log file") + verbose = traits.Int(argstr="-verbose %d", desc="verbose mode, 0 is least") bgvalue = traits.Float( 0, - argstr='-setbackground %f', - desc=('use specified background value for points ' - 'outside FOV')) + argstr="-setbackground %f", + desc=("use specified background value for points outside FOV"), + ) # BBR options wm_seg = File( - argstr='-wmseg %s', - min_ver='5.0.0', - desc='white matter segmentation volume needed by BBR cost function') + argstr="-wmseg %s", + min_ver="5.0.0", + desc="white matter segmentation volume needed by BBR cost function", + ) wmcoords = File( - argstr='-wmcoords %s', - min_ver='5.0.0', - desc='white matter boundary coordinates for BBR cost function') + argstr="-wmcoords %s", + min_ver="5.0.0", + desc="white matter boundary coordinates for BBR cost function", + ) wmnorms = File( - argstr='-wmnorms %s', - min_ver='5.0.0', - desc='white matter boundary normals for BBR cost function') + argstr="-wmnorms %s", + min_ver="5.0.0", + desc="white matter boundary normals for BBR cost function", + ) fieldmap = File( - argstr='-fieldmap %s', - min_ver='5.0.0', - desc=('fieldmap image in rads/s - must be already registered to the ' - 'reference image')) + argstr="-fieldmap %s", + min_ver="5.0.0", + desc=( + "fieldmap image in rads/s - must be already registered to the " + "reference image" + ), + ) fieldmapmask = File( - argstr='-fieldmapmask %s', - min_ver='5.0.0', - desc='mask for fieldmap image') + argstr="-fieldmapmask %s", min_ver="5.0.0", desc="mask for 
fieldmap image" + ) pedir = traits.Int( - argstr='-pedir %d', - min_ver='5.0.0', - desc='phase encode direction of EPI - 1/2/3=x/y/z & -1/-2/-3=-x/-y/-z') + argstr="-pedir %d", + min_ver="5.0.0", + desc="phase encode direction of EPI - 1/2/3=x/y/z & -1/-2/-3=-x/-y/-z", + ) echospacing = traits.Float( - argstr='-echospacing %f', - min_ver='5.0.0', - desc='value of EPI echo spacing - units of seconds') + argstr="-echospacing %f", + min_ver="5.0.0", + desc="value of EPI echo spacing - units of seconds", + ) bbrtype = traits.Enum( - 'signed', - 'global_abs', - 'local_abs', - argstr='-bbrtype %s', - min_ver='5.0.0', - desc=('type of bbr cost function: signed [default], global_abs, ' - 'local_abs')) + "signed", + "global_abs", + "local_abs", + argstr="-bbrtype %s", + min_ver="5.0.0", + desc=("type of bbr cost function: signed [default], global_abs, local_abs"), + ) bbrslope = traits.Float( - argstr='-bbrslope %f', min_ver='5.0.0', desc='value of bbr slope') + argstr="-bbrslope %f", min_ver="5.0.0", desc="value of bbr slope" + ) class FLIRTOutputSpec(TraitedSpec): - out_file = File( - exists=True, desc='path/name of registered file (if generated)') + out_file = File(exists=True, desc="path/name of registered file (if generated)") out_matrix_file = File( - exists=True, - desc='path/name of calculated affine transform ' - '(if generated)') - out_log = File(desc='path/name of output log (if generated)') + exists=True, desc="path/name of calculated affine transform (if generated)" + ) + out_log = File(desc="path/name of output log (if generated)") class FLIRT(FSLCommand): @@ -656,17 +732,19 @@ class FLIRT(FSLCommand): >>> res = flt.run() #doctest: +SKIP """ - _cmd = 'flirt' + + _cmd = "flirt" input_spec = FLIRTInputSpec output_spec = FLIRTOutputSpec _log_written = False def aggregate_outputs(self, runtime=None, needed_outputs=None): - outputs = super(FLIRT, self).aggregate_outputs( - runtime=runtime, needed_outputs=needed_outputs) + outputs = super().aggregate_outputs( + runtime=runtime, needed_outputs=needed_outputs + ) if self.inputs.save_log and not self._log_written: with open(outputs.out_log, "a") as text_file: - text_file.write(runtime.stdout + '\n') + text_file.write(runtime.stdout + "\n") self._log_written = True return outputs @@ -675,29 +753,34 @@ def _parse_inputs(self, skip=None): skip = [] if self.inputs.save_log and not self.inputs.verbose: self.inputs.verbose = 1 - if self.inputs.apply_xfm and not (self.inputs.in_matrix_file - or self.inputs.uses_qform): - raise RuntimeError('Argument apply_xfm requires in_matrix_file or ' - 'uses_qform arguments to run') - skip.append('save_log') - return super(FLIRT, self)._parse_inputs(skip=skip) + if self.inputs.apply_xfm and not ( + self.inputs.in_matrix_file or self.inputs.uses_qform + ): + raise RuntimeError( + "Argument apply_xfm requires in_matrix_file or " + "uses_qform arguments to run" + ) + skip.append("save_log") + return super()._parse_inputs(skip=skip) class ApplyXFMInputSpec(FLIRTInputSpec): apply_xfm = traits.Bool( True, - argstr='-applyxfm', + argstr="-applyxfm", desc=( - 'apply transformation supplied by in_matrix_file or uses_qform to' - ' use the affine matrix stored in the reference header'), - usedefault=True) + "apply transformation supplied by in_matrix_file or uses_qform to" + " use the affine matrix stored in the reference header" + ), + usedefault=True, + ) class ApplyXFM(FLIRT): """Currently just a light wrapper around FLIRT, with no modifications - ApplyXFM is used to apply an existing tranform to an image + ApplyXFM is 
used to apply an existing transform to an image Examples @@ -714,6 +797,7 @@ class ApplyXFM(FLIRT): >>> result = applyxfm.run() # doctest: +SKIP """ + input_spec = ApplyXFMInputSpec @@ -723,70 +807,69 @@ class MCFLIRTInputSpec(FSLCommandInputSpec): position=0, argstr="-in %s", mandatory=True, - desc="timeseries to motion-correct") + desc="timeseries to motion-correct", + ) out_file = File( - argstr='-out %s', genfile=True, desc="file to write", hash_files=False) + argstr="-out %s", genfile=True, desc="file to write", hash_files=False + ) cost = traits.Enum( - 'mutualinfo', - 'woods', - 'corratio', - 'normcorr', - 'normmi', - 'leastsquares', - argstr='-cost %s', - desc="cost function to optimize") - bins = traits.Int(argstr='-bins %d', desc="number of histogram bins") - dof = traits.Int( - argstr='-dof %d', desc="degrees of freedom for the transformation") - ref_vol = traits.Int(argstr='-refvol %d', desc="volume to align frames to") - scaling = traits.Float( - argstr='-scaling %.2f', desc="scaling factor to use") + "mutualinfo", + "woods", + "corratio", + "normcorr", + "normmi", + "leastsquares", + argstr="-cost %s", + desc="cost function to optimize", + ) + bins = traits.Int(argstr="-bins %d", desc="number of histogram bins") + dof = traits.Int(argstr="-dof %d", desc="degrees of freedom for the transformation") + ref_vol = traits.Int(argstr="-refvol %d", desc="volume to align frames to") + scaling = traits.Float(argstr="-scaling %.2f", desc="scaling factor to use") smooth = traits.Float( - argstr='-smooth %.2f', desc="smoothing factor for the cost function") + argstr="-smooth %.2f", desc="smoothing factor for the cost function" + ) rotation = traits.Int( - argstr='-rotation %d', desc="scaling factor for rotation tolerances") + argstr="-rotation %d", desc="scaling factor for rotation tolerances" + ) stages = traits.Int( - argstr='-stages %d', - desc="stages (if 4, perform final search with sinc interpolation") - init = File( - exists=True, argstr='-init %s', desc="inital transformation matrix") + argstr="-stages %d", + desc="stages (if 4, perform final search with sinc interpolation)", + ) + init = File(exists=True, argstr="-init %s", desc="initial transformation matrix") interpolation = traits.Enum( "spline", "nn", "sinc", argstr="-%s_final", - desc="interpolation method for transformation") - use_gradient = traits.Bool( - argstr='-gdt', desc="run search on gradient images") - use_contour = traits.Bool( - argstr='-edge', desc="run search on contour images") - mean_vol = traits.Bool(argstr='-meanvol', desc="register to mean volume") + desc="interpolation method for transformation", + ) + use_gradient = traits.Bool(argstr="-gdt", desc="run search on gradient images") + use_contour = traits.Bool(argstr="-edge", desc="run search on contour images") + mean_vol = traits.Bool(argstr="-meanvol", desc="register to mean volume") stats_imgs = traits.Bool( - argstr='-stats', desc="produce variance and std. dev.
images" + ) + save_mats = traits.Bool(argstr="-mats", desc="save transformation matrices") + save_plots = traits.Bool(argstr="-plots", desc="save transformation parameters") save_rms = traits.Bool( - argstr='-rmsabs -rmsrel', desc="save rms displacement parameters") + argstr="-rmsabs -rmsrel", desc="save rms displacement parameters" + ) ref_file = File( - exists=True, - argstr='-reffile %s', - desc="target image for motion correction") + exists=True, argstr="-reffile %s", desc="target image for motion correction" + ) class MCFLIRTOutputSpec(TraitedSpec): out_file = File(exists=True, desc="motion-corrected timeseries") variance_img = File(exists=True, desc="variance image") std_img = File(exists=True, desc="standard deviation image") - mean_img = File( - exists=True, desc="mean timeseries image (if mean_vol=True)") + mean_img = File(exists=True, desc="mean timeseries image (if mean_vol=True)") par_file = File(exists=True, desc="text-file with motion parameters") - mat_file = OutputMultiPath( - File(exists=True), desc="transformation matrices") + mat_file = OutputMultiPath(File(exists=True), desc="transformation matrices") rms_files = OutputMultiPath( - File(exists=True), - desc="absolute and relative displacement parameters") + File(exists=True), desc="absolute and relative displacement parameters" + ) class MCFLIRT(FSLCommand): @@ -807,7 +890,8 @@ class MCFLIRT(FSLCommand): >>> res = mcflt.run() # doctest: +SKIP """ - _cmd = 'mcflirt' + + _cmd = "mcflirt" input_spec = MCFLIRTInputSpec output_spec = MCFLIRTOutputSpec @@ -817,19 +901,30 @@ def _format_arg(self, name, spec, value): return "" else: return spec.argstr % value - return super(MCFLIRT, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self._gen_outfilename() - output_dir = os.path.dirname(outputs['out_file']) + outputs["out_file"] = self._gen_outfilename() + output_dir = os.path.dirname(outputs["out_file"]) if isdefined(self.inputs.stats_imgs) and self.inputs.stats_imgs: - outputs['variance_img'] = self._gen_fname( - outputs['out_file'] + '_variance.ext', cwd=output_dir) - outputs['std_img'] = self._gen_fname( - outputs['out_file'] + '_sigma.ext', cwd=output_dir) + if LooseVersion(Info.version()) < LooseVersion("6.0.0"): + # FSL <6.0 outputs have .nii.gz_variance.nii.gz as extension + outputs["variance_img"] = self._gen_fname( + outputs["out_file"] + "_variance.ext", cwd=output_dir + ) + outputs["std_img"] = self._gen_fname( + outputs["out_file"] + "_sigma.ext", cwd=output_dir + ) + else: + outputs["variance_img"] = self._gen_fname( + outputs["out_file"], suffix="_variance", cwd=output_dir + ) + outputs["std_img"] = self._gen_fname( + outputs["out_file"], suffix="_sigma", cwd=output_dir + ) # The mean image created if -stats option is specified ('meanvol') # is missing the top and bottom slices. Therefore we only expose the @@ -838,28 +933,34 @@ def _list_outputs(self): # Note that the same problem holds for the std and variance image. 
if isdefined(self.inputs.mean_vol) and self.inputs.mean_vol: - outputs['mean_img'] = self._gen_fname( - outputs['out_file'] + '_mean_reg.ext', cwd=output_dir) + if LooseVersion(Info.version()) < LooseVersion("6.0.0"): + # FSL <6.0 outputs have .nii.gz_mean_img.nii.gz as extension + outputs["mean_img"] = self._gen_fname( + outputs["out_file"] + "_mean_reg.ext", cwd=output_dir + ) + else: + outputs["mean_img"] = self._gen_fname( + outputs["out_file"], suffix="_mean_reg", cwd=output_dir + ) if isdefined(self.inputs.save_mats) and self.inputs.save_mats: - _, filename = os.path.split(outputs['out_file']) - matpathname = os.path.join(output_dir, filename + '.mat') + _, filename = os.path.split(outputs["out_file"]) + matpathname = os.path.join(output_dir, filename + ".mat") _, _, _, timepoints = load(self.inputs.in_file).shape - outputs['mat_file'] = [] + outputs["mat_file"] = [] for t in range(timepoints): - outputs['mat_file'].append( - os.path.join(matpathname, 'MAT_%04d' % t)) + outputs["mat_file"].append(os.path.join(matpathname, "MAT_%04d" % t)) if isdefined(self.inputs.save_plots) and self.inputs.save_plots: # Note - if e.g. out_file has .nii.gz, you get .nii.gz.par, # which is what mcflirt does! - outputs['par_file'] = outputs['out_file'] + '.par' + outputs["par_file"] = outputs["out_file"] + ".par" if isdefined(self.inputs.save_rms) and self.inputs.save_rms: - outfile = outputs['out_file'] - outputs['rms_files'] = [outfile + '_abs.rms', outfile + '_rel.rms'] + outfile = outputs["out_file"] + outputs["rms_files"] = [outfile + "_abs.rms", outfile + "_rel.rms"] return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() return None @@ -868,241 +969,284 @@ def _gen_outfilename(self): if isdefined(out_file): out_file = os.path.realpath(out_file) if not isdefined(out_file) and isdefined(self.inputs.in_file): - out_file = self._gen_fname(self.inputs.in_file, suffix='_mcf') + out_file = self._gen_fname(self.inputs.in_file, suffix="_mcf") return os.path.abspath(out_file) class FNIRTInputSpec(FSLCommandInputSpec): ref_file = File( - exists=True, - argstr='--ref=%s', - mandatory=True, - desc='name of reference image') + exists=True, argstr="--ref=%s", mandatory=True, desc="name of reference image" + ) in_file = File( - exists=True, - argstr='--in=%s', - mandatory=True, - desc='name of input image') + exists=True, argstr="--in=%s", mandatory=True, desc="name of input image" + ) affine_file = File( - exists=True, - argstr='--aff=%s', - desc='name of file containing affine transform') + exists=True, argstr="--aff=%s", desc="name of file containing affine transform" + ) inwarp_file = File( exists=True, - argstr='--inwarp=%s', - desc='name of file containing initial non-linear warps') + argstr="--inwarp=%s", + desc="name of file containing initial non-linear warps", + ) in_intensitymap_file = traits.List( File(exists=True), - argstr='--intin=%s', + argstr="--intin=%s", copyfile=False, minlen=1, maxlen=2, - desc=('name of file/files containing ' - 'initial intensity mapping ' - 'usually generated by previous ' - 'fnirt run')) + desc=( + "name of file/files containing " + "initial intensity mapping " + "usually generated by previous " + "fnirt run" + ), + ) fieldcoeff_file = traits.Either( traits.Bool, File, - argstr='--cout=%s', - desc='name of output file with field coefficients or true') + argstr="--cout=%s", + desc="name of output file with field coefficients or true", + ) warped_file = File( - argstr='--iout=%s', - desc='name of 
output image', - genfile=True, - hash_files=False) + argstr="--iout=%s", desc="name of output image", genfile=True, hash_files=False + ) field_file = traits.Either( traits.Bool, File, - argstr='--fout=%s', - desc='name of output file with field or true', - hash_files=False) + argstr="--fout=%s", + desc="name of output file with field or true", + hash_files=False, + ) jacobian_file = traits.Either( traits.Bool, File, - argstr='--jout=%s', - desc=('name of file for writing out the ' - 'Jacobian of the field (for ' - 'diagnostic or VBM purposes)'), - hash_files=False) + argstr="--jout=%s", + desc=( + "name of file for writing out the " + "Jacobian of the field (for " + "diagnostic or VBM purposes)" + ), + hash_files=False, + ) modulatedref_file = traits.Either( traits.Bool, File, - argstr='--refout=%s', - desc=('name of file for writing out ' - 'intensity modulated --ref (for ' - 'diagnostic purposes)'), - hash_files=False) + argstr="--refout=%s", + desc=( + "name of file for writing out " + "intensity modulated --ref (for " + "diagnostic purposes)" + ), + hash_files=False, + ) out_intensitymap_file = traits.Either( traits.Bool, File, - argstr='--intout=%s', - desc=('name of files for writing ' - 'information pertaining to ' - 'intensity mapping'), - hash_files=False) + argstr="--intout=%s", + desc=( + "name of files for writing " + "information pertaining to " + "intensity mapping" + ), + hash_files=False, + ) log_file = File( - argstr='--logout=%s', - desc='Name of log-file', - genfile=True, - hash_files=False) + argstr="--logout=%s", desc="Name of log-file", genfile=True, hash_files=False + ) config_file = traits.Either( traits.Enum("T1_2_MNI152_2mm", "FA_2_FMRIB58_1mm"), File(exists=True), - argstr='--config=%s', - desc='Name of config file specifying command line arguments') + argstr="--config=%s", + desc="Name of config file specifying command line arguments", + ) refmask_file = File( exists=True, - argstr='--refmask=%s', - desc='name of file with mask in reference space') + argstr="--refmask=%s", + desc="name of file with mask in reference space", + ) inmask_file = File( exists=True, - argstr='--inmask=%s', - desc='name of file with mask in input image space') + argstr="--inmask=%s", + desc="name of file with mask in input image space", + ) skip_refmask = traits.Bool( - argstr='--applyrefmask=0', - xor=['apply_refmask'], - desc='Skip specified refmask if set, default false') + argstr="--applyrefmask=0", + xor=["apply_refmask"], + desc="Skip specified refmask if set, default false", + ) skip_inmask = traits.Bool( - argstr='--applyinmask=0', - xor=['apply_inmask'], - desc='skip specified inmask if set, default false') + argstr="--applyinmask=0", + xor=["apply_inmask"], + desc="skip specified inmask if set, default false", + ) apply_refmask = traits.List( traits.Enum(0, 1), - argstr='--applyrefmask=%s', - xor=['skip_refmask'], - desc=('list of iterations to use reference mask on (1 to use, 0 to ' - 'skip)'), - sep=",") + argstr="--applyrefmask=%s", + xor=["skip_refmask"], + desc=("list of iterations to use reference mask on (1 to use, 0 to skip)"), + sep=",", + ) apply_inmask = traits.List( traits.Enum(0, 1), - argstr='--applyinmask=%s', - xor=['skip_inmask'], - desc='list of iterations to use input mask on (1 to use, 0 to skip)', - sep=",") + argstr="--applyinmask=%s", + xor=["skip_inmask"], + desc="list of iterations to use input mask on (1 to use, 0 to skip)", + sep=",", + ) skip_implicit_ref_masking = traits.Bool( - argstr='--imprefm=0', - desc=('skip implicit masking based on value 
in --ref image. ' - 'Default = 0')) + argstr="--imprefm=0", + desc=("skip implicit masking based on value in --ref image. Default = 0"), + ) skip_implicit_in_masking = traits.Bool( - argstr='--impinm=0', - desc=('skip implicit masking based on value in --in image. ' - 'Default = 0')) + argstr="--impinm=0", + desc=("skip implicit masking based on value in --in image. Default = 0"), + ) refmask_val = traits.Float( - argstr='--imprefval=%f', - desc='Value to mask out in --ref image. Default =0.0') + argstr="--imprefval=%f", desc="Value to mask out in --ref image. Default =0.0" + ) inmask_val = traits.Float( - argstr='--impinval=%f', - desc='Value to mask out in --in image. Default =0.0') + argstr="--impinval=%f", desc="Value to mask out in --in image. Default =0.0" + ) max_nonlin_iter = traits.List( traits.Int, - argstr='--miter=%s', - desc='Max # of non-linear iterations list, default [5, 5, 5, 5]', - sep=",") + argstr="--miter=%s", + desc="Max # of non-linear iterations list, default [5, 5, 5, 5]", + sep=",", + ) subsampling_scheme = traits.List( traits.Int, - argstr='--subsamp=%s', - desc='sub-sampling scheme, list, default [4, 2, 1, 1]', - sep=",") - warp_resolution = traits.Tuple( + argstr="--subsamp=%s", + desc="sub-sampling scheme, list, default [4, 2, 1, 1]", + sep=",", + ) + warp_resolution = Tuple( traits.Int, traits.Int, traits.Int, - argstr='--warpres=%d,%d,%d', - desc=('(approximate) resolution (in mm) of warp basis in x-, y- and ' - 'z-direction, default 10, 10, 10')) + argstr="--warpres=%d,%d,%d", + desc=( + "(approximate) resolution (in mm) of warp basis in x-, y- and " + "z-direction, default 10, 10, 10" + ), + ) spline_order = traits.Int( - argstr='--splineorder=%d', - desc='Order of spline, 2->Qadratic spline, 3->Cubic spline. Default=3') + argstr="--splineorder=%d", + desc="Order of spline, 2->Quadratic spline, 3->Cubic spline. Default=3", + ) in_fwhm = traits.List( traits.Int, - argstr='--infwhm=%s', - desc=('FWHM (in mm) of gaussian smoothing kernel for input volume, ' - 'default [6, 4, 2, 2]'), - sep=",") + argstr="--infwhm=%s", + desc=( + "FWHM (in mm) of gaussian smoothing kernel for input volume, " + "default [6, 4, 2, 2]" + ), + sep=",", + ) ref_fwhm = traits.List( traits.Int, - argstr='--reffwhm=%s', - desc=('FWHM (in mm) of gaussian smoothing kernel for ref volume, ' - 'default [4, 2, 0, 0]'), - sep=",") + argstr="--reffwhm=%s", + desc=( + "FWHM (in mm) of gaussian smoothing kernel for ref volume, " + "default [4, 2, 0, 0]" + ), + sep=",", + ) regularization_model = traits.Enum( - 'membrane_energy', - 'bending_energy', - argstr='--regmod=%s', - desc=('Model for regularisation of warp-field [membrane_energy ' - 'bending_energy], default bending_energy')) + "membrane_energy", + "bending_energy", + argstr="--regmod=%s", + desc=( + "Model for regularisation of warp-field [membrane_energy " + "bending_energy], default bending_energy" + ), + ) regularization_lambda = traits.List( traits.Float, - argstr='--lambda=%s', - desc=('Weight of regularisation, default depending on --ssqlambda and ' - '--regmod switches. See user documetation.'), - sep=",") + argstr="--lambda=%s", + desc=( + "Weight of regularisation, default depending on --ssqlambda and " + "--regmod switches. See user documentation."
+ ), + sep=",", + ) skip_lambda_ssq = traits.Bool( - argstr='--ssqlambda=0', - desc='If true, lambda is not weighted by current ssq, default false') - jacobian_range = traits.Tuple( + argstr="--ssqlambda=0", + desc="If true, lambda is not weighted by current ssq, default false", + ) + jacobian_range = Tuple( traits.Float, traits.Float, - argstr='--jacrange=%f,%f', - desc='Allowed range of Jacobian determinants, default 0.01, 100.0') + argstr="--jacrange=%f,%f", + desc="Allowed range of Jacobian determinants, default 0.01, 100.0", + ) derive_from_ref = traits.Bool( - argstr='--refderiv', - desc=('If true, ref image is used to calculate derivatives. ' - 'Default false')) + argstr="--refderiv", + desc=("If true, ref image is used to calculate derivatives. Default false"), + ) intensity_mapping_model = traits.Enum( - 'none', - 'global_linear', - 'global_non_linear', - 'local_linear', - 'global_non_linear_with_bias', - 'local_non_linear', - argstr='--intmod=%s', - desc='Model for intensity-mapping') + "none", + "global_linear", + "global_non_linear", + "local_linear", + "global_non_linear_with_bias", + "local_non_linear", + argstr="--intmod=%s", + desc="Model for intensity-mapping", + ) intensity_mapping_order = traits.Int( - argstr='--intorder=%d', - desc='Order of poynomial for mapping intensities, default 5') - biasfield_resolution = traits.Tuple( + argstr="--intorder=%d", + desc="Order of polynomial for mapping intensities, default 5", + ) + biasfield_resolution = Tuple( traits.Int, traits.Int, traits.Int, - argstr='--biasres=%d,%d,%d', - desc=('Resolution (in mm) of bias-field modelling local intensities, ' - 'default 50, 50, 50')) + argstr="--biasres=%d,%d,%d", + desc=( + "Resolution (in mm) of bias-field modelling local intensities, " + "default 50, 50, 50" + ), + ) bias_regularization_lambda = traits.Float( - argstr='--biaslambda=%f', - desc='Weight of regularisation for bias-field, default 10000') + argstr="--biaslambda=%f", + desc="Weight of regularisation for bias-field, default 10000", + ) skip_intensity_mapping = traits.Bool( - argstr='--estint=0', - xor=['apply_intensity_mapping'], - desc='Skip estimate intensity-mapping default false') + argstr="--estint=0", + xor=["apply_intensity_mapping"], + desc="Skip estimate intensity-mapping default false", + ) apply_intensity_mapping = traits.List( traits.Enum(0, 1), - argstr='--estint=%s', - xor=['skip_intensity_mapping'], - desc=('List of subsampling levels to apply intensity mapping for ' - '(0 to skip, 1 to apply)'), - sep=",") + argstr="--estint=%s", + xor=["skip_intensity_mapping"], + desc=( + "List of subsampling levels to apply intensity mapping for " + "(0 to skip, 1 to apply)" + ), + sep=",", + ) hessian_precision = traits.Enum( - 'double', - 'float', - argstr='--numprec=%s', - desc=('Precision for representing Hessian, double or float. ' - 'Default double')) + "double", + "float", + argstr="--numprec=%s", + desc=("Precision for representing Hessian, double or float.
Default double"), + ) class FNIRTOutputSpec(TraitedSpec): - fieldcoeff_file = File(exists=True, desc='file with field coefficients') - warped_file = File(exists=True, desc='warped image') - field_file = File(desc='file with warp field') - jacobian_file = File(desc='file containing Jacobian of the field') - modulatedref_file = File(desc='file containing intensity modulated --ref') + fieldcoeff_file = File(exists=True, desc="file with field coefficients") + warped_file = File(exists=True, desc="warped image") + field_file = File(desc="file with warp field") + jacobian_file = File(desc="file containing Jacobian of the field") + modulatedref_file = File(desc="file containing intensity modulated --ref") out_intensitymap_file = traits.List( File, minlen=2, maxlen=2, - desc='files containing info pertaining to intensity mapping') - log_file = File(desc='Name of log-file') + desc="files containing info pertaining to intensity mapping", + ) + log_file = File(desc="Name of log-file") class FNIRT(FSLCommand): @@ -1137,18 +1281,18 @@ class FNIRT(FSLCommand): """ - _cmd = 'fnirt' + _cmd = "fnirt" input_spec = FNIRTInputSpec output_spec = FNIRTOutputSpec filemap = { - 'warped_file': 'warped', - 'field_file': 'field', - 'jacobian_file': 'field_jacobian', - 'modulatedref_file': 'modulated', - 'out_intensitymap_file': 'intmap', - 'log_file': 'log.txt', - 'fieldcoeff_file': 'fieldwarp' + "warped_file": "warped", + "field_file": "field", + "jacobian_file": "field_jacobian", + "modulatedref_file": "modulated", + "out_intensitymap_file": "intmap", + "log_file": "log.txt", + "fieldcoeff_file": "fieldwarp", } def _list_outputs(self): @@ -1156,48 +1300,46 @@ def _list_outputs(self): for key, suffix in list(self.filemap.items()): inval = getattr(self.inputs, key) change_ext = True - if key in ['warped_file', 'log_file']: - if suffix.endswith('.txt'): + if key in ["warped_file", "log_file"]: + if suffix.endswith(".txt"): change_ext = False if isdefined(inval): - outputs[key] = inval + outputs[key] = os.path.abspath(inval) else: outputs[key] = self._gen_fname( - self.inputs.in_file, - suffix='_' + suffix, - change_ext=change_ext) + self.inputs.in_file, suffix="_" + suffix, change_ext=change_ext + ) elif isdefined(inval): if isinstance(inval, bool): if inval: outputs[key] = self._gen_fname( self.inputs.in_file, - suffix='_' + suffix, - change_ext=change_ext) + suffix="_" + suffix, + change_ext=change_ext, + ) else: outputs[key] = os.path.abspath(inval) - if key == 'out_intensitymap_file' and isdefined(outputs[key]): + if key == "out_intensitymap_file" and isdefined(outputs[key]): basename = FNIRT.intensitymap_file_basename(outputs[key]) - outputs[key] = [ - outputs[key], - '%s.txt' % basename, - ] + outputs[key] = [outputs[key], "%s.txt" % basename] return outputs def _format_arg(self, name, spec, value): - if name in ('in_intensitymap_file', 'out_intensitymap_file'): - if name == 'out_intensitymap_file': + if name in ("in_intensitymap_file", "out_intensitymap_file"): + if name == "out_intensitymap_file": value = self._list_outputs()[name] value = [FNIRT.intensitymap_file_basename(v) for v in value] - assert len(set(value)) == 1, ( - 'Found different basenames for {}: {}'.format(name, value)) + assert len(set(value)) == 1, "Found different basenames for {}: {}".format( + name, value + ) return spec.argstr % value[0] if name in list(self.filemap.keys()): return spec.argstr % self._list_outputs()[name] - return super(FNIRT, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def 
_gen_filename(self, name): - if name in ['warped_file', 'log_file']: + if name in ["warped_file", "log_file"]: return self._list_outputs()[name] return None @@ -1211,12 +1353,12 @@ def write_config(self, configfile): configfile : /path/to/configfile """ try: - fid = open(configfile, 'w+') - except IOError: - print('unable to create config_file %s' % (configfile)) + fid = open(configfile, "w+") + except OSError: + print("unable to create config_file %s" % (configfile)) for item in list(self.inputs.get().items()): - fid.write('%s\n' % (item)) + fid.write("%s\n" % (item)) fid.close() @classmethod @@ -1224,9 +1366,9 @@ def intensitymap_file_basename(cls, f): """Removes valid intensitymap extensions from `f`, returning a basename that can refer to both intensitymap files. """ - for ext in list(Info.ftypes.values()) + ['.txt']: + for ext in list(Info.ftypes.values()) + [".txt"]: if f.endswith(ext): - return f[:-len(ext)] + return f[: -len(ext)] # TODO consider warning for this case return f @@ -1234,74 +1376,86 @@ def intensitymap_file_basename(cls, f): class ApplyWarpInputSpec(FSLCommandInputSpec): in_file = File( exists=True, - argstr='--in=%s', + argstr="--in=%s", mandatory=True, position=0, - desc='image to be warped') + desc="image to be warped", + ) out_file = File( - argstr='--out=%s', + argstr="--out=%s", genfile=True, position=2, - desc='output filename', - hash_files=False) + desc="output filename", + hash_files=False, + ) ref_file = File( exists=True, - argstr='--ref=%s', + argstr="--ref=%s", mandatory=True, position=1, - desc='reference image') + desc="reference image", + ) field_file = File( - exists=True, argstr='--warp=%s', desc='file containing warp field') + exists=True, argstr="--warp=%s", desc="file containing warp field" + ) abswarp = traits.Bool( - argstr='--abs', - xor=['relwarp'], - desc="treat warp field as absolute: x' = w(x)") + argstr="--abs", xor=["relwarp"], desc="treat warp field as absolute: x' = w(x)" + ) relwarp = traits.Bool( - argstr='--rel', - xor=['abswarp'], + argstr="--rel", + xor=["abswarp"], position=-1, - desc="treat warp field as relative: x' = x + w(x)") + desc="treat warp field as relative: x' = x + w(x)", + ) datatype = traits.Enum( - 'char', - 'short', - 'int', - 'float', - 'double', - argstr='--datatype=%s', - desc='Force output data type [char short int float double].') + "char", + "short", + "int", + "float", + "double", + argstr="--datatype=%s", + desc="Force output data type [char short int float double].", + ) supersample = traits.Bool( - argstr='--super', - desc='intermediary supersampling of output, default is off') + argstr="--super", desc="intermediary supersampling of output, default is off" + ) superlevel = traits.Either( - traits.Enum('a'), + traits.Enum("a"), traits.Int, - argstr='--superlevel=%s', - desc=("level of intermediary supersampling, a for 'automatic' or " - "integer level. Default = 2")) + argstr="--superlevel=%s", + desc=( + "level of intermediary supersampling, a for 'automatic' or " + "integer level. 
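
The filemap above, together with the rewritten _list_outputs, is what turns a
boolean output request into a default filename. A standalone sketch of that
naming rule, illustrative only and not part of the patch (filenames are
hypothetical; the real _gen_fname also honours the configured FSL output type):

    import os

    filemap = {"warped_file": "warped", "fieldcoeff_file": "fieldwarp"}

    def default_fnirt_outputs(in_file):
        # mirrors _gen_fname(in_file, suffix="_" + suffix) in spirit
        base, _ = os.path.splitext(in_file)
        return {k: "%s_%s.nii.gz" % (base, s) for k, s in filemap.items()}

    print(default_fnirt_outputs("struct.nii"))
    # {'warped_file': 'struct_warped.nii.gz',
    #  'fieldcoeff_file': 'struct_fieldwarp.nii.gz'}
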
Default = 2" + ), + ) premat = File( exists=True, - argstr='--premat=%s', - desc='filename for pre-transform (affine matrix)') + argstr="--premat=%s", + desc="filename for pre-transform (affine matrix)", + ) postmat = File( exists=True, - argstr='--postmat=%s', - desc='filename for post-transform (affine matrix)') + argstr="--postmat=%s", + desc="filename for post-transform (affine matrix)", + ) mask_file = File( exists=True, - argstr='--mask=%s', - desc='filename for mask image (in reference space)') + argstr="--mask=%s", + desc="filename for mask image (in reference space)", + ) interp = traits.Enum( - 'nn', - 'trilinear', - 'sinc', - 'spline', - argstr='--interp=%s', + "nn", + "trilinear", + "sinc", + "spline", + argstr="--interp=%s", position=-2, - desc='interpolation method') + desc="interpolation method", + ) class ApplyWarpOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Warped output file') + out_file = File(exists=True, desc="Warped output file") class ApplyWarp(FSLCommand): @@ -1320,26 +1474,25 @@ class ApplyWarp(FSLCommand): """ - _cmd = 'applywarp' + _cmd = "applywarp" input_spec = ApplyWarpInputSpec output_spec = ApplyWarpOutputSpec def _format_arg(self, name, spec, value): - if name == 'superlevel': + if name == "superlevel": return spec.argstr % str(value) - return super(ApplyWarp, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() if not isdefined(self.inputs.out_file): - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix='_warp') + outputs["out_file"] = self._gen_fname(self.inputs.in_file, suffix="_warp") else: - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None @@ -1347,45 +1500,53 @@ def _gen_filename(self, name): class SliceTimerInputSpec(FSLCommandInputSpec): in_file = File( exists=True, - argstr='--in=%s', + argstr="--in=%s", mandatory=True, position=0, - desc='filename of input timeseries') + desc="filename of input timeseries", + ) out_file = File( - argstr='--out=%s', + argstr="--out=%s", genfile=True, - desc='filename of output timeseries', - hash_files=False) - index_dir = traits.Bool( - argstr='--down', desc='slice indexing from top to bottom') + desc="filename of output timeseries", + hash_files=False, + ) + index_dir = traits.Bool(argstr="--down", desc="slice indexing from top to bottom") time_repetition = traits.Float( - argstr='--repeat=%f', desc='Specify TR of data - default is 3s') + argstr="--repeat=%f", desc="Specify TR of data - default is 3s" + ) slice_direction = traits.Enum( 1, 2, 3, - argstr='--direction=%d', - desc='direction of slice acquisition (x=1, y=2, z=3) - default is z') - interleaved = traits.Bool( - argstr='--odd', desc='use interleaved acquisition') + argstr="--direction=%d", + desc="direction of slice acquisition (x=1, y=2, z=3) - default is z", + ) + interleaved = traits.Bool(argstr="--odd", desc="use interleaved acquisition") custom_timings = File( exists=True, - argstr='--tcustom=%s', - desc=('slice timings, in fractions of TR, range 0:1 (default is 0.5 = ' - 'no shift)')) + argstr="--tcustom=%s", + desc=( + "slice timings, in fractions of TR, range 0:1 (default is 0.5 = " + "no shift)" + ), + ) global_shift = traits.Float( - argstr='--tglobal', - desc='shift in fraction of TR, range 0:1 (default is 0.5 
@@ -1347,45 +1500,53 @@
 class SliceTimerInputSpec(FSLCommandInputSpec):
     in_file = File(
         exists=True,
-        argstr='--in=%s',
+        argstr="--in=%s",
         mandatory=True,
         position=0,
-        desc='filename of input timeseries')
+        desc="filename of input timeseries",
+    )
     out_file = File(
-        argstr='--out=%s',
+        argstr="--out=%s",
         genfile=True,
-        desc='filename of output timeseries',
-        hash_files=False)
-    index_dir = traits.Bool(
-        argstr='--down', desc='slice indexing from top to bottom')
+        desc="filename of output timeseries",
+        hash_files=False,
+    )
+    index_dir = traits.Bool(argstr="--down", desc="slice indexing from top to bottom")
     time_repetition = traits.Float(
-        argstr='--repeat=%f', desc='Specify TR of data - default is 3s')
+        argstr="--repeat=%f", desc="Specify TR of data - default is 3s"
+    )
     slice_direction = traits.Enum(
         1,
         2,
         3,
-        argstr='--direction=%d',
-        desc='direction of slice acquisition (x=1, y=2, z=3) - default is z')
-    interleaved = traits.Bool(
-        argstr='--odd', desc='use interleaved acquisition')
+        argstr="--direction=%d",
+        desc="direction of slice acquisition (x=1, y=2, z=3) - default is z",
+    )
+    interleaved = traits.Bool(argstr="--odd", desc="use interleaved acquisition")
     custom_timings = File(
         exists=True,
-        argstr='--tcustom=%s',
-        desc=('slice timings, in fractions of TR, range 0:1 (default is 0.5 = '
-              'no shift)'))
+        argstr="--tcustom=%s",
+        desc=(
+            "slice timings, in fractions of TR, range 0:1 (default is 0.5 = "
+            "no shift)"
+        ),
+    )
     global_shift = traits.Float(
-        argstr='--tglobal',
-        desc='shift in fraction of TR, range 0:1 (default is 0.5 = no shift)')
+        argstr="--tglobal",
+        desc="shift in fraction of TR, range 0:1 (default is 0.5 = no shift)",
+    )
     custom_order = File(
         exists=True,
-        argstr='--ocustom=%s',
-        desc=('filename of single-column custom interleave order file (first '
-              'slice is referred to as 1 not 0)'))
+        argstr="--ocustom=%s",
+        desc=(
+            "filename of single-column custom interleave order file (first "
+            "slice is referred to as 1 not 0)"
+        ),
+    )
 
 
 class SliceTimerOutputSpec(TraitedSpec):
-    slice_time_corrected_file = File(
-        exists=True, desc='slice time corrected file')
+    slice_time_corrected_file = File(exists=True, desc="slice time corrected file")
 
 
 class SliceTimer(FSLCommand):
@@ -1402,7 +1563,7 @@ class SliceTimer(FSLCommand):
 
     """
 
-    _cmd = 'slicetimer'
+    _cmd = "slicetimer"
     input_spec = SliceTimerInputSpec
     output_spec = SliceTimerOutputSpec
 
@@ -1410,69 +1571,80 @@ def _list_outputs(self):
         outputs = self._outputs().get()
         out_file = self.inputs.out_file
         if not isdefined(out_file):
-            out_file = self._gen_fname(self.inputs.in_file, suffix='_st')
-        outputs['slice_time_corrected_file'] = os.path.abspath(out_file)
+            out_file = self._gen_fname(self.inputs.in_file, suffix="_st")
+        outputs["slice_time_corrected_file"] = os.path.abspath(out_file)
         return outputs
 
     def _gen_filename(self, name):
-        if name == 'out_file':
-            return self._list_outputs()['slice_time_corrected_file']
+        if name == "out_file":
+            return self._list_outputs()["slice_time_corrected_file"]
         return None
 
 
 class SUSANInputSpec(FSLCommandInputSpec):
     in_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=1,
-        desc='filename of input timeseries')
+        desc="filename of input timeseries",
+    )
     brightness_threshold = traits.Float(
-        argstr='%.10f',
+        argstr="%.10f",
         position=2,
         mandatory=True,
-        desc=('brightness threshold and should be greater than noise level '
-              'and less than contrast of edges to be preserved.'))
+        desc=(
+            "brightness threshold; should be greater than noise level "
+            "and less than contrast of edges to be preserved."
+        ),
+    )
     fwhm = traits.Float(
-        argstr='%.10f',
+        argstr="%.10f",
         position=3,
         mandatory=True,
-        desc='fwhm of smoothing, in mm, gets converted using sqrt(8*log(2))')
+        desc="fwhm of smoothing, in mm, gets converted using sqrt(8*log(2))",
+    )
     dimension = traits.Enum(
         3,
         2,
-        argstr='%d',
+        argstr="%d",
         position=4,
         usedefault=True,
-        desc='within-plane (2) or fully 3D (3)')
+        desc="within-plane (2) or fully 3D (3)",
+    )
     use_median = traits.Enum(
         1,
         0,
-        argstr='%d',
+        argstr="%d",
         position=5,
         usedefault=True,
-        desc=('whether to use a local median filter in the cases where '
-              'single-point noise is detected'))
+        desc=(
+            "whether to use a local median filter in the cases where "
+            "single-point noise is detected"
+        ),
+    )
     usans = traits.List(
-        traits.Tuple(File(exists=True), traits.Float),
+        Tuple(File(exists=True), traits.Float),
         maxlen=2,
-        argstr='',
+        argstr="",
         position=6,
         usedefault=True,
-        desc='determines whether the smoothing area (USAN) is to be '
-        'found from secondary images (0, 1 or 2). A negative '
-        'value for any brightness threshold will auto-set the '
-        'threshold at 10% of the robust range')
+        desc="determines whether the smoothing area (USAN) is to be "
+        "found from secondary images (0, 1 or 2). A negative "
+        "value for any brightness threshold will auto-set the "
+        "threshold at 10% of the robust range",
+    )
     out_file = File(
-        argstr='%s',
+        argstr="%s",
         position=-1,
         genfile=True,
-        desc='output file name',
-        hash_files=False)
+        desc="output file name",
+        hash_files=False,
+    )
 
 
 class SUSANOutputSpec(TraitedSpec):
-    smoothed_file = File(exists=True, desc='smoothed output file')
+    smoothed_file = File(exists=True, desc="smoothed output file")
 
 
 class SUSAN(FSLCommand):
@@ -1495,154 +1667,170 @@ class SUSAN(FSLCommand):
     >>> result = sus.run()  # doctest: +SKIP
     """
 
-    _cmd = 'susan'
+    _cmd = "susan"
     input_spec = SUSANInputSpec
     output_spec = SUSANOutputSpec
 
     def _format_arg(self, name, spec, value):
-        if name == 'fwhm':
+        if name == "fwhm":
             return spec.argstr % (float(value) / np.sqrt(8 * np.log(2)))
-        if name == 'usans':
+        if name == "usans":
             if not value:
-                return '0'
+                return "0"
             arglist = [str(len(value))]
             for filename, thresh in value:
-                arglist.extend([filename, '%.10f' % thresh])
-            return ' '.join(arglist)
-        return super(SUSAN, self)._format_arg(name, spec, value)
+                arglist.extend([filename, "%.10f" % thresh])
+            return " ".join(arglist)
+        return super()._format_arg(name, spec, value)
 
     def _list_outputs(self):
         outputs = self._outputs().get()
         out_file = self.inputs.out_file
         if not isdefined(out_file):
-            out_file = self._gen_fname(self.inputs.in_file, suffix='_smooth')
-        outputs['smoothed_file'] = os.path.abspath(out_file)
+            out_file = self._gen_fname(self.inputs.in_file, suffix="_smooth")
+        outputs["smoothed_file"] = os.path.abspath(out_file)
         return outputs
 
     def _gen_filename(self, name):
-        if name == 'out_file':
-            return self._list_outputs()['smoothed_file']
+        if name == "out_file":
+            return self._list_outputs()["smoothed_file"]
         return None
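
The fwhm branch of _format_arg above converts a FWHM in millimetres to the
sigma that the susan binary expects. The same conversion in isolation (this is
the standard Gaussian identity, shown here only for clarity):

    import numpy as np

    def fwhm_to_sigma(fwhm):
        # FWHM = sigma * sqrt(8 * ln 2), so divide to recover sigma
        return float(fwhm) / np.sqrt(8 * np.log(2))

    print(round(fwhm_to_sigma(6.0), 3))  # 2.548
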
class FUGUEInputSpec(FSLCommandInputSpec):
-    in_file = File(
-        exists=True, argstr='--in=%s', desc='filename of input volume')
+    in_file = File(exists=True, argstr="--in=%s", desc="filename of input volume")
     shift_in_file = File(
         exists=True,
-        argstr='--loadshift=%s',
-        desc='filename for reading pixel shift volume')
+        argstr="--loadshift=%s",
+        desc="filename for reading pixel shift volume",
+    )
     phasemap_in_file = File(
-        exists=True,
-        argstr='--phasemap=%s',
-        desc='filename for input phase image')
+        exists=True, argstr="--phasemap=%s", desc="filename for input phase image"
+    )
     fmap_in_file = File(
         exists=True,
-        argstr='--loadfmap=%s',
-        desc='filename for loading fieldmap (rad/s)')
+        argstr="--loadfmap=%s",
+        desc="filename for loading fieldmap (rad/s)",
+    )
     unwarped_file = File(
-        argstr='--unwarp=%s',
-        desc='apply unwarping and save as filename',
-        xor=['warped_file'],
-        requires=['in_file'])
+        argstr="--unwarp=%s",
+        desc="apply unwarping and save as filename",
+        xor=["warped_file"],
+        requires=["in_file"],
+    )
     warped_file = File(
-        argstr='--warp=%s',
-        desc='apply forward warping and save as filename',
-        xor=['unwarped_file'],
-        requires=['in_file'])
+        argstr="--warp=%s",
+        desc="apply forward warping and save as filename",
+        xor=["unwarped_file"],
+        requires=["in_file"],
+    )
     forward_warping = traits.Bool(
-        False,
-        usedefault=True,
-        desc='apply forward warping instead of unwarping')
+        False, usedefault=True, desc="apply forward warping instead of unwarping"
+    )
     dwell_to_asym_ratio = traits.Float(
-        argstr='--dwelltoasym=%.10f', desc='set the dwell to asym time ratio')
+        argstr="--dwelltoasym=%.10f", desc="set the dwell to asym time ratio"
+    )
     dwell_time = traits.Float(
-        argstr='--dwell=%.10f',
-        desc=('set the EPI dwell time per phase-encode line - same as echo '
-              'spacing - (sec)'))
+        argstr="--dwell=%.10f",
+        desc=(
+            "set the EPI dwell time per phase-encode line - same as echo "
+            "spacing - (sec)"
+        ),
+    )
     asym_se_time = traits.Float(
-        argstr='--asym=%.10f',
-        desc='set the fieldmap asymmetric spin echo time (sec)')
-    median_2dfilter = traits.Bool(
-        argstr='--median', desc='apply 2D median filtering')
+        argstr="--asym=%.10f", desc="set the fieldmap asymmetric spin echo time (sec)"
+    )
+    median_2dfilter = traits.Bool(argstr="--median", desc="apply 2D median filtering")
     despike_2dfilter = traits.Bool(
-        argstr='--despike', desc='apply a 2D de-spiking filter')
+        argstr="--despike", desc="apply a 2D de-spiking filter"
+    )
     no_gap_fill = traits.Bool(
-        argstr='--nofill',
-        desc='do not apply gap-filling measure to the fieldmap')
+        argstr="--nofill", desc="do not apply gap-filling measure to the fieldmap"
+    )
     no_extend = traits.Bool(
-        argstr='--noextend',
-        desc='do not apply rigid-body extrapolation to the fieldmap')
+        argstr="--noextend",
+        desc="do not apply rigid-body extrapolation to the fieldmap",
+    )
     smooth2d = traits.Float(
-        argstr='--smooth2=%.2f',
-        desc='apply 2D Gaussian smoothing of sigma N (in mm)')
+        argstr="--smooth2=%.2f", desc="apply 2D Gaussian smoothing of sigma N (in mm)"
+    )
     smooth3d = traits.Float(
-        argstr='--smooth3=%.2f',
-        desc='apply 3D Gaussian smoothing of sigma N (in mm)')
+        argstr="--smooth3=%.2f", desc="apply 3D Gaussian smoothing of sigma N (in mm)"
+    )
     poly_order = traits.Int(
-        argstr='--poly=%d', desc='apply polynomial fitting of order N')
+        argstr="--poly=%d", desc="apply polynomial fitting of order N"
+    )
     fourier_order = traits.Int(
-        argstr='--fourier=%d',
-        desc='apply Fourier (sinusoidal) fitting of order N')
-    pava = traits.Bool(
-        argstr='--pava', desc='apply monotonic enforcement via PAVA')
+        argstr="--fourier=%d", desc="apply Fourier (sinusoidal) fitting of order N"
+    )
+    pava = traits.Bool(argstr="--pava", desc="apply monotonic enforcement via PAVA")
     despike_threshold = traits.Float(
-        argstr='--despikethreshold=%s',
-        desc='specify the threshold for de-spiking (default=3.0)')
+        argstr="--despikethreshold=%s",
+        desc="specify the threshold for de-spiking (default=3.0)",
+    )
     unwarp_direction = traits.Enum(
-        'x',
-        'y',
-        'z',
-        'x-',
-        'y-',
-        'z-',
-        argstr='--unwarpdir=%s',
-        desc='specifies direction of warping (default y)')
+        "x",
+        "y",
+        "z",
+        "x-",
+        "y-",
+        "z-",
+        argstr="--unwarpdir=%s",
+        desc="specifies direction of warping (default y)",
+    )
     phase_conjugate = traits.Bool(
-        argstr='--phaseconj', desc='apply phase conjugate method of unwarping')
+        argstr="--phaseconj", desc="apply phase conjugate method of unwarping"
+    )
     icorr = traits.Bool(
-        argstr='--icorr',
-        requires=['shift_in_file'],
-        desc=('apply intensity correction to unwarping (pixel shift method '
-              'only)'))
+        argstr="--icorr",
+        requires=["shift_in_file"],
+        desc=("apply intensity correction to unwarping (pixel shift method only)"),
+    )
     icorr_only = traits.Bool(
-        argstr='--icorronly',
-        requires=['unwarped_file'],
-        desc='apply intensity correction only')
+        argstr="--icorronly",
+        requires=["unwarped_file"],
+        desc="apply intensity correction only",
+    )
     mask_file = File(
-        exists=True,
-        argstr='--mask=%s',
-        desc='filename for loading valid mask')
+        exists=True, argstr="--mask=%s", desc="filename for loading valid mask"
+    )
     nokspace = traits.Bool(
-        False, argstr='--nokspace', desc='do not use k-space forward warping')
+        False, argstr="--nokspace", desc="do not use k-space forward warping"
+    )
 
     # Special outputs: shift (voxel shift map, vsm)
     save_shift = traits.Bool(
-        False, xor=['save_unmasked_shift'], desc='write pixel shift volume')
+        False, xor=["save_unmasked_shift"], desc="write pixel shift volume"
+    )
     shift_out_file = File(
-        argstr='--saveshift=%s', desc='filename for saving pixel shift volume')
+        argstr="--saveshift=%s", desc="filename for saving pixel shift volume"
+    )
     save_unmasked_shift = traits.Bool(
-        argstr='--unmaskshift',
-        xor=['save_shift'],
-        desc='saves the unmasked shiftmap when using --saveshift')
+        argstr="--unmaskshift",
+        xor=["save_shift"],
+        desc="saves the unmasked shiftmap when using --saveshift",
+    )
 
     # Special outputs: fieldmap (fmap)
     save_fmap = traits.Bool(
-        False, xor=['save_unmasked_fmap'], desc='write field map volume')
+        False, xor=["save_unmasked_fmap"], desc="write field map volume"
+    )
     fmap_out_file = File(
-        argstr='--savefmap=%s', desc='filename for saving fieldmap (rad/s)')
+        argstr="--savefmap=%s", desc="filename for saving fieldmap (rad/s)"
+    )
     save_unmasked_fmap = traits.Bool(
         False,
-        argstr='--unmaskfmap',
-        xor=['save_fmap'],
-        desc='saves the unmasked fieldmap when using --savefmap')
+        argstr="--unmaskfmap",
+        xor=["save_fmap"],
+        desc="saves the unmasked fieldmap when using --savefmap",
+    )
 
 
 class FUGUEOutputSpec(TraitedSpec):
-    unwarped_file = File(desc='unwarped file')
-    warped_file = File(desc='forward warped file')
-    shift_out_file = File(desc='voxel shift map file')
-    fmap_out_file = File(desc='fieldmap file')
+    unwarped_file = File(desc="unwarped file")
+    warped_file = File(desc="forward warped file")
+    shift_out_file = File(desc="voxel shift map file")
+    fmap_out_file = File(desc="fieldmap file")
 
 
 class FUGUE(FSLCommand):
@@ -1710,7 +1898,7 @@ class FUGUE(FSLCommand):
 
     """
 
-    _cmd = 'fugue'
+    _cmd = "fugue"
     input_spec = FUGUEInputSpec
     output_spec = FUGUEOutputSpec
 
@@ -1724,149 +1912,166 @@ def _parse_inputs(self, skip=None):
 
         if not input_phase and not input_vsm and not input_fmap:
             raise RuntimeError(
-                ('Either phasemap_in_file, shift_in_file or fmap_in_file must '
-                 'be set.'))
+                "Either phasemap_in_file, shift_in_file or fmap_in_file must be set."
+            )
 
         if not isdefined(self.inputs.in_file):
-            skip += ['unwarped_file', 'warped_file']
+            skip += ["unwarped_file", "warped_file"]
         else:
             if self.inputs.forward_warping:
-                skip += ['unwarped_file']
-                trait_spec = self.inputs.trait('warped_file')
+                skip += ["unwarped_file"]
+                trait_spec = self.inputs.trait("warped_file")
                 trait_spec.name_template = "%s_warped"
-                trait_spec.name_source = 'in_file'
-                trait_spec.output_name = 'warped_file'
+                trait_spec.name_source = "in_file"
+                trait_spec.output_name = "warped_file"
             else:
-                skip += ['warped_file']
-                trait_spec = self.inputs.trait('unwarped_file')
+                skip += ["warped_file"]
+                trait_spec = self.inputs.trait("unwarped_file")
                 trait_spec.name_template = "%s_unwarped"
-                trait_spec.name_source = 'in_file'
-                trait_spec.output_name = 'unwarped_file'
+                trait_spec.name_source = "in_file"
+                trait_spec.output_name = "unwarped_file"
 
         # Handle shift output
         if not isdefined(self.inputs.shift_out_file):
-            vsm_save_masked = (isdefined(self.inputs.save_shift)
-                               and self.inputs.save_shift)
-            vsm_save_unmasked = (isdefined(self.inputs.save_unmasked_shift)
-                                 and self.inputs.save_unmasked_shift)
-
-            if (vsm_save_masked or vsm_save_unmasked):
-                trait_spec = self.inputs.trait('shift_out_file')
-                trait_spec.output_name = 'shift_out_file'
+            vsm_save_masked = (
+                isdefined(self.inputs.save_shift) and self.inputs.save_shift
+            )
+            vsm_save_unmasked = (
+                isdefined(self.inputs.save_unmasked_shift)
+                and self.inputs.save_unmasked_shift
+            )
+
+            if vsm_save_masked or vsm_save_unmasked:
+                trait_spec = self.inputs.trait("shift_out_file")
+                trait_spec.output_name = "shift_out_file"
                 if input_fmap:
-                    trait_spec.name_source = 'fmap_in_file'
+                    trait_spec.name_source = "fmap_in_file"
                 elif input_phase:
-                    trait_spec.name_source = 'phasemap_in_file'
+                    trait_spec.name_source = "phasemap_in_file"
                 elif input_vsm:
-                    trait_spec.name_source = 'shift_in_file'
+                    trait_spec.name_source = "shift_in_file"
                 else:
                     raise RuntimeError(
-                        ('Either phasemap_in_file, shift_in_file or '
-                         'fmap_in_file must be set.'))
+                        "Either phasemap_in_file, shift_in_file or "
+                        "fmap_in_file must be set."
+                    )
 
                 if vsm_save_unmasked:
-                    trait_spec.name_template = '%s_vsm_unmasked'
+                    trait_spec.name_template = "%s_vsm_unmasked"
                 else:
-                    trait_spec.name_template = '%s_vsm'
+                    trait_spec.name_template = "%s_vsm"
         else:
-            skip += ['save_shift', 'save_unmasked_shift', 'shift_out_file']
+            skip += ["save_shift", "save_unmasked_shift", "shift_out_file"]
 
         # Handle fieldmap output
         if not isdefined(self.inputs.fmap_out_file):
-            fmap_save_masked = (isdefined(self.inputs.save_fmap)
-                                and self.inputs.save_fmap)
-            fmap_save_unmasked = (isdefined(self.inputs.save_unmasked_fmap)
-                                  and self.inputs.save_unmasked_fmap)
-
-            if (fmap_save_masked or fmap_save_unmasked):
-                trait_spec = self.inputs.trait('fmap_out_file')
-                trait_spec.output_name = 'fmap_out_file'
+            fmap_save_masked = (
+                isdefined(self.inputs.save_fmap) and self.inputs.save_fmap
+            )
+            fmap_save_unmasked = (
+                isdefined(self.inputs.save_unmasked_fmap)
+                and self.inputs.save_unmasked_fmap
+            )
+
+            if fmap_save_masked or fmap_save_unmasked:
+                trait_spec = self.inputs.trait("fmap_out_file")
+                trait_spec.output_name = "fmap_out_file"
                 if input_vsm:
-                    trait_spec.name_source = 'shift_in_file'
+                    trait_spec.name_source = "shift_in_file"
                 elif input_phase:
-                    trait_spec.name_source = 'phasemap_in_file'
+                    trait_spec.name_source = "phasemap_in_file"
                 elif input_fmap:
-                    trait_spec.name_source = 'fmap_in_file'
+                    trait_spec.name_source = "fmap_in_file"
                 else:
                     raise RuntimeError(
-                        ('Either phasemap_in_file, shift_in_file or '
-                         'fmap_in_file must be set.'))
+                        "Either phasemap_in_file, shift_in_file or "
+                        "fmap_in_file must be set."
+                    )
 
                 if fmap_save_unmasked:
-                    trait_spec.name_template = '%s_fieldmap_unmasked'
+                    trait_spec.name_template = "%s_fieldmap_unmasked"
                 else:
-                    trait_spec.name_template = '%s_fieldmap'
+                    trait_spec.name_template = "%s_fieldmap"
         else:
-            skip += ['save_fmap', 'save_unmasked_fmap', 'fmap_out_file']
+            skip += ["save_fmap", "save_unmasked_fmap", "fmap_out_file"]
 
-        return super(FUGUE, self)._parse_inputs(skip=skip)
+        return super()._parse_inputs(skip=skip)
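
The _parse_inputs logic above picks the name_source for derived outputs from
whichever input was provided. A sketch of the voxel-shift-map case, assuming
FSL is available; filenames are hypothetical and the extension follows the
configured FSL output type:

    from nipype.interfaces.fsl import FUGUE

    fugue = FUGUE()
    fugue.inputs.in_file = "epi.nii"
    fugue.inputs.fmap_in_file = "fieldmap.nii"
    fugue.inputs.save_shift = True
    # with no explicit shift_out_file, the "%s_vsm" template is applied to
    # fmap_in_file, yielding something like fieldmap_vsm.nii.gz
    print(fugue.cmdline)  # doctest: +SKIP
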
desc="does all processing in 2D (slice at a time)", + ) process3d = traits.Bool( - argstr='--force3D', - xor=['labelprocess2d', 'process2d'], - desc='forces all processing to be full 3D') + argstr="--force3D", + xor=["labelprocess2d", "process2d"], + desc="forces all processing to be full 3D", + ) threshold = traits.Float( - argstr='--thresh=%.10f', desc='intensity threshold for masking') + argstr="--thresh=%.10f", desc="intensity threshold for masking" + ) mask_file = File( - exists=True, argstr='--mask=%s', desc='filename of mask input volume') + exists=True, argstr="--mask=%s", desc="filename of mask input volume" + ) start = traits.Int( - argstr='--start=%d', desc='first image number to process (default 0)') + argstr="--start=%d", desc="first image number to process (default 0)" + ) end = traits.Int( - argstr='--end=%d', desc='final image number to process (default Inf)') + argstr="--end=%d", desc="final image number to process (default Inf)" + ) savemask_file = File( - argstr='--savemask=%s', - desc='saving the mask volume', - hash_files=False) + argstr="--savemask=%s", desc="saving the mask volume", hash_files=False + ) rawphase_file = File( - argstr='--rawphase=%s', - desc='saving the raw phase output', - hash_files=False) + argstr="--rawphase=%s", desc="saving the raw phase output", hash_files=False + ) label_file = File( - argstr='--labels=%s', - desc='saving the area labels output', - hash_files=False) + argstr="--labels=%s", desc="saving the area labels output", hash_files=False + ) removeramps = traits.Bool( - argstr='--removeramps', desc='remove phase ramps during unwrapping') + argstr="--removeramps", desc="remove phase ramps during unwrapping" + ) class PRELUDEOutputSpec(TraitedSpec): - unwrapped_phase_file = File(exists=True, desc='unwrapped phase file') + unwrapped_phase_file = File(exists=True, desc="unwrapped phase file") class PRELUDE(FSLCommand): @@ -1878,30 +2083,31 @@ class PRELUDE(FSLCommand): Please insert examples for use of this command """ + input_spec = PRELUDEInputSpec output_spec = PRELUDEOutputSpec - _cmd = 'prelude' + _cmd = "prelude" def __init__(self, **kwargs): - super(PRELUDE, self).__init__(**kwargs) - warn('This has not been fully tested. Please report any failures.') + super().__init__(**kwargs) + warn("This has not been fully tested. 
Please report any failures.") def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.unwrapped_phase_file if not isdefined(out_file): if isdefined(self.inputs.phase_file): - out_file = self._gen_fname( - self.inputs.phase_file, suffix='_unwrapped') + out_file = self._gen_fname(self.inputs.phase_file, suffix="_unwrapped") elif isdefined(self.inputs.complex_phase_file): out_file = self._gen_fname( - self.inputs.complex_phase_file, suffix='_phase_unwrapped') - outputs['unwrapped_phase_file'] = os.path.abspath(out_file) + self.inputs.complex_phase_file, suffix="_phase_unwrapped" + ) + outputs["unwrapped_phase_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name == 'unwrapped_phase_file': - return self._list_outputs()['unwrapped_phase_file'] + if name == "unwrapped_phase_file": + return self._list_outputs()["unwrapped_phase_file"] return None @@ -1911,72 +2117,85 @@ class FIRSTInputSpec(FSLCommandInputSpec): mandatory=True, position=-2, copyfile=False, - argstr='-i %s', - desc='input data file') + argstr="-i %s", + desc="input data file", + ) out_file = File( - 'segmented', + "segmented", usedefault=True, mandatory=True, position=-1, - argstr='-o %s', - desc='output data file', - hash_files=False) - verbose = traits.Bool(argstr='-v', position=1, desc="Use verbose logging.") + argstr="-o %s", + desc="output data file", + hash_files=False, + ) + verbose = traits.Bool(argstr="-v", position=1, desc="Use verbose logging.") brain_extracted = traits.Bool( - argstr='-b', + argstr="-b", position=2, - desc="Input structural image is already brain-extracted") + desc="Input structural image is already brain-extracted", + ) no_cleanup = traits.Bool( - argstr='-d', + argstr="-d", position=3, - desc="Input structural image is already brain-extracted") + desc="Input structural image is already brain-extracted", + ) method = traits.Enum( - 'auto', - 'fast', - 'none', - xor=['method_as_numerical_threshold'], - argstr='-m %s', + "auto", + "fast", + "none", + xor=["method_as_numerical_threshold"], + argstr="-m %s", position=4, usedefault=True, - desc=("Method must be one of auto, fast, none, or it can be entered " - "using the 'method_as_numerical_threshold' input")) + desc=( + "Method must be one of auto, fast, none, or it can be entered " + "using the 'method_as_numerical_threshold' input" + ), + ) method_as_numerical_threshold = traits.Float( - argstr='-m %.4f', + argstr="-m %.4f", position=4, - desc=("Specify a numerical threshold value or use the 'method' input " - "to choose auto, fast, or none")) + desc=( + "Specify a numerical threshold value or use the 'method' input " + "to choose auto, fast, or none" + ), + ) list_of_specific_structures = traits.List( traits.Str, - argstr='-s %s', - sep=',', + argstr="-s %s", + sep=",", position=5, minlen=1, - desc='Runs only on the specified structures (e.g. L_Hipp, R_Hipp' - 'L_Accu, R_Accu, L_Amyg, R_Amyg' - 'L_Caud, R_Caud, L_Pall, R_Pall' - 'L_Puta, R_Puta, L_Thal, R_Thal, BrStem') + desc="Runs only on the specified structures (e.g. L_Hipp, R_Hipp" + "L_Accu, R_Accu, L_Amyg, R_Amyg" + "L_Caud, R_Caud, L_Pall, R_Pall" + "L_Puta, R_Puta, L_Thal, R_Thal, BrStem", + ) affine_file = File( exists=True, position=6, - argstr='-a %s', - desc=('Affine matrix to use (e.g. img2std.mat) (does not ' - 're-run registration)')) + argstr="-a %s", + desc=("Affine matrix to use (e.g. 
img2std.mat) (does not re-run registration)"), + ) class FIRSTOutputSpec(TraitedSpec): vtk_surfaces = OutputMultiPath( - File(exists=True), - desc='VTK format meshes for each subcortical region') - bvars = OutputMultiPath( - File(exists=True), desc='bvars for each subcortical region') + File(exists=True), desc="VTK format meshes for each subcortical region" + ) + bvars = OutputMultiPath(File(exists=True), desc="bvars for each subcortical region") original_segmentations = File( exists=True, - desc=('3D image file containing the segmented regions ' - 'as integer values. Uses CMA labelling')) + desc=( + "3D image file containing the segmented regions " + "as integer values. Uses CMA labelling" + ), + ) segmentation_file = File( exists=True, - desc=('4D image file containing a single volume per ' - 'segmented region')) + desc=("4D image file containing a single volume per segmented region"), + ) class FIRST(FSLCommand): @@ -1995,7 +2214,7 @@ class FIRST(FSLCommand): """ - _cmd = 'run_first_all' + _cmd = "run_first_all" input_spec = FIRSTInputSpec output_spec = FIRSTOutputSpec @@ -2006,51 +2225,60 @@ def _list_outputs(self): structures = self.inputs.list_of_specific_structures else: structures = [ - 'L_Hipp', 'R_Hipp', 'L_Accu', 'R_Accu', 'L_Amyg', 'R_Amyg', - 'L_Caud', 'R_Caud', 'L_Pall', 'R_Pall', 'L_Puta', 'R_Puta', - 'L_Thal', 'R_Thal', 'BrStem' + "L_Hipp", + "R_Hipp", + "L_Accu", + "R_Accu", + "L_Amyg", + "R_Amyg", + "L_Caud", + "R_Caud", + "L_Pall", + "R_Pall", + "L_Puta", + "R_Puta", + "L_Thal", + "R_Thal", + "BrStem", ] - outputs['original_segmentations'] = \ - self._gen_fname('original_segmentations') - outputs['segmentation_file'] = self._gen_fname('segmentation_file') - outputs['vtk_surfaces'] = self._gen_mesh_names('vtk_surfaces', - structures) - outputs['bvars'] = self._gen_mesh_names('bvars', structures) + outputs["original_segmentations"] = self._gen_fname("original_segmentations") + outputs["segmentation_file"] = self._gen_fname("segmentation_file") + outputs["vtk_surfaces"] = self._gen_mesh_names("vtk_surfaces", structures) + outputs["bvars"] = self._gen_mesh_names("bvars", structures) return outputs def _gen_fname(self, basename): path, outname, ext = split_filename(self.inputs.out_file) - method = 'none' - if isdefined(self.inputs.method) and self.inputs.method != 'none': - method = 'fast' - if (self.inputs.list_of_specific_structures - and self.inputs.method == 'auto'): - method = 'none' + method = "none" + if isdefined(self.inputs.method) and self.inputs.method != "none": + method = "fast" + if self.inputs.list_of_specific_structures and self.inputs.method == "auto": + method = "none" if isdefined(self.inputs.method_as_numerical_threshold): - thres = '%.4f' % self.inputs.method_as_numerical_threshold - method = thres.replace('.', '') + thres = "%.4f" % self.inputs.method_as_numerical_threshold + method = thres.replace(".", "") - if basename == 'original_segmentations': - return op.abspath('%s_all_%s_origsegs.nii.gz' % (outname, method)) - if basename == 'segmentation_file': - return op.abspath('%s_all_%s_firstseg.nii.gz' % (outname, method)) + if basename == "original_segmentations": + return op.abspath(f"{outname}_all_{method}_origsegs.nii.gz") + if basename == "segmentation_file": + return op.abspath(f"{outname}_all_{method}_firstseg.nii.gz") return None def _gen_mesh_names(self, name, structures): path, prefix, ext = split_filename(self.inputs.out_file) - if name == 'vtk_surfaces': + if name == "vtk_surfaces": vtks = list() for struct in structures: - vtk = prefix + '-' 
+ struct + '_first.vtk' + vtk = prefix + "-" + struct + "_first.vtk" vtks.append(op.abspath(vtk)) return vtks - if name == 'bvars': + if name == "bvars": bvars = list() for struct in structures: - bvar = prefix + '-' + struct + '_first.bvars' + bvar = prefix + "-" + struct + "_first.bvars" bvars.append(op.abspath(bvar)) return bvars return None diff --git a/nipype/interfaces/fsl/tests/__init__.py b/nipype/interfaces/fsl/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/fsl/tests/__init__.py +++ b/nipype/interfaces/fsl/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/fsl/tests/test_FILMGLS.py b/nipype/interfaces/fsl/tests/test_FILMGLS.py index a37fc1b116..ce2cc57ffd 100644 --- a/nipype/interfaces/fsl/tests/test_FILMGLS.py +++ b/nipype/interfaces/fsl/tests/test_FILMGLS.py @@ -1,122 +1,146 @@ -# -*- coding: utf-8 -*- from nipype.interfaces.fsl.model import FILMGLS, FILMGLSInputSpec def test_filmgls(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s"), autocorr_estimate_only=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='-ac', + argstr="-ac", ), autocorr_noestimate=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='-noest', + argstr="-noest", ), - brightness_threshold=dict(argstr='-epith %d', ), - design_file=dict(argstr='%s', ), - environ=dict(usedefault=True, ), + brightness_threshold=dict(argstr="-epith %d"), + design_file=dict(argstr="%s"), + environ=dict(usedefault=True), fit_armodel=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='-ar', + argstr="-ar", ), - full_data=dict(argstr='-v', ), - in_file=dict( - mandatory=True, - argstr='%s', - ), - mask_size=dict(argstr='-ms %d', ), + full_data=dict(argstr="-v"), + in_file=dict(mandatory=True, argstr="%s"), + mask_size=dict(argstr="-ms %d"), multitaper_product=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='-mt %d', + argstr="-mt %d", ), - output_pwdata=dict(argstr='-output_pwdata', ), + output_pwdata=dict(argstr="-output_pwdata"), output_type=dict(), - results_dir=dict( - usedefault=True, - argstr='-rn %s', - ), - smooth_autocorr=dict(argstr='-sa', ), - threshold=dict(argstr='%f', ), + results_dir=dict(usedefault=True, argstr="-rn %s"), + smooth_autocorr=dict(argstr="-sa"), + threshold=dict(argstr="%f"), tukey_window=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='-tukey %d', + argstr="-tukey %d", ), - use_pava=dict(argstr='-pava', ), + use_pava=dict(argstr="-pava"), ) input_map2 
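
The mesh outputs above follow a fixed <prefix>-<structure>_first.<ext> naming
convention. A condensed standalone sketch of the same rule (structure list
truncated; absolute paths depend on the working directory):

    import os.path as op

    def first_vtk_names(prefix, structures):
        # same pattern as FIRST._gen_mesh_names for the vtk_surfaces output
        return [op.abspath(prefix + "-" + s + "_first.vtk") for s in structures]

    print(first_vtk_names("segmented", ["L_Hipp", "R_Hipp"]))
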
diff --git a/nipype/interfaces/fsl/tests/__init__.py b/nipype/interfaces/fsl/tests/__init__.py
index 40a96afc6f..e69de29bb2 100644
--- a/nipype/interfaces/fsl/tests/__init__.py
+++ b/nipype/interfaces/fsl/tests/__init__.py
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-
diff --git a/nipype/interfaces/fsl/tests/test_FILMGLS.py b/nipype/interfaces/fsl/tests/test_FILMGLS.py
index a37fc1b116..ce2cc57ffd 100644
--- a/nipype/interfaces/fsl/tests/test_FILMGLS.py
+++ b/nipype/interfaces/fsl/tests/test_FILMGLS.py
@@ -1,122 +1,146 @@
-# -*- coding: utf-8 -*-
 from nipype.interfaces.fsl.model import FILMGLS, FILMGLSInputSpec
 
 
 def test_filmgls():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(argstr="%s"),
         autocorr_estimate_only=dict(
             xor=[
-                'autocorr_estimate_only', 'fit_armodel', 'tukey_window',
-                'multitaper_product', 'use_pava', 'autocorr_noestimate'
+                "autocorr_estimate_only",
+                "fit_armodel",
+                "tukey_window",
+                "multitaper_product",
+                "use_pava",
+                "autocorr_noestimate",
             ],
-            argstr='-ac',
+            argstr="-ac",
         ),
         autocorr_noestimate=dict(
             xor=[
-                'autocorr_estimate_only', 'fit_armodel', 'tukey_window',
-                'multitaper_product', 'use_pava', 'autocorr_noestimate'
+                "autocorr_estimate_only",
+                "fit_armodel",
+                "tukey_window",
+                "multitaper_product",
+                "use_pava",
+                "autocorr_noestimate",
             ],
-            argstr='-noest',
+            argstr="-noest",
         ),
-        brightness_threshold=dict(argstr='-epith %d', ),
-        design_file=dict(argstr='%s', ),
-        environ=dict(usedefault=True, ),
+        brightness_threshold=dict(argstr="-epith %d"),
+        design_file=dict(argstr="%s"),
+        environ=dict(usedefault=True),
         fit_armodel=dict(
             xor=[
-                'autocorr_estimate_only', 'fit_armodel', 'tukey_window',
-                'multitaper_product', 'use_pava', 'autocorr_noestimate'
+                "autocorr_estimate_only",
+                "fit_armodel",
+                "tukey_window",
+                "multitaper_product",
+                "use_pava",
+                "autocorr_noestimate",
            ],
-            argstr='-ar',
+            argstr="-ar",
         ),
-        full_data=dict(argstr='-v', ),
-        in_file=dict(
-            mandatory=True,
-            argstr='%s',
-        ),
-        mask_size=dict(argstr='-ms %d', ),
+        full_data=dict(argstr="-v"),
+        in_file=dict(mandatory=True, argstr="%s"),
+        mask_size=dict(argstr="-ms %d"),
         multitaper_product=dict(
             xor=[
-                'autocorr_estimate_only', 'fit_armodel', 'tukey_window',
-                'multitaper_product', 'use_pava', 'autocorr_noestimate'
+                "autocorr_estimate_only",
+                "fit_armodel",
+                "tukey_window",
+                "multitaper_product",
+                "use_pava",
+                "autocorr_noestimate",
             ],
-            argstr='-mt %d',
+            argstr="-mt %d",
         ),
-        output_pwdata=dict(argstr='-output_pwdata', ),
+        output_pwdata=dict(argstr="-output_pwdata"),
         output_type=dict(),
-        results_dir=dict(
-            usedefault=True,
-            argstr='-rn %s',
-        ),
-        smooth_autocorr=dict(argstr='-sa', ),
-        threshold=dict(argstr='%f', ),
+        results_dir=dict(usedefault=True, argstr="-rn %s"),
+        smooth_autocorr=dict(argstr="-sa"),
+        threshold=dict(argstr="%f"),
         tukey_window=dict(
             xor=[
-                'autocorr_estimate_only', 'fit_armodel', 'tukey_window',
-                'multitaper_product', 'use_pava', 'autocorr_noestimate'
+                "autocorr_estimate_only",
+                "fit_armodel",
+                "tukey_window",
+                "multitaper_product",
+                "use_pava",
+                "autocorr_noestimate",
             ],
-            argstr='-tukey %d',
+            argstr="-tukey %d",
         ),
-        use_pava=dict(argstr='-pava', ),
+        use_pava=dict(argstr="-pava"),
     )
     input_map2 = dict(
-        args=dict(argstr='%s', ),
+        args=dict(argstr="%s"),
        autocorr_estimate_only=dict(
             xor=[
-                'autocorr_estimate_only', 'fit_armodel', 'tukey_window',
-                'multitaper_product', 'use_pava', 'autocorr_noestimate'
+                "autocorr_estimate_only",
+                "fit_armodel",
+                "tukey_window",
+                "multitaper_product",
+                "use_pava",
+                "autocorr_noestimate",
             ],
-            argstr='--ac',
+            argstr="--ac",
         ),
         autocorr_noestimate=dict(
             xor=[
-                'autocorr_estimate_only', 'fit_armodel', 'tukey_window',
-                'multitaper_product', 'use_pava', 'autocorr_noestimate'
+                "autocorr_estimate_only",
+                "fit_armodel",
+                "tukey_window",
+                "multitaper_product",
+                "use_pava",
+                "autocorr_noestimate",
             ],
-            argstr='--noest',
+            argstr="--noest",
         ),
-        brightness_threshold=dict(argstr='--epith=%d', ),
-        design_file=dict(argstr='--pd=%s', ),
-        environ=dict(usedefault=True, ),
+        brightness_threshold=dict(argstr="--epith=%d"),
+        design_file=dict(argstr="--pd=%s"),
+        environ=dict(usedefault=True),
         fit_armodel=dict(
             xor=[
-                'autocorr_estimate_only', 'fit_armodel', 'tukey_window',
-                'multitaper_product', 'use_pava', 'autocorr_noestimate'
+                "autocorr_estimate_only",
+                "fit_armodel",
+                "tukey_window",
+                "multitaper_product",
+                "use_pava",
+                "autocorr_noestimate",
             ],
-            argstr='--ar',
-        ),
-        full_data=dict(argstr='-v', ),
-        in_file=dict(
-            mandatory=True,
-            argstr='--in=%s',
+            argstr="--ar",
         ),
-        mask_size=dict(argstr='--ms=%d', ),
+        full_data=dict(argstr="-v"),
+        in_file=dict(mandatory=True, argstr="--in=%s"),
+        mask_size=dict(argstr="--ms=%d"),
         multitaper_product=dict(
             xor=[
-                'autocorr_estimate_only', 'fit_armodel', 'tukey_window',
-                'multitaper_product', 'use_pava', 'autocorr_noestimate'
+                "autocorr_estimate_only",
+                "fit_armodel",
+                "tukey_window",
+                "multitaper_product",
+                "use_pava",
+                "autocorr_noestimate",
            ],
-            argstr='--mt=%d',
+            argstr="--mt=%d",
         ),
-        output_pwdata=dict(argstr='--outputPWdata', ),
+        output_pwdata=dict(argstr="--outputPWdata"),
         output_type=dict(),
-        results_dir=dict(
-            argstr='--rn=%s',
-            usedefault=True,
-        ),
-        smooth_autocorr=dict(argstr='--sa', ),
-        threshold=dict(
-            usedefault=True,
-            argstr='--thr=%f',
-        ),
+        results_dir=dict(argstr="--rn=%s", usedefault=True),
+        smooth_autocorr=dict(argstr="--sa"),
+        threshold=dict(usedefault=True, argstr="--thr=%f"),
         tukey_window=dict(
             xor=[
-                'autocorr_estimate_only', 'fit_armodel', 'tukey_window',
-                'multitaper_product', 'use_pava', 'autocorr_noestimate'
+                "autocorr_estimate_only",
+                "fit_armodel",
+                "tukey_window",
+                "multitaper_product",
+                "use_pava",
+                "autocorr_noestimate",
             ],
-            argstr='--tukey=%d',
+            argstr="--tukey=%d",
         ),
-        use_pava=dict(argstr='--pava', ),
+        use_pava=dict(argstr="--pava"),
     )
     instance = FILMGLS()
     if isinstance(instance.inputs, FILMGLSInputSpec):
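
This test and the auto-generated spec tests that follow all share one
assertion pattern: every declared metadata entry must match the trait metadata
on the interface spec. A condensed, hand-written equivalent, assuming a nipype
installation:

    from nipype.interfaces.fsl.model import FILMGLS

    def check_spec(interface, input_map):
        inputs = interface.input_spec()
        for key, metadata in input_map.items():
            for metakey, value in metadata.items():
                assert getattr(inputs.traits()[key], metakey) == value

    check_spec(FILMGLS, {"args": dict(argstr="%s")})
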
False} - convolution_variants = [('custom', 7, { - 'temporalderiv': False, - 'bfcustompath': '/some/path' - }), ('hrf', 3, basic_ev_parameters), ('dgamma', 3, basic_ev_parameters), - ('gamma', 2, basic_ev_parameters), - ('none', 0, basic_ev_parameters)] + basic_ev_parameters = {"temporalderiv": False} + convolution_variants = [ + ("custom", 7, {"temporalderiv": False, "bfcustompath": "/some/path"}), + ("hrf", 3, basic_ev_parameters), + ("dgamma", 3, basic_ev_parameters), + ("gamma", 2, basic_ev_parameters), + ("none", 0, basic_ev_parameters), + ] for key, val, ev_parameters in convolution_variants: output_num, output_txt = Level1Design._create_ev_files( - l, os.getcwd(), runinfo, runidx, ev_parameters, orthogonalization, - contrasts, do_tempfilter, key) - assert "set fmri(convolve1) {0}".format(val) in output_txt + l, + os.getcwd(), + runinfo, + runidx, + ev_parameters, + orthogonalization, + contrasts, + do_tempfilter, + key, + ) + assert f"set fmri(convolve1) {val}" in output_txt diff --git a/nipype/interfaces/fsl/tests/test_auto_AR1Image.py b/nipype/interfaces/fsl/tests/test_auto_AR1Image.py index 32efb1826e..0f4cfc2bcc 100644 --- a/nipype/interfaces/fsl/tests/test_auto_AR1Image.py +++ b/nipype/interfaces/fsl/tests/test_auto_AR1Image.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import AR1Image def test_AR1Image_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='-%sar1', + argstr="-%sar1", position=4, usedefault=True, ), @@ -16,26 +17,28 @@ def test_AR1Image_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), @@ -45,8 +48,14 @@ def test_AR1Image_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AR1Image_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = AR1Image.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_AccuracyTester.py b/nipype/interfaces/fsl/tests/test_auto_AccuracyTester.py index 062d6367f7..359a076f2d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_AccuracyTester.py +++ b/nipype/interfaces/fsl/tests/test_auto_AccuracyTester.py @@ -1,28 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..fix import AccuracyTester def test_AccuracyTester_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), mel_icas=dict( - argstr='%s', + argstr="%s", copyfile=False, mandatory=True, position=3, ), output_directory=dict( - argstr='%s', + argstr="%s", mandatory=True, position=2, ), trained_wts_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), @@ -32,12 +34,15 @@ def test_AccuracyTester_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_AccuracyTester_outputs(): output_map = dict( output_directory=dict( - argstr='%s', + argstr="%s", position=1, - ), ) + ), + ) outputs = AccuracyTester.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ApplyMask.py b/nipype/interfaces/fsl/tests/test_auto_ApplyMask.py index 8a39956c18..f38990e572 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ApplyMask.py +++ b/nipype/interfaces/fsl/tests/test_auto_ApplyMask.py @@ -1,41 +1,45 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import ApplyMask def test_ApplyMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), mask_file=dict( - argstr='-mas %s', + argstr="-mas %s", + extensions=None, mandatory=True, position=4, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), @@ -45,8 +49,14 @@ def test_ApplyMask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyMask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ApplyMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py b/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py index 7a803f9695..4c5bcc13a7 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py +++ b/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py @@ -1,14 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..epi import ApplyTOPUP def test_ApplyTOPUP_inputs(): input_map = dict( - args=dict(argstr='%s', ), - datatype=dict(argstr='-d=%s', ), + args=dict( + argstr="%s", + ), + datatype=dict( + argstr="-d=%s", + ), encoding_file=dict( - argstr='--datain=%s', + argstr="--datain=%s", + extensions=None, mandatory=True, ), environ=dict( @@ -16,29 +20,36 @@ def test_ApplyTOPUP_inputs(): usedefault=True, ), in_files=dict( - argstr='--imain=%s', + argstr="--imain=%s", mandatory=True, - sep=',', + sep=",", ), in_index=dict( - argstr='--inindex=%s', - sep=',', + argstr="--inindex=%s", + sep=",", ), in_topup_fieldcoef=dict( - argstr='--topup=%s', + argstr="--topup=%s", copyfile=False, - requires=['in_topup_movpar'], + extensions=None, + requires=["in_topup_movpar"], ), in_topup_movpar=dict( copyfile=False, - requires=['in_topup_fieldcoef'], + extensions=None, + requires=["in_topup_fieldcoef"], + ), + interp=dict( + argstr="--interp=%s", + ), + method=dict( + argstr="--method=%s", ), - interp=dict(argstr='--interp=%s', ), - method=dict(argstr='--method=%s', ), out_corrected=dict( - argstr='--out=%s', - name_source=['in_files'], - name_template='%s_corrected', + argstr="--out=%s", + extensions=None, + name_source=["in_files"], + name_template="%s_corrected", ), output_type=dict(), ) @@ -47,8 +58,14 @@ def test_ApplyTOPUP_inputs(): for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyTOPUP_outputs(): - output_map = dict(out_corrected=dict(), ) + output_map = dict( + out_corrected=dict( + extensions=None, + ), + ) outputs = ApplyTOPUP.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py b/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py index 71977cb873..1eaf3eb2e7 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py +++ b/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py @@ -1,60 +1,88 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ApplyWarp def test_ApplyWarp_inputs(): input_map = dict( abswarp=dict( - argstr='--abs', - xor=['relwarp'], + argstr="--abs", + xor=["relwarp"], + ), + args=dict( + argstr="%s", + ), + datatype=dict( + argstr="--datatype=%s", ), - args=dict(argstr='%s', ), - datatype=dict(argstr='--datatype=%s', ), environ=dict( nohash=True, usedefault=True, ), - field_file=dict(argstr='--warp=%s', ), + field_file=dict( + argstr="--warp=%s", + extensions=None, + ), in_file=dict( - argstr='--in=%s', + argstr="--in=%s", + extensions=None, mandatory=True, position=0, ), interp=dict( - argstr='--interp=%s', + argstr="--interp=%s", position=-2, ), - mask_file=dict(argstr='--mask=%s', ), + mask_file=dict( + argstr="--mask=%s", + extensions=None, + ), out_file=dict( - argstr='--out=%s', + argstr="--out=%s", + extensions=None, genfile=True, hash_files=False, position=2, ), output_type=dict(), - postmat=dict(argstr='--postmat=%s', ), - premat=dict(argstr='--premat=%s', ), + postmat=dict( + argstr="--postmat=%s", + extensions=None, + ), + premat=dict( + argstr="--premat=%s", + extensions=None, + ), ref_file=dict( - argstr='--ref=%s', + argstr="--ref=%s", + extensions=None, mandatory=True, position=1, ), relwarp=dict( - argstr='--rel', + argstr="--rel", position=-1, - xor=['abswarp'], + xor=["abswarp"], + ), + superlevel=dict( + argstr="--superlevel=%s", + ), + supersample=dict( + argstr="--super", ), - superlevel=dict(argstr='--superlevel=%s', ), - supersample=dict(argstr='--super', ), ) inputs = ApplyWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyWarp_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ApplyWarp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ApplyXFM.py b/nipype/interfaces/fsl/tests/test_auto_ApplyXFM.py index f515be1f04..0fc914cdde 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ApplyXFM.py +++ b/nipype/interfaces/fsl/tests/test_auto_ApplyXFM.py @@ -1,145 +1,204 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ApplyXFM def test_ApplyXFM_inputs(): input_map = dict( - angle_rep=dict(argstr='-anglerep %s', ), + angle_rep=dict( + argstr="-anglerep %s", + ), apply_isoxfm=dict( - argstr='-applyisoxfm %f', - xor=['apply_xfm'], + argstr="-applyisoxfm %f", + xor=["apply_xfm"], ), apply_xfm=dict( - argstr='-applyxfm', + argstr="-applyxfm", usedefault=True, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), bbrslope=dict( - argstr='-bbrslope %f', - min_ver='5.0.0', + argstr="-bbrslope %f", + 
min_ver="5.0.0", ), bbrtype=dict( - argstr='-bbrtype %s', - min_ver='5.0.0', + argstr="-bbrtype %s", + min_ver="5.0.0", + ), + bgvalue=dict( + argstr="-setbackground %f", + ), + bins=dict( + argstr="-bins %d", ), - bgvalue=dict(argstr='-setbackground %f', ), - bins=dict(argstr='-bins %d', ), coarse_search=dict( - argstr='-coarsesearch %d', - units='degrees', - ), - cost=dict(argstr='-cost %s', ), - cost_func=dict(argstr='-searchcost %s', ), - datatype=dict(argstr='-datatype %s', ), - display_init=dict(argstr='-displayinit', ), - dof=dict(argstr='-dof %d', ), + argstr="-coarsesearch %d", + units="degrees", + ), + cost=dict( + argstr="-cost %s", + ), + cost_func=dict( + argstr="-searchcost %s", + ), + datatype=dict( + argstr="-datatype %s", + ), + display_init=dict( + argstr="-displayinit", + ), + dof=dict( + argstr="-dof %d", + ), echospacing=dict( - argstr='-echospacing %f', - min_ver='5.0.0', + argstr="-echospacing %f", + min_ver="5.0.0", ), environ=dict( nohash=True, usedefault=True, ), fieldmap=dict( - argstr='-fieldmap %s', - min_ver='5.0.0', + argstr="-fieldmap %s", + extensions=None, + min_ver="5.0.0", ), fieldmapmask=dict( - argstr='-fieldmapmask %s', - min_ver='5.0.0', + argstr="-fieldmapmask %s", + extensions=None, + min_ver="5.0.0", ), fine_search=dict( - argstr='-finesearch %d', - units='degrees', + argstr="-finesearch %d", + units="degrees", + ), + force_scaling=dict( + argstr="-forcescaling", ), - force_scaling=dict(argstr='-forcescaling', ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, position=0, ), - in_matrix_file=dict(argstr='-init %s', ), - in_weight=dict(argstr='-inweight %s', ), - interp=dict(argstr='-interp %s', ), + in_matrix_file=dict( + argstr="-init %s", + extensions=None, + ), + in_weight=dict( + argstr="-inweight %s", + extensions=None, + ), + interp=dict( + argstr="-interp %s", + ), min_sampling=dict( - argstr='-minsampling %f', - units='mm', + argstr="-minsampling %f", + units="mm", + ), + no_clamp=dict( + argstr="-noclamp", + ), + no_resample=dict( + argstr="-noresample", + ), + no_resample_blur=dict( + argstr="-noresampblur", + ), + no_search=dict( + argstr="-nosearch", ), - no_clamp=dict(argstr='-noclamp', ), - no_resample=dict(argstr='-noresample', ), - no_resample_blur=dict(argstr='-noresampblur', ), - no_search=dict(argstr='-nosearch', ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_flirt', + name_source=["in_file"], + name_template="%s_flirt", position=2, ), out_log=dict( + extensions=None, keep_extension=True, - name_source=['in_file'], - name_template='%s_flirt.log', - requires=['save_log'], + name_source=["in_file"], + name_template="%s_flirt.log", + requires=["save_log"], ), out_matrix_file=dict( - argstr='-omat %s', + argstr="-omat %s", + extensions=None, hash_files=False, keep_extension=True, - name_source=['in_file'], - name_template='%s_flirt.mat', + name_source=["in_file"], + name_template="%s_flirt.mat", position=3, ), output_type=dict(), padding_size=dict( - argstr='-paddingsize %d', - units='voxels', + argstr="-paddingsize %d", + units="voxels", ), pedir=dict( - argstr='-pedir %d', - min_ver='5.0.0', + argstr="-pedir %d", + min_ver="5.0.0", + ), + ref_weight=dict( + argstr="-refweight %s", + extensions=None, ), - ref_weight=dict(argstr='-refweight %s', ), reference=dict( - argstr='-ref %s', + argstr="-ref %s", + extensions=None, mandatory=True, position=1, ), - rigid2D=dict(argstr='-2D', ), + rigid2D=dict( + 
argstr="-2D", + ), save_log=dict(), - schedule=dict(argstr='-schedule %s', ), + schedule=dict( + argstr="-schedule %s", + extensions=None, + ), searchr_x=dict( - argstr='-searchrx %s', - units='degrees', + argstr="-searchrx %s", + units="degrees", ), searchr_y=dict( - argstr='-searchry %s', - units='degrees', + argstr="-searchry %s", + units="degrees", ), searchr_z=dict( - argstr='-searchrz %s', - units='degrees', + argstr="-searchrz %s", + units="degrees", ), sinc_width=dict( - argstr='-sincwidth %d', - units='voxels', + argstr="-sincwidth %d", + units="voxels", + ), + sinc_window=dict( + argstr="-sincwindow %s", + ), + uses_qform=dict( + argstr="-usesqform", + ), + verbose=dict( + argstr="-verbose %d", ), - sinc_window=dict(argstr='-sincwindow %s', ), - uses_qform=dict(argstr='-usesqform', ), - verbose=dict(argstr='-verbose %d', ), wm_seg=dict( - argstr='-wmseg %s', - min_ver='5.0.0', + argstr="-wmseg %s", + extensions=None, + min_ver="5.0.0", ), wmcoords=dict( - argstr='-wmcoords %s', - min_ver='5.0.0', + argstr="-wmcoords %s", + extensions=None, + min_ver="5.0.0", ), wmnorms=dict( - argstr='-wmnorms %s', - min_ver='5.0.0', + argstr="-wmnorms %s", + extensions=None, + min_ver="5.0.0", ), ) inputs = ApplyXFM.input_spec() @@ -147,11 +206,19 @@ def test_ApplyXFM_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyXFM_outputs(): output_map = dict( - out_file=dict(), - out_log=dict(), - out_matrix_file=dict(), + out_file=dict( + extensions=None, + ), + out_log=dict( + extensions=None, + ), + out_matrix_file=dict( + extensions=None, + ), ) outputs = ApplyXFM.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_AvScale.py b/nipype/interfaces/fsl/tests/test_auto_AvScale.py index 762ada916d..4748d6a9bc 100644 --- a/nipype/interfaces/fsl/tests/test_auto_AvScale.py +++ b/nipype/interfaces/fsl/tests/test_auto_AvScale.py @@ -1,22 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import AvScale def test_AvScale_inputs(): input_map = dict( - all_param=dict(argstr='--allparams', ), - args=dict(argstr='%s', ), + all_param=dict( + argstr="--allparams", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), mat_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), ref_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, ), ) @@ -25,6 +30,8 @@ def test_AvScale_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AvScale_outputs(): output_map = dict( average_scaling=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_B0Calc.py b/nipype/interfaces/fsl/tests/test_auto_B0Calc.py index 96ec6b949c..01727179ff 100644 --- a/nipype/interfaces/fsl/tests/test_auto_B0Calc.py +++ b/nipype/interfaces/fsl/tests/test_auto_B0Calc.py @@ -1,25 +1,26 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..possum import B0Calc def test_B0Calc_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), chi_air=dict( - argstr='--chi0=%e', + argstr="--chi0=%e", usedefault=True, ), compute_xyz=dict( - argstr='--xyz', + argstr="--xyz", usedefault=True, ), delta=dict( - argstr='-d %e', + argstr="-d %e", usedefault=True, ), directconv=dict( - argstr='--directconv', + 
argstr="--directconv", usedefault=True, ), environ=dict( @@ -27,51 +28,53 @@ def test_B0Calc_inputs(): usedefault=True, ), extendboundary=dict( - argstr='--extendboundary=%0.2f', + argstr="--extendboundary=%0.2f", usedefault=True, ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, position=0, ), out_file=dict( - argstr='-o %s', - name_source=['in_file'], - name_template='%s_b0field', - output_name='out_file', + argstr="-o %s", + extensions=None, + name_source=["in_file"], + name_template="%s_b0field", + output_name="out_file", position=1, ), output_type=dict(), x_b0=dict( - argstr='--b0x=%0.2f', + argstr="--b0x=%0.2f", usedefault=True, - xor=['xyz_b0'], + xor=["xyz_b0"], ), x_grad=dict( - argstr='--gx=%0.4f', + argstr="--gx=%0.4f", usedefault=True, ), xyz_b0=dict( - argstr='--b0x=%0.2f --b0y=%0.2f --b0=%0.2f', - xor=['x_b0', 'y_b0', 'z_b0'], + argstr="--b0x=%0.2f --b0y=%0.2f --b0=%0.2f", + xor=["x_b0", "y_b0", "z_b0"], ), y_b0=dict( - argstr='--b0y=%0.2f', + argstr="--b0y=%0.2f", usedefault=True, - xor=['xyz_b0'], + xor=["xyz_b0"], ), y_grad=dict( - argstr='--gy=%0.4f', + argstr="--gy=%0.4f", usedefault=True, ), z_b0=dict( - argstr='--b0=%0.2f', + argstr="--b0=%0.2f", usedefault=True, - xor=['xyz_b0'], + xor=["xyz_b0"], ), z_grad=dict( - argstr='--gz=%0.4f', + argstr="--gz=%0.4f", usedefault=True, ), ) @@ -80,8 +83,14 @@ def test_B0Calc_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_B0Calc_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = B0Calc.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py b/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py index 1ac5db111b..2424307165 100644 --- a/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py +++ b/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py @@ -1,88 +1,115 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import BEDPOSTX5 def test_BEDPOSTX5_inputs(): input_map = dict( all_ard=dict( - argstr='--allard', - xor=('no_ard', 'all_ard'), + argstr="--allard", + xor=("no_ard", "all_ard"), + ), + args=dict( + argstr="%s", ), - args=dict(argstr='%s', ), burn_in=dict( - argstr='-b %d', + argstr="-b %d", usedefault=True, ), burn_in_no_ard=dict( - argstr='--burnin_noard=%d', + argstr="--burnin_noard=%d", usedefault=True, ), - bvals=dict(mandatory=True, ), - bvecs=dict(mandatory=True, ), + bvals=dict( + extensions=None, + mandatory=True, + ), + bvecs=dict( + extensions=None, + mandatory=True, + ), cnlinear=dict( - argstr='--cnonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + argstr="--cnonlinear", + xor=("no_spat", "non_linear", "cnlinear"), + ), + dwi=dict( + extensions=None, + mandatory=True, ), - dwi=dict(mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), f0_ard=dict( - argstr='--f0 --ardf0', - xor=['f0_noard', 'f0_ard', 'all_ard'], + argstr="--f0 --ardf0", + xor=["f0_noard", "f0_ard", "all_ard"], ), f0_noard=dict( - argstr='--f0', - xor=['f0_noard', 'f0_ard'], + argstr="--f0", + xor=["f0_noard", "f0_ard"], ), force_dir=dict( - argstr='--forcedir', + argstr="--forcedir", usedefault=True, ), - fudge=dict(argstr='-w %d', ), - grad_dev=dict(), - gradnonlin=dict(argstr='-g', ), - logdir=dict(argstr='--logdir=%s', ), - mask=dict(mandatory=True, ), - 
model=dict(argstr='-model %d', ), + fudge=dict( + argstr="-w %d", + ), + grad_dev=dict( + extensions=None, + ), + gradnonlin=dict( + argstr="-g", + ), + logdir=dict( + argstr="--logdir=%s", + ), + mask=dict( + extensions=None, + mandatory=True, + ), + model=dict( + argstr="-model %d", + ), n_fibres=dict( - argstr='-n %d', + argstr="-n %d", mandatory=True, usedefault=True, ), n_jumps=dict( - argstr='-j %d', + argstr="-j %d", usedefault=True, ), no_ard=dict( - argstr='--noard', - xor=('no_ard', 'all_ard'), + argstr="--noard", + xor=("no_ard", "all_ard"), ), no_spat=dict( - argstr='--nospat', - xor=('no_spat', 'non_linear', 'cnlinear'), + argstr="--nospat", + xor=("no_spat", "non_linear", "cnlinear"), ), non_linear=dict( - argstr='--nonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + argstr="--nonlinear", + xor=("no_spat", "non_linear", "cnlinear"), ), out_dir=dict( - argstr='%s', + argstr="%s", mandatory=True, position=1, usedefault=True, ), output_type=dict(), - rician=dict(argstr='--rician', ), + rician=dict( + argstr="--rician", + ), sample_every=dict( - argstr='-s %d', + argstr="-s %d", usedefault=True, ), - seed=dict(argstr='--seed=%d', ), + seed=dict( + argstr="--seed=%d", + ), update_proposal_every=dict( - argstr='--updateproposalevery=%d', + argstr="--updateproposalevery=%d", usedefault=True, ), use_gpu=dict(), @@ -92,12 +119,18 @@ def test_BEDPOSTX5_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BEDPOSTX5_outputs(): output_map = dict( dyads=dict(), dyads_dispersion=dict(), - mean_S0samples=dict(), - mean_dsamples=dict(), + mean_S0samples=dict( + extensions=None, + ), + mean_dsamples=dict( + extensions=None, + ), mean_fsamples=dict(), mean_phsamples=dict(), mean_thsamples=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_BET.py b/nipype/interfaces/fsl/tests/test_auto_BET.py index eccf348b3e..82757a10a6 100644 --- a/nipype/interfaces/fsl/tests/test_auto_BET.py +++ b/nipype/interfaces/fsl/tests/test_auto_BET.py @@ -1,97 +1,194 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import BET def test_BET_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), center=dict( - argstr='-c %s', - units='voxels', + argstr="-c %s", + units="voxels", ), environ=dict( nohash=True, usedefault=True, ), - frac=dict(argstr='-f %.2f', ), + frac=dict( + argstr="-f %.2f", + ), functional=dict( - argstr='-F', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + argstr="-F", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), ), in_file=dict( - argstr='%s', + argstr="%s", + copyfile=False, + extensions=None, mandatory=True, position=0, ), - mask=dict(argstr='-m', ), - mesh=dict(argstr='-e', ), - no_output=dict(argstr='-n', ), + mask=dict( + argstr="-m", + ), + mesh=dict( + argstr="-e", + ), + no_output=dict( + argstr="-n", + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=1, ), - outline=dict(argstr='-o', ), + outline=dict( + argstr="-o", + ), output_type=dict(), padding=dict( - argstr='-Z', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + argstr="-Z", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + 
"t2_guided", + ), ), radius=dict( - argstr='-r %d', - units='mm', + argstr="-r %d", + units="mm", ), reduce_bias=dict( - argstr='-B', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + argstr="-B", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), ), remove_eyes=dict( - argstr='-S', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + argstr="-S", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), ), robust=dict( - argstr='-R', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + argstr="-R", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), + ), + skull=dict( + argstr="-s", ), - skull=dict(argstr='-s', ), surfaces=dict( - argstr='-A', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + argstr="-A", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), ), t2_guided=dict( - argstr='-A2 %s', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + argstr="-A2 %s", + extensions=None, + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), + ), + threshold=dict( + argstr="-t", + ), + vertical_gradient=dict( + argstr="-g %.2f", ), - threshold=dict(argstr='-t', ), - vertical_gradient=dict(argstr='-g %.2f', ), ) inputs = BET.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BET_outputs(): output_map = dict( - inskull_mask_file=dict(), - inskull_mesh_file=dict(), - mask_file=dict(), - meshfile=dict(), - out_file=dict(), - outline_file=dict(), - outskin_mask_file=dict(), - outskin_mesh_file=dict(), - outskull_mask_file=dict(), - outskull_mesh_file=dict(), - skull_mask_file=dict(), + inskull_mask_file=dict( + extensions=None, + ), + inskull_mesh_file=dict( + extensions=None, + ), + mask_file=dict( + extensions=None, + ), + meshfile=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + outline_file=dict( + extensions=None, + ), + outskin_mask_file=dict( + extensions=None, + ), + outskin_mesh_file=dict( + extensions=None, + ), + outskull_mask_file=dict( + extensions=None, + ), + outskull_mesh_file=dict( + extensions=None, + ), + skull_file=dict( + extensions=None, + ), + skull_mask_file=dict( + extensions=None, + ), ) outputs = BET.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py b/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py index 2548ae68e5..405cd592f6 100644 --- a/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py +++ b/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py @@ -1,53 +1,57 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import BinaryMaths def test_BinaryMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - 
argstr='-nan', + argstr="-nan", position=3, ), operand_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=5, - xor=['operand_value'], + xor=["operand_value"], ), operand_value=dict( - argstr='%.8f', + argstr="%.8f", mandatory=True, position=5, - xor=['operand_file'], + xor=["operand_file"], ), operation=dict( - argstr='-%s', + argstr="-%s", mandatory=True, position=4, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), @@ -57,8 +61,14 @@ def test_BinaryMaths_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinaryMaths_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = BinaryMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ChangeDataType.py b/nipype/interfaces/fsl/tests/test_auto_ChangeDataType.py index aab508f2bf..61f27be950 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ChangeDataType.py +++ b/nipype/interfaces/fsl/tests/test_auto_ChangeDataType.py @@ -1,36 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import ChangeDataType def test_ChangeDataType_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", mandatory=True, position=-1, ), @@ -41,8 +44,14 @@ def test_ChangeDataType_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ChangeDataType_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ChangeDataType.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Classifier.py b/nipype/interfaces/fsl/tests/test_auto_Classifier.py index a8db888acb..44fde8ae1a 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Classifier.py +++ b/nipype/interfaces/fsl/tests/test_auto_Classifier.py @@ -1,29 +1,33 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..fix import Classifier def test_Classifier_inputs(): input_map = dict( - args=dict(argstr='%s', ), - artifacts_list_file=dict(), + args=dict( + argstr="%s", + ), + artifacts_list_file=dict( + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), mel_ica=dict( - argstr='%s', + argstr="%s", copyfile=False, position=1, ), thresh=dict( - argstr='%d', + argstr="%d", mandatory=True, position=-1, ), trained_wts_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=2, ), @@ -33,8 +37,14 @@ def test_Classifier_inputs(): for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Classifier_outputs(): - output_map = dict(artifacts_list_file=dict(), ) + output_map = dict( + artifacts_list_file=dict( + extensions=None, + ), + ) outputs = Classifier.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Cleaner.py b/nipype/interfaces/fsl/tests/test_auto_Cleaner.py index 5fc505a174..55e6851603 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Cleaner.py +++ b/nipype/interfaces/fsl/tests/test_auto_Cleaner.py @@ -1,34 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..fix import Cleaner def test_Cleaner_inputs(): input_map = dict( aggressive=dict( - argstr='-A', + argstr="-A", position=3, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), artifacts_list_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), cleanup_motion=dict( - argstr='-m', + argstr="-m", position=2, ), confound_file=dict( - argstr='-x %s', + argstr="-x %s", + extensions=None, position=4, ), confound_file_1=dict( - argstr='-x %s', + argstr="-x %s", + extensions=None, position=5, ), confound_file_2=dict( - argstr='-x %s', + argstr="-x %s", + extensions=None, position=6, ), environ=dict( @@ -36,7 +41,7 @@ def test_Cleaner_inputs(): usedefault=True, ), highpass=dict( - argstr='-m -h %f', + argstr="-m -h %f", position=2, usedefault=True, ), @@ -46,8 +51,14 @@ def test_Cleaner_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Cleaner_outputs(): - output_map = dict(cleaned_functional_file=dict(), ) + output_map = dict( + cleaned_functional_file=dict( + extensions=None, + ), + ) outputs = Cleaner.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Cluster.py b/nipype/interfaces/fsl/tests/test_auto_Cluster.py index f876f85e64..ba4dfc8ae3 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Cluster.py +++ b/nipype/interfaces/fsl/tests/test_auto_Cluster.py @@ -1,105 +1,147 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Cluster def test_Cluster_inputs(): input_map = dict( - args=dict(argstr='%s', ), - connectivity=dict(argstr='--connectivity=%d', ), - cope_file=dict(argstr='--cope=%s', ), - dlh=dict(argstr='--dlh=%.10f', ), + args=dict( + argstr="%s", + ), + connectivity=dict( + argstr="--connectivity=%d", + ), + cope_file=dict( + argstr="--cope=%s", + extensions=None, + ), + dlh=dict( + argstr="--dlh=%.10f", + ), environ=dict( nohash=True, usedefault=True, ), find_min=dict( - argstr='--min', + argstr="--min", usedefault=True, ), fractional=dict( - argstr='--fractional', + argstr="--fractional", usedefault=True, ), in_file=dict( - argstr='--in=%s', + argstr="--in=%s", + extensions=None, mandatory=True, ), minclustersize=dict( - argstr='--minclustersize', + argstr="--minclustersize", usedefault=True, ), no_table=dict( - argstr='--no_table', + argstr="--no_table", usedefault=True, ), - num_maxima=dict(argstr='--num=%d', ), + num_maxima=dict( + argstr="--num=%d", + ), out_index_file=dict( - argstr='--oindex=%s', + argstr="--oindex=%s", hash_files=False, ), out_localmax_txt_file=dict( - argstr='--olmax=%s', + argstr="--olmax=%s", hash_files=False, ), out_localmax_vol_file=dict( - argstr='--olmaxim=%s', + 
argstr="--olmaxim=%s", hash_files=False, ), out_max_file=dict( - argstr='--omax=%s', + argstr="--omax=%s", hash_files=False, ), out_mean_file=dict( - argstr='--omean=%s', + argstr="--omean=%s", hash_files=False, ), out_pval_file=dict( - argstr='--opvals=%s', + argstr="--opvals=%s", hash_files=False, ), out_size_file=dict( - argstr='--osize=%s', + argstr="--osize=%s", hash_files=False, ), out_threshold_file=dict( - argstr='--othresh=%s', + argstr="--othresh=%s", hash_files=False, ), output_type=dict(), - peak_distance=dict(argstr='--peakdist=%.10f', ), + peak_distance=dict( + argstr="--peakdist=%.10f", + ), pthreshold=dict( - argstr='--pthresh=%.10f', - requires=['dlh', 'volume'], + argstr="--pthresh=%.10f", + requires=["dlh", "volume"], + ), + std_space_file=dict( + argstr="--stdvol=%s", + extensions=None, ), - std_space_file=dict(argstr='--stdvol=%s', ), threshold=dict( - argstr='--thresh=%.10f', + argstr="--thresh=%.10f", mandatory=True, ), use_mm=dict( - argstr='--mm', + argstr="--mm", usedefault=True, ), - volume=dict(argstr='--volume=%d', ), - warpfield_file=dict(argstr='--warpvol=%s', ), - xfm_file=dict(argstr='--xfm=%s', ), + volume=dict( + argstr="--volume=%d", + ), + warpfield_file=dict( + argstr="--warpvol=%s", + extensions=None, + ), + xfm_file=dict( + argstr="--xfm=%s", + extensions=None, + ), ) inputs = Cluster.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Cluster_outputs(): output_map = dict( - index_file=dict(), - localmax_txt_file=dict(), - localmax_vol_file=dict(), - max_file=dict(), - mean_file=dict(), - pval_file=dict(), - size_file=dict(), - threshold_file=dict(), + index_file=dict( + extensions=None, + ), + localmax_txt_file=dict( + extensions=None, + ), + localmax_vol_file=dict( + extensions=None, + ), + max_file=dict( + extensions=None, + ), + mean_file=dict( + extensions=None, + ), + pval_file=dict( + extensions=None, + ), + size_file=dict( + extensions=None, + ), + threshold_file=dict( + extensions=None, + ), ) outputs = Cluster.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_Complex.py b/nipype/interfaces/fsl/tests/test_auto_Complex.py index 7e49bcdfa8..80d6f20fe6 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Complex.py +++ b/nipype/interfaces/fsl/tests/test_auto_Complex.py @@ -1,64 +1,89 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Complex def test_Complex_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), complex_cartesian=dict( - argstr='-complex', + argstr="-complex", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), complex_in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, ), complex_in_file2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=3, ), complex_merge=dict( - argstr='-complexmerge', + argstr="-complexmerge", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge', 'start_vol', - 'end_vol' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", + "start_vol", + "end_vol", ], ), complex_out_file=dict( - argstr='%s', + argstr="%s", + 
extensions=None, genfile=True, position=-3, xor=[ - 'complex_out_file', 'magnitude_out_file', 'phase_out_file', - 'real_out_file', 'imaginary_out_file', 'real_polar', - 'real_cartesian' + "complex_out_file", + "magnitude_out_file", + "phase_out_file", + "real_out_file", + "imaginary_out_file", + "real_polar", + "real_cartesian", ], ), complex_polar=dict( - argstr='-complexpolar', + argstr="-complexpolar", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), complex_split=dict( - argstr='-complexsplit', + argstr="-complexsplit", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), end_vol=dict( - argstr='%d', + argstr="%d", position=-1, ), environ=dict( @@ -66,80 +91,116 @@ def test_Complex_inputs(): usedefault=True, ), imaginary_in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=3, ), imaginary_out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-3, xor=[ - 'complex_out_file', 'magnitude_out_file', 'phase_out_file', - 'real_polar', 'complex_cartesian', 'complex_polar', - 'complex_split', 'complex_merge' + "complex_out_file", + "magnitude_out_file", + "phase_out_file", + "real_polar", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), magnitude_in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, ), magnitude_out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-4, xor=[ - 'complex_out_file', 'real_out_file', 'imaginary_out_file', - 'real_cartesian', 'complex_cartesian', 'complex_polar', - 'complex_split', 'complex_merge' + "complex_out_file", + "real_out_file", + "imaginary_out_file", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), output_type=dict(), phase_in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=3, ), phase_out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-3, xor=[ - 'complex_out_file', 'real_out_file', 'imaginary_out_file', - 'real_cartesian', 'complex_cartesian', 'complex_polar', - 'complex_split', 'complex_merge' + "complex_out_file", + "real_out_file", + "imaginary_out_file", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), real_cartesian=dict( - argstr='-realcartesian', + argstr="-realcartesian", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), real_in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, ), real_out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-4, xor=[ - 'complex_out_file', 'magnitude_out_file', 'phase_out_file', - 'real_polar', 'complex_cartesian', 'complex_polar', - 'complex_split', 'complex_merge' + "complex_out_file", + "magnitude_out_file", + "phase_out_file", + "real_polar", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), real_polar=dict( - argstr='-realpolar', + 
argstr="-realpolar", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), start_vol=dict( - argstr='%d', + argstr="%d", position=-2, ), ) @@ -148,13 +209,25 @@ def test_Complex_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Complex_outputs(): output_map = dict( - complex_out_file=dict(), - imaginary_out_file=dict(), - magnitude_out_file=dict(), - phase_out_file=dict(), - real_out_file=dict(), + complex_out_file=dict( + extensions=None, + ), + imaginary_out_file=dict( + extensions=None, + ), + magnitude_out_file=dict( + extensions=None, + ), + phase_out_file=dict( + extensions=None, + ), + real_out_file=dict( + extensions=None, + ), ) outputs = Complex.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_ContrastMgr.py b/nipype/interfaces/fsl/tests/test_auto_ContrastMgr.py index a82ea8fe43..35e5bd43a7 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ContrastMgr.py +++ b/nipype/interfaces/fsl/tests/test_auto_ContrastMgr.py @@ -1,41 +1,53 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import ContrastMgr def test_ContrastMgr_inputs(): input_map = dict( - args=dict(argstr='%s', ), - contrast_num=dict(argstr='-cope', ), + args=dict( + argstr="%s", + ), + contrast_num=dict( + argstr="-cope", + ), corrections=dict( copyfile=False, + extensions=None, mandatory=True, ), dof_file=dict( - argstr='', + argstr="", copyfile=False, + extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), - fcon_file=dict(argstr='-f %s', ), + fcon_file=dict( + argstr="-f %s", + extensions=None, + ), output_type=dict(), param_estimates=dict( - argstr='', + argstr="", copyfile=False, mandatory=True, ), sigmasquareds=dict( - argstr='', + argstr="", copyfile=False, + extensions=None, mandatory=True, position=-2, ), - suffix=dict(argstr='-suffix %s', ), + suffix=dict( + argstr="-suffix %s", + ), tcon_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), @@ -45,6 +57,8 @@ def test_ContrastMgr_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ContrastMgr_outputs(): output_map = dict( copes=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py b/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py index 2ed14aaad2..7ae7f7471b 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py +++ b/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py @@ -1,65 +1,98 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ConvertWarp def test_ConvertWarp_inputs(): input_map = dict( abswarp=dict( - argstr='--abs', - xor=['relwarp'], + argstr="--abs", + xor=["relwarp"], + ), + args=dict( + argstr="%s", + ), + cons_jacobian=dict( + argstr="--constrainj", ), - args=dict(argstr='%s', ), - cons_jacobian=dict(argstr='--constrainj', ), environ=dict( nohash=True, usedefault=True, ), - jacobian_max=dict(argstr='--jmax=%f', ), - jacobian_min=dict(argstr='--jmin=%f', ), - midmat=dict(argstr='--midmat=%s', ), + jacobian_max=dict( + argstr="--jmax=%f", + ), + jacobian_min=dict( + 
argstr="--jmin=%f", + ), + midmat=dict( + argstr="--midmat=%s", + extensions=None, + ), out_abswarp=dict( - argstr='--absout', - xor=['out_relwarp'], + argstr="--absout", + xor=["out_relwarp"], ), out_file=dict( - argstr='--out=%s', - name_source=['reference'], - name_template='%s_concatwarp', - output_name='out_file', + argstr="--out=%s", + extensions=None, + name_source=["reference"], + name_template="%s_concatwarp", + output_name="out_file", position=-1, ), out_relwarp=dict( - argstr='--relout', - xor=['out_abswarp'], + argstr="--relout", + xor=["out_abswarp"], ), output_type=dict(), - postmat=dict(argstr='--postmat=%s', ), - premat=dict(argstr='--premat=%s', ), + postmat=dict( + argstr="--postmat=%s", + extensions=None, + ), + premat=dict( + argstr="--premat=%s", + extensions=None, + ), reference=dict( - argstr='--ref=%s', + argstr="--ref=%s", + extensions=None, mandatory=True, position=1, ), relwarp=dict( - argstr='--rel', - xor=['abswarp'], + argstr="--rel", + xor=["abswarp"], ), shift_direction=dict( - argstr='--shiftdir=%s', - requires=['shift_in_file'], + argstr="--shiftdir=%s", + requires=["shift_in_file"], + ), + shift_in_file=dict( + argstr="--shiftmap=%s", + extensions=None, + ), + warp1=dict( + argstr="--warp1=%s", + extensions=None, + ), + warp2=dict( + argstr="--warp2=%s", + extensions=None, ), - shift_in_file=dict(argstr='--shiftmap=%s', ), - warp1=dict(argstr='--warp1=%s', ), - warp2=dict(argstr='--warp2=%s', ), ) inputs = ConvertWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ConvertWarp_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ConvertWarp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py b/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py index c017a39a52..60e1d7553b 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py +++ b/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py @@ -1,43 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ConvertXFM def test_ConvertXFM_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), concat_xfm=dict( - argstr='-concat', + argstr="-concat", position=-3, - requires=['in_file2'], - xor=['invert_xfm', 'concat_xfm', 'fix_scale_skew'], + requires=["in_file2"], + xor=["invert_xfm", "concat_xfm", "fix_scale_skew"], ), environ=dict( nohash=True, usedefault=True, ), fix_scale_skew=dict( - argstr='-fixscaleskew', + argstr="-fixscaleskew", position=-3, - requires=['in_file2'], - xor=['invert_xfm', 'concat_xfm', 'fix_scale_skew'], + requires=["in_file2"], + xor=["invert_xfm", "concat_xfm", "fix_scale_skew"], ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), in_file2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), invert_xfm=dict( - argstr='-inverse', + argstr="-inverse", position=-3, - xor=['invert_xfm', 'concat_xfm', 'fix_scale_skew'], + xor=["invert_xfm", "concat_xfm", "fix_scale_skew"], ), out_file=dict( - argstr='-omat %s', + argstr="-omat %s", + extensions=None, genfile=True, hash_files=False, position=1, @@ -49,8 +53,14 @@ def test_ConvertXFM_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_ConvertXFM_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ConvertXFM.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_CopyGeom.py b/nipype/interfaces/fsl/tests/test_auto_CopyGeom.py index c66feff211..6f15d47bfc 100644 --- a/nipype/interfaces/fsl/tests/test_auto_CopyGeom.py +++ b/nipype/interfaces/fsl/tests/test_auto_CopyGeom.py @@ -1,18 +1,20 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import CopyGeom def test_CopyGeom_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dest_file=dict( - argstr='%s', + argstr="%s", copyfile=True, + extensions=None, mandatory=True, - name_source='dest_file', - name_template='%s', - output_name='out_file', + name_source="dest_file", + name_template="%s", + output_name="out_file", position=1, ), environ=dict( @@ -20,11 +22,12 @@ def test_CopyGeom_inputs(): usedefault=True, ), ignore_dims=dict( - argstr='-d', - position='-1', + argstr="-d", + position="-1", ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, ), @@ -35,8 +38,14 @@ def test_CopyGeom_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CopyGeom_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = CopyGeom.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_DTIFit.py b/nipype/interfaces/fsl/tests/test_auto_DTIFit.py index b1d3b4822d..546ffa6848 100644 --- a/nipype/interfaces/fsl/tests/test_auto_DTIFit.py +++ b/nipype/interfaces/fsl/tests/test_auto_DTIFit.py @@ -1,29 +1,36 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import DTIFit def test_DTIFit_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), base_name=dict( - argstr='-o %s', + argstr="-o %s", position=1, usedefault=True, ), bvals=dict( - argstr='-b %s', + argstr="-b %s", + extensions=None, mandatory=True, position=4, ), bvecs=dict( - argstr='-r %s', + argstr="-r %s", + extensions=None, mandatory=True, position=3, ), - cni=dict(argstr='--cni=%s', ), + cni=dict( + argstr="--cni=%s", + extensions=None, + ), dwi=dict( - argstr='-k %s', + argstr="-k %s", + extensions=None, mandatory=True, position=0, ), @@ -31,42 +38,90 @@ def test_DTIFit_inputs(): nohash=True, usedefault=True, ), - gradnonlin=dict(argstr='--gradnonlin=%s', ), - little_bit=dict(argstr='--littlebit', ), + gradnonlin=dict( + argstr="--gradnonlin=%s", + extensions=None, + ), + little_bit=dict( + argstr="--littlebit", + ), mask=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, mandatory=True, position=2, ), - max_x=dict(argstr='-X %d', ), - max_y=dict(argstr='-Y %d', ), - max_z=dict(argstr='-Z %d', ), - min_x=dict(argstr='-x %d', ), - min_y=dict(argstr='-y %d', ), - min_z=dict(argstr='-z %d', ), + max_x=dict( + argstr="-X %d", + ), + max_y=dict( + argstr="-Y %d", + ), + max_z=dict( + argstr="-Z %d", + ), + min_x=dict( + argstr="-x %d", + ), + min_y=dict( + argstr="-y %d", + ), + min_z=dict( + argstr="-z %d", + ), output_type=dict(), - save_tensor=dict(argstr='--save_tensor', ), - 
sse=dict(argstr='--sse', ), + save_tensor=dict( + argstr="--save_tensor", + ), + sse=dict( + argstr="--sse", + ), ) inputs = DTIFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTIFit_outputs(): output_map = dict( - FA=dict(), - L1=dict(), - L2=dict(), - L3=dict(), - MD=dict(), - MO=dict(), - S0=dict(), - V1=dict(), - V2=dict(), - V3=dict(), - sse=dict(), - tensor=dict(), + FA=dict( + extensions=None, + ), + L1=dict( + extensions=None, + ), + L2=dict( + extensions=None, + ), + L3=dict( + extensions=None, + ), + MD=dict( + extensions=None, + ), + MO=dict( + extensions=None, + ), + S0=dict( + extensions=None, + ), + V1=dict( + extensions=None, + ), + V2=dict( + extensions=None, + ), + V3=dict( + extensions=None, + ), + sse=dict( + extensions=None, + ), + tensor=dict( + extensions=None, + ), ) outputs = DTIFit.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_DilateImage.py b/nipype/interfaces/fsl/tests/test_auto_DilateImage.py index 3ed43fbd22..ac204e5a38 100644 --- a/nipype/interfaces/fsl/tests/test_auto_DilateImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_DilateImage.py @@ -1,55 +1,59 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import DilateImage def test_DilateImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), kernel_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=5, - xor=['kernel_size'], + xor=["kernel_size"], ), kernel_shape=dict( - argstr='-kernel %s', + argstr="-kernel %s", position=4, ), kernel_size=dict( - argstr='%.4f', + argstr="%.4f", position=5, - xor=['kernel_file'], + xor=["kernel_file"], ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), operation=dict( - argstr='-dil%s', + argstr="-dil%s", mandatory=True, position=6, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), @@ -59,8 +63,14 @@ def test_DilateImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DilateImage_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = DilateImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_DistanceMap.py b/nipype/interfaces/fsl/tests/test_auto_DistanceMap.py index 5f3321de78..74ea024917 100644 --- a/nipype/interfaces/fsl/tests/test_auto_DistanceMap.py +++ b/nipype/interfaces/fsl/tests/test_auto_DistanceMap.py @@ -1,13 +1,15 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import DistanceMap def test_DistanceMap_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), distance_map=dict( - argstr='--out=%s', + argstr="--out=%s", + extensions=None, genfile=True, hash_files=False, ), @@ -16,15 +18,21 @@ def test_DistanceMap_inputs(): usedefault=True, ), in_file=dict( - 
argstr='--in=%s', + argstr="--in=%s", + extensions=None, mandatory=True, ), - invert_input=dict(argstr='--invert', ), + invert_input=dict( + argstr="--invert", + ), local_max_file=dict( - argstr='--localmax=%s', + argstr="--localmax=%s", hash_files=False, ), - mask_file=dict(argstr='--mask=%s', ), + mask_file=dict( + argstr="--mask=%s", + extensions=None, + ), output_type=dict(), ) inputs = DistanceMap.input_spec() @@ -32,10 +40,16 @@ def test_DistanceMap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DistanceMap_outputs(): output_map = dict( - distance_map=dict(), - local_max_file=dict(), + distance_map=dict( + extensions=None, + ), + local_max_file=dict( + extensions=None, + ), ) outputs = DistanceMap.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_DualRegression.py b/nipype/interfaces/fsl/tests/test_auto_DualRegression.py index 4c2fdd3ceb..4b3d8f6851 100644 --- a/nipype/interfaces/fsl/tests/test_auto_DualRegression.py +++ b/nipype/interfaces/fsl/tests/test_auto_DualRegression.py @@ -1,22 +1,25 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import DualRegression def test_DualRegression_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), con_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=4, ), des_norm=dict( - argstr='%i', + argstr="%i", position=2, usedefault=True, ), design_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=3, ), environ=dict( @@ -24,27 +27,28 @@ def test_DualRegression_inputs(): usedefault=True, ), group_IC_maps_4D=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), in_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-1, - sep=' ', + sep=" ", ), n_perm=dict( - argstr='%i', + argstr="%i", mandatory=True, position=5, ), one_sample_group_mean=dict( - argstr='-1', + argstr="-1", position=3, ), out_dir=dict( - argstr='%s', + argstr="%s", genfile=True, position=6, usedefault=True, @@ -56,8 +60,12 @@ def test_DualRegression_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DualRegression_outputs(): - output_map = dict(out_dir=dict(), ) + output_map = dict( + out_dir=dict(), + ) outputs = DualRegression.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_EPIDeWarp.py b/nipype/interfaces/fsl/tests/test_auto_EPIDeWarp.py index 2a5f0c86b6..0462fa9cbe 100644 --- a/nipype/interfaces/fsl/tests/test_auto_EPIDeWarp.py +++ b/nipype/interfaces/fsl/tests/test_auto_EPIDeWarp.py @@ -1,58 +1,69 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..epi import EPIDeWarp def test_EPIDeWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cleanup=dict(argstr='--cleanup', ), + args=dict( + argstr="%s", + ), + cleanup=dict( + argstr="--cleanup", + ), dph_file=dict( - argstr='--dph %s', + argstr="--dph %s", + extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), - epi_file=dict(argstr='--epi %s', ), + epi_file=dict( + argstr="--epi %s", + extensions=None, + ), epidw=dict( - argstr='--epidw %s', + argstr="--epidw %s", genfile=False, ), esp=dict( - argstr='--esp %s', + argstr="--esp %s", usedefault=True, ), - 
exf_file=dict(argstr='--exf %s', ), + exf_file=dict( + argstr="--exf %s", + extensions=None, + ), exfdw=dict( - argstr='--exfdw %s', + argstr="--exfdw %s", genfile=True, ), mag_file=dict( - argstr='--mag %s', + argstr="--mag %s", + extensions=None, mandatory=True, position=0, ), nocleanup=dict( - argstr='--nocleanup', + argstr="--nocleanup", usedefault=True, ), output_type=dict(), sigma=dict( - argstr='--sigma %s', + argstr="--sigma %s", usedefault=True, ), tediff=dict( - argstr='--tediff %s', + argstr="--tediff %s", usedefault=True, ), tmpdir=dict( - argstr='--tmpdir %s', + argstr="--tmpdir %s", genfile=True, ), vsm=dict( - argstr='--vsm %s', + argstr="--vsm %s", genfile=True, ), ) @@ -61,12 +72,22 @@ def test_EPIDeWarp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EPIDeWarp_outputs(): output_map = dict( - exf_mask=dict(), - exfdw=dict(), - unwarped_file=dict(), - vsm_file=dict(), + exf_mask=dict( + extensions=None, + ), + exfdw=dict( + extensions=None, + ), + unwarped_file=dict( + extensions=None, + ), + vsm_file=dict( + extensions=None, + ), ) outputs = EPIDeWarp.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_Eddy.py b/nipype/interfaces/fsl/tests/test_auto_Eddy.py index 55d9409e17..0005085474 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Eddy.py +++ b/nipype/interfaces/fsl/tests/test_auto_Eddy.py @@ -1,64 +1,143 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..epi import Eddy def test_Eddy_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), cnr_maps=dict( - argstr='--cnr_maps', - min_ver='5.0.10', + argstr="--cnr_maps", + min_ver="5.0.10", + ), + dont_peas=dict( + argstr="--dont_peas", + ), + dont_sep_offs_move=dict( + argstr="--dont_sep_offs_move", ), - dont_peas=dict(argstr='--dont_peas', ), - dont_sep_offs_move=dict(argstr='--dont_sep_offs_move', ), environ=dict( nohash=True, usedefault=True, ), - fep=dict(argstr='--fep', ), - field=dict(argstr='--field=%s', ), - field_mat=dict(argstr='--field_mat=%s', ), - flm=dict(argstr='--flm=%s', ), + estimate_move_by_susceptibility=dict( + argstr="--estimate_move_by_susceptibility", + min_ver="6.0.1", + ), + fep=dict( + argstr="--fep", + ), + field=dict( + argstr="--field=%s", + extensions=None, + ), + field_mat=dict( + argstr="--field_mat=%s", + extensions=None, + ), + flm=dict( + argstr="--flm=%s", + usedefault=True, + ), fudge_factor=dict( - argstr='--ff=%s', + argstr="--ff=%s", usedefault=True, ), - fwhm=dict(argstr='--fwhm=%s', ), + fwhm=dict( + argstr="--fwhm=%s", + ), in_acqp=dict( - argstr='--acqp=%s', + argstr="--acqp=%s", + extensions=None, mandatory=True, ), in_bval=dict( - argstr='--bvals=%s', + argstr="--bvals=%s", + extensions=None, mandatory=True, ), in_bvec=dict( - argstr='--bvecs=%s', + argstr="--bvecs=%s", + extensions=None, mandatory=True, ), in_file=dict( - argstr='--imain=%s', + argstr="--imain=%s", + extensions=None, mandatory=True, ), in_index=dict( - argstr='--index=%s', + argstr="--index=%s", + extensions=None, mandatory=True, ), in_mask=dict( - argstr='--mask=%s', + argstr="--mask=%s", + extensions=None, mandatory=True, ), in_topup_fieldcoef=dict( - argstr='--topup=%s', - requires=['in_topup_movpar'], + argstr="--topup=%s", + extensions=None, + requires=["in_topup_movpar"], + ), + in_topup_movpar=dict( + extensions=None, + requires=["in_topup_fieldcoef"], + ), + initrand=dict( + 
argstr="--initrand", + min_ver="5.0.10", + ), + interp=dict( + argstr="--interp=%s", + usedefault=True, + ), + is_shelled=dict( + argstr="--data_is_shelled", + ), + json=dict( + argstr="--json=%s", + min_ver="6.0.1", + requires=["mporder"], + xor=["slice_order"], + ), + mbs_ksp=dict( + argstr="--mbs_ksp=%smm", + min_ver="6.0.1", + requires=["estimate_move_by_susceptibility"], + ), + mbs_lambda=dict( + argstr="--mbs_lambda=%s", + min_ver="6.0.1", + requires=["estimate_move_by_susceptibility"], + ), + mbs_niter=dict( + argstr="--mbs_niter=%s", + min_ver="6.0.1", + requires=["estimate_move_by_susceptibility"], + ), + method=dict( + argstr="--resamp=%s", + usedefault=True, + ), + mporder=dict( + argstr="--mporder=%s", + min_ver="5.0.11", + requires=["use_cuda"], + ), + multiband_factor=dict( + argstr="--mb=%s", + min_ver="5.0.10", + ), + multiband_offset=dict( + argstr="--mb_offs=%d", + min_ver="5.0.10", + requires=["multiband_factor"], ), - in_topup_movpar=dict(requires=['in_topup_fieldcoef'], ), - interp=dict(argstr='--interp=%s', ), - is_shelled=dict(argstr='--data_is_shelled', ), - method=dict(argstr='--resamp=%s', ), niter=dict( - argstr='--niter=%s', + argstr="--niter=%s", usedefault=True, ), num_threads=dict( @@ -66,21 +145,75 @@ def test_Eddy_inputs(): usedefault=True, ), nvoxhp=dict( - argstr='--nvoxhp=%s', + argstr="--nvoxhp=%s", usedefault=True, ), out_base=dict( - argstr='--out=%s', + argstr="--out=%s", usedefault=True, ), + outlier_nstd=dict( + argstr="--ol_nstd", + min_ver="5.0.10", + requires=["repol"], + ), + outlier_nvox=dict( + argstr="--ol_nvox", + min_ver="5.0.10", + requires=["repol"], + ), + outlier_pos=dict( + argstr="--ol_pos", + min_ver="5.0.10", + requires=["repol"], + ), + outlier_sqr=dict( + argstr="--ol_sqr", + min_ver="5.0.10", + requires=["repol"], + ), + outlier_type=dict( + argstr="--ol_type", + min_ver="5.0.10", + requires=["repol"], + ), output_type=dict(), - repol=dict(argstr='--repol', ), + repol=dict( + argstr="--repol", + ), residuals=dict( - argstr='--residuals', - min_ver='5.0.10', + argstr="--residuals", + min_ver="5.0.10", + ), + session=dict( + argstr="--session=%s", + extensions=None, + ), + slice2vol_interp=dict( + argstr="--s2v_interp=%s", + min_ver="5.0.11", + requires=["mporder"], + ), + slice2vol_lambda=dict( + argstr="--s2v_lambda=%d", + min_ver="5.0.11", + requires=["mporder"], + ), + slice2vol_niter=dict( + argstr="--s2v_niter=%d", + min_ver="5.0.11", + requires=["mporder"], + ), + slice_order=dict( + argstr="--slspec=%s", + min_ver="5.0.11", + requires=["mporder"], + xor=["json"], + ), + slm=dict( + argstr="--slm=%s", + usedefault=True, ), - session=dict(argstr='--session=%s', ), - slm=dict(argstr='--slm=%s', ), use_cuda=dict(), ) inputs = Eddy.input_spec() @@ -88,17 +221,55 @@ def test_Eddy_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Eddy_outputs(): output_map = dict( - out_cnr_maps=dict(), - out_corrected=dict(), - out_movement_rms=dict(), - out_outlier_report=dict(), - out_parameter=dict(), - out_residuals=dict(), - out_restricted_movement_rms=dict(), - out_rotated_bvecs=dict(), - out_shell_alignment_parameters=dict(), + out_cnr_maps=dict( + extensions=None, + ), + out_corrected=dict( + extensions=None, + ), + out_movement_over_time=dict( + extensions=None, + ), + out_movement_rms=dict( + extensions=None, + ), + out_outlier_free=dict( + extensions=None, + ), + out_outlier_map=dict( + extensions=None, + ), + 
out_outlier_n_sqr_stdev_map=dict( + extensions=None, + ), + out_outlier_n_stdev_map=dict( + extensions=None, + ), + out_outlier_report=dict( + extensions=None, + ), + out_parameter=dict( + extensions=None, + ), + out_residuals=dict( + extensions=None, + ), + out_restricted_movement_rms=dict( + extensions=None, + ), + out_rotated_bvecs=dict( + extensions=None, + ), + out_shell_alignment_parameters=dict( + extensions=None, + ), + out_shell_pe_translation_parameters=dict( + extensions=None, + ), ) outputs = Eddy.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py b/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py index e88219aa04..e2ce1c0a3b 100644 --- a/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py +++ b/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py @@ -1,30 +1,33 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..epi import EddyCorrect def test_EddyCorrect_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, ), out_file=dict( - argstr='%s', - name_source=['in_file'], - name_template='%s_edc', - output_name='eddy_corrected', + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s_edc", + output_name="eddy_corrected", position=1, ), output_type=dict(), ref_num=dict( - argstr='%d', + argstr="%d", mandatory=True, position=2, usedefault=True, @@ -35,8 +38,14 @@ def test_EddyCorrect_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EddyCorrect_outputs(): - output_map = dict(eddy_corrected=dict(), ) + output_map = dict( + eddy_corrected=dict( + extensions=None, + ), + ) outputs = EddyCorrect.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_EddyQuad.py b/nipype/interfaces/fsl/tests/test_auto_EddyQuad.py new file mode 100644 index 0000000000..3d9756a4be --- /dev/null +++ b/nipype/interfaces/fsl/tests/test_auto_EddyQuad.py @@ -0,0 +1,93 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..epi import EddyQuad + + +def test_EddyQuad_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + base_name=dict( + argstr="%s", + position=0, + usedefault=True, + ), + bval_file=dict( + argstr="--bvals %s", + extensions=None, + mandatory=True, + ), + bvec_file=dict( + argstr="--bvecs %s", + extensions=None, + ), + environ=dict( + nohash=True, + usedefault=True, + ), + field=dict( + argstr="--field %s", + extensions=None, + ), + idx_file=dict( + argstr="--eddyIdx %s", + extensions=None, + mandatory=True, + ), + mask_file=dict( + argstr="--mask %s", + extensions=None, + mandatory=True, + ), + output_dir=dict( + argstr="--output-dir %s", + name_source=["base_name"], + name_template="%s.qc", + ), + output_type=dict(), + param_file=dict( + argstr="--eddyParams %s", + extensions=None, + mandatory=True, + ), + slice_spec=dict( + argstr="--slspec %s", + extensions=None, + ), + verbose=dict( + argstr="--verbose", + ), + ) + inputs = EddyQuad.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_EddyQuad_outputs(): + output_map = dict( + avg_b0_pe_png=dict(), + avg_b_png=dict(), + clean_volumes=dict( + 
extensions=None, + ), + cnr_png=dict(), + qc_json=dict( + extensions=None, + ), + qc_pdf=dict( + extensions=None, + ), + residuals=dict( + extensions=None, + ), + vdm_png=dict( + extensions=None, + ), + ) + outputs = EddyQuad.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/fsl/tests/test_auto_EpiReg.py b/nipype/interfaces/fsl/tests/test_auto_EpiReg.py index 634a75e376..242c2e6040 100644 --- a/nipype/interfaces/fsl/tests/test_auto_EpiReg.py +++ b/nipype/interfaces/fsl/tests/test_auto_EpiReg.py @@ -1,70 +1,125 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..epi import EpiReg def test_EpiReg_inputs(): input_map = dict( - args=dict(argstr='%s', ), - echospacing=dict(argstr='--echospacing=%f', ), + args=dict( + argstr="%s", + ), + echospacing=dict( + argstr="--echospacing=%f", + ), environ=dict( nohash=True, usedefault=True, ), epi=dict( - argstr='--epi=%s', + argstr="--epi=%s", + extensions=None, mandatory=True, position=-4, ), - fmap=dict(argstr='--fmap=%s', ), - fmapmag=dict(argstr='--fmapmag=%s', ), - fmapmagbrain=dict(argstr='--fmapmagbrain=%s', ), + fmap=dict( + argstr="--fmap=%s", + extensions=None, + ), + fmapmag=dict( + argstr="--fmapmag=%s", + extensions=None, + ), + fmapmagbrain=dict( + argstr="--fmapmagbrain=%s", + extensions=None, + ), no_clean=dict( - argstr='--noclean', + argstr="--noclean", usedefault=True, ), - no_fmapreg=dict(argstr='--nofmapreg', ), + no_fmapreg=dict( + argstr="--nofmapreg", + ), out_base=dict( - argstr='--out=%s', + argstr="--out=%s", position=-1, usedefault=True, ), output_type=dict(), - pedir=dict(argstr='--pedir=%s', ), + pedir=dict( + argstr="--pedir=%s", + ), t1_brain=dict( - argstr='--t1brain=%s', + argstr="--t1brain=%s", + extensions=None, mandatory=True, position=-2, ), t1_head=dict( - argstr='--t1=%s', + argstr="--t1=%s", + extensions=None, mandatory=True, position=-3, ), - weight_image=dict(argstr='--weight=%s', ), - wmseg=dict(argstr='--wmseg=%s', ), + weight_image=dict( + argstr="--weight=%s", + extensions=None, + ), + wmseg=dict( + argstr="--wmseg=%s", + extensions=None, + ), ) inputs = EpiReg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EpiReg_outputs(): output_map = dict( - epi2str_inv=dict(), - epi2str_mat=dict(), - fmap2epi_mat=dict(), - fmap2str_mat=dict(), - fmap_epi=dict(), - fmap_str=dict(), - fmapmag_str=dict(), - fullwarp=dict(), - out_1vol=dict(), - out_file=dict(), - seg=dict(), - shiftmap=dict(), - wmedge=dict(), - wmseg=dict(), + epi2str_inv=dict( + extensions=None, + ), + epi2str_mat=dict( + extensions=None, + ), + fmap2epi_mat=dict( + extensions=None, + ), + fmap2str_mat=dict( + extensions=None, + ), + fmap_epi=dict( + extensions=None, + ), + fmap_str=dict( + extensions=None, + ), + fmapmag_str=dict( + extensions=None, + ), + fullwarp=dict( + extensions=None, + ), + out_1vol=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + seg=dict( + extensions=None, + ), + shiftmap=dict( + extensions=None, + ), + wmedge=dict( + extensions=None, + ), + wmseg=dict( + extensions=None, + ), ) outputs = EpiReg.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py b/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py index 797a403d45..1aad31cd16 100644 --- 
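
test_auto_EddyQuad.py is a brand-new file because the EddyQuad interface itself is new; its header shows these tests are emitted by tools/checkspecs.py rather than written by hand. The exact regeneration command lives in the nipype repo; conceptually the tool introspects each interface and serializes its trait metadata, along the lines of this deliberately simplified sketch (dump_argstrs is a hypothetical helper, not the real checkspecs API):

    from nipype.interfaces.fsl import EddyQuad

    def dump_argstrs(interface_class):
        """Collect the argstr metadata for every input trait that has one."""
        spec = interface_class.input_spec()
        return {
            name: trait.argstr
            for name, trait in spec.traits().items()
            if trait.argstr is not None  # undefined metadata reads as None
        }

    print(dump_argstrs(EddyQuad))
    # e.g. {'args': '%s', 'bval_file': '--bvals %s', 'idx_file': '--eddyIdx %s', ...}
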
a/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py @@ -1,55 +1,59 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import ErodeImage def test_ErodeImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), kernel_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=5, - xor=['kernel_size'], + xor=["kernel_size"], ), kernel_shape=dict( - argstr='-kernel %s', + argstr="-kernel %s", position=4, ), kernel_size=dict( - argstr='%.4f', + argstr="%.4f", position=5, - xor=['kernel_file'], + xor=["kernel_file"], ), minimum_filter=dict( - argstr='%s', + argstr="%s", position=6, usedefault=True, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), @@ -59,8 +63,14 @@ def test_ErodeImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ErodeImage_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ErodeImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py b/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py index df8de64144..bd6acb137c 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py +++ b/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py @@ -1,17 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ExtractROI def test_ExtractROI_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), crop_list=dict( - argstr='%s', + argstr="%s", position=2, xor=[ - 'x_min', 'x_size', 'y_min', 'y_size', 'z_min', 'z_size', - 't_min', 't_size' + "x_min", + "x_size", + "y_min", + "y_size", + "z_min", + "z_size", + "t_min", + "t_size", ], ), environ=dict( @@ -19,47 +26,49 @@ def test_ExtractROI_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, ), output_type=dict(), roi_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=1, ), t_min=dict( - argstr='%d', + argstr="%d", position=8, ), t_size=dict( - argstr='%d', + argstr="%d", position=9, ), x_min=dict( - argstr='%d', + argstr="%d", position=2, ), x_size=dict( - argstr='%d', + argstr="%d", position=3, ), y_min=dict( - argstr='%d', + argstr="%d", position=4, ), y_size=dict( - argstr='%d', + argstr="%d", position=5, ), z_min=dict( - argstr='%d', + argstr="%d", position=6, ), z_size=dict( - argstr='%d', + argstr="%d", position=7, ), ) @@ -68,8 +77,14 @@ def test_ExtractROI_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ExtractROI_outputs(): - output_map = dict(roi_file=dict(), ) + output_map = dict( + roi_file=dict( + extensions=None, + ), + ) 
outputs = ExtractROI.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_FAST.py b/nipype/interfaces/fsl/tests/test_auto_FAST.py index 0b983181af..e775d97b35 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FAST.py +++ b/nipype/interfaces/fsl/tests/test_auto_FAST.py @@ -1,62 +1,114 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import FAST def test_FAST_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bias_iters=dict(argstr='-I %d', ), + args=dict( + argstr="%s", + ), + bias_iters=dict( + argstr="-I %d", + ), bias_lowpass=dict( - argstr='-l %d', - units='mm', + argstr="-l %d", + units="mm", ), environ=dict( nohash=True, usedefault=True, ), - hyper=dict(argstr='-H %.2f', ), - img_type=dict(argstr='-t %d', ), + hyper=dict( + argstr="-H %.2f", + ), + img_type=dict( + argstr="-t %d", + ), in_files=dict( - argstr='%s', + argstr="%s", copyfile=False, mandatory=True, position=-1, ), - init_seg_smooth=dict(argstr='-f %.3f', ), - init_transform=dict(argstr='-a %s', ), - iters_afterbias=dict(argstr='-O %d', ), - manual_seg=dict(argstr='-s %s', ), - mixel_smooth=dict(argstr='-R %.2f', ), - no_bias=dict(argstr='-N', ), - no_pve=dict(argstr='--nopve', ), - number_classes=dict(argstr='-n %d', ), - other_priors=dict(argstr='-A %s', ), - out_basename=dict(argstr='-o %s', ), - output_biascorrected=dict(argstr='-B', ), - output_biasfield=dict(argstr='-b', ), + init_seg_smooth=dict( + argstr="-f %.3f", + ), + init_transform=dict( + argstr="-a %s", + extensions=None, + ), + iters_afterbias=dict( + argstr="-O %d", + ), + manual_seg=dict( + argstr="-s %s", + extensions=None, + ), + mixel_smooth=dict( + argstr="-R %.2f", + ), + no_bias=dict( + argstr="-N", + ), + no_pve=dict( + argstr="--nopve", + ), + number_classes=dict( + argstr="-n %d", + ), + other_priors=dict( + argstr="-A %s", + ), + out_basename=dict( + argstr="-o %s", + extensions=None, + ), + output_biascorrected=dict( + argstr="-B", + ), + output_biasfield=dict( + argstr="-b", + ), output_type=dict(), - probability_maps=dict(argstr='-p', ), - segment_iters=dict(argstr='-W %d', ), - segments=dict(argstr='-g', ), - use_priors=dict(argstr='-P', ), - verbose=dict(argstr='-v', ), + probability_maps=dict( + argstr="-p", + ), + segment_iters=dict( + argstr="-W %d", + ), + segments=dict( + argstr="-g", + ), + use_priors=dict( + argstr="-P", + ), + verbose=dict( + argstr="-v", + ), ) inputs = FAST.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FAST_outputs(): output_map = dict( bias_field=dict(), - mixeltype=dict(), + mixeltype=dict( + extensions=None, + ), partial_volume_files=dict(), - partial_volume_map=dict(), + partial_volume_map=dict( + extensions=None, + ), probability_maps=dict(), restored_image=dict(), tissue_class_files=dict(), - tissue_class_map=dict(), + tissue_class_map=dict( + extensions=None, + ), ) outputs = FAST.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FEAT.py b/nipype/interfaces/fsl/tests/test_auto_FEAT.py index 01fc72506f..b363dd290f 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FEAT.py +++ b/nipype/interfaces/fsl/tests/test_auto_FEAT.py @@ -1,17 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import FEAT def test_FEAT_inputs(): input_map = dict( - args=dict(argstr='%s', ), + 
args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fsf_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, ), @@ -22,8 +24,12 @@ def test_FEAT_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FEAT_outputs(): - output_map = dict(feat_dir=dict(), ) + output_map = dict( + feat_dir=dict(), + ) outputs = FEAT.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_FEATModel.py b/nipype/interfaces/fsl/tests/test_auto_FEATModel.py index 34ea37d47f..0e6c2f9e33 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FEATModel.py +++ b/nipype/interfaces/fsl/tests/test_auto_FEATModel.py @@ -1,24 +1,26 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import FEATModel def test_FEATModel_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), ev_files=dict( - argstr='%s', + argstr="%s", copyfile=False, mandatory=True, position=1, ), fsf_file=dict( - argstr='%s', + argstr="%s", copyfile=False, + extensions=None, mandatory=True, position=0, ), @@ -29,13 +31,25 @@ def test_FEATModel_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FEATModel_outputs(): output_map = dict( - con_file=dict(), - design_cov=dict(), - design_file=dict(), - design_image=dict(), - fcon_file=dict(), + con_file=dict( + extensions=None, + ), + design_cov=dict( + extensions=None, + ), + design_file=dict( + extensions=None, + ), + design_image=dict( + extensions=None, + ), + fcon_file=dict( + extensions=None, + ), ) outputs = FEATModel.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FEATRegister.py b/nipype/interfaces/fsl/tests/test_auto_FEATRegister.py index a8f59a0ec3..fe09c468ec 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FEATRegister.py +++ b/nipype/interfaces/fsl/tests/test_auto_FEATRegister.py @@ -1,21 +1,33 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import FEATRegister def test_FEATRegister_inputs(): input_map = dict( - feat_dirs=dict(mandatory=True, ), - reg_dof=dict(usedefault=True, ), - reg_image=dict(mandatory=True, ), + feat_dirs=dict( + mandatory=True, + ), + reg_dof=dict( + usedefault=True, + ), + reg_image=dict( + extensions=None, + mandatory=True, + ), ) inputs = FEATRegister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FEATRegister_outputs(): - output_map = dict(fsf_file=dict(), ) + output_map = dict( + fsf_file=dict( + extensions=None, + ), + ) outputs = FEATRegister.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_FIRST.py b/nipype/interfaces/fsl/tests/test_auto_FIRST.py index 964ee4922c..42ba79e799 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FIRST.py +++ b/nipype/interfaces/fsl/tests/test_auto_FIRST.py @@ -1,17 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import FIRST def test_FIRST_inputs(): input_map = dict( affine_file=dict( - argstr='-a %s', + argstr="-a %s", + 
extensions=None, position=6, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), brain_extracted=dict( - argstr='-b', + argstr="-b", position=2, ), environ=dict( @@ -19,32 +21,34 @@ def test_FIRST_inputs(): usedefault=True, ), in_file=dict( - argstr='-i %s', + argstr="-i %s", copyfile=False, + extensions=None, mandatory=True, position=-2, ), list_of_specific_structures=dict( - argstr='-s %s', + argstr="-s %s", position=5, - sep=',', + sep=",", ), method=dict( - argstr='-m %s', + argstr="-m %s", position=4, usedefault=True, - xor=['method_as_numerical_threshold'], + xor=["method_as_numerical_threshold"], ), method_as_numerical_threshold=dict( - argstr='-m %.4f', + argstr="-m %.4f", position=4, ), no_cleanup=dict( - argstr='-d', + argstr="-d", position=3, ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, hash_files=False, mandatory=True, position=-1, @@ -52,7 +56,7 @@ def test_FIRST_inputs(): ), output_type=dict(), verbose=dict( - argstr='-v', + argstr="-v", position=1, ), ) @@ -61,11 +65,17 @@ def test_FIRST_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FIRST_outputs(): output_map = dict( bvars=dict(), - original_segmentations=dict(), - segmentation_file=dict(), + original_segmentations=dict( + extensions=None, + ), + segmentation_file=dict( + extensions=None, + ), vtk_surfaces=dict(), ) outputs = FIRST.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FLAMEO.py b/nipype/interfaces/fsl/tests/test_auto_FLAMEO.py index bd335282e3..f25b225d6e 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FLAMEO.py +++ b/nipype/interfaces/fsl/tests/test_auto_FLAMEO.py @@ -1,61 +1,94 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import FLAMEO def test_FLAMEO_inputs(): input_map = dict( - args=dict(argstr='%s', ), - burnin=dict(argstr='--burnin=%d', ), + args=dict( + argstr="%s", + ), + burnin=dict( + argstr="--burnin=%d", + ), cope_file=dict( - argstr='--copefile=%s', + argstr="--copefile=%s", + extensions=None, mandatory=True, ), cov_split_file=dict( - argstr='--covsplitfile=%s', + argstr="--covsplitfile=%s", + extensions=None, mandatory=True, ), design_file=dict( - argstr='--designfile=%s', + argstr="--designfile=%s", + extensions=None, mandatory=True, ), - dof_var_cope_file=dict(argstr='--dofvarcopefile=%s', ), + dof_var_cope_file=dict( + argstr="--dofvarcopefile=%s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), - f_con_file=dict(argstr='--fcontrastsfile=%s', ), - fix_mean=dict(argstr='--fixmean', ), - infer_outliers=dict(argstr='--inferoutliers', ), + f_con_file=dict( + argstr="--fcontrastsfile=%s", + extensions=None, + ), + fix_mean=dict( + argstr="--fixmean", + ), + infer_outliers=dict( + argstr="--inferoutliers", + ), log_dir=dict( - argstr='--ld=%s', + argstr="--ld=%s", usedefault=True, ), mask_file=dict( - argstr='--maskfile=%s', + argstr="--maskfile=%s", + extensions=None, mandatory=True, ), - n_jumps=dict(argstr='--njumps=%d', ), - no_pe_outputs=dict(argstr='--nopeoutput', ), - outlier_iter=dict(argstr='--ioni=%d', ), + n_jumps=dict( + argstr="--njumps=%d", + ), + no_pe_outputs=dict( + argstr="--nopeoutput", + ), + outlier_iter=dict( + argstr="--ioni=%d", + ), output_type=dict(), run_mode=dict( - argstr='--runmode=%s', + argstr="--runmode=%s", mandatory=True, ), - sample_every=dict(argstr='--sampleevery=%d', ), - 
sigma_dofs=dict(argstr='--sigma_dofs=%d', ), + sample_every=dict( + argstr="--sampleevery=%d", + ), + sigma_dofs=dict( + argstr="--sigma_dofs=%d", + ), t_con_file=dict( - argstr='--tcontrastsfile=%s', + argstr="--tcontrastsfile=%s", + extensions=None, mandatory=True, ), - var_cope_file=dict(argstr='--varcopefile=%s', ), + var_cope_file=dict( + argstr="--varcopefile=%s", + extensions=None, + ), ) inputs = FLAMEO.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FLAMEO_outputs(): output_map = dict( copes=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_FLIRT.py b/nipype/interfaces/fsl/tests/test_auto_FLIRT.py index 0b59550e5a..a9bdc38477 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FLIRT.py +++ b/nipype/interfaces/fsl/tests/test_auto_FLIRT.py @@ -1,142 +1,203 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import FLIRT def test_FLIRT_inputs(): input_map = dict( - angle_rep=dict(argstr='-anglerep %s', ), + angle_rep=dict( + argstr="-anglerep %s", + ), apply_isoxfm=dict( - argstr='-applyisoxfm %f', - xor=['apply_xfm'], + argstr="-applyisoxfm %f", + xor=["apply_xfm"], + ), + apply_xfm=dict( + argstr="-applyxfm", + ), + args=dict( + argstr="%s", ), - apply_xfm=dict(argstr='-applyxfm', ), - args=dict(argstr='%s', ), bbrslope=dict( - argstr='-bbrslope %f', - min_ver='5.0.0', + argstr="-bbrslope %f", + min_ver="5.0.0", ), bbrtype=dict( - argstr='-bbrtype %s', - min_ver='5.0.0', + argstr="-bbrtype %s", + min_ver="5.0.0", + ), + bgvalue=dict( + argstr="-setbackground %f", + ), + bins=dict( + argstr="-bins %d", ), - bgvalue=dict(argstr='-setbackground %f', ), - bins=dict(argstr='-bins %d', ), coarse_search=dict( - argstr='-coarsesearch %d', - units='degrees', - ), - cost=dict(argstr='-cost %s', ), - cost_func=dict(argstr='-searchcost %s', ), - datatype=dict(argstr='-datatype %s', ), - display_init=dict(argstr='-displayinit', ), - dof=dict(argstr='-dof %d', ), + argstr="-coarsesearch %d", + units="degrees", + ), + cost=dict( + argstr="-cost %s", + ), + cost_func=dict( + argstr="-searchcost %s", + ), + datatype=dict( + argstr="-datatype %s", + ), + display_init=dict( + argstr="-displayinit", + ), + dof=dict( + argstr="-dof %d", + ), echospacing=dict( - argstr='-echospacing %f', - min_ver='5.0.0', + argstr="-echospacing %f", + min_ver="5.0.0", ), environ=dict( nohash=True, usedefault=True, ), fieldmap=dict( - argstr='-fieldmap %s', - min_ver='5.0.0', + argstr="-fieldmap %s", + extensions=None, + min_ver="5.0.0", ), fieldmapmask=dict( - argstr='-fieldmapmask %s', - min_ver='5.0.0', + argstr="-fieldmapmask %s", + extensions=None, + min_ver="5.0.0", ), fine_search=dict( - argstr='-finesearch %d', - units='degrees', + argstr="-finesearch %d", + units="degrees", + ), + force_scaling=dict( + argstr="-forcescaling", ), - force_scaling=dict(argstr='-forcescaling', ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, position=0, ), - in_matrix_file=dict(argstr='-init %s', ), - in_weight=dict(argstr='-inweight %s', ), - interp=dict(argstr='-interp %s', ), + in_matrix_file=dict( + argstr="-init %s", + extensions=None, + ), + in_weight=dict( + argstr="-inweight %s", + extensions=None, + ), + interp=dict( + argstr="-interp %s", + ), min_sampling=dict( - argstr='-minsampling %f', - units='mm', + argstr="-minsampling %f", + units="mm", + ), + no_clamp=dict( + argstr="-noclamp", + 
), + no_resample=dict( + argstr="-noresample", + ), + no_resample_blur=dict( + argstr="-noresampblur", + ), + no_search=dict( + argstr="-nosearch", ), - no_clamp=dict(argstr='-noclamp', ), - no_resample=dict(argstr='-noresample', ), - no_resample_blur=dict(argstr='-noresampblur', ), - no_search=dict(argstr='-nosearch', ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_flirt', + name_source=["in_file"], + name_template="%s_flirt", position=2, ), out_log=dict( + extensions=None, keep_extension=True, - name_source=['in_file'], - name_template='%s_flirt.log', - requires=['save_log'], + name_source=["in_file"], + name_template="%s_flirt.log", + requires=["save_log"], ), out_matrix_file=dict( - argstr='-omat %s', + argstr="-omat %s", + extensions=None, hash_files=False, keep_extension=True, - name_source=['in_file'], - name_template='%s_flirt.mat', + name_source=["in_file"], + name_template="%s_flirt.mat", position=3, ), output_type=dict(), padding_size=dict( - argstr='-paddingsize %d', - units='voxels', + argstr="-paddingsize %d", + units="voxels", ), pedir=dict( - argstr='-pedir %d', - min_ver='5.0.0', + argstr="-pedir %d", + min_ver="5.0.0", + ), + ref_weight=dict( + argstr="-refweight %s", + extensions=None, ), - ref_weight=dict(argstr='-refweight %s', ), reference=dict( - argstr='-ref %s', + argstr="-ref %s", + extensions=None, mandatory=True, position=1, ), - rigid2D=dict(argstr='-2D', ), + rigid2D=dict( + argstr="-2D", + ), save_log=dict(), - schedule=dict(argstr='-schedule %s', ), + schedule=dict( + argstr="-schedule %s", + extensions=None, + ), searchr_x=dict( - argstr='-searchrx %s', - units='degrees', + argstr="-searchrx %s", + units="degrees", ), searchr_y=dict( - argstr='-searchry %s', - units='degrees', + argstr="-searchry %s", + units="degrees", ), searchr_z=dict( - argstr='-searchrz %s', - units='degrees', + argstr="-searchrz %s", + units="degrees", ), sinc_width=dict( - argstr='-sincwidth %d', - units='voxels', + argstr="-sincwidth %d", + units="voxels", + ), + sinc_window=dict( + argstr="-sincwindow %s", + ), + uses_qform=dict( + argstr="-usesqform", + ), + verbose=dict( + argstr="-verbose %d", ), - sinc_window=dict(argstr='-sincwindow %s', ), - uses_qform=dict(argstr='-usesqform', ), - verbose=dict(argstr='-verbose %d', ), wm_seg=dict( - argstr='-wmseg %s', - min_ver='5.0.0', + argstr="-wmseg %s", + extensions=None, + min_ver="5.0.0", ), wmcoords=dict( - argstr='-wmcoords %s', - min_ver='5.0.0', + argstr="-wmcoords %s", + extensions=None, + min_ver="5.0.0", ), wmnorms=dict( - argstr='-wmnorms %s', - min_ver='5.0.0', + argstr="-wmnorms %s", + extensions=None, + min_ver="5.0.0", ), ) inputs = FLIRT.input_spec() @@ -144,11 +205,19 @@ def test_FLIRT_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FLIRT_outputs(): output_map = dict( - out_file=dict(), - out_log=dict(), - out_matrix_file=dict(), + out_file=dict( + extensions=None, + ), + out_log=dict( + extensions=None, + ), + out_matrix_file=dict( + extensions=None, + ), ) outputs = FLIRT.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FNIRT.py b/nipype/interfaces/fsl/tests/test_auto_FNIRT.py index 2b7d0b1b00..eb6ae1f714 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FNIRT.py +++ b/nipype/interfaces/fsl/tests/test_auto_FNIRT.py @@ -1,119 +1,170 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT 
-from __future__ import unicode_literals from ..preprocess import FNIRT def test_FNIRT_inputs(): input_map = dict( - affine_file=dict(argstr='--aff=%s', ), + affine_file=dict( + argstr="--aff=%s", + extensions=None, + ), apply_inmask=dict( - argstr='--applyinmask=%s', - sep=',', - xor=['skip_inmask'], + argstr="--applyinmask=%s", + sep=",", + xor=["skip_inmask"], ), apply_intensity_mapping=dict( - argstr='--estint=%s', - sep=',', - xor=['skip_intensity_mapping'], + argstr="--estint=%s", + sep=",", + xor=["skip_intensity_mapping"], ), apply_refmask=dict( - argstr='--applyrefmask=%s', - sep=',', - xor=['skip_refmask'], - ), - args=dict(argstr='%s', ), - bias_regularization_lambda=dict(argstr='--biaslambda=%f', ), - biasfield_resolution=dict(argstr='--biasres=%d,%d,%d', ), - config_file=dict(argstr='--config=%s', ), - derive_from_ref=dict(argstr='--refderiv', ), + argstr="--applyrefmask=%s", + sep=",", + xor=["skip_refmask"], + ), + args=dict( + argstr="%s", + ), + bias_regularization_lambda=dict( + argstr="--biaslambda=%f", + ), + biasfield_resolution=dict( + argstr="--biasres=%d,%d,%d", + ), + config_file=dict( + argstr="--config=%s", + ), + derive_from_ref=dict( + argstr="--refderiv", + ), environ=dict( nohash=True, usedefault=True, ), field_file=dict( - argstr='--fout=%s', + argstr="--fout=%s", hash_files=False, ), - fieldcoeff_file=dict(argstr='--cout=%s', ), - hessian_precision=dict(argstr='--numprec=%s', ), + fieldcoeff_file=dict( + argstr="--cout=%s", + ), + hessian_precision=dict( + argstr="--numprec=%s", + ), in_file=dict( - argstr='--in=%s', + argstr="--in=%s", + extensions=None, mandatory=True, ), in_fwhm=dict( - argstr='--infwhm=%s', - sep=',', + argstr="--infwhm=%s", + sep=",", ), in_intensitymap_file=dict( - argstr='--intin=%s', + argstr="--intin=%s", copyfile=False, ), - inmask_file=dict(argstr='--inmask=%s', ), - inmask_val=dict(argstr='--impinval=%f', ), - intensity_mapping_model=dict(argstr='--intmod=%s', ), - intensity_mapping_order=dict(argstr='--intorder=%d', ), - inwarp_file=dict(argstr='--inwarp=%s', ), + inmask_file=dict( + argstr="--inmask=%s", + extensions=None, + ), + inmask_val=dict( + argstr="--impinval=%f", + ), + intensity_mapping_model=dict( + argstr="--intmod=%s", + ), + intensity_mapping_order=dict( + argstr="--intorder=%d", + ), + inwarp_file=dict( + argstr="--inwarp=%s", + extensions=None, + ), jacobian_file=dict( - argstr='--jout=%s', + argstr="--jout=%s", hash_files=False, ), - jacobian_range=dict(argstr='--jacrange=%f,%f', ), + jacobian_range=dict( + argstr="--jacrange=%f,%f", + ), log_file=dict( - argstr='--logout=%s', + argstr="--logout=%s", + extensions=None, genfile=True, hash_files=False, ), max_nonlin_iter=dict( - argstr='--miter=%s', - sep=',', + argstr="--miter=%s", + sep=",", ), modulatedref_file=dict( - argstr='--refout=%s', + argstr="--refout=%s", hash_files=False, ), out_intensitymap_file=dict( - argstr='--intout=%s', + argstr="--intout=%s", hash_files=False, ), output_type=dict(), ref_file=dict( - argstr='--ref=%s', + argstr="--ref=%s", + extensions=None, mandatory=True, ), ref_fwhm=dict( - argstr='--reffwhm=%s', - sep=',', + argstr="--reffwhm=%s", + sep=",", + ), + refmask_file=dict( + argstr="--refmask=%s", + extensions=None, + ), + refmask_val=dict( + argstr="--imprefval=%f", ), - refmask_file=dict(argstr='--refmask=%s', ), - refmask_val=dict(argstr='--imprefval=%f', ), regularization_lambda=dict( - argstr='--lambda=%s', - sep=',', + argstr="--lambda=%s", + sep=",", + ), + regularization_model=dict( + argstr="--regmod=%s", + ), + 
skip_implicit_in_masking=dict( + argstr="--impinm=0", + ), + skip_implicit_ref_masking=dict( + argstr="--imprefm=0", ), - regularization_model=dict(argstr='--regmod=%s', ), - skip_implicit_in_masking=dict(argstr='--impinm=0', ), - skip_implicit_ref_masking=dict(argstr='--imprefm=0', ), skip_inmask=dict( - argstr='--applyinmask=0', - xor=['apply_inmask'], + argstr="--applyinmask=0", + xor=["apply_inmask"], ), skip_intensity_mapping=dict( - argstr='--estint=0', - xor=['apply_intensity_mapping'], + argstr="--estint=0", + xor=["apply_intensity_mapping"], + ), + skip_lambda_ssq=dict( + argstr="--ssqlambda=0", ), - skip_lambda_ssq=dict(argstr='--ssqlambda=0', ), skip_refmask=dict( - argstr='--applyrefmask=0', - xor=['apply_refmask'], + argstr="--applyrefmask=0", + xor=["apply_refmask"], + ), + spline_order=dict( + argstr="--splineorder=%d", ), - spline_order=dict(argstr='--splineorder=%d', ), subsampling_scheme=dict( - argstr='--subsamp=%s', - sep=',', + argstr="--subsamp=%s", + sep=",", + ), + warp_resolution=dict( + argstr="--warpres=%d,%d,%d", ), - warp_resolution=dict(argstr='--warpres=%d,%d,%d', ), warped_file=dict( - argstr='--iout=%s', + argstr="--iout=%s", + extensions=None, genfile=True, hash_files=False, ), @@ -123,15 +174,29 @@ def test_FNIRT_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FNIRT_outputs(): output_map = dict( - field_file=dict(), - fieldcoeff_file=dict(), - jacobian_file=dict(), - log_file=dict(), - modulatedref_file=dict(), + field_file=dict( + extensions=None, + ), + fieldcoeff_file=dict( + extensions=None, + ), + jacobian_file=dict( + extensions=None, + ), + log_file=dict( + extensions=None, + ), + modulatedref_file=dict( + extensions=None, + ), out_intensitymap_file=dict(), - warped_file=dict(), + warped_file=dict( + extensions=None, + ), ) outputs = FNIRT.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FSLCommand.py b/nipype/interfaces/fsl/tests/test_auto_FSLCommand.py index 1d95b77df5..1b444c381e 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FSLCommand.py +++ b/nipype/interfaces/fsl/tests/test_auto_FSLCommand.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import FSLCommand def test_FSLCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py b/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py index fd85eee3bf..3948f3d650 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py +++ b/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py @@ -1,37 +1,41 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import FSLXCommand def test_FSLXCommand_inputs(): input_map = dict( all_ard=dict( - argstr='--allard', - xor=('no_ard', 'all_ard'), + argstr="--allard", + xor=("no_ard", "all_ard"), + ), + args=dict( + argstr="%s", ), - args=dict(argstr='%s', ), burn_in=dict( - argstr='--burnin=%d', + argstr="--burnin=%d", usedefault=True, ), burn_in_no_ard=dict( - argstr='--burnin_noard=%d', + argstr="--burnin_noard=%d", usedefault=True, ), bvals=dict( - argstr='--bvals=%s', + argstr="--bvals=%s", + extensions=None, mandatory=True, ), bvecs=dict( - argstr='--bvecs=%s', + argstr="--bvecs=%s", + extensions=None, mandatory=True, ), cnlinear=dict( - 
argstr='--cnonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + argstr="--cnonlinear", + xor=("no_spat", "non_linear", "cnlinear"), ), dwi=dict( - argstr='--data=%s', + argstr="--data=%s", + extensions=None, mandatory=True, ), environ=dict( @@ -39,57 +43,66 @@ def test_FSLXCommand_inputs(): usedefault=True, ), f0_ard=dict( - argstr='--f0 --ardf0', - xor=['f0_noard', 'f0_ard', 'all_ard'], + argstr="--f0 --ardf0", + xor=["f0_noard", "f0_ard", "all_ard"], ), f0_noard=dict( - argstr='--f0', - xor=['f0_noard', 'f0_ard'], + argstr="--f0", + xor=["f0_noard", "f0_ard"], ), force_dir=dict( - argstr='--forcedir', + argstr="--forcedir", usedefault=True, ), - fudge=dict(argstr='--fudge=%d', ), + fudge=dict( + argstr="--fudge=%d", + ), logdir=dict( - argstr='--logdir=%s', + argstr="--logdir=%s", usedefault=True, ), mask=dict( - argstr='--mask=%s', + argstr="--mask=%s", + extensions=None, mandatory=True, ), - model=dict(argstr='--model=%d', ), + model=dict( + argstr="--model=%d", + ), n_fibres=dict( - argstr='--nfibres=%d', + argstr="--nfibres=%d", mandatory=True, usedefault=True, ), n_jumps=dict( - argstr='--njumps=%d', + argstr="--njumps=%d", usedefault=True, ), no_ard=dict( - argstr='--noard', - xor=('no_ard', 'all_ard'), + argstr="--noard", + xor=("no_ard", "all_ard"), ), no_spat=dict( - argstr='--nospat', - xor=('no_spat', 'non_linear', 'cnlinear'), + argstr="--nospat", + xor=("no_spat", "non_linear", "cnlinear"), ), non_linear=dict( - argstr='--nonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + argstr="--nonlinear", + xor=("no_spat", "non_linear", "cnlinear"), ), output_type=dict(), - rician=dict(argstr='--rician', ), + rician=dict( + argstr="--rician", + ), sample_every=dict( - argstr='--sampleevery=%d', + argstr="--sampleevery=%d", usedefault=True, ), - seed=dict(argstr='--seed=%d', ), + seed=dict( + argstr="--seed=%d", + ), update_proposal_every=dict( - argstr='--updateproposalevery=%d', + argstr="--updateproposalevery=%d", usedefault=True, ), ) @@ -98,14 +111,22 @@ def test_FSLXCommand_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FSLXCommand_outputs(): output_map = dict( dyads=dict(), fsamples=dict(), - mean_S0samples=dict(), - mean_dsamples=dict(), + mean_S0samples=dict( + extensions=None, + ), + mean_dsamples=dict( + extensions=None, + ), mean_fsamples=dict(), - mean_tausamples=dict(), + mean_tausamples=dict( + extensions=None, + ), phsamples=dict(), thsamples=dict(), ) diff --git a/nipype/interfaces/fsl/tests/test_auto_FUGUE.py b/nipype/interfaces/fsl/tests/test_auto_FUGUE.py index 0e96f1e867..841bb2021f 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FUGUE.py +++ b/nipype/interfaces/fsl/tests/test_auto_FUGUE.py @@ -1,67 +1,129 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import FUGUE def test_FUGUE_inputs(): input_map = dict( - args=dict(argstr='%s', ), - asym_se_time=dict(argstr='--asym=%.10f', ), - despike_2dfilter=dict(argstr='--despike', ), - despike_threshold=dict(argstr='--despikethreshold=%s', ), - dwell_time=dict(argstr='--dwell=%.10f', ), - dwell_to_asym_ratio=dict(argstr='--dwelltoasym=%.10f', ), + args=dict( + argstr="%s", + ), + asym_se_time=dict( + argstr="--asym=%.10f", + ), + despike_2dfilter=dict( + argstr="--despike", + ), + despike_threshold=dict( + argstr="--despikethreshold=%s", + ), + dwell_time=dict( + argstr="--dwell=%.10f", + ), + dwell_to_asym_ratio=dict( 
+ argstr="--dwelltoasym=%.10f", + ), environ=dict( nohash=True, usedefault=True, ), - fmap_in_file=dict(argstr='--loadfmap=%s', ), - fmap_out_file=dict(argstr='--savefmap=%s', ), - forward_warping=dict(usedefault=True, ), - fourier_order=dict(argstr='--fourier=%d', ), + fmap_in_file=dict( + argstr="--loadfmap=%s", + extensions=None, + ), + fmap_out_file=dict( + argstr="--savefmap=%s", + extensions=None, + ), + forward_warping=dict( + usedefault=True, + ), + fourier_order=dict( + argstr="--fourier=%d", + ), icorr=dict( - argstr='--icorr', - requires=['shift_in_file'], + argstr="--icorr", + requires=["shift_in_file"], ), icorr_only=dict( - argstr='--icorronly', - requires=['unwarped_file'], - ), - in_file=dict(argstr='--in=%s', ), - mask_file=dict(argstr='--mask=%s', ), - median_2dfilter=dict(argstr='--median', ), - no_extend=dict(argstr='--noextend', ), - no_gap_fill=dict(argstr='--nofill', ), - nokspace=dict(argstr='--nokspace', ), + argstr="--icorronly", + requires=["unwarped_file"], + ), + in_file=dict( + argstr="--in=%s", + extensions=None, + ), + mask_file=dict( + argstr="--mask=%s", + extensions=None, + ), + median_2dfilter=dict( + argstr="--median", + ), + no_extend=dict( + argstr="--noextend", + ), + no_gap_fill=dict( + argstr="--nofill", + ), + nokspace=dict( + argstr="--nokspace", + ), output_type=dict(), - pava=dict(argstr='--pava', ), - phase_conjugate=dict(argstr='--phaseconj', ), - phasemap_in_file=dict(argstr='--phasemap=%s', ), - poly_order=dict(argstr='--poly=%d', ), - save_fmap=dict(xor=['save_unmasked_fmap'], ), - save_shift=dict(xor=['save_unmasked_shift'], ), + pava=dict( + argstr="--pava", + ), + phase_conjugate=dict( + argstr="--phaseconj", + ), + phasemap_in_file=dict( + argstr="--phasemap=%s", + extensions=None, + ), + poly_order=dict( + argstr="--poly=%d", + ), + save_fmap=dict( + xor=["save_unmasked_fmap"], + ), + save_shift=dict( + xor=["save_unmasked_shift"], + ), save_unmasked_fmap=dict( - argstr='--unmaskfmap', - xor=['save_fmap'], + argstr="--unmaskfmap", + xor=["save_fmap"], ), save_unmasked_shift=dict( - argstr='--unmaskshift', - xor=['save_shift'], - ), - shift_in_file=dict(argstr='--loadshift=%s', ), - shift_out_file=dict(argstr='--saveshift=%s', ), - smooth2d=dict(argstr='--smooth2=%.2f', ), - smooth3d=dict(argstr='--smooth3=%.2f', ), - unwarp_direction=dict(argstr='--unwarpdir=%s', ), + argstr="--unmaskshift", + xor=["save_shift"], + ), + shift_in_file=dict( + argstr="--loadshift=%s", + extensions=None, + ), + shift_out_file=dict( + argstr="--saveshift=%s", + extensions=None, + ), + smooth2d=dict( + argstr="--smooth2=%.2f", + ), + smooth3d=dict( + argstr="--smooth3=%.2f", + ), + unwarp_direction=dict( + argstr="--unwarpdir=%s", + ), unwarped_file=dict( - argstr='--unwarp=%s', - requires=['in_file'], - xor=['warped_file'], + argstr="--unwarp=%s", + extensions=None, + requires=["in_file"], + xor=["warped_file"], ), warped_file=dict( - argstr='--warp=%s', - requires=['in_file'], - xor=['unwarped_file'], + argstr="--warp=%s", + extensions=None, + requires=["in_file"], + xor=["unwarped_file"], ), ) inputs = FUGUE.input_spec() @@ -69,12 +131,22 @@ def test_FUGUE_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FUGUE_outputs(): output_map = dict( - fmap_out_file=dict(), - shift_out_file=dict(), - unwarped_file=dict(), - warped_file=dict(), + fmap_out_file=dict( + extensions=None, + ), + shift_out_file=dict( + extensions=None, + ), + 
unwarped_file=dict( + extensions=None, + ), + warped_file=dict( + extensions=None, + ), ) outputs = FUGUE.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FeatureExtractor.py b/nipype/interfaces/fsl/tests/test_auto_FeatureExtractor.py index 3945c40a87..8531fe17c4 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FeatureExtractor.py +++ b/nipype/interfaces/fsl/tests/test_auto_FeatureExtractor.py @@ -1,17 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..fix import FeatureExtractor def test_FeatureExtractor_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), mel_ica=dict( - argstr='%s', + argstr="%s", copyfile=False, position=-1, ), @@ -21,13 +22,16 @@ def test_FeatureExtractor_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FeatureExtractor_outputs(): output_map = dict( mel_ica=dict( - argstr='%s', + argstr="%s", copyfile=False, position=-1, - ), ) + ), + ) outputs = FeatureExtractor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py b/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py index 36cbb979d7..e4826db355 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py +++ b/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py @@ -1,13 +1,15 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import FilterRegressor def test_FilterRegressor_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), design_file=dict( - argstr='-d %s', + argstr="-d %s", + extensions=None, mandatory=True, position=3, ), @@ -19,37 +21,52 @@ def test_FilterRegressor_inputs(): argstr="-f '%s'", mandatory=True, position=4, - xor=['filter_columns'], + xor=["filter_columns"], ), filter_columns=dict( argstr="-f '%s'", mandatory=True, position=4, - xor=['filter_all'], + xor=["filter_all"], ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, position=1, ), - mask=dict(argstr='-m %s', ), + mask=dict( + argstr="-m %s", + extensions=None, + ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, hash_files=False, position=2, ), - out_vnscales=dict(argstr='--out_vnscales', ), + out_vnscales=dict( + argstr="--out_vnscales", + ), output_type=dict(), - var_norm=dict(argstr='--vn', ), + var_norm=dict( + argstr="--vn", + ), ) inputs = FilterRegressor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FilterRegressor_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = FilterRegressor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_FindTheBiggest.py b/nipype/interfaces/fsl/tests/test_auto_FindTheBiggest.py index 3731c842e7..14b3bbb8da 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FindTheBiggest.py +++ b/nipype/interfaces/fsl/tests/test_auto_FindTheBiggest.py @@ -1,22 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import FindTheBiggest def test_FindTheBiggest_inputs(): 
input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=0, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=2, @@ -28,8 +30,15 @@ def test_FindTheBiggest_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FindTheBiggest_outputs(): - output_map = dict(out_file=dict(argstr='%s', ), ) + output_map = dict( + out_file=dict( + argstr="%s", + extensions=None, + ), + ) outputs = FindTheBiggest.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_GLM.py b/nipype/interfaces/fsl/tests/test_auto_GLM.py index 1dbf7eba19..63105f128d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_GLM.py +++ b/nipype/interfaces/fsl/tests/test_auto_GLM.py @@ -1,63 +1,120 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import GLM def test_GLM_inputs(): input_map = dict( - args=dict(argstr='%s', ), - contrasts=dict(argstr='-c %s', ), - dat_norm=dict(argstr='--dat_norm', ), - demean=dict(argstr='--demean', ), - des_norm=dict(argstr='--des_norm', ), + args=dict( + argstr="%s", + ), + contrasts=dict( + argstr="-c %s", + extensions=None, + ), + dat_norm=dict( + argstr="--dat_norm", + ), + demean=dict( + argstr="--demean", + ), + des_norm=dict( + argstr="--des_norm", + ), design=dict( - argstr='-d %s', + argstr="-d %s", + extensions=None, mandatory=True, position=2, ), - dof=dict(argstr='--dof=%d', ), + dof=dict( + argstr="--dof=%d", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, position=1, ), - mask=dict(argstr='-m %s', ), - out_cope=dict(argstr='--out_cope=%s', ), - out_data_name=dict(argstr='--out_data=%s', ), - out_f_name=dict(argstr='--out_f=%s', ), + mask=dict( + argstr="-m %s", + extensions=None, + ), + out_cope=dict( + argstr="--out_cope=%s", + extensions=None, + ), + out_data_name=dict( + argstr="--out_data=%s", + extensions=None, + ), + out_f_name=dict( + argstr="--out_f=%s", + extensions=None, + ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_glm', + name_source="in_file", + name_template="%s_glm", position=3, ), - out_p_name=dict(argstr='--out_p=%s', ), - out_pf_name=dict(argstr='--out_pf=%s', ), - out_res_name=dict(argstr='--out_res=%s', ), - out_sigsq_name=dict(argstr='--out_sigsq=%s', ), - out_t_name=dict(argstr='--out_t=%s', ), - out_varcb_name=dict(argstr='--out_varcb=%s', ), - out_vnscales_name=dict(argstr='--out_vnscales=%s', ), - out_z_name=dict(argstr='--out_z=%s', ), + out_p_name=dict( + argstr="--out_p=%s", + extensions=None, + ), + out_pf_name=dict( + argstr="--out_pf=%s", + extensions=None, + ), + out_res_name=dict( + argstr="--out_res=%s", + extensions=None, + ), + out_sigsq_name=dict( + argstr="--out_sigsq=%s", + extensions=None, + ), + out_t_name=dict( + argstr="--out_t=%s", + extensions=None, + ), + out_varcb_name=dict( + argstr="--out_varcb=%s", + extensions=None, + ), + out_vnscales_name=dict( + argstr="--out_vnscales=%s", + extensions=None, + ), + out_z_name=dict( + argstr="--out_z=%s", + extensions=None, + ), output_type=dict(), - var_norm=dict(argstr='--vn', ), + var_norm=dict( + 
argstr="--vn", + ), ) inputs = GLM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GLM_outputs(): output_map = dict( out_cope=dict(), out_data=dict(), out_f=dict(), - out_file=dict(), + out_file=dict( + extensions=None, + ), out_p=dict(), out_pf=dict(), out_res=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_ICA_AROMA.py b/nipype/interfaces/fsl/tests/test_auto_ICA_AROMA.py index 05bbb5b106..b49813e24d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ICA_AROMA.py +++ b/nipype/interfaces/fsl/tests/test_auto_ICA_AROMA.py @@ -1,54 +1,64 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..aroma import ICA_AROMA def test_ICA_AROMA_inputs(): input_map = dict( - TR=dict(argstr='-tr %.3f', ), - args=dict(argstr='%s', ), + TR=dict( + argstr="-tr %.3f", + ), + args=dict( + argstr="%s", + ), denoise_type=dict( - argstr='-den %s', + argstr="-den %s", mandatory=True, usedefault=True, ), - dim=dict(argstr='-dim %d', ), + dim=dict( + argstr="-dim %d", + ), environ=dict( nohash=True, usedefault=True, ), feat_dir=dict( - argstr='-feat %s', + argstr="-feat %s", mandatory=True, - xor=[ - 'in_file', 'mat_file', 'fnirt_warp_file', 'motion_parameters' - ], + xor=["in_file", "mat_file", "fnirt_warp_file", "motion_parameters"], ), fnirt_warp_file=dict( - argstr='-warp %s', - xor=['feat_dir'], + argstr="-warp %s", + extensions=None, + xor=["feat_dir"], ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, - xor=['feat_dir'], + xor=["feat_dir"], ), mask=dict( - argstr='-m %s', - xor=['feat_dir'], + argstr="-m %s", + extensions=None, + xor=["feat_dir"], ), mat_file=dict( - argstr='-affmat %s', - xor=['feat_dir'], + argstr="-affmat %s", + extensions=None, + xor=["feat_dir"], + ), + melodic_dir=dict( + argstr="-meldir %s", ), - melodic_dir=dict(argstr='-meldir %s', ), motion_parameters=dict( - argstr='-mc %s', + argstr="-mc %s", + extensions=None, mandatory=True, - xor=['feat_dir'], + xor=["feat_dir"], ), out_dir=dict( - argstr='-o %s', + argstr="-o %s", mandatory=True, usedefault=True, ), @@ -58,10 +68,16 @@ def test_ICA_AROMA_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ICA_AROMA_outputs(): output_map = dict( - aggr_denoised_file=dict(), - nonaggr_denoised_file=dict(), + aggr_denoised_file=dict( + extensions=None, + ), + nonaggr_denoised_file=dict( + extensions=None, + ), out_dir=dict(), ) outputs = ICA_AROMA.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_ImageMaths.py b/nipype/interfaces/fsl/tests/test_auto_ImageMaths.py index 6d4e06827e..d2c4737d65 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ImageMaths.py +++ b/nipype/interfaces/fsl/tests/test_auto_ImageMaths.py @@ -1,35 +1,42 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ImageMaths def test_ImageMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), in_file2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=3, ), - mask_file=dict(argstr='-mas %s', ), + mask_file=dict( + argstr="-mas %s", + extensions=None, + ), op_string=dict( - 
argstr='%s', + argstr="%s", position=2, ), out_data_type=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, @@ -42,8 +49,14 @@ def test_ImageMaths_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageMaths_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ImageMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ImageMeants.py b/nipype/interfaces/fsl/tests/test_auto_ImageMeants.py index 042f93112b..b050d8f50b 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ImageMeants.py +++ b/nipype/interfaces/fsl/tests/test_auto_ImageMeants.py @@ -1,45 +1,69 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ImageMeants def test_ImageMeants_inputs(): input_map = dict( - args=dict(argstr='%s', ), - eig=dict(argstr='--eig', ), + args=dict( + argstr="%s", + ), + eig=dict( + argstr="--eig", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, position=0, ), - mask=dict(argstr='-m %s', ), - nobin=dict(argstr='--no_bin', ), + mask=dict( + argstr="-m %s", + extensions=None, + ), + nobin=dict( + argstr="--no_bin", + ), order=dict( - argstr='--order=%d', + argstr="--order=%d", usedefault=True, ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, hash_files=False, ), output_type=dict(), - show_all=dict(argstr='--showall', ), - spatial_coord=dict(argstr='-c %s', ), - transpose=dict(argstr='--transpose', ), - use_mm=dict(argstr='--usemm', ), + show_all=dict( + argstr="--showall", + ), + spatial_coord=dict( + argstr="-c %s", + ), + transpose=dict( + argstr="--transpose", + ), + use_mm=dict( + argstr="--usemm", + ), ) inputs = ImageMeants.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageMeants_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ImageMeants.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ImageStats.py b/nipype/interfaces/fsl/tests/test_auto_ImageStats.py index 91a6059798..e4ddf5f06d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ImageStats.py +++ b/nipype/interfaces/fsl/tests/test_auto_ImageStats.py @@ -1,29 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ImageStats def test_ImageStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, + position=3, + ), + index_mask_file=dict( + argstr="-K %s", + extensions=None, position=2, ), - mask_file=dict(argstr='', ), + mask_file=dict( + argstr="", + extensions=None, + ), op_string=dict( - argstr='%s', + argstr="%s", mandatory=True, - position=3, + position=4, ), output_type=dict(), split_4d=dict( - argstr='-t', + argstr="-t", position=1, ), ) @@ -32,8 +42,12 @@ def test_ImageStats_inputs(): for key, 
metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageStats_outputs(): - output_map = dict(out_stat=dict(), ) + output_map = dict( + out_stat=dict(), + ) outputs = ImageStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_InvWarp.py b/nipype/interfaces/fsl/tests/test_auto_InvWarp.py index 973f42ea34..1dba5e578a 100644 --- a/nipype/interfaces/fsl/tests/test_auto_InvWarp.py +++ b/nipype/interfaces/fsl/tests/test_auto_InvWarp.py @@ -1,41 +1,55 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import InvWarp def test_InvWarp_inputs(): input_map = dict( absolute=dict( - argstr='--abs', - xor=['relative'], + argstr="--abs", + xor=["relative"], + ), + args=dict( + argstr="%s", ), - args=dict(argstr='%s', ), environ=dict( nohash=True, usedefault=True, ), inverse_warp=dict( - argstr='--out=%s', + argstr="--out=%s", + extensions=None, hash_files=False, - name_source=['warp'], - name_template='%s_inverse', + name_source=["warp"], + name_template="%s_inverse", + ), + jacobian_max=dict( + argstr="--jmax=%f", + ), + jacobian_min=dict( + argstr="--jmin=%f", + ), + niter=dict( + argstr="--niter=%d", + ), + noconstraint=dict( + argstr="--noconstraint", ), - jacobian_max=dict(argstr='--jmax=%f', ), - jacobian_min=dict(argstr='--jmin=%f', ), - niter=dict(argstr='--niter=%d', ), - noconstraint=dict(argstr='--noconstraint', ), output_type=dict(), reference=dict( - argstr='--ref=%s', + argstr="--ref=%s", + extensions=None, mandatory=True, ), - regularise=dict(argstr='--regularise=%f', ), + regularise=dict( + argstr="--regularise=%f", + ), relative=dict( - argstr='--rel', - xor=['absolute'], + argstr="--rel", + xor=["absolute"], ), warp=dict( - argstr='--warp=%s', + argstr="--warp=%s", + extensions=None, mandatory=True, ), ) @@ -44,8 +58,14 @@ def test_InvWarp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_InvWarp_outputs(): - output_map = dict(inverse_warp=dict(), ) + output_map = dict( + inverse_warp=dict( + extensions=None, + ), + ) outputs = InvWarp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py b/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py index 4b6192c98d..f9c5432d40 100644 --- a/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py +++ b/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py @@ -1,50 +1,53 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import IsotropicSmooth def test_IsotropicSmooth_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fwhm=dict( - argstr='-s %.5f', + argstr="-s %.5f", mandatory=True, position=4, - xor=['sigma'], + xor=["sigma"], ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), sigma=dict( - 
argstr='-s %.5f', + argstr="-s %.5f", mandatory=True, position=4, - xor=['fwhm'], + xor=["fwhm"], ), ) inputs = IsotropicSmooth.input_spec() @@ -52,8 +55,14 @@ def test_IsotropicSmooth_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_IsotropicSmooth_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = IsotropicSmooth.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_L2Model.py b/nipype/interfaces/fsl/tests/test_auto_L2Model.py index 9d3588666f..c4547fc7a2 100644 --- a/nipype/interfaces/fsl/tests/test_auto_L2Model.py +++ b/nipype/interfaces/fsl/tests/test_auto_L2Model.py @@ -1,20 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import L2Model def test_L2Model_inputs(): - input_map = dict(num_copes=dict(mandatory=True, ), ) + input_map = dict( + num_copes=dict( + mandatory=True, + ), + ) inputs = L2Model.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_L2Model_outputs(): output_map = dict( - design_con=dict(), - design_grp=dict(), - design_mat=dict(), + design_con=dict( + extensions=None, + ), + design_grp=dict( + extensions=None, + ), + design_mat=dict( + extensions=None, + ), ) outputs = L2Model.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_Level1Design.py b/nipype/interfaces/fsl/tests/test_auto_Level1Design.py index 45451f8eff..5a43989601 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Level1Design.py +++ b/nipype/interfaces/fsl/tests/test_auto_Level1Design.py @@ -1,22 +1,33 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Level1Design def test_Level1Design_inputs(): input_map = dict( - bases=dict(mandatory=True, ), + bases=dict( + mandatory=True, + ), contrasts=dict(), - interscan_interval=dict(mandatory=True, ), - model_serial_correlations=dict(mandatory=True, ), - orthogonalization=dict(usedefault=True, ), - session_info=dict(mandatory=True, ), + interscan_interval=dict( + mandatory=True, + ), + model_serial_correlations=dict( + mandatory=True, + ), + orthogonalization=dict( + usedefault=True, + ), + session_info=dict( + mandatory=True, + ), ) inputs = Level1Design.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Level1Design_outputs(): output_map = dict( ev_files=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_MCFLIRT.py b/nipype/interfaces/fsl/tests/test_auto_MCFLIRT.py index daeca07b10..768c52a7f4 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MCFLIRT.py +++ b/nipype/interfaces/fsl/tests/test_auto_MCFLIRT.py @@ -1,59 +1,112 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import MCFLIRT def test_MCFLIRT_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bins=dict(argstr='-bins %d', ), - cost=dict(argstr='-cost %s', ), - dof=dict(argstr='-dof %d', ), + args=dict( + argstr="%s", + ), + bins=dict( + argstr="-bins %d", + ), + cost=dict( + argstr="-cost %s", + ), + dof=dict( + argstr="-dof %d", + ), environ=dict( nohash=True, usedefault=True, ), 
in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, position=0, ), - init=dict(argstr='-init %s', ), - interpolation=dict(argstr='-%s_final', ), - mean_vol=dict(argstr='-meanvol', ), + init=dict( + argstr="-init %s", + extensions=None, + ), + interpolation=dict( + argstr="-%s_final", + ), + mean_vol=dict( + argstr="-meanvol", + ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, genfile=True, hash_files=False, ), output_type=dict(), - ref_file=dict(argstr='-reffile %s', ), - ref_vol=dict(argstr='-refvol %d', ), - rotation=dict(argstr='-rotation %d', ), - save_mats=dict(argstr='-mats', ), - save_plots=dict(argstr='-plots', ), - save_rms=dict(argstr='-rmsabs -rmsrel', ), - scaling=dict(argstr='-scaling %.2f', ), - smooth=dict(argstr='-smooth %.2f', ), - stages=dict(argstr='-stages %d', ), - stats_imgs=dict(argstr='-stats', ), - use_contour=dict(argstr='-edge', ), - use_gradient=dict(argstr='-gdt', ), + ref_file=dict( + argstr="-reffile %s", + extensions=None, + ), + ref_vol=dict( + argstr="-refvol %d", + ), + rotation=dict( + argstr="-rotation %d", + ), + save_mats=dict( + argstr="-mats", + ), + save_plots=dict( + argstr="-plots", + ), + save_rms=dict( + argstr="-rmsabs -rmsrel", + ), + scaling=dict( + argstr="-scaling %.2f", + ), + smooth=dict( + argstr="-smooth %.2f", + ), + stages=dict( + argstr="-stages %d", + ), + stats_imgs=dict( + argstr="-stats", + ), + use_contour=dict( + argstr="-edge", + ), + use_gradient=dict( + argstr="-gdt", + ), ) inputs = MCFLIRT.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MCFLIRT_outputs(): output_map = dict( mat_file=dict(), - mean_img=dict(), - out_file=dict(), - par_file=dict(), + mean_img=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + par_file=dict( + extensions=None, + ), rms_files=dict(), - std_img=dict(), - variance_img=dict(), + std_img=dict( + extensions=None, + ), + variance_img=dict( + extensions=None, + ), ) outputs = MCFLIRT.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_MELODIC.py b/nipype/interfaces/fsl/tests/test_auto_MELODIC.py index b22078c450..db2406e30f 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MELODIC.py +++ b/nipype/interfaces/fsl/tests/test_auto_MELODIC.py @@ -1,78 +1,182 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import MELODIC def test_MELODIC_inputs(): input_map = dict( - ICs=dict(argstr='--ICs=%s', ), - approach=dict(argstr='-a %s', ), - args=dict(argstr='%s', ), - bg_image=dict(argstr='--bgimage=%s', ), - bg_threshold=dict(argstr='--bgthreshold=%f', ), - cov_weight=dict(argstr='--covarweight=%f', ), - dim=dict(argstr='-d %d', ), - dim_est=dict(argstr='--dimest=%s', ), + ICs=dict( + argstr="--ICs=%s", + extensions=None, + ), + approach=dict( + argstr="-a %s", + ), + args=dict( + argstr="%s", + ), + bg_image=dict( + argstr="--bgimage=%s", + extensions=None, + ), + bg_threshold=dict( + argstr="--bgthreshold=%f", + ), + cov_weight=dict( + argstr="--covarweight=%f", + ), + dim=dict( + argstr="-d %d", + ), + dim_est=dict( + argstr="--dimest=%s", + ), environ=dict( nohash=True, usedefault=True, ), - epsilon=dict(argstr='--eps=%f', ), - epsilonS=dict(argstr='--epsS=%f', ), + epsilon=dict( + argstr="--eps=%f", + ), + epsilonS=dict( + argstr="--epsS=%f", + ), in_files=dict( - argstr='-i %s', + argstr="-i %s", mandatory=True, position=0, 
- sep=',', - ), - log_power=dict(argstr='--logPower', ), - mask=dict(argstr='-m %s', ), - max_restart=dict(argstr='--maxrestart=%d', ), - maxit=dict(argstr='--maxit=%d', ), - migp=dict(argstr='--migp', ), - migpN=dict(argstr='--migpN %d', ), - migp_factor=dict(argstr='--migp_factor %d', ), - migp_shuffle=dict(argstr='--migp_shuffle', ), - mix=dict(argstr='--mix=%s', ), - mm_thresh=dict(argstr='--mmthresh=%f', ), - no_bet=dict(argstr='--nobet', ), - no_mask=dict(argstr='--nomask', ), - no_mm=dict(argstr='--no_mm', ), - non_linearity=dict(argstr='--nl=%s', ), - num_ICs=dict(argstr='-n %d', ), - out_all=dict(argstr='--Oall', ), + sep=",", + ), + log_power=dict( + argstr="--logPower", + ), + mask=dict( + argstr="-m %s", + extensions=None, + ), + max_restart=dict( + argstr="--maxrestart=%d", + ), + maxit=dict( + argstr="--maxit=%d", + ), + migp=dict( + argstr="--migp", + ), + migpN=dict( + argstr="--migpN %d", + ), + migp_factor=dict( + argstr="--migp_factor %d", + ), + migp_shuffle=dict( + argstr="--migp_shuffle", + ), + mix=dict( + argstr="--mix=%s", + extensions=None, + ), + mm_thresh=dict( + argstr="--mmthresh=%f", + ), + no_bet=dict( + argstr="--nobet", + ), + no_mask=dict( + argstr="--nomask", + ), + no_mm=dict( + argstr="--no_mm", + ), + non_linearity=dict( + argstr="--nl=%s", + ), + num_ICs=dict( + argstr="-n %d", + ), + out_all=dict( + argstr="--Oall", + ), out_dir=dict( - argstr='-o %s', + argstr="-o %s", genfile=True, ), - out_mean=dict(argstr='--Omean', ), - out_orig=dict(argstr='--Oorig', ), - out_pca=dict(argstr='--Opca', ), - out_stats=dict(argstr='--Ostats', ), - out_unmix=dict(argstr='--Ounmix', ), - out_white=dict(argstr='--Owhite', ), + out_mean=dict( + argstr="--Omean", + ), + out_orig=dict( + argstr="--Oorig", + ), + out_pca=dict( + argstr="--Opca", + ), + out_stats=dict( + argstr="--Ostats", + ), + out_unmix=dict( + argstr="--Ounmix", + ), + out_white=dict( + argstr="--Owhite", + ), output_type=dict(), - pbsc=dict(argstr='--pbsc', ), - rem_cmp=dict(argstr='-f %d', ), - remove_deriv=dict(argstr='--remove_deriv', ), - report=dict(argstr='--report', ), - report_maps=dict(argstr='--report_maps=%s', ), - s_con=dict(argstr='--Scon=%s', ), - s_des=dict(argstr='--Sdes=%s', ), - sep_vn=dict(argstr='--sep_vn', ), - sep_whiten=dict(argstr='--sep_whiten', ), - smode=dict(argstr='--smode=%s', ), - t_con=dict(argstr='--Tcon=%s', ), - t_des=dict(argstr='--Tdes=%s', ), - tr_sec=dict(argstr='--tr=%f', ), - update_mask=dict(argstr='--update_mask', ), - var_norm=dict(argstr='--vn', ), + pbsc=dict( + argstr="--pbsc", + ), + rem_cmp=dict( + argstr="-f %d", + ), + remove_deriv=dict( + argstr="--remove_deriv", + ), + report=dict( + argstr="--report", + ), + report_maps=dict( + argstr="--report_maps=%s", + ), + s_con=dict( + argstr="--Scon=%s", + extensions=None, + ), + s_des=dict( + argstr="--Sdes=%s", + extensions=None, + ), + sep_vn=dict( + argstr="--sep_vn", + ), + sep_whiten=dict( + argstr="--sep_whiten", + ), + smode=dict( + argstr="--smode=%s", + extensions=None, + ), + t_con=dict( + argstr="--Tcon=%s", + extensions=None, + ), + t_des=dict( + argstr="--Tdes=%s", + extensions=None, + ), + tr_sec=dict( + argstr="--tr=%f", + ), + update_mask=dict( + argstr="--update_mask", + ), + var_norm=dict( + argstr="--vn", + ), ) inputs = MELODIC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MELODIC_outputs(): output_map = dict( out_dir=dict(), diff --git 
a/nipype/interfaces/fsl/tests/test_auto_MakeDyadicVectors.py b/nipype/interfaces/fsl/tests/test_auto_MakeDyadicVectors.py index c7a5a3e465..bfdb32146e 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MakeDyadicVectors.py +++ b/nipype/interfaces/fsl/tests/test_auto_MakeDyadicVectors.py @@ -1,37 +1,42 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import MakeDyadicVectors def test_MakeDyadicVectors_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), mask=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, ), output=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, position=3, usedefault=True, ), output_type=dict(), perc=dict( - argstr='%f', + argstr="%f", position=4, ), phi_vol=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), theta_vol=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, ), @@ -41,10 +46,16 @@ def test_MakeDyadicVectors_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MakeDyadicVectors_outputs(): output_map = dict( - dispersion=dict(), - dyads=dict(), + dispersion=dict( + extensions=None, + ), + dyads=dict( + extensions=None, + ), ) outputs = MakeDyadicVectors.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_MathsCommand.py b/nipype/interfaces/fsl/tests/test_auto_MathsCommand.py index 64b0f8b089..e14e4a4005 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MathsCommand.py +++ b/nipype/interfaces/fsl/tests/test_auto_MathsCommand.py @@ -1,36 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import MathsCommand def test_MathsCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), @@ -40,8 +43,14 @@ def test_MathsCommand_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MathsCommand_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MathsCommand.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MaxImage.py b/nipype/interfaces/fsl/tests/test_auto_MaxImage.py index 9910f9c4e8..f96f931fcf 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MaxImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_MaxImage.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import MaxImage def test_MaxImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='-%smax', + argstr="-%smax", position=4, usedefault=True, ), @@ -16,26 +17,28 @@ def 
test_MaxImage_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), @@ -45,8 +48,14 @@ def test_MaxImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MaxImage_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MaxImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MaxnImage.py b/nipype/interfaces/fsl/tests/test_auto_MaxnImage.py index f49c5f462c..30ada25d79 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MaxnImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_MaxnImage.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import MaxnImage def test_MaxnImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='-%smaxn', + argstr="-%smaxn", position=4, usedefault=True, ), @@ -16,26 +17,28 @@ def test_MaxnImage_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), @@ -45,8 +48,14 @@ def test_MaxnImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MaxnImage_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MaxnImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MeanImage.py b/nipype/interfaces/fsl/tests/test_auto_MeanImage.py index 2172dcfa9e..e29104476c 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MeanImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_MeanImage.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import MeanImage def test_MeanImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='-%smean', + argstr="-%smean", position=4, usedefault=True, ), @@ -16,26 +17,28 @@ def test_MeanImage_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), 
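Alongside the quoting changes, nearly every File-typed entry in these maps gains extensions=None. This records newer File-trait metadata: None places no restriction on the filename's suffix, while a list of suffixes would constrain it. A hedged sketch of how such a trait might be declared in an input spec (the spec class and field names here are illustrative, not taken from this patch):

    from nipype.interfaces.base import CommandLineInputSpec, File

    class ExampleInputSpec(CommandLineInputSpec):
        # extensions=None leaves the filename unconstrained; something like
        # extensions=[".nii", ".nii.gz"] would restrict accepted suffixes.
        in_file = File(
            exists=True,
            argstr="%s",
            extensions=None,
            mandatory=True,
            position=2,
        )

The auto-generated maps simply mirror whatever the spec declares, which is why extensions=None now appears on inputs and outputs alike in the hunks that follow.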
output_type=dict(), @@ -45,8 +48,14 @@ def test_MeanImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MeanImage_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MeanImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MedianImage.py b/nipype/interfaces/fsl/tests/test_auto_MedianImage.py index c14bf8d839..7c8052fd31 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MedianImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_MedianImage.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import MedianImage def test_MedianImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='-%smedian', + argstr="-%smedian", position=4, usedefault=True, ), @@ -16,26 +17,28 @@ def test_MedianImage_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), @@ -45,8 +48,14 @@ def test_MedianImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedianImage_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MedianImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Merge.py b/nipype/interfaces/fsl/tests/test_auto_Merge.py index 826270239e..847f9b7bd3 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Merge.py +++ b/nipype/interfaces/fsl/tests/test_auto_Merge.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Merge def test_Merge_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='-%s', + argstr="-%s", mandatory=True, position=0, ), @@ -16,20 +17,21 @@ def test_Merge_inputs(): usedefault=True, ), in_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=2, ), merged_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, - name_source='in_files', - name_template='%s_merged', + name_source="in_files", + name_template="%s_merged", position=1, ), output_type=dict(), tr=dict( - argstr='%.2f', + argstr="%.2f", position=-1, ), ) @@ -38,8 +40,14 @@ def test_Merge_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Merge_outputs(): - output_map = dict(merged_file=dict(), ) + output_map = dict( + merged_file=dict( + extensions=None, + ), + ) outputs = Merge.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MinImage.py b/nipype/interfaces/fsl/tests/test_auto_MinImage.py index 4e9002c259..bde76c1afc 
100644 --- a/nipype/interfaces/fsl/tests/test_auto_MinImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_MinImage.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import MinImage def test_MinImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='-%smin', + argstr="-%smin", position=4, usedefault=True, ), @@ -16,26 +17,28 @@ def test_MinImage_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), @@ -45,8 +48,14 @@ def test_MinImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MinImage_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MinImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MotionOutliers.py b/nipype/interfaces/fsl/tests/test_auto_MotionOutliers.py index 97d2426b53..9a5773336f 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MotionOutliers.py +++ b/nipype/interfaces/fsl/tests/test_auto_MotionOutliers.py @@ -1,57 +1,81 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MotionOutliers def test_MotionOutliers_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dummy=dict(argstr='--dummy=%d', ), + args=dict( + argstr="%s", + ), + dummy=dict( + argstr="--dummy=%d", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), - mask=dict(argstr='-m %s', ), - metric=dict(argstr='--%s', ), - no_motion_correction=dict(argstr='--nomoco', ), + mask=dict( + argstr="-m %s", + extensions=None, + ), + metric=dict( + argstr="--%s", + ), + no_motion_correction=dict( + argstr="--nomoco", + ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, hash_files=False, keep_extension=True, - name_source='in_file', - name_template='%s_outliers.txt', + name_source="in_file", + name_template="%s_outliers.txt", ), out_metric_plot=dict( - argstr='-p %s', + argstr="-p %s", + extensions=None, hash_files=False, keep_extension=True, - name_source='in_file', - name_template='%s_metrics.png', + name_source="in_file", + name_template="%s_metrics.png", ), out_metric_values=dict( - argstr='-s %s', + argstr="-s %s", + extensions=None, hash_files=False, keep_extension=True, - name_source='in_file', - name_template='%s_metrics.txt', + name_source="in_file", + name_template="%s_metrics.txt", ), output_type=dict(), - threshold=dict(argstr='--thresh=%g', ), + threshold=dict( + argstr="--thresh=%g", + ), ) inputs = MotionOutliers.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MotionOutliers_outputs(): output_map = dict( - out_file=dict(), - out_metric_plot=dict(), - out_metric_values=dict(), + out_file=dict( + 
extensions=None, + ), + out_metric_plot=dict( + extensions=None, + ), + out_metric_values=dict( + extensions=None, + ), ) outputs = MotionOutliers.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_MultiImageMaths.py b/nipype/interfaces/fsl/tests/test_auto_MultiImageMaths.py index 50a2977a4c..95de40d023 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MultiImageMaths.py +++ b/nipype/interfaces/fsl/tests/test_auto_MultiImageMaths.py @@ -1,42 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import MultiImageMaths def test_MultiImageMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), op_string=dict( - argstr='%s', + argstr="%s", mandatory=True, position=4, ), - operand_files=dict(mandatory=True, ), + operand_files=dict( + mandatory=True, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), @@ -46,8 +51,14 @@ def test_MultiImageMaths_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultiImageMaths_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MultiImageMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MultipleRegressDesign.py b/nipype/interfaces/fsl/tests/test_auto_MultipleRegressDesign.py index 78bd97ff85..cae5e90cd4 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MultipleRegressDesign.py +++ b/nipype/interfaces/fsl/tests/test_auto_MultipleRegressDesign.py @@ -1,25 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import MultipleRegressDesign def test_MultipleRegressDesign_inputs(): input_map = dict( - contrasts=dict(mandatory=True, ), + contrasts=dict( + mandatory=True, + ), groups=dict(), - regressors=dict(mandatory=True, ), + regressors=dict( + mandatory=True, + ), ) inputs = MultipleRegressDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultipleRegressDesign_outputs(): output_map = dict( - design_con=dict(), - design_fts=dict(), - design_grp=dict(), - design_mat=dict(), + design_con=dict( + extensions=None, + ), + design_fts=dict( + extensions=None, + ), + design_grp=dict( + extensions=None, + ), + design_mat=dict( + extensions=None, + ), ) outputs = MultipleRegressDesign.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_Overlay.py b/nipype/interfaces/fsl/tests/test_auto_Overlay.py index 9818ae1eda..22c4f08a44 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Overlay.py +++ b/nipype/interfaces/fsl/tests/test_auto_Overlay.py @@ -1,81 +1,86 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Overlay def test_Overlay_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + 
argstr="%s", + ), auto_thresh_bg=dict( - argstr='-a', + argstr="-a", mandatory=True, position=5, - xor=('auto_thresh_bg', 'full_bg_range', 'bg_thresh'), + xor=("auto_thresh_bg", "full_bg_range", "bg_thresh"), ), background_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=4, ), bg_thresh=dict( - argstr='%.3f %.3f', + argstr="%.3f %.3f", mandatory=True, position=5, - xor=('auto_thresh_bg', 'full_bg_range', 'bg_thresh'), + xor=("auto_thresh_bg", "full_bg_range", "bg_thresh"), ), environ=dict( nohash=True, usedefault=True, ), full_bg_range=dict( - argstr='-A', + argstr="-A", mandatory=True, position=5, - xor=('auto_thresh_bg', 'full_bg_range', 'bg_thresh'), + xor=("auto_thresh_bg", "full_bg_range", "bg_thresh"), ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-1, ), out_type=dict( - argstr='%s', + argstr="%s", position=2, usedefault=True, ), output_type=dict(), show_negative_stats=dict( - argstr='%s', + argstr="%s", position=8, - xor=['stat_image2'], + xor=["stat_image2"], ), stat_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=6, ), stat_image2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=9, - xor=['show_negative_stats'], + xor=["show_negative_stats"], ), stat_thresh=dict( - argstr='%.2f %.2f', + argstr="%.2f %.2f", mandatory=True, position=7, ), stat_thresh2=dict( - argstr='%.2f %.2f', + argstr="%.2f %.2f", position=10, ), transparency=dict( - argstr='%s', + argstr="%s", position=1, usedefault=True, ), use_checkerboard=dict( - argstr='-c', + argstr="-c", position=3, ), ) @@ -84,8 +89,14 @@ def test_Overlay_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Overlay_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Overlay.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py b/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py index 328a8e3272..0194526c70 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py +++ b/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py @@ -1,60 +1,83 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import PRELUDE def test_PRELUDE_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), complex_phase_file=dict( - argstr='--complex=%s', + argstr="--complex=%s", + extensions=None, mandatory=True, - xor=['magnitude_file', 'phase_file'], + xor=["magnitude_file", "phase_file"], + ), + end=dict( + argstr="--end=%d", ), - end=dict(argstr='--end=%d', ), environ=dict( nohash=True, usedefault=True, ), label_file=dict( - argstr='--labels=%s', + argstr="--labels=%s", + extensions=None, hash_files=False, ), - labelprocess2d=dict(argstr='--labelslices', ), + labelprocess2d=dict( + argstr="--labelslices", + ), magnitude_file=dict( - argstr='--abs=%s', + argstr="--abs=%s", + extensions=None, mandatory=True, - xor=['complex_phase_file'], + xor=["complex_phase_file"], + ), + mask_file=dict( + argstr="--mask=%s", + extensions=None, + ), + num_partitions=dict( + argstr="--numphasesplit=%d", ), - mask_file=dict(argstr='--mask=%s', ), - num_partitions=dict(argstr='--numphasesplit=%d', ), output_type=dict(), phase_file=dict( - argstr='--phase=%s', + argstr="--phase=%s", + 
extensions=None, mandatory=True, - xor=['complex_phase_file'], + xor=["complex_phase_file"], ), process2d=dict( - argstr='--slices', - xor=['labelprocess2d'], + argstr="--slices", + xor=["labelprocess2d"], ), process3d=dict( - argstr='--force3D', - xor=['labelprocess2d', 'process2d'], + argstr="--force3D", + xor=["labelprocess2d", "process2d"], ), rawphase_file=dict( - argstr='--rawphase=%s', + argstr="--rawphase=%s", + extensions=None, hash_files=False, ), - removeramps=dict(argstr='--removeramps', ), + removeramps=dict( + argstr="--removeramps", + ), savemask_file=dict( - argstr='--savemask=%s', + argstr="--savemask=%s", + extensions=None, hash_files=False, ), - start=dict(argstr='--start=%d', ), - threshold=dict(argstr='--thresh=%.10f', ), + start=dict( + argstr="--start=%d", + ), + threshold=dict( + argstr="--thresh=%.10f", + ), unwrapped_phase_file=dict( - argstr='--unwrap=%s', + argstr="--unwrap=%s", + extensions=None, genfile=True, hash_files=False, ), @@ -64,8 +87,14 @@ def test_PRELUDE_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PRELUDE_outputs(): - output_map = dict(unwrapped_phase_file=dict(), ) + output_map = dict( + unwrapped_phase_file=dict( + extensions=None, + ), + ) outputs = PRELUDE.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PercentileImage.py b/nipype/interfaces/fsl/tests/test_auto_PercentileImage.py index 4e08c18db0..3a3ae14a78 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PercentileImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_PercentileImage.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import PercentileImage def test_PercentileImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), dimension=dict( - argstr='-%sperc', + argstr="-%sperc", position=4, usedefault=True, ), @@ -16,31 +17,33 @@ def test_PercentileImage_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), internal_datatype=dict( - argstr='-dt %s', + argstr="-dt %s", position=1, ), nan2zeros=dict( - argstr='-nan', + argstr="-nan", position=3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-1, ), output_type=dict(), perc=dict( - argstr='%f', + argstr="%f", position=5, ), ) @@ -49,8 +52,14 @@ def test_PercentileImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PercentileImage_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = PercentileImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PlotMotionParams.py b/nipype/interfaces/fsl/tests/test_auto_PlotMotionParams.py index 45a5b43945..8cf1d2e214 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PlotMotionParams.py +++ b/nipype/interfaces/fsl/tests/test_auto_PlotMotionParams.py @@ -1,30 +1,36 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import PlotMotionParams def test_PlotMotionParams_inputs(): input_map = 
dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", mandatory=True, position=1, ), - in_source=dict(mandatory=True, ), + in_source=dict( + mandatory=True, + ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, hash_files=False, ), output_type=dict(), - plot_size=dict(argstr='%s', ), + plot_size=dict( + argstr="%s", + ), plot_type=dict( - argstr='%s', + argstr="%s", mandatory=True, ), ) @@ -33,8 +39,14 @@ def test_PlotMotionParams_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PlotMotionParams_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = PlotMotionParams.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py b/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py index 62ced498e1..5b4ebc46aa 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py +++ b/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py @@ -1,59 +1,74 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import PlotTimeSeries def test_PlotTimeSeries_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", mandatory=True, position=1, ), - labels=dict(argstr='%s', ), - legend_file=dict(argstr='--legend=%s', ), + labels=dict( + argstr="%s", + ), + legend_file=dict( + argstr="--legend=%s", + extensions=None, + ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, hash_files=False, ), output_type=dict(), plot_finish=dict( - argstr='--finish=%d', - xor=('plot_range', ), + argstr="--finish=%d", + xor=("plot_range",), ), plot_range=dict( - argstr='%s', - xor=('plot_start', 'plot_finish'), + argstr="%s", + xor=("plot_start", "plot_finish"), + ), + plot_size=dict( + argstr="%s", ), - plot_size=dict(argstr='%s', ), plot_start=dict( - argstr='--start=%d', - xor=('plot_range', ), + argstr="--start=%d", + xor=("plot_range",), + ), + sci_notation=dict( + argstr="--sci", + ), + title=dict( + argstr="%s", + ), + x_precision=dict( + argstr="--precision=%d", ), - sci_notation=dict(argstr='--sci', ), - title=dict(argstr='%s', ), - x_precision=dict(argstr='--precision=%d', ), x_units=dict( - argstr='-u %d', + argstr="-u %d", usedefault=True, ), y_max=dict( - argstr='--ymax=%.2f', - xor=('y_range', ), + argstr="--ymax=%.2f", + xor=("y_range",), ), y_min=dict( - argstr='--ymin=%.2f', - xor=('y_range', ), + argstr="--ymin=%.2f", + xor=("y_range",), ), y_range=dict( - argstr='%s', - xor=('y_min', 'y_max'), + argstr="%s", + xor=("y_min", "y_max"), ), ) inputs = PlotTimeSeries.input_spec() @@ -61,8 +76,14 @@ def test_PlotTimeSeries_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PlotTimeSeries_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = PlotTimeSeries.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PowerSpectrum.py 
b/nipype/interfaces/fsl/tests/test_auto_PowerSpectrum.py index 18a812c00b..874cbcf0e8 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PowerSpectrum.py +++ b/nipype/interfaces/fsl/tests/test_auto_PowerSpectrum.py @@ -1,22 +1,25 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import PowerSpectrum def test_PowerSpectrum_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, position=1, @@ -28,8 +31,14 @@ def test_PowerSpectrum_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PowerSpectrum_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = PowerSpectrum.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PrepareFieldmap.py b/nipype/interfaces/fsl/tests/test_auto_PrepareFieldmap.py index 66bfd51fab..2286dad026 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PrepareFieldmap.py +++ b/nipype/interfaces/fsl/tests/test_auto_PrepareFieldmap.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..epi import PrepareFieldmap def test_PrepareFieldmap_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), delta_TE=dict( - argstr='%f', + argstr="%f", mandatory=True, position=-2, usedefault=True, @@ -17,27 +18,30 @@ def test_PrepareFieldmap_inputs(): usedefault=True, ), in_magnitude=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=3, ), in_phase=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), nocheck=dict( - argstr='--nocheck', + argstr="--nocheck", position=-1, usedefault=True, ), out_fieldmap=dict( - argstr='%s', + argstr="%s", + extensions=None, position=4, ), output_type=dict(), scanner=dict( - argstr='%s', + argstr="%s", position=1, usedefault=True, ), @@ -47,8 +51,14 @@ def test_PrepareFieldmap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PrepareFieldmap_outputs(): - output_map = dict(out_fieldmap=dict(), ) + output_map = dict( + out_fieldmap=dict( + extensions=None, + ), + ) outputs = PrepareFieldmap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ProbTrackX.py b/nipype/interfaces/fsl/tests/test_auto_ProbTrackX.py index 5368d0dd37..c841391efc 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ProbTrackX.py +++ b/nipype/interfaces/fsl/tests/test_auto_ProbTrackX.py @@ -1,89 +1,161 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import ProbTrackX def test_ProbTrackX_inputs(): input_map = dict( - args=dict(argstr='%s', ), - avoid_mp=dict(argstr='--avoid=%s', ), - c_thresh=dict(argstr='--cthr=%.3f', ), - correct_path_distribution=dict(argstr='--pd', ), - dist_thresh=dict(argstr='--distthresh=%.3f', ), + args=dict( + argstr="%s", + ), + avoid_mp=dict( + argstr="--avoid=%s", + extensions=None, + ), + 
c_thresh=dict( + argstr="--cthr=%.3f", + ), + correct_path_distribution=dict( + argstr="--pd", + ), + dist_thresh=dict( + argstr="--distthresh=%.3f", + ), environ=dict( nohash=True, usedefault=True, ), - fibst=dict(argstr='--fibst=%d', ), + fibst=dict( + argstr="--fibst=%d", + ), force_dir=dict( - argstr='--forcedir', + argstr="--forcedir", usedefault=True, ), - fsamples=dict(mandatory=True, ), - inv_xfm=dict(argstr='--invxfm=%s', ), - loop_check=dict(argstr='--loopcheck', ), + fsamples=dict( + mandatory=True, + ), + inv_xfm=dict( + argstr="--invxfm=%s", + extensions=None, + ), + loop_check=dict( + argstr="--loopcheck", + ), mask=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, mandatory=True, ), - mask2=dict(argstr='--mask2=%s', ), - mesh=dict(argstr='--mesh=%s', ), - mod_euler=dict(argstr='--modeuler', ), + mask2=dict( + argstr="--mask2=%s", + extensions=None, + ), + mesh=dict( + argstr="--mesh=%s", + extensions=None, + ), + mod_euler=dict( + argstr="--modeuler", + ), mode=dict( - argstr='--mode=%s', + argstr="--mode=%s", genfile=True, ), n_samples=dict( - argstr='--nsamples=%d', + argstr="--nsamples=%d", usedefault=True, ), - n_steps=dict(argstr='--nsteps=%d', ), - network=dict(argstr='--network', ), + n_steps=dict( + argstr="--nsteps=%d", + ), + network=dict( + argstr="--network", + ), opd=dict( - argstr='--opd', + argstr="--opd", usedefault=True, ), - os2t=dict(argstr='--os2t', ), + os2t=dict( + argstr="--os2t", + ), out_dir=dict( - argstr='--dir=%s', + argstr="--dir=%s", genfile=True, ), output_type=dict(), - phsamples=dict(mandatory=True, ), - rand_fib=dict(argstr='--randfib=%d', ), - random_seed=dict(argstr='--rseed', ), - s2tastext=dict(argstr='--s2tastext', ), - sample_random_points=dict(argstr='--sampvox', ), + phsamples=dict( + mandatory=True, + ), + rand_fib=dict( + argstr="--randfib=%d", + ), + random_seed=dict( + argstr="--rseed=%d", + ), + s2tastext=dict( + argstr="--s2tastext", + ), + sample_random_points=dict( + argstr="--sampvox=%.3f", + ), samples_base_name=dict( - argstr='--samples=%s', + argstr="--samples=%s", usedefault=True, ), seed=dict( - argstr='--seed=%s', + argstr="--seed=%s", + mandatory=True, + ), + seed_ref=dict( + argstr="--seedref=%s", + extensions=None, + ), + step_length=dict( + argstr="--steplength=%.3f", + ), + stop_mask=dict( + argstr="--stop=%s", + extensions=None, + ), + target_masks=dict( + argstr="--targetmasks=%s", + ), + thsamples=dict( mandatory=True, ), - seed_ref=dict(argstr='--seedref=%s', ), - step_length=dict(argstr='--steplength=%.3f', ), - stop_mask=dict(argstr='--stop=%s', ), - target_masks=dict(argstr='--targetmasks=%s', ), - thsamples=dict(mandatory=True, ), - use_anisotropy=dict(argstr='--usef', ), - verbose=dict(argstr='--verbose=%d', ), - waypoints=dict(argstr='--waypoints=%s', ), - xfm=dict(argstr='--xfm=%s', ), + use_anisotropy=dict( + argstr="--usef", + ), + verbose=dict( + argstr="--verbose=%d", + ), + waypoints=dict( + argstr="--waypoints=%s", + extensions=None, + ), + xfm=dict( + argstr="--xfm=%s", + extensions=None, + ), ) inputs = ProbTrackX.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProbTrackX_outputs(): output_map = dict( fdt_paths=dict(), - log=dict(), + log=dict( + extensions=None, + ), particle_files=dict(), targets=dict(), - way_total=dict(), + way_total=dict( + extensions=None, + ), ) outputs = ProbTrackX.output_spec() diff --git 
a/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py b/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py index 8592b5ae1a..f1941f036d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py +++ b/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py @@ -1,111 +1,223 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import ProbTrackX2 def test_ProbTrackX2_inputs(): input_map = dict( - args=dict(argstr='%s', ), - avoid_mp=dict(argstr='--avoid=%s', ), - c_thresh=dict(argstr='--cthr=%.3f', ), - colmask4=dict(argstr='--colmask4=%s', ), - correct_path_distribution=dict(argstr='--pd', ), - dist_thresh=dict(argstr='--distthresh=%.3f', ), - distthresh1=dict(argstr='--distthresh1=%.3f', ), - distthresh3=dict(argstr='--distthresh3=%.3f', ), + args=dict( + argstr="%s", + ), + avoid_mp=dict( + argstr="--avoid=%s", + extensions=None, + ), + c_thresh=dict( + argstr="--cthr=%.3f", + ), + colmask4=dict( + argstr="--colmask4=%s", + extensions=None, + ), + correct_path_distribution=dict( + argstr="--pd", + ), + dist_thresh=dict( + argstr="--distthresh=%.3f", + ), + distthresh1=dict( + argstr="--distthresh1=%.3f", + ), + distthresh3=dict( + argstr="--distthresh3=%.3f", + ), environ=dict( nohash=True, usedefault=True, ), - fibst=dict(argstr='--fibst=%d', ), - fopd=dict(argstr='--fopd=%s', ), + fibst=dict( + argstr="--fibst=%d", + ), + fopd=dict( + argstr="--fopd=%s", + extensions=None, + ), force_dir=dict( - argstr='--forcedir', + argstr="--forcedir", usedefault=True, ), - fsamples=dict(mandatory=True, ), - inv_xfm=dict(argstr='--invxfm=%s', ), - loop_check=dict(argstr='--loopcheck', ), - lrtarget3=dict(argstr='--lrtarget3=%s', ), + fsamples=dict( + mandatory=True, + ), + inv_xfm=dict( + argstr="--invxfm=%s", + extensions=None, + ), + loop_check=dict( + argstr="--loopcheck", + ), + lrtarget3=dict( + argstr="--lrtarget3=%s", + extensions=None, + ), mask=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, mandatory=True, ), - meshspace=dict(argstr='--meshspace=%s', ), - mod_euler=dict(argstr='--modeuler', ), + meshspace=dict( + argstr="--meshspace=%s", + ), + mod_euler=dict( + argstr="--modeuler", + ), n_samples=dict( - argstr='--nsamples=%d', + argstr="--nsamples=%d", usedefault=True, ), - n_steps=dict(argstr='--nsteps=%d', ), - network=dict(argstr='--network', ), - omatrix1=dict(argstr='--omatrix1', ), + n_steps=dict( + argstr="--nsteps=%d", + ), + network=dict( + argstr="--network", + ), + omatrix1=dict( + argstr="--omatrix1", + ), omatrix2=dict( - argstr='--omatrix2', - requires=['target2'], + argstr="--omatrix2", + requires=["target2"], ), omatrix3=dict( - argstr='--omatrix3', - requires=['target3', 'lrtarget3'], + argstr="--omatrix3", + requires=["target3", "lrtarget3"], + ), + omatrix4=dict( + argstr="--omatrix4", + ), + onewaycondition=dict( + argstr="--onewaycondition", ), - omatrix4=dict(argstr='--omatrix4', ), - onewaycondition=dict(argstr='--onewaycondition', ), opd=dict( - argstr='--opd', + argstr="--opd", usedefault=True, ), - os2t=dict(argstr='--os2t', ), + os2t=dict( + argstr="--os2t", + ), out_dir=dict( - argstr='--dir=%s', + argstr="--dir=%s", genfile=True, ), output_type=dict(), - phsamples=dict(mandatory=True, ), - rand_fib=dict(argstr='--randfib=%d', ), - random_seed=dict(argstr='--rseed', ), - s2tastext=dict(argstr='--s2tastext', ), - sample_random_points=dict(argstr='--sampvox', ), + phsamples=dict( + mandatory=True, + ), + rand_fib=dict( + argstr="--randfib=%d", + ), + random_seed=dict( + 
argstr="--rseed=%d", + ), + s2tastext=dict( + argstr="--s2tastext", + ), + sample_random_points=dict( + argstr="--sampvox=%.3f", + ), samples_base_name=dict( - argstr='--samples=%s', + argstr="--samples=%s", usedefault=True, ), seed=dict( - argstr='--seed=%s', + argstr="--seed=%s", + mandatory=True, + ), + seed_ref=dict( + argstr="--seedref=%s", + extensions=None, + ), + simple=dict( + argstr="--simple", + ), + step_length=dict( + argstr="--steplength=%.3f", + ), + stop_mask=dict( + argstr="--stop=%s", + extensions=None, + ), + target2=dict( + argstr="--target2=%s", + extensions=None, + ), + target3=dict( + argstr="--target3=%s", + extensions=None, + ), + target4=dict( + argstr="--target4=%s", + extensions=None, + ), + target_masks=dict( + argstr="--targetmasks=%s", + ), + thsamples=dict( mandatory=True, ), - seed_ref=dict(argstr='--seedref=%s', ), - simple=dict(argstr='--simple', ), - step_length=dict(argstr='--steplength=%.3f', ), - stop_mask=dict(argstr='--stop=%s', ), - target2=dict(argstr='--target2=%s', ), - target3=dict(argstr='--target3=%s', ), - target4=dict(argstr='--target4=%s', ), - target_masks=dict(argstr='--targetmasks=%s', ), - thsamples=dict(mandatory=True, ), - use_anisotropy=dict(argstr='--usef', ), - verbose=dict(argstr='--verbose=%d', ), - waycond=dict(argstr='--waycond=%s', ), - wayorder=dict(argstr='--wayorder', ), - waypoints=dict(argstr='--waypoints=%s', ), - xfm=dict(argstr='--xfm=%s', ), + use_anisotropy=dict( + argstr="--usef", + ), + verbose=dict( + argstr="--verbose=%d", + ), + waycond=dict( + argstr="--waycond=%s", + ), + wayorder=dict( + argstr="--wayorder", + ), + waypoints=dict( + argstr="--waypoints=%s", + extensions=None, + ), + xfm=dict( + argstr="--xfm=%s", + extensions=None, + ), ) inputs = ProbTrackX2.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProbTrackX2_outputs(): output_map = dict( fdt_paths=dict(), - log=dict(), - lookup_tractspace=dict(), - matrix1_dot=dict(), - matrix2_dot=dict(), - matrix3_dot=dict(), - network_matrix=dict(), + log=dict( + extensions=None, + ), + lookup_tractspace=dict( + extensions=None, + ), + matrix1_dot=dict( + extensions=None, + ), + matrix2_dot=dict( + extensions=None, + ), + matrix3_dot=dict( + extensions=None, + ), + network_matrix=dict( + extensions=None, + ), particle_files=dict(), targets=dict(), - way_total=dict(), + way_total=dict( + extensions=None, + ), ) outputs = ProbTrackX2.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_ProjThresh.py b/nipype/interfaces/fsl/tests/test_auto_ProjThresh.py index dc3878e2c0..420eacb9c2 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ProjThresh.py +++ b/nipype/interfaces/fsl/tests/test_auto_ProjThresh.py @@ -1,23 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dti import ProjThresh def test_ProjThresh_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=0, ), output_type=dict(), threshold=dict( - argstr='%d', + argstr="%d", mandatory=True, position=1, ), @@ -27,8 +28,12 @@ def test_ProjThresh_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProjThresh_outputs(): - output_map = dict(out_files=dict(), ) 
+ output_map = dict( + out_files=dict(), + ) outputs = ProjThresh.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Randomise.py b/nipype/interfaces/fsl/tests/test_auto_Randomise.py index 3d52347265..9b0b74bf28 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Randomise.py +++ b/nipype/interfaces/fsl/tests/test_auto_Randomise.py @@ -1,63 +1,118 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Randomise def test_Randomise_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), base_name=dict( argstr='-o "%s"', position=1, usedefault=True, ), - c_thresh=dict(argstr='-c %.1f', ), - cm_thresh=dict(argstr='-C %.1f', ), - demean=dict(argstr='-D', ), + c_thresh=dict( + argstr="-c %.1f", + ), + cm_thresh=dict( + argstr="-C %.1f", + ), + demean=dict( + argstr="-D", + ), design_mat=dict( - argstr='-d %s', + argstr="-d %s", + extensions=None, position=2, ), environ=dict( nohash=True, usedefault=True, ), - f_c_thresh=dict(argstr='-F %.2f', ), - f_cm_thresh=dict(argstr='-S %.2f', ), - f_only=dict(argstr='--f_only', ), - fcon=dict(argstr='-f %s', ), + f_c_thresh=dict( + argstr="-F %.2f", + ), + f_cm_thresh=dict( + argstr="-S %.2f", + ), + f_only=dict( + argstr="--fonly", + ), + fcon=dict( + argstr="-f %s", + extensions=None, + ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, position=0, ), - mask=dict(argstr='-m %s', ), - num_perm=dict(argstr='-n %d', ), - one_sample_group_mean=dict(argstr='-1', ), + mask=dict( + argstr="-m %s", + extensions=None, + ), + num_perm=dict( + argstr="-n %d", + ), + one_sample_group_mean=dict( + argstr="-1", + ), output_type=dict(), - p_vec_n_dist_files=dict(argstr='-P', ), - raw_stats_imgs=dict(argstr='-R', ), - seed=dict(argstr='--seed=%d', ), - show_info_parallel_mode=dict(argstr='-Q', ), - show_total_perms=dict(argstr='-q', ), + p_vec_n_dist_files=dict( + argstr="-P", + ), + raw_stats_imgs=dict( + argstr="-R", + ), + seed=dict( + argstr="--seed=%d", + ), + show_info_parallel_mode=dict( + argstr="-Q", + ), + show_total_perms=dict( + argstr="-q", + ), tcon=dict( - argstr='-t %s', + argstr="-t %s", + extensions=None, position=3, ), - tfce=dict(argstr='-T', ), - tfce2D=dict(argstr='--T2', ), - tfce_C=dict(argstr='--tfce_C=%.2f', ), - tfce_E=dict(argstr='--tfce_E=%.2f', ), - tfce_H=dict(argstr='--tfce_H=%.2f', ), - var_smooth=dict(argstr='-v %d', ), - vox_p_values=dict(argstr='-x', ), - x_block_labels=dict(argstr='-e %s', ), + tfce=dict( + argstr="-T", + ), + tfce2D=dict( + argstr="--T2", + ), + tfce_C=dict( + argstr="--tfce_C=%.2f", + ), + tfce_E=dict( + argstr="--tfce_E=%.2f", + ), + tfce_H=dict( + argstr="--tfce_H=%.2f", + ), + var_smooth=dict( + argstr="-v %d", + ), + vox_p_values=dict( + argstr="-x", + ), + x_block_labels=dict( + argstr="-e %s", + extensions=None, + ), ) inputs = Randomise.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Randomise_outputs(): output_map = dict( f_corrected_p_files=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_Reorient2Std.py b/nipype/interfaces/fsl/tests/test_auto_Reorient2Std.py index 76647d82d8..e008eb44e6 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Reorient2Std.py +++ b/nipype/interfaces/fsl/tests/test_auto_Reorient2Std.py @@ -1,21 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT 
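Several specs in this patch (Merge and MotionOutliers above, RobustFOV below) pair name_source with name_template, which lets nipype derive an output filename from another input instead of requiring the caller to choose one. A minimal sketch of that pairing, reusing the values recorded for RobustFOV below (the spec class itself is illustrative):

    from nipype.interfaces.base import CommandLineInputSpec, File

    class ExampleInputSpec(CommandLineInputSpec):
        in_file = File(exists=True, argstr="-i %s", mandatory=True, position=0)
        # If out_roi is left unset, nipype builds it by applying name_template
        # to the basename of in_file (e.g. sub01 -> sub01_ROI, with the image
        # extension handled by the interface), so the test maps record the
        # template metadata rather than a concrete filename.
        out_roi = File(
            argstr="-r %s",
            hash_files=False,
            name_source=["in_file"],
            name_template="%s_ROI",
        )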
-from __future__ import unicode_literals from ..utils import Reorient2Std def test_Reorient2Std_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, ), @@ -26,8 +29,14 @@ def test_Reorient2Std_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Reorient2Std_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Reorient2Std.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py b/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py index e1cbac6fc9..b5598f0de4 100644 --- a/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py +++ b/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py @@ -1,32 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import RobustFOV def test_RobustFOV_inputs(): input_map = dict( - args=dict(argstr='%s', ), - brainsize=dict(argstr='-b %d', ), + args=dict( + argstr="%s", + ), + brainsize=dict( + argstr="-b %d", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, position=0, ), out_roi=dict( - argstr='-r %s', + argstr="-r %s", + extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_ROI', + name_source=["in_file"], + name_template="%s_ROI", ), out_transform=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_to_ROI', + name_source=["in_file"], + name_template="%s_to_ROI", ), output_type=dict(), ) @@ -35,10 +41,16 @@ def test_RobustFOV_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RobustFOV_outputs(): output_map = dict( - out_roi=dict(), - out_transform=dict(), + out_roi=dict( + extensions=None, + ), + out_transform=dict( + extensions=None, + ), ) outputs = RobustFOV.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_SMM.py b/nipype/interfaces/fsl/tests/test_auto_SMM.py index 1e1cc308cc..51777eaed9 100644 --- a/nipype/interfaces/fsl/tests/test_auto_SMM.py +++ b/nipype/interfaces/fsl/tests/test_auto_SMM.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import SMM def test_SMM_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, @@ -13,17 +14,19 @@ def test_SMM_inputs(): mask=dict( argstr='--mask="%s"', copyfile=False, + extensions=None, mandatory=True, position=1, ), no_deactivation_class=dict( - argstr='--zfstatmode', + argstr="--zfstatmode", position=2, ), output_type=dict(), spatial_data_file=dict( argstr='--sdf="%s"', copyfile=False, + extensions=None, mandatory=True, position=0, ), @@ -33,11 +36,19 @@ def test_SMM_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SMM_outputs(): output_map = dict( - 
activation_p_map=dict(),
-        deactivation_p_map=dict(),
-        null_p_map=dict(),
+        activation_p_map=dict(
+            extensions=None,
+        ),
+        deactivation_p_map=dict(
+            extensions=None,
+        ),
+        null_p_map=dict(
+            extensions=None,
+        ),
     )
     outputs = SMM.output_spec()
diff --git a/nipype/interfaces/fsl/tests/test_auto_SUSAN.py b/nipype/interfaces/fsl/tests/test_auto_SUSAN.py
index ebbef0d427..427b770222 100644
--- a/nipype/interfaces/fsl/tests/test_auto_SUSAN.py
+++ b/nipype/interfaces/fsl/tests/test_auto_SUSAN.py
@@ -1,18 +1,19 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import SUSAN


 def test_SUSAN_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         brightness_threshold=dict(
-            argstr='%.10f',
+            argstr="%.10f",
             mandatory=True,
             position=2,
         ),
         dimension=dict(
-            argstr='%d',
+            argstr="%d",
             position=4,
             usedefault=True,
         ),
@@ -21,29 +22,31 @@ def test_SUSAN_inputs():
             usedefault=True,
         ),
         fwhm=dict(
-            argstr='%.10f',
+            argstr="%.10f",
             mandatory=True,
             position=3,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=1,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             genfile=True,
             hash_files=False,
             position=-1,
         ),
         output_type=dict(),
         usans=dict(
-            argstr='',
+            argstr="",
             position=6,
             usedefault=True,
         ),
         use_median=dict(
-            argstr='%d',
+            argstr="%d",
             position=5,
             usedefault=True,
         ),
@@ -53,8 +56,14 @@ def test_SUSAN_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SUSAN_outputs():
-    output_map = dict(smoothed_file=dict(), )
+    output_map = dict(
+        smoothed_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = SUSAN.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_SigLoss.py b/nipype/interfaces/fsl/tests/test_auto_SigLoss.py
index f3be97b350..11be93c5b9 100644
--- a/nipype/interfaces/fsl/tests/test_auto_SigLoss.py
+++ b/nipype/interfaces/fsl/tests/test_auto_SigLoss.py
@@ -1,35 +1,51 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import SigLoss


 def test_SigLoss_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        echo_time=dict(argstr='--te=%f', ),
+        args=dict(
+            argstr="%s",
+        ),
+        echo_time=dict(
+            argstr="--te=%f",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='-i %s',
+            argstr="-i %s",
+            extensions=None,
             mandatory=True,
         ),
-        mask_file=dict(argstr='-m %s', ),
+        mask_file=dict(
+            argstr="-m %s",
+            extensions=None,
+        ),
         out_file=dict(
-            argstr='-s %s',
+            argstr="-s %s",
+            extensions=None,
             genfile=True,
         ),
         output_type=dict(),
-        slice_direction=dict(argstr='-d %s', ),
+        slice_direction=dict(
+            argstr="-d %s",
+        ),
     )
     inputs = SigLoss.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SigLoss_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = SigLoss.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_Slice.py b/nipype/interfaces/fsl/tests/test_auto_Slice.py
index d1e9093e37..f5360716c6 100644
--- a/nipype/interfaces/fsl/tests/test_auto_Slice.py
+++ b/nipype/interfaces/fsl/tests/test_auto_Slice.py
@@ -1,23 +1,25 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import Slice


 def test_Slice_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=False,
+            extensions=None,
             mandatory=True,
             position=0,
         ),
         out_base_name=dict(
-            argstr='%s',
+            argstr="%s",
             position=1,
         ),
         output_type=dict(),
@@ -27,8 +29,12 @@ def test_Slice_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Slice_outputs():
-    output_map = dict(out_files=dict(), )
+    output_map = dict(
+        out_files=dict(),
+    )
     outputs = Slice.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_SliceTimer.py b/nipype/interfaces/fsl/tests/test_auto_SliceTimer.py
index 75b9918ad9..acdbd8c2ca 100644
--- a/nipype/interfaces/fsl/tests/test_auto_SliceTimer.py
+++ b/nipype/interfaces/fsl/tests/test_auto_SliceTimer.py
@@ -1,41 +1,66 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..preprocess import SliceTimer


 def test_SliceTimer_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        custom_order=dict(argstr='--ocustom=%s', ),
-        custom_timings=dict(argstr='--tcustom=%s', ),
+        args=dict(
+            argstr="%s",
+        ),
+        custom_order=dict(
+            argstr="--ocustom=%s",
+            extensions=None,
+        ),
+        custom_timings=dict(
+            argstr="--tcustom=%s",
+            extensions=None,
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        global_shift=dict(argstr='--tglobal', ),
+        global_shift=dict(
+            argstr="--tglobal",
+        ),
         in_file=dict(
-            argstr='--in=%s',
+            argstr="--in=%s",
+            extensions=None,
             mandatory=True,
             position=0,
         ),
-        index_dir=dict(argstr='--down', ),
-        interleaved=dict(argstr='--odd', ),
+        index_dir=dict(
+            argstr="--down",
+        ),
+        interleaved=dict(
+            argstr="--odd",
+        ),
         out_file=dict(
-            argstr='--out=%s',
+            argstr="--out=%s",
+            extensions=None,
             genfile=True,
             hash_files=False,
         ),
         output_type=dict(),
-        slice_direction=dict(argstr='--direction=%d', ),
-        time_repetition=dict(argstr='--repeat=%f', ),
+        slice_direction=dict(
+            argstr="--direction=%d",
+        ),
+        time_repetition=dict(
+            argstr="--repeat=%f",
+        ),
     )
     inputs = SliceTimer.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SliceTimer_outputs():
-    output_map = dict(slice_time_corrected_file=dict(), )
+    output_map = dict(
+        slice_time_corrected_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = SliceTimer.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_Slicer.py b/nipype/interfaces/fsl/tests/test_auto_Slicer.py
index d313cda474..8e3195fd39 100644
--- a/nipype/interfaces/fsl/tests/test_auto_Slicer.py
+++ b/nipype/interfaces/fsl/tests/test_auto_Slicer.py
@@ -1,23 +1,25 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import Slicer


 def test_Slicer_inputs():
     input_map = dict(
         all_axial=dict(
-            argstr='-A',
+            argstr="-A",
             position=10,
-            requires=['image_width'],
-            xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'),
+            requires=["image_width"],
+            xor=("single_slice", "middle_slices", "all_axial", "sample_axial"),
+        ),
+        args=dict(
+            argstr="%s",
         ),
-        args=dict(argstr='%s', ),
         colour_map=dict(
-            argstr='-l %s',
+            argstr="-l %s",
+            extensions=None,
             position=4,
         ),
         dither_edges=dict(
-            argstr='-t',
+            argstr="-t",
             position=7,
         ),
         environ=dict(
@@ -25,70 +27,73 @@ def test_Slicer_inputs():
             usedefault=True,
         ),
         image_edges=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             position=2,
         ),
         image_width=dict(
-            argstr='%d',
+            argstr="%d",
             position=-2,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=1,
         ),
         intensity_range=dict(
-            argstr='-i %.3f %.3f',
+            argstr="-i %.3f %.3f",
             position=5,
         ),
         label_slices=dict(
-            argstr='-L',
+            argstr="-L",
             position=3,
             usedefault=True,
         ),
         middle_slices=dict(
-            argstr='-a',
+            argstr="-a",
             position=10,
-            xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'),
+            xor=("single_slice", "middle_slices", "all_axial", "sample_axial"),
         ),
         nearest_neighbour=dict(
-            argstr='-n',
+            argstr="-n",
             position=8,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             genfile=True,
             hash_files=False,
             position=-1,
         ),
         output_type=dict(),
         sample_axial=dict(
-            argstr='-S %d',
+            argstr="-S %d",
             position=10,
-            requires=['image_width'],
-            xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'),
+            requires=["image_width"],
+            xor=("single_slice", "middle_slices", "all_axial", "sample_axial"),
         ),
         scaling=dict(
-            argstr='-s %f',
+            argstr="-s %f",
             position=0,
         ),
         show_orientation=dict(
-            argstr='%s',
+            argstr="%s",
             position=9,
             usedefault=True,
         ),
         single_slice=dict(
-            argstr='-%s',
+            argstr="-%s",
             position=10,
-            requires=['slice_number'],
-            xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'),
+            requires=["slice_number"],
+            xor=("single_slice", "middle_slices", "all_axial", "sample_axial"),
         ),
         slice_number=dict(
-            argstr='-%d',
+            argstr="-%d",
             position=11,
         ),
         threshold_edges=dict(
-            argstr='-e %.3f',
+            argstr="-e %.3f",
             position=6,
         ),
     )
@@ -97,8 +102,14 @@ def test_Slicer_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Slicer_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Slicer.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_Smooth.py b/nipype/interfaces/fsl/tests/test_auto_Smooth.py
index e8d8e06117..9d9324770b 100644
--- a/nipype/interfaces/fsl/tests/test_auto_Smooth.py
+++ b/nipype/interfaces/fsl/tests/test_auto_Smooth.py
@@ -1,38 +1,41 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import Smooth


 def test_Smooth_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         fwhm=dict(
-            argstr='-kernel gauss %.03f -fmean',
+            argstr="-kernel gauss %.03f -fmean",
             mandatory=True,
             position=1,
-            xor=['sigma'],
+            xor=["sigma"],
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=0,
         ),
         output_type=dict(),
         sigma=dict(
-            argstr='-kernel gauss %.03f -fmean',
+            argstr="-kernel gauss %.03f -fmean",
             mandatory=True,
             position=1,
-            xor=['fwhm'],
+            xor=["fwhm"],
         ),
         smoothed_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             hash_files=False,
-            name_source=['in_file'],
-            name_template='%s_smooth',
+            name_source=["in_file"],
+            name_template="%s_smooth",
             position=2,
         ),
     )
@@ -41,8 +44,14 @@ def test_Smooth_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Smooth_outputs():
-    output_map = dict(smoothed_file=dict(), )
+    output_map = dict(
+        smoothed_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Smooth.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py b/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py
index df7544f931..bf21438d1d 100644
--- a/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py
+++ b/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py
@@ -1,32 +1,36 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..model import SmoothEstimate


 def test_SmoothEstimate_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         dof=dict(
-            argstr='--dof=%d',
+            argstr="--dof=%d",
             mandatory=True,
-            xor=['zstat_file'],
+            xor=["zstat_file"],
         ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         mask_file=dict(
-            argstr='--mask=%s',
+            argstr="--mask=%s",
+            extensions=None,
             mandatory=True,
         ),
         output_type=dict(),
         residual_fit_file=dict(
-            argstr='--res=%s',
-            requires=['dof'],
+            argstr="--res=%s",
+            extensions=None,
+            requires=["dof"],
         ),
         zstat_file=dict(
-            argstr='--zstat=%s',
-            xor=['dof'],
+            argstr="--zstat=%s",
+            extensions=None,
+            xor=["dof"],
         ),
     )
     inputs = SmoothEstimate.input_spec()
@@ -34,6 +38,8 @@ def test_SmoothEstimate_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SmoothEstimate_outputs():
     output_map = dict(
         dlh=dict(),
diff --git a/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py b/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py
index d62f904655..0d6f68cbea 100644
--- a/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py
+++ b/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py
@@ -1,55 +1,59 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..maths import SpatialFilter


 def test_SpatialFilter_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=2,
         ),
         internal_datatype=dict(
-            argstr='-dt %s',
+            argstr="-dt %s",
             position=1,
         ),
         kernel_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             position=5,
-            xor=['kernel_size'],
+            xor=["kernel_size"],
         ),
         kernel_shape=dict(
-            argstr='-kernel %s',
+            argstr="-kernel %s",
             position=4,
         ),
         kernel_size=dict(
-            argstr='%.4f',
+            argstr="%.4f",
             position=5,
-            xor=['kernel_file'],
+            xor=["kernel_file"],
         ),
         nan2zeros=dict(
-            argstr='-nan',
+            argstr="-nan",
             position=3,
         ),
         operation=dict(
-            argstr='-f%s',
+            argstr="-f%s",
             mandatory=True,
             position=6,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             genfile=True,
             hash_files=False,
             position=-2,
         ),
         output_datatype=dict(
-            argstr='-odt %s',
+            argstr="-odt %s",
             position=-1,
         ),
         output_type=dict(),
@@ -59,8 +63,14 @@ def test_SpatialFilter_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SpatialFilter_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = SpatialFilter.output_spec()

     for key, metadata in list(output_map.items()):
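A note on the recurring change in these regenerated specs: nipype's File traits carry an extensions metadata entry, and the spec checker now records it explicitly. extensions=None means the trait places no restriction on the filename suffix, while a list such as [".nii", ".nii.gz"] would constrain it. A minimal sketch of that metadata, assuming a recent nipype installation; the spec class and trait name below are hypothetical illustrations, not part of this patch:

# Illustrative sketch (not from the patch): how the ``extensions`` metadata
# asserted throughout these auto-generated tests is attached to a File trait.
from nipype.interfaces.base import File, TraitedSpec


class ExampleInputSpec(TraitedSpec):  # hypothetical spec, for demonstration only
    # extensions=None leaves allowed file suffixes unrestricted;
    # a list like [".nii", ".nii.gz"] would constrain them.
    in_file = File(exists=True, extensions=None, argstr="%s")


spec = ExampleInputSpec()
# Trait metadata is reachable through traits(), exactly as the tests do it.
assert spec.traits()["in_file"].extensions is None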
diff --git a/nipype/interfaces/fsl/tests/test_auto_Split.py b/nipype/interfaces/fsl/tests/test_auto_Split.py
index cec2ab462d..79aa3a7ade 100644
--- a/nipype/interfaces/fsl/tests/test_auto_Split.py
+++ b/nipype/interfaces/fsl/tests/test_auto_Split.py
@@ -1,13 +1,14 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import Split


 def test_Split_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         dimension=dict(
-            argstr='-%s',
+            argstr="-%s",
             mandatory=True,
             position=2,
         ),
@@ -16,12 +17,13 @@ def test_Split_inputs():
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=0,
         ),
         out_base_name=dict(
-            argstr='%s',
+            argstr="%s",
             position=1,
         ),
         output_type=dict(),
@@ -31,8 +33,12 @@ def test_Split_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Split_outputs():
-    output_map = dict(out_files=dict(), )
+    output_map = dict(
+        out_files=dict(),
+    )
     outputs = Split.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_StdImage.py b/nipype/interfaces/fsl/tests/test_auto_StdImage.py
index 302ab9c4c2..226abb5e5e 100644
--- a/nipype/interfaces/fsl/tests/test_auto_StdImage.py
+++ b/nipype/interfaces/fsl/tests/test_auto_StdImage.py
@@ -1,13 +1,14 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..maths import StdImage


 def test_StdImage_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         dimension=dict(
-            argstr='-%sstd',
+            argstr="-%sstd",
             position=4,
             usedefault=True,
         ),
@@ -16,26 +17,28 @@ def test_StdImage_inputs():
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=2,
         ),
         internal_datatype=dict(
-            argstr='-dt %s',
+            argstr="-dt %s",
             position=1,
         ),
         nan2zeros=dict(
-            argstr='-nan',
+            argstr="-nan",
             position=3,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             genfile=True,
             hash_files=False,
             position=-2,
         ),
         output_datatype=dict(
-            argstr='-odt %s',
+            argstr="-odt %s",
             position=-1,
         ),
         output_type=dict(),
@@ -45,8 +48,14 @@ def test_StdImage_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_StdImage_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = StdImage.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_SwapDimensions.py b/nipype/interfaces/fsl/tests/test_auto_SwapDimensions.py
index 7cbb57491c..26b3b3ff54 100644
--- a/nipype/interfaces/fsl/tests/test_auto_SwapDimensions.py
+++ b/nipype/interfaces/fsl/tests/test_auto_SwapDimensions.py
@@ -1,26 +1,29 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import SwapDimensions


 def test_SwapDimensions_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
-            position='1',
+            position="1",
         ),
         new_dims=dict(
-            argstr='%s %s %s',
+            argstr="%s %s %s",
             mandatory=True,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             genfile=True,
             hash_files=False,
         ),
@@ -31,8 +34,14 @@ def test_SwapDimensions_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SwapDimensions_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = SwapDimensions.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_TOPUP.py b/nipype/interfaces/fsl/tests/test_auto_TOPUP.py
index 3f91b3722a..3358190dfb 100644
--- a/nipype/interfaces/fsl/tests/test_auto_TOPUP.py
+++ b/nipype/interfaces/fsl/tests/test_auto_TOPUP.py
@@ -1,110 +1,159 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..epi import TOPUP


 def test_TOPUP_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         config=dict(
-            argstr='--config=%s',
+            argstr="--config=%s",
             usedefault=True,
         ),
         encoding_direction=dict(
-            argstr='--datain=%s',
+            argstr="--datain=%s",
             mandatory=True,
-            requires=['readout_times'],
-            xor=['encoding_file'],
+            requires=["readout_times"],
+            xor=["encoding_file"],
         ),
         encoding_file=dict(
-            argstr='--datain=%s',
+            argstr="--datain=%s",
+            extensions=None,
             mandatory=True,
-            xor=['encoding_direction'],
+            xor=["encoding_direction"],
         ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        estmov=dict(argstr='--estmov=%d', ),
-        fwhm=dict(argstr='--fwhm=%f', ),
+        estmov=dict(
+            argstr="--estmov=%d",
+        ),
+        fwhm=dict(
+            argstr="--fwhm=%f",
+        ),
         in_file=dict(
-            argstr='--imain=%s',
+            argstr="--imain=%s",
+            extensions=None,
             mandatory=True,
         ),
-        interp=dict(argstr='--interp=%s', ),
-        max_iter=dict(argstr='--miter=%d', ),
-        minmet=dict(argstr='--minmet=%d', ),
-        numprec=dict(argstr='--numprec=%s', ),
+        interp=dict(
+            argstr="--interp=%s",
+        ),
+        max_iter=dict(
+            argstr="--miter=%d",
+        ),
+        minmet=dict(
+            argstr="--minmet=%d",
+        ),
+        numprec=dict(
+            argstr="--numprec=%s",
+        ),
         out_base=dict(
-            argstr='--out=%s',
+            argstr="--out=%s",
+            extensions=None,
             hash_files=False,
-            name_source=['in_file'],
-            name_template='%s_base',
+            name_source=["in_file"],
+            name_template="%s_base",
         ),
         out_corrected=dict(
-            argstr='--iout=%s',
+            argstr="--iout=%s",
+            extensions=None,
             hash_files=False,
-            name_source=['in_file'],
-            name_template='%s_corrected',
+            name_source=["in_file"],
+            name_template="%s_corrected",
         ),
         out_field=dict(
-            argstr='--fout=%s',
+            argstr="--fout=%s",
+            extensions=None,
             hash_files=False,
-            name_source=['in_file'],
-            name_template='%s_field',
+            name_source=["in_file"],
+            name_template="%s_field",
         ),
         out_jac_prefix=dict(
-            argstr='--jacout=%s',
+            argstr="--jacout=%s",
             hash_files=False,
             usedefault=True,
         ),
         out_logfile=dict(
-            argstr='--logout=%s',
+            argstr="--logout=%s",
+            extensions=None,
             hash_files=False,
             keep_extension=True,
-            name_source=['in_file'],
-            name_template='%s_topup.log',
+            name_source=["in_file"],
+            name_template="%s_topup.log",
         ),
         out_mat_prefix=dict(
-            argstr='--rbmout=%s',
+            argstr="--rbmout=%s",
             hash_files=False,
             usedefault=True,
         ),
         out_warp_prefix=dict(
-            argstr='--dfout=%s',
+            argstr="--dfout=%s",
             hash_files=False,
             usedefault=True,
         ),
         output_type=dict(),
         readout_times=dict(
             mandatory=True,
-            requires=['encoding_direction'],
-            xor=['encoding_file'],
-        ),
-        reg_lambda=dict(argstr='--lambda=%0.f', ),
-        regmod=dict(argstr='--regmod=%s', ),
-        regrid=dict(argstr='--regrid=%d', ),
-        scale=dict(argstr='--scale=%d', ),
-        splineorder=dict(argstr='--splineorder=%d', ),
-        ssqlambda=dict(argstr='--ssqlambda=%d', ),
-        subsamp=dict(argstr='--subsamp=%d', ),
-        warp_res=dict(argstr='--warpres=%f', ),
+            requires=["encoding_direction"],
+            xor=["encoding_file"],
+        ),
+        reg_lambda=dict(
+            argstr="--lambda=%0.f",
+        ),
+        regmod=dict(
+            argstr="--regmod=%s",
+        ),
+        regrid=dict(
+            argstr="--regrid=%d",
+        ),
+        scale=dict(
+            argstr="--scale=%d",
+        ),
+        splineorder=dict(
+            argstr="--splineorder=%d",
+        ),
+        ssqlambda=dict(
+            argstr="--ssqlambda=%d",
+        ),
+        subsamp=dict(
+            argstr="--subsamp=%d",
+        ),
+        warp_res=dict(
+            argstr="--warpres=%f",
+        ),
     )
     inputs = TOPUP.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_TOPUP_outputs():
     output_map = dict(
-        out_corrected=dict(),
-        out_enc_file=dict(),
-        out_field=dict(),
-        out_fieldcoef=dict(),
+        out_corrected=dict(
+            extensions=None,
+        ),
+        out_enc_file=dict(
+            extensions=None,
+        ),
+        out_field=dict(
+            extensions=None,
+        ),
+        out_fieldcoef=dict(
+            extensions=None,
+        ),
         out_jacs=dict(),
-        out_logfile=dict(),
+        out_logfile=dict(
+            extensions=None,
+        ),
         out_mats=dict(),
-        out_movpar=dict(),
+        out_movpar=dict(
+            extensions=None,
+        ),
         out_warps=dict(),
     )
     outputs = TOPUP.output_spec()
diff --git a/nipype/interfaces/fsl/tests/test_auto_TemporalFilter.py b/nipype/interfaces/fsl/tests/test_auto_TemporalFilter.py
index c762b99d31..3a825e4e45 100644
--- a/nipype/interfaces/fsl/tests/test_auto_TemporalFilter.py
+++ b/nipype/interfaces/fsl/tests/test_auto_TemporalFilter.py
@@ -1,46 +1,49 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..maths import TemporalFilter


 def test_TemporalFilter_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         highpass_sigma=dict(
-            argstr='-bptf %.6f',
+            argstr="-bptf %.6f",
             position=4,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=2,
         ),
         internal_datatype=dict(
-            argstr='-dt %s',
+            argstr="-dt %s",
             position=1,
         ),
         lowpass_sigma=dict(
-            argstr='%.6f',
+            argstr="%.6f",
             position=5,
             usedefault=True,
         ),
         nan2zeros=dict(
-            argstr='-nan',
+            argstr="-nan",
             position=3,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             genfile=True,
             hash_files=False,
             position=-2,
         ),
         output_datatype=dict(
-            argstr='-odt %s',
+            argstr="-odt %s",
             position=-1,
         ),
         output_type=dict(),
@@ -50,8 +53,14 @@ def test_TemporalFilter_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_TemporalFilter_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = TemporalFilter.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_Text2Vest.py b/nipype/interfaces/fsl/tests/test_auto_Text2Vest.py
new file mode 100644
index 0000000000..fdc39356a9
--- /dev/null
+++ b/nipype/interfaces/fsl/tests/test_auto_Text2Vest.py
@@ -0,0 +1,45 @@
+# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
+from ..utils import Text2Vest
+
+
+def test_Text2Vest_inputs():
+    input_map = dict(
+        args=dict(
+            argstr="%s",
+        ),
+        environ=dict(
+            nohash=True,
+            usedefault=True,
+        ),
+        in_file=dict(
+            argstr="%s",
+            extensions=None,
+            mandatory=True,
+            position=0,
+        ),
+        out_file=dict(
+            argstr="%s",
+            extensions=None,
+            mandatory=True,
+            position=1,
+        ),
+        output_type=dict(),
+    )
+    inputs = Text2Vest.input_spec()
+
+    for key, metadata in list(input_map.items()):
+        for metakey, value in list(metadata.items()):
+            assert getattr(inputs.traits()[key], metakey) == value
+
+
+def test_Text2Vest_outputs():
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
+    outputs = Text2Vest.output_spec()
+
+    for key, metadata in list(output_map.items()):
+        for metakey, value in list(metadata.items()):
+            assert getattr(outputs.traits()[key], metakey) == value
diff --git a/nipype/interfaces/fsl/tests/test_auto_Threshold.py b/nipype/interfaces/fsl/tests/test_auto_Threshold.py
index 5c2bb46cc8..cc3446bd47 100644
--- a/nipype/interfaces/fsl/tests/test_auto_Threshold.py
+++ b/nipype/interfaces/fsl/tests/test_auto_Threshold.py
@@ -1,46 +1,53 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..maths import Threshold


 def test_Threshold_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        direction=dict(usedefault=True, ),
+        args=dict(
+            argstr="%s",
+        ),
+        direction=dict(
+            usedefault=True,
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=2,
         ),
         internal_datatype=dict(
-            argstr='-dt %s',
+            argstr="-dt %s",
             position=1,
         ),
         nan2zeros=dict(
-            argstr='-nan',
+            argstr="-nan",
             position=3,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             genfile=True,
             hash_files=False,
             position=-2,
         ),
         output_datatype=dict(
-            argstr='-odt %s',
+            argstr="-odt %s",
             position=-1,
         ),
         output_type=dict(),
         thresh=dict(
-            argstr='%s',
+            argstr="%s",
             mandatory=True,
             position=4,
         ),
-        use_nonzero_voxels=dict(requires=['use_robust_range'], ),
+        use_nonzero_voxels=dict(
+            requires=["use_robust_range"],
+        ),
         use_robust_range=dict(),
     )
     inputs = Threshold.input_spec()
@@ -48,8 +55,14 @@ def test_Threshold_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Threshold_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Threshold.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py b/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py
index 4308ee8153..631741da49 100644
--- a/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py
+++ b/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py
@@ -1,35 +1,54 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..dti import TractSkeleton


 def test_TractSkeleton_inputs():
     input_map = dict(
-        alt_data_file=dict(argstr='-a %s', ),
-        alt_skeleton=dict(argstr='-s %s', ),
-        args=dict(argstr='%s', ),
-        data_file=dict(),
-        distance_map=dict(),
+        alt_data_file=dict(
+            argstr="-a %s",
+            extensions=None,
+        ),
+        alt_skeleton=dict(
+            argstr="-s %s",
+            extensions=None,
+        ),
+        args=dict(
+            argstr="%s",
+        ),
+        data_file=dict(
+            extensions=None,
+        ),
+        distance_map=dict(
+            extensions=None,
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='-i %s',
+            argstr="-i %s",
+            extensions=None,
             mandatory=True,
         ),
         output_type=dict(),
         project_data=dict(
-            argstr='-p %.3f %s %s %s %s',
-            requires=['threshold', 'distance_map', 'data_file'],
+            argstr="-p %.3f %s %s %s %s",
+            requires=["threshold", "distance_map", "data_file"],
+        ),
+        projected_data=dict(
+            extensions=None,
+        ),
+        search_mask_file=dict(
+            extensions=None,
+            xor=["use_cingulum_mask"],
+        ),
+        skeleton_file=dict(
+            argstr="-o %s",
         ),
-        projected_data=dict(),
-        search_mask_file=dict(xor=['use_cingulum_mask'], ),
-        skeleton_file=dict(argstr='-o %s', ),
         threshold=dict(),
         use_cingulum_mask=dict(
             usedefault=True,
-            xor=['search_mask_file'],
+            xor=["search_mask_file"],
         ),
     )
     inputs = TractSkeleton.input_spec()
@@ -37,10 +56,16 @@ def test_TractSkeleton_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_TractSkeleton_outputs():
     output_map = dict(
-        projected_data=dict(),
-        skeleton_file=dict(),
+        projected_data=dict(
+            extensions=None,
+        ),
+        skeleton_file=dict(
+            extensions=None,
+        ),
     )
     outputs = TractSkeleton.output_spec()
diff --git a/nipype/interfaces/fsl/tests/test_auto_Training.py b/nipype/interfaces/fsl/tests/test_auto_Training.py
index 91d48e4c01..82a53d1408 100644
--- a/nipype/interfaces/fsl/tests/test_auto_Training.py
+++ b/nipype/interfaces/fsl/tests/test_auto_Training.py
@@ -1,26 +1,27 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..fix import Training


 def test_Training_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         loo=dict(
-            argstr='-l',
+            argstr="-l",
             position=2,
         ),
         mel_icas=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=False,
             position=-1,
         ),
         trained_wts_filestem=dict(
-            argstr='%s',
+            argstr="%s",
             position=1,
         ),
     )
@@ -29,8 +30,14 @@ def test_Training_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Training_outputs():
-    output_map = dict(trained_wts_file=dict(), )
+    output_map = dict(
+        trained_wts_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = Training.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_TrainingSetCreator.py b/nipype/interfaces/fsl/tests/test_auto_TrainingSetCreator.py
index 18ef078a79..3ab307d6a8 100644
--- a/nipype/interfaces/fsl/tests/test_auto_TrainingSetCreator.py
+++ b/nipype/interfaces/fsl/tests/test_auto_TrainingSetCreator.py
@@ -1,27 +1,30 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..fix import TrainingSetCreator


 def test_TrainingSetCreator_inputs():
     input_map = dict(
         mel_icas_in=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=False,
             position=-1,
-        ), )
+        ),
+    )
     inputs = TrainingSetCreator.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_TrainingSetCreator_outputs():
     output_map = dict(
         mel_icas_out=dict(
-            argstr='%s',
+            argstr="%s",
             copyfile=False,
             position=-1,
-        ), )
+        ),
+    )
     outputs = TrainingSetCreator.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_UnaryMaths.py b/nipype/interfaces/fsl/tests/test_auto_UnaryMaths.py
index a0dcf2610d..cb27a76876 100644
--- a/nipype/interfaces/fsl/tests/test_auto_UnaryMaths.py
+++ b/nipype/interfaces/fsl/tests/test_auto_UnaryMaths.py
@@ -1,41 +1,44 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..maths import UnaryMaths


 def test_UnaryMaths_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=2,
         ),
         internal_datatype=dict(
-            argstr='-dt %s',
+            argstr="-dt %s",
             position=1,
         ),
         nan2zeros=dict(
-            argstr='-nan',
+            argstr="-nan",
             position=3,
         ),
         operation=dict(
-            argstr='-%s',
+            argstr="-%s",
             mandatory=True,
             position=4,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             genfile=True,
             hash_files=False,
             position=-2,
         ),
         output_datatype=dict(
-            argstr='-odt %s',
+            argstr="-odt %s",
             position=-1,
         ),
         output_type=dict(),
@@ -45,8 +48,14 @@ def test_UnaryMaths_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_UnaryMaths_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = UnaryMaths.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_VecReg.py b/nipype/interfaces/fsl/tests/test_auto_VecReg.py
index 61a28f7369..9564241cc3 100644
--- a/nipype/interfaces/fsl/tests/test_auto_VecReg.py
+++ b/nipype/interfaces/fsl/tests/test_auto_VecReg.py
@@ -1,44 +1,74 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..dti import VecReg


 def test_VecReg_inputs():
     input_map = dict(
-        affine_mat=dict(argstr='-t %s', ),
-        args=dict(argstr='%s', ),
+        affine_mat=dict(
+            argstr="-t %s",
+            extensions=None,
+        ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='-i %s',
+            argstr="-i %s",
+            extensions=None,
             mandatory=True,
         ),
-        interpolation=dict(argstr='--interp=%s', ),
-        mask=dict(argstr='-m %s', ),
+        interpolation=dict(
+            argstr="--interp=%s",
+        ),
+        mask=dict(
+            argstr="-m %s",
+            extensions=None,
+        ),
         out_file=dict(
-            argstr='-o %s',
+            argstr="-o %s",
+            extensions=None,
             genfile=True,
             hash_files=False,
         ),
         output_type=dict(),
-        ref_mask=dict(argstr='--refmask=%s', ),
+        ref_mask=dict(
+            argstr="--refmask=%s",
+            extensions=None,
+        ),
         ref_vol=dict(
-            argstr='-r %s',
+            argstr="-r %s",
+            extensions=None,
             mandatory=True,
         ),
-        rotation_mat=dict(argstr='--rotmat=%s', ),
-        rotation_warp=dict(argstr='--rotwarp=%s', ),
-        warp_field=dict(argstr='-w %s', ),
+        rotation_mat=dict(
+            argstr="--rotmat=%s",
+            extensions=None,
+        ),
+        rotation_warp=dict(
+            argstr="--rotwarp=%s",
+            extensions=None,
+        ),
+        warp_field=dict(
+            argstr="-w %s",
+            extensions=None,
+        ),
     )
     inputs = VecReg.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_VecReg_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = VecReg.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_Vest2Text.py b/nipype/interfaces/fsl/tests/test_auto_Vest2Text.py
new file mode 100644
index 0000000000..2732e95d12
--- /dev/null
+++ b/nipype/interfaces/fsl/tests/test_auto_Vest2Text.py
@@ -0,0 +1,45 @@
+# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
+from ..utils import Vest2Text
+
+
+def test_Vest2Text_inputs():
+    input_map = dict(
+        args=dict(
+            argstr="%s",
+        ),
+        environ=dict(
+            nohash=True,
+            usedefault=True,
+        ),
+        in_file=dict(
+            argstr="%s",
+            extensions=None,
+            mandatory=True,
+            position=0,
+        ),
+        out_file=dict(
+            argstr="%s",
+            extensions=None,
+            position=1,
+            usedefault=True,
+        ),
+        output_type=dict(),
+    )
+    inputs = Vest2Text.input_spec()
+
+    for key, metadata in list(input_map.items()):
+        for metakey, value in list(metadata.items()):
+            assert getattr(inputs.traits()[key], metakey) == value
+
+
+def test_Vest2Text_outputs():
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
+    outputs = Vest2Text.output_spec()
+
+    for key, metadata in list(output_map.items()):
+        for metakey, value in list(metadata.items()):
+            assert getattr(outputs.traits()[key], metakey) == value
diff --git a/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py b/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py
index 30924223cf..b7f7fc7d87 100644
--- a/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py
+++ b/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py
@@ -1,21 +1,23 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import WarpPoints


 def test_WarpPoints_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         coord_mm=dict(
-            argstr='-mm',
-            xor=['coord_vox'],
+            argstr="-mm",
+            xor=["coord_vox"],
         ),
         coord_vox=dict(
-            argstr='-vox',
-            xor=['coord_mm'],
+            argstr="-vox",
+            xor=["coord_mm"],
         ),
         dest_file=dict(
-            argstr='-dest %s',
+            argstr="-dest %s",
+            extensions=None,
             mandatory=True,
         ),
         environ=dict(
@@ -23,26 +25,31 @@ def test_WarpPoints_inputs():
             usedefault=True,
         ),
         in_coords=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-1,
         ),
         out_file=dict(
-            name_source='in_coords',
-            name_template='%s_warped',
-            output_name='out_file',
+            extensions=None,
+            name_source="in_coords",
+            name_template="%s_warped",
+            output_name="out_file",
         ),
         src_file=dict(
-            argstr='-src %s',
+            argstr="-src %s",
+            extensions=None,
             mandatory=True,
         ),
         warp_file=dict(
-            argstr='-warp %s',
-            xor=['xfm_file'],
+            argstr="-warp %s",
+            extensions=None,
+            xor=["xfm_file"],
         ),
         xfm_file=dict(
-            argstr='-xfm %s',
-            xor=['warp_file'],
+            argstr="-xfm %s",
+            extensions=None,
+            xor=["warp_file"],
         ),
     )
     inputs = WarpPoints.input_spec()
@@ -50,8 +57,14 @@ def test_WarpPoints_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_WarpPoints_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = WarpPoints.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_WarpPointsFromStd.py b/nipype/interfaces/fsl/tests/test_auto_WarpPointsFromStd.py
index 627ef60ad6..b8f4cbef97 100644
--- a/nipype/interfaces/fsl/tests/test_auto_WarpPointsFromStd.py
+++ b/nipype/interfaces/fsl/tests/test_auto_WarpPointsFromStd.py
@@ -1,43 +1,49 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import WarpPointsFromStd


 def test_WarpPointsFromStd_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         coord_mm=dict(
-            argstr='-mm',
-            xor=['coord_vox'],
+            argstr="-mm",
+            xor=["coord_vox"],
         ),
         coord_vox=dict(
-            argstr='-vox',
-            xor=['coord_mm'],
+            argstr="-vox",
+            xor=["coord_mm"],
         ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         img_file=dict(
-            argstr='-img %s',
+            argstr="-img %s",
+            extensions=None,
             mandatory=True,
         ),
         in_coords=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-2,
         ),
         std_file=dict(
-            argstr='-std %s',
+            argstr="-std %s",
+            extensions=None,
             mandatory=True,
         ),
         warp_file=dict(
-            argstr='-warp %s',
-            xor=['xfm_file'],
+            argstr="-warp %s",
+            extensions=None,
+            xor=["xfm_file"],
         ),
         xfm_file=dict(
-            argstr='-xfm %s',
-            xor=['warp_file'],
+            argstr="-xfm %s",
+            extensions=None,
+            xor=["warp_file"],
         ),
     )
     inputs = WarpPointsFromStd.input_spec()
@@ -45,8 +51,14 @@ def test_WarpPointsFromStd_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_WarpPointsFromStd_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = WarpPointsFromStd.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py b/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py
index 3d37ad1486..0b5881c776 100644
--- a/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py
+++ b/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py
@@ -1,49 +1,59 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import WarpPointsToStd


 def test_WarpPointsToStd_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         coord_mm=dict(
-            argstr='-mm',
-            xor=['coord_vox'],
+            argstr="-mm",
+            xor=["coord_vox"],
         ),
         coord_vox=dict(
-            argstr='-vox',
-            xor=['coord_mm'],
+            argstr="-vox",
+            xor=["coord_mm"],
         ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         img_file=dict(
-            argstr='-img %s',
+            argstr="-img %s",
+            extensions=None,
             mandatory=True,
         ),
         in_coords=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             mandatory=True,
             position=-1,
         ),
         out_file=dict(
-            name_source='in_coords',
-            name_template='%s_warped',
-            output_name='out_file',
+            extensions=None,
+            name_source="in_coords",
+            name_template="%s_warped",
+            output_name="out_file",
+        ),
+        premat_file=dict(
+            argstr="-premat %s",
+            extensions=None,
         ),
-        premat_file=dict(argstr='-premat %s', ),
         std_file=dict(
-            argstr='-std %s',
+            argstr="-std %s",
+            extensions=None,
             mandatory=True,
         ),
         warp_file=dict(
-            argstr='-warp %s',
-            xor=['xfm_file'],
+            argstr="-warp %s",
+            extensions=None,
+            xor=["xfm_file"],
         ),
         xfm_file=dict(
-            argstr='-xfm %s',
-            xor=['warp_file'],
+            argstr="-xfm %s",
+            extensions=None,
+            xor=["warp_file"],
         ),
     )
     inputs = WarpPointsToStd.input_spec()
@@ -51,8 +61,14 @@ def test_WarpPointsToStd_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_WarpPointsToStd_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(
+        out_file=dict(
+            extensions=None,
+        ),
+    )
     outputs = WarpPointsToStd.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py b/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py
index 04d3a05f14..c8caa8da84 100644
--- a/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py
+++ b/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py
@@ -1,35 +1,50 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..utils import WarpUtils


 def test_WarpUtils_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
         in_file=dict(
-            argstr='--in=%s',
+            argstr="--in=%s",
+            extensions=None,
             mandatory=True,
         ),
-        knot_space=dict(argstr='--knotspace=%d,%d,%d', ),
+        knot_space=dict(
+            argstr="--knotspace=%d,%d,%d",
+        ),
         out_file=dict(
-            argstr='--out=%s',
-            name_source=['in_file'],
-            output_name='out_file',
+            argstr="--out=%s",
+            extensions=None,
+            name_source=["in_file"],
+            output_name="out_file",
             position=-1,
         ),
-        out_format=dict(argstr='--outformat=%s', ),
-        out_jacobian=dict(argstr='--jac=%s', ),
+        out_format=dict(
+            argstr="--outformat=%s",
+        ),
+        out_jacobian=dict(
+            argstr="--jac=%s",
+            extensions=None,
+        ),
         output_type=dict(),
         reference=dict(
-            argstr='--ref=%s',
+            argstr="--ref=%s",
+            extensions=None,
             mandatory=True,
         ),
-        warp_resolution=dict(argstr='--warpres=%0.4f,%0.4f,%0.4f', ),
-        with_affine=dict(argstr='--withaff', ),
+        warp_resolution=dict(
+            argstr="--warpres=%0.4f,%0.4f,%0.4f",
+        ),
+        with_affine=dict(
+            argstr="--withaff",
+        ),
         write_jacobian=dict(
             mandatory=True,
             usedefault=True,
@@ -40,10 +55,16 @@ def test_WarpUtils_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_WarpUtils_outputs():
     output_map = dict(
-        out_file=dict(),
-        out_jacobian=dict(),
+        out_file=dict(
+            extensions=None,
+        ),
+        out_jacobian=dict(
+            extensions=None,
+        ),
     )
     outputs = WarpUtils.output_spec()
diff --git a/nipype/interfaces/fsl/tests/test_auto_XFibres5.py b/nipype/interfaces/fsl/tests/test_auto_XFibres5.py
index d72bb3bb4b..349b2f52e7 100644
--- a/nipype/interfaces/fsl/tests/test_auto_XFibres5.py
+++ b/nipype/interfaces/fsl/tests/test_auto_XFibres5.py
@@ -1,37 +1,41 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..dti import XFibres5


 def test_XFibres5_inputs():
     input_map = dict(
         all_ard=dict(
-            argstr='--allard',
-            xor=('no_ard', 'all_ard'),
+            argstr="--allard",
+            xor=("no_ard", "all_ard"),
+        ),
+        args=dict(
+            argstr="%s",
         ),
-        args=dict(argstr='%s', ),
         burn_in=dict(
-            argstr='--burnin=%d',
+            argstr="--burnin=%d",
             usedefault=True,
         ),
         burn_in_no_ard=dict(
-            argstr='--burnin_noard=%d',
+            argstr="--burnin_noard=%d",
             usedefault=True,
         ),
         bvals=dict(
-            argstr='--bvals=%s',
+            argstr="--bvals=%s",
+            extensions=None,
             mandatory=True,
         ),
         bvecs=dict(
-            argstr='--bvecs=%s',
+            argstr="--bvecs=%s",
+            extensions=None,
             mandatory=True,
         ),
         cnlinear=dict(
-            argstr='--cnonlinear',
-            xor=('no_spat', 'non_linear', 'cnlinear'),
+            argstr="--cnonlinear",
+            xor=("no_spat", "non_linear", "cnlinear"),
         ),
         dwi=dict(
-            argstr='--data=%s',
+            argstr="--data=%s",
+            extensions=None,
             mandatory=True,
         ),
         environ=dict(
@@ -39,58 +43,70 @@ def test_XFibres5_inputs():
             usedefault=True,
         ),
         f0_ard=dict(
-            argstr='--f0 --ardf0',
-            xor=['f0_noard', 'f0_ard', 'all_ard'],
+            argstr="--f0 --ardf0",
+            xor=["f0_noard", "f0_ard", "all_ard"],
         ),
         f0_noard=dict(
-            argstr='--f0',
-            xor=['f0_noard', 'f0_ard'],
+            argstr="--f0",
+            xor=["f0_noard", "f0_ard"],
         ),
         force_dir=dict(
-            argstr='--forcedir',
+            argstr="--forcedir",
             usedefault=True,
         ),
-        fudge=dict(argstr='--fudge=%d', ),
-        gradnonlin=dict(argstr='--gradnonlin=%s', ),
+        fudge=dict(
+            argstr="--fudge=%d",
+        ),
+        gradnonlin=dict(
+            argstr="--gradnonlin=%s",
+            extensions=None,
+        ),
         logdir=dict(
-            argstr='--logdir=%s',
+            argstr="--logdir=%s",
             usedefault=True,
         ),
         mask=dict(
-            argstr='--mask=%s',
+            argstr="--mask=%s",
+            extensions=None,
             mandatory=True,
         ),
-        model=dict(argstr='--model=%d', ),
+        model=dict(
+            argstr="--model=%d",
+        ),
         n_fibres=dict(
-            argstr='--nfibres=%d',
+            argstr="--nfibres=%d",
             mandatory=True,
             usedefault=True,
         ),
         n_jumps=dict(
-            argstr='--njumps=%d',
+            argstr="--njumps=%d",
             usedefault=True,
         ),
         no_ard=dict(
-            argstr='--noard',
-            xor=('no_ard', 'all_ard'),
+            argstr="--noard",
+            xor=("no_ard", "all_ard"),
         ),
         no_spat=dict(
-            argstr='--nospat',
-            xor=('no_spat', 'non_linear', 'cnlinear'),
+            argstr="--nospat",
+            xor=("no_spat", "non_linear", "cnlinear"),
        ),
         non_linear=dict(
-            argstr='--nonlinear',
-            xor=('no_spat', 'non_linear', 'cnlinear'),
+            argstr="--nonlinear",
+            xor=("no_spat", "non_linear", "cnlinear"),
         ),
         output_type=dict(),
-        rician=dict(argstr='--rician', ),
+        rician=dict(
+            argstr="--rician",
+        ),
         sample_every=dict(
-            argstr='--sampleevery=%d',
+            argstr="--sampleevery=%d",
             usedefault=True,
         ),
-        seed=dict(argstr='--seed=%d', ),
+        seed=dict(
+            argstr="--seed=%d",
+        ),
         update_proposal_every=dict(
-            argstr='--updateproposalevery=%d',
+            argstr="--updateproposalevery=%d",
             usedefault=True,
         ),
     )
@@ -99,14 +115,22 @@ def test_XFibres5_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_XFibres5_outputs():
     output_map = dict(
         dyads=dict(),
         fsamples=dict(),
-        mean_S0samples=dict(),
-        mean_dsamples=dict(),
+        mean_S0samples=dict(
+            extensions=None,
+        ),
+        mean_dsamples=dict(
+            extensions=None,
+        ),
         mean_fsamples=dict(),
-        mean_tausamples=dict(),
+        mean_tausamples=dict(
+            extensions=None,
+        ),
         phsamples=dict(),
         thsamples=dict(),
     )
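Every test_auto_* file in this patch reduces to the same check: the recorded map of trait metadata must match the live input or output spec, entry by entry. A condensed sketch of that pattern, using the Smooth interface and one entry copied from the test_auto_Smooth.py hunk above (illustrative, not part of the patch; it assumes nipype is importable, though FSL itself need not be installed to build the spec):

# Illustrative sketch: the metadata assertion loop these generated tests share.
from nipype.interfaces.fsl.utils import Smooth

# A fragment of the expected-metadata map, taken from test_auto_Smooth.py.
input_map = dict(
    in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0),
)
inputs = Smooth.input_spec()
for key, metadata in input_map.items():
    for metakey, value in metadata.items():
        # Each recorded metadata entry must equal the live trait definition.
        assert getattr(inputs.traits()[key], metakey) == value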
diff --git a/nipype/interfaces/fsl/tests/test_base.py b/nipype/interfaces/fsl/tests/test_base.py
index 71022997b6..1a76d0f6a5 100644
--- a/nipype/interfaces/fsl/tests/test_base.py
+++ b/nipype/interfaces/fsl/tests/test_base.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 import os

 import nipype.interfaces.fsl as fsl
 from nipype.interfaces.base import InterfaceResult
-from nipype.interfaces.fsl import check_fsl, no_fsl
+from nipype.interfaces.fsl import no_fsl

 import pytest

@@ -13,8 +12,7 @@
 @pytest.mark.skipif(no_fsl(), reason="fsl is not installed")
 def test_fslversion():
     ver = fsl.Info.version()
-    ver = ver.split('.')
-    assert ver[0] in ['4', '5']
+    assert ver.split(".", 1)[0].isdigit()


 @pytest.mark.skipif(no_fsl(), reason="fsl is not installed")
@@ -30,28 +28,28 @@ def test_outputtype_to_ext():
         assert res == ext

     with pytest.raises(KeyError):
-        fsl.Info.output_type_to_ext('JUNK')
+        fsl.Info.output_type_to_ext("JUNK")


 @pytest.mark.skipif(no_fsl(), reason="fsl is not installed")
 def test_FSLCommand():
     # Most methods in FSLCommand are tested in the subclasses.  Only
     # testing the one item that is not.
-    cmd = fsl.FSLCommand(command='ls')
+    cmd = fsl.FSLCommand(command="ls")
     res = cmd.run()
-    assert type(res) == InterfaceResult
+    assert type(res) is InterfaceResult


 @pytest.mark.skipif(no_fsl(), reason="fsl is not installed")
 def test_FSLCommand2():
     # Check default output type and environ
-    cmd = fsl.FSLCommand(command='junk')
+    cmd = fsl.FSLCommand(command="junk")
     assert cmd._output_type == fsl.Info.output_type()
-    assert cmd.inputs.environ['FSLOUTPUTTYPE'] == cmd._output_type
+    assert cmd.inputs.environ["FSLOUTPUTTYPE"] == cmd._output_type
     assert cmd._output_type in fsl.Info.ftypes

     cmd = fsl.FSLCommand
-    cmdinst = fsl.FSLCommand(command='junk')
+    cmdinst = fsl.FSLCommand(command="junk")
     for out_type in fsl.Info.ftypes:
         cmd.set_default_output_type(out_type)
         assert cmd._output_type == out_type
@@ -64,39 +62,24 @@ def test_FSLCommand2():
 @pytest.mark.parametrize(
     "args, desired_name",
     [
-        ({}, {
-            "file": 'foo.nii.gz'
-        }),  # just the filename
+        ({}, {"file": "foo.nii.gz"}),  # just the filename
         # filename with suffix
-        ({
-            "suffix": '_brain'
-        }, {
-            "file": 'foo_brain.nii.gz'
-        }),
+        ({"suffix": "_brain"}, {"file": "foo_brain.nii.gz"}),
         (
-            {
-                "suffix": '_brain',
-                "cwd": '/data'
-            },
+            {"suffix": "_brain", "cwd": "/data"},
             # filename with suffix and working directory
-            {
-                "dir": '/data',
-                "file": 'foo_brain.nii.gz'
-            }),
+            {"dir": "/data", "file": "foo_brain.nii.gz"},
+        ),
         # filename with suffix and no file extension change
-        ({
-            "suffix": '_brain.mat',
-            "change_ext": False
-        }, {
-            "file": 'foo_brain.mat'
-        })
-    ])
+        ({"suffix": "_brain.mat", "change_ext": False}, {"file": "foo_brain.mat"}),
+    ],
+)
 def test_gen_fname(args, desired_name):
     # Test _gen_fname method of FSLCommand
-    cmd = fsl.FSLCommand(command='junk', output_type='NIFTI_GZ')
+    cmd = fsl.FSLCommand(command="junk", output_type="NIFTI_GZ")
     pth = os.getcwd()
-    fname = cmd._gen_fname('foo.nii.gz', **args)
-    if "dir" in desired_name.keys():
+    fname = cmd._gen_fname("foo.nii.gz", **args)
+    if "dir" in desired_name:
         desired = os.path.join(desired_name["dir"], desired_name["file"])
     else:
         desired = os.path.join(pth, desired_name["file"])
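The version assertion rewritten above was relaxed because hard-coding major versions 4 and 5 breaks on FSL 6.x; checking that the major component is numeric accepts any release. A quick illustration with assumed version strings (not from the patch):

# Illustrative only: the relaxed check passes for any numeric major version.
for ver in ["5.0.9", "6.0.4"]:  # hypothetical sample values
    assert ver.split(".", 1)[0].isdigit()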
filelist[0], filelist[1]) + ) -@pytest.mark.xfail( - reason="These tests are skipped until we clean up some of this code") +@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_randomise2(): - rand = fsl.Randomise() # make sure command gets called - assert rand.cmd == 'randomise' + assert rand.cmd == "randomise" # test raising error with mandatory args absent with pytest.raises(ValueError): rand.run() # .inputs based parameters setting - rand.inputs.input_4D = 'infile.nii' - rand.inputs.output_rootname = 'outfile' - rand.inputs.design_matrix = 'design.mat' - rand.inputs.t_contrast = 'infile.con' + rand.inputs.input_4D = "infile.nii" + rand.inputs.output_rootname = "outfile" + rand.inputs.design_matrix = "design.mat" + rand.inputs.t_contrast = "infile.con" actualCmdline = sorted(rand.cmdline.split()) - cmd = 'randomise -i infile.nii -o outfile -d design.mat -t infile.con' + cmd = "randomise -i infile.nii -o outfile -d design.mat -t infile.con" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline # .run based parameter setting rand2 = fsl.Randomise( - input_4D='infile2', - output_rootname='outfile2', - f_contrast='infile.f', + input_4D="infile2", + output_rootname="outfile2", + f_contrast="infile.f", one_sample_gmean=True, - int_seed=4) + int_seed=4, + ) actualCmdline = sorted(rand2.cmdline.split()) - cmd = 'randomise -i infile2 -o outfile2 -1 -f infile.f --seed=4' + cmd = "randomise -i infile2 -o outfile2 -1 -f infile.f --seed=4" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline rand3 = fsl.Randomise() - results = rand3.run(input_4D='infile3', output_rootname='outfile3') - assert results.runtime.cmdline == \ - 'randomise -i infile3 -o outfile3' + results = rand3.run(input_4D="infile3", output_rootname="outfile3") + assert results.runtime.cmdline == "randomise -i infile3 -o outfile3" # test arguments for opt_map opt_map = { - 'demean_data': ('-D', True), - 'one_sample_gmean': ('-1', True), - 'mask_image': ('-m inp_mask', 'inp_mask'), - 'design_matrix': ('-d design.mat', 'design.mat'), - 't_contrast': ('-t input.con', 'input.con'), - 'f_contrast': ('-f input.fts', 'input.fts'), - 'xchange_block_labels': ('-e design.grp', 'design.grp'), - 'print_unique_perm': ('-q', True), - 'print_info_parallelMode': ('-Q', True), - 'num_permutations': ('-n 10', 10), - 'vox_pvalus': ('-x', True), - 'fstats_only': ('--fonly', True), - 'thresh_free_cluster': ('-T', True), - 'thresh_free_cluster_2Dopt': ('--T2', True), - 'cluster_thresholding': ('-c 0.20', 0.20), - 'cluster_mass_thresholding': ('-C 0.40', 0.40), - 'fcluster_thresholding': ('-F 0.10', 0.10), - 'fcluster_mass_thresholding': ('-S 0.30', 0.30), - 'variance_smoothing': ('-v 0.20', 0.20), - 'diagnostics_off': ('--quiet', True), - 'output_raw': ('-R', True), - 'output_perm_vect': ('-P', True), - 'int_seed': ('--seed=20', 20), - 'TFCE_height_param': ('--tfce_H=0.11', 0.11), - 'TFCE_extent_param': ('--tfce_E=0.50', 0.50), - 'TFCE_connectivity': ('--tfce_C=0.30', 0.30), - 'list_num_voxel_EVs_pos': ('--vxl=1,2,3,4', '1,2,3,4'), - 'list_img_voxel_EVs': ('--vxf=6,7,8,9,3', '6,7,8,9,3') + "demean_data": ("-D", True), + "one_sample_gmean": ("-1", True), + "mask_image": ("-m inp_mask", "inp_mask"), + "design_matrix": ("-d design.mat", "design.mat"), + "t_contrast": ("-t input.con", "input.con"), + "f_contrast": ("-f input.fts", "input.fts"), + "xchange_block_labels": ("-e design.grp", "design.grp"), + "print_unique_perm": ("-q", True), + "print_info_parallelMode": 
("-Q", True), + "num_permutations": ("-n 10", 10), + "vox_pvalus": ("-x", True), + "fstats_only": ("--fonly", True), + "thresh_free_cluster": ("-T", True), + "thresh_free_cluster_2Dopt": ("--T2", True), + "cluster_thresholding": ("-c 0.20", 0.20), + "cluster_mass_thresholding": ("-C 0.40", 0.40), + "fcluster_thresholding": ("-F 0.10", 0.10), + "fcluster_mass_thresholding": ("-S 0.30", 0.30), + "variance_smoothing": ("-v 0.20", 0.20), + "diagnostics_off": ("--quiet", True), + "output_raw": ("-R", True), + "output_perm_vect": ("-P", True), + "int_seed": ("--seed=20", 20), + "TFCE_height_param": ("--tfce_H=0.11", 0.11), + "TFCE_extent_param": ("--tfce_E=0.50", 0.50), + "TFCE_connectivity": ("--tfce_C=0.30", 0.30), + "list_num_voxel_EVs_pos": ("--vxl=1,2,3,4", "1,2,3,4"), + "list_img_voxel_EVs": ("--vxf=6,7,8,9,3", "6,7,8,9,3"), } for name, settings in list(opt_map.items()): rand4 = fsl.Randomise( - input_4D='infile', output_rootname='root', **{ - name: settings[1] - }) - assert rand4.cmdline == rand4.cmd + ' -i infile -o root ' + settings[0] + input_4D="infile", output_rootname="root", **{name: settings[1]} + ) + assert rand4.cmdline == rand4.cmd + " -i infile -o root " + settings[0] -@pytest.mark.xfail( - reason="These tests are skipped until we clean up some of this code") +@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Randomise_parallel(): rand = fsl.Randomise_parallel() # make sure command gets called - assert rand.cmd == 'randomise_parallel' + assert rand.cmd == "randomise_parallel" # test raising error with mandatory args absent with pytest.raises(ValueError): rand.run() # .inputs based parameters setting - rand.inputs.input_4D = 'infile.nii' - rand.inputs.output_rootname = 'outfile' - rand.inputs.design_matrix = 'design.mat' - rand.inputs.t_contrast = 'infile.con' + rand.inputs.input_4D = "infile.nii" + rand.inputs.output_rootname = "outfile" + rand.inputs.design_matrix = "design.mat" + rand.inputs.t_contrast = "infile.con" actualCmdline = sorted(rand.cmdline.split()) - cmd = ('randomise_parallel -i infile.nii -o outfile -d design.mat -t ' - 'infile.con') + cmd = "randomise_parallel -i infile.nii -o outfile -d design.mat -t infile.con" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline # .run based parameter setting rand2 = fsl.Randomise_parallel( - input_4D='infile2', - output_rootname='outfile2', - f_contrast='infile.f', + input_4D="infile2", + output_rootname="outfile2", + f_contrast="infile.f", one_sample_gmean=True, - int_seed=4) + int_seed=4, + ) actualCmdline = sorted(rand2.cmdline.split()) - cmd = 'randomise_parallel -i infile2 -o outfile2 -1 -f infile.f --seed=4' + cmd = "randomise_parallel -i infile2 -o outfile2 -1 -f infile.f --seed=4" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline rand3 = fsl.Randomise_parallel() - results = rand3.run(input_4D='infile3', output_rootname='outfile3') - assert results.runtime.cmdline == \ - 'randomise_parallel -i infile3 -o outfile3' + results = rand3.run(input_4D="infile3", output_rootname="outfile3") + assert results.runtime.cmdline == "randomise_parallel -i infile3 -o outfile3" # test arguments for opt_map opt_map = { - 'demean_data': ('-D', True), - 'one_sample_gmean': ('-1', True), - 'mask_image': ('-m inp_mask', 'inp_mask'), - 'design_matrix': ('-d design.mat', 'design.mat'), - 't_contrast': ('-t input.con', 'input.con'), - 'f_contrast': ('-f input.fts', 'input.fts'), - 'xchange_block_labels': ('-e design.grp', 'design.grp'), - 
'print_unique_perm': ('-q', True), - 'print_info_parallelMode': ('-Q', True), - 'num_permutations': ('-n 10', 10), - 'vox_pvalus': ('-x', True), - 'fstats_only': ('--fonly', True), - 'thresh_free_cluster': ('-T', True), - 'thresh_free_cluster_2Dopt': ('--T2', True), - 'cluster_thresholding': ('-c 0.20', 0.20), - 'cluster_mass_thresholding': ('-C 0.40', 0.40), - 'fcluster_thresholding': ('-F 0.10', 0.10), - 'fcluster_mass_thresholding': ('-S 0.30', 0.30), - 'variance_smoothing': ('-v 0.20', 0.20), - 'diagnostics_off': ('--quiet', True), - 'output_raw': ('-R', True), - 'output_perm_vect': ('-P', True), - 'int_seed': ('--seed=20', 20), - 'TFCE_height_param': ('--tfce_H=0.11', 0.11), - 'TFCE_extent_param': ('--tfce_E=0.50', 0.50), - 'TFCE_connectivity': ('--tfce_C=0.30', 0.30), - 'list_num_voxel_EVs_pos': ('--vxl=' + repr([1, 2, 3, 4]), - repr([1, 2, 3, 4])), - 'list_img_voxel_EVs': ('--vxf=' + repr([6, 7, 8, 9, 3]), - repr([6, 7, 8, 9, 3])) + "demean_data": ("-D", True), + "one_sample_gmean": ("-1", True), + "mask_image": ("-m inp_mask", "inp_mask"), + "design_matrix": ("-d design.mat", "design.mat"), + "t_contrast": ("-t input.con", "input.con"), + "f_contrast": ("-f input.fts", "input.fts"), + "xchange_block_labels": ("-e design.grp", "design.grp"), + "print_unique_perm": ("-q", True), + "print_info_parallelMode": ("-Q", True), + "num_permutations": ("-n 10", 10), + "vox_pvalus": ("-x", True), + "fstats_only": ("--fonly", True), + "thresh_free_cluster": ("-T", True), + "thresh_free_cluster_2Dopt": ("--T2", True), + "cluster_thresholding": ("-c 0.20", 0.20), + "cluster_mass_thresholding": ("-C 0.40", 0.40), + "fcluster_thresholding": ("-F 0.10", 0.10), + "fcluster_mass_thresholding": ("-S 0.30", 0.30), + "variance_smoothing": ("-v 0.20", 0.20), + "diagnostics_off": ("--quiet", True), + "output_raw": ("-R", True), + "output_perm_vect": ("-P", True), + "int_seed": ("--seed=20", 20), + "TFCE_height_param": ("--tfce_H=0.11", 0.11), + "TFCE_extent_param": ("--tfce_E=0.50", 0.50), + "TFCE_connectivity": ("--tfce_C=0.30", 0.30), + "list_num_voxel_EVs_pos": ("--vxl=" + repr([1, 2, 3, 4]), repr([1, 2, 3, 4])), + "list_img_voxel_EVs": ("--vxf=" + repr([6, 7, 8, 9, 3]), repr([6, 7, 8, 9, 3])), } for name, settings in list(opt_map.items()): rand4 = fsl.Randomise_parallel( - input_4D='infile', output_rootname='root', **{ - name: settings[1] - }) - assert rand4.cmdline == rand4.cmd + ' -i infile -o root ' + settings[0] + input_4D="infile", output_rootname="root", **{name: settings[1]} + ) + assert rand4.cmdline == rand4.cmd + " -i infile -o root " + settings[0] # test proj_thresh -@pytest.mark.xfail( - reason="These tests are skipped until we clean up some of this code") +@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Proj_thresh(): proj = fsl.ProjThresh() # make sure command gets called - assert proj.cmd == 'proj_thresh' + assert proj.cmd == "proj_thresh" # test raising error with mandatory args absent with pytest.raises(ValueError): proj.run() # .inputs based parameters setting - proj.inputs.volumes = ['vol1', 'vol2', 'vol3'] + proj.inputs.volumes = ["vol1", "vol2", "vol3"] proj.inputs.threshold = 3 - assert proj.cmdline == 'proj_thresh vol1 vol2 vol3 3' + assert proj.cmdline == "proj_thresh vol1 vol2 vol3 3" - proj2 = fsl.ProjThresh(threshold=10, volumes=['vola', 'volb']) - assert proj2.cmdline == 'proj_thresh vola volb 10' + proj2 = fsl.ProjThresh(threshold=10, volumes=["vola", "volb"]) + assert proj2.cmdline == "proj_thresh vola volb 10" # .run based 
parameters setting proj3 = fsl.ProjThresh() - results = proj3.run(volumes=['inp1', 'inp3', 'inp2'], threshold=2) - assert results.runtime.cmdline == 'proj_thresh inp1 inp3 inp2 2' + results = proj3.run(volumes=["inp1", "inp3", "inp2"], threshold=2) + assert results.runtime.cmdline == "proj_thresh inp1 inp3 inp2 2" assert results.runtime.returncode != 0 assert isinstance(results.interface.inputs.volumes, list) assert results.interface.inputs.threshold == 2 @@ -241,103 +229,103 @@ def test_Proj_thresh(): # test vec_reg -@pytest.mark.xfail( - reason="These tests are skipped until we clean up some of this code") +@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Vec_reg(): - vrg = fsl.VecReg() # make sure command gets called - assert vrg.cmd == 'vecreg' + assert vrg.cmd == "vecreg" # test raising error with mandatory args absent with pytest.raises(ValueError): vrg.run() # .inputs based parameters setting - vrg.inputs.infile = 'infile' - vrg.inputs.outfile = 'outfile' - vrg.inputs.refVolName = 'MNI152' - vrg.inputs.affineTmat = 'tmat.mat' - assert vrg.cmdline == 'vecreg -i infile -o outfile -r MNI152 -t tmat.mat' + vrg.inputs.infile = "infile" + vrg.inputs.outfile = "outfile" + vrg.inputs.refVolName = "MNI152" + vrg.inputs.affineTmat = "tmat.mat" + assert vrg.cmdline == "vecreg -i infile -o outfile -r MNI152 -t tmat.mat" # .run based parameter setting vrg2 = fsl.VecReg( - infile='infile2', - outfile='outfile2', - refVolName='MNI152', - affineTmat='tmat2.mat', - brainMask='nodif_brain_mask') + infile="infile2", + outfile="outfile2", + refVolName="MNI152", + affineTmat="tmat2.mat", + brainMask="nodif_brain_mask", + ) actualCmdline = sorted(vrg2.cmdline.split()) - cmd = 'vecreg -i infile2 -o outfile2 -r MNI152 -t tmat2.mat -m nodif_brain_mask' + cmd = "vecreg -i infile2 -o outfile2 -r MNI152 -t tmat2.mat -m nodif_brain_mask" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline vrg3 = fsl.VecReg() results = vrg3.run( - infile='infile3', - outfile='outfile3', - refVolName='MNI152', - affineTmat='tmat3.mat', + infile="infile3", + outfile="outfile3", + refVolName="MNI152", + affineTmat="tmat3.mat", ) - assert results.runtime.cmdline == \ - 'vecreg -i infile3 -o outfile3 -r MNI152 -t tmat3.mat' + assert ( + results.runtime.cmdline + == "vecreg -i infile3 -o outfile3 -r MNI152 -t tmat3.mat" + ) assert results.runtime.returncode != 0 - assert results.interface.inputs.infile == 'infile3' - assert results.interface.inputs.outfile == 'outfile3' - assert results.interface.inputs.refVolName == 'MNI152' - assert results.interface.inputs.affineTmat == 'tmat3.mat' + assert results.interface.inputs.infile == "infile3" + assert results.interface.inputs.outfile == "outfile3" + assert results.interface.inputs.refVolName == "MNI152" + assert results.interface.inputs.affineTmat == "tmat3.mat" # test arguments for opt_map opt_map = { - 'verbose': ('-v', True), - 'helpDoc': ('-h', True), - 'tensor': ('--tensor', True), - 'affineTmat': ('-t Tmat', 'Tmat'), - 'warpFile': ('-w wrpFile', 'wrpFile'), - 'interpolation': ('--interp=sinc', 'sinc'), - 'brainMask': ('-m mask', 'mask') + "verbose": ("-v", True), + "helpDoc": ("-h", True), + "tensor": ("--tensor", True), + "affineTmat": ("-t Tmat", "Tmat"), + "warpFile": ("-w wrpFile", "wrpFile"), + "interpolation": ("--interp=sinc", "sinc"), + "brainMask": ("-m mask", "mask"), } for name, settings in list(opt_map.items()): vrg4 = fsl.VecReg( - infile='infile', - outfile='outfile', - refVolName='MNI152', - **{ - 
name: settings[1] - }) - assert vrg4.cmdline == vrg4.cmd + \ - ' -i infile -o outfile -r MNI152 ' + settings[0] + infile="infile", + outfile="outfile", + refVolName="MNI152", + **{name: settings[1]} + ) + assert ( + vrg4.cmdline == vrg4.cmd + " -i infile -o outfile -r MNI152 " + settings[0] + ) # test find_the_biggest -@pytest.mark.xfail( - reason="These tests are skipped until we clean up some of this code") +@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Find_the_biggest(): fbg = fsl.FindTheBiggest() # make sure command gets called - assert fbg.cmd == 'find_the_biggest' + assert fbg.cmd == "find_the_biggest" # test raising error with mandatory args absent with pytest.raises(ValueError): fbg.run() # .inputs based parameters setting - fbg.inputs.infiles = 'seed*' - fbg.inputs.outfile = 'fbgfile' - assert fbg.cmdline == 'find_the_biggest seed* fbgfile' + fbg.inputs.infiles = "seed*" + fbg.inputs.outfile = "fbgfile" + assert fbg.cmdline == "find_the_biggest seed* fbgfile" - fbg2 = fsl.FindTheBiggest(infiles='seed2*', outfile='fbgfile2') - assert fbg2.cmdline == 'find_the_biggest seed2* fbgfile2' + fbg2 = fsl.FindTheBiggest(infiles="seed2*", outfile="fbgfile2") + assert fbg2.cmdline == "find_the_biggest seed2* fbgfile2" # .run based parameters setting fbg3 = fsl.FindTheBiggest() - results = fbg3.run(infiles='seed3', outfile='out3') - assert results.runtime.cmdline == 'find_the_biggest seed3 out3' + results = fbg3.run(infiles="seed3", outfile="out3") + assert results.runtime.cmdline == "find_the_biggest seed3 out3" # test arguments for opt_map # Find_the_biggest doesn't have an opt_map{} @@ -361,8 +349,9 @@ def test_tbss_skeleton(create_files_in_directory): # First by implicit argument skeletor.inputs.skeleton_file = True - assert skeletor.cmdline == \ - "tbss_skeleton -i a.nii -o %s" % os.path.join(newdir, "a_skeleton.nii") + assert skeletor.cmdline == "tbss_skeleton -i a.nii -o %s" % os.path.join( + newdir, "a_skeleton.nii" + ) # Now with a specific name skeletor.inputs.skeleton_file = "old_boney.nii" @@ -381,15 +370,19 @@ def test_tbss_skeleton(create_files_in_directory): bones.inputs.data_file = "b.nii" # Even though that's silly # Now we get a command line - assert bones.cmdline == \ - "tbss_skeleton -i a.nii -p 0.200 b.nii %s b.nii %s" % (Info.standard_image("LowerCingulum_1mm.nii.gz"), - os.path.join(newdir, "b_skeletonised.nii")) + assert bones.cmdline == "tbss_skeleton -i a.nii -p 0.200 b.nii {} b.nii {}".format( + Info.standard_image("LowerCingulum_1mm.nii.gz"), + os.path.join(newdir, "b_skeletonised.nii"), + ) # Can we specify a mask? 
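# Editor's note: a minimal sketch, not part of this diff, of the idiom the
# next lines rely on -- assigning ``Undefined`` to an input trait unsets it,
# so its flag drops back out of the generated command line. ``fsl.BET`` and
# its "-m" (mask) flag are only an example, and "a.nii" is assumed to exist
# in the working directory (nipype validates File inputs on assignment).
from nipype.interfaces import fsl
from nipype.interfaces.base import Undefined

btr = fsl.BET(in_file="a.nii", mask=True)
assert "-m" in btr.cmdline        # the Bool trait renders its argstr
btr.inputs.mask = Undefined       # unset the trait again
assert "-m" not in btr.cmdline    # flag is gone (assuming cwd contains no "-m")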
bones.inputs.use_cingulum_mask = Undefined bones.inputs.search_mask_file = "a.nii" - assert bones.cmdline == \ - "tbss_skeleton -i a.nii -p 0.200 b.nii a.nii b.nii %s" % os.path.join(newdir, "b_skeletonised.nii") + assert ( + bones.cmdline + == "tbss_skeleton -i a.nii -p 0.200 b.nii a.nii b.nii %s" + % os.path.join(newdir, "b_skeletonised.nii") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -410,15 +403,20 @@ def test_distancemap(create_files_in_directory): # It should assert mapper.cmdline == "distancemap --out=%s --in=a.nii" % os.path.join( - newdir, "a_dstmap.nii") + newdir, "a_dstmap.nii" + ) # And we should be able to write out a maxima map mapper.inputs.local_max_file = True - assert mapper.cmdline == \ - "distancemap --out=%s --in=a.nii --localmax=%s" % (os.path.join(newdir, "a_dstmap.nii"), - os.path.join(newdir, "a_lclmax.nii")) + assert mapper.cmdline == "distancemap --out={} --in=a.nii --localmax={}".format( + os.path.join(newdir, "a_dstmap.nii"), + os.path.join(newdir, "a_lclmax.nii"), + ) # And call it whatever we want mapper.inputs.local_max_file = "max.nii" - assert mapper.cmdline == \ - "distancemap --out=%s --in=a.nii --localmax=max.nii" % os.path.join(newdir, "a_dstmap.nii") + assert ( + mapper.cmdline + == "distancemap --out=%s --in=a.nii --localmax=max.nii" + % os.path.join(newdir, "a_dstmap.nii") + ) diff --git a/nipype/interfaces/fsl/tests/test_epi.py b/nipype/interfaces/fsl/tests/test_epi.py index bf025e991d..715da57f7d 100644 --- a/nipype/interfaces/fsl/tests/test_epi.py +++ b/nipype/interfaces/fsl/tests/test_epi.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -import os import pytest from nipype.testing.fixtures import create_files_in_directory @@ -17,7 +15,7 @@ def test_eddy_correct2(create_files_in_directory): eddy = fsl.EddyCorrect() # make sure command gets called - assert eddy.cmd == 'eddy_correct' + assert eddy.cmd == "eddy_correct" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -25,14 +23,13 @@ def test_eddy_correct2(create_files_in_directory): # .inputs based parameters setting eddy.inputs.in_file = filelist[0] - eddy.inputs.out_file = 'foo_eddc.nii' + eddy.inputs.out_file = "foo_eddc.nii" eddy.inputs.ref_num = 100 - assert eddy.cmdline == 'eddy_correct %s foo_eddc.nii 100' % filelist[0] + assert eddy.cmdline == "eddy_correct %s foo_eddc.nii 100" % filelist[0] # .run based parameter setting - eddy2 = fsl.EddyCorrect( - in_file=filelist[0], out_file='foo_ec.nii', ref_num=20) - assert eddy2.cmdline == 'eddy_correct %s foo_ec.nii 20' % filelist[0] + eddy2 = fsl.EddyCorrect(in_file=filelist[0], out_file="foo_ec.nii", ref_num=20) + assert eddy2.cmdline == "eddy_correct %s foo_ec.nii 20" % filelist[0] # test arguments for opt_map # eddy_correct class doesn't have opt_map{} diff --git a/nipype/interfaces/fsl/tests/test_maths.py b/nipype/interfaces/fsl/tests/test_maths.py index b8aa41dc6b..189fff8b3f 100644 --- a/nipype/interfaces/fsl/tests/test_maths.py +++ b/nipype/interfaces/fsl/tests/test_maths.py @@ -1,9 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import division -from __future__ import unicode_literals -from builtins import open import os import numpy as np @@ -31,26 +27,26 @@ def test_maths_base(create_files_in_directory_plus_output_type): # Set an in file maths.inputs.in_file 
= "a.nii" - out_file = "a_maths{}".format(out_ext) + out_file = f"a_maths{out_ext}" # Now test the most basic command line - assert maths.cmdline == "fslmaths a.nii {}".format( - os.path.join(testdir, out_file)) + assert maths.cmdline == f"fslmaths a.nii {os.path.join(testdir, out_file)}" # Now test that we can set the various data types dtypes = ["float", "char", "int", "short", "double", "input"] int_cmdline = "fslmaths -dt {} a.nii " + os.path.join(testdir, out_file) - out_cmdline = "fslmaths a.nii " + os.path.join(testdir, - out_file) + " -odt {}" - duo_cmdline = "fslmaths -dt {} a.nii " + os.path.join( - testdir, out_file) + " -odt {}" + out_cmdline = "fslmaths a.nii " + os.path.join(testdir, out_file) + " -odt {}" + duo_cmdline = ( + "fslmaths -dt {} a.nii " + os.path.join(testdir, out_file) + " -odt {}" + ) for dtype in dtypes: foo = fsl.MathsCommand(in_file="a.nii", internal_datatype=dtype) assert foo.cmdline == int_cmdline.format(dtype) bar = fsl.MathsCommand(in_file="a.nii", output_datatype=dtype) assert bar.cmdline == out_cmdline.format(dtype) foobar = fsl.MathsCommand( - in_file="a.nii", internal_datatype=dtype, output_datatype=dtype) + in_file="a.nii", internal_datatype=dtype, output_datatype=dtype + ) assert foobar.cmdline == duo_cmdline.format(dtype, dtype) # Test that we can ask for an outfile name @@ -84,8 +80,7 @@ def test_changedt(create_files_in_directory_plus_output_type): dtypes = ["float", "char", "int", "short", "double", "input"] cmdline = "fslmaths a.nii b.nii -odt {}" for dtype in dtypes: - foo = fsl.MathsCommand( - in_file="a.nii", out_file="b.nii", output_datatype=dtype) + foo = fsl.MathsCommand(in_file="a.nii", out_file="b.nii", output_datatype=dtype) assert foo.cmdline == cmdline.format(dtype) @@ -105,18 +100,20 @@ def test_threshold(create_files_in_directory_plus_output_type): # Test the various opstrings cmdline = "fslmaths a.nii {} b.nii" - for val in [0, 0., -1, -1.5, -0.5, 0.5, 3, 400, 400.5]: + for val in [0, 0.0, -1, -1.5, -0.5, 0.5, 3, 400, 400.5]: thresh.inputs.thresh = val - assert thresh.cmdline == cmdline.format("-thr {:.10f}".format(val)) + assert thresh.cmdline == cmdline.format(f"-thr {val:.10f}") - val = "{:.10f}".format(42) + val = f"{42:.10f}" thresh = fsl.Threshold( - in_file="a.nii", out_file="b.nii", thresh=42, use_robust_range=True) + in_file="a.nii", out_file="b.nii", thresh=42, use_robust_range=True + ) assert thresh.cmdline == cmdline.format("-thrp " + val) thresh.inputs.use_nonzero_voxels = True assert thresh.cmdline == cmdline.format("-thrP " + val) thresh = fsl.Threshold( - in_file="a.nii", out_file="b.nii", thresh=42, direction="above") + in_file="a.nii", out_file="b.nii", thresh=42, direction="above" + ) assert thresh.cmdline == cmdline.format("-uthr " + val) thresh.inputs.use_robust_range = True assert thresh.cmdline == cmdline.format("-uthrp " + val) @@ -134,7 +131,7 @@ def test_meanimage(create_files_in_directory_plus_output_type): # Test the underlying command assert meaner.cmd == "fslmaths" - # Test the defualt opstring + # Test the default opstring assert meaner.cmdline == "fslmaths a.nii -Tmean b.nii" # Test the other dimensions @@ -146,7 +143,8 @@ def test_meanimage(create_files_in_directory_plus_output_type): # Test the auto naming meaner = fsl.MeanImage(in_file="a.nii") assert meaner.cmdline == "fslmaths a.nii -Tmean {}".format( - os.path.join(testdir, "a_mean{}".format(out_ext))) + os.path.join(testdir, f"a_mean{out_ext}") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -159,7 +157,7 @@ def 
test_stdimage(create_files_in_directory_plus_output_type): # Test the underlying command assert stder.cmd == "fslmaths" - # Test the defualt opstring + # Test the default opstring assert stder.cmdline == "fslmaths a.nii -Tstd b.nii" # Test the other dimensions @@ -169,9 +167,10 @@ def test_stdimage(create_files_in_directory_plus_output_type): assert stder.cmdline == cmdline.format(dim) # Test the auto naming - stder = fsl.StdImage(in_file="a.nii", output_type='NIFTI') + stder = fsl.StdImage(in_file="a.nii", output_type="NIFTI") assert stder.cmdline == "fslmaths a.nii -Tstd {}".format( - os.path.join(testdir, "a_std.nii")) + os.path.join(testdir, "a_std.nii") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -184,7 +183,7 @@ def test_maximage(create_files_in_directory_plus_output_type): # Test the underlying command assert maxer.cmd == "fslmaths" - # Test the defualt opstring + # Test the default opstring assert maxer.cmdline == "fslmaths a.nii -Tmax b.nii" # Test the other dimensions @@ -196,7 +195,8 @@ def test_maximage(create_files_in_directory_plus_output_type): # Test the auto naming maxer = fsl.MaxImage(in_file="a.nii") assert maxer.cmdline == "fslmaths a.nii -Tmax {}".format( - os.path.join(testdir, "a_max{}".format(out_ext))) + os.path.join(testdir, f"a_max{out_ext}") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -215,19 +215,18 @@ def test_smooth(create_files_in_directory_plus_output_type): # Test smoothing kernels cmdline = "fslmaths a.nii -s {:.5f} b.nii" - for val in [0, 1., 1, 25, 0.5, 8 / 3.]: - smoother = fsl.IsotropicSmooth( - in_file="a.nii", out_file="b.nii", sigma=val) + for val in [0, 1.0, 1, 25, 0.5, 8 / 3.0]: + smoother = fsl.IsotropicSmooth(in_file="a.nii", out_file="b.nii", sigma=val) assert smoother.cmdline == cmdline.format(val) - smoother = fsl.IsotropicSmooth( - in_file="a.nii", out_file="b.nii", fwhm=val) + smoother = fsl.IsotropicSmooth(in_file="a.nii", out_file="b.nii", fwhm=val) val = float(val) / np.sqrt(8 * np.log(2)) assert smoother.cmdline == cmdline.format(val) # Test automatic naming smoother = fsl.IsotropicSmooth(in_file="a.nii", sigma=5) assert smoother.cmdline == "fslmaths a.nii -s {:.5f} {}".format( - 5, os.path.join(testdir, "a_smooth{}".format(out_ext))) + 5, os.path.join(testdir, f"a_smooth{out_ext}") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -251,7 +250,8 @@ def test_mask(create_files_in_directory_plus_output_type): # Test auto name generation masker = fsl.ApplyMask(in_file="a.nii", mask_file="b.nii") assert masker.cmdline == "fslmaths a.nii -mas b.nii " + os.path.join( - testdir, "a_masked{}".format(out_ext)) + testdir, f"a_masked{out_ext}" + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -272,15 +272,16 @@ def test_dilation(create_files_in_directory_plus_output_type): for op in ["mean", "modal", "max"]: cv = dict(mean="M", modal="D", max="F") diller.inputs.operation = op - assert diller.cmdline == "fslmaths a.nii -dil{} b.nii".format(cv[op]) + assert diller.cmdline == f"fslmaths a.nii -dil{cv[op]} b.nii" # Now test the different kernel options for k in ["3D", "2D", "box", "boxv", "gauss", "sphere"]: for size in [1, 1.5, 5]: diller.inputs.kernel_shape = k diller.inputs.kernel_size = size - assert diller.cmdline == "fslmaths a.nii -kernel {} {:.4f} -dilF b.nii".format( - k, size) + assert ( + diller.cmdline == f"fslmaths a.nii -kernel {k} {size:.4f} -dilF b.nii" + ) # Test that we can use a file kernel f = open("kernel.txt", "w").close() @@ -293,7 +294,8 @@ def 
test_dilation(create_files_in_directory_plus_output_type): # Test that we don't need to request an out name dil = fsl.DilateImage(in_file="a.nii", operation="max") assert dil.cmdline == "fslmaths a.nii -dilF {}".format( - os.path.join(testdir, "a_dil{}".format(out_ext))) + os.path.join(testdir, f"a_dil{out_ext}") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -316,7 +318,8 @@ def test_erosion(create_files_in_directory_plus_output_type): # Test that we don't need to request an out name erode = fsl.ErodeImage(in_file="a.nii") assert erode.cmdline == "fslmaths a.nii -ero {}".format( - os.path.join(testdir, "a_ero{}".format(out_ext))) + os.path.join(testdir, f"a_ero{out_ext}") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -336,12 +339,13 @@ def test_spatial_filter(create_files_in_directory_plus_output_type): # Test the different operations for op in ["mean", "meanu", "median"]: filter.inputs.operation = op - assert filter.cmdline == "fslmaths a.nii -f{} b.nii".format(op) + assert filter.cmdline == f"fslmaths a.nii -f{op} b.nii" # Test that we don't need to ask for an out name filter = fsl.SpatialFilter(in_file="a.nii", operation="mean") assert filter.cmdline == "fslmaths a.nii -fmean {}".format( - os.path.join(testdir, "a_filt{}".format(out_ext))) + os.path.join(testdir, f"a_filt{out_ext}") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -359,19 +363,17 @@ def test_unarymaths(create_files_in_directory_plus_output_type): maths.run() # Test the different operations - ops = [ - "exp", "log", "sin", "cos", "sqr", "sqrt", "recip", "abs", "bin", - "index" - ] + ops = ["exp", "log", "sin", "cos", "sqr", "sqrt", "recip", "abs", "bin", "index"] for op in ops: maths.inputs.operation = op - assert maths.cmdline == "fslmaths a.nii -{} b.nii".format(op) + assert maths.cmdline == f"fslmaths a.nii -{op} b.nii" # Test that we don't need to ask for an out file for op in ops: maths = fsl.UnaryMaths(in_file="a.nii", operation=op) assert maths.cmdline == "fslmaths a.nii -{} {}".format( - op, os.path.join(testdir, "a_{}{}".format(op, out_ext))) + op, os.path.join(testdir, f"a_{op}{out_ext}") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -390,26 +392,25 @@ def test_binarymaths(create_files_in_directory_plus_output_type): # Test the different operations ops = ["add", "sub", "mul", "div", "rem", "min", "max"] - operands = ["b.nii", -2, -0.5, 0, .123456, np.pi, 500] + operands = ["b.nii", -2, -0.5, 0, 0.123456, np.pi, 500] for op in ops: for ent in operands: - maths = fsl.BinaryMaths( - in_file="a.nii", out_file="c.nii", operation=op) + maths = fsl.BinaryMaths(in_file="a.nii", out_file="c.nii", operation=op) if ent == "b.nii": maths.inputs.operand_file = ent - assert maths.cmdline == "fslmaths a.nii -{} b.nii c.nii".format( - op) + assert maths.cmdline == f"fslmaths a.nii -{op} b.nii c.nii" else: maths.inputs.operand_value = ent assert maths.cmdline == "fslmaths a.nii -{} {:.8f} c.nii".format( - op, ent) + op, ent + ) # Test that we don't need to ask for an out file for op in ops: - maths = fsl.BinaryMaths( - in_file="a.nii", operation=op, operand_file="b.nii") + maths = fsl.BinaryMaths(in_file="a.nii", operation=op, operand_file="b.nii") assert maths.cmdline == "fslmaths a.nii -{} b.nii {}".format( - op, os.path.join(testdir, "a_maths{}".format(out_ext))) + op, os.path.join(testdir, f"a_maths{out_ext}") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -428,19 +429,18 @@ def 
test_multimaths(create_files_in_directory_plus_output_type): # Test a few operations maths.inputs.operand_files = ["a.nii", "b.nii"] - opstrings = [ - "-add %s -div %s", "-max 1 -sub %s -min %s", "-mas %s -add %s" - ] + opstrings = ["-add %s -div %s", "-max 1 -sub %s -min %s", "-mas %s -add %s"] for ostr in opstrings: maths.inputs.op_string = ostr - assert maths.cmdline == "fslmaths a.nii %s c.nii" % ostr % ("a.nii", - "b.nii") + assert maths.cmdline == "fslmaths a.nii %s c.nii" % ostr % ("a.nii", "b.nii") # Test that we don't need to ask for an out file maths = fsl.MultiImageMaths( - in_file="a.nii", op_string="-add %s -mul 5", operand_files=["b.nii"]) - assert maths.cmdline == \ - "fslmaths a.nii -add b.nii -mul 5 %s" % os.path.join(testdir, "a_maths%s" % out_ext) + in_file="a.nii", op_string="-add %s -mul 5", operand_files=["b.nii"] + ) + assert maths.cmdline == "fslmaths a.nii -add b.nii -mul 5 %s" % os.path.join( + testdir, "a_maths%s" % out_ext + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -462,9 +462,11 @@ def test_tempfilt(create_files_in_directory_plus_output_type): filt.inputs.highpass_sigma = win[0] filt.inputs.lowpass_sigma = win[1] assert filt.cmdline == "fslmaths a.nii -bptf {:.6f} {:.6f} b.nii".format( - win[0], win[1]) + win[0], win[1] + ) # Test that we don't need to ask for an out file filt = fsl.TemporalFilter(in_file="a.nii", highpass_sigma=64) - assert filt.cmdline == \ - "fslmaths a.nii -bptf 64.000000 -1.000000 {}".format(os.path.join(testdir, "a_filt{}".format(out_ext))) + assert filt.cmdline == "fslmaths a.nii -bptf 64.000000 -1.000000 {}".format( + os.path.join(testdir, f"a_filt{out_ext}") + ) diff --git a/nipype/interfaces/fsl/tests/test_model.py b/nipype/interfaces/fsl/tests/test_model.py index 8b8d0b7b40..5b43850c43 100644 --- a/nipype/interfaces/fsl/tests/test_model.py +++ b/nipype/interfaces/fsl/tests/test_model.py @@ -1,32 +1,40 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from builtins import open - -import os import pytest import nipype.interfaces.fsl.model as fsl from nipype.interfaces.fsl import no_fsl +from pathlib import Path +from ....pipeline import engine as pe @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_MultipleRegressDesign(tmpdir): - tmpdir.chdir() - foo = fsl.MultipleRegressDesign() - foo.inputs.regressors = dict( - voice_stenght=[1, 1, 1], age=[0.2, 0.4, 0.5], BMI=[1, -1, 2]) - con1 = ['voice_and_age', 'T', ['age', 'voice_stenght'], [0.5, 0.5]] - con2 = ['just_BMI', 'T', ['BMI'], [1]] - foo.inputs.contrasts = [con1, con2, ['con3', 'F', [con1, con2]]] - res = foo.run() + designer = pe.Node( + fsl.MultipleRegressDesign(), name="designer", base_dir=str(tmpdir) + ) + designer.inputs.regressors = dict( + voice_stenght=[1, 1, 1], age=[0.2, 0.4, 0.5], BMI=[1, -1, 2] + ) + con1 = ["voice_and_age", "T", ["age", "voice_stenght"], [0.5, 0.5]] + con2 = ["just_BMI", "T", ["BMI"], [1]] + designer.inputs.contrasts = [ + con1, + con2, + ["con3", "F", [con1, con2]], + ["con4", "F", [con2]], + ] + res = designer.run() + outputs = res.outputs.get_traitsfree() + + for ftype in ["mat", "con", "fts", "grp"]: + assert Path(outputs["design_" + ftype]).exists() - for ii in ["mat", "con", "fts", "grp"]: - assert getattr(res.outputs, - "design_" + ii) == tmpdir.join('design.' 
+ ii).strpath + expected_content = {} - design_mat_expected_content = """/NumWaves 3 + expected_content[ + "design_mat" + ] = """/NumWaves 3 /NumPoints 3 /PPheights 3.000000e+00 5.000000e-01 1.000000e+00 @@ -36,7 +44,9 @@ def test_MultipleRegressDesign(tmpdir): 2.000000e+00 5.000000e-01 1.000000e+00 """ - design_con_expected_content = """/ContrastName1 voice_and_age + expected_content[ + "design_con" + ] = """/ContrastName1 voice_and_age /ContrastName2 just_BMI /NumWaves 3 /NumContrasts 2 @@ -48,14 +58,19 @@ def test_MultipleRegressDesign(tmpdir): 1.000000e+00 0.000000e+00 0.000000e+00 """ - design_fts_expected_content = """/NumWaves 2 -/NumContrasts 1 + expected_content[ + "design_fts" + ] = """/NumWaves 2 +/NumContrasts 2 /Matrix 1 1 +0 1 """ - design_grp_expected_content = """/NumWaves 1 + expected_content[ + "design_grp" + ] = """/NumWaves 1 /NumPoints 3 /Matrix @@ -63,6 +78,6 @@ def test_MultipleRegressDesign(tmpdir): 1 1 """ - for ii in ["mat", "con", "fts", "grp"]: - assert tmpdir.join('design.' + ii).read() == eval( - "design_" + ii + "_expected_content") + for ftype in ["mat", "con", "fts", "grp"]: + outfile = "design_" + ftype + assert Path(outputs[outfile]).read_text() == expected_content[outfile] diff --git a/nipype/interfaces/fsl/tests/test_preprocess.py b/nipype/interfaces/fsl/tests/test_preprocess.py index 4b387201cf..143179a5ec 100644 --- a/nipype/interfaces/fsl/tests/test_preprocess.py +++ b/nipype/interfaces/fsl/tests/test_preprocess.py @@ -1,15 +1,9 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals -from builtins import str # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from builtins import open - import os from copy import deepcopy import pytest -import pdb from nipype.utils.filemanip import split_filename, ensure_list from .. import preprocess as fsl from nipype.interfaces.fsl import Info @@ -18,8 +12,7 @@ def fsl_name(obj, fname): - """Create valid fsl name, including file extension for output type. - """ + """Create valid fsl name, including file extension for output type.""" ext = Info.output_type_to_ext(obj.inputs.output_type) return fname + ext @@ -27,7 +20,7 @@ def fsl_name(obj, fname): @pytest.fixture() def setup_infile(tmpdir): ext = Info.output_type_to_ext(Info.output_type()) - tmp_infile = tmpdir.join('foo' + ext) + tmp_infile = tmpdir.join("foo" + ext) tmp_infile.open("w") return (tmp_infile.strpath, tmpdir.strpath) @@ -35,8 +28,11 @@ def setup_infile(tmpdir): @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_bet(setup_infile): tmp_infile, tp_dir = setup_infile + # BET converts the in_file path to be relative to prevent + # failure with long paths. 
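# Editor's note: an illustrative sketch, not part of this diff, of the
# path-shortening trick applied on the next line. ``os.path.relpath`` turns
# a long absolute tmpdir path into one relative to the working directory so
# the generated bet command line stays short; the paths here are made up.
import os

infile = "/tmp/pytest-0/test_bet0/foo.nii.gz"          # hypothetical tmpdir file
print(os.path.relpath(infile, start="/tmp/pytest-0"))  # -> test_bet0/foo.nii.gz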
+ tmp_infile = os.path.relpath(tmp_infile, start=os.getcwd()) better = fsl.BET() - assert better.cmd == 'bet' + assert better.cmd == "bet" # Test raising error with mandatory args absent with pytest.raises(ValueError): @@ -44,19 +40,18 @@ def test_bet(setup_infile): # Test generated outfile name better.inputs.in_file = tmp_infile - outfile = fsl_name(better, 'foo_brain') - outpath = os.path.join(os.getcwd(), outfile) - realcmd = 'bet %s %s' % (tmp_infile, outpath) + outfile = fsl_name(better, "foo_brain") + realcmd = f"bet {tmp_infile} {outfile}" assert better.cmdline == realcmd # Test specified outfile name - outfile = fsl_name(better, '/newdata/bar') + outfile = fsl_name(better, "/newdata/bar") better.inputs.out_file = outfile - realcmd = 'bet %s %s' % (tmp_infile, outfile) + realcmd = f"bet {tmp_infile} {outfile}" assert better.cmdline == realcmd # infile foo.nii doesn't exist def func(): - better.run(in_file='foo2.nii', out_file='bar.nii') + better.run(in_file="foo2.nii", out_file="bar.nii") with pytest.raises(TraitError): func() @@ -64,17 +59,17 @@ def func(): # Our options and some test values for them # Should parallel the opt_map structure in the class for clarity opt_map = { - 'outline': ('-o', True), - 'mask': ('-m', True), - 'skull': ('-s', True), - 'no_output': ('-n', True), - 'frac': ('-f 0.40', 0.4), - 'vertical_gradient': ('-g 0.75', 0.75), - 'radius': ('-r 20', 20), - 'center': ('-c 54 75 80', [54, 75, 80]), - 'threshold': ('-t', True), - 'mesh': ('-e', True), - 'surfaces': ('-A', True) + "outline": ("-o", True), + "mask": ("-m", True), + "skull": ("-s", True), + "no_output": ("-n", True), + "frac": ("-f 0.40", 0.4), + "vertical_gradient": ("-g 0.75", 0.75), + "radius": ("-r 20", 20), + "center": ("-c 54 75 80", [54, 75, 80]), + "threshold": ("-t", True), + "mesh": ("-e", True), + "surfaces": ("-A", True), # 'verbose': ('-v', True), # 'flags': ('--i-made-this-up', '--i-made-this-up'), } @@ -82,13 +77,12 @@ def func(): # test each of our arguments better = fsl.BET() - outfile = fsl_name(better, 'foo_brain') - outpath = os.path.join(os.getcwd(), outfile) + outfile = fsl_name(better, "foo_brain") for name, settings in list(opt_map.items()): better = fsl.BET(**{name: settings[1]}) # Add mandatory input better.inputs.in_file = tmp_infile - realcmd = ' '.join([better.cmd, tmp_infile, outpath, settings[0]]) + realcmd = " ".join([better.cmd, tmp_infile, outfile, settings[0]]) assert better.cmdline == realcmd @@ -103,62 +97,61 @@ def test_fast(setup_infile): fasted = fsl.FAST(in_files=tmp_infile, verbose=True) fasted2 = fsl.FAST(in_files=[tmp_infile, tmp_infile], verbose=True) - assert faster.cmd == 'fast' + assert faster.cmd == "fast" assert faster.inputs.verbose assert faster.inputs.manual_seg == Undefined assert faster.inputs != fasted.inputs - assert fasted.cmdline == 'fast -v -S 1 %s' % (tmp_infile) - assert fasted2.cmdline == 'fast -v -S 2 %s %s' % (tmp_infile, tmp_infile) + assert fasted.cmdline == "fast -v -S 1 %s" % (tmp_infile) + assert fasted2.cmdline == f"fast -v -S 2 {tmp_infile} {tmp_infile}" faster = fsl.FAST() faster.inputs.in_files = tmp_infile - assert faster.cmdline == 'fast -S 1 %s' % (tmp_infile) + assert faster.cmdline == "fast -S 1 %s" % (tmp_infile) faster.inputs.in_files = [tmp_infile, tmp_infile] - assert faster.cmdline == 'fast -S 2 %s %s' % (tmp_infile, tmp_infile) + assert faster.cmdline == f"fast -S 2 {tmp_infile} {tmp_infile}" # Our options and some test values for them # Should parallel the opt_map structure in the class for clarity opt_map = { - 
'number_classes': ('-n 4', 4), - 'bias_iters': ('-I 5', 5), - 'bias_lowpass': ('-l 15', 15), - 'img_type': ('-t 2', 2), - 'init_seg_smooth': ('-f 0.035', 0.035), - 'segments': ('-g', True), - 'init_transform': ('-a %s' % (tmp_infile), '%s' % (tmp_infile)), - 'other_priors': - ('-A %s %s %s' % (tmp_infile, tmp_infile, tmp_infile), - (['%s' % (tmp_infile), - '%s' % (tmp_infile), - '%s' % (tmp_infile)])), - 'no_pve': ('--nopve', True), - 'output_biasfield': ('-b', True), - 'output_biascorrected': ('-B', True), - 'no_bias': ('-N', True), - 'out_basename': ('-o fasted', 'fasted'), - 'use_priors': ('-P', True), - 'segment_iters': ('-W 14', 14), - 'mixel_smooth': ('-R 0.25', 0.25), - 'iters_afterbias': ('-O 3', 3), - 'hyper': ('-H 0.15', 0.15), - 'verbose': ('-v', True), - 'manual_seg': ('-s %s' % (tmp_infile), '%s' % (tmp_infile)), - 'probability_maps': ('-p', True), + "number_classes": ("-n 4", 4), + "bias_iters": ("-I 5", 5), + "bias_lowpass": ("-l 15", 15), + "img_type": ("-t 2", 2), + "init_seg_smooth": ("-f 0.035", 0.035), + "segments": ("-g", True), + "init_transform": ("-a %s" % (tmp_infile), "%s" % (tmp_infile)), + "other_priors": ( + f"-A {tmp_infile} {tmp_infile} {tmp_infile}", + (["%s" % (tmp_infile), "%s" % (tmp_infile), "%s" % (tmp_infile)]), + ), + "no_pve": ("--nopve", True), + "output_biasfield": ("-b", True), + "output_biascorrected": ("-B", True), + "no_bias": ("-N", True), + "out_basename": ("-o fasted", "fasted"), + "use_priors": ("-P", True), + "segment_iters": ("-W 14", 14), + "mixel_smooth": ("-R 0.25", 0.25), + "iters_afterbias": ("-O 3", 3), + "hyper": ("-H 0.15", 0.15), + "verbose": ("-v", True), + "manual_seg": ("-s %s" % (tmp_infile), "%s" % (tmp_infile)), + "probability_maps": ("-p", True), } # test each of our arguments for name, settings in list(opt_map.items()): faster = fsl.FAST(in_files=tmp_infile, **{name: settings[1]}) - assert faster.cmdline == ' '.join( - [faster.cmd, settings[0], - "-S 1 %s" % tmp_infile]) + assert faster.cmdline == " ".join( + [faster.cmd, settings[0], "-S 1 %s" % tmp_infile] + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fast_list_outputs(setup_infile, tmpdir): - ''' By default (no -o), FSL's fast command outputs files into the same + """By default (no -o), FSL's fast command outputs files into the same directory as the input files. 
If the flag -o is set, it outputs files into - the cwd ''' + the cwd""" def _run_and_test(opts, output_base): outputs = fsl.FAST(**opts)._list_outputs() @@ -166,21 +159,22 @@ def _run_and_test(opts, output_base): if output: for filename in ensure_list(output): assert os.path.realpath(filename).startswith( - os.path.realpath(output_base)) + os.path.realpath(output_base) + ) # set up tmp_infile, indir = setup_infile cwd = tmpdir.mkdir("new") cwd.chdir() assert indir != cwd.strpath - out_basename = 'a_basename' + out_basename = "a_basename" # run and test - opts = {'in_files': tmp_infile} + opts = {"in_files": tmp_infile} input_path, input_filename, input_ext = split_filename(tmp_infile) _run_and_test(opts, os.path.join(input_path, input_filename)) - opts['out_basename'] = out_basename + opts["out_basename"] = out_basename _run_and_test(opts, os.path.join(cwd.strpath, out_basename)) @@ -200,32 +194,36 @@ def test_flirt(setup_flirt): tmpdir, infile, reffile = setup_flirt flirter = fsl.FLIRT() - assert flirter.cmd == 'flirt' + assert flirter.cmd == "flirt" flirter.inputs.bins = 256 - flirter.inputs.cost = 'mutualinfo' + flirter.inputs.cost = "mutualinfo" flirted = fsl.FLIRT( in_file=infile, reference=reffile, - out_file='outfile', - out_matrix_file='outmat.mat', + out_file="outfile", + out_matrix_file="outmat.mat", bins=256, - cost='mutualinfo') + cost="mutualinfo", + ) flirt_est = fsl.FLIRT( in_file=infile, reference=reffile, - out_matrix_file='outmat.mat', + out_matrix_file="outmat.mat", bins=256, - cost='mutualinfo') + cost="mutualinfo", + ) assert flirter.inputs != flirted.inputs assert flirted.inputs != flirt_est.inputs assert flirter.inputs.bins == flirted.inputs.bins assert flirter.inputs.cost == flirt_est.inputs.cost - realcmd = 'flirt -in %s -ref %s -out outfile -omat outmat.mat ' \ - '-bins 256 -cost mutualinfo' % (infile, reffile) + realcmd = ( + "flirt -in %s -ref %s -out outfile -omat outmat.mat " + "-bins 256 -cost mutualinfo" % (infile, reffile) + ) assert flirted.cmdline == realcmd flirter = fsl.FLIRT() @@ -240,10 +238,14 @@ def test_flirt(setup_flirt): # Generate outfile and outmatrix pth, fname, ext = split_filename(infile) - outfile = fsl_name(flirter, '%s_flirt' % fname) - outmat = '%s_flirt.mat' % fname - realcmd = 'flirt -in %s -ref %s -out %s -omat %s' % (infile, reffile, - outfile, outmat) + outfile = fsl_name(flirter, "%s_flirt" % fname) + outmat = "%s_flirt.mat" % fname + realcmd = "flirt -in {} -ref {} -out {} -omat {}".format( + infile, + reffile, + outfile, + outmat, + ) assert flirter.cmdline == realcmd # test apply_xfm option @@ -255,10 +257,10 @@ def test_flirt(setup_flirt): axfm2 = deepcopy(axfm) # test uses_qform axfm.inputs.uses_qform = True - assert axfm.cmdline == (realcmd + ' -applyxfm -usesqform') + assert axfm.cmdline == (realcmd + " -applyxfm -usesqform") # test in_matrix_file axfm2.inputs.in_matrix_file = reffile - assert axfm2.cmdline == (realcmd + ' -applyxfm -init %s' % reffile) + assert axfm2.cmdline == (realcmd + " -applyxfm -init %s" % reffile) tmpfile = tmpdir.join("file4test.nii") tmpfile.open("w") @@ -266,39 +268,49 @@ def test_flirt(setup_flirt): # cmdline is updated correctly. 
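# Editor's note: an illustrative sketch, not part of this diff, of the trait
# introspection the loop below depends on. Every nipype input spec exposes
# its traits, and each trait carries the ``argstr`` template used to render
# it on the command line; the printed output is indicative only.
from nipype.interfaces import fsl

for name, spec in sorted(fsl.FLIRT.input_spec().traits().items()):
    if getattr(spec, "argstr", None):
        print(f"{name}: {spec.argstr}")   # e.g. "bins: -bins %d"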
for key, trait_spec in sorted(fsl.FLIRT.input_spec().traits().items()): # Skip mandatory inputs and the trait methods - if key in ('trait_added', 'trait_modified', 'in_file', 'reference', - 'environ', 'output_type', 'out_file', 'out_matrix_file', - 'in_matrix_file', 'apply_xfm', - 'resource_monitor', 'out_log', - 'save_log'): + if key in ( + "trait_added", + "trait_modified", + "in_file", + "reference", + "environ", + "output_type", + "out_file", + "out_matrix_file", + "in_matrix_file", + "apply_xfm", + "resource_monitor", + "out_log", + "save_log", + ): continue param = None value = None - if key == 'args': - param = '-v' - value = '-v' + if key == "args": + param = "-v" + value = "-v" elif isinstance(trait_spec.trait_type, File): value = tmpfile.strpath param = trait_spec.argstr % value elif trait_spec.default is False: param = trait_spec.argstr value = True - elif key in ('searchr_x', 'searchr_y', 'searchr_z'): + elif key in ("searchr_x", "searchr_y", "searchr_z"): value = [-45, 45] - param = trait_spec.argstr % ' '.join(str(elt) for elt in value) + param = trait_spec.argstr % " ".join(str(elt) for elt in value) else: value = trait_spec.default param = trait_spec.argstr % value - cmdline = 'flirt -in %s -ref %s' % (infile, reffile) + cmdline = f"flirt -in {infile} -ref {reffile}" # Handle autogeneration of outfile pth, fname, ext = split_filename(infile) - outfile = fsl_name(fsl.FLIRT(), '%s_flirt' % fname) - outfile = ' '.join(['-out', outfile]) + outfile = fsl_name(fsl.FLIRT(), "%s_flirt" % fname) + outfile = f"-out {outfile}" # Handle autogeneration of outmatrix - outmatrix = '%s_flirt.mat' % fname - outmatrix = ' '.join(['-omat', outmatrix]) + outmatrix = "%s_flirt.mat" % fname + outmatrix = f"-omat {outmatrix}" # Build command line - cmdline = ' '.join([cmdline, outfile, outmatrix, param]) + cmdline = f"{cmdline} {outfile} {outmatrix} {param}" flirter = fsl.FLIRT(in_file=infile, reference=reffile) setattr(flirter.inputs, key, value) assert flirter.cmdline == cmdline @@ -306,13 +318,13 @@ def test_flirt(setup_flirt): # Test OutputSpec flirter = fsl.FLIRT(in_file=infile, reference=reffile) pth, fname, ext = split_filename(infile) - flirter.inputs.out_file = ''.join(['foo', ext]) - flirter.inputs.out_matrix_file = ''.join(['bar', ext]) + flirter.inputs.out_file = f"foo{ext}" + flirter.inputs.out_matrix_file = f"bar{ext}" outs = flirter._list_outputs() - assert outs['out_file'] == \ - os.path.join(os.getcwd(), flirter.inputs.out_file) - assert outs['out_matrix_file'] == \ - os.path.join(os.getcwd(), flirter.inputs.out_matrix_file) + assert outs["out_file"] == os.path.join(os.getcwd(), flirter.inputs.out_file) + assert outs["out_matrix_file"] == os.path.join( + os.getcwd(), flirter.inputs.out_matrix_file + ) assert not isdefined(flirter.inputs.out_log) @@ -322,19 +334,19 @@ def test_mcflirt(setup_flirt): tmpdir, infile, reffile = setup_flirt frt = fsl.MCFLIRT() - assert frt.cmd == 'mcflirt' + assert frt.cmd == "mcflirt" # Test generated outfile name frt.inputs.in_file = infile _, nme = os.path.split(infile) outfile = os.path.join(os.getcwd(), nme) - outfile = frt._gen_fname(outfile, suffix='_mcf') - realcmd = 'mcflirt -in ' + infile + ' -out ' + outfile + outfile = frt._gen_fname(outfile, suffix="_mcf") + realcmd = "mcflirt -in " + infile + " -out " + outfile assert frt.cmdline == realcmd # Test specified outfile name - outfile2 = '/newdata/bar.nii' + outfile2 = "/newdata/bar.nii" frt.inputs.out_file = outfile2 - realcmd = 'mcflirt -in ' + infile + ' -out ' + outfile2 + realcmd = 
"mcflirt -in " + infile + " -out " + outfile2 assert frt.cmdline == realcmd @@ -344,36 +356,34 @@ def test_mcflirt_opt(setup_flirt): _, nme = os.path.split(infile) opt_map = { - 'cost': ('-cost mutualinfo', 'mutualinfo'), - 'bins': ('-bins 256', 256), - 'dof': ('-dof 6', 6), - 'ref_vol': ('-refvol 2', 2), - 'scaling': ('-scaling 6.00', 6.00), - 'smooth': ('-smooth 1.00', 1.00), - 'rotation': ('-rotation 2', 2), - 'stages': ('-stages 3', 3), - 'init': ('-init %s' % (infile), infile), - 'use_gradient': ('-gdt', True), - 'use_contour': ('-edge', True), - 'mean_vol': ('-meanvol', True), - 'stats_imgs': ('-stats', True), - 'save_mats': ('-mats', True), - 'save_plots': ('-plots', True), + "cost": ("-cost mutualinfo", "mutualinfo"), + "bins": ("-bins 256", 256), + "dof": ("-dof 6", 6), + "ref_vol": ("-refvol 2", 2), + "scaling": ("-scaling 6.00", 6.00), + "smooth": ("-smooth 1.00", 1.00), + "rotation": ("-rotation 2", 2), + "stages": ("-stages 3", 3), + "init": ("-init %s" % (infile), infile), + "use_gradient": ("-gdt", True), + "use_contour": ("-edge", True), + "mean_vol": ("-meanvol", True), + "stats_imgs": ("-stats", True), + "save_mats": ("-mats", True), + "save_plots": ("-plots", True), } for name, settings in list(opt_map.items()): fnt = fsl.MCFLIRT(in_file=infile, **{name: settings[1]}) outfile = os.path.join(os.getcwd(), nme) - outfile = fnt._gen_fname(outfile, suffix='_mcf') + outfile = fnt._gen_fname(outfile, suffix="_mcf") - instr = '-in %s' % (infile) - outstr = '-out %s' % (outfile) - if name in ('init', 'cost', 'dof', 'mean_vol', 'bins'): - assert fnt.cmdline == ' '.join( - [fnt.cmd, instr, settings[0], outstr]) + instr = "-in %s" % (infile) + outstr = "-out %s" % (outfile) + if name in ("init", "cost", "dof", "mean_vol", "bins"): + assert fnt.cmdline == " ".join([fnt.cmd, instr, settings[0], outstr]) else: - assert fnt.cmdline == ' '.join( - [fnt.cmd, instr, outstr, settings[0]]) + assert fnt.cmdline == " ".join([fnt.cmd, instr, outstr, settings[0]]) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -382,8 +392,7 @@ def test_mcflirt_noinput(): fnt = fsl.MCFLIRT() with pytest.raises(ValueError) as excinfo: fnt.run() - assert str(excinfo.value).startswith( - "MCFLIRT requires a value for input 'in_file'") + assert str(excinfo.value).startswith("MCFLIRT requires a value for input 'in_file'") # test fnirt @@ -391,54 +400,61 @@ def test_mcflirt_noinput(): @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fnirt(setup_flirt): - tmpdir, infile, reffile = setup_flirt tmpdir.chdir() fnirt = fsl.FNIRT() - assert fnirt.cmd == 'fnirt' + assert fnirt.cmd == "fnirt" # Test list parameters - params = [('subsampling_scheme', '--subsamp', [4, 2, 2, 1], - '4,2,2,1'), ('max_nonlin_iter', '--miter', [4, 4, 4, 2], - '4,4,4,2'), ('ref_fwhm', '--reffwhm', [4, 2, 2, 0], - '4,2,2,0'), ('in_fwhm', '--infwhm', - [4, 2, 2, 0], '4,2,2,0'), - ('apply_refmask', '--applyrefmask', [0, 0, 1, 1], - '0,0,1,1'), ('apply_inmask', '--applyinmask', [0, 0, 0, 1], - '0,0,0,1'), ('regularization_lambda', '--lambda', - [0.5, 0.75], '0.5,0.75'), - ('intensity_mapping_model', '--intmod', 'global_non_linear', - 'global_non_linear')] + params = [ + ("subsampling_scheme", "--subsamp", [4, 2, 2, 1], "4,2,2,1"), + ("max_nonlin_iter", "--miter", [4, 4, 4, 2], "4,4,4,2"), + ("ref_fwhm", "--reffwhm", [4, 2, 2, 0], "4,2,2,0"), + ("in_fwhm", "--infwhm", [4, 2, 2, 0], "4,2,2,0"), + ("apply_refmask", "--applyrefmask", [0, 0, 1, 1], "0,0,1,1"), + ("apply_inmask", "--applyinmask", [0, 0, 0, 1], "0,0,0,1"), + 
("regularization_lambda", "--lambda", [0.5, 0.75], "0.5,0.75"), + ( + "intensity_mapping_model", + "--intmod", + "global_non_linear", + "global_non_linear", + ), + ] for item, flag, val, strval in params: fnirt = fsl.FNIRT(in_file=infile, ref_file=reffile, **{item: val}) - log = fnirt._gen_fname(infile, suffix='_log.txt', change_ext=False) - iout = fnirt._gen_fname(infile, suffix='_warped') - if item in ('max_nonlin_iter'): - cmd = 'fnirt --in=%s '\ - '--logout=%s'\ - ' %s=%s --ref=%s'\ - ' --iout=%s' % (infile, log, - flag, strval, reffile, iout) - elif item in ('in_fwhm', 'intensity_mapping_model'): - cmd = 'fnirt --in=%s %s=%s --logout=%s '\ - '--ref=%s --iout=%s' % (infile, flag, - strval, log, reffile, iout) - elif item.startswith('apply'): - cmd = 'fnirt %s=%s '\ - '--in=%s '\ - '--logout=%s '\ - '--ref=%s --iout=%s' % (flag, strval, - infile, log, - reffile, - iout) + log = fnirt._gen_fname(infile, suffix="_log.txt", change_ext=False) + iout = fnirt._gen_fname(infile, suffix="_warped") + if item in ("max_nonlin_iter"): + cmd = ( + "fnirt --in=%s " + "--logout=%s" + " %s=%s --ref=%s" + " --iout=%s" % (infile, log, flag, strval, reffile, iout) + ) + elif item in ("in_fwhm", "intensity_mapping_model"): + cmd = "fnirt --in={} {}={} --logout={} --ref={} --iout={}".format( + infile, + flag, + strval, + log, + reffile, + iout, + ) + elif item.startswith("apply"): + cmd = ( + "fnirt %s=%s " + "--in=%s " + "--logout=%s " + "--ref=%s --iout=%s" % (flag, strval, infile, log, reffile, iout) + ) else: - cmd = 'fnirt '\ - '--in=%s --logout=%s '\ - '--ref=%s %s=%s --iout=%s' % (infile, log, - reffile, - flag, strval, - iout) + cmd = ( + "fnirt " + "--in=%s --logout=%s " + "--ref=%s %s=%s --iout=%s" % (infile, log, reffile, flag, strval, iout) + ) assert fnirt.cmdline == cmd # Test ValueError is raised when missing mandatory args @@ -447,83 +463,84 @@ def test_fnirt(setup_flirt): fnirt.run() fnirt.inputs.in_file = infile fnirt.inputs.ref_file = reffile - intmap_basename = '%s_intmap' % fsl.FNIRT.intensitymap_file_basename( - infile) + intmap_basename = "%s_intmap" % fsl.FNIRT.intensitymap_file_basename(infile) intmap_image = fsl_name(fnirt, intmap_basename) - intmap_txt = '%s.txt' % intmap_basename + intmap_txt = "%s.txt" % intmap_basename # doing this to create the file to pass tests for file existence - with open(intmap_image, 'w'): + with open(intmap_image, "w"): pass - with open(intmap_txt, 'w'): + with open(intmap_txt, "w"): pass # test files - opt_map = [('affine_file', '--aff=%s' % infile, - infile), ('inwarp_file', '--inwarp=%s' % infile, infile), - ('in_intensitymap_file', '--intin=%s' % intmap_basename, - [intmap_image]), ('in_intensitymap_file', - '--intin=%s' % intmap_basename, - [intmap_image, intmap_txt]), - ('config_file', '--config=%s' % infile, - infile), ('refmask_file', '--refmask=%s' % infile, - infile), ('inmask_file', '--inmask=%s' % infile, - infile), ('field_file', - '--fout=%s' % infile, infile), - ('jacobian_file', '--jout=%s' % infile, - infile), ('modulatedref_file', '--refout=%s' % infile, - infile), ('out_intensitymap_file', - '--intout=%s' % intmap_basename, True), - ('out_intensitymap_file', '--intout=%s' % intmap_basename, - intmap_image), ('fieldcoeff_file', '--cout=%s' % infile, - infile), ('log_file', '--logout=%s' % infile, - infile)] - - for (name, settings, arg) in opt_map: + opt_map = [ + ("affine_file", "--aff=%s" % infile, infile), + ("inwarp_file", "--inwarp=%s" % infile, infile), + ("in_intensitymap_file", "--intin=%s" % intmap_basename, [intmap_image]), 
+ ( + "in_intensitymap_file", + "--intin=%s" % intmap_basename, + [intmap_image, intmap_txt], + ), + ("config_file", "--config=%s" % infile, infile), + ("refmask_file", "--refmask=%s" % infile, infile), + ("inmask_file", "--inmask=%s" % infile, infile), + ("field_file", "--fout=%s" % infile, infile), + ("jacobian_file", "--jout=%s" % infile, infile), + ("modulatedref_file", "--refout=%s" % infile, infile), + ("out_intensitymap_file", "--intout=%s" % intmap_basename, True), + ("out_intensitymap_file", "--intout=%s" % intmap_basename, intmap_image), + ("fieldcoeff_file", "--cout=%s" % infile, infile), + ("log_file", "--logout=%s" % infile, infile), + ] + + for name, settings, arg in opt_map: fnirt = fsl.FNIRT(in_file=infile, ref_file=reffile, **{name: arg}) - if name in ('config_file', 'affine_file', 'field_file', - 'fieldcoeff_file'): - cmd = 'fnirt %s --in=%s '\ - '--logout=%s '\ - '--ref=%s --iout=%s' % (settings, infile, log, - reffile, iout) - elif name in ('refmask_file'): - cmd = 'fnirt --in=%s '\ - '--logout=%s --ref=%s '\ - '%s '\ - '--iout=%s' % (infile, log, - reffile, - settings, - iout) - elif name in ('in_intensitymap_file', 'inwarp_file', 'inmask_file', - 'jacobian_file'): - cmd = 'fnirt --in=%s '\ - '%s '\ - '--logout=%s --ref=%s '\ - '--iout=%s' % (infile, - settings, - log, - reffile, - iout) - elif name in ('log_file'): - cmd = 'fnirt --in=%s '\ - '%s --ref=%s '\ - '--iout=%s' % (infile, - settings, - reffile, - iout) + if name in ("config_file", "affine_file", "field_file", "fieldcoeff_file"): + cmd = ( + "fnirt %s --in=%s " + "--logout=%s " + "--ref=%s --iout=%s" % (settings, infile, log, reffile, iout) + ) + elif name in ("refmask_file"): + cmd = ( + "fnirt --in=%s " + "--logout=%s --ref=%s " + "%s " + "--iout=%s" % (infile, log, reffile, settings, iout) + ) + elif name in ( + "in_intensitymap_file", + "inwarp_file", + "inmask_file", + "jacobian_file", + ): + cmd = ( + "fnirt --in=%s " + "%s " + "--logout=%s --ref=%s " + "--iout=%s" % (infile, settings, log, reffile, iout) + ) + elif name in ("log_file"): + cmd = ( + "fnirt --in=%s " + "%s --ref=%s " + "--iout=%s" % (infile, settings, reffile, iout) + ) else: - cmd = 'fnirt --in=%s '\ - '--logout=%s %s '\ - '--ref=%s --iout=%s' % (infile, log, - settings, - reffile, iout) + cmd = ( + "fnirt --in=%s " + "--logout=%s %s " + "--ref=%s --iout=%s" % (infile, log, settings, reffile, iout) + ) assert fnirt.cmdline == cmd - if name == 'out_intensitymap_file': - assert fnirt._list_outputs()['out_intensitymap_file'] == [ - intmap_image, intmap_txt + if name == "out_intensitymap_file": + assert fnirt._list_outputs()["out_intensitymap_file"] == [ + intmap_image, + intmap_txt, ] @@ -531,32 +548,29 @@ def test_fnirt(setup_flirt): def test_applywarp(setup_flirt): tmpdir, infile, reffile = setup_flirt opt_map = { - 'out_file': ('--out=bar.nii', 'bar.nii'), - 'premat': ('--premat=%s' % (reffile), reffile), - 'postmat': ('--postmat=%s' % (reffile), reffile), + "out_file": ("--out=bar.nii", "bar.nii"), + "premat": ("--premat=%s" % (reffile), reffile), + "postmat": ("--postmat=%s" % (reffile), reffile), } # in_file, ref_file, field_file mandatory for name, settings in list(opt_map.items()): awarp = fsl.ApplyWarp( - in_file=infile, - ref_file=reffile, - field_file=reffile, - **{ - name: settings[1] - }) - if name == 'out_file': - realcmd = 'applywarp --in=%s '\ - '--ref=%s --out=%s '\ - '--warp=%s' % (infile, reffile, - settings[1], reffile) + in_file=infile, ref_file=reffile, field_file=reffile, **{name: settings[1]} + ) + if name == 
"out_file": + realcmd = ( + "applywarp --in=%s " + "--ref=%s --out=%s " + "--warp=%s" % (infile, reffile, settings[1], reffile) + ) else: - outfile = awarp._gen_fname(infile, suffix='_warp') - realcmd = 'applywarp --in=%s '\ - '--ref=%s --out=%s '\ - '--warp=%s %s' % (infile, reffile, - outfile, reffile, - settings[0]) + outfile = awarp._gen_fname(infile, suffix="_warp") + realcmd = ( + "applywarp --in=%s " + "--ref=%s --out=%s " + "--warp=%s %s" % (infile, reffile, outfile, reffile, settings[0]) + ) assert awarp.cmdline == realcmd @@ -564,35 +578,51 @@ def test_applywarp(setup_flirt): def setup_fugue(tmpdir): import nibabel as nb import numpy as np - import os.path as op d = np.ones((80, 80, 80)) - infile = tmpdir.join('dumbfile.nii.gz').strpath + infile = tmpdir.join("dumbfile.nii.gz").strpath nb.Nifti1Image(d, None, None).to_filename(infile) return (tmpdir, infile) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") -@pytest.mark.parametrize("attr, out_file", [({ - "save_unmasked_fmap": True, - "fmap_in_file": "infile", - "mask_file": "infile", - "output_type": "NIFTI_GZ" -}, 'fmap_out_file'), ({ - "save_unmasked_shift": True, - "fmap_in_file": "infile", - "dwell_time": 1.e-3, - "mask_file": "infile", - "output_type": "NIFTI_GZ" -}, "shift_out_file"), ({ - "in_file": "infile", - "mask_file": "infile", - "shift_in_file": "infile", - "output_type": "NIFTI_GZ" -}, 'unwarped_file')]) +@pytest.mark.parametrize( + "attr, out_file", + [ + ( + { + "save_unmasked_fmap": True, + "fmap_in_file": "infile", + "mask_file": "infile", + "output_type": "NIFTI_GZ", + }, + "fmap_out_file", + ), + ( + { + "save_unmasked_shift": True, + "fmap_in_file": "infile", + "dwell_time": 1.0e-3, + "mask_file": "infile", + "output_type": "NIFTI_GZ", + }, + "shift_out_file", + ), + ( + { + "in_file": "infile", + "mask_file": "infile", + "shift_in_file": "infile", + "output_type": "NIFTI_GZ", + }, + "unwarped_file", + ), + ], +) def test_fugue(setup_fugue, attr, out_file): import os.path as op + tmpdir, infile = setup_fugue fugue = fsl.FUGUE() @@ -605,26 +635,26 @@ def test_fugue(setup_fugue, attr, out_file): assert isdefined(getattr(res.outputs, out_file)) trait_spec = fugue.inputs.trait(out_file) - out_name = trait_spec.name_template % 'dumbfile' - out_name += '.nii.gz' + out_name = trait_spec.name_template % "dumbfile" + out_name += ".nii.gz" assert op.basename(getattr(res.outputs, out_file)) == out_name @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_first_genfname(): first = fsl.FIRST() - first.inputs.out_file = 'segment.nii' + first.inputs.out_file = "segment.nii" first.inputs.output_type = "NIFTI_GZ" - value = first._gen_fname(basename='original_segmentations') - expected_value = os.path.abspath('segment_all_fast_origsegs.nii.gz') + value = first._gen_fname(basename="original_segmentations") + expected_value = os.path.abspath("segment_all_fast_origsegs.nii.gz") assert value == expected_value - first.inputs.method = 'none' - value = first._gen_fname(basename='original_segmentations') - expected_value = os.path.abspath('segment_all_none_origsegs.nii.gz') + first.inputs.method = "none" + value = first._gen_fname(basename="original_segmentations") + expected_value = os.path.abspath("segment_all_none_origsegs.nii.gz") assert value == expected_value - first.inputs.method = 'auto' - first.inputs.list_of_specific_structures = ['L_Hipp', 'R_Hipp'] - value = first._gen_fname(basename='original_segmentations') - expected_value = os.path.abspath('segment_all_none_origsegs.nii.gz') + 
first.inputs.method = "auto" + first.inputs.list_of_specific_structures = ["L_Hipp", "R_Hipp"] + value = first._gen_fname(basename="original_segmentations") + expected_value = os.path.abspath("segment_all_none_origsegs.nii.gz") assert value == expected_value diff --git a/nipype/interfaces/fsl/tests/test_utils.py b/nipype/interfaces/fsl/tests/test_utils.py index 5df6d88a49..bfe895c6ee 100644 --- a/nipype/interfaces/fsl/tests/test_utils.py +++ b/nipype/interfaces/fsl/tests/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -6,10 +5,9 @@ import numpy as np -import nibabel as nb import pytest import nipype.interfaces.fsl.utils as fsl -from nipype.interfaces.fsl import no_fsl, Info +from nipype.interfaces.fsl import no_fsl from nipype.testing.fixtures import create_files_in_directory_plus_output_type @@ -21,7 +19,7 @@ def test_fslroi(create_files_in_directory_plus_output_type): roi = fsl.ExtractROI() # make sure command gets called - assert roi.cmd == 'fslroi' + assert roi.cmd == "fslroi" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -29,15 +27,15 @@ def test_fslroi(create_files_in_directory_plus_output_type): # .inputs based parameters setting roi.inputs.in_file = filelist[0] - roi.inputs.roi_file = 'foo_roi.nii' + roi.inputs.roi_file = "foo_roi.nii" roi.inputs.t_min = 10 roi.inputs.t_size = 20 - assert roi.cmdline == 'fslroi %s foo_roi.nii 10 20' % filelist[0] + assert roi.cmdline == "fslroi %s foo_roi.nii 10 20" % filelist[0] # .run based parameter setting roi2 = fsl.ExtractROI( in_file=filelist[0], - roi_file='foo2_roi.nii', + roi_file="foo2_roi.nii", t_min=20, t_size=40, x_min=3, @@ -45,9 +43,9 @@ def test_fslroi(create_files_in_directory_plus_output_type): y_min=40, y_size=10, z_min=5, - z_size=20) - assert roi2.cmdline == \ - 'fslroi %s foo2_roi.nii 3 30 40 10 5 20 20 40' % filelist[0] + z_size=20, + ) + assert roi2.cmdline == "fslroi %s foo2_roi.nii 3 30 40 10 5 20 20 40" % filelist[0] # test arguments for opt_map # Fslroi class doesn't have a filled opt_map{} @@ -60,7 +58,7 @@ def test_fslmerge(create_files_in_directory_plus_output_type): merger = fsl.Merge() # make sure command gets called - assert merger.cmd == 'fslmerge' + assert merger.cmd == "fslmerge" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -68,27 +66,31 @@ def test_fslmerge(create_files_in_directory_plus_output_type): # .inputs based parameters setting merger.inputs.in_files = filelist - merger.inputs.merged_file = 'foo_merged.nii' - merger.inputs.dimension = 't' - merger.inputs.output_type = 'NIFTI' - assert merger.cmdline == 'fslmerge -t foo_merged.nii %s' % ' '.join( - filelist) + merger.inputs.merged_file = "foo_merged.nii" + merger.inputs.dimension = "t" + merger.inputs.output_type = "NIFTI" + assert merger.cmdline == "fslmerge -t foo_merged.nii %s" % " ".join(filelist) # verify that providing a tr value updates the dimension to tr merger.inputs.tr = 2.25 - assert merger.cmdline == 'fslmerge -tr foo_merged.nii %s %.2f' % ( - ' '.join(filelist), 2.25) + assert merger.cmdline == "fslmerge -tr foo_merged.nii {} {:.2f}".format( + " ".join(filelist), + 2.25, + ) # .run based parameter setting merger2 = fsl.Merge( in_files=filelist, - merged_file='foo_merged.nii', - dimension='t', - output_type='NIFTI', - tr=2.25) + merged_file="foo_merged.nii", + dimension="t", + output_type="NIFTI", + tr=2.25, + ) - assert merger2.cmdline == \ - 
'fslmerge -tr foo_merged.nii %s %.2f' % (' '.join(filelist), 2.25) + assert merger2.cmdline == "fslmerge -tr foo_merged.nii {} {:.2f}".format( + " ".join(filelist), + 2.25, + ) # test arguments for opt_map # Fslmerge class doesn't have a filled opt_map{} @@ -103,7 +105,7 @@ def test_fslmaths(create_files_in_directory_plus_output_type): math = fsl.ImageMaths() # make sure command gets called - assert math.cmd == 'fslmaths' + assert math.cmd == "fslmaths" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -111,15 +113,18 @@ def test_fslmaths(create_files_in_directory_plus_output_type): # .inputs based parameters setting math.inputs.in_file = filelist[0] - math.inputs.op_string = '-add 2.5 -mul input_volume2' - math.inputs.out_file = 'foo_math.nii' - assert math.cmdline == \ - 'fslmaths %s -add 2.5 -mul input_volume2 foo_math.nii' % filelist[0] + math.inputs.op_string = "-add 2.5 -mul input_volume2" + math.inputs.out_file = "foo_math.nii" + assert ( + math.cmdline + == "fslmaths %s -add 2.5 -mul input_volume2 foo_math.nii" % filelist[0] + ) # .run based parameter setting math2 = fsl.ImageMaths( - in_file=filelist[0], op_string='-add 2.5', out_file='foo2_math.nii') - assert math2.cmdline == 'fslmaths %s -add 2.5 foo2_math.nii' % filelist[0] + in_file=filelist[0], op_string="-add 2.5", out_file="foo2_math.nii" + ) + assert math2.cmdline == "fslmaths %s -add 2.5 foo2_math.nii" % filelist[0] # test arguments for opt_map # Fslmath class doesn't have opt_map{} @@ -134,7 +139,7 @@ def test_overlay(create_files_in_directory_plus_output_type): overlay = fsl.Overlay() # make sure command gets called - assert overlay.cmd == 'overlay' + assert overlay.cmd == "overlay" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -146,10 +151,12 @@ def test_overlay(create_files_in_directory_plus_output_type): overlay.inputs.background_image = filelist[1] overlay.inputs.auto_thresh_bg = True overlay.inputs.show_negative_stats = True - overlay.inputs.out_file = 'foo_overlay.nii' - assert overlay.cmdline == \ - 'overlay 1 0 %s -a %s 2.50 10.00 %s -2.50 -10.00 foo_overlay.nii' % ( - filelist[1], filelist[0], filelist[0]) + overlay.inputs.out_file = "foo_overlay.nii" + assert ( + overlay.cmdline + == "overlay 1 0 %s -a %s 2.50 10.00 %s -2.50 -10.00 foo_overlay.nii" + % (filelist[1], filelist[0], filelist[0]) + ) # .run based parameter setting overlay2 = fsl.Overlay( @@ -157,9 +164,15 @@ def test_overlay(create_files_in_directory_plus_output_type): stat_thresh=(2.5, 10), background_image=filelist[1], auto_thresh_bg=True, - out_file='foo2_overlay.nii') - assert overlay2.cmdline == 'overlay 1 0 %s -a %s 2.50 10.00 foo2_overlay.nii' % ( - filelist[1], filelist[0]) + out_file="foo2_overlay.nii", + ) + assert ( + overlay2.cmdline + == "overlay 1 0 {} -a {} 2.50 10.00 foo2_overlay.nii".format( + filelist[1], + filelist[0], + ) + ) # test slicer @@ -171,7 +184,7 @@ def test_slicer(create_files_in_directory_plus_output_type): slicer = fsl.Slicer() # make sure command gets called - assert slicer.cmd == 'slicer' + assert slicer.cmd == "slicer" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -180,27 +193,32 @@ def test_slicer(create_files_in_directory_plus_output_type): # .inputs based parameters setting slicer.inputs.in_file = filelist[0] slicer.inputs.image_edges = filelist[1] - slicer.inputs.intensity_range = (10., 20.) 
+ slicer.inputs.intensity_range = (10.0, 20.0) slicer.inputs.all_axial = True slicer.inputs.image_width = 750 - slicer.inputs.out_file = 'foo_bar.png' - assert slicer.cmdline == \ - 'slicer %s %s -L -i 10.000 20.000 -A 750 foo_bar.png' % ( - filelist[0], filelist[1]) + slicer.inputs.out_file = "foo_bar.png" + assert ( + slicer.cmdline + == "slicer {} {} -L -i 10.000 20.000 -A 750 foo_bar.png".format( + filelist[0], + filelist[1], + ) + ) # .run based parameter setting slicer2 = fsl.Slicer( in_file=filelist[0], middle_slices=True, label_slices=False, - out_file='foo_bar2.png') - assert slicer2.cmdline == 'slicer %s -a foo_bar2.png' % (filelist[0]) + out_file="foo_bar2.png", + ) + assert slicer2.cmdline == "slicer %s -a foo_bar2.png" % (filelist[0]) def create_parfiles(): - np.savetxt('a.par', np.random.rand(6, 3)) - np.savetxt('b.par', np.random.rand(6, 3)) - return ['a.par', 'b.par'] + np.savetxt("a.par", np.random.rand(6, 3)) + np.savetxt("b.par", np.random.rand(6, 3)) + return ["a.par", "b.par"] # test fsl_tsplot @@ -213,7 +231,7 @@ def test_plottimeseries(create_files_in_directory_plus_output_type): plotter = fsl.PlotTimeSeries() # make sure command gets called - assert plotter.cmd == 'fsl_tsplot' + assert plotter.cmd == "fsl_tsplot" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -221,23 +239,24 @@ def test_plottimeseries(create_files_in_directory_plus_output_type): # .inputs based parameters setting plotter.inputs.in_file = parfiles[0] - plotter.inputs.labels = ['x', 'y', 'z'] + plotter.inputs.labels = ["x", "y", "z"] plotter.inputs.y_range = (0, 1) - plotter.inputs.title = 'test plot' - plotter.inputs.out_file = 'foo.png' - assert plotter.cmdline == \ - ('fsl_tsplot -i %s -a x,y,z -o foo.png -t \'test plot\' -u 1 --ymin=0 --ymax=1' - % parfiles[0]) + plotter.inputs.title = "test plot" + plotter.inputs.out_file = "foo.png" + assert plotter.cmdline == ( + "fsl_tsplot -i %s -a x,y,z -o foo.png -t 'test plot' -u 1 --ymin=0 --ymax=1" + % parfiles[0] + ) # .run based parameter setting plotter2 = fsl.PlotTimeSeries( - in_file=parfiles, - title='test2 plot', - plot_range=(2, 5), - out_file='bar.png') - assert plotter2.cmdline == \ - 'fsl_tsplot -i %s,%s -o bar.png --start=2 --finish=5 -t \'test2 plot\' -u 1' % tuple( - parfiles) + in_file=parfiles, title="test2 plot", plot_range=(2, 5), out_file="bar.png" + ) + assert ( + plotter2.cmdline + == "fsl_tsplot -i %s,%s -o bar.png --start=2 --finish=5 -t 'test2 plot' -u 1" + % tuple(parfiles) + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -247,7 +266,7 @@ def test_plotmotionparams(create_files_in_directory_plus_output_type): plotter = fsl.PlotMotionParams() # make sure command gets called - assert plotter.cmd == 'fsl_tsplot' + assert plotter.cmd == "fsl_tsplot" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -255,22 +274,25 @@ def test_plotmotionparams(create_files_in_directory_plus_output_type): # .inputs based parameters setting plotter.inputs.in_file = parfiles[0] - plotter.inputs.in_source = 'fsl' - plotter.inputs.plot_type = 'rotations' - plotter.inputs.out_file = 'foo.png' - assert plotter.cmdline == \ - ('fsl_tsplot -i %s -o foo.png -t \'MCFLIRT estimated rotations (radians)\' ' - '--start=1 --finish=3 -a x,y,z' % parfiles[0]) + plotter.inputs.in_source = "fsl" + plotter.inputs.plot_type = "rotations" + plotter.inputs.out_file = "foo.png" + assert plotter.cmdline == ( + "fsl_tsplot -i %s -o foo.png -t 'MCFLIRT estimated rotations (radians)' " + 
"--start=1 --finish=3 -a x,y,z" % parfiles[0] + ) # .run based parameter setting plotter2 = fsl.PlotMotionParams( in_file=parfiles[1], - in_source='spm', - plot_type='translations', - out_file='bar.png') - assert plotter2.cmdline == \ - ('fsl_tsplot -i %s -o bar.png -t \'Realign estimated translations (mm)\' ' - '--start=1 --finish=3 -a x,y,z' % parfiles[1]) + in_source="spm", + plot_type="translations", + out_file="bar.png", + ) + assert plotter2.cmdline == ( + "fsl_tsplot -i %s -o bar.png -t 'Realign estimated translations (mm)' " + "--start=1 --finish=3 -a x,y,z" % parfiles[1] + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -289,16 +311,16 @@ def test_convertxfm(create_files_in_directory_plus_output_type): cvt.inputs.in_file = filelist[0] cvt.inputs.invert_xfm = True cvt.inputs.out_file = "foo.mat" - assert cvt.cmdline == 'convert_xfm -omat foo.mat -inverse %s' % filelist[0] + assert cvt.cmdline == "convert_xfm -omat foo.mat -inverse %s" % filelist[0] # constructor based parameter setting cvt2 = fsl.ConvertXFM( - in_file=filelist[0], - in_file2=filelist[1], - concat_xfm=True, - out_file="bar.mat") - assert cvt2.cmdline == \ - "convert_xfm -omat bar.mat -concat %s %s" % (filelist[1], filelist[0]) + in_file=filelist[0], in_file2=filelist[1], concat_xfm=True, out_file="bar.mat" + ) + assert cvt2.cmdline == "convert_xfm -omat bar.mat -concat {} {}".format( + filelist[1], + filelist[0], + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -320,7 +342,8 @@ def test_swapdims(create_files_in_directory_plus_output_type): swap.inputs.in_file = files[0] swap.inputs.new_dims = ("x", "y", "z") assert swap.cmdline == "fslswapdim a.nii x y z %s" % os.path.realpath( - os.path.join(testdir, "a_newdims%s" % out_ext)) + os.path.join(testdir, "a_newdims%s" % out_ext) + ) # Test that we can set an output name swap.inputs.out_file = "b.nii" diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index f4ef73c0e9..704fb77fef 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -9,10 +8,6 @@ -------- See the docstrings of the individual classes for examples. 
""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import map, range - import os import os.path as op import re @@ -21,20 +16,24 @@ import numpy as np -from ...utils.filemanip import (load_json, save_json, split_filename, - fname_presuffix) -from ..base import (traits, TraitedSpec, OutputMultiPath, File, CommandLine, - CommandLineInputSpec, isdefined) +from ...utils.filemanip import load_json, save_json, split_filename, fname_presuffix +from ..base import ( + traits, + Tuple, + TraitedSpec, + OutputMultiPath, + File, + CommandLine, + CommandLineInputSpec, + isdefined, +) from .base import FSLCommand, FSLCommandInputSpec, Info class CopyGeomInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - mandatory=True, - argstr="%s", - position=0, - desc="source image") + exists=True, mandatory=True, argstr="%s", position=0, desc="source image" + ) dest_file = File( exists=True, mandatory=True, @@ -42,11 +41,13 @@ class CopyGeomInputSpec(FSLCommandInputSpec): position=1, desc="destination image", copyfile=True, - output_name='out_file', - name_source='dest_file', - name_template='%s') + output_name="out_file", + name_source="dest_file", + name_template="%s", + ) ignore_dims = traits.Bool( - desc='Do not copy image dimensions', argstr='-d', position="-1") + desc="Do not copy image dimensions", argstr="-d", position="-1" + ) class CopyGeomOutputSpec(TraitedSpec): @@ -62,6 +63,7 @@ class CopyGeom(FSLCommand): different files will result in loss of information or potentially incorrect settings. """ + _cmd = "fslcpgeom" input_spec = CopyGeomInputSpec output_spec = CopyGeomOutputSpec @@ -69,36 +71,32 @@ class CopyGeom(FSLCommand): class RobustFOVInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - desc='input filename', - argstr='-i %s', - position=0, - mandatory=True) + exists=True, desc="input filename", argstr="-i %s", position=0, mandatory=True + ) out_roi = File( desc="ROI volume output name", argstr="-r %s", - name_source=['in_file'], + name_source=["in_file"], hash_files=False, - name_template='%s_ROI') + name_template="%s_ROI", + ) brainsize = traits.Int( - desc=('size of brain in z-dimension (default ' - '170mm/150mm)'), - argstr='-b %d') + desc=("size of brain in z-dimension (default 170mm/150mm)"), argstr="-b %d" + ) out_transform = File( - desc=("Transformation matrix in_file to out_roi " - "output name"), + desc=("Transformation matrix in_file to out_roi output name"), argstr="-m %s", - name_source=['in_file'], + name_source=["in_file"], hash_files=False, - name_template='%s_to_ROI') + name_template="%s_to_ROI", + ) class RobustFOVOutputSpec(TraitedSpec): out_roi = File(exists=True, desc="ROI volume output name") out_transform = File( - exists=True, - desc=("Transformation matrix in_file to out_roi " - "output name")) + exists=True, desc=("Transformation matrix in_file to out_roi output name") + ) class RobustFOV(FSLCommand): @@ -108,7 +106,7 @@ class RobustFOV(FSLCommand): 150mm to 170mm. 
""" - _cmd = 'robustfov' + _cmd = "robustfov" input_spec = RobustFOVInputSpec output_spec = RobustFOVOutputSpec @@ -116,45 +114,46 @@ class RobustFOV(FSLCommand): class ImageMeantsInputSpec(FSLCommandInputSpec): in_file = File( exists=True, - desc='input file for computing the average timeseries', - argstr='-i %s', + desc="input file for computing the average timeseries", + argstr="-i %s", position=0, - mandatory=True) + mandatory=True, + ) out_file = File( - desc='name of output text matrix', - argstr='-o %s', + desc="name of output text matrix", + argstr="-o %s", genfile=True, - hash_files=False) - mask = File(exists=True, desc='input 3D mask', argstr='-m %s') + hash_files=False, + ) + mask = File(exists=True, desc="input 3D mask", argstr="-m %s") spatial_coord = traits.List( traits.Int, - desc=(' requested spatial coordinate ' - '(instead of mask)'), - argstr='-c %s') + desc=(" requested spatial coordinate (instead of mask)"), + argstr="-c %s", + ) use_mm = traits.Bool( - desc=('use mm instead of voxel coordinates (for -c ' - 'option)'), - argstr='--usemm') + desc=("use mm instead of voxel coordinates (for -c option)"), + argstr="--usemm", + ) show_all = traits.Bool( - desc=('show all voxel time series (within mask) ' - 'instead of averaging'), - argstr='--showall') + desc=("show all voxel time series (within mask) instead of averaging"), + argstr="--showall", + ) eig = traits.Bool( - desc=('calculate Eigenvariate(s) instead of mean (output will have 0 ' - 'mean)'), - argstr='--eig') + desc=("calculate Eigenvariate(s) instead of mean (output will have 0 mean)"), + argstr="--eig", + ) order = traits.Int( - 1, - desc='select number of Eigenvariates', - argstr='--order=%d', - usedefault=True) + 1, desc="select number of Eigenvariates", argstr="--order=%d", usedefault=True + ) nobin = traits.Bool( - desc=('do not binarise the mask for calculation of ' - 'Eigenvariates'), - argstr='--no_bin') + desc=("do not binarise the mask for calculation of Eigenvariates"), + argstr="--no_bin", + ) transpose = traits.Bool( - desc=('output results in transpose format (one row per voxel/mean)'), - argstr='--transpose') + desc=("output results in transpose format (one row per voxel/mean)"), + argstr="--transpose", + ) class ImageMeantsOutputSpec(TraitedSpec): @@ -162,26 +161,28 @@ class ImageMeantsOutputSpec(TraitedSpec): class ImageMeants(FSLCommand): - """ Use fslmeants for printing the average timeseries (intensities) to - the screen (or saves to a file). The average is taken over all voxels - in the mask (or all voxels in the image if no mask is specified) + """Use fslmeants for printing the average timeseries (intensities) to + the screen (or saves to a file). 
The average is taken over all voxels + in the mask (or all voxels in the image if no mask is specified) """ - _cmd = 'fslmeants' + + _cmd = "fslmeants" input_spec = ImageMeantsInputSpec output_spec = ImageMeantsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']): - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix='_ts', ext='.txt', change_ext=True) - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): + outputs["out_file"] = self._gen_fname( + self.inputs.in_file, suffix="_ts", ext=".txt", change_ext=True + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None @@ -191,22 +192,24 @@ class SmoothInputSpec(FSLCommandInputSpec): sigma = traits.Float( argstr="-kernel gauss %.03f -fmean", position=1, - xor=['fwhm'], + xor=["fwhm"], mandatory=True, - desc='gaussian kernel sigma in mm (not voxels)') + desc="gaussian kernel sigma in mm (not voxels)", + ) fwhm = traits.Float( argstr="-kernel gauss %.03f -fmean", position=1, - xor=['sigma'], + xor=["sigma"], mandatory=True, - desc=('gaussian kernel fwhm, will be converted to sigma in mm ' - '(not voxels)')) + desc=("gaussian kernel fwhm, will be converted to sigma in mm (not voxels)"), + ) smoothed_file = File( argstr="%s", position=2, - name_source=['in_file'], - name_template='%s_smooth', - hash_files=False) + name_source=["in_file"], + name_template="%s_smooth", + hash_files=False, + ) class SmoothOutputSpec(TraitedSpec): @@ -253,18 +256,24 @@ class Smooth(FSLCommand): input_spec = SmoothInputSpec output_spec = SmoothOutputSpec - _cmd = 'fslmaths' + _cmd = "fslmaths" def _format_arg(self, name, trait_spec, value): - if name == 'fwhm': + if name == "fwhm": sigma = float(value) / np.sqrt(8 * np.log(2)) - return super(Smooth, self)._format_arg(name, trait_spec, sigma) - return super(Smooth, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, sigma) + return super()._format_arg(name, trait_spec, value) class SliceInputSpec(FSLCommandInputSpec): - in_file = File(exists=True, argstr="%s", position=0, mandatory=True, - desc="input filename", copyfile=False) + in_file = File( + exists=True, + argstr="%s", + position=0, + mandatory=True, + desc="input filename", + copyfile=False, + ) out_base_name = traits.Str(argstr="%s", position=1, desc="outputs prefix") @@ -289,7 +298,7 @@ class Slice(FSLCommand): """ - _cmd = 'fslslice' + _cmd = "fslslice" input_spec = SliceInputSpec output_spec = SliceOutputSpec @@ -311,44 +320,50 @@ def _list_outputs(self): """ outputs = self._outputs().get() ext = Info.output_type_to_ext(self.inputs.output_type) - suffix = '_slice_*' + ext + suffix = "_slice_*" + ext if isdefined(self.inputs.out_base_name): - fname_template = os.path.abspath( - self.inputs.out_base_name + suffix) + fname_template = os.path.abspath(self.inputs.out_base_name + suffix) else: - fname_template = fname_presuffix(self.inputs.in_file, - suffix=suffix, use_ext=False) + fname_template = fname_presuffix( + self.inputs.in_file, suffix=suffix, use_ext=False + ) - outputs['out_files'] = sorted(glob(fname_template)) + outputs["out_files"] = sorted(glob(fname_template)) return outputs class MergeInputSpec(FSLCommandInputSpec): - in_files = traits.List( - 
File(exists=True), argstr="%s", position=2, mandatory=True) + in_files = traits.List(File(exists=True), argstr="%s", position=2, mandatory=True) dimension = traits.Enum( - 't', - 'x', - 'y', - 'z', - 'a', + "t", + "x", + "y", + "z", + "a", argstr="-%s", position=0, - desc=("dimension along which to merge, optionally " - "set tr input when dimension is t"), - mandatory=True) + desc=( + "dimension along which to merge, optionally " + "set tr input when dimension is t" + ), + mandatory=True, + ) tr = traits.Float( position=-1, - argstr='%.2f', - desc=('use to specify TR in seconds (default is 1.00 ' - 'sec), overrides dimension and sets it to tr')) + argstr="%.2f", + desc=( + "use to specify TR in seconds (default is 1.00 " + "sec), overrides dimension and sets it to tr" + ), + ) merged_file = File( argstr="%s", position=1, - name_source='in_files', - name_template='%s_merged', - hash_files=False) + name_source="in_files", + name_template="%s_merged", + hash_files=False, + ) class MergeOutputSpec(TraitedSpec): @@ -382,35 +397,29 @@ class Merge(FSLCommand): """ - _cmd = 'fslmerge' + _cmd = "fslmerge" input_spec = MergeInputSpec output_spec = MergeOutputSpec def _format_arg(self, name, spec, value): - if name == 'tr': - if self.inputs.dimension != 't': - raise ValueError('When TR is specified, dimension must be t') + if name == "tr": + if self.inputs.dimension != "t": + raise ValueError("When TR is specified, dimension must be t") return spec.argstr % value - if name == 'dimension': + if name == "dimension": if isdefined(self.inputs.tr): - return '-tr' + return "-tr" return spec.argstr % value - return super(Merge, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class ExtractROIInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - argstr="%s", - position=0, - desc="input file", - mandatory=True) + exists=True, argstr="%s", position=0, desc="input file", mandatory=True + ) roi_file = File( - argstr="%s", - position=1, - desc="output file", - genfile=True, - hash_files=False) + argstr="%s", position=1, desc="output file", genfile=True, hash_files=False + ) x_min = traits.Int(argstr="%d", position=2) x_size = traits.Int(argstr="%d", position=3) y_min = traits.Int(argstr="%d", position=4) @@ -420,15 +429,22 @@ class ExtractROIInputSpec(FSLCommandInputSpec): t_min = traits.Int(argstr="%d", position=8) t_size = traits.Int(argstr="%d", position=9) _crop_xor = [ - 'x_min', 'x_size', 'y_min', 'y_size', 'z_min', 'z_size', 't_min', - 't_size' + "x_min", + "x_size", + "y_min", + "y_size", + "z_min", + "z_size", + "t_min", + "t_size", ] crop_list = traits.List( - traits.Tuple(traits.Int, traits.Int), + Tuple(traits.Int, traits.Int), argstr="%s", position=2, xor=_crop_xor, - desc="list of two tuples specifying crop options") + desc="list of two tuples specifying crop options", + ) class ExtractROIOutputSpec(TraitedSpec): @@ -461,15 +477,14 @@ class ExtractROI(FSLCommand): """ - _cmd = 'fslroi' + _cmd = "fslroi" input_spec = ExtractROIInputSpec output_spec = ExtractROIOutputSpec def _format_arg(self, name, spec, value): - if name == "crop_list": - return " ".join(map(str, sum(list(map(list, value)), []))) - return super(ExtractROI, self)._format_arg(name, spec, value) + return " ".join(str(x) for sublist in value for x in sublist) + return super()._format_arg(name, spec, value) def _list_outputs(self): """Create a Bunch which contains all possible files generated @@ -489,36 +504,33 @@ def _list_outputs(self): """ outputs = self._outputs().get() - 
outputs['roi_file'] = self.inputs.roi_file - if not isdefined(outputs['roi_file']): - outputs['roi_file'] = self._gen_fname( - self.inputs.in_file, suffix='_roi') - outputs['roi_file'] = os.path.abspath(outputs['roi_file']) + outputs["roi_file"] = self.inputs.roi_file + if not isdefined(outputs["roi_file"]): + outputs["roi_file"] = self._gen_fname(self.inputs.in_file, suffix="_roi") + outputs["roi_file"] = os.path.abspath(outputs["roi_file"]) return outputs def _gen_filename(self, name): - if name == 'roi_file': + if name == "roi_file": return self._list_outputs()[name] return None class SplitInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - argstr="%s", - position=0, - mandatory=True, - desc="input filename") + exists=True, argstr="%s", position=0, mandatory=True, desc="input filename" + ) out_base_name = traits.Str(argstr="%s", position=1, desc="outputs prefix") dimension = traits.Enum( - 't', - 'x', - 'y', - 'z', + "t", + "x", + "y", + "z", argstr="-%s", position=2, mandatory=True, - desc="dimension along which the file will be split") + desc="dimension along which the file will be split", + ) class SplitOutputSpec(TraitedSpec): @@ -529,7 +541,8 @@ class Split(FSLCommand): """Uses FSL Fslsplit command to separate a volume into images in time, x, y or z dimension. """ - _cmd = 'fslsplit' + + _cmd = "fslsplit" input_spec = SplitInputSpec output_spec = SplitOutputSpec @@ -551,36 +564,37 @@ def _list_outputs(self): """ outputs = self._outputs().get() ext = Info.output_type_to_ext(self.inputs.output_type) - outbase = 'vol[0-9]*' + outbase = "vol[0-9]*" if isdefined(self.inputs.out_base_name): - outbase = '%s[0-9]*' % self.inputs.out_base_name - outputs['out_files'] = sorted( - glob(os.path.join(os.getcwd(), outbase + ext))) + outbase = "%s[0-9]*" % self.inputs.out_base_name + outputs["out_files"] = sorted(glob(os.path.join(os.getcwd(), outbase + ext))) return outputs class ImageMathsInputSpec(FSLCommandInputSpec): in_file = File(exists=True, argstr="%s", mandatory=True, position=1) in_file2 = File(exists=True, argstr="%s", position=3) - mask_file = File(exists=True, argstr='-mas %s', - desc='use (following image>0) to mask current image') + mask_file = File( + exists=True, + argstr="-mas %s", + desc="use (following image>0) to mask current image", + ) out_file = File(argstr="%s", position=-2, genfile=True, hash_files=False) op_string = traits.Str( - argstr="%s", - position=2, - desc="string defining the operation, i. e. -add") + argstr="%s", position=2, desc="string defining the operation, i. e. 
-add" + ) suffix = traits.Str(desc="out_file suffix") out_data_type = traits.Enum( - 'char', - 'short', - 'int', - 'float', - 'double', - 'input', + "char", + "short", + "int", + "float", + "double", + "input", argstr="-odt %s", position=-1, - desc=("output datatype, one of (char, short, " - "int, float, double, input)")) + desc=("output datatype, one of (char, short, int, float, double, input)"), + ) class ImageMathsOutputSpec(TraitedSpec): @@ -604,29 +618,29 @@ class ImageMaths(FSLCommand): """ + input_spec = ImageMathsInputSpec output_spec = ImageMathsOutputSpec - _cmd = 'fslmaths' + _cmd = "fslmaths" def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None def _parse_inputs(self, skip=None): - return super(ImageMaths, self)._parse_inputs(skip=['suffix']) + return super()._parse_inputs(skip=["suffix"]) def _list_outputs(self): - suffix = '_maths' # ohinds: build suffix + suffix = "_maths" # ohinds: build suffix if isdefined(self.inputs.suffix): suffix = self.inputs.suffix outputs = self._outputs().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']): - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix=suffix) - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): + outputs["out_file"] = self._gen_fname(self.inputs.in_file, suffix=suffix) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs @@ -636,41 +650,46 @@ class FilterRegressorInputSpec(FSLCommandInputSpec): argstr="-i %s", desc="input file name (4D image)", mandatory=True, - position=1) + position=1, + ) out_file = File( argstr="-o %s", desc="output file name for the filtered data", genfile=True, position=2, - hash_files=False) + hash_files=False, + ) design_file = File( exists=True, argstr="-d %s", position=3, mandatory=True, - desc=("name of the matrix with time courses (e.g. GLM " - "design or MELODIC mixing matrix)")) + desc=( + "name of the matrix with time courses (e.g. 
GLM " + "design or MELODIC mixing matrix)" + ), + ) filter_columns = traits.List( traits.Int, argstr="-f '%s'", xor=["filter_all"], mandatory=True, position=4, - desc=("(1-based) column indices to filter out of the data")) + desc=("(1-based) column indices to filter out of the data"), + ) filter_all = traits.Bool( mandatory=True, argstr="-f '%s'", xor=["filter_columns"], position=4, - desc=("use all columns in the design file in " - "denoising")) + desc=("use all columns in the design file in denoising"), + ) mask = File(exists=True, argstr="-m %s", desc="mask image file name") - var_norm = traits.Bool( - argstr="--vn", desc="perform variance-normalization on data") + var_norm = traits.Bool(argstr="--vn", desc="perform variance-normalization on data") out_vnscales = traits.Bool( argstr="--out_vnscales", - desc=("output scaling factors for variance " - "normalization")) + desc=("output scaling factors for variance normalization"), + ) class FilterRegressorOutputSpec(TraitedSpec): @@ -682,12 +701,13 @@ class FilterRegressor(FSLCommand): Uses simple OLS regression on 4D images """ + input_spec = FilterRegressorInputSpec output_spec = FilterRegressorOutputSpec - _cmd = 'fsl_regfilt' + _cmd = "fsl_regfilt" def _format_arg(self, name, trait_spec, value): - if name == 'filter_columns': + if name == "filter_columns": return trait_spec.argstr % ",".join(map(str, value)) elif name == "filter_all": design = np.loadtxt(self.inputs.design_file) @@ -695,52 +715,62 @@ def _format_arg(self, name, trait_spec, value): n_cols = design.shape[1] except IndexError: n_cols = 1 - return trait_spec.argstr % ",".join( - map(str, list(range(1, n_cols + 1)))) - return super(FilterRegressor, self)._format_arg( - name, trait_spec, value) + return trait_spec.argstr % ",".join(map(str, list(range(1, n_cols + 1)))) + return super()._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']): - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix='_regfilt') - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): + outputs["out_file"] = self._gen_fname( + self.inputs.in_file, suffix="_regfilt" + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None class ImageStatsInputSpec(FSLCommandInputSpec): split_4d = traits.Bool( - argstr='-t', + argstr="-t", position=1, - desc=('give a separate output line for each 3D ' - 'volume of a 4D timeseries')) + desc=("give a separate output line for each 3D volume of a 4D timeseries"), + ) in_file = File( exists=True, argstr="%s", mandatory=True, - position=2, - desc='input file to generate stats of') + position=3, + desc="input file to generate stats of", + ) op_string = traits.Str( argstr="%s", mandatory=True, - position=3, - desc=("string defining the operation, options are " - "applied in order, e.g. -M -l 10 -M will " - "report the non-zero mean, apply a threshold " - "and then report the new nonzero mean")) - mask_file = File( - exists=True, argstr="", desc='mask file used for option -k %s') + position=4, + desc=( + "string defining the operation, options are " + "applied in order, e.g. 
-M -l 10 -M will " + "report the non-zero mean, apply a threshold " + "and then report the new nonzero mean" + ), + ) + mask_file = File(exists=True, argstr="", desc="mask file used for option -k %s") + index_mask_file = File( + exists=True, + argstr="-K %s", + position=2, + desc="generate separate n submasks from indexMask, " + "for indexvalues 1..n where n is the maximum index " + "value in indexMask, and generate statistics for each submask", + ) class ImageStatsOutputSpec(TraitedSpec): - out_stat = traits.Any(desc='stats output') + out_stat = traits.Any(desc="stats output") class ImageStats(FSLCommand): @@ -760,35 +790,35 @@ class ImageStats(FSLCommand): """ + input_spec = ImageStatsInputSpec output_spec = ImageStatsOutputSpec - _cmd = 'fslstats' + _cmd = "fslstats" def _format_arg(self, name, trait_spec, value): - if name == 'mask_file': - return '' - if name == 'op_string': - if '-k %s' in self.inputs.op_string: + if name == "mask_file": + return "" + if name == "op_string": + if "-k %s" in self.inputs.op_string: if isdefined(self.inputs.mask_file): return self.inputs.op_string % self.inputs.mask_file else: - raise ValueError( - '-k %s option in op_string requires mask_file') - return super(ImageStats, self)._format_arg(name, trait_spec, value) + raise ValueError("-k %s option in op_string requires mask_file") + return super()._format_arg(name, trait_spec, value) def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() # local caching for backward compatibility - outfile = os.path.join(os.getcwd(), 'stat_result.json') + outfile = os.path.join(os.getcwd(), "stat_result.json") if runtime is None: try: - out_stat = load_json(outfile)['stat'] - except IOError: + out_stat = load_json(outfile)["stat"] + except OSError: return self.run().outputs else: out_stat = [] - for line in runtime.stdout.split('\n'): + for line in runtime.stdout.split("\n"): if line: values = line.split() if len(values) > 1: @@ -803,31 +833,35 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): class AvScaleInputSpec(CommandLineInputSpec): - all_param = traits.Bool(False, argstr='--allparams') - mat_file = File( - exists=True, argstr='%s', desc='mat file to read', position=-2) + all_param = traits.Bool(False, argstr="--allparams") + mat_file = File(exists=True, argstr="%s", desc="mat file to read", position=-2) ref_file = File( exists=True, - argstr='%s', + argstr="%s", position=-1, - desc='reference file to get center of rotation') + desc="reference file to get center of rotation", + ) class AvScaleOutputSpec(TraitedSpec): rotation_translation_matrix = traits.List( - traits.List(traits.Float), desc='Rotation and Translation Matrix') - scales = traits.List(traits.Float, desc='Scales (x,y,z)') - skews = traits.List(traits.Float, desc='Skews') - average_scaling = traits.Float(desc='Average Scaling') - determinant = traits.Float(desc='Determinant') + traits.List(traits.Float), desc="Rotation and Translation Matrix" + ) + scales = traits.List(traits.Float, desc="Scales (x,y,z)") + skews = traits.List(traits.Float, desc="Skews") + average_scaling = traits.Float(desc="Average Scaling") + determinant = traits.Float(desc="Determinant") forward_half_transform = traits.List( - traits.List(traits.Float), desc='Forward Half Transform') + traits.List(traits.Float), desc="Forward Half Transform" + ) backward_half_transform = traits.List( - traits.List(traits.Float), desc='Backwards Half Transform') + traits.List(traits.Float), desc="Backwards Half Transform" + ) 
left_right_orientation_preserved = traits.Bool( - desc='True if LR orientation preserved') - rot_angles = traits.List(traits.Float, desc='rotation angles') - translations = traits.List(traits.Float, desc='translations') + desc="True if LR orientation preserved" + ) + rot_angles = traits.List(traits.Float, desc="rotation angles") + translations = traits.List(traits.Float, desc="translations") class AvScale(CommandLine): @@ -842,56 +876,60 @@ class AvScale(CommandLine): """ + input_spec = AvScaleInputSpec output_spec = AvScaleOutputSpec - _cmd = 'avscale' + _cmd = "avscale" def _run_interface(self, runtime): - runtime = super(AvScale, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) expr = re.compile( - 'Rotation\ &\ Translation\ Matrix:\n(?P[0-9\.\ \n-]+)[\s\n]*' - '(Rotation\ Angles\ \(x,y,z\)\ \[rads\]\ =\ (?P[0-9\.\ -]+))?[\s\n]*' - '(Translations\ \(x,y,z\)\ \[mm\]\ =\ (?P[0-9\.\ -]+))?[\s\n]*' - 'Scales\ \(x,y,z\)\ =\ (?P[0-9\.\ -]+)[\s\n]*' - 'Skews\ \(xy,xz,yz\)\ =\ (?P[0-9\.\ -]+)[\s\n]*' - 'Average\ scaling\ =\ (?P[0-9\.-]+)[\s\n]*' - 'Determinant\ =\ (?P[0-9\.-]+)[\s\n]*' - 'Left-Right\ orientation:\ (?P[A-Za-z]+)[\s\n]*' - 'Forward\ half\ transform\ =[\s]*\n' - '(?P[0-9\.\ \n-]+)[\s\n]*' - 'Backward\ half\ transform\ =[\s]*\n' - '(?P[0-9\.\ \n-]+)[\s\n]*') + r"Rotation & Translation Matrix:\n(?P[0-9\. \n-]+)[\s\n]*" + r"(Rotation Angles \(x,y,z\) \[rads\] = (?P[0-9\. -]+))?[\s\n]*" + r"(Translations \(x,y,z\) \[mm\] = (?P[0-9\. -]+))?[\s\n]*" + r"Scales \(x,y,z\) = (?P[0-9\. -]+)[\s\n]*" + r"Skews \(xy,xz,yz\) = (?P[0-9\. -]+)[\s\n]*" + r"Average scaling = (?P[0-9\.-]+)[\s\n]*" + r"Determinant = (?P[0-9\.-]+)[\s\n]*" + r"Left-Right orientation: (?P[A-Za-z]+)[\s\n]*" + r"Forward half transform =[\s]*\n" + r"(?P[0-9\. \n-]+)[\s\n]*" + r"Backward half transform =[\s]*\n" + r"(?P[0-9\. 
\n-]+)[\s\n]*" + ) out = expr.search(runtime.stdout).groupdict() outputs = {} - outputs['rotation_translation_matrix'] = [[ - float(v) for v in r.strip().split(' ') - ] for r in out['rot_tran_mat'].strip().split('\n')] - outputs['scales'] = [ - float(s) for s in out['scales'].strip().split(' ') + outputs["rotation_translation_matrix"] = [ + [float(v) for v in r.strip().split(" ")] + for r in out["rot_tran_mat"].strip().split("\n") + ] + outputs["scales"] = [float(s) for s in out["scales"].strip().split(" ")] + outputs["skews"] = [float(s) for s in out["skews"].strip().split(" ")] + outputs["average_scaling"] = float(out["avg_scaling"].strip()) + outputs["determinant"] = float(out["determinant"].strip()) + outputs["left_right_orientation_preserved"] = ( + out["lr_orientation"].strip() == "preserved" + ) + outputs["forward_half_transform"] = [ + [float(v) for v in r.strip().split(" ")] + for r in out["fwd_half_xfm"].strip().split("\n") + ] + outputs["backward_half_transform"] = [ + [float(v) for v in r.strip().split(" ")] + for r in out["bwd_half_xfm"].strip().split("\n") ] - outputs['skews'] = [float(s) for s in out['skews'].strip().split(' ')] - outputs['average_scaling'] = float(out['avg_scaling'].strip()) - outputs['determinant'] = float(out['determinant'].strip()) - outputs['left_right_orientation_preserved'] = out[ - 'lr_orientation'].strip() == 'preserved' - outputs['forward_half_transform'] = [[ - float(v) for v in r.strip().split(' ') - ] for r in out['fwd_half_xfm'].strip().split('\n')] - outputs['backward_half_transform'] = [[ - float(v) for v in r.strip().split(' ') - ] for r in out['bwd_half_xfm'].strip().split('\n')] if self.inputs.all_param: - outputs['rot_angles'] = [ - float(r) for r in out['rot_angles'].strip().split(' ') + outputs["rot_angles"] = [ + float(r) for r in out["rot_angles"].strip().split(" ") ] - outputs['translations'] = [ - float(r) for r in out['translations'].strip().split(' ') + outputs["translations"] = [ + float(r) for r in out["translations"].strip().split(" ") ] - setattr(self, '_results', outputs) + self._results = outputs return runtime def _list_outputs(self): @@ -900,94 +938,104 @@ def _list_outputs(self): class OverlayInputSpec(FSLCommandInputSpec): transparency = traits.Bool( - desc='make overlay colors semi-transparent', + desc="make overlay colors semi-transparent", position=1, - argstr='%s', + argstr="%s", usedefault=True, - default_value=True) + default_value=True, + ) out_type = traits.Enum( - 'float', - 'int', + "float", + "int", position=2, usedefault=True, - argstr='%s', - desc='write output with float or int') + argstr="%s", + desc="write output with float or int", + ) use_checkerboard = traits.Bool( - desc='use checkerboard mask for overlay', argstr='-c', position=3) + desc="use checkerboard mask for overlay", argstr="-c", position=3 + ) background_image = File( exists=True, position=4, mandatory=True, - argstr='%s', - desc='image to use as background') - _xor_inputs = ('auto_thresh_bg', 'full_bg_range', 'bg_thresh') + argstr="%s", + desc="image to use as background", + ) + _xor_inputs = ("auto_thresh_bg", "full_bg_range", "bg_thresh") auto_thresh_bg = traits.Bool( - desc=('automatically threshold the background image'), - argstr='-a', + desc=("automatically threshold the background image"), + argstr="-a", position=5, xor=_xor_inputs, - mandatory=True) + mandatory=True, + ) full_bg_range = traits.Bool( - desc='use full range of background image', - argstr='-A', + desc="use full range of background image", + argstr="-A", position=5, 
xor=_xor_inputs, - mandatory=True) - bg_thresh = traits.Tuple( + mandatory=True, + ) + bg_thresh = Tuple( traits.Float, traits.Float, - argstr='%.3f %.3f', + argstr="%.3f %.3f", position=5, - desc='min and max values for background intensity', + desc="min and max values for background intensity", xor=_xor_inputs, - mandatory=True) + mandatory=True, + ) stat_image = File( exists=True, position=6, mandatory=True, - argstr='%s', - desc='statistical image to overlay in color') - stat_thresh = traits.Tuple( + argstr="%s", + desc="statistical image to overlay in color", + ) + stat_thresh = Tuple( traits.Float, traits.Float, position=7, mandatory=True, - argstr='%.2f %.2f', - desc=('min and max values for the statistical ' - 'overlay')) + argstr="%.2f %.2f", + desc=("min and max values for the statistical overlay"), + ) show_negative_stats = traits.Bool( - desc=('display negative statistics in ' - 'overlay'), - xor=['stat_image2'], - argstr='%s', - position=8) + desc=("display negative statistics in overlay"), + xor=["stat_image2"], + argstr="%s", + position=8, + ) stat_image2 = File( exists=True, position=9, - xor=['show_negative_stats'], - argstr='%s', - desc='second statistical image to overlay in color') - stat_thresh2 = traits.Tuple( + xor=["show_negative_stats"], + argstr="%s", + desc="second statistical image to overlay in color", + ) + stat_thresh2 = Tuple( traits.Float, traits.Float, position=10, - desc=('min and max values for second ' - 'statistical overlay'), - argstr='%.2f %.2f') + desc=("min and max values for second statistical overlay"), + argstr="%.2f %.2f", + ) out_file = File( - desc='combined image volume', + desc="combined image volume", position=-1, - argstr='%s', + argstr="%s", genfile=True, - hash_files=False) + hash_files=False, + ) class OverlayOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='combined image volume') + out_file = File(exists=True, desc="combined image volume") class Overlay(FSLCommand): - """ Use FSL's overlay command to combine background and statistical images + """Use FSL's overlay command to combine background and statistical images into one volume @@ -1005,143 +1053,149 @@ class Overlay(FSLCommand): """ - _cmd = 'overlay' + + _cmd = "overlay" input_spec = OverlayInputSpec output_spec = OverlayOutputSpec def _format_arg(self, name, spec, value): - if name == 'transparency': + if name == "transparency": if value: - return '1' + return "1" else: - return '0' - if name == 'out_type': - if value == 'float': - return '0' + return "0" + if name == "out_type": + if value == "float": + return "0" else: - return '1' - if name == 'show_negative_stats': - return '%s %.2f %.2f' % (self.inputs.stat_image, - self.inputs.stat_thresh[0] * -1, - self.inputs.stat_thresh[1] * -1) - return super(Overlay, self)._format_arg(name, spec, value) + return "1" + if name == "show_negative_stats": + return "{} {:.2f} {:.2f}".format( + self.inputs.stat_image, + self.inputs.stat_thresh[0] * -1, + self.inputs.stat_thresh[1] * -1, + ) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): if isdefined(self.inputs.stat_image2) and ( - not isdefined(self.inputs.show_negative_stats) - or not self.inputs.show_negative_stats): - stem = "%s_and_%s" % ( + not isdefined(self.inputs.show_negative_stats) + or not self.inputs.show_negative_stats + ): + stem = "{}_and_{}".format( split_filename(self.inputs.stat_image)[1], - split_filename(self.inputs.stat_image2)[1]) + 
split_filename(self.inputs.stat_image2)[1], + ) else: stem = split_filename(self.inputs.stat_image)[1] - out_file = self._gen_fname(stem, suffix='_overlay') - outputs['out_file'] = os.path.abspath(out_file) + out_file = self._gen_fname(stem, suffix="_overlay") + outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None class SlicerInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - position=1, - argstr='%s', - mandatory=True, - desc='input volume') + exists=True, position=1, argstr="%s", mandatory=True, desc="input volume" + ) image_edges = File( exists=True, position=2, - argstr='%s', - desc=('volume to display edge overlay for (useful for ' - 'checking registration')) + argstr="%s", + desc=("volume to display edge overlay for (useful for checking registration"), + ) label_slices = traits.Bool( position=3, - argstr='-L', - desc='display slice number', + argstr="-L", + desc="display slice number", usedefault=True, - default_value=True) + default_value=True, + ) colour_map = File( exists=True, position=4, - argstr='-l %s', - desc=('use different colour map from that stored in ' - 'nifti header')) - intensity_range = traits.Tuple( + argstr="-l %s", + desc=("use different colour map from that stored in nifti header"), + ) + intensity_range = Tuple( traits.Float, traits.Float, position=5, - argstr='-i %.3f %.3f', - desc='min and max intensities to display') + argstr="-i %.3f %.3f", + desc="min and max intensities to display", + ) threshold_edges = traits.Float( - position=6, argstr='-e %.3f', desc='use threshold for edges') + position=6, argstr="-e %.3f", desc="use threshold for edges" + ) dither_edges = traits.Bool( - position=7, - argstr='-t', - desc=('produce semi-transparent (dithered) ' - 'edges')) + position=7, argstr="-t", desc=("produce semi-transparent (dithered) edges") + ) nearest_neighbour = traits.Bool( position=8, - argstr='-n', - desc=('use nearest neighbor interpolation ' - 'for output')) + argstr="-n", + desc=("use nearest neighbor interpolation for output"), + ) show_orientation = traits.Bool( position=9, - argstr='%s', + argstr="%s", usedefault=True, default_value=True, - desc='label left-right orientation') - _xor_options = ('single_slice', 'middle_slices', 'all_axial', - 'sample_axial') + desc="label left-right orientation", + ) + _xor_options = ("single_slice", "middle_slices", "all_axial", "sample_axial") single_slice = traits.Enum( - 'x', - 'y', - 'z', + "x", + "y", + "z", position=10, - argstr='-%s', + argstr="-%s", xor=_xor_options, - requires=['slice_number'], - desc=('output picture of single slice in the x, y, or z plane')) + requires=["slice_number"], + desc=("output picture of single slice in the x, y, or z plane"), + ) slice_number = traits.Int( - position=11, argstr='-%d', desc='slice number to save in picture') + position=11, argstr="-%d", desc="slice number to save in picture" + ) middle_slices = traits.Bool( position=10, - argstr='-a', + argstr="-a", xor=_xor_options, - desc=('output picture of mid-sagittal, axial, ' - 'and coronal slices')) + desc=("output picture of mid-sagittal, axial, and coronal slices"), + ) all_axial = traits.Bool( position=10, - argstr='-A', + argstr="-A", xor=_xor_options, - requires=['image_width'], - desc='output all axial slices into one picture') + requires=["image_width"], + desc="output all axial slices into one picture", + ) 
sample_axial = traits.Int( position=10, - argstr='-S %d', + argstr="-S %d", xor=_xor_options, - requires=['image_width'], - desc=('output every n axial slices into one ' - 'picture')) - image_width = traits.Int( - position=-2, argstr='%d', desc='max picture width') + requires=["image_width"], + desc=("output every n axial slices into one picture"), + ) + image_width = traits.Int(position=-2, argstr="%d", desc="max picture width") out_file = File( position=-1, genfile=True, - argstr='%s', - desc='picture to write', - hash_files=False) - scaling = traits.Float(position=0, argstr='-s %f', desc='image scale') + argstr="%s", + desc="picture to write", + hash_files=False, + ) + scaling = traits.Float(position=0, argstr="-s %f", desc="image scale") class SlicerOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='picture to write') + out_file = File(exists=True, desc="picture to write") class Slicer(FSLCommand): @@ -1161,100 +1215,96 @@ class Slicer(FSLCommand): """ - _cmd = 'slicer' + + _cmd = "slicer" input_spec = SlicerInputSpec output_spec = SlicerOutputSpec def _format_arg(self, name, spec, value): - if name == 'show_orientation': + if name == "show_orientation": if value: - return '' + return "" else: - return '-u' + return "-u" elif name == "label_slices": if value: - return '-L' + return "-L" else: - return '' - return super(Slicer, self)._format_arg(name, spec, value) + return "" + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): - out_file = self._gen_fname(self.inputs.in_file, ext='.png') - outputs['out_file'] = os.path.abspath(out_file) + out_file = self._gen_fname(self.inputs.in_file, ext=".png") + outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None class PlotTimeSeriesInputSpec(FSLCommandInputSpec): - in_file = traits.Either( File(exists=True), traits.List(File(exists=True)), mandatory=True, argstr="%s", position=1, - desc=("file or list of files with columns of " - "timecourse information")) + desc=("file or list of files with columns of timecourse information"), + ) plot_start = traits.Int( argstr="--start=%d", - xor=("plot_range", ), - desc="first column from in-file to plot") + xor=("plot_range",), + desc="first column from in-file to plot", + ) plot_finish = traits.Int( argstr="--finish=%d", - xor=("plot_range", ), - desc="final column from in-file to plot") - plot_range = traits.Tuple( + xor=("plot_range",), + desc="final column from in-file to plot", + ) + plot_range = Tuple( traits.Int, traits.Int, argstr="%s", xor=("plot_start", "plot_finish"), - desc=("first and last columns from the in-file " - "to plot")) + desc=("first and last columns from the in-file to plot"), + ) title = traits.Str(argstr="%s", desc="plot title") legend_file = File(exists=True, argstr="--legend=%s", desc="legend file") labels = traits.Either( - traits.Str, - traits.List(traits.Str), - argstr="%s", - desc="label or list of labels") - y_min = traits.Float( - argstr="--ymin=%.2f", desc="minumum y value", xor=("y_range", )) - y_max = traits.Float( - argstr="--ymax=%.2f", desc="maximum y value", xor=("y_range", )) - y_range = traits.Tuple( + traits.Str, traits.List(traits.Str), argstr="%s", desc="label or list of labels" + ) + y_min = traits.Float(argstr="--ymin=%.2f", desc="minimum y value", 
xor=("y_range",)) + y_max = traits.Float(argstr="--ymax=%.2f", desc="maximum y value", xor=("y_range",)) + y_range = Tuple( traits.Float, traits.Float, argstr="%s", xor=("y_min", "y_max"), - desc="min and max y axis values") + desc="min and max y axis values", + ) x_units = traits.Int( argstr="-u %d", usedefault=True, default_value=1, - desc=("scaling units for x-axis (between 1 and length of in file)")) - plot_size = traits.Tuple( - traits.Int, - traits.Int, - argstr="%s", - desc="plot image height and width") - x_precision = traits.Int( - argstr="--precision=%d", desc="precision of x-axis labels") - sci_notation = traits.Bool( - argstr="--sci", desc="switch on scientific notation") + desc=("scaling units for x-axis (between 1 and length of in file)"), + ) + plot_size = Tuple( + traits.Int, traits.Int, argstr="%s", desc="plot image height and width" + ) + x_precision = traits.Int(argstr="--precision=%d", desc="precision of x-axis labels") + sci_notation = traits.Bool(argstr="--sci", desc="switch on scientific notation") out_file = File( - argstr="-o %s", genfile=True, desc="image to write", hash_files=False) + argstr="-o %s", genfile=True, desc="image to write", hash_files=False + ) class PlotTimeSeriesOutputSpec(TraitedSpec): - - out_file = File(exists=True, desc='image to write') + out_file = File(exists=True, desc="image to write") class PlotTimeSeries(FSLCommand): @@ -1272,6 +1322,7 @@ class PlotTimeSeries(FSLCommand): """ + _cmd = "fsl_tsplot" input_spec = PlotTimeSeriesInputSpec output_spec = PlotTimeSeriesOutputSpec @@ -1290,14 +1341,14 @@ def _format_arg(self, name, spec, value): else: return "-a %s" % value elif name == "title": - return "-t \'%s\'" % value + return "-t '%s'" % value elif name == "plot_range": return "--start=%d --finish=%d" % value elif name == "y_range": return "--ymin=%d --ymax=%d" % value elif name == "plot_size": return "-h %d -w %d" % value - return super(PlotTimeSeries, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -1307,51 +1358,49 @@ def _list_outputs(self): infile = self.inputs.in_file[0] else: infile = self.inputs.in_file - out_file = self._gen_fname(infile, ext='.png') - outputs['out_file'] = os.path.abspath(out_file) + out_file = self._gen_fname(infile, ext=".png") + outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None class PlotMotionParamsInputSpec(FSLCommandInputSpec): - in_file = traits.Either( File(exists=True), traits.List(File(exists=True)), mandatory=True, argstr="%s", position=1, - desc="file with motion parameters") + desc="file with motion parameters", + ) in_source = traits.Enum( "spm", "fsl", mandatory=True, - desc=("which program generated the motion " - "parameter file - fsl, spm")) + desc=("which program generated the motion parameter file - fsl, spm"), + ) plot_type = traits.Enum( "rotations", "translations", "displacement", argstr="%s", mandatory=True, - desc=("which motion type to plot - rotations, " - "translations, displacement")) - plot_size = traits.Tuple( - traits.Int, - traits.Int, - argstr="%s", - desc="plot image height and width") + desc=("which motion type to plot - rotations, translations, displacement"), + ) + plot_size = Tuple( + traits.Int, traits.Int, argstr="%s", desc="plot image height and width" + ) out_file = File( - argstr="-o %s", 
genfile=True, desc="image to write", hash_files=False) + argstr="-o %s", genfile=True, desc="image to write", hash_files=False + ) class PlotMotionParamsOutputSpec(TraitedSpec): - - out_file = File(exists=True, desc='image to write') + out_file = File(exists=True, desc="image to write") class PlotMotionParams(FSLCommand): @@ -1378,38 +1427,41 @@ class PlotMotionParams(FSLCommand): translations, while SPM prints them in the opposite order. This interface should be able to plot timecourses of motion parameters generated from other sources as long as they fall under one of these two patterns. For - more flexibilty, see the :class:`fsl.PlotTimeSeries` interface. + more flexibility, see the :class:`fsl.PlotTimeSeries` interface. """ - _cmd = 'fsl_tsplot' + + _cmd = "fsl_tsplot" input_spec = PlotMotionParamsInputSpec output_spec = PlotMotionParamsOutputSpec def _format_arg(self, name, spec, value): - if name == "plot_type": source = self.inputs.in_source - if self.inputs.plot_type == 'displacement': - title = '-t \'MCFLIRT estimated mean displacement (mm)\'' - labels = '-a abs,rel' - return '%s %s' % (title, labels) + if self.inputs.plot_type == "displacement": + title = "-t 'MCFLIRT estimated mean displacement (mm)'" + labels = "-a abs,rel" + return f"{title} {labels}" # Get the right starting and ending position depending on source # package sfdict = dict( - fsl_rot=(1, 3), fsl_tra=(4, 6), spm_rot=(4, 6), spm_tra=(1, 3)) + fsl_rot=(1, 3), fsl_tra=(4, 6), spm_rot=(4, 6), spm_tra=(1, 3) + ) # Format the title properly - sfstr = "--start=%d --finish=%d" % sfdict["%s_%s" % (source, - value[:3])] + sfstr = "--start=%d --finish=%d" % sfdict[f"{source}_{value[:3]}"] titledict = dict(fsl="MCFLIRT", spm="Realign") unitdict = dict(rot="radians", tra="mm") - title = "\'%s estimated %s (%s)\'" % (titledict[source], value, - unitdict[value[:3]]) + title = "'{} estimated {} ({})'".format( + titledict[source], + value, + unitdict[value[:3]], + ) - return "-t %s %s -a x,y,z" % (title, sfstr) + return f"-t {title} {sfstr} -a x,y,z" elif name == "plot_size": return "-h %d -w %d" % value elif name == "in_file": @@ -1419,7 +1471,7 @@ def _format_arg(self, name, spec, value): else: return "-i %s" % value - return super(PlotMotionParams, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -1429,16 +1481,18 @@ def _list_outputs(self): infile = self.inputs.in_file[0] else: infile = self.inputs.in_file - plttype = dict( - rot="rot", tra="trans", dis="disp")[self.inputs.plot_type[:3]] + plttype = dict(rot="rot", tra="trans", dis="disp")[ + self.inputs.plot_type[:3] + ] out_file = fname_presuffix( - infile, suffix="_%s.png" % plttype, use_ext=False) - outputs['out_file'] = os.path.abspath(out_file) + infile, suffix="_%s.png" % plttype, use_ext=False + ) + outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None @@ -1448,38 +1502,39 @@ class ConvertXFMInputSpec(FSLCommandInputSpec): mandatory=True, argstr="%s", position=-1, - desc="input transformation matrix") + desc="input transformation matrix", + ) in_file2 = File( exists=True, argstr="%s", position=-2, - desc="second input matrix (for use with fix_scale_skew or concat_xfm)") + desc="second input matrix (for use with fix_scale_skew or concat_xfm)", + ) _options = ["invert_xfm", "concat_xfm", 
"fix_scale_skew"] invert_xfm = traits.Bool( - argstr="-inverse", - position=-3, - xor=_options, - desc="invert input transformation") + argstr="-inverse", position=-3, xor=_options, desc="invert input transformation" + ) concat_xfm = traits.Bool( argstr="-concat", position=-3, xor=_options, requires=["in_file2"], - desc=("write joint transformation of two input " - "matrices")) + desc=("write joint transformation of two input matrices"), + ) fix_scale_skew = traits.Bool( argstr="-fixscaleskew", position=-3, xor=_options, requires=["in_file2"], - desc=("use secondary matrix to fix scale and " - "skew")) + desc=("use secondary matrix to fix scale and skew"), + ) out_file = File( genfile=True, argstr="-omat %s", position=1, desc="final transformation matrix", - hash_files=False) + hash_files=False, + ) class ConvertXFMOutputSpec(TraitedSpec): @@ -1514,24 +1569,21 @@ def _list_outputs(self): _, infile1, _ = split_filename(self.inputs.in_file) if self.inputs.invert_xfm: outfile = fname_presuffix( - infile1, - suffix="_inv.mat", - newpath=os.getcwd(), - use_ext=False) + infile1, suffix="_inv.mat", newpath=os.getcwd(), use_ext=False + ) else: if self.inputs.concat_xfm: _, infile2, _ = split_filename(self.inputs.in_file2) outfile = fname_presuffix( - "%s_%s" % (infile1, infile2), + f"{infile1}_{infile2}", suffix=".mat", newpath=os.getcwd(), - use_ext=False) + use_ext=False, + ) else: outfile = fname_presuffix( - infile1, - suffix="_fix.mat", - newpath=os.getcwd(), - use_ext=False) + infile1, suffix="_fix.mat", newpath=os.getcwd(), use_ext=False + ) outputs["out_file"] = os.path.abspath(outfile) return outputs @@ -1542,29 +1594,22 @@ def _gen_filename(self, name): class SwapDimensionsInputSpec(FSLCommandInputSpec): - in_file = File( - exists=True, - mandatory=True, - argstr="%s", - position="1", - desc="input image") - _dims = [ - "x", "-x", "y", "-y", "z", "-z", "RL", "LR", "AP", "PA", "IS", "SI" - ] - new_dims = traits.Tuple( + exists=True, mandatory=True, argstr="%s", position="1", desc="input image" + ) + _dims = ["x", "-x", "y", "-y", "z", "-z", "RL", "LR", "AP", "PA", "IS", "SI"] + new_dims = Tuple( traits.Enum(_dims), traits.Enum(_dims), traits.Enum(_dims), argstr="%s %s %s", mandatory=True, - desc="3-tuple of new dimension order") - out_file = File( - genfile=True, argstr="%s", desc="image to write", hash_files=False) + desc="3-tuple of new dimension order", + ) + out_file = File(genfile=True, argstr="%s", desc="image to write", hash_files=False) class SwapDimensionsOutputSpec(TraitedSpec): - out_file = File(exists=True, desc="image with new dimensions") @@ -1577,6 +1622,7 @@ class SwapDimensions(FSLCommand): (RL, LR, AP, PA, IS, SI). 
""" + _cmd = "fslswapdim" input_spec = SwapDimensionsInputSpec output_spec = SwapDimensionsOutputSpec @@ -1586,7 +1632,8 @@ def _list_outputs(self): outputs["out_file"] = self.inputs.out_file if not isdefined(self.inputs.out_file): outputs["out_file"] = self._gen_fname( - self.inputs.in_file, suffix='_newdims') + self.inputs.in_file, suffix="_newdims" + ) outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs @@ -1602,20 +1649,21 @@ class PowerSpectrumInputSpec(FSLCommandInputSpec): in_file = File( exists=True, desc="input 4D file to estimate the power spectrum", - argstr='%s', + argstr="%s", position=0, - mandatory=True) + mandatory=True, + ) out_file = File( - desc='name of output 4D file for power spectrum', - argstr='%s', + desc="name of output 4D file for power spectrum", + argstr="%s", position=1, genfile=True, - hash_files=False) + hash_files=False, + ) class PowerSpectrumOutputSpec(TraitedSpec): - out_file = File( - exists=True, desc="path/name of the output 4D power spectrum file") + out_file = File(exists=True, desc="path/name of the output 4D power spectrum file") class PowerSpectrum(FSLCommand): @@ -1632,41 +1680,42 @@ class PowerSpectrum(FSLCommand): """ - _cmd = 'fslpspec' + _cmd = "fslpspec" input_spec = PowerSpectrumInputSpec output_spec = PowerSpectrumOutputSpec def _gen_outfilename(self): out_file = self.inputs.out_file if not isdefined(out_file) and isdefined(self.inputs.in_file): - out_file = self._gen_fname(self.inputs.in_file, suffix='_ps') + out_file = self._gen_fname(self.inputs.in_file, suffix="_ps") return out_file def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self._gen_outfilename()) + outputs["out_file"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() return None class SigLossInputSpec(FSLCommandInputSpec): - in_file = File( - mandatory=True, exists=True, argstr='-i %s', desc='b0 fieldmap file') + in_file = File(mandatory=True, exists=True, argstr="-i %s", desc="b0 fieldmap file") out_file = File( - argstr='-s %s', desc='output signal loss estimate file', genfile=True) + argstr="-s %s", desc="output signal loss estimate file", genfile=True + ) - mask_file = File(exists=True, argstr='-m %s', desc='brain mask file') - echo_time = traits.Float(argstr='--te=%f', desc='echo time in seconds') + mask_file = File(exists=True, argstr="-m %s", desc="brain mask file") + echo_time = traits.Float(argstr="--te=%f", desc="echo time in seconds") slice_direction = traits.Enum( - 'x', 'y', 'z', argstr='-d %s', desc='slicing direction') + "x", "y", "z", argstr="-d %s", desc="slicing direction" + ) class SigLossOuputSpec(TraitedSpec): - out_file = File(exists=True, desc='signal loss estimate file') + out_file = File(exists=True, desc="signal loss estimate file") class SigLoss(FSLCommand): @@ -1682,22 +1731,23 @@ class SigLoss(FSLCommand): """ + input_spec = SigLossInputSpec output_spec = SigLossOuputSpec - _cmd = 'sigloss' + _cmd = "sigloss" def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']) and \ - isdefined(self.inputs.in_file): - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix='_sigloss') + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]) and isdefined(self.inputs.in_file): + outputs["out_file"] = self._gen_fname( + self.inputs.in_file, 
suffix="_sigloss" + ) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None @@ -1724,95 +1774,118 @@ class Reorient2Std(FSLCommand): """ - _cmd = 'fslreorient2std' + + _cmd = "fslreorient2std" input_spec = Reorient2StdInputSpec output_spec = Reorient2StdOutputSpec def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_fname(self.inputs.in_file, suffix="_reoriented") return None def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_file): - outputs['out_file'] = self._gen_filename('out_file') + outputs["out_file"] = self._gen_filename("out_file") else: - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class InvWarpInputSpec(FSLCommandInputSpec): warp = File( exists=True, - argstr='--warp=%s', + argstr="--warp=%s", mandatory=True, - desc=('Name of file containing warp-coefficients/fields. This ' - 'would typically be the output from the --cout switch of' - ' fnirt (but can also use fields, like the output from ' - '--fout).')) + desc=( + "Name of file containing warp-coefficients/fields. This " + "would typically be the output from the --cout switch of" + " fnirt (but can also use fields, like the output from " + "--fout)." + ), + ) reference = File( exists=True, - argstr='--ref=%s', + argstr="--ref=%s", mandatory=True, - desc=('Name of a file in target space. Note that the ' - 'target space is now different from the target ' - 'space that was used to create the --warp file. It ' - 'would typically be the file that was specified ' - 'with the --in argument when running fnirt.')) + desc=( + "Name of a file in target space. Note that the " + "target space is now different from the target " + "space that was used to create the --warp file. It " + "would typically be the file that was specified " + "with the --in argument when running fnirt." + ), + ) inverse_warp = File( - argstr='--out=%s', - name_source=['warp'], + argstr="--out=%s", + name_source=["warp"], hash_files=False, - name_template='%s_inverse', - desc=('Name of output file, containing warps that are ' - 'the "reverse" of those in --warp. This will be ' - 'a field-file (rather than a file of spline ' - 'coefficients), and it will have any affine ' - 'component included as part of the ' - 'displacements.')) + name_template="%s_inverse", + desc=( + "Name of output file, containing warps that are " + 'the "reverse" of those in --warp. This will be ' + "a field-file (rather than a file of spline " + "coefficients), and it will have any affine " + "component included as part of the " + "displacements." + ), + ) absolute = traits.Bool( - argstr='--abs', - xor=['relative'], - desc=('If set it indicates that the warps in --warp' - ' should be interpreted as absolute, provided' - ' that it is not created by fnirt (which ' - 'always uses relative warps). If set it also ' - 'indicates that the output --out should be ' - 'absolute.')) + argstr="--abs", + xor=["relative"], + desc=( + "If set it indicates that the warps in --warp" + " should be interpreted as absolute, provided" + " that it is not created by fnirt (which " + "always uses relative warps). If set it also " + "indicates that the output --out should be " + "absolute." 


 class InvWarpInputSpec(FSLCommandInputSpec):
     warp = File(
         exists=True,
-        argstr='--warp=%s',
+        argstr="--warp=%s",
         mandatory=True,
-        desc=('Name of file containing warp-coefficients/fields. This '
-              'would typically be the output from the --cout switch of'
-              ' fnirt (but can also use fields, like the output from '
-              '--fout).'))
+        desc=(
+            "Name of file containing warp-coefficients/fields. This "
+            "would typically be the output from the --cout switch of"
+            " fnirt (but can also use fields, like the output from "
+            "--fout)."
+        ),
+    )
     reference = File(
         exists=True,
-        argstr='--ref=%s',
+        argstr="--ref=%s",
         mandatory=True,
-        desc=('Name of a file in target space. Note that the '
-              'target space is now different from the target '
-              'space that was used to create the --warp file. It '
-              'would typically be the file that was specified '
-              'with the --in argument when running fnirt.'))
+        desc=(
+            "Name of a file in target space. Note that the "
+            "target space is now different from the target "
+            "space that was used to create the --warp file. It "
+            "would typically be the file that was specified "
+            "with the --in argument when running fnirt."
+        ),
+    )
     inverse_warp = File(
-        argstr='--out=%s',
-        name_source=['warp'],
+        argstr="--out=%s",
+        name_source=["warp"],
         hash_files=False,
-        name_template='%s_inverse',
-        desc=('Name of output file, containing warps that are '
-              'the "reverse" of those in --warp. This will be '
-              'a field-file (rather than a file of spline '
-              'coefficients), and it will have any affine '
-              'component included as part of the '
-              'displacements.'))
+        name_template="%s_inverse",
+        desc=(
+            "Name of output file, containing warps that are "
+            'the "reverse" of those in --warp. This will be '
+            "a field-file (rather than a file of spline "
+            "coefficients), and it will have any affine "
+            "component included as part of the "
+            "displacements."
+        ),
+    )
     absolute = traits.Bool(
-        argstr='--abs',
-        xor=['relative'],
-        desc=('If set it indicates that the warps in --warp'
-              ' should be interpreted as absolute, provided'
-              ' that it is not created by fnirt (which '
-              'always uses relative warps). If set it also '
-              'indicates that the output --out should be '
-              'absolute.'))
+        argstr="--abs",
+        xor=["relative"],
+        desc=(
+            "If set it indicates that the warps in --warp"
+            " should be interpreted as absolute, provided"
+            " that it is not created by fnirt (which "
+            "always uses relative warps). If set it also "
+            "indicates that the output --out should be "
+            "absolute."
+        ),
+    )
     relative = traits.Bool(
-        argstr='--rel',
-        xor=['absolute'],
-        desc=('If set it indicates that the warps in --warp'
-              ' should be interpreted as relative. I.e. the'
-              ' values in --warp are displacements from the'
-              ' coordinates in the --ref space. If set it '
-              'also indicates that the output --out should '
-              'be relative.'))
+        argstr="--rel",
+        xor=["absolute"],
+        desc=(
+            "If set it indicates that the warps in --warp"
+            " should be interpreted as relative. I.e. the"
+            " values in --warp are displacements from the"
+            " coordinates in the --ref space. If set it "
+            "also indicates that the output --out should "
+            "be relative."
+        ),
+    )
     niter = traits.Int(
-        argstr='--niter=%d',
-        desc=('Determines how many iterations of the '
-              'gradient-descent search that should be run.'))
+        argstr="--niter=%d",
+        desc=(
+            "Determines how many iterations of the "
+            "gradient-descent search that should be run."
+        ),
+    )
     regularise = traits.Float(
-        argstr='--regularise=%f',
-        desc='Regularization strength (deafult=1.0).')
+        argstr="--regularise=%f", desc="Regularization strength (default=1.0)."
+    )
     noconstraint = traits.Bool(
-        argstr='--noconstraint', desc='Do not apply Jacobian constraint')
+        argstr="--noconstraint", desc="Do not apply Jacobian constraint"
+    )
     jacobian_min = traits.Float(
-        argstr='--jmin=%f',
-        desc=('Minimum acceptable Jacobian value for '
-              'constraint (default 0.01)'))
+        argstr="--jmin=%f",
+        desc=("Minimum acceptable Jacobian value for constraint (default 0.01)"),
+    )
     jacobian_max = traits.Float(
-        argstr='--jmax=%f',
-        desc=('Maximum acceptable Jacobian value for '
-              'constraint (default 100.0)'))
+        argstr="--jmax=%f",
+        desc=("Maximum acceptable Jacobian value for constraint (default 100.0)"),
+    )


 class InvWarpOutputSpec(TraitedSpec):
     inverse_warp = File(
         exists=True,
-        desc=('Name of output file, containing warps that are '
-              'the "reverse" of those in --warp.'))
+        desc=(
+            "Name of output file, containing warps that are "
+            'the "reverse" of those in --warp.'
+        ),
+    )


 class InvWarp(FSLCommand):
@@ -1838,7 +1911,7 @@ class InvWarp(FSLCommand):
     input_spec = InvWarpInputSpec
     output_spec = InvWarpOutputSpec

-    _cmd = 'invwarp'
+    _cmd = "invwarp"
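A hedged usage sketch for InvWarp (placeholder filenames). Note that inverse_warp is auto-named from warp via the "%s_inverse" template above:

    >>> from nipype.interfaces.fsl import InvWarp
    >>> invwarp = InvWarp()
    >>> invwarp.inputs.warp = 'struct2mni_warp.nii.gz'
    >>> invwarp.inputs.reference = 'anatomical.nii'
    >>> res = invwarp.run()  # doctest: +SKIP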


 class ComplexInputSpec(FSLCommandInputSpec):
@@ -1848,82 +1921,68 @@ class ComplexInputSpec(FSLCommandInputSpec):
     real_in_file = File(exists=True, argstr="%s", position=2)
     imaginary_in_file = File(exists=True, argstr="%s", position=3)
     magnitude_in_file = File(exists=True, argstr="%s", position=2)
-    phase_in_file = File(exists=True, argstr='%s', position=3)
+    phase_in_file = File(exists=True, argstr="%s", position=3)

     _ofs = [
-        'complex_out_file', 'magnitude_out_file', 'phase_out_file',
-        'real_out_file', 'imaginary_out_file'
+        "complex_out_file",
+        "magnitude_out_file",
+        "phase_out_file",
+        "real_out_file",
+        "imaginary_out_file",
     ]
     _conversion = [
-        'real_polar',
-        'real_cartesian',
-        'complex_cartesian',
-        'complex_polar',
-        'complex_split',
-        'complex_merge',
+        "real_polar",
+        "real_cartesian",
+        "complex_cartesian",
+        "complex_polar",
+        "complex_split",
+        "complex_merge",
     ]

     complex_out_file = File(
-        genfile=True, argstr="%s", position=-3, xor=_ofs + _conversion[:2])
+        genfile=True, argstr="%s", position=-3, xor=_ofs + _conversion[:2]
+    )
     magnitude_out_file = File(
         genfile=True,
         argstr="%s",
         position=-4,
-        xor=_ofs[:1] + _ofs[3:] + _conversion[1:])
+        xor=_ofs[:1] + _ofs[3:] + _conversion[1:],
+    )
     phase_out_file = File(
         genfile=True,
         argstr="%s",
         position=-3,
-        xor=_ofs[:1] + _ofs[3:] + _conversion[1:])
+        xor=_ofs[:1] + _ofs[3:] + _conversion[1:],
+    )
     real_out_file = File(
         genfile=True,
         argstr="%s",
         position=-4,
-        xor=_ofs[:3] + _conversion[:1] + _conversion[2:])
+        xor=_ofs[:3] + _conversion[:1] + _conversion[2:],
+    )
     imaginary_out_file = File(
         genfile=True,
         argstr="%s",
         position=-3,
-        xor=_ofs[:3] + _conversion[:1] + _conversion[2:])
+        xor=_ofs[:3] + _conversion[:1] + _conversion[2:],
+    )

-    start_vol = traits.Int(position=-2, argstr='%d')
-    end_vol = traits.Int(position=-1, argstr='%d')
+    start_vol = traits.Int(position=-2, argstr="%d")
+    end_vol = traits.Int(position=-1, argstr="%d")

-    real_polar = traits.Bool(
-        argstr='-realpolar',
-        xor=_conversion,
-        position=1,
-    )
+    real_polar = traits.Bool(argstr="-realpolar", xor=_conversion, position=1)
     #        requires=['complex_in_file','magnitude_out_file','phase_out_file'])
-    real_cartesian = traits.Bool(
-        argstr='-realcartesian',
-        xor=_conversion,
-        position=1,
-    )
+    real_cartesian = traits.Bool(argstr="-realcartesian", xor=_conversion, position=1)
     #        requires=['complex_in_file','real_out_file','imaginary_out_file'])
-    complex_cartesian = traits.Bool(
-        argstr='-complex',
-        xor=_conversion,
-        position=1,
-    )
+    complex_cartesian = traits.Bool(argstr="-complex", xor=_conversion, position=1)
     #        requires=['real_in_file','imaginary_in_file','complex_out_file'])
-    complex_polar = traits.Bool(
-        argstr='-complexpolar',
-        xor=_conversion,
-        position=1,
-    )
+    complex_polar = traits.Bool(argstr="-complexpolar", xor=_conversion, position=1)
     #        requires=['magnitude_in_file','phase_in_file',
     #                  'magnitude_out_file','phase_out_file'])
-    complex_split = traits.Bool(
-        argstr='-complexsplit',
-        xor=_conversion,
-        position=1,
-    )
+    complex_split = traits.Bool(argstr="-complexsplit", xor=_conversion, position=1)
     #        requires=['complex_in_file','complex_out_file'])
     complex_merge = traits.Bool(
-        argstr='-complexmerge',
-        xor=_conversion + ['start_vol', 'end_vol'],
-        position=1,
+        argstr="-complexmerge", xor=_conversion + ["start_vol", "end_vol"], position=1
     )

@@ -1951,7 +2010,8 @@
 class Complex(FSLCommand):
     """

-    _cmd = 'fslcomplex'
+
+    _cmd = "fslcomplex"
     input_spec = ComplexInputSpec
     output_spec = ComplexOuputSpec

@@ -1964,10 +2024,10 @@ def _parse_inputs(self, skip=None):
             skip += self.inputs._ofs[:1] + self.inputs._ofs[3:]
         else:
             skip += self.inputs._ofs[1:]
-        return super(Complex, self)._parse_inputs(skip)
+        return super()._parse_inputs(skip)

     def _gen_filename(self, name):
-        if name == 'complex_out_file':
+        if name == "complex_out_file":
             if self.inputs.complex_cartesian:
                 in_file = self.inputs.real_in_file
             elif self.inputs.complex_polar:
@@ -1977,14 +2037,13 @@ def _gen_filename(self, name):
             else:
                 return None
             return self._gen_fname(in_file, suffix="_cplx")
-        elif name == 'magnitude_out_file':
+        elif name == "magnitude_out_file":
             return self._gen_fname(self.inputs.complex_in_file, suffix="_mag")
-        elif name == 'phase_out_file':
-            return self._gen_fname(
-                self.inputs.complex_in_file, suffix="_phase")
-        elif name == 'real_out_file':
+        elif name == "phase_out_file":
+            return self._gen_fname(self.inputs.complex_in_file, suffix="_phase")
+        elif name == "real_out_file":
             return self._gen_fname(self.inputs.complex_in_file, suffix="_real")
-        elif name == 'imaginary_out_file':
+        elif name == "imaginary_out_file":
             return self._gen_fname(self.inputs.complex_in_file, suffix="_imag")
         return None

@@ -1996,110 +2055,140 @@ def _get_output(self, name):

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        if self.inputs.complex_cartesian or self.inputs.complex_polar or \
-                self.inputs.complex_split or self.inputs.complex_merge:
-            outputs['complex_out_file'] = self._get_output('complex_out_file')
+        if (
+            self.inputs.complex_cartesian
+            or self.inputs.complex_polar
+            or self.inputs.complex_split
+            or self.inputs.complex_merge
+        ):
+            outputs["complex_out_file"] = self._get_output("complex_out_file")
         elif self.inputs.real_cartesian:
-            outputs['real_out_file'] = self._get_output('real_out_file')
-            outputs['imaginary_out_file'] = self._get_output(
-                'imaginary_out_file')
+            outputs["real_out_file"] = self._get_output("real_out_file")
+            outputs["imaginary_out_file"] = self._get_output("imaginary_out_file")
         elif self.inputs.real_polar:
-            outputs['magnitude_out_file'] = self._get_output(
-                'magnitude_out_file')
-            outputs['phase_out_file'] = self._get_output('phase_out_file')
+            outputs["magnitude_out_file"] = self._get_output("magnitude_out_file")
+            outputs["phase_out_file"] = self._get_output("phase_out_file")
         return outputs
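One plausible way to drive the Complex interface, splitting a complex image into magnitude and phase (placeholder filename; per _gen_filename above, the outputs are auto-named with "_mag" and "_phase" suffixes):

    >>> from nipype.interfaces.fsl import Complex
    >>> cplx = Complex()
    >>> cplx.inputs.complex_in_file = 'complex.nii'
    >>> cplx.inputs.real_polar = True
    >>> res = cplx.run()  # doctest: +SKIP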
It " + "would typically be the file that was specified " + "with the --in argument when running fnirt." + ), + ) out_format = traits.Enum( - 'spline', - 'field', - argstr='--outformat=%s', - desc=('Specifies the output format. If set to field (default) ' - 'the output will be a (4D) field-file. If set to spline ' - 'the format will be a (4D) file of spline coefficients.')) - - warp_resolution = traits.Tuple( + "spline", + "field", + argstr="--outformat=%s", + desc=( + "Specifies the output format. If set to field (default) " + "the output will be a (4D) field-file. If set to spline " + "the format will be a (4D) file of spline coefficients." + ), + ) + + warp_resolution = Tuple( traits.Float, traits.Float, traits.Float, - argstr='--warpres=%0.4f,%0.4f,%0.4f', - desc=('Specifies the resolution/knot-spacing of the splines pertaining' - ' to the coefficients in the --out file. This parameter is only ' - 'relevant if --outformat is set to spline. It should be noted ' - 'that if the --in file has a higher resolution, the resulting ' - 'coefficients will pertain to the closest (in a least-squares' - ' sense) file in the space of fields with the --warpres' - ' resolution. It should also be noted that the resolution ' - 'will always be an integer multiple of the voxel ' - 'size.')) - - knot_space = traits.Tuple( + argstr="--warpres=%0.4f,%0.4f,%0.4f", + desc=( + "Specifies the resolution/knot-spacing of the splines pertaining" + " to the coefficients in the --out file. This parameter is only " + "relevant if --outformat is set to spline. It should be noted " + "that if the --in file has a higher resolution, the resulting " + "coefficients will pertain to the closest (in a least-squares" + " sense) file in the space of fields with the --warpres" + " resolution. It should also be noted that the resolution " + "will always be an integer multiple of the voxel " + "size." + ), + ) + + knot_space = Tuple( traits.Int, traits.Int, traits.Int, - argstr='--knotspace=%d,%d,%d', - desc=('Alternative (to --warpres) specification of the resolution of ' - 'the output spline-field.')) + argstr="--knotspace=%d,%d,%d", + desc=( + "Alternative (to --warpres) specification of the resolution of " + "the output spline-field." + ), + ) out_file = File( - argstr='--out=%s', + argstr="--out=%s", position=-1, - name_source=['in_file'], - output_name='out_file', - desc=('Name of output file. The format of the output depends on what ' - 'other parameters are set. The default format is a (4D) ' - 'field-file. If the --outformat is set to spline the format ' - 'will be a (4D) file of spline coefficients.')) + name_source=["in_file"], + output_name="out_file", + desc=( + "Name of output file. The format of the output depends on what " + "other parameters are set. The default format is a (4D) " + "field-file. If the --outformat is set to spline the format " + "will be a (4D) file of spline coefficients." + ), + ) write_jacobian = traits.Bool( False, mandatory=True, usedefault=True, - desc='Switch on --jac flag with automatically generated filename') + desc="Switch on --jac flag with automatically generated filename", + ) out_jacobian = File( - argstr='--jac=%s', - desc=('Specifies that a (3D) file of Jacobian determinants ' - 'corresponding to --in should be produced and written to ' - 'filename.')) + argstr="--jac=%s", + desc=( + "Specifies that a (3D) file of Jacobian determinants " + "corresponding to --in should be produced and written to " + "filename." 
+ ), + ) with_affine = traits.Bool( False, - argstr='--withaff', - desc=('Specifies that the affine transform (i.e. that which was ' - 'specified for the --aff parameter in fnirt) should be ' - 'included as displacements in the --out file. That can be ' - 'useful for interfacing with software that cannot decode ' - 'FSL/fnirt coefficient-files (where the affine transform is ' - 'stored separately from the displacements).')) + argstr="--withaff", + desc=( + "Specifies that the affine transform (i.e. that which was " + "specified for the --aff parameter in fnirt) should be " + "included as displacements in the --out file. That can be " + "useful for interfacing with software that cannot decode " + "FSL/fnirt coefficient-files (where the affine transform is " + "stored separately from the displacements)." + ), + ) class WarpUtilsOutputSpec(TraitedSpec): out_file = File( - desc=('Name of output file, containing the warp as field or ' - 'coefficients.')) + desc=("Name of output file, containing the warp as field or coefficients.") + ) out_jacobian = File( - desc=('Name of output file, containing the map of the determinant of ' - 'the Jacobian')) + desc=( + "Name of output file, containing the map of the determinant of " + "the Jacobian" + ) + ) class WarpUtils(FSLCommand): @@ -2127,160 +2216,175 @@ class WarpUtils(FSLCommand): input_spec = WarpUtilsInputSpec output_spec = WarpUtilsOutputSpec - _cmd = 'fnirtfileutils' + _cmd = "fnirtfileutils" def _parse_inputs(self, skip=None): if skip is None: skip = [] - suffix = 'field' - if (isdefined(self.inputs.out_format) - and self.inputs.out_format == 'spline'): - suffix = 'coeffs' + suffix = "field" + if isdefined(self.inputs.out_format) and self.inputs.out_format == "spline": + suffix = "coeffs" - trait_spec = self.inputs.trait('out_file') + trait_spec = self.inputs.trait("out_file") trait_spec.name_template = "%s_" + suffix if self.inputs.write_jacobian: if not isdefined(self.inputs.out_jacobian): - jac_spec = self.inputs.trait('out_jacobian') - jac_spec.name_source = ['in_file'] - jac_spec.name_template = '%s_jac' - jac_spec.output_name = 'out_jacobian' + jac_spec = self.inputs.trait("out_jacobian") + jac_spec.name_source = ["in_file"] + jac_spec.name_template = "%s_jac" + jac_spec.output_name = "out_jacobian" else: - skip += ['out_jacobian'] + skip += ["out_jacobian"] - skip += ['write_jacobian'] - return super(WarpUtils, self)._parse_inputs(skip=skip) + skip += ["write_jacobian"] + return super()._parse_inputs(skip=skip) class ConvertWarpInputSpec(FSLCommandInputSpec): reference = File( exists=True, - argstr='--ref=%s', + argstr="--ref=%s", mandatory=True, position=1, - desc='Name of a file in target space of the full transform.') + desc="Name of a file in target space of the full transform.", + ) out_file = File( - argstr='--out=%s', + argstr="--out=%s", position=-1, - name_source=['reference'], - name_template='%s_concatwarp', - output_name='out_file', - desc=('Name of output file, containing warps that are the combination ' - 'of all those given as arguments. The format of this will be a ' - 'field-file (rather than spline coefficients) with any affine ' - 'components included.')) + name_source=["reference"], + name_template="%s_concatwarp", + output_name="out_file", + desc=( + "Name of output file, containing warps that are the combination " + "of all those given as arguments. The format of this will be a " + "field-file (rather than spline coefficients) with any affine " + "components included." 
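A sketch of converting a warp field to spline coefficients with WarpUtils (placeholder filenames; the _parse_inputs logic above then names the output with a "_coeffs" suffix):

    >>> from nipype.interfaces.fsl import WarpUtils
    >>> warputils = WarpUtils()
    >>> warputils.inputs.in_file = 'warpfield.nii'
    >>> warputils.inputs.reference = 'T1.nii'
    >>> warputils.inputs.out_format = 'spline'
    >>> warputils.inputs.warp_resolution = (10, 10, 10)
    >>> res = warputils.run()  # doctest: +SKIP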

 class ConvertWarpInputSpec(FSLCommandInputSpec):
     reference = File(
         exists=True,
-        argstr='--ref=%s',
+        argstr="--ref=%s",
         mandatory=True,
         position=1,
-        desc='Name of a file in target space of the full transform.')
+        desc="Name of a file in target space of the full transform.",
+    )

     out_file = File(
-        argstr='--out=%s',
+        argstr="--out=%s",
         position=-1,
-        name_source=['reference'],
-        name_template='%s_concatwarp',
-        output_name='out_file',
-        desc=('Name of output file, containing warps that are the combination '
-              'of all those given as arguments. The format of this will be a '
-              'field-file (rather than spline coefficients) with any affine '
-              'components included.'))
+        name_source=["reference"],
+        name_template="%s_concatwarp",
+        output_name="out_file",
+        desc=(
+            "Name of output file, containing warps that are the combination "
+            "of all those given as arguments. The format of this will be a "
+            "field-file (rather than spline coefficients) with any affine "
+            "components included."
+        ),
+    )

     premat = File(
         exists=True,
-        argstr='--premat=%s',
-        desc='filename for pre-transform (affine matrix)')
+        argstr="--premat=%s",
+        desc="filename for pre-transform (affine matrix)",
+    )

     warp1 = File(
         exists=True,
-        argstr='--warp1=%s',
-        desc='Name of file containing initial '
-        'warp-fields/coefficients (follows premat). This could '
-        'e.g. be a fnirt-transform from a subjects structural '
-        'scan to an average of a group of subjects.')
+        argstr="--warp1=%s",
+        desc="Name of file containing initial "
+        "warp-fields/coefficients (follows premat). This could "
+        "e.g. be a fnirt-transform from a subjects structural "
+        "scan to an average of a group of subjects.",
+    )

     midmat = File(
         exists=True,
         argstr="--midmat=%s",
-        desc="Name of file containing mid-warp-affine transform")
+        desc="Name of file containing mid-warp-affine transform",
+    )

     warp2 = File(
         exists=True,
-        argstr='--warp2=%s',
-        desc='Name of file containing secondary warp-fields/coefficients '
-        '(after warp1/midmat but before postmat). This could e.g. be a '
-        'fnirt-transform from the average of a group of subjects to some '
-        'standard space (e.g. MNI152).')
+        argstr="--warp2=%s",
+        desc="Name of file containing secondary warp-fields/coefficients "
+        "(after warp1/midmat but before postmat). This could e.g. be a "
+        "fnirt-transform from the average of a group of subjects to some "
+        "standard space (e.g. MNI152).",
+    )

     postmat = File(
         exists=True,
-        argstr='--postmat=%s',
-        desc='Name of file containing an affine transform (applied last). It '
-        'could e.g. be an affine transform that maps the MNI152-space '
-        'into a better approximation to the Talairach-space (if indeed '
-        'there is one).')
+        argstr="--postmat=%s",
+        desc="Name of file containing an affine transform (applied last). It "
+        "could e.g. be an affine transform that maps the MNI152-space "
+        "into a better approximation to the Talairach-space (if indeed "
+        "there is one).",
+    )

     shift_in_file = File(
         exists=True,
-        argstr='--shiftmap=%s',
+        argstr="--shiftmap=%s",
         desc='Name of file containing a "shiftmap", a non-linear transform '
-        'with displacements only in one direction (applied first, before '
-        'premat). This would typically be a fieldmap that has been '
-        'pre-processed using fugue that maps a subjects functional (EPI) '
-        'data onto an undistorted space (i.e. a space that corresponds '
-        'to his/her true anatomy).')
+        "with displacements only in one direction (applied first, before "
+        "premat). This would typically be a fieldmap that has been "
+        "pre-processed using fugue that maps a subjects functional (EPI) "
+        "data onto an undistorted space (i.e. a space that corresponds "
+        "to his/her true anatomy).",
+    )

     shift_direction = traits.Enum(
-        'y-',
-        'y',
-        'x',
-        'x-',
-        'z',
-        'z-',
+        "y-",
+        "y",
+        "x",
+        "x-",
+        "z",
+        "z-",
         argstr="--shiftdir=%s",
-        requires=['shift_in_file'],
-        desc='Indicates the direction that the distortions from '
-        '--shiftmap goes. It depends on the direction and '
-        'polarity of the phase-encoding in the EPI sequence.')
+        requires=["shift_in_file"],
+        desc="Indicates the direction that the distortions from "
+        "--shiftmap goes. It depends on the direction and "
+        "polarity of the phase-encoding in the EPI sequence.",
+    )

     cons_jacobian = traits.Bool(
         False,
-        argstr='--constrainj',
-        desc='Constrain the Jacobian of the warpfield to lie within specified '
-        'min/max limits.')
+        argstr="--constrainj",
+        desc="Constrain the Jacobian of the warpfield to lie within specified "
+        "min/max limits.",
+    )

     jacobian_min = traits.Float(
-        argstr='--jmin=%f',
-        desc='Minimum acceptable Jacobian value for '
-        'constraint (default 0.01)')
+        argstr="--jmin=%f",
+        desc="Minimum acceptable Jacobian value for constraint (default 0.01)",
+    )
     jacobian_max = traits.Float(
-        argstr='--jmax=%f',
-        desc='Maximum acceptable Jacobian value for '
-        'constraint (default 100.0)')
+        argstr="--jmax=%f",
+        desc="Maximum acceptable Jacobian value for constraint (default 100.0)",
+    )

     abswarp = traits.Bool(
-        argstr='--abs',
-        xor=['relwarp'],
-        desc='If set it indicates that the warps in --warp1 and --warp2 should'
-        ' be interpreted as absolute. I.e. the values in --warp1/2 are '
-        'the coordinates in the next space, rather than displacements. '
-        'This flag is ignored if --warp1/2 was created by fnirt, which '
-        'always creates relative displacements.')
+        argstr="--abs",
+        xor=["relwarp"],
+        desc="If set it indicates that the warps in --warp1 and --warp2 should"
+        " be interpreted as absolute. I.e. the values in --warp1/2 are "
+        "the coordinates in the next space, rather than displacements. "
+        "This flag is ignored if --warp1/2 was created by fnirt, which "
+        "always creates relative displacements.",
+    )

     relwarp = traits.Bool(
-        argstr='--rel',
-        xor=['abswarp'],
-        desc='If set it indicates that the warps in --warp1/2 should be '
-        'interpreted as relative. I.e. the values in --warp1/2 are '
-        'displacements from the coordinates in the next space.')
+        argstr="--rel",
+        xor=["abswarp"],
+        desc="If set it indicates that the warps in --warp1/2 should be "
+        "interpreted as relative. I.e. the values in --warp1/2 are "
+        "displacements from the coordinates in the next space.",
+    )

     out_abswarp = traits.Bool(
-        argstr='--absout',
-        xor=['out_relwarp'],
-        desc='If set it indicates that the warps in --out should be absolute, '
-        'i.e. the values in --out are displacements from the coordinates '
-        'in --ref.')
+        argstr="--absout",
+        xor=["out_relwarp"],
+        desc="If set it indicates that the warps in --out should be absolute, "
+        "i.e. the values in --out are displacements from the coordinates "
+        "in --ref.",
+    )

     out_relwarp = traits.Bool(
-        argstr='--relout',
-        xor=['out_abswarp'],
-        desc='If set it indicates that the warps in --out should be relative, '
-        'i.e. the values in --out are displacements from the coordinates '
-        'in --ref.')
+        argstr="--relout",
+        xor=["out_abswarp"],
+        desc="If set it indicates that the warps in --out should be relative, "
+        "i.e. the values in --out are displacements from the coordinates "
+        "in --ref.",
+    )


 class ConvertWarpOutputSpec(TraitedSpec):
     out_file = File(
         exists=True,
-        desc='Name of output file, containing the warp as field or '
-        'coefficients.')
+        desc="Name of output file, containing the warp as field or coefficients.",
+    )


 class ConvertWarp(FSLCommand):
@@ -2306,59 +2410,63 @@ class ConvertWarp(FSLCommand):
     input_spec = ConvertWarpInputSpec
     output_spec = ConvertWarpOutputSpec

-    _cmd = 'convertwarp'
+    _cmd = "convertwarp"
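A sketch chaining an affine pre-transform with a nonlinear warp via ConvertWarp (placeholder filenames; the combined field is written to the "%s_concatwarp" template derived from reference):

    >>> from nipype.interfaces.fsl import ConvertWarp
    >>> cw = ConvertWarp()
    >>> cw.inputs.reference = 'mni.nii'
    >>> cw.inputs.premat = 'func2struct.mat'
    >>> cw.inputs.warp1 = 'struct2mni_warp.nii.gz'
    >>> res = cw.run()  # doctest: +SKIP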

 class WarpPointsBaseInputSpec(CommandLineInputSpec):
     in_coords = File(
         exists=True,
         position=-1,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
-        desc='filename of file containing coordinates')
+        desc="filename of file containing coordinates",
+    )
     xfm_file = File(
         exists=True,
-        argstr='-xfm %s',
-        xor=['warp_file'],
-        desc='filename of affine transform (e.g. source2dest.mat)')
+        argstr="-xfm %s",
+        xor=["warp_file"],
+        desc="filename of affine transform (e.g. source2dest.mat)",
+    )
     warp_file = File(
         exists=True,
-        argstr='-warp %s',
-        xor=['xfm_file'],
-        desc='filename of warpfield (e.g. '
-        'intermediate2dest_warp.nii.gz)')
+        argstr="-warp %s",
+        xor=["xfm_file"],
+        desc="filename of warpfield (e.g. intermediate2dest_warp.nii.gz)",
+    )
     coord_vox = traits.Bool(
         True,
-        argstr='-vox',
-        xor=['coord_mm'],
-        desc='all coordinates in voxels - default')
+        argstr="-vox",
+        xor=["coord_mm"],
+        desc="all coordinates in voxels - default",
+    )
     coord_mm = traits.Bool(
-        False, argstr='-mm', xor=['coord_vox'], desc='all coordinates in mm')
+        False, argstr="-mm", xor=["coord_vox"], desc="all coordinates in mm"
+    )
     out_file = File(
-        name_source='in_coords',
-        name_template='%s_warped',
-        output_name='out_file',
-        desc='output file name')
+        name_source="in_coords",
+        name_template="%s_warped",
+        output_name="out_file",
+        desc="output file name",
+    )


 class WarpPointsInputSpec(WarpPointsBaseInputSpec):
     src_file = File(
-        exists=True,
-        argstr='-src %s',
-        mandatory=True,
-        desc='filename of source image')
+        exists=True, argstr="-src %s", mandatory=True, desc="filename of source image"
+    )
     dest_file = File(
         exists=True,
-        argstr='-dest %s',
+        argstr="-dest %s",
         mandatory=True,
-        desc='filename of destination image')
+        desc="filename of destination image",
+    )


 class WarpPointsOutputSpec(TraitedSpec):
     out_file = File(
         exists=True,
-        desc='Name of output file, containing the warp as field or '
-        'coefficients.')
+        desc="Name of output file, containing the warp as field or coefficients.",
+    )


 class WarpPoints(CommandLine):
@@ -2387,35 +2495,35 @@ class WarpPoints(CommandLine):
     input_spec = WarpPointsInputSpec
     output_spec = WarpPointsOutputSpec

-    _cmd = 'img2imgcoord'
-    _terminal_output = 'stream'
+    _cmd = "img2imgcoord"
+    _terminal_output = "stream"

     def __init__(self, command=None, **inputs):
         self._tmpfile = None
         self._in_file = None
         self._outformat = None
-        super(WarpPoints, self).__init__(command=command, **inputs)
+        super().__init__(command=command, **inputs)

     def _format_arg(self, name, trait_spec, value):
-        if name == 'out_file':
-            return ''
+        if name == "out_file":
+            return ""

-        return super(WarpPoints, self)._format_arg(name, trait_spec, value)
+        return super()._format_arg(name, trait_spec, value)

     def _parse_inputs(self, skip=None):
         fname, ext = op.splitext(self.inputs.in_coords)
-        setattr(self, '_in_file', fname)
-        setattr(self, '_outformat', ext[1:])
-        first_args = super(WarpPoints,
-                           self)._parse_inputs(skip=['in_coords', 'out_file'])
+        self._in_file = fname
+        self._outformat = ext[1:]
+        first_args = super()._parse_inputs(skip=["in_coords", "out_file"])

-        second_args = fname + '.txt'
+        second_args = fname + ".txt"

-        if ext in ['.vtk', '.trk']:
+        if ext in [".vtk", ".trk"]:
             if self._tmpfile is None:
                 self._tmpfile = tempfile.NamedTemporaryFile(
-                    suffix='.txt', dir=os.getcwd(), delete=False).name
+                    suffix=".txt", dir=os.getcwd(), delete=False
+                ).name
             second_args = self._tmpfile

         return first_args + [second_args]

@@ -2425,16 +2533,15 @@ def _vtk_to_coords(self, in_file, out_file=None):
         from ...interfaces import vtkbase as VTKInfo

         if VTKInfo.no_tvtk():
-            raise ImportError(
-                'TVTK is required and tvtk package was not found')
+            raise ImportError("TVTK is required and tvtk package was not found")

-        reader = tvtk.PolyDataReader(file_name=in_file + '.vtk')
+        reader = tvtk.PolyDataReader(file_name=in_file + ".vtk")
         reader.update()
         mesh = VTKInfo.vtk_output(reader)
         points = mesh.points

         if out_file is None:
-            out_file, _ = op.splitext(in_file) + '.txt'
+            out_file, _ = op.splitext(in_file) + ".txt"

         np.savetxt(out_file, points)
         return out_file

@@ -2444,8 +2551,7 @@ def _coords_to_vtk(self, points, out_file):
         from ...interfaces import vtkbase as VTKInfo

         if VTKInfo.no_tvtk():
-            raise ImportError(
-                'TVTK is required and tvtk package was not found')
+            raise ImportError("TVTK is required and tvtk package was not found")

         reader = tvtk.PolyDataReader(file_name=self.inputs.in_file)
         reader.update()

@@ -2459,37 +2565,37 @@ def _coords_to_vtk(self, points, out_file):
     def _trk_to_coords(self, in_file, out_file=None):
         from nibabel.trackvis import TrackvisFile
+
         trkfile = TrackvisFile.from_file(in_file)
         streamlines = trkfile.streamlines

         if out_file is None:
             out_file, _ = op.splitext(in_file)

-        np.savetxt(streamlines, out_file + '.txt')
-        return out_file + '.txt'
+        np.savetxt(out_file + ".txt", streamlines)
+        return out_file + ".txt"

     def _coords_to_trk(self, points, out_file):
-        raise NotImplementedError('trk files are not yet supported')
+        raise NotImplementedError("trk files are not yet supported")

     def _overload_extension(self, value, name):
-        if name == 'out_file':
-            return '%s.%s' % (value, getattr(self, '_outformat'))
+        if name == "out_file":
+            return "{}.{}".format(value, self._outformat)

     def _run_interface(self, runtime):
-        fname = getattr(self, '_in_file')
-        outformat = getattr(self, '_outformat')
+        fname = self._in_file
+        outformat = self._outformat
         tmpfile = None

-        if outformat == 'vtk':
+        if outformat == "vtk":
             tmpfile = self._tmpfile
             self._vtk_to_coords(fname, out_file=tmpfile)
-        elif outformat == 'trk':
+        elif outformat == "trk":
             tmpfile = self._tmpfile
             self._trk_to_coords(fname, out_file=tmpfile)

-        runtime = super(WarpPoints, self)._run_interface(runtime)
-        newpoints = np.fromstring(
-            '\n'.join(runtime.stdout.split('\n')[1:]), sep=' ')
+        runtime = super()._run_interface(runtime)
+        newpoints = np.fromstring("\n".join(runtime.stdout.split("\n")[1:]), sep=" ")

         if tmpfile is not None:
             try:
@@ -2497,11 +2603,11 @@ def _run_interface(self, runtime):
             except:
                 pass

-        out_file = self._filename_from_source('out_file')
+        out_file = self._filename_from_source("out_file")

-        if outformat == 'vtk':
+        if outformat == "vtk":
             self._coords_to_vtk(newpoints, out_file)
-        elif outformat == 'trk':
+        elif outformat == "trk":
             self._coords_to_trk(newpoints, out_file)
         else:
             np.savetxt(out_file, newpoints.reshape(-1, 3))
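An illustrative WarpPoints invocation mapping mm coordinates between images (placeholder filenames; .txt, .vtk, and .trk inputs are handled by the conversion helpers above):

    >>> from nipype.interfaces.fsl import WarpPoints
    >>> wp = WarpPoints()
    >>> wp.inputs.in_coords = 'surf.txt'
    >>> wp.inputs.src_file = 'epi.nii'
    >>> wp.inputs.dest_file = 'T1.nii'
    >>> wp.inputs.warp_file = 'warpfield.nii'
    >>> wp.inputs.coord_mm = True
    >>> res = wp.run()  # doctest: +SKIP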

@@ -2511,20 +2617,19 @@
 class WarpPointsToStdInputSpec(WarpPointsBaseInputSpec):
     img_file = File(
-        exists=True,
-        argstr='-img %s',
-        mandatory=True,
-        desc=('filename of input image'))
+        exists=True, argstr="-img %s", mandatory=True, desc=("filename of input image")
+    )
     std_file = File(
         exists=True,
-        argstr='-std %s',
+        argstr="-std %s",
         mandatory=True,
-        desc=('filename of destination image'))
+        desc=("filename of destination image"),
+    )
     premat_file = File(
         exists=True,
-        argstr='-premat %s',
-        desc=('filename of pre-warp affine transform '
-              '(e.g. example_func2highres.mat)'))
+        argstr="-premat %s",
+        desc=("filename of pre-warp affine transform (e.g. example_func2highres.mat)"),
+    )


 class WarpPointsToStd(WarpPoints):
@@ -2555,45 +2660,51 @@ class WarpPointsToStd(WarpPoints):
     input_spec = WarpPointsToStdInputSpec
     output_spec = WarpPointsOutputSpec

-    _cmd = 'img2stdcoord'
-    _terminal_output = 'file_split'
+    _cmd = "img2stdcoord"
+    _terminal_output = "file_split"


 class WarpPointsFromStdInputSpec(CommandLineInputSpec):
     img_file = File(
         exists=True,
-        argstr='-img %s',
+        argstr="-img %s",
         mandatory=True,
-        desc='filename of a destination image')
+        desc="filename of a destination image",
+    )
     std_file = File(
         exists=True,
-        argstr='-std %s',
+        argstr="-std %s",
         mandatory=True,
-        desc='filename of the image in standard space')
+        desc="filename of the image in standard space",
+    )
     in_coords = File(
         exists=True,
         position=-2,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
-        desc='filename of file containing coordinates')
+        desc="filename of file containing coordinates",
+    )
     xfm_file = File(
         exists=True,
-        argstr='-xfm %s',
-        xor=['warp_file'],
-        desc='filename of affine transform (e.g. source2dest.mat)')
+        argstr="-xfm %s",
+        xor=["warp_file"],
+        desc="filename of affine transform (e.g. source2dest.mat)",
+    )
     warp_file = File(
         exists=True,
-        argstr='-warp %s',
-        xor=['xfm_file'],
-        desc='filename of warpfield (e.g. '
-        'intermediate2dest_warp.nii.gz)')
+        argstr="-warp %s",
+        xor=["xfm_file"],
+        desc="filename of warpfield (e.g. intermediate2dest_warp.nii.gz)",
+    )
     coord_vox = traits.Bool(
         True,
-        argstr='-vox',
-        xor=['coord_mm'],
-        desc='all coordinates in voxels - default')
+        argstr="-vox",
+        xor=["coord_mm"],
+        desc="all coordinates in voxels - default",
+    )
     coord_mm = traits.Bool(
-        False, argstr='-mm', xor=['coord_vox'], desc='all coordinates in mm')
+        False, argstr="-mm", xor=["coord_vox"], desc="all coordinates in mm"
+    )


 class WarpPointsFromStd(CommandLine):
@@ -2622,67 +2733,73 @@ class WarpPointsFromStd(CommandLine):
     input_spec = WarpPointsFromStdInputSpec
     output_spec = WarpPointsOutputSpec

-    _cmd = 'std2imgcoord'
+    _cmd = "std2imgcoord"

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_file'] = op.abspath('stdout.nipype')
+        outputs["out_file"] = op.abspath("stdout.nipype")
         return outputs


 class MotionOutliersInputSpec(FSLCommandInputSpec):
     in_file = File(
-        exists=True,
-        mandatory=True,
-        desc="unfiltered 4D image",
-        argstr="-i %s")
+        exists=True, mandatory=True, desc="unfiltered 4D image", argstr="-i %s"
+    )
     out_file = File(
         argstr="-o %s",
-        name_source='in_file',
-        name_template='%s_outliers.txt',
+        name_source="in_file",
+        name_template="%s_outliers.txt",
         keep_extension=True,
-        desc='output outlier file name',
-        hash_files=False)
-    mask = File(
-        exists=True, argstr="-m %s", desc="mask image for calculating metric")
+        desc="output outlier file name",
+        hash_files=False,
+    )
+    mask = File(exists=True, argstr="-m %s", desc="mask image for calculating metric")
     metric = traits.Enum(
-        'refrms', ['refrms', 'dvars', 'refmse', 'fd', 'fdrms'],
+        "refrms",
+        ["refrms", "dvars", "refmse", "fd", "fdrms"],
         argstr="--%s",
-        desc='metrics: refrms - RMS intensity difference to reference volume '
-        'as metric [default metric], refmse - Mean Square Error version '
-        'of refrms (used in original version of fsl_motion_outliers), '
-        'dvars - DVARS, fd - frame displacement, fdrms - FD with RMS '
-        'matrix calculation')
+        desc="metrics: refrms - RMS intensity difference to reference volume "
+        "as metric [default metric], refmse - Mean Square Error version "
+        "of refrms (used in original version of fsl_motion_outliers), "
+        "dvars - DVARS, fd - frame displacement, fdrms - FD with RMS "
+        "matrix calculation",
+    )
     threshold = traits.Float(
         argstr="--thresh=%g",
-        desc=("specify absolute threshold value "
-              "(otherwise use box-plot cutoff = P75 + "
-              "1.5*IQR)"))
+        desc=(
+            "specify absolute threshold value "
+            "(otherwise use box-plot cutoff = P75 + "
+            "1.5*IQR)"
+        ),
+    )
     no_motion_correction = traits.Bool(
-        argstr="--nomoco",
-        desc="do not run motion correction (assumed already done)")
+        argstr="--nomoco", desc="do not run motion correction (assumed already done)"
+    )
     dummy = traits.Int(
         argstr="--dummy=%d",
-        desc='number of dummy scans to delete (before running anything and '
-        'creating EVs)')
+        desc="number of dummy scans to delete (before running anything and "
+        "creating EVs)",
+    )
     out_metric_values = File(
         argstr="-s %s",
-        name_source='in_file',
-        name_template='%s_metrics.txt',
+        name_source="in_file",
+        name_template="%s_metrics.txt",
         keep_extension=True,
-        desc='output metric values (DVARS etc.) file name',
-        hash_files=False)
+        desc="output metric values (DVARS etc.) file name",
+        hash_files=False,
+    )
     out_metric_plot = File(
         argstr="-p %s",
-        name_source='in_file',
-        name_template='%s_metrics.png',
+        name_source="in_file",
+        name_template="%s_metrics.png",
         hash_files=False,
         keep_extension=True,
-        desc='output metric values plot (DVARS etc.) file name')
+        desc="output metric values plot (DVARS etc.) file name",
+    )


 class MotionOutliersOutputSpec(TraitedSpec):
-    out_file = File(exists=True)
+    out_file = File()
     out_metric_values = File(exists=True)
     out_metric_plot = File(exists=True)

@@ -2702,4 +2819,93 @@ class MotionOutliers(FSLCommand):
     input_spec = MotionOutliersInputSpec
     output_spec = MotionOutliersOutputSpec
-    _cmd = 'fsl_motion_outliers'
+    _cmd = "fsl_motion_outliers"
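A minimal MotionOutliers sketch (placeholder filename); the outlier matrix, metric values, and plot are auto-named from in_file via the name templates above:

    >>> from nipype.interfaces.fsl import MotionOutliers
    >>> mo = MotionOutliers()
    >>> mo.inputs.in_file = 'epi.nii'
    >>> mo.inputs.metric = 'dvars'
    >>> res = mo.run()  # doctest: +SKIP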
+
+
+class Text2VestInputSpec(FSLCommandInputSpec):
+    in_file = File(
+        exists=True,
+        mandatory=True,
+        desc="plain text file representing your design, contrast, or f-test matrix",
+        argstr="%s",
+        position=0,
+    )
+
+    out_file = File(
+        mandatory=True,
+        desc=(
+            "file name to store matrix data in the format used by FSL tools"
+            " (e.g., design.mat, design.con, design.fts)"
+        ),
+        argstr="%s",
+        position=1,
+    )
+
+
+class Text2VestOutputSpec(TraitedSpec):
+    out_file = File(desc="matrix data in the format used by FSL tools")
+
+
+class Text2Vest(FSLCommand):
+    """
+    Use FSL `Text2Vest <https://web.mit.edu/fsl_v5.0.10/fsl/doc/wiki/GLM(2f)CreatingDesignMatricesByHand.html>`_
+    to convert your plain text design matrix data into the format used by the FSL tools.
+
+    Examples
+    --------
+    >>> from nipype.interfaces.fsl import Text2Vest
+    >>> t2v = Text2Vest()
+    >>> t2v.inputs.in_file = "design.txt"
+    >>> t2v.inputs.out_file = "design.mat"
+    >>> t2v.cmdline
+    'Text2Vest design.txt design.mat'
+    >>> res = t2v.run() # doctest: +SKIP
+    """
+
+    input_spec = Text2VestInputSpec
+    output_spec = Text2VestOutputSpec
+
+    _cmd = "Text2Vest"
+
+
+class Vest2TextInputSpec(FSLCommandInputSpec):
+    in_file = File(
+        exists=True,
+        mandatory=True,
+        desc="matrix data stored in the format used by FSL tools",
+        argstr="%s",
+        position=0,
+    )
+
+    out_file = File(
+        "design.txt",
+        usedefault=True,
+        desc="file name to store text output from matrix",
+        argstr="%s",
+        position=1,
+    )
+
+
+class Vest2TextOutputSpec(TraitedSpec):
+    out_file = File(desc="plain text representation of FSL matrix")
+
+
+class Vest2Text(FSLCommand):
+    """
+    Use FSL `Vest2Text <https://web.mit.edu/fsl_v5.0.10/fsl/doc/wiki/GLM(2f)CreatingDesignMatricesByHand.html>`_
+    to convert your design.mat, design.con, and design.fts files into plain text.
+
+    Examples
+    --------
+    >>> from nipype.interfaces.fsl import Vest2Text
+    >>> v2t = Vest2Text()
+    >>> v2t.inputs.in_file = "design.mat"
+    >>> v2t.cmdline
+    'Vest2Text design.mat design.txt'
+    >>> res = v2t.run() # doctest: +SKIP
+    """
+
+    input_spec = Vest2TextInputSpec
+    output_spec = Vest2TextOutputSpec
+
+    _cmd = "Vest2Text"
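Since the two converters above are inverses, a VEST matrix can be round-tripped into NumPy for inspection (a sketch with hypothetical files):

    >>> import numpy as np
    >>> from nipype.interfaces.fsl import Vest2Text
    >>> v2t = Vest2Text(in_file='design.mat', out_file='design.txt')
    >>> res = v2t.run()  # doctest: +SKIP
    >>> design = np.loadtxt('design.txt')  # doctest: +SKIP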
diff --git a/nipype/interfaces/image.py b/nipype/interfaces/image.py
index 061bd1e2cc..f86ae7ef15 100644
--- a/nipype/interfaces/image.py
+++ b/nipype/interfaces/image.py
@@ -1,29 +1,32 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:

 from ..utils.filemanip import fname_presuffix
-from .base import (SimpleInterface, TraitedSpec, BaseInterfaceInputSpec,
-                   traits, File)
+from .base import SimpleInterface, TraitedSpec, BaseInterfaceInputSpec, traits, File
+
+from looseversion import LooseVersion


 class RescaleInputSpec(BaseInterfaceInputSpec):
-    in_file = File(exists=True, mandatory=True,
-                   desc='Skull-stripped image to rescale')
-    ref_file = File(exists=True, mandatory=True,
-                    desc='Skull-stripped reference image')
-    invert = traits.Bool(desc='Invert contrast of rescaled image')
-    percentile = traits.Range(low=0., high=50., value=0., usedefault=True,
-                              desc='Percentile to use for reference to allow '
-                                   'for outliers - 1 indicates the 1st and '
-                                   '99th percentiles in the input file will '
-                                   'be mapped to the 99th and 1st percentiles '
-                                   'in the reference; 0 indicates minima and '
-                                   'maxima will be mapped')
+    in_file = File(exists=True, mandatory=True, desc="Skull-stripped image to rescale")
+    ref_file = File(exists=True, mandatory=True, desc="Skull-stripped reference image")
+    invert = traits.Bool(desc="Invert contrast of rescaled image")
+    percentile = traits.Range(
+        low=0.0,
+        high=50.0,
+        value=0.0,
+        usedefault=True,
+        desc="Percentile to use for reference to allow "
+        "for outliers - 1 indicates the 1st and "
+        "99th percentiles in the input file will "
+        "be mapped to the 99th and 1st percentiles "
+        "in the reference; 0 indicates minima and "
+        "maxima will be mapped",
+    )


 class RescaleOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='Rescaled image')
+    out_file = File(exists=True, desc="Rescaled image")


 class Rescale(SimpleInterface):
@@ -42,8 +45,8 @@ class Rescale(SimpleInterface):
     Examples
     --------

-    To use a high-resolution T1w image as a registration target for a T2\*
-    image, it may be useful to invert the T1w image and rescale to the T2\*
+    To use a high-resolution T1w image as a registration target for a T2\\*
+    image, it may be useful to invert the T1w image and rescale to the T2\\*
     range. Using the 1st and 99th percentiles may reduce the impact of
     outlier voxels.

@@ -56,6 +59,7 @@ class Rescale(SimpleInterface):
     >>> res = invert_t1w.run()  # doctest: +SKIP

     """
+
     input_spec = RescaleInputSpec
     output_spec = RescaleOutputSpec

@@ -64,13 +68,13 @@ def _run_interface(self, runtime):
         import nibabel as nb

         img = nb.load(self.inputs.in_file)
-        data = img.get_data()
-        ref_data = nb.load(self.inputs.ref_file).get_data()
+        data = img.get_fdata()
+        ref_data = nb.load(self.inputs.ref_file).get_fdata()

         in_mask = data > 0
         ref_mask = ref_data > 0

-        q = [self.inputs.percentile, 100. - self.inputs.percentile]
+        q = [self.inputs.percentile, 100.0 - self.inputs.percentile]
         in_low, in_high = np.percentile(data[in_mask], q)
         ref_low, ref_high = np.percentile(ref_data[ref_mask], q)
         scale_factor = (ref_high - ref_low) / (in_high - in_low)
@@ -78,103 +82,110 @@ def _run_interface(self, runtime):

         signal = in_high - data if self.inputs.invert else data - in_low
         out_data = in_mask * (signal * scale_factor + ref_low)

-        suffix = '_inv' if self.inputs.invert else '_rescaled'
-        out_file = fname_presuffix(self.inputs.in_file, suffix=suffix,
-                                   newpath=runtime.cwd)
+        suffix = "_inv" if self.inputs.invert else "_rescaled"
+        out_file = fname_presuffix(
+            self.inputs.in_file, suffix=suffix, newpath=runtime.cwd
+        )
         img.__class__(out_data, img.affine, img.header).to_filename(out_file)

-        self._results['out_file'] = out_file
+        self._results["out_file"] = out_file
         return runtime
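The rescaling above is a linear map between percentile ranges; a standalone NumPy sketch of the same arithmetic (toy data, not the interface code):

    >>> import numpy as np
    >>> data = np.array([1.0, 2.0, 5.0, 10.0])
    >>> ref = np.array([2.0, 4.0, 10.0, 20.0])
    >>> in_low, in_high = np.percentile(data, [0, 100])
    >>> ref_low, ref_high = np.percentile(ref, [0, 100])
    >>> scale_factor = (ref_high - ref_low) / (in_high - in_low)
    >>> float((data - in_low).max() * scale_factor + ref_low)
    20.0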

-_axes = ('RL', 'AP', 'SI')
+_axes = ("RL", "AP", "SI")
 _orientations = tuple(
-    ''.join((x[i], y[j], z[k]))
-    for x in _axes for y in _axes for z in _axes
+    "".join((x[i], y[j], z[k]))
+    for x in _axes
+    for y in _axes
+    for z in _axes
     if x != y != z != x
-    for i in (0, 1) for j in (0, 1) for k in (0, 1))
+    for i in (0, 1)
+    for j in (0, 1)
+    for k in (0, 1)
+)


 class ReorientInputSpec(BaseInterfaceInputSpec):
-    in_file = File(exists=True, mandatory=True, desc='Input image')
-    orientation = traits.Enum(_orientations, usedefault=True,
-                              desc='Target axis orientation')
+    in_file = File(exists=True, mandatory=True, desc="Input image")
+    orientation = traits.Enum(
+        _orientations, usedefault=True, desc="Target axis orientation"
+    )


 class ReorientOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='Reoriented image')
-    transform = File(exists=True,
-                     desc='Affine transform from input orientation to output')
+    out_file = File(exists=True, desc="Reoriented image")
+    transform = File(
+        exists=True, desc="Affine transform from input orientation to output"
+    )


 class Reorient(SimpleInterface):
     """Conform an image to a given orientation

-Flips and reorder the image data array so that the axes match the
-directions indicated in ``orientation``.
-The default ``RAS`` orientation corresponds to the first axis being ordered
-from left to right, the second axis from posterior to anterior, and the
-third axis from inferior to superior.
+    Flips and reorders the image data array so that the axes match the
+    directions indicated in ``orientation``.
+    The default ``RAS`` orientation corresponds to the first axis being ordered
+    from left to right, the second axis from posterior to anterior, and the
+    third axis from inferior to superior.

-For oblique images, the original orientation is considered to be the
-closest plumb orientation.
+    For oblique images, the original orientation is considered to be the
+    closest plumb orientation.

-No resampling is performed, and thus the output image is not de-obliqued
-or registered to any other image or template.
+    No resampling is performed, and thus the output image is not de-obliqued
+    or registered to any other image or template.

-The effective transform is calculated from the original affine matrix to
-the reoriented affine matrix.
+    The effective transform is calculated from the original affine matrix to
+    the reoriented affine matrix.

-Examples
---------
+    Examples
+    --------

-If an image is not reoriented, the original file is not modified
+    If an image is not reoriented, the original file is not modified

-.. testsetup::
+    .. testsetup::

-    >>> def print_affine(matrix):
-    ...     print(str(matrix).replace(']', ' ').replace('[', ' '))
+        >>> def print_affine(matrix):
+        ...     print(str(matrix).replace(']', ' ').replace('[', ' '))

->>> import numpy as np
->>> from nipype.interfaces.image import Reorient
->>> reorient = Reorient(orientation='LPS')
->>> reorient.inputs.in_file = 'segmentation0.nii.gz'
->>> res = reorient.run()
->>> res.outputs.out_file
-'segmentation0.nii.gz'
+    >>> import numpy as np
+    >>> from nipype.interfaces.image import Reorient
+    >>> reorient = Reorient(orientation='LPS')
+    >>> reorient.inputs.in_file = 'segmentation0.nii.gz'
+    >>> res = reorient.run()
+    >>> res.outputs.out_file
+    'segmentation0.nii.gz'

->>> print_affine(np.loadtxt(res.outputs.transform))
-1. 0. 0. 0.
-0. 1. 0. 0.
-0. 0. 1. 0.
-0. 0. 0. 1.
+    >>> print_affine(np.loadtxt(res.outputs.transform))
+    1. 0. 0. 0.
+    0. 1. 0. 0.
+    0. 0. 1. 0.
+    0. 0. 0. 1.

->>> reorient.inputs.orientation = 'RAS'
->>> res = reorient.run()
->>> res.outputs.out_file  # doctest: +ELLIPSIS
-'.../segmentation0_ras.nii.gz'
+    >>> reorient.inputs.orientation = 'RAS'
+    >>> res = reorient.run()
+    >>> res.outputs.out_file  # doctest: +ELLIPSIS
+    '.../segmentation0_ras.nii.gz'

->>> print_affine(np.loadtxt(res.outputs.transform))
--1.  0.  0.  60.
- 0. -1.  0.  72.
- 0.  0.  1.   0.
- 0.  0.  0.   1.
+    >>> print_affine(np.loadtxt(res.outputs.transform))
+    -1.  0.  0.  60.
+     0. -1.  0.  72.
+     0.  0.  1.   0.
+     0.  0.  0.   1.

-.. testcleanup::
+    .. testcleanup::

-    >>> import os
-    >>> os.unlink(res.outputs.out_file)
-    >>> os.unlink(res.outputs.transform)
+        >>> import os
+        >>> os.unlink(res.outputs.out_file)
+        >>> os.unlink(res.outputs.transform)
+
+    """

-"""
     input_spec = ReorientInputSpec
     output_spec = ReorientOutputSpec

     def _run_interface(self, runtime):
         import numpy as np
         import nibabel as nb
-        from nibabel.orientations import (
-            axcodes2ornt, ornt_transform, inv_ornt_aff)
+        from nibabel.orientations import axcodes2ornt, ornt_transform, inv_ornt_aff

         fname = self.inputs.in_file
         orig_img = nb.load(fname)
@@ -186,52 +197,50 @@ def _run_interface(self, runtime):
         transform = ornt_transform(orig_ornt, targ_ornt)
         affine_xfm = inv_ornt_aff(transform, orig_img.shape)

-        # Check can be eliminated when minimum nibabel version >= 2.2
-        if hasattr(orig_img, 'as_reoriented'):
+        # Check can be eliminated when minimum nibabel version >= 2.4
+        if LooseVersion(nb.__version__) >= LooseVersion("2.4.0"):
             reoriented = orig_img.as_reoriented(transform)
         else:
             reoriented = _as_reoriented_backport(orig_img, transform)

         # Image may be reoriented
         if reoriented is not orig_img:
-            suffix = '_' + self.inputs.orientation.lower()
-            out_name = fname_presuffix(fname, suffix=suffix,
-                                       newpath=runtime.cwd)
+            suffix = "_" + self.inputs.orientation.lower()
+            out_name = fname_presuffix(fname, suffix=suffix, newpath=runtime.cwd)
             reoriented.to_filename(out_name)
         else:
             out_name = fname

-        mat_name = fname_presuffix(fname, suffix='.mat',
-                                   newpath=runtime.cwd, use_ext=False)
-        np.savetxt(mat_name, affine_xfm, fmt='%.08f')
+        mat_name = fname_presuffix(
+            fname, suffix=".mat", newpath=runtime.cwd, use_ext=False
+        )
+        np.savetxt(mat_name, affine_xfm, fmt="%.08f")

-        self._results['out_file'] = out_name
-        self._results['transform'] = mat_name
+        self._results["out_file"] = out_name
+        self._results["transform"] = mat_name

         return runtime


 def _as_reoriented_backport(img, ornt):
-    """Backport of img.as_reoriented as of nibabel 2.2.0"""
+    """Backport of img.as_reoriented as of nibabel 2.4.0"""
     import numpy as np
     import nibabel as nb
     from nibabel.orientations import inv_ornt_aff
+
     if np.array_equal(ornt, [[0, 1], [1, 1], [2, 1]]):
         return img

-    t_arr = nb.apply_orientation(img.get_data(), ornt)
+    t_arr = nb.apply_orientation(img.dataobj, ornt)
     new_aff = img.affine.dot(inv_ornt_aff(ornt, img.shape))
     reoriented = img.__class__(t_arr, new_aff, img.header)

     if isinstance(reoriented, nb.Nifti1Pair):
         # Also apply the transform to the dim_info fields
-        new_dim = list(reoriented.header.get_dim_info())
-        for idx, value in enumerate(new_dim):
-            # For each value, leave as None if it was that way,
-            # otherwise check where we have mapped it to
-            if value is None:
-                continue
-            new_dim[idx] = np.where(ornt[:, 0] == idx)[0]
+        new_dim = [
+            None if orig_dim is None else int(ornt[orig_dim, 0])
+            for orig_dim in img.header.get_dim_info()
+        ]

         reoriented.header.set_dim_info(*new_dim)
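The backport above leans on nibabel's orientation helpers; the transform between two axis-code triples can be derived directly (pure-nibabel sketch):

    >>> from nibabel.orientations import axcodes2ornt, ornt_transform
    >>> ras, lps = axcodes2ornt('RAS'), axcodes2ornt('LPS')
    >>> ornt_transform(ras, lps).astype(int).tolist()
    [[0, -1], [1, -1], [2, 1]]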
diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py
index e11ba47479..46cdfb44f2 100644
--- a/nipype/interfaces/io.py
+++ b/nipype/interfaces/io.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """ Set of interfaces that allow interaction with data. Currently
@@ -11,10 +10,6 @@
     To come : XNATSink
 """
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)
-from builtins import object, zip, filter, range, open, str
-
 import glob
 import fnmatch
 import string
@@ -31,15 +26,32 @@
 from .. import config, logging
 from ..utils.filemanip import (
-    copyfile, simplify_list, ensure_list,
-    get_related_files)
+    copyfile,
+    simplify_list,
+    ensure_list,
+    get_related_files,
+    split_filename,
+)
 from ..utils.misc import human_order_sorted, str2bool
 from .base import (
-    TraitedSpec, traits, Str, File, Directory, BaseInterface, InputMultiPath,
-    isdefined, OutputMultiPath, DynamicTraitedSpec, Undefined, BaseInterfaceInputSpec,
-    LibraryBaseInterface)
-
-iflogger = logging.getLogger('nipype.interface')
+    TraitedSpec,
+    traits,
+    Tuple,
+    Str,
+    File,
+    Directory,
+    BaseInterface,
+    InputMultiPath,
+    isdefined,
+    OutputMultiPath,
+    DynamicTraitedSpec,
+    Undefined,
+    BaseInterfaceInputSpec,
+    LibraryBaseInterface,
+    SimpleInterface,
+)
+
+iflogger = logging.getLogger("nipype.interface")


 def copytree(src, dst, use_hardlink=False):
@@ -54,7 +66,7 @@ def copytree(src, dst, use_hardlink=False):
     try:
         os.makedirs(dst)
     except OSError as why:
-        if 'File exists' in why.strerror:
+        if "File exists" in why.strerror:
             pass
         else:
             raise why
@@ -70,9 +82,10 @@ def copytree(src, dst, use_hardlink=False):
                     srcname,
                     dstname,
                     True,
-                    hashmethod='content',
-                    use_hardlink=use_hardlink)
-            except (IOError, os.error) as why:
+                    hashmethod="content",
+                    use_hardlink=use_hardlink,
+                )
+            except OSError as why:
                 errors.append((srcname, dstname, str(why)))
             # catch the Error from the recursive copytree so that we can
             # continue with other files
@@ -83,7 +96,7 @@ def copytree(src, dst, use_hardlink=False):


 def add_traits(base, names, trait_type=None):
-    """ Add traits to a traited class.
+    """Add traits to a traited class.

     All traits are set to Undefined by default
     """
@@ -101,7 +114,7 @@ def add_traits(base, names, trait_type=None):


 def _get_head_bucket(s3_resource, bucket_name):
-    """ Try to get the header info of a bucket, in order to
+    """Try to get the header info of a bucket, in order to
     check if it exists and its permissions
     """

@@ -111,21 +124,26 @@ def _get_head_bucket(s3_resource, bucket_name):
     try:
         s3_resource.meta.client.head_bucket(Bucket=bucket_name)
     except botocore.exceptions.ClientError as exc:
-        error_code = int(exc.response['Error']['Code'])
+        error_code = int(exc.response["Error"]["Code"])
         if error_code == 403:
-            err_msg = 'Access to bucket: %s is denied; check credentials'\
-                      % bucket_name
+            err_msg = "Access to bucket: %s is denied; check credentials" % bucket_name
             raise Exception(err_msg)
         elif error_code == 404:
-            err_msg = 'Bucket: %s does not exist; check spelling and try '\
-                      'again' % bucket_name
+            err_msg = (
+                "Bucket: %s does not exist; check spelling and try "
+                "again" % bucket_name
+            )
             raise Exception(err_msg)
         else:
-            err_msg = 'Unable to connect to bucket: %s. Error message:\n%s'\
-                      % (bucket_name, exc)
+            err_msg = "Unable to connect to bucket: {}. Error message:\n{}".format(
+                bucket_name,
+                exc,
+            )
     except Exception as exc:
-        err_msg = 'Unable to connect to bucket: %s. Error message:\n%s'\
-                  % (bucket_name, exc)
+        err_msg = "Unable to connect to bucket: {}. Error message:\n{}".format(
+            bucket_name,
+            exc,
+        )
         raise Exception(err_msg)

@@ -137,22 +155,21 @@ def _list_outputs(self):
         raise NotImplementedError

     def _outputs(self):
-        return self._add_output_traits(super(IOBase, self)._outputs())
+        return self._add_output_traits(super()._outputs())

     def _add_output_traits(self, base):
         return base


 # Class to track percentage of S3 file upload
-class ProgressPercentage(object):
-    '''
-    Callable class instsance (via __call__ method) that displays
+class ProgressPercentage:
+    """
+    Callable class instance (via __call__ method) that displays
     upload percentage of a file to S3
-    '''
+    """

     def __init__(self, filename):
-        '''
-        '''
+        """ """

         # Import packages
         import threading
@@ -164,8 +181,7 @@ def __init__(self, filename):
         self._lock = threading.Lock()

     def __call__(self, bytes_amount):
-        '''
-        '''
+        """ """

         # Import packages
         import sys
@@ -177,8 +193,11 @@ def __call__(self, bytes_amount):
             percentage = (self._seen_so_far // self._size) * 100
         else:
             percentage = 0
-        progress_str = '%d / %d (%.2f%%)\r'\
-                       % (self._seen_so_far, self._size, percentage)
+        progress_str = "%d / %d (%.2f%%)\r" % (
+            self._seen_so_far,
+            self._size,
+            percentage,
+        )

         # Write to stdout
         sys.stdout.write(progress_str)
@@ -187,124 +206,130 @@ def __call__(self, bytes_amount):

 # DataSink inputs
 class DataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
-    '''
-    '''
+    """ """

     # Init inputspec data attributes
-    base_directory = Directory(
-        desc='Path to the base directory for storing data.')
-    container = Str(
-        desc='Folder within base directory in which to store output')
+    base_directory = Str(desc="Path to the base directory for storing data.")
+    container = Str(desc="Folder within base directory in which to store output")
     parameterization = traits.Bool(
-        True, usedefault=True, desc='store output in parametrized structure')
-    strip_dir = Directory(desc='path to strip out of filename')
+        True, usedefault=True, desc="store output in parametrized structure"
+    )
+    strip_dir = Str(desc="path to strip out of filename")
     substitutions = InputMultiPath(
-        traits.Tuple(Str, Str),
-        desc=('List of 2-tuples reflecting string '
-              'to substitute and string to replace '
-              'it with'))
-    regexp_substitutions = \
-        InputMultiPath(traits.Tuple(Str, Str),
-                       desc=('List of 2-tuples reflecting a pair of a '
-                             'Python regexp pattern and a replacement '
-                             'string. Invoked after string `substitutions`'))
+        Tuple(Str, Str),
+        desc=(
+            "List of 2-tuples reflecting string "
+            "to substitute and string to replace "
+            "it with"
+        ),
+    )
+    regexp_substitutions = InputMultiPath(
+        Tuple(Str, Str),
+        desc=(
+            "List of 2-tuples reflecting a pair of a "
+            "Python regexp pattern and a replacement "
+            "string. Invoked after string `substitutions`"
+        ),
+    )

     _outputs = traits.Dict(Str, value={}, usedefault=True)
     remove_dest_dir = traits.Bool(
-        False, usedefault=True, desc='remove dest directory when copying dirs')
+        False, usedefault=True, desc="remove dest directory when copying dirs"
+    )

     # AWS S3 data attributes
-    creds_path = Str(desc='Filepath to AWS credentials file for S3 bucket '
-                     'access; if not specified, the credentials will '
-                     'be taken from the AWS_ACCESS_KEY_ID and '
-                     'AWS_SECRET_ACCESS_KEY environment variables')
-    encrypt_bucket_keys = traits.Bool(desc='Flag indicating whether to use S3 '
-                                      'server-side AES-256 encryption')
+    creds_path = Str(
+        desc="Filepath to AWS credentials file for S3 bucket "
+        "access; if not specified, the credentials will "
+        "be taken from the AWS_ACCESS_KEY_ID and "
+        "AWS_SECRET_ACCESS_KEY environment variables"
+    )
+    encrypt_bucket_keys = traits.Bool(
+        desc="Flag indicating whether to use S3 server-side AES-256 encryption"
+    )

     # Set this if user wishes to override the bucket with their own
-    bucket = traits.Any(desc='Boto3 S3 bucket for manual override of bucket')
+    bucket = traits.Any(desc="Boto3 S3 bucket for manual override of bucket")

     # Set this if user wishes to have local copy of files as well
-    local_copy = Str(desc='Copy files locally as well as to S3 bucket')
+    local_copy = Str(desc="Copy files locally as well as to S3 bucket")

     # Set call-able inputs attributes
     def __setattr__(self, key, value):
-
         if key not in self.copyable_trait_names():
             if not isdefined(value):
-                super(DataSinkInputSpec, self).__setattr__(key, value)
+                super().__setattr__(key, value)
             self._outputs[key] = value
         else:
             if key in self._outputs:
                 self._outputs[key] = value
-            super(DataSinkInputSpec, self).__setattr__(key, value)
+            super().__setattr__(key, value)


 # DataSink outputs
 class DataSinkOutputSpec(TraitedSpec):
-
-    # Init out file
-    out_file = traits.Any(desc='datasink output')
+    out_file = traits.Any(desc="datasink output")


 # Custom DataSink class
 class DataSink(IOBase):
-    """ Generic datasink module to store structured outputs
-
-        Primarily for use within a workflow. This interface allows arbitrary
-        creation of input attributes. The names of these attributes define the
-        directory structure to create for storage of the files or directories.
-
-        The attributes take the following form:
-
-        string[[.[@]]string[[.[@]]string]] ...
+    """
+    Generic datasink module to store structured outputs.

-        where parts between [] are optional.
+    Primarily for use within a workflow. This interface allows arbitrary
+    creation of input attributes. The names of these attributes define the
+    directory structure to create for storage of the files or directories.

-        An attribute such as contrasts.@con will create a 'contrasts' directory
-        to store the results linked to the attribute. If the @ is left out, such
-        as in 'contrasts.con', a subdirectory 'con' will be created under
-        'contrasts'.
+    The attributes take the following form::

-        the general form of the output is::
+        string[[.[@]]string[[.[@]]string]] ...

-           'base_directory/container/parameterization/destloc/filename'
+    where parts between ``[]`` are optional.

-           destloc = string[[.[@]]string[[.[@]]string]] and
-           filename comesfrom the input to the connect statement.
+    An attribute such as contrasts.@con will create a 'contrasts' directory
+    to store the results linked to the attribute. If the ``@`` is left out, such
+    as in 'contrasts.con', a subdirectory 'con' will be created under
+    'contrasts'.

-        .. warning::
+    The general form of the output is::

-            This is not a thread-safe node because it can write to a common
-            shared location. It will not complain when it overwrites a file.
+        'base_directory/container/parameterization/destloc/filename'

-        .. note::
+    ``destloc = string[[.[@]]string[[.[@]]string]]`` and
+    ``filename`` come from the input to the connect statement.

-            If both substitutions and regexp_substitutions are used, then
-            substitutions are applied first followed by regexp_substitutions.
+    .. warning::

-            This interface **cannot** be used in a MapNode as the inputs are
-            defined only when the connect statement is executed.
+        This is not a thread-safe node because it can write to a common
+        shared location. It will not complain when it overwrites a file.

-        Examples
-        --------
+    .. note::

-        >>> ds = DataSink()
-        >>> ds.inputs.base_directory = 'results_dir'
-        >>> ds.inputs.container = 'subject'
-        >>> ds.inputs.structural = 'structural.nii'
-        >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii'])
-        >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii'])
-        >>> ds.run()  # doctest: +SKIP
+        If both substitutions and regexp_substitutions are used, then
+        substitutions are applied first followed by regexp_substitutions.

-        To use DataSink in a MapNode, its inputs have to be defined at the
-        time the interface is created.
+        This interface **cannot** be used in a MapNode as the inputs are
+        defined only when the connect statement is executed.

-        >>> ds = DataSink(infields=['contasts.@con'])
-        >>> ds.inputs.base_directory = 'results_dir'
-        >>> ds.inputs.container = 'subject'
-        >>> ds.inputs.structural = 'structural.nii'
-        >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii'])
-        >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii'])
-        >>> ds.run()  # doctest: +SKIP
+    Examples
+    --------
+    >>> ds = DataSink()
+    >>> ds.inputs.base_directory = 'results_dir'
+    >>> ds.inputs.container = 'subject'
+    >>> ds.inputs.structural = 'structural.nii'
+    >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii'])
+    >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii'])
+    >>> ds.run()  # doctest: +SKIP
+
+    To use DataSink in a MapNode, its inputs have to be defined at the
+    time the interface is created.
+
+    >>> ds = DataSink(infields=['contrasts.@con'])
+    >>> ds.inputs.base_directory = 'results_dir'
+    >>> ds.inputs.container = 'subject'
+    >>> ds.inputs.structural = 'structural.nii'
+    >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii'])
+    >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii'])
+    >>> ds.run()  # doctest: +SKIP

     """

@@ -321,7 +346,7 @@ def __init__(self, infields=None, force_run=True, **kwargs):
             Indicates the input fields to be dynamically created
         """

-        super(DataSink, self).__init__(**kwargs)
+        super().__init__(**kwargs)
         undefined_traits = {}
         # used for mandatory inputs check
         self._infields = infields
@@ -343,10 +368,9 @@ def _get_dst(self, src):
         if self.inputs.parameterization:
             dst = path
             if isdefined(self.inputs.strip_dir):
-                dst = dst.replace(self.inputs.strip_dir, '')
+                dst = dst.replace(self.inputs.strip_dir, "")
             folders = [
-                folder for folder in dst.split(os.path.sep)
-                if folder.startswith('_')
+                folder for folder in dst.split(os.path.sep) if folder.startswith("_")
             ]
             dst = os.path.sep.join(folders)
             if fname:
@@ -368,22 +392,32 @@ def _substitute(self, pathstr):
                 oldpathstr = pathstr
                 pathstr = pathstr.replace(key, val)
                 if pathstr != oldpathstr:
-                    iflogger.debug('sub.str: %s -> %s using %r -> %r',
-                                   oldpathstr, pathstr, key, val)
+                    iflogger.debug(
+                        "sub.str: %s -> %s using %r -> %r",
+                        oldpathstr,
+                        pathstr,
+                        key,
+                        val,
+                    )
         if isdefined(self.inputs.regexp_substitutions):
             for key, val in self.inputs.regexp_substitutions:
                 oldpathstr = pathstr
                 pathstr, _ = re.subn(key, val, pathstr)
                 if pathstr != oldpathstr:
-                    iflogger.debug('sub.regexp: %s -> %s using %r -> %r',
-                                   oldpathstr, pathstr, key, val)
+                    iflogger.debug(
+                        "sub.regexp: %s -> %s using %r -> %r",
+                        oldpathstr,
+                        pathstr,
+                        key,
+                        val,
+                    )
         if pathstr_ != pathstr:
-            iflogger.info('sub: %s -> %s', pathstr_, pathstr)
+            iflogger.info("sub: %s -> %s", pathstr_, pathstr)
         return pathstr
+
+    >>> ds = DataSink(infields=['contrasts.@con'])
+    >>> ds.inputs.base_directory = 'results_dir'
+    >>> ds.inputs.container = 'subject'
+    >>> ds.inputs.structural = 'structural.nii'
+    >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii'])
+    >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii'])
+    >>> ds.run() # doctest: +SKIP

     """

@@ -321,7 +346,7 @@ def __init__(self, infields=None, force_run=True, **kwargs):
             Indicates the input fields to be dynamically created

         """
-        super(DataSink, self).__init__(**kwargs)
+        super().__init__(**kwargs)
         undefined_traits = {}
         # used for mandatory inputs check
         self._infields = infields
@@ -343,10 +368,9 @@ def _get_dst(self, src):
         if self.inputs.parameterization:
             dst = path
             if isdefined(self.inputs.strip_dir):
-                dst = dst.replace(self.inputs.strip_dir, '')
+                dst = dst.replace(self.inputs.strip_dir, "")
             folders = [
-                folder for folder in dst.split(os.path.sep)
-                if folder.startswith('_')
+                folder for folder in dst.split(os.path.sep) if folder.startswith("_")
             ]
             dst = os.path.sep.join(folders)
             if fname:
@@ -368,22 +392,32 @@ def _substitute(self, pathstr):
             oldpathstr = pathstr
             pathstr = pathstr.replace(key, val)
             if pathstr != oldpathstr:
-                iflogger.debug('sub.str: %s -> %s using %r -> %r',
-                               oldpathstr, pathstr, key, val)
+                iflogger.debug(
+                    "sub.str: %s -> %s using %r -> %r",
+                    oldpathstr,
+                    pathstr,
+                    key,
+                    val,
+                )
         if isdefined(self.inputs.regexp_substitutions):
             for key, val in self.inputs.regexp_substitutions:
                 oldpathstr = pathstr
                 pathstr, _ = re.subn(key, val, pathstr)
                 if pathstr != oldpathstr:
-                    iflogger.debug('sub.regexp: %s -> %s using %r -> %r',
-                                   oldpathstr, pathstr, key, val)
+                    iflogger.debug(
+                        "sub.regexp: %s -> %s using %r -> %r",
+                        oldpathstr,
+                        pathstr,
+                        key,
+                        val,
+                    )
         if pathstr_ != pathstr:
-            iflogger.info('sub: %s -> %s', pathstr_, pathstr)
+            iflogger.info("sub: %s -> %s", pathstr_, pathstr)
         return pathstr

     # Check for s3 in base directory
     def _check_s3_base_dir(self):
-        '''
+        """
         Method to see if the datasink's base directory specifies an
         S3 bucket path; if it does, it parses the path for the bucket
         name in the form 's3://bucket_name/...' and returns it
@@ -399,38 +433,25 @@ def _check_s3_base_dir(self):
         bucket_name : string
             name of the S3 bucket to connect to; if the base directory
             is not a valid S3 path, defaults to ''
-        '''
+        """

-        # Init variables
-        s3_str = 's3://'
-        bucket_name = ''
+        s3_str = "s3://"
+        bucket_name = ""
         base_directory = self.inputs.base_directory

         if not isdefined(base_directory):
             s3_flag = False
             return s3_flag, bucket_name

-        # Explicitly lower-case the "s3"
-        if base_directory.lower().startswith(s3_str):
-            base_dir_sp = base_directory.split('/')
-            base_dir_sp[0] = base_dir_sp[0].lower()
-            base_directory = '/'.join(base_dir_sp)
-
-        # Check if 's3://' in base dir
-        if base_directory.startswith(s3_str):
-            # Expects bucket name to be 's3://bucket_name/base_dir/..'
-            bucket_name = base_directory.split(s3_str)[1].split('/')[0]
-            s3_flag = True
-        # Otherwise it's just a normal datasink
-        else:
-            s3_flag = False
+        s3_flag = base_directory.lower().startswith(s3_str)
+        if s3_flag:
+            bucket_name = base_directory[len(s3_str) :].partition("/")[0]

-        # Return s3_flag
         return s3_flag, bucket_name

     # Function to return AWS secure environment variables
     def _return_aws_keys(self):
-        '''
+        """
         Method to return AWS access key id and secret access key
         using credentials found in a local file.
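
The `_check_s3_base_dir` hunk above collapses the old lower-case/split/re-join
sequence into a single `startswith` check plus `str.partition`. A minimal,
self-contained sketch of the new behavior (the bucket path below is a made-up
example, not taken from this change):

    # Sketch of the refactored S3 detection; "my-bucket" is hypothetical.
    s3_str = "s3://"
    base_directory = "S3://my-bucket/derivatives/sub-01"  # scheme case is ignored
    s3_flag = base_directory.lower().startswith(s3_str)
    bucket_name = base_directory[len(s3_str):].partition("/")[0] if s3_flag else ""
    assert (s3_flag, bucket_name) == (True, "my-bucket")

As in the old code, only the scheme is matched case-insensitively; the bucket
name is returned exactly as written in the path.
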
@@ -445,7 +466,7 @@ def _return_aws_keys(self): string of the AWS access key ID aws_secret_access_key : string string of the AWS secret access key - ''' + """ # Import packages import os @@ -455,40 +476,39 @@ def _return_aws_keys(self): # Check if creds exist if creds_path and os.path.exists(creds_path): - with open(creds_path, 'r') as creds_in: + with open(creds_path) as creds_in: # Grab csv rows row1 = creds_in.readline() row2 = creds_in.readline() # Are they root or user keys - if 'User Name' in row1: + if "User Name" in row1: # And split out for keys - aws_access_key_id = row2.split(',')[1] - aws_secret_access_key = row2.split(',')[2] - elif 'AWSAccessKeyId' in row1: + aws_access_key_id = row2.split(",")[1] + aws_secret_access_key = row2.split(",")[2] + elif "AWSAccessKeyId" in row1: # And split out for keys - aws_access_key_id = row1.split('=')[1] - aws_secret_access_key = row2.split('=')[1] + aws_access_key_id = row1.split("=")[1] + aws_secret_access_key = row2.split("=")[1] else: - err_msg = 'Credentials file not recognized, check file is correct' + err_msg = "Credentials file not recognized, check file is correct" raise Exception(err_msg) # Strip any carriage return/line feeds - aws_access_key_id = aws_access_key_id.replace('\r', '').replace( - '\n', '') - aws_secret_access_key = aws_secret_access_key.replace('\r', - '').replace( - '\n', '') + aws_access_key_id = aws_access_key_id.replace("\r", "").replace("\n", "") + aws_secret_access_key = aws_secret_access_key.replace("\r", "").replace( + "\n", "" + ) else: - aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID') - aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY') + aws_access_key_id = os.getenv("AWS_ACCESS_KEY_ID") + aws_secret_access_key = os.getenv("AWS_SECRET_ACCESS_KEY") # Return keys return aws_access_key_id, aws_secret_access_key # Fetch bucket object def _fetch_bucket(self, bucket_name): - ''' + """ Method to return a bucket object which can be used to interact with an AWS S3 bucket using credentials found in a local file. @@ -504,15 +524,14 @@ def _fetch_bucket(self, bucket_name): bucket : boto3.resources.factory.s3.Bucket boto3 s3 Bucket object which is used to interact with files in an S3 bucket on AWS - ''' + """ # Import packages try: import boto3 import botocore - except ImportError as exc: - err_msg = 'Boto3 package is not installed - install boto3 and '\ - 'try again.' + except ImportError: + err_msg = "Boto3 package is not installed - install boto3 and try again." raise Exception(err_msg) # Init variables @@ -520,45 +539,46 @@ def _fetch_bucket(self, bucket_name): # Get AWS credentials try: - aws_access_key_id, aws_secret_access_key = \ - self._return_aws_keys() + aws_access_key_id, aws_secret_access_key = self._return_aws_keys() except Exception as exc: - err_msg = 'There was a problem extracting the AWS credentials '\ - 'from the credentials file provided: %s. Error:\n%s'\ - % (creds_path, exc) + err_msg = ( + "There was a problem extracting the AWS credentials " + "from the credentials file provided: %s. 
Error:\n%s" % (creds_path, exc) + ) raise Exception(err_msg) # Try and get AWS credentials if a creds_path is specified if aws_access_key_id and aws_secret_access_key: # Init connection - iflogger.info('Connecting to S3 bucket: %s with credentials...', - bucket_name) + iflogger.info( + "Connecting to S3 bucket: %s with credentials...", bucket_name + ) # Use individual session for each instance of DataSink # Better when datasinks are being used in multi-threading, see: # http://boto3.readthedocs.org/en/latest/guide/resources.html#multithreading session = boto3.session.Session( aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key) + aws_secret_access_key=aws_secret_access_key, + ) else: - iflogger.info('Connecting to S3 bucket: %s with IAM role...', - bucket_name) + iflogger.info("Connecting to S3 bucket: %s with IAM role...", bucket_name) # Lean on AWS environment / IAM role authentication and authorization session = boto3.session.Session() - s3_resource = session.resource('s3', use_ssl=True) + s3_resource = session.resource("s3", use_ssl=True) # And try fetch the bucket with the name argument try: _get_head_bucket(s3_resource, bucket_name) - except Exception as exc: - + except Exception: # Try to connect anonymously s3_resource.meta.client.meta.events.register( - 'choose-signer.s3.*', botocore.handlers.disable_signing) + "choose-signer.s3.*", botocore.handlers.disable_signing + ) - iflogger.info('Connecting to AWS: %s anonymously...', bucket_name) + iflogger.info("Connecting to AWS: %s anonymously...", bucket_name) _get_head_bucket(s3_resource, bucket_name) # Explicitly declare a secure SSL connection for bucket object @@ -567,12 +587,11 @@ def _fetch_bucket(self, bucket_name): # Return the bucket return bucket - # Send up to S3 method def _upload_to_s3(self, bucket, src, dst): - ''' + """ Method to upload outputs to S3 bucket instead of on local disk - ''' + """ # Import packages import hashlib @@ -580,13 +599,12 @@ def _upload_to_s3(self, bucket, src, dst): from botocore.exceptions import ClientError - # Init variables - s3_str = 's3://' + s3_str = "s3://" s3_prefix = s3_str + bucket.name # Explicitly lower-case the "s3" - if dst[:len(s3_str)].lower() == s3_str: - dst = s3_str + dst[len(s3_str):] + if dst.lower().startswith(s3_str): + dst = s3_str + dst[len(s3_str) :] # If src is a directory, collect files (this assumes dst is a dir too) if os.path.isdir(src): @@ -594,10 +612,7 @@ def _upload_to_s3(self, bucket, src, dst): for root, dirs, files in os.walk(src): src_files.extend([os.path.join(root, fil) for fil in files]) # Make the dst files have the dst folder as base dir - dst_files = [ - os.path.join(dst, - src_f.split(src)[1]) for src_f in src_files - ] + dst_files = [os.path.join(dst, src_f.split(src)[1]) for src_f in src_files] else: src_files = [src] dst_files = [dst] @@ -606,7 +621,7 @@ def _upload_to_s3(self, bucket, src, dst): for src_idx, src_f in enumerate(src_files): # Get destination filename/keyname dst_f = dst_files[src_idx] - dst_k = dst_f.replace(s3_prefix, '').lstrip('/') + dst_k = dst_f.replace(s3_prefix, "").lstrip("/") # See if same file is already up there try: @@ -614,43 +629,39 @@ def _upload_to_s3(self, bucket, src, dst): dst_md5 = dst_obj.e_tag.strip('"') # See if same file is already there - src_read = open(src_f, 'rb').read() + src_read = open(src_f, "rb").read() src_md5 = hashlib.md5(src_read).hexdigest() # Move to next loop iteration if dst_md5 == src_md5: - iflogger.info('File %s already exists on S3, skipping...', - dst_f) + 
iflogger.info("File %s already exists on S3, skipping...", dst_f) continue else: - iflogger.info('Overwriting previous S3 file...') + iflogger.info("Overwriting previous S3 file...") except ClientError: - iflogger.info('New file to S3') + iflogger.info("New file to S3") # Copy file up to S3 (either encrypted or not) - iflogger.info('Uploading %s to S3 bucket, %s, as %s...', src_f, - bucket.name, dst_f) + iflogger.info( + "Uploading %s to S3 bucket, %s, as %s...", src_f, bucket.name, dst_f + ) if self.inputs.encrypt_bucket_keys: - extra_args = {'ServerSideEncryption': 'AES256'} + extra_args = {"ServerSideEncryption": "AES256"} else: extra_args = {} bucket.upload_file( - src_f, - dst_k, - ExtraArgs=extra_args, - Callback=ProgressPercentage(src_f)) + src_f, dst_k, ExtraArgs=extra_args, Callback=ProgressPercentage(src_f) + ) # List outputs, main run routine def _list_outputs(self): - """Execute this module. - """ + """Execute this module.""" # Init variables outputs = self.output_spec().get() out_files = [] # Use hardlink - use_hardlink = str2bool( - config.get('execution', 'try_hard_link_datasink')) + use_hardlink = str2bool(config.get("execution", "try_hard_link_datasink")) # Set local output directory if specified if isdefined(self.inputs.local_copy): @@ -659,7 +670,7 @@ def _list_outputs(self): outdir = self.inputs.base_directory # If base directory isn't given, assume current directory if not isdefined(outdir): - outdir = '.' + outdir = "." # Check if base directory reflects S3 bucket upload s3_flag, bucket_name = self._check_s3_base_dir() @@ -675,18 +686,21 @@ def _list_outputs(self): # If encountering an exception during bucket access, set output # base directory to a local folder except Exception as exc: - s3dir = '' + s3dir = "" if not isdefined(self.inputs.local_copy): local_out_exception = os.path.join( - os.path.expanduser('~'), - 's3_datasink_' + bucket_name) + os.path.expanduser("~"), "s3_datasink_" + bucket_name + ) outdir = local_out_exception # Log local copying directory iflogger.info( - 'Access to S3 failed! Storing outputs locally at: ' - '%s\nError: %s', outdir, exc) + "Access to S3 failed! 
Storing outputs locally at: " + "%s\nError: %s", + outdir, + exc, + ) else: - s3dir = '' + s3dir = "" # If container input is given, append that to outdir if isdefined(self.inputs.container): @@ -701,7 +715,7 @@ def _list_outputs(self): try: os.makedirs(outdir) except OSError as inst: - if 'File exists' in inst.strerror: + if "File exists" in inst.strerror: pass else: raise (inst) @@ -715,8 +729,8 @@ def _list_outputs(self): tempoutdir = outdir if s3_flag: s3tempoutdir = s3dir - for d in key.split('.'): - if d[0] == '@': + for d in key.split("."): + if d[0] == "@": continue tempoutdir = os.path.join(tempoutdir, d) if s3_flag: @@ -732,7 +746,7 @@ def _list_outputs(self): # Format src and dst files src = os.path.abspath(src) if not os.path.isfile(src): - src = os.path.join(src, '') + src = os.path.join(src, "") dst = self._get_dst(src) if s3_flag: s3dst = os.path.join(s3tempoutdir, dst) @@ -747,36 +761,37 @@ def _list_outputs(self): out_files.append(s3dst) # Otherwise, copy locally src -> dst if not s3_flag or isdefined(self.inputs.local_copy): - # Create output directory if it doesnt exist + # Create output directory if it doesn't exist if not os.path.exists(path): try: os.makedirs(path) except OSError as inst: - if 'File exists' in inst.strerror: + if "File exists" in inst.strerror: pass else: raise (inst) # If src is a file, copy it to dst if os.path.isfile(src): - iflogger.debug('copyfile: %s %s', src, dst) + iflogger.debug("copyfile: %s %s", src, dst) copyfile( src, dst, copy=True, - hashmethod='content', - use_hardlink=use_hardlink) + hashmethod="content", + use_hardlink=use_hardlink, + ) out_files.append(dst) # If src is a directory, copy entire contents to dst dir elif os.path.isdir(src): if os.path.exists(dst) and self.inputs.remove_dest_dir: - iflogger.debug('removing: %s', dst) + iflogger.debug("removing: %s", dst) shutil.rmtree(dst) - iflogger.debug('copydir: %s %s', src, dst) + iflogger.debug("copydir: %s %s", src, dst) copytree(src, dst) out_files.append(dst) # Return outputs dictionary - outputs['out_file'] = out_files + outputs["out_file"] = out_files return outputs @@ -785,55 +800,76 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): anon = traits.Bool( False, usedefault=True, - desc= - 'Use anonymous connection to s3. If this is set to True, boto may print' - + - ' a urlopen error, but this does not prevent data from being downloaded.' - ) - region = Str('us-east-1', usedefault=True, desc='Region of s3 bucket') - bucket = Str( - mandatory=True, desc='Amazon S3 bucket where your data is stored') + desc="Use anonymous connection to s3. If this is set to True, boto may print" + " a urlopen error, but this does not prevent data from being downloaded.", + ) + region = Str("us-east-1", usedefault=True, desc="Region of s3 bucket") + bucket = Str(mandatory=True, desc="Amazon S3 bucket where your data is stored") bucket_path = Str( - '', - usedefault=True, - desc='Location within your bucket for subject data.') + "", usedefault=True, desc="Location within your bucket for subject data." + ) local_directory = Directory( exists=True, - desc='Path to the local directory for subject data to be downloaded ' - 'and accessed. Should be on HDFS for Spark jobs.') + desc="Path to the local directory for subject data to be downloaded " + "and accessed. 
Should be on HDFS for Spark jobs.",
+    )
     raise_on_empty = traits.Bool(
         True,
         usedefault=True,
-        desc='Generate exception if list is empty for a given field')
+        desc="Generate exception if list is empty for a given field",
+    )
     sort_filelist = traits.Bool(
-        mandatory=True, desc='Sort the filelist that matches the template')
+        mandatory=True, desc="Sort the filelist that matches the template"
+    )
     template = Str(
         mandatory=True,
-        desc='Layout used to get files. Relative to bucket_path if defined.'
-        'Uses regex rather than glob style formatting.')
+        desc="Layout used to get files. Relative to bucket_path if defined. "
+        "Uses regex rather than glob style formatting.",
+    )
     template_args = traits.Dict(
         key_trait=Str,
         value_trait=traits.List(traits.List),
-        desc='Information to plug into template')
+        desc="Information to plug into template",
+    )


 class S3DataGrabber(LibraryBaseInterface, IOBase):
-    """ Generic datagrabber module that wraps around glob in an
-    intelligent way for neuroimaging tasks to grab files from
-    Amazon S3
+    """
+    Pull data from an Amazon S3 Bucket.
+
+    Generic datagrabber module that wraps around glob in an
+    intelligent way for neuroimaging tasks to grab files from
+    Amazon S3

-    Works exactly like DataGrabber, except, you must specify an
-    S3 "bucket" and "bucket_path" to search for your data and a
-    "local_directory" to store the data. "local_directory"
-    should be a location on HDFS for Spark jobs. Additionally,
-    "template" uses regex style formatting, rather than the
-    glob-style found in the original DataGrabber.
+    Works exactly like DataGrabber, except you must specify an
+    S3 "bucket" and "bucket_path" to search for your data and a
+    "local_directory" to store the data. "local_directory"
+    should be a location on HDFS for Spark jobs. Additionally,
+    "template" uses regex style formatting, rather than the
+    glob-style found in the original DataGrabber.
+
+    Examples
+    --------
+    >>> s3grab = S3DataGrabber(infields=['subj_id'], outfields=["func", "anat"])
+    >>> s3grab.inputs.bucket = 'openneuro'
+    >>> s3grab.inputs.sort_filelist = True
+    >>> s3grab.inputs.template = '*'
+    >>> s3grab.inputs.anon = True
+    >>> s3grab.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
+    >>> s3grab.inputs.local_directory = '/tmp'
+    >>> s3grab.inputs.field_template = {'anat': '%s/anat/%s_T1w.nii.gz',
+    ...                                 'func': '%s/func/%s_task-simon_run-1_bold.nii.gz'}
+    >>> s3grab.inputs.template_args = {'anat': [['subj_id', 'subj_id']],
+    ...                                
'func': [['subj_id', 'subj_id']]} + >>> s3grab.inputs.subj_id = 'sub-01' + >>> s3grab.run() # doctest: +SKIP """ + input_spec = S3DataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True - _pkg = 'boto' + _pkg = "boto" def __init__(self, infields=None, outfields=None, **kwargs): """ @@ -849,8 +885,8 @@ def __init__(self, infields=None, outfields=None, **kwargs): """ if not outfields: - outfields = ['outfiles'] - super(S3DataGrabber, self).__init__(**kwargs) + outfields = ["outfiles"] + super().__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check self._infields = infields @@ -860,11 +896,13 @@ def __init__(self, infields=None, outfields=None, **kwargs): self.inputs.add_trait(key, traits.Any) undefined_traits[key] = Undefined # add ability to insert field specific templates - self.inputs.add_trait('field_template', - traits.Dict( - traits.Enum(outfields), - desc="arguments that fit into template")) - undefined_traits['field_template'] = Undefined + self.inputs.add_trait( + "field_template", + traits.Dict( + traits.Enum(outfields), desc="arguments that fit into template" + ), + ) + undefined_traits["field_template"] = Undefined if not isdefined(self.inputs.template_args): self.inputs.template_args = {} for key in outfields: @@ -889,67 +927,69 @@ def _list_outputs(self): # infields are mandatory, however I could not figure out how to set 'mandatory' flag dynamically # hence manual check import boto + if self._infields: for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): - msg = "%s requires a value for input '%s' because it was listed in 'infields'" % \ - (self.__class__.__name__, key) + msg = ( + "%s requires a value for input '%s' because it was listed in 'infields'" + % (self.__class__.__name__, key) + ) raise ValueError(msg) outputs = {} # get list of all files in s3 bucket conn = boto.connect_s3(anon=self.inputs.anon) bkt = conn.get_bucket(self.inputs.bucket) - bkt_files = list( - k.key for k in bkt.list(prefix=self.inputs.bucket_path)) + bkt_files = [k.key for k in bkt.list(prefix=self.inputs.bucket_path)] # keys are outfields, args are template args for the outfield for key, args in list(self.inputs.template_args.items()): outputs[key] = [] template = self.inputs.template - if hasattr(self.inputs, 'field_template') and \ - isdefined(self.inputs.field_template) and \ - key in self.inputs.field_template: + if ( + hasattr(self.inputs, "field_template") + and isdefined(self.inputs.field_template) + and key in self.inputs.field_template + ): template = self.inputs.field_template[ - key] # template override for multiple outfields + key + ] # template override for multiple outfields if isdefined(self.inputs.bucket_path): template = os.path.join(self.inputs.bucket_path, template) if not args: - filelist = [] - for fname in bkt_files: - if re.match(template, fname): - filelist.append(fname) + filelist = [fname for fname in bkt_files if re.match(template, fname)] if len(filelist) == 0: - msg = 'Output key: %s Template: %s returned no files' % ( - key, template) + msg = "Output key: {} Template: {} returned no files".format( + key, + template, + ) if self.inputs.raise_on_empty: - raise IOError(msg) + raise OSError(msg) else: warn(msg) else: if self.inputs.sort_filelist: filelist = human_order_sorted(filelist) outputs[key] = simplify_list(filelist) - for argnum, arglist in enumerate(args): + for arglist in args: maxlen = 1 for arg in arglist: - if isinstance(arg, - (str, bytes)) and hasattr(self.inputs, arg): + if 
isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): raise ValueError( - 'incompatible number of arguments for %s' % - key) + "incompatible number of arguments for %s" % key + ) if len(arg) > maxlen: maxlen = len(arg) outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, (str, bytes)) and hasattr( - self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) @@ -961,18 +1001,20 @@ def _list_outputs(self): filledtemplate = template % tuple(argtuple) except TypeError as e: raise TypeError( - e.message + - ": Template %s failed to convert with args %s" - % (template, str(tuple(argtuple)))) + f"{e}: Template {template} failed to convert " + f"with args {tuple(argtuple)}" + ) outfiles = [] for fname in bkt_files: if re.match(filledtemplate, fname): outfiles.append(fname) if len(outfiles) == 0: - msg = 'Output key: %s Template: %s returned no files' % ( - key, filledtemplate) + msg = "Output key: {} Template: {} returned no files".format( + key, + filledtemplate, + ) if self.inputs.raise_on_empty: - raise IOError(msg) + raise OSError(msg) else: warn(msg) outputs[key].append(None) @@ -980,7 +1022,7 @@ def _list_outputs(self): if self.inputs.sort_filelist: outfiles = human_order_sorted(outfiles) outputs[key].append(simplify_list(outfiles)) - if any([val is None for val in outputs[key]]): + if None in outputs[key]: outputs[key] = [] if len(outputs[key]) == 0: outputs[key] = None @@ -1006,16 +1048,19 @@ def _list_outputs(self): # directory, returning the local path. def s3tolocal(self, s3path, bkt): import boto + # path formatting - if not os.path.split(self.inputs.local_directory)[1] == '': - self.inputs.local_directory += '/' - if not os.path.split(self.inputs.bucket_path)[1] == '': - self.inputs.bucket_path += '/' - if self.inputs.template[0] == '/': - self.inputs.template = self.inputs.template[1:] - - localpath = s3path.replace(self.inputs.bucket_path, - self.inputs.local_directory) + local_directory = str(self.inputs.local_directory) + bucket_path = str(self.inputs.bucket_path) + template = str(self.inputs.template) + if os.path.basename(local_directory) != "": + local_directory += "/" + if os.path.basename(bucket_path) != "": + bucket_path += "/" + if template[0] == "/": + template = template[1:] + + localpath = s3path.replace(bucket_path, local_directory) localdir = os.path.split(localpath)[0] if not os.path.exists(localdir): os.makedirs(localdir) @@ -1027,78 +1072,83 @@ def s3tolocal(self, s3path, bkt): class DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): base_directory = Directory( - exists=True, - desc='Path to the base directory consisting of subject data.') + exists=True, desc="Path to the base directory consisting of subject data." 
+ ) raise_on_empty = traits.Bool( True, usedefault=True, - desc='Generate exception if list is empty for a given field') + desc="Generate exception if list is empty for a given field", + ) drop_blank_outputs = traits.Bool( - False, usedefault=True, - desc="Remove ``None`` entries from output lists" - ) + False, usedefault=True, desc="Remove ``None`` entries from output lists" + ) sort_filelist = traits.Bool( - mandatory=True, desc='Sort the filelist that matches the template') + mandatory=True, desc="Sort the filelist that matches the template" + ) template = Str( mandatory=True, - desc='Layout used to get files. relative to base directory if defined') + desc="Layout used to get files. relative to base directory if defined", + ) template_args = traits.Dict( key_trait=Str, value_trait=traits.List(traits.List), - desc='Information to plug into template') + desc="Information to plug into template", + ) class DataGrabber(IOBase): - """ Generic datagrabber module that wraps around glob in an - intelligent way for neuroimaging tasks to grab files - + """ + Find files on a filesystem. - .. attention:: + Generic datagrabber module that wraps around glob in an + intelligent way for neuroimaging tasks to grab files - Doesn't support directories currently + .. important:: - Examples - -------- + Doesn't support directories currently - >>> from nipype.interfaces.io import DataGrabber + Examples + -------- + >>> from nipype.interfaces.io import DataGrabber - Pick all files from current directory + Pick all files from current directory - >>> dg = DataGrabber() - >>> dg.inputs.template = '*' + >>> dg = DataGrabber() + >>> dg.inputs.template = '*' - Pick file foo/foo.nii from current directory + Pick file foo/foo.nii from current directory - >>> dg.inputs.template = '%s/%s.dcm' - >>> dg.inputs.template_args['outfiles']=[['dicomdir','123456-1-1.dcm']] + >>> dg.inputs.template = '%s/%s.dcm' + >>> dg.inputs.template_args['outfiles']=[['dicomdir','123456-1-1.dcm']] - Same thing but with dynamically created fields + Same thing but with dynamically created fields - >>> dg = DataGrabber(infields=['arg1','arg2']) - >>> dg.inputs.template = '%s/%s.nii' - >>> dg.inputs.arg1 = 'foo' - >>> dg.inputs.arg2 = 'foo' + >>> dg = DataGrabber(infields=['arg1','arg2']) + >>> dg.inputs.template = '%s/%s.nii' + >>> dg.inputs.arg1 = 'foo' + >>> dg.inputs.arg2 = 'foo' - however this latter form can be used with iterables and iterfield in a - pipeline. + however this latter form can be used with iterables and iterfield in a + pipeline. - Dynamically created, user-defined input and output fields + Dynamically created, user-defined input and output fields - >>> dg = DataGrabber(infields=['sid'], outfields=['func','struct','ref']) - >>> dg.inputs.base_directory = '.' - >>> dg.inputs.template = '%s/%s.nii' - >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] - >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] - >>> dg.inputs.template_args['ref'] = [['sid','ref']] - >>> dg.inputs.sid = 's1' + >>> dg = DataGrabber(infields=['sid'], outfields=['func','struct','ref']) + >>> dg.inputs.base_directory = '.' + >>> dg.inputs.template = '%s/%s.nii' + >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] + >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] + >>> dg.inputs.template_args['ref'] = [['sid','ref']] + >>> dg.inputs.sid = 's1' - Change the template only for output field struct. The rest use the - general template + Change the template only for output field struct. 
The rest use the + general template - >>> dg.inputs.field_template = dict(struct='%s/struct.nii') - >>> dg.inputs.template_args['struct'] = [['sid']] + >>> dg.inputs.field_template = dict(struct='%s/struct.nii') + >>> dg.inputs.template_args['struct'] = [['sid']] """ + input_spec = DataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True @@ -1117,8 +1167,8 @@ def __init__(self, infields=None, outfields=None, **kwargs): """ if not outfields: - outfields = ['outfiles'] - super(DataGrabber, self).__init__(**kwargs) + outfields = ["outfiles"] + super().__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check self._infields = infields @@ -1128,11 +1178,13 @@ def __init__(self, infields=None, outfields=None, **kwargs): self.inputs.add_trait(key, traits.Any) undefined_traits[key] = Undefined # add ability to insert field specific templates - self.inputs.add_trait('field_template', - traits.Dict( - traits.Enum(outfields), - desc="arguments that fit into template")) - undefined_traits['field_template'] = Undefined + self.inputs.add_trait( + "field_template", + traits.Dict( + traits.Enum(outfields), desc="arguments that fit into template" + ), + ) + undefined_traits["field_template"] = Undefined if not isdefined(self.inputs.template_args): self.inputs.template_args = {} for key in outfields: @@ -1159,55 +1211,60 @@ def _list_outputs(self): for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): - msg = "%s requires a value for input '%s' because it was listed in 'infields'" % \ - (self.__class__.__name__, key) + msg = ( + "%s requires a value for input '%s' because it was listed in 'infields'" + % (self.__class__.__name__, key) + ) raise ValueError(msg) outputs = {} for key, args in list(self.inputs.template_args.items()): outputs[key] = [] template = self.inputs.template - if hasattr(self.inputs, 'field_template') and \ - isdefined(self.inputs.field_template) and \ - key in self.inputs.field_template: + if ( + hasattr(self.inputs, "field_template") + and isdefined(self.inputs.field_template) + and key in self.inputs.field_template + ): template = self.inputs.field_template[key] if isdefined(self.inputs.base_directory): template = os.path.join( - os.path.abspath(self.inputs.base_directory), template) + os.path.abspath(self.inputs.base_directory), template + ) else: template = os.path.abspath(template) if not args: filelist = glob.glob(template) if len(filelist) == 0: - msg = 'Output key: %s Template: %s returned no files' % ( - key, template) + msg = "Output key: {} Template: {} returned no files".format( + key, + template, + ) if self.inputs.raise_on_empty: - raise IOError(msg) + raise OSError(msg) else: warn(msg) else: if self.inputs.sort_filelist: filelist = human_order_sorted(filelist) outputs[key] = simplify_list(filelist) - for argnum, arglist in enumerate(args): + for arglist in args: maxlen = 1 for arg in arglist: - if isinstance(arg, - (str, bytes)) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): raise ValueError( - 'incompatible number of arguments for %s' % - key) + "incompatible number of arguments for %s" % key + ) if len(arg) > maxlen: maxlen = len(arg) outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, (str, bytes)) and hasattr( - self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = 
getattr(self.inputs, arg)
                         if isinstance(arg, list):
                             argtuple.append(arg[i])
@@ -1219,15 +1276,17 @@ def _list_outputs(self):
                     filledtemplate = template % tuple(argtuple)
                 except TypeError as e:
                     raise TypeError(
-                        e.message +
-                        ": Template %s failed to convert with args %s"
-                        % (template, str(tuple(argtuple))))
+                        f"{e}: Template {template} failed to convert "
+                        f"with args {tuple(argtuple)}"
+                    )
                 outfiles = glob.glob(filledtemplate)
                 if len(outfiles) == 0:
-                    msg = 'Output key: %s Template: %s returned no files' % (
-                        key, filledtemplate)
+                    msg = "Output key: {} Template: {} returned no files".format(
+                        key,
+                        filledtemplate,
+                    )
                     if self.inputs.raise_on_empty:
-                        raise IOError(msg)
+                        raise OSError(msg)
                     else:
                         warn(msg)
                     outputs[key].append(None)
@@ -1238,7 +1297,7 @@ def _list_outputs(self):
             if self.inputs.drop_blank_outputs:
                 outputs[key] = [x for x in outputs[key] if x is not None]
             else:
-                if any([val is None for val in outputs[key]]):
+                if None in outputs[key]:
                     outputs[key] = []
             if len(outputs[key]) == 0:
                 outputs[key] = None
@@ -1248,62 +1307,68 @@ def _list_outputs(self):


 class SelectFilesInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
-
-    base_directory = Directory(
-        exists=True, desc="Root path common to templates.")
+    base_directory = Directory(exists=True, desc="Root path common to templates.")
     sort_filelist = traits.Bool(
         True,
         usedefault=True,
-        desc="When matching mutliple files, return them"
-        " in sorted order.")
+        desc="When matching multiple files, return them in sorted order.",
+    )
     raise_on_empty = traits.Bool(
         True,
         usedefault=True,
-        desc="Raise an exception if a template pattern "
-        "matches no files.")
+        desc="Raise an exception if a template pattern matches no files.",
+    )
     force_lists = traits.Either(
         traits.Bool(),
         traits.List(Str()),
         default=False,
         usedefault=True,
-        desc=("Whether to return outputs as a list even"
-              " when only one file matches the template. "
-              "Either a boolean that applies to all output "
-              "fields or a list of output field names to "
-              "coerce to a list"))
+        desc=(
+            "Whether to return outputs as a list even"
+            " when only one file matches the template. "
+            "Either a boolean that applies to all output "
+            "fields or a list of output field names to "
+            "coerce to a list"
+        ),
+    )


 class SelectFiles(IOBase):
-    """Flexibly collect data from disk to feed into workflows.
+    """
+    Flexibly collect data from disk to feed into workflows.

-    This interface uses the {}-based string formatting syntax to plug
+    This interface uses Python's {}-based string formatting syntax to plug
     values (possibly known only at workflow execution time) into string
-    templates and collect files from persistant storage. These templates
-    can also be combined with glob wildcards. The field names in the
-    formatting template (i.e. the terms in braces) will become inputs
-    fields on the interface, and the keys in the templates dictionary
-    will form the output fields.
+    templates and collect files from persistent storage. These templates can
+    also be combined with glob wildcards (``*``, ``?``) and character ranges (``[...]``).
+    The field names in the formatting template (i.e. the terms in braces) will
+    become input fields on the interface, and the keys in the templates
+    dictionary will form the output fields.

     Examples
     --------
     >>> import pprint
     >>> from nipype import SelectFiles, Node
     >>> templates={"T1": "{subject_id}/struct/T1.nii",
-    ...            "epi": "{subject_id}/func/f[0, 1].nii"}
+    ...            "epi": "{subject_id}/func/f[0,1].nii"}
     >>> dg = Node(SelectFiles(templates), "selectfiles")
     >>> dg.inputs.subject_id = "subj1"
     >>> pprint.pprint(dg.outputs.get())  # doctest:
     {'T1': <undefined>, 'epi': <undefined>}

-    The same thing with dynamic grabbing of specific files:
+    Note that SelectFiles does not support lists as inputs for the dynamic
+    fields. Attempts to do so may lead to unexpected results because brackets
+    also express glob character ranges. For example,

-    >>> templates["epi"] = "{subject_id}/func/f{run!s}.nii"
+    >>> templates["epi"] = "{subject_id}/func/f{run}.nii"
     >>> dg = Node(SelectFiles(templates), "selectfiles")
     >>> dg.inputs.subject_id = "subj1"
-    >>> dg.inputs.run = [2, 4]
+    >>> dg.inputs.run = [10, 11]
+
+    would match f0.nii or f1.nii, not f10.nii or f11.nii.

     """
+
     input_spec = SelectFilesInputSpec
     output_spec = DynamicTraitedSpec
     _always_run = True
@@ -1324,14 +1389,14 @@ def __init__(self, templates, **kwargs):
             used to select files.

         """
-        super(SelectFiles, self).__init__(**kwargs)
+        super().__init__(**kwargs)

         # Infer the infields and outfields from the template
         infields = []
         for name, template in list(templates.items()):
             for _, field_name, _, _ in string.Formatter().parse(template):
                 if field_name is not None:
-                    field_name = re.match("\w+", field_name).group()
+                    field_name = re.match(r"\w+", field_name).group()
                     if field_name not in infields:
                         infields.append(field_name)
@@ -1353,8 +1418,9 @@ def _add_output_traits(self, base):

     def _list_outputs(self):
         """Find the files and expose them as interface outputs."""
         outputs = {}
-        info = dict([(k, v) for k, v in list(self.inputs.__dict__.items())
-                     if k in self._infields])
+        info = {
+            k: v for k, v in list(self.inputs.__dict__.items()) if k in self._infields
+        }

         force_lists = self.inputs.force_lists
         if isinstance(force_lists, bool):
@@ -1364,18 +1430,17 @@ def _list_outputs(self):
             bad_fields = ", ".join(list(bad_fields))
             plural = "s" if len(bad_fields) > 1 else ""
             verb = "were" if len(bad_fields) > 1 else "was"
-            msg = ("The field%s '%s' %s set in 'force_lists' and not in "
-                   "'templates'.") % (plural, bad_fields, verb)
+            msg = (
+                "The field%s '%s' %s set in 'force_lists' and not in 'templates'."
+ ) % (plural, bad_fields, verb) raise ValueError(msg) for field, template in list(self._templates.items()): - find_dirs = template[-1] == os.sep # Build the full template path if isdefined(self.inputs.base_directory): - template = op.abspath( - op.join(self.inputs.base_directory, template)) + template = op.abspath(op.join(self.inputs.base_directory, template)) else: template = op.abspath(template) @@ -1389,10 +1454,12 @@ def _list_outputs(self): # Handle the case where nothing matched if not filelist: - msg = "No files were found matching %s template: %s" % ( - field, filled_template) + msg = "No files were found matching {} template: {}".format( + field, + filled_template, + ) if self.inputs.raise_on_empty: - raise IOError(msg) + raise OSError(msg) else: warn(msg) @@ -1410,43 +1477,39 @@ def _list_outputs(self): class DataFinderInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - root_paths = traits.Either( - traits.List(), - Str(), - mandatory=True, - ) + root_paths = traits.Either(traits.List(), Str(), mandatory=True) match_regex = Str( - '(.+)', - usedefault=True, - desc=("Regular expression for matching paths.")) + "(.+)", usedefault=True, desc=("Regular expression for matching paths.") + ) ignore_regexes = traits.List( - desc=("List of regular expressions, " - "if any match the path it will be " - "ignored.")) - max_depth = traits.Int(desc="The maximum depth to search beneath " - "the root_paths") - min_depth = traits.Int(desc="The minimum depth to search beneath " - "the root paths") + desc=( + "List of regular expressions, " + "if any match the path it will be " + "ignored." + ) + ) + max_depth = traits.Int(desc="The maximum depth to search beneath the root_paths") + min_depth = traits.Int(desc="The minimum depth to search beneath the root paths") unpack_single = traits.Bool( - False, usedefault=True, desc="Unpack single results from list") + False, usedefault=True, desc="Unpack single results from list" + ) class DataFinder(IOBase): - """Search for paths that match a given regular expression. Allows a less + r"""Search for paths that match a given regular expression. Allows a less proscriptive approach to gathering input files compared to DataGrabber. Will recursively search any subdirectories by default. This can be limited with the min/max depth options. Matched paths are available in the output 'out_paths'. Any named groups of - captured text from the regular expression are also available as ouputs of + captured text from the regular expression are also available as outputs of the same name. Examples -------- - >>> from nipype.interfaces.io import DataFinder >>> df = DataFinder() >>> df.inputs.root_paths = '.' 
-    >>> df.inputs.match_regex = '.+/(?P<series_dir>.+(qT1|ep2d_fid_T1).+)/(?P<basename>.+)\.nii.gz'
+    >>> df.inputs.match_regex = r'.+/(?P<series_dir>.+(qT1|ep2d_fid_T1).+)/(?P<basename>.+)\.nii.gz'
     >>> result = df.run() # doctest: +SKIP
     >>> result.outputs.out_paths  # doctest: +SKIP
     ['./027-ep2d_fid_T1_Gd4/acquisition.nii.gz',
@@ -1480,10 +1543,10 @@ def _match_path(self, target_path):
         if match is not None:
             match_dict = match.groupdict()
             if self.result is None:
-                self.result = {'out_paths': []}
+                self.result = {"out_paths": []}
                 for key in list(match_dict.keys()):
                     self.result[key] = []
-            self.result['out_paths'].append(target_path)
+            self.result["out_paths"].append(target_path)
             for key, val in list(match_dict.items()):
                 self.result[key].append(val)
@@ -1503,14 +1566,15 @@ def _run_interface(self, runtime):
         if self.inputs.ignore_regexes is Undefined:
             self.ignore_regexes = []
         else:
-            self.ignore_regexes = \
-                [re.compile(regex)
-                 for regex in self.inputs.ignore_regexes]
+            self.ignore_regexes = [
+                re.compile(regex) for regex in self.inputs.ignore_regexes
+            ]
         self.result = None
         for root_path in self.inputs.root_paths:
-            # Handle tilda/env variables and remove extra seperators
+            # Handle tilde/env variables and remove extra separators
             root_path = os.path.normpath(
-                os.path.expandvars(os.path.expanduser(root_path)))
+                os.path.expandvars(os.path.expanduser(root_path))
+            )
             # Check if the root_path is a file
             if os.path.isfile(root_path):
                 if min_depth == 0:
@@ -1519,7 +1583,7 @@ def _run_interface(self, runtime):
             # Walk through directory structure checking paths
             for curr_dir, sub_dirs, files in os.walk(root_path):
                 # Determine the current depth from the root_path
-                curr_depth = (curr_dir.count(os.sep) - root_path.count(os.sep))
+                curr_depth = curr_dir.count(os.sep) - root_path.count(os.sep)
                 # If the max path depth has been reached, clear sub_dirs
                 # and files
                 if max_depth is not None and curr_depth >= max_depth:
@@ -1532,19 +1596,19 @@ def _run_interface(self, runtime):
                 for infile in files:
                     full_path = os.path.join(curr_dir, infile)
                     self._match_path(full_path)
-        if (self.inputs.unpack_single and len(self.result['out_paths']) == 1):
+        if self.inputs.unpack_single and len(self.result["out_paths"]) == 1:
             for key, vals in list(self.result.items()):
                 self.result[key] = vals[0]
         else:
-            # sort all keys acording to out_paths
+            # sort all keys according to out_paths
             for key in list(self.result.keys()):
                 if key == "out_paths":
                     continue
                 sort_tuples = human_order_sorted(
-                    list(zip(self.result["out_paths"], self.result[key])))
+                    list(zip(self.result["out_paths"], self.result[key]))
+                )
                 self.result[key] = [x for (_, x) in sort_tuples]
-            self.result["out_paths"] = human_order_sorted(
-                self.result["out_paths"])
+            self.result["out_paths"] = human_order_sorted(self.result["out_paths"])

         if not self.result:
             raise RuntimeError("Regular expression did not match any files!")
@@ -1558,157 +1622,166 @@ def _list_outputs(self):


 class FSSourceInputSpec(BaseInterfaceInputSpec):
-    subjects_dir = Directory(exists=True, mandatory=True,
-                             desc='Freesurfer subjects directory.')
-    subject_id = Str(mandatory=True,
-                     desc='Subject name for whom to retrieve data')
-    hemi = traits.Enum('both', 'lh', 'rh', usedefault=True,
-                       desc='Selects hemisphere specific outputs')
+    subjects_dir = Directory(
+        exists=True, mandatory=True, desc="Freesurfer subjects directory."
+    )
+    subject_id = Str(mandatory=True, desc="Subject name for whom to retrieve data")
+    hemi = traits.Enum(
+        "both", "lh", "rh", usedefault=True, desc="Selects hemisphere specific outputs"
+    )


 class FSSourceOutputSpec(TraitedSpec):
-    T1 = File(
-        exists=True, desc='Intensity normalized whole-head volume', loc='mri')
+    T1 = File(exists=True, desc="Intensity normalized whole-head volume", loc="mri")
     aseg = File(
         exists=True,
-        loc='mri',
-        desc='Volumetric map of regions from automatic segmentation')
-    brain = File(
-        exists=True, desc='Intensity normalized brain-only volume', loc='mri')
-    brainmask = File(
-        exists=True, desc='Skull-stripped (brain-only) volume', loc='mri')
-    filled = File(exists=True, desc='Subcortical mass volume', loc='mri')
-    norm = File(
-        exists=True, desc='Normalized skull-stripped volume', loc='mri')
-    nu = File(
-        exists=True,
-        desc='Non-uniformity corrected whole-head volume',
-        loc='mri')
-    orig = File(
-        exists=True,
-        desc='Base image conformed to Freesurfer space',
-        loc='mri')
+        loc="mri",
+        desc="Volumetric map of regions from automatic segmentation",
+    )
+    brain = File(exists=True, desc="Intensity normalized brain-only volume", loc="mri")
+    brainmask = File(exists=True, desc="Skull-stripped (brain-only) volume", loc="mri")
+    filled = File(exists=True, desc="Subcortical mass volume", loc="mri")
+    norm = File(exists=True, desc="Normalized skull-stripped volume", loc="mri")
+    nu = File(exists=True, desc="Non-uniformity corrected whole-head volume", loc="mri")
+    orig = File(exists=True, desc="Base image conformed to Freesurfer space", loc="mri")
     rawavg = File(
-        exists=True, desc='Volume formed by averaging input images', loc='mri')
+        exists=True, desc="Volume formed by averaging input images", loc="mri"
+    )
     ribbon = OutputMultiPath(
         File(exists=True),
-        desc='Volumetric maps of cortical ribbons',
-        loc='mri',
-        altkey='*ribbon')
-    wm = File(exists=True, desc='Segmented white-matter volume', loc='mri')
+        desc="Volumetric maps of cortical ribbons",
+        loc="mri",
+        altkey="*ribbon",
+    )
+    wm = File(exists=True, desc="Segmented white-matter volume", loc="mri")
     wmparc = File(
         exists=True,
-        loc='mri',
-        desc='Aparc parcellation projected into subcortical white matter')
+        loc="mri",
+        desc="Aparc parcellation projected into subcortical white matter",
+    )
     curv = OutputMultiPath(
-        File(exists=True), desc='Maps of surface curvature', loc='surf')
+        File(exists=True), desc="Maps of surface curvature", loc="surf"
+    )
     avg_curv = OutputMultiPath(
         File(exists=True),
-        desc='Average atlas curvature, sampled to subject',
-        loc='surf')
+        desc="Average atlas curvature, sampled to subject",
+        loc="surf",
+    )
     inflated = OutputMultiPath(
-        File(exists=True), desc='Inflated surface meshes', loc='surf')
+        File(exists=True), desc="Inflated surface meshes", loc="surf"
+    )
     pial = OutputMultiPath(
-        File(exists=True),
-        desc='Gray matter/pia mater surface meshes',
-        loc='surf')
+        File(exists=True), desc="Gray matter/pia mater surface meshes", loc="surf"
+    )
     area_pial = OutputMultiPath(
         File(exists=True),
-        desc='Mean area of triangles each vertex on the pial surface is '
-        'associated with',
-        loc='surf',
-        altkey='area.pial')
+        desc="Mean area of triangles each vertex on the pial surface is "
+        "associated with",
+        loc="surf",
+        altkey="area.pial",
+    )
     curv_pial = OutputMultiPath(
         File(exists=True),
-        desc='Curvature of pial surface',
-        loc='surf',
-        altkey='curv.pial')
+        desc="Curvature of pial surface",
+        loc="surf",
+        altkey="curv.pial",
+    )
     smoothwm = OutputMultiPath(
-
File(exists=True), loc='surf', desc='Smoothed original surface meshes') + File(exists=True), loc="surf", desc="Smoothed original surface meshes" + ) sphere = OutputMultiPath( - File(exists=True), desc='Spherical surface meshes', loc='surf') + File(exists=True), desc="Spherical surface meshes", loc="surf" + ) sulc = OutputMultiPath( - File(exists=True), desc='Surface maps of sulcal depth', loc='surf') + File(exists=True), desc="Surface maps of sulcal depth", loc="surf" + ) thickness = OutputMultiPath( - File(exists=True), - loc='surf', - desc='Surface maps of cortical thickness') + File(exists=True), loc="surf", desc="Surface maps of cortical thickness" + ) volume = OutputMultiPath( - File(exists=True), desc='Surface maps of cortical volume', loc='surf') + File(exists=True), desc="Surface maps of cortical volume", loc="surf" + ) white = OutputMultiPath( - File(exists=True), desc='White/gray matter surface meshes', loc='surf') + File(exists=True), desc="White/gray matter surface meshes", loc="surf" + ) jacobian_white = OutputMultiPath( File(exists=True), - desc='Distortion required to register to spherical atlas', - loc='surf') + desc="Distortion required to register to spherical atlas", + loc="surf", + ) graymid = OutputMultiPath( File(exists=True), - desc='Graymid/midthickness surface meshes', - loc='surf', - altkey=['graymid', 'midthickness']) + desc="Graymid/midthickness surface meshes", + loc="surf", + altkey=["graymid", "midthickness"], + ) label = OutputMultiPath( File(exists=True), - desc='Volume and surface label files', - loc='label', - altkey='*label') + desc="Volume and surface label files", + loc="label", + altkey="*label", + ) annot = OutputMultiPath( - File(exists=True), - desc='Surface annotation files', - loc='label', - altkey='*annot') + File(exists=True), desc="Surface annotation files", loc="label", altkey="*annot" + ) aparc_aseg = OutputMultiPath( File(exists=True), - loc='mri', - altkey='aparc*aseg', - desc='Aparc parcellation projected into aseg volume') + loc="mri", + altkey="aparc*aseg", + desc="Aparc parcellation projected into aseg volume", + ) sphere_reg = OutputMultiPath( File(exists=True), - loc='surf', - altkey='sphere.reg', - desc='Spherical registration file') + loc="surf", + altkey="sphere.reg", + desc="Spherical registration file", + ) aseg_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='aseg', - desc='Automated segmentation statistics file') + loc="stats", + altkey="aseg", + desc="Automated segmentation statistics file", + ) wmparc_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='wmparc', - desc='White matter parcellation statistics file') + loc="stats", + altkey="wmparc", + desc="White matter parcellation statistics file", + ) aparc_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='aparc', - desc='Aparc parcellation statistics files') + loc="stats", + altkey="aparc", + desc="Aparc parcellation statistics files", + ) BA_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='BA', - desc='Brodmann Area statistics files') + loc="stats", + altkey="BA", + desc="Brodmann Area statistics files", + ) aparc_a2009s_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='aparc.a2009s', - desc='Aparc a2009s parcellation statistics files') + loc="stats", + altkey="aparc.a2009s", + desc="Aparc a2009s parcellation statistics files", + ) curv_stats = OutputMultiPath( - File(exists=True), - loc='stats', - altkey='curv', - desc='Curvature statistics files') + File(exists=True), 
loc="stats", altkey="curv", desc="Curvature statistics files" + ) entorhinal_exvivo_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='entorhinal_exvivo', - desc='Entorhinal exvivo statistics files') + loc="stats", + altkey="entorhinal_exvivo", + desc="Entorhinal exvivo statistics files", + ) class FreeSurferSource(IOBase): - """Generates freesurfer subject info from their directories + """Generates freesurfer subject info from their directories. Examples -------- - >>> from nipype.interfaces.io import FreeSurferSource >>> fs = FreeSurferSource() >>> #fs.inputs.subjects_dir = '.' @@ -1719,36 +1792,35 @@ class FreeSurferSource(IOBase): >>> res = fs.run() # doctest: +SKIP """ + input_spec = FSSourceInputSpec output_spec = FSSourceOutputSpec _always_run = True - _additional_metadata = ['loc', 'altkey'] + _additional_metadata = ["loc", "altkey"] def _get_files(self, path, key, dirval, altkey=None): - globsuffix = '' - if dirval == 'mri': - globsuffix = '.mgz' - elif dirval == 'stats': - globsuffix = '.stats' - globprefix = '' - if dirval in ('surf', 'label', 'stats'): - if self.inputs.hemi != 'both': - globprefix = self.inputs.hemi + '.' + globsuffix = "" + if dirval == "mri": + globsuffix = ".mgz" + elif dirval == "stats": + globsuffix = ".stats" + globprefix = "" + if dirval in ("surf", "label", "stats"): + if self.inputs.hemi != "both": + globprefix = self.inputs.hemi + "." else: - globprefix = '?h.' - if key in ('aseg_stats', 'wmparc_stats'): - globprefix = '' - elif key == 'ribbon': - if self.inputs.hemi != 'both': - globprefix = self.inputs.hemi + '.' + globprefix = "?h." + if key in ("aseg_stats", "wmparc_stats"): + globprefix = "" + elif key == "ribbon": + if self.inputs.hemi != "both": + globprefix = self.inputs.hemi + "." else: - globprefix = '*' + globprefix = "*" keys = ensure_list(altkey) if altkey else [key] - globfmt = os.path.join(path, dirval, ''.join((globprefix, '{}', - globsuffix))) + globfmt = os.path.join(path, dirval, f"{globprefix}{{}}{globsuffix}") return [ - os.path.abspath(f) for key in keys - for f in glob.glob(globfmt.format(key)) + os.path.abspath(f) for key in keys for f in glob.glob(globfmt.format(key)) ] def _list_outputs(self): @@ -1757,73 +1829,76 @@ def _list_outputs(self): output_traits = self._outputs() outputs = output_traits.get() for k in list(outputs.keys()): - val = self._get_files(subject_path, k, - output_traits.traits()[k].loc, - output_traits.traits()[k].altkey) + val = self._get_files( + subject_path, + k, + output_traits.traits()[k].loc, + output_traits.traits()[k].altkey, + ) if val: outputs[k] = simplify_list(val) return outputs class XNATSourceInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - query_template = Str( mandatory=True, - desc=('Layout used to get files. Relative to base ' - 'directory if defined')) + desc="Layout used to get files. 
Relative to base directory if defined", + ) query_template_args = traits.Dict( Str, traits.List(traits.List), value=dict(outfiles=[]), usedefault=True, - desc='Information to plug into template') + desc="Information to plug into template", + ) - server = Str(mandatory=True, requires=['user', 'pwd'], xor=['config']) + server = Str(mandatory=True, requires=["user", "pwd"], xor=["config"]) user = Str() pwd = traits.Password() - config = File(mandatory=True, xor=['server']) + config = File(mandatory=True, xor=["server"]) - cache_dir = Directory(desc='Cache directory') + cache_dir = Directory(desc="Cache directory") class XNATSource(LibraryBaseInterface, IOBase): - """ Generic XNATSource module that wraps around the pyxnat module in - an intelligent way for neuroimaging tasks to grab files and data - from an XNAT server. - - Examples - -------- - - >>> from nipype.interfaces.io import XNATSource - - Pick all files from current directory - - >>> dg = XNATSource() - >>> dg.inputs.template = '*' - - >>> dg = XNATSource(infields=['project','subject','experiment','assessor','inout']) - >>> dg.inputs.query_template = '/projects/%s/subjects/%s/experiments/%s' \ - '/assessors/%s/%s_resources/files' - >>> dg.inputs.project = 'IMAGEN' - >>> dg.inputs.subject = 'IMAGEN_000000001274' - >>> dg.inputs.experiment = '*SessionA*' - >>> dg.inputs.assessor = '*ADNI_MPRAGE_nii' - >>> dg.inputs.inout = 'out' + """ + Pull data from an XNAT server. - >>> dg = XNATSource(infields=['sid'],outfields=['struct','func']) - >>> dg.inputs.query_template = '/projects/IMAGEN/subjects/%s/experiments/*SessionA*' \ - '/assessors/*%s_nii/out_resources/files' - >>> dg.inputs.query_template_args['struct'] = [['sid','ADNI_MPRAGE']] - >>> dg.inputs.query_template_args['func'] = [['sid','EPI_faces']] - >>> dg.inputs.sid = 'IMAGEN_000000001274' + Generic XNATSource module that wraps around the pyxnat module in + an intelligent way for neuroimaging tasks to grab files and data + from an XNAT server. 
+ Examples + -------- + Pick all files from current directory + + >>> dg = XNATSource() + >>> dg.inputs.template = '*' + + >>> dg = XNATSource(infields=['project','subject','experiment','assessor','inout']) + >>> dg.inputs.query_template = '/projects/%s/subjects/%s/experiments/%s' \ + '/assessors/%s/%s_resources/files' + >>> dg.inputs.project = 'IMAGEN' + >>> dg.inputs.subject = 'IMAGEN_000000001274' + >>> dg.inputs.experiment = '*SessionA*' + >>> dg.inputs.assessor = '*ADNI_MPRAGE_nii' + >>> dg.inputs.inout = 'out' + + >>> dg = XNATSource(infields=['sid'],outfields=['struct','func']) + >>> dg.inputs.query_template = '/projects/IMAGEN/subjects/%s/experiments/*SessionA*' \ + '/assessors/*%s_nii/out_resources/files' + >>> dg.inputs.query_template_args['struct'] = [['sid','ADNI_MPRAGE']] + >>> dg.inputs.query_template_args['func'] = [['sid','EPI_faces']] + >>> dg.inputs.sid = 'IMAGEN_000000001274' """ + input_spec = XNATSourceInputSpec output_spec = DynamicTraitedSpec - _pkg = 'pyxnat' + _pkg = "pyxnat" def __init__(self, infields=None, outfields=None, **kwargs): """ @@ -1838,7 +1913,7 @@ def __init__(self, infields=None, outfields=None, **kwargs): See class examples for usage """ - super(XNATSource, self).__init__(**kwargs) + super().__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check self._infields = infields @@ -1846,15 +1921,17 @@ def __init__(self, infields=None, outfields=None, **kwargs): for key in infields: self.inputs.add_trait(key, traits.Any) undefined_traits[key] = Undefined - self.inputs.query_template_args['outfiles'] = [infields] + self.inputs.query_template_args["outfiles"] = [infields] if outfields: # add ability to insert field specific templates self.inputs.add_trait( - 'field_template', + "field_template", traits.Dict( traits.Enum(outfields), - desc="arguments that fit into query_template")) - undefined_traits['field_template'] = Undefined + desc="arguments that fit into query_template", + ), + ) + undefined_traits["field_template"] = Undefined # self.inputs.remove_trait('query_template_args') outdict = {} for key in outfields: @@ -1880,52 +1957,59 @@ def _list_outputs(self): if self.inputs.config: xnat = pyxnat.Interface(config=self.inputs.config) else: - xnat = pyxnat.Interface(self.inputs.server, self.inputs.user, - self.inputs.pwd, cache_dir) + xnat = pyxnat.Interface( + self.inputs.server, self.inputs.user, self.inputs.pwd, cache_dir + ) if self._infields: for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): - msg = ("%s requires a value for input '%s' " - "because it was listed in 'infields'" % - (self.__class__.__name__, key)) + msg = ( + "%s requires a value for input '%s' " + "because it was listed in 'infields'" + % (self.__class__.__name__, key) + ) raise ValueError(msg) outputs = {} for key, args in list(self.inputs.query_template_args.items()): outputs[key] = [] template = self.inputs.query_template - if hasattr(self.inputs, 'field_template') and \ - isdefined(self.inputs.field_template) and \ - key in self.inputs.field_template: + if ( + hasattr(self.inputs, "field_template") + and isdefined(self.inputs.field_template) + and key in self.inputs.field_template + ): template = self.inputs.field_template[key] if not args: - file_objects = xnat.select(template).get('obj') + file_objects = xnat.select(template).get("obj") if file_objects == []: - raise IOError('Template %s returned no files' % template) - outputs[key] = simplify_list([ - str(file_object.get()) for file_object in file_objects - if 
file_object.exists() - ]) - for argnum, arglist in enumerate(args): + raise OSError("Template %s returned no files" % template) + outputs[key] = simplify_list( + [ + str(file_object.get()) + for file_object in file_objects + if file_object.exists() + ] + ) + for arglist in args: maxlen = 1 for arg in arglist: - if isinstance(arg, - (str, bytes)) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): - raise ValueError('incompatible number ' - 'of arguments for %s' % key) + raise ValueError( + "incompatible number of arguments for %s" % key + ) if len(arg) > maxlen: maxlen = len(arg) outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, (str, bytes)) and \ - hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) @@ -1933,29 +2017,31 @@ def _list_outputs(self): argtuple.append(arg) if argtuple: target = template % tuple(argtuple) - file_objects = xnat.select(target).get('obj') + file_objects = xnat.select(target).get("obj") if file_objects == []: - raise IOError('Template %s ' - 'returned no files' % target) - - outfiles = simplify_list([ - str(file_object.get()) - for file_object in file_objects - if file_object.exists() - ]) + raise OSError("Template %s returned no files" % target) + + outfiles = simplify_list( + [ + str(file_object.get()) + for file_object in file_objects + if file_object.exists() + ] + ) else: - file_objects = xnat.select(template).get('obj') + file_objects = xnat.select(template).get("obj") if file_objects == []: - raise IOError('Template %s ' - 'returned no files' % template) + raise OSError("Template %s returned no files" % template) - outfiles = simplify_list([ - str(file_object.get()) - for file_object in file_objects - if file_object.exists() - ]) + outfiles = simplify_list( + [ + str(file_object.get()) + for file_object in file_objects + if file_object.exists() + ] + ) outputs[key].insert(i, outfiles) if len(outputs[key]) == 0: @@ -1966,58 +2052,65 @@ def _list_outputs(self): class XNATSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - _outputs = traits.Dict(Str, value={}, usedefault=True) - server = Str(mandatory=True, requires=['user', 'pwd'], xor=['config']) + server = Str(mandatory=True, requires=["user", "pwd"], xor=["config"]) user = Str() pwd = traits.Password() - config = File(mandatory=True, xor=['server']) - cache_dir = Directory(desc='') + config = File(mandatory=True, xor=["server"]) + cache_dir = Directory(desc="") - project_id = Str( - desc='Project in which to store the outputs', mandatory=True) + project_id = Str(desc="Project in which to store the outputs", mandatory=True) - subject_id = Str(desc='Set to subject id', mandatory=True) + subject_id = Str(desc="Set to subject id", mandatory=True) - experiment_id = Str(desc='Set to workflow name', mandatory=True) + experiment_id = Str(desc="Set to workflow name", mandatory=True) assessor_id = Str( - desc=('Option to customize ouputs representation in XNAT - ' - 'assessor level will be used with specified id'), - xor=['reconstruction_id']) + desc=( + "Option to customize outputs representation in XNAT - " + "assessor level will be used with specified id" + ), + xor=["reconstruction_id"], + ) reconstruction_id = Str( - desc=('Option to customize ouputs representation in XNAT - ' - 'reconstruction 
level will be used with specified id'),
-        xor=['assessor_id'])
+        desc=(
+            "Option to customize outputs representation in XNAT - "
+            "reconstruction level will be used with specified id"
+        ),
+        xor=["assessor_id"],
+    )
 
     share = traits.Bool(
         False,
-        desc=('Option to share the subjects from the original project'
-              'instead of creating new ones when possible - the created '
-              'experiments are then shared back to the original project'),
-        usedefault=True)
+        desc=(
+            "Option to share the subjects from the original project "
+            "instead of creating new ones when possible - the created "
+            "experiments are then shared back to the original project"
+        ),
+        usedefault=True,
+    )
 
     def __setattr__(self, key, value):
         if key not in self.copyable_trait_names():
             self._outputs[key] = value
         else:
-            super(XNATSinkInputSpec, self).__setattr__(key, value)
+            super().__setattr__(key, value)
 
 
 class XNATSink(LibraryBaseInterface, IOBase):
-    """ Generic datasink module that takes a directory containing a
-        list of nifti files and provides a set of structured output
-        fields.
+    """Generic datasink module that takes a directory containing a
+    list of nifti files and provides a set of structured output
+    fields.
     """
+
     input_spec = XNATSinkInputSpec
-    _pkg = 'pyxnat'
+    _pkg = "pyxnat"
 
     def _list_outputs(self):
-        """Execute this module.
-        """
+        """Execute this module."""
         import pyxnat
 
         # setup XNAT connection
@@ -2026,34 +2119,35 @@ def _list_outputs(self):
         if self.inputs.config:
             xnat = pyxnat.Interface(config=self.inputs.config)
         else:
-            xnat = pyxnat.Interface(self.inputs.server, self.inputs.user,
-                                    self.inputs.pwd, cache_dir)
+            xnat = pyxnat.Interface(
+                self.inputs.server, self.inputs.user, self.inputs.pwd, cache_dir
+            )
 
         # if possible share the subject from the original project
         if self.inputs.share:
             subject_id = self.inputs.subject_id
             result = xnat.select(
-                'xnat:subjectData',
-                ['xnat:subjectData/PROJECT', 'xnat:subjectData/SUBJECT_ID'
-                 ]).where('xnat:subjectData/SUBJECT_ID = %s AND' % subject_id)
+                "xnat:subjectData",
+                ["xnat:subjectData/PROJECT", "xnat:subjectData/SUBJECT_ID"],
+            ).where("xnat:subjectData/SUBJECT_ID = %s AND" % subject_id)
 
             # subject containing raw data exists on the server
-            if (result.data and isinstance(result.data[0], dict)):
+            if result.data and isinstance(result.data[0], dict):
                 result = result.data[0]
-                shared = xnat.select('/project/%s/subject/%s' %
-                                     (self.inputs.project_id,
-                                      self.inputs.subject_id))
+                shared = xnat.select(
+                    "/project/%s/subject/%s"
+                    % (self.inputs.project_id, self.inputs.subject_id)
+                )
                 if not shared.exists():  # subject not in share project
-
-                    share_project = xnat.select(
-                        '/project/%s' % self.inputs.project_id)
+                    share_project = xnat.select("/project/%s" % self.inputs.project_id)
 
                     if not share_project.exists():  # check project exists
                         share_project.insert()
 
-                    subject = xnat.select('/project/%(project)s'
-                                          '/subject/%(subject_id)s' % result)
+                    subject = xnat.select(
+                        "/project/%(project)s/subject/%(subject_id)s" % result
+                    )
 
                     subject.share(str(self.inputs.project_id))
 
@@ -2061,46 +2155,46 @@ def _list_outputs(self):
         uri_template_args = dict(
             project_id=quote_id(self.inputs.project_id),
             subject_id=self.inputs.subject_id,
-            experiment_id=quote_id(self.inputs.experiment_id))
+            experiment_id=quote_id(self.inputs.experiment_id),
+        )
 
         if self.inputs.share:
-            uri_template_args['original_project'] = result['project']
+            uri_template_args["original_project"] = result["project"]
 
         if self.inputs.assessor_id:
-            uri_template_args['assessor_id'] = quote_id(
-                self.inputs.assessor_id)
+
uri_template_args["assessor_id"] = quote_id(self.inputs.assessor_id) elif self.inputs.reconstruction_id: - uri_template_args['reconstruction_id'] = quote_id( - self.inputs.reconstruction_id) + uri_template_args["reconstruction_id"] = quote_id( + self.inputs.reconstruction_id + ) # gather outputs and upload them for key, files in list(self.inputs._outputs.items()): - for name in ensure_list(files): - if isinstance(name, list): for i, file_name in enumerate(name): - push_file(self, xnat, file_name, '%s_' % i + key, - uri_template_args) + push_file( + self, xnat, file_name, "%s_" % i + key, uri_template_args + ) else: push_file(self, xnat, name, key, uri_template_args) def quote_id(string): - return str(string).replace('_', '---') + return str(string).replace("_", "---") def unquote_id(string): - return str(string).replace('---', '_') + return str(string).replace("---", "_") def push_file(self, xnat, file_name, out_key, uri_template_args): - # grab info from output file names val_list = [ - unquote_id(val) for part in os.path.split(file_name)[0].split(os.sep) - for val in part.split('_')[1:] - if part.startswith('_') and len(part.split('_')) % 2 + unquote_id(val) + for part in os.path.split(file_name)[0].split(os.sep) + for val in part.split("_")[1:] + if part.startswith("_") and len(part.split("_")) % 2 ] keymap = dict(list(zip(val_list[1::2], val_list[2::2]))) @@ -2111,38 +2205,41 @@ def push_file(self, xnat, file_name, out_key, uri_template_args): _label.extend([key, val]) # select and define container level - uri_template_args['container_type'] = None + uri_template_args["container_type"] = None - for container in ['assessor_id', 'reconstruction_id']: + for container in ["assessor_id", "reconstruction_id"]: if getattr(self.inputs, container): - uri_template_args['container_type'] = container.split('_id')[0] - uri_template_args['container_id'] = uri_template_args[container] + uri_template_args["container_type"] = container.split("_id")[0] + uri_template_args["container_id"] = uri_template_args[container] - if uri_template_args['container_type'] is None: - uri_template_args['container_type'] = 'reconstruction' + if uri_template_args["container_type"] is None: + uri_template_args["container_type"] = "reconstruction" - uri_template_args['container_id'] = unquote_id( - uri_template_args['experiment_id']) + uri_template_args["container_id"] = unquote_id( + uri_template_args["experiment_id"] + ) if _label: - uri_template_args['container_id'] += ( - '_results_%s' % '_'.join(_label)) + uri_template_args["container_id"] += "_results_%s" % "_".join(_label) else: - uri_template_args['container_id'] += '_results' + uri_template_args["container_id"] += "_results" # define resource level - uri_template_args['resource_label'] = ('%s_%s' % - (uri_template_args['container_id'], - out_key.split('.')[0])) + uri_template_args["resource_label"] = "{}_{}".format( + uri_template_args["container_id"], + out_key.split(".")[0], + ) # define file level - uri_template_args['file_name'] = os.path.split( - os.path.abspath(unquote_id(file_name)))[1] + uri_template_args["file_name"] = os.path.split( + os.path.abspath(unquote_id(file_name)) + )[1] uri_template = ( - '/project/%(project_id)s/subject/%(subject_id)s' - '/experiment/%(experiment_id)s/%(container_type)s/%(container_id)s' - '/out/resource/%(resource_label)s/file/%(file_name)s') + "/project/%(project_id)s/subject/%(subject_id)s" + "/experiment/%(experiment_id)s/%(container_type)s/%(container_id)s" + "/out/resource/%(resource_label)s/file/%(file_name)s" + ) # 
unquote values before uploading for key in list(uri_template_args.keys()): @@ -2150,18 +2247,18 @@ def push_file(self, xnat, file_name, out_key, uri_template_args): # upload file remote_file = xnat.select(uri_template % uri_template_args) - remote_file.insert( - file_name, experiments='xnat:imageSessionData', use_label=True) + remote_file.insert(file_name, experiments="xnat:imageSessionData", use_label=True) # shares the experiment back to the original project if relevant - if 'original_project' in uri_template_args: - + if "original_project" in uri_template_args: experiment_template = ( - '/project/%(original_project)s' - '/subject/%(subject_id)s/experiment/%(experiment_id)s') + "/project/%(original_project)s" + "/subject/%(subject_id)s/experiment/%(experiment_id)s" + ) xnat.select(experiment_template % uri_template_args).share( - uri_template_args['original_project']) + uri_template_args["original_project"] + ) def capture_provenance(): @@ -2178,45 +2275,49 @@ class SQLiteSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class SQLiteSink(LibraryBaseInterface, IOBase): - """ Very simple frontend for storing values into SQLite database. + """ + Very simple frontend for storing values into SQLite database. - .. warning:: + .. warning:: - This is not a thread-safe node because it can write to a common - shared location. It will not complain when it overwrites a file. + This is not a thread-safe node because it can write to a common + shared location. It will not complain when it overwrites a file. - Examples - -------- + Examples + -------- - >>> sql = SQLiteSink(input_names=['subject_id', 'some_measurement']) - >>> sql.inputs.database_file = 'my_database.db' - >>> sql.inputs.table_name = 'experiment_results' - >>> sql.inputs.subject_id = 's1' - >>> sql.inputs.some_measurement = 11.4 - >>> sql.run() # doctest: +SKIP + >>> sql = SQLiteSink(input_names=['subject_id', 'some_measurement']) + >>> sql.inputs.database_file = 'my_database.db' + >>> sql.inputs.table_name = 'experiment_results' + >>> sql.inputs.subject_id = 's1' + >>> sql.inputs.some_measurement = 11.4 + >>> sql.run() # doctest: +SKIP """ + input_spec = SQLiteSinkInputSpec - _pkg = 'sqlite3' + _pkg = "sqlite3" def __init__(self, input_names, **inputs): - - super(SQLiteSink, self).__init__(**inputs) + super().__init__(**inputs) self._input_names = ensure_list(input_names) - add_traits(self.inputs, [name for name in self._input_names]) + add_traits(self.inputs, self._input_names) def _list_outputs(self): - """Execute this module. 
- """ + """Execute this module.""" import sqlite3 - conn = sqlite3.connect( - self.inputs.database_file, check_same_thread=False) + + conn = sqlite3.connect(self.inputs.database_file, check_same_thread=False) c = conn.cursor() - c.execute("INSERT OR REPLACE INTO %s (" % self.inputs.table_name + - ",".join(self._input_names) + ") VALUES (" + - ",".join(["?"] * len(self._input_names)) + ")", - [getattr(self.inputs, name) for name in self._input_names]) + c.execute( + "INSERT OR REPLACE INTO %s (" % self.inputs.table_name + + ",".join(self._input_names) + + ") VALUES (" + + ",".join(["?"] * len(self._input_names)) + + ")", + [getattr(self.inputs, name) for name in self._input_names], + ) conn.commit() c.close() return None @@ -2224,157 +2325,164 @@ def _list_outputs(self): class MySQLSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): host = Str( - 'localhost', + "localhost", mandatory=True, - requires=['username', 'password'], - xor=['config'], - usedefault=True) + requires=["username", "password"], + xor=["config"], + usedefault=True, + ) config = File( - mandatory=True, - xor=['host'], - desc="MySQL Options File (same format as my.cnf)") - database_name = Str( - mandatory=True, desc='Otherwise known as the schema name') + mandatory=True, xor=["host"], desc="MySQL Options File (same format as my.cnf)" + ) + database_name = Str(mandatory=True, desc="Otherwise known as the schema name") table_name = Str(mandatory=True) username = Str() password = Str() class MySQLSink(IOBase): - """ Very simple frontend for storing values into MySQL database. + """ + Very simple frontend for storing values into MySQL database. - Examples - -------- + Examples + -------- - >>> sql = MySQLSink(input_names=['subject_id', 'some_measurement']) - >>> sql.inputs.database_name = 'my_database' - >>> sql.inputs.table_name = 'experiment_results' - >>> sql.inputs.username = 'root' - >>> sql.inputs.password = 'secret' - >>> sql.inputs.subject_id = 's1' - >>> sql.inputs.some_measurement = 11.4 - >>> sql.run() # doctest: +SKIP + >>> sql = MySQLSink(input_names=['subject_id', 'some_measurement']) + >>> sql.inputs.database_name = 'my_database' + >>> sql.inputs.table_name = 'experiment_results' + >>> sql.inputs.username = 'root' + >>> sql.inputs.password = 'secret' + >>> sql.inputs.subject_id = 's1' + >>> sql.inputs.some_measurement = 11.4 + >>> sql.run() # doctest: +SKIP """ + input_spec = MySQLSinkInputSpec def __init__(self, input_names, **inputs): - - super(MySQLSink, self).__init__(**inputs) + super().__init__(**inputs) self._input_names = ensure_list(input_names) - add_traits(self.inputs, [name for name in self._input_names]) + add_traits(self.inputs, self._input_names) def _list_outputs(self): - """Execute this module. 
- """ + """Execute this module.""" import MySQLdb + if isdefined(self.inputs.config): conn = MySQLdb.connect( - db=self.inputs.database_name, - read_default_file=self.inputs.config) + db=self.inputs.database_name, read_default_file=self.inputs.config + ) else: conn = MySQLdb.connect( host=self.inputs.host, user=self.inputs.username, passwd=self.inputs.password, - db=self.inputs.database_name) + db=self.inputs.database_name, + ) c = conn.cursor() - c.execute("REPLACE INTO %s (" % self.inputs.table_name + - ",".join(self._input_names) + ") VALUES (" + - ",".join(["%s"] * len(self._input_names)) + ")", - [getattr(self.inputs, name) for name in self._input_names]) + c.execute( + "REPLACE INTO %s (" % self.inputs.table_name + + ",".join(self._input_names) + + ") VALUES (" + + ",".join(["%s"] * len(self._input_names)) + + ")", + [getattr(self.inputs, name) for name in self._input_names], + ) conn.commit() c.close() return None class SSHDataGrabberInputSpec(DataGrabberInputSpec): - hostname = Str(mandatory=True, desc='Server hostname.') - username = Str(desc='Server username.') - password = traits.Password(desc='Server password.') + hostname = Str(mandatory=True, desc="Server hostname.") + username = Str(desc="Server username.") + password = traits.Password(desc="Server password.") download_files = traits.Bool( True, usedefault=True, - desc='If false it will return the file names without downloading them') + desc="If false it will return the file names without downloading them", + ) base_directory = Str( - mandatory=True, - desc='Path to the base directory consisting of subject data.') + mandatory=True, desc="Path to the base directory consisting of subject data." + ) template_expression = traits.Enum( - ['fnmatch', 'regexp'], + ["fnmatch", "regexp"], usedefault=True, - desc='Use either fnmatch or regexp to express templates') + desc="Use either fnmatch or regexp to express templates", + ) ssh_log_to_file = Str( - '', - usedefault=True, - desc='If set SSH commands will be logged to the given file') + "", usedefault=True, desc="If set SSH commands will be logged to the given file" + ) class SSHDataGrabber(LibraryBaseInterface, DataGrabber): - """ Extension of DataGrabber module that downloads the file list and - optionally the files from a SSH server. The SSH operation must - not need user and password so an SSH agent must be active in - where this module is being run. - + """ + Extension of DataGrabber module that downloads the file list and + optionally the files from a SSH server. The SSH operation must + not need user and password so an SSH agent must be active in + where this module is being run. - .. attention:: - Doesn't support directories currently + .. 
attention:: - Examples - -------- + Doesn't support directories currently - >>> from nipype.interfaces.io import SSHDataGrabber - >>> dg = SSHDataGrabber() - >>> dg.inputs.hostname = 'test.rebex.net' - >>> dg.inputs.user = 'demo' - >>> dg.inputs.password = 'password' - >>> dg.inputs.base_directory = 'pub/example' + Examples + -------- + >>> from nipype.interfaces.io import SSHDataGrabber + >>> dg = SSHDataGrabber() + >>> dg.inputs.hostname = 'test.rebex.net' + >>> dg.inputs.user = 'demo' + >>> dg.inputs.password = 'password' + >>> dg.inputs.base_directory = 'pub/example' - Pick all files from the base directory + Pick all files from the base directory - >>> dg.inputs.template = '*' + >>> dg.inputs.template = '*' - Pick all files starting with "s" and a number from current directory + Pick all files starting with "s" and a number from current directory - >>> dg.inputs.template_expression = 'regexp' - >>> dg.inputs.template = 'pop[0-9].*' + >>> dg.inputs.template_expression = 'regexp' + >>> dg.inputs.template = 'pop[0-9].*' - Same thing but with dynamically created fields + Same thing but with dynamically created fields - >>> dg = SSHDataGrabber(infields=['arg1','arg2']) - >>> dg.inputs.hostname = 'test.rebex.net' - >>> dg.inputs.user = 'demo' - >>> dg.inputs.password = 'password' - >>> dg.inputs.base_directory = 'pub' - >>> dg.inputs.template = '%s/%s.txt' - >>> dg.inputs.arg1 = 'example' - >>> dg.inputs.arg2 = 'foo' + >>> dg = SSHDataGrabber(infields=['arg1','arg2']) + >>> dg.inputs.hostname = 'test.rebex.net' + >>> dg.inputs.user = 'demo' + >>> dg.inputs.password = 'password' + >>> dg.inputs.base_directory = 'pub' + >>> dg.inputs.template = '%s/%s.txt' + >>> dg.inputs.arg1 = 'example' + >>> dg.inputs.arg2 = 'foo' - however this latter form can be used with iterables and iterfield in a - pipeline. + however this latter form can be used with iterables and iterfield in a + pipeline. - Dynamically created, user-defined input and output fields + Dynamically created, user-defined input and output fields - >>> dg = SSHDataGrabber(infields=['sid'], outfields=['func','struct','ref']) - >>> dg.inputs.hostname = 'myhost.com' - >>> dg.inputs.base_directory = '/main_folder/my_remote_dir' - >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] - >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] - >>> dg.inputs.template_args['ref'] = [['sid','ref']] - >>> dg.inputs.sid = 's1' + >>> dg = SSHDataGrabber(infields=['sid'], outfields=['func','struct','ref']) + >>> dg.inputs.hostname = 'myhost.com' + >>> dg.inputs.base_directory = '/main_folder/my_remote_dir' + >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] + >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] + >>> dg.inputs.template_args['ref'] = [['sid','ref']] + >>> dg.inputs.sid = 's1' - Change the template only for output field struct. The rest use the - general template + Change the template only for output field struct. 
The rest use the + general template - >>> dg.inputs.field_template = dict(struct='%s/struct.nii') - >>> dg.inputs.template_args['struct'] = [['sid']] + >>> dg.inputs.field_template = dict(struct='%s/struct.nii') + >>> dg.inputs.template_args['struct'] = [['sid']] """ + input_spec = SSHDataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = False - _pkg = 'paramiko' + _pkg = "paramiko" def __init__(self, infields=None, outfields=None, **kwargs): """ @@ -2390,18 +2498,21 @@ def __init__(self, infields=None, outfields=None, **kwargs): """ if not outfields: - outfields = ['outfiles'] + outfields = ["outfiles"] kwargs = kwargs.copy() - kwargs['infields'] = infields - kwargs['outfields'] = outfields - super(SSHDataGrabber, self).__init__(**kwargs) - if (None in (self.inputs.username, self.inputs.password)): - raise ValueError("either both username and password " - "are provided or none of them") - - if (self.inputs.template_expression == 'regexp' - and self.inputs.template[-1] != '$'): - self.inputs.template += '$' + kwargs["infields"] = infields + kwargs["outfields"] = outfields + super().__init__(**kwargs) + if None in (self.inputs.username, self.inputs.password): + raise ValueError( + "either both username and password are provided or none of them" + ) + + if ( + self.inputs.template_expression == "regexp" + and self.inputs.template[-1] != "$" + ): + self.inputs.template += "$" def _get_files_over_ssh(self, template): """Get the files matching template over an SSH connection.""" @@ -2414,19 +2525,19 @@ def _get_files_over_ssh(self, template): template_dir = os.path.dirname(template) template_base = os.path.basename(template) every_file_in_dir = sftp.listdir(template_dir) - if self.inputs.template_expression == 'fnmatch': + if self.inputs.template_expression == "fnmatch": outfiles = fnmatch.filter(every_file_in_dir, template_base) - elif self.inputs.template_expression == 'regexp': + elif self.inputs.template_expression == "regexp": regexp = re.compile(template_base) outfiles = list(filter(regexp.match, every_file_in_dir)) else: - raise ValueError('template_expression value invalid') + raise ValueError("template_expression value invalid") if len(outfiles) == 0: # no files - msg = 'Output template: %s returned no files' % template + msg = "Output template: %s returned no files" % template if self.inputs.raise_on_empty: - raise IOError(msg) + raise OSError(msg) else: warn(msg) @@ -2440,22 +2551,25 @@ def _get_files_over_ssh(self, template): # actually download the files, if desired if self.inputs.download_files: - files_to_download = copy.copy(outfiles) # make sure new list! + files_to_download = copy.copy(outfiles) # make sure new list! 
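            # The copy above matters: the related-files loop that follows
            # extends files_to_download in place, while `outfiles` must keep
            # listing only the originally matched files. A minimal sketch of
            # the aliasing hazard being avoided, using hypothetical names:
            #
            #     import copy
            #     matched = ["scan.img"]
            #     queue = matched                 # alias: both names share one list
            #     queue.extend(["scan.hdr"])      # also grows `matched`
            #     assert matched == ["scan.img", "scan.hdr"]
            #
            #     matched = ["scan.img"]
            #     queue = copy.copy(matched)      # shallow copy: independent list
            #     queue.extend(["scan.hdr"])
            #     assert matched == ["scan.img"]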
# check to see if there are any related files to download for file_to_download in files_to_download: related_to_current = get_related_files( - file_to_download, include_this_file=False) + file_to_download, include_this_file=False + ) existing_related_not_downloading = [ - f for f in related_to_current - if f in every_file_in_dir and f not in files_to_download] + f + for f in related_to_current + if f in every_file_in_dir and f not in files_to_download + ] files_to_download.extend(existing_related_not_downloading) for f in files_to_download: try: sftp.get(os.path.join(template_dir, f), f) - except IOError: - iflogger.info('remote file %s not found' % f) + except OSError: + iflogger.info("remote file %s not found" % f) # return value outfiles = simplify_list(outfiles) @@ -2473,41 +2587,42 @@ def _list_outputs(self): for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): - msg = "%s requires a value for input '%s' because it was listed in 'infields'" % \ - (self.__class__.__name__, key) + msg = ( + "%s requires a value for input '%s' because it was listed in 'infields'" + % (self.__class__.__name__, key) + ) raise ValueError(msg) outputs = {} for key, args in list(self.inputs.template_args.items()): outputs[key] = [] template = self.inputs.template - if hasattr(self.inputs, 'field_template') and \ - isdefined(self.inputs.field_template) and \ - key in self.inputs.field_template: + if ( + hasattr(self.inputs, "field_template") + and isdefined(self.inputs.field_template) + and key in self.inputs.field_template + ): template = self.inputs.field_template[key] if not args: outputs[key] = self._get_files_over_ssh(template) - for argnum, arglist in enumerate(args): + for arglist in args: maxlen = 1 for arg in arglist: - if isinstance(arg, - (str, bytes)) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): raise ValueError( - 'incompatible number of arguments for %s' % - key) + "incompatible number of arguments for %s" % key + ) if len(arg) > maxlen: maxlen = len(arg) - outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, (str, bytes)) and hasattr( - self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) @@ -2519,14 +2634,14 @@ def _list_outputs(self): filledtemplate = template % tuple(argtuple) except TypeError as e: raise TypeError( - e.message + - ": Template %s failed to convert with args %s" - % (template, str(tuple(argtuple)))) + f"{e}: Template {template} failed to convert " + f"with args {tuple(argtuple)}" + ) outputs[key].append(self._get_files_over_ssh(filledtemplate)) # disclude where there was any invalid matches - if any([val is None for val in outputs[key]]): + if None in outputs[key]: outputs[key] = [] # no outputs is None, not empty list @@ -2544,29 +2659,33 @@ def _list_outputs(self): def _get_ssh_client(self): import paramiko + config = paramiko.SSHConfig() - config.parse(open(os.path.expanduser('~/.ssh/config'))) + config.parse(open(os.path.expanduser("~/.ssh/config"))) host = config.lookup(self.inputs.hostname) - if 'proxycommand' in host: + if "proxycommand" in host: proxy = paramiko.ProxyCommand( - subprocess.check_output([ - os.environ['SHELL'], '-c', - 'echo %s' % host['proxycommand'] - ]).strip()) + subprocess.check_output( + [os.environ["SHELL"], "-c", 
"echo %s" % host["proxycommand"]] + ).strip() + ) else: proxy = None client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - client.connect(host['hostname'], username=host['user'], sock=proxy) + client.connect(host["hostname"], username=host["user"], sock=proxy) return client class JSONFileGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - in_file = File(exists=True, desc='JSON source file') + in_file = File(exists=True, desc="JSON source file") defaults = traits.Dict( - desc=('JSON dictionary that sets default output' - 'values, overridden by values found in in_file')) + desc=( + "JSON dictionary that sets default output" + "values, overridden by values found in in_file" + ) + ) class JSONFileGrabber(IOBase): @@ -2589,6 +2708,7 @@ class JSONFileGrabber(IOBase): >>> pprint.pprint(res.outputs.get()) # doctest:, +ELLIPSIS {'param1': 'exampleStr', 'param2': 4, 'param3': 1.0} """ + input_spec = JSONFileGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True @@ -2596,16 +2716,14 @@ class JSONFileGrabber(IOBase): def _list_outputs(self): import simplejson - outputs = {} if isdefined(self.inputs.in_file): - with open(self.inputs.in_file, 'r') as f: - data = simplejson.load(f) - - if not isinstance(data, dict): - raise RuntimeError('JSON input has no dictionary structure') + with open(self.inputs.in_file) as f: + outputs = simplejson.load(f) - for key, value in list(data.items()): - outputs[key] = value + if not isinstance(outputs, dict): + raise RuntimeError("JSON input has no dictionary structure") + else: + outputs = {} if isdefined(self.inputs.defaults): defaults = self.inputs.defaults @@ -2617,24 +2735,23 @@ def _list_outputs(self): class JSONFileSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - out_file = File(desc='JSON sink file') - in_dict = traits.Dict( - value={}, usedefault=True, desc='input JSON dictionary') + out_file = File(desc="JSON sink file") + in_dict = traits.Dict(value={}, usedefault=True, desc="input JSON dictionary") _outputs = traits.Dict(value={}, usedefault=True) def __setattr__(self, key, value): if key not in self.copyable_trait_names(): if not isdefined(value): - super(JSONFileSinkInputSpec, self).__setattr__(key, value) + super().__setattr__(key, value) self._outputs[key] = value else: if key in self._outputs: self._outputs[key] = value - super(JSONFileSinkInputSpec, self).__setattr__(key, value) + super().__setattr__(key, value) class JSONFileSinkOutputSpec(TraitedSpec): - out_file = File(desc='JSON sink file') + out_file = File(desc="JSON sink file") class JSONFileSink(IOBase): @@ -2643,33 +2760,33 @@ class JSONFileSink(IOBase): Entries already existing in in_dict will be overridden by matching entries dynamically added as inputs. - .. warning:: - - This is not a thread-safe node because it can write to a common - shared location. It will not complain when it overwrites a file. + .. warning:: - Examples - -------- + This is not a thread-safe node because it can write to a common + shared location. It will not complain when it overwrites a file. - >>> jsonsink = JSONFileSink(input_names=['subject_id', - ... 'some_measurement']) - >>> jsonsink.inputs.subject_id = 's1' - >>> jsonsink.inputs.some_measurement = 11.4 - >>> jsonsink.run() # doctest: +SKIP + Examples + -------- + >>> jsonsink = JSONFileSink(input_names=['subject_id', + ... 
'some_measurement']) + >>> jsonsink.inputs.subject_id = 's1' + >>> jsonsink.inputs.some_measurement = 11.4 + >>> jsonsink.run() # doctest: +SKIP - Using a dictionary as input: + Using a dictionary as input: - >>> dictsink = JSONFileSink() - >>> dictsink.inputs.in_dict = {'subject_id': 's1', - ... 'some_measurement': 11.4} - >>> dictsink.run() # doctest: +SKIP + >>> dictsink = JSONFileSink() + >>> dictsink.inputs.in_dict = {'subject_id': 's1', + ... 'some_measurement': 11.4} + >>> dictsink.run() # doctest: +SKIP """ + input_spec = JSONFileSinkInputSpec output_spec = JSONFileSinkOutputSpec def __init__(self, infields=[], force_run=True, **inputs): - super(JSONFileSink, self).__init__(**inputs) + super().__init__(**inputs) self._input_names = infields undefined_traits = {} @@ -2683,8 +2800,8 @@ def __init__(self, infields=[], force_run=True, **inputs): self._always_run = True def _process_name(self, name, val): - if '.' in name: - newkeys = name.split('.') + if "." in name: + newkeys = name.split(".") name = newkeys.pop(0) nested_dict = {newkeys.pop(): val} @@ -2699,7 +2816,7 @@ def _list_outputs(self): import os.path as op if not isdefined(self.inputs.out_file): - out_file = op.abspath('datasink.json') + out_file = op.abspath("datasink.json") else: out_file = op.abspath(self.inputs.out_file) @@ -2707,42 +2824,54 @@ def _list_outputs(self): # Overwrite in_dict entries automatically for key, val in list(self.inputs._outputs.items()): - if not isdefined(val) or key == 'trait_added': + if not isdefined(val) or key == "trait_added": continue key, val = self._process_name(key, val) out_dict[key] = val - with open(out_file, 'w') as f: + with open(out_file, "w") as f: f.write(str(simplejson.dumps(out_dict, ensure_ascii=False))) outputs = self.output_spec().get() - outputs['out_file'] = out_file + outputs["out_file"] = out_file return outputs class BIDSDataGrabberInputSpec(DynamicTraitedSpec): - base_dir = Directory(exists=True, - desc='Path to BIDS Directory.', - mandatory=True) - output_query = traits.Dict(key_trait=Str, - value_trait=traits.Dict, - desc='Queries for outfield outputs') - raise_on_empty = traits.Bool(True, usedefault=True, - desc='Generate exception if list is empty ' - 'for a given field') - return_type = traits.Enum('file', 'namedtuple', usedefault=True) - strict = traits.Bool(desc='Return only BIDS "proper" files (e.g., ' - 'ignore derivatives/, sourcedata/, etc.)') + base_dir = Directory(exists=True, desc="Path to BIDS Directory.", mandatory=True) + output_query = traits.Dict( + key_trait=Str, value_trait=traits.Dict, desc="Queries for outfield outputs" + ) + load_layout = Directory( + exists=True, desc="Path to load already saved Bidslayout.", mandatory=False + ) + raise_on_empty = traits.Bool( + True, + usedefault=True, + desc="Generate exception if list is empty for a given field", + ) + index_derivatives = traits.Bool( + False, mandatory=True, usedefault=True, desc="Index derivatives/ sub-directory" + ) + extra_derivatives = traits.List( + Directory(exists=True), desc="Additional derivative directories to index" + ) class BIDSDataGrabber(LibraryBaseInterface, IOBase): - - """ BIDS datagrabber module that wraps around pybids to allow arbitrary + """BIDS datagrabber module that wraps around pybids to allow arbitrary querying of BIDS datasets. Examples -------- + .. setup:: + + >>> try: + ... import bids + ... except ImportError: + ... pytest.skip() + By default, the BIDSDataGrabber fetches anatomical and functional images from a project, and makes BIDS entities (e.g. 
subject) available for filtering outputs. @@ -2758,17 +2887,18 @@ class BIDSDataGrabber(LibraryBaseInterface, IOBase): are filtered on common entities, which can be explicitly defined as infields. - >>> bg = BIDSDataGrabber(infields = ['subject'], outfields = ['dwi']) + >>> bg = BIDSDataGrabber(infields = ['subject']) >>> bg.inputs.base_dir = 'ds005/' >>> bg.inputs.subject = '01' - >>> bg.inputs.output_query['dwi'] = dict(modality='dwi') + >>> bg.inputs.output_query['dwi'] = dict(datatype='dwi') >>> results = bg.run() # doctest: +SKIP """ + input_spec = BIDSDataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True - _pkg = 'bids' + _pkg = "bids" def __init__(self, infields=None, **kwargs): """ @@ -2777,24 +2907,29 @@ def __init__(self, infields=None, **kwargs): infields : list of str Indicates the input fields to be dynamically created """ - super(BIDSDataGrabber, self).__init__(**kwargs) + super().__init__(**kwargs) if not isdefined(self.inputs.output_query): self.inputs.output_query = { - "func": {"modality": "func", 'extensions': ['nii', '.nii.gz']}, - "anat": {"modality": "anat", 'extensions': ['nii', '.nii.gz']}, - } + "bold": { + "datatype": "func", + "suffix": "bold", + "extension": ["nii", ".nii.gz"], + }, + "T1w": { + "datatype": "anat", + "suffix": "T1w", + "extension": ["nii", ".nii.gz"], + }, + } # If infields is empty, use all BIDS entities if infields is None: - # Version resilience - try: - from bids import layout as bidslayout - except ImportError: - from bids import grabbids as bidslayout - bids_config = join(dirname(bidslayout.__file__), 'config', 'bids.json') - bids_config = json.load(open(bids_config, 'r')) - infields = [i['name'] for i in bids_config['entities']] + from bids import layout as bidslayout + + bids_config = join(dirname(bidslayout.__file__), "config", "bids.json") + bids_config = json.load(open(bids_config)) + infields = [i["name"] for i in bids_config["entities"]] self._infields = infields or [] @@ -2802,20 +2937,23 @@ def __init__(self, infields=None, **kwargs): undefined_traits = {} for key in self._infields: self.inputs.add_trait(key, traits.Any) - undefined_traits[key] = kwargs[key] if key in kwargs else Undefined + undefined_traits[key] = kwargs.get(key, Undefined) self.inputs.trait_set(trait_change_notify=False, **undefined_traits) def _list_outputs(self): - # Version resilience - try: - from bids import BIDSLayout - except ImportError: - from bids.grabbids import BIDSLayout - exclude = None - if self.inputs.strict: - exclude = ['derivatives/', 'code/', 'sourcedata/'] - layout = BIDSLayout(self.inputs.base_dir, exclude=exclude) + from bids import BIDSLayout + + # if load_layout is given load layout which is on some datasets much faster + if isdefined(self.inputs.load_layout): + layout = BIDSLayout.load(self.inputs.load_layout) + else: + layout = BIDSLayout( + self.inputs.base_dir, derivatives=self.inputs.index_derivatives + ) + + if isdefined(self.inputs.extra_derivatives): + layout.add_derivatives(self.inputs.extra_derivatives) # If infield is not given nm input value, silently ignore filters = {} @@ -2828,11 +2966,11 @@ def _list_outputs(self): for key, query in self.inputs.output_query.items(): args = query.copy() args.update(filters) - filelist = layout.get(return_type=self.inputs.return_type, **args) + filelist = layout.get(return_type="file", **args) if len(filelist) == 0: - msg = 'Output key: %s returned no files' % key + msg = "Output key: %s returned no files" % key if self.inputs.raise_on_empty: - raise IOError(msg) + raise 
OSError(msg) else: iflogger.warning(msg) filelist = Undefined @@ -2842,3 +2980,61 @@ def _list_outputs(self): def _add_output_traits(self, base): return add_traits(base, list(self.inputs.output_query.keys())) + + +class ExportFileInputSpec(BaseInterfaceInputSpec): + in_file = File(exists=True, mandatory=True, desc="Input file name") + out_file = File(mandatory=True, desc="Output file name") + check_extension = traits.Bool( + True, + usedefault=True, + desc="Ensure that the input and output file extensions match", + ) + clobber = traits.Bool(desc="Permit overwriting existing files") + + +class ExportFileOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="Output file name") + + +class ExportFile(SimpleInterface): + """Export a file to an absolute path. + + This interface copies an input file to a named output file. + This is useful to save individual files to a specific location, + instead of more flexible interfaces like DataSink. + + Examples + -------- + >>> from nipype.interfaces.io import ExportFile + >>> import os.path as op + >>> ef = ExportFile() + >>> ef.inputs.in_file = "T1.nii.gz" + >>> os.mkdir("output_folder") + >>> ef.inputs.out_file = op.abspath("output_folder/sub1_out.nii.gz") + >>> res = ef.run() + >>> os.path.exists(res.outputs.out_file) + True + + """ + + input_spec = ExportFileInputSpec + output_spec = ExportFileOutputSpec + + def _run_interface(self, runtime): + if not self.inputs.clobber and op.exists(self.inputs.out_file): + raise FileExistsError(self.inputs.out_file) + if not op.isabs(self.inputs.out_file): + raise ValueError("Out_file must be an absolute path.") + if ( + self.inputs.check_extension + and split_filename(self.inputs.in_file)[2] + != split_filename(self.inputs.out_file)[2] + ): + raise RuntimeError( + "%s and %s have different extensions" + % (self.inputs.in_file, self.inputs.out_file) + ) + shutil.copy(str(self.inputs.in_file), str(self.inputs.out_file)) + self._results["out_file"] = self.inputs.out_file + return runtime diff --git a/nipype/interfaces/matlab.py b/nipype/interfaces/matlab.py index fed7bfeb57..de959988f4 100644 --- a/nipype/interfaces/matlab.py +++ b/nipype/interfaces/matlab.py @@ -1,88 +1,80 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" General matlab interface code """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open +"""Interfaces to run MATLAB scripts.""" import os from .. 
import config
-from .base import (CommandLineInputSpec, InputMultiPath, isdefined,
-                   CommandLine, traits, File, Directory)
+from .base import (
+    CommandLineInputSpec,
+    InputMultiPath,
+    isdefined,
+    CommandLine,
+    traits,
+    File,
+    Directory,
+)
 
 
 def get_matlab_command():
-    if 'NIPYPE_NO_MATLAB' in os.environ:
-        return None
-
-    try:
-        matlab_cmd = os.environ['MATLABCMD']
-    except:
-        matlab_cmd = 'matlab'
-
-    try:
-        res = CommandLine(
-            command='which',
-            args=matlab_cmd,
-            resource_monitor=False,
-            terminal_output='allatonce').run()
-        matlab_path = res.runtime.stdout.strip()
-    except Exception:
-        return None
-    return matlab_cmd
+    """Determine whether Matlab is installed and can be executed."""
+    if "NIPYPE_NO_MATLAB" not in os.environ:
+        from nipype.utils.filemanip import which
+
+        return which(os.getenv("MATLABCMD", "matlab"))
 
 
 no_matlab = get_matlab_command() is None
 
 
 class MatlabInputSpec(CommandLineInputSpec):
-    """ Basic expected inputs to Matlab interface """
+    """Basic expected inputs to Matlab interface"""
 
     script = traits.Str(
-        argstr='-r \"%s;exit\"',
-        desc='m-code to run',
-        mandatory=True,
-        position=-1)
+        argstr='-r "%s;exit"', desc="m-code to run", mandatory=True, position=-1
+    )
     uses_mcr = traits.Bool(
-        desc='use MCR interface',
-        xor=['nodesktop', 'nosplash', 'single_comp_thread'],
-        nohash=True)
+        desc="use MCR interface",
+        xor=["nodesktop", "nosplash", "single_comp_thread"],
+        nohash=True,
+    )
    nodesktop = traits.Bool(
         True,
-        argstr='-nodesktop',
+        argstr="-nodesktop",
         usedefault=True,
-        desc='Switch off desktop mode on unix platforms',
-        nohash=True)
+        desc="Switch off desktop mode on unix platforms",
+        nohash=True,
+    )
     nosplash = traits.Bool(
         True,
-        argstr='-nosplash',
+        argstr="-nosplash",
         usedefault=True,
-        desc='Switch of splash screen',
-        nohash=True)
-    logfile = File(argstr='-logfile %s', desc='Save matlab output to log')
+        desc="Switch off the splash screen",
+        nohash=True,
+    )
+    logfile = File(argstr="-logfile %s", desc="Save matlab output to log")
     single_comp_thread = traits.Bool(
-        argstr="-singleCompThread",
-        desc="force single threaded operation",
-        nohash=True)
+        argstr="-singleCompThread", desc="force single threaded operation", nohash=True
+    )
 
     # non-commandline options
-    mfile = traits.Bool(True, desc='Run m-code using m-file', usedefault=True)
+    mfile = traits.Bool(True, desc="Run m-code using m-file", usedefault=True)
     script_file = File(
-        'pyscript.m', usedefault=True, desc='Name of file to write m-code to')
+        "pyscript.m", usedefault=True, desc="Name of file to write m-code to"
+    )
-    paths = InputMultiPath(Directory(), desc='Paths to add to matlabpath')
+    paths = InputMultiPath(Directory(), desc="Paths to add to matlabpath")
     prescript = traits.List(
-        ["ver,", "try,"],
-        usedefault=True,
-        desc='prescript to be added before code')
+        ["ver,", "try,"], usedefault=True, desc="prescript to be added before code"
+    )
     postscript = traits.List(
         [
-            "\n,catch ME,", "fprintf(2,'MATLAB code threw an exception:\\n');",
+            "\n,catch ME,",
+            "fprintf(2,'MATLAB code threw an exception:\\n');",
             "fprintf(2,'%s\\n',ME.message);",
             "if length(ME.stack) ~= 0, fprintf(2,'File:%s\\nName:%s\\nLine:%d\\n',ME.stack.file,ME.stack.name,ME.stack.line);, end;",
-            "end;"
+            "end;",
         ],
-        desc='script added after code',
-        usedefault=True)
+        desc="script added after code",
+        usedefault=True,
+    )
 
 
 class MatlabCommand(CommandLine):
@@ -94,7 +86,7 @@ class MatlabCommand(CommandLine):
     >>> out = mlab.run()  # doctest: +SKIP
     """
 
-    _cmd = 'matlab'
+    _cmd = "matlab"
     _default_matlab_cmd = None
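    # Class-wide fallbacks for the command, mfile flag, and search paths;
    # they are set through the set_default_* classmethods below
    # (e.g. MatlabCommand.set_default_matlab_cmd("matlab -nodesktop")), and
    # __init__ falls back to them only when no explicit value is given.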
     _default_mfile = None
     _default_paths = None
 
@@ -104,7 +96,7 @@ def __init__(self, matlab_cmd=None, **inputs):
         """initializes interface to matlab
         (default 'matlab -nodesktop -nosplash')
         """
-        super(MatlabCommand, self).__init__(**inputs)
+        super().__init__(**inputs)
         if matlab_cmd and isdefined(matlab_cmd):
             self._cmd = matlab_cmd
         elif self._default_matlab_cmd:
@@ -116,13 +108,14 @@ def __init__(self, matlab_cmd=None, **inputs):
         if self._default_paths and not isdefined(self.inputs.paths):
             self.inputs.paths = self._default_paths
 
-        if not isdefined(self.inputs.single_comp_thread) and \
-                not isdefined(self.inputs.uses_mcr):
-            if config.getboolean('execution', 'single_thread_matlab'):
+        if not isdefined(self.inputs.single_comp_thread) and not isdefined(
+            self.inputs.uses_mcr
+        ):
+            if config.getboolean("execution", "single_thread_matlab"):
                 self.inputs.single_comp_thread = True
         # For matlab commands force all output to be returned since matlab
         # does not have a clean way of notifying an error
-        self.terminal_output = 'allatonce'
+        self.terminal_output = "allatonce"
 
     @classmethod
     def set_default_matlab_cmd(cls, matlab_cmd):
@@ -158,28 +151,28 @@ def set_default_paths(cls, paths):
         cls._default_paths = paths
 
     def _run_interface(self, runtime):
-        self.terminal_output = 'allatonce'
-        runtime = super(MatlabCommand, self)._run_interface(runtime)
+        self.terminal_output = "allatonce"
+        runtime = super()._run_interface(runtime)
         try:
             # Matlab can leave the terminal in a garbled state
-            os.system('stty sane')
+            os.system("stty sane")
         except:  # We might be on a system where stty doesn't exist
             pass
-        if 'MATLAB code threw an exception' in runtime.stderr:
+        if "MATLAB code threw an exception" in runtime.stderr:
             self.raise_exception(runtime)
         return runtime
 
     def _format_arg(self, name, trait_spec, value):
-        if name in ['script']:
+        if name in ["script"]:
             argstr = trait_spec.argstr
             if self.inputs.uses_mcr:
-                argstr = '%s'
+                argstr = "%s"
             return self._gen_matlab_command(argstr, value)
-        return super(MatlabCommand, self)._format_arg(name, trait_spec, value)
+        return super()._format_arg(name, trait_spec, value)
 
     def _gen_matlab_command(self, argstr, script_lines):
-        """ Generates commands and, if mfile specified, writes it to disk."""
+        """Generates commands and, if mfile specified, writes it to disk."""
         cwd = os.getcwd()
         mfile = self.inputs.mfile or self.inputs.uses_mcr
         paths = []
@@ -192,33 +185,37 @@ def _gen_matlab_command(self, argstr, script_lines):
         # prescript takes different default value depending on the mfile argument
         if mfile:
             prescript.insert(
-                0,
-                "fprintf(1,'Executing %s at %s:\\n',mfilename(),datestr(now));"
+                0, "fprintf(1,'Executing %s at %s:\\n',mfilename(),datestr(now));"
             )
         else:
-            prescript.insert(
-                0, "fprintf(1,'Executing code at %s:\\n',datestr(now));")
+            prescript.insert(0, "fprintf(1,'Executing code at %s:\\n',datestr(now));")
         for path in paths:
-            prescript.append("addpath('%s');\n" % path)
+            # addpath() is not available after compilation
+            # https://www.mathworks.com/help/compiler/ismcc.html
+            # https://www.mathworks.com/help/compiler/isdeployed.html
+            prescript.append("if ~(ismcc || isdeployed), addpath('%s'); end;\n" % path)
 
         if not mfile:
            # clean up the code of comments and replace newlines with commas
-            script_lines = ','.join([
-                line for line in script_lines.split("\n")
-                if not line.strip().startswith("%")
-            ])
+            script_lines = ",".join(
+                [
+                    line
+                    for line in script_lines.split("\n")
+                    if not line.strip().startswith("%")
+                ]
+            )
 
-        script_lines = '\n'.join(prescript) + script_lines
+ '\n'.join( - postscript) + script_lines = "\n".join(prescript) + script_lines + "\n".join(postscript) if mfile: - with open(os.path.join(cwd, self.inputs.script_file), - 'wt') as mfile: + with open(os.path.join(cwd, self.inputs.script_file), "w") as mfile: mfile.write(script_lines) if self.inputs.uses_mcr: - script = '%s' % (os.path.join(cwd, self.inputs.script_file)) + script = "%s" % (os.path.join(cwd, self.inputs.script_file)) else: - script = "addpath('%s');%s" % ( - cwd, self.inputs.script_file.split('.')[0]) + script = "addpath('{}');{}".format( + cwd, + self.inputs.script_file.split(".")[0], + ) else: - script = ''.join(script_lines.split('\n')) + script = "".join(script_lines.split("\n")) return argstr % script diff --git a/nipype/interfaces/meshfix.py b/nipype/interfaces/meshfix.py index 231f1fea9d..097fdf4010 100644 --- a/nipype/interfaces/meshfix.py +++ b/nipype/interfaces/meshfix.py @@ -1,152 +1,166 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Fixes meshes: -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) +"""MeshFix corrects topological errors in polygonal meshes.""" import os.path as op from ..utils.filemanip import split_filename -from .base import (CommandLine, CommandLineInputSpec, traits, TraitedSpec, - isdefined, File) +from .base import ( + CommandLine, + CommandLineInputSpec, + traits, + TraitedSpec, + isdefined, + File, +) class MeshFixInputSpec(CommandLineInputSpec): number_of_biggest_shells = traits.Int( - argstr='--shells %d', desc="Only the N biggest shells are kept") + argstr="--shells %d", desc="Only the N biggest shells are kept" + ) epsilon_angle = traits.Range( - argstr='-a %f', + argstr="-a %f", low=0.0, high=2.0, - desc="Epsilon angle in degrees (must be between 0 and 2)") + desc="Epsilon angle in degrees (must be between 0 and 2)", + ) join_overlapping_largest_components = traits.Bool( - argstr='-j', - xor=['join_closest_components'], - desc='Join 2 biggest components if they overlap, remove the rest.') + argstr="-j", + xor=["join_closest_components"], + desc="Join 2 biggest components if they overlap, remove the rest.", + ) join_closest_components = traits.Bool( - argstr='-jc', - xor=['join_closest_components'], - desc='Join the closest pair of components.') + argstr="-jc", + xor=["join_closest_components"], + desc="Join the closest pair of components.", + ) quiet_mode = traits.Bool( - argstr='-q', desc="Quiet mode, don't write much to stdout.") + argstr="-q", desc="Quiet mode, don't write much to stdout." 
+    )
-    dont_clean = traits.Bool(argstr='--no-clean', desc="Don't Clean")
+    dont_clean = traits.Bool(argstr="--no-clean", desc="Don't Clean")
    save_as_stl = traits.Bool(
-        xor=['save_as_vrml', 'save_as_freesurfer_mesh'],
-        argstr='--stl',
-        desc="Result is saved in stereolithographic format (.stl)")
+        xor=["save_as_vrml", "save_as_freesurfer_mesh"],
+        argstr="--stl",
+        desc="Result is saved in stereolithographic format (.stl)",
+    )
    save_as_vrml = traits.Bool(
-        argstr='--wrl',
-        xor=['save_as_stl', 'save_as_freesurfer_mesh'],
-        desc="Result is saved in VRML1.0 format (.wrl)")
+        argstr="--wrl",
+        xor=["save_as_stl", "save_as_freesurfer_mesh"],
+        desc="Result is saved in VRML1.0 format (.wrl)",
+    )
    save_as_freesurfer_mesh = traits.Bool(
-        argstr='--fsmesh',
-        xor=['save_as_vrml', 'save_as_stl'],
-        desc="Result is saved in freesurfer mesh format")
+        argstr="--fsmesh",
+        xor=["save_as_vrml", "save_as_stl"],
+        desc="Result is saved in freesurfer mesh format",
+    )
-    remove_handles = traits.Bool(
-        argstr='--remove-handles', desc="Remove handles")
+    remove_handles = traits.Bool(argstr="--remove-handles", desc="Remove handles")
    uniform_remeshing_steps = traits.Int(
-        argstr='-u %d',
-        requires=['uniform_remeshing_vertices'],
-        desc="Number of steps for uniform remeshing of the whole mesh")
+        argstr="-u %d",
+        requires=["uniform_remeshing_vertices"],
+        desc="Number of steps for uniform remeshing of the whole mesh",
+    )
    uniform_remeshing_vertices = traits.Int(
-        argstr='--vertices %d',
-        requires=['uniform_remeshing_steps'],
+        argstr="--vertices %d",
+        requires=["uniform_remeshing_steps"],
        desc="Constrains the number of vertices."
-        "Must be used with uniform_remeshing_steps")
+        " Must be used with uniform_remeshing_steps",
+    )
    laplacian_smoothing_steps = traits.Int(
-        argstr='--smooth %d',
-        desc="The number of laplacian smoothing steps to apply")
+        argstr="--smooth %d", desc="The number of laplacian smoothing steps to apply"
+    )
    x_shift = traits.Int(
-        argstr='--smooth %d',
-        desc="Shifts the coordinates of the vertices when saving. Output must be in FreeSurfer format"
+        argstr="--smooth %d",
+        desc="Shifts the coordinates of the vertices when saving. Output must be in FreeSurfer format",
    )
    # Cutting, decoupling, dilation
    cut_outer = traits.Int(
-        argstr='--cut-outer %d',
-        desc="Remove triangles of 1st that are outside of the 2nd shell.")
+        argstr="--cut-outer %d",
+        desc="Remove triangles of 1st that are outside of the 2nd shell.",
+    )
    cut_inner = traits.Int(
-        argstr='--cut-inner %d',
-        desc="Remove triangles of 1st that are inside of the 2nd shell. Dilate 2nd by N; Fill holes and keep only 1st afterwards."
+        argstr="--cut-inner %d",
+        desc="Remove triangles of 1st that are inside of the 2nd shell. Dilate 2nd by N; Fill holes and keep only 1st afterwards.",
    )
    decouple_inin = traits.Int(
-        argstr='--decouple-inin %d',
+        argstr="--decouple-inin %d",
        desc="Treat 1st file as inner, 2nd file as outer component."
-        "Resolve overlaps by moving inners triangles inwards. Constrain the min distance between the components > d."
+        " Resolve overlaps by moving inner triangles inwards. Constrain the min distance between the components > d.",
    )
    decouple_outin = traits.Int(
-        argstr='--decouple-outin %d',
+        argstr="--decouple-outin %d",
        desc="Treat 1st file as outer, 2nd file as inner component."
-        "Resolve overlaps by moving outers triangles inwards. Constrain the min distance between the components > d."
+        " Resolve overlaps by moving outer triangles inwards. Constrain the min distance between the components > d.",
    )
    decouple_outout = traits.Int(
-        argstr='--decouple-outout %d',
+        argstr="--decouple-outout %d",
        desc="Treat 1st file as outer, 2nd file as inner component."
-        "Resolve overlaps by moving outers triangles outwards. Constrain the min distance between the components > d."
+        " Resolve overlaps by moving outer triangles outwards. Constrain the min distance between the components > d.",
    )
    finetuning_inwards = traits.Bool(
-        argstr='--fineTuneIn ',
-        requires=['finetuning_distance', 'finetuning_substeps'],
+        argstr="--fineTuneIn ",
+        requires=["finetuning_distance", "finetuning_substeps"],
        position=-3,
-        desc="Used to fine-tune the minimal distance between surfaces."
+        desc="Used to fine-tune the minimal distance between surfaces.",
    )
    finetuning_outwards = traits.Bool(
-        argstr='--fineTuneOut ',
-        requires=['finetuning_distance', 'finetuning_substeps'],
+        argstr="--fineTuneOut ",
+        requires=["finetuning_distance", "finetuning_substeps"],
        position=-3,
-        xor=['finetuning_inwards'],
-        desc='Similar to finetuning_inwards, but ensures minimal distance in the other direction'
+        xor=["finetuning_inwards"],
+        desc="Similar to finetuning_inwards, but ensures minimal distance in the other direction",
    )
    finetuning_distance = traits.Float(
-        argstr='%f',
-        requires=['finetuning_substeps'],
+        argstr="%f",
+        requires=["finetuning_substeps"],
        position=-2,
        desc="Used to fine-tune the minimal distance between surfaces."
-        "A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)"
+        " A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevents too flat tetrahedra.",
    )
    finetuning_substeps = traits.Int(
-        argstr='%d',
-        requires=['finetuning_distance'],
+        argstr="%d",
+        requires=["finetuning_distance"],
        position=-1,
        desc="Used to fine-tune the minimal distance between surfaces."
-        "A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)"
+        " A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevents too flat tetrahedra.",
    )
    dilation = traits.Int(
-        argstr='--dilate %d',
-        desc="Dilate the surface by d. d < 0 means shrinking.")
+        argstr="--dilate %d", desc="Dilate the surface by d. d < 0 means shrinking."
+    )
    set_intersections_to_one = traits.Bool(
-        argstr='--intersect',
+        argstr="--intersect",
        desc="If the mesh contains intersections, return value = 1."
- "If saved in gmsh format, intersections will be highlighted.") + "If saved in gmsh format, intersections will be highlighted.", + ) in_file1 = File(exists=True, argstr="%s", position=1, mandatory=True) in_file2 = File(exists=True, argstr="%s", position=2) output_type = traits.Enum( - 'off', ['stl', 'msh', 'wrl', 'vrml', 'fs', 'off'], + "off", + ["stl", "msh", "wrl", "vrml", "fs", "off"], usedefault=True, - desc='The output type to save the file as.') + desc="The output type to save the file as.", + ) out_filename = File( - genfile=True, - argstr="-o %s", - desc='The output filename for the fixed mesh file') + genfile=True, argstr="-o %s", desc="The output filename for the fixed mesh file" + ) class MeshFixOutputSpec(TraitedSpec): - mesh_file = File(exists=True, desc='The output mesh file') + mesh_file = File(exists=True, desc="The output mesh file") class MeshFix(CommandLine): @@ -179,7 +193,8 @@ class MeshFix(CommandLine): >>> fix.cmdline 'meshfix lh-pial.stl rh-pial.stl -o lh-pial_fixed.off' """ - _cmd = 'meshfix' + + _cmd = "meshfix" input_spec = MeshFixInputSpec output_spec = MeshFixOutputSpec @@ -187,33 +202,32 @@ def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_filename): path, name, ext = split_filename(self.inputs.out_filename) - ext = ext.replace('.', '') - out_types = ['stl', 'msh', 'wrl', 'vrml', 'fs', 'off'] + ext = ext.replace(".", "") + out_types = ["stl", "msh", "wrl", "vrml", "fs", "off"] # Make sure that the output filename uses one of the possible file types if any(ext == out_type.lower() for out_type in out_types): - outputs['mesh_file'] = op.abspath(self.inputs.out_filename) + outputs["mesh_file"] = op.abspath(self.inputs.out_filename) else: - outputs['mesh_file'] = op.abspath( - name + '.' + self.inputs.output_type) + outputs["mesh_file"] = op.abspath(name + "." + self.inputs.output_type) else: - outputs['mesh_file'] = op.abspath(self._gen_outfilename()) + outputs["mesh_file"] = op.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file1) - if self.inputs.save_as_freesurfer_mesh or self.inputs.output_type == 'fs': - self.inputs.output_type = 'fs' + if self.inputs.save_as_freesurfer_mesh or self.inputs.output_type == "fs": + self.inputs.output_type = "fs" self.inputs.save_as_freesurfer_mesh = True - if self.inputs.save_as_stl or self.inputs.output_type == 'stl': - self.inputs.output_type = 'stl' + if self.inputs.save_as_stl or self.inputs.output_type == "stl": + self.inputs.output_type = "stl" self.inputs.save_as_stl = True - if self.inputs.save_as_vrml or self.inputs.output_type == 'vrml': - self.inputs.output_type = 'vrml' + if self.inputs.save_as_vrml or self.inputs.output_type == "vrml": + self.inputs.output_type = "vrml" self.inputs.save_as_vrml = True - return name + '_fixed.' + self.inputs.output_type + return name + "_fixed." 
+ self.inputs.output_type diff --git a/nipype/interfaces/minc/__init__.py b/nipype/interfaces/minc/__init__.py index 1ebea58b64..b05ef82b5d 100644 --- a/nipype/interfaces/minc/__init__.py +++ b/nipype/interfaces/minc/__init__.py @@ -1,7 +1,8 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""The minc module provides classes for interfacing with the `MINC +"""The MINC (McConnell Brain Imaging Centre, Montreal Neurological Institute) toolkit. + +The minc module provides classes for interfacing with the `MINC `_ command line tools. This module was written to work with MINC version 2.2.00. @@ -9,7 +10,7 @@ http://carlo-hamalainen.net """ -from .base import (Info) +from .base import Info from .minc import ( Average, diff --git a/nipype/interfaces/minc/base.py b/nipype/interfaces/minc/base.py index 67b7938176..8731627693 100644 --- a/nipype/interfaces/minc/base.py +++ b/nipype/interfaces/minc/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The minc module provides classes for interfacing with the `MINC @@ -8,32 +7,27 @@ Author: Carlo Hamalainen http://carlo-hamalainen.net """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import object import os import os.path import warnings from ..base import CommandLine -warnings.filterwarnings('always', category=UserWarning) +warnings.filterwarnings("always", category=UserWarning) def check_minc(): - """Returns True if and only if MINC is installed.' - """ + """Returns True if and only if MINC is installed.'""" return Info.version() is not None def no_minc(): - """Returns True if and only if MINC is *not* installed. - """ + """Returns True if and only if MINC is *not* installed.""" return not check_minc() -class Info(object): +class Info: """Handle MINC version information. 
version refers to the version of MINC on the system @@ -55,47 +49,41 @@ def version(): """ try: clout = CommandLine( - command='mincinfo', - args='-version', - terminal_output='allatonce').run() - except IOError: + command="mincinfo", args="-version", terminal_output="allatonce" + ).run() + except OSError: return None out = clout.runtime.stdout def read_program_version(s): - if 'program' in s: - return s.split(':')[1].strip() + if "program" in s: + return s.split(":")[1].strip() return None def read_libminc_version(s): - if 'libminc' in s: - return s.split(':')[1].strip() + if "libminc" in s: + return s.split(":")[1].strip() return None def read_netcdf_version(s): - if 'netcdf' in s: - return ' '.join(s.split(':')[1:]).strip() + if "netcdf" in s: + return " ".join(s.split(":")[1:]).strip() return None def read_hdf5_version(s): - if 'HDF5' in s: - return s.split(':')[1].strip() + if "HDF5" in s: + return s.split(":")[1].strip() return None - versions = { - 'minc': None, - 'libminc': None, - 'netcdf': None, - 'hdf5': None, - } - - for l in out.split('\n'): - for (name, f) in [ - ('minc', read_program_version), - ('libminc', read_libminc_version), - ('netcdf', read_netcdf_version), - ('hdf5', read_hdf5_version), + versions = {"minc": None, "libminc": None, "netcdf": None, "hdf5": None} + + for l in out.split("\n"): + for name, f in [ + ("minc", read_program_version), + ("libminc", read_libminc_version), + ("netcdf", read_netcdf_version), + ("hdf5", read_hdf5_version), ]: if f(l) is not None: versions[name] = f(l) @@ -129,11 +117,13 @@ def aggregate_filename(files, new_suffix): path = os.getcwd() - if common_prefix == '': + if common_prefix == "": return os.path.abspath( os.path.join( - path, - os.path.splitext(files[0])[0] + '_' + new_suffix + '.mnc')) + path, os.path.splitext(files[0])[0] + "_" + new_suffix + ".mnc" + ) + ) else: return os.path.abspath( - os.path.join(path, common_prefix + '_' + new_suffix + '.mnc')) + os.path.join(path, common_prefix + "_" + new_suffix + ".mnc") + ) diff --git a/nipype/interfaces/minc/minc.py b/nipype/interfaces/minc/minc.py index 8ac8babe52..bf80e23732 100644 --- a/nipype/interfaces/minc/minc.py +++ b/nipype/interfaces/minc/minc.py @@ -1,220 +1,242 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The minc module provides classes for interfacing with the `MINC `_ command line tools. This module was written to work with MINC version 2.2.00. 
-Author: Carlo Hamalainen - http://carlo-hamalainen.net +Author: `Carlo Hamalainen `__ """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open - import glob import os import os.path import re import warnings -from ..base import (TraitedSpec, CommandLineInputSpec, CommandLine, - StdOutCommandLineInputSpec, StdOutCommandLine, File, - Directory, InputMultiPath, OutputMultiPath, traits, - isdefined) +from ..base import ( + TraitedSpec, + CommandLineInputSpec, + CommandLine, + StdOutCommandLineInputSpec, + StdOutCommandLine, + File, + Directory, + InputMultiPath, + OutputMultiPath, + traits, + Tuple, + isdefined, +) from .base import aggregate_filename -warnings.filterwarnings('always', category=UserWarning) +warnings.filterwarnings("always", category=UserWarning) class ExtractInputSpec(StdOutCommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( - desc='output file', + desc="output file", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s.raw', - keep_extension=False) + name_template="%s.raw", + keep_extension=False, + ) _xor_write = ( - 'write_ascii', - 'write_ascii', - 'write_byte', - 'write_short', - 'write_int', - 'write_long', - 'write_float', - 'write_double', - 'write_signed', - 'write_unsigned', + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", ) write_ascii = traits.Bool( - desc='Write out data as ascii strings (default).', - argstr='-ascii', - xor=_xor_write) + desc="Write out data as ascii strings (default).", + argstr="-ascii", + xor=_xor_write, + ) write_byte = traits.Bool( - desc='Write out data as bytes.', argstr='-byte', xor=_xor_write) + desc="Write out data as bytes.", argstr="-byte", xor=_xor_write + ) write_short = traits.Bool( - desc='Write out data as short integers.', - argstr='-short', - xor=_xor_write) + desc="Write out data as short integers.", argstr="-short", xor=_xor_write + ) write_int = traits.Bool( - desc='Write out data as 32-bit integers.', - argstr='-int', - xor=_xor_write) + desc="Write out data as 32-bit integers.", argstr="-int", xor=_xor_write + ) write_long = traits.Bool( - desc='Superseded by write_int.', argstr='-long', xor=_xor_write) + desc="Superseded by write_int.", argstr="-long", xor=_xor_write + ) write_float = traits.Bool( - desc='Write out data as single precision floating-point values.', - argstr='-float', - xor=_xor_write) + desc="Write out data as single precision floating-point values.", + argstr="-float", + xor=_xor_write, + ) write_double = traits.Bool( - desc='Write out data as double precision floating-point values.', - argstr='-double', - xor=_xor_write) + desc="Write out data as double precision floating-point values.", + argstr="-double", + xor=_xor_write, + ) - _xor_signed = ('write_signed', 'write_unsigned') + _xor_signed = ("write_signed", "write_unsigned") write_signed = traits.Bool( - desc='Write out signed data.', argstr='-signed', xor=_xor_signed) + desc="Write out signed data.", argstr="-signed", xor=_xor_signed + ) write_unsigned = traits.Bool( - desc='Write out unsigned data.', argstr='-unsigned', xor=_xor_signed) + desc="Write out unsigned data.", argstr="-unsigned", xor=_xor_signed + ) - write_range = traits.Tuple( + 
write_range = Tuple( traits.Float, traits.Float, - argstr='-range %s %s', - desc= - 'Specify the range of output values\nDefault value: 1.79769e+308 1.79769e+308.', + argstr="-range %s %s", + desc="Specify the range of output values\nDefault value: 1.79769e+308 1.79769e+308.", ) - _xor_normalize = ( - 'normalize', - 'nonormalize', - ) + _xor_normalize = ("normalize", "nonormalize") normalize = traits.Bool( - desc='Normalize integer pixel values to file max and min.', - argstr='-normalize', - xor=_xor_normalize) + desc="Normalize integer pixel values to file max and min.", + argstr="-normalize", + xor=_xor_normalize, + ) nonormalize = traits.Bool( - desc='Turn off pixel normalization.', - argstr='-nonormalize', - xor=_xor_normalize) + desc="Turn off pixel normalization.", argstr="-nonormalize", xor=_xor_normalize + ) - image_range = traits.Tuple( + image_range = Tuple( traits.Float, traits.Float, - desc='Specify the range of real image values for normalization.', - argstr='-image_range %s %s') + desc="Specify the range of real image values for normalization.", + argstr="-image_range %s %s", + ) image_minimum = traits.Float( - desc=('Specify the minimum real image value for normalization.' - 'Default value: 1.79769e+308.'), - argstr='-image_minimum %s') + desc=( + "Specify the minimum real image value for normalization." + "Default value: 1.79769e+308." + ), + argstr="-image_minimum %s", + ) image_maximum = traits.Float( - desc=('Specify the maximum real image value for normalization.' - 'Default value: 1.79769e+308.'), - argstr='-image_maximum %s') + desc=( + "Specify the maximum real image value for normalization." + "Default value: 1.79769e+308." + ), + argstr="-image_maximum %s", + ) start = InputMultiPath( traits.Int, - desc='Specifies corner of hyperslab (C conventions for indices).', - sep=',', - argstr='-start %s', + desc="Specifies corner of hyperslab (C conventions for indices).", + sep=",", + argstr="-start %s", ) count = InputMultiPath( traits.Int, - desc='Specifies edge lengths of hyperslab to read.', - sep=',', - argstr='-count %s', + desc="Specifies edge lengths of hyperslab to read.", + sep=",", + argstr="-count %s", ) # FIXME Can we make sure that len(start) == len(count)? 
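# A minimal sketch of the hyperslab options above (hypothetical file name and
# values; start and count each need one entry per dimension of the input
# volume, and are passed comma-separated to mincextract as -start/-count):
#
#   >>> from nipype.interfaces.minc import Extract
#   >>> extract = Extract(input_file="structural.mnc",
#   ...                   start=[3, 10, 5], count=[4, 4, 4])
#   >>> extract.run()  # doctest: +SKIP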
- _xor_flip = ('flip_positive_direction', 'flip_negative_direction', - 'flip_any_direction') + _xor_flip = ( + "flip_positive_direction", + "flip_negative_direction", + "flip_any_direction", + ) flip_positive_direction = traits.Bool( - desc='Flip images to always have positive direction.', - argstr='-positive_direction', - xor=_xor_flip) + desc="Flip images to always have positive direction.", + argstr="-positive_direction", + xor=_xor_flip, + ) flip_negative_direction = traits.Bool( - desc='Flip images to always have negative direction.', - argstr='-negative_direction', - xor=_xor_flip) + desc="Flip images to always have negative direction.", + argstr="-negative_direction", + xor=_xor_flip, + ) flip_any_direction = traits.Bool( - desc='Do not flip images (Default).', - argstr='-any_direction', - xor=_xor_flip) + desc="Do not flip images (Default).", argstr="-any_direction", xor=_xor_flip + ) - _xor_x_flip = ('flip_x_positive', 'flip_x_negative', 'flip_x_any') + _xor_x_flip = ("flip_x_positive", "flip_x_negative", "flip_x_any") flip_x_positive = traits.Bool( - desc='Flip images to give positive xspace:step value (left-to-right).', - argstr='+xdirection', - xor=_xor_x_flip) + desc="Flip images to give positive xspace:step value (left-to-right).", + argstr="+xdirection", + xor=_xor_x_flip, + ) flip_x_negative = traits.Bool( - desc='Flip images to give negative xspace:step value (right-to-left).', - argstr='-xdirection', - xor=_xor_x_flip) + desc="Flip images to give negative xspace:step value (right-to-left).", + argstr="-xdirection", + xor=_xor_x_flip, + ) flip_x_any = traits.Bool( - desc='Don\'t flip images along x-axis (default).', - argstr='-xanydirection', - xor=_xor_x_flip) + desc="Don't flip images along x-axis (default).", + argstr="-xanydirection", + xor=_xor_x_flip, + ) - _xor_y_flip = ('flip_y_positive', 'flip_y_negative', 'flip_y_any') + _xor_y_flip = ("flip_y_positive", "flip_y_negative", "flip_y_any") flip_y_positive = traits.Bool( - desc='Flip images to give positive yspace:step value (post-to-ant).', - argstr='+ydirection', - xor=_xor_y_flip) + desc="Flip images to give positive yspace:step value (post-to-ant).", + argstr="+ydirection", + xor=_xor_y_flip, + ) flip_y_negative = traits.Bool( - desc='Flip images to give negative yspace:step value (ant-to-post).', - argstr='-ydirection', - xor=_xor_y_flip) + desc="Flip images to give negative yspace:step value (ant-to-post).", + argstr="-ydirection", + xor=_xor_y_flip, + ) flip_y_any = traits.Bool( - desc='Don\'t flip images along y-axis (default).', - argstr='-yanydirection', - xor=_xor_y_flip) + desc="Don't flip images along y-axis (default).", + argstr="-yanydirection", + xor=_xor_y_flip, + ) - _xor_z_flip = ('flip_z_positive', 'flip_z_negative', 'flip_z_any') + _xor_z_flip = ("flip_z_positive", "flip_z_negative", "flip_z_any") flip_z_positive = traits.Bool( - desc='Flip images to give positive zspace:step value (inf-to-sup).', - argstr='+zdirection', - xor=_xor_z_flip) + desc="Flip images to give positive zspace:step value (inf-to-sup).", + argstr="+zdirection", + xor=_xor_z_flip, + ) flip_z_negative = traits.Bool( - desc='Flip images to give negative zspace:step value (sup-to-inf).', - argstr='-zdirection', - xor=_xor_z_flip) + desc="Flip images to give negative zspace:step value (sup-to-inf).", + argstr="-zdirection", + xor=_xor_z_flip, + ) flip_z_any = traits.Bool( - desc='Don\'t flip images along z-axis (default).', - argstr='-zanydirection', - xor=_xor_z_flip) + desc="Don't flip images along z-axis (default).", + 
argstr="-zanydirection", + xor=_xor_z_flip, + ) class ExtractOutputSpec(TraitedSpec): - output_file = File(desc='output file in raw/text format', exists=True) + output_file = File(desc="output file in raw/text format", exists=True) class Extract(StdOutCommandLine): @@ -235,94 +257,100 @@ class Extract(StdOutCommandLine): input_spec = ExtractInputSpec output_spec = ExtractOutputSpec - _cmd = 'mincextract' + _cmd = "mincextract" class ToRawInputSpec(StdOutCommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( - desc='output file', + desc="output file", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s.raw', - keep_extension=False) + name_template="%s.raw", + keep_extension=False, + ) - _xor_write = ('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double') + _xor_write = ( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ) write_byte = traits.Bool( - desc='Write out data as bytes.', argstr='-byte', xor=_xor_write) + desc="Write out data as bytes.", argstr="-byte", xor=_xor_write + ) write_short = traits.Bool( - desc='Write out data as short integers.', - argstr='-short', - xor=_xor_write) + desc="Write out data as short integers.", argstr="-short", xor=_xor_write + ) write_int = traits.Bool( - desc='Write out data as 32-bit integers.', - argstr='-int', - xor=_xor_write) + desc="Write out data as 32-bit integers.", argstr="-int", xor=_xor_write + ) write_long = traits.Bool( - desc='Superseded by write_int.', argstr='-long', xor=_xor_write) + desc="Superseded by write_int.", argstr="-long", xor=_xor_write + ) write_float = traits.Bool( - desc='Write out data as single precision floating-point values.', - argstr='-float', - xor=_xor_write) + desc="Write out data as single precision floating-point values.", + argstr="-float", + xor=_xor_write, + ) write_double = traits.Bool( - desc='Write out data as double precision floating-point values.', - argstr='-double', - xor=_xor_write) + desc="Write out data as double precision floating-point values.", + argstr="-double", + xor=_xor_write, + ) - _xor_signed = ('write_signed', 'write_unsigned') + _xor_signed = ("write_signed", "write_unsigned") write_signed = traits.Bool( - desc='Write out signed data.', argstr='-signed', xor=_xor_signed) + desc="Write out signed data.", argstr="-signed", xor=_xor_signed + ) write_unsigned = traits.Bool( - desc='Write out unsigned data.', argstr='-unsigned', xor=_xor_signed) + desc="Write out unsigned data.", argstr="-unsigned", xor=_xor_signed + ) - write_range = traits.Tuple( + write_range = Tuple( traits.Float, traits.Float, - argstr='-range %s %s', - desc=('Specify the range of output values.' - 'Default value: 1.79769e+308 1.79769e+308.'), + argstr="-range %s %s", + desc=( + "Specify the range of output values." + "Default value: 1.79769e+308 1.79769e+308." 
+        ),
    )
-    _xor_normalize = (
-        'normalize',
-        'nonormalize',
-    )
+    _xor_normalize = ("normalize", "nonormalize")
    normalize = traits.Bool(
-        desc='Normalize integer pixel values to file max and min.',
-        argstr='-normalize',
-        xor=_xor_normalize)
+        desc="Normalize integer pixel values to file max and min.",
+        argstr="-normalize",
+        xor=_xor_normalize,
+    )
    nonormalize = traits.Bool(
-        desc='Turn off pixel normalization.',
-        argstr='-nonormalize',
-        xor=_xor_normalize)
+        desc="Turn off pixel normalization.", argstr="-nonormalize", xor=_xor_normalize
+    )
class ToRawOutputSpec(TraitedSpec):
-    output_file = File(desc='output file in raw format', exists=True)
+    output_file = File(desc="output file in raw format", exists=True)
class ToRaw(StdOutCommandLine):
    """Dump a chunk of MINC file data. This program is largely
-    superceded by mincextract (see Extract).
+    superseded by mincextract (see Extract).
    Examples
    --------
@@ -339,40 +367,42 @@ class ToRaw(StdOutCommandLine):
    input_spec = ToRawInputSpec
    output_spec = ToRawOutputSpec
-    _cmd = 'minctoraw'
+    _cmd = "minctoraw"
class ConvertInputSpec(CommandLineInputSpec):
    input_file = File(
-        desc='input file for converting',
+        desc="input file for converting",
        exists=True,
        mandatory=True,
-        argstr='%s',
+        argstr="%s",
        position=-2,
    )
    output_file = File(
-        desc='output file',
+        desc="output file",
        genfile=True,
-        argstr='%s',
+        argstr="%s",
        position=-1,
-        name_source=['input_file'],
+        name_source=["input_file"],
        hash_files=False,
-        name_template='%s_convert_output.mnc')
+        name_template="%s_convert_output.mnc",
+    )
    clobber = traits.Bool(
-        desc='Overwrite existing file.',
-        argstr='-clobber',
+        desc="Overwrite existing file.",
+        argstr="-clobber",
        usedefault=True,
-        default_value=True)
-    two = traits.Bool(desc='Create a MINC 2 output file.', argstr='-2')
+        default_value=True,
+    )
+    two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2")
    template = traits.Bool(
-        desc=
-        ('Create a template file. The dimensions, variables, and'
-         'attributes of the input file are preserved but all data it set to zero.'
-         ),
-        argstr='-template',
+        desc=(
+            "Create a template file. The dimensions, variables, and"
+            " attributes of the input file are preserved but all data is set to zero."
+ ), + argstr="-template", ) compression = traits.Enum( @@ -386,20 +416,19 @@ class ConvertInputSpec(CommandLineInputSpec): 7, 8, 9, - argstr='-compress %s', - desc='Set the compression level, from 0 (disabled) to 9 (maximum).', + argstr="-compress %s", + desc="Set the compression level, from 0 (disabled) to 9 (maximum).", ) chunk = traits.Range( low=0, - desc= - 'Set the target block size for chunking (0 default, >1 block size).', - argstr='-chunk %d', + desc="Set the target block size for chunking (0 default, >1 block size).", + argstr="-chunk %d", ) class ConvertOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Convert(CommandLine): @@ -416,42 +445,39 @@ class Convert(CommandLine): input_spec = ConvertInputSpec output_spec = ConvertOutputSpec - _cmd = 'mincconvert' + _cmd = "mincconvert" class CopyInputSpec(CommandLineInputSpec): input_file = File( - desc='input file to copy', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file to copy", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_copy.mnc') + name_template="%s_copy.mnc", + ) - _xor_pixel = ('pixel_values', 'real_values') + _xor_pixel = ("pixel_values", "real_values") pixel_values = traits.Bool( - desc='Copy pixel values as is.', - argstr='-pixel_values', - xor=_xor_pixel) + desc="Copy pixel values as is.", argstr="-pixel_values", xor=_xor_pixel + ) real_values = traits.Bool( - desc='Copy real pixel intensities (default).', - argstr='-real_values', - xor=_xor_pixel) + desc="Copy real pixel intensities (default).", + argstr="-real_values", + xor=_xor_pixel, + ) class CopyOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Copy(CommandLine): @@ -468,72 +494,74 @@ class Copy(CommandLine): input_spec = CopyInputSpec output_spec = CopyOutputSpec - _cmd = 'minccopy' + _cmd = "minccopy" class ToEcatInputSpec(CommandLineInputSpec): input_file = File( - desc='input file to convert', + desc="input file to convert", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_to_ecat.v', - keep_extension=False) + name_template="%s_to_ecat.v", + keep_extension=False, + ) ignore_patient_variable = traits.Bool( - desc='Ignore informations from the minc patient variable.', - argstr='-ignore_patient_variable', + desc="Ignore information from the minc patient variable.", + argstr="-ignore_patient_variable", ) ignore_study_variable = traits.Bool( - desc='Ignore informations from the minc study variable.', - argstr='-ignore_study_variable', + desc="Ignore information from the minc study variable.", + argstr="-ignore_study_variable", ) ignore_acquisition_variable = traits.Bool( - desc='Ignore informations from the minc acquisition variable.', - argstr='-ignore_acquisition_variable', + desc="Ignore information from the minc acquisition variable.", + argstr="-ignore_acquisition_variable", ) ignore_ecat_acquisition_variable = traits.Bool( - desc='Ignore informations from the minc ecat_acquisition 
variable.', - argstr='-ignore_ecat_acquisition_variable', + desc="Ignore information from the minc ecat_acquisition variable.", + argstr="-ignore_ecat_acquisition_variable", ) ignore_ecat_main = traits.Bool( - desc='Ignore informations from the minc ecat-main variable.', - argstr='-ignore_ecat_main', + desc="Ignore information from the minc ecat-main variable.", + argstr="-ignore_ecat_main", ) ignore_ecat_subheader_variable = traits.Bool( - desc='Ignore informations from the minc ecat-subhdr variable.', - argstr='-ignore_ecat_subheader_variable', + desc="Ignore information from the minc ecat-subhdr variable.", + argstr="-ignore_ecat_subheader_variable", ) no_decay_corr_fctr = traits.Bool( - desc='Do not compute the decay correction factors', - argstr='-no_decay_corr_fctr', + desc="Do not compute the decay correction factors", argstr="-no_decay_corr_fctr" ) voxels_as_integers = traits.Bool( - desc=('Voxel values are treated as integers, scale and' - 'calibration factors are set to unity'), - argstr='-label', + desc=( + "Voxel values are treated as integers, scale and" + "calibration factors are set to unity" + ), + argstr="-label", ) class ToEcatOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class ToEcat(CommandLine): @@ -556,85 +584,78 @@ class ToEcat(CommandLine): input_spec = ToEcatInputSpec output_spec = ToEcatOutputSpec - _cmd = 'minctoecat' + _cmd = "minctoecat" class DumpInputSpec(StdOutCommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( - desc='output file', + desc="output file", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_dump.txt', - keep_extension=False) - - _xor_coords_or_header = ( - 'coordinate_data', - 'header_data', + name_template="%s_dump.txt", + keep_extension=False, ) + _xor_coords_or_header = ("coordinate_data", "header_data") + coordinate_data = traits.Bool( - desc='Coordinate variable data and header information.', - argstr='-c', - xor=_xor_coords_or_header) + desc="Coordinate variable data and header information.", + argstr="-c", + xor=_xor_coords_or_header, + ) header_data = traits.Bool( - desc='Header information only, no data.', - argstr='-h', - xor=_xor_coords_or_header) - - _xor_annotations = ( - 'annotations_brief', - 'annotations_full', + desc="Header information only, no data.", argstr="-h", xor=_xor_coords_or_header ) + _xor_annotations = ("annotations_brief", "annotations_full") + annotations_brief = traits.Enum( - 'c', - 'f', - argstr='-b %s', - desc='Brief annotations for C or Fortran indices in data.', - xor=_xor_annotations) + "c", + "f", + argstr="-b %s", + desc="Brief annotations for C or Fortran indices in data.", + xor=_xor_annotations, + ) annotations_full = traits.Enum( - 'c', - 'f', - argstr='-f %s', - desc='Full annotations for C or Fortran indices in data.', - xor=_xor_annotations) + "c", + "f", + argstr="-f %s", + desc="Full annotations for C or Fortran indices in data.", + xor=_xor_annotations, + ) variables = InputMultiPath( traits.Str, - desc='Output data for specified variables only.', - sep=',', - argstr='-v %s') + desc="Output data for specified variables only.", + sep=",", + argstr="-v %s", + ) line_length = traits.Range( - low=0, - desc='Line length maximum in data section (default 80).', - argstr='-l %d') + low=0, 
desc="Line length maximum in data section (default 80).", argstr="-l %d" + ) netcdf_name = traits.Str( - desc='Name for netCDF (default derived from file name).', - argstr='-n %s') + desc="Name for netCDF (default derived from file name).", argstr="-n %s" + ) precision = traits.Either( traits.Int(), - traits.Tuple(traits.Int, traits.Int), - desc='Display floating-point values with less precision', - argstr='%s', + Tuple(traits.Int, traits.Int), + desc="Display floating-point values with less precision", + argstr="%s", ) # See _format_arg in Dump for actual formatting. class DumpOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Dump(StdOutCommandLine): @@ -656,208 +677,218 @@ class Dump(StdOutCommandLine): input_spec = DumpInputSpec output_spec = DumpOutputSpec - _cmd = 'mincdump' + _cmd = "mincdump" def _format_arg(self, name, spec, value): - if name == 'precision': + if name == "precision": if isinstance(value, int): - return '-p %d' % value - elif isinstance(value, tuple) and isinstance( - value[0], int) and isinstance(value[1], int): - return '-p %d,%d' % ( - value[0], - value[1], - ) + return "-p %d" % value + elif ( + isinstance(value, tuple) + and isinstance(value[0], int) + and isinstance(value[1], int) + ): + return "-p %d,%d" % (value[0], value[1]) else: - raise ValueError('Invalid precision argument: ' + str(value)) - return super(Dump, self)._format_arg(name, spec, value) + raise ValueError("Invalid precision argument: " + str(value)) + return super()._format_arg(name, spec, value) class AverageInputSpec(CommandLineInputSpec): - _xor_input_files = ('input_files', 'filelist') + _xor_input_files = ("input_files", "filelist") input_files = InputMultiPath( - traits.File(exists=True), - desc='input file(s)', + File(exists=True), + desc="input file(s)", mandatory=True, - sep=' ', - argstr='%s', + sep=" ", + argstr="%s", position=-2, - xor=_xor_input_files) + xor=_xor_input_files, + ) - filelist = traits.File( - desc='Specify the name of a file containing input file names.', - argstr='-filelist %s', + filelist = File( + desc="Specify the name of a file containing input file names.", + argstr="-filelist %s", exists=True, mandatory=True, - xor=_xor_input_files) + xor=_xor_input_files, + ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_files'], + name_source=["input_files"], hash_files=False, - name_template='%s_averaged.mnc') + name_template="%s_averaged.mnc", + ) - two = traits.Bool(desc='Create a MINC 2 output file.', argstr='-2') + two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) - - _xor_verbose = ( - 'verbose', - 'quiet', + default_value=True, ) + _xor_verbose = ("verbose", "quiet") + verbose = traits.Bool( - desc='Print out log messages (default).', - argstr='-verbose', - xor=_xor_verbose) + desc="Print out log messages (default).", argstr="-verbose", xor=_xor_verbose + ) quiet = traits.Bool( - desc='Do not print out log messages.', - argstr='-quiet', - xor=_xor_verbose) + desc="Do not print out log messages.", argstr="-quiet", xor=_xor_verbose + ) - debug = traits.Bool(desc='Print out debugging messages.', argstr='-debug') + debug = traits.Bool(desc="Print out debugging messages.", argstr="-debug") - 
_xor_check_dimensions = ( - 'check_dimensions', - 'no_check_dimensions', - ) + _xor_check_dimensions = ("check_dimensions", "no_check_dimensions") check_dimensions = traits.Bool( - desc='Check that dimension info matches across files (default).', - argstr='-check_dimensions', - xor=_xor_check_dimensions) + desc="Check that dimension info matches across files (default).", + argstr="-check_dimensions", + xor=_xor_check_dimensions, + ) no_check_dimensions = traits.Bool( - desc='Do not check dimension info.', - argstr='-nocheck_dimensions', - xor=_xor_check_dimensions) + desc="Do not check dimension info.", + argstr="-nocheck_dimensions", + xor=_xor_check_dimensions, + ) _xor_format = ( - 'format_filetype', - 'format_byte', - 'format_short', - 'format_int', - 'format_long', - 'format_float', - 'format_double', - 'format_signed', - 'format_unsigned', + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", ) format_filetype = traits.Bool( - desc='Use data type of first file (default).', - argstr='-filetype', - xor=_xor_format) + desc="Use data type of first file (default).", + argstr="-filetype", + xor=_xor_format, + ) format_byte = traits.Bool( - desc='Write out byte data.', argstr='-byte', xor=_xor_format) + desc="Write out byte data.", argstr="-byte", xor=_xor_format + ) format_short = traits.Bool( - desc='Write out short integer data.', argstr='-short', xor=_xor_format) + desc="Write out short integer data.", argstr="-short", xor=_xor_format + ) format_int = traits.Bool( - desc='Write out 32-bit integer data.', argstr='-int', xor=_xor_format) + desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format + ) format_long = traits.Bool( - desc='Superseded by -int.', argstr='-long', xor=_xor_format) + desc="Superseded by -int.", argstr="-long", xor=_xor_format + ) format_float = traits.Bool( - desc='Write out single-precision floating-point data.', - argstr='-float', - xor=_xor_format) + desc="Write out single-precision floating-point data.", + argstr="-float", + xor=_xor_format, + ) format_double = traits.Bool( - desc='Write out double-precision floating-point data.', - argstr='-double', - xor=_xor_format) + desc="Write out double-precision floating-point data.", + argstr="-double", + xor=_xor_format, + ) format_signed = traits.Bool( - desc='Write signed integer data.', argstr='-signed', xor=_xor_format) + desc="Write signed integer data.", argstr="-signed", xor=_xor_format + ) format_unsigned = traits.Bool( - desc='Write unsigned integer data (default).', - argstr='-unsigned', - xor=_xor_format) + desc="Write unsigned integer data (default).", + argstr="-unsigned", + xor=_xor_format, + ) max_buffer_size_in_kb = traits.Range( low=0, - desc='Specify the maximum size of the internal buffers (in kbytes).', + desc="Specify the maximum size of the internal buffers (in kbytes).", value=4096, usedefault=True, - argstr='-max_buffer_size_in_kb %d', + argstr="-max_buffer_size_in_kb %d", ) - _xor_normalize = ( - 'normalize', - 'nonormalize', - ) + _xor_normalize = ("normalize", "nonormalize") normalize = traits.Bool( - desc='Normalize data sets for mean intensity.', - argstr='-normalize', - xor=_xor_normalize) + desc="Normalize data sets for mean intensity.", + argstr="-normalize", + xor=_xor_normalize, + ) nonormalize = traits.Bool( - desc='Do not normalize data sets (default).', - argstr='-nonormalize', - xor=_xor_normalize) + desc="Do not normalize data sets (default).", + 
argstr="-nonormalize", + xor=_xor_normalize, + ) - voxel_range = traits.Tuple( + voxel_range = Tuple( traits.Int, traits.Int, - argstr='-range %d %d', - desc='Valid range for output data.') + argstr="-range %d %d", + desc="Valid range for output data.", + ) - sdfile = traits.File( - desc='Specify an output sd file (default=none).', argstr='-sdfile %s') + sdfile = File(desc="Specify an output sd file (default=none).", argstr="-sdfile %s") - _xor_copy_header = ('copy_header', 'no_copy_header') + _xor_copy_header = ("copy_header", "no_copy_header") copy_header = traits.Bool( - desc= - 'Copy all of the header from the first file (default for one file).', - argstr='-copy_header', - xor=_xor_copy_header) + desc="Copy all of the header from the first file (default for one file).", + argstr="-copy_header", + xor=_xor_copy_header, + ) no_copy_header = traits.Bool( - desc= - 'Do not copy all of the header from the first file (default for many files)).', - argstr='-nocopy_header', - xor=_xor_copy_header) + desc="Do not copy all of the header from the first file (default for many files)).", + argstr="-nocopy_header", + xor=_xor_copy_header, + ) avgdim = traits.Str( - desc='Specify a dimension along which we wish to average.', - argstr='-avgdim %s') + desc="Specify a dimension along which we wish to average.", argstr="-avgdim %s" + ) binarize = traits.Bool( - desc='Binarize the volume by looking for values in a given range.', - argstr='-binarize') + desc="Binarize the volume by looking for values in a given range.", + argstr="-binarize", + ) - binrange = traits.Tuple( + binrange = Tuple( traits.Float, traits.Float, - argstr='-binrange %s %s', - desc= - 'Specify a range for binarization. Default value: 1.79769e+308 -1.79769e+308.' + argstr="-binrange %s %s", + desc="Specify a range for binarization. Default value: 1.79769e+308 -1.79769e+308.", ) binvalue = traits.Float( - desc=('Specify a target value (+/- 0.5) for' - 'binarization. Default value: -1.79769e+308'), - argstr='-binvalue %s') + desc=( + "Specify a target value (+/- 0.5) for" + "binarization. 
Default value: -1.79769e+308" + ), + argstr="-binvalue %s", + ) weights = InputMultiPath( traits.Str, desc='Specify weights for averaging (",,...").', - sep=',', - argstr='-weights %s', + sep=",", + argstr="-weights %s", ) width_weighted = traits.Bool( - desc='Weight by dimension widths when -avgdim is used.', - argstr='-width_weighted', - requires=('avgdim', )) + desc="Weight by dimension widths when -avgdim is used.", + argstr="-width_weighted", + requires=("avgdim",), + ) class AverageOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Average(CommandLine): @@ -877,43 +908,42 @@ class Average(CommandLine): input_spec = AverageInputSpec output_spec = AverageOutputSpec - _cmd = 'mincaverage' + _cmd = "mincaverage" class BlobInputSpec(CommandLineInputSpec): input_file = File( - desc='input file to blob', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file to blob", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_blob.mnc') + name_template="%s_blob.mnc", + ) trace = traits.Bool( - desc='compute the trace (approximate growth and shrinkage) -- FAST', - argstr='-trace') + desc="compute the trace (approximate growth and shrinkage) -- FAST", + argstr="-trace", + ) determinant = traits.Bool( - desc='compute the determinant (exact growth and shrinkage) -- SLOW', - argstr='-determinant') + desc="compute the determinant (exact growth and shrinkage) -- SLOW", + argstr="-determinant", + ) translation = traits.Bool( - desc='compute translation (structure displacement)', - argstr='-translation') + desc="compute translation (structure displacement)", argstr="-translation" + ) magnitude = traits.Bool( - desc='compute the magnitude of the displacement vector', - argstr='-magnitude') + desc="compute the magnitude of the displacement vector", argstr="-magnitude" + ) class BlobOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Blob(CommandLine): @@ -931,198 +961,213 @@ class Blob(CommandLine): input_spec = BlobInputSpec output_spec = BlobOutputSpec - _cmd = 'mincblob' + _cmd = "mincblob" class CalcInputSpec(CommandLineInputSpec): - _xor_input_files = ('input_files', 'filelist') + _xor_input_files = ("input_files", "filelist") input_files = InputMultiPath( - traits.File(exists=True), - desc='input file(s) for calculation', + File(exists=True), + desc="input file(s) for calculation", mandatory=True, - sep=' ', - argstr='%s', + sep=" ", + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_files'], + name_source=["input_files"], hash_files=False, - name_template='%s_calc.mnc') + name_template="%s_calc.mnc", + ) - two = traits.Bool(desc='Create a MINC 2 output file.', argstr='-2') + two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) - - _xor_verbose = ( - 'verbose', - 'quiet', + default_value=True, ) + _xor_verbose = ("verbose", "quiet") + verbose = traits.Bool( - desc='Print out log 
messages (default).', - argstr='-verbose', - xor=_xor_verbose) + desc="Print out log messages (default).", argstr="-verbose", xor=_xor_verbose + ) quiet = traits.Bool( - desc='Do not print out log messages.', - argstr='-quiet', - xor=_xor_verbose) + desc="Do not print out log messages.", argstr="-quiet", xor=_xor_verbose + ) - debug = traits.Bool(desc='Print out debugging messages.', argstr='-debug') + debug = traits.Bool(desc="Print out debugging messages.", argstr="-debug") - filelist = traits.File( - desc='Specify the name of a file containing input file names.', - argstr='-filelist %s', + filelist = File( + desc="Specify the name of a file containing input file names.", + argstr="-filelist %s", mandatory=True, - xor=_xor_input_files) + xor=_xor_input_files, + ) - _xor_copy_header = ('copy_header', 'no_copy_header') + _xor_copy_header = ("copy_header", "no_copy_header") copy_header = traits.Bool( - desc='Copy all of the header from the first file.', - argstr='-copy_header', - xor=_xor_copy_header) + desc="Copy all of the header from the first file.", + argstr="-copy_header", + xor=_xor_copy_header, + ) no_copy_header = traits.Bool( - desc='Do not copy all of the header from the first file.', - argstr='-nocopy_header', - xor=_xor_copy_header) + desc="Do not copy all of the header from the first file.", + argstr="-nocopy_header", + xor=_xor_copy_header, + ) _xor_format = ( - 'format_filetype', - 'format_byte', - 'format_short', - 'format_int', - 'format_long', - 'format_float', - 'format_double', - 'format_signed', - 'format_unsigned', + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", ) format_filetype = traits.Bool( - desc='Use data type of first file (default).', - argstr='-filetype', - xor=_xor_format) + desc="Use data type of first file (default).", + argstr="-filetype", + xor=_xor_format, + ) format_byte = traits.Bool( - desc='Write out byte data.', argstr='-byte', xor=_xor_format) + desc="Write out byte data.", argstr="-byte", xor=_xor_format + ) format_short = traits.Bool( - desc='Write out short integer data.', argstr='-short', xor=_xor_format) + desc="Write out short integer data.", argstr="-short", xor=_xor_format + ) format_int = traits.Bool( - desc='Write out 32-bit integer data.', argstr='-int', xor=_xor_format) + desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format + ) format_long = traits.Bool( - desc='Superseded by -int.', argstr='-long', xor=_xor_format) + desc="Superseded by -int.", argstr="-long", xor=_xor_format + ) format_float = traits.Bool( - desc='Write out single-precision floating-point data.', - argstr='-float', - xor=_xor_format) + desc="Write out single-precision floating-point data.", + argstr="-float", + xor=_xor_format, + ) format_double = traits.Bool( - desc='Write out double-precision floating-point data.', - argstr='-double', - xor=_xor_format) + desc="Write out double-precision floating-point data.", + argstr="-double", + xor=_xor_format, + ) format_signed = traits.Bool( - desc='Write signed integer data.', argstr='-signed', xor=_xor_format) + desc="Write signed integer data.", argstr="-signed", xor=_xor_format + ) format_unsigned = traits.Bool( - desc='Write unsigned integer data (default).', - argstr='-unsigned', - xor=_xor_format) + desc="Write unsigned integer data (default).", + argstr="-unsigned", + xor=_xor_format, + ) - voxel_range = traits.Tuple( + voxel_range = Tuple( traits.Int, traits.Int, - argstr='-range %d 
%d', - desc='Valid range for output data.', + argstr="-range %d %d", + desc="Valid range for output data.", ) max_buffer_size_in_kb = traits.Range( low=0, - desc='Specify the maximum size of the internal buffers (in kbytes).', - argstr='-max_buffer_size_in_kb %d') - - _xor_check_dimensions = ( - 'check_dimensions', - 'no_check_dimensions', + desc="Specify the maximum size of the internal buffers (in kbytes).", + argstr="-max_buffer_size_in_kb %d", ) + _xor_check_dimensions = ("check_dimensions", "no_check_dimensions") + check_dimensions = traits.Bool( - desc='Check that files have matching dimensions (default).', - argstr='-check_dimensions', - xor=_xor_check_dimensions) + desc="Check that files have matching dimensions (default).", + argstr="-check_dimensions", + xor=_xor_check_dimensions, + ) no_check_dimensions = traits.Bool( - desc='Do not check that files have matching dimensions.', - argstr='-nocheck_dimensions', - xor=_xor_check_dimensions) + desc="Do not check that files have matching dimensions.", + argstr="-nocheck_dimensions", + xor=_xor_check_dimensions, + ) # FIXME Is it sensible to use ignore_nan and propagate_nan at the same # time? Document this. ignore_nan = traits.Bool( - desc='Ignore invalid data (NaN) for accumulations.', - argstr='-ignore_nan') + desc="Ignore invalid data (NaN) for accumulations.", argstr="-ignore_nan" + ) propagate_nan = traits.Bool( - desc='Invalid data in any file at a voxel produces a NaN (default).', - argstr='-propagate_nan') + desc="Invalid data in any file at a voxel produces a NaN (default).", + argstr="-propagate_nan", + ) # FIXME Double-check that these are mutually exclusive? - _xor_nan_zero_illegal = ('output_nan', 'output_zero', - 'output_illegal_value') + _xor_nan_zero_illegal = ("output_nan", "output_zero", "output_illegal_value") output_nan = traits.Bool( - desc='Output NaN when an illegal operation is done (default).', - argstr='-nan', - xor=_xor_nan_zero_illegal) + desc="Output NaN when an illegal operation is done (default).", + argstr="-nan", + xor=_xor_nan_zero_illegal, + ) output_zero = traits.Bool( - desc='Output zero when an illegal operation is done.', - argstr='-zero', - xor=_xor_nan_zero_illegal) + desc="Output zero when an illegal operation is done.", + argstr="-zero", + xor=_xor_nan_zero_illegal, + ) output_illegal = traits.Bool( - desc= - 'Value to write out when an illegal operation is done. Default value: 1.79769e+308', - argstr='-illegal_value', - xor=_xor_nan_zero_illegal) + desc="Value to write out when an illegal operation is done. Default value: 1.79769e+308", + argstr="-illegal_value", + xor=_xor_nan_zero_illegal, + ) - _xor_expression = ('expression', 'expfile') + _xor_expression = ("expression", "expfile") expression = traits.Str( - desc='Expression to use in calculations.', - argstr='-expression \'%s\'', + desc="Expression to use in calculations.", + argstr="-expression '%s'", xor=_xor_expression, - mandatory=True) - expfile = traits.File( - desc='Name of file containing expression.', - argstr='-expfile %s', + mandatory=True, + ) + expfile = File( + desc="Name of file containing expression.", + argstr="-expfile %s", xor=_xor_expression, - mandatory=True) + mandatory=True, + ) # FIXME test this one, the argstr will probably need tweaking, see # _format_arg. 
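# A minimal sketch of Calc with an inline expression (hypothetical file names;
# the expression uses minccalc syntax, in which A[0], A[1], ... refer to the
# input volumes in order):
#
#   >>> from nipype.interfaces.minc import Calc
#   >>> calc = Calc(input_files=["file0.mnc", "file1.mnc"],
#   ...             output_file="sum.mnc",
#   ...             expression="A[0] + A[1]")  # voxel-wise sum
#   >>> calc.run()  # doctest: +SKIP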
outfiles = traits.List( - traits.Tuple( + Tuple( traits.Str, - traits.File, - argstr='-outfile %s %s', - desc= - ('List of (symbol, file) tuples indicating that output should be written' - 'to the specified file, taking values from the symbol which should be' - 'created in the expression (see the EXAMPLES section). If this option' - 'is given, then all non-option arguments are taken as input files.' - 'This option can be used multiple times for multiple output files.' - ))) + File, + argstr="-outfile %s %s", + desc=( + "List of (symbol, file) tuples indicating that output should be written" + "to the specified file, taking values from the symbol which should be" + "created in the expression (see the EXAMPLES section). If this option" + "is given, then all non-option arguments are taken as input files." + "This option can be used multiple times for multiple output files." + ), + ) + ) eval_width = traits.Int( - desc='Number of voxels to evaluate simultaneously.', - argstr='-eval_width %s') + desc="Number of voxels to evaluate simultaneously.", argstr="-eval_width %s" + ) class CalcOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Calc(CommandLine): @@ -1142,7 +1187,7 @@ class Calc(CommandLine): input_spec = CalcInputSpec output_spec = CalcOutputSpec - _cmd = 'minccalc' + _cmd = "minccalc" # FIXME mincbbox produces output like @@ -1155,47 +1200,49 @@ class Calc(CommandLine): class BBoxInputSpec(StdOutCommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( - desc='output file containing bounding box corners', + desc="output file containing bounding box corners", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_bbox.txt', - keep_extension=False) + name_template="%s_bbox.txt", + keep_extension=False, + ) threshold = traits.Int( 0, - desc='VIO_Real value threshold for bounding box. Default value: 0.', - argstr='-threshold') + desc="VIO_Real value threshold for bounding box. 
Default value: 0.", + argstr="-threshold", + ) - _xor_one_two = ('one_line', 'two_lines') + _xor_one_two = ("one_line", "two_lines") one_line = traits.Bool( - desc='Output on one line (default): start_x y z width_x y z', - argstr='-one_line', - xor=_xor_one_two) + desc="Output on one line (default): start_x y z width_x y z", + argstr="-one_line", + xor=_xor_one_two, + ) two_lines = traits.Bool( - desc='Output on two lines: start_x y z \n width_x y z', - argstr='-two_lines', - xor=_xor_one_two) + desc="""Write output with two rows (start and width).""", + argstr="-two_lines", + xor=_xor_one_two, + ) format_mincresample = traits.Bool( - desc= - 'Output format for mincresample: (-step x y z -start x y z -nelements x y z', - argstr='-mincresample') + desc="Output format for mincresample: (-step x y z -start x y z -nelements x y z", + argstr="-mincresample", + ) format_mincreshape = traits.Bool( - desc='Output format for mincreshape: (-start x,y,z -count dx,dy,dz', - argstr='-mincreshape') + desc="Output format for mincreshape: (-start x,y,z -count dx,dy,dz", + argstr="-mincreshape", + ) format_minccrop = traits.Bool( - desc='Output format for minccrop: (-xlim x1 x2 -ylim y1 y2 -zlim z1 z2', - argstr='-minccrop') + desc="Output format for minccrop: (-xlim x1 x2 -ylim y1 y2 -zlim z1 z2", + argstr="-minccrop", + ) # FIXME Not implemented, will clash with our parsing of the output? # Command-specific options: @@ -1206,8 +1253,7 @@ class BBoxInputSpec(StdOutCommandLineInputSpec): class BBoxOutputSpec(TraitedSpec): - output_file = File( - desc='output file containing bounding box corners', exists=True) + output_file = File(desc="output file containing bounding box corners", exists=True) class BBox(StdOutCommandLine): @@ -1215,18 +1261,18 @@ class BBox(StdOutCommandLine): Examples -------- - >>> from nipype.interfaces.minc import BBox >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> file0 = nonempty_minc_data(0) >>> bbox = BBox(input_file=file0) >>> bbox.run() # doctest: +SKIP + """ input_spec = BBoxInputSpec output_spec = BBoxOutputSpec - _cmd = 'mincbbox' + _cmd = "mincbbox" class BeastInputSpec(CommandLineInputSpec): @@ -1239,13 +1285,13 @@ class BeastInputSpec(CommandLineInputSpec): -positive: Specify mask of positive segmentation (inside mask) instead of the default mask. -output_selection: Specify file to output selected files. -count: Specify file to output the patch count. - -mask: Specify a segmentation mask instead of the the default mask. + -mask: Specify a segmentation mask instead of the default mask. -no_mask: Do not apply a segmentation mask. Perform the segmentation over the entire image. -no_positive: Do not apply a positive mask. Generic options for all commands: -help: Print summary of command-line options and abort -version: Print version number of program and exit - Copyright (C) 2011 Simon Fristed Eskildsen, Vladimir Fonov, + Copyright (C) 2011 Simon Fristed Eskildsen, Vladimir Fonov, Pierrick Coupe, Jose V. Manjon This program comes with ABSOLUTELY NO WARRANTY; for details type 'cat COPYING'. 
@@ -1276,94 +1322,117 @@ class BeastInputSpec(CommandLineInputSpec): """ probability_map = traits.Bool( - desc='Output the probability map instead of crisp mask.', - argstr='-probability') + desc="Output the probability map instead of crisp mask.", argstr="-probability" + ) flip_images = traits.Bool( - desc= - 'Flip images around the mid-sagittal plane to increase patch count.', - argstr='-flip') + desc="Flip images around the mid-sagittal plane to increase patch count.", + argstr="-flip", + ) load_moments = traits.Bool( - desc=('Do not calculate moments instead use precalculated' - 'library moments. (for optimization purposes)'), - argstr='-load_moments') - fill_holes = traits.Bool( - desc='Fill holes in the binary output.', argstr='-fill') + desc=( + "Do not calculate moments instead use precalculated" + "library moments. (for optimization purposes)" + ), + argstr="-load_moments", + ) + fill_holes = traits.Bool(desc="Fill holes in the binary output.", argstr="-fill") median_filter = traits.Bool( - desc='Apply a median filter on the probability map.', argstr='-median') + desc="Apply a median filter on the probability map.", argstr="-median" + ) nlm_filter = traits.Bool( - desc='Apply an NLM filter on the probability map (experimental).', - argstr='-nlm_filter') + desc="Apply an NLM filter on the probability map (experimental).", + argstr="-nlm_filter", + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) - configuration_file = traits.File( - desc='Specify configuration file.', argstr='-configuration %s') + configuration_file = File( + desc="Specify configuration file.", argstr="-configuration %s" + ) voxel_size = traits.Int( - 4, usedefault=True, - desc=('Specify voxel size for calculations (4, 2, or 1).' - 'Default value: 4. Assumes no multiscale. Use configuration' - 'file for multiscale.'), - argstr='-voxel_size %s') + 4, + usedefault=True, + desc=( + "Specify voxel size for calculations (4, 2, or 1)." + "Default value: 4. Assumes no multiscale. Use configuration" + "file for multiscale." + ), + argstr="-voxel_size %s", + ) abspath = traits.Bool( - desc= - 'File paths in the library are absolute (default is relative to library root).', - argstr='-abspath', + desc="File paths in the library are absolute (default is relative to library root).", + argstr="-abspath", usedefault=True, - default_value=True) + default_value=True, + ) patch_size = traits.Int( - 1, usedefault=True, - desc='Specify patch size for single scale approach. Default value: 1.', - argstr='-patch_size %s') + 1, + usedefault=True, + desc="Specify patch size for single scale approach. Default value: 1.", + argstr="-patch_size %s", + ) search_area = traits.Int( - 2, usedefault=True, - desc= - 'Specify size of search area for single scale approach. Default value: 2.', - argstr='-search_area %s') + 2, + usedefault=True, + desc="Specify size of search area for single scale approach. Default value: 2.", + argstr="-search_area %s", + ) confidence_level_alpha = traits.Float( - 0.5, usedefault=True, - desc='Specify confidence level Alpha. Default value: 0.5', - argstr='-alpha %s') + 0.5, + usedefault=True, + desc="Specify confidence level Alpha. Default value: 0.5", + argstr="-alpha %s", + ) smoothness_factor_beta = traits.Float( - 0.5, usedefault=True, - desc='Specify smoothness factor Beta. 
Default value: 0.25', - argstr='-beta %s') + 0.5, + usedefault=True, + desc="Specify smoothness factor Beta. Default value: 0.25", + argstr="-beta %s", + ) threshold_patch_selection = traits.Float( - 0.95, usedefault=True, - desc='Specify threshold for patch selection. Default value: 0.95', - argstr='-threshold %s') + 0.95, + usedefault=True, + desc="Specify threshold for patch selection. Default value: 0.95", + argstr="-threshold %s", + ) number_selected_images = traits.Int( - 20, usedefault=True, - desc='Specify number of selected images. Default value: 20', - argstr='-selection_num %s') + 20, + usedefault=True, + desc="Specify number of selected images. Default value: 20", + argstr="-selection_num %s", + ) same_resolution = traits.Bool( - desc='Output final mask with the same resolution as input file.', - argstr='-same_resolution') - - library_dir = traits.Directory( - desc='library directory', position=-3, argstr='%s', mandatory=True) - input_file = traits.File( - desc='input file', position=-2, argstr='%s', mandatory=True) - output_file = traits.File( - desc='output file', + desc="Output final mask with the same resolution as input file.", + argstr="-same_resolution", + ) + + library_dir = Directory( + desc="library directory", position=-3, argstr="%s", mandatory=True + ) + input_file = File(desc="input file", position=-2, argstr="%s", mandatory=True) + output_file = File( + desc="output file", position=-1, - argstr='%s', - name_source=['input_file'], + argstr="%s", + name_source=["input_file"], hash_files=False, - name_template='%s_beast_mask.mnc') + name_template="%s_beast_mask.mnc", + ) class BeastOutputSpec(TraitedSpec): - output_file = File(desc='output mask file', exists=True) + output_file = File(desc="output mask file", exists=True) class Beast(CommandLine): @@ -1383,150 +1452,158 @@ class Beast(CommandLine): input_spec = BeastInputSpec output_spec = BeastOutputSpec - _cmd = 'mincbeast' + _cmd = "mincbeast" class PikInputSpec(CommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) - _xor_image_type = ('jpg', 'png') + _xor_image_type = ("jpg", "png") - jpg = traits.Bool(desc='Output a jpg file.', xor=_xor_image_type) - png = traits.Bool(desc='Output a png file (default).', xor=_xor_image_type) + jpg = traits.Bool(desc="Output a jpg file.", xor=_xor_image_type) + png = traits.Bool(desc="Output a png file (default).", xor=_xor_image_type) output_file = File( - desc='output file', - argstr='%s', + desc="output file", + argstr="%s", genfile=True, position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s.png', - keep_extension=False) + name_template="%s.png", + keep_extension=False, + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME not implemented: --verbose # --fake # --lookup ==> arguments to pass to minclookup scale = traits.Int( - 2, usedefault=True, - desc=('Scaling factor for resulting image. By default images are' - 'output at twice their original resolution.'), - argstr='--scale %s') + 2, + usedefault=True, + desc=( + "Scaling factor for resulting image. By default images are" + "output at twice their original resolution." 
+ ), + argstr="--scale %s", + ) width = traits.Int( - desc= - 'Autoscale the resulting image to have a fixed image width (in pixels).', - argstr='--width %s') + desc="Autoscale the resulting image to have a fixed image width (in pixels).", + argstr="--width %s", + ) depth = traits.Enum( 8, 16, - desc='Bitdepth for resulting image 8 or 16 (MSB machines only!)', - argstr='--depth %s') + desc="Bitdepth for resulting image 8 or 16 (MSB machines only!)", + argstr="--depth %s", + ) - _xor_title = ('title_string', 'title_with_filename') + _xor_title = ("title_string", "title_with_filename") title = traits.Either( - traits.Bool(desc='Use input filename as title in resulting image.'), - traits.Str(desc='Add a title to the resulting image.'), - argstr='%s') # see _format_arg for actual arg string + traits.Bool(desc="Use input filename as title in resulting image."), + traits.Str(desc="Add a title to the resulting image."), + argstr="%s", + ) # see _format_arg for actual arg string title_size = traits.Int( - desc='Font point size for the title.', - argstr='--title_size %s', - requires=['title']) + desc="Font point size for the title.", + argstr="--title_size %s", + requires=["title"], + ) annotated_bar = traits.Bool( - desc= - 'create an annotated bar to match the image (use height of the output image)', - argstr='--anot_bar') + desc="create an annotated bar to match the image (use height of the output image)", + argstr="--anot_bar", + ) # FIXME tuple of floats? Not voxel values? Man page doesn't specify. - minc_range = traits.Tuple( + minc_range = Tuple( traits.Float, traits.Float, - desc='Valid range of values for MINC file.', - argstr='--range %s %s') + desc="Valid range of values for MINC file.", + argstr="--range %s %s", + ) - _xor_image_range = ('image_range', 'auto_range') + _xor_image_range = ("image_range", "auto_range") - image_range = traits.Tuple( + image_range = Tuple( traits.Float, traits.Float, - desc='Range of image values to use for pixel intensity.', - argstr='--image_range %s %s', - xor=_xor_image_range) + desc="Range of image values to use for pixel intensity.", + argstr="--image_range %s %s", + xor=_xor_image_range, + ) auto_range = traits.Bool( - desc= - 'Automatically determine image range using a 5 and 95% PcT. (histogram)', - argstr='--auto_range', - xor=_xor_image_range) + desc="Automatically determine image range using a 5 and 95% PcT. (histogram)", + argstr="--auto_range", + xor=_xor_image_range, + ) start = traits.Int( - desc='Slice number to get. (note this is in voxel co-ordinates).', - argstr='--slice %s') # FIXME Int is correct? + desc="Slice number to get. (note this is in voxel coordinates).", + argstr="--slice %s", + ) # FIXME Int is correct? - _xor_slice = ('slice_z', 'slice_y', 'slice_x') + _xor_slice = ("slice_z", "slice_y", "slice_x") slice_z = traits.Bool( - desc='Get an axial/transverse (z) slice.', argstr='-z', xor=_xor_slice) - slice_y = traits.Bool( - desc='Get a coronal (y) slice.', argstr='-y', xor=_xor_slice) + desc="Get an axial/transverse (z) slice.", argstr="-z", xor=_xor_slice + ) + slice_y = traits.Bool(desc="Get a coronal (y) slice.", argstr="-y", xor=_xor_slice) slice_x = traits.Bool( - desc='Get a sagittal (x) slice.', argstr='-x', - xor=_xor_slice) # FIXME typo in man page? sagital? + desc="Get a sagittal (x) slice.", argstr="-x", xor=_xor_slice + ) # FIXME typo in man page? sagittal? 
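Because title is traits.Either(Bool, Str), the rendered flag depends on the value's type: True emits a bare --title, while a string additionally emits --title_text (see _format_arg below). A small sketch, reusing the test-data helper from the BBox doctest:

>>> from nipype.interfaces.minc import Pik
>>> from nipype.interfaces.minc.testdata import nonempty_minc_data
>>> pik = Pik(input_file=nonempty_minc_data(0), title='T1 slice', slice_z=True)
>>> pik.run()  # doctest: +SKIP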
triplanar = traits.Bool( - desc='Create a triplanar view of the input file.', - argstr='--triplanar') + desc="Create a triplanar view of the input file.", argstr="--triplanar" + ) tile_size = traits.Int( - desc='Pixel size for each image in a triplanar.', - argstr='--tilesize %s') + desc="Pixel size for each image in a triplanar.", argstr="--tilesize %s" + ) - _xor_sagittal_offset = ('sagittal_offset', 'sagittal_offset_perc') + _xor_sagittal_offset = ("sagittal_offset", "sagittal_offset_perc") sagittal_offset = traits.Int( - desc='Offset the sagittal slice from the centre.', - argstr='--sagittal_offset %s') + desc="Offset the sagittal slice from the centre.", argstr="--sagittal_offset %s" + ) sagittal_offset_perc = traits.Range( low=0, high=100, - desc='Offset the sagittal slice by a percentage from the centre.', - argstr='--sagittal_offset_perc %d', + desc="Offset the sagittal slice by a percentage from the centre.", + argstr="--sagittal_offset_perc %d", ) - _xor_vertical_horizontal = ('vertical_triplanar_view', - 'horizontal_triplanar_view') + _xor_vertical_horizontal = ("vertical_triplanar_view", "horizontal_triplanar_view") vertical_triplanar_view = traits.Bool( - desc='Create a vertical triplanar view (Default).', - argstr='--vertical', - xor=_xor_vertical_horizontal) + desc="Create a vertical triplanar view (Default).", + argstr="--vertical", + xor=_xor_vertical_horizontal, + ) horizontal_triplanar_view = traits.Bool( - desc='Create a horizontal triplanar view.', - argstr='--horizontal', - xor=_xor_vertical_horizontal) + desc="Create a horizontal triplanar view.", + argstr="--horizontal", + xor=_xor_vertical_horizontal, + ) - lookup = traits.Str( - desc='Arguments to pass to minclookup', argstr='--lookup %s') + lookup = traits.Str(desc="Arguments to pass to minclookup", argstr="--lookup %s") class PikOutputSpec(TraitedSpec): - output_file = File(desc='output image', exists=True) + output_file = File(desc="output image", exists=True) class Pik(CommandLine): @@ -1549,102 +1626,104 @@ class Pik(CommandLine): input_spec = PikInputSpec output_spec = PikOutputSpec - _cmd = 'mincpik' + _cmd = "mincpik" def _format_arg(self, name, spec, value): - if name == 'title': + if name == "title": if isinstance(value, bool) and value: - return '--title' + return "--title" elif isinstance(value, str): - return '--title --title_text %s' % (value, ) + return f"--title --title_text {value}" else: - raise ValueError( - 'Unknown value for "title" argument: ' + str(value)) - return super(Pik, self)._format_arg(name, spec, value) + raise ValueError('Unknown value for "title" argument: ' + str(value)) + return super()._format_arg(name, spec, value) class BlurInputSpec(CommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) - output_file_base = File(desc='output file base', argstr='%s', position=-1) + output_file_base = File(desc="output file base", argstr="%s", position=-1) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) - _xor_kernel = ('gaussian', 'rect') + _xor_kernel = ("gaussian", "rect") gaussian = traits.Bool( - desc='Use a gaussian smoothing kernel (default).', - argstr='-gaussian', - xor=_xor_kernel) + desc="Use a gaussian smoothing kernel (default).", + argstr="-gaussian", + xor=_xor_kernel, + ) rect = 
traits.Bool( - desc='Use a rect (box) smoothing kernel.', - argstr='-rect', - xor=_xor_kernel) + desc="Use a rect (box) smoothing kernel.", argstr="-rect", xor=_xor_kernel + ) gradient = traits.Bool( - desc='Create the gradient magnitude volume as well.', - argstr='-gradient') + desc="Create the gradient magnitude volume as well.", argstr="-gradient" + ) partial = traits.Bool( - desc= - 'Create the partial derivative and gradient magnitude volumes as well.', - argstr='-partial') + desc="Create the partial derivative and gradient magnitude volumes as well.", + argstr="-partial", + ) no_apodize = traits.Bool( - desc='Do not apodize the data before blurring.', argstr='-no_apodize') + desc="Do not apodize the data before blurring.", argstr="-no_apodize" + ) - _xor_main_options = ('fwhm', 'fwhm3d', 'standard_dev') + _xor_main_options = ("fwhm", "fwhm3d", "standard_dev") fwhm = traits.Float( 0, - desc='Full-width-half-maximum of gaussian kernel. Default value: 0.', - argstr='-fwhm %s', + desc="Full-width-half-maximum of gaussian kernel. Default value: 0.", + argstr="-fwhm %s", xor=_xor_main_options, - mandatory=True) + mandatory=True, + ) standard_dev = traits.Float( 0, - desc='Standard deviation of gaussian kernel. Default value: 0.', - argstr='-standarddev %s', + desc="Standard deviation of gaussian kernel. Default value: 0.", + argstr="-standarddev %s", xor=_xor_main_options, - mandatory=True) + mandatory=True, + ) - fwhm3d = traits.Tuple( + fwhm3d = Tuple( traits.Float, traits.Float, traits.Float, - argstr='-3dfwhm %s %s %s', - desc=('Full-width-half-maximum of gaussian kernel.' - 'Default value: -1.79769e+308 -1.79769e+308 -1.79769e+308.'), + argstr="-3dfwhm %s %s %s", + desc=( + "Full-width-half-maximum of gaussian kernel." + "Default value: -1.79769e+308 -1.79769e+308 -1.79769e+308." + ), xor=_xor_main_options, - mandatory=True) + mandatory=True, + ) dimensions = traits.Enum( 3, 1, 2, - desc= - 'Number of dimensions to blur (either 1,2 or 3). Default value: 3.', - argstr='-dimensions %s') + desc="Number of dimensions to blur (either 1,2 or 3). Default value: 3.", + argstr="-dimensions %s", + ) class BlurOutputSpec(TraitedSpec): - output_file = File(desc='Blurred output file.', exists=True) + output_file = File(desc="Blurred output file.", exists=True) - gradient_dxyz = File(desc='Gradient dxyz.') - partial_dx = File(desc='Partial gradient dx.') - partial_dy = File(desc='Partial gradient dy.') - partial_dz = File(desc='Partial gradient dz.') - partial_dxyz = File(desc='Partial gradient dxyz.') + gradient_dxyz = File(desc="Gradient dxyz.") + partial_dx = File(desc="Partial gradient dx.") + partial_dy = File(desc="Partial gradient dy.") + partial_dz = File(desc="Partial gradient dz.") + partial_dxyz = File(desc="Partial gradient dxyz.") class Blur(StdOutCommandLine): @@ -1686,7 +1765,7 @@ class Blur(StdOutCommandLine): input_spec = BlurInputSpec output_spec = BlurOutputSpec - _cmd = 'mincblur' + _cmd = "mincblur" def _gen_output_base(self): output_file_base = self.inputs.output_file_base @@ -1694,13 +1773,11 @@ def _gen_output_base(self): if isdefined(output_file_base): return output_file_base else: - base_file_name = os.path.split( - self.inputs.input_file)[1] # e.g. 'foo.mnc' - base_file_name_no_ext = os.path.splitext(base_file_name)[ - 0] # e.g. 'foo' + base_file_name = os.path.split(self.inputs.input_file)[1] # e.g. 'foo.mnc' + base_file_name_no_ext = os.path.splitext(base_file_name)[0] # e.g. 
'foo' output_base = os.path.join( - os.getcwd(), base_file_name_no_ext + - '_bluroutput') # e.g. '/tmp/blah/foo_bluroutput' + os.getcwd(), base_file_name_no_ext + "_bluroutput" + ) # e.g. '/tmp/blah/foo_bluroutput' # return os.path.splitext(self.inputs.input_file)[0] + # '_bluroutput' return output_base @@ -1710,23 +1787,23 @@ def _list_outputs(self): output_file_base = self._gen_output_base() - outputs['output_file'] = output_file_base + '_blur.mnc' + outputs["output_file"] = output_file_base + "_blur.mnc" if isdefined(self.inputs.gradient): - outputs['gradient_dxyz'] = output_file_base + '_dxyz.mnc' + outputs["gradient_dxyz"] = output_file_base + "_dxyz.mnc" if isdefined(self.inputs.partial): - outputs['partial_dx'] = output_file_base + '_dx.mnc' - outputs['partial_dy'] = output_file_base + '_dy.mnc' - outputs['partial_dz'] = output_file_base + '_dz.mnc' - outputs['partial_dxyz'] = output_file_base + '_dxyz.mnc' + outputs["partial_dx"] = output_file_base + "_dx.mnc" + outputs["partial_dy"] = output_file_base + "_dy.mnc" + outputs["partial_dz"] = output_file_base + "_dz.mnc" + outputs["partial_dxyz"] = output_file_base + "_dxyz.mnc" return outputs @property def cmdline(self): output_file_base = self.inputs.output_file_base - orig_cmdline = super(Blur, self).cmdline + orig_cmdline = super().cmdline if isdefined(output_file_base): return orig_cmdline @@ -1734,156 +1811,174 @@ def cmdline(self): # FIXME this seems like a bit of a hack. Can we force output_file # to show up in cmdline by default, even if it isn't specified in # the instantiation of Pik? - return '%s %s' % (orig_cmdline, self._gen_output_base()) + return f"{orig_cmdline} {self._gen_output_base()}" class MathInputSpec(CommandLineInputSpec): - _xor_input_files = ('input_files', 'filelist') + _xor_input_files = ("input_files", "filelist") input_files = InputMultiPath( - traits.File(exists=True), - desc='input file(s) for calculation', + File(exists=True), + desc="input file(s) for calculation", mandatory=True, - sep=' ', - argstr='%s', + sep=" ", + argstr="%s", position=-2, - xor=_xor_input_files) + xor=_xor_input_files, + ) output_file = File( - desc='output file', - argstr='%s', + desc="output file", + argstr="%s", genfile=True, position=-1, - name_source=['input_files'], + name_source=["input_files"], hash_files=False, - name_template='%s_mincmath.mnc') + name_template="%s_mincmath.mnc", + ) - filelist = traits.File( - desc='Specify the name of a file containing input file names.', - argstr='-filelist %s', + filelist = File( + desc="Specify the name of a file containing input file names.", + argstr="-filelist %s", exists=True, mandatory=True, - xor=_xor_input_files) + xor=_xor_input_files, + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) - two = traits.Bool(desc='Create a MINC 2 output file.', argstr='-2') + two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") - _xor_copy_header = ('copy_header', 'no_copy_header') + _xor_copy_header = ("copy_header", "no_copy_header") copy_header = traits.Bool( - desc= - 'Copy all of the header from the first file (default for one file).', - argstr='-copy_header', - xor=_xor_copy_header) + desc="Copy all of the header from the first file (default for one file).", + argstr="-copy_header", + xor=_xor_copy_header, + ) no_copy_header = traits.Bool( - desc= - 'Do not copy all of the header from the first file (default for many 
files)).', - argstr='-nocopy_header', - xor=_xor_copy_header) + desc="Do not copy all of the header from the first file (default for many files).", + argstr="-nocopy_header", + xor=_xor_copy_header, + ) _xor_format = ( - 'format_filetype', - 'format_byte', - 'format_short', - 'format_int', - 'format_long', - 'format_float', - 'format_double', - 'format_signed', - 'format_unsigned', + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", ) format_filetype = traits.Bool( - desc='Use data type of first file (default).', - argstr='-filetype', - xor=_xor_format) + desc="Use data type of first file (default).", + argstr="-filetype", + xor=_xor_format, + ) format_byte = traits.Bool( - desc='Write out byte data.', argstr='-byte', xor=_xor_format) + desc="Write out byte data.", argstr="-byte", xor=_xor_format + ) format_short = traits.Bool( - desc='Write out short integer data.', argstr='-short', xor=_xor_format) + desc="Write out short integer data.", argstr="-short", xor=_xor_format + ) format_int = traits.Bool( - desc='Write out 32-bit integer data.', argstr='-int', xor=_xor_format) + desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format + ) format_long = traits.Bool( - desc='Superseded by -int.', argstr='-long', xor=_xor_format) + desc="Superseded by -int.", argstr="-long", xor=_xor_format + ) format_float = traits.Bool( - desc='Write out single-precision floating-point data.', - argstr='-float', - xor=_xor_format) + desc="Write out single-precision floating-point data.", + argstr="-float", + xor=_xor_format, + ) format_double = traits.Bool( - desc='Write out double-precision floating-point data.', - argstr='-double', - xor=_xor_format) + desc="Write out double-precision floating-point data.", + argstr="-double", + xor=_xor_format, + ) format_signed = traits.Bool( - desc='Write signed integer data.', argstr='-signed', xor=_xor_format) + desc="Write signed integer data.", argstr="-signed", xor=_xor_format + ) format_unsigned = traits.Bool( - desc='Write unsigned integer data (default).', - argstr='-unsigned', - xor=_xor_format) + desc="Write unsigned integer data (default).", + argstr="-unsigned", + xor=_xor_format, + ) - voxel_range = traits.Tuple( + voxel_range = Tuple( traits.Int, traits.Int, - argstr='-range %d %d', - desc='Valid range for output data.') + argstr="-range %d %d", + desc="Valid range for output data.", + ) max_buffer_size_in_kb = traits.Range( low=0, - desc='Specify the maximum size of the internal buffers (in kbytes).', + desc="Specify the maximum size of the internal buffers (in kbytes).", value=4096, usedefault=True, - argstr='-max_buffer_size_in_kb %d', + argstr="-max_buffer_size_in_kb %d", ) - _xor_check_dimensions = ( - 'check_dimensions', - 'no_check_dimensions', - ) + _xor_check_dimensions = ("check_dimensions", "no_check_dimensions") check_dimensions = traits.Bool( - desc='Check that dimension info matches across files (default).', - argstr='-check_dimensions', - xor=_xor_check_dimensions) + desc="Check that dimension info matches across files (default).", + argstr="-check_dimensions", + xor=_xor_check_dimensions, + ) no_check_dimensions = traits.Bool( - desc='Do not check dimension info.', - argstr='-nocheck_dimensions', - xor=_xor_check_dimensions) + desc="Do not check dimension info.", + argstr="-nocheck_dimensions", + xor=_xor_check_dimensions, + ) dimension = traits.Str( - desc= - 'Specify a dimension along which we wish to perform a 
calculation.', - argstr='-dimension %s') + desc="Specify a dimension along which we wish to perform a calculation.", + argstr="-dimension %s", + ) # FIXME Is it sensible to use ignore_nan and propagate_nan at the same # time? Document this. ignore_nan = traits.Bool( - desc='Ignore invalid data (NaN) for accumulations.', - argstr='-ignore_nan') + desc="Ignore invalid data (NaN) for accumulations.", argstr="-ignore_nan" + ) propagate_nan = traits.Bool( - desc='Invalid data in any file at a voxel produces a NaN (default).', - argstr='-propagate_nan') + desc="Invalid data in any file at a voxel produces a NaN (default).", + argstr="-propagate_nan", + ) # FIXME Double-check that these are mutually exclusive? - _xor_nan_zero_illegal = ('output_nan', 'output_zero', - 'output_illegal_value') + _xor_nan_zero_illegal = ("output_nan", "output_zero", "output_illegal") output_nan = traits.Bool( - desc='Output NaN when an illegal operation is done (default).', - argstr='-nan', - xor=_xor_nan_zero_illegal) + desc="Output NaN when an illegal operation is done (default).", + argstr="-nan", + xor=_xor_nan_zero_illegal, + ) output_zero = traits.Bool( - desc='Output zero when an illegal operation is done.', - argstr='-zero', - xor=_xor_nan_zero_illegal) + desc="Output zero when an illegal operation is done.", + argstr="-zero", + xor=_xor_nan_zero_illegal, + ) output_illegal = traits.Bool( - desc=('Value to write out when an illegal operation' - 'is done. Default value: 1.79769e+308'), - argstr='-illegal_value', - xor=_xor_nan_zero_illegal) + desc=( + "Value to write out when an illegal operation " + "is done. Default value: 1.79769e+308" + ), + argstr="-illegal_value", + xor=_xor_nan_zero_illegal, + ) # FIXME A whole bunch of the parameters will be mutually exclusive, e.g. surely can't do sqrt and abs at the same time? # Or does mincmath do one and then the next? 
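The bool-or-constant operators declared in the next hunk accept either True (combine two input volumes voxel-wise) or a float (apply the constant to a single volume); _parse_inputs further down enforces the matching number of input_files. An illustrative sketch in the module's doctest style (the test-data indices are arbitrary):

>>> from nipype.interfaces.minc import Math
>>> from nipype.interfaces.minc.testdata import nonempty_minc_data
>>> two_vols = Math(input_files=[nonempty_minc_data(0), nonempty_minc_data(1)],
...                 calc_add=True, ignore_nan=True)
>>> one_vol = Math(input_files=[nonempty_minc_data(0)], calc_add=5.0)
>>> one_vol.run()  # doctest: +SKIP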
@@ -1893,156 +1988,186 @@ class MathInputSpec(CommandLineInputSpec): ########################################################################## bool_or_const_traits = [ - 'test_gt', 'test_lt', 'test_eq', 'test_ne', 'test_ge', 'test_le', - 'calc_add', 'calc_sub', 'calc_mul', 'calc_div' + "test_gt", + "test_lt", + "test_eq", + "test_ne", + "test_ge", + "test_le", + "calc_add", + "calc_sub", + "calc_mul", + "calc_div", ] test_gt = traits.Either( traits.Bool(), traits.Float(), - desc='Test for vol1 > vol2 or vol1 > constant.', - argstr='-gt') + desc="Test for vol1 > vol2 or vol1 > constant.", + argstr="-gt", + ) test_lt = traits.Either( traits.Bool(), traits.Float(), - desc='Test for vol1 < vol2 or vol1 < constant.', - argstr='-lt') + desc="Test for vol1 < vol2 or vol1 < constant.", + argstr="-lt", + ) test_eq = traits.Either( traits.Bool(), traits.Float(), - desc='Test for integer vol1 == vol2 or vol1 == constant.', - argstr='-eq') + desc="Test for integer vol1 == vol2 or vol1 == constant.", + argstr="-eq", + ) test_ne = traits.Either( traits.Bool(), traits.Float(), - desc='Test for integer vol1 != vol2 or vol1 != const.', - argstr='-ne') + desc="Test for integer vol1 != vol2 or vol1 != const.", + argstr="-ne", + ) test_ge = traits.Either( traits.Bool(), traits.Float(), - desc='Test for vol1 >= vol2 or vol1 >= const.', - argstr='-ge') + desc="Test for vol1 >= vol2 or vol1 >= const.", + argstr="-ge", + ) test_le = traits.Either( traits.Bool(), traits.Float(), - desc='Test for vol1 <= vol2 or vol1 <= const.', - argstr='-le') + desc="Test for vol1 <= vol2 or vol1 <= const.", + argstr="-le", + ) calc_add = traits.Either( traits.Bool(), traits.Float(), - desc='Add N volumes or volume + constant.', - argstr='-add') + desc="Add N volumes or volume + constant.", + argstr="-add", + ) calc_sub = traits.Either( traits.Bool(), traits.Float(), - desc='Subtract 2 volumes or volume - constant.', - argstr='-sub') + desc="Subtract 2 volumes or volume - constant.", + argstr="-sub", + ) calc_mul = traits.Either( traits.Bool(), traits.Float(), - desc='Multiply N volumes or volume * constant.', - argstr='-mult') + desc="Multiply N volumes or volume * constant.", + argstr="-mult", + ) calc_div = traits.Either( traits.Bool(), traits.Float(), - desc='Divide 2 volumes or volume / constant.', - argstr='-div') + desc="Divide 2 volumes or volume / constant.", + argstr="-div", + ) ###################################### # Traits that expect a single volume # ###################################### single_volume_traits = [ - 'invert', 'calc_not', 'sqrt', 'square', 'abs', 'exp', 'log', 'scale', - 'clamp', 'segment', 'nsegment', 'isnan', 'isnan' + "invert", + "calc_not", + "sqrt", + "square", + "abs", + "exp", + "log", + "scale", + "clamp", + "segment", + "nsegment", + "isnan", + "isnan", ] # FIXME enforce this in _parse_inputs and check for other members invert = traits.Either( - traits.Float(), desc='Calculate 1/c.', argstr='-invert -const %s') + traits.Float(), desc="Calculate 1/c.", argstr="-invert -const %s" + ) - calc_not = traits.Bool(desc='Calculate !vol1.', argstr='-not') + calc_not = traits.Bool(desc="Calculate !vol1.", argstr="-not") - sqrt = traits.Bool(desc='Take square root of a volume.', argstr='-sqrt') - square = traits.Bool(desc='Take square of a volume.', argstr='-square') - abs = traits.Bool(desc='Take absolute value of a volume.', argstr='-abs') + sqrt = traits.Bool(desc="Take square root of a volume.", argstr="-sqrt") + square = traits.Bool(desc="Take square of a volume.", argstr="-square") + abs = 
traits.Bool(desc="Take absolute value of a volume.", argstr="-abs") - exp = traits.Tuple( + exp = Tuple( traits.Float, traits.Float, - argstr='-exp -const2 %s %s', - desc='Calculate c2*exp(c1*x). Both constants must be specified.') + argstr="-exp -const2 %s %s", + desc="Calculate c2*exp(c1*x). Both constants must be specified.", + ) - log = traits.Tuple( + log = Tuple( traits.Float, traits.Float, - argstr='-log -const2 %s %s', - desc='Calculate log(x/c2)/c1. The constants c1 and c2 default to 1.') + argstr="-log -const2 %s %s", + desc="Calculate log(x/c2)/c1. The constants c1 and c2 default to 1.", + ) - scale = traits.Tuple( + scale = Tuple( traits.Float, traits.Float, - argstr='-scale -const2 %s %s', - desc='Scale a volume: volume * c1 + c2.') + argstr="-scale -const2 %s %s", + desc="Scale a volume: volume * c1 + c2.", + ) - clamp = traits.Tuple( + clamp = Tuple( traits.Float, traits.Float, - argstr='-clamp -const2 %s %s', - desc='Clamp a volume to lie between two values.') + argstr="-clamp -const2 %s %s", + desc="Clamp a volume to lie between two values.", + ) - segment = traits.Tuple( + segment = Tuple( traits.Float, traits.Float, - argstr='-segment -const2 %s %s', - desc= - 'Segment a volume using range of -const2: within range = 1, outside range = 0.' + argstr="-segment -const2 %s %s", + desc="Segment a volume using range of -const2: within range = 1, outside range = 0.", ) - nsegment = traits.Tuple( + nsegment = Tuple( traits.Float, traits.Float, - argstr='-nsegment -const2 %s %s', - desc='Opposite of -segment: within range = 0, outside range = 1.') + argstr="-nsegment -const2 %s %s", + desc="Opposite of -segment: within range = 0, outside range = 1.", + ) - isnan = traits.Bool(desc='Test for NaN values in vol1.', argstr='-isnan') + isnan = traits.Bool(desc="Test for NaN values in vol1.", argstr="-isnan") - nisnan = traits.Bool(desc='Negation of -isnan.', argstr='-nisnan') + nisnan = traits.Bool(desc="Negation of -isnan.", argstr="-nisnan") ############################################ # Traits that expect precisely two volumes # ############################################ - two_volume_traits = ['percentdiff'] + two_volume_traits = ["percentdiff"] percentdiff = traits.Float( - desc= - 'Percent difference between 2 volumes, thresholded (const def=0.0).', - argstr='-percentdiff') + desc="Percent difference between 2 volumes, thresholded (const def=0.0).", + argstr="-percentdiff", + ) ##################################### # Traits that expect N >= 1 volumes # ##################################### - n_volume_traits = [ - 'count_valid', 'maximum', 'minimum', 'calc_add', 'calc_or' - ] + n_volume_traits = ["count_valid", "maximum", "minimum", "calc_add", "calc_or"] count_valid = traits.Bool( - desc='Count the number of valid values in N volumes.', - argstr='-count_valid') + desc="Count the number of valid values in N volumes.", argstr="-count_valid" + ) - maximum = traits.Bool(desc='Find maximum of N volumes.', argstr='-maximum') - minimum = traits.Bool(desc='Find minimum of N volumes.', argstr='-minimum') + maximum = traits.Bool(desc="Find maximum of N volumes.", argstr="-maximum") + minimum = traits.Bool(desc="Find minimum of N volumes.", argstr="-minimum") - calc_and = traits.Bool( - desc='Calculate vol1 && vol2 (&& ...).', argstr='-and') - calc_or = traits.Bool( - desc='Calculate vol1 || vol2 (|| ...).', argstr='-or') + calc_and = traits.Bool(desc="Calculate vol1 && vol2 (&& ...).", argstr="-and") + calc_or = traits.Bool(desc="Calculate vol1 || vol2 (|| ...).", argstr="-or") class 
MathOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Math(StdOutCommandLine): @@ -2068,7 +2193,7 @@ class Math(StdOutCommandLine): input_spec = MathInputSpec output_spec = MathOutputSpec - _cmd = 'mincmath' + _cmd = "mincmath" def _format_arg(self, name, spec, value): assert value is not None @@ -2080,24 +2205,16 @@ def _format_arg(self, name, spec, value): if isinstance(value, bool) and value: return spec.argstr elif isinstance(value, bool) and not value: - raise ValueError('Does not make sense to specify %s=False' % - (name, )) + raise ValueError(f"Does not make sense to specify {name}=False") elif isinstance(value, float): - return '%s -const %s' % ( - spec.argstr, - value, - ) + return f"{spec.argstr} -const {value}" else: - raise ValueError('Invalid %s argument: %s' % ( - name, - value, - )) + raise ValueError(f"Invalid {name} argument: {value}") - return super(Math, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _parse_inputs(self): - """A number of the command line options expect precisely one or two files. - """ + """A number of the command line options expect precisely one or two files.""" nr_input_files = len(self.inputs.input_files) @@ -2108,22 +2225,19 @@ def _parse_inputs(self): if isinstance(t, bool): if nr_input_files != 2: raise ValueError( - 'Due to the %s option we expected 2 files but input_files is of length %d' - % ( - n, - nr_input_files, - )) + "Due to the %s option we expected 2 files but input_files is of length %d" + % (n, nr_input_files) + ) elif isinstance(t, float): if nr_input_files != 1: raise ValueError( - 'Due to the %s option we expected 1 file but input_files is of length %d' - % ( - n, - nr_input_files, - )) + "Due to the %s option we expected 1 file but input_files is of length %d" + % (n, nr_input_files) + ) else: raise ValueError( - 'Argument should be a bool or const, but got: %s' % t) + "Argument should be a bool or const, but got: %s" % t + ) for n in self.input_spec.single_volume_traits: t = self.inputs.__getattribute__(n) @@ -2131,11 +2245,9 @@ def _parse_inputs(self): if isdefined(t): if nr_input_files != 1: raise ValueError( - 'Due to the %s option we expected 1 file but input_files is of length %d' - % ( - n, - nr_input_files, - )) + "Due to the %s option we expected 1 file but input_files is of length %d" + % (n, nr_input_files) + ) for n in self.input_spec.two_volume_traits: t = self.inputs.__getattribute__(n) @@ -2143,11 +2255,9 @@ def _parse_inputs(self): if isdefined(t): if nr_input_files != 2: raise ValueError( - 'Due to the %s option we expected 2 files but input_files is of length %d' - % ( - n, - nr_input_files, - )) + "Due to the %s option we expected 2 files but input_files is of length %d" + % (n, nr_input_files) + ) for n in self.input_spec.n_volume_traits: t = self.inputs.__getattribute__(n) @@ -2155,13 +2265,11 @@ def _parse_inputs(self): if isdefined(t): if not nr_input_files >= 1: raise ValueError( - 'Due to the %s option we expected at least one file but input_files is of length %d' - % ( - n, - nr_input_files, - )) + "Due to the %s option we expected at least one file but input_files is of length %d" + % (n, nr_input_files) + ) - return super(Math, self)._parse_inputs() + return super()._parse_inputs() class ResampleInputSpec(CommandLineInputSpec): @@ -2175,58 +2283,58 @@ class ResampleInputSpec(CommandLineInputSpec): """ input_file = File( - desc='input file for resampling', + desc="input file 
for resampling", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_resample.mnc') + name_template="%s_resample.mnc", + ) # This is a dummy input. - input_grid_files = InputMultiPath( - traits.File, - desc='input grid file(s)', - ) + input_grid_files = InputMultiPath(File, desc="input grid file(s)") - two = traits.Bool(desc='Create a MINC 2 output file.', argstr='-2') + two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) - _xor_interpolation = ('trilinear_interpolation', 'tricubic_interpolation', - 'nearest_neighbour_interpolation', - 'sinc_interpolation') + _xor_interpolation = ( + "trilinear_interpolation", + "tricubic_interpolation", + "nearest_neighbour_interpolation", + "sinc_interpolation", + ) trilinear_interpolation = traits.Bool( - desc='Do trilinear interpolation.', - argstr='-trilinear', - xor=_xor_interpolation) + desc="Do trilinear interpolation.", argstr="-trilinear", xor=_xor_interpolation + ) tricubic_interpolation = traits.Bool( - desc='Do tricubic interpolation.', - argstr='-tricubic', - xor=_xor_interpolation) + desc="Do tricubic interpolation.", argstr="-tricubic", xor=_xor_interpolation + ) nearest_neighbour_interpolation = traits.Bool( - desc='Do nearest neighbour interpolation.', - argstr='-nearest_neighbour', - xor=_xor_interpolation) + desc="Do nearest neighbour interpolation.", + argstr="-nearest_neighbour", + xor=_xor_interpolation, + ) sinc_interpolation = traits.Bool( - desc='Do windowed sinc interpolation.', - argstr='-sinc', - xor=_xor_interpolation) + desc="Do windowed sinc interpolation.", argstr="-sinc", xor=_xor_interpolation + ) half_width_sinc_window = traits.Enum( 5, @@ -2239,293 +2347,343 @@ class ResampleInputSpec(CommandLineInputSpec): 8, 9, 10, - desc='Set half-width of sinc window (1-10). Default value: 5.', - argstr='-width %s', - requires=['sinc_interpolation']) + desc="Set half-width of sinc window (1-10). Default value: 5.", + argstr="-width %s", + requires=["sinc_interpolation"], + ) - _xor_sinc_window_type = ('sinc_window_hanning', 'sinc_window_hamming') + _xor_sinc_window_type = ("sinc_window_hanning", "sinc_window_hamming") sinc_window_hanning = traits.Bool( - desc='Set sinc window type to Hanning.', - argstr='-hanning', + desc="Set sinc window type to Hanning.", + argstr="-hanning", xor=_xor_sinc_window_type, - requires=['sinc_interpolation']) + requires=["sinc_interpolation"], + ) sinc_window_hamming = traits.Bool( - desc='Set sinc window type to Hamming.', - argstr='-hamming', + desc="Set sinc window type to Hamming.", + argstr="-hamming", xor=_xor_sinc_window_type, - requires=['sinc_interpolation']) + requires=["sinc_interpolation"], + ) - transformation = traits.File( - desc='File giving world transformation. (Default = identity).', + transformation = File( + desc="File giving world transformation. 
(Default = identity).", exists=True, - argstr='-transformation %s') + argstr="-transformation %s", + ) invert_transformation = traits.Bool( - desc='Invert the transformation before using it.', - argstr='-invert_transformation') + desc="Invert the transformation before using it.", + argstr="-invert_transformation", + ) - _xor_input_sampling = ('vio_transform', 'no_input_sampling') + _xor_input_sampling = ("vio_transform", "no_input_sampling") vio_transform = traits.Bool( - desc='VIO_Transform the input sampling with the transform (default).', - argstr='-tfm_input_sampling', - xor=_xor_input_sampling) + desc="VIO_Transform the input sampling with the transform (default).", + argstr="-tfm_input_sampling", + xor=_xor_input_sampling, + ) no_input_sampling = traits.Bool( - desc='Use the input sampling without transforming (old behaviour).', - argstr='-use_input_sampling', - xor=_xor_input_sampling) + desc="Use the input sampling without transforming (old behaviour).", + argstr="-use_input_sampling", + xor=_xor_input_sampling, + ) - like = traits.File( - desc='Specifies a model file for the resampling.', - argstr='-like %s', - exists=True) + like = File( + desc="Specifies a model file for the resampling.", + argstr="-like %s", + exists=True, + ) _xor_format = ( - 'format_byte', - 'format_short', - 'format_int', - 'format_long', - 'format_float', - 'format_double', - 'format_signed', - 'format_unsigned', + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", ) format_byte = traits.Bool( - desc='Write out byte data.', argstr='-byte', xor=_xor_format) + desc="Write out byte data.", argstr="-byte", xor=_xor_format + ) format_short = traits.Bool( - desc='Write out short integer data.', argstr='-short', xor=_xor_format) + desc="Write out short integer data.", argstr="-short", xor=_xor_format + ) format_int = traits.Bool( - desc='Write out 32-bit integer data.', argstr='-int', xor=_xor_format) + desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format + ) format_long = traits.Bool( - desc='Superseded by -int.', argstr='-long', xor=_xor_format) + desc="Superseded by -int.", argstr="-long", xor=_xor_format + ) format_float = traits.Bool( - desc='Write out single-precision floating-point data.', - argstr='-float', - xor=_xor_format) + desc="Write out single-precision floating-point data.", + argstr="-float", + xor=_xor_format, + ) format_double = traits.Bool( - desc='Write out double-precision floating-point data.', - argstr='-double', - xor=_xor_format) + desc="Write out double-precision floating-point data.", + argstr="-double", + xor=_xor_format, + ) format_signed = traits.Bool( - desc='Write signed integer data.', argstr='-signed', xor=_xor_format) + desc="Write signed integer data.", argstr="-signed", xor=_xor_format + ) format_unsigned = traits.Bool( - desc='Write unsigned integer data (default).', - argstr='-unsigned', - xor=_xor_format) + desc="Write unsigned integer data (default).", + argstr="-unsigned", + xor=_xor_format, + ) - output_range = traits.Tuple( + output_range = Tuple( traits.Float, traits.Float, - argstr='-range %s %s', - desc= - 'Valid range for output data. Default value: -1.79769e+308 -1.79769e+308.' + argstr="-range %s %s", + desc="Valid range for output data. 
Default value: -1.79769e+308 -1.79769e+308.", ) - _xor_slices = ('transverse', 'sagittal', 'coronal') + _xor_slices = ("transverse", "sagittal", "coronal") transverse_slices = traits.Bool( - desc='Write out transverse slices.', - argstr='-transverse', - xor=_xor_slices) + desc="Write out transverse slices.", argstr="-transverse", xor=_xor_slices + ) sagittal_slices = traits.Bool( - desc='Write out sagittal slices', argstr='-sagittal', xor=_xor_slices) + desc="Write out sagittal slices", argstr="-sagittal", xor=_xor_slices + ) coronal_slices = traits.Bool( - desc='Write out coronal slices', argstr='-coronal', xor=_xor_slices) + desc="Write out coronal slices", argstr="-coronal", xor=_xor_slices + ) - _xor_fill = ('nofill', 'fill') + _xor_fill = ("nofill", "fill") no_fill = traits.Bool( - desc='Use value zero for points outside of input volume.', - argstr='-nofill', - xor=_xor_fill) + desc="Use value zero for points outside of input volume.", + argstr="-nofill", + xor=_xor_fill, + ) fill = traits.Bool( - desc='Use a fill value for points outside of input volume.', - argstr='-fill', - xor=_xor_fill) + desc="Use a fill value for points outside of input volume.", + argstr="-fill", + xor=_xor_fill, + ) fill_value = traits.Float( - desc=('Specify a fill value for points outside of input volume.' - 'Default value: 1.79769e+308.'), - argstr='-fillvalue %s', - requires=['fill']) + desc=( + "Specify a fill value for points outside of input volume." + "Default value: 1.79769e+308." + ), + argstr="-fillvalue %s", + requires=["fill"], + ) - _xor_scale = ('keep_real_range', 'nokeep_real_range') + _xor_scale = ("keep_real_range", "nokeep_real_range") keep_real_range = traits.Bool( - desc='Keep the real scale of the input volume.', - argstr='-keep_real_range', - xor=_xor_scale) + desc="Keep the real scale of the input volume.", + argstr="-keep_real_range", + xor=_xor_scale, + ) nokeep_real_range = traits.Bool( - desc='Do not keep the real scale of the data (default).', - argstr='-nokeep_real_range', - xor=_xor_scale) + desc="Do not keep the real scale of the data (default).", + argstr="-nokeep_real_range", + xor=_xor_scale, + ) - _xor_spacetype = ('spacetype', 'talairach') + _xor_spacetype = ("spacetype", "talairach") spacetype = traits.Str( - desc='Set the spacetype attribute to a specified string.', - argstr='-spacetype %s') - talairach = traits.Bool( - desc='Output is in Talairach space.', argstr='-talairach') + desc="Set the spacetype attribute to a specified string.", + argstr="-spacetype %s", + ) + talairach = traits.Bool(desc="Output is in Talairach space.", argstr="-talairach") - origin = traits.Tuple( + origin = Tuple( traits.Float, traits.Float, traits.Float, - desc=('Origin of first pixel in 3D space.' - 'Default value: 1.79769e+308 1.79769e+308 1.79769e+308.'), - argstr='-origin %s %s %s') + desc=( + "Origin of first pixel in 3D space." + "Default value: 1.79769e+308 1.79769e+308 1.79769e+308." + ), + argstr="-origin %s %s %s", + ) standard_sampling = traits.Bool( - desc='Set the sampling to standard values (step, start and dircos).', - argstr='-standard_sampling') # FIXME Bool? + desc="Set the sampling to standard values (step, start and dircos).", + argstr="-standard_sampling", + ) # FIXME Bool? units = traits.Str( - desc='Specify the units of the output sampling.', - argstr='-units %s') # FIXME String? + desc="Specify the units of the output sampling.", argstr="-units %s" + ) # FIXME String? # Elements along each dimension. # FIXME Ints? Ranges? # FIXME Check that this xor behaves correctly. 
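Before the sampling-grid traits below, a quick sketch of how the resampling options added so far combine: the interpolation flags are mutually exclusive, and half_width_sinc_window requires sinc_interpolation. Test-data indices are arbitrary, with the second file standing in for a sampling model passed via -like:

>>> from nipype.interfaces.minc import Resample
>>> from nipype.interfaces.minc.testdata import nonempty_minc_data
>>> res = Resample(input_file=nonempty_minc_data(0),
...                like=nonempty_minc_data(1),
...                sinc_interpolation=True,
...                half_width_sinc_window=7)
>>> res.run()  # doctest: +SKIP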
- _xor_nelements = ('nelements', 'nelements_x_y_or_z') + _xor_nelements = ("nelements", "nelements_x_y_or_z") # nr elements along each dimension - nelements = traits.Tuple( + nelements = Tuple( traits.Int, traits.Int, traits.Int, - desc='Number of elements along each dimension (X, Y, Z).', - argstr='-nelements %s %s %s', - xor=_xor_nelements) + desc="Number of elements along each dimension (X, Y, Z).", + argstr="-nelements %s %s %s", + xor=_xor_nelements, + ) # FIXME Is mincresample happy if we only specify one of these, or do we # need the requires=...? xnelements = traits.Int( - desc='Number of elements along the X dimension.', - argstr='-xnelements %s', - requires=('ynelements', 'znelements'), - xor=_xor_nelements) + desc="Number of elements along the X dimension.", + argstr="-xnelements %s", + requires=("ynelements", "znelements"), + xor=_xor_nelements, + ) ynelements = traits.Int( - desc='Number of elements along the Y dimension.', - argstr='-ynelements %s', - requires=('xnelements', 'znelements'), - xor=_xor_nelements) + desc="Number of elements along the Y dimension.", + argstr="-ynelements %s", + requires=("xnelements", "znelements"), + xor=_xor_nelements, + ) znelements = traits.Int( - desc='Number of elements along the Z dimension.', - argstr='-znelements %s', - requires=('xnelements', 'ynelements'), - xor=_xor_nelements) + desc="Number of elements along the Z dimension.", + argstr="-znelements %s", + requires=("xnelements", "ynelements"), + xor=_xor_nelements, + ) # step size along each dimension - _xor_step = ('step', 'step_x_y_or_z') + _xor_step = ("step", "step_x_y_or_z") - step = traits.Tuple( + step = Tuple( traits.Int, traits.Int, traits.Int, - desc= - 'Step size along each dimension (X, Y, Z). Default value: (0, 0, 0).', - argstr='-step %s %s %s', - xor=_xor_nelements) + desc="Step size along each dimension (X, Y, Z). Default value: (0, 0, 0).", + argstr="-step %s %s %s", + xor=_xor_nelements, + ) # FIXME Use the requires=...? xstep = traits.Int( - desc='Step size along the X dimension. Default value: 0.', - argstr='-xstep %s', - requires=('ystep', 'zstep'), - xor=_xor_step) + desc="Step size along the X dimension. Default value: 0.", + argstr="-xstep %s", + requires=("ystep", "zstep"), + xor=_xor_step, + ) ystep = traits.Int( - desc='Step size along the Y dimension. Default value: 0.', - argstr='-ystep %s', - requires=('xstep', 'zstep'), - xor=_xor_step) + desc="Step size along the Y dimension. Default value: 0.", + argstr="-ystep %s", + requires=("xstep", "zstep"), + xor=_xor_step, + ) zstep = traits.Int( - desc='Step size along the Z dimension. Default value: 0.', - argstr='-zstep %s', - requires=('xstep', 'ystep'), - xor=_xor_step) + desc="Step size along the Z dimension. Default value: 0.", + argstr="-zstep %s", + requires=("xstep", "ystep"), + xor=_xor_step, + ) # start point along each dimension - _xor_start = ('start', 'start_x_y_or_z') + _xor_start = ("start", "start_x_y_or_z") - start = traits.Tuple( + start = Tuple( traits.Float, traits.Float, traits.Float, - desc=('Start point along each dimension (X, Y, Z).' - 'Default value: 1.79769e+308 1.79769e+308 1.79769e+308.'), - argstr='-start %s %s %s', - xor=_xor_nelements) + desc=( + "Start point along each dimension (X, Y, Z)." + "Default value: 1.79769e+308 1.79769e+308 1.79769e+308." + ), + argstr="-start %s %s %s", + xor=_xor_nelements, + ) # FIXME Use the requires=...? xstart = traits.Float( - desc='Start point along the X dimension. 
Default value: 1.79769e+308.', - argstr='-xstart %s', - requires=('ystart', 'zstart'), - xor=_xor_start) + desc="Start point along the X dimension. Default value: 1.79769e+308.", + argstr="-xstart %s", + requires=("ystart", "zstart"), + xor=_xor_start, + ) ystart = traits.Float( - desc='Start point along the Y dimension. Default value: 1.79769e+308.', - argstr='-ystart %s', - requires=('xstart', 'zstart'), - xor=_xor_start) + desc="Start point along the Y dimension. Default value: 1.79769e+308.", + argstr="-ystart %s", + requires=("xstart", "zstart"), + xor=_xor_start, + ) zstart = traits.Float( - desc='Start point along the Z dimension. Default value: 1.79769e+308.', - argstr='-zstart %s', - requires=('xstart', 'ystart'), - xor=_xor_start) + desc="Start point along the Z dimension. Default value: 1.79769e+308.", + argstr="-zstart %s", + requires=("xstart", "ystart"), + xor=_xor_start, + ) # dircos along each dimension - _xor_dircos = ('dircos', 'dircos_x_y_or_z') + _xor_dircos = ("dircos", "dircos_x_y_or_z") - dircos = traits.Tuple( + dircos = Tuple( traits.Float, traits.Float, traits.Float, desc=( - 'Direction cosines along each dimension (X, Y, Z). Default value:' - '1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 ...' - ' 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308.' + "Direction cosines along each dimension (X, Y, Z). Default value:" + "1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 ..." + " 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308." ), - argstr='-dircos %s %s %s', - xor=_xor_nelements) + argstr="-dircos %s %s %s", + xor=_xor_nelements, + ) # FIXME Use the requires=...? xdircos = traits.Float( - desc=('Direction cosines along the X dimension.' - 'Default value: 1.79769e+308 1.79769e+308 1.79769e+308.'), - argstr='-xdircos %s', - requires=('ydircos', 'zdircos'), - xor=_xor_dircos) + desc=( + "Direction cosines along the X dimension." + "Default value: 1.79769e+308 1.79769e+308 1.79769e+308." + ), + argstr="-xdircos %s", + requires=("ydircos", "zdircos"), + xor=_xor_dircos, + ) ydircos = traits.Float( - desc=('Direction cosines along the Y dimension.' - 'Default value: 1.79769e+308 1.79769e+308 1.79769e+308.'), - argstr='-ydircos %s', - requires=('xdircos', 'zdircos'), - xor=_xor_dircos) + desc=( + "Direction cosines along the Y dimension." + "Default value: 1.79769e+308 1.79769e+308 1.79769e+308." + ), + argstr="-ydircos %s", + requires=("xdircos", "zdircos"), + xor=_xor_dircos, + ) zdircos = traits.Float( - desc=('Direction cosines along the Z dimension.' - 'Default value: 1.79769e+308 1.79769e+308 1.79769e+308.'), - argstr='-zdircos %s', - requires=('xdircos', 'ydircos'), - xor=_xor_dircos) + desc=( + "Direction cosines along the Z dimension." + "Default value: 1.79769e+308 1.79769e+308 1.79769e+308." 
+ ), + argstr="-zdircos %s", + requires=("xdircos", "ydircos"), + xor=_xor_dircos, + ) class ResampleOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Resample(StdOutCommandLine): @@ -2544,7 +2702,7 @@ class Resample(StdOutCommandLine): input_spec = ResampleInputSpec output_spec = ResampleOutputSpec - _cmd = 'mincresample' + _cmd = "mincresample" class NormInputSpec(CommandLineInputSpec): @@ -2562,89 +2720,95 @@ class NormInputSpec(CommandLineInputSpec): """ input_file = File( - desc='input file to normalise', + desc="input file to normalise", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_norm.mnc') + name_template="%s_norm.mnc", + ) - output_threshold_mask = traits.File( - desc='File in which to store the threshold mask.', - argstr='-threshold_mask %s', - name_source=['input_file'], + output_threshold_mask = File( + desc="File in which to store the threshold mask.", + argstr="-threshold_mask %s", + name_source=["input_file"], hash_files=False, - name_template='%s_norm_threshold_mask.mnc') + name_template="%s_norm_threshold_mask.mnc", + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) # Normalisation Options - mask = traits.File( - desc='Calculate the image normalisation within a mask.', - argstr='-mask %s', - exists=True) + mask = File( + desc="Calculate the image normalisation within a mask.", + argstr="-mask %s", + exists=True, + ) clamp = traits.Bool( - desc='Force the ouput range between limits [default].', - argstr='-clamp', + desc="Force the output range between limits [default].", + argstr="-clamp", usedefault=True, - default_value=True) + default_value=True, + ) cutoff = traits.Range( low=0.0, high=100.0, - desc= - 'Cutoff value to use to calculate thresholds by a histogram PcT in %. [default: 0.01]', - argstr='-cutoff %s', + desc="Cutoff value to use to calculate thresholds by a histogram PcT in %. [default: 0.01]", + argstr="-cutoff %s", ) - lower = traits.Float(desc='Lower real value to use.', argstr='-lower %s') - upper = traits.Float(desc='Upper real value to use.', argstr='-upper %s') + lower = traits.Float(desc="Lower real value to use.", argstr="-lower %s") + upper = traits.Float(desc="Upper real value to use.", argstr="-upper %s") out_floor = traits.Float( - desc='Output files maximum [default: 0]', - argstr='-out_floor %s') # FIXME is this a float? + desc="Output files maximum [default: 0]", argstr="-out_floor %s" + ) # FIXME is this a float? out_ceil = traits.Float( - desc='Output files minimum [default: 100]', - argstr='-out_ceil %s') # FIXME is this a float? + desc="Output files minimum [default: 100]", argstr="-out_ceil %s" + ) # FIXME is this a float? 
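Putting the normalisation options above together (the threshold options follow next); the values shown simply restate the documented defaults and are purely illustrative:

>>> from nipype.interfaces.minc import Norm
>>> from nipype.interfaces.minc.testdata import nonempty_minc_data
>>> norm = Norm(input_file=nonempty_minc_data(0),
...             cutoff=0.01, out_floor=0.0, out_ceil=100.0)
>>> norm.run()  # doctest: +SKIP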
# Threshold Options threshold = traits.Bool( - desc= - 'Threshold the image (set values below threshold_perc to -out_floor).', - argstr='-threshold') + desc="Threshold the image (set values below threshold_perc to -out_floor).", + argstr="-threshold", + ) threshold_perc = traits.Range( low=0.0, high=100.0, - desc= - 'Threshold percentage (0.1 == lower 10% of intensity range) [default: 0.1].', - argstr='-threshold_perc %s') + desc="Threshold percentage (0.1 == lower 10% of intensity range) [default: 0.1].", + argstr="-threshold_perc %s", + ) threshold_bmt = traits.Bool( - desc='Use the resulting image BiModalT as the threshold.', - argstr='-threshold_bmt') + desc="Use the resulting image BiModalT as the threshold.", + argstr="-threshold_bmt", + ) threshold_blur = traits.Float( - desc='Blur FWHM for intensity edges then thresholding [default: 2].', - argstr='-threshold_blur %s') + desc="Blur FWHM for intensity edges then thresholding [default: 2].", + argstr="-threshold_blur %s", + ) class NormOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - output_threshold_mask = File(desc='threshold mask file') + output_file = File(desc="output file", exists=True) + output_threshold_mask = File(desc="threshold mask file") class Norm(CommandLine): @@ -2662,7 +2826,7 @@ class Norm(CommandLine): input_spec = NormInputSpec output_spec = NormOutputSpec - _cmd = 'mincnorm' + _cmd = "mincnorm" """ @@ -2703,50 +2867,53 @@ class VolcentreInputSpec(CommandLineInputSpec): """ input_file = File( - desc='input file to centre', + desc="input file to centre", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_volcentre.mnc') + name_template="%s_volcentre.mnc", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) com = traits.Bool( - desc= - 'Use the CoM of the volume for the new centre (via mincstats). Default: False', - argstr='-com') + desc="Use the CoM of the volume for the new centre (via mincstats). 
Default: False", + argstr="-com", + ) - centre = traits.Tuple( + centre = Tuple( traits.Float, traits.Float, traits.Float, - argstr='-centre %s %s %s', - desc='Centre to use (x,y,z) [default: 0 0 0].', + argstr="-centre %s %s %s", + desc="Centre to use (x,y,z) [default: 0 0 0].", ) zero_dircos = traits.Bool( - desc='Set the direction cosines to identity [default].', - argstr='-zero_dircos') + desc="Set the direction cosines to identity [default].", argstr="-zero_dircos" + ) class VolcentreOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Volcentre(CommandLine): @@ -2763,7 +2930,7 @@ class Volcentre(CommandLine): input_spec = VolcentreInputSpec output_spec = VolcentreOutputSpec - _cmd = 'volcentre' + _cmd = "volcentre" class VolpadInputSpec(CommandLineInputSpec): @@ -2791,55 +2958,59 @@ class VolpadInputSpec(CommandLineInputSpec): """ input_file = File( - desc='input file to centre', + desc="input file to centre", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_volpad.mnc') + name_template="%s_volpad.mnc", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) auto = traits.Bool( - desc= - 'Automatically determine padding distances (uses -distance as max). Default: False.', - argstr='-auto') + desc="Automatically determine padding distances (uses -distance as max). Default: False.", + argstr="-auto", + ) auto_freq = traits.Float( - desc= - 'Frequency of voxels over bimodalt threshold to stop at [default: 500].', - argstr='-auto_freq %s') + desc="Frequency of voxels over bimodalt threshold to stop at [default: 500].", + argstr="-auto_freq %s", + ) distance = traits.Int( - desc='Padding distance (in voxels) [default: 4].', - argstr='-distance %s') + desc="Padding distance (in voxels) [default: 4].", argstr="-distance %s" + ) smooth = traits.Bool( - desc='Smooth (blur) edges before padding. Default: False.', - argstr='-smooth') + desc="Smooth (blur) edges before padding. 
Default: False.", argstr="-smooth" + ) smooth_distance = traits.Int( - desc='Smoothing distance (in voxels) [default: 4].', - argstr='-smooth_distance %s') + desc="Smoothing distance (in voxels) [default: 4].", + argstr="-smooth_distance %s", + ) class VolpadOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Volpad(CommandLine): @@ -2856,52 +3027,56 @@ class Volpad(CommandLine): input_spec = VolpadInputSpec output_spec = VolpadOutputSpec - _cmd = 'volpad' + _cmd = "volpad" class VolisoInputSpec(CommandLineInputSpec): - input_file = File( - desc='input file to convert to isotropic sampling', + desc="input file to convert to isotropic sampling", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_voliso.mnc') + name_template="%s_voliso.mnc", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='--verbose') + desc="Print out log messages. Default: False.", argstr="--verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='--clobber', + desc="Overwrite existing file.", + argstr="--clobber", usedefault=True, - default_value=True) + default_value=True, + ) maxstep = traits.Float( - desc='The target maximum step desired in the output volume.', - argstr='--maxstep %s') + desc="The target maximum step desired in the output volume.", + argstr="--maxstep %s", + ) minstep = traits.Float( - desc='The target minimum step desired in the output volume.', - argstr='--minstep %s') + desc="The target minimum step desired in the output volume.", + argstr="--minstep %s", + ) avgstep = traits.Bool( - desc= - 'Calculate the maximum step from the average steps of the input volume.', - argstr='--avgstep') + desc="Calculate the maximum step from the average steps of the input volume.", + argstr="--avgstep", + ) class VolisoOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Voliso(CommandLine): @@ -2919,42 +3094,41 @@ class Voliso(CommandLine): input_spec = VolisoInputSpec output_spec = VolisoOutputSpec - _cmd = 'voliso' + _cmd = "voliso" class GennlxfmInputSpec(CommandLineInputSpec): output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['like'], + name_source=["like"], hash_files=False, - name_template='%s_gennlxfm.xfm') + name_template="%s_gennlxfm.xfm", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) ident = traits.Bool( - desc='Generate an identity xfm. Default: False.', argstr='-ident') - step = traits.Int( - desc='Output ident xfm step [default: 1].', argstr='-step %s') - - like = File( - desc='Generate a nlxfm like this file.', - exists=True, - argstr='-like %s', + desc="Generate an identity xfm. 
Default: False.", argstr="-ident" ) + step = traits.Int(desc="Output ident xfm step [default: 1].", argstr="-step %s") + + like = File(desc="Generate a nlxfm like this file.", exists=True, argstr="-like %s") class GennlxfmOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - output_grid = File(desc='output grid', exists=True) + output_file = File(desc="output file", exists=True) + output_grid = File(desc="output grid", exists=True) class Gennlxfm(CommandLine): @@ -2977,51 +3151,53 @@ class Gennlxfm(CommandLine): input_spec = GennlxfmInputSpec output_spec = GennlxfmOutputSpec - _cmd = 'gennlxfm' + _cmd = "gennlxfm" def _list_outputs(self): - outputs = super(Gennlxfm, self)._list_outputs() - outputs['output_grid'] = re.sub('.(nlxfm|xfm)$', '_grid_0.mnc', - outputs['output_file']) + outputs = super()._list_outputs() + outputs["output_grid"] = re.sub( + ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] + ) return outputs class XfmConcatInputSpec(CommandLineInputSpec): input_files = InputMultiPath( - traits.File(exists=True), - desc='input file(s)', + File(exists=True), + desc="input file(s)", mandatory=True, - sep=' ', - argstr='%s', - position=-2) + sep=" ", + argstr="%s", + position=-2, + ) # This is a dummy input. - input_grid_files = InputMultiPath( - traits.File, - desc='input grid file(s)', - ) + input_grid_files = InputMultiPath(File, desc="input grid file(s)") output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_files'], + name_source=["input_files"], hash_files=False, - name_template='%s_xfmconcat.xfm') + name_template="%s_xfmconcat.xfm", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. 
Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) class XfmConcatOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - output_grids = OutputMultiPath(File(exists=True), desc='output grids') + output_file = File(desc="output file", exists=True) + output_grids = OutputMultiPath(File(exists=True), desc="output grids") class XfmConcat(CommandLine): @@ -3040,64 +3216,60 @@ class XfmConcat(CommandLine): input_spec = XfmConcatInputSpec output_spec = XfmConcatOutputSpec - _cmd = 'xfmconcat' + _cmd = "xfmconcat" def _list_outputs(self): - outputs = super(XfmConcat, self)._list_outputs() + outputs = super()._list_outputs() - if os.path.exists(outputs['output_file']): - if 'grid' in open(outputs['output_file'], 'r').read(): - outputs['output_grids'] = glob.glob( - re.sub('.(nlxfm|xfm)$', '_grid_*.mnc', - outputs['output_file'])) + if os.path.exists(outputs["output_file"]): + if "grid" in open(outputs["output_file"]).read(): + outputs["output_grids"] = glob.glob( + re.sub(".(nlxfm|xfm)$", "_grid_*.mnc", outputs["output_file"]) + ) return outputs class BestLinRegInputSpec(CommandLineInputSpec): source = File( - desc='source Minc file', - exists=True, - mandatory=True, - argstr='%s', - position=-4, + desc="source Minc file", exists=True, mandatory=True, argstr="%s", position=-4 ) target = File( - desc='target Minc file', - exists=True, - mandatory=True, - argstr='%s', - position=-3, + desc="target Minc file", exists=True, mandatory=True, argstr="%s", position=-3 ) output_xfm = File( - desc='output xfm file', + desc="output xfm file", genfile=True, - argstr='%s', + argstr="%s", position=-2, - name_source=['source'], + name_source=["source"], hash_files=False, - name_template='%s_bestlinreg.xfm', - keep_extension=False) + name_template="%s_bestlinreg.xfm", + keep_extension=False, + ) output_mnc = File( - desc='output mnc file', + desc="output mnc file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['source'], + name_source=["source"], hash_files=False, - name_template='%s_bestlinreg.mnc', - keep_extension=False) + name_template="%s_bestlinreg.mnc", + keep_extension=False, + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. 
Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME Very bare implementation, none of these are done yet: """ @@ -3111,8 +3283,8 @@ class BestLinRegInputSpec(CommandLineInputSpec): class BestLinRegOutputSpec(TraitedSpec): - output_xfm = File(desc='output xfm file', exists=True) - output_mnc = File(desc='output mnc file', exists=True) + output_xfm = File(desc="output xfm file", exists=True) + output_mnc = File(desc="output mnc file", exists=True) class BestLinReg(CommandLine): @@ -3136,69 +3308,58 @@ class BestLinReg(CommandLine): input_spec = BestLinRegInputSpec output_spec = BestLinRegOutputSpec - _cmd = 'bestlinreg' + _cmd = "bestlinreg" class NlpFitInputSpec(CommandLineInputSpec): source = File( - desc='source Minc file', - exists=True, - mandatory=True, - argstr='%s', - position=-3, + desc="source Minc file", exists=True, mandatory=True, argstr="%s", position=-3 ) target = File( - desc='target Minc file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="target Minc file", exists=True, mandatory=True, argstr="%s", position=-2 ) - output_xfm = File( - desc='output xfm file', - genfile=True, - argstr='%s', - position=-1, - ) + output_xfm = File(desc="output xfm file", genfile=True, argstr="%s", position=-1) # This is a dummy input. - input_grid_files = InputMultiPath( - traits.File, - desc='input grid file(s)', - ) + input_grid_files = InputMultiPath(File, desc="input grid file(s)") config_file = File( - desc='File containing the fitting configuration use.', - argstr='-config_file %s', + desc="File containing the fitting configuration use.", + argstr="-config_file %s", mandatory=True, - exists=True) + exists=True, + ) init_xfm = File( - desc='Initial transformation (default identity).', - argstr='-init_xfm %s', + desc="Initial transformation (default identity).", + argstr="-init_xfm %s", mandatory=True, - exists=True) + exists=True, + ) source_mask = File( - desc='Source mask to use during fitting.', - argstr='-source_mask %s', + desc="Source mask to use during fitting.", + argstr="-source_mask %s", mandatory=True, - exists=True) + exists=True, + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. 
Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) class NlpFitOutputSpec(TraitedSpec): - output_xfm = File(desc='output xfm file', exists=True) - output_grid = File(desc='output grid file', exists=True) + output_xfm = File(desc="output xfm file", exists=True) + output_grid = File(desc="output grid file", exists=True) class NlpFit(CommandLine): @@ -3226,80 +3387,82 @@ class NlpFit(CommandLine): input_spec = NlpFitInputSpec output_spec = NlpFitOutputSpec - _cmd = 'nlpfit' + _cmd = "nlpfit" def _gen_filename(self, name): - if name == 'output_xfm': + if name == "output_xfm": output_xfm = self.inputs.output_xfm if isdefined(output_xfm): return os.path.abspath(output_xfm) else: - return aggregate_filename( - [self.inputs.source, self.inputs.target], - 'nlpfit_xfm_output') + '.xfm' + return ( + aggregate_filename( + [self.inputs.source, self.inputs.target], "nlpfit_xfm_output" + ) + + ".xfm" + ) else: - raise NotImplemented + raise NotImplementedError def _list_outputs(self): outputs = self.output_spec().get() - outputs['output_xfm'] = os.path.abspath( - self._gen_filename('output_xfm')) + outputs["output_xfm"] = os.path.abspath(self._gen_filename("output_xfm")) - assert os.path.exists(outputs['output_xfm']) - if 'grid' in open(outputs['output_xfm'], 'r').read(): - outputs['output_grid'] = re.sub('.(nlxfm|xfm)$', '_grid_0.mnc', - outputs['output_xfm']) + assert os.path.exists(outputs["output_xfm"]) + if "grid" in open(outputs["output_xfm"]).read(): + outputs["output_grid"] = re.sub( + ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_xfm"] + ) return outputs class XfmAvgInputSpec(CommandLineInputSpec): input_files = InputMultiPath( - traits.File(exists=True), - desc='input file(s)', + File(exists=True), + desc="input file(s)", mandatory=True, - sep=' ', - argstr='%s', - position=-2) + sep=" ", + argstr="%s", + position=-2, + ) # This is a dummy input. - input_grid_files = InputMultiPath( - traits.File, - desc='input grid file(s)', - ) + input_grid_files = InputMultiPath(File, desc="input grid file(s)") - output_file = File( - desc='output file', - genfile=True, - argstr='%s', - position=-1, - ) + output_file = File(desc="output file", genfile=True, argstr="%s", position=-1) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. 
Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME xor these: avg_linear = traits.Bool( - desc='average the linear part [default].', argstr='-avg_linear') + desc="average the linear part [default].", argstr="-avg_linear" + ) avg_nonlinear = traits.Bool( - desc='average the non-linear part [default].', argstr='-avg_nonlinear') + desc="average the non-linear part [default].", argstr="-avg_nonlinear" + ) ignore_linear = traits.Bool( - desc='opposite of -avg_linear.', argstr='-ignore_linear') + desc="opposite of -avg_linear.", argstr="-ignore_linear" + ) ignore_nonlinear = traits.Bool( - desc='opposite of -avg_nonlinear.', argstr='-ignore_nonline') + desc="opposite of -avg_nonlinear.", argstr="-ignore_nonline" + ) class XfmAvgOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - output_grid = File(desc='output grid file', exists=True) + output_file = File(desc="output file", exists=True) + output_grid = File(desc="output grid file", exists=True) class XfmAvg(CommandLine): @@ -3325,62 +3488,59 @@ class XfmAvg(CommandLine): input_spec = XfmAvgInputSpec output_spec = XfmAvgOutputSpec - _cmd = 'xfmavg' + _cmd = "xfmavg" def _gen_filename(self, name): - if name == 'output_file': + if name == "output_file": output_file = self.inputs.output_file if isdefined(output_file): return os.path.abspath(output_file) else: - return aggregate_filename(self.inputs.input_files, - 'xfmavg_output') + '.xfm' + return ( + aggregate_filename(self.inputs.input_files, "xfmavg_output") + + ".xfm" + ) else: - raise NotImplemented + raise NotImplementedError def _gen_outfilename(self): - return self._gen_filename('output_file') + return self._gen_filename("output_file") def _list_outputs(self): outputs = self.output_spec().get() - outputs['output_file'] = os.path.abspath(self._gen_outfilename()) + outputs["output_file"] = os.path.abspath(self._gen_outfilename()) - assert os.path.exists(outputs['output_file']) - if 'grid' in open(outputs['output_file'], 'r').read(): - outputs['output_grid'] = re.sub('.(nlxfm|xfm)$', '_grid_0.mnc', - outputs['output_file']) + assert os.path.exists(outputs["output_file"]) + if "grid" in open(outputs["output_file"]).read(): + outputs["output_grid"] = re.sub( + ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] + ) return outputs class XfmInvertInputSpec(CommandLineInputSpec): - input_file = traits.File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2) - - output_file = File( - desc='output file', - genfile=True, - argstr='%s', - position=-1, + input_file = File( + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) + output_file = File(desc="output file", genfile=True, argstr="%s", position=-1) + verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. 
Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) class XfmInvertOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - output_grid = File(desc='output grid file', exists=True) + output_file = File(desc="output file", exists=True) + output_grid = File(desc="output grid file", exists=True) class XfmInvert(CommandLine): @@ -3399,86 +3559,98 @@ class XfmInvert(CommandLine): input_spec = XfmInvertInputSpec output_spec = XfmInvertOutputSpec - _cmd = 'xfminvert' + _cmd = "xfminvert" def _gen_filename(self, name): - if name == 'output_file': + if name == "output_file": output_file = self.inputs.output_file if isdefined(output_file): return os.path.abspath(output_file) else: - return aggregate_filename([self.inputs.input_file], - 'xfminvert_output') + '.xfm' + return ( + aggregate_filename([self.inputs.input_file], "xfminvert_output") + + ".xfm" + ) else: - raise NotImplemented + raise NotImplementedError def _gen_outfilename(self): - return self._gen_filename('output_file') + return self._gen_filename("output_file") def _list_outputs(self): outputs = self.output_spec().get() - outputs['output_file'] = os.path.abspath(self._gen_outfilename()) + outputs["output_file"] = os.path.abspath(self._gen_outfilename()) - assert os.path.exists(outputs['output_file']) - if 'grid' in open(outputs['output_file'], 'r').read(): - outputs['output_grid'] = re.sub('.(nlxfm|xfm)$', '_grid_0.mnc', - outputs['output_file']) + assert os.path.exists(outputs["output_file"]) + if "grid" in open(outputs["output_file"]).read(): + outputs["output_grid"] = re.sub( + ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] + ) return outputs class BigAverageInputSpec(CommandLineInputSpec): input_files = InputMultiPath( - traits.File(exists=True), - desc='input file(s)', + File(exists=True), + desc="input file(s)", mandatory=True, - sep=' ', - argstr='%s', - position=-2) + sep=" ", + argstr="%s", + position=-2, + ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_files'], + name_source=["input_files"], hash_files=False, - name_template='%s_bigaverage.mnc') + name_template="%s_bigaverage.mnc", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='--verbose') + desc="Print out log messages. Default: False.", argstr="--verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='--clobber', + desc="Overwrite existing file.", + argstr="--clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME Redumentary implementation, various parameters not implemented. # TODO! output_float = traits.Bool( - desc='Output files with float precision.', argstr='--float') + desc="Output files with float precision.", argstr="--float" + ) robust = traits.Bool( - desc=('Perform robust averaging, features that are outside 1 standard' - 'deviation from the mean are downweighted. Works well for noisy' - 'data with artifacts. see the --tmpdir option if you have a' - 'large number of input files.'), - argstr='-robust') + desc=( + "Perform robust averaging, features that are outside 1 standard" + "deviation from the mean are downweighted. Works well for noisy" + "data with artifacts. see the --tmpdir option if you have a" + "large number of input files." 
+ ), + argstr="-robust", + ) # Should Nipype deal with where the temp directory is? - tmpdir = Directory(desc='temporary files directory', argstr='-tmpdir %s') + tmpdir = Directory(desc="temporary files directory", argstr="-tmpdir %s") sd_file = File( - desc='Place standard deviation image in specified file.', - argstr='--sdfile %s', - name_source=['input_files'], + desc="Place standard deviation image in specified file.", + argstr="--sdfile %s", + name_source=["input_files"], hash_files=False, - name_template='%s_bigaverage_stdev.mnc') + name_template="%s_bigaverage_stdev.mnc", + ) class BigAverageOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - sd_file = File(desc='standard deviation image', exists=True) + output_file = File(desc="output file", exists=True) + sd_file = File(desc="standard deviation image", exists=True) class BigAverage(CommandLine): @@ -3516,42 +3688,41 @@ class BigAverage(CommandLine): input_spec = BigAverageInputSpec output_spec = BigAverageOutputSpec - _cmd = 'mincbigaverage' + _cmd = "mincbigaverage" class ReshapeInputSpec(CommandLineInputSpec): - input_file = traits.File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2) + input_file = File( + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 + ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_reshape.mnc') + name_template="%s_reshape.mnc", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME MANY options not implemented! - write_short = traits.Bool( - desc='Convert to short integer data.', argstr='-short') + write_short = traits.Bool(desc="Convert to short integer data.", argstr="-short") class ReshapeOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Reshape(CommandLine): @@ -3574,78 +3745,76 @@ class Reshape(CommandLine): input_spec = ReshapeInputSpec output_spec = ReshapeOutputSpec - _cmd = 'mincreshape' + _cmd = "mincreshape" class VolSymmInputSpec(CommandLineInputSpec): - input_file = traits.File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-3) + input_file = File( + desc="input file", exists=True, mandatory=True, argstr="%s", position=-3 + ) - trans_file = traits.File( - desc='output xfm trans file', + trans_file = File( + desc="output xfm trans file", genfile=True, - argstr='%s', + argstr="%s", position=-2, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_vol_symm.xfm', - keep_extension=False) + name_template="%s_vol_symm.xfm", + keep_extension=False, + ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_vol_symm.mnc') + name_template="%s_vol_symm.mnc", + ) # This is a dummy input. 
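
    # (Aside, not part of the diff: "dummy" means the trait has no argstr and
    # never reaches the volsymm command line; it exists so a workflow can pass
    # the _grid_0.mnc displacement volumes alongside a nonlinear .xfm, keeping
    # them staged and hashed together with the real inputs.)
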
- input_grid_files = InputMultiPath( - traits.File, - desc='input grid file(s)', - ) + input_grid_files = InputMultiPath(File, desc="input grid file(s)") verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME MANY options not implemented! - fit_linear = traits.Bool(desc='Fit using a linear xfm.', argstr='-linear') - fit_nonlinear = traits.Bool( - desc='Fit using a non-linear xfm.', argstr='-nonlinear') + fit_linear = traits.Bool(desc="Fit using a linear xfm.", argstr="-linear") + fit_nonlinear = traits.Bool(desc="Fit using a non-linear xfm.", argstr="-nonlinear") # FIXME This changes the input/output behaviour of trans_file! Split into # two separate interfaces? nofit = traits.Bool( - desc='Use the input transformation instead of generating one.', - argstr='-nofit') + desc="Use the input transformation instead of generating one.", argstr="-nofit" + ) config_file = File( - desc= - 'File containing the fitting configuration (nlpfit -help for info).', - argstr='-config_file %s', - exists=True) + desc="File containing the fitting configuration (nlpfit -help for info).", + argstr="-config_file %s", + exists=True, + ) - x = traits.Bool(desc='Flip volume in x-plane (default).', argstr='-x') - y = traits.Bool(desc='Flip volume in y-plane.', argstr='-y') - z = traits.Bool(desc='Flip volume in z-plane.', argstr='-z') + x = traits.Bool(desc="Flip volume in x-plane (default).", argstr="-x") + y = traits.Bool(desc="Flip volume in y-plane.", argstr="-y") + z = traits.Bool(desc="Flip volume in z-plane.", argstr="-z") class VolSymmOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - trans_file = File(desc='xfm trans file', exists=True) + output_file = File(desc="output file", exists=True) + trans_file = File(desc="xfm trans file", exists=True) output_grid = File( - desc='output grid file', exists=True) # FIXME Is exists=True correct? + desc="output grid file", exists=True + ) # FIXME Is exists=True correct? class VolSymm(CommandLine): @@ -3671,15 +3840,16 @@ class VolSymm(CommandLine): input_spec = VolSymmInputSpec output_spec = VolSymmOutputSpec - _cmd = 'volsymm' + _cmd = "volsymm" def _list_outputs(self): - outputs = super(VolSymm, self)._list_outputs() + outputs = super()._list_outputs() # Have to manually check for the grid files. 
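
        # (Aside, not part of the diff: a MINC .xfm is a small text file, and
        # a nonlinear transform embeds a reference to a displacement-grid
        # volume, so scanning the text for "grid" detects that case; the
        # sidecar is conventionally named <basename>_grid_0.mnc, which is what
        # the re.sub below reconstructs.)
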
- if os.path.exists(outputs['trans_file']): - if 'grid' in open(outputs['trans_file'], 'r').read(): - outputs['output_grid'] = re.sub('.(nlxfm|xfm)$', '_grid_0.mnc', - outputs['trans_file']) + if os.path.exists(outputs["trans_file"]): + if "grid" in open(outputs["trans_file"]).read(): + outputs["output_grid"] = re.sub( + ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["trans_file"] + ) return outputs diff --git a/nipype/interfaces/minc/testdata.py b/nipype/interfaces/minc/testdata.py index 0a8d6bfd88..1d2ff36d6f 100644 --- a/nipype/interfaces/minc/testdata.py +++ b/nipype/interfaces/minc/testdata.py @@ -1,18 +1,10 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -import os from ...testing import example_data -minc2Dfile = example_data('minc_test_2D_00.mnc') -minc3Dfile = example_data('minc_test_3D_00.mnc') +minc2Dfile = example_data("minc_test_2D_00.mnc") +minc3Dfile = example_data("minc_test_3D_00.mnc") -nlp_config = example_data('minc_nlp.conf') +nlp_config = example_data("minc_nlp.conf") -def nonempty_minc_data(i, shape='2D'): - return example_data('minc_test_%s_%.2d.mnc' % ( - shape, - i, - )) +def nonempty_minc_data(i, shape="2D"): + return example_data("minc_test_%s_%.2d.mnc" % (shape, i)) diff --git a/nipype/interfaces/minc/tests/__init__.py b/nipype/interfaces/minc/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/minc/tests/__init__.py +++ b/nipype/interfaces/minc/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/minc/tests/test_auto_Average.py b/nipype/interfaces/minc/tests/test_auto_Average.py index 57bdc5ccd2..7017967d61 100644 --- a/nipype/interfaces/minc/tests/test_auto_Average.py +++ b/nipype/interfaces/minc/tests/test_auto_Average.py @@ -1,144 +1,236 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Average def test_Average_inputs(): input_map = dict( - args=dict(argstr='%s', ), - avgdim=dict(argstr='-avgdim %s', ), - binarize=dict(argstr='-binarize', ), - binrange=dict(argstr='-binrange %s %s', ), - binvalue=dict(argstr='-binvalue %s', ), + args=dict( + argstr="%s", + ), + avgdim=dict( + argstr="-avgdim %s", + ), + binarize=dict( + argstr="-binarize", + ), + binrange=dict( + argstr="-binrange %s %s", + ), + binvalue=dict( + argstr="-binvalue %s", + ), check_dimensions=dict( - argstr='-check_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + argstr="-check_dimensions", + xor=("check_dimensions", "no_check_dimensions"), ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), copy_header=dict( - argstr='-copy_header', - xor=('copy_header', 'no_copy_header'), + argstr="-copy_header", + xor=("copy_header", "no_copy_header"), + ), + debug=dict( + argstr="-debug", ), - debug=dict(argstr='-debug', ), environ=dict( nohash=True, usedefault=True, ), filelist=dict( - argstr='-filelist %s', + argstr="-filelist %s", + extensions=None, mandatory=True, - xor=('input_files', 'filelist'), + xor=("input_files", "filelist"), ), format_byte=dict( - argstr='-byte', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-byte", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_double=dict( - argstr='-double', - 
xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-double", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_filetype=dict( - argstr='-filetype', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-filetype", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_float=dict( - argstr='-float', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-float", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_int=dict( - argstr='-int', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-int", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_long=dict( - argstr='-long', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-long", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_short=dict( - argstr='-short', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-short", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_signed=dict( - argstr='-signed', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-signed", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_unsigned=dict( - argstr='-unsigned', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-unsigned", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), input_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, - sep=' ', - xor=('input_files', 'filelist'), + sep=" ", + xor=("input_files", "filelist"), ), max_buffer_size_in_kb=dict( - argstr='-max_buffer_size_in_kb %d', + argstr="-max_buffer_size_in_kb %d", usedefault=True, ), no_check_dimensions=dict( - argstr='-nocheck_dimensions', - xor=('check_dimensions', 
'no_check_dimensions'), + argstr="-nocheck_dimensions", + xor=("check_dimensions", "no_check_dimensions"), ), no_copy_header=dict( - argstr='-nocopy_header', - xor=('copy_header', 'no_copy_header'), + argstr="-nocopy_header", + xor=("copy_header", "no_copy_header"), ), nonormalize=dict( - argstr='-nonormalize', - xor=('normalize', 'nonormalize'), + argstr="-nonormalize", + xor=("normalize", "nonormalize"), ), normalize=dict( - argstr='-normalize', - xor=('normalize', 'nonormalize'), + argstr="-normalize", + xor=("normalize", "nonormalize"), ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_files'], - name_template='%s_averaged.mnc', + name_source=["input_files"], + name_template="%s_averaged.mnc", position=-1, ), quiet=dict( - argstr='-quiet', - xor=('verbose', 'quiet'), + argstr="-quiet", + xor=("verbose", "quiet"), + ), + sdfile=dict( + argstr="-sdfile %s", + extensions=None, + ), + two=dict( + argstr="-2", ), - sdfile=dict(argstr='-sdfile %s', ), - two=dict(argstr='-2', ), verbose=dict( - argstr='-verbose', - xor=('verbose', 'quiet'), + argstr="-verbose", + xor=("verbose", "quiet"), + ), + voxel_range=dict( + argstr="-range %d %d", ), - voxel_range=dict(argstr='-range %d %d', ), weights=dict( - argstr='-weights %s', - sep=',', + argstr="-weights %s", + sep=",", ), width_weighted=dict( - argstr='-width_weighted', - requires=('avgdim', ), + argstr="-width_weighted", + requires=("avgdim",), ), ) inputs = Average.input_spec() @@ -146,8 +238,14 @@ def test_Average_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Average_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Average.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_BBox.py b/nipype/interfaces/minc/tests/test_auto_BBox.py index a34aeab9b5..532cb14d5f 100644 --- a/nipype/interfaces/minc/tests/test_auto_BBox.py +++ b/nipype/interfaces/minc/tests/test_auto_BBox.py @@ -1,43 +1,55 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import BBox def test_BBox_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - format_minccrop=dict(argstr='-minccrop', ), - format_mincresample=dict(argstr='-mincresample', ), - format_mincreshape=dict(argstr='-mincreshape', ), + format_minccrop=dict( + argstr="-minccrop", + ), + format_mincresample=dict( + argstr="-mincresample", + ), + format_mincreshape=dict( + argstr="-mincreshape", + ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), one_line=dict( - argstr='-one_line', - xor=('one_line', 'two_lines'), + argstr="-one_line", + xor=("one_line", "two_lines"), ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), output_file=dict( + extensions=None, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s_bbox.txt', + name_source=["input_file"], + name_template="%s_bbox.txt", position=-1, ), - threshold=dict(argstr='-threshold', ), + threshold=dict( + argstr="-threshold", + ), two_lines=dict( - argstr='-two_lines', - xor=('one_line', 'two_lines'), + argstr="-two_lines", + xor=("one_line", "two_lines"), ), 
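
        # (Aside, not part of the diff: BBox is a stdout-style interface --
        # out_file's argstr "> %s" captures standard output via shell
        # redirection, while output_file merely supplies the redirect target
        # name through name_template "%s_bbox.txt".)
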
) inputs = BBox.input_spec() @@ -45,8 +57,14 @@ def test_BBox_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BBox_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = BBox.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Beast.py b/nipype/interfaces/minc/tests/test_auto_Beast.py index 4834cf3c4a..487550a33a 100644 --- a/nipype/interfaces/minc/tests/test_auto_Beast.py +++ b/nipype/interfaces/minc/tests/test_auto_Beast.py @@ -1,74 +1,94 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Beast def test_Beast_inputs(): input_map = dict( abspath=dict( - argstr='-abspath', + argstr="-abspath", usedefault=True, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), confidence_level_alpha=dict( - argstr='-alpha %s', + argstr="-alpha %s", usedefault=True, ), - configuration_file=dict(argstr='-configuration %s', ), + configuration_file=dict( + argstr="-configuration %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), - fill_holes=dict(argstr='-fill', ), - flip_images=dict(argstr='-flip', ), + fill_holes=dict( + argstr="-fill", + ), + flip_images=dict( + argstr="-flip", + ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), library_dir=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-3, ), - load_moments=dict(argstr='-load_moments', ), - median_filter=dict(argstr='-median', ), - nlm_filter=dict(argstr='-nlm_filter', ), + load_moments=dict( + argstr="-load_moments", + ), + median_filter=dict( + argstr="-median", + ), + nlm_filter=dict( + argstr="-nlm_filter", + ), number_selected_images=dict( - argstr='-selection_num %s', + argstr="-selection_num %s", usedefault=True, ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, - name_source=['input_file'], - name_template='%s_beast_mask.mnc', + name_source=["input_file"], + name_template="%s_beast_mask.mnc", position=-1, ), patch_size=dict( - argstr='-patch_size %s', + argstr="-patch_size %s", usedefault=True, ), - probability_map=dict(argstr='-probability', ), - same_resolution=dict(argstr='-same_resolution', ), + probability_map=dict( + argstr="-probability", + ), + same_resolution=dict( + argstr="-same_resolution", + ), search_area=dict( - argstr='-search_area %s', + argstr="-search_area %s", usedefault=True, ), smoothness_factor_beta=dict( - argstr='-beta %s', + argstr="-beta %s", usedefault=True, ), threshold_patch_selection=dict( - argstr='-threshold %s', + argstr="-threshold %s", usedefault=True, ), voxel_size=dict( - argstr='-voxel_size %s', + argstr="-voxel_size %s", usedefault=True, ), ) @@ -77,8 +97,14 @@ def test_Beast_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Beast_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Beast.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_BestLinReg.py b/nipype/interfaces/minc/tests/test_auto_BestLinReg.py index 
fb9061040a..57a8929878 100644 --- a/nipype/interfaces/minc/tests/test_auto_BestLinReg.py +++ b/nipype/interfaces/minc/tests/test_auto_BestLinReg.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import BestLinReg def test_BestLinReg_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), environ=dict( @@ -15,44 +16,56 @@ def test_BestLinReg_inputs(): usedefault=True, ), output_mnc=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, keep_extension=False, - name_source=['source'], - name_template='%s_bestlinreg.mnc', + name_source=["source"], + name_template="%s_bestlinreg.mnc", position=-1, ), output_xfm=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, keep_extension=False, - name_source=['source'], - name_template='%s_bestlinreg.xfm', + name_source=["source"], + name_template="%s_bestlinreg.xfm", position=-2, ), source=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-4, ), target=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), - verbose=dict(argstr='-verbose', ), + verbose=dict( + argstr="-verbose", + ), ) inputs = BestLinReg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BestLinReg_outputs(): output_map = dict( - output_mnc=dict(), - output_xfm=dict(), + output_mnc=dict( + extensions=None, + ), + output_xfm=dict( + extensions=None, + ), ) outputs = BestLinReg.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_BigAverage.py b/nipype/interfaces/minc/tests/test_auto_BigAverage.py index ce1fb2b91e..1eefb273d6 100644 --- a/nipype/interfaces/minc/tests/test_auto_BigAverage.py +++ b/nipype/interfaces/minc/tests/test_auto_BigAverage.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import BigAverage def test_BigAverage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clobber=dict( - argstr='--clobber', + argstr="--clobber", usedefault=True, ), environ=dict( @@ -15,39 +16,55 @@ def test_BigAverage_inputs(): usedefault=True, ), input_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, - sep=' ', + sep=" ", ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_files'], - name_template='%s_bigaverage.mnc', + name_source=["input_files"], + name_template="%s_bigaverage.mnc", position=-1, ), - output_float=dict(argstr='--float', ), - robust=dict(argstr='-robust', ), + output_float=dict( + argstr="--float", + ), + robust=dict( + argstr="-robust", + ), sd_file=dict( - argstr='--sdfile %s', + argstr="--sdfile %s", + extensions=None, hash_files=False, - name_source=['input_files'], - name_template='%s_bigaverage_stdev.mnc', + name_source=["input_files"], + name_template="%s_bigaverage_stdev.mnc", + ), + tmpdir=dict( + argstr="-tmpdir %s", + ), + verbose=dict( + argstr="--verbose", ), - tmpdir=dict(argstr='-tmpdir %s', ), - verbose=dict(argstr='--verbose', ), ) inputs = BigAverage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + 
def test_BigAverage_outputs(): output_map = dict( - output_file=dict(), - sd_file=dict(), + output_file=dict( + extensions=None, + ), + sd_file=dict( + extensions=None, + ), ) outputs = BigAverage.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Blob.py b/nipype/interfaces/minc/tests/test_auto_Blob.py index b489ac944a..ae2b445c73 100644 --- a/nipype/interfaces/minc/tests/test_auto_Blob.py +++ b/nipype/interfaces/minc/tests/test_auto_Blob.py @@ -1,40 +1,57 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Blob def test_Blob_inputs(): input_map = dict( - args=dict(argstr='%s', ), - determinant=dict(argstr='-determinant', ), + args=dict( + argstr="%s", + ), + determinant=dict( + argstr="-determinant", + ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - magnitude=dict(argstr='-magnitude', ), + magnitude=dict( + argstr="-magnitude", + ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_blob.mnc', + name_source=["input_file"], + name_template="%s_blob.mnc", position=-1, ), - trace=dict(argstr='-trace', ), - translation=dict(argstr='-translation', ), + trace=dict( + argstr="-trace", + ), + translation=dict( + argstr="-translation", + ), ) inputs = Blob.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Blob_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Blob.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Blur.py b/nipype/interfaces/minc/tests/test_auto_Blur.py index fb6e405012..87647b5f62 100644 --- a/nipype/interfaces/minc/tests/test_auto_Blur.py +++ b/nipype/interfaces/minc/tests/test_auto_Blur.py @@ -1,54 +1,65 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Blur def test_Blur_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), - dimensions=dict(argstr='-dimensions %s', ), + dimensions=dict( + argstr="-dimensions %s", + ), environ=dict( nohash=True, usedefault=True, ), fwhm=dict( - argstr='-fwhm %s', + argstr="-fwhm %s", mandatory=True, - xor=('fwhm', 'fwhm3d', 'standard_dev'), + xor=("fwhm", "fwhm3d", "standard_dev"), ), fwhm3d=dict( - argstr='-3dfwhm %s %s %s', + argstr="-3dfwhm %s %s %s", mandatory=True, - xor=('fwhm', 'fwhm3d', 'standard_dev'), + xor=("fwhm", "fwhm3d", "standard_dev"), ), gaussian=dict( - argstr='-gaussian', - xor=('gaussian', 'rect'), + argstr="-gaussian", + xor=("gaussian", "rect"), + ), + gradient=dict( + argstr="-gradient", ), - gradient=dict(argstr='-gradient', ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - no_apodize=dict(argstr='-no_apodize', ), + no_apodize=dict( + argstr="-no_apodize", + ), output_file_base=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, ), - partial=dict(argstr='-partial', ), + partial=dict( + argstr="-partial", + ), rect=dict( - argstr='-rect', - xor=('gaussian', 'rect'), + argstr="-rect", + xor=("gaussian", "rect"), ), standard_dev=dict( - 
argstr='-standarddev %s', + argstr="-standarddev %s", mandatory=True, - xor=('fwhm', 'fwhm3d', 'standard_dev'), + xor=("fwhm", "fwhm3d", "standard_dev"), ), ) inputs = Blur.input_spec() @@ -56,14 +67,28 @@ def test_Blur_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Blur_outputs(): output_map = dict( - gradient_dxyz=dict(), - output_file=dict(), - partial_dx=dict(), - partial_dxyz=dict(), - partial_dy=dict(), - partial_dz=dict(), + gradient_dxyz=dict( + extensions=None, + ), + output_file=dict( + extensions=None, + ), + partial_dx=dict( + extensions=None, + ), + partial_dxyz=dict( + extensions=None, + ), + partial_dy=dict( + extensions=None, + ), + partial_dz=dict( + extensions=None, + ), ) outputs = Blur.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Calc.py b/nipype/interfaces/minc/tests/test_auto_Calc.py index 7bec782c1c..d0d4f61fbe 100644 --- a/nipype/interfaces/minc/tests/test_auto_Calc.py +++ b/nipype/interfaces/minc/tests/test_auto_Calc.py @@ -1,154 +1,250 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Calc def test_Calc_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), check_dimensions=dict( - argstr='-check_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + argstr="-check_dimensions", + xor=("check_dimensions", "no_check_dimensions"), ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), copy_header=dict( - argstr='-copy_header', - xor=('copy_header', 'no_copy_header'), + argstr="-copy_header", + xor=("copy_header", "no_copy_header"), + ), + debug=dict( + argstr="-debug", ), - debug=dict(argstr='-debug', ), environ=dict( nohash=True, usedefault=True, ), - eval_width=dict(argstr='-eval_width %s', ), + eval_width=dict( + argstr="-eval_width %s", + ), expfile=dict( - argstr='-expfile %s', + argstr="-expfile %s", + extensions=None, mandatory=True, - xor=('expression', 'expfile'), + xor=("expression", "expfile"), ), expression=dict( argstr="-expression '%s'", mandatory=True, - xor=('expression', 'expfile'), + xor=("expression", "expfile"), ), filelist=dict( - argstr='-filelist %s', + argstr="-filelist %s", + extensions=None, mandatory=True, - xor=('input_files', 'filelist'), + xor=("input_files", "filelist"), ), format_byte=dict( - argstr='-byte', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-byte", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_double=dict( - argstr='-double', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-double", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_filetype=dict( - argstr='-filetype', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-filetype", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + 
"format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_float=dict( - argstr='-float', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-float", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_int=dict( - argstr='-int', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-int", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_long=dict( - argstr='-long', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-long", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_short=dict( - argstr='-short', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-short", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_signed=dict( - argstr='-signed', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-signed", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_unsigned=dict( - argstr='-unsigned', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-unsigned", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), + ), + ignore_nan=dict( + argstr="-ignore_nan", ), - ignore_nan=dict(argstr='-ignore_nan', ), input_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, - sep=' ', + sep=" ", + ), + max_buffer_size_in_kb=dict( + argstr="-max_buffer_size_in_kb %d", ), - max_buffer_size_in_kb=dict(argstr='-max_buffer_size_in_kb %d', ), no_check_dimensions=dict( - argstr='-nocheck_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + argstr="-nocheck_dimensions", + xor=("check_dimensions", "no_check_dimensions"), ), no_copy_header=dict( - argstr='-nocopy_header', - xor=('copy_header', 'no_copy_header'), + argstr="-nocopy_header", + xor=("copy_header", "no_copy_header"), ), outfiles=dict(), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_files'], - name_template='%s_calc.mnc', + name_source=["input_files"], + name_template="%s_calc.mnc", position=-1, ), output_illegal=dict( - argstr='-illegal_value', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + 
argstr="-illegal_value", + xor=("output_nan", "output_zero", "output_illegal_value"), ), output_nan=dict( - argstr='-nan', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + argstr="-nan", + xor=("output_nan", "output_zero", "output_illegal_value"), ), output_zero=dict( - argstr='-zero', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + argstr="-zero", + xor=("output_nan", "output_zero", "output_illegal_value"), + ), + propagate_nan=dict( + argstr="-propagate_nan", ), - propagate_nan=dict(argstr='-propagate_nan', ), quiet=dict( - argstr='-quiet', - xor=('verbose', 'quiet'), + argstr="-quiet", + xor=("verbose", "quiet"), + ), + two=dict( + argstr="-2", ), - two=dict(argstr='-2', ), verbose=dict( - argstr='-verbose', - xor=('verbose', 'quiet'), + argstr="-verbose", + xor=("verbose", "quiet"), + ), + voxel_range=dict( + argstr="-range %d %d", ), - voxel_range=dict(argstr='-range %d %d', ), ) inputs = Calc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Calc_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Calc.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Convert.py b/nipype/interfaces/minc/tests/test_auto_Convert.py index 6df596c682..57963b9b81 100644 --- a/nipype/interfaces/minc/tests/test_auto_Convert.py +++ b/nipype/interfaces/minc/tests/test_auto_Convert.py @@ -1,44 +1,61 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Convert def test_Convert_inputs(): input_map = dict( - args=dict(argstr='%s', ), - chunk=dict(argstr='-chunk %d', ), + args=dict( + argstr="%s", + ), + chunk=dict( + argstr="-chunk %d", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), - compression=dict(argstr='-compress %s', ), + compression=dict( + argstr="-compress %s", + ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_convert_output.mnc', + name_source=["input_file"], + name_template="%s_convert_output.mnc", position=-1, ), - template=dict(argstr='-template', ), - two=dict(argstr='-2', ), + template=dict( + argstr="-template", + ), + two=dict( + argstr="-2", + ), ) inputs = Convert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Convert_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Convert.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Copy.py b/nipype/interfaces/minc/tests/test_auto_Copy.py index e91470ba6d..a6bb527e7a 100644 --- a/nipype/interfaces/minc/tests/test_auto_Copy.py +++ b/nipype/interfaces/minc/tests/test_auto_Copy.py @@ -1,35 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Copy def test_Copy_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, 
), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_copy.mnc', + name_source=["input_file"], + name_template="%s_copy.mnc", position=-1, ), pixel_values=dict( - argstr='-pixel_values', - xor=('pixel_values', 'real_values'), + argstr="-pixel_values", + xor=("pixel_values", "real_values"), ), real_values=dict( - argstr='-real_values', - xor=('pixel_values', 'real_values'), + argstr="-real_values", + xor=("pixel_values", "real_values"), ), ) inputs = Copy.input_spec() @@ -37,8 +40,14 @@ def test_Copy_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Copy_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Copy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Dump.py b/nipype/interfaces/minc/tests/test_auto_Dump.py index bcca2a4801..4253bc20cc 100644 --- a/nipype/interfaces/minc/tests/test_auto_Dump.py +++ b/nipype/interfaces/minc/tests/test_auto_Dump.py @@ -1,54 +1,64 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Dump def test_Dump_inputs(): input_map = dict( annotations_brief=dict( - argstr='-b %s', - xor=('annotations_brief', 'annotations_full'), + argstr="-b %s", + xor=("annotations_brief", "annotations_full"), ), annotations_full=dict( - argstr='-f %s', - xor=('annotations_brief', 'annotations_full'), + argstr="-f %s", + xor=("annotations_brief", "annotations_full"), + ), + args=dict( + argstr="%s", ), - args=dict(argstr='%s', ), coordinate_data=dict( - argstr='-c', - xor=('coordinate_data', 'header_data'), + argstr="-c", + xor=("coordinate_data", "header_data"), ), environ=dict( nohash=True, usedefault=True, ), header_data=dict( - argstr='-h', - xor=('coordinate_data', 'header_data'), + argstr="-h", + xor=("coordinate_data", "header_data"), ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - line_length=dict(argstr='-l %d', ), - netcdf_name=dict(argstr='-n %s', ), + line_length=dict( + argstr="-l %d", + ), + netcdf_name=dict( + argstr="-n %s", + ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), output_file=dict( + extensions=None, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s_dump.txt', + name_source=["input_file"], + name_template="%s_dump.txt", position=-1, ), - precision=dict(argstr='%s', ), + precision=dict( + argstr="%s", + ), variables=dict( - argstr='-v %s', - sep=',', + argstr="-v %s", + sep=",", ), ) inputs = Dump.input_spec() @@ -56,8 +66,14 @@ def test_Dump_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dump_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Dump.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Extract.py b/nipype/interfaces/minc/tests/test_auto_Extract.py index 77126eac18..6c34b443f0 100644 --- 
a/nipype/interfaces/minc/tests/test_auto_Extract.py +++ b/nipype/interfaces/minc/tests/test_auto_Extract.py @@ -1,152 +1,236 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Extract def test_Extract_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), count=dict( - argstr='-count %s', - sep=',', + argstr="-count %s", + sep=",", ), environ=dict( nohash=True, usedefault=True, ), flip_any_direction=dict( - argstr='-any_direction', - xor=('flip_positive_direction', 'flip_negative_direction', - 'flip_any_direction'), + argstr="-any_direction", + xor=( + "flip_positive_direction", + "flip_negative_direction", + "flip_any_direction", + ), ), flip_negative_direction=dict( - argstr='-negative_direction', - xor=('flip_positive_direction', 'flip_negative_direction', - 'flip_any_direction'), + argstr="-negative_direction", + xor=( + "flip_positive_direction", + "flip_negative_direction", + "flip_any_direction", + ), ), flip_positive_direction=dict( - argstr='-positive_direction', - xor=('flip_positive_direction', 'flip_negative_direction', - 'flip_any_direction'), + argstr="-positive_direction", + xor=( + "flip_positive_direction", + "flip_negative_direction", + "flip_any_direction", + ), ), flip_x_any=dict( - argstr='-xanydirection', - xor=('flip_x_positive', 'flip_x_negative', 'flip_x_any'), + argstr="-xanydirection", + xor=("flip_x_positive", "flip_x_negative", "flip_x_any"), ), flip_x_negative=dict( - argstr='-xdirection', - xor=('flip_x_positive', 'flip_x_negative', 'flip_x_any'), + argstr="-xdirection", + xor=("flip_x_positive", "flip_x_negative", "flip_x_any"), ), flip_x_positive=dict( - argstr='+xdirection', - xor=('flip_x_positive', 'flip_x_negative', 'flip_x_any'), + argstr="+xdirection", + xor=("flip_x_positive", "flip_x_negative", "flip_x_any"), ), flip_y_any=dict( - argstr='-yanydirection', - xor=('flip_y_positive', 'flip_y_negative', 'flip_y_any'), + argstr="-yanydirection", + xor=("flip_y_positive", "flip_y_negative", "flip_y_any"), ), flip_y_negative=dict( - argstr='-ydirection', - xor=('flip_y_positive', 'flip_y_negative', 'flip_y_any'), + argstr="-ydirection", + xor=("flip_y_positive", "flip_y_negative", "flip_y_any"), ), flip_y_positive=dict( - argstr='+ydirection', - xor=('flip_y_positive', 'flip_y_negative', 'flip_y_any'), + argstr="+ydirection", + xor=("flip_y_positive", "flip_y_negative", "flip_y_any"), ), flip_z_any=dict( - argstr='-zanydirection', - xor=('flip_z_positive', 'flip_z_negative', 'flip_z_any'), + argstr="-zanydirection", + xor=("flip_z_positive", "flip_z_negative", "flip_z_any"), ), flip_z_negative=dict( - argstr='-zdirection', - xor=('flip_z_positive', 'flip_z_negative', 'flip_z_any'), + argstr="-zdirection", + xor=("flip_z_positive", "flip_z_negative", "flip_z_any"), ), flip_z_positive=dict( - argstr='+zdirection', - xor=('flip_z_positive', 'flip_z_negative', 'flip_z_any'), + argstr="+zdirection", + xor=("flip_z_positive", "flip_z_negative", "flip_z_any"), + ), + image_maximum=dict( + argstr="-image_maximum %s", + ), + image_minimum=dict( + argstr="-image_minimum %s", + ), + image_range=dict( + argstr="-image_range %s %s", ), - image_maximum=dict(argstr='-image_maximum %s', ), - image_minimum=dict(argstr='-image_minimum %s', ), - image_range=dict(argstr='-image_range %s %s', ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), nonormalize=dict( - argstr='-nonormalize', - xor=('normalize', 'nonormalize'), + 
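# The xor tuples above mark mutually exclusive inputs. A hedged sketch of what
# that means at run time, per my reading of nipype's trait machinery (Extract
# and its write_* flags come from the hunk above):
from nipype.interfaces.minc import Extract

ex = Extract()
ex.inputs.write_byte = True
try:
    ex.inputs.write_short = True  # xor'd with write_byte in the spec
except Exception as err:
    # nipype rejects the assignment because the two flags exclude each other
    print(err)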
argstr="-nonormalize", + xor=("normalize", "nonormalize"), ), normalize=dict( - argstr='-normalize', - xor=('normalize', 'nonormalize'), + argstr="-normalize", + xor=("normalize", "nonormalize"), ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), output_file=dict( + extensions=None, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s.raw', + name_source=["input_file"], + name_template="%s.raw", position=-1, ), start=dict( - argstr='-start %s', - sep=',', + argstr="-start %s", + sep=",", ), write_ascii=dict( - argstr='-ascii', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-ascii", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), ), write_byte=dict( - argstr='-byte', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-byte", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), ), write_double=dict( - argstr='-double', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-double", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), ), write_float=dict( - argstr='-float', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-float", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), ), write_int=dict( - argstr='-int', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-int", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), ), write_long=dict( - argstr='-long', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-long", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), + ), + write_range=dict( + argstr="-range %s %s", ), - write_range=dict(argstr='-range %s %s', ), write_short=dict( - argstr='-short', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-short", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), ), write_signed=dict( - argstr='-signed', - 
xor=('write_signed', 'write_unsigned'), + argstr="-signed", + xor=("write_signed", "write_unsigned"), ), write_unsigned=dict( - argstr='-unsigned', - xor=('write_signed', 'write_unsigned'), + argstr="-unsigned", + xor=("write_signed", "write_unsigned"), ), ) inputs = Extract.input_spec() @@ -154,8 +238,14 @@ def test_Extract_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Extract_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Extract.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py b/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py index c89dc65de6..91bcc57e3c 100644 --- a/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py +++ b/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py @@ -1,41 +1,58 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Gennlxfm def test_Gennlxfm_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), - ident=dict(argstr='-ident', ), - like=dict(argstr='-like %s', ), + ident=dict( + argstr="-ident", + ), + like=dict( + argstr="-like %s", + extensions=None, + ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['like'], - name_template='%s_gennlxfm.xfm', + name_source=["like"], + name_template="%s_gennlxfm.xfm", position=-1, ), - step=dict(argstr='-step %s', ), - verbose=dict(argstr='-verbose', ), + step=dict( + argstr="-step %s", + ), + verbose=dict( + argstr="-verbose", + ), ) inputs = Gennlxfm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Gennlxfm_outputs(): output_map = dict( - output_file=dict(), - output_grid=dict(), + output_file=dict( + extensions=None, + ), + output_grid=dict( + extensions=None, + ), ) outputs = Gennlxfm.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Math.py b/nipype/interfaces/minc/tests/test_auto_Math.py index 1d011034d2..86858235cd 100644 --- a/nipype/interfaces/minc/tests/test_auto_Math.py +++ b/nipype/interfaces/minc/tests/test_auto_Math.py @@ -1,167 +1,316 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Math def test_Math_inputs(): input_map = dict( - abs=dict(argstr='-abs', ), - args=dict(argstr='%s', ), - calc_add=dict(argstr='-add', ), - calc_and=dict(argstr='-and', ), - calc_div=dict(argstr='-div', ), - calc_mul=dict(argstr='-mult', ), - calc_not=dict(argstr='-not', ), - calc_or=dict(argstr='-or', ), - calc_sub=dict(argstr='-sub', ), + abs=dict( + argstr="-abs", + ), + args=dict( + argstr="%s", + ), + calc_add=dict( + argstr="-add", + ), + calc_and=dict( + argstr="-and", + ), + calc_div=dict( + argstr="-div", + ), + calc_mul=dict( + argstr="-mult", + ), + calc_not=dict( + argstr="-not", + ), + calc_or=dict( + argstr="-or", + ), + calc_sub=dict( + argstr="-sub", + ), check_dimensions=dict( - argstr='-check_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + argstr="-check_dimensions", + xor=("check_dimensions", "no_check_dimensions"), + ), + 
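# The recurring change this regeneration records is the new `extensions=None`
# metadata on every File trait. A hedged sketch of the trait parameter itself,
# as I understand nipype's File trait (ExampleSpec is illustrative, not a real
# nipype spec; exact validation behaviour may vary by version):
from nipype.interfaces.base import File, TraitedSpec

class ExampleSpec(TraitedSpec):
    any_file = File(extensions=None)                    # no extension checking
    nifti_only = File(extensions=[".nii", ".nii.gz"])   # restrict allowed suffixes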
clamp=dict( + argstr="-clamp -const2 %s %s", ), - clamp=dict(argstr='-clamp -const2 %s %s', ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), copy_header=dict( - argstr='-copy_header', - xor=('copy_header', 'no_copy_header'), + argstr="-copy_header", + xor=("copy_header", "no_copy_header"), + ), + count_valid=dict( + argstr="-count_valid", + ), + dimension=dict( + argstr="-dimension %s", ), - count_valid=dict(argstr='-count_valid', ), - dimension=dict(argstr='-dimension %s', ), environ=dict( nohash=True, usedefault=True, ), - exp=dict(argstr='-exp -const2 %s %s', ), + exp=dict( + argstr="-exp -const2 %s %s", + ), filelist=dict( - argstr='-filelist %s', + argstr="-filelist %s", + extensions=None, mandatory=True, - xor=('input_files', 'filelist'), + xor=("input_files", "filelist"), ), format_byte=dict( - argstr='-byte', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-byte", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_double=dict( - argstr='-double', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-double", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_filetype=dict( - argstr='-filetype', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-filetype", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_float=dict( - argstr='-float', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-float", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_int=dict( - argstr='-int', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-int", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_long=dict( - argstr='-long', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-long", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_short=dict( - argstr='-short', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-short", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + 
"format_double", + "format_signed", + "format_unsigned", + ), ), format_signed=dict( - argstr='-signed', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-signed", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_unsigned=dict( - argstr='-unsigned', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-unsigned", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), + ), + ignore_nan=dict( + argstr="-ignore_nan", ), - ignore_nan=dict(argstr='-ignore_nan', ), input_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, - sep=' ', - xor=('input_files', 'filelist'), + sep=" ", + xor=("input_files", "filelist"), + ), + invert=dict( + argstr="-invert -const %s", + ), + isnan=dict( + argstr="-isnan", + ), + log=dict( + argstr="-log -const2 %s %s", ), - invert=dict(argstr='-invert -const %s', ), - isnan=dict(argstr='-isnan', ), - log=dict(argstr='-log -const2 %s %s', ), max_buffer_size_in_kb=dict( - argstr='-max_buffer_size_in_kb %d', + argstr="-max_buffer_size_in_kb %d", usedefault=True, ), - maximum=dict(argstr='-maximum', ), - minimum=dict(argstr='-minimum', ), - nisnan=dict(argstr='-nisnan', ), + maximum=dict( + argstr="-maximum", + ), + minimum=dict( + argstr="-minimum", + ), + nisnan=dict( + argstr="-nisnan", + ), no_check_dimensions=dict( - argstr='-nocheck_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + argstr="-nocheck_dimensions", + xor=("check_dimensions", "no_check_dimensions"), ), no_copy_header=dict( - argstr='-nocopy_header', - xor=('copy_header', 'no_copy_header'), + argstr="-nocopy_header", + xor=("copy_header", "no_copy_header"), + ), + nsegment=dict( + argstr="-nsegment -const2 %s %s", ), - nsegment=dict(argstr='-nsegment -const2 %s %s', ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_files'], - name_template='%s_mincmath.mnc', + name_source=["input_files"], + name_template="%s_mincmath.mnc", position=-1, ), output_illegal=dict( - argstr='-illegal_value', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + argstr="-illegal_value", + xor=("output_nan", "output_zero", "output_illegal_value"), ), output_nan=dict( - argstr='-nan', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + argstr="-nan", + xor=("output_nan", "output_zero", "output_illegal_value"), ), output_zero=dict( - argstr='-zero', - xor=('output_nan', 'output_zero', 'output_illegal_value'), - ), - percentdiff=dict(argstr='-percentdiff', ), - propagate_nan=dict(argstr='-propagate_nan', ), - scale=dict(argstr='-scale -const2 %s %s', ), - segment=dict(argstr='-segment -const2 %s %s', ), - sqrt=dict(argstr='-sqrt', ), - square=dict(argstr='-square', ), - test_eq=dict(argstr='-eq', ), - test_ge=dict(argstr='-ge', ), - test_gt=dict(argstr='-gt', ), - test_le=dict(argstr='-le', ), - test_lt=dict(argstr='-lt', ), - test_ne=dict(argstr='-ne', ), - two=dict(argstr='-2', ), - voxel_range=dict(argstr='-range %d %d', ), + argstr="-zero", + xor=("output_nan", "output_zero", "output_illegal_value"), + ), + 
percentdiff=dict( + argstr="-percentdiff", + ), + propagate_nan=dict( + argstr="-propagate_nan", + ), + scale=dict( + argstr="-scale -const2 %s %s", + ), + segment=dict( + argstr="-segment -const2 %s %s", + ), + sqrt=dict( + argstr="-sqrt", + ), + square=dict( + argstr="-square", + ), + test_eq=dict( + argstr="-eq", + ), + test_ge=dict( + argstr="-ge", + ), + test_gt=dict( + argstr="-gt", + ), + test_le=dict( + argstr="-le", + ), + test_lt=dict( + argstr="-lt", + ), + test_ne=dict( + argstr="-ne", + ), + two=dict( + argstr="-2", + ), + voxel_range=dict( + argstr="-range %d %d", + ), ) inputs = Math.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Math_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Math.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_NlpFit.py b/nipype/interfaces/minc/tests/test_auto_NlpFit.py index 1a728a90dd..39b1df4743 100644 --- a/nipype/interfaces/minc/tests/test_auto_NlpFit.py +++ b/nipype/interfaces/minc/tests/test_auto_NlpFit.py @@ -1,17 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import NlpFit def test_NlpFit_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), config_file=dict( - argstr='-config_file %s', + argstr="-config_file %s", + extensions=None, mandatory=True, ), environ=dict( @@ -19,40 +21,53 @@ def test_NlpFit_inputs(): usedefault=True, ), init_xfm=dict( - argstr='-init_xfm %s', + argstr="-init_xfm %s", + extensions=None, mandatory=True, ), input_grid_files=dict(), output_xfm=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), source=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), source_mask=dict( - argstr='-source_mask %s', + argstr="-source_mask %s", + extensions=None, mandatory=True, ), target=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - verbose=dict(argstr='-verbose', ), + verbose=dict( + argstr="-verbose", + ), ) inputs = NlpFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NlpFit_outputs(): output_map = dict( - output_grid=dict(), - output_xfm=dict(), + output_grid=dict( + extensions=None, + ), + output_xfm=dict( + extensions=None, + ), ) outputs = NlpFit.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Norm.py b/nipype/interfaces/minc/tests/test_auto_Norm.py index 3a0d28f06d..f8c2060250 100644 --- a/nipype/interfaces/minc/tests/test_auto_Norm.py +++ b/nipype/interfaces/minc/tests/test_auto_Norm.py @@ -1,62 +1,93 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Norm def test_Norm_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clamp=dict( - argstr='-clamp', + argstr="-clamp", usedefault=True, ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), - cutoff=dict(argstr='-cutoff %s', ), + cutoff=dict( + argstr="-cutoff %s", + ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( - argstr='%s', + 
argstr="%s", + extensions=None, mandatory=True, position=-2, ), - lower=dict(argstr='-lower %s', ), - mask=dict(argstr='-mask %s', ), - out_ceil=dict(argstr='-out_ceil %s', ), - out_floor=dict(argstr='-out_floor %s', ), + lower=dict( + argstr="-lower %s", + ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), + out_ceil=dict( + argstr="-out_ceil %s", + ), + out_floor=dict( + argstr="-out_floor %s", + ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_norm.mnc', + name_source=["input_file"], + name_template="%s_norm.mnc", position=-1, ), output_threshold_mask=dict( - argstr='-threshold_mask %s', + argstr="-threshold_mask %s", + extensions=None, hash_files=False, - name_source=['input_file'], - name_template='%s_norm_threshold_mask.mnc', - ), - threshold=dict(argstr='-threshold', ), - threshold_blur=dict(argstr='-threshold_blur %s', ), - threshold_bmt=dict(argstr='-threshold_bmt', ), - threshold_perc=dict(argstr='-threshold_perc %s', ), - upper=dict(argstr='-upper %s', ), + name_source=["input_file"], + name_template="%s_norm_threshold_mask.mnc", + ), + threshold=dict( + argstr="-threshold", + ), + threshold_blur=dict( + argstr="-threshold_blur %s", + ), + threshold_bmt=dict( + argstr="-threshold_bmt", + ), + threshold_perc=dict( + argstr="-threshold_perc %s", + ), + upper=dict( + argstr="-upper %s", + ), ) inputs = Norm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Norm_outputs(): output_map = dict( - output_file=dict(), - output_threshold_mask=dict(), + output_file=dict( + extensions=None, + ), + output_threshold_mask=dict( + extensions=None, + ), ) outputs = Norm.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Pik.py b/nipype/interfaces/minc/tests/test_auto_Pik.py index d74d9a86ad..3323ef74a1 100644 --- a/nipype/interfaces/minc/tests/test_auto_Pik.py +++ b/nipype/interfaces/minc/tests/test_auto_Pik.py @@ -1,90 +1,125 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Pik def test_Pik_inputs(): input_map = dict( - annotated_bar=dict(argstr='--anot_bar', ), - args=dict(argstr='%s', ), + annotated_bar=dict( + argstr="--anot_bar", + ), + args=dict( + argstr="%s", + ), auto_range=dict( - argstr='--auto_range', - xor=('image_range', 'auto_range'), + argstr="--auto_range", + xor=("image_range", "auto_range"), ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), - depth=dict(argstr='--depth %s', ), + depth=dict( + argstr="--depth %s", + ), environ=dict( nohash=True, usedefault=True, ), horizontal_triplanar_view=dict( - argstr='--horizontal', - xor=('vertical_triplanar_view', 'horizontal_triplanar_view'), + argstr="--horizontal", + xor=("vertical_triplanar_view", "horizontal_triplanar_view"), ), image_range=dict( - argstr='--image_range %s %s', - xor=('image_range', 'auto_range'), + argstr="--image_range %s %s", + xor=("image_range", "auto_range"), ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - jpg=dict(xor=('jpg', 'png'), ), - lookup=dict(argstr='--lookup %s', ), - minc_range=dict(argstr='--range %s %s', ), + jpg=dict( + xor=("jpg", "png"), + ), + lookup=dict( + argstr="--lookup %s", + ), + minc_range=dict( + argstr="--range %s %s", + ), output_file=dict( - argstr='%s', + argstr="%s", + 
extensions=None, genfile=True, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s.png', + name_source=["input_file"], + name_template="%s.png", position=-1, ), - png=dict(xor=('jpg', 'png'), ), - sagittal_offset=dict(argstr='--sagittal_offset %s', ), - sagittal_offset_perc=dict(argstr='--sagittal_offset_perc %d', ), + png=dict( + xor=("jpg", "png"), + ), + sagittal_offset=dict( + argstr="--sagittal_offset %s", + ), + sagittal_offset_perc=dict( + argstr="--sagittal_offset_perc %d", + ), scale=dict( - argstr='--scale %s', + argstr="--scale %s", usedefault=True, ), slice_x=dict( - argstr='-x', - xor=('slice_z', 'slice_y', 'slice_x'), + argstr="-x", + xor=("slice_z", "slice_y", "slice_x"), ), slice_y=dict( - argstr='-y', - xor=('slice_z', 'slice_y', 'slice_x'), + argstr="-y", + xor=("slice_z", "slice_y", "slice_x"), ), slice_z=dict( - argstr='-z', - xor=('slice_z', 'slice_y', 'slice_x'), + argstr="-z", + xor=("slice_z", "slice_y", "slice_x"), + ), + start=dict( + argstr="--slice %s", + ), + tile_size=dict( + argstr="--tilesize %s", + ), + title=dict( + argstr="%s", ), - start=dict(argstr='--slice %s', ), - tile_size=dict(argstr='--tilesize %s', ), - title=dict(argstr='%s', ), title_size=dict( - argstr='--title_size %s', - requires=['title'], + argstr="--title_size %s", + requires=["title"], + ), + triplanar=dict( + argstr="--triplanar", ), - triplanar=dict(argstr='--triplanar', ), vertical_triplanar_view=dict( - argstr='--vertical', - xor=('vertical_triplanar_view', 'horizontal_triplanar_view'), + argstr="--vertical", + xor=("vertical_triplanar_view", "horizontal_triplanar_view"), + ), + width=dict( + argstr="--width %s", ), - width=dict(argstr='--width %s', ), ) inputs = Pik.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Pik_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Pik.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Resample.py b/nipype/interfaces/minc/tests/test_auto_Resample.py index bd00bd224d..59f2ae180d 100644 --- a/nipype/interfaces/minc/tests/test_auto_Resample.py +++ b/nipype/interfaces/minc/tests/test_auto_Resample.py @@ -1,240 +1,337 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Resample def test_Resample_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), coronal_slices=dict( - argstr='-coronal', - xor=('transverse', 'sagittal', 'coronal'), + argstr="-coronal", + xor=("transverse", "sagittal", "coronal"), ), dircos=dict( - argstr='-dircos %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), + argstr="-dircos %s %s %s", + xor=("nelements", "nelements_x_y_or_z"), ), environ=dict( nohash=True, usedefault=True, ), fill=dict( - argstr='-fill', - xor=('nofill', 'fill'), + argstr="-fill", + xor=("nofill", "fill"), ), fill_value=dict( - argstr='-fillvalue %s', - requires=['fill'], + argstr="-fillvalue %s", + requires=["fill"], ), format_byte=dict( - argstr='-byte', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-byte", + xor=( + "format_byte", + "format_short", + "format_int", + 
"format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_double=dict( - argstr='-double', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-double", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_float=dict( - argstr='-float', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-float", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_int=dict( - argstr='-int', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-int", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_long=dict( - argstr='-long', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-long", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_short=dict( - argstr='-short', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-short", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_signed=dict( - argstr='-signed', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-signed", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_unsigned=dict( - argstr='-unsigned', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-unsigned", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), half_width_sinc_window=dict( - argstr='-width %s', - requires=['sinc_interpolation'], + argstr="-width %s", + requires=["sinc_interpolation"], ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), input_grid_files=dict(), - invert_transformation=dict(argstr='-invert_transformation', ), + invert_transformation=dict( + argstr="-invert_transformation", + ), keep_real_range=dict( - argstr='-keep_real_range', - xor=('keep_real_range', 'nokeep_real_range'), + argstr="-keep_real_range", + xor=("keep_real_range", "nokeep_real_range"), + ), + like=dict( + argstr="-like %s", + extensions=None, ), - like=dict(argstr='-like %s', ), nearest_neighbour_interpolation=dict( - argstr='-nearest_neighbour', - xor=('trilinear_interpolation', 'tricubic_interpolation', - 'nearest_neighbour_interpolation', 'sinc_interpolation'), + argstr="-nearest_neighbour", + xor=( + "trilinear_interpolation", + 
"tricubic_interpolation", + "nearest_neighbour_interpolation", + "sinc_interpolation", + ), ), nelements=dict( - argstr='-nelements %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), + argstr="-nelements %s %s %s", + xor=("nelements", "nelements_x_y_or_z"), ), no_fill=dict( - argstr='-nofill', - xor=('nofill', 'fill'), + argstr="-nofill", + xor=("nofill", "fill"), ), no_input_sampling=dict( - argstr='-use_input_sampling', - xor=('vio_transform', 'no_input_sampling'), + argstr="-use_input_sampling", + xor=("vio_transform", "no_input_sampling"), ), nokeep_real_range=dict( - argstr='-nokeep_real_range', - xor=('keep_real_range', 'nokeep_real_range'), + argstr="-nokeep_real_range", + xor=("keep_real_range", "nokeep_real_range"), + ), + origin=dict( + argstr="-origin %s %s %s", ), - origin=dict(argstr='-origin %s %s %s', ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_resample.mnc', + name_source=["input_file"], + name_template="%s_resample.mnc", position=-1, ), - output_range=dict(argstr='-range %s %s', ), + output_range=dict( + argstr="-range %s %s", + ), sagittal_slices=dict( - argstr='-sagittal', - xor=('transverse', 'sagittal', 'coronal'), + argstr="-sagittal", + xor=("transverse", "sagittal", "coronal"), ), sinc_interpolation=dict( - argstr='-sinc', - xor=('trilinear_interpolation', 'tricubic_interpolation', - 'nearest_neighbour_interpolation', 'sinc_interpolation'), + argstr="-sinc", + xor=( + "trilinear_interpolation", + "tricubic_interpolation", + "nearest_neighbour_interpolation", + "sinc_interpolation", + ), ), sinc_window_hamming=dict( - argstr='-hamming', - requires=['sinc_interpolation'], - xor=('sinc_window_hanning', 'sinc_window_hamming'), + argstr="-hamming", + requires=["sinc_interpolation"], + xor=("sinc_window_hanning", "sinc_window_hamming"), ), sinc_window_hanning=dict( - argstr='-hanning', - requires=['sinc_interpolation'], - xor=('sinc_window_hanning', 'sinc_window_hamming'), + argstr="-hanning", + requires=["sinc_interpolation"], + xor=("sinc_window_hanning", "sinc_window_hamming"), + ), + spacetype=dict( + argstr="-spacetype %s", + ), + standard_sampling=dict( + argstr="-standard_sampling", ), - spacetype=dict(argstr='-spacetype %s', ), - standard_sampling=dict(argstr='-standard_sampling', ), start=dict( - argstr='-start %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), + argstr="-start %s %s %s", + xor=("nelements", "nelements_x_y_or_z"), ), step=dict( - argstr='-step %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), + argstr="-step %s %s %s", + xor=("nelements", "nelements_x_y_or_z"), + ), + talairach=dict( + argstr="-talairach", + ), + transformation=dict( + argstr="-transformation %s", + extensions=None, ), - talairach=dict(argstr='-talairach', ), - transformation=dict(argstr='-transformation %s', ), transverse_slices=dict( - argstr='-transverse', - xor=('transverse', 'sagittal', 'coronal'), + argstr="-transverse", + xor=("transverse", "sagittal", "coronal"), ), tricubic_interpolation=dict( - argstr='-tricubic', - xor=('trilinear_interpolation', 'tricubic_interpolation', - 'nearest_neighbour_interpolation', 'sinc_interpolation'), + argstr="-tricubic", + xor=( + "trilinear_interpolation", + "tricubic_interpolation", + "nearest_neighbour_interpolation", + "sinc_interpolation", + ), ), trilinear_interpolation=dict( - argstr='-trilinear', - xor=('trilinear_interpolation', 'tricubic_interpolation', - 'nearest_neighbour_interpolation', 'sinc_interpolation'), 
+ argstr="-trilinear", + xor=( + "trilinear_interpolation", + "tricubic_interpolation", + "nearest_neighbour_interpolation", + "sinc_interpolation", + ), + ), + two=dict( + argstr="-2", + ), + units=dict( + argstr="-units %s", ), - two=dict(argstr='-2', ), - units=dict(argstr='-units %s', ), vio_transform=dict( - argstr='-tfm_input_sampling', - xor=('vio_transform', 'no_input_sampling'), + argstr="-tfm_input_sampling", + xor=("vio_transform", "no_input_sampling"), ), xdircos=dict( - argstr='-xdircos %s', - requires=('ydircos', 'zdircos'), - xor=('dircos', 'dircos_x_y_or_z'), + argstr="-xdircos %s", + requires=("ydircos", "zdircos"), + xor=("dircos", "dircos_x_y_or_z"), ), xnelements=dict( - argstr='-xnelements %s', - requires=('ynelements', 'znelements'), - xor=('nelements', 'nelements_x_y_or_z'), + argstr="-xnelements %s", + requires=("ynelements", "znelements"), + xor=("nelements", "nelements_x_y_or_z"), ), xstart=dict( - argstr='-xstart %s', - requires=('ystart', 'zstart'), - xor=('start', 'start_x_y_or_z'), + argstr="-xstart %s", + requires=("ystart", "zstart"), + xor=("start", "start_x_y_or_z"), ), xstep=dict( - argstr='-xstep %s', - requires=('ystep', 'zstep'), - xor=('step', 'step_x_y_or_z'), + argstr="-xstep %s", + requires=("ystep", "zstep"), + xor=("step", "step_x_y_or_z"), ), ydircos=dict( - argstr='-ydircos %s', - requires=('xdircos', 'zdircos'), - xor=('dircos', 'dircos_x_y_or_z'), + argstr="-ydircos %s", + requires=("xdircos", "zdircos"), + xor=("dircos", "dircos_x_y_or_z"), ), ynelements=dict( - argstr='-ynelements %s', - requires=('xnelements', 'znelements'), - xor=('nelements', 'nelements_x_y_or_z'), + argstr="-ynelements %s", + requires=("xnelements", "znelements"), + xor=("nelements", "nelements_x_y_or_z"), ), ystart=dict( - argstr='-ystart %s', - requires=('xstart', 'zstart'), - xor=('start', 'start_x_y_or_z'), + argstr="-ystart %s", + requires=("xstart", "zstart"), + xor=("start", "start_x_y_or_z"), ), ystep=dict( - argstr='-ystep %s', - requires=('xstep', 'zstep'), - xor=('step', 'step_x_y_or_z'), + argstr="-ystep %s", + requires=("xstep", "zstep"), + xor=("step", "step_x_y_or_z"), ), zdircos=dict( - argstr='-zdircos %s', - requires=('xdircos', 'ydircos'), - xor=('dircos', 'dircos_x_y_or_z'), + argstr="-zdircos %s", + requires=("xdircos", "ydircos"), + xor=("dircos", "dircos_x_y_or_z"), ), znelements=dict( - argstr='-znelements %s', - requires=('xnelements', 'ynelements'), - xor=('nelements', 'nelements_x_y_or_z'), + argstr="-znelements %s", + requires=("xnelements", "ynelements"), + xor=("nelements", "nelements_x_y_or_z"), ), zstart=dict( - argstr='-zstart %s', - requires=('xstart', 'ystart'), - xor=('start', 'start_x_y_or_z'), + argstr="-zstart %s", + requires=("xstart", "ystart"), + xor=("start", "start_x_y_or_z"), ), zstep=dict( - argstr='-zstep %s', - requires=('xstep', 'ystep'), - xor=('step', 'step_x_y_or_z'), + argstr="-zstep %s", + requires=("xstep", "ystep"), + xor=("step", "step_x_y_or_z"), ), ) inputs = Resample.input_spec() @@ -242,8 +339,14 @@ def test_Resample_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Resample_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Resample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Reshape.py b/nipype/interfaces/minc/tests/test_auto_Reshape.py index 
45e6ddeb4a..50286b41e5 100644 --- a/nipype/interfaces/minc/tests/test_auto_Reshape.py +++ b/nipype/interfaces/minc/tests/test_auto_Reshape.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Reshape def test_Reshape_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), environ=dict( @@ -15,28 +16,40 @@ def test_Reshape_inputs(): usedefault=True, ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_reshape.mnc', + name_source=["input_file"], + name_template="%s_reshape.mnc", position=-1, ), - verbose=dict(argstr='-verbose', ), - write_short=dict(argstr='-short', ), + verbose=dict( + argstr="-verbose", + ), + write_short=dict( + argstr="-short", + ), ) inputs = Reshape.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Reshape_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Reshape.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_ToEcat.py b/nipype/interfaces/minc/tests/test_auto_ToEcat.py index 26a3ac0436..f6ce521232 100644 --- a/nipype/interfaces/minc/tests/test_auto_ToEcat.py +++ b/nipype/interfaces/minc/tests/test_auto_ToEcat.py @@ -1,48 +1,70 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import ToEcat def test_ToEcat_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), ignore_acquisition_variable=dict( - argstr='-ignore_acquisition_variable', ), + argstr="-ignore_acquisition_variable", + ), ignore_ecat_acquisition_variable=dict( - argstr='-ignore_ecat_acquisition_variable', ), - ignore_ecat_main=dict(argstr='-ignore_ecat_main', ), + argstr="-ignore_ecat_acquisition_variable", + ), + ignore_ecat_main=dict( + argstr="-ignore_ecat_main", + ), ignore_ecat_subheader_variable=dict( - argstr='-ignore_ecat_subheader_variable', ), - ignore_patient_variable=dict(argstr='-ignore_patient_variable', ), - ignore_study_variable=dict(argstr='-ignore_study_variable', ), + argstr="-ignore_ecat_subheader_variable", + ), + ignore_patient_variable=dict( + argstr="-ignore_patient_variable", + ), + ignore_study_variable=dict( + argstr="-ignore_study_variable", + ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - no_decay_corr_fctr=dict(argstr='-no_decay_corr_fctr', ), + no_decay_corr_fctr=dict( + argstr="-no_decay_corr_fctr", + ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s_to_ecat.v', + name_source=["input_file"], + name_template="%s_to_ecat.v", position=-1, ), - voxels_as_integers=dict(argstr='-label', ), + voxels_as_integers=dict( + argstr="-label", + ), ) inputs = ToEcat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_ToEcat_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = ToEcat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_ToRaw.py b/nipype/interfaces/minc/tests/test_auto_ToRaw.py index e010da322d..39940170f6 100644 --- a/nipype/interfaces/minc/tests/test_auto_ToRaw.py +++ b/nipype/interfaces/minc/tests/test_auto_ToRaw.py @@ -1,78 +1,120 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import ToRaw def test_ToRaw_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), nonormalize=dict( - argstr='-nonormalize', - xor=('normalize', 'nonormalize'), + argstr="-nonormalize", + xor=("normalize", "nonormalize"), ), normalize=dict( - argstr='-normalize', - xor=('normalize', 'nonormalize'), + argstr="-normalize", + xor=("normalize", "nonormalize"), ), out_file=dict( - argstr='> %s', + argstr="> %s", + extensions=None, genfile=True, position=-1, ), output_file=dict( + extensions=None, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s.raw', + name_source=["input_file"], + name_template="%s.raw", position=-1, ), write_byte=dict( - argstr='-byte', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), + argstr="-byte", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), ), write_double=dict( - argstr='-double', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), + argstr="-double", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), ), write_float=dict( - argstr='-float', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), + argstr="-float", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), ), write_int=dict( - argstr='-int', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), + argstr="-int", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), ), write_long=dict( - argstr='-long', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), + argstr="-long", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), + ), + write_range=dict( + argstr="-range %s %s", ), - write_range=dict(argstr='-range %s %s', ), write_short=dict( - argstr='-short', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), + argstr="-short", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), ), write_signed=dict( - argstr='-signed', - xor=('write_signed', 'write_unsigned'), + argstr="-signed", + xor=("write_signed", "write_unsigned"), ), write_unsigned=dict( - argstr='-unsigned', - xor=('write_signed', 'write_unsigned'), + argstr="-unsigned", + xor=("write_signed", "write_unsigned"), ), ) inputs = ToRaw.input_spec() @@ -80,8 +122,14 @@ def test_ToRaw_inputs(): for key, metadata in list(input_map.items()): for 
metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ToRaw_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = ToRaw.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_VolSymm.py b/nipype/interfaces/minc/tests/test_auto_VolSymm.py index 048ffcde9b..a1b89616f2 100644 --- a/nipype/interfaces/minc/tests/test_auto_VolSymm.py +++ b/nipype/interfaces/minc/tests/test_auto_VolSymm.py @@ -1,61 +1,90 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import VolSymm def test_VolSymm_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), - config_file=dict(argstr='-config_file %s', ), + config_file=dict( + argstr="-config_file %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), - fit_linear=dict(argstr='-linear', ), - fit_nonlinear=dict(argstr='-nonlinear', ), + fit_linear=dict( + argstr="-linear", + ), + fit_nonlinear=dict( + argstr="-nonlinear", + ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), input_grid_files=dict(), - nofit=dict(argstr='-nofit', ), + nofit=dict( + argstr="-nofit", + ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_vol_symm.mnc', + name_source=["input_file"], + name_template="%s_vol_symm.mnc", position=-1, ), trans_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s_vol_symm.xfm', + name_source=["input_file"], + name_template="%s_vol_symm.xfm", position=-2, ), - verbose=dict(argstr='-verbose', ), - x=dict(argstr='-x', ), - y=dict(argstr='-y', ), - z=dict(argstr='-z', ), + verbose=dict( + argstr="-verbose", + ), + x=dict( + argstr="-x", + ), + y=dict( + argstr="-y", + ), + z=dict( + argstr="-z", + ), ) inputs = VolSymm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VolSymm_outputs(): output_map = dict( - output_file=dict(), - output_grid=dict(), - trans_file=dict(), + output_file=dict( + extensions=None, + ), + output_grid=dict( + extensions=None, + ), + trans_file=dict( + extensions=None, + ), ) outputs = VolSymm.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Volcentre.py b/nipype/interfaces/minc/tests/test_auto_Volcentre.py index b095751a4d..cf9f777f70 100644 --- a/nipype/interfaces/minc/tests/test_auto_Volcentre.py +++ b/nipype/interfaces/minc/tests/test_auto_Volcentre.py @@ -1,44 +1,61 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Volcentre def test_Volcentre_inputs(): input_map = dict( - args=dict(argstr='%s', ), - centre=dict(argstr='-centre %s %s %s', ), + args=dict( + argstr="%s", + ), + centre=dict( + argstr="-centre %s %s %s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), - com=dict(argstr='-com', ), + com=dict( + argstr="-com", + ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), 
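# Hedged note on the argstr="> %s" pattern used by out_file in Dump, Extract
# and ToRaw above: the command captures the tool's stdout via shell
# redirection, so the rendered command line ends in something like
#   mincdump ... input.mnc > input_dump.txt
# (executable names follow the interfaces above; this is a reading of the
# spec metadata, not verified output). The fragment renders as:
print("> %s" % "input_dump.txt")  # -> > input_dump.txt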
output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_volcentre.mnc', + name_source=["input_file"], + name_template="%s_volcentre.mnc", position=-1, ), - verbose=dict(argstr='-verbose', ), - zero_dircos=dict(argstr='-zero_dircos', ), + verbose=dict( + argstr="-verbose", + ), + zero_dircos=dict( + argstr="-zero_dircos", + ), ) inputs = Volcentre.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Volcentre_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Volcentre.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Voliso.py b/nipype/interfaces/minc/tests/test_auto_Voliso.py index 967642a328..40d01b5767 100644 --- a/nipype/interfaces/minc/tests/test_auto_Voliso.py +++ b/nipype/interfaces/minc/tests/test_auto_Voliso.py @@ -1,14 +1,17 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Voliso def test_Voliso_inputs(): input_map = dict( - args=dict(argstr='%s', ), - avgstep=dict(argstr='--avgstep', ), + args=dict( + argstr="%s", + ), + avgstep=dict( + argstr="--avgstep", + ), clobber=dict( - argstr='--clobber', + argstr="--clobber", usedefault=True, ), environ=dict( @@ -16,29 +19,43 @@ def test_Voliso_inputs(): usedefault=True, ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - maxstep=dict(argstr='--maxstep %s', ), - minstep=dict(argstr='--minstep %s', ), + maxstep=dict( + argstr="--maxstep %s", + ), + minstep=dict( + argstr="--minstep %s", + ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_voliso.mnc', + name_source=["input_file"], + name_template="%s_voliso.mnc", position=-1, ), - verbose=dict(argstr='--verbose', ), + verbose=dict( + argstr="--verbose", + ), ) inputs = Voliso.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Voliso_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Voliso.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Volpad.py b/nipype/interfaces/minc/tests/test_auto_Volpad.py index 865bc79e69..5102199657 100644 --- a/nipype/interfaces/minc/tests/test_auto_Volpad.py +++ b/nipype/interfaces/minc/tests/test_auto_Volpad.py @@ -1,46 +1,67 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import Volpad def test_Volpad_inputs(): input_map = dict( - args=dict(argstr='%s', ), - auto=dict(argstr='-auto', ), - auto_freq=dict(argstr='-auto_freq %s', ), + args=dict( + argstr="%s", + ), + auto=dict( + argstr="-auto", + ), + auto_freq=dict( + argstr="-auto_freq %s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), - distance=dict(argstr='-distance %s', ), + distance=dict( + argstr="-distance %s", + ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), output_file=dict( - 
argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_volpad.mnc', + name_source=["input_file"], + name_template="%s_volpad.mnc", position=-1, ), - smooth=dict(argstr='-smooth', ), - smooth_distance=dict(argstr='-smooth_distance %s', ), - verbose=dict(argstr='-verbose', ), + smooth=dict( + argstr="-smooth", + ), + smooth_distance=dict( + argstr="-smooth_distance %s", + ), + verbose=dict( + argstr="-verbose", + ), ) inputs = Volpad.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Volpad_outputs(): - output_map = dict(output_file=dict(), ) + output_map = dict( + output_file=dict( + extensions=None, + ), + ) outputs = Volpad.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_XfmAvg.py b/nipype/interfaces/minc/tests/test_auto_XfmAvg.py index 6d036a0c0e..f5df6f4d54 100644 --- a/nipype/interfaces/minc/tests/test_auto_XfmAvg.py +++ b/nipype/interfaces/minc/tests/test_auto_XfmAvg.py @@ -1,46 +1,64 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import XfmAvg def test_XfmAvg_inputs(): input_map = dict( - args=dict(argstr='%s', ), - avg_linear=dict(argstr='-avg_linear', ), - avg_nonlinear=dict(argstr='-avg_nonlinear', ), + args=dict( + argstr="%s", + ), + avg_linear=dict( + argstr="-avg_linear", + ), + avg_nonlinear=dict( + argstr="-avg_nonlinear", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), - ignore_linear=dict(argstr='-ignore_linear', ), - ignore_nonlinear=dict(argstr='-ignore_nonline', ), + ignore_linear=dict( + argstr="-ignore_linear", + ), + ignore_nonlinear=dict( + argstr="-ignore_nonline", + ), input_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, - sep=' ', + sep=" ", ), input_grid_files=dict(), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), - verbose=dict(argstr='-verbose', ), + verbose=dict( + argstr="-verbose", + ), ) inputs = XfmAvg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_XfmAvg_outputs(): output_map = dict( - output_file=dict(), - output_grid=dict(), + output_file=dict( + extensions=None, + ), + output_grid=dict( + extensions=None, + ), ) outputs = XfmAvg.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_XfmConcat.py b/nipype/interfaces/minc/tests/test_auto_XfmConcat.py index eb748953ef..58144779b8 100644 --- a/nipype/interfaces/minc/tests/test_auto_XfmConcat.py +++ b/nipype/interfaces/minc/tests/test_auto_XfmConcat.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import XfmConcat def test_XfmConcat_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), environ=dict( @@ -15,30 +16,37 @@ def test_XfmConcat_inputs(): usedefault=True, ), input_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, - sep=' ', + sep=" ", ), input_grid_files=dict(), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, hash_files=False, - 
name_source=['input_files'], - name_template='%s_xfmconcat.xfm', + name_source=["input_files"], + name_template="%s_xfmconcat.xfm", position=-1, ), - verbose=dict(argstr='-verbose', ), + verbose=dict( + argstr="-verbose", + ), ) inputs = XfmConcat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_XfmConcat_outputs(): output_map = dict( - output_file=dict(), + output_file=dict( + extensions=None, + ), output_grids=dict(), ) outputs = XfmConcat.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_XfmInvert.py b/nipype/interfaces/minc/tests/test_auto_XfmInvert.py index d729e90639..aa8fb61ccd 100644 --- a/nipype/interfaces/minc/tests/test_auto_XfmInvert.py +++ b/nipype/interfaces/minc/tests/test_auto_XfmInvert.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..minc import XfmInvert def test_XfmInvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), clobber=dict( - argstr='-clobber', + argstr="-clobber", usedefault=True, ), environ=dict( @@ -15,26 +16,36 @@ def test_XfmInvert_inputs(): usedefault=True, ), input_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), output_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), - verbose=dict(argstr='-verbose', ), + verbose=dict( + argstr="-verbose", + ), ) inputs = XfmInvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_XfmInvert_outputs(): output_map = dict( - output_file=dict(), - output_grid=dict(), + output_file=dict( + extensions=None, + ), + output_grid=dict( + extensions=None, + ), ) outputs = XfmInvert.output_spec() diff --git a/nipype/interfaces/mipav/__init__.py b/nipype/interfaces/mipav/__init__.py index 8a9e08fd9b..9cde4c0bcd 100644 --- a/nipype/interfaces/mipav/__init__.py +++ b/nipype/interfaces/mipav/__init__.py @@ -1,12 +1,22 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) +"""MIPAV enables quantitative analysis and visualization of multimodal medical images.""" + from .developer import ( - JistLaminarVolumetricLayering, JistBrainMgdmSegmentation, - JistLaminarProfileGeometry, JistLaminarProfileCalculator, MedicAlgorithmN3, - JistLaminarROIAveraging, MedicAlgorithmLesionToads, - JistBrainMp2rageSkullStripping, JistCortexSurfaceMeshInflation, RandomVol, - MedicAlgorithmImageCalculator, JistBrainMp2rageDuraEstimation, - JistLaminarProfileSampling, MedicAlgorithmMipavReorient, - MedicAlgorithmSPECTRE2010, JistBrainPartialVolumeFilter, - JistIntensityMp2rageMasking, MedicAlgorithmThresholdToBinaryMask) + JistLaminarVolumetricLayering, + JistBrainMgdmSegmentation, + JistLaminarProfileGeometry, + JistLaminarProfileCalculator, + MedicAlgorithmN3, + JistLaminarROIAveraging, + MedicAlgorithmLesionToads, + JistBrainMp2rageSkullStripping, + JistCortexSurfaceMeshInflation, + RandomVol, + MedicAlgorithmImageCalculator, + JistBrainMp2rageDuraEstimation, + JistLaminarProfileSampling, + MedicAlgorithmMipavReorient, + MedicAlgorithmSPECTRE2010, + JistBrainPartialVolumeFilter, + JistIntensityMp2rageMasking, + MedicAlgorithmThresholdToBinaryMask, +) diff --git a/nipype/interfaces/mipav/developer.py b/nipype/interfaces/mipav/developer.py index 
ffb9e10cc3..a76f2e35c4 100644 --- a/nipype/interfaces/mipav/developer.py +++ b/nipype/interfaces/mipav/developer.py @@ -1,49 +1,50 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ..base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ..base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class JistLaminarVolumetricLayeringInputSpec(CommandLineInputSpec): inInner = File( - desc="Inner Distance Image (GM/WM boundary)", - exists=True, - argstr="--inInner %s") + desc="Inner Distance Image (GM/WM boundary)", exists=True, argstr="--inInner %s" + ) inOuter = File( desc="Outer Distance Image (CSF/GM boundary)", exists=True, - argstr="--inOuter %s") + argstr="--inOuter %s", + ) inNumber = traits.Int(desc="Number of layers", argstr="--inNumber %d") inMax = traits.Int( - desc="Max iterations for narrow band evolution", argstr="--inMax %d") + desc="Max iterations for narrow band evolution", argstr="--inMax %d" + ) inMin = traits.Float( - desc="Min change ratio for narrow band evolution", argstr="--inMin %f") + desc="Min change ratio for narrow band evolution", argstr="--inMin %f" + ) inLayering = traits.Enum( "distance-preserving", "volume-preserving", desc="Layering method", - argstr="--inLayering %s") + argstr="--inLayering %s", + ) inLayering2 = traits.Enum( - "outward", - "inward", - desc="Layering direction", - argstr="--inLayering2 %s") + "outward", "inward", desc="Layering direction", argstr="--inLayering2 %s" + ) incurvature = traits.Int( - desc="curvature approximation scale (voxels)", - argstr="--incurvature %d") + desc="curvature approximation scale (voxels)", argstr="--incurvature %d" + ) inratio = traits.Float( - desc="ratio smoothing kernel size (voxels)", argstr="--inratio %f") + desc="ratio smoothing kernel size (voxels)", argstr="--inratio %f" + ) inpresmooth = traits.Enum( - "true", - "false", - desc="pre-smooth cortical surfaces", - argstr="--inpresmooth %s") + "true", "false", desc="pre-smooth cortical surfaces", argstr="--inpresmooth %s" + ) inTopology = traits.Enum( "26/6", "6/26", @@ -54,35 +55,40 @@ class JistLaminarVolumetricLayeringInputSpec(CommandLineInputSpec): "wco", "no", desc="Topology", - argstr="--inTopology %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--inTopology %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outContinuous = traits.Either( traits.Bool, File(), hash_files=False, desc="Continuous depth measurement", - argstr="--outContinuous %s") + argstr="--outContinuous %s", + ) outDiscrete = traits.Either( traits.Bool, File(), hash_files=False, desc="Discrete sampled layers", - argstr="--outDiscrete %s") + argstr="--outDiscrete %s", + ) outLayer = traits.Either( traits.Bool, File(), hash_files=False, desc="Layer boundary surfaces", - argstr="--outLayer %s") + argstr="--outLayer %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class 
JistLaminarVolumetricLayeringOutputSpec(TraitedSpec): @@ -92,47 +98,43 @@ class JistLaminarVolumetricLayeringOutputSpec(TraitedSpec): class JistLaminarVolumetricLayering(SEMLikeCommandLine): - """title: Volumetric Layering + """Volumetric Layering. -category: Developer Tools + Builds a continuous layering of the cortex following distance-preserving or volume-preserving + models of cortical folding. -description: Builds a continuous layering of the cortex following distance-preserving or volume-preserving models of cortical folding. -Waehnert MD, Dinse J, Weiss M, Streicher MN, Waehnert P, Geyer S, Turner R, Bazin PL, Anatomically motivated modeling of cortical laminae, Neuroimage, 2013. + References + ---------- + Waehnert MD, Dinse J, Weiss M, Streicher MN, Waehnert P, Geyer S, Turner R, Bazin PL, + Anatomically motivated modeling of cortical laminae, Neuroimage, 2013. -version: 3.0.RC - -contributor: Miriam Waehnert (waehnert@cbs.mpg.de) http://www.cbs.mpg.de/ - -""" + """ input_spec = JistLaminarVolumetricLayeringInputSpec output_spec = JistLaminarVolumetricLayeringOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarVolumetricLayering " _outputs_filenames = { - 'outContinuous': 'outContinuous.nii', - 'outLayer': 'outLayer.nii', - 'outDiscrete': 'outDiscrete.nii' + "outContinuous": "outContinuous.nii", + "outLayer": "outLayer.nii", + "outDiscrete": "outDiscrete.nii", } _redirect_x = True class JistBrainMgdmSegmentationInputSpec(CommandLineInputSpec): - inMP2RAGE = File( - desc="MP2RAGE T1 Map Image", exists=True, argstr="--inMP2RAGE %s") + inMP2RAGE = File(desc="MP2RAGE T1 Map Image", exists=True, argstr="--inMP2RAGE %s") inMP2RAGE2 = File( - desc="MP2RAGE T1-weighted Image", - exists=True, - argstr="--inMP2RAGE2 %s") + desc="MP2RAGE T1-weighted Image", exists=True, argstr="--inMP2RAGE2 %s" + ) inPV = File(desc="PV / Dura Image", exists=True, argstr="--inPV %s") inMPRAGE = File( - desc="MPRAGE T1-weighted Image", exists=True, argstr="--inMPRAGE %s") + desc="MPRAGE T1-weighted Image", exists=True, argstr="--inMPRAGE %s" + ) inFLAIR = File(desc="FLAIR Image", exists=True, argstr="--inFLAIR %s") inAtlas = File(desc="Atlas file", exists=True, argstr="--inAtlas %s") inData = traits.Float(desc="Data weight", argstr="--inData %f") - inCurvature = traits.Float( - desc="Curvature weight", argstr="--inCurvature %f") - inPosterior = traits.Float( - desc="Posterior scale (mm)", argstr="--inPosterior %f") + inCurvature = traits.Float(desc="Curvature weight", argstr="--inCurvature %f") + inPosterior = traits.Float(desc="Posterior scale (mm)", argstr="--inPosterior %f") inMax = traits.Int(desc="Max iterations", argstr="--inMax %d") inMin = traits.Float(desc="Min change", argstr="--inMin %f") inSteps = traits.Int(desc="Steps", argstr="--inSteps %d") @@ -146,89 +148,87 @@ class JistBrainMgdmSegmentationInputSpec(CommandLineInputSpec): "wco", "no", desc="Topology", - argstr="--inTopology %s") + argstr="--inTopology %s", + ) inCompute = traits.Enum( - "true", "false", desc="Compute posteriors", argstr="--inCompute %s") + "true", "false", desc="Compute posteriors", argstr="--inCompute %s" + ) inAdjust = traits.Enum( - "true", - "false", - desc="Adjust intensity priors", - argstr="--inAdjust %s") + "true", "false", desc="Adjust intensity priors", argstr="--inAdjust %s" + ) inOutput = traits.Enum( - "segmentation", - "memberships", - desc="Output images", - argstr="--inOutput %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + 
"segmentation", "memberships", desc="Output images", argstr="--inOutput %s" + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outSegmented = traits.Either( traits.Bool, File(), hash_files=False, desc="Segmented Brain Image", - argstr="--outSegmented %s") + argstr="--outSegmented %s", + ) outLevelset = traits.Either( traits.Bool, File(), hash_files=False, desc="Levelset Boundary Image", - argstr="--outLevelset %s") + argstr="--outLevelset %s", + ) outPosterior2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Posterior Maximum Memberships (4D)", - argstr="--outPosterior2 %s") + argstr="--outPosterior2 %s", + ) outPosterior3 = traits.Either( traits.Bool, File(), hash_files=False, desc="Posterior Maximum Labels (4D)", - argstr="--outPosterior3 %s") + argstr="--outPosterior3 %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistBrainMgdmSegmentationOutputSpec(TraitedSpec): outSegmented = File(desc="Segmented Brain Image", exists=True) outLevelset = File(desc="Levelset Boundary Image", exists=True) - outPosterior2 = File( - desc="Posterior Maximum Memberships (4D)", exists=True) + outPosterior2 = File(desc="Posterior Maximum Memberships (4D)", exists=True) outPosterior3 = File(desc="Posterior Maximum Labels (4D)", exists=True) class JistBrainMgdmSegmentation(SEMLikeCommandLine): - """title: MGDM Whole Brain Segmentation - -category: Developer Tools + """MGDM Whole Brain Segmentation. -description: Estimate brain structures from an atlas for a MRI dataset (multiple input combinations are possible). + Estimate brain structures from an atlas for a MRI dataset (multiple input combinations + are possible). 
-version: 2.0.RC - -""" + """ input_spec = JistBrainMgdmSegmentationInputSpec output_spec = JistBrainMgdmSegmentationOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMgdmSegmentation " _outputs_filenames = { - 'outSegmented': 'outSegmented.nii', - 'outPosterior2': 'outPosterior2.nii', - 'outPosterior3': 'outPosterior3.nii', - 'outLevelset': 'outLevelset.nii' + "outSegmented": "outSegmented.nii", + "outPosterior2": "outPosterior2.nii", + "outPosterior3": "outPosterior3.nii", + "outLevelset": "outLevelset.nii", } _redirect_x = True class JistLaminarProfileGeometryInputSpec(CommandLineInputSpec): - inProfile = File( - desc="Profile Surface Image", exists=True, argstr="--inProfile %s") + inProfile = File(desc="Profile Surface Image", exists=True, argstr="--inProfile %s") incomputed = traits.Enum( "thickness", "curvedness", @@ -239,32 +239,27 @@ class JistLaminarProfileGeometryInputSpec(CommandLineInputSpec): "profile_curvature", "profile_torsion", desc="computed measure", - argstr="--incomputed %s") + argstr="--incomputed %s", + ) inregularization = traits.Enum( - "none", - "Gaussian", - desc="regularization", - argstr="--inregularization %s") - insmoothing = traits.Float( - desc="smoothing parameter", argstr="--insmoothing %f") - inoutside = traits.Float( - desc="outside extension (mm)", argstr="--inoutside %f") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + "none", "Gaussian", desc="regularization", argstr="--inregularization %s" + ) + insmoothing = traits.Float(desc="smoothing parameter", argstr="--insmoothing %f") + inoutside = traits.Float(desc="outside extension (mm)", argstr="--inoutside %f") + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outResult = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="Result", - argstr="--outResult %s") + traits.Bool, File(), hash_files=False, desc="Result", argstr="--outResult %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistLaminarProfileGeometryOutputSpec(TraitedSpec): @@ -272,51 +267,42 @@ class JistLaminarProfileGeometryOutputSpec(TraitedSpec): class JistLaminarProfileGeometry(SEMLikeCommandLine): - """title: Profile Geometry - -category: Developer Tools - -description: Compute various geometric quantities for a cortical layers. 
- -version: 3.0.RC - -""" + """Compute various geometric quantities for a cortical layers.""" input_spec = JistLaminarProfileGeometryInputSpec output_spec = JistLaminarProfileGeometryOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileGeometry " - _outputs_filenames = {'outResult': 'outResult.nii'} + _outputs_filenames = {"outResult": "outResult.nii"} _redirect_x = True class JistLaminarProfileCalculatorInputSpec(CommandLineInputSpec): inIntensity = File( - desc="Intensity Profile Image", exists=True, argstr="--inIntensity %s") - inMask = File( - desc="Mask Image (opt, 3D or 4D)", exists=True, argstr="--inMask %s") + desc="Intensity Profile Image", exists=True, argstr="--inIntensity %s" + ) + inMask = File(desc="Mask Image (opt, 3D or 4D)", exists=True, argstr="--inMask %s") incomputed = traits.Enum( "mean", "stdev", "skewness", "kurtosis", desc="computed statistic", - argstr="--incomputed %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--incomputed %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outResult = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="Result", - argstr="--outResult %s") + traits.Bool, File(), hash_files=False, desc="Result", argstr="--outResult %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistLaminarProfileCalculatorOutputSpec(TraitedSpec): @@ -324,77 +310,70 @@ class JistLaminarProfileCalculatorOutputSpec(TraitedSpec): class JistLaminarProfileCalculator(SEMLikeCommandLine): - """title: Profile Calculator - -category: Developer Tools - -description: Compute various moments for intensities mapped along a cortical profile. 
- -version: 3.0.RC - -""" + """Compute various moments for intensities mapped along a cortical profile.""" input_spec = JistLaminarProfileCalculatorInputSpec output_spec = JistLaminarProfileCalculatorOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileCalculator " - _outputs_filenames = {'outResult': 'outResult.nii'} + _outputs_filenames = {"outResult": "outResult.nii"} _redirect_x = True class MedicAlgorithmN3InputSpec(CommandLineInputSpec): inInput = File(desc="Input Volume", exists=True, argstr="--inInput %s") inSignal = traits.Float( - desc= - "Default = min + 1, Values at less than threshold are treated as part of the background", - argstr="--inSignal %f") - inMaximum = traits.Int( - desc="Maximum number of Iterations", argstr="--inMaximum %d") + desc="Default = min + 1, Values at less than threshold are treated as part of the background", + argstr="--inSignal %f", + ) + inMaximum = traits.Int(desc="Maximum number of Iterations", argstr="--inMaximum %d") inEnd = traits.Float( - desc= - "Usually 0.01-0.00001, The measure used to terminate the iterations is the coefficient of variation of change in field estimates between successive iterations.", - argstr="--inEnd %f") + desc="Usually 0.01-0.00001, The measure used to terminate the iterations is the coefficient of variation of change in field estimates between successive iterations.", + argstr="--inEnd %f", + ) inField = traits.Float( - desc= - "Characteristic distance over which the field varies. The distance between adjacent knots in bspline fitting with at least 4 knots going in every dimension. The default in the dialog is one third the distance (resolution * extents) of the smallest dimension.", - argstr="--inField %f") + desc="Characteristic distance over which the field varies. The distance between adjacent knots in bspline fitting with at least 4 knots going in every dimension. The default in the dialog is one third the distance (resolution * extents) of the smallest dimension.", + argstr="--inField %f", + ) inSubsample = traits.Float( - desc= - "Usually between 1-32, The factor by which the data is subsampled to a lower resolution in estimating the slowly varying non-uniformity field. Reduce sampling in the finest sampling direction by the shrink factor.", - argstr="--inSubsample %f") + desc="Usually between 1-32, The factor by which the data is subsampled to a lower resolution in estimating the slowly varying non-uniformity field. Reduce sampling in the finest sampling direction by the shrink factor.", + argstr="--inSubsample %f", + ) inKernel = traits.Float( - desc= - "Usually between 0.05-0.50, Width of deconvolution kernel used to sharpen the histogram. Larger values give faster convergence while smaller values give greater accuracy.", - argstr="--inKernel %f") - inWeiner = traits.Float( - desc="Usually between 0.0-1.0", argstr="--inWeiner %f") + desc="Usually between 0.05-0.50, Width of deconvolution kernel used to sharpen the histogram. Larger values give faster convergence while smaller values give greater accuracy.", + argstr="--inKernel %f", + ) + inWeiner = traits.Float(desc="Usually between 0.0-1.0", argstr="--inWeiner %f") inAutomatic = traits.Enum( "true", "false", - desc= - "If true determines the threshold by histogram analysis. If true a VOI cannot be used and the input threshold is ignored.", - argstr="--inAutomatic %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + desc="If true determines the threshold by histogram analysis. 
If true a VOI cannot be used and the input threshold is ignored.", + argstr="--inAutomatic %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outInhomogeneity = traits.Either( traits.Bool, File(), hash_files=False, desc="Inhomogeneity Corrected Volume", - argstr="--outInhomogeneity %s") + argstr="--outInhomogeneity %s", + ) outInhomogeneity2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Inhomogeneity Field", - argstr="--outInhomogeneity2 %s") + argstr="--outInhomogeneity2 %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmN3OutputSpec(TraitedSpec): @@ -403,49 +382,39 @@ class MedicAlgorithmN3OutputSpec(TraitedSpec): class MedicAlgorithmN3(SEMLikeCommandLine): - """title: N3 Correction - -category: Developer Tools - -description: Non-parametric Intensity Non-uniformity Correction, N3, originally by J.G. Sled. - -version: 1.8.R - -""" + """Non-parametric Intensity Non-uniformity Correction, N3, originally by J.G. Sled.""" input_spec = MedicAlgorithmN3InputSpec output_spec = MedicAlgorithmN3OutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmN3 " _outputs_filenames = { - 'outInhomogeneity2': 'outInhomogeneity2.nii', - 'outInhomogeneity': 'outInhomogeneity.nii' + "outInhomogeneity2": "outInhomogeneity2.nii", + "outInhomogeneity": "outInhomogeneity.nii", } _redirect_x = True class JistLaminarROIAveragingInputSpec(CommandLineInputSpec): inIntensity = File( - desc="Intensity Profile Image", exists=True, argstr="--inIntensity %s") + desc="Intensity Profile Image", exists=True, argstr="--inIntensity %s" + ) inROI = File(desc="ROI Mask", exists=True, argstr="--inROI %s") inROI2 = traits.Str(desc="ROI Name", argstr="--inROI2 %s") - inMask = File( - desc="Mask Image (opt, 3D or 4D)", exists=True, argstr="--inMask %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + inMask = File(desc="Mask Image (opt, 3D or 4D)", exists=True, argstr="--inMask %s") + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outROI3 = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="ROI Average", - argstr="--outROI3 %s") + traits.Bool, File(), hash_files=False, desc="ROI Average", argstr="--outROI3 %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistLaminarROIAveragingOutputSpec(TraitedSpec): @@ -453,165 +422,164 @@ class JistLaminarROIAveragingOutputSpec(TraitedSpec): class JistLaminarROIAveraging(SEMLikeCommandLine): - """title: Profile ROI Averaging - -category: Developer Tools - -description: Compute an average profile over a given ROI. 
- -version: 3.0.RC - -""" + """Compute an average profile over a given ROI.""" input_spec = JistLaminarROIAveragingInputSpec output_spec = JistLaminarROIAveragingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarROIAveraging " - _outputs_filenames = {'outROI3': 'outROI3'} + _outputs_filenames = {"outROI3": "outROI3"} _redirect_x = True class MedicAlgorithmLesionToadsInputSpec(CommandLineInputSpec): - inT1_MPRAGE = File( - desc="T1_MPRAGE Image", exists=True, argstr="--inT1_MPRAGE %s") - inT1_SPGR = File( - desc="T1_SPGR Image", exists=True, argstr="--inT1_SPGR %s") + inT1_MPRAGE = File(desc="T1_MPRAGE Image", exists=True, argstr="--inT1_MPRAGE %s") + inT1_SPGR = File(desc="T1_SPGR Image", exists=True, argstr="--inT1_SPGR %s") inFLAIR = File(desc="FLAIR Image", exists=True, argstr="--inFLAIR %s") inAtlas = traits.Enum( - "With Lesion", "No Lesion", desc="Atlas to Use", argstr="--inAtlas %s") + "With Lesion", "No Lesion", desc="Atlas to Use", argstr="--inAtlas %s" + ) inOutput = traits.Enum( "hard segmentation", "hard segmentation+memberships", "cruise inputs", "dura removal inputs", desc="Output images", - argstr="--inOutput %s") + argstr="--inOutput %s", + ) inOutput2 = traits.Enum( "true", "false", - desc= - "Output the hard classification using maximum membership (not neceesarily topologically correct)", - argstr="--inOutput2 %s") + desc="Output the hard classification using maximum membership (not neceesarily topologically correct)", + argstr="--inOutput2 %s", + ) inCorrect = traits.Enum( - "true", - "false", - desc="Correct MR field inhomogeneity.", - argstr="--inCorrect %s") + "true", "false", desc="Correct MR field inhomogeneity.", argstr="--inCorrect %s" + ) inOutput3 = traits.Enum( "true", "false", desc="Output the estimated inhomogeneity field", - argstr="--inOutput3 %s") + argstr="--inOutput3 %s", + ) inAtlas2 = File( - desc="Atlas File - With Lesions", exists=True, argstr="--inAtlas2 %s") + desc="Atlas File - With Lesions", exists=True, argstr="--inAtlas2 %s" + ) inAtlas3 = File( desc="Atlas File - No Lesion - T1 and FLAIR", exists=True, - argstr="--inAtlas3 %s") + argstr="--inAtlas3 %s", + ) inAtlas4 = File( - desc="Atlas File - No Lesion - T1 Only", - exists=True, - argstr="--inAtlas4 %s") + desc="Atlas File - No Lesion - T1 Only", exists=True, argstr="--inAtlas4 %s" + ) inMaximum = traits.Int( - desc= - "Maximum distance from the interventricular WM boundary to downweight the lesion membership to avoid false postives", - argstr="--inMaximum %d") - inMaximum2 = traits.Int( - desc="Maximum Ventircle Distance", argstr="--inMaximum2 %d") + desc="Maximum distance from the interventricular WM boundary to downweight the lesion membership to avoid false positives", + argstr="--inMaximum %d", + ) + inMaximum2 = traits.Int(desc="Maximum Ventircle Distance", argstr="--inMaximum2 %d") inMaximum3 = traits.Int( - desc="Maximum InterVentricular Distance", argstr="--inMaximum3 %d") + desc="Maximum InterVentricular Distance", argstr="--inMaximum3 %d" + ) inInclude = traits.Enum( "true", "false", desc="Include lesion in WM class in hard classification", - argstr="--inInclude %s") + argstr="--inInclude %s", + ) inAtlas5 = traits.Float( desc="Controls the effect of the statistical atlas on the segmentation", - argstr="--inAtlas5 %f") + argstr="--inAtlas5 %f", + ) inSmooting = traits.Float( - desc="Controls the effect of neighberhood voxels on the membership", - argstr="--inSmooting %f") + desc="Controls the effect of neighborhood voxels on the membership", 
+ argstr="--inSmooting %f", + ) inMaximum4 = traits.Float( - desc= - "Maximum amount of relative change in the energy function considered as the convergence criteria", - argstr="--inMaximum4 %f") - inMaximum5 = traits.Int( - desc="Maximum iterations", argstr="--inMaximum5 %d") + desc="Maximum amount of relative change in the energy function considered as the convergence criteria", + argstr="--inMaximum4 %f", + ) + inMaximum5 = traits.Int(desc="Maximum iterations", argstr="--inMaximum5 %d") inAtlas6 = traits.Enum( - "rigid", - "multi_fully_affine", - desc="Atlas alignment", - argstr="--inAtlas6 %s") + "rigid", "multi_fully_affine", desc="Atlas alignment", argstr="--inAtlas6 %s" + ) inConnectivity = traits.Enum( "(26,6)", "(6,26)", "(6,18)", "(18,6)", desc="Connectivity (foreground,background)", - argstr="--inConnectivity %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--inConnectivity %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outHard = traits.Either( traits.Bool, File(), hash_files=False, desc="Hard segmentation", - argstr="--outHard %s") + argstr="--outHard %s", + ) outHard2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Hard segmentationfrom memberships", - argstr="--outHard2 %s") + argstr="--outHard2 %s", + ) outInhomogeneity = traits.Either( traits.Bool, File(), hash_files=False, desc="Inhomogeneity Field", - argstr="--outInhomogeneity %s") + argstr="--outInhomogeneity %s", + ) outMembership = traits.Either( traits.Bool, File(), hash_files=False, desc="Membership Functions", - argstr="--outMembership %s") + argstr="--outMembership %s", + ) outLesion = traits.Either( traits.Bool, File(), hash_files=False, desc="Lesion Segmentation", - argstr="--outLesion %s") + argstr="--outLesion %s", + ) outSulcal = traits.Either( traits.Bool, File(), hash_files=False, desc="Sulcal CSF Membership", - argstr="--outSulcal %s") + argstr="--outSulcal %s", + ) outCortical = traits.Either( traits.Bool, File(), hash_files=False, desc="Cortical GM Membership", - argstr="--outCortical %s") + argstr="--outCortical %s", + ) outFilled = traits.Either( traits.Bool, File(), hash_files=False, desc="Filled WM Membership", - argstr="--outFilled %s") + argstr="--outFilled %s", + ) outWM = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="WM Mask", - argstr="--outWM %s") + traits.Bool, File(), hash_files=False, desc="WM Mask", argstr="--outWM %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmLesionToadsOutputSpec(TraitedSpec): @@ -627,85 +595,85 @@ class MedicAlgorithmLesionToadsOutputSpec(TraitedSpec): class MedicAlgorithmLesionToads(SEMLikeCommandLine): - """title: Lesion TOADS - -category: Developer Tools - -description: Algorithm for simulataneous brain structures and MS lesion segmentation of MS Brains. The brain segmentation is topologically consistent and the algorithm can use multiple MR sequences as input data. -N. Shiee, P.-L. Bazin, A.Z. Ozturk, P.A. Calabresi, D.S. Reich, D.L. Pham, "A Topology-Preserving Approach to the Segmentation of Brain Images with Multiple Sclerosis", NeuroImage, vol. 49, no. 2, pp. 1524-1535, 2010. 
+ """Algorithm for simultaneous brain structures and MS lesion segmentation of MS Brains. -version: 1.9.R + The brain segmentation is topologically consistent and the algorithm can use multiple + MR sequences as input data. -contributor: Navid Shiee (navid.shiee@nih.gov) http://iacl.ece.jhu.edu/~nshiee/ + References + ---------- + N. Shiee, P.-L. Bazin, A.Z. Ozturk, P.A. Calabresi, D.S. Reich, D.L. Pham, + "A Topology-Preserving Approach to the Segmentation of Brain Images with Multiple Sclerosis", + NeuroImage, vol. 49, no. 2, pp. 1524-1535, 2010. -""" + """ input_spec = MedicAlgorithmLesionToadsInputSpec output_spec = MedicAlgorithmLesionToadsOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmLesionToads " _outputs_filenames = { - 'outWM': 'outWM.nii', - 'outHard': 'outHard.nii', - 'outFilled': 'outFilled.nii', - 'outMembership': 'outMembership.nii', - 'outInhomogeneity': 'outInhomogeneity.nii', - 'outCortical': 'outCortical.nii', - 'outHard2': 'outHard2.nii', - 'outLesion': 'outLesion.nii', - 'outSulcal': 'outSulcal.nii' + "outWM": "outWM.nii", + "outHard": "outHard.nii", + "outFilled": "outFilled.nii", + "outMembership": "outMembership.nii", + "outInhomogeneity": "outInhomogeneity.nii", + "outCortical": "outCortical.nii", + "outHard2": "outHard2.nii", + "outLesion": "outLesion.nii", + "outSulcal": "outSulcal.nii", } _redirect_x = True class JistBrainMp2rageSkullStrippingInputSpec(CommandLineInputSpec): inSecond = File( - desc="Second inversion (Inv2) Image", - exists=True, - argstr="--inSecond %s") - inT1 = File( - desc="T1 Map (T1_Images) Image (opt)", exists=True, argstr="--inT1 %s") + desc="Second inversion (Inv2) Image", exists=True, argstr="--inSecond %s" + ) + inT1 = File(desc="T1 Map (T1_Images) Image (opt)", exists=True, argstr="--inT1 %s") inT1weighted = File( - desc="T1-weighted (UNI) Image (opt)", - exists=True, - argstr="--inT1weighted %s") - inFilter = File( - desc="Filter Image (opt)", exists=True, argstr="--inFilter %s") - inSkip = traits.Enum( - "true", "false", desc="Skip zero values", argstr="--inSkip %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + desc="T1-weighted (UNI) Image (opt)", exists=True, argstr="--inT1weighted %s" + ) + inFilter = File(desc="Filter Image (opt)", exists=True, argstr="--inFilter %s") + inSkip = traits.Enum("true", "false", desc="Skip zero values", argstr="--inSkip %s") + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outBrain = traits.Either( traits.Bool, File(), hash_files=False, desc="Brain Mask Image", - argstr="--outBrain %s") + argstr="--outBrain %s", + ) outMasked = traits.Either( traits.Bool, File(), hash_files=False, desc="Masked T1 Map Image", - argstr="--outMasked %s") + argstr="--outMasked %s", + ) outMasked2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Masked T1-weighted Image", - argstr="--outMasked2 %s") + argstr="--outMasked2 %s", + ) outMasked3 = traits.Either( traits.Bool, File(), hash_files=False, desc="Masked Filter Image", - argstr="--outMasked3 %s") + argstr="--outMasked3 %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class 
JistBrainMp2rageSkullStrippingOutputSpec(TraitedSpec): @@ -716,38 +684,33 @@ class JistBrainMp2rageSkullStrippingOutputSpec(TraitedSpec): class JistBrainMp2rageSkullStripping(SEMLikeCommandLine): - """title: MP2RAGE Skull Stripping + """Estimate a brain mask for a MP2RAGE dataset. -category: Developer Tools + At least a T1-weighted or a T1 map image is required. -description: Estimate a brain mask for a MP2RAGE dataset. At least a T1-weighted or a T1 map image is required. - -version: 3.0.RC - -""" + """ input_spec = JistBrainMp2rageSkullStrippingInputSpec output_spec = JistBrainMp2rageSkullStrippingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMp2rageSkullStripping " _outputs_filenames = { - 'outBrain': 'outBrain.nii', - 'outMasked3': 'outMasked3.nii', - 'outMasked2': 'outMasked2.nii', - 'outMasked': 'outMasked.nii' + "outBrain": "outBrain.nii", + "outMasked3": "outMasked3.nii", + "outMasked2": "outMasked2.nii", + "outMasked": "outMasked.nii", } _redirect_x = True class JistCortexSurfaceMeshInflationInputSpec(CommandLineInputSpec): - inLevelset = File( - desc="Levelset Image", exists=True, argstr="--inLevelset %s") + inLevelset = File(desc="Levelset Image", exists=True, argstr="--inLevelset %s") inSOR = traits.Float(desc="SOR Parameter", argstr="--inSOR %f") - inMean = traits.Float( - desc="Mean Curvature Threshold", argstr="--inMean %f") + inMean = traits.Float(desc="Mean Curvature Threshold", argstr="--inMean %f") inStep = traits.Int(desc="Step Size", argstr="--inStep %d") inMax = traits.Int(desc="Max Iterations", argstr="--inMax %d") inLorentzian = traits.Enum( - "true", "false", desc="Lorentzian Norm", argstr="--inLorentzian %s") + "true", "false", desc="Lorentzian Norm", argstr="--inLorentzian %s" + ) inTopology = traits.Enum( "26/6", "6/26", @@ -758,29 +721,33 @@ class JistCortexSurfaceMeshInflationInputSpec(CommandLineInputSpec): "wco", "no", desc="Topology", - argstr="--inTopology %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--inTopology %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outOriginal = traits.Either( traits.Bool, File(), hash_files=False, desc="Original Surface", - argstr="--outOriginal %s") + argstr="--outOriginal %s", + ) outInflated = traits.Either( traits.Bool, File(), hash_files=False, desc="Inflated Surface", - argstr="--outInflated %s") + argstr="--outInflated %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistCortexSurfaceMeshInflationOutputSpec(TraitedSpec): @@ -789,68 +756,52 @@ class JistCortexSurfaceMeshInflationOutputSpec(TraitedSpec): class JistCortexSurfaceMeshInflation(SEMLikeCommandLine): - """title: Surface Mesh Inflation - -category: Developer Tools - -description: Inflates a cortical surface mesh. -D. Tosun, M. E. Rettmann, X. Han, X. Tao, C. Xu, S. M. Resnick, D. Pham, and J. L. Prince, Cortical Surface Segmentation and Mapping, NeuroImage, vol. 23, pp. S108--S118, 2004. + """Inflates a cortical surface mesh. -version: 3.0.RC + References + ---------- + D. Tosun, M. E. Rettmann, X. Han, X. Tao, C. Xu, S. M. Resnick, D. Pham, and J. L. 
Prince, + Cortical Surface Segmentation and Mapping, NeuroImage, vol. 23, pp. S108--S118, 2004. -contributor: Duygu Tosun - -""" + """ input_spec = JistCortexSurfaceMeshInflationInputSpec output_spec = JistCortexSurfaceMeshInflationOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.cortex.JistCortexSurfaceMeshInflation " - _outputs_filenames = { - 'outOriginal': 'outOriginal', - 'outInflated': 'outInflated' - } + _outputs_filenames = {"outOriginal": "outOriginal", "outInflated": "outInflated"} _redirect_x = True class RandomVolInputSpec(CommandLineInputSpec): - inSize = traits.Int( - desc="Size of Volume in X direction", argstr="--inSize %d") - inSize2 = traits.Int( - desc="Size of Volume in Y direction", argstr="--inSize2 %d") - inSize3 = traits.Int( - desc="Size of Volume in Z direction", argstr="--inSize3 %d") - inSize4 = traits.Int( - desc="Size of Volume in t direction", argstr="--inSize4 %d") + inSize = traits.Int(desc="Size of Volume in X direction", argstr="--inSize %d") + inSize2 = traits.Int(desc="Size of Volume in Y direction", argstr="--inSize2 %d") + inSize3 = traits.Int(desc="Size of Volume in Z direction", argstr="--inSize3 %d") + inSize4 = traits.Int(desc="Size of Volume in t direction", argstr="--inSize4 %d") inStandard = traits.Int( - desc="Standard Deviation for Normal Distribution", - argstr="--inStandard %d") + desc="Standard Deviation for Normal Distribution", argstr="--inStandard %d" + ) inLambda = traits.Float( - desc="Lambda Value for Exponential Distribution", - argstr="--inLambda %f") + desc="Lambda Value for Exponential Distribution", argstr="--inLambda %f" + ) inMaximum = traits.Int(desc="Maximum Value", argstr="--inMaximum %d") inMinimum = traits.Int(desc="Minimum Value", argstr="--inMinimum %d") inField = traits.Enum( - "Uniform", - "Normal", - "Exponential", - desc="Field", - argstr="--inField %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + "Uniform", "Normal", "Exponential", desc="Field", argstr="--inField %s" + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outRand1 = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="Rand1", - argstr="--outRand1 %s") + traits.Bool, File(), hash_files=False, desc="Rand1", argstr="--outRand1 %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class RandomVolOutputSpec(TraitedSpec): @@ -858,22 +809,12 @@ class RandomVolOutputSpec(TraitedSpec): class RandomVol(SEMLikeCommandLine): - """title: Random Volume Generator - -category: Developer Tools - -description: Generate a random scalar volume. 
- -version: 1.12.RC - -documentation-url: http://www.nitrc.org/projects/jist/ - -""" + """Generate a volume of random scalars.""" input_spec = RandomVolInputSpec output_spec = RandomVolOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.bme.smile.demo.RandomVol " - _outputs_filenames = {'outRand1': 'outRand1.nii'} + _outputs_filenames = {"outRand1": "outRand1.nii"} _redirect_x = True @@ -888,23 +829,26 @@ class MedicAlgorithmImageCalculatorInputSpec(CommandLineInputSpec): "Min", "Max", desc="Operation", - argstr="--inOperation %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--inOperation %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outResult = traits.Either( traits.Bool, File(), hash_files=False, desc="Result Volume", - argstr="--outResult %s") + argstr="--outResult %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmImageCalculatorOutputSpec(TraitedSpec): @@ -912,59 +856,50 @@ class MedicAlgorithmImageCalculatorOutputSpec(TraitedSpec): class MedicAlgorithmImageCalculator(SEMLikeCommandLine): - """title: Image Calculator - -category: Developer Tools - -description: Perform simple image calculator operations on two images. The operations include 'Add', 'Subtract', 'Multiply', and 'Divide' - -version: 1.10.RC + """Perform simple image calculator operations on two images. -documentation-url: http://www.iacl.ece.jhu.edu/ + The operations include 'Add', 'Subtract', 'Multiply', and 'Divide' -""" + """ input_spec = MedicAlgorithmImageCalculatorInputSpec output_spec = MedicAlgorithmImageCalculatorOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.utilities.math.MedicAlgorithmImageCalculator " - _outputs_filenames = {'outResult': 'outResult.nii'} + _outputs_filenames = {"outResult": "outResult.nii"} _redirect_x = True class JistBrainMp2rageDuraEstimationInputSpec(CommandLineInputSpec): inSecond = File( - desc="Second inversion (Inv2) Image", - exists=True, - argstr="--inSecond %s") - inSkull = File( - desc="Skull Stripping Mask", exists=True, argstr="--inSkull %s") + desc="Second inversion (Inv2) Image", exists=True, argstr="--inSecond %s" + ) + inSkull = File(desc="Skull Stripping Mask", exists=True, argstr="--inSkull %s") inDistance = traits.Float( - desc="Distance to background (mm)", argstr="--inDistance %f") + desc="Distance to background (mm)", argstr="--inDistance %f" + ) inoutput = traits.Enum( "dura_region", "boundary", "dura_prior", "bg_prior", "intens_prior", - desc= - "Outputs an estimate of the dura / CSF boundary or an estimate of the entire dura region.", - argstr="--inoutput %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + desc="Outputs an estimate of the dura / CSF boundary or an estimate of the entire dura region.", + argstr="--inoutput %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outDura = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="Dura Image", - argstr="--outDura %s") + traits.Bool, File(), hash_files=False, desc="Dura Image", argstr="--outDura %s" + ) null = traits.Str(desc="Execution Time", 
argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistBrainMp2rageDuraEstimationOutputSpec(TraitedSpec): @@ -972,52 +907,44 @@ class JistBrainMp2rageDuraEstimationOutputSpec(TraitedSpec): class JistBrainMp2rageDuraEstimation(SEMLikeCommandLine): - """title: MP2RAGE Dura Estimation - -category: Developer Tools - -description: Filters a MP2RAGE brain image to obtain a probability map of dura matter. - -version: 3.0.RC - -""" + """Filters a MP2RAGE brain image to obtain a probability map of dura matter.""" input_spec = JistBrainMp2rageDuraEstimationInputSpec output_spec = JistBrainMp2rageDuraEstimationOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation " - _outputs_filenames = {'outDura': 'outDura.nii'} + _outputs_filenames = {"outDura": "outDura.nii"} _redirect_x = True class JistLaminarProfileSamplingInputSpec(CommandLineInputSpec): - inProfile = File( - desc="Profile Surface Image", exists=True, argstr="--inProfile %s") - inIntensity = File( - desc="Intensity Image", exists=True, argstr="--inIntensity %s") - inCortex = File( - desc="Cortex Mask (opt)", exists=True, argstr="--inCortex %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + inProfile = File(desc="Profile Surface Image", exists=True, argstr="--inProfile %s") + inIntensity = File(desc="Intensity Image", exists=True, argstr="--inIntensity %s") + inCortex = File(desc="Cortex Mask (opt)", exists=True, argstr="--inCortex %s") + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outProfilemapped = traits.Either( traits.Bool, File(), hash_files=False, desc="Profile-mapped Intensity Image", - argstr="--outProfilemapped %s") + argstr="--outProfilemapped %s", + ) outProfile2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Profile 4D Mask", - argstr="--outProfile2 %s") + argstr="--outProfile2 %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistLaminarProfileSamplingOutputSpec(TraitedSpec): @@ -1026,29 +953,20 @@ class JistLaminarProfileSamplingOutputSpec(TraitedSpec): class JistLaminarProfileSampling(SEMLikeCommandLine): - """title: Profile Sampling - -category: Developer Tools - -description: Sample some intensity image along a cortical profile across layer surfaces. 
- -version: 3.0.RC - -""" + """Sample some intensity image along a cortical profile across layer surfaces.""" input_spec = JistLaminarProfileSamplingInputSpec output_spec = JistLaminarProfileSamplingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileSampling " _outputs_filenames = { - 'outProfile2': 'outProfile2.nii', - 'outProfilemapped': 'outProfilemapped.nii' + "outProfile2": "outProfile2.nii", + "outProfilemapped": "outProfilemapped.nii", } _redirect_x = True class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): - inSource = InputMultiPath( - File, desc="Source", sep=";", argstr="--inSource %s") + inSource = InputMultiPath(File, desc="Source", sep=";", argstr="--inSource %s") inTemplate = File(desc="Template", exists=True, argstr="--inTemplate %s") inNew = traits.Enum( "Dicom axial", @@ -1056,7 +974,8 @@ class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): "Dicom sagittal", "User defined", desc="New image orientation", - argstr="--inNew %s") + argstr="--inNew %s", + ) inUser = traits.Enum( "Unknown", "Patient Right to Left", @@ -1066,7 +985,8 @@ class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): "Patient Inferior to Superior", "Patient Superior to Inferior", desc="User defined X-axis orientation (image left to right)", - argstr="--inUser %s") + argstr="--inUser %s", + ) inUser2 = traits.Enum( "Unknown", "Patient Right to Left", @@ -1076,7 +996,8 @@ class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): "Patient Inferior to Superior", "Patient Superior to Inferior", desc="User defined Y-axis orientation (image top to bottom)", - argstr="--inUser2 %s") + argstr="--inUser2 %s", + ) inUser3 = traits.Enum( "Unknown", "Patient Right to Left", @@ -1086,14 +1007,16 @@ class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): "Patient Inferior to Superior", "Patient Superior to Inferior", desc="User defined Z-axis orientation (into the screen)", - argstr="--inUser3 %s") + argstr="--inUser3 %s", + ) inUser4 = traits.Enum( "Axial", "Coronal", "Sagittal", "Unknown", desc="User defined Image Orientation", - argstr="--inUser4 %s") + argstr="--inUser4 %s", + ) inInterpolation = traits.Enum( "Nearest Neighbor", "Trilinear", @@ -1104,26 +1027,30 @@ class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): "Heptic Lagrangian", "Windowed Sinc", desc="Interpolation", - argstr="--inInterpolation %s") + argstr="--inInterpolation %s", + ) inResolution = traits.Enum( "Unchanged", "Finest cubic", "Coarsest cubic", "Same as template", desc="Resolution", - argstr="--inResolution %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--inResolution %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outReoriented = InputMultiPath( - File, desc="Reoriented Volume", sep=";", argstr="--outReoriented %s") + File, desc="Reoriented Volume", sep=";", argstr="--outReoriented %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmMipavReorientOutputSpec(TraitedSpec): @@ -1131,15 +1058,7 @@ class MedicAlgorithmMipavReorientOutputSpec(TraitedSpec): class 
MedicAlgorithmMipavReorient(SEMLikeCommandLine): - """title: Reorient Volume - -category: Developer Tools - -description: Reorient a volume to a particular anatomical orientation. - -version: .alpha - -""" + """Reorient a volume to a particular anatomical orientation.""" input_spec = MedicAlgorithmMipavReorientInputSpec output_spec = MedicAlgorithmMipavReorientOutputSpec @@ -1150,86 +1069,91 @@ class MedicAlgorithmMipavReorient(SEMLikeCommandLine): class MedicAlgorithmSPECTRE2010InputSpec(CommandLineInputSpec): inInput = File( - desc="Input volume to be skullstripped.", - exists=True, - argstr="--inInput %s") + desc="Input volume to be skullstripped.", exists=True, argstr="--inInput %s" + ) inAtlas = File( - desc= - "SPECTRE atlas description file. A text file enumerating atlas files and landmarks.", + desc="SPECTRE atlas description file. A text file enumerating atlas files and landmarks.", exists=True, - argstr="--inAtlas %s") + argstr="--inAtlas %s", + ) inInitial = traits.Int( - desc= - "Erosion of the inital mask, which is based on the probability mask and the classification., The initial mask is ouput as the d0 volume at the conclusion of SPECTRE.", - argstr="--inInitial %d") + desc="Erosion of the initial mask, which is based on the probability mask and the classification., The initial mask is output as the d0 volume at the conclusion of SPECTRE.", + argstr="--inInitial %d", + ) inImage = traits.Enum( "T1_SPGR", "T1_ALT", "T1_MPRAGE", "T2", "FLAIR", - desc= - "Set the image modality. MP-RAGE is recommended for most T1 sequence images.", - argstr="--inImage %s") + desc="Set the image modality. MP-RAGE is recommended for most T1 sequence images.", + argstr="--inImage %s", + ) inOutput = traits.Enum( "true", "false", - desc= - "Determines if the output results are transformed back into the space of the original input image.", - argstr="--inOutput %s") + desc="Determines if the output results are transformed back into the space of the original input image.", + argstr="--inOutput %s", + ) inFind = traits.Enum( - "true", "false", desc="Find Midsaggital Plane", argstr="--inFind %s") + "true", "false", desc="Find Midsaggital Plane", argstr="--inFind %s" + ) inRun = traits.Enum( - "true", "false", desc="Run Smooth Brain Mask", argstr="--inRun %s") + "true", "false", desc="Run Smooth Brain Mask", argstr="--inRun %s" + ) inResample = traits.Enum( "true", "false", - desc= - "Determines if the data is resampled to be isotropic during the processing.", - argstr="--inResample %s") + desc="Determines if the data is resampled to be isotropic during the processing.", + argstr="--inResample %s", + ) inInitial2 = traits.Float( - desc="Initial probability threshold", argstr="--inInitial2 %f") + desc="Initial probability threshold", argstr="--inInitial2 %f" + ) inMinimum = traits.Float( - desc="Minimum probability threshold", argstr="--inMinimum %f") + desc="Minimum probability threshold", argstr="--inMinimum %f" + ) inMMC = traits.Int( - desc= - "The size of the dilation step within the Modified Morphological Closing.", - argstr="--inMMC %d") + desc="The size of the dilation step within the Modified Morphological Closing.", + argstr="--inMMC %d", + ) inMMC2 = traits.Int( - desc= - "The size of the erosion step within the Modified Morphological Closing.", - argstr="--inMMC2 %d") + desc="The size of the erosion step within the Modified Morphological Closing.", + argstr="--inMMC2 %d", + ) inInhomogeneity = traits.Enum( "true", "false", - desc= - "Set to false by default, this parameter will make FANTASM 
try to do inhomogeneity correction during it's iterative cycle.", - argstr="--inInhomogeneity %s") + desc="Set to false by default, this parameter will make FANTASM try to do inhomogeneity correction during it's iterative cycle.", + argstr="--inInhomogeneity %s", + ) inSmoothing = traits.Float(argstr="--inSmoothing %f") inBackground = traits.Float(argstr="--inBackground %f") inOutput2 = traits.Enum( - "true", "false", desc="Output Plane?", argstr="--inOutput2 %s") + "true", "false", desc="Output Plane?", argstr="--inOutput2 %s" + ) inOutput3 = traits.Enum( - "true", "false", desc="Output Split-Halves?", argstr="--inOutput3 %s") + "true", "false", desc="Output Split-Halves?", argstr="--inOutput3 %s" + ) inOutput4 = traits.Enum( - "true", - "false", - desc="Output Segmentation on Plane?", - argstr="--inOutput4 %s") + "true", "false", desc="Output Segmentation on Plane?", argstr="--inOutput4 %s" + ) inDegrees = traits.Enum( "Rigid - 6", "Global rescale - 7", "Specific rescale - 9", "Affine - 12", desc="Degrees of freedom", - argstr="--inDegrees %s") + argstr="--inDegrees %s", + ) inCost = traits.Enum( "Correlation ratio", "Least squares", "Normalized cross correlation", "Normalized mutual information", desc="Cost function", - argstr="--inCost %s") + argstr="--inCost %s", + ) inRegistration = traits.Enum( "Trilinear", "Bspline 3rd order", @@ -1239,7 +1163,8 @@ class MedicAlgorithmSPECTRE2010InputSpec(CommandLineInputSpec): "Heptic Lagrangian", "Windowed sinc", desc="Registration interpolation", - argstr="--inRegistration %s") + argstr="--inRegistration %s", + ) inOutput5 = traits.Enum( "Trilinear", "Bspline 3rd order", @@ -1250,174 +1175,176 @@ class MedicAlgorithmSPECTRE2010InputSpec(CommandLineInputSpec): "Windowed sinc", "Nearest Neighbor", desc="Output interpolation", - argstr="--inOutput5 %s") + argstr="--inOutput5 %s", + ) inApply = traits.Enum( - "All", "X", "Y", "Z", desc="Apply rotation", argstr="--inApply %s") + "All", "X", "Y", "Z", desc="Apply rotation", argstr="--inApply %s" + ) inMinimum2 = traits.Float(desc="Minimum angle", argstr="--inMinimum2 %f") inMaximum = traits.Float(desc="Maximum angle", argstr="--inMaximum %f") - inCoarse = traits.Float( - desc="Coarse angle increment", argstr="--inCoarse %f") + inCoarse = traits.Float(desc="Coarse angle increment", argstr="--inCoarse %f") inFine = traits.Float(desc="Fine angle increment", argstr="--inFine %f") inMultiple = traits.Int( - desc="Multiple of tolerance to bracket the minimum", - argstr="--inMultiple %d") + desc="Multiple of tolerance to bracket the minimum", argstr="--inMultiple %d" + ) inNumber = traits.Int(desc="Number of iterations", argstr="--inNumber %d") inNumber2 = traits.Int( - desc="Number of minima from Level 8 to test at Level 4", - argstr="--inNumber2 %d") + desc="Number of minima from Level 8 to test at Level 4", argstr="--inNumber2 %d" + ) inUse = traits.Enum( "true", "false", - desc= - "Use the max of the min resolutions of the two datasets when resampling", - argstr="--inUse %s") + desc="Use the max of the min resolutions of the two datasets when resampling", + argstr="--inUse %s", + ) inSubsample = traits.Enum( - "true", - "false", - desc="Subsample image for speed", - argstr="--inSubsample %s") + "true", "false", desc="Subsample image for speed", argstr="--inSubsample %s" + ) inSkip = traits.Enum( "true", "false", desc="Skip multilevel search (Assume images are close to alignment)", - argstr="--inSkip %s") + argstr="--inSkip %s", + ) inMultithreading = traits.Enum( "true", "false", - desc= - "Set to false 
by default, this parameter controls the multithreaded behavior of the linear registration.", - argstr="--inMultithreading %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + desc="Set to false by default, this parameter controls the multithreaded behavior of the linear registration.", + argstr="--inMultithreading %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outOriginal = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "If Output in Original Space Flag is true then outputs the original input volume. Otherwise outputs the axialy reoriented input volume.", - argstr="--outOriginal %s") + desc="If Output in Original Space Flag is true then outputs the original input volume. Otherwise outputs the axially reoriented input volume.", + argstr="--outOriginal %s", + ) outStripped = traits.Either( traits.Bool, File(), hash_files=False, desc="Skullstripped result of the input volume with just the brain.", - argstr="--outStripped %s") + argstr="--outStripped %s", + ) outMask = traits.Either( traits.Bool, File(), hash_files=False, desc="Binary Mask of the skullstripped result with just the brain", - argstr="--outMask %s") + argstr="--outMask %s", + ) outPrior = traits.Either( traits.Bool, File(), hash_files=False, desc="Probability prior from the atlas registrations", - argstr="--outPrior %s") + argstr="--outPrior %s", + ) outFANTASM = traits.Either( traits.Bool, File(), hash_files=False, - desc="Tissue classification of of the whole input volume.", - argstr="--outFANTASM %s") + desc="Tissue classification of the whole input volume.", + argstr="--outFANTASM %s", + ) outd0 = traits.Either( traits.Bool, File(), hash_files=False, desc="Initial Brainmask", - argstr="--outd0 %s") + argstr="--outd0 %s", + ) outMidsagittal = traits.Either( traits.Bool, File(), hash_files=False, desc="Plane dividing the brain hemispheres", - argstr="--outMidsagittal %s") + argstr="--outMidsagittal %s", + ) outSplitHalves = traits.Either( traits.Bool, File(), hash_files=False, desc="Skullstripped mask of the brain with the hemispheres divided.", - argstr="--outSplitHalves %s") + argstr="--outSplitHalves %s", + ) outSegmentation = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "2D image showing the tissue classification on the midsagittal plane", - argstr="--outSegmentation %s") + desc="2D image showing the tissue classification on the midsagittal plane", + argstr="--outSegmentation %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmSPECTRE2010OutputSpec(TraitedSpec): outOriginal = File( - desc= - "If Output in Original Space Flag is true then outputs the original input volume. 
Otherwise outputs the axialy reoriented input volume.", + exists=True, + ) outStripped = File( desc="Skullstripped result of the input volume with just the brain.", - exists=True) + exists=True, + ) outMask = File( - desc="Binary Mask of the skullstripped result with just the brain", - exists=True) - outPrior = File( - desc="Probability prior from the atlas registrations", exists=True) + desc="Binary Mask of the skullstripped result with just the brain", exists=True + ) + outPrior = File(desc="Probability prior from the atlas registrations", exists=True) outFANTASM = File( - desc="Tissue classification of of the whole input volume.", - exists=True) + desc="Tissue classification of the whole input volume.", exists=True + ) outd0 = File(desc="Initial Brainmask", exists=True) - outMidsagittal = File( - desc="Plane dividing the brain hemispheres", exists=True) + outMidsagittal = File(desc="Plane dividing the brain hemispheres", exists=True) outSplitHalves = File( desc="Skullstripped mask of the brain with the hemispheres divided.", - exists=True) + exists=True, + ) outSegmentation = File( - desc= - "2D image showing the tissue classification on the midsagittal plane", - exists=True) + desc="2D image showing the tissue classification on the midsagittal plane", + exists=True, + ) class MedicAlgorithmSPECTRE2010(SEMLikeCommandLine): - """title: SPECTRE 2010 - -category: Developer Tools - -description: Simple Paradigm for Extra-Cranial Tissue REmoval - -Algorithm Version: 1.6 -GUI Version: 1.10 - -A. Carass, M.B. Wheeler, J. Cuzzocreo, P.-L. Bazin, S.S. Bassett, and J.L. Prince, 'A Joint Registration and Segmentation Approach to Skull Stripping', Fourth IEEE International Symposium on Biomedical Imaging (ISBI 2007), Arlington, VA, April 12-15, 2007. -A. Carass, J. Cuzzocreo, M.B. Wheeler, P.-L. Bazin, S.M. Resnick, and J.L. Prince, 'Simple paradigm for extra-cerebral tissue removal: Algorithm and analysis', NeuroImage 56(4):1982-1992, 2011. + """SPECTRE 2010: Simple Paradigm for Extra-Cranial Tissue REmoval [1]_, [2]_. -version: 1.6.R + References + ---------- -documentation-url: http://www.iacl.ece.jhu.edu/ + .. [1] A. Carass, M.B. Wheeler, J. Cuzzocreo, P.-L. Bazin, S.S. Bassett, and J.L. Prince, + 'A Joint Registration and Segmentation Approach to Skull Stripping', + Fourth IEEE International Symposium on Biomedical Imaging (ISBI 2007), Arlington, VA, + April 12-15, 2007. + .. [2] A. Carass, J. Cuzzocreo, M.B. Wheeler, P.-L. Bazin, S.M. Resnick, and J.L. Prince, + 'Simple paradigm for extra-cerebral tissue removal: Algorithm and analysis', + NeuroImage 56(4):1982-1992, 2011. 
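A short usage sketch in the numpydoc style of other nipype interfaces rounds out the new docstring; the class and trait names (inInput, outStripped) are taken from the specs above, while the filename and the run call are illustrative assumptions:

+
+    Examples
+    --------
+    >>> from nipype.interfaces.mipav import MedicAlgorithmSPECTRE2010
+    >>> spectre = MedicAlgorithmSPECTRE2010()
+    >>> spectre.inputs.inInput = "structural.nii"  # hypothetical T1-weighted scan
+    >>> spectre.inputs.outStripped = True
+    >>> spectre.run()  # doctest: +SKIP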
-contributor: Aaron Carass (aaron_carass@jhu.edu) http://www.iacl.ece.jhu.edu/ -Hanlin Wan (hanlinwan@gmail.com) - -""" + """ input_spec = MedicAlgorithmSPECTRE2010InputSpec output_spec = MedicAlgorithmSPECTRE2010OutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010 " _outputs_filenames = { - 'outd0': 'outd0.nii', - 'outOriginal': 'outOriginal.nii', - 'outMask': 'outMask.nii', - 'outSplitHalves': 'outSplitHalves.nii', - 'outMidsagittal': 'outMidsagittal.nii', - 'outPrior': 'outPrior.nii', - 'outFANTASM': 'outFANTASM.nii', - 'outSegmentation': 'outSegmentation.nii', - 'outStripped': 'outStripped.nii' + "outd0": "outd0.nii", + "outOriginal": "outOriginal.nii", + "outMask": "outMask.nii", + "outSplitHalves": "outSplitHalves.nii", + "outMidsagittal": "outMidsagittal.nii", + "outPrior": "outPrior.nii", + "outFANTASM": "outFANTASM.nii", + "outSegmentation": "outSegmentation.nii", + "outStripped": "outStripped.nii", } _redirect_x = True @@ -1428,27 +1355,30 @@ class JistBrainPartialVolumeFilterInputSpec(CommandLineInputSpec): "bright", "dark", "both", - desc= - "Outputs the raw intensity values or a probability score for the partial volume regions.", - argstr="--inPV %s") + desc="Outputs the raw intensity values or a probability score for the partial volume regions.", + argstr="--inPV %s", + ) inoutput = traits.Enum( - "probability", "intensity", desc="output", argstr="--inoutput %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + "probability", "intensity", desc="output", argstr="--inoutput %s" + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outPartial = traits.Either( traits.Bool, File(), hash_files=False, desc="Partial Volume Image", - argstr="--outPartial %s") + argstr="--outPartial %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistBrainPartialVolumeFilterOutputSpec(TraitedSpec): @@ -1456,84 +1386,83 @@ class JistBrainPartialVolumeFilterOutputSpec(TraitedSpec): class JistBrainPartialVolumeFilter(SEMLikeCommandLine): - """title: Partial Volume Filter - -category: Developer Tools + """Partial Volume Filter. -description: Filters an image for regions of partial voluming assuming a ridge-like model of intensity. + Filters an image for regions of partial voluming assuming a ridge-like model of intensity. 
-version: 2.0.RC - -""" + """ input_spec = JistBrainPartialVolumeFilterInputSpec output_spec = JistBrainPartialVolumeFilterOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter " - _outputs_filenames = {'outPartial': 'outPartial.nii'} + _outputs_filenames = {"outPartial": "outPartial.nii"} _redirect_x = True class JistIntensityMp2rageMaskingInputSpec(CommandLineInputSpec): inSecond = File( - desc="Second inversion (Inv2) Image", - exists=True, - argstr="--inSecond %s") + desc="Second inversion (Inv2) Image", exists=True, argstr="--inSecond %s" + ) inQuantitative = File( desc="Quantitative T1 Map (T1_Images) Image", exists=True, - argstr="--inQuantitative %s") + argstr="--inQuantitative %s", + ) inT1weighted = File( - desc="T1-weighted (UNI) Image", - exists=True, - argstr="--inT1weighted %s") + desc="T1-weighted (UNI) Image", exists=True, argstr="--inT1weighted %s" + ) inBackground = traits.Enum( "exponential", "half-normal", - desc= - "Model distribution for background noise (default is half-normal, exponential is more stringent).", - argstr="--inBackground %s") - inSkip = traits.Enum( - "true", "false", desc="Skip zero values", argstr="--inSkip %s") + desc="Model distribution for background noise (default is half-normal, exponential is more stringent).", + argstr="--inBackground %s", + ) + inSkip = traits.Enum("true", "false", desc="Skip zero values", argstr="--inSkip %s") inMasking = traits.Enum( "binary", "proba", - desc= - "Whether to use a binary threshold or a weighted average based on the probability.", - argstr="--inMasking %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + desc="Whether to use a binary threshold or a weighted average based on the probability.", + argstr="--inMasking %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outSignal = traits.Either( traits.Bool, File(), hash_files=False, desc="Signal Proba Image", - argstr="--outSignal_Proba %s") + argstr="--outSignal_Proba %s", + ) outSignal2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Signal Mask Image", - argstr="--outSignal_Mask %s") + argstr="--outSignal_Mask %s", + ) outMasked = traits.Either( traits.Bool, File(), hash_files=False, desc="Masked T1 Map Image", - argstr="--outMasked_T1_Map %s") + argstr="--outMasked_T1_Map %s", + ) outMasked2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Masked Iso Image", - argstr="--outMasked_T1weighted %s") + argstr="--outMasked_T1weighted %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistIntensityMp2rageMaskingOutputSpec(TraitedSpec): @@ -1544,52 +1473,44 @@ class JistIntensityMp2rageMaskingOutputSpec(TraitedSpec): class JistIntensityMp2rageMasking(SEMLikeCommandLine): - """title: MP2RAGE Background Masking - -category: Developer Tools - -description: Estimate a background signal mask for a MP2RAGE dataset. 
- -version: 3.0.RC - -""" + """Estimate a background signal mask for a MP2RAGE dataset.""" input_spec = JistIntensityMp2rageMaskingInputSpec output_spec = JistIntensityMp2rageMaskingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.intensity.JistIntensityMp2rageMasking " _outputs_filenames = { - 'outSignal2': 'outSignal2.nii', - 'outSignal': 'outSignal.nii', - 'outMasked2': 'outMasked2.nii', - 'outMasked': 'outMasked.nii' + "outSignal2": "outSignal2.nii", + "outSignal": "outSignal.nii", + "outMasked2": "outMasked2.nii", + "outMasked": "outMasked.nii", } _redirect_x = True class MedicAlgorithmThresholdToBinaryMaskInputSpec(CommandLineInputSpec): - inLabel = InputMultiPath( - File, desc="Input volumes", sep=";", argstr="--inLabel %s") - inMinimum = traits.Float( - desc="Minimum threshold value.", argstr="--inMinimum %f") - inMaximum = traits.Float( - desc="Maximum threshold value.", argstr="--inMaximum %f") + inLabel = InputMultiPath(File, desc="Input volumes", sep=";", argstr="--inLabel %s") + inMinimum = traits.Float(desc="Minimum threshold value.", argstr="--inMinimum %f") + inMaximum = traits.Float(desc="Maximum threshold value.", argstr="--inMaximum %f") inUse = traits.Enum( "true", "false", desc="Use the images max intensity as the max value of the range.", - argstr="--inUse %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--inUse %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outBinary = InputMultiPath( - File, desc="Binary Mask", sep=";", argstr="--outBinary %s") + File, desc="Binary Mask", sep=";", argstr="--outBinary %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmThresholdToBinaryMaskOutputSpec(TraitedSpec): @@ -1597,17 +1518,11 @@ class MedicAlgorithmThresholdToBinaryMaskOutputSpec(TraitedSpec): class MedicAlgorithmThresholdToBinaryMask(SEMLikeCommandLine): - """title: Threshold to Binary Mask - -category: Developer Tools - -description: Given a volume and an intensity range create a binary mask for values within that range. - -version: 1.2.RC + """Threshold to Binary Mask. -documentation-url: http://www.iacl.ece.jhu.edu/ + Given a volume and an intensity range, create a binary mask for values within that range. -""" + """ input_spec = MedicAlgorithmThresholdToBinaryMaskInputSpec output_spec = MedicAlgorithmThresholdToBinaryMaskOutputSpec diff --git a/nipype/interfaces/mipav/generate_classes.py b/nipype/interfaces/mipav/generate_classes.py index df48a2d2da..ab91e48150 100644 --- a/nipype/interfaces/mipav/generate_classes.py +++ b/nipype/interfaces/mipav/generate_classes.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - if __name__ == "__main__": from nipype.interfaces.slicer.generate_classes import generate_all_classes @@ -7,49 +5,54 @@ # every tool in the modules list must be found on the default path # AND calling the module with --xml must be supported and compliant. 
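The constraint stated in the comment above (each wrapped module must be on the default path and must answer --xml with a compliant interface description) can be probed up front, before a module joins the lists below. A minimal sketch, assuming only that the JIST jars are on the Java classpath; supports_xml is a hypothetical helper, not part of this patch, and it reuses the launcher string that is passed to generate_all_classes at the bottom of the script:

import subprocess

LAUNCHER = "java edu.jhu.ece.iacl.jist.cli.run"

def supports_xml(module):
    """Return True if calling the module with --xml yields an XML description."""
    try:
        proc = subprocess.run(
            LAUNCHER.split() + [module, "--xml"],
            capture_output=True,
            timeout=120,
        )
    except (OSError, subprocess.TimeoutExpired):
        # java missing, module not found, or the call hung
        return False
    # A compliant module prints its XML interface description on stdout.
    return proc.returncode == 0 and proc.stdout.lstrip().startswith(b"<")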
modules_list = [ - 'edu.jhu.bme.smile.demo.RandomVol', - 'de.mpg.cbs.jist.laminar.JistLaminarProfileCalculator', - 'de.mpg.cbs.jist.laminar.JistLaminarProfileSampling', - 'de.mpg.cbs.jist.laminar.JistLaminarROIAveraging', - 'de.mpg.cbs.jist.laminar.JistLaminarVolumetricLayering', - 'de.mpg.cbs.jist.laminar.JistLaminarProfileGeometry', - 'de.mpg.cbs.jist.brain.JistBrainMgdmSegmentation', - 'de.mpg.cbs.jist.brain.JistBrainMp2rageSkullStripping', - 'de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter', - 'de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation' + "edu.jhu.bme.smile.demo.RandomVol", + "de.mpg.cbs.jist.laminar.JistLaminarProfileCalculator", + "de.mpg.cbs.jist.laminar.JistLaminarProfileSampling", + "de.mpg.cbs.jist.laminar.JistLaminarROIAveraging", + "de.mpg.cbs.jist.laminar.JistLaminarVolumetricLayering", + "de.mpg.cbs.jist.laminar.JistLaminarProfileGeometry", + "de.mpg.cbs.jist.brain.JistBrainMgdmSegmentation", + "de.mpg.cbs.jist.brain.JistBrainMp2rageSkullStripping", + "de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter", + "de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation", ] modules_from_chris = [ - 'edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010', - 'edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmMipavReorient', - 'edu.jhu.ece.iacl.plugins.utilities.math.MedicAlgorithmImageCalculator', - 'de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation', - 'de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter', - 'edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmThresholdToBinaryMask', + "edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010", + "edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmMipavReorient", + "edu.jhu.ece.iacl.plugins.utilities.math.MedicAlgorithmImageCalculator", + "de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation", + "de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter", + "edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmThresholdToBinaryMask", # 'de.mpg.cbs.jist.cortex.JistCortexFullCRUISE', # waiting for http://www.nitrc.org/tracker/index.php?func=detail&aid=7236&group_id=228&atid=942 to be fixed - 'de.mpg.cbs.jist.cortex.JistCortexSurfaceMeshInflation' + "de.mpg.cbs.jist.cortex.JistCortexSurfaceMeshInflation", ] modules_from_julia = [ - 'de.mpg.cbs.jist.intensity.JistIntensityMp2rageMasking', - 'edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010' + "de.mpg.cbs.jist.intensity.JistIntensityMp2rageMasking", + "edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010", ] modules_from_leonie = [ - 'edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmLesionToads' + "edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmLesionToads" ] modules_from_yasinyazici = [ - 'edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmN3' + "edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmN3" ] modules_list = list( - set(modules_list).union(modules_from_chris).union(modules_from_leonie) - .union(modules_from_julia).union(modules_from_yasinyazici).union( - modules_list)) + set(modules_list) + .union(modules_from_chris) + .union(modules_from_leonie) + .union(modules_from_julia) + .union(modules_from_yasinyazici) + .union(modules_list) + ) generate_all_classes( modules_list=modules_list, launcher=["java edu.jhu.ece.iacl.jist.cli.run"], redirect_x=True, - mipav_hacks=True) + mipav_hacks=True, + ) diff --git a/nipype/interfaces/mipav/tests/__init__.py b/nipype/interfaces/mipav/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- 
a/nipype/interfaces/mipav/tests/__init__.py +++ b/nipype/interfaces/mipav/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mipav/tests/test_auto_JistBrainMgdmSegmentation.py b/nipype/interfaces/mipav/tests/test_auto_JistBrainMgdmSegmentation.py index 16605acbb7..9daa1b996e 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistBrainMgdmSegmentation.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistBrainMgdmSegmentation.py @@ -1,66 +1,121 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import JistBrainMgdmSegmentation def test_JistBrainMgdmSegmentation_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inAdjust=dict(argstr='--inAdjust %s', ), - inAtlas=dict(argstr='--inAtlas %s', ), - inCompute=dict(argstr='--inCompute %s', ), - inCurvature=dict(argstr='--inCurvature %f', ), - inData=dict(argstr='--inData %f', ), - inFLAIR=dict(argstr='--inFLAIR %s', ), - inMP2RAGE=dict(argstr='--inMP2RAGE %s', ), - inMP2RAGE2=dict(argstr='--inMP2RAGE2 %s', ), - inMPRAGE=dict(argstr='--inMPRAGE %s', ), - inMax=dict(argstr='--inMax %d', ), - inMin=dict(argstr='--inMin %f', ), - inOutput=dict(argstr='--inOutput %s', ), - inPV=dict(argstr='--inPV %s', ), - inPosterior=dict(argstr='--inPosterior %f', ), - inSteps=dict(argstr='--inSteps %d', ), - inTopology=dict(argstr='--inTopology %s', ), - null=dict(argstr='--null %s', ), + inAdjust=dict( + argstr="--inAdjust %s", + ), + inAtlas=dict( + argstr="--inAtlas %s", + extensions=None, + ), + inCompute=dict( + argstr="--inCompute %s", + ), + inCurvature=dict( + argstr="--inCurvature %f", + ), + inData=dict( + argstr="--inData %f", + ), + inFLAIR=dict( + argstr="--inFLAIR %s", + extensions=None, + ), + inMP2RAGE=dict( + argstr="--inMP2RAGE %s", + extensions=None, + ), + inMP2RAGE2=dict( + argstr="--inMP2RAGE2 %s", + extensions=None, + ), + inMPRAGE=dict( + argstr="--inMPRAGE %s", + extensions=None, + ), + inMax=dict( + argstr="--inMax %d", + ), + inMin=dict( + argstr="--inMin %f", + ), + inOutput=dict( + argstr="--inOutput %s", + ), + inPV=dict( + argstr="--inPV %s", + extensions=None, + ), + inPosterior=dict( + argstr="--inPosterior %f", + ), + inSteps=dict( + argstr="--inSteps %d", + ), + inTopology=dict( + argstr="--inTopology %s", + ), + null=dict( + argstr="--null %s", + ), outLevelset=dict( - argstr='--outLevelset %s', + argstr="--outLevelset %s", hash_files=False, ), outPosterior2=dict( - argstr='--outPosterior2 %s', + argstr="--outPosterior2 %s", hash_files=False, ), outPosterior3=dict( - argstr='--outPosterior3 %s', + argstr="--outPosterior3 %s", hash_files=False, ), outSegmented=dict( - argstr='--outSegmented %s', + argstr="--outSegmented %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = JistBrainMgdmSegmentation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistBrainMgdmSegmentation_outputs(): output_map = dict( - outLevelset=dict(), - outPosterior2=dict(), - outPosterior3=dict(), - outSegmented=dict(), + outLevelset=dict( + extensions=None, + ), + outPosterior2=dict( + 
extensions=None, + ), + outPosterior3=dict( + extensions=None, + ), + outSegmented=dict( + extensions=None, + ), ) outputs = JistBrainMgdmSegmentation.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageDuraEstimation.py b/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageDuraEstimation.py index ebb65bb789..e7706b16b5 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageDuraEstimation.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageDuraEstimation.py @@ -1,38 +1,61 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import JistBrainMp2rageDuraEstimation def test_JistBrainMp2rageDuraEstimation_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inDistance=dict(argstr='--inDistance %f', ), - inSecond=dict(argstr='--inSecond %s', ), - inSkull=dict(argstr='--inSkull %s', ), - inoutput=dict(argstr='--inoutput %s', ), - null=dict(argstr='--null %s', ), + inDistance=dict( + argstr="--inDistance %f", + ), + inSecond=dict( + argstr="--inSecond %s", + extensions=None, + ), + inSkull=dict( + argstr="--inSkull %s", + extensions=None, + ), + inoutput=dict( + argstr="--inoutput %s", + ), + null=dict( + argstr="--null %s", + ), outDura=dict( - argstr='--outDura %s', + argstr="--outDura %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = JistBrainMp2rageDuraEstimation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistBrainMp2rageDuraEstimation_outputs(): - output_map = dict(outDura=dict(), ) + output_map = dict( + outDura=dict( + extensions=None, + ), + ) outputs = JistBrainMp2rageDuraEstimation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageSkullStripping.py b/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageSkullStripping.py index abf794b662..637b50dcad 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageSkullStripping.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageSkullStripping.py @@ -1,55 +1,86 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import JistBrainMp2rageSkullStripping def test_JistBrainMp2rageSkullStripping_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inFilter=dict(argstr='--inFilter %s', ), - inSecond=dict(argstr='--inSecond %s', ), - inSkip=dict(argstr='--inSkip %s', ), - inT1=dict(argstr='--inT1 %s', ), - inT1weighted=dict(argstr='--inT1weighted %s', ), - null=dict(argstr='--null %s', ), + inFilter=dict( + argstr="--inFilter %s", + extensions=None, + ), + inSecond=dict( + argstr="--inSecond %s", + extensions=None, + ), + inSkip=dict( + argstr="--inSkip %s", + ), + inT1=dict( + argstr="--inT1 %s", + extensions=None, + ), + inT1weighted=dict( + argstr="--inT1weighted %s", + extensions=None, + ), + null=dict( + argstr="--null %s", + ), outBrain=dict( - argstr='--outBrain %s', + argstr="--outBrain %s", 
hash_files=False, ), outMasked=dict( - argstr='--outMasked %s', + argstr="--outMasked %s", hash_files=False, ), outMasked2=dict( - argstr='--outMasked2 %s', + argstr="--outMasked2 %s", hash_files=False, ), outMasked3=dict( - argstr='--outMasked3 %s', + argstr="--outMasked3 %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = JistBrainMp2rageSkullStripping.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistBrainMp2rageSkullStripping_outputs(): output_map = dict( - outBrain=dict(), - outMasked=dict(), - outMasked2=dict(), - outMasked3=dict(), + outBrain=dict( + extensions=None, + ), + outMasked=dict( + extensions=None, + ), + outMasked2=dict( + extensions=None, + ), + outMasked3=dict( + extensions=None, + ), ) outputs = JistBrainMp2rageSkullStripping.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_JistBrainPartialVolumeFilter.py b/nipype/interfaces/mipav/tests/test_auto_JistBrainPartialVolumeFilter.py index bb86144c20..61a3e2b074 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistBrainPartialVolumeFilter.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistBrainPartialVolumeFilter.py @@ -1,37 +1,57 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import JistBrainPartialVolumeFilter def test_JistBrainPartialVolumeFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inInput=dict(argstr='--inInput %s', ), - inPV=dict(argstr='--inPV %s', ), - inoutput=dict(argstr='--inoutput %s', ), - null=dict(argstr='--null %s', ), + inInput=dict( + argstr="--inInput %s", + extensions=None, + ), + inPV=dict( + argstr="--inPV %s", + ), + inoutput=dict( + argstr="--inoutput %s", + ), + null=dict( + argstr="--null %s", + ), outPartial=dict( - argstr='--outPartial %s', + argstr="--outPartial %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = JistBrainPartialVolumeFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistBrainPartialVolumeFilter_outputs(): - output_map = dict(outPartial=dict(), ) + output_map = dict( + outPartial=dict( + extensions=None, + ), + ) outputs = JistBrainPartialVolumeFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_JistCortexSurfaceMeshInflation.py b/nipype/interfaces/mipav/tests/test_auto_JistCortexSurfaceMeshInflation.py index 407b9755ca..41ae9c5cce 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistCortexSurfaceMeshInflation.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistCortexSurfaceMeshInflation.py @@ -1,47 +1,75 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import 
JistCortexSurfaceMeshInflation def test_JistCortexSurfaceMeshInflation_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inLevelset=dict(argstr='--inLevelset %s', ), - inLorentzian=dict(argstr='--inLorentzian %s', ), - inMax=dict(argstr='--inMax %d', ), - inMean=dict(argstr='--inMean %f', ), - inSOR=dict(argstr='--inSOR %f', ), - inStep=dict(argstr='--inStep %d', ), - inTopology=dict(argstr='--inTopology %s', ), - null=dict(argstr='--null %s', ), + inLevelset=dict( + argstr="--inLevelset %s", + extensions=None, + ), + inLorentzian=dict( + argstr="--inLorentzian %s", + ), + inMax=dict( + argstr="--inMax %d", + ), + inMean=dict( + argstr="--inMean %f", + ), + inSOR=dict( + argstr="--inSOR %f", + ), + inStep=dict( + argstr="--inStep %d", + ), + inTopology=dict( + argstr="--inTopology %s", + ), + null=dict( + argstr="--null %s", + ), outInflated=dict( - argstr='--outInflated %s', + argstr="--outInflated %s", hash_files=False, ), outOriginal=dict( - argstr='--outOriginal %s', + argstr="--outOriginal %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = JistCortexSurfaceMeshInflation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistCortexSurfaceMeshInflation_outputs(): output_map = dict( - outInflated=dict(), - outOriginal=dict(), + outInflated=dict( + extensions=None, + ), + outOriginal=dict( + extensions=None, + ), ) outputs = JistCortexSurfaceMeshInflation.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py b/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py index bfdace4944..94d9cc525a 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py @@ -1,56 +1,88 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import JistIntensityMp2rageMasking def test_JistIntensityMp2rageMasking_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inBackground=dict(argstr='--inBackground %s', ), - inMasking=dict(argstr='--inMasking %s', ), - inQuantitative=dict(argstr='--inQuantitative %s', ), - inSecond=dict(argstr='--inSecond %s', ), - inSkip=dict(argstr='--inSkip %s', ), - inT1weighted=dict(argstr='--inT1weighted %s', ), - null=dict(argstr='--null %s', ), + inBackground=dict( + argstr="--inBackground %s", + ), + inMasking=dict( + argstr="--inMasking %s", + ), + inQuantitative=dict( + argstr="--inQuantitative %s", + extensions=None, + ), + inSecond=dict( + argstr="--inSecond %s", + extensions=None, + ), + inSkip=dict( + argstr="--inSkip %s", + ), + inT1weighted=dict( + argstr="--inT1weighted %s", + extensions=None, + ), + null=dict( + argstr="--null %s", + ), outMasked=dict( - argstr='--outMasked_T1_Map %s', + argstr="--outMasked_T1_Map %s", hash_files=False, ), outMasked2=dict( - argstr='--outMasked_T1weighted %s', + argstr="--outMasked_T1weighted %s", hash_files=False, ), outSignal=dict( - argstr='--outSignal_Proba 
%s', + argstr="--outSignal_Proba %s", hash_files=False, ), outSignal2=dict( - argstr='--outSignal_Mask %s', + argstr="--outSignal_Mask %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = JistIntensityMp2rageMasking.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistIntensityMp2rageMasking_outputs(): output_map = dict( - outMasked=dict(), - outMasked2=dict(), - outSignal=dict(), - outSignal2=dict(), + outMasked=dict( + extensions=None, + ), + outMasked2=dict( + extensions=None, + ), + outSignal=dict( + extensions=None, + ), + outSignal2=dict( + extensions=None, + ), ) outputs = JistIntensityMp2rageMasking.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileCalculator.py b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileCalculator.py index 12203a1aa6..0cc1501e4f 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileCalculator.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileCalculator.py @@ -1,37 +1,58 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import JistLaminarProfileCalculator def test_JistLaminarProfileCalculator_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inIntensity=dict(argstr='--inIntensity %s', ), - inMask=dict(argstr='--inMask %s', ), - incomputed=dict(argstr='--incomputed %s', ), - null=dict(argstr='--null %s', ), + inIntensity=dict( + argstr="--inIntensity %s", + extensions=None, + ), + inMask=dict( + argstr="--inMask %s", + extensions=None, + ), + incomputed=dict( + argstr="--incomputed %s", + ), + null=dict( + argstr="--null %s", + ), outResult=dict( - argstr='--outResult %s', + argstr="--outResult %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = JistLaminarProfileCalculator.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistLaminarProfileCalculator_outputs(): - output_map = dict(outResult=dict(), ) + output_map = dict( + outResult=dict( + extensions=None, + ), + ) outputs = JistLaminarProfileCalculator.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileGeometry.py b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileGeometry.py index ddc4d5d922..758d331935 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileGeometry.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileGeometry.py @@ -1,39 +1,63 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import JistLaminarProfileGeometry def test_JistLaminarProfileGeometry_inputs(): input_map = dict( - args=dict(argstr='%s', ), + 
args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inProfile=dict(argstr='--inProfile %s', ), - incomputed=dict(argstr='--incomputed %s', ), - inoutside=dict(argstr='--inoutside %f', ), - inregularization=dict(argstr='--inregularization %s', ), - insmoothing=dict(argstr='--insmoothing %f', ), - null=dict(argstr='--null %s', ), + inProfile=dict( + argstr="--inProfile %s", + extensions=None, + ), + incomputed=dict( + argstr="--incomputed %s", + ), + inoutside=dict( + argstr="--inoutside %f", + ), + inregularization=dict( + argstr="--inregularization %s", + ), + insmoothing=dict( + argstr="--insmoothing %f", + ), + null=dict( + argstr="--null %s", + ), outResult=dict( - argstr='--outResult %s', + argstr="--outResult %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = JistLaminarProfileGeometry.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistLaminarProfileGeometry_outputs(): - output_map = dict(outResult=dict(), ) + output_map = dict( + outResult=dict( + extensions=None, + ), + ) outputs = JistLaminarProfileGeometry.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileSampling.py b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileSampling.py index 58de472b85..65841c48a9 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileSampling.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileSampling.py @@ -1,43 +1,65 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import JistLaminarProfileSampling def test_JistLaminarProfileSampling_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inCortex=dict(argstr='--inCortex %s', ), - inIntensity=dict(argstr='--inIntensity %s', ), - inProfile=dict(argstr='--inProfile %s', ), - null=dict(argstr='--null %s', ), + inCortex=dict( + argstr="--inCortex %s", + extensions=None, + ), + inIntensity=dict( + argstr="--inIntensity %s", + extensions=None, + ), + inProfile=dict( + argstr="--inProfile %s", + extensions=None, + ), + null=dict( + argstr="--null %s", + ), outProfile2=dict( - argstr='--outProfile2 %s', + argstr="--outProfile2 %s", hash_files=False, ), outProfilemapped=dict( - argstr='--outProfilemapped %s', + argstr="--outProfilemapped %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = JistLaminarProfileSampling.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistLaminarProfileSampling_outputs(): output_map = dict( - outProfile2=dict(), - outProfilemapped=dict(), + outProfile2=dict( + extensions=None, + ), + outProfilemapped=dict( + extensions=None, + ), ) outputs = 
JistLaminarProfileSampling.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_JistLaminarROIAveraging.py b/nipype/interfaces/mipav/tests/test_auto_JistLaminarROIAveraging.py index bb9577ccee..fed4abfca1 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistLaminarROIAveraging.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistLaminarROIAveraging.py @@ -1,38 +1,62 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import JistLaminarROIAveraging def test_JistLaminarROIAveraging_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inIntensity=dict(argstr='--inIntensity %s', ), - inMask=dict(argstr='--inMask %s', ), - inROI=dict(argstr='--inROI %s', ), - inROI2=dict(argstr='--inROI2 %s', ), - null=dict(argstr='--null %s', ), + inIntensity=dict( + argstr="--inIntensity %s", + extensions=None, + ), + inMask=dict( + argstr="--inMask %s", + extensions=None, + ), + inROI=dict( + argstr="--inROI %s", + extensions=None, + ), + inROI2=dict( + argstr="--inROI2 %s", + ), + null=dict( + argstr="--null %s", + ), outROI3=dict( - argstr='--outROI3 %s', + argstr="--outROI3 %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = JistLaminarROIAveraging.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistLaminarROIAveraging_outputs(): - output_map = dict(outROI3=dict(), ) + output_map = dict( + outROI3=dict( + extensions=None, + ), + ) outputs = JistLaminarROIAveraging.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_JistLaminarVolumetricLayering.py b/nipype/interfaces/mipav/tests/test_auto_JistLaminarVolumetricLayering.py index 4aa9f9d77b..31d34ae32e 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistLaminarVolumetricLayering.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistLaminarVolumetricLayering.py @@ -1,56 +1,95 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import JistLaminarVolumetricLayering def test_JistLaminarVolumetricLayering_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inInner=dict(argstr='--inInner %s', ), - inLayering=dict(argstr='--inLayering %s', ), - inLayering2=dict(argstr='--inLayering2 %s', ), - inMax=dict(argstr='--inMax %d', ), - inMin=dict(argstr='--inMin %f', ), - inNumber=dict(argstr='--inNumber %d', ), - inOuter=dict(argstr='--inOuter %s', ), - inTopology=dict(argstr='--inTopology %s', ), - incurvature=dict(argstr='--incurvature %d', ), - inpresmooth=dict(argstr='--inpresmooth %s', ), - inratio=dict(argstr='--inratio %f', ), - null=dict(argstr='--null %s', ), + inInner=dict( + argstr="--inInner %s", + extensions=None, + ), + inLayering=dict( + argstr="--inLayering %s", + ), + inLayering2=dict( + argstr="--inLayering2 %s", + ), + inMax=dict( + argstr="--inMax %d", + ), + inMin=dict( + argstr="--inMin %f", + ), + inNumber=dict( + argstr="--inNumber %d", + ), + inOuter=dict( + argstr="--inOuter %s", + 
extensions=None, + ), + inTopology=dict( + argstr="--inTopology %s", + ), + incurvature=dict( + argstr="--incurvature %d", + ), + inpresmooth=dict( + argstr="--inpresmooth %s", + ), + inratio=dict( + argstr="--inratio %f", + ), + null=dict( + argstr="--null %s", + ), outContinuous=dict( - argstr='--outContinuous %s', + argstr="--outContinuous %s", hash_files=False, ), outDiscrete=dict( - argstr='--outDiscrete %s', + argstr="--outDiscrete %s", hash_files=False, ), outLayer=dict( - argstr='--outLayer %s', + argstr="--outLayer %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = JistLaminarVolumetricLayering.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistLaminarVolumetricLayering_outputs(): output_map = dict( - outContinuous=dict(), - outDiscrete=dict(), - outLayer=dict(), + outContinuous=dict( + extensions=None, + ), + outDiscrete=dict( + extensions=None, + ), + outLayer=dict( + extensions=None, + ), ) outputs = JistLaminarVolumetricLayering.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmImageCalculator.py b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmImageCalculator.py index 016beee263..7b9a0fc859 100644 --- a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmImageCalculator.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmImageCalculator.py @@ -1,37 +1,58 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import MedicAlgorithmImageCalculator def test_MedicAlgorithmImageCalculator_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inOperation=dict(argstr='--inOperation %s', ), - inVolume=dict(argstr='--inVolume %s', ), - inVolume2=dict(argstr='--inVolume2 %s', ), - null=dict(argstr='--null %s', ), + inOperation=dict( + argstr="--inOperation %s", + ), + inVolume=dict( + argstr="--inVolume %s", + extensions=None, + ), + inVolume2=dict( + argstr="--inVolume2 %s", + extensions=None, + ), + null=dict( + argstr="--null %s", + ), outResult=dict( - argstr='--outResult %s', + argstr="--outResult %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = MedicAlgorithmImageCalculator.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmImageCalculator_outputs(): - output_map = dict(outResult=dict(), ) + output_map = dict( + outResult=dict( + extensions=None, + ), + ) outputs = MedicAlgorithmImageCalculator.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmLesionToads.py b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmLesionToads.py index 87f9ab6a72..4fb5f2567b 100644 --- 
a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmLesionToads.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmLesionToads.py @@ -1,96 +1,171 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import MedicAlgorithmLesionToads def test_MedicAlgorithmLesionToads_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inAtlas=dict(argstr='--inAtlas %s', ), - inAtlas2=dict(argstr='--inAtlas2 %s', ), - inAtlas3=dict(argstr='--inAtlas3 %s', ), - inAtlas4=dict(argstr='--inAtlas4 %s', ), - inAtlas5=dict(argstr='--inAtlas5 %f', ), - inAtlas6=dict(argstr='--inAtlas6 %s', ), - inConnectivity=dict(argstr='--inConnectivity %s', ), - inCorrect=dict(argstr='--inCorrect %s', ), - inFLAIR=dict(argstr='--inFLAIR %s', ), - inInclude=dict(argstr='--inInclude %s', ), - inMaximum=dict(argstr='--inMaximum %d', ), - inMaximum2=dict(argstr='--inMaximum2 %d', ), - inMaximum3=dict(argstr='--inMaximum3 %d', ), - inMaximum4=dict(argstr='--inMaximum4 %f', ), - inMaximum5=dict(argstr='--inMaximum5 %d', ), - inOutput=dict(argstr='--inOutput %s', ), - inOutput2=dict(argstr='--inOutput2 %s', ), - inOutput3=dict(argstr='--inOutput3 %s', ), - inSmooting=dict(argstr='--inSmooting %f', ), - inT1_MPRAGE=dict(argstr='--inT1_MPRAGE %s', ), - inT1_SPGR=dict(argstr='--inT1_SPGR %s', ), - null=dict(argstr='--null %s', ), + inAtlas=dict( + argstr="--inAtlas %s", + ), + inAtlas2=dict( + argstr="--inAtlas2 %s", + extensions=None, + ), + inAtlas3=dict( + argstr="--inAtlas3 %s", + extensions=None, + ), + inAtlas4=dict( + argstr="--inAtlas4 %s", + extensions=None, + ), + inAtlas5=dict( + argstr="--inAtlas5 %f", + ), + inAtlas6=dict( + argstr="--inAtlas6 %s", + ), + inConnectivity=dict( + argstr="--inConnectivity %s", + ), + inCorrect=dict( + argstr="--inCorrect %s", + ), + inFLAIR=dict( + argstr="--inFLAIR %s", + extensions=None, + ), + inInclude=dict( + argstr="--inInclude %s", + ), + inMaximum=dict( + argstr="--inMaximum %d", + ), + inMaximum2=dict( + argstr="--inMaximum2 %d", + ), + inMaximum3=dict( + argstr="--inMaximum3 %d", + ), + inMaximum4=dict( + argstr="--inMaximum4 %f", + ), + inMaximum5=dict( + argstr="--inMaximum5 %d", + ), + inOutput=dict( + argstr="--inOutput %s", + ), + inOutput2=dict( + argstr="--inOutput2 %s", + ), + inOutput3=dict( + argstr="--inOutput3 %s", + ), + inSmooting=dict( + argstr="--inSmooting %f", + ), + inT1_MPRAGE=dict( + argstr="--inT1_MPRAGE %s", + extensions=None, + ), + inT1_SPGR=dict( + argstr="--inT1_SPGR %s", + extensions=None, + ), + null=dict( + argstr="--null %s", + ), outCortical=dict( - argstr='--outCortical %s', + argstr="--outCortical %s", hash_files=False, ), outFilled=dict( - argstr='--outFilled %s', + argstr="--outFilled %s", hash_files=False, ), outHard=dict( - argstr='--outHard %s', + argstr="--outHard %s", hash_files=False, ), outHard2=dict( - argstr='--outHard2 %s', + argstr="--outHard2 %s", hash_files=False, ), outInhomogeneity=dict( - argstr='--outInhomogeneity %s', + argstr="--outInhomogeneity %s", hash_files=False, ), outLesion=dict( - argstr='--outLesion %s', + argstr="--outLesion %s", hash_files=False, ), outMembership=dict( - argstr='--outMembership %s', + argstr="--outMembership %s", hash_files=False, ), outSulcal=dict( - argstr='--outSulcal %s', + argstr="--outSulcal %s", hash_files=False, ), outWM=dict( - argstr='--outWM %s', + argstr="--outWM %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', 
), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = MedicAlgorithmLesionToads.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmLesionToads_outputs(): output_map = dict( - outCortical=dict(), - outFilled=dict(), - outHard=dict(), - outHard2=dict(), - outInhomogeneity=dict(), - outLesion=dict(), - outMembership=dict(), - outSulcal=dict(), - outWM=dict(), + outCortical=dict( + extensions=None, + ), + outFilled=dict( + extensions=None, + ), + outHard=dict( + extensions=None, + ), + outHard2=dict( + extensions=None, + ), + outInhomogeneity=dict( + extensions=None, + ), + outLesion=dict( + extensions=None, + ), + outMembership=dict( + extensions=None, + ), + outSulcal=dict( + extensions=None, + ), + outWM=dict( + extensions=None, + ), ) outputs = MedicAlgorithmLesionToads.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmMipavReorient.py b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmMipavReorient.py index 7c8c7248ad..49c307f27f 100644 --- a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmMipavReorient.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmMipavReorient.py @@ -1,44 +1,70 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import MedicAlgorithmMipavReorient def test_MedicAlgorithmMipavReorient_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inInterpolation=dict(argstr='--inInterpolation %s', ), - inNew=dict(argstr='--inNew %s', ), - inResolution=dict(argstr='--inResolution %s', ), + inInterpolation=dict( + argstr="--inInterpolation %s", + ), + inNew=dict( + argstr="--inNew %s", + ), + inResolution=dict( + argstr="--inResolution %s", + ), inSource=dict( - argstr='--inSource %s', - sep=';', - ), - inTemplate=dict(argstr='--inTemplate %s', ), - inUser=dict(argstr='--inUser %s', ), - inUser2=dict(argstr='--inUser2 %s', ), - inUser3=dict(argstr='--inUser3 %s', ), - inUser4=dict(argstr='--inUser4 %s', ), - null=dict(argstr='--null %s', ), + argstr="--inSource %s", + sep=";", + ), + inTemplate=dict( + argstr="--inTemplate %s", + extensions=None, + ), + inUser=dict( + argstr="--inUser %s", + ), + inUser2=dict( + argstr="--inUser2 %s", + ), + inUser3=dict( + argstr="--inUser3 %s", + ), + inUser4=dict( + argstr="--inUser4 %s", + ), + null=dict( + argstr="--null %s", + ), outReoriented=dict( - argstr='--outReoriented %s', - sep=';', + argstr="--outReoriented %s", + sep=";", + ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = MedicAlgorithmMipavReorient.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmMipavReorient_outputs(): output_map = dict() outputs = MedicAlgorithmMipavReorient.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmN3.py 
b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmN3.py index 9d5a148a24..bf895247a6 100644 --- a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmN3.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmN3.py @@ -1,49 +1,81 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import MedicAlgorithmN3 def test_MedicAlgorithmN3_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inAutomatic=dict(argstr='--inAutomatic %s', ), - inEnd=dict(argstr='--inEnd %f', ), - inField=dict(argstr='--inField %f', ), - inInput=dict(argstr='--inInput %s', ), - inKernel=dict(argstr='--inKernel %f', ), - inMaximum=dict(argstr='--inMaximum %d', ), - inSignal=dict(argstr='--inSignal %f', ), - inSubsample=dict(argstr='--inSubsample %f', ), - inWeiner=dict(argstr='--inWeiner %f', ), - null=dict(argstr='--null %s', ), + inAutomatic=dict( + argstr="--inAutomatic %s", + ), + inEnd=dict( + argstr="--inEnd %f", + ), + inField=dict( + argstr="--inField %f", + ), + inInput=dict( + argstr="--inInput %s", + extensions=None, + ), + inKernel=dict( + argstr="--inKernel %f", + ), + inMaximum=dict( + argstr="--inMaximum %d", + ), + inSignal=dict( + argstr="--inSignal %f", + ), + inSubsample=dict( + argstr="--inSubsample %f", + ), + inWeiner=dict( + argstr="--inWeiner %f", + ), + null=dict( + argstr="--null %s", + ), outInhomogeneity=dict( - argstr='--outInhomogeneity %s', + argstr="--outInhomogeneity %s", hash_files=False, ), outInhomogeneity2=dict( - argstr='--outInhomogeneity2 %s', + argstr="--outInhomogeneity2 %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = MedicAlgorithmN3.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmN3_outputs(): output_map = dict( - outInhomogeneity=dict(), - outInhomogeneity2=dict(), + outInhomogeneity=dict( + extensions=None, + ), + outInhomogeneity2=dict( + extensions=None, + ), ) outputs = MedicAlgorithmN3.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmSPECTRE2010.py b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmSPECTRE2010.py index b2d247e9dc..b62def8a4f 100644 --- a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmSPECTRE2010.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmSPECTRE2010.py @@ -1,109 +1,206 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import MedicAlgorithmSPECTRE2010 def test_MedicAlgorithmSPECTRE2010_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inApply=dict(argstr='--inApply %s', ), - inAtlas=dict(argstr='--inAtlas %s', ), - inBackground=dict(argstr='--inBackground %f', ), - inCoarse=dict(argstr='--inCoarse %f', ), - inCost=dict(argstr='--inCost %s', ), - inDegrees=dict(argstr='--inDegrees %s', ), - inFind=dict(argstr='--inFind %s', ), - inFine=dict(argstr='--inFine %f', ), - inImage=dict(argstr='--inImage %s', ), - inInhomogeneity=dict(argstr='--inInhomogeneity %s', ), - 
inInitial=dict(argstr='--inInitial %d', ), - inInitial2=dict(argstr='--inInitial2 %f', ), - inInput=dict(argstr='--inInput %s', ), - inMMC=dict(argstr='--inMMC %d', ), - inMMC2=dict(argstr='--inMMC2 %d', ), - inMaximum=dict(argstr='--inMaximum %f', ), - inMinimum=dict(argstr='--inMinimum %f', ), - inMinimum2=dict(argstr='--inMinimum2 %f', ), - inMultiple=dict(argstr='--inMultiple %d', ), - inMultithreading=dict(argstr='--inMultithreading %s', ), - inNumber=dict(argstr='--inNumber %d', ), - inNumber2=dict(argstr='--inNumber2 %d', ), - inOutput=dict(argstr='--inOutput %s', ), - inOutput2=dict(argstr='--inOutput2 %s', ), - inOutput3=dict(argstr='--inOutput3 %s', ), - inOutput4=dict(argstr='--inOutput4 %s', ), - inOutput5=dict(argstr='--inOutput5 %s', ), - inRegistration=dict(argstr='--inRegistration %s', ), - inResample=dict(argstr='--inResample %s', ), - inRun=dict(argstr='--inRun %s', ), - inSkip=dict(argstr='--inSkip %s', ), - inSmoothing=dict(argstr='--inSmoothing %f', ), - inSubsample=dict(argstr='--inSubsample %s', ), - inUse=dict(argstr='--inUse %s', ), - null=dict(argstr='--null %s', ), + inApply=dict( + argstr="--inApply %s", + ), + inAtlas=dict( + argstr="--inAtlas %s", + extensions=None, + ), + inBackground=dict( + argstr="--inBackground %f", + ), + inCoarse=dict( + argstr="--inCoarse %f", + ), + inCost=dict( + argstr="--inCost %s", + ), + inDegrees=dict( + argstr="--inDegrees %s", + ), + inFind=dict( + argstr="--inFind %s", + ), + inFine=dict( + argstr="--inFine %f", + ), + inImage=dict( + argstr="--inImage %s", + ), + inInhomogeneity=dict( + argstr="--inInhomogeneity %s", + ), + inInitial=dict( + argstr="--inInitial %d", + ), + inInitial2=dict( + argstr="--inInitial2 %f", + ), + inInput=dict( + argstr="--inInput %s", + extensions=None, + ), + inMMC=dict( + argstr="--inMMC %d", + ), + inMMC2=dict( + argstr="--inMMC2 %d", + ), + inMaximum=dict( + argstr="--inMaximum %f", + ), + inMinimum=dict( + argstr="--inMinimum %f", + ), + inMinimum2=dict( + argstr="--inMinimum2 %f", + ), + inMultiple=dict( + argstr="--inMultiple %d", + ), + inMultithreading=dict( + argstr="--inMultithreading %s", + ), + inNumber=dict( + argstr="--inNumber %d", + ), + inNumber2=dict( + argstr="--inNumber2 %d", + ), + inOutput=dict( + argstr="--inOutput %s", + ), + inOutput2=dict( + argstr="--inOutput2 %s", + ), + inOutput3=dict( + argstr="--inOutput3 %s", + ), + inOutput4=dict( + argstr="--inOutput4 %s", + ), + inOutput5=dict( + argstr="--inOutput5 %s", + ), + inRegistration=dict( + argstr="--inRegistration %s", + ), + inResample=dict( + argstr="--inResample %s", + ), + inRun=dict( + argstr="--inRun %s", + ), + inSkip=dict( + argstr="--inSkip %s", + ), + inSmoothing=dict( + argstr="--inSmoothing %f", + ), + inSubsample=dict( + argstr="--inSubsample %s", + ), + inUse=dict( + argstr="--inUse %s", + ), + null=dict( + argstr="--null %s", + ), outFANTASM=dict( - argstr='--outFANTASM %s', + argstr="--outFANTASM %s", hash_files=False, ), outMask=dict( - argstr='--outMask %s', + argstr="--outMask %s", hash_files=False, ), outMidsagittal=dict( - argstr='--outMidsagittal %s', + argstr="--outMidsagittal %s", hash_files=False, ), outOriginal=dict( - argstr='--outOriginal %s', + argstr="--outOriginal %s", hash_files=False, ), outPrior=dict( - argstr='--outPrior %s', + argstr="--outPrior %s", hash_files=False, ), outSegmentation=dict( - argstr='--outSegmentation %s', + argstr="--outSegmentation %s", hash_files=False, ), outSplitHalves=dict( - argstr='--outSplitHalves %s', + argstr="--outSplitHalves %s", hash_files=False, ), 
outStripped=dict( - argstr='--outStripped %s', + argstr="--outStripped %s", hash_files=False, ), outd0=dict( - argstr='--outd0 %s', + argstr="--outd0 %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = MedicAlgorithmSPECTRE2010.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmSPECTRE2010_outputs(): output_map = dict( - outFANTASM=dict(), - outMask=dict(), - outMidsagittal=dict(), - outOriginal=dict(), - outPrior=dict(), - outSegmentation=dict(), - outSplitHalves=dict(), - outStripped=dict(), - outd0=dict(), + outFANTASM=dict( + extensions=None, + ), + outMask=dict( + extensions=None, + ), + outMidsagittal=dict( + extensions=None, + ), + outOriginal=dict( + extensions=None, + ), + outPrior=dict( + extensions=None, + ), + outSegmentation=dict( + extensions=None, + ), + outSplitHalves=dict( + extensions=None, + ), + outStripped=dict( + extensions=None, + ), + outd0=dict( + extensions=None, + ), ) outputs = MedicAlgorithmSPECTRE2010.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmThresholdToBinaryMask.py b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmThresholdToBinaryMask.py index b498025401..bdd6e4e1b6 100644 --- a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmThresholdToBinaryMask.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmThresholdToBinaryMask.py @@ -1,39 +1,54 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import MedicAlgorithmThresholdToBinaryMask def test_MedicAlgorithmThresholdToBinaryMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inLabel=dict( - argstr='--inLabel %s', - sep=';', + argstr="--inLabel %s", + sep=";", + ), + inMaximum=dict( + argstr="--inMaximum %f", + ), + inMinimum=dict( + argstr="--inMinimum %f", + ), + inUse=dict( + argstr="--inUse %s", + ), + null=dict( + argstr="--null %s", ), - inMaximum=dict(argstr='--inMaximum %f', ), - inMinimum=dict(argstr='--inMinimum %f', ), - inUse=dict(argstr='--inUse %s', ), - null=dict(argstr='--null %s', ), outBinary=dict( - argstr='--outBinary %s', - sep=';', + argstr="--outBinary %s", + sep=";", + ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = MedicAlgorithmThresholdToBinaryMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmThresholdToBinaryMask_outputs(): output_map = dict() outputs = MedicAlgorithmThresholdToBinaryMask.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_RandomVol.py b/nipype/interfaces/mipav/tests/test_auto_RandomVol.py index d3d92142c4..4929f54d6a 100644 --- a/nipype/interfaces/mipav/tests/test_auto_RandomVol.py +++ b/nipype/interfaces/mipav/tests/test_auto_RandomVol.py @@ -1,43 +1,74 @@ # 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..developer import RandomVol def test_RandomVol_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inField=dict(argstr='--inField %s', ), - inLambda=dict(argstr='--inLambda %f', ), - inMaximum=dict(argstr='--inMaximum %d', ), - inMinimum=dict(argstr='--inMinimum %d', ), - inSize=dict(argstr='--inSize %d', ), - inSize2=dict(argstr='--inSize2 %d', ), - inSize3=dict(argstr='--inSize3 %d', ), - inSize4=dict(argstr='--inSize4 %d', ), - inStandard=dict(argstr='--inStandard %d', ), - null=dict(argstr='--null %s', ), + inField=dict( + argstr="--inField %s", + ), + inLambda=dict( + argstr="--inLambda %f", + ), + inMaximum=dict( + argstr="--inMaximum %d", + ), + inMinimum=dict( + argstr="--inMinimum %d", + ), + inSize=dict( + argstr="--inSize %d", + ), + inSize2=dict( + argstr="--inSize2 %d", + ), + inSize3=dict( + argstr="--inSize3 %d", + ), + inSize4=dict( + argstr="--inSize4 %d", + ), + inStandard=dict( + argstr="--inStandard %d", + ), + null=dict( + argstr="--null %s", + ), outRand1=dict( - argstr='--outRand1 %s', + argstr="--outRand1 %s", hash_files=False, ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), + xDefaultMem=dict( + argstr="-xDefaultMem %d", + ), xMaxProcess=dict( - argstr='-xMaxProcess %d', + argstr="-xMaxProcess %d", usedefault=True, ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + xPrefExt=dict( + argstr="--xPrefExt %s", + ), ) inputs = RandomVol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RandomVol_outputs(): - output_map = dict(outRand1=dict(), ) + output_map = dict( + outRand1=dict( + extensions=None, + ), + ) outputs = RandomVol.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mixins/__init__.py b/nipype/interfaces/mixins/__init__.py index 587d3a22a8..e54986231f 100644 --- a/nipype/interfaces/mixins/__init__.py +++ b/nipype/interfaces/mixins/__init__.py @@ -1,2 +1,6 @@ from .reporting import ( - ReportCapableInterface, ReportCapableInputSpec, ReportCapableOutputSpec) + ReportCapableInterface, + ReportCapableInputSpec, + ReportCapableOutputSpec, +) +from .fixheader import CopyHeaderInputSpec, CopyHeaderInterface diff --git a/nipype/interfaces/mixins/fixheader.py b/nipype/interfaces/mixins/fixheader.py new file mode 100644 index 0000000000..7bbff18f2b --- /dev/null +++ b/nipype/interfaces/mixins/fixheader.py @@ -0,0 +1,136 @@ +from ..base import BaseInterface, BaseInterfaceInputSpec, traits +from ...utils.imagemanip import copy_header as _copy_header + + +class CopyHeaderInputSpec(BaseInterfaceInputSpec): + copy_header = traits.Bool( + desc="Copy headers of the input image into the output image" + ) + + +class CopyHeaderInterface(BaseInterface): + """Copy headers if the copy_header input is ``True`` + + This interface mixin adds a post-run hook that allows for copying + an input header to an output file. + The subclass should specify a ``_copy_header_map`` that maps the **output** + image to the **input** image whose header should be copied. + + This feature is intended for tools that adjust voxel data without + modifying the header, but for some reason do not reliably preserve the header.
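To make the ``_copy_header_map`` shape concrete before the worked example that follows, here is a minimal sketch with hypothetical trait names (none of these names come from the diff): a bare string value copies the named input's header while keeping the output file's own data type, whereas an ``(input, False)`` tuple additionally restores the input's data type.

    # Hypothetical subclass attribute illustrating both map value forms:
    _copy_header_map = {
        'out_corrected': 'in_file',      # copy header/affine; keep the output's own dtype
        'out_mask': ('in_file', False),  # copy header/affine; also force the input's dtype
    }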
+ + Here we show an example interface that takes advantage of the mixin by simply + setting the data block: + + >>> import os + >>> import numpy as np + >>> import nibabel as nb + >>> from nipype.interfaces.base import SimpleInterface, TraitedSpec, File + >>> from nipype.interfaces.mixins import CopyHeaderInputSpec, CopyHeaderInterface + + >>> class ZerofileInputSpec(CopyHeaderInputSpec): + ... in_file = File(mandatory=True, exists=True) + + >>> class ZerofileOutputSpec(TraitedSpec): + ... out_file = File() + + >>> class ZerofileInterface(SimpleInterface, CopyHeaderInterface): + ... input_spec = ZerofileInputSpec + ... output_spec = ZerofileOutputSpec + ... _copy_header_map = {'out_file': 'in_file'} + ... + ... def _run_interface(self, runtime): + ... img = nb.load(self.inputs.in_file) + ... # Just set the data. Let the CopyHeaderInterface mixin fix the affine and header. + ... nb.Nifti1Image(np.zeros(img.shape, dtype=np.uint8), None).to_filename('out.nii') + ... self._results = {'out_file': os.path.abspath('out.nii')} + ... return runtime + + Consider a file of all ones and a non-trivial affine: + + >>> in_file = 'test.nii' + >>> nb.Nifti1Image(np.ones((5,5,5), dtype=np.int16), + ... affine=np.diag((4, 3, 2, 1))).to_filename(in_file) + + The default behavior would produce a file with similar data: + + >>> res = ZerofileInterface(in_file=in_file).run() + >>> out_img = nb.load(res.outputs.out_file) + >>> out_img.shape + (5, 5, 5) + >>> np.all(out_img.get_fdata() == 0) + True + + An updated data type: + + >>> out_img.get_data_dtype() + dtype('uint8') + + But a different affine: + + >>> np.array_equal(out_img.affine, np.diag((4, 3, 2, 1))) + False + + With ``copy_header=True``, the affine is also equal: + + >>> res = ZerofileInterface(in_file=in_file, copy_header=True).run() + >>> out_img = nb.load(res.outputs.out_file) + >>> np.array_equal(out_img.affine, np.diag((4, 3, 2, 1))) + True + + The data properties remain as expected: + + >>> out_img.shape + (5, 5, 5) + >>> out_img.get_data_dtype() + dtype('uint8') + >>> np.all(out_img.get_fdata() == 0) + True + + By default, the data type of the output file is permitted to vary from the + inputs. That is, the data type is preserved. + If the data type of the original file is preferred, the ``_copy_header_map`` + can indicate the output data type should **not** be preserved by providing a + tuple of the input and ``False``. + + >>> ZerofileInterface._copy_header_map['out_file'] = ('in_file', False) + + >>> res = ZerofileInterface(in_file=in_file, copy_header=True).run() + >>> out_img = nb.load(res.outputs.out_file) + >>> out_img.get_data_dtype() + dtype('<i2') + >>> np.array_equal(out_img.affine, np.diag((4, 3, 2, 1))) + True + >>> out_img.shape + (5, 5, 5) + >>> np.all(out_img.get_fdata() == 0) + True + + Providing a tuple where the second value is ``True`` is also permissible to + achieve the default behavior.
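The mixin delegates the actual work to ``copy_header`` from ``nipype.utils.imagemanip``, whose body is not shown in this diff. A minimal sketch of what such a helper plausibly does with nibabel (an illustrative re-implementation under that assumption, not the actual nipype code):

    import nibabel as nb

    def copy_header_sketch(header_file, in_file, keep_dtype=True):
        # Rewrite in_file in place, borrowing affine and header from header_file,
        # matching the call order _copy_header(inputs[inp], outputs[out], ...).
        img = nb.load(in_file)
        ref = nb.load(header_file)
        # Same data block, reference geometry/metadata.
        fixed = img.__class__(img.dataobj, ref.affine, ref.header.copy())
        if keep_dtype:
            # Preserve the data type the tool actually wrote (the default).
            fixed.set_data_dtype(img.get_data_dtype())
        fixed.to_filename(in_file)
        return in_file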
+ + """ + + _copy_header_map = None + + def _post_run_hook(self, runtime): + """Copy headers for outputs, if required.""" + runtime = super()._post_run_hook(runtime) + + if self._copy_header_map is None or not self.inputs.copy_header: + return runtime + + inputs = self.inputs.get_traitsfree() + outputs = self.aggregate_outputs(runtime=runtime).get_traitsfree() + defined_outputs = set(outputs.keys()).intersection(self._copy_header_map.keys()) + for out in defined_outputs: + inp = self._copy_header_map[out] + keep_dtype = True + if isinstance(inp, tuple): + inp, keep_dtype = inp + _copy_header(inputs[inp], outputs[out], keep_dtype=keep_dtype) + + return runtime diff --git a/nipype/interfaces/mixins/reporting.py b/nipype/interfaces/mixins/reporting.py index 3f4d1b1317..90ca804618 100644 --- a/nipype/interfaces/mixins/reporting.py +++ b/nipype/interfaces/mixins/reporting.py @@ -1,39 +1,40 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ class mixin and utilities for enabling reports for nipype interfaces """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os from abc import abstractmethod from ... import logging -from ..base import ( - File, BaseInterface, BaseInterfaceInputSpec, TraitedSpec) +from ..base import File, BaseInterface, BaseInterfaceInputSpec, TraitedSpec -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class ReportCapableInputSpec(BaseInterfaceInputSpec): - out_report = File('report', usedefault=True, hash_files=False, - desc='filename for the visual report') + out_report = File( + "report", + usedefault=True, + hash_files=False, + desc="filename for the visual report", + ) class ReportCapableOutputSpec(TraitedSpec): - out_report = File(desc='filename for the visual report') + out_report = File(desc="filename for the visual report") class ReportCapableInterface(BaseInterface): """Mixin to enable reporting for Nipype interfaces""" + _out_report = None def __init__(self, generate_report=False, **kwargs): - super(ReportCapableInterface, self).__init__(**kwargs) + super().__init__(**kwargs) self.generate_report = generate_report def _post_run_hook(self, runtime): - runtime = super(ReportCapableInterface, self)._post_run_hook(runtime) + runtime = super()._post_run_hook(runtime) # leave early if there's nothing to do if not self.generate_report: @@ -41,8 +42,9 @@ def _post_run_hook(self, runtime): self._out_report = self.inputs.out_report if not os.path.isabs(self._out_report): - self._out_report = os.path.abspath(os.path.join(runtime.cwd, - self._out_report)) + self._out_report = os.path.abspath( + os.path.join(runtime.cwd, self._out_report) + ) self._generate_report() @@ -50,11 +52,11 @@ def _post_run_hook(self, runtime): def _list_outputs(self): try: - outputs = super(ReportCapableInterface, self)._list_outputs() + outputs = super()._list_outputs() except NotImplementedError: outputs = {} if self._out_report is not None: - outputs['out_report'] = self._out_report + outputs["out_report"] = self._out_report return outputs @abstractmethod diff --git a/nipype/interfaces/mixins/tests/__init__.py b/nipype/interfaces/mixins/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/mixins/tests/__init__.py +++ b/nipype/interfaces/mixins/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mixins/tests/test_auto_CopyHeaderInterface.py 
b/nipype/interfaces/mixins/tests/test_auto_CopyHeaderInterface.py new file mode 100644 index 0000000000..58f9bc0864 --- /dev/null +++ b/nipype/interfaces/mixins/tests/test_auto_CopyHeaderInterface.py @@ -0,0 +1,11 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..fixheader import CopyHeaderInterface + + +def test_CopyHeaderInterface_inputs(): + input_map = dict() + inputs = CopyHeaderInterface.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mixins/tests/test_auto_ReportCapableInterface.py b/nipype/interfaces/mixins/tests/test_auto_ReportCapableInterface.py index 8985d7069d..0cf527114b 100644 --- a/nipype/interfaces/mixins/tests/test_auto_ReportCapableInterface.py +++ b/nipype/interfaces/mixins/tests/test_auto_ReportCapableInterface.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..reporting import ReportCapableInterface diff --git a/nipype/interfaces/mne/__init__.py b/nipype/interfaces/mne/__init__.py index 8bf3db28ed..7eba176251 100644 --- a/nipype/interfaces/mne/__init__.py +++ b/nipype/interfaces/mne/__init__.py @@ -1,2 +1,3 @@ -# -*- coding: utf-8 -*- +"""MNE is software for exploring, visualizing, and analyzing human neurophysiological data.""" + from .base import WatershedBEM diff --git a/nipype/interfaces/mne/base.py b/nipype/interfaces/mne/base.py index 7f53071372..c8b1f6012d 100644 --- a/nipype/interfaces/mne/base.py +++ b/nipype/interfaces/mne/base.py @@ -1,81 +1,88 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, bytes - import os.path as op import glob from ...
import logging from ...utils.filemanip import simplify_list -from ..base import (traits, File, Directory, TraitedSpec, OutputMultiPath) +from ..base import traits, File, Directory, TraitedSpec, OutputMultiPath from ..freesurfer.base import FSCommand, FSTraitedSpec -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class WatershedBEMInputSpec(FSTraitedSpec): subject_id = traits.Str( - argstr='--subject %s', + argstr="--subject %s", mandatory=True, - desc='Subject ID (must have a complete Freesurfer directory)') + desc="Subject ID (must have a complete Freesurfer directory)", + ) subjects_dir = Directory( exists=True, mandatory=True, usedefault=True, - desc='Path to Freesurfer subjects directory') + desc="Path to Freesurfer subjects directory", + ) volume = traits.Enum( - 'T1', - 'aparc+aseg', - 'aseg', - 'brain', - 'orig', - 'brainmask', - 'ribbon', - argstr='--volume %s', + "T1", + "aparc+aseg", + "aseg", + "brain", + "orig", + "brainmask", + "ribbon", + argstr="--volume %s", usedefault=True, - desc='The volume from the "mri" directory to use (defaults to T1)') + desc='The volume from the "mri" directory to use (defaults to T1)', + ) overwrite = traits.Bool( True, usedefault=True, - argstr='--overwrite', - desc='Overwrites the existing files') + argstr="--overwrite", + desc="Overwrites the existing files", + ) atlas_mode = traits.Bool( - argstr='--atlas', - desc='Use atlas mode for registration (default: no rigid alignment)') + argstr="--atlas", + desc="Use atlas mode for registration (default: no rigid alignment)", + ) class WatershedBEMOutputSpec(TraitedSpec): mesh_files = OutputMultiPath( File(exists=True), - desc=('Paths to the output meshes (brain, inner ' - 'skull, outer skull, outer skin)')) + desc=( + "Paths to the output meshes (brain, inner " + "skull, outer skull, outer skin)" + ), + ) brain_surface = File( - exists=True, - loc='bem/watershed', - desc='Brain surface (in Freesurfer format)') + exists=True, loc="bem/watershed", desc="Brain surface (in Freesurfer format)" + ) inner_skull_surface = File( exists=True, - loc='bem/watershed', - desc='Inner skull surface (in Freesurfer format)') + loc="bem/watershed", + desc="Inner skull surface (in Freesurfer format)", + ) outer_skull_surface = File( exists=True, - loc='bem/watershed', - desc='Outer skull surface (in Freesurfer format)') + loc="bem/watershed", + desc="Outer skull surface (in Freesurfer format)", + ) outer_skin_surface = File( exists=True, - loc='bem/watershed', - desc='Outer skin surface (in Freesurfer format)') + loc="bem/watershed", + desc="Outer skin surface (in Freesurfer format)", + ) fif_file = File( exists=True, - loc='bem', - altkey='fif', - desc='"fif" format file for EEG processing in MNE') + loc="bem", + altkey="fif", + desc='"fif" format file for EEG processing in MNE', + ) cor_files = OutputMultiPath( File(exists=True), - loc='bem/watershed/ws', - altkey='COR', - desc='"COR" format files') + loc="bem/watershed/ws", + altkey="COR", + desc='"COR" format files', + ) class WatershedBEM(FSCommand): @@ -89,23 +96,23 @@ class WatershedBEM(FSCommand): >>> bem.inputs.subject_id = 'subj1' >>> bem.inputs.subjects_dir = '.' 
>>> bem.cmdline - 'mne_watershed_bem --overwrite --subject subj1 --volume T1' - >>> bem.run() # doctest: +SKIP + 'mne watershed_bem --overwrite --subject subj1 --volume T1' + >>> bem.run() # doctest: +SKIP - """ + """ - _cmd = 'mne_watershed_bem' + _cmd = "mne watershed_bem" input_spec = WatershedBEMInputSpec output_spec = WatershedBEMOutputSpec - _additional_metadata = ['loc', 'altkey'] + _additional_metadata = ["loc", "altkey"] def _get_files(self, path, key, dirval, altkey=None): - globsuffix = '*' - globprefix = '*' + globsuffix = "*" + globprefix = "*" keydir = op.join(path, dirval) if altkey: key = altkey - globpattern = op.join(keydir, ''.join((globprefix, key, globsuffix))) + globpattern = op.join(keydir, f"{globprefix}{key}{globsuffix}") return glob.glob(globpattern) def _list_outputs(self): @@ -115,22 +122,23 @@ def _list_outputs(self): output_traits = self._outputs() mesh_paths = [] for k in list(outputs.keys()): - if k != 'mesh_files': - val = self._get_files(subject_path, k, - output_traits.traits()[k].loc, - output_traits.traits()[k].altkey) + if k != "mesh_files": + val = self._get_files( + subject_path, + k, + output_traits.traits()[k].loc, + output_traits.traits()[k].altkey, + ) if val: value_list = simplify_list(val) if isinstance(value_list, list): - out_files = [] - for value in value_list: - out_files.append(op.abspath(value)) + out_files = [op.abspath(value) for value in value_list] elif isinstance(value_list, (str, bytes)): out_files = op.abspath(value_list) else: raise TypeError outputs[k] = out_files - if not k.rfind('surface') == -1: + if k.rfind("surface") != -1: mesh_paths.append(out_files) - outputs['mesh_files'] = mesh_paths + outputs["mesh_files"] = mesh_paths return outputs diff --git a/nipype/interfaces/mne/tests/__init__.py b/nipype/interfaces/mne/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/mne/tests/__init__.py +++ b/nipype/interfaces/mne/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mne/tests/test_auto_WatershedBEM.py b/nipype/interfaces/mne/tests/test_auto_WatershedBEM.py index 4ced8de75a..57f1b40e4d 100644 --- a/nipype/interfaces/mne/tests/test_auto_WatershedBEM.py +++ b/nipype/interfaces/mne/tests/test_auto_WatershedBEM.py @@ -1,22 +1,25 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import WatershedBEM def test_WatershedBEM_inputs(): input_map = dict( - args=dict(argstr='%s', ), - atlas_mode=dict(argstr='--atlas', ), + args=dict( + argstr="%s", + ), + atlas_mode=dict( + argstr="--atlas", + ), environ=dict( nohash=True, usedefault=True, ), overwrite=dict( - argstr='--overwrite', + argstr="--overwrite", usedefault=True, ), subject_id=dict( - argstr='--subject %s', + argstr="--subject %s", mandatory=True, ), subjects_dir=dict( @@ -24,7 +27,7 @@ def test_WatershedBEM_inputs(): usedefault=True, ), volume=dict( - argstr='--volume %s', + argstr="--volume %s", usedefault=True, ), ) @@ -33,21 +36,36 @@ def test_WatershedBEM_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WatershedBEM_outputs(): output_map = dict( - brain_surface=dict(loc='bem/watershed', ), + brain_surface=dict( + extensions=None, + loc="bem/watershed", + ), cor_files=dict( - altkey='COR', - loc='bem/watershed/ws', + altkey="COR", + loc="bem/watershed/ws", ), fif_file=dict( - altkey='fif', - loc='bem', + altkey="fif", + extensions=None, 
+ loc="bem", + ), + inner_skull_surface=dict( + extensions=None, + loc="bem/watershed", ), - inner_skull_surface=dict(loc='bem/watershed', ), mesh_files=dict(), - outer_skin_surface=dict(loc='bem/watershed', ), - outer_skull_surface=dict(loc='bem/watershed', ), + outer_skin_surface=dict( + extensions=None, + loc="bem/watershed", + ), + outer_skull_surface=dict( + extensions=None, + loc="bem/watershed", + ), ) outputs = WatershedBEM.output_spec() diff --git a/nipype/interfaces/mrtrix/__init__.py b/nipype/interfaces/mrtrix/__init__.py index ea066d4cd8..232dc119cf 100644 --- a/nipype/interfaces/mrtrix/__init__.py +++ b/nipype/interfaces/mrtrix/__init__.py @@ -1,15 +1,35 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from .tracking import (Tracks2Prob, FilterTracks, StreamlineTrack, - DiffusionTensorStreamlineTrack, - SphericallyDeconvolutedStreamlineTrack, - ProbabilisticSphericallyDeconvolutedStreamlineTrack) -from .tensors import (FSL2MRTrix, ConstrainedSphericalDeconvolution, - DWI2SphericalHarmonicsImage, EstimateResponseForSH, - GenerateDirections, FindShPeaks, Directions2Amplitude) -from .preprocess import (MRConvert, MRMultiply, MRTrixViewer, MRTrixInfo, - GenerateWhiteMatterMask, DWI2Tensor, - Tensor2ApparentDiffusion, Tensor2FractionalAnisotropy, - Tensor2Vector, MedianFilter3D, Erode, Threshold) +"""MRTrix version 2 (DEPRECATED) -- tools to perform various types of diffusion MRI analyses.""" +from .tracking import ( + Tracks2Prob, + FilterTracks, + StreamlineTrack, + DiffusionTensorStreamlineTrack, + SphericallyDeconvolutedStreamlineTrack, + ProbabilisticSphericallyDeconvolutedStreamlineTrack, +) +from .tensors import ( + FSL2MRTrix, + ConstrainedSphericalDeconvolution, + DWI2SphericalHarmonicsImage, + EstimateResponseForSH, + GenerateDirections, + FindShPeaks, + Directions2Amplitude, +) +from .preprocess import ( + MRConvert, + MRMultiply, + MRTrixViewer, + MRTrixInfo, + GenerateWhiteMatterMask, + DWI2Tensor, + Tensor2ApparentDiffusion, + Tensor2FractionalAnisotropy, + Tensor2Vector, + MedianFilter3D, + Erode, + Threshold, +) from .convert import MRTrix2TrackVis diff --git a/nipype/interfaces/mrtrix/convert.py b/nipype/interfaces/mrtrix/convert.py index a3a280c895..783974c667 100644 --- a/nipype/interfaces/mrtrix/convert.py +++ b/nipype/interfaces/mrtrix/convert.py @@ -1,35 +1,52 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from io import open - import os.path as op import nibabel as nb -import nibabel.trackvis as trk import numpy as np -from nibabel.trackvis import HeaderError from nibabel.volumeutils import native_code from nibabel.orientations import aff2axcodes from ... 
import logging from ...utils.filemanip import split_filename -from ...workflows.misc.utils import get_data_dims, get_vox_dims from ..base import TraitedSpec, File, isdefined -from ..dipy.base import DipyBaseInterface, HAVE_DIPY as have_dipy +from ..dipy.base import DipyBaseInterface + +iflogger = logging.getLogger("nipype.interface") + + +def get_vox_dims(volume): + import nibabel as nb + + if isinstance(volume, list): + volume = volume[0] + nii = nb.load(volume) + hdr = nii.header + voxdims = hdr.get_zooms() + return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] -iflogger = logging.getLogger('nipype.interface') + +def get_data_dims(volume): + import nibabel as nb + + if isinstance(volume, list): + volume = volume[0] + nii = nb.load(volume) + hdr = nii.header + datadims = hdr.get_data_shape() + return [int(datadims[0]), int(datadims[1]), int(datadims[2])] def transform_to_affine(streams, header, affine): - from dipy.tracking.utils import move_streamlines + try: + from dipy.tracking.utils import transform_tracking_output + except ImportError: + from dipy.tracking.utils import move_streamlines as transform_tracking_output + rotation, scale = np.linalg.qr(affine) - streams = move_streamlines(streams, rotation) - scale[0:3, 0:3] = np.dot(scale[0:3, 0:3], - np.diag(1. / header['voxel_size'])) + streams = transform_tracking_output(streams, rotation) + scale[0:3, 0:3] = np.dot(scale[0:3, 0:3], np.diag(1.0 / header["voxel_size"])) scale[0:3, 3] = abs(scale[0:3, 3]) - streams = move_streamlines(streams, scale) + streams = transform_tracking_output(streams, scale) return streams @@ -40,58 +57,57 @@ def read_mrtrix_tracks(in_file, as_generator=True): def read_mrtrix_header(in_file): - fileobj = open(in_file, 'rb') + fileobj = open(in_file, "rb") header = {} - iflogger.info('Reading header data...') + iflogger.info("Reading header data...") for line in fileobj: line = line.decode() - if line == 'END\n': - iflogger.info('Reached the end of the header!') + if line == "END\n": + iflogger.info("Reached the end of the header!") break - elif ': ' in line: - line = line.replace('\n', '') + elif ": " in line: + line = line.replace("\n", "") line = line.replace("'", "") - key = line.split(': ')[0] - value = line.split(': ')[1] + key = line.split(": ")[0] + value = line.split(": ")[1] header[key] = value iflogger.info('...adding "%s" to header for key "%s"', value, key) fileobj.close() - header['count'] = int(header['count'].replace('\n', '')) - header['offset'] = int(header['file'].replace('.', '')) + header["count"] = int(header["count"].replace("\n", "")) + header["offset"] = int(header["file"].replace(".", "")) return header def read_mrtrix_streamlines(in_file, header, as_generator=True): - offset = header['offset'] - stream_count = header['count'] - fileobj = open(in_file, 'rb') + offset = header["offset"] + stream_count = header["count"] + fileobj = open(in_file, "rb") fileobj.seek(offset) endianness = native_code - f4dt = np.dtype(endianness + 'f4') + f4dt = np.dtype(endianness + "f4") pt_cols = 3 bytesize = pt_cols * 4 def points_per_track(offset): track_points = [] - iflogger.info('Identifying the number of points per tract...') + iflogger.info("Identifying the number of points per tract...") all_str = fileobj.read() num_triplets = int(len(all_str) / bytesize) - pts = np.ndarray( - shape=(num_triplets, pt_cols), dtype='f4', buffer=all_str) + pts = np.ndarray(shape=(num_triplets, pt_cols), dtype="f4", buffer=all_str) nonfinite_list = np.where(np.invert(np.isfinite(pts[:, 2]))) nonfinite_list 
= list(nonfinite_list[0])[ - 0:-1] # Converts numpy array to list, removes the last value + 0:-1 + ] # Converts numpy array to list, removes the last value for idx, value in enumerate(nonfinite_list): if idx == 0: track_points.append(nonfinite_list[idx]) else: - track_points.append( - nonfinite_list[idx] - nonfinite_list[idx - 1] - 1) + track_points.append(nonfinite_list[idx] - nonfinite_list[idx - 1] - 1) return track_points, nonfinite_list def track_gen(track_points): n_streams = 0 - iflogger.info('Reading tracks...') + iflogger.info("Reading tracks...") while True: try: n_pts = track_points[n_streams] @@ -100,14 +116,15 @@ def track_gen(track_points): pts_str = fileobj.read(n_pts * bytesize) nan_str = fileobj.read(bytesize) if len(pts_str) < (n_pts * bytesize): - if not n_streams == stream_count: - raise HeaderError('Expecting %s points, found only %s' % - (stream_count, n_streams)) - iflogger.error('Expecting %s points, found only %s', - stream_count, n_streams) + if n_streams != stream_count: + raise nb.trackvis.HeaderError( + f"Expecting {stream_count} points, found only {n_streams}" + ) + iflogger.error( + "Expecting %s points, found only %s", stream_count, n_streams + ) break - pts = np.ndarray( - shape=(n_pts, pt_cols), dtype=f4dt, buffer=pts_str) + pts = np.ndarray(shape=(n_pts, pt_cols), dtype=f4dt, buffer=pts_str) nan_pt = np.ndarray(shape=(1, pt_cols), dtype=f4dt, buffer=nan_str) if np.isfinite(nan_pt[0][0]): raise ValueError @@ -116,15 +133,14 @@ def track_gen(track_points): yield xyz n_streams += 1 if n_streams == stream_count: - iflogger.info('100%% : %i tracks read', n_streams) + iflogger.info("100%% : %i tracks read", n_streams) raise StopIteration try: if n_streams % int(stream_count / 100) == 0: percent = int(float(n_streams) / float(stream_count) * 100) - iflogger.info('%i%% : %i tracks read', percent, n_streams) + iflogger.info("%i%% : %i tracks read", percent, n_streams) except ZeroDivisionError: - iflogger.info('%i stream read out of %i', n_streams, - stream_count) + iflogger.info("%i stream read out of %i", n_streams, stream_count) track_points, nonfinite_list = points_per_track(offset) fileobj.seek(offset) @@ -138,22 +154,22 @@ class MRTrix2TrackVisInputSpec(TraitedSpec): in_file = File( exists=True, mandatory=True, - desc='The input file for the tracks in MRTrix (.tck) format') - image_file = File( - exists=True, desc='The image the tracks were generated from') + desc="The input file for the tracks in MRTrix (.tck) format", + ) + image_file = File(exists=True, desc="The image the tracks were generated from") matrix_file = File( exists=True, - desc= - 'A transformation matrix to apply to the tracts after they have been generated (from FLIRT - affine transformation from image_file to registration_image_file)' + desc="A transformation matrix to apply to the tracts after they have been generated (from FLIRT - affine transformation from image_file to registration_image_file)", ) registration_image_file = File( - exists=True, - desc='The final image the tracks should be registered to.') + exists=True, desc="The final image the tracks should be registered to." 
+ ) out_filename = File( - 'converted.trk', + "converted.trk", genfile=True, usedefault=True, - desc='The output filename for the tracks in TrackVis (.trk) format') + desc="The output filename for the tracks in TrackVis (.trk) format", + ) class MRTrix2TrackVisOutputSpec(TraitedSpec): @@ -172,12 +188,20 @@ class MRTrix2TrackVis(DipyBaseInterface): >>> tck2trk.inputs.image_file = 'diffusion.nii' >>> tck2trk.run() # doctest: +SKIP """ + input_spec = MRTrix2TrackVisInputSpec output_spec = MRTrix2TrackVisOutputSpec def _run_interface(self, runtime): - from dipy.tracking.utils import move_streamlines, \ - affine_from_fsl_mat_file + from dipy.tracking.utils import affine_from_fsl_mat_file + + try: + from dipy.tracking.utils import transform_tracking_output + except ImportError: + from dipy.tracking.utils import ( + move_streamlines as transform_tracking_output, + ) + dx, dy, dz = get_data_dims(self.inputs.image_file) vx, vy, vz = get_vox_dims(self.inputs.image_file) image_file = nb.load(self.inputs.image_file) @@ -185,81 +209,82 @@ def _run_interface(self, runtime): out_filename = op.abspath(self.inputs.out_filename) # Reads MRTrix tracks - header, streamlines = read_mrtrix_tracks( - self.inputs.in_file, as_generator=True) - iflogger.info('MRTrix Header:') + header, streamlines = read_mrtrix_tracks(self.inputs.in_file, as_generator=True) + iflogger.info("MRTrix Header:") iflogger.info(header) # Writes to Trackvis trk_header = nb.trackvis.empty_header() - trk_header['dim'] = [dx, dy, dz] - trk_header['voxel_size'] = [vx, vy, vz] - trk_header['n_count'] = header['count'] + trk_header["dim"] = [dx, dy, dz] + trk_header["voxel_size"] = [vx, vy, vz] + trk_header["n_count"] = header["count"] if isdefined(self.inputs.matrix_file) and isdefined( - self.inputs.registration_image_file): - iflogger.info('Applying transformation from matrix file %s', - self.inputs.matrix_file) + self.inputs.registration_image_file + ): + iflogger.info( + "Applying transformation from matrix file %s", self.inputs.matrix_file + ) xfm = np.genfromtxt(self.inputs.matrix_file) iflogger.info(xfm) - registration_image_file = nb.load( - self.inputs.registration_image_file) + registration_image_file = nb.load(self.inputs.registration_image_file) reg_affine = registration_image_file.affine - r_dx, r_dy, r_dz = get_data_dims( - self.inputs.registration_image_file) - r_vx, r_vy, r_vz = get_vox_dims( - self.inputs.registration_image_file) - iflogger.info('Using affine from registration image file %s', - self.inputs.registration_image_file) + r_dx, r_dy, r_dz = get_data_dims(self.inputs.registration_image_file) + r_vx, r_vy, r_vz = get_vox_dims(self.inputs.registration_image_file) + iflogger.info( + "Using affine from registration image file %s", + self.inputs.registration_image_file, + ) iflogger.info(reg_affine) - trk_header['vox_to_ras'] = reg_affine - trk_header['dim'] = [r_dx, r_dy, r_dz] - trk_header['voxel_size'] = [r_vx, r_vy, r_vz] + trk_header["vox_to_ras"] = reg_affine + trk_header["dim"] = [r_dx, r_dy, r_dz] + trk_header["voxel_size"] = [r_vx, r_vy, r_vz] - affine = np.dot(affine, np.diag(1. 
/ np.array([vx, vy, vz, 1]))) + affine = np.dot(affine, np.diag(1.0 / np.array([vx, vy, vz, 1]))) transformed_streamlines = transform_to_affine( - streamlines, trk_header, affine) + streamlines, trk_header, affine + ) - aff = affine_from_fsl_mat_file(xfm, [vx, vy, vz], - [r_vx, r_vy, r_vz]) + aff = affine_from_fsl_mat_file(xfm, [vx, vy, vz], [r_vx, r_vy, r_vz]) iflogger.info(aff) axcode = aff2axcodes(reg_affine) - trk_header['voxel_order'] = axcode[0] + axcode[1] + axcode[2] + trk_header["voxel_order"] = axcode[0] + axcode[1] + axcode[2] - final_streamlines = move_streamlines(transformed_streamlines, aff) + final_streamlines = transform_tracking_output(transformed_streamlines, aff) trk_tracks = ((ii, None, None) for ii in final_streamlines) - trk.write(out_filename, trk_tracks, trk_header) - iflogger.info('Saving transformed Trackvis file as %s', - out_filename) - iflogger.info('New TrackVis Header:') + nb.trackvis.write(out_filename, trk_tracks, trk_header) + iflogger.info("Saving transformed Trackvis file as %s", out_filename) + iflogger.info("New TrackVis Header:") iflogger.info(trk_header) else: iflogger.info( - 'Applying transformation from scanner coordinates to %s', - self.inputs.image_file) + "Applying transformation from scanner coordinates to %s", + self.inputs.image_file, + ) axcode = aff2axcodes(affine) - trk_header['voxel_order'] = axcode[0] + axcode[1] + axcode[2] - trk_header['vox_to_ras'] = affine + trk_header["voxel_order"] = axcode[0] + axcode[1] + axcode[2] + trk_header["vox_to_ras"] = affine transformed_streamlines = transform_to_affine( - streamlines, trk_header, affine) + streamlines, trk_header, affine + ) trk_tracks = ((ii, None, None) for ii in transformed_streamlines) - trk.write(out_filename, trk_tracks, trk_header) - iflogger.info('Saving Trackvis file as %s', out_filename) - iflogger.info('TrackVis Header:') + nb.trackvis.write(out_filename, trk_tracks, trk_header) + iflogger.info("Saving Trackvis file as %s", out_filename) + iflogger.info("TrackVis Header:") iflogger.info(trk_header) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self.inputs.out_filename) + outputs["out_file"] = op.abspath(self.inputs.out_filename) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '.trk' + return name + ".trk" diff --git a/nipype/interfaces/mrtrix/preprocess.py b/nipype/interfaces/mrtrix/preprocess.py index 5fc67177a1..8f585cb901 100644 --- a/nipype/interfaces/mrtrix/preprocess.py +++ b/nipype/interfaces/mrtrix/preprocess.py @@ -1,53 +1,55 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os.path as op from ...utils.filemanip import split_filename -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File, InputMultiPath, isdefined) +from ..base import ( + CommandLineInputSpec, + CommandLine, + traits, + TraitedSpec, + File, + InputMultiPath, + isdefined, +) class MRConvertInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='voxel-order data filename') - out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output filename') + 
desc="voxel-order data filename", + ) + out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename") extract_at_axis = traits.Enum( 1, 2, 3, - argstr='-coord %s', + argstr="-coord %s", position=1, - desc= - '"Extract data only at the coordinates specified. This option specifies the Axis. Must be used in conjunction with extract_at_coordinate.' + desc='"Extract data only at the coordinates specified. This option specifies the Axis. Must be used in conjunction with extract_at_coordinate.', ) extract_at_coordinate = traits.List( traits.Float, - argstr='%s', - sep=',', + argstr="%s", + sep=",", position=2, minlen=1, maxlen=3, - desc= - '"Extract data only at the coordinates specified. This option specifies the coordinates. Must be used in conjunction with extract_at_axis. Three comma-separated numbers giving the size of each voxel in mm.' + desc='"Extract data only at the coordinates specified. This option specifies the coordinates. Must be used in conjunction with extract_at_axis. Three comma-separated numbers giving the size of each voxel in mm.', ) voxel_dims = traits.List( traits.Float, - argstr='-vox %s', - sep=',', + argstr="-vox %s", + sep=",", position=3, minlen=3, maxlen=3, - desc= - 'Three comma-separated numbers giving the size of each voxel in mm.') + desc="Three comma-separated numbers giving the size of each voxel in mm.", + ) output_datatype = traits.Enum( "nii", "float", @@ -56,10 +58,9 @@ class MRConvertInputSpec(CommandLineInputSpec): "int", "long", "double", - argstr='-output %s', + argstr="-output %s", position=2, - desc= - '"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"' + desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', ) # , usedefault=True) extension = traits.Enum( "mif", @@ -71,9 +72,9 @@ class MRConvertInputSpec(CommandLineInputSpec): "long", "double", position=2, - desc= - '"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', - usedefault=True) + desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', + usedefault=True, + ) layout = traits.Enum( "nii", "float", @@ -82,33 +83,34 @@ class MRConvertInputSpec(CommandLineInputSpec): "int", "long", "double", - argstr='-output %s', + argstr="-output %s", position=2, - desc= - 'specify the layout of the data in memory. The actual layout produced will depend on whether the output image format can support it.' + desc="specify the layout of the data in memory. The actual layout produced will depend on whether the output image format can support it.", ) resample = traits.Float( - argstr='-scale %d', + argstr="-scale %d", position=3, - units='mm', - desc='Apply scaling to the intensity values.') + units="mm", + desc="Apply scaling to the intensity values.", + ) offset_bias = traits.Float( - argstr='-scale %d', + argstr="-scale %d", position=3, - units='mm', - desc='Apply offset to the intensity values.') + units="mm", + desc="Apply offset to the intensity values.", + ) replace_NaN_with_zero = traits.Bool( - argstr='-zero', position=3, desc="Replace all NaN values with zero.") + argstr="-zero", position=3, desc="Replace all NaN values with zero." + ) prs = traits.Bool( - argstr='-prs', + argstr="-prs", position=3, - desc= - "Assume that the DW gradients are specified in the PRS frame (Siemens DICOM only)." 
+ desc="Assume that the DW gradients are specified in the PRS frame (Siemens DICOM only).", ) class MRConvertOutputSpec(TraitedSpec): - converted = File(exists=True, desc='path/name of 4D volume in voxel order') + converted = File(exists=True, desc="path/name of 4D volume in voxel order") class MRConvert(CommandLine): @@ -130,21 +132,21 @@ class MRConvert(CommandLine): >>> mrconvert.run() # doctest: +SKIP """ - _cmd = 'mrconvert' + _cmd = "mrconvert" input_spec = MRConvertInputSpec output_spec = MRConvertOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['converted'] = self.inputs.out_filename - if not isdefined(outputs['converted']): - outputs['converted'] = op.abspath(self._gen_outfilename()) + outputs["converted"] = self.inputs.out_filename + if not isdefined(outputs["converted"]): + outputs["converted"] = op.abspath(self._gen_outfilename()) else: - outputs['converted'] = op.abspath(outputs['converted']) + outputs["converted"] = op.abspath(outputs["converted"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None @@ -154,67 +156,81 @@ def _gen_outfilename(self): if isdefined(self.inputs.out_filename): outname = self.inputs.out_filename else: - outname = name + '_mrconvert.' + self.inputs.extension + outname = name + "_mrconvert." + self.inputs.extension return outname class DWI2TensorInputSpec(CommandLineInputSpec): in_file = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Diffusion-weighted images') + desc="Diffusion-weighted images", + ) out_filename = File( name_template="%s_tensor.mif", name_source="in_file", output_name="tensor", - argstr='%s', - desc='Output tensor filename', - position=-1) + argstr="%s", + desc="Output tensor filename", + position=-1, + ) encoding_file = File( - argstr='-grad %s', + argstr="-grad %s", position=2, - desc=('Encoding file supplied as a 4xN text file with ' - 'each line is in the format [ X Y Z b ], where ' - '[ X Y Z ] describe the direction of the applied ' - 'gradient, and b gives the b-value in units ' - '(1000 s/mm^2). See FSL2MRTrix()')) + desc=( + "Encoding file supplied as a 4xN text file with " + "each line is in the format [ X Y Z b ], where " + "[ X Y Z ] describe the direction of the applied " + "gradient, and b gives the b-value in units " + "(1000 s/mm^2). See FSL2MRTrix()" + ), + ) ignore_slice_by_volume = traits.List( traits.Int, - argstr='-ignoreslices %s', - sep=' ', + argstr="-ignoreslices %s", + sep=" ", position=2, minlen=2, maxlen=2, - desc=('Requires two values (i.e. [34 ' - '1] for [Slice Volume] Ignores ' - 'the image slices specified ' - 'when computing the tensor. ' - 'Slice here means the z ' - 'coordinate of the slice to be ' - 'ignored.')) + desc=( + "Requires two values (i.e. [34 " + "1] for [Slice Volume] Ignores " + "the image slices specified " + "when computing the tensor. " + "Slice here means the z " + "coordinate of the slice to be " + "ignored." + ), + ) ignore_volumes = traits.List( traits.Int, - argstr='-ignorevolumes %s', - sep=' ', + argstr="-ignorevolumes %s", + sep=" ", position=2, minlen=1, - desc=('Requires two values (i.e. [2 5 6] for ' - '[Volumes] Ignores the image volumes ' - 'specified when computing the tensor.')) + desc=( + "Requires two values (i.e. [2 5 6] for " + "[Volumes] Ignores the image volumes " + "specified when computing the tensor." 
+ ), + ) + mask = File( + exists=True, + argstr="-mask %s", + desc="Only perform computation within the specified binary brain mask image.", + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc=("Do not display information messages or progress " - "status.")) - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc=("Do not display information messages or progress status."), + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class DWI2TensorOutputSpec(TraitedSpec): - tensor = File( - exists=True, desc='path/name of output diffusion tensor image') + tensor = File(exists=True, desc="path/name of output diffusion tensor image") class DWI2Tensor(CommandLine): @@ -233,7 +249,7 @@ class DWI2Tensor(CommandLine): >>> dwi2tensor.run() # doctest: +SKIP """ - _cmd = 'dwi2tensor' + _cmd = "dwi2tensor" input_spec = DWI2TensorInputSpec output_spec = DWI2TensorOutputSpec @@ -241,25 +257,26 @@ class DWI2Tensor(CommandLine): class Tensor2VectorInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Diffusion tensor image') + desc="Diffusion tensor image", + ) out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output vector filename') + genfile=True, argstr="%s", position=-1, desc="Output vector filename" + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class Tensor2VectorOutputSpec(TraitedSpec): vector = File( exists=True, - desc= - 'the output image of the major eigenvectors of the diffusion tensor image.' 
+ desc="the output image of the major eigenvectors of the diffusion tensor image.", ) @@ -276,55 +293,56 @@ class Tensor2Vector(CommandLine): >>> tensor2vector.run() # doctest: +SKIP """ - _cmd = 'tensor2vector' + _cmd = "tensor2vector" input_spec = Tensor2VectorInputSpec output_spec = Tensor2VectorOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['vector'] = self.inputs.out_filename - if not isdefined(outputs['vector']): - outputs['vector'] = op.abspath(self._gen_outfilename()) + outputs["vector"] = self.inputs.out_filename + if not isdefined(outputs["vector"]): + outputs["vector"] = op.abspath(self._gen_outfilename()) else: - outputs['vector'] = op.abspath(outputs['vector']) + outputs["vector"] = op.abspath(outputs["vector"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_vector.mif' + return name + "_vector.mif" class Tensor2FractionalAnisotropyInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Diffusion tensor image') + desc="Diffusion tensor image", + ) out_filename = File( genfile=True, - argstr='%s', + argstr="%s", position=-1, - desc='Output Fractional Anisotropy filename') + desc="Output Fractional Anisotropy filename", + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class Tensor2FractionalAnisotropyOutputSpec(TraitedSpec): FA = File( exists=True, - desc= - 'the output image of the major eigenvectors of the diffusion tensor image.' 
+ desc="the output image of the major eigenvectors of the diffusion tensor image.", ) @@ -341,55 +359,56 @@ class Tensor2FractionalAnisotropy(CommandLine): >>> tensor2FA.run() # doctest: +SKIP """ - _cmd = 'tensor2FA' + _cmd = "tensor2FA" input_spec = Tensor2FractionalAnisotropyInputSpec output_spec = Tensor2FractionalAnisotropyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['FA'] = self.inputs.out_filename - if not isdefined(outputs['FA']): - outputs['FA'] = op.abspath(self._gen_outfilename()) + outputs["FA"] = self.inputs.out_filename + if not isdefined(outputs["FA"]): + outputs["FA"] = op.abspath(self._gen_outfilename()) else: - outputs['FA'] = op.abspath(outputs['FA']) + outputs["FA"] = op.abspath(outputs["FA"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_FA.mif' + return name + "_FA.mif" class Tensor2ApparentDiffusionInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Diffusion tensor image') + desc="Diffusion tensor image", + ) out_filename = File( genfile=True, - argstr='%s', + argstr="%s", position=-1, - desc='Output Fractional Anisotropy filename') + desc="Output Fractional Anisotropy filename", + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class Tensor2ApparentDiffusionOutputSpec(TraitedSpec): ADC = File( exists=True, - desc= - 'the output image of the major eigenvectors of the diffusion tensor image.' 
+ desc="the output image of the major eigenvectors of the diffusion tensor image.", ) @@ -406,49 +425,51 @@ class Tensor2ApparentDiffusion(CommandLine): >>> tensor2ADC.run() # doctest: +SKIP """ - _cmd = 'tensor2ADC' + _cmd = "tensor2ADC" input_spec = Tensor2ApparentDiffusionInputSpec output_spec = Tensor2ApparentDiffusionOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['ADC'] = self.inputs.out_filename - if not isdefined(outputs['ADC']): - outputs['ADC'] = op.abspath(self._gen_outfilename()) + outputs["ADC"] = self.inputs.out_filename + if not isdefined(outputs["ADC"]): + outputs["ADC"] = op.abspath(self._gen_outfilename()) else: - outputs['ADC'] = op.abspath(outputs['ADC']) + outputs["ADC"] = op.abspath(outputs["ADC"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_ADC.mif' + return name + "_ADC.mif" class MRMultiplyInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input images to be multiplied') + desc="Input images to be multiplied", + ) out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output image filename') + genfile=True, argstr="%s", position=-1, desc="Output image filename" + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MRMultiplyOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output image of the multiplication') + out_file = File(exists=True, desc="the output image of the multiplication") class MRMultiply(CommandLine): @@ -464,43 +485,44 @@ class MRMultiply(CommandLine): >>> MRmult.run() # doctest: +SKIP """ - _cmd = 'mrmult' + _cmd = "mrmult" input_spec = MRMultiplyInputSpec output_spec = MRMultiplyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_filename - if not isdefined(outputs['out_file']): - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = self.inputs.out_filename + if not isdefined(outputs["out_file"]): + outputs["out_file"] = op.abspath(self._gen_outfilename()) else: - outputs['out_file'] = op.abspath(outputs['out_file']) + outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_files[0]) - return name + '_MRMult.mif' + return name + "_MRMult.mif" class MRTrixViewerInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input images to be viewed') + desc="Input images to be viewed", + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress 
status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MRTrixViewerOutputSpec(TraitedSpec): @@ -520,7 +542,7 @@ class MRTrixViewer(CommandLine): >>> MRview.run() # doctest: +SKIP """ - _cmd = 'mrview' + _cmd = "mrview" input_spec = MRTrixViewerInputSpec output_spec = MRTrixViewerOutputSpec @@ -531,10 +553,11 @@ def _list_outputs(self): class MRTrixInfoInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input images to be read') + desc="Input images to be read", + ) class MRTrixInfoOutputSpec(TraitedSpec): @@ -554,7 +577,7 @@ class MRTrixInfo(CommandLine): >>> MRinfo.run() # doctest: +SKIP """ - _cmd = 'mrinfo' + _cmd = "mrinfo" input_spec = MRTrixInfoInputSpec output_spec = MRTrixInfoOutputSpec @@ -565,38 +588,35 @@ def _list_outputs(self): class GenerateWhiteMatterMaskInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-3, - desc='Diffusion-weighted images') + desc="Diffusion-weighted images", + ) binary_mask = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='Binary brain mask') + exists=True, argstr="%s", mandatory=True, position=-2, desc="Binary brain mask" + ) out_WMProb_filename = File( genfile=True, - argstr='%s', + argstr="%s", position=-1, - desc='Output WM probability image filename') + desc="Output WM probability image filename", + ) encoding_file = File( exists=True, - argstr='-grad %s', + argstr="-grad %s", mandatory=True, position=1, - desc= - 'Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix' + desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). 
See FSL2MRTrix", ) noise_level_margin = traits.Float( - argstr='-margin %s', - desc= - 'Specify the width of the margin on either side of the image to be used to estimate the noise level (default = 10)' + argstr="-margin %s", + desc="Specify the width of the margin on either side of the image to be used to estimate the noise level (default = 10)", ) class GenerateWhiteMatterMaskOutputSpec(TraitedSpec): - WMprobabilitymap = File(exists=True, desc='WMprobabilitymap') + WMprobabilitymap = File(exists=True, desc="WMprobabilitymap") class GenerateWhiteMatterMask(CommandLine): @@ -613,51 +633,53 @@ class GenerateWhiteMatterMask(CommandLine): >>> genWM.run() # doctest: +SKIP """ - _cmd = 'gen_WM_mask' + _cmd = "gen_WM_mask" input_spec = GenerateWhiteMatterMaskInputSpec output_spec = GenerateWhiteMatterMaskOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['WMprobabilitymap'] = op.abspath(self._gen_outfilename()) + outputs["WMprobabilitymap"] = op.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_WMProb_filename': + if name == "out_WMProb_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_WMProb.mif' + return name + "_WMProb.mif" class ErodeInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input mask image to be eroded') + desc="Input mask image to be eroded", + ) out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output image filename') + genfile=True, argstr="%s", position=-1, desc="Output image filename" + ) number_of_passes = traits.Int( - argstr='-npass %s', desc='the number of passes (default: 1)') + argstr="-npass %s", desc="the number of passes (default: 1)" + ) dilate = traits.Bool( - argstr='-dilate', - position=1, - desc="Perform dilation rather than erosion") + argstr="-dilate", position=1, desc="Perform dilation rather than erosion" + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class ErodeOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output image') + out_file = File(exists=True, desc="the output image") class Erode(CommandLine): @@ -672,64 +694,63 @@ class Erode(CommandLine): >>> erode.inputs.in_file = 'mask.mif' >>> erode.run() # doctest: +SKIP """ - _cmd = 'erode' + + _cmd = "erode" input_spec = ErodeInputSpec output_spec = ErodeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_filename - if not isdefined(outputs['out_file']): - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = self.inputs.out_filename + if not isdefined(outputs["out_file"]): + outputs["out_file"] = op.abspath(self._gen_outfilename()) else: - outputs['out_file'] = op.abspath(outputs['out_file']) + outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + 
'_erode.mif' + return name + "_erode.mif" class ThresholdInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='The input image to be thresholded') + desc="The input image to be thresholded", + ) out_filename = File( - genfile=True, - argstr='%s', - position=-1, - desc='The output binary image mask.') + genfile=True, argstr="%s", position=-1, desc="The output binary image mask." + ) absolute_threshold_value = traits.Float( - argstr='-abs %s', - desc='Specify threshold value as absolute intensity.') + argstr="-abs %s", desc="Specify threshold value as absolute intensity." + ) percentage_threshold_value = traits.Float( - argstr='-percent %s', - desc= - 'Specify threshold value as a percentage of the peak intensity in the input image.' + argstr="-percent %s", + desc="Specify threshold value as a percentage of the peak intensity in the input image.", ) - invert = traits.Bool( - argstr='-invert', position=1, desc="Invert output binary mask") + invert = traits.Bool(argstr="-invert", position=1, desc="Invert output binary mask") replace_zeros_with_NaN = traits.Bool( - argstr='-nan', position=1, desc="Replace all zero values with NaN") + argstr="-nan", position=1, desc="Replace all zero values with NaN" + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class ThresholdOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='The output binary image mask.') + out_file = File(exists=True, desc="The output binary image mask.") class Threshold(CommandLine): @@ -750,49 +771,51 @@ class Threshold(CommandLine): >>> thresh.run() # doctest: +SKIP """ - _cmd = 'threshold' + _cmd = "threshold" input_spec = ThresholdInputSpec output_spec = ThresholdOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_filename - if not isdefined(outputs['out_file']): - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = self.inputs.out_filename + if not isdefined(outputs["out_file"]): + outputs["out_file"] = op.abspath(self._gen_outfilename()) else: - outputs['out_file'] = op.abspath(outputs['out_file']) + outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_thresh.mif' + return name + "_thresh.mif" class MedianFilter3DInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input images to be smoothed') + desc="Input images to be smoothed", + ) out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output image filename') + genfile=True, argstr="%s", position=-1, desc="Output image filename" + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = 
traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MedianFilter3DOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output image') + out_file = File(exists=True, desc="the output image") class MedianFilter3D(CommandLine): @@ -808,82 +831,93 @@ class MedianFilter3D(CommandLine): >>> median3d.run() # doctest: +SKIP """ - _cmd = 'median3D' + _cmd = "median3D" input_spec = MedianFilter3DInputSpec output_spec = MedianFilter3DOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_filename - if not isdefined(outputs['out_file']): - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = self.inputs.out_filename + if not isdefined(outputs["out_file"]): + outputs["out_file"] = op.abspath(self._gen_outfilename()) else: - outputs['out_file'] = op.abspath(outputs['out_file']) + outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_median3D.mif' + return name + "_median3D.mif" class MRTransformInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input images to be transformed') - out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output image') + desc="Input images to be transformed", + ) + out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output image") invert = traits.Bool( - argstr='-inverse', + argstr="-inverse", + position=1, + desc="Invert the specified transform before using it", + ) + linear_transform = File( + exists=True, + argstr="-linear %s", position=1, - desc="Invert the specified transform before using it") + desc=( + "Specify a linear transform to apply, in the form of a 3x4 or 4x4 ascii file. " + "Note the standard reverse convention is used, " + "where the transform maps points in the template image to the moving image. " + "Note that the reverse convention is still assumed even if no -template image is supplied." + ), + ) replace_transform = traits.Bool( - argstr='-replace', + argstr="-replace", position=1, - desc= - "replace the current transform by that specified, rather than applying it to the current transform" + desc="replace the current transform by that specified, rather than applying it to the current transform", ) transformation_file = File( exists=True, - argstr='-transform %s', + argstr="-transform %s", position=1, - desc='The transform to apply, in the form of a 4x4 ascii file.') + desc="The transform to apply, in the form of a 4x4 ascii file.", + ) template_image = File( exists=True, - argstr='-template %s', + argstr="-template %s", position=1, - desc='Reslice the input image to match the specified template image.') + desc="Reslice the input image to match the specified template image.", + ) reference_image = File( exists=True, - argstr='-reference %s', + argstr="-reference %s", position=1, - desc= - 'in case the transform supplied maps from the input image onto a reference image, use this option to specify the reference. Note that this implicitly sets the -replace option.' + desc="in case the transform supplied maps from the input image onto a reference image, use this option to specify the reference. 
Note that this implicitly sets the -replace option.", ) flip_x = traits.Bool( - argstr='-flipx', + argstr="-flipx", position=1, - desc= - "assume the transform is supplied assuming a coordinate system with the x-axis reversed relative to the MRtrix convention (i.e. x increases from right to left). This is required to handle transform matrices produced by FSL's FLIRT command. This is only used in conjunction with the -reference option." + desc="assume the transform is supplied assuming a coordinate system with the x-axis reversed relative to the MRtrix convention (i.e. x increases from right to left). This is required to handle transform matrices produced by FSL's FLIRT command. This is only used in conjunction with the -reference option.", ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MRTransformOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output image of the transformation') + out_file = File(exists=True, desc="the output image of the transformation") class MRTransform(CommandLine): @@ -898,25 +932,25 @@ class MRTransform(CommandLine): >>> MRxform.run() # doctest: +SKIP """ - _cmd = 'mrtransform' + _cmd = "mrtransform" input_spec = MRTransformInputSpec output_spec = MRTransformOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_filename - if not isdefined(outputs['out_file']): - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = self.inputs.out_filename + if not isdefined(outputs["out_file"]): + outputs["out_file"] = op.abspath(self._gen_outfilename()) else: - outputs['out_file'] = op.abspath(outputs['out_file']) + outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_files[0]) - return name + '_MRTransform.mif' + return name + "_MRTransform.mif" diff --git a/nipype/interfaces/mrtrix/tensors.py b/nipype/interfaces/mrtrix/tensors.py index 04c901f1f9..3680282b89 100644 --- a/nipype/interfaces/mrtrix/tensors.py +++ b/nipype/interfaces/mrtrix/tensors.py @@ -1,50 +1,51 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os.path as op import numpy as np from ... 
import logging from ...utils.filemanip import split_filename -from ..base import (CommandLineInputSpec, CommandLine, BaseInterface, traits, - File, TraitedSpec, isdefined) -iflogger = logging.getLogger('nipype.interface') +from ..base import ( + CommandLineInputSpec, + CommandLine, + BaseInterface, + traits, + File, + TraitedSpec, + isdefined, +) + +iflogger = logging.getLogger("nipype.interface") class DWI2SphericalHarmonicsImageInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Diffusion-weighted images') - out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output filename') + desc="Diffusion-weighted images", + ) + out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename") encoding_file = File( exists=True, - argstr='-grad %s', + argstr="-grad %s", mandatory=True, position=1, - desc= - 'Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix' + desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix", ) maximum_harmonic_order = traits.Float( - argstr='-lmax %s', - desc= - 'set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.' + argstr="-lmax %s", + desc="set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.", ) normalise = traits.Bool( - argstr='-normalise', - position=3, - desc="normalise the DW signal to the b=0 image") + argstr="-normalise", position=3, desc="normalise the DW signal to the b=0 image" + ) class DWI2SphericalHarmonicsImageOutputSpec(TraitedSpec): - spherical_harmonics_image = File( - exists=True, desc='Spherical harmonics image') + spherical_harmonics_image = File(exists=True, desc="Spherical harmonics image") class DWI2SphericalHarmonicsImage(CommandLine): @@ -85,110 +86,100 @@ class DWI2SphericalHarmonicsImage(CommandLine): >>> dwi2SH.inputs.encoding_file = 'encoding.txt' >>> dwi2SH.run() # doctest: +SKIP """ - _cmd = 'dwi2SH' + + _cmd = "dwi2SH" input_spec = DWI2SphericalHarmonicsImageInputSpec output_spec = DWI2SphericalHarmonicsImageOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['spherical_harmonics_image'] = self.inputs.out_filename - if not isdefined(outputs['spherical_harmonics_image']): - outputs['spherical_harmonics_image'] = op.abspath( - self._gen_outfilename()) + outputs["spherical_harmonics_image"] = self.inputs.out_filename + if not isdefined(outputs["spherical_harmonics_image"]): + outputs["spherical_harmonics_image"] = op.abspath(self._gen_outfilename()) else: - outputs['spherical_harmonics_image'] = op.abspath( - outputs['spherical_harmonics_image']) + outputs["spherical_harmonics_image"] = op.abspath( + outputs["spherical_harmonics_image"] + ) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_SH.mif' + return name + "_SH.mif" class 
ConstrainedSphericalDeconvolutionInputSpec(CommandLineInputSpec):
     in_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=-3,
-        desc='diffusion-weighted image')
+        desc="diffusion-weighted image",
+    )
     response_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=-2,
-        desc=
-        'the diffusion-weighted signal response function for a single fibre population (see EstimateResponse)'
+        desc="the diffusion-weighted signal response function for a single fibre population (see EstimateResponse)",
     )
-    out_filename = File(
-        genfile=True, argstr='%s', position=-1, desc='Output filename')
+    out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename")
     mask_image = File(
         exists=True,
-        argstr='-mask %s',
+        argstr="-mask %s",
         position=2,
-        desc=
-        'only perform computation within the specified binary brain mask image'
+        desc="only perform computation within the specified binary brain mask image",
     )
     encoding_file = File(
         exists=True,
-        argstr='-grad %s',
+        argstr="-grad %s",
         position=1,
-        desc=
-        'Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix'
+        desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix",
     )
     filter_file = File(
         exists=True,
-        argstr='-filter %s',
+        argstr="-filter %s",
         position=-2,
-        desc=
-        'a text file containing the filtering coefficients for each even harmonic order.'
-        'the linear frequency filtering parameters used for the initial linear spherical deconvolution step (default = [ 1 1 1 0 0 ]).'
+        desc="a text file containing the filtering coefficients for each even harmonic order: "
+        "the linear frequency filtering parameters used for the initial linear spherical deconvolution step (default = [ 1 1 1 0 0 ]).",
     )
     lambda_value = traits.Float(
-        argstr='-lambda %s',
-        desc=
-        'the regularisation parameter lambda that controls the strength of the constraint (default = 1.0).'
+        argstr="-lambda %s",
+        desc="the regularisation parameter lambda that controls the strength of the constraint (default = 1.0).",
     )
     maximum_harmonic_order = traits.Int(
-        argstr='-lmax %s',
-        desc=
-        'set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.'
+        argstr="-lmax %s",
+        desc="set the maximum harmonic order for the output series. 
By default, the program will use the highest possible lmax given the number of diffusion-weighted images.",
     )
     threshold_value = traits.Float(
-        argstr='-threshold %s',
-        desc=
-        'the threshold below which the amplitude of the FOD is assumed to be zero, expressed as a fraction of the mean value of the initial FOD (default = 0.1)'
+        argstr="-threshold %s",
+        desc="the threshold below which the amplitude of the FOD is assumed to be zero, expressed as a fraction of the mean value of the initial FOD (default = 0.1)",
     )
     iterations = traits.Int(
-        argstr='-niter %s',
-        desc=
-        'the maximum number of iterations to perform for each voxel (default = 50)'
+        argstr="-niter %s",
+        desc="the maximum number of iterations to perform for each voxel (default = 50)",
     )
-    debug = traits.Bool(argstr='-debug', desc='Display debugging messages.')
+    debug = traits.Bool(argstr="-debug", desc="Display debugging messages.")
     directions_file = File(
         exists=True,
-        argstr='-directions %s',
+        argstr="-directions %s",
         position=-2,
-        desc=
-        'a text file containing the [ el az ] pairs for the directions: Specify the directions over which to apply the non-negativity constraint (by default, the built-in 300 direction set is used)'
+        desc="a text file containing the [ el az ] pairs for the directions: Specify the directions over which to apply the non-negativity constraint (by default, the built-in 300 direction set is used)",
     )
     normalise = traits.Bool(
-        argstr='-normalise',
-        position=3,
-        desc="normalise the DW signal to the b=0 image")
+        argstr="-normalise", position=3, desc="normalise the DW signal to the b=0 image"
+    )


 class ConstrainedSphericalDeconvolutionOutputSpec(TraitedSpec):
-    spherical_harmonics_image = File(
-        exists=True, desc='Spherical harmonics image')
+    spherical_harmonics_image = File(exists=True, desc="Spherical harmonics image")
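
Taken together, these specs drive the usual two-step CSD workflow: estimate a single-fibre response, then deconvolve. A minimal sketch with hypothetical filenames (outputs default to <input>_ER.txt and <input>_CSD.mif; EstimateResponseForSH is defined further down in this file):

    import nipype.interfaces.mrtrix as mrt

    estresp = mrt.EstimateResponseForSH()
    estresp.inputs.in_file = 'dwi.mif'             # hypothetical DWI
    estresp.inputs.mask_image = 'sf_mask.mif'      # single-fibre voxel mask
    estresp.inputs.encoding_file = 'encoding.txt'  # 4xN [ X Y Z b ] table
    estresp.run()  # doctest: +SKIP

    csdeconv = mrt.ConstrainedSphericalDeconvolution()
    csdeconv.inputs.in_file = 'dwi.mif'
    csdeconv.inputs.response_file = 'dwi_ER.txt'
    csdeconv.inputs.encoding_file = 'encoding.txt'
    csdeconv.run()  # doctest: +SKIP
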
Each volume in the output image corresponds to a different spherical harmonic component, according to the following convention: @@ -223,72 +214,70 @@ class ConstrainedSphericalDeconvolution(CommandLine): >>> csdeconv.run() # doctest: +SKIP """ - _cmd = 'csdeconv' + _cmd = "csdeconv" input_spec = ConstrainedSphericalDeconvolutionInputSpec output_spec = ConstrainedSphericalDeconvolutionOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['spherical_harmonics_image'] = self.inputs.out_filename - if not isdefined(outputs['spherical_harmonics_image']): - outputs['spherical_harmonics_image'] = op.abspath( - self._gen_outfilename()) + outputs["spherical_harmonics_image"] = self.inputs.out_filename + if not isdefined(outputs["spherical_harmonics_image"]): + outputs["spherical_harmonics_image"] = op.abspath(self._gen_outfilename()) else: - outputs['spherical_harmonics_image'] = op.abspath( - outputs['spherical_harmonics_image']) + outputs["spherical_harmonics_image"] = op.abspath( + outputs["spherical_harmonics_image"] + ) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_CSD.mif' + return name + "_CSD.mif" class EstimateResponseForSHInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-3, - desc='Diffusion-weighted images') + desc="Diffusion-weighted images", + ) mask_image = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, - desc= - 'only perform computation within the specified binary brain mask image' + desc="only perform computation within the specified binary brain mask image", ) - out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output filename') + out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename") encoding_file = File( exists=True, - argstr='-grad %s', + argstr="-grad %s", mandatory=True, position=1, - desc= - 'Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix' + desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix", ) maximum_harmonic_order = traits.Int( - argstr='-lmax %s', - desc= - 'set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.' + argstr="-lmax %s", + desc="set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.", ) normalise = traits.Bool( - argstr='-normalise', desc='normalise the DW signal to the b=0 image') + argstr="-normalise", desc="normalise the DW signal to the b=0 image" + ) quiet = traits.Bool( - argstr='-quiet', - desc='Do not display information messages or progress status.') - debug = traits.Bool(argstr='-debug', desc='Display debugging messages.') + argstr="-quiet", desc="Do not display information messages or progress status." 
+ ) + debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") class EstimateResponseForSHOutputSpec(TraitedSpec): - response = File(exists=True, desc='Spherical harmonics image') + response = File(exists=True, desc="Spherical harmonics image") class EstimateResponseForSH(CommandLine): @@ -305,28 +294,29 @@ class EstimateResponseForSH(CommandLine): >>> estresp.inputs.encoding_file = 'encoding.txt' >>> estresp.run() # doctest: +SKIP """ - _cmd = 'estimate_response' + + _cmd = "estimate_response" input_spec = EstimateResponseForSHInputSpec output_spec = EstimateResponseForSHOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['response'] = self.inputs.out_filename - if not isdefined(outputs['response']): - outputs['response'] = op.abspath(self._gen_outfilename()) + outputs["response"] = self.inputs.out_filename + if not isdefined(outputs["response"]): + outputs["response"] = op.abspath(self._gen_outfilename()) else: - outputs['response'] = op.abspath(outputs['response']) + outputs["response"] = op.abspath(outputs["response"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_ER.txt' + return name + "_ER.txt" def concat_files(bvec_file, bval_file, invert_x, invert_y, invert_z): @@ -336,42 +326,47 @@ def concat_files(bvec_file, bval_file, invert_x, invert_y, invert_z): bvecs = np.transpose(bvecs) if invert_x: bvecs[0, :] = -bvecs[0, :] - iflogger.info('Inverting b-vectors in the x direction') + iflogger.info("Inverting b-vectors in the x direction") if invert_y: bvecs[1, :] = -bvecs[1, :] - iflogger.info('Inverting b-vectors in the y direction') + iflogger.info("Inverting b-vectors in the y direction") if invert_z: bvecs[2, :] = -bvecs[2, :] - iflogger.info('Inverting b-vectors in the z direction') + iflogger.info("Inverting b-vectors in the z direction") iflogger.info(np.shape(bvecs)) iflogger.info(np.shape(bvals)) encoding = np.transpose(np.vstack((bvecs, bvals))) _, bvec, _ = split_filename(bvec_file) _, bval, _ = split_filename(bval_file) - out_encoding_file = bvec + '_' + bval + '.txt' + out_encoding_file = bvec + "_" + bval + ".txt" np.savetxt(out_encoding_file, encoding) return out_encoding_file class FSL2MRTrixInputSpec(TraitedSpec): bvec_file = File( - exists=True, mandatory=True, desc='FSL b-vectors file (3xN text file)') + exists=True, mandatory=True, desc="FSL b-vectors file (3xN text file)" + ) bval_file = File( - exists=True, mandatory=True, desc='FSL b-values file (1xN text file)') + exists=True, mandatory=True, desc="FSL b-values file (1xN text file)" + ) invert_x = traits.Bool( - False, usedefault=True, desc='Inverts the b-vectors along the x-axis') + False, usedefault=True, desc="Inverts the b-vectors along the x-axis" + ) invert_y = traits.Bool( - False, usedefault=True, desc='Inverts the b-vectors along the y-axis') + False, usedefault=True, desc="Inverts the b-vectors along the y-axis" + ) invert_z = traits.Bool( - False, usedefault=True, desc='Inverts the b-vectors along the z-axis') - out_encoding_file = File(genfile=True, desc='Output encoding filename') + False, usedefault=True, desc="Inverts the b-vectors along the z-axis" + ) + out_encoding_file = File(genfile=True, desc="Output encoding filename") class FSL2MRTrixOutputSpec(TraitedSpec): encoding_file = File( - desc= - 'The gradient encoding, supplied as a 4xN text file with 
each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient'
-        'and b gives the b-value in units (1000 s/mm^2).')
+        desc="The gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient "
+        "and b gives the b-value in units (1000 s/mm^2)."
+    )


 class FSL2MRTrix(BaseInterface):
@@ -391,23 +386,27 @@ class FSL2MRTrix(BaseInterface):
     >>> fsl2mrtrix.inputs.invert_y = True
     >>> fsl2mrtrix.run() # doctest: +SKIP
     """
+
     input_spec = FSL2MRTrixInputSpec
     output_spec = FSL2MRTrixOutputSpec

     def _run_interface(self, runtime):
-        encoding = concat_files(self.inputs.bvec_file, self.inputs.bval_file,
-                                self.inputs.invert_x, self.inputs.invert_y,
-                                self.inputs.invert_z)
+        encoding = concat_files(
+            self.inputs.bvec_file,
+            self.inputs.bval_file,
+            self.inputs.invert_x,
+            self.inputs.invert_y,
+            self.inputs.invert_z,
+        )
         return runtime

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['encoding_file'] = op.abspath(
-            self._gen_filename('out_encoding_file'))
+        outputs["encoding_file"] = op.abspath(self._gen_filename("out_encoding_file"))
         return outputs

     def _gen_filename(self, name):
-        if name == 'out_encoding_file':
+        if name == "out_encoding_file":
             return self._gen_outfilename()
         else:
             return None

@@ -415,40 +414,40 @@ def _gen_filename(self, name):
     def _gen_outfilename(self):
         _, bvec, _ = split_filename(self.inputs.bvec_file)
         _, bval, _ = split_filename(self.inputs.bval_file)
-        return bvec + '_' + bval + '.txt'
+        return bvec + "_" + bval + ".txt"
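
The conversion itself happens in concat_files above; the interface is a thin wrapper around it. A minimal sketch with hypothetical FSL-style inputs (mirrors the class docstring; writes bvecs_bvals.txt, a 4xN [ X Y Z b ] table, into the working directory):

    import nipype.interfaces.mrtrix as mrt

    fsl2mrtrix = mrt.FSL2MRTrix()
    fsl2mrtrix.inputs.bvec_file = 'bvecs'  # hypothetical FSL 3xN b-vectors
    fsl2mrtrix.inputs.bval_file = 'bvals'  # hypothetical FSL 1xN b-values
    fsl2mrtrix.inputs.invert_y = True      # only if the acquisition requires it
    fsl2mrtrix.run()  # doctest: +SKIP
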

 class GenerateDirectionsInputSpec(CommandLineInputSpec):
     num_dirs = traits.Int(
         mandatory=True,
-        argstr='%s',
+        argstr="%s",
         position=-2,
-        desc='the number of directions to generate.')
+        desc="the number of directions to generate.",
+    )
     power = traits.Float(
-        argstr='-power %s',
-        desc='specify exponent to use for repulsion power law.')
+        argstr="-power %s", desc="specify exponent to use for repulsion power law."
+    )
     niter = traits.Int(
-        argstr='-niter %s',
-        desc='specify the maximum number of iterations to perform.')
-    display_info = traits.Bool(
-        argstr='-info', desc='Display information messages.')
+        argstr="-niter %s", desc="specify the maximum number of iterations to perform."
+    )
+    display_info = traits.Bool(argstr="-info", desc="Display information messages.")
     quiet_display = traits.Bool(
-        argstr='-quiet',
-        desc='do not display information messages or progress status.')
-    display_debug = traits.Bool(
-        argstr='-debug', desc='Display debugging messages.')
+        argstr="-quiet", desc="do not display information messages or progress status."
+    )
+    display_debug = traits.Bool(argstr="-debug", desc="Display debugging messages.")
     out_file = File(
-        name_source=['num_dirs'],
-        name_template='directions_%d.txt',
-        argstr='%s',
+        name_source=["num_dirs"],
+        name_template="directions_%d.txt",
+        argstr="%s",
         hash_files=False,
         position=-1,
-        desc='the text file to write the directions to, as [ az el ] pairs.')
+        desc="the text file to write the directions to, as [ az el ] pairs.",
+    )


 class GenerateDirectionsOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='directions file')
+    out_file = File(exists=True, desc="directions file")


 class GenerateDirections(CommandLine):
@@ -464,7 +463,7 @@ class GenerateDirections(CommandLine):
     >>> gendir.run() # doctest: +SKIP
     """
-    _cmd = 'gendir'
+    _cmd = "gendir"
     input_spec = GenerateDirectionsInputSpec
     output_spec = GenerateDirectionsOutputSpec

@@ -472,58 +471,57 @@ class FindShPeaksInputSpec(CommandLineInputSpec):
     in_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=-3,
-        desc='the input image of SH coefficients.')
+        desc="the input image of SH coefficients.",
+    )
     directions_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=-2,
-        desc='the set of directions to use as seeds for the peak finding')
+        desc="the set of directions to use as seeds for the peak finding",
+    )
     peaks_image = File(
         exists=True,
-        argstr='-peaks %s',
-        desc=
-        'the program will try to find the peaks that most closely match those in the image provided'
+        argstr="-peaks %s",
+        desc="the program will try to find the peaks that most closely match those in the image provided",
     )
     num_peaks = traits.Int(
-        argstr='-num %s', desc='the number of peaks to extract (default is 3)')
+        argstr="-num %s", desc="the number of peaks to extract (default is 3)"
+    )
     peak_directions = traits.List(
         traits.Float,
-        argstr='-direction %s',
-        sep=' ',
+        argstr="-direction %s",
+        sep=" ",
         minlen=2,
         maxlen=2,
-        desc=
-        'phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option '
-        ' phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)'
+        desc="phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option "
+        " phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)",
     )
     peak_threshold = traits.Float(
-        argstr='-threshold %s',
-        desc=
-        'only peak amplitudes greater than the threshold will be considered')
-    display_info = traits.Bool(
-        argstr='-info', desc='Display information messages.')
+        argstr="-threshold %s",
+        desc="only peak amplitudes greater than the threshold will be considered",
+    )
+    display_info = traits.Bool(argstr="-info", desc="Display information messages.")
     quiet_display = traits.Bool(
-        argstr='-quiet',
-        desc='do not display information messages or progress status.')
-    display_debug = traits.Bool(
-        argstr='-debug', desc='Display debugging messages.')
+        argstr="-quiet", desc="do not display information messages or progress status."
+    )
+    display_debug = traits.Bool(argstr="-debug", desc="Display debugging messages.")
     out_file = File(
         name_template="%s_peak_dirs.mif",
         keep_extension=False,
-        argstr='%s',
+        argstr="%s",
         hash_files=False,
         position=-1,
-        desc= 
Each volume corresponds to the x, y & z component of each peak direction vector in turn', - name_source=["in_file"]) + desc="the output image. Each volume corresponds to the x, y & z component of each peak direction vector in turn", + name_source=["in_file"], + ) class FindShPeaksOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Peak directions image') + out_file = File(exists=True, desc="Peak directions image") class FindShPeaks(CommandLine): @@ -541,7 +539,7 @@ class FindShPeaks(CommandLine): >>> shpeaks.run() # doctest: +SKIP """ - _cmd = 'find_SH_peaks' + _cmd = "find_SH_peaks" input_spec = FindShPeaksInputSpec output_spec = FindShPeaksOutputSpec @@ -549,49 +547,46 @@ class FindShPeaks(CommandLine): class Directions2AmplitudeInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc= - 'the input directions image. Each volume corresponds to the x, y & z component of each direction vector in turn.' + desc="the input directions image. Each volume corresponds to the x, y & z component of each direction vector in turn.", ) peaks_image = File( exists=True, - argstr='-peaks %s', - desc= - 'the program will try to find the peaks that most closely match those in the image provided' + argstr="-peaks %s", + desc="the program will try to find the peaks that most closely match those in the image provided", ) num_peaks = traits.Int( - argstr='-num %s', desc='the number of peaks to extract (default is 3)') + argstr="-num %s", desc="the number of peaks to extract (default is 3)" + ) peak_directions = traits.List( traits.Float, - argstr='-direction %s', - sep=' ', + argstr="-direction %s", + sep=" ", minlen=2, maxlen=2, - desc= - 'phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option ' - ' phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)' + desc="phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option " + " phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)", ) - display_info = traits.Bool( - argstr='-info', desc='Display information messages.') + display_info = traits.Bool(argstr="-info", desc="Display information messages.") quiet_display = traits.Bool( - argstr='-quiet', - desc='do not display information messages or progress status.') - display_debug = traits.Bool( - argstr='-debug', desc='Display debugging messages.') + argstr="-quiet", desc="do not display information messages or progress status." 
+ ) + display_debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") out_file = File( name_template="%s_amplitudes.mif", keep_extension=False, - argstr='%s', + argstr="%s", hash_files=False, position=-1, - desc='the output amplitudes image', - name_source=["in_file"]) + desc="the output amplitudes image", + name_source=["in_file"], + ) class Directions2AmplitudeOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='amplitudes image') + out_file = File(exists=True, desc="amplitudes image") class Directions2Amplitude(CommandLine): @@ -607,6 +602,6 @@ class Directions2Amplitude(CommandLine): >>> amplitudes.run() # doctest: +SKIP """ - _cmd = 'dir2amp' + _cmd = "dir2amp" input_spec = Directions2AmplitudeInputSpec output_spec = Directions2AmplitudeOutputSpec diff --git a/nipype/interfaces/mrtrix/tests/__init__.py b/nipype/interfaces/mrtrix/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/mrtrix/tests/__init__.py +++ b/nipype/interfaces/mrtrix/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mrtrix/tests/test_auto_ConstrainedSphericalDeconvolution.py b/nipype/interfaces/mrtrix/tests/test_auto_ConstrainedSphericalDeconvolution.py index 78a275f6a7..ad93f35b9a 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_ConstrainedSphericalDeconvolution.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_ConstrainedSphericalDeconvolution.py @@ -1,18 +1,23 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tensors import ConstrainedSphericalDeconvolution def test_ConstrainedSphericalDeconvolution_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict(argstr='-debug', ), + args=dict( + argstr="%s", + ), + debug=dict( + argstr="-debug", + ), directions_file=dict( - argstr='-directions %s', + argstr="-directions %s", + extensions=None, position=-2, ), encoding_file=dict( - argstr='-grad %s', + argstr="-grad %s", + extensions=None, position=1, ), environ=dict( @@ -20,44 +25,63 @@ def test_ConstrainedSphericalDeconvolution_inputs(): usedefault=True, ), filter_file=dict( - argstr='-filter %s', + argstr="-filter %s", + extensions=None, position=-2, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), - iterations=dict(argstr='-niter %s', ), - lambda_value=dict(argstr='-lambda %s', ), + iterations=dict( + argstr="-niter %s", + ), + lambda_value=dict( + argstr="-lambda %s", + ), mask_image=dict( - argstr='-mask %s', + argstr="-mask %s", + extensions=None, position=2, ), - maximum_harmonic_order=dict(argstr='-lmax %s', ), + maximum_harmonic_order=dict( + argstr="-lmax %s", + ), normalise=dict( - argstr='-normalise', + argstr="-normalise", position=3, ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), response_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - threshold_value=dict(argstr='-threshold %s', ), + threshold_value=dict( + argstr="-threshold %s", + ), ) inputs = ConstrainedSphericalDeconvolution.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ConstrainedSphericalDeconvolution_outputs(): - output_map = dict(spherical_harmonics_image=dict(), ) + output_map = dict( + spherical_harmonics_image=dict( + extensions=None, + ), + ) outputs = ConstrainedSphericalDeconvolution.output_spec() for key, 
metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_DWI2SphericalHarmonicsImage.py b/nipype/interfaces/mrtrix/tests/test_auto_DWI2SphericalHarmonicsImage.py index dc95cff525..97b5885baf 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_DWI2SphericalHarmonicsImage.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_DWI2SphericalHarmonicsImage.py @@ -1,13 +1,15 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tensors import DWI2SphericalHarmonicsImage def test_DWI2SphericalHarmonicsImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), encoding_file=dict( - argstr='-grad %s', + argstr="-grad %s", + extensions=None, mandatory=True, position=1, ), @@ -16,17 +18,21 @@ def test_DWI2SphericalHarmonicsImage_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - maximum_harmonic_order=dict(argstr='-lmax %s', ), + maximum_harmonic_order=dict( + argstr="-lmax %s", + ), normalise=dict( - argstr='-normalise', + argstr="-normalise", position=3, ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), @@ -36,8 +42,14 @@ def test_DWI2SphericalHarmonicsImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWI2SphericalHarmonicsImage_outputs(): - output_map = dict(spherical_harmonics_image=dict(), ) + output_map = dict( + spherical_harmonics_image=dict( + extensions=None, + ), + ) outputs = DWI2SphericalHarmonicsImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_DWI2Tensor.py b/nipype/interfaces/mrtrix/tests/test_auto_DWI2Tensor.py index 86975950bf..72a2820cc1 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_DWI2Tensor.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_DWI2Tensor.py @@ -1,17 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import DWI2Tensor def test_DWI2Tensor_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), debug=dict( - argstr='-debug', + argstr="-debug", position=1, ), encoding_file=dict( - argstr='-grad %s', + argstr="-grad %s", + extensions=None, position=2, ), environ=dict( @@ -19,29 +21,34 @@ def test_DWI2Tensor_inputs(): usedefault=True, ), ignore_slice_by_volume=dict( - argstr='-ignoreslices %s', + argstr="-ignoreslices %s", position=2, - sep=' ', + sep=" ", ), ignore_volumes=dict( - argstr='-ignorevolumes %s', + argstr="-ignorevolumes %s", position=2, - sep=' ', + sep=" ", ), in_file=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), + mask=dict( + argstr="-mask %s", + extensions=None, + ), out_filename=dict( - argstr='%s', - name_source='in_file', - name_template='%s_tensor.mif', - output_name='tensor', + argstr="%s", + extensions=None, + name_source="in_file", + name_template="%s_tensor.mif", + output_name="tensor", position=-1, ), quiet=dict( - argstr='-quiet', + argstr="-quiet", position=1, ), ) @@ -50,8 +57,14 @@ def test_DWI2Tensor_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWI2Tensor_outputs(): - output_map = dict(tensor=dict(), ) + output_map = dict( + tensor=dict( + 
extensions=None, + ), + ) outputs = DWI2Tensor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py index 2c6417fe3a..c6fe4f586e 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py @@ -1,123 +1,149 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tracking import DiffusionTensorStreamlineTrack def test_DiffusionTensorStreamlineTrack_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), cutoff_value=dict( - argstr='-cutoff %s', - units='NA', + argstr="-cutoff %s", + units="NA", + ), + desired_number_of_tracks=dict( + argstr="-number %d", + ), + do_not_precompute=dict( + argstr="-noprecomputed", ), - desired_number_of_tracks=dict(argstr='-number %d', ), - do_not_precompute=dict(argstr='-noprecomputed', ), environ=dict( nohash=True, usedefault=True, ), exclude_file=dict( - argstr='-exclude %s', - xor=['exclude_file', 'exclude_spec'], + argstr="-exclude %s", + extensions=None, + xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( - argstr='-exclude %s', + argstr="-exclude %s", position=2, - sep=',', - units='mm', - xor=['exclude_file', 'exclude_spec'], + sep=",", + units="mm", + xor=["exclude_file", "exclude_spec"], ), gradient_encoding_file=dict( - argstr='-grad %s', + argstr="-grad %s", + extensions=None, mandatory=True, position=-2, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), include_file=dict( - argstr='-include %s', - xor=['include_file', 'include_spec'], + argstr="-include %s", + extensions=None, + xor=["include_file", "include_spec"], ), include_spec=dict( - argstr='-include %s', + argstr="-include %s", position=2, - sep=',', - units='mm', - xor=['include_file', 'include_spec'], + sep=",", + units="mm", + xor=["include_file", "include_spec"], ), initial_cutoff_value=dict( - argstr='-initcutoff %s', - units='NA', + argstr="-initcutoff %s", + units="NA", ), initial_direction=dict( - argstr='-initdirection %s', - units='voxels', + argstr="-initdirection %s", + units="voxels", ), inputmodel=dict( - argstr='%s', + argstr="%s", position=-3, usedefault=True, ), mask_file=dict( - argstr='-mask %s', - xor=['mask_file', 'mask_spec'], + argstr="-mask %s", + extensions=None, + xor=["mask_file", "mask_spec"], ), mask_spec=dict( - argstr='-mask %s', + argstr="-mask %s", position=2, - sep=',', - units='mm', - xor=['mask_file', 'mask_spec'], + sep=",", + units="mm", + xor=["mask_file", "mask_spec"], + ), + maximum_number_of_tracks=dict( + argstr="-maxnum %d", ), - maximum_number_of_tracks=dict(argstr='-maxnum %d', ), maximum_tract_length=dict( - argstr='-length %s', - units='mm', + argstr="-length %s", + units="mm", ), minimum_radius_of_curvature=dict( - argstr='-curvature %s', - units='mm', + argstr="-curvature %s", + units="mm", ), minimum_tract_length=dict( - argstr='-minlength %s', - units='mm', + argstr="-minlength %s", + units="mm", + ), + no_mask_interpolation=dict( + argstr="-nomaskinterp", ), - no_mask_interpolation=dict(argstr='-nomaskinterp', ), out_file=dict( - argstr='%s', - name_source=['in_file'], - name_template='%s_tracked.tck', - output_name='tracked', + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s_tracked.tck", + 
output_name="tracked", position=-1, ), seed_file=dict( - argstr='-seed %s', - xor=['seed_file', 'seed_spec'], + argstr="-seed %s", + extensions=None, + xor=["seed_file", "seed_spec"], ), seed_spec=dict( - argstr='-seed %s', + argstr="-seed %s", position=2, - sep=',', - units='mm', - xor=['seed_file', 'seed_spec'], + sep=",", + units="mm", + xor=["seed_file", "seed_spec"], ), step_size=dict( - argstr='-step %s', - units='mm', + argstr="-step %s", + units="mm", + ), + stop=dict( + argstr="-stop", + ), + unidirectional=dict( + argstr="-unidirectional", ), - stop=dict(argstr='-stop', ), - unidirectional=dict(argstr='-unidirectional', ), ) inputs = DiffusionTensorStreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DiffusionTensorStreamlineTrack_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = DiffusionTensorStreamlineTrack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py b/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py index b5474bd18c..4685b0e9d6 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py @@ -1,45 +1,65 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tensors import Directions2Amplitude def test_Directions2Amplitude_inputs(): input_map = dict( - args=dict(argstr='%s', ), - display_debug=dict(argstr='-debug', ), - display_info=dict(argstr='-info', ), + args=dict( + argstr="%s", + ), + display_debug=dict( + argstr="-debug", + ), + display_info=dict( + argstr="-info", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - num_peaks=dict(argstr='-num %s', ), + num_peaks=dict( + argstr="-num %s", + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, keep_extension=False, - name_source=['in_file'], - name_template='%s_amplitudes.mif', + name_source=["in_file"], + name_template="%s_amplitudes.mif", position=-1, ), peak_directions=dict( - argstr='-direction %s', - sep=' ', + argstr="-direction %s", + sep=" ", + ), + peaks_image=dict( + argstr="-peaks %s", + extensions=None, + ), + quiet_display=dict( + argstr="-quiet", ), - peaks_image=dict(argstr='-peaks %s', ), - quiet_display=dict(argstr='-quiet', ), ) inputs = Directions2Amplitude.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Directions2Amplitude_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Directions2Amplitude.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Erode.py b/nipype/interfaces/mrtrix/tests/test_auto_Erode.py index 6215593ab0..86a6a3d0b1 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Erode.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Erode.py @@ -1,17 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Erode def test_Erode_inputs(): input_map = dict( - args=dict(argstr='%s', ), + 
args=dict( + argstr="%s", + ), debug=dict( - argstr='-debug', + argstr="-debug", position=1, ), dilate=dict( - argstr='-dilate', + argstr="-dilate", position=1, ), environ=dict( @@ -19,18 +20,22 @@ def test_Erode_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - number_of_passes=dict(argstr='-npass %s', ), + number_of_passes=dict( + argstr="-npass %s", + ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), quiet=dict( - argstr='-quiet', + argstr="-quiet", position=1, ), ) @@ -39,8 +44,14 @@ def test_Erode_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Erode_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Erode.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_EstimateResponseForSH.py b/nipype/interfaces/mrtrix/tests/test_auto_EstimateResponseForSH.py index 983433104e..e93a7744fc 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_EstimateResponseForSH.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_EstimateResponseForSH.py @@ -1,14 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tensors import EstimateResponseForSH def test_EstimateResponseForSH_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict(argstr='-debug', ), + args=dict( + argstr="%s", + ), + debug=dict( + argstr="-debug", + ), encoding_file=dict( - argstr='-grad %s', + argstr="-grad %s", + extensions=None, mandatory=True, position=1, ), @@ -17,31 +21,46 @@ def test_EstimateResponseForSH_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), mask_image=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - maximum_harmonic_order=dict(argstr='-lmax %s', ), - normalise=dict(argstr='-normalise', ), + maximum_harmonic_order=dict( + argstr="-lmax %s", + ), + normalise=dict( + argstr="-normalise", + ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), - quiet=dict(argstr='-quiet', ), + quiet=dict( + argstr="-quiet", + ), ) inputs = EstimateResponseForSH.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateResponseForSH_outputs(): - output_map = dict(response=dict(), ) + output_map = dict( + response=dict( + extensions=None, + ), + ) outputs = EstimateResponseForSH.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_FSL2MRTrix.py b/nipype/interfaces/mrtrix/tests/test_auto_FSL2MRTrix.py index 6261b6b0a5..3e11a7db45 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_FSL2MRTrix.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_FSL2MRTrix.py @@ -1,24 +1,44 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tensors import FSL2MRTrix def test_FSL2MRTrix_inputs(): input_map = dict( - bval_file=dict(mandatory=True, ), - bvec_file=dict(mandatory=True, ), - invert_x=dict(usedefault=True, ), - invert_y=dict(usedefault=True, ), - invert_z=dict(usedefault=True, ), - 
out_encoding_file=dict(genfile=True, ), + bval_file=dict( + extensions=None, + mandatory=True, + ), + bvec_file=dict( + extensions=None, + mandatory=True, + ), + invert_x=dict( + usedefault=True, + ), + invert_y=dict( + usedefault=True, + ), + invert_z=dict( + usedefault=True, + ), + out_encoding_file=dict( + extensions=None, + genfile=True, + ), ) inputs = FSL2MRTrix.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FSL2MRTrix_outputs(): - output_map = dict(encoding_file=dict(), ) + output_map = dict( + encoding_file=dict( + extensions=None, + ), + ) outputs = FSL2MRTrix.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py b/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py index 6e0e670d06..c7b9c19d24 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tracking import FilterTracks def test_FilterTracks_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), debug=dict( - argstr='-debug', + argstr="-debug", position=1, ), environ=dict( @@ -15,47 +16,55 @@ def test_FilterTracks_inputs(): usedefault=True, ), exclude_file=dict( - argstr='-exclude %s', - xor=['exclude_file', 'exclude_spec'], + argstr="-exclude %s", + extensions=None, + xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( - argstr='-exclude %s', + argstr="-exclude %s", position=2, - sep=',', - units='mm', - xor=['exclude_file', 'exclude_spec'], + sep=",", + units="mm", + xor=["exclude_file", "exclude_spec"], ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), include_file=dict( - argstr='-include %s', - xor=['include_file', 'include_spec'], + argstr="-include %s", + extensions=None, + xor=["include_file", "include_spec"], ), include_spec=dict( - argstr='-include %s', + argstr="-include %s", position=2, - sep=',', - units='mm', - xor=['include_file', 'include_spec'], + sep=",", + units="mm", + xor=["include_file", "include_spec"], + ), + invert=dict( + argstr="-invert", ), - invert=dict(argstr='-invert', ), minimum_tract_length=dict( - argstr='-minlength %s', - units='mm', + argstr="-minlength %s", + units="mm", + ), + no_mask_interpolation=dict( + argstr="-nomaskinterp", ), - no_mask_interpolation=dict(argstr='-nomaskinterp', ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_filt', + name_source=["in_file"], + name_template="%s_filt", position=-1, ), quiet=dict( - argstr='-quiet', + argstr="-quiet", position=1, ), ) @@ -64,8 +73,14 @@ def test_FilterTracks_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FilterTracks_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = FilterTracks.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py b/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py index 5d9f51739c..62132f795a 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py +++ 
b/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py @@ -1,51 +1,74 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tensors import FindShPeaks def test_FindShPeaks_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), directions_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - display_debug=dict(argstr='-debug', ), - display_info=dict(argstr='-info', ), + display_debug=dict( + argstr="-debug", + ), + display_info=dict( + argstr="-info", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), - num_peaks=dict(argstr='-num %s', ), + num_peaks=dict( + argstr="-num %s", + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, keep_extension=False, - name_source=['in_file'], - name_template='%s_peak_dirs.mif', + name_source=["in_file"], + name_template="%s_peak_dirs.mif", position=-1, ), peak_directions=dict( - argstr='-direction %s', - sep=' ', + argstr="-direction %s", + sep=" ", + ), + peak_threshold=dict( + argstr="-threshold %s", + ), + peaks_image=dict( + argstr="-peaks %s", + extensions=None, + ), + quiet_display=dict( + argstr="-quiet", ), - peak_threshold=dict(argstr='-threshold %s', ), - peaks_image=dict(argstr='-peaks %s', ), - quiet_display=dict(argstr='-quiet', ), ) inputs = FindShPeaks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FindShPeaks_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = FindShPeaks.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py b/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py index 2cfd89bb6f..63b66ab1e6 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py @@ -1,40 +1,58 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tensors import GenerateDirections def test_GenerateDirections_inputs(): input_map = dict( - args=dict(argstr='%s', ), - display_debug=dict(argstr='-debug', ), - display_info=dict(argstr='-info', ), + args=dict( + argstr="%s", + ), + display_debug=dict( + argstr="-debug", + ), + display_info=dict( + argstr="-info", + ), environ=dict( nohash=True, usedefault=True, ), - niter=dict(argstr='-niter %s', ), + niter=dict( + argstr="-niter %s", + ), num_dirs=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, hash_files=False, - name_source=['num_dirs'], - name_template='directions_%d.txt', + name_source=["num_dirs"], + name_template="directions_%d.txt", position=-1, ), - power=dict(argstr='-power %s', ), - quiet_display=dict(argstr='-quiet', ), + power=dict( + argstr="-power %s", + ), + quiet_display=dict( + argstr="-quiet", + ), ) inputs = GenerateDirections.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateDirections_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) 
outputs = GenerateDirections.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_GenerateWhiteMatterMask.py b/nipype/interfaces/mrtrix/tests/test_auto_GenerateWhiteMatterMask.py index 6a0305f9e2..09b893f105 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_GenerateWhiteMatterMask.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_GenerateWhiteMatterMask.py @@ -1,18 +1,21 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import GenerateWhiteMatterMask def test_GenerateWhiteMatterMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), binary_mask=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), encoding_file=dict( - argstr='-grad %s', + argstr="-grad %s", + extensions=None, mandatory=True, position=1, ), @@ -21,13 +24,17 @@ def test_GenerateWhiteMatterMask_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), - noise_level_margin=dict(argstr='-margin %s', ), + noise_level_margin=dict( + argstr="-margin %s", + ), out_WMProb_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), @@ -37,8 +44,14 @@ def test_GenerateWhiteMatterMask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateWhiteMatterMask_outputs(): - output_map = dict(WMprobabilitymap=dict(), ) + output_map = dict( + WMprobabilitymap=dict( + extensions=None, + ), + ) outputs = GenerateWhiteMatterMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRConvert.py b/nipype/interfaces/mrtrix/tests/test_auto_MRConvert.py index 5aef0bfd13..7e819a66da 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRConvert.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRConvert.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import MRConvert def test_MRConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, @@ -15,54 +16,56 @@ def test_MRConvert_inputs(): usedefault=True, ), extract_at_axis=dict( - argstr='-coord %s', + argstr="-coord %s", position=1, ), extract_at_coordinate=dict( - argstr='%s', + argstr="%s", position=2, - sep=',', + sep=",", ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), layout=dict( - argstr='-output %s', + argstr="-output %s", position=2, ), offset_bias=dict( - argstr='-scale %d', + argstr="-scale %d", position=3, - units='mm', + units="mm", ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), output_datatype=dict( - argstr='-output %s', + argstr="-output %s", position=2, ), prs=dict( - argstr='-prs', + argstr="-prs", position=3, ), replace_NaN_with_zero=dict( - argstr='-zero', + argstr="-zero", position=3, ), resample=dict( - argstr='-scale %d', + argstr="-scale %d", position=3, - units='mm', + units="mm", ), voxel_dims=dict( - argstr='-vox %s', + argstr="-vox %s", position=3, - sep=',', + sep=",", ), ) inputs = MRConvert.input_spec() @@ -70,8 +73,14 @@ def test_MRConvert_inputs(): for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRConvert_outputs(): - output_map = dict(converted=dict(), ) + output_map = dict( + converted=dict( + extensions=None, + ), + ) outputs = MRConvert.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRMultiply.py b/nipype/interfaces/mrtrix/tests/test_auto_MRMultiply.py index a5a864289a..daa1231f06 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRMultiply.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRMultiply.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import MRMultiply def test_MRMultiply_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), debug=dict( - argstr='-debug', + argstr="-debug", position=1, ), environ=dict( @@ -15,17 +16,18 @@ def test_MRMultiply_inputs(): usedefault=True, ), in_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), quiet=dict( - argstr='-quiet', + argstr="-quiet", position=1, ), ) @@ -34,8 +36,14 @@ def test_MRMultiply_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRMultiply_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MRMultiply.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRTransform.py b/nipype/interfaces/mrtrix/tests/test_auto_MRTransform.py index d6a3db0a1d..f619525575 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRTransform.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRTransform.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import MRTransform def test_MRTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), debug=dict( - argstr='-debug', + argstr="-debug", position=1, ), environ=dict( @@ -15,41 +16,50 @@ def test_MRTransform_inputs(): usedefault=True, ), flip_x=dict( - argstr='-flipx', + argstr="-flipx", position=1, ), in_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), invert=dict( - argstr='-inverse', + argstr="-inverse", + position=1, + ), + linear_transform=dict( + argstr="-linear %s", + extensions=None, position=1, ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), quiet=dict( - argstr='-quiet', + argstr="-quiet", position=1, ), reference_image=dict( - argstr='-reference %s', + argstr="-reference %s", + extensions=None, position=1, ), replace_transform=dict( - argstr='-replace', + argstr="-replace", position=1, ), template_image=dict( - argstr='-template %s', + argstr="-template %s", + extensions=None, position=1, ), transformation_file=dict( - argstr='-transform %s', + argstr="-transform %s", + extensions=None, position=1, ), ) @@ -58,8 +68,14 @@ def test_MRTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRTransform_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + 
) outputs = MRTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRTrix2TrackVis.py b/nipype/interfaces/mrtrix/tests/test_auto_MRTrix2TrackVis.py index b6fefac9a0..0fb54a3020 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRTrix2TrackVis.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRTrix2TrackVis.py @@ -1,26 +1,41 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..convert import MRTrix2TrackVis def test_MRTrix2TrackVis_inputs(): input_map = dict( - image_file=dict(), - in_file=dict(mandatory=True, ), - matrix_file=dict(), + image_file=dict( + extensions=None, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + matrix_file=dict( + extensions=None, + ), out_filename=dict( + extensions=None, genfile=True, usedefault=True, ), - registration_image_file=dict(), + registration_image_file=dict( + extensions=None, + ), ) inputs = MRTrix2TrackVis.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRTrix2TrackVis_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MRTrix2TrackVis.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRTrixInfo.py b/nipype/interfaces/mrtrix/tests/test_auto_MRTrixInfo.py index 0022de4d7a..eecdc39bf5 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRTrixInfo.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRTrixInfo.py @@ -1,17 +1,19 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import MRTrixInfo def test_MRTrixInfo_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), @@ -21,6 +23,8 @@ def test_MRTrixInfo_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRTrixInfo_outputs(): output_map = dict() outputs = MRTrixInfo.output_spec() diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRTrixViewer.py b/nipype/interfaces/mrtrix/tests/test_auto_MRTrixViewer.py index 117258ea17..8eab033221 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRTrixViewer.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRTrixViewer.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import MRTrixViewer def test_MRTrixViewer_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), debug=dict( - argstr='-debug', + argstr="-debug", position=1, ), environ=dict( @@ -15,12 +16,12 @@ def test_MRTrixViewer_inputs(): usedefault=True, ), in_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), quiet=dict( - argstr='-quiet', + argstr="-quiet", position=1, ), ) @@ -29,6 +30,8 @@ def test_MRTrixViewer_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRTrixViewer_outputs(): output_map = dict() outputs = MRTrixViewer.output_spec() diff --git 
a/nipype/interfaces/mrtrix/tests/test_auto_MedianFilter3D.py b/nipype/interfaces/mrtrix/tests/test_auto_MedianFilter3D.py index 0e7daabcaa..7a0974d9b8 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MedianFilter3D.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MedianFilter3D.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import MedianFilter3D def test_MedianFilter3D_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), debug=dict( - argstr='-debug', + argstr="-debug", position=1, ), environ=dict( @@ -15,17 +16,19 @@ def test_MedianFilter3D_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), quiet=dict( - argstr='-quiet', + argstr="-quiet", position=1, ), ) @@ -34,8 +37,14 @@ def test_MedianFilter3D_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedianFilter3D_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MedianFilter3D.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py index 6d75b1b9a2..8dded55576 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py @@ -1,119 +1,146 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tracking import ProbabilisticSphericallyDeconvolutedStreamlineTrack def test_ProbabilisticSphericallyDeconvolutedStreamlineTrack_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), cutoff_value=dict( - argstr='-cutoff %s', - units='NA', + argstr="-cutoff %s", + units="NA", + ), + desired_number_of_tracks=dict( + argstr="-number %d", + ), + do_not_precompute=dict( + argstr="-noprecomputed", ), - desired_number_of_tracks=dict(argstr='-number %d', ), - do_not_precompute=dict(argstr='-noprecomputed', ), environ=dict( nohash=True, usedefault=True, ), exclude_file=dict( - argstr='-exclude %s', - xor=['exclude_file', 'exclude_spec'], + argstr="-exclude %s", + extensions=None, + xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( - argstr='-exclude %s', + argstr="-exclude %s", position=2, - sep=',', - units='mm', - xor=['exclude_file', 'exclude_spec'], + sep=",", + units="mm", + xor=["exclude_file", "exclude_spec"], ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), include_file=dict( - argstr='-include %s', - xor=['include_file', 'include_spec'], + argstr="-include %s", + extensions=None, + xor=["include_file", "include_spec"], ), include_spec=dict( - argstr='-include %s', + argstr="-include %s", position=2, - sep=',', - units='mm', - xor=['include_file', 'include_spec'], + sep=",", + units="mm", + xor=["include_file", "include_spec"], ), initial_cutoff_value=dict( - argstr='-initcutoff %s', - units='NA', + argstr="-initcutoff %s", + units="NA", ), initial_direction=dict( - 
argstr='-initdirection %s', - units='voxels', + argstr="-initdirection %s", + units="voxels", ), inputmodel=dict( - argstr='%s', + argstr="%s", position=-3, usedefault=True, ), mask_file=dict( - argstr='-mask %s', - xor=['mask_file', 'mask_spec'], + argstr="-mask %s", + extensions=None, + xor=["mask_file", "mask_spec"], ), mask_spec=dict( - argstr='-mask %s', + argstr="-mask %s", position=2, - sep=',', - units='mm', - xor=['mask_file', 'mask_spec'], + sep=",", + units="mm", + xor=["mask_file", "mask_spec"], + ), + maximum_number_of_tracks=dict( + argstr="-maxnum %d", + ), + maximum_number_of_trials=dict( + argstr="-trials %s", ), - maximum_number_of_tracks=dict(argstr='-maxnum %d', ), - maximum_number_of_trials=dict(argstr='-trials %s', ), maximum_tract_length=dict( - argstr='-length %s', - units='mm', + argstr="-length %s", + units="mm", ), minimum_radius_of_curvature=dict( - argstr='-curvature %s', - units='mm', + argstr="-curvature %s", + units="mm", ), minimum_tract_length=dict( - argstr='-minlength %s', - units='mm', + argstr="-minlength %s", + units="mm", + ), + no_mask_interpolation=dict( + argstr="-nomaskinterp", ), - no_mask_interpolation=dict(argstr='-nomaskinterp', ), out_file=dict( - argstr='%s', - name_source=['in_file'], - name_template='%s_tracked.tck', - output_name='tracked', + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s_tracked.tck", + output_name="tracked", position=-1, ), seed_file=dict( - argstr='-seed %s', - xor=['seed_file', 'seed_spec'], + argstr="-seed %s", + extensions=None, + xor=["seed_file", "seed_spec"], ), seed_spec=dict( - argstr='-seed %s', + argstr="-seed %s", position=2, - sep=',', - units='mm', - xor=['seed_file', 'seed_spec'], + sep=",", + units="mm", + xor=["seed_file", "seed_spec"], ), step_size=dict( - argstr='-step %s', - units='mm', + argstr="-step %s", + units="mm", + ), + stop=dict( + argstr="-stop", + ), + unidirectional=dict( + argstr="-unidirectional", ), - stop=dict(argstr='-stop', ), - unidirectional=dict(argstr='-unidirectional', ), ) inputs = ProbabilisticSphericallyDeconvolutedStreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProbabilisticSphericallyDeconvolutedStreamlineTrack_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = ProbabilisticSphericallyDeconvolutedStreamlineTrack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py index 9bed95276d..c717eb628b 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py @@ -1,118 +1,143 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tracking import SphericallyDeconvolutedStreamlineTrack def test_SphericallyDeconvolutedStreamlineTrack_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), cutoff_value=dict( - argstr='-cutoff %s', - units='NA', + argstr="-cutoff %s", + units="NA", + ), + desired_number_of_tracks=dict( + argstr="-number %d", + ), + do_not_precompute=dict( + argstr="-noprecomputed", ), - desired_number_of_tracks=dict(argstr='-number %d', 
), - do_not_precompute=dict(argstr='-noprecomputed', ), environ=dict( nohash=True, usedefault=True, ), exclude_file=dict( - argstr='-exclude %s', - xor=['exclude_file', 'exclude_spec'], + argstr="-exclude %s", + extensions=None, + xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( - argstr='-exclude %s', + argstr="-exclude %s", position=2, - sep=',', - units='mm', - xor=['exclude_file', 'exclude_spec'], + sep=",", + units="mm", + xor=["exclude_file", "exclude_spec"], ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), include_file=dict( - argstr='-include %s', - xor=['include_file', 'include_spec'], + argstr="-include %s", + extensions=None, + xor=["include_file", "include_spec"], ), include_spec=dict( - argstr='-include %s', + argstr="-include %s", position=2, - sep=',', - units='mm', - xor=['include_file', 'include_spec'], + sep=",", + units="mm", + xor=["include_file", "include_spec"], ), initial_cutoff_value=dict( - argstr='-initcutoff %s', - units='NA', + argstr="-initcutoff %s", + units="NA", ), initial_direction=dict( - argstr='-initdirection %s', - units='voxels', + argstr="-initdirection %s", + units="voxels", ), inputmodel=dict( - argstr='%s', + argstr="%s", position=-3, usedefault=True, ), mask_file=dict( - argstr='-mask %s', - xor=['mask_file', 'mask_spec'], + argstr="-mask %s", + extensions=None, + xor=["mask_file", "mask_spec"], ), mask_spec=dict( - argstr='-mask %s', + argstr="-mask %s", position=2, - sep=',', - units='mm', - xor=['mask_file', 'mask_spec'], + sep=",", + units="mm", + xor=["mask_file", "mask_spec"], + ), + maximum_number_of_tracks=dict( + argstr="-maxnum %d", ), - maximum_number_of_tracks=dict(argstr='-maxnum %d', ), maximum_tract_length=dict( - argstr='-length %s', - units='mm', + argstr="-length %s", + units="mm", ), minimum_radius_of_curvature=dict( - argstr='-curvature %s', - units='mm', + argstr="-curvature %s", + units="mm", ), minimum_tract_length=dict( - argstr='-minlength %s', - units='mm', + argstr="-minlength %s", + units="mm", + ), + no_mask_interpolation=dict( + argstr="-nomaskinterp", ), - no_mask_interpolation=dict(argstr='-nomaskinterp', ), out_file=dict( - argstr='%s', - name_source=['in_file'], - name_template='%s_tracked.tck', - output_name='tracked', + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s_tracked.tck", + output_name="tracked", position=-1, ), seed_file=dict( - argstr='-seed %s', - xor=['seed_file', 'seed_spec'], + argstr="-seed %s", + extensions=None, + xor=["seed_file", "seed_spec"], ), seed_spec=dict( - argstr='-seed %s', + argstr="-seed %s", position=2, - sep=',', - units='mm', - xor=['seed_file', 'seed_spec'], + sep=",", + units="mm", + xor=["seed_file", "seed_spec"], ), step_size=dict( - argstr='-step %s', - units='mm', + argstr="-step %s", + units="mm", + ), + stop=dict( + argstr="-stop", + ), + unidirectional=dict( + argstr="-unidirectional", ), - stop=dict(argstr='-stop', ), - unidirectional=dict(argstr='-unidirectional', ), ) inputs = SphericallyDeconvolutedStreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SphericallyDeconvolutedStreamlineTrack_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = SphericallyDeconvolutedStreamlineTrack.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py index c898a60eb3..d8f1a5c869 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py @@ -1,118 +1,143 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tracking import StreamlineTrack def test_StreamlineTrack_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), cutoff_value=dict( - argstr='-cutoff %s', - units='NA', + argstr="-cutoff %s", + units="NA", + ), + desired_number_of_tracks=dict( + argstr="-number %d", + ), + do_not_precompute=dict( + argstr="-noprecomputed", ), - desired_number_of_tracks=dict(argstr='-number %d', ), - do_not_precompute=dict(argstr='-noprecomputed', ), environ=dict( nohash=True, usedefault=True, ), exclude_file=dict( - argstr='-exclude %s', - xor=['exclude_file', 'exclude_spec'], + argstr="-exclude %s", + extensions=None, + xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( - argstr='-exclude %s', + argstr="-exclude %s", position=2, - sep=',', - units='mm', - xor=['exclude_file', 'exclude_spec'], + sep=",", + units="mm", + xor=["exclude_file", "exclude_spec"], ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), include_file=dict( - argstr='-include %s', - xor=['include_file', 'include_spec'], + argstr="-include %s", + extensions=None, + xor=["include_file", "include_spec"], ), include_spec=dict( - argstr='-include %s', + argstr="-include %s", position=2, - sep=',', - units='mm', - xor=['include_file', 'include_spec'], + sep=",", + units="mm", + xor=["include_file", "include_spec"], ), initial_cutoff_value=dict( - argstr='-initcutoff %s', - units='NA', + argstr="-initcutoff %s", + units="NA", ), initial_direction=dict( - argstr='-initdirection %s', - units='voxels', + argstr="-initdirection %s", + units="voxels", ), inputmodel=dict( - argstr='%s', + argstr="%s", position=-3, usedefault=True, ), mask_file=dict( - argstr='-mask %s', - xor=['mask_file', 'mask_spec'], + argstr="-mask %s", + extensions=None, + xor=["mask_file", "mask_spec"], ), mask_spec=dict( - argstr='-mask %s', + argstr="-mask %s", position=2, - sep=',', - units='mm', - xor=['mask_file', 'mask_spec'], + sep=",", + units="mm", + xor=["mask_file", "mask_spec"], + ), + maximum_number_of_tracks=dict( + argstr="-maxnum %d", ), - maximum_number_of_tracks=dict(argstr='-maxnum %d', ), maximum_tract_length=dict( - argstr='-length %s', - units='mm', + argstr="-length %s", + units="mm", ), minimum_radius_of_curvature=dict( - argstr='-curvature %s', - units='mm', + argstr="-curvature %s", + units="mm", ), minimum_tract_length=dict( - argstr='-minlength %s', - units='mm', + argstr="-minlength %s", + units="mm", + ), + no_mask_interpolation=dict( + argstr="-nomaskinterp", ), - no_mask_interpolation=dict(argstr='-nomaskinterp', ), out_file=dict( - argstr='%s', - name_source=['in_file'], - name_template='%s_tracked.tck', - output_name='tracked', + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s_tracked.tck", + output_name="tracked", position=-1, ), seed_file=dict( - argstr='-seed %s', - xor=['seed_file', 'seed_spec'], + argstr="-seed %s", + extensions=None, + xor=["seed_file", "seed_spec"], ), seed_spec=dict( - argstr='-seed %s', + argstr="-seed %s", position=2, - sep=',', - units='mm', - xor=['seed_file', 'seed_spec'], + sep=",", + 
units="mm", + xor=["seed_file", "seed_spec"], ), step_size=dict( - argstr='-step %s', - units='mm', + argstr="-step %s", + units="mm", + ), + stop=dict( + argstr="-stop", + ), + unidirectional=dict( + argstr="-unidirectional", ), - stop=dict(argstr='-stop', ), - unidirectional=dict(argstr='-unidirectional', ), ) inputs = StreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_StreamlineTrack_outputs(): - output_map = dict(tracked=dict(), ) + output_map = dict( + tracked=dict( + extensions=None, + ), + ) outputs = StreamlineTrack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2ApparentDiffusion.py b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2ApparentDiffusion.py index e9546d7e90..c32daa3574 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2ApparentDiffusion.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2ApparentDiffusion.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Tensor2ApparentDiffusion def test_Tensor2ApparentDiffusion_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), debug=dict( - argstr='-debug', + argstr="-debug", position=1, ), environ=dict( @@ -15,17 +16,19 @@ def test_Tensor2ApparentDiffusion_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), quiet=dict( - argstr='-quiet', + argstr="-quiet", position=1, ), ) @@ -34,8 +37,14 @@ def test_Tensor2ApparentDiffusion_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tensor2ApparentDiffusion_outputs(): - output_map = dict(ADC=dict(), ) + output_map = dict( + ADC=dict( + extensions=None, + ), + ) outputs = Tensor2ApparentDiffusion.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2FractionalAnisotropy.py b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2FractionalAnisotropy.py index d16a907f62..bf90806f74 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2FractionalAnisotropy.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2FractionalAnisotropy.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Tensor2FractionalAnisotropy def test_Tensor2FractionalAnisotropy_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), debug=dict( - argstr='-debug', + argstr="-debug", position=1, ), environ=dict( @@ -15,17 +16,19 @@ def test_Tensor2FractionalAnisotropy_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), quiet=dict( - argstr='-quiet', + argstr="-quiet", position=1, ), ) @@ -34,8 +37,14 @@ def test_Tensor2FractionalAnisotropy_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_Tensor2FractionalAnisotropy_outputs(): - output_map = dict(FA=dict(), ) + output_map = dict( + FA=dict( + extensions=None, + ), + ) outputs = Tensor2FractionalAnisotropy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2Vector.py b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2Vector.py index c07d0a8db4..a87eefef7e 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2Vector.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2Vector.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Tensor2Vector def test_Tensor2Vector_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), debug=dict( - argstr='-debug', + argstr="-debug", position=1, ), environ=dict( @@ -15,17 +16,19 @@ def test_Tensor2Vector_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), quiet=dict( - argstr='-quiet', + argstr="-quiet", position=1, ), ) @@ -34,8 +37,14 @@ def test_Tensor2Vector_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tensor2Vector_outputs(): - output_map = dict(vector=dict(), ) + output_map = dict( + vector=dict( + extensions=None, + ), + ) outputs = Tensor2Vector.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Threshold.py b/nipype/interfaces/mrtrix/tests/test_auto_Threshold.py index 39fbf14d2c..0a67f4db56 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Threshold.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Threshold.py @@ -1,14 +1,17 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Threshold def test_Threshold_inputs(): input_map = dict( - absolute_threshold_value=dict(argstr='-abs %s', ), - args=dict(argstr='%s', ), + absolute_threshold_value=dict( + argstr="-abs %s", + ), + args=dict( + argstr="%s", + ), debug=dict( - argstr='-debug', + argstr="-debug", position=1, ), environ=dict( @@ -16,26 +19,30 @@ def test_Threshold_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), invert=dict( - argstr='-invert', + argstr="-invert", position=1, ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), - percentage_threshold_value=dict(argstr='-percent %s', ), + percentage_threshold_value=dict( + argstr="-percent %s", + ), quiet=dict( - argstr='-quiet', + argstr="-quiet", position=1, ), replace_zeros_with_NaN=dict( - argstr='-nan', + argstr="-nan", position=1, ), ) @@ -44,8 +51,14 @@ def test_Threshold_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Threshold_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Threshold.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Tracks2Prob.py b/nipype/interfaces/mrtrix/tests/test_auto_Tracks2Prob.py index f984203ab1..9323ba74f2 100644 --- 
a/nipype/interfaces/mrtrix/tests/test_auto_Tracks2Prob.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Tracks2Prob.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tracking import Tracks2Prob def test_Tracks2Prob_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), colour=dict( - argstr='-colour', + argstr="-colour", position=3, ), environ=dict( @@ -15,36 +16,39 @@ def test_Tracks2Prob_inputs(): usedefault=True, ), fraction=dict( - argstr='-fraction', + argstr="-fraction", position=3, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), out_filename=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), output_datatype=dict( - argstr='-datatype %s', + argstr="-datatype %s", position=2, ), resample=dict( - argstr='-resample %d', + argstr="-resample %d", position=3, - units='mm', + units="mm", ), template_file=dict( - argstr='-template %s', + argstr="-template %s", + extensions=None, position=1, ), voxel_dims=dict( - argstr='-vox %s', + argstr="-vox %s", position=2, - sep=',', + sep=",", ), ) inputs = Tracks2Prob.input_spec() @@ -52,8 +56,14 @@ def test_Tracks2Prob_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tracks2Prob_outputs(): - output_map = dict(tract_image=dict(), ) + output_map = dict( + tract_image=dict( + extensions=None, + ), + ) outputs = Tracks2Prob.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tracking.py b/nipype/interfaces/mrtrix/tracking.py index 7a7ed995f0..53e805eeb6 100644 --- a/nipype/interfaces/mrtrix/tracking.py +++ b/nipype/interfaces/mrtrix/tracking.py @@ -1,91 +1,94 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os import os.path as op from ...utils.filemanip import split_filename -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File, isdefined) +from ..base import ( + CommandLineInputSpec, + CommandLine, + traits, + TraitedSpec, + File, + isdefined, +) class FilterTracksInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input tracks to be filtered') - include_xor = ['include_file', 'include_spec'] + desc="input tracks to be filtered", + ) + include_xor = ["include_file", "include_spec"] include_file = File( - exists=True, - argstr='-include %s', - desc='inclusion file', - xor=include_xor) + exists=True, argstr="-include %s", desc="inclusion file", xor=include_xor + ) include_spec = traits.List( traits.Float, - desc='inclusion specification in mm and radius (x y z r)', + desc="inclusion specification in mm and radius (x y z r)", position=2, - argstr='-include %s', + argstr="-include %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=include_xor) + sep=",", + units="mm", + xor=include_xor, + ) - exclude_xor = ['exclude_file', 'exclude_spec'] + exclude_xor = ["exclude_file", "exclude_spec"] exclude_file = File( - exists=True, - argstr='-exclude %s', - desc='exclusion file', - xor=exclude_xor) + exists=True, argstr="-exclude %s", desc="exclusion file", xor=exclude_xor + ) exclude_spec = traits.List( traits.Float, 
- desc='exclusion specification in mm and radius (x y z r)', + desc="exclusion specification in mm and radius (x y z r)", position=2, - argstr='-exclude %s', + argstr="-exclude %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=exclude_xor) + sep=",", + units="mm", + xor=exclude_xor, + ) minimum_tract_length = traits.Float( - argstr='-minlength %s', - units='mm', - desc= - "Sets the minimum length of any track in millimeters (default is 10 mm)." + argstr="-minlength %s", + units="mm", + desc="Sets the minimum length of any track in millimeters (default is 10 mm).", ) out_file = File( - argstr='%s', + argstr="%s", position=-1, - desc='Output filtered track filename', - name_source=['in_file'], + desc="Output filtered track filename", + name_source=["in_file"], hash_files=False, - name_template='%s_filt') + name_template="%s_filt", + ) no_mask_interpolation = traits.Bool( - argstr='-nomaskinterp', - desc="Turns off trilinear interpolation of mask images.") + argstr="-nomaskinterp", desc="Turns off trilinear interpolation of mask images." + ) invert = traits.Bool( - argstr='-invert', + argstr="-invert", desc="invert the matching process, so that tracks that would" - "otherwise have been included are now excluded and vice-versa.") + "otherwise have been included are now excluded and vice-versa.", + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class FilterTracksOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output filtered tracks') + out_file = File(exists=True, desc="the output filtered tracks") class FilterTracks(CommandLine): @@ -102,45 +105,39 @@ class FilterTracks(CommandLine): >>> filt.run() # doctest: +SKIP """ - _cmd = 'filter_tracks' + _cmd = "filter_tracks" input_spec = FilterTracksInputSpec output_spec = FilterTracksOutputSpec class Tracks2ProbInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='tract file') + exists=True, argstr="%s", mandatory=True, position=-2, desc="tract file" + ) template_file = File( exists=True, - argstr='-template %s', + argstr="-template %s", position=1, - desc= - 'an image file to be used as a template for the output (the output image wil have the same transform and field of view)' + desc="an image file to be used as a template for the output (the output image will have the same transform and field of view)", ) voxel_dims = traits.List( traits.Float, - argstr='-vox %s', - sep=',', + argstr="-vox %s", + sep=",", position=2, minlen=3, maxlen=3, - desc= - 'Three comma-separated numbers giving the size of each voxel in mm.') + desc="Three comma-separated numbers giving the size of each voxel in mm.", + ) colour = traits.Bool( - argstr='-colour', + argstr="-colour", position=3, - desc= - "add colour to the output image according to the direction of the tracks." + desc="add colour to the output image according to the direction of the tracks.", ) fraction = traits.Bool( - argstr='-fraction', + argstr="-fraction", position=3, - desc= - "produce an image of the fraction of fibres through each voxel (as a proportion of the total number in the file), rather than the count." 
+ desc="produce an image of the fraction of fibres through each voxel (as a proportion of the total number in the file), rather than the count.", ) output_datatype = traits.Enum( "Bit", @@ -152,25 +149,21 @@ class Tracks2ProbInputSpec(CommandLineInputSpec): "UInt32", "float32", "float64", - argstr='-datatype %s', + argstr="-datatype %s", position=2, - desc= - '"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"' + desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', ) # , usedefault=True) resample = traits.Float( - argstr='-resample %d', + argstr="-resample %d", position=3, - units='mm', - desc= - 'resample the tracks at regular intervals using Hermite interpolation. If omitted, the program will select an appropriate interpolation factor automatically.' + units="mm", + desc="resample the tracks at regular intervals using Hermite interpolation. If omitted, the program will select an appropriate interpolation factor automatically.", ) - out_filename = File( - genfile=True, argstr='%s', position=-1, desc='output data file') + out_filename = File(genfile=True, argstr="%s", position=-1, desc="output data file") class Tracks2ProbOutputSpec(TraitedSpec): - tract_image = File( - exists=True, desc='Output tract count or track density image') + tract_image = File(exists=True, desc="Output tract count or track density image") class Tracks2Prob(CommandLine): @@ -190,199 +183,197 @@ class Tracks2Prob(CommandLine): >>> tdi.run() # doctest: +SKIP """ - _cmd = 'tracks2prob' + _cmd = "tracks2prob" input_spec = Tracks2ProbInputSpec output_spec = Tracks2ProbOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['tract_image'] = self.inputs.out_filename - if not isdefined(outputs['tract_image']): - outputs['tract_image'] = op.abspath(self._gen_outfilename()) + outputs["tract_image"] = self.inputs.out_filename + if not isdefined(outputs["tract_image"]): + outputs["tract_image"] = op.abspath(self._gen_outfilename()) else: - outputs['tract_image'] = os.path.abspath(outputs['tract_image']) + outputs["tract_image"] = os.path.abspath(outputs["tract_image"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_TDI.mif' + return name + "_TDI.mif" class StreamlineTrackInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='the image containing the source data.' - 'The type of data required depends on the type of tracking as set in the preceeding argument. For DT methods, ' - 'the base DWI are needed. For SD methods, the SH harmonic coefficients of the FOD are needed.' + desc="the image containing the source data." + "The type of data required depends on the type of tracking as set in the preceding argument. For DT methods, " + "the base DWI are needed. 
For SD methods, the SH harmonic coefficients of the FOD are needed.", ) - seed_xor = ['seed_file', 'seed_spec'] - seed_file = File( - exists=True, argstr='-seed %s', desc='seed file', xor=seed_xor) + seed_xor = ["seed_file", "seed_spec"] + seed_file = File(exists=True, argstr="-seed %s", desc="seed file", xor=seed_xor) seed_spec = traits.List( traits.Float, - desc='seed specification in mm and radius (x y z r)', + desc="seed specification in mm and radius (x y z r)", position=2, - argstr='-seed %s', + argstr="-seed %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=seed_xor) + sep=",", + units="mm", + xor=seed_xor, + ) - include_xor = ['include_file', 'include_spec'] + include_xor = ["include_file", "include_spec"] include_file = File( - exists=True, - argstr='-include %s', - desc='inclusion file', - xor=include_xor) + exists=True, argstr="-include %s", desc="inclusion file", xor=include_xor + ) include_spec = traits.List( traits.Float, - desc='inclusion specification in mm and radius (x y z r)', + desc="inclusion specification in mm and radius (x y z r)", position=2, - argstr='-include %s', + argstr="-include %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=include_xor) + sep=",", + units="mm", + xor=include_xor, + ) - exclude_xor = ['exclude_file', 'exclude_spec'] + exclude_xor = ["exclude_file", "exclude_spec"] exclude_file = File( - exists=True, - argstr='-exclude %s', - desc='exclusion file', - xor=exclude_xor) + exists=True, argstr="-exclude %s", desc="exclusion file", xor=exclude_xor + ) exclude_spec = traits.List( traits.Float, - desc='exclusion specification in mm and radius (x y z r)', + desc="exclusion specification in mm and radius (x y z r)", position=2, - argstr='-exclude %s', + argstr="-exclude %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=exclude_xor) + sep=",", + units="mm", + xor=exclude_xor, + ) - mask_xor = ['mask_file', 'mask_spec'] + mask_xor = ["mask_file", "mask_spec"] mask_file = File( exists=True, - argstr='-mask %s', - desc='mask file. Only tracks within mask.', - xor=mask_xor) + argstr="-mask %s", + desc="mask file. Only tracks within mask.", + xor=mask_xor, + ) mask_spec = traits.List( traits.Float, - desc= - 'Mask specification in mm and radius (x y z r). Tracks will be terminated when they leave the ROI.', + desc="Mask specification in mm and radius (x y z r). Tracks will be terminated when they leave the ROI.", position=2, - argstr='-mask %s', + argstr="-mask %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=mask_xor) + sep=",", + units="mm", + xor=mask_xor, + ) inputmodel = traits.Enum( - 'DT_STREAM', - 'SD_PROB', - 'SD_STREAM', - argstr='%s', - desc='input model type', + "DT_STREAM", + "SD_PROB", + "SD_STREAM", + argstr="%s", + desc="input model type", usedefault=True, - position=-3) + position=-3, + ) stop = traits.Bool( - argstr='-stop', - desc="stop track as soon as it enters any of the include regions.") + argstr="-stop", + desc="stop track as soon as it enters any of the include regions.", + ) do_not_precompute = traits.Bool( - argstr='-noprecomputed', - desc= - "Turns off precomputation of the legendre polynomial values. Warning: this will slow down the algorithm by a factor of approximately 4." + argstr="-noprecomputed", + desc="Turns off precomputation of the legendre polynomial values. Warning: this will slow down the algorithm by a factor of approximately 4.", ) unidirectional = traits.Bool( - argstr='-unidirectional', - desc= - "Track from the seed point in one direction only (default is to track in both directions)." 
+ argstr="-unidirectional", + desc="Track from the seed point in one direction only (default is to track in both directions).", ) no_mask_interpolation = traits.Bool( - argstr='-nomaskinterp', - desc="Turns off trilinear interpolation of mask images.") + argstr="-nomaskinterp", desc="Turns off trilinear interpolation of mask images." + ) step_size = traits.Float( - argstr='-step %s', - units='mm', - desc="Set the step size of the algorithm in mm (default is 0.2).") + argstr="-step %s", + units="mm", + desc="Set the step size of the algorithm in mm (default is 0.2).", + ) minimum_radius_of_curvature = traits.Float( - argstr='-curvature %s', - units='mm', - desc= - "Set the minimum radius of curvature (default is 2 mm for DT_STREAM, 0 for SD_STREAM, 1 mm for SD_PROB and DT_PROB)" + argstr="-curvature %s", + units="mm", + desc="Set the minimum radius of curvature (default is 2 mm for DT_STREAM, 0 for SD_STREAM, 1 mm for SD_PROB and DT_PROB)", ) desired_number_of_tracks = traits.Int( - argstr='-number %d', - desc='Sets the desired number of tracks.' - 'The program will continue to generate tracks until this number of tracks have been selected and written to the output file' - '(default is 100 for *_STREAM methods, 1000 for *_PROB methods).') + argstr="-number %d", + desc="Sets the desired number of tracks." + "The program will continue to generate tracks until this number of tracks have been selected and written to the output file" + "(default is 100 for ``*_STREAM`` methods, 1000 for ``*_PROB`` methods).", + ) maximum_number_of_tracks = traits.Int( - argstr='-maxnum %d', - desc='Sets the maximum number of tracks to generate.' + argstr="-maxnum %d", + desc="Sets the maximum number of tracks to generate." "The program will not generate more tracks than this number, even if the desired number of tracks hasn't yet been reached" - '(default is 100 x number).') + "(default is 100 x number).", + ) minimum_tract_length = traits.Float( - argstr='-minlength %s', - units='mm', - desc= - "Sets the minimum length of any track in millimeters (default is 10 mm)." + argstr="-minlength %s", + units="mm", + desc="Sets the minimum length of any track in millimeters (default is 10 mm).", ) maximum_tract_length = traits.Float( - argstr='-length %s', - units='mm', - desc= - "Sets the maximum length of any track in millimeters (default is 200 mm)." + argstr="-length %s", + units="mm", + desc="Sets the maximum length of any track in millimeters (default is 200 mm).", ) cutoff_value = traits.Float( - argstr='-cutoff %s', - units='NA', - desc= - "Set the FA or FOD amplitude cutoff for terminating tracks (default is 0.1)." + argstr="-cutoff %s", + units="NA", + desc="Set the FA or FOD amplitude cutoff for terminating tracks (default is 0.1).", ) initial_cutoff_value = traits.Float( - argstr='-initcutoff %s', - units='NA', - desc= - "Sets the minimum FA or FOD amplitude for initiating tracks (default is twice the normal cutoff)." 
+ argstr="-initcutoff %s", + units="NA", + desc="Sets the minimum FA or FOD amplitude for initiating tracks (default is twice the normal cutoff).", ) initial_direction = traits.List( traits.Int, - desc='Specify the initial tracking direction as a vector', - argstr='-initdirection %s', + desc="Specify the initial tracking direction as a vector", + argstr="-initdirection %s", minlen=2, maxlen=2, - units='voxels') + units="voxels", + ) out_file = File( - argstr='%s', + argstr="%s", position=-1, - name_source=['in_file'], - name_template='%s_tracked.tck', - output_name='tracked', - desc='output data file') + name_source=["in_file"], + name_template="%s_tracked.tck", + output_name="tracked", + desc="output data file", + ) class StreamlineTrackOutputSpec(TraitedSpec): - tracked = File( - exists=True, desc='output file containing reconstructed tracts') + tracked = File(exists=True, desc="output file containing reconstructed tracts") class StreamlineTrack(CommandLine): @@ -396,16 +387,17 @@ class StreamlineTrack(CommandLine): ------- >>> import nipype.interfaces.mrtrix as mrt - >>> strack = mrt.StreamlineTrack() - >>> strack.inputs.inputmodel = 'SD_PROB' - >>> strack.inputs.in_file = 'data.Bfloat' - >>> strack.inputs.seed_file = 'seed_mask.nii' - >>> strack.inputs.mask_file = 'mask.nii' - >>> strack.cmdline + >>> streamtrack = mrt.StreamlineTrack() + >>> streamtrack.inputs.inputmodel = 'SD_PROB' + >>> streamtrack.inputs.in_file = 'data.Bfloat' + >>> streamtrack.inputs.seed_file = 'seed_mask.nii' + >>> streamtrack.inputs.mask_file = 'mask.nii' + >>> streamtrack.cmdline 'streamtrack -mask mask.nii -seed seed_mask.nii SD_PROB data.Bfloat data_tracked.tck' - >>> strack.run() # doctest: +SKIP + >>> streamtrack.run() # doctest: +SKIP """ - _cmd = 'streamtrack' + + _cmd = "streamtrack" input_spec = StreamlineTrackInputSpec output_spec = StreamlineTrackOutputSpec @@ -413,11 +405,10 @@ class StreamlineTrack(CommandLine): class DiffusionTensorStreamlineTrackInputSpec(StreamlineTrackInputSpec): gradient_encoding_file = File( exists=True, - argstr='-grad %s', + argstr="-grad %s", mandatory=True, position=-2, - desc= - 'Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix' + desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). 
See FSL2MRTrix", ) @@ -431,26 +422,25 @@ class DiffusionTensorStreamlineTrack(StreamlineTrack): ------- >>> import nipype.interfaces.mrtrix as mrt - >>> dtstrack = mrt.DiffusionTensorStreamlineTrack() - >>> dtstrack.inputs.in_file = 'data.Bfloat' - >>> dtstrack.inputs.seed_file = 'seed_mask.nii' - >>> dtstrack.run() # doctest: +SKIP + >>> dtstreamtrack = mrt.DiffusionTensorStreamlineTrack() + >>> dtstreamtrack.inputs.in_file = 'data.Bfloat' + >>> dtstreamtrack.inputs.seed_file = 'seed_mask.nii' + >>> dtstreamtrack.run() # doctest: +SKIP """ input_spec = DiffusionTensorStreamlineTrackInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "DT_STREAM" - return super(DiffusionTensorStreamlineTrack, self).__init__( - command, **inputs) + return super().__init__(command, **inputs) class ProbabilisticSphericallyDeconvolutedStreamlineTrackInputSpec( - StreamlineTrackInputSpec): + StreamlineTrackInputSpec +): maximum_number_of_trials = traits.Int( - argstr='-trials %s', - desc= - "Set the maximum number of sampling trials at each point (only used for probabilistic tracking)." + argstr="-trials %s", + desc="Set the maximum number of sampling trials at each point (only used for probabilistic tracking).", ) @@ -471,12 +461,12 @@ class ProbabilisticSphericallyDeconvolutedStreamlineTrack(StreamlineTrack): >>> sdprobtrack.inputs.seed_file = 'seed_mask.nii' >>> sdprobtrack.run() # doctest: +SKIP """ + input_spec = ProbabilisticSphericallyDeconvolutedStreamlineTrackInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "SD_PROB" - return super(ProbabilisticSphericallyDeconvolutedStreamlineTrack, - self).__init__(command, **inputs) + return super().__init__(command, **inputs) class SphericallyDeconvolutedStreamlineTrack(StreamlineTrack): @@ -496,9 +486,9 @@ class SphericallyDeconvolutedStreamlineTrack(StreamlineTrack): >>> sdtrack.inputs.seed_file = 'seed_mask.nii' >>> sdtrack.run() # doctest: +SKIP """ + input_spec = StreamlineTrackInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "SD_STREAM" - return super(SphericallyDeconvolutedStreamlineTrack, self).__init__( - command, **inputs) + return super().__init__(command, **inputs) diff --git a/nipype/interfaces/mrtrix3/__init__.py b/nipype/interfaces/mrtrix3/__init__.py index 507380c30e..3bd9f55250 100644 --- a/nipype/interfaces/mrtrix3/__init__.py +++ b/nipype/interfaces/mrtrix3/__init__.py @@ -1,12 +1,36 @@ -from __future__ import absolute_import # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- - -from .utils import (Mesh2PVE, Generate5tt, BrainMask, TensorMetrics, - ComputeTDI, TCK2VTK, MRMath, MRConvert, DWIExtract) -from .preprocess import (ResponseSD, ACTPrepareFSL, ReplaceFSwithFIRST, - DWIDenoise) +"""MRTrix3 provides software tools to perform various types of diffusion MRI analyses.""" +from .connectivity import BuildConnectome, LabelConfig, LabelConvert +from .preprocess import ( + ACTPrepareFSL, + DWIBiasCorrect, + DWIDenoise, + DWIPreproc, + MRDeGibbs, + ReplaceFSwithFIRST, + ResponseSD, +) +from .reconst import ConstrainedSphericalDeconvolution, EstimateFOD, FitTensor from .tracking import Tractography -from .reconst import FitTensor, EstimateFOD -from .connectivity import LabelConfig, LabelConvert, BuildConnectome +from .utils import ( + TCK2VTK, + BrainMask, + ComputeTDI, + DWIExtract, + Generate5tt, + Mesh2PVE, + MRCat, + MRConvert, + MRMath, + MRResize, + MRTransform, + SH2Amp, + 
SHConv, + TensorMetrics, + TransformFSLConvert, + MaskFilter, + MTNormalise, + Generate5tt2gmwmi, +) diff --git a/nipype/interfaces/mrtrix3/base.py b/nipype/interfaces/mrtrix3/base.py index b4e050292e..cd5d492fcf 100644 --- a/nipype/interfaces/mrtrix3/base.py +++ b/nipype/interfaces/mrtrix3/base.py @@ -1,111 +1,136 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from ... import logging, LooseVersion -from ...utils.filemanip import which -from ..base import (CommandLineInputSpec, CommandLine, traits, File, isdefined, PackageInfo) -iflogger = logging.getLogger('nipype.interface') +from looseversion import LooseVersion + +from ... import logging +from ..base import ( + CommandLineInputSpec, + CommandLine, + traits, + Tuple, + File, + isdefined, + PackageInfo, +) + +iflogger = logging.getLogger("nipype.interface") class Info(PackageInfo): - version_cmd = 'mrconvert --version' + version_cmd = "mrconvert --version" @staticmethod def parse_version(raw_info): # info is like: "== mrconvert 0.3.15-githash" for line in raw_info.splitlines(): - if line.startswith('== mrconvert '): + if line.startswith("== mrconvert "): v_string = line.split()[2] break else: return None # -githash may or may not be appended - v_string = v_string.split('-')[0] + v_string = v_string.split("-")[0] - return '.'.join(v_string.split('.')[:3]) + return ".".join(v_string.split(".")[:3]) @classmethod def looseversion(cls): - """ Return a comparable version object + """Return a comparable version object If no version found, use LooseVersion('0.0.0') """ - return LooseVersion(cls.version() or '0.0.0') + return LooseVersion(cls.version() or "0.0.0") class MRTrix3BaseInputSpec(CommandLineInputSpec): nthreads = traits.Int( - argstr='-nthreads %d', - desc='number of threads. if zero, the number' - ' of available cpus will be used', - nohash=True) + argstr="-nthreads %d", + desc="number of threads. if zero, the number of available cpus will be used", + nohash=True, + ) # DW gradient table import options grad_file = File( exists=True, - argstr='-grad %s', - desc='dw gradient scheme (MRTrix format') - grad_fsl = traits.Tuple( + argstr="-grad %s", + desc="dw gradient scheme (MRTrix format)", + xor=["grad_fsl"], + ) + grad_fsl = Tuple( File(exists=True), File(exists=True), - argstr='-fslgrad %s %s', - desc='(bvecs, bvals) dw gradient scheme (FSL format') + argstr="-fslgrad %s %s", + desc="(bvecs, bvals) dw gradient scheme (FSL format)", + xor=["grad_file"], + ) bval_scale = traits.Enum( - 'yes', - 'no', - argstr='-bvalue_scaling %s', - desc='specifies whether the b - values should be scaled by the square' - ' of the corresponding DW gradient norm, as often required for ' - 'multishell or DSI DW acquisition schemes. The default action ' - 'can also be set in the MRtrix config file, under the ' - 'BValueScaling entry. Valid choices are yes / no, true / ' - 'false, 0 / 1 (default: true).') + "yes", + "no", + argstr="-bvalue_scaling %s", + desc="specifies whether the b - values should be scaled by the square" + " of the corresponding DW gradient norm, as often required for " + "multishell or DSI DW acquisition schemes. The default action " + "can also be set in the MRtrix config file, under the " + "BValueScaling entry. 
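
# A minimal sketch (not part of the upstream patch) of the new 'xor' guard:
# 'grad_file' and 'grad_fsl' are now mutually exclusive, so traits raises a
# validation error if both are set. File names are hypothetical.
import nipype.interfaces.mrtrix3 as mrt

mask = mrt.BrainMask()
mask.inputs.in_file = "dwi.mif"
mask.inputs.grad_fsl = ("bvecs", "bvals")  # FSL-style (bvecs, bvals) pair ...
# mask.inputs.grad_file = "grad.b"         # ... or an MRtrix table, not both
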
Valid choices are yes / no, true / " + "false, 0 / 1 (default: true).", + ) in_bvec = File( - exists=True, argstr='-fslgrad %s %s', desc='bvecs file in FSL format') - in_bval = File(exists=True, desc='bvals file in FSL format') + exists=True, argstr="-fslgrad %s %s", desc="bvecs file in FSL format" + ) + in_bval = File(exists=True, desc="bvals file in FSL format") + out_bvec = File( + exists=False, + argstr="-export_grad_fsl %s %s", + desc="export bvec file in FSL format", + ) + out_bval = File( + exists=False, + desc="export bval file in FSL format", + ) class MRTrix3Base(CommandLine): def _format_arg(self, name, trait_spec, value): - if name == 'nthreads' and value == 0: + if name == "nthreads" and value == 0: value = 1 try: from multiprocessing import cpu_count + value = cpu_count() except: - iflogger.warning('Number of threads could not be computed') - pass + iflogger.warning("Number of threads could not be computed") return trait_spec.argstr % value - if name == 'in_bvec': + if name == "in_bvec": return trait_spec.argstr % (value, self.inputs.in_bval) + if name == "out_bvec": + return trait_spec.argstr % (value, self.inputs.out_bval) - return super(MRTrix3Base, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): if skip is None: skip = [] try: - if (isdefined(self.inputs.grad_file) - or isdefined(self.inputs.grad_fsl)): - skip += ['in_bvec', 'in_bval'] + if isdefined(self.inputs.grad_file) or isdefined(self.inputs.grad_fsl): + skip += ["in_bvec", "in_bval"] is_bvec = isdefined(self.inputs.in_bvec) is_bval = isdefined(self.inputs.in_bval) if is_bvec or is_bval: if not is_bvec or not is_bval: - raise RuntimeError('If using bvecs and bvals inputs, both' - 'should be defined') - skip += ['in_bval'] + raise RuntimeError( + "If using bvecs and bvals inputs, both should be defined" + ) + skip += ["in_bval"] except AttributeError: pass - return super(MRTrix3Base, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) @property def version(self): diff --git a/nipype/interfaces/mrtrix3/connectivity.py b/nipype/interfaces/mrtrix3/connectivity.py index 0e246c796b..a3dde940dc 100644 --- a/nipype/interfaces/mrtrix3/connectivity.py +++ b/nipype/interfaces/mrtrix3/connectivity.py @@ -1,98 +1,99 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os import os.path as op -from ..base import (CommandLineInputSpec, traits, TraitedSpec, File, isdefined) +from ..base import CommandLineInputSpec, traits, TraitedSpec, File, isdefined from .base import MRTrix3Base class BuildConnectomeInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-3, - desc='input tractography') - in_parc = File( - exists=True, argstr='%s', position=-2, desc='parcellation file') + exists=True, argstr="%s", mandatory=True, position=-3, desc="input tractography" + ) + in_parc = File(exists=True, argstr="%s", position=-2, desc="parcellation file") out_file = File( - 'connectome.csv', - argstr='%s', + "connectome.csv", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output file after processing') + desc="output file after processing", + ) nthreads = traits.Int( - argstr='-nthreads %d', - desc='number of threads. 
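
# A minimal sketch (not part of the upstream patch) of the _format_arg logic
# above: 'in_bvec' is paired with 'in_bval' into one '-fslgrad' option, and
# 'nthreads = 0' expands to the detected CPU count. File names are hypothetical.
import nipype.interfaces.mrtrix3 as mrt

dwiextract = mrt.DWIExtract()
dwiextract.inputs.in_file = "dwi.mif"
dwiextract.inputs.bzero = True       # keep only the b=0 volumes
dwiextract.inputs.in_bvec = "bvecs"  # rendered as '-fslgrad bvecs bvals'
dwiextract.inputs.in_bval = "bvals"
dwiextract.inputs.nthreads = 0       # becomes '-nthreads <cpu_count>'
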
if zero, the number' - ' of available cpus will be used', - nohash=True) + argstr="-nthreads %d", + desc="number of threads. if zero, the number of available cpus will be used", + nohash=True, + ) vox_lookup = traits.Bool( - argstr='-assignment_voxel_lookup', - desc='use a simple voxel lookup value at each streamline endpoint') + argstr="-assignment_voxel_lookup", + desc="use a simple voxel lookup value at each streamline endpoint", + ) search_radius = traits.Float( - argstr='-assignment_radial_search %f', - desc='perform a radial search from each streamline endpoint to locate ' - 'the nearest node. Argument is the maximum radius in mm; if no node is' - ' found within this radius, the streamline endpoint is not assigned to' - ' any node.') + argstr="-assignment_radial_search %f", + desc="perform a radial search from each streamline endpoint to locate " + "the nearest node. Argument is the maximum radius in mm; if no node is" + " found within this radius, the streamline endpoint is not assigned to" + " any node.", + ) search_reverse = traits.Float( - argstr='-assignment_reverse_search %f', - desc='traverse from each streamline endpoint inwards along the ' - 'streamline, in search of the last node traversed by the streamline. ' - 'Argument is the maximum traversal length in mm (set to 0 to allow ' - 'search to continue to the streamline midpoint).') + argstr="-assignment_reverse_search %f", + desc="traverse from each streamline endpoint inwards along the " + "streamline, in search of the last node traversed by the streamline. " + "Argument is the maximum traversal length in mm (set to 0 to allow " + "search to continue to the streamline midpoint).", + ) search_forward = traits.Float( - argstr='-assignment_forward_search %f', - desc='project the streamline forwards from the endpoint in search of a' - 'parcellation node voxel. Argument is the maximum traversal length in ' - 'mm.') + argstr="-assignment_forward_search %f", + desc="project the streamline forwards from the endpoint in search of a" + "parcellation node voxel. Argument is the maximum traversal length in " + "mm.", + ) metric = traits.Enum( - 'count', - 'meanlength', - 'invlength', - 'invnodevolume', - 'mean_scalar', - 'invlength_invnodevolume', - argstr='-metric %s', - desc='specify the edge' - ' weight metric') + "count", + "meanlength", + "invlength", + "invnodevolume", + "mean_scalar", + "invlength_invnodevolume", + argstr="-metric %s", + desc="specify the edge weight metric", + ) in_scalar = File( exists=True, - argstr='-image %s', - desc='provide the associated image ' - 'for the mean_scalar metric') + argstr="-image %s", + desc="provide the associated image for the mean_scalar metric", + ) in_weights = File( exists=True, - argstr='-tck_weights_in %s', - desc='specify a text scalar ' - 'file containing the streamline weights') + argstr="-tck_weights_in %s", + desc="specify a text scalar file containing the streamline weights", + ) keep_unassigned = traits.Bool( - argstr='-keep_unassigned', - desc='By default, the program discards the' - ' information regarding those streamlines that are not successfully ' - 'assigned to a node pair. Set this option to keep these values (will ' - 'be the first row/column in the output matrix)') + argstr="-keep_unassigned", + desc="By default, the program discards the" + " information regarding those streamlines that are not successfully " + "assigned to a node pair. 
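
# A minimal sketch (not part of the upstream patch) combining the endpoint
# assignment options defined above; file names are hypothetical.
import nipype.interfaces.mrtrix3 as mrt

mat = mrt.BuildConnectome()
mat.inputs.in_file = "tracks.tck"  # streamlines, e.g. from tckgen
mat.inputs.in_parc = "nodes.mif"   # labelled parcellation image
mat.inputs.search_radius = 2.0     # assign endpoints to nodes within 2 mm
mat.inputs.zero_diagonal = True    # discard self-connections
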
Set this option to keep these values (will " + "be the first row/column in the output matrix)", + ) zero_diagonal = traits.Bool( - argstr='-zero_diagonal', - desc='set all diagonal entries in the matrix ' - 'to zero (these represent streamlines that connect to the same node at' - ' both ends)') + argstr="-zero_diagonal", + desc="set all diagonal entries in the matrix " + "to zero (these represent streamlines that connect to the same node at" + " both ends)", + ) class BuildConnectomeOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output response file') + out_file = File(exists=True, desc="the output response file") class BuildConnectome(MRTrix3Base): @@ -112,69 +113,74 @@ class BuildConnectome(MRTrix3Base): >>> mat.run() # doctest: +SKIP """ - _cmd = 'tck2connectome' + _cmd = "tck2connectome" input_spec = BuildConnectomeInputSpec output_spec = BuildConnectomeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class LabelConfigInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-3, - desc='input anatomical image') + desc="input anatomical image", + ) in_config = File( - exists=True, - argstr='%s', - position=-2, - desc='connectome configuration file') + exists=True, argstr="%s", position=-2, desc="connectome configuration file" + ) out_file = File( - 'parcellation.mif', - argstr='%s', + "parcellation.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output file after processing') + desc="output file after processing", + ) lut_basic = File( - argstr='-lut_basic %s', - desc='get information from ' - 'a basic lookup table consisting of index / name pairs') + argstr="-lut_basic %s", + desc="get information from " + "a basic lookup table consisting of index / name pairs", + ) lut_fs = File( - argstr='-lut_freesurfer %s', - desc='get information from ' + argstr="-lut_freesurfer %s", + desc="get information from " 'a FreeSurfer lookup table(typically "FreeSurferColorLUT' - '.txt")') + '.txt")', + ) lut_aal = File( - argstr='-lut_aal %s', - desc='get information from the AAL ' - 'lookup table (typically "ROI_MNI_V4.txt")') + argstr="-lut_aal %s", + desc="get information from the AAL " + 'lookup table (typically "ROI_MNI_V4.txt")', + ) lut_itksnap = File( - argstr='-lut_itksnap %s', - desc='get information from an' - ' ITK - SNAP lookup table(this includes the IIT atlas ' - 'file "LUT_GM.txt")') + argstr="-lut_itksnap %s", + desc="get information from an" + " ITK - SNAP lookup table(this includes the IIT atlas " + 'file "LUT_GM.txt")', + ) spine = File( - argstr='-spine %s', - desc='provide a manually-defined ' - 'segmentation of the base of the spine where the streamlines' - ' terminate, so that this can become a node in the connection' - ' matrix.') + argstr="-spine %s", + desc="provide a manually-defined " + "segmentation of the base of the spine where the streamlines" + " terminate, so that this can become a node in the connection" + " matrix.", + ) nthreads = traits.Int( - argstr='-nthreads %d', - desc='number of threads. if zero, the number' - ' of available cpus will be used', - nohash=True) + argstr="-nthreads %d", + desc="number of threads. 
if zero, the number of available cpus will be used", + nohash=True, + ) class LabelConfigOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output response file') + out_file = File(exists=True, desc="the output response file") class LabelConfig(MRTrix3Base): @@ -193,7 +199,7 @@ class LabelConfig(MRTrix3Base): >>> labels.run() # doctest: +SKIP """ - _cmd = 'labelconfig' + _cmd = "labelconfig" input_spec = LabelConfigInputSpec output_spec = LabelConfigOutputSpec @@ -202,66 +208,70 @@ def _parse_inputs(self, skip=None): skip = [] if not isdefined(self.inputs.in_config): - from distutils.spawn import find_executable - path = find_executable(self._cmd) + from shutil import which + + path = which(self._cmd) if path is None: - path = os.getenv(MRTRIX3_HOME, '/opt/mrtrix3') + path = os.getenv("MRTRIX3_HOME", "/opt/mrtrix3") else: path = op.dirname(op.dirname(path)) self.inputs.in_config = op.join( - path, 'src/dwi/tractography/connectomics/' - 'example_configs/fs_default.txt') + path, + "src/dwi/tractography/connectomics/example_configs/fs_default.txt", + ) - return super(LabelConfig, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class LabelConvertInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-4, - desc='input anatomical image') + desc="input anatomical image", + ) in_lut = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-3, - desc='get information from ' - 'a basic lookup table consisting of index / name pairs') + desc="get information from " + "a basic lookup table consisting of index / name pairs", + ) in_config = File( - exists=True, - argstr='%s', - position=-2, - desc='connectome configuration file') + exists=True, argstr="%s", position=-2, desc="connectome configuration file" + ) out_file = File( - 'parcellation.mif', - argstr='%s', + "parcellation.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output file after processing') + desc="output file after processing", + ) spine = File( - argstr='-spine %s', - desc='provide a manually-defined ' - 'segmentation of the base of the spine where the streamlines' - ' terminate, so that this can become a node in the connection' - ' matrix.') + argstr="-spine %s", + desc="provide a manually-defined " + "segmentation of the base of the spine where the streamlines" + " terminate, so that this can become a node in the connection" + " matrix.", + ) num_threads = traits.Int( - argstr='-nthreads %d', - desc='number of threads. if zero, the number' - ' of available cpus will be used', - nohash=True) + argstr="-nthreads %d", + desc="number of threads. 
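
# A minimal sketch (not part of the upstream patch) of LabelConvert, which
# remaps FreeSurfer label values onto the compact indexing a connectome
# expects; file names are hypothetical.
import nipype.interfaces.mrtrix3 as mrt

labels = mrt.LabelConvert()
labels.inputs.in_file = "aparc+aseg.nii"         # FreeSurfer parcellation
labels.inputs.in_lut = "FreeSurferColorLUT.txt"  # index/name lookup table
# If in_config is left unset, _parse_inputs falls back to the fs_default.txt
# shipped with MRtrix (located via the executable path or MRTRIX3_HOME).
labels.inputs.in_config = "fs_default.txt"
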
if zero, the number of available cpus will be used", + nohash=True, + ) class LabelConvertOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output response file') + out_file = File(exists=True, desc="the output response file") class LabelConvert(MRTrix3Base): @@ -281,7 +291,7 @@ class LabelConvert(MRTrix3Base): >>> labels.run() # doctest: +SKIP """ - _cmd = 'labelconvert' + _cmd = "labelconvert" input_spec = LabelConvertInputSpec output_spec = LabelConvertOutputSpec @@ -291,19 +301,21 @@ def _parse_inputs(self, skip=None): if not isdefined(self.inputs.in_config): from nipype.utils.filemanip import which + path = which(self._cmd) if path is None: - path = os.getenv(MRTRIX3_HOME, '/opt/mrtrix3') + path = os.getenv("MRTRIX3_HOME", "/opt/mrtrix3") else: path = op.dirname(op.dirname(path)) self.inputs.in_config = op.join( - path, 'src/dwi/tractography/connectomics/' - 'example_configs/fs_default.txt') + path, + "src/dwi/tractography/connectomics/example_configs/fs_default.txt", + ) - return super(LabelConvert, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs diff --git a/nipype/interfaces/mrtrix3/preprocess.py b/nipype/interfaces/mrtrix3/preprocess.py index fc3559c918..57cdad0168 100644 --- a/nipype/interfaces/mrtrix3/preprocess.py +++ b/nipype/interfaces/mrtrix3/preprocess.py @@ -1,44 +1,59 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os.path as op -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File, isdefined, Undefined, InputMultiObject) -from .base import MRTrix3BaseInputSpec, MRTrix3Base +from ..base import ( + CommandLine, + CommandLineInputSpec, + Directory, + File, + InputMultiObject, + TraitedSpec, + Undefined, + traits, + Tuple, +) +from .base import MRTrix3Base, MRTrix3BaseInputSpec class DWIDenoiseInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", position=-2, mandatory=True, - desc='input DWI image') - mask = File( - exists=True, - argstr='-mask %s', - position=1, - desc='mask image') - extent = traits.Tuple((traits.Int, traits.Int, traits.Int), - argstr='-extent %d,%d,%d', - desc='set the window size of the denoising filter. (default = 5,5,5)') + desc="input DWI image", + ) + mask = File(exists=True, argstr="-mask %s", position=1, desc="mask image") + extent = Tuple( + (traits.Int, traits.Int, traits.Int), + argstr="-extent %d,%d,%d", + desc="set the window size of the denoising filter. 
(default = 5,5,5)", + ) noise = File( - argstr='-noise %s', - desc='noise map') - out_file = File(name_template='%s_denoised', - name_source='in_file', + argstr="-noise %s", + name_template="%s_noise", + name_source="in_file", keep_extension=True, + desc="the output noise map", + ) + out_file = File( argstr="%s", position=-1, - desc="the output denoised DWI image") + name_template="%s_denoised", + name_source="in_file", + keep_extension=True, + desc="the output denoised DWI image", + ) + class DWIDenoiseOutputSpec(TraitedSpec): + noise = File(desc="the output noise map", exists=True) out_file = File(desc="the output denoised DWI image", exists=True) + class DWIDenoise(MRTrix3Base): """ Denoise DWI data and estimate the noise level based on the optimal @@ -65,59 +80,385 @@ class DWIDenoise(MRTrix3Base): >>> denoise = mrt.DWIDenoise() >>> denoise.inputs.in_file = 'dwi.mif' >>> denoise.inputs.mask = 'mask.mif' + >>> denoise.inputs.noise = 'noise.mif' >>> denoise.cmdline # doctest: +ELLIPSIS - 'dwidenoise -mask mask.mif dwi.mif dwi_denoised.mif' + 'dwidenoise -mask mask.mif -noise noise.mif dwi.mif dwi_denoised.mif' >>> denoise.run() # doctest: +SKIP """ - _cmd = 'dwidenoise' + _cmd = "dwidenoise" input_spec = DWIDenoiseInputSpec output_spec = DWIDenoiseOutputSpec +class MRDeGibbsInputSpec(MRTrix3BaseInputSpec): + in_file = File( + exists=True, + argstr="%s", + position=-2, + mandatory=True, + desc="input DWI image", + ) + axes = traits.ListInt( + default_value=[0, 1], + usedefault=True, + sep=",", + minlen=2, + maxlen=2, + argstr="-axes %s", + desc="indicate the plane in which the data was acquired (axial = 0,1; " + "coronal = 0,2; sagittal = 1,2", + ) + nshifts = traits.Int( + default_value=20, + usedefault=True, + argstr="-nshifts %d", + desc="discretization of subpixel spacing (default = 20)", + ) + minW = traits.Int( + default_value=1, + usedefault=True, + argstr="-minW %d", + desc="left border of window used for total variation (TV) computation " + "(default = 1)", + ) + maxW = traits.Int( + default_value=3, + usedefault=True, + argstr="-maxW %d", + desc="right border of window used for total variation (TV) computation " + "(default = 3)", + ) + out_file = File( + name_template="%s_unr", + name_source="in_file", + keep_extension=True, + argstr="%s", + position=-1, + desc="the output unringed DWI image", + ) + + +class MRDeGibbsOutputSpec(TraitedSpec): + out_file = File(desc="the output unringed DWI image", exists=True) + + +class MRDeGibbs(MRTrix3Base): + """ + Remove Gibbs ringing artifacts. + + This application attempts to remove Gibbs ringing artefacts from MRI images + using the method of local subvoxel-shifts proposed by Kellner et al. + + This command is designed to run on data directly after it has been + reconstructed by the scanner, before any interpolation of any kind has + taken place. You should not run this command after any form of motion + correction (e.g. not after dwipreproc). Similarly, if you intend running + dwidenoise, you should run this command afterwards, since it has the + potential to alter the noise structure, which would impact on dwidenoise's + performance. + + Note that this method is designed to work on images acquired with full + k-space coverage. Running this method on partial Fourier ('half-scan') data + may lead to suboptimal and/or biased results, as noted in the original + reference below. 
There is currently no means of dealing with this; users + should exercise caution when using this method on partial Fourier data, and + inspect its output for any obvious artefacts. + + For more information, see + + + Example + ------- + + >>> import nipype.interfaces.mrtrix3 as mrt + >>> unring = mrt.MRDeGibbs() + >>> unring.inputs.in_file = 'dwi.mif' + >>> unring.cmdline + 'mrdegibbs -axes 0,1 -maxW 3 -minW 1 -nshifts 20 dwi.mif dwi_unr.mif' + >>> unring.run() # doctest: +SKIP + """ + + _cmd = "mrdegibbs" + input_spec = MRDeGibbsInputSpec + output_spec = MRDeGibbsOutputSpec + + +class DWIBiasCorrectInputSpec(MRTrix3BaseInputSpec): + in_file = File( + exists=True, + argstr="%s", + position=-2, + mandatory=True, + desc="input DWI image", + ) + in_mask = File(argstr="-mask %s", desc="input mask image for bias field estimation") + use_ants = traits.Bool( + argstr="ants", + mandatory=True, + desc="use ANTS N4 to estimate the inhomogeneity field", + position=0, + xor=["use_fsl"], + ) + use_fsl = traits.Bool( + argstr="fsl", + mandatory=True, + desc="use FSL FAST to estimate the inhomogeneity field", + position=0, + xor=["use_ants"], + ) + bias = File(argstr="-bias %s", desc="bias field") + out_file = File( + name_template="%s_biascorr", + name_source="in_file", + keep_extension=True, + argstr="%s", + position=-1, + desc="the output bias corrected DWI image", + genfile=True, + ) + + +class DWIBiasCorrectOutputSpec(TraitedSpec): + bias = File(desc="the output bias field", exists=True) + out_file = File(desc="the output bias corrected DWI image", exists=True) + + +class DWIBiasCorrect(MRTrix3Base): + """ + Perform B1 field inhomogeneity correction for a DWI volume series. + + For more information, see + + + Example + ------- + + >>> import nipype.interfaces.mrtrix3 as mrt + >>> bias_correct = mrt.DWIBiasCorrect() + >>> bias_correct.inputs.in_file = 'dwi.mif' + >>> bias_correct.inputs.use_ants = True + >>> bias_correct.cmdline + 'dwibiascorrect ants dwi.mif dwi_biascorr.mif' + >>> bias_correct.run() # doctest: +SKIP + """ + + _cmd = "dwibiascorrect" + input_spec = DWIBiasCorrectInputSpec + output_spec = DWIBiasCorrectOutputSpec + + def _format_arg(self, name, trait_spec, value): + if name in ("use_ants", "use_fsl"): + ver = self.version + # Changed in version 3.0, after release candidates + if ver is not None and (ver[0] < "3" or ver.startswith("3.0_RC")): + return f"-{trait_spec.argstr}" + return super()._format_arg(name, trait_spec, value) + + def _list_outputs(self): + outputs = self.output_spec().get() + if self.inputs.out_file: + outputs["out_file"] = op.abspath(self.inputs.out_file) + if self.inputs.bias: + outputs["bias"] = op.abspath(self.inputs.bias) + return outputs + + +class DWIPreprocInputSpec(MRTrix3BaseInputSpec): + in_file = File( + exists=True, + argstr="%s", + position=0, + mandatory=True, + desc="input DWI image", + ) + out_file = File( + "preproc.mif", + argstr="%s", + mandatory=True, + position=1, + usedefault=True, + desc="output file after preprocessing", + ) + rpe_options = traits.Enum( + "none", + "pair", + "all", + "header", + argstr="-rpe_%s", + position=2, + mandatory=True, + desc='Specify acquisition phase-encoding design. 
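
# A minimal sketch (not part of the upstream patch) of DWIBiasCorrect:
# 'use_ants' and 'use_fsl' are mutually exclusive positional algorithm
# selectors, re-emitted as '-ants'/'-fsl' flags on MRtrix releases older
# than 3.0 (see _format_arg above). File names are hypothetical.
import nipype.interfaces.mrtrix3 as mrt

bias = mrt.DWIBiasCorrect()
bias.inputs.in_file = "dwi.mif"
bias.inputs.use_ants = True    # N4 inhomogeneity field estimation via ANTs
bias.inputs.bias = "bias.mif"  # optionally export the estimated field
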
"none" for no reversed phase-encoding image, "all" for all DWIs have opposing phase-encoding acquisition, "pair" for using a pair of b0 volumes for inhomogeneity field estimation only, and "header" for phase-encoding information can be found in the image header(s)', + ) + pe_dir = traits.Str( + argstr="-pe_dir %s", + desc="Specify the phase encoding direction of the input series, can be a signed axis number (e.g. -0, 1, +2), an axis designator (e.g. RL, PA, IS), or NIfTI axis codes (e.g. i-, j, k)", + ) + ro_time = traits.Float( + argstr="-readout_time %f", + desc="Total readout time of input series (in seconds)", + ) + in_epi = File( + exists=True, + argstr="-se_epi %s", + desc="Provide an additional image series consisting of spin-echo EPI images, which is to be used exclusively by topup for estimating the inhomogeneity field (i.e. it will not form part of the output image series)", + ) + align_seepi = traits.Bool( + argstr="-align_seepi", + desc="Achieve alignment between the SE-EPI images used for inhomogeneity field estimation, and the DWIs", + ) + json_import = File( + exists=True, + argstr="-json_import %s", + desc="Import image header information from an associated JSON file (may be necessary to determine phase encoding information)", + ) + topup_options = traits.Str( + argstr='-topup_options "%s"', + desc="Manually provide additional command-line options to the topup command", + ) + eddy_options = traits.Str( + argstr='-eddy_options "%s"', + desc="Manually provide additional command-line options to the eddy command", + ) + eddy_mask = File( + exists=True, + argstr="-eddy_mask %s", + desc="Provide a processing mask to use for eddy, instead of having dwifslpreproc generate one internally using dwi2mask", + ) + eddy_slspec = File( + exists=True, + argstr="-eddy_slspec %s", + desc="Provide a file containing slice groupings for eddy's slice-to-volume registration", + ) + eddyqc_text = Directory( + exists=False, + argstr="-eddyqc_text %s", + desc="Copy the various text-based statistical outputs generated by eddy, and the output of eddy_qc (if installed), into an output directory", + ) + eddyqc_all = Directory( + exists=False, + argstr="-eddyqc_all %s", + desc="Copy ALL outputs generated by eddy (including images), and the output of eddy_qc (if installed), into an output directory", + ) + out_grad_mrtrix = File( + "grad.b", + argstr="-export_grad_mrtrix %s", + desc="export new gradient files in mrtrix format", + ) + out_grad_fsl = Tuple( + File("grad.bvecs", desc="bvecs"), + File("grad.bvals", desc="bvals"), + argstr="-export_grad_fsl %s, %s", + desc="export gradient files in FSL format", + ) + + +class DWIPreprocOutputSpec(TraitedSpec): + out_file = File(argstr="%s", desc="output preprocessed image series") + out_grad_mrtrix = File( + "grad.b", + argstr="%s", + usedefault=True, + desc="preprocessed gradient file in mrtrix3 format", + ) + out_fsl_bvec = File( + "grad.bvecs", + argstr="%s", + usedefault=True, + desc="exported fsl gradient bvec file", + ) + out_fsl_bval = File( + "grad.bvals", + argstr="%s", + usedefault=True, + desc="exported fsl gradient bval file", + ) + + +class DWIPreproc(MRTrix3Base): + """ + Perform diffusion image pre-processing using FSL's eddy tool; including inhomogeneity distortion correction using FSL's topup tool if possible + + For more information, see + + + Example + ------- + + >>> import nipype.interfaces.mrtrix3 as mrt + >>> preproc = mrt.DWIPreproc() + >>> preproc.inputs.in_file = 'dwi.mif' + >>> preproc.inputs.rpe_options = 'none' + >>> 
preproc.inputs.out_file = "preproc.mif" + >>> preproc.inputs.eddy_options = '--slm=linear --repol' # linear second level model and replace outliers + >>> preproc.inputs.out_grad_mrtrix = "grad.b" # export final gradient table in MRtrix format + >>> preproc.inputs.ro_time = 0.165240 # 'TotalReadoutTime' in BIDS JSON metadata files + >>> preproc.inputs.pe_dir = 'j' # 'PhaseEncodingDirection' in BIDS JSON metadata files + >>> preproc.cmdline + 'dwifslpreproc dwi.mif preproc.mif -rpe_none -eddy_options "--slm=linear --repol" -export_grad_mrtrix grad.b -pe_dir j -readout_time 0.165240' + >>> preproc.run() # doctest: +SKIP + """ + + _cmd = "dwifslpreproc" + input_spec = DWIPreprocInputSpec + output_spec = DWIPreprocOutputSpec + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs["out_file"] = op.abspath(self.inputs.out_file) + if self.inputs.out_grad_mrtrix: + outputs["out_grad_mrtrix"] = op.abspath(self.inputs.out_grad_mrtrix) + if self.inputs.out_grad_fsl: + outputs["out_fsl_bvec"] = op.abspath(self.inputs.out_grad_fsl[0]) + outputs["out_fsl_bval"] = op.abspath(self.inputs.out_grad_fsl[1]) + + return outputs + + class ResponseSDInputSpec(MRTrix3BaseInputSpec): algorithm = traits.Enum( - 'msmt_5tt', - 'dhollander', - 'tournier', - 'tax', - argstr='%s', + "msmt_5tt", + "dhollander", + "tournier", + "tax", + argstr="%s", position=1, mandatory=True, - desc='response estimation algorithm (multi-tissue)') + desc="response estimation algorithm (multi-tissue)", + ) in_file = File( exists=True, - argstr='%s', + argstr="%s", position=-5, mandatory=True, - desc='input DWI image') - mtt_file = File(argstr='%s', position=-4, desc='input 5tt image') + desc="input DWI image", + ) + mtt_file = File(argstr="%s", position=-4, desc="input 5tt image") wm_file = File( - 'wm.txt', - argstr='%s', + "wm.txt", + argstr="%s", position=-3, usedefault=True, - desc='output WM response text file') - gm_file = File( - argstr='%s', position=-2, desc='output GM response text file') - csf_file = File( - argstr='%s', position=-1, desc='output CSF response text file') - in_mask = File( - exists=True, argstr='-mask %s', desc='provide initial mask image') + desc="output WM response text file", + ) + gm_file = File(argstr="%s", position=-2, desc="output GM response text file") + csf_file = File(argstr="%s", position=-1, desc="output CSF response text file") + in_mask = File(exists=True, argstr="-mask %s", desc="provide initial mask image") max_sh = InputMultiObject( traits.Int, - value=[8], - usedefault=True, - argstr='-lmax %s', - sep=',', - desc=('maximum harmonic degree of response function - single value for ' - 'single-shell response, list for multi-shell response')) + argstr="-lmax %s", + sep=",", + desc=( + "maximum harmonic degree of response function - single value for " + "single-shell response, list for multi-shell response" + ), + ) class ResponseSDOutputSpec(TraitedSpec): - wm_file = File(argstr='%s', desc='output WM response text file') - gm_file = File(argstr='%s', desc='output GM response text file') - csf_file = File(argstr='%s', desc='output CSF response text file') + wm_file = File(argstr="%s", desc="output WM response text file") + gm_file = File(argstr="%s", desc="output GM response text file") + csf_file = File(argstr="%s", desc="output CSF response text file") class ResponseSD(MRTrix3Base): @@ -133,7 +474,7 @@ class ResponseSD(MRTrix3Base): >>> resp.inputs.algorithm = 'tournier' >>> resp.inputs.grad_fsl = ('bvecs', 'bvals') >>> resp.cmdline # doctest: +ELLIPSIS - 'dwi2response tournier 
-fslgrad bvecs bvals -lmax 8 dwi.mif wm.txt' + 'dwi2response tournier -fslgrad bvecs bvals dwi.mif wm.txt' >>> resp.run() # doctest: +SKIP # We can also pass in multiple harmonic degrees in the case of multi-shell @@ -142,39 +483,41 @@ class ResponseSD(MRTrix3Base): 'dwi2response tournier -fslgrad bvecs bvals -lmax 6,8,10 dwi.mif wm.txt' """ - _cmd = 'dwi2response' + _cmd = "dwi2response" input_spec = ResponseSDInputSpec output_spec = ResponseSDOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['wm_file'] = op.abspath(self.inputs.wm_file) + outputs["wm_file"] = op.abspath(self.inputs.wm_file) if self.inputs.gm_file != Undefined: - outputs['gm_file'] = op.abspath(self.inputs.gm_file) + outputs["gm_file"] = op.abspath(self.inputs.gm_file) if self.inputs.csf_file != Undefined: - outputs['csf_file'] = op.abspath(self.inputs.csf_file) + outputs["csf_file"] = op.abspath(self.inputs.csf_file) return outputs class ACTPrepareFSLInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input anatomical image') + desc="input anatomical image", + ) out_file = File( - 'act_5tt.mif', - argstr='%s', + "act_5tt.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output file after processing') + desc="output file after processing", + ) class ACTPrepareFSLOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output response file') + out_file = File(exists=True, desc="the output response file") class ACTPrepareFSL(CommandLine): @@ -193,46 +536,50 @@ class ACTPrepareFSL(CommandLine): >>> prep.run() # doctest: +SKIP """ - _cmd = 'act_anat_prepare_fsl' + _cmd = "act_anat_prepare_fsl" input_spec = ACTPrepareFSLInputSpec output_spec = ACTPrepareFSLOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class ReplaceFSwithFIRSTInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-4, - desc='input anatomical image') + desc="input anatomical image", + ) in_t1w = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-3, - desc='input T1 image') + desc="input T1 image", + ) in_config = File( exists=True, - argstr='%s', + argstr="%s", position=-2, - desc='connectome configuration file') + desc="connectome configuration file", + ) out_file = File( - 'aparc+first.mif', - argstr='%s', + "aparc+first.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output file after processing') + desc="output file after processing", + ) class ReplaceFSwithFIRSTOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output response file') + out_file = File(exists=True, desc="the output response file") class ReplaceFSwithFIRST(CommandLine): @@ -254,11 +601,11 @@ class ReplaceFSwithFIRST(CommandLine): >>> prep.run() # doctest: +SKIP """ - _cmd = 'fs_parc_replace_sgm_first' + _cmd = "fs_parc_replace_sgm_first" input_spec = ReplaceFSwithFIRSTInputSpec output_spec = ReplaceFSwithFIRSTOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs diff --git a/nipype/interfaces/mrtrix3/reconst.py b/nipype/interfaces/mrtrix3/reconst.py index f1e9c604d2..bfcd055186 100644 --- a/nipype/interfaces/mrtrix3/reconst.py +++ 
b/nipype/interfaces/mrtrix3/reconst.py @@ -1,53 +1,65 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os.path as op -from ..base import traits, TraitedSpec, File, Undefined +from ..base import traits, TraitedSpec, File, InputMultiObject, isdefined from .base import MRTrix3BaseInputSpec, MRTrix3Base class FitTensorInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input diffusion weighted images') + desc="input diffusion weighted images", + ) out_file = File( - 'dti.mif', - argstr='%s', + "dti.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='the output diffusion tensor image') + desc="the output diffusion tensor image", + ) # General options in_mask = File( exists=True, - argstr='-mask %s', - desc=('only perform computation within the specified ' - 'binary brain mask image')) + argstr="-mask %s", + desc=("only perform computation within the specified binary brain mask image"), + ) method = traits.Enum( - 'nonlinear', - 'loglinear', - 'sech', - 'rician', - argstr='-method %s', - desc=('select method used to perform the fitting')) + "nonlinear", + "loglinear", + "sech", + "rician", + argstr="-method %s", + desc=("select method used to perform the fitting"), + ) reg_term = traits.Float( - argstr='-regularisation %f', - max_ver='0.3.13', - desc=('specify the strength of the regularisation term on the ' - 'magnitude of the tensor elements (default = 5000). This ' - 'only applies to the non-linear methods')) + argstr="-regularisation %f", + max_ver="0.3.13", + desc=( + "specify the strength of the regularisation term on the " + "magnitude of the tensor elements (default = 5000). This " + "only applies to the non-linear methods" + ), + ) + predicted_signal = File( + argstr="-predicted_signal %s", + desc=( + "specify a file to contain the predicted signal from the tensor " + "fits. 
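
# A minimal sketch (not part of the upstream patch) of the new
# 'predicted_signal' export, which enables residual computation downstream;
# file names are hypothetical.
import nipype.interfaces.mrtrix3 as mrt

tsr = mrt.FitTensor()
tsr.inputs.in_file = "dwi.mif"
tsr.inputs.in_mask = "mask.mif"
tsr.inputs.predicted_signal = "predicted.mif"  # exposed as an extra output
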
This can be used to calculate the residual signal" + ), + ) class FitTensorOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output DTI file') + out_file = File(exists=True, desc="the output DTI file") + predicted_signal = File(desc="Predicted signal from fitted tensors") class FitTensor(MRTrix3Base): @@ -68,99 +80,180 @@ class FitTensor(MRTrix3Base): >>> tsr.run() # doctest: +SKIP """ - _cmd = 'dwi2tensor' + _cmd = "dwi2tensor" input_spec = FitTensorInputSpec output_spec = FitTensorOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) + if isdefined(self.inputs.predicted_signal): + outputs["predicted_signal"] = op.abspath(self.inputs.predicted_signal) return outputs class EstimateFODInputSpec(MRTrix3BaseInputSpec): algorithm = traits.Enum( - 'csd', - 'msmt_csd', - argstr='%s', + "csd", + "msmt_csd", + argstr="%s", position=-8, mandatory=True, - desc='FOD algorithm') + desc="FOD algorithm", + ) in_file = File( - exists=True, - argstr='%s', - position=-7, - mandatory=True, - desc='input DWI image') + exists=True, argstr="%s", position=-7, mandatory=True, desc="input DWI image" + ) wm_txt = File( - argstr='%s', position=-6, mandatory=True, desc='WM response text file') + argstr="%s", position=-6, mandatory=True, desc="WM response text file" + ) wm_odf = File( - 'wm.mif', - argstr='%s', + "wm.mif", + argstr="%s", position=-5, usedefault=True, mandatory=True, - desc='output WM ODF') - gm_txt = File(argstr='%s', position=-4, desc='GM response text file') - gm_odf = File('gm.mif', usedefault=True, argstr='%s', - position=-3, desc='output GM ODF') - csf_txt = File(argstr='%s', position=-2, desc='CSF response text file') - csf_odf = File('csf.mif', usedefault=True, argstr='%s', - position=-1, desc='output CSF ODF') - mask_file = File(exists=True, argstr='-mask %s', desc='mask image') + desc="output WM ODF", + ) + gm_txt = File(argstr="%s", position=-4, desc="GM response text file") + gm_odf = File( + "gm.mif", usedefault=True, argstr="%s", position=-3, desc="output GM ODF" + ) + csf_txt = File(argstr="%s", position=-2, desc="CSF response text file") + csf_odf = File( + "csf.mif", usedefault=True, argstr="%s", position=-1, desc="output CSF ODF" + ) + mask_file = File(exists=True, argstr="-mask %s", desc="mask image") # DW Shell selection options shell = traits.List( traits.Float, - sep=',', - argstr='-shell %s', - desc='specify one or more dw gradient shells') - max_sh = traits.Int( - 8, usedefault=True, - argstr='-lmax %d', - desc='maximum harmonic degree of response function') + sep=",", + argstr="-shell %s", + desc="specify one or more dw gradient shells", + ) + max_sh = InputMultiObject( + traits.Int, + value=[8], + usedefault=True, + argstr="-lmax %s", + sep=",", + desc=( + "maximum harmonic degree of response function - single value for single-shell response, list for multi-shell response" + ), + ) in_dirs = File( exists=True, - argstr='-directions %s', - desc=('specify the directions over which to apply the non-negativity ' - 'constraint (by default, the built-in 300 direction set is ' - 'used). These should be supplied as a text file containing the ' - '[ az el ] pairs for the directions.')) + argstr="-directions %s", + desc=( + "specify the directions over which to apply the non-negativity " + "constraint (by default, the built-in 300 direction set is " + "used). 
These should be supplied as a text file containing the "
+            "[ az el ] pairs for the directions."
+        ),
+    )
+    predicted_signal = File(
+        argstr="-predicted_signal %s",
+        desc=(
+            "specify a file to contain the predicted signal from the FOD "
+            "estimates. This can be used to calculate the residual signal. "
+            "Note that this is only valid if algorithm == 'msmt_csd'. "
+            "For single shell reconstructions use a combination of SHConv "
+            "and SH2Amp instead."
+        ),
+    )


 class EstimateFODOutputSpec(TraitedSpec):
-    wm_odf = File(argstr='%s', desc='output WM ODF')
-    gm_odf = File(argstr='%s', desc='output GM ODF')
-    csf_odf = File(argstr='%s', desc='output CSF ODF')
+    wm_odf = File(argstr="%s", desc="output WM ODF")
+    gm_odf = File(argstr="%s", desc="output GM ODF")
+    csf_odf = File(argstr="%s", desc="output CSF ODF")
+    predicted_signal = File(desc="output predicted signal")


 class EstimateFOD(MRTrix3Base):
     """
     Estimate fibre orientation distributions from diffusion data using spherical deconvolution

+    .. warning::
+
+       The CSD algorithm does not work as intended, but fixing it in this interface could break
+       existing workflows. This interface has been superseded by
+       :py:class:`.ConstrainedSphericalDeconvolution`.
+
     Example
     -------

     >>> import nipype.interfaces.mrtrix3 as mrt
     >>> fod = mrt.EstimateFOD()
-    >>> fod.inputs.algorithm = 'csd'
+    >>> fod.inputs.algorithm = 'msmt_csd'
     >>> fod.inputs.in_file = 'dwi.mif'
     >>> fod.inputs.wm_txt = 'wm.txt'
     >>> fod.inputs.grad_fsl = ('bvecs', 'bvals')
-    >>> fod.cmdline  # doctest: +ELLIPSIS
-    'dwi2fod -fslgrad bvecs bvals -lmax 8 csd dwi.mif wm.txt wm.mif gm.mif csf.mif'
-    >>> fod.run()  # doctest: +SKIP
+    >>> fod.cmdline
+    'dwi2fod -fslgrad bvecs bvals -lmax 8 msmt_csd dwi.mif wm.txt wm.mif gm.mif csf.mif'
+    >>> fod.run()  # doctest: +SKIP
     """

-    _cmd = 'dwi2fod'
+    _cmd = "dwi2fod"
     input_spec = EstimateFODInputSpec
     output_spec = EstimateFODOutputSpec

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['wm_odf'] = op.abspath(self.inputs.wm_odf)
-        if self.inputs.gm_odf != Undefined:
-            outputs['gm_odf'] = op.abspath(self.inputs.gm_odf)
-        if self.inputs.csf_odf != Undefined:
-            outputs['csf_odf'] = op.abspath(self.inputs.csf_odf)
+        outputs["wm_odf"] = op.abspath(self.inputs.wm_odf)
+        if isdefined(self.inputs.gm_odf):
+            outputs["gm_odf"] = op.abspath(self.inputs.gm_odf)
+        if isdefined(self.inputs.csf_odf):
+            outputs["csf_odf"] = op.abspath(self.inputs.csf_odf)
+        if isdefined(self.inputs.predicted_signal):
+            if self.inputs.algorithm != "msmt_csd":
+                raise Exception(
+                    "'predicted_signal' option can only be used with "
+                    "the 'msmt_csd' algorithm"
+                )
+            outputs["predicted_signal"] = op.abspath(self.inputs.predicted_signal)
         return outputs
+
+
+class ConstrainedSphericalDeconvolutionInputSpec(EstimateFODInputSpec):
+    gm_odf = File(argstr="%s", position=-3, desc="output GM ODF")
+    csf_odf = File(argstr="%s", position=-1, desc="output CSF ODF")
+    max_sh = InputMultiObject(
+        traits.Int,
+        argstr="-lmax %s",
+        sep=",",
+        desc=(
+            "maximum harmonic degree of response function - single value for single-shell response, list for multi-shell response"
+        ),
+    )
+
+
+class ConstrainedSphericalDeconvolution(EstimateFOD):
+    """
+    Estimate fibre orientation distributions from diffusion data using spherical deconvolution
+
+    This interface supersedes :py:class:`.EstimateFOD`.
+    The old interface contained a bug when using the CSD algorithm as opposed to the MSMT CSD
+    algorithm, but fixing it could potentially break existing workflows.
The new interface works + the same, but does not populate the following inputs by default: + + * ``gm_odf`` + * ``csf_odf`` + * ``max_sh`` + + Example + ------- + + >>> import nipype.interfaces.mrtrix3 as mrt + >>> fod = mrt.ConstrainedSphericalDeconvolution() + >>> fod.inputs.algorithm = 'csd' + >>> fod.inputs.in_file = 'dwi.mif' + >>> fod.inputs.wm_txt = 'wm.txt' + >>> fod.inputs.grad_fsl = ('bvecs', 'bvals') + >>> fod.cmdline + 'dwi2fod -fslgrad bvecs bvals csd dwi.mif wm.txt wm.mif' + >>> fod.run() # doctest: +SKIP + """ + + input_spec = ConstrainedSphericalDeconvolutionInputSpec diff --git a/nipype/interfaces/mrtrix3/tests/__init__.py b/nipype/interfaces/mrtrix3/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/mrtrix3/tests/__init__.py +++ b/nipype/interfaces/mrtrix3/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_ACTPrepareFSL.py b/nipype/interfaces/mrtrix3/tests/test_auto_ACTPrepareFSL.py index abb3ba6831..a9334f5412 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_ACTPrepareFSL.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_ACTPrepareFSL.py @@ -1,22 +1,25 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ACTPrepareFSL def test_ACTPrepareFSL_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, usedefault=True, @@ -27,8 +30,14 @@ def test_ACTPrepareFSL_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ACTPrepareFSL_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ACTPrepareFSL.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_BrainMask.py b/nipype/interfaces/mrtrix3/tests/test_auto_BrainMask.py index c318be0bfd..26f94fcab0 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_BrainMask.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_BrainMask.py @@ -1,31 +1,55 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import BrainMask def test_BrainMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), environ=dict( nohash=True, usedefault=True, ), - grad_file=dict(argstr='-grad %s', ), - grad_fsl=dict(argstr='-fslgrad %s %s', ), - in_bval=dict(), - in_bvec=dict(argstr='-fslgrad %s %s', ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + 
extensions=None, mandatory=True, position=-1, usedefault=True, @@ -36,8 +60,14 @@ def test_BrainMask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BrainMask_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = BrainMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_BuildConnectome.py b/nipype/interfaces/mrtrix3/tests/test_auto_BuildConnectome.py index 95e5797d06..bfb0b1f0c9 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_BuildConnectome.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_BuildConnectome.py @@ -1,51 +1,81 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..connectivity import BuildConnectome def test_BuildConnectome_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), in_parc=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - in_scalar=dict(argstr='-image %s', ), - in_weights=dict(argstr='-tck_weights_in %s', ), - keep_unassigned=dict(argstr='-keep_unassigned', ), - metric=dict(argstr='-metric %s', ), + in_scalar=dict( + argstr="-image %s", + extensions=None, + ), + in_weights=dict( + argstr="-tck_weights_in %s", + extensions=None, + ), + keep_unassigned=dict( + argstr="-keep_unassigned", + ), + metric=dict( + argstr="-metric %s", + ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, usedefault=True, ), - search_forward=dict(argstr='-assignment_forward_search %f', ), - search_radius=dict(argstr='-assignment_radial_search %f', ), - search_reverse=dict(argstr='-assignment_reverse_search %f', ), - vox_lookup=dict(argstr='-assignment_voxel_lookup', ), - zero_diagonal=dict(argstr='-zero_diagonal', ), + search_forward=dict( + argstr="-assignment_forward_search %f", + ), + search_radius=dict( + argstr="-assignment_radial_search %f", + ), + search_reverse=dict( + argstr="-assignment_reverse_search %f", + ), + vox_lookup=dict( + argstr="-assignment_voxel_lookup", + ), + zero_diagonal=dict( + argstr="-zero_diagonal", + ), ) inputs = BuildConnectome.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BuildConnectome_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = BuildConnectome.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_ComputeTDI.py b/nipype/interfaces/mrtrix3/tests/test_auto_ComputeTDI.py index 38369e8fe5..ab1d984425 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_ComputeTDI.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_ComputeTDI.py @@ -1,47 +1,84 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ComputeTDI def test_ComputeTDI_inputs(): input_map = dict( - args=dict(argstr='%s', ), - contrast=dict(argstr='-constrast %s', ), - data_type=dict(argstr='-datatype %s', ), - dixel=dict(argstr='-dixel 
%s', ), - ends_only=dict(argstr='-ends_only', ), + args=dict( + argstr="%s", + ), + contrast=dict( + argstr="-constrast %s", + ), + data_type=dict( + argstr="-datatype %s", + ), + dixel=dict( + argstr="-dixel %s", + extensions=None, + ), + ends_only=dict( + argstr="-ends_only", + ), environ=dict( nohash=True, usedefault=True, ), - fwhm_tck=dict(argstr='-fwhm_tck %f', ), + fwhm_tck=dict( + argstr="-fwhm_tck %f", + ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - in_map=dict(argstr='-image %s', ), - map_zero=dict(argstr='-map_zero', ), - max_tod=dict(argstr='-tod %d', ), + in_map=dict( + argstr="-image %s", + extensions=None, + ), + map_zero=dict( + argstr="-map_zero", + ), + max_tod=dict( + argstr="-tod %d", + ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, usedefault=True, ), - precise=dict(argstr='-precise', ), - reference=dict(argstr='-template %s', ), - stat_tck=dict(argstr='-stat_tck %s', ), - stat_vox=dict(argstr='-stat_vox %s', ), - tck_weights=dict(argstr='-tck_weights_in %s', ), - upsample=dict(argstr='-upsample %d', ), - use_dec=dict(argstr='-dec', ), + precise=dict( + argstr="-precise", + ), + reference=dict( + argstr="-template %s", + extensions=None, + ), + stat_tck=dict( + argstr="-stat_tck %s", + ), + stat_vox=dict( + argstr="-stat_vox %s", + ), + tck_weights=dict( + argstr="-tck_weights_in %s", + extensions=None, + ), + upsample=dict( + argstr="-upsample %d", + ), + use_dec=dict( + argstr="-dec", + ), vox_size=dict( - argstr='-vox %s', - sep=',', + argstr="-vox %s", + sep=",", ), ) inputs = ComputeTDI.input_spec() @@ -49,8 +86,14 @@ def test_ComputeTDI_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeTDI_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ComputeTDI.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_ConstrainedSphericalDeconvolution.py b/nipype/interfaces/mrtrix3/tests/test_auto_ConstrainedSphericalDeconvolution.py new file mode 100644 index 0000000000..1348326728 --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_ConstrainedSphericalDeconvolution.py @@ -0,0 +1,138 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..reconst import ConstrainedSphericalDeconvolution + + +def test_ConstrainedSphericalDeconvolution_inputs(): + input_map = dict( + algorithm=dict( + argstr="%s", + mandatory=True, + position=-8, + ), + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + csf_odf=dict( + argstr="%s", + extensions=None, + position=-1, + ), + csf_txt=dict( + argstr="%s", + extensions=None, + position=-2, + ), + environ=dict( + nohash=True, + usedefault=True, + ), + gm_odf=dict( + argstr="%s", + extensions=None, + position=-3, + ), + gm_txt=dict( + argstr="%s", + extensions=None, + position=-4, + ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), + in_dirs=dict( + argstr="-directions %s", + extensions=None, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, 
+ position=-7, + ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), + max_sh=dict( + argstr="-lmax %s", + sep=",", + ), + nthreads=dict( + argstr="-nthreads %d", + nohash=True, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + predicted_signal=dict( + argstr="-predicted_signal %s", + extensions=None, + ), + shell=dict( + argstr="-shell %s", + sep=",", + ), + wm_odf=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-5, + usedefault=True, + ), + wm_txt=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-6, + ), + ) + inputs = ConstrainedSphericalDeconvolution.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_ConstrainedSphericalDeconvolution_outputs(): + output_map = dict( + csf_odf=dict( + argstr="%s", + extensions=None, + ), + gm_odf=dict( + argstr="%s", + extensions=None, + ), + predicted_signal=dict( + extensions=None, + ), + wm_odf=dict( + argstr="%s", + extensions=None, + ), + ) + outputs = ConstrainedSphericalDeconvolution.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_DWIBiasCorrect.py b/nipype/interfaces/mrtrix3/tests/test_auto_DWIBiasCorrect.py new file mode 100644 index 0000000000..82beccfa9d --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_DWIBiasCorrect.py @@ -0,0 +1,100 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..preprocess import DWIBiasCorrect + + +def test_DWIBiasCorrect_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + bias=dict( + argstr="-bias %s", + extensions=None, + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-2, + ), + in_mask=dict( + argstr="-mask %s", + extensions=None, + ), + nthreads=dict( + argstr="-nthreads %d", + nohash=True, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + out_file=dict( + argstr="%s", + extensions=None, + genfile=True, + keep_extension=True, + name_source="in_file", + name_template="%s_biascorr", + position=-1, + ), + use_ants=dict( + argstr="ants", + mandatory=True, + position=0, + xor=["use_fsl"], + ), + use_fsl=dict( + argstr="fsl", + mandatory=True, + position=0, + xor=["use_ants"], + ), + ) + inputs = DWIBiasCorrect.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_DWIBiasCorrect_outputs(): + output_map = dict( + bias=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + ) + outputs = DWIBiasCorrect.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_DWIDenoise.py 
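Every test file in this patch, whether new or regenerated, has the same shape: build a literal metadata map, instantiate the interface's input_spec/output_spec, and assert each metadata key against the live traits. A condensed sketch of that loop as a reusable helper; assert_trait_metadata is a hypothetical name, not part of nipype:

def assert_trait_metadata(spec, metadata_map):
    # spec: an instantiated TraitedSpec; metadata_map: trait name ->
    # dict of metadata key -> expected value, as in the tests above.
    traits_dict = spec.traits()
    for name, metadata in metadata_map.items():
        for metakey, value in metadata.items():
            actual = getattr(traits_dict[name], metakey)
            assert actual == value, f"{name}.{metakey}: {actual!r} != {value!r}"

# e.g. checking one entry from test_DWIBiasCorrect_inputs:
from nipype.interfaces.mrtrix3.preprocess import DWIBiasCorrect
assert_trait_metadata(
    DWIBiasCorrect.input_spec(),
    {"use_ants": dict(argstr="ants", mandatory=True, position=0, xor=["use_fsl"])},
)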
b/nipype/interfaces/mrtrix3/tests/test_auto_DWIDenoise.py index 769ccb34a9..edd2ee254d 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_DWIDenoise.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_DWIDenoise.py @@ -1,40 +1,73 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import DWIDenoise def test_DWIDenoise_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), environ=dict( nohash=True, usedefault=True, ), - extent=dict(argstr='-extent %d,%d,%d', ), - grad_file=dict(argstr='-grad %s', ), - grad_fsl=dict(argstr='-fslgrad %s %s', ), - in_bval=dict(), - in_bvec=dict(argstr='-fslgrad %s %s', ), + extent=dict( + argstr="-extent %d,%d,%d", + ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), mask=dict( - argstr='-mask %s', + argstr="-mask %s", + extensions=None, position=1, ), - noise=dict(argstr='-noise %s', ), + noise=dict( + argstr="-noise %s", + extensions=None, + keep_extension=True, + name_source="in_file", + name_template="%s_noise", + ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_denoised', + name_source="in_file", + name_template="%s_denoised", position=-1, ), ) @@ -43,8 +76,17 @@ def test_DWIDenoise_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIDenoise_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + noise=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + ) outputs = DWIDenoise.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_DWIExtract.py b/nipype/interfaces/mrtrix3/tests/test_auto_DWIExtract.py index 0114c5efe0..a2d05a6fdc 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_DWIExtract.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_DWIExtract.py @@ -1,49 +1,85 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import DWIExtract def test_DWIExtract_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - bzero=dict(argstr='-bzero', ), + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + bzero=dict( + argstr="-bzero", + ), environ=dict( nohash=True, usedefault=True, ), - grad_file=dict(argstr='-grad %s', ), - grad_fsl=dict(argstr='-fslgrad %s %s', ), - in_bval=dict(), - in_bvec=dict(argstr='-fslgrad %s %s', ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), in_file=dict( - argstr='%s', + 
argstr="%s", + extensions=None, mandatory=True, position=-2, ), - nobzero=dict(argstr='-no_bzero', ), + nobzero=dict( + argstr="-no_bzero", + ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), shell=dict( - argstr='-shell %s', - sep=',', + argstr="-shell %s", + sep=",", + ), + singleshell=dict( + argstr="-singleshell", ), - singleshell=dict(argstr='-singleshell', ), ) inputs = DWIExtract.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIExtract_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = DWIExtract.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_DWIPreproc.py b/nipype/interfaces/mrtrix3/tests/test_auto_DWIPreproc.py new file mode 100644 index 0000000000..7c0231bd70 --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_DWIPreproc.py @@ -0,0 +1,140 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..preprocess import DWIPreproc + + +def test_DWIPreproc_inputs(): + input_map = dict( + align_seepi=dict( + argstr="-align_seepi", + ), + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + eddy_mask=dict( + argstr="-eddy_mask %s", + extensions=None, + ), + eddy_options=dict( + argstr='-eddy_options "%s"', + ), + eddy_slspec=dict( + argstr="-eddy_slspec %s", + extensions=None, + ), + eddyqc_all=dict( + argstr="-eddyqc_all %s", + ), + eddyqc_text=dict( + argstr="-eddyqc_text %s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), + in_epi=dict( + argstr="-se_epi %s", + extensions=None, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=0, + ), + json_import=dict( + argstr="-json_import %s", + extensions=None, + ), + nthreads=dict( + argstr="-nthreads %d", + nohash=True, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + out_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=1, + usedefault=True, + ), + out_grad_fsl=dict( + argstr="-export_grad_fsl %s, %s", + ), + out_grad_mrtrix=dict( + argstr="-export_grad_mrtrix %s", + extensions=None, + ), + pe_dir=dict( + argstr="-pe_dir %s", + ), + ro_time=dict( + argstr="-readout_time %f", + ), + rpe_options=dict( + argstr="-rpe_%s", + mandatory=True, + position=2, + ), + topup_options=dict( + argstr='-topup_options "%s"', + ), + ) + inputs = DWIPreproc.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_DWIPreproc_outputs(): + output_map = dict( + out_file=dict( + argstr="%s", + extensions=None, + ), + out_fsl_bval=dict( + argstr="%s", + extensions=None, + usedefault=True, + ), + out_fsl_bvec=dict( + argstr="%s", + extensions=None, + usedefault=True, + ), + 
out_grad_mrtrix=dict( + argstr="%s", + extensions=None, + usedefault=True, + ), + ) + outputs = DWIPreproc.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_EstimateFOD.py b/nipype/interfaces/mrtrix3/tests/test_auto_EstimateFOD.py index a5c152e928..378dd4a220 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_EstimateFOD.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_EstimateFOD.py @@ -1,24 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..reconst import EstimateFOD def test_EstimateFOD_inputs(): input_map = dict( algorithm=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-8, ), - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), csf_odf=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, usedefault=True, ), csf_txt=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), environ=dict( @@ -26,45 +31,80 @@ def test_EstimateFOD_inputs(): usedefault=True, ), gm_odf=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, usedefault=True, ), gm_txt=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-4, ), - grad_file=dict(argstr='-grad %s', ), - grad_fsl=dict(argstr='-fslgrad %s %s', ), - in_bval=dict(), - in_bvec=dict(argstr='-fslgrad %s %s', ), - in_dirs=dict(argstr='-directions %s', ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), + in_dirs=dict( + argstr="-directions %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-7, ), - mask_file=dict(argstr='-mask %s', ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), max_sh=dict( - argstr='-lmax %d', + argstr="-lmax %s", + sep=",", usedefault=True, ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + predicted_signal=dict( + argstr="-predicted_signal %s", + extensions=None, + ), shell=dict( - argstr='-shell %s', - sep=',', + argstr="-shell %s", + sep=",", ), wm_odf=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-5, usedefault=True, ), wm_txt=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-6, ), @@ -74,11 +114,25 @@ def test_EstimateFOD_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateFOD_outputs(): output_map = dict( - csf_odf=dict(argstr='%s', ), - gm_odf=dict(argstr='%s', ), - wm_odf=dict(argstr='%s', ), + csf_odf=dict( + argstr="%s", + extensions=None, + ), + gm_odf=dict( + argstr="%s", + extensions=None, + ), + predicted_signal=dict( + extensions=None, + ), + wm_odf=dict( + argstr="%s", + extensions=None, + ), ) outputs = EstimateFOD.output_spec() diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_FitTensor.py b/nipype/interfaces/mrtrix3/tests/test_auto_FitTensor.py index 85c103cc10..884b87be06 
100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_FitTensor.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_FitTensor.py @@ -1,40 +1,73 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..reconst import FitTensor def test_FitTensor_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), environ=dict( nohash=True, usedefault=True, ), - grad_file=dict(argstr='-grad %s', ), - grad_fsl=dict(argstr='-fslgrad %s %s', ), - in_bval=dict(), - in_bvec=dict(argstr='-fslgrad %s %s', ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - in_mask=dict(argstr='-mask %s', ), - method=dict(argstr='-method %s', ), + in_mask=dict( + argstr="-mask %s", + extensions=None, + ), + method=dict( + argstr="-method %s", + ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, usedefault=True, ), + predicted_signal=dict( + argstr="-predicted_signal %s", + extensions=None, + ), reg_term=dict( - argstr='-regularisation %f', - max_ver='0.3.13', + argstr="-regularisation %f", + max_ver="0.3.13", ), ) inputs = FitTensor.input_spec() @@ -42,8 +75,17 @@ def test_FitTensor_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitTensor_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + predicted_signal=dict( + extensions=None, + ), + ) outputs = FitTensor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py b/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py index dcfbe1fc6f..d8f6e53364 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py @@ -1,47 +1,107 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Generate5tt def test_Generate5tt_inputs(): input_map = dict( algorithm=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-3, ), - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), environ=dict( nohash=True, usedefault=True, ), - grad_file=dict(argstr='-grad %s', ), - grad_fsl=dict(argstr='-fslgrad %s %s', ), - in_bval=dict(), - in_bvec=dict(argstr='-fslgrad %s %s', ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + hippocampi=dict( + argstr="-hippocampi %s", + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), + lut_file=dict( + 
argstr="-lut %s", + extensions=None, + ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), + nocrop=dict( + argstr="-nocrop", + ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), + premasked=dict( + argstr="-premasked", + ), + sgm_amyg_hipp=dict( + argstr="-sgm_amyg_hipp", + ), + t2_image=dict( + argstr="-t2 %s", + extensions=None, + ), + template=dict( + argstr="-template %s", + extensions=None, + ), + white_stem=dict( + argstr="-white_stem", + ), ) inputs = Generate5tt.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Generate5tt_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Generate5tt.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt2gmwmi.py b/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt2gmwmi.py new file mode 100644 index 0000000000..2f4fc24e5d --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt2gmwmi.py @@ -0,0 +1,79 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import Generate5tt2gmwmi + + +def test_Generate5tt2gmwmi_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-2, + ), + mask_in=dict( + argstr="-mask_in %s", + extensions=None, + position=-3, + ), + mask_out=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-1, + ), + nthreads=dict( + argstr="-nthreads %d", + nohash=True, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + ) + inputs = Generate5tt2gmwmi.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_Generate5tt2gmwmi_outputs(): + output_map = dict( + mask_out=dict( + extensions=None, + ), + ) + outputs = Generate5tt2gmwmi.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_LabelConfig.py b/nipype/interfaces/mrtrix3/tests/test_auto_LabelConfig.py index 2de0e6c115..15116f9bb8 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_LabelConfig.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_LabelConfig.py @@ -1,47 +1,72 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..connectivity import LabelConfig def test_LabelConfig_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_config=dict( - argstr='%s', + 
argstr="%s", + extensions=None, position=-2, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), - lut_aal=dict(argstr='-lut_aal %s', ), - lut_basic=dict(argstr='-lut_basic %s', ), - lut_fs=dict(argstr='-lut_freesurfer %s', ), - lut_itksnap=dict(argstr='-lut_itksnap %s', ), + lut_aal=dict( + argstr="-lut_aal %s", + extensions=None, + ), + lut_basic=dict( + argstr="-lut_basic %s", + extensions=None, + ), + lut_fs=dict( + argstr="-lut_freesurfer %s", + extensions=None, + ), + lut_itksnap=dict( + argstr="-lut_itksnap %s", + extensions=None, + ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, usedefault=True, ), - spine=dict(argstr='-spine %s', ), + spine=dict( + argstr="-spine %s", + extensions=None, + ), ) inputs = LabelConfig.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LabelConfig_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = LabelConfig.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_LabelConvert.py b/nipype/interfaces/mrtrix3/tests/test_auto_LabelConvert.py index 1e93ae82d8..98512ac317 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_LabelConvert.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_LabelConvert.py @@ -1,48 +1,62 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..connectivity import LabelConvert def test_LabelConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_config=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-4, ), in_lut=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), num_threads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, usedefault=True, ), - spine=dict(argstr='-spine %s', ), + spine=dict( + argstr="-spine %s", + extensions=None, + ), ) inputs = LabelConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LabelConvert_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = LabelConvert.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRCat.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRCat.py new file mode 100644 index 0000000000..25bfea5d37 --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRCat.py @@ -0,0 +1,80 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import MRCat + + +def test_MRCat_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + axis=dict( + argstr="-axis %s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + datatype=dict( + argstr="-datatype %s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + 
xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), + in_files=dict( + argstr="%s", + mandatory=True, + position=-2, + ), + nthreads=dict( + argstr="-nthreads %d", + nohash=True, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + out_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-1, + usedefault=True, + ), + ) + inputs = MRCat.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_MRCat_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = MRCat.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRConvert.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRConvert.py index ece24e1d47..0cd3c95794 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_MRConvert.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRConvert.py @@ -1,50 +1,84 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MRConvert def test_MRConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), axes=dict( - argstr='-axes %s', - sep=',', + argstr="-axes %s", + sep=",", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), coord=dict( - argstr='-coord %s', - sep=' ', + argstr="-coord %s", + sep=" ", ), environ=dict( nohash=True, usedefault=True, ), - grad_file=dict(argstr='-grad %s', ), - grad_fsl=dict(argstr='-fslgrad %s %s', ), - in_bval=dict(), - in_bvec=dict(argstr='-fslgrad %s %s', ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), + json_export=dict( + argstr="-json_export %s", + extensions=None, + mandatory=False, + ), + json_import=dict( + argstr="-json_import %s", + extensions=None, + mandatory=False, + ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, usedefault=True, ), scaling=dict( - argstr='-scaling %s', - sep=',', + argstr="-scaling %s", + sep=",", ), vox=dict( - argstr='-vox %s', - sep=',', + argstr="-vox %s", + sep=",", ), ) inputs = MRConvert.input_spec() @@ -52,8 +86,23 @@ def test_MRConvert_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRConvert_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + json_export=dict( + extensions=None, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + ) outputs = 
MRConvert.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py new file mode 100644 index 0000000000..cd15f36ac6 --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py @@ -0,0 +1,95 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..preprocess import MRDeGibbs + + +def test_MRDeGibbs_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + axes=dict( + argstr="-axes %s", + maxlen=2, + minlen=2, + sep=",", + usedefault=True, + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-2, + ), + maxW=dict( + argstr="-maxW %d", + usedefault=True, + ), + minW=dict( + argstr="-minW %d", + usedefault=True, + ), + nshifts=dict( + argstr="-nshifts %d", + usedefault=True, + ), + nthreads=dict( + argstr="-nthreads %d", + nohash=True, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + out_file=dict( + argstr="%s", + extensions=None, + keep_extension=True, + name_source="in_file", + name_template="%s_unr", + position=-1, + ), + ) + inputs = MRDeGibbs.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_MRDeGibbs_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = MRDeGibbs.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRMath.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRMath.py index eec06b6276..10470bea7a 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_MRMath.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRMath.py @@ -1,37 +1,63 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import MRMath def test_MRMath_inputs(): input_map = dict( - args=dict(argstr='%s', ), - axis=dict(argstr='-axis %d', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), + args=dict( + argstr="%s", + ), + axis=dict( + argstr="-axis %d", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), environ=dict( nohash=True, usedefault=True, ), - grad_file=dict(argstr='-grad %s', ), - grad_fsl=dict(argstr='-fslgrad %s %s', ), - in_bval=dict(), - in_bvec=dict(argstr='-fslgrad %s %s', ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), operation=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + 
extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), @@ -41,8 +67,14 @@ def test_MRMath_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRMath_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MRMath.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRResize.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRResize.py new file mode 100644 index 0000000000..4728309194 --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRResize.py @@ -0,0 +1,95 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import MRResize + + +def test_MRResize_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + image_size=dict( + argstr="-size %d,%d,%d", + mandatory=True, + xor=["voxel_size", "scale_factor"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-2, + ), + interpolation=dict( + argstr="-interp %s", + usedefault=True, + ), + nthreads=dict( + argstr="-nthreads %d", + nohash=True, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + out_file=dict( + argstr="%s", + extensions=None, + keep_extension=True, + name_source=["in_file"], + name_template="%s_resized", + position=-1, + ), + scale_factor=dict( + argstr="-scale %g,%g,%g", + mandatory=True, + xor=["image_size", "voxel_size"], + ), + voxel_size=dict( + argstr="-voxel %g,%g,%g", + mandatory=True, + xor=["image_size", "scale_factor"], + ), + ) + inputs = MRResize.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_MRResize_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = MRResize.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRTransform.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRTransform.py new file mode 100644 index 0000000000..e0337da2a9 --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRTransform.py @@ -0,0 +1,104 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import MRTransform + + +def test_MRTransform_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + debug=dict( + argstr="-debug", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + flip_x=dict( + argstr="-flipx", + ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), + in_files=dict( + argstr="%s", + 
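The new MRResize spec above encodes an exactly-one-of-three choice: image_size, voxel_size, and scale_factor are each mandatory but xor'd against the other two, so trait validation forces a single resizing strategy. A placeholder run (input assumed to exist):

from nipype.interfaces.mrtrix3 import MRResize

resize = MRResize()
resize.inputs.in_file = "dwi.mif"
resize.inputs.voxel_size = (1.25, 1.25, 1.25)  # renders as "-voxel 1.25,1.25,1.25"
print(resize.cmdline)
# roughly: mrresize -interp cubic -voxel 1.25,1.25,1.25 dwi.mif dwi_resized.mif
# (the -interp value comes from interpolation's usedefault; out_file is
# derived from in_file via name_template="%s_resized")
# Setting image_size or scale_factor as well would fail validation.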
mandatory=True, + position=-2, + ), + invert=dict( + argstr="-inverse", + ), + linear_transform=dict( + argstr="-linear %s", + extensions=None, + ), + nthreads=dict( + argstr="-nthreads %d", + nohash=True, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + out_file=dict( + argstr="%s", + extensions=None, + genfile=True, + position=-1, + ), + quiet=dict( + argstr="-quiet", + ), + reference_image=dict( + argstr="-reference %s", + extensions=None, + ), + replace_transform=dict( + argstr="-replace", + ), + template_image=dict( + argstr="-template %s", + extensions=None, + ), + transformation_file=dict( + argstr="-transform %s", + extensions=None, + ), + ) + inputs = MRTransform.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_MRTransform_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = MRTransform.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRTrix3Base.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRTrix3Base.py index 453baa053a..7689f14f11 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_MRTrix3Base.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRTrix3Base.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import MRTrix3Base def test_MRTrix3Base_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MTNormalise.py b/nipype/interfaces/mrtrix3/tests/test_auto_MTNormalise.py new file mode 100644 index 0000000000..8463e5a64a --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MTNormalise.py @@ -0,0 +1,103 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import MTNormalise + + +def test_MTNormalise_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + csf_fod=dict( + argstr="%s", + extensions=None, + position=5, + ), + environ=dict( + nohash=True, + usedefault=True, + ), + gm_fod=dict( + argstr="%s", + extensions=None, + position=3, + ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), + mask=dict( + argstr="-mask %s", + extensions=None, + position=-1, + ), + nthreads=dict( + argstr="-nthreads %d", + nohash=True, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + out_file_csf=dict( + argstr="%s", + extensions=None, + position=6, + ), + out_file_gm=dict( + argstr="%s", + extensions=None, + position=4, + ), + out_file_wm=dict( + argstr="%s", + extensions=None, + position=2, + ), + wm_fod=dict( + argstr="%s", + extensions=None, + position=1, + ), + ) + inputs = MTNormalise.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_MTNormalise_outputs(): + output_map 
= dict( + out_file_csf=dict( + extensions=None, + ), + out_file_gm=dict( + extensions=None, + ), + out_file_wm=dict( + extensions=None, + ), + ) + outputs = MTNormalise.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MaskFilter.py b/nipype/interfaces/mrtrix3/tests/test_auto_MaskFilter.py new file mode 100644 index 0000000000..5443c09e15 --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MaskFilter.py @@ -0,0 +1,54 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import MaskFilter + + +def test_MaskFilter_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + filter=dict( + argstr="%s", + mandatory=True, + position=-2, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-3, + ), + npass=dict( + argstr="-npass %d", + position=1, + ), + out_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + name_source=["input_image"], + position=-1, + ), + ) + inputs = MaskFilter.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_MaskFilter_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = MaskFilter.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_Mesh2PVE.py b/nipype/interfaces/mrtrix3/tests/test_auto_Mesh2PVE.py index 9aa29acbe8..0fd63be8af 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_Mesh2PVE.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_Mesh2PVE.py @@ -1,29 +1,36 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Mesh2PVE def test_Mesh2PVE_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), - in_first=dict(argstr='-first %s', ), + in_first=dict( + argstr="-first %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, usedefault=True, ), reference=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), @@ -33,8 +40,14 @@ def test_Mesh2PVE_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Mesh2PVE_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Mesh2PVE.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_ReplaceFSwithFIRST.py b/nipype/interfaces/mrtrix3/tests/test_auto_ReplaceFSwithFIRST.py index 502a0f7fb1..ef3053cede 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_ReplaceFSwithFIRST.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_ReplaceFSwithFIRST.py @@ -1,31 +1,36 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ReplaceFSwithFIRST def 
test_ReplaceFSwithFIRST_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_config=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-4, ), in_t1w=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-3, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, usedefault=True, @@ -36,8 +41,14 @@ def test_ReplaceFSwithFIRST_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ReplaceFSwithFIRST_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ReplaceFSwithFIRST.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py b/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py index 027527ec85..1b9d4db9b9 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py @@ -1,19 +1,23 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ResponseSD def test_ResponseSD_inputs(): input_map = dict( algorithm=dict( - argstr='%s', + argstr="%s", mandatory=True, position=1, ), - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), csf_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, ), environ=dict( @@ -21,34 +25,59 @@ def test_ResponseSD_inputs(): usedefault=True, ), gm_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - grad_file=dict(argstr='-grad %s', ), - grad_fsl=dict(argstr='-fslgrad %s %s', ), - in_bval=dict(), - in_bvec=dict(argstr='-fslgrad %s %s', ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-5, ), - in_mask=dict(argstr='-mask %s', ), + in_mask=dict( + argstr="-mask %s", + extensions=None, + ), max_sh=dict( - argstr='-lmax %s', - sep=',', - usedefault=True, + argstr="-lmax %s", + sep=",", ), mtt_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-4, ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), wm_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, usedefault=True, ), @@ -58,11 +87,22 @@ def test_ResponseSD_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ResponseSD_outputs(): output_map = dict( - csf_file=dict(argstr='%s', ), - gm_file=dict(argstr='%s', ), - wm_file=dict(argstr='%s', ), + csf_file=dict( + argstr="%s", + extensions=None, + ), + gm_file=dict( + argstr="%s", + extensions=None, + ), + wm_file=dict( + argstr="%s", + extensions=None, + ), ) outputs = 
ResponseSD.output_spec() diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_SH2Amp.py b/nipype/interfaces/mrtrix3/tests/test_auto_SH2Amp.py new file mode 100644 index 0000000000..ab75fc1f8a --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_SH2Amp.py @@ -0,0 +1,55 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import SH2Amp + + +def test_SH2Amp_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + directions=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-2, + ), + environ=dict( + nohash=True, + usedefault=True, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-3, + ), + nonnegative=dict( + argstr="-nonnegative", + ), + out_file=dict( + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s_amp.mif", + position=-1, + usedefault=True, + ), + ) + inputs = SH2Amp.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_SH2Amp_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = SH2Amp.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_SHConv.py b/nipype/interfaces/mrtrix3/tests/test_auto_SHConv.py new file mode 100644 index 0000000000..992e6984a8 --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_SHConv.py @@ -0,0 +1,52 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import SHConv + + +def test_SHConv_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-3, + ), + out_file=dict( + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s_shconv.mif", + position=-1, + usedefault=True, + ), + response=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-2, + ), + ) + inputs = SHConv.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_SHConv_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = SHConv.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_TCK2VTK.py b/nipype/interfaces/mrtrix3/tests/test_auto_TCK2VTK.py index 93a39484f8..215dafedc0 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_TCK2VTK.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_TCK2VTK.py @@ -1,39 +1,54 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TCK2VTK def test_TCK2VTK_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, usedefault=True, ), - reference=dict(argstr='-image %s', ), - voxel=dict(argstr='-image 
%s', ), + reference=dict( + argstr="-image %s", + extensions=None, + ), + voxel=dict( + argstr="-image %s", + extensions=None, + ), ) inputs = TCK2VTK.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCK2VTK_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TCK2VTK.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_TensorMetrics.py b/nipype/interfaces/mrtrix3/tests/test_auto_TensorMetrics.py index ae532c9910..28c33b0796 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_TensorMetrics.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_TensorMetrics.py @@ -1,43 +1,137 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import TensorMetrics def test_TensorMetrics_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), component=dict( - argstr='-num %s', - sep=',', + argstr="-num %s", + sep=",", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, ), - in_mask=dict(argstr='-mask %s', ), - modulate=dict(argstr='-modulate %s', ), - out_adc=dict(argstr='-adc %s', ), - out_eval=dict(argstr='-value %s', ), - out_evec=dict(argstr='-vector %s', ), - out_fa=dict(argstr='-fa %s', ), + in_mask=dict( + argstr="-mask %s", + extensions=None, + ), + modulate=dict( + argstr="-modulate %s", + ), + nthreads=dict( + argstr="-nthreads %d", + nohash=True, + ), + out_ad=dict( + argstr="-ad %s", + extensions=None, + ), + out_adc=dict( + argstr="-adc %s", + extensions=None, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + out_cl=dict( + argstr="-cl %s", + extensions=None, + ), + out_cp=dict( + argstr="-cp %s", + extensions=None, + ), + out_cs=dict( + argstr="-cs %s", + extensions=None, + ), + out_eval=dict( + argstr="-value %s", + extensions=None, + ), + out_evec=dict( + argstr="-vector %s", + extensions=None, + ), + out_fa=dict( + argstr="-fa %s", + extensions=None, + ), + out_rd=dict( + argstr="-rd %s", + extensions=None, + ), ) inputs = TensorMetrics.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TensorMetrics_outputs(): output_map = dict( - out_adc=dict(), - out_eval=dict(), - out_evec=dict(), - out_fa=dict(), + out_ad=dict( + extensions=None, + ), + out_adc=dict( + extensions=None, + ), + out_cl=dict( + extensions=None, + ), + out_cp=dict( + extensions=None, + ), + out_cs=dict( + extensions=None, + ), + out_eval=dict( + extensions=None, + ), + out_evec=dict( + extensions=None, + ), + out_fa=dict( + extensions=None, + ), + out_rd=dict( + extensions=None, + ), ) outputs = TensorMetrics.output_spec() diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py b/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py index 
dff5c783ee..87fac016f3 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py @@ -1,97 +1,194 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..tracking import Tractography def test_Tractography_inputs(): input_map = dict( - act_file=dict(argstr='-act %s', ), + act_file=dict( + argstr="-act %s", + extensions=None, + ), algorithm=dict( - argstr='-algorithm %s', + argstr="-algorithm %s", usedefault=True, ), - angle=dict(argstr='-angle %f', ), - args=dict(argstr='%s', ), - backtrack=dict(argstr='-backtrack', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - crop_at_gmwmi=dict(argstr='-crop_at_gmwmi', ), - cutoff=dict(argstr='-cutoff %f', ), - cutoff_init=dict(argstr='-initcutoff %f', ), - downsample=dict(argstr='-downsample %f', ), + angle=dict( + argstr="-angle %f", + ), + args=dict( + argstr="%s", + ), + backtrack=dict( + argstr="-backtrack", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + crop_at_gmwmi=dict( + argstr="-crop_at_gmwmi", + ), + cutoff=dict( + argstr="-cutoff %f", + ), + cutoff_init=dict( + argstr="-initcutoff %f", + ), + downsample=dict( + argstr="-downsample %f", + ), environ=dict( nohash=True, usedefault=True, ), - grad_file=dict(argstr='-grad %s', ), - grad_fsl=dict(argstr='-fslgrad %s %s', ), - in_bval=dict(), - in_bvec=dict(argstr='-fslgrad %s %s', ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-2, ), - init_dir=dict(argstr='-initdirection %f,%f,%f', ), - max_length=dict(argstr='-maxlength %f', ), - max_seed_attempts=dict(argstr='-max_seed_attempts %d', ), - max_tracks=dict(argstr='-maxnum %d', ), - min_length=dict(argstr='-minlength %f', ), + init_dir=dict( + argstr="-initdirection %f,%f,%f", + ), + max_length=dict( + argstr="-maxlength %f", + ), + max_seed_attempts=dict( + argstr="-max_seed_attempts %d", + ), + max_tracks=dict( + argstr="-maxnum %d", + ), + min_length=dict( + argstr="-minlength %f", + ), n_samples=dict( - argstr='-samples %d', + argstr="-samples %d", usedefault=True, ), - n_tracks=dict(argstr='-number %d', ), - n_trials=dict(argstr='-trials %d', ), - noprecompt=dict(argstr='-noprecomputed', ), + n_tracks=dict( + argstr="-number %d", + max_ver="0.4", + ), + n_trials=dict( + argstr="-trials %d", + ), + noprecompt=dict( + argstr="-noprecomputed", + ), nthreads=dict( - argstr='-nthreads %d', + argstr="-nthreads %d", nohash=True, ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=-1, usedefault=True, ), out_seeds=dict( - argstr='-output_seeds %s', + argstr="-output_seeds %s", + extensions=None, usedefault=True, ), - power=dict(argstr='-power %d', ), - roi_excl=dict(argstr='-exclude %s', ), - roi_incl=dict(argstr='-include %s', ), - roi_mask=dict(argstr='-mask %s', ), - seed_dynamic=dict(argstr='-seed_dynamic %s', ), + power=dict( + argstr="-power %d", + ), + roi_excl=dict( + argstr="-exclude %s", + ), + roi_incl=dict( + argstr="-include %s", + ), + roi_mask=dict( + argstr="-mask %s", + ), + seed_dynamic=dict( + argstr="-seed_dynamic %s", + 
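The regenerated Tractography test also picks up version gating: n_tracks (-number) is capped at max_ver="0.4", while its replacement select (-select) requires min_ver="3", mirroring the tckgen option rename between MRtrix 0.x and 3.x. A sketch of how such bounds are declared; the spec below is illustrative, not nipype's actual class:

from nipype.interfaces.base import CommandLineInputSpec, traits

class VersionedInputSpec(CommandLineInputSpec):
    # usable only while the detected tool version is <= 0.4
    n_tracks = traits.Int(argstr="-number %d", max_ver="0.4")
    # usable only from tool version 3 onwards
    select = traits.Int(argstr="-select %d", min_ver="3")

# At runtime, CommandLine compares these bounds against the interface's
# reported version and refuses options set outside their supported range.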
extensions=None, + ), seed_gmwmi=dict( - argstr='-seed_gmwmi %s', - requires=['act_file'], + argstr="-seed_gmwmi %s", + extensions=None, + requires=["act_file"], ), seed_grid_voxel=dict( - argstr='-seed_grid_per_voxel %s %d', - xor=['seed_image', 'seed_rnd_voxel'], + argstr="-seed_grid_per_voxel %s %d", + xor=["seed_image", "seed_rnd_voxel"], + ), + seed_image=dict( + argstr="-seed_image %s", + extensions=None, + ), + seed_rejection=dict( + argstr="-seed_rejection %s", + extensions=None, ), - seed_image=dict(argstr='-seed_image %s', ), - seed_rejection=dict(argstr='-seed_rejection %s', ), seed_rnd_voxel=dict( - argstr='-seed_random_per_voxel %s %d', - xor=['seed_image', 'seed_grid_voxel'], - ), - seed_sphere=dict(argstr='-seed_sphere %f,%f,%f,%f', ), - sph_trait=dict(argstr='%f,%f,%f,%f', ), - step_size=dict(argstr='-step %f', ), - stop=dict(argstr='-stop', ), - unidirectional=dict(argstr='-unidirectional', ), - use_rk4=dict(argstr='-rk4', ), + argstr="-seed_random_per_voxel %s %d", + xor=["seed_image", "seed_grid_voxel"], + ), + seed_sphere=dict( + argstr="-seed_sphere %f,%f,%f,%f", + ), + select=dict( + argstr="-select %d", + min_ver="3", + ), + sph_trait=dict( + argstr="%f,%f,%f,%f", + ), + step_size=dict( + argstr="-step %f", + ), + stop=dict( + argstr="-stop", + ), + unidirectional=dict( + argstr="-unidirectional", + ), + use_rk4=dict( + argstr="-rk4", + ), ) inputs = Tractography.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tractography_outputs(): output_map = dict( - out_file=dict(), - out_seeds=dict(), + out_file=dict( + extensions=None, + ), + out_seeds=dict( + extensions=None, + ), ) outputs = Tractography.output_spec() diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_TransformFSLConvert.py b/nipype/interfaces/mrtrix3/tests/test_auto_TransformFSLConvert.py new file mode 100644 index 0000000000..48a528a75e --- /dev/null +++ b/nipype/interfaces/mrtrix3/tests/test_auto_TransformFSLConvert.py @@ -0,0 +1,93 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..utils import TransformFSLConvert + + +def test_TransformFSLConvert_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + bval_scale=dict( + argstr="-bvalue_scaling %s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + flirt_import=dict( + argstr="flirt_import", + mandatory=True, + position=-2, + usedefault=True, + ), + grad_file=dict( + argstr="-grad %s", + extensions=None, + xor=["grad_fsl"], + ), + grad_fsl=dict( + argstr="-fslgrad %s %s", + xor=["grad_file"], + ), + in_bval=dict( + extensions=None, + ), + in_bvec=dict( + argstr="-fslgrad %s %s", + extensions=None, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=1, + ), + in_transform=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=0, + ), + nthreads=dict( + argstr="-nthreads %d", + nohash=True, + ), + out_bval=dict( + extensions=None, + ), + out_bvec=dict( + argstr="-export_grad_fsl %s %s", + extensions=None, + ), + out_transform=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=-1, + usedefault=True, + ), + reference=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=2, + ), + ) + inputs = TransformFSLConvert.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def 
test_TransformFSLConvert_outputs(): + output_map = dict( + out_transform=dict( + extensions=None, + ), + ) + outputs = TransformFSLConvert.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/mrtrix3/tracking.py b/nipype/interfaces/mrtrix3/tracking.py index e8bf15a33f..abb18139d1 100644 --- a/nipype/interfaces/mrtrix3/tracking.py +++ b/nipype/interfaces/mrtrix3/tracking.py @@ -1,230 +1,317 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os.path as op -from ..base import traits, TraitedSpec, File +from ..base import traits, Tuple, TraitedSpec, File from .base import MRTrix3BaseInputSpec, MRTrix3Base class TractographyInputSpec(MRTrix3BaseInputSpec): - sph_trait = traits.Tuple( - traits.Float, - traits.Float, - traits.Float, - traits.Float, - argstr='%f,%f,%f,%f') + sph_trait = Tuple( + traits.Float, traits.Float, traits.Float, traits.Float, argstr="%f,%f,%f,%f" + ) in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input file to be processed') + desc="input file to be processed", + ) out_file = File( - 'tracked.tck', - argstr='%s', + "tracked.tck", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output file containing tracks') + desc="output file containing tracks", + ) algorithm = traits.Enum( - 'iFOD2', - 'FACT', - 'iFOD1', - 'Nulldist', - 'SD_Stream', - 'Tensor_Det', - 'Tensor_Prob', + "iFOD2", + "FACT", + "iFOD1", + "Nulldist", + "SD_Stream", + "Tensor_Det", + "Tensor_Prob", usedefault=True, - argstr='-algorithm %s', - desc='tractography algorithm to be used') + argstr="-algorithm %s", + desc="Tractography algorithm to be used -- References:" + "[FACT]_, [iFOD1]_, [iFOD2]_, [Nulldist]_, [Tensor_Det]_, [Tensor_Prob]_.", + ) # ROIs processing options roi_incl = traits.Either( File(exists=True), sph_trait, - argstr='-include %s', - desc=('specify an inclusion region of interest, streamlines must' - ' traverse ALL inclusion regions to be accepted')) + argstr="-include %s", + desc=( + "specify an inclusion region of interest, streamlines must" + " traverse ALL inclusion regions to be accepted" + ), + ) roi_excl = traits.Either( File(exists=True), sph_trait, - argstr='-exclude %s', - desc=('specify an exclusion region of interest, streamlines that' - ' enter ANY exclude region will be discarded')) + argstr="-exclude %s", + desc=( + "specify an exclusion region of interest, streamlines that" + " enter ANY exclude region will be discarded" + ), + ) roi_mask = traits.Either( File(exists=True), sph_trait, - argstr='-mask %s', - desc=('specify a masking region of interest. If defined,' - 'streamlines exiting the mask will be truncated')) + argstr="-mask %s", + desc=( + "specify a masking region of interest. 
If defined," + "streamlines exiting the mask will be truncated" + ), + ) # Streamlines tractography options step_size = traits.Float( - argstr='-step %f', - desc=('set the step size of the algorithm in mm (default is 0.1' - ' x voxelsize; for iFOD2: 0.5 x voxelsize)')) + argstr="-step %f", + desc=( + "set the step size of the algorithm in mm (default is 0.1" + " x voxelsize; for iFOD2: 0.5 x voxelsize)" + ), + ) angle = traits.Float( - argstr='-angle %f', - desc=('set the maximum angle between successive steps (default ' - 'is 90deg x stepsize / voxelsize)')) + argstr="-angle %f", + desc=( + "set the maximum angle between successive steps (default " + "is 90deg x stepsize / voxelsize)" + ), + ) n_tracks = traits.Int( - argstr='-number %d', - desc=('set the desired number of tracks. The program will continue' - ' to generate tracks until this number of tracks have been ' - 'selected and written to the output file')) + argstr="-number %d", + max_ver="0.4", + desc=( + "set the desired number of tracks. The program will continue" + " to generate tracks until this number of tracks have been " + "selected and written to the output file" + ), + ) + select = traits.Int( + argstr="-select %d", + min_ver="3", + desc=( + "set the desired number of tracks. The program will continue" + " to generate tracks until this number of tracks have been " + "selected and written to the output file" + ), + ) max_tracks = traits.Int( - argstr='-maxnum %d', - desc=('set the maximum number of tracks to generate. The program ' - 'will not generate more tracks than this number, even if ' - 'the desired number of tracks hasn\'t yet been reached ' - '(default is 100 x number)')) + argstr="-maxnum %d", + desc=( + "set the maximum number of tracks to generate. The program " + "will not generate more tracks than this number, even if " + "the desired number of tracks hasn't yet been reached " + "(default is 100 x number)" + ), + ) max_length = traits.Float( - argstr='-maxlength %f', - desc=('set the maximum length of any track in mm (default is ' - '100 x voxelsize)')) + argstr="-maxlength %f", + desc=("set the maximum length of any track in mm (default is 100 x voxelsize)"), + ) min_length = traits.Float( - argstr='-minlength %f', - desc=('set the minimum length of any track in mm (default is ' - '5 x voxelsize)')) + argstr="-minlength %f", + desc=("set the minimum length of any track in mm (default is 5 x voxelsize)"), + ) cutoff = traits.Float( - argstr='-cutoff %f', - desc=('set the FA or FOD amplitude cutoff for terminating ' - 'tracks (default is 0.1)')) + argstr="-cutoff %f", + desc=( + "set the FA or FOD amplitude cutoff for terminating " + "tracks (default is 0.1)" + ), + ) cutoff_init = traits.Float( - argstr='-initcutoff %f', - desc=('set the minimum FA or FOD amplitude for initiating ' - 'tracks (default is the same as the normal cutoff)')) + argstr="-initcutoff %f", + desc=( + "set the minimum FA or FOD amplitude for initiating " + "tracks (default is the same as the normal cutoff)" + ), + ) n_trials = traits.Int( - argstr='-trials %d', - desc=('set the maximum number of sampling trials at each point' - ' (only used for probabilistic tracking)')) + argstr="-trials %d", + desc=( + "set the maximum number of sampling trials at each point" + " (only used for probabilistic tracking)" + ), + ) unidirectional = traits.Bool( - argstr='-unidirectional', - desc=('track from the seed point in one direction only ' - '(default is to track in both directions)')) - init_dir = traits.Tuple( + argstr="-unidirectional", + 
desc=( + "track from the seed point in one direction only " + "(default is to track in both directions)" + ), + ) + init_dir = Tuple( traits.Float, traits.Float, traits.Float, - argstr='-initdirection %f,%f,%f', - desc=('specify an initial direction for the tracking (this ' - 'should be supplied as a vector of 3 comma-separated values')) + argstr="-initdirection %f,%f,%f", + desc=( + "specify an initial direction for the tracking (this " + "should be supplied as a vector of 3 comma-separated values" + ), + ) noprecompt = traits.Bool( - argstr='-noprecomputed', - desc=('do NOT pre-compute legendre polynomial values. Warning: this ' - 'will slow down the algorithm by a factor of approximately 4')) + argstr="-noprecomputed", + desc=( + "do NOT pre-compute legendre polynomial values. Warning: this " + "will slow down the algorithm by a factor of approximately 4" + ), + ) power = traits.Int( - argstr='-power %d', - desc=('raise the FOD to the power specified (default is 1/nsamples)')) + argstr="-power %d", + desc=("raise the FOD to the power specified (default is 1/nsamples)"), + ) n_samples = traits.Int( - 4, usedefault=True, - argstr='-samples %d', - desc=('set the number of FOD samples to take per step for the 2nd ' - 'order (iFOD2) method')) + 4, + usedefault=True, + argstr="-samples %d", + desc=( + "set the number of FOD samples to take per step for the 2nd " + "order (iFOD2) method" + ), + ) use_rk4 = traits.Bool( - argstr='-rk4', - desc=('use 4th-order Runge-Kutta integration (slower, but eliminates' - ' curvature overshoot in 1st-order deterministic methods)')) + argstr="-rk4", + desc=( + "use 4th-order Runge-Kutta integration (slower, but eliminates" + " curvature overshoot in 1st-order deterministic methods)" + ), + ) stop = traits.Bool( - argstr='-stop', - desc=('stop propagating a streamline once it has traversed all ' - 'include regions')) + argstr="-stop", + desc=( + "stop propagating a streamline once it has traversed all include regions" + ), + ) downsample = traits.Float( - argstr='-downsample %f', - desc='downsample the generated streamlines to reduce output file size') + argstr="-downsample %f", + desc="downsample the generated streamlines to reduce output file size", + ) # Anatomically-Constrained Tractography options act_file = File( exists=True, - argstr='-act %s', - desc=('use the Anatomically-Constrained Tractography framework during' - ' tracking; provided image must be in the 5TT ' - '(five - tissue - type) format')) - backtrack = traits.Bool( - argstr='-backtrack', desc='allow tracks to be truncated') + argstr="-act %s", + desc=( + "use the Anatomically-Constrained Tractography framework during" + " tracking; provided image must be in the 5TT " + "(five - tissue - type) format" + ), + ) + backtrack = traits.Bool(argstr="-backtrack", desc="allow tracks to be truncated") crop_at_gmwmi = traits.Bool( - argstr='-crop_at_gmwmi', - desc=('crop streamline endpoints more ' - 'precisely as they cross the GM-WM interface')) + argstr="-crop_at_gmwmi", + desc=( + "crop streamline endpoints more " + "precisely as they cross the GM-WM interface" + ), + ) # Tractography seeding options - seed_sphere = traits.Tuple( + seed_sphere = Tuple( traits.Float, traits.Float, traits.Float, traits.Float, - argstr='-seed_sphere %f,%f,%f,%f', - desc='spherical seed') + argstr="-seed_sphere %f,%f,%f,%f", + desc="spherical seed", + ) seed_image = File( exists=True, - argstr='-seed_image %s', - desc='seed streamlines entirely at random within mask') - seed_rnd_voxel = traits.Tuple( + 
argstr="-seed_image %s", + desc="seed streamlines entirely at random within mask", + ) + seed_rnd_voxel = Tuple( File(exists=True), traits.Int(), - argstr='-seed_random_per_voxel %s %d', - xor=['seed_image', 'seed_grid_voxel'], - desc=('seed a fixed number of streamlines per voxel in a mask ' - 'image; random placement of seeds in each voxel')) - seed_grid_voxel = traits.Tuple( + argstr="-seed_random_per_voxel %s %d", + xor=["seed_image", "seed_grid_voxel"], + desc=( + "seed a fixed number of streamlines per voxel in a mask " + "image; random placement of seeds in each voxel" + ), + ) + seed_grid_voxel = Tuple( File(exists=True), traits.Int(), - argstr='-seed_grid_per_voxel %s %d', - xor=['seed_image', 'seed_rnd_voxel'], - desc=('seed a fixed number of streamlines per voxel in a mask ' - 'image; place seeds on a 3D mesh grid (grid_size argument ' - 'is per axis; so a grid_size of 3 results in 27 seeds per' - ' voxel)')) + argstr="-seed_grid_per_voxel %s %d", + xor=["seed_image", "seed_rnd_voxel"], + desc=( + "seed a fixed number of streamlines per voxel in a mask " + "image; place seeds on a 3D mesh grid (grid_size argument " + "is per axis; so a grid_size of 3 results in 27 seeds per" + " voxel)" + ), + ) seed_rejection = File( exists=True, - argstr='-seed_rejection %s', - desc=('seed from an image using rejection sampling (higher ' - 'values = more probable to seed from')) + argstr="-seed_rejection %s", + desc=( + "seed from an image using rejection sampling (higher " + "values = more probable to seed from" + ), + ) seed_gmwmi = File( exists=True, - argstr='-seed_gmwmi %s', - requires=['act_file'], - desc=('seed from the grey matter - white matter interface (only ' - 'valid if using ACT framework)')) + argstr="-seed_gmwmi %s", + requires=["act_file"], + desc=( + "seed from the grey matter - white matter interface (only " + "valid if using ACT framework)" + ), + ) seed_dynamic = File( exists=True, - argstr='-seed_dynamic %s', - desc=('determine seed points dynamically using the SIFT model ' - '(must not provide any other seeding mechanism). Note that' - ' while this seeding mechanism improves the distribution of' - ' reconstructed streamlines density, it should NOT be used ' - 'as a substitute for the SIFT method itself.')) + argstr="-seed_dynamic %s", + desc=( + "determine seed points dynamically using the SIFT model " + "(must not provide any other seeding mechanism). Note that" + " while this seeding mechanism improves the distribution of" + " reconstructed streamlines density, it should NOT be used " + "as a substitute for the SIFT method itself." 
+ ), + ) max_seed_attempts = traits.Int( - argstr='-max_seed_attempts %d', - desc=('set the maximum number of times that the tracking ' - 'algorithm should attempt to find an appropriate tracking' - ' direction from a given seed point')) + argstr="-max_seed_attempts %d", + desc=( + "set the maximum number of times that the tracking " + "algorithm should attempt to find an appropriate tracking" + " direction from a given seed point" + ), + ) out_seeds = File( - 'out_seeds.nii.gz', usedefault=True, - argstr='-output_seeds %s', - desc=('output the seed location of all successful streamlines to' - ' a file')) + "out_seeds.nii.gz", + usedefault=True, + argstr="-output_seeds %s", + desc=("output the seed location of all successful streamlines to a file"), + ) class TractographyOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output filtered tracks') + out_file = File(exists=True, desc="the output filtered tracks") out_seeds = File( - desc=('output the seed location of all successful' - ' streamlines to a file')) + desc=("output the seed location of all successful streamlines to a file") + ) class Tractography(MRTrix3Base): """ - Performs streamlines tractography after selecting the appropriate - algorithm. + Performs streamlines tractography after selecting the appropriate algorithm. + + References + ---------- .. [FACT] Mori, S.; Crain, B. J.; Chacko, V. P. & van Zijl, P. C. M. Three-dimensional tracking of axonal projections in the @@ -252,7 +339,6 @@ class Tractography(MRTrix3Base): Tracking Using the Wild Bootstrap With Diffusion Tensor MRI. IEEE Transactions on Medical Imaging, 2008, 27, 1268-1274 - Example ------- @@ -268,18 +354,18 @@ class Tractography(MRTrix3Base): >>> tk.run() # doctest: +SKIP """ - _cmd = 'tckgen' + _cmd = "tckgen" input_spec = TractographyInputSpec output_spec = TractographyOutputSpec def _format_arg(self, name, trait_spec, value): - if 'roi_' in name and isinstance(value, tuple): - value = ['%f' % v for v in value] - return trait_spec.argstr % ','.join(value) + if "roi_" in name and isinstance(value, tuple): + value = ["%f" % v for v in value] + return trait_spec.argstr % ",".join(value) - return super(Tractography, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs diff --git a/nipype/interfaces/mrtrix3/utils.py b/nipype/interfaces/mrtrix3/utils.py index a667c716f4..1a7c81dada 100644 --- a/nipype/interfaces/mrtrix3/utils.py +++ b/nipype/interfaces/mrtrix3/utils.py @@ -1,34 +1,44 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os.path as op -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File, InputMultiPath, isdefined) +from ...utils.filemanip import split_filename +from ..base import ( + CommandLineInputSpec, + CommandLine, + traits, + Tuple, + TraitedSpec, + File, + Directory, + InputMultiPath, + isdefined, +) from .base import MRTrix3BaseInputSpec, MRTrix3Base class BrainMaskInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input diffusion weighted images') + desc="input diffusion weighted images", + ) out_file = File( - 
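# A minimal usage sketch for the Tractography interface rewritten above;
# file names are hypothetical. With the version-gated traits, ``select``
# is only emitted on MRtrix3 >= 3, while the legacy ``n_tracks`` maps to
# the old ``-number`` flag on MRtrix <= 0.4.
import nipype.interfaces.mrtrix3 as mrt

tk = mrt.Tractography()
tk.inputs.in_file = 'fods.mif'                     # FOD image (hypothetical)
tk.inputs.roi_mask = 'mask.nii.gz'
tk.inputs.seed_sphere = (80.0, 100.0, 70.0, 10.0)  # x, y, z, radius (mm)
tk.inputs.select = 5000                            # MRtrix3 >= 3 only
tk.run()  # doctest: +SKIP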
'brainmask.mif', - argstr='%s', + "brainmask.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output brain mask') + desc="output brain mask", + ) class BrainMaskOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output response file') + out_file = File(exists=True, desc="the output response file") class BrainMask(CommandLine): @@ -47,45 +57,140 @@ class BrainMask(CommandLine): >>> bmsk.run() # doctest: +SKIP """ - _cmd = 'dwi2mask' + _cmd = "dwi2mask" input_spec = BrainMaskInputSpec output_spec = BrainMaskOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) + return outputs + + +class MRCatInputSpec(MRTrix3BaseInputSpec): + in_files = traits.List( + File(exists=True), + argstr="%s", + position=-2, + mandatory=True, + desc="files to concatenate", + ) + + out_file = File( + "concatenated.mif", + argstr="%s", + mandatory=True, + position=-1, + usedefault=True, + desc="output concatenated image", + ) + + axis = traits.Int( + argstr="-axis %s", + desc="""specify axis along which concatenation should be performed. By default, + the program will use the last non-singleton, non-spatial axis of any of + the input images - in other words axis 3 or whichever axis (greater than + 3) of the input images has size greater than one""", + ) + + datatype = traits.Enum( + "float32", + "float32le", + "float32be", + "float64", + "float64le", + "float64be", + "int64", + "uint64", + "int64le", + "uint64le", + "int64be", + "uint64be", + "int32", + "uint32", + "int32le", + "uint32le", + "int32be", + "uint32be", + "int16", + "uint16", + "int16le", + "uint16le", + "int16be", + "uint16be", + "cfloat32", + "cfloat32le", + "cfloat32be", + "cfloat64", + "cfloat64le", + "cfloat64be", + "int8", + "uint8", + "bit", + argstr="-datatype %s", + desc="specify output image data type", + ) + + +class MRCatOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="the output concatenated image") + + +class MRCat(CommandLine): + """ + Concatenate several images into one + + + Example + ------- + + >>> import nipype.interfaces.mrtrix3 as mrt + >>> mrcat = mrt.MRCat() + >>> mrcat.inputs.in_files = ['dwi.mif','mask.mif'] + >>> mrcat.cmdline # doctest: +ELLIPSIS + 'mrcat dwi.mif mask.mif concatenated.mif' + >>> mrcat.run() # doctest: +SKIP + """ + + _cmd = "mrcat" + input_spec = MRCatInputSpec + output_spec = MRCatOutputSpec + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class Mesh2PVEInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-3, - desc='input mesh') + exists=True, argstr="%s", mandatory=True, position=-3, desc="input mesh" + ) reference = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input reference image') + desc="input reference image", + ) in_first = File( exists=True, - argstr='-first %s', - desc='indicates that the mesh file is provided by FSL FIRST') + argstr="-first %s", + desc="indicates that the mesh file is provided by FSL FIRST", + ) out_file = File( - 'mesh2volume.nii.gz', - argstr='%s', + "mesh2volume.nii.gz", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output file containing SH coefficients') + desc="output file containing SH coefficients", + ) class Mesh2PVEOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output 
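# Sketch of the new MRCat interface defined above, exercising the ``axis``
# and ``datatype`` options that its doctest leaves at their defaults
# (hypothetical file names):
import nipype.interfaces.mrtrix3 as mrt

mrcat = mrt.MRCat()
mrcat.inputs.in_files = ['dwi1.mif', 'dwi2.mif']
mrcat.inputs.axis = 3              # concatenate along the volume axis
mrcat.inputs.datatype = 'float32'
mrcat.run()  # doctest: +SKIP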
response file') + out_file = File(exists=True, desc="the output response file") class Mesh2PVE(CommandLine): @@ -106,37 +211,83 @@ class Mesh2PVE(CommandLine): >>> m2p.run() # doctest: +SKIP """ - _cmd = 'mesh2pve' + _cmd = "mesh2pve" input_spec = Mesh2PVEInputSpec output_spec = Mesh2PVEOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class Generate5ttInputSpec(MRTrix3BaseInputSpec): algorithm = traits.Enum( - 'fsl', - 'gif', - 'freesurfer', - argstr='%s', + "fsl", + "gif", + "freesurfer", + "hsvs", + argstr="%s", position=-3, mandatory=True, - desc='tissue segmentation algorithm') - in_file = File( - exists=True, - argstr='%s', + desc="tissue segmentation algorithm", + ) + in_file = traits.Either( + File(exists=True), + Directory(exists=True), + argstr="%s", mandatory=True, position=-2, - desc='input image') - out_file = File( - argstr='%s', mandatory=True, position=-1, desc='output image') + desc="input image / directory", + ) + out_file = File(argstr="%s", mandatory=True, position=-1, desc="output image") + t2_image = File( + exists=True, + argstr="-t2 %s", + desc="Provide a T2-weighted image in addition to the default T1-weighted image. (Only for 'fsl' algorithm)", + ) + mask_file = File( + exists=True, + argstr="-mask %s", + desc="Provide a brain mask image. (Only for 'fsl' algorithm)", + ) + premasked = traits.Bool( + argstr="-premasked", + desc="Assume that the input image is already brain-masked. (Only for 'fsl' algorithm)", + ) + nocrop = traits.Bool( + argstr="-nocrop", + desc="Do not crop the image to the region of interest.", + ) + sgm_amyg_hipp = traits.Bool( + argstr="-sgm_amyg_hipp", + desc="Include the amygdala and hippocampus in the subcortical grey matter segment.", + ) + template = File( + exists=True, + argstr="-template %s", + desc="Provide an image that will form the template for the generated 5TT image. (Only for 'hsvs' algorithm)", + ) + hippocampi = traits.Enum( + "subfields", + "first", + "aseg", + argstr="-hippocampi %s", + desc="Choose the method used to segment the hippocampi. (Only for 'freesurfer' algorithm)", + ) + white_stem = traits.Bool( + argstr="-white_stem", + desc="Classify the brainstem as white matter. (Only for 'hsvs' algorithm)", + ) + lut_file = File( + exists=True, + argstr="-lut %s", + desc="Manually provide path to the lookup table on which the input parcellation image is based. 
(Only for 'freesurfer' algorithm)", + ) class Generate5ttOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output image') + out_file = File(exists=True, desc="output image") class Generate5tt(MRTrix3Base): @@ -157,56 +308,68 @@ class Generate5tt(MRTrix3Base): >>> gen5tt.run() # doctest: +SKIP """ - _cmd = '5ttgen' + _cmd = "5ttgen" input_spec = Generate5ttInputSpec output_spec = Generate5ttOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs -class TensorMetricsInputSpec(CommandLineInputSpec): +class TensorMetricsInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-1, - desc='input DTI image') - - out_fa = File(argstr='-fa %s', desc='output FA file') - out_adc = File(argstr='-adc %s', desc='output ADC file') - out_evec = File( - argstr='-vector %s', desc='output selected eigenvector(s) file') - out_eval = File( - argstr='-value %s', desc='output selected eigenvalue(s) file') + desc="input DTI image", + ) + + out_fa = File(argstr="-fa %s", desc="output FA file") + out_adc = File(argstr="-adc %s", desc="output ADC file") + out_ad = File(argstr="-ad %s", desc="output AD file") + out_rd = File(argstr="-rd %s", desc="output RD file") + out_cl = File(argstr="-cl %s", desc="output CL file") + out_cp = File(argstr="-cp %s", desc="output CP file") + out_cs = File(argstr="-cs %s", desc="output CS file") + out_evec = File(argstr="-vector %s", desc="output selected eigenvector(s) file") + out_eval = File(argstr="-value %s", desc="output selected eigenvalue(s) file") component = traits.List( [1], usedefault=True, - argstr='-num %s', - sep=',', - desc=('specify the desired eigenvalue/eigenvector(s). Note that ' - 'several eigenvalues can be specified as a number sequence')) + argstr="-num %s", + sep=",", + desc=( + "specify the desired eigenvalue/eigenvector(s). 
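# The Generate5tt spec above now accepts the 'hsvs' algorithm and a
# Directory input, so a FreeSurfer subject directory can be passed
# directly; a sketch under that assumption (paths hypothetical):
import nipype.interfaces.mrtrix3 as mrt

gen5tt = mrt.Generate5tt()
gen5tt.inputs.algorithm = 'hsvs'
gen5tt.inputs.in_file = 'freesurfer/sub-01'  # FreeSurfer subject directory
gen5tt.inputs.out_file = '5tt.mif'
gen5tt.inputs.white_stem = True              # 'hsvs'-only option
gen5tt.run()  # doctest: +SKIP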
Note that " + "several eigenvalues can be specified as a number sequence" + ), + ) in_mask = File( exists=True, - argstr='-mask %s', - desc=('only perform computation within the specified binary' - ' brain mask image')) + argstr="-mask %s", + desc=("only perform computation within the specified binary brain mask image"), + ) modulate = traits.Enum( - 'FA', - 'none', - 'eval', - argstr='-modulate %s', - desc=('how to modulate the magnitude of the' - ' eigenvectors')) + "FA", + "none", + "eval", + argstr="-modulate %s", + desc=("how to modulate the magnitude of the eigenvectors"), + ) class TensorMetricsOutputSpec(TraitedSpec): - out_fa = File(desc='output FA file') - out_adc = File(desc='output ADC file') - out_evec = File(desc='output selected eigenvector(s) file') - out_eval = File(desc='output selected eigenvalue(s) file') + out_fa = File(desc="output FA file") + out_adc = File(desc="output ADC file") + out_ad = File(desc="output AD file") + out_rd = File(desc="output RD file") + out_cl = File(desc="output CL file") + out_cp = File(desc="output CP file") + out_cs = File(desc="output CS file") + out_evec = File(desc="output selected eigenvector(s) file") + out_eval = File(desc="output selected eigenvalue(s) file") class TensorMetrics(CommandLine): @@ -226,7 +389,7 @@ class TensorMetrics(CommandLine): >>> comp.run() # doctest: +SKIP """ - _cmd = 'tensor2metric' + _cmd = "tensor2metric" input_spec = TensorMetricsInputSpec output_spec = TensorMetricsOutputSpec @@ -242,127 +405,130 @@ def _list_outputs(self): class ComputeTDIInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='input tractography') + exists=True, argstr="%s", mandatory=True, position=-2, desc="input tractography" + ) out_file = File( - 'tdi.mif', - argstr='%s', - usedefault=True, - position=-1, - desc='output TDI file') + "tdi.mif", argstr="%s", usedefault=True, position=-1, desc="output TDI file" + ) reference = File( exists=True, - argstr='-template %s', - desc='a reference' - 'image to be used as template') + argstr="-template %s", + desc="a reference image to be used as template", + ) vox_size = traits.List( - traits.Int, argstr='-vox %s', sep=',', desc='voxel dimensions') + traits.Int, argstr="-vox %s", sep=",", desc="voxel dimensions" + ) data_type = traits.Enum( - 'float', - 'unsigned int', - argstr='-datatype %s', - desc='specify output image data type') - use_dec = traits.Bool(argstr='-dec', desc='perform mapping in DEC space') + "float", + "unsigned int", + argstr="-datatype %s", + desc="specify output image data type", + ) + use_dec = traits.Bool(argstr="-dec", desc="perform mapping in DEC space") dixel = File( - argstr='-dixel %s', - desc='map streamlines to' - 'dixels within each voxel. Directions are stored as' - 'azimuth elevation pairs.') + argstr="-dixel %s", + desc="map streamlines to" + "dixels within each voxel. 
Directions are stored as" + "azimuth elevation pairs.", + ) max_tod = traits.Int( - argstr='-tod %d', - desc='generate a Track Orientation ' - 'Distribution (TOD) in each voxel.') + argstr="-tod %d", + desc="generate a Track Orientation Distribution (TOD) in each voxel.", + ) contrast = traits.Enum( - 'tdi', - 'length', - 'invlength', - 'scalar_map', - 'scalar_map_conut', - 'fod_amp', - 'curvature', - argstr='-constrast %s', - desc='define the desired ' - 'form of contrast for the output image') + "tdi", + "length", + "invlength", + "scalar_map", + "scalar_map_conut", + "fod_amp", + "curvature", + argstr="-constrast %s", + desc="define the desired form of contrast for the output image", + ) in_map = File( exists=True, - argstr='-image %s', - desc='provide the' - 'scalar image map for generating images with ' - '\'scalar_map\' contrasts, or the SHs image for fod_amp') + argstr="-image %s", + desc="provide the" + "scalar image map for generating images with " + "'scalar_map' contrasts, or the SHs image for fod_amp", + ) stat_vox = traits.Enum( - 'sum', - 'min', - 'mean', - 'max', - argstr='-stat_vox %s', - desc='define the statistic for choosing the final' - 'voxel intesities for a given contrast') + "sum", + "min", + "mean", + "max", + argstr="-stat_vox %s", + desc="define the statistic for choosing the final" + "voxel intesities for a given contrast", + ) stat_tck = traits.Enum( - 'mean', - 'sum', - 'min', - 'max', - 'median', - 'mean_nonzero', - 'gaussian', - 'ends_min', - 'ends_mean', - 'ends_max', - 'ends_prod', - argstr='-stat_tck %s', - desc='define the statistic for choosing ' - 'the contribution to be made by each streamline as a function of' - ' the samples taken along their lengths.') + "mean", + "sum", + "min", + "max", + "median", + "mean_nonzero", + "gaussian", + "ends_min", + "ends_mean", + "ends_max", + "ends_prod", + argstr="-stat_tck %s", + desc="define the statistic for choosing " + "the contribution to be made by each streamline as a function of" + " the samples taken along their lengths.", + ) fwhm_tck = traits.Float( - argstr='-fwhm_tck %f', - desc='define the statistic for choosing the' - ' contribution to be made by each streamline as a function of the ' - 'samples taken along their lengths') + argstr="-fwhm_tck %f", + desc="define the statistic for choosing the" + " contribution to be made by each streamline as a function of the " + "samples taken along their lengths", + ) map_zero = traits.Bool( - argstr='-map_zero', - desc='if a streamline has zero contribution based ' - 'on the contrast & statistic, typically it is not mapped; use this ' - 'option to still contribute to the map even if this is the case ' - '(these non-contributing voxels can then influence the mean value in ' - 'each voxel of the map)') + argstr="-map_zero", + desc="if a streamline has zero contribution based " + "on the contrast & statistic, typically it is not mapped; use this " + "option to still contribute to the map even if this is the case " + "(these non-contributing voxels can then influence the mean value in " + "each voxel of the map)", + ) upsample = traits.Int( - argstr='-upsample %d', - desc='upsample the tracks by' - ' some ratio using Hermite interpolation before ' - 'mappping') + argstr="-upsample %d", + desc="upsample the tracks by" + " some ratio using Hermite interpolation before " + "mapping", + ) precise = traits.Bool( - argstr='-precise', - desc='use a more precise streamline mapping ' - 'strategy, that accurately quantifies the length through each voxel ' - '(these lengths 
are then taken into account during TWI calculation)') + argstr="-precise", + desc="use a more precise streamline mapping " + "strategy, that accurately quantifies the length through each voxel " + "(these lengths are then taken into account during TWI calculation)", + ) ends_only = traits.Bool( - argstr='-ends_only', - desc='only map the streamline' - ' endpoints to the image') + argstr="-ends_only", desc="only map the streamline endpoints to the image" + ) tck_weights = File( exists=True, - argstr='-tck_weights_in %s', - desc='specify' - ' a text scalar file containing the streamline weights') + argstr="-tck_weights_in %s", + desc="specify a text scalar file containing the streamline weights", + ) nthreads = traits.Int( - argstr='-nthreads %d', - desc='number of threads. if zero, the number' - ' of available cpus will be used', - nohash=True) + argstr="-nthreads %d", + desc="number of threads. if zero, the number of available cpus will be used", + nohash=True, + ) class ComputeTDIOutputSpec(TraitedSpec): - out_file = File(desc='output TDI file') + out_file = File(desc="output TDI file") class ComputeTDI(MRTrix3Base): @@ -419,51 +585,47 @@ class ComputeTDI(MRTrix3Base): >>> tdi.run() # doctest: +SKIP """ - _cmd = 'tckmap' + _cmd = "tckmap" input_spec = ComputeTDIInputSpec output_spec = ComputeTDIOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class TCK2VTKInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='input tractography') + exists=True, argstr="%s", mandatory=True, position=-2, desc="input tractography" + ) out_file = File( - 'tracks.vtk', - argstr='%s', - usedefault=True, - position=-1, - desc='output VTK file') + "tracks.vtk", argstr="%s", usedefault=True, position=-1, desc="output VTK file" + ) reference = File( exists=True, - argstr='-image %s', - desc='if specified, the properties of' - ' this image will be used to convert track point positions from real ' - '(scanner) coordinates into image coordinates (in mm).') + argstr="-image %s", + desc="if specified, the properties of" + " this image will be used to convert track point positions from real " + "(scanner) coordinates into image coordinates (in mm).", + ) voxel = File( exists=True, - argstr='-image %s', - desc='if specified, the properties of' - ' this image will be used to convert track point positions from real ' - '(scanner) coordinates into image coordinates.') + argstr="-image %s", + desc="if specified, the properties of" + " this image will be used to convert track point positions from real " + "(scanner) coordinates into image coordinates.", + ) nthreads = traits.Int( - argstr='-nthreads %d', - desc='number of threads. if zero, the number' - ' of available cpus will be used', - nohash=True) + argstr="-nthreads %d", + desc="number of threads. 
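# Sketch of the ComputeTDI (tckmap) spec above, combining the precise
# length-weighted mapping with ``map_zero`` (hypothetical file names):
import nipype.interfaces.mrtrix3 as mrt

tdi = mrt.ComputeTDI()
tdi.inputs.in_file = 'tracks.tck'
tdi.inputs.reference = 'dwi.mif'  # template grid for the output map
tdi.inputs.precise = True         # quantify the length through each voxel
tdi.inputs.map_zero = True        # keep zero-contribution streamlines
tdi.run()  # doctest: +SKIP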
if zero, the number of available cpus will be used", + nohash=True, + ) class TCK2VTKOutputSpec(TraitedSpec): - out_file = File(desc='output VTK file') + out_file = File(desc="output VTK file") class TCK2VTK(MRTrix3Base): @@ -483,38 +645,36 @@ class TCK2VTK(MRTrix3Base): >>> vtk.run() # doctest: +SKIP """ - _cmd = 'tck2vtk' + _cmd = "tck2vtk" input_spec = TCK2VTKInputSpec output_spec = TCK2VTKOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class DWIExtractInputSpec(MRTrix3BaseInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='input image') - out_file = File( - argstr='%s', mandatory=True, position=-1, desc='output image') - bzero = traits.Bool(argstr='-bzero', desc='extract b=0 volumes') - nobzero = traits.Bool(argstr='-no_bzero', desc='extract non b=0 volumes') + exists=True, argstr="%s", mandatory=True, position=-2, desc="input image" + ) + out_file = File(argstr="%s", mandatory=True, position=-1, desc="output image") + bzero = traits.Bool(argstr="-bzero", desc="extract b=0 volumes") + nobzero = traits.Bool(argstr="-no_bzero", desc="extract non b=0 volumes") singleshell = traits.Bool( - argstr='-singleshell', desc='extract volumes with a specific shell') + argstr="-singleshell", desc="extract volumes with a specific shell" + ) shell = traits.List( traits.Float, - sep=',', - argstr='-shell %s', - desc='specify one or more gradient shells') + sep=",", + argstr="-shell %s", + desc="specify one or more gradient shells", + ) class DWIExtractOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output image') + out_file = File(exists=True, desc="output image") class DWIExtract(MRTrix3Base): @@ -536,54 +696,71 @@ class DWIExtract(MRTrix3Base): >>> dwiextract.run() # doctest: +SKIP """ - _cmd = 'dwiextract' + _cmd = "dwiextract" input_spec = DWIExtractInputSpec output_spec = DWIExtractOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class MRConvertInputSpec(MRTrix3BaseInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='input image') + exists=True, argstr="%s", mandatory=True, position=-2, desc="input image" + ) out_file = File( - 'dwi.mif', - argstr='%s', + "dwi.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output image') + desc="output image", + ) coord = traits.List( - traits.Float, - sep=' ', - argstr='-coord %s', - desc='extract data at the specified coordinates') + traits.Int, + sep=" ", + argstr="-coord %s", + desc="extract data at the specified coordinates", + ) vox = traits.List( - traits.Float, - sep=',', - argstr='-vox %s', - desc='change the voxel dimensions') + traits.Float, sep=",", argstr="-vox %s", desc="change the voxel dimensions" + ) axes = traits.List( traits.Int, - sep=',', - argstr='-axes %s', - desc='specify the axes that will be used') + sep=",", + argstr="-axes %s", + desc="specify the axes that will be used", + ) scaling = traits.List( traits.Float, - sep=',', - argstr='-scaling %s', - desc='specify the data scaling parameter') + sep=",", + argstr="-scaling %s", + desc="specify the data scaling parameter", + ) + json_import = File( + exists=True, + argstr="-json_import %s", + mandatory=False, + desc="import data from a JSON file into header 
key-value pairs", + ) + json_export = File( + exists=False, + argstr="-json_export %s", + mandatory=False, + desc="export data from an image header key-value pairs into a JSON file", + ) class MRConvertOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output image') + out_file = File(exists=True, desc="output image") + json_export = File( + exists=True, + desc="exported data from an image header key-value pairs in a JSON file", + ) + out_bvec = File(exists=True, desc="export bvec file in FSL format") + out_bval = File(exists=True, desc="export bvec file in FSL format") class MRConvert(MRTrix3Base): @@ -603,50 +780,211 @@ class MRConvert(MRTrix3Base): >>> mrconvert.run() # doctest: +SKIP """ - _cmd = 'mrconvert' + _cmd = "mrconvert" input_spec = MRConvertInputSpec output_spec = MRConvertOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) + if self.inputs.json_export: + outputs["json_export"] = op.abspath(self.inputs.json_export) + if self.inputs.out_bvec: + outputs["out_bvec"] = op.abspath(self.inputs.out_bvec) + if self.inputs.out_bval: + outputs["out_bval"] = op.abspath(self.inputs.out_bval) return outputs -class MRMathInputSpec(MRTrix3BaseInputSpec): +class TransformFSLConvertInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, - position=-3, - desc='input image') + position=1, + desc="FLIRT input image", + ) + reference = File( + exists=True, + argstr="%s", + mandatory=True, + position=2, + desc="FLIRT reference image", + ) + in_transform = File( + exists=True, + argstr="%s", + mandatory=True, + position=0, + desc="FLIRT output transformation matrix", + ) + out_transform = File( + "transform_mrtrix.txt", + argstr="%s", + mandatory=True, + position=-1, + usedefault=True, + desc="output transformed affine in mrtrix3's format", + ) + flirt_import = traits.Bool( + True, + argstr="flirt_import", + mandatory=True, + usedefault=True, + position=-2, + desc="import transform from FSL's FLIRT.", + ) + + +class TransformFSLConvertOutputSpec(TraitedSpec): + out_transform = File( + exists=True, desc="output transformed affine in mrtrix3's format" + ) + + +class TransformFSLConvert(MRTrix3Base): + """ + Perform conversion between FSL's transformation matrix format to mrtrix3's. + """ + + _cmd = "transformconvert" + input_spec = TransformFSLConvertInputSpec + output_spec = TransformFSLConvertOutputSpec + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs["out_transform"] = op.abspath(self.inputs.out_transform) + return outputs + + +class MRTransformInputSpec(MRTrix3BaseInputSpec): + in_files = InputMultiPath( + File(exists=True), + argstr="%s", + mandatory=True, + position=-2, + desc="Input images to be transformed", + ) out_file = File( - argstr='%s', mandatory=True, position=-1, desc='output image') + genfile=True, + argstr="%s", + position=-1, + desc="Output image", + ) + invert = traits.Bool( + argstr="-inverse", + desc="Invert the specified transform before using it", + ) + linear_transform = File( + exists=True, + argstr="-linear %s", + desc=( + "Specify a linear transform to apply, in the form of a 3x4 or 4x4 ascii file. " + "Note the standard reverse convention is used, " + "where the transform maps points in the template image to the moving image. " + "Note that the reverse convention is still assumed even if no -template image is supplied." 
+ ), + ) + replace_transform = traits.Bool( + argstr="-replace", + desc="replace the current transform by that specified, rather than applying it to the current transform", + ) + transformation_file = File( + exists=True, + argstr="-transform %s", + desc="The transform to apply, in the form of a 4x4 ascii file.", + ) + template_image = File( + exists=True, + argstr="-template %s", + desc="Reslice the input image to match the specified template image.", + ) + reference_image = File( + exists=True, + argstr="-reference %s", + desc="in case the transform supplied maps from the input image onto a reference image, use this option to specify the reference. Note that this implicitly sets the -replace option.", + ) + flip_x = traits.Bool( + argstr="-flipx", + desc="assume the transform is supplied assuming a coordinate system with the x-axis reversed relative to the MRtrix convention (i.e. x increases from right to left). This is required to handle transform matrices produced by FSL's FLIRT command. This is only used in conjunction with the -reference option.", + ) + quiet = traits.Bool( + argstr="-quiet", + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") + + +class MRTransformOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="the output image of the transformation") + + +class MRTransform(MRTrix3Base): + """ + Apply spatial transformations or reslice images + + Example + ------- + + >>> MRxform = MRTransform() + >>> MRxform.inputs.in_files = 'anat_coreg.mif' + >>> MRxform.run() # doctest: +SKIP + """ + + _cmd = "mrtransform" + input_spec = MRTransformInputSpec + output_spec = MRTransformOutputSpec + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): + outputs["out_file"] = op.abspath(self._gen_outfilename()) + else: + outputs["out_file"] = op.abspath(outputs["out_file"]) + return outputs + + def _gen_filename(self, name): + if name == "out_file": + return self._gen_outfilename() + else: + return None + + def _gen_outfilename(self): + _, name, _ = split_filename(self.inputs.in_files[0]) + return name + "_MRTransform.mif" + + +class MRMathInputSpec(MRTrix3BaseInputSpec): + in_file = File( + exists=True, argstr="%s", mandatory=True, position=-3, desc="input image" + ) + out_file = File(argstr="%s", mandatory=True, position=-1, desc="output image") operation = traits.Enum( - 'mean', - 'median', - 'sum', - 'product', - 'rms', - 'norm', - 'var', - 'std', - 'min', - 'max', - 'absmax', - 'magmax', - argstr='%s', + "mean", + "median", + "sum", + "product", + "rms", + "norm", + "var", + "std", + "min", + "max", + "absmax", + "magmax", + argstr="%s", position=-2, mandatory=True, - desc='operation to computer along a specified axis') + desc="operation to compute along a specified axis", + ) axis = traits.Int( - 0, - argstr='-axis %d', - desc='specfied axis to perform the operation along') + 0, argstr="-axis %d", desc="specified axis to perform the operation along" + ) class MRMathOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output image') + out_file = File(exists=True, desc="output image") class MRMath(MRTrix3Base): @@ -669,11 +1007,406 @@ >>> mrmath.run() # doctest: +SKIP """ - _cmd = 'mrmath' + _cmd = "mrmath" input_spec = MRMathInputSpec output_spec = MRMathOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = 
op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) + return outputs + + +class MRResizeInputSpec(MRTrix3BaseInputSpec): + in_file = File( + exists=True, argstr="%s", position=-2, mandatory=True, desc="input DWI image" + ) + image_size = Tuple( + (traits.Int, traits.Int, traits.Int), + argstr="-size %d,%d,%d", + mandatory=True, + desc="Number of voxels in each dimension of output image", + xor=["voxel_size", "scale_factor"], + ) + voxel_size = Tuple( + (traits.Float, traits.Float, traits.Float), + argstr="-voxel %g,%g,%g", + mandatory=True, + desc="Desired voxel size in mm for the output image", + xor=["image_size", "scale_factor"], + ) + scale_factor = Tuple( + (traits.Float, traits.Float, traits.Float), + argstr="-scale %g,%g,%g", + mandatory=True, + desc="Scale factors to rescale the image by in each dimension", + xor=["image_size", "voxel_size"], + ) + interpolation = traits.Enum( + "cubic", + "nearest", + "linear", + "sinc", + argstr="-interp %s", + usedefault=True, + desc="set the interpolation method to use when resizing (choices: " + "nearest, linear, cubic, sinc. Default: cubic).", + ) + out_file = File( + argstr="%s", + name_template="%s_resized", + name_source=["in_file"], + keep_extension=True, + position=-1, + desc="the output resized DWI image", + ) + + +class MRResizeOutputSpec(TraitedSpec): + out_file = File(desc="the output resized DWI image", exists=True) + + +class MRResize(MRTrix3Base): + """ + Resize an image by defining the new image resolution, voxel size or a + scale factor. If the image is 4D, then only the first 3 dimensions can be + resized. Also, if the image is down-sampled, the appropriate smoothing is + automatically applied using Gaussian smoothing. + For more information, see + + + Example + ------- + >>> import nipype.interfaces.mrtrix3 as mrt + + Defining the new image resolution: + >>> image_resize = mrt.MRResize() + >>> image_resize.inputs.in_file = 'dwi.mif' + >>> image_resize.inputs.image_size = (256, 256, 144) + >>> image_resize.cmdline # doctest: +ELLIPSIS + 'mrresize -size 256,256,144 -interp cubic dwi.mif dwi_resized.mif' + >>> image_resize.run() # doctest: +SKIP + + Defining the new image's voxel size: + >>> voxel_resize = mrt.MRResize() + >>> voxel_resize.inputs.in_file = 'dwi.mif' + >>> voxel_resize.inputs.voxel_size = (1, 1, 1) + >>> voxel_resize.cmdline # doctest: +ELLIPSIS + 'mrresize -interp cubic -voxel 1,1,1 dwi.mif dwi_resized.mif' + >>> voxel_resize.run() # doctest: +SKIP + + Defining the scale factor of each image dimension: + >>> scale_resize = mrt.MRResize() + >>> scale_resize.inputs.in_file = 'dwi.mif' + >>> scale_resize.inputs.scale_factor = (0.5,0.5,0.5) + >>> scale_resize.cmdline # doctest: +ELLIPSIS + 'mrresize -interp cubic -scale 0.5,0.5,0.5 dwi.mif dwi_resized.mif' + >>> scale_resize.run() # doctest: +SKIP + """ + + _cmd = "mrresize" + input_spec = MRResizeInputSpec + output_spec = MRResizeOutputSpec + + +class SHConvInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + argstr="%s", + mandatory=True, + position=-3, + desc="input ODF image", + ) + # General options + response = File( + exists=True, + mandatory=True, + argstr="%s", + position=-2, + desc=("The response function"), + ) + out_file = File( + name_template="%s_shconv.mif", + name_source=["in_file"], + argstr="%s", + position=-1, + usedefault=True, + desc="the output spherical harmonics", + ) + + +class SHConvOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="the output convoluted spherical harmonics 
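# MRMath (closed just above) pairs an ``operation`` with an ``axis``;
# averaging a 4D series across volumes is the typical use (hypothetical
# file names):
import nipype.interfaces.mrtrix3 as mrt

mrmath = mrt.MRMath()
mrmath.inputs.in_file = 'b0s.mif'
mrmath.inputs.operation = 'mean'
mrmath.inputs.axis = 3  # reduce across the volume axis
mrmath.inputs.out_file = 'b0_mean.mif'
mrmath.run()  # doctest: +SKIP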
file") + + +class SHConv(CommandLine): + """ + Convolve spherical harmonics with a tissue response function. Useful for + checking residuals of ODF estimates. + + + Example + ------- + + >>> import nipype.interfaces.mrtrix3 as mrt + >>> sh = mrt.SHConv() + >>> sh.inputs.in_file = 'csd.mif' + >>> sh.inputs.response = 'response.txt' + >>> sh.cmdline + 'shconv csd.mif response.txt csd_shconv.mif' + >>> sh.run() # doctest: +SKIP + """ + + _cmd = "shconv" + input_spec = SHConvInputSpec + output_spec = SHConvOutputSpec + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs["out_file"] = op.abspath(self.inputs.out_file) + return outputs + + +class SH2AmpInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + argstr="%s", + mandatory=True, + position=-3, + desc="input ODF image", + ) + # General options + directions = File( + exists=True, + mandatory=True, + argstr="%s", + position=-2, + desc=( + "The gradient directions along which to sample the spherical " + "harmonics MRtrix format" + ), + ) + out_file = File( + name_template="%s_amp.mif", + name_source=["in_file"], + argstr="%s", + position=-1, + usedefault=True, + desc="the output spherical harmonics", + ) + nonnegative = traits.Bool( + argstr="-nonnegative", desc="cap all negative amplitudes to zero" + ) + + +class SH2AmpOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="the output convoluted spherical harmonics file") + + +class SH2Amp(CommandLine): + """ + Sample spherical harmonics on a set of gradient orientations. Useful for + checking residuals of ODF estimates. + + + Example + ------- + + >>> import nipype.interfaces.mrtrix3 as mrt + >>> sh = mrt.SH2Amp() + >>> sh.inputs.in_file = 'sh.mif' + >>> sh.inputs.directions = 'grads.txt' + >>> sh.cmdline + 'sh2amp sh.mif grads.txt sh_amp.mif' + >>> sh.run() # doctest: +SKIP + """ + + _cmd = "sh2amp" + input_spec = SH2AmpInputSpec + output_spec = SH2AmpOutputSpec + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs["out_file"] = op.abspath(self.inputs.out_file) + return outputs + + +class MaskFilterInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + mandatory=True, + argstr="%s", + position=-3, + desc="Input mask", + ) + filter = traits.Str( + mandatory=True, + argstr="%s", + position=-2, + desc="Filter to perform (e.g. dilate, erode)", + ) + out_file = File( + name_source=["input_image"], + mandatory=True, + argstr="%s", + position=-1, + desc="Output mask", + ) + npass = traits.Int(argstr="-npass %d", position=1, desc="Number of passes") + + +class MaskFilterOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="the filtered output mask") + + +class MaskFilter(CommandLine): + """ + Perform filtering operations on 3D / 4D mask images. + Only supports dilate / erode filters at the moment. 
+ For more information see: https://mrtrix.readthedocs.io/en/latest/reference/commands/maskfilter.html + + + Example + ------- + + >>> import nipype.interfaces.mrtrix3 as mrt + >>> mf = mrt.MaskFilter() + >>> mf.inputs.in_file = 'mask.mif' + >>> mf.inputs.filter = 'dilate' + >>> mf.inputs.npass = 2 + >>> mf.inputs.out_file = 'mask_filtered.mif' + >>> mf.cmdline + 'maskfilter -npass 2 mask.mif dilate mask_filtered.mif' + >>> mf.run() # doctest: +SKIP + """ + + _cmd = "maskfilter" + input_spec = MaskFilterInputSpec + output_spec = MaskFilterOutputSpec + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs["out_file"] = op.abspath(self.inputs.out_file) + return outputs + + +class MTNormaliseInputSpec(MRTrix3BaseInputSpec): + wm_fod = File( + argstr="%s", + exists=True, + position=1, + desc="input fod of white matter tissue compartment", + ) + out_file_wm = File( + argstr="%s", position=2, desc="output file of white matter tissue compartment" + ) + gm_fod = File( + argstr="%s", + exists=True, + position=3, + desc="input fod of grey matter tissue compartment", + ) + out_file_gm = File( + argstr="%s", position=4, desc="output file of grey matter tissue compartment" + ) + csf_fod = File( + argstr="%s", exists=True, position=5, desc="input fod of CSF tissue compartment" + ) + out_file_csf = File( + argstr="%s", position=6, desc="output file of CSF tissue compartment 3" + ) + mask = File(argstr="-mask %s", exists=True, position=-1, desc="input brain mask") + + +class MTNormaliseOutputSpec(TraitedSpec): + out_file_wm = File(exists=True, desc="the normalized white matter fod") + out_file_gm = File(exists=True, desc="the normalized grey matter fod") + out_file_csf = File(exists=True, desc="the normalized csf fod") + + +class MTNormalise(CommandLine): + """ + Multi-tissue informed log-domain intensity normalisation + + + Example + ------- + + >>> import nipype.interfaces.mrtrix3 as mrt + >>> mtn = mrt.MTNormalise() + >>> mtn.inputs.wm_fod = 'wmfod.mif' + >>> mtn.inputs.gm_fod = 'gmfod.mif' + >>> mtn.inputs.csf_fod = 'csffod.mif' + >>> mtn.inputs.out_file_wm = 'wmfod_norm.mif' + >>> mtn.inputs.out_file_gm = 'gmfod_norm.mif' + >>> mtn.inputs.out_file_csf = 'csffod_norm.mif' + >>> mtn.inputs.mask = 'mask.mif' + >>> mtn.cmdline + 'mtnormalise wmfod.mif wmfod_norm.mif gmfod.mif gmfod_norm.mif csffod.mif csffod_norm.mif -mask mask.mif' + >>> mtn.run() # doctest: +SKIP + """ + + _cmd = "mtnormalise" + input_spec = MTNormaliseInputSpec + output_spec = MTNormaliseOutputSpec + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs["out_file_wm"] = op.abspath(self.inputs.out_file_wm) + outputs["out_file_gm"] = op.abspath(self.inputs.out_file_gm) + outputs["out_file_csf"] = op.abspath(self.inputs.out_file_csf) + return outputs + + +class Generate5tt2gmwmiInputSpec(MRTrix3BaseInputSpec): + in_file = File( + exists=True, + argstr="%s", + mandatory=True, + position=-2, + desc="the input 5TT segmented anatomical image", + ) + mask_out = File( + "mask_gmwmi.mif", + argstr="%s", + mandatory=True, + position=-1, + desc="the output mask image", + ) + mask_in = File( + argstr="-mask_in %s", + position=-3, + desc="filter an input mask image according to those voxels that lie upon the grey matter - white matter boundary", + ) + + +class Generate5tt2gmwmiOutputSpec(TraitedSpec): + mask_out = File(exists=True, desc="the output mask file") + + +class Generate5tt2gmwmi(CommandLine): + """ + Generate a mask image appropriate for seeding streamlines on + the grey matter-white matter 
interface + + + Example + ------- + + >>> import nipype.interfaces.mrtrix3 as mrt + >>> gmwmi = mrt.Generate5tt2gmwmi() + >>> gmwmi.inputs.in_file = '5tt_in.mif' + >>> gmwmi.inputs.mask_out = 'mask_gmwmi.mif' + >>> gmwmi.cmdline + '5tt2gmwmi 5tt_in.mif mask_gmwmi.mif' + >>> gmwmi.run() # doctest: +SKIP + """ + + _cmd = "5tt2gmwmi" + input_spec = Generate5tt2gmwmiInputSpec + output_spec = Generate5tt2gmwmiOutputSpec + + def _list_outputs(self): + outputs = self.output_spec().get() + outputs["mask_out"] = op.abspath(self.inputs.mask_out) return outputs diff --git a/nipype/interfaces/niftyfit/__init__.py b/nipype/interfaces/niftyfit/__init__.py index b9d4725496..d945991fa0 100644 --- a/nipype/interfaces/niftyfit/__init__.py +++ b/nipype/interfaces/niftyfit/__init__.py @@ -1,12 +1,12 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ -The niftyfit module provides classes for interfacing with the `NiftyFit`_ -command line tools. +NiftyFit is a software package for multi-parametric model-fitting of 4D MRI. -Top-level namespace for niftyfit. -""" +The niftyfit module provides classes for interfacing with the `NiftyFit +`__ command line tools. +""" from .asl import FitAsl from .dwi import FitDwi, DwiTool from .qt1 import FitQt1 diff --git a/nipype/interfaces/niftyfit/asl.py b/nipype/interfaces/niftyfit/asl.py index c4920dc195..a23507bbd4 100644 --- a/nipype/interfaces/niftyfit/asl.py +++ b/nipype/interfaces/niftyfit/asl.py @@ -4,86 +4,85 @@ The ASL module of niftyfit, which wraps the fitting methods in NiftyFit. """ -from ..base import TraitedSpec, traits, CommandLineInputSpec +from ..base import File, TraitedSpec, traits, Tuple, CommandLineInputSpec from .base import NiftyFitCommand from ..niftyreg.base import get_custom_path class FitAslInputSpec(CommandLineInputSpec): - """ Input Spec for FitAsl. """ - desc = 'Filename of the 4D ASL (control/label) source image (mandatory).' - source_file = traits.File( - position=1, - exists=True, - argstr='-source %s', - mandatory=True, - desc=desc) - pasl = traits.Bool(desc='Fit PASL ASL data [default]', argstr='-pasl') - pcasl = traits.Bool(desc='Fit PCASL ASL data', argstr='-pcasl') + """Input Spec for FitAsl.""" + + desc = "Filename of the 4D ASL (control/label) source image (mandatory)." + source_file = File( + position=1, exists=True, argstr="-source %s", mandatory=True, desc=desc + ) + pasl = traits.Bool(desc="Fit PASL ASL data [default]", argstr="-pasl") + pcasl = traits.Bool(desc="Fit PCASL ASL data", argstr="-pcasl") # *** Output options: - desc = 'Filename of the Cerebral Blood Flow map (in ml/100g/min).' - cbf_file = traits.File( - name_source=['source_file'], - name_template='%s_cbf.nii.gz', - argstr='-cbf %s', - desc=desc) - error_file = traits.File( - name_source=['source_file'], - name_template='%s_error.nii.gz', - argstr='-error %s', - desc='Filename of the CBF error map.') - syn_file = traits.File( - name_source=['source_file'], - name_template='%s_syn.nii.gz', - argstr='-syn %s', - desc='Filename of the synthetic ASL data.') + desc = "Filename of the Cerebral Blood Flow map (in ml/100g/min)." 
+ cbf_file = File( + name_source=["source_file"], + name_template="%s_cbf.nii.gz", + argstr="-cbf %s", + desc=desc, + ) + error_file = File( + name_source=["source_file"], + name_template="%s_error.nii.gz", + argstr="-error %s", + desc="Filename of the CBF error map.", + ) + syn_file = File( + name_source=["source_file"], + name_template="%s_syn.nii.gz", + argstr="-syn %s", + desc="Filename of the synthetic ASL data.", + ) # *** Input options (see also fit_qt1 for generic T1 fitting): - desc = 'Filename of the estimated input T1 map (in ms).' - t1map = traits.File(exists=True, argstr='-t1map %s', desc=desc) - desc = 'Filename of the estimated input M0 map.' - m0map = traits.File(exists=True, argstr='-m0map %s', desc=desc) - desc = 'Filename of the estimated input M0 map error.' - m0mape = traits.File(exists=True, argstr='-m0mape %s', desc=desc) - desc = 'Filename of a [1,2,5]s Inversion Recovery volume (T1/M0 fitting \ -carried out internally).' - - ir_volume = traits.File(exists=True, argstr='-IRvolume %s', desc=desc) - desc = 'Output of [1,2,5]s Inversion Recovery fitting.' - ir_output = traits.File(exists=True, argstr='-IRoutput %s', desc=desc) + desc = "Filename of the estimated input T1 map (in ms)." + t1map = File(exists=True, argstr="-t1map %s", desc=desc) + desc = "Filename of the estimated input M0 map." + m0map = File(exists=True, argstr="-m0map %s", desc=desc) + desc = "Filename of the estimated input M0 map error." + m0mape = File(exists=True, argstr="-m0mape %s", desc=desc) + desc = "Filename of a [1,2,5]s Inversion Recovery volume (T1/M0 fitting \ +carried out internally)." + + ir_volume = File(exists=True, argstr="-IRvolume %s", desc=desc) + desc = "Output of [1,2,5]s Inversion Recovery fitting." + ir_output = File(exists=True, argstr="-IRoutput %s", desc=desc) # *** Experimental options (Choose those suitable for the model!): - mask = traits.File( - position=2, - exists=True, - desc='Filename of image mask.', - argstr='-mask %s') + mask = File( + position=2, exists=True, desc="Filename of image mask.", argstr="-mask %s" + ) t1_art_cmp = traits.Float( - desc='T1 of arterial component [1650ms].', argstr='-T1a %f') - desc = 'Single plasma/tissue partition coefficient [0.9ml/g].' - plasma_coeff = traits.Float(desc=desc, argstr='-L %f') - desc = 'Labelling efficiency [0.99 (pasl), 0.85 (pcasl)], ensure any \ -background suppression pulses are included in -eff' + desc="T1 of arterial component [1650ms].", argstr="-T1a %f" + ) + desc = "Single plasma/tissue partition coefficient [0.9ml/g]." + plasma_coeff = traits.Float(desc=desc, argstr="-L %f") + desc = "Labelling efficiency [0.99 (pasl), 0.85 (pcasl)], ensure any \ +background suppression pulses are included in -eff" - eff = traits.Float(desc=desc, argstr='-eff %f') - desc = 'Outlier rejection for multi CL volumes (enter z-score threshold \ -(e.g. 2.5)) [off].' + eff = traits.Float(desc=desc, argstr="-eff %f") + desc = "Outlier rejection for multi CL volumes (enter z-score threshold \ +(e.g. 2.5)) [off]." - out = traits.Float(desc=desc, argstr='-out %f') + out = traits.Float(desc=desc, argstr="-out %f") # *** PCASL options (Choose those suitable for the model!): - pld = traits.Float(desc='Post Labelling Delay [2000ms].', argstr='-PLD %f') - ldd = traits.Float(desc='Labelling Duration [1800ms].', argstr='-LDD %f') - desc = 'Difference in labelling delay per slice [0.0 ms/slice.' 
-    dpld = traits.Float(desc=desc, argstr='-dPLD %f')
+    pld = traits.Float(desc="Post Labelling Delay [2000ms].", argstr="-PLD %f")
+    ldd = traits.Float(desc="Labelling Duration [1800ms].", argstr="-LDD %f")
+    desc = "Difference in labelling delay per slice [0.0 ms/slice]."
+    dpld = traits.Float(desc=desc, argstr="-dPLD %f")
 
     # *** PASL options (Choose those suitable for the model!):
 
-    t_inv1 = traits.Float(
-        desc='Saturation pulse time [800ms].', argstr='-Tinv1 %f')
-    t_inv2 = traits.Float(desc='Inversion time [2000ms].', argstr='-Tinv2 %f')
-    desc = 'Difference in inversion time per slice [0ms/slice].'
-    dt_inv2 = traits.Float(desc=desc, argstr='-dTinv2 %f')
+    t_inv1 = traits.Float(desc="Saturation pulse time [800ms].", argstr="-Tinv1 %f")
+    t_inv2 = traits.Float(desc="Inversion time [2000ms].", argstr="-Tinv2 %f")
+    desc = "Difference in inversion time per slice [0ms/slice]."
+    dt_inv2 = traits.Float(desc=desc, argstr="-dTinv2 %f")
 
     # *** Other experimental assumptions:
@@ -91,51 +90,53 @@ class FitAslInputSpec(CommandLineInputSpec):
     # desc = 'Slope and intercept for Arterial Transit Time.'
     # ATT = traits.Float(desc=desc, argstr='-ATT %f')
 
-    gm_t1 = traits.Float(desc='T1 of GM [1150ms].', argstr='-gmT1 %f')
+    gm_t1 = traits.Float(desc="T1 of GM [1150ms].", argstr="-gmT1 %f")
     gm_plasma = traits.Float(
-        desc='Plasma/GM water partition [0.95ml/g].', argstr='-gmL %f')
-    gm_ttt = traits.Float(desc='Time to GM [ATT+0ms].', argstr='-gmTTT %f')
-    wm_t1 = traits.Float(desc='T1 of WM [800ms].', argstr='-wmT1 %f')
+        desc="Plasma/GM water partition [0.95ml/g].", argstr="-gmL %f"
+    )
+    gm_ttt = traits.Float(desc="Time to GM [ATT+0ms].", argstr="-gmTTT %f")
+    wm_t1 = traits.Float(desc="T1 of WM [800ms].", argstr="-wmT1 %f")
     wm_plasma = traits.Float(
-        desc='Plasma/WM water partition [0.82ml/g].', argstr='-wmL %f')
-    wm_ttt = traits.Float(desc='Time to WM [ATT+0ms].', argstr='-wmTTT %f')
+        desc="Plasma/WM water partition [0.82ml/g].", argstr="-wmL %f"
+    )
+    wm_ttt = traits.Float(desc="Time to WM [ATT+0ms].", argstr="-wmTTT %f")
 
     # *** Segmentation options:
-    desc = 'Filename of the 4D segmentation (in ASL space) for L/T1 \
-estimation and PV correction {WM,GM,CSF}.'
-
-    seg = traits.File(exists=True, argstr='-seg %s', desc=desc)
-    desc = 'Use sigmoid to estimate L from T1: L(T1|gmL,wmL) [Off].'
-    sig = traits.Bool(desc=desc, argstr='-sig')
-    desc = 'Simple PV correction (CBF=vg*CBFg + vw*CBFw, with CBFw=f*CBFg) \
-[0.25].'
-
-    pv0 = traits.Int(desc=desc, argstr='-pv0 %d')
-    pv2 = traits.Int(desc='In plane PV kernel size [3x3].', argstr='-pv2 %d')
-    pv3 = traits.Tuple(
+    desc = "Filename of the 4D segmentation (in ASL space) for L/T1 \
+estimation and PV correction {WM,GM,CSF}."
+
+    seg = File(exists=True, argstr="-seg %s", desc=desc)
+    desc = "Use sigmoid to estimate L from T1: L(T1|gmL,wmL) [Off]."
+    sig = traits.Bool(desc=desc, argstr="-sig")
+    desc = "Simple PV correction (CBF=vg*CBFg + vw*CBFw, with CBFw=f*CBFg) \
+[0.25]."
+
+    pv0 = traits.Int(desc=desc, argstr="-pv0 %d")
+    pv2 = traits.Int(desc="In plane PV kernel size [3x3].", argstr="-pv2 %d")
+    pv3 = Tuple(
         traits.Int,
         traits.Int,
         traits.Int,
-        desc='3D kernel size [3x3x1].',
-        argstr='-pv3 %d %d %d')
-    desc = 'Multiply CBF by this value (e.g. if CL are mislabelled use -1.0).'
-    mul = traits.Float(desc=desc, argstr='-mul %f')
-    mulgm = traits.Bool(
-        desc='Multiply CBF by segmentation [Off].', argstr='-sig')
-    desc = 'Set PV threshold for switching off LSQR [O.05].'
-    pv_threshold = traits.Bool(desc=desc, argstr='-pvthreshold')
-    segstyle = traits.Bool(
-        desc='Set CBF as [gm,wm] not [wm,gm].', argstr='-segstyle')
+        desc="3D kernel size [3x3x1].",
+        argstr="-pv3 %d %d %d",
+    )
+    desc = "Multiply CBF by this value (e.g. if CL are mislabelled use -1.0)."
+    mul = traits.Float(desc=desc, argstr="-mul %f")
+    mulgm = traits.Bool(desc="Multiply CBF by segmentation [Off].", argstr="-sig")
+    desc = "Set PV threshold for switching off LSQR [0.05]."
+    pv_threshold = traits.Bool(desc=desc, argstr="-pvthreshold")
+    segstyle = traits.Bool(desc="Set CBF as [gm,wm] not [wm,gm].", argstr="-segstyle")
 
 
 class FitAslOutputSpec(TraitedSpec):
-    """ Output Spec for FitAsl. """
-    desc = 'Filename of the Cerebral Blood Flow map (in ml/100g/min).'
-    cbf_file = traits.File(exists=True, desc=desc)
-    desc = 'Filename of the CBF error map.'
-    error_file = traits.File(exists=True, desc=desc)
-    desc = 'Filename of the synthetic ASL data.'
-    syn_file = traits.File(exists=True, desc=desc)
+    """Output Spec for FitAsl."""
+
+    desc = "Filename of the Cerebral Blood Flow map (in ml/100g/min)."
+    cbf_file = File(exists=True, desc=desc)
+    desc = "Filename of the CBF error map."
+    error_file = File(exists=True, desc=desc)
+    desc = "Filename of the synthetic ASL data."
+    syn_file = File(exists=True, desc=desc)
 
 
 class FitAsl(NiftyFitCommand):
@@ -158,7 +159,8 @@ class FitAsl(NiftyFitCommand):
     -syn asl_syn.nii.gz'
     """
-    _cmd = get_custom_path('fit_asl', env_dir='NIFTYFITDIR')
+
+    _cmd = get_custom_path("fit_asl", env_dir="NIFTYFITDIR")
     input_spec = FitAslInputSpec
     output_spec = FitAslOutputSpec
-    _suffix = '_fit_asl'
+    _suffix = "_fit_asl"
diff --git a/nipype/interfaces/niftyfit/base.py b/nipype/interfaces/niftyfit/base.py
index 87c1d63825..dc004c871c 100644
--- a/nipype/interfaces/niftyfit/base.py
+++ b/nipype/interfaces/niftyfit/base.py
@@ -27,16 +27,17 @@ class NiftyFitCommand(CommandLine):
     """
     Base support interface for NiftyFit commands.
     """
-    _suffix = '_nf'
+
+    _suffix = "_nf"
 
     def __init__(self, **inputs):
-        """ Init method calling super. No version to be checked."""
-        super(NiftyFitCommand, self).__init__(**inputs)
+        """Init method calling super. No version to be checked."""
+        super().__init__(**inputs)
 
     def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None):
-        if basename == '':
-            msg = 'Unable to generate filename for command %s. ' % self.cmd
-            msg += 'basename is not set!'
+        if basename == "":
+            msg = "Unable to generate filename for command %s. " % self.cmd
+            msg += "basename is not set!"
             raise ValueError(msg)
         _, final_bn, final_ext = split_filename(basename)
         if out_dir is None:
@@ -44,5 +45,5 @@ def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None):
         if ext is not None:
             final_ext = ext
         if suffix is not None:
-            final_bn = ''.join((final_bn, suffix))
+            final_bn = f"{final_bn}{suffix}"
         return os.path.abspath(os.path.join(out_dir, final_bn + final_ext))
diff --git a/nipype/interfaces/niftyfit/dwi.py b/nipype/interfaces/niftyfit/dwi.py
index 23b73aea90..6d82809694 100644
--- a/nipype/interfaces/niftyfit/dwi.py
+++ b/nipype/interfaces/niftyfit/dwi.py
@@ -4,254 +4,300 @@
 The dwi module of niftyfit, which wraps the fitting methods in NiftyFit.
 """
 
-from ..base import TraitedSpec, traits, isdefined, CommandLineInputSpec
+from ..base import File, TraitedSpec, traits, Tuple, isdefined, CommandLineInputSpec
 from .base import NiftyFitCommand
 from ..niftyreg.base import get_custom_path
 
 
 class FitDwiInputSpec(CommandLineInputSpec):
-    """ Input Spec for FitDwi. """
+    """Input Spec for FitDwi."""
+
     # Inputs options
-    source_file = traits.File(
+    source_file = File(
         position=1,
         exists=True,
-        argstr='-source %s',
+        argstr="-source %s",
         mandatory=True,
-        desc='The source image containing the dwi data.')
-    desc = 'The file containing the bvalues of the source DWI.'
-    bval_file = traits.File(
-        position=2, exists=True, argstr='-bval %s', mandatory=True, desc=desc)
-    desc = 'The file containing the bvectors of the source DWI.'
-    bvec_file = traits.File(
-        position=3, exists=True, argstr='-bvec %s', mandatory=True, desc=desc)
-    te_file = traits.File(
-        exists=True,
-        argstr='-TE %s',
-        desc='Filename of TEs (ms).',
-        xor=['te_file'])
-    te_value = traits.File(
-        exists=True,
-        argstr='-TE %s',
-        desc='Value of TEs (ms).',
-        xor=['te_file'])
-    mask_file = traits.File(
-        exists=True, desc='The image mask', argstr='-mask %s')
-    desc = 'Filename of parameter priors for -ball and -nod.'
-    prior_file = traits.File(exists=True, argstr='-prior %s', desc=desc)
-    desc = 'Rotate the output tensors according to the q/s form of the image \
-(resulting tensors will be in mm coordinates, default: 0).'
-
-    rot_sform_flag = traits.Int(desc=desc, argstr='-rotsform %d')
+        desc="The source image containing the dwi data.",
+    )
+    desc = "The file containing the bvalues of the source DWI."
+    bval_file = File(
+        position=2, exists=True, argstr="-bval %s", mandatory=True, desc=desc
+    )
+    desc = "The file containing the bvectors of the source DWI."
+    bvec_file = File(
+        position=3, exists=True, argstr="-bvec %s", mandatory=True, desc=desc
+    )
+    te_file = File(
+        exists=True, argstr="-TE %s", desc="Filename of TEs (ms).", xor=["te_value"]
+    )
+    te_value = File(
+        exists=True, argstr="-TE %s", desc="Value of TEs (ms).", xor=["te_file"]
+    )
+    mask_file = File(exists=True, desc="The image mask", argstr="-mask %s")
+    desc = "Filename of parameter priors for -ball and -nod."
+    prior_file = File(exists=True, argstr="-prior %s", desc=desc)
+    desc = "Rotate the output tensors according to the q/s form of the image \
+(resulting tensors will be in mm coordinates, default: 0)."
+ + rot_sform_flag = traits.Int(desc=desc, argstr="-rotsform %d") # generic output options: - error_file = traits.File( - name_source=['source_file'], - name_template='%s_error.nii.gz', - desc='Filename of parameter error maps.', - argstr='-error %s') - res_file = traits.File( - name_source=['source_file'], - name_template='%s_resmap.nii.gz', - desc='Filename of model residual map.', - argstr='-res %s') - syn_file = traits.File( - name_source=['source_file'], - name_template='%s_syn.nii.gz', - desc='Filename of synthetic image.', - argstr='-syn %s') - nodiff_file = traits.File( - name_source=['source_file'], - name_template='%s_no_diff.nii.gz', - desc='Filename of average no diffusion image.', - argstr='-nodiff %s') + error_file = File( + name_source=["source_file"], + name_template="%s_error.nii.gz", + desc="Filename of parameter error maps.", + argstr="-error %s", + ) + res_file = File( + name_source=["source_file"], + name_template="%s_resmap.nii.gz", + desc="Filename of model residual map.", + argstr="-res %s", + ) + syn_file = File( + name_source=["source_file"], + name_template="%s_syn.nii.gz", + desc="Filename of synthetic image.", + argstr="-syn %s", + ) + nodiff_file = File( + name_source=["source_file"], + name_template="%s_no_diff.nii.gz", + desc="Filename of average no diffusion image.", + argstr="-nodiff %s", + ) # Output options, with templated output names based on the source image - mcmap_file = traits.File( - name_source=['source_file'], - name_template='%s_mcmap.nii.gz', - desc='Filename of multi-compartment model parameter map ' - '(-ivim,-ball,-nod)', - argstr='-mcmap %s', - requires=['nodv_flag']) + mcmap_file = File( + name_source=["source_file"], + name_template="%s_mcmap.nii.gz", + desc="Filename of multi-compartment model parameter map (-ivim,-ball,-nod)", + argstr="-mcmap %s", + requires=["nodv_flag"], + ) # Model Specific Output options: - mdmap_file = traits.File( - name_source=['source_file'], - name_template='%s_mdmap.nii.gz', - desc='Filename of MD map/ADC', - argstr='-mdmap %s') - famap_file = traits.File( - name_source=['source_file'], - name_template='%s_famap.nii.gz', - desc='Filename of FA map', - argstr='-famap %s') - v1map_file = traits.File( - name_source=['source_file'], - name_template='%s_v1map.nii.gz', - desc='Filename of PDD map [x,y,z]', - argstr='-v1map %s') - rgbmap_file = traits.File( - name_source=['source_file'], - name_template='%s_rgbmap.nii.gz', - desc='Filename of colour-coded FA map', - argstr='-rgbmap %s', - requires=['dti_flag']) - - desc = 'Use lower triangular (tenmap2) or diagonal, off-diagonal tensor \ -format' - - ten_type = traits.Enum( - 'lower-tri', 'diag-off-diag', desc=desc, usedefault=True) - - tenmap_file = traits.File( - name_source=['source_file'], - name_template='%s_tenmap.nii.gz', - desc='Filename of tensor map [diag,offdiag].', - argstr='-tenmap %s', - requires=['dti_flag']) - tenmap2_file = traits.File( - name_source=['source_file'], - name_template='%s_tenmap2.nii.gz', - desc='Filename of tensor map [lower tri]', - argstr='-tenmap2 %s', - requires=['dti_flag']) + mdmap_file = File( + name_source=["source_file"], + name_template="%s_mdmap.nii.gz", + desc="Filename of MD map/ADC", + argstr="-mdmap %s", + ) + famap_file = File( + name_source=["source_file"], + name_template="%s_famap.nii.gz", + desc="Filename of FA map", + argstr="-famap %s", + ) + v1map_file = File( + name_source=["source_file"], + name_template="%s_v1map.nii.gz", + desc="Filename of PDD map [x,y,z]", + argstr="-v1map %s", + ) + rgbmap_file = File( 
+ name_source=["source_file"], + name_template="%s_rgbmap.nii.gz", + desc="Filename of colour-coded FA map", + argstr="-rgbmap %s", + requires=["dti_flag"], + ) + + desc = "Use lower triangular (tenmap2) or diagonal, off-diagonal tensor \ +format" + + ten_type = traits.Enum("lower-tri", "diag-off-diag", desc=desc, usedefault=True) + + tenmap_file = File( + name_source=["source_file"], + name_template="%s_tenmap.nii.gz", + desc="Filename of tensor map [diag,offdiag].", + argstr="-tenmap %s", + requires=["dti_flag"], + ) + tenmap2_file = File( + name_source=["source_file"], + name_template="%s_tenmap2.nii.gz", + desc="Filename of tensor map [lower tri]", + argstr="-tenmap2 %s", + requires=["dti_flag"], + ) # Methods options - desc = 'Fit single exponential to non-directional data [default with \ -no b-vectors]' + desc = "Fit single exponential to non-directional data [default with \ +no b-vectors]" mono_flag = traits.Bool( desc=desc, - argstr='-mono', + argstr="-mono", position=4, xor=[ - 'ivim_flag', 'dti_flag', 'ball_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' - ]) + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) ivim_flag = traits.Bool( - desc='Fit IVIM model to non-directional data.', - argstr='-ivim', + desc="Fit IVIM model to non-directional data.", + argstr="-ivim", position=4, xor=[ - 'mono_flag', 'dti_flag', 'ball_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' - ]) - desc = 'Fit the tensor model [default with b-vectors].' + "mono_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) + desc = "Fit the tensor model [default with b-vectors]." dti_flag = traits.Bool( desc=desc, - argstr='-dti', + argstr="-dti", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'ball_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) ball_flag = traits.Bool( - desc='Fit the ball and stick model.', - argstr='-ball', + desc="Fit the ball and stick model.", + argstr="-ball", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' - ]) - desc = 'Fit the ball and stick model with optimised PDD.' + "mono_flag", + "ivim_flag", + "dti_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) + desc = "Fit the ball and stick model with optimised PDD." 
     ballv_flag = traits.Bool(
         desc=desc,
-        argstr='-ballv',
+        argstr="-ballv",
         position=4,
         xor=[
-            'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', 'nod_flag',
-            'nodv_flag'
-        ])
+            "mono_flag",
+            "ivim_flag",
+            "dti_flag",
+            "ball_flag",
+            "nod_flag",
+            "nodv_flag",
+        ],
+    )
     nod_flag = traits.Bool(
-        desc='Fit the NODDI model',
-        argstr='-nod',
+        desc="Fit the NODDI model",
+        argstr="-nod",
         position=4,
         xor=[
-            'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', 'ballv_flag',
-            'nodv_flag'
-        ])
+            "mono_flag",
+            "ivim_flag",
+            "dti_flag",
+            "ball_flag",
+            "ballv_flag",
+            "nodv_flag",
+        ],
+    )
     nodv_flag = traits.Bool(
-        desc='Fit the NODDI model with optimised PDD',
-        argstr='-nodv',
+        desc="Fit the NODDI model with optimised PDD",
+        argstr="-nodv",
         position=4,
         xor=[
-            'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', 'ballv_flag',
-            'nod_flag'
-        ])
+            "mono_flag",
+            "ivim_flag",
+            "dti_flag",
+            "ball_flag",
+            "ballv_flag",
+            "nod_flag",
+        ],
+    )
 
     # Experimental options
-    desc = 'Maximum number of non-linear LSQR iterations [100x2 passes])'
-    maxit_val = traits.Int(desc=desc, argstr='-maxit %d', requires=['gn_flag'])
-    desc = 'LM parameters (initial value, decrease rate) [100,1.2].'
-    lm_vals = traits.Tuple(
-        traits.Float,
-        traits.Float,
-        argstr='-lm %f %f',
-        requires=['gn_flag'],
-        desc=desc)
-    desc = 'Use Gauss-Newton algorithm [Levenberg-Marquardt].'
-    gn_flag = traits.Bool(desc=desc, argstr='-gn', xor=['wls_flag'])
-    desc = 'Use Variational Bayes fitting with known prior (currently \
-identity covariance...).'
-
-    vb_flag = traits.Bool(desc=desc, argstr='-vb')
-    cov_file = traits.File(
+    desc = "Maximum number of non-linear LSQR iterations [100x2 passes]"
+    maxit_val = traits.Int(desc=desc, argstr="-maxit %d", requires=["gn_flag"])
+    desc = "LM parameters (initial value, decrease rate) [100,1.2]."
+    lm_vals = Tuple(
+        traits.Float, traits.Float, argstr="-lm %f %f", requires=["gn_flag"], desc=desc
+    )
+    desc = "Use Gauss-Newton algorithm [Levenberg-Marquardt]."
+    gn_flag = traits.Bool(desc=desc, argstr="-gn", xor=["wls_flag"])
+    desc = "Use Variational Bayes fitting with known prior (currently \
+identity covariance...)."
+
+    vb_flag = traits.Bool(desc=desc, argstr="-vb")
+    cov_file = File(
         exists=True,
-        desc='Filename of ithe nc*nc covariance matrix [I]',
-        argstr='-cov %s')
-    wls_flag = traits.Bool(desc=desc, argstr='-wls', xor=['gn_flag'])
-    desc = 'Use location-weighted least squares for DTI fitting [3x3 Gaussian]'
-    swls_val = traits.Float(desc=desc, argstr='-swls %f')
-    slice_no = traits.Int(
-        desc='Fit to single slice number.', argstr='-slice %d')
-    voxel = traits.Tuple(
+        desc="Filename of the nc*nc covariance matrix [I]",
+        argstr="-cov %s",
+    )
+    wls_flag = traits.Bool(desc=desc, argstr="-wls", xor=["gn_flag"])
+    desc = "Use location-weighted least squares for DTI fitting [3x3 Gaussian]"
+    swls_val = traits.Float(desc=desc, argstr="-swls %f")
+    slice_no = traits.Int(desc="Fit to single slice number.", argstr="-slice %d")
+    voxel = Tuple(
         traits.Int,
         traits.Int,
         traits.Int,
-        desc='Fit to single voxel only.',
-        argstr='-voxel %d %d %d')
+        desc="Fit to single voxel only.",
+        argstr="-voxel %d %d %d",
+    )
     diso_val = traits.Float(
-        desc='Isotropic diffusivity for -nod [3e-3]', argstr='-diso %f')
+        desc="Isotropic diffusivity for -nod [3e-3]", argstr="-diso %f"
+    )
     dpr_val = traits.Float(
-        desc='Parallel diffusivity for -nod [1.7e-3].', argstr='-dpr %f')
-    wm_t2_val = traits.Float(
-        desc='White matter T2 value [80ms].', argstr='-wmT2 %f')
-    csf_t2_val = traits.Float(desc='CSF T2 value [400ms].', argstr='-csfT2 %f')
-    desc = 'Threshold for perfusion/diffsuion effects [100].'
-    perf_thr = traits.Float(desc=desc, argstr='-perfthreshold %f')
+        desc="Parallel diffusivity for -nod [1.7e-3].", argstr="-dpr %f"
+    )
+    wm_t2_val = traits.Float(desc="White matter T2 value [80ms].", argstr="-wmT2 %f")
+    csf_t2_val = traits.Float(desc="CSF T2 value [400ms].", argstr="-csfT2 %f")
+    desc = "Threshold for perfusion/diffusion effects [100]."
+    perf_thr = traits.Float(desc=desc, argstr="-perfthreshold %f")
 
     # MCMC options:
-    mcout = traits.File(
-        name_source=['source_file'],
-        name_template='%s_mcout.txt',
-        desc='Filename of mc samples (ascii text file)',
-        argstr='-mcout %s')
+    mcout = File(
+        name_source=["source_file"],
+        name_template="%s_mcout.txt",
+        desc="Filename of mc samples (ascii text file)",
+        argstr="-mcout %s",
+    )
     mcsamples = traits.Int(
-        desc='Number of samples to keep [100].', argstr='-mcsamples %d')
+        desc="Number of samples to keep [100].", argstr="-mcsamples %d"
+    )
     mcmaxit = traits.Int(
-        desc='Number of iterations to run [10,000].', argstr='-mcmaxit %d')
+        desc="Number of iterations to run [10,000].", argstr="-mcmaxit %d"
+    )
     acceptance = traits.Float(
-        desc='Fraction of iterations to accept [0.23].',
-        argstr='-accpetance %f')
+        desc="Fraction of iterations to accept [0.23].", argstr="-accpetance %f"
+    )
 
 
 class FitDwiOutputSpec(TraitedSpec):
-    """ Output Spec for FitDwi.
""" + """Output Spec for FitDwi.""" - error_file = traits.File(desc='Filename of parameter error maps') - res_file = traits.File(desc='Filename of model residual map') - syn_file = traits.File(desc='Filename of synthetic image') - nodiff_file = traits.File(desc='Filename of average no diffusion image.') - mdmap_file = traits.File(desc='Filename of MD map/ADC') - famap_file = traits.File(desc='Filename of FA map') - v1map_file = traits.File(desc='Filename of PDD map [x,y,z]') - rgbmap_file = traits.File(desc='Filename of colour FA map') - tenmap_file = traits.File(desc='Filename of tensor map') - tenmap2_file = traits.File(desc='Filename of tensor map [lower tri]') + error_file = File(desc="Filename of parameter error maps") + res_file = File(desc="Filename of model residual map") + syn_file = File(desc="Filename of synthetic image") + nodiff_file = File(desc="Filename of average no diffusion image.") + mdmap_file = File(desc="Filename of MD map/ADC") + famap_file = File(desc="Filename of FA map") + v1map_file = File(desc="Filename of PDD map [x,y,z]") + rgbmap_file = File(desc="Filename of colour FA map") + tenmap_file = File(desc="Filename of tensor map") + tenmap2_file = File(desc="Filename of tensor map [lower tri]") - mcmap_file = traits.File(desc='Filename of multi-compartment model ' - 'parameter map (-ivim,-ball,-nod).') - mcout = traits.File(desc='Filename of mc samples (ascii text file)') + mcmap_file = File( + desc="Filename of multi-compartment model parameter map (-ivim,-ball,-nod)." + ) + mcout = File(desc="Filename of mc samples (ascii text file)") class FitDwi(NiftyFitCommand): @@ -281,174 +327,231 @@ class FitDwi(NiftyFitCommand): -v1map dwi_v1map.nii.gz' """ - _cmd = get_custom_path('fit_dwi', env_dir='NIFTYFITDIR') + + _cmd = get_custom_path("fit_dwi", env_dir="NIFTYFITDIR") input_spec = FitDwiInputSpec output_spec = FitDwiOutputSpec - _suffix = '_fit_dwi' + _suffix = "_fit_dwi" def _format_arg(self, name, trait_spec, value): - if name == 'tenmap_file' and self.inputs.ten_type != 'diag-off-diag': - return '' - if name == 'tenmap2_file' and self.inputs.ten_type != 'lower-tri': - return '' - return super(FitDwi, self)._format_arg(name, trait_spec, value) + if name == "tenmap_file" and self.inputs.ten_type != "diag-off-diag": + return "" + if name == "tenmap2_file" and self.inputs.ten_type != "lower-tri": + return "" + return super()._format_arg(name, trait_spec, value) class DwiToolInputSpec(CommandLineInputSpec): - """ Input Spec for DwiTool. """ - desc = 'The source image containing the fitted model.' - source_file = traits.File( - position=1, - exists=True, - desc=desc, - argstr='-source %s', - mandatory=True) - desc = 'The file containing the bvalues of the source DWI.' - bval_file = traits.File( - position=2, exists=True, desc=desc, argstr='-bval %s', mandatory=True) - desc = 'The file containing the bvectors of the source DWI.' - bvec_file = traits.File( - position=3, exists=True, desc=desc, argstr='-bvec %s') - b0_file = traits.File( + """Input Spec for DwiTool.""" + + desc = "The source image containing the fitted model." + source_file = File( + position=1, exists=True, desc=desc, argstr="-source %s", mandatory=True + ) + desc = "The file containing the bvalues of the source DWI." + bval_file = File( + position=2, exists=True, desc=desc, argstr="-bval %s", mandatory=True + ) + desc = "The file containing the bvectors of the source DWI." 
+    bvec_file = File(position=3, exists=True, desc=desc, argstr="-bvec %s")
+    b0_file = File(
         position=4,
         exists=True,
-        desc='The B0 image corresponding to the source DWI',
-        argstr='-b0 %s')
-    mask_file = traits.File(
-        position=5, exists=True, desc='The image mask', argstr='-mask %s')
+        desc="The B0 image corresponding to the source DWI",
+        argstr="-b0 %s",
+    )
+    mask_file = File(position=5, exists=True, desc="The image mask", argstr="-mask %s")
 
     # Output options, with templated output names based on the source image
-    desc = 'Filename of multi-compartment model parameter map \
-(-ivim,-ball,-nod)'
+    desc = "Filename of multi-compartment model parameter map \
+(-ivim,-ball,-nod)"
 
-    mcmap_file = traits.File(
-        name_source=['source_file'],
-        name_template='%s_mcmap.nii.gz',
+    mcmap_file = File(
+        name_source=["source_file"],
+        name_template="%s_mcmap.nii.gz",
         desc=desc,
-        argstr='-mcmap %s')
-    desc = 'Filename of synthetic image. Requires: bvec_file/b0_file.'
-    syn_file = traits.File(
-        name_source=['source_file'],
-        name_template='%s_syn.nii.gz',
+        argstr="-mcmap %s",
+    )
+    desc = "Filename of synthetic image. Requires: bvec_file/b0_file."
+    syn_file = File(
+        name_source=["source_file"],
+        name_template="%s_syn.nii.gz",
         desc=desc,
-        argstr='-syn %s',
-        requires=['bvec_file', 'b0_file'])
-    mdmap_file = traits.File(
-        name_source=['source_file'],
-        name_template='%s_mdmap.nii.gz',
-        desc='Filename of MD map/ADC',
-        argstr='-mdmap %s')
-    famap_file = traits.File(
-        name_source=['source_file'],
-        name_template='%s_famap.nii.gz',
-        desc='Filename of FA map',
-        argstr='-famap %s')
-    v1map_file = traits.File(
-        name_source=['source_file'],
-        name_template='%s_v1map.nii.gz',
-        desc='Filename of PDD map [x,y,z]',
-        argstr='-v1map %s')
-    rgbmap_file = traits.File(
-        name_source=['source_file'],
-        name_template='%s_rgbmap.nii.gz',
-        desc='Filename of colour FA map.',
-        argstr='-rgbmap %s')
-    logdti_file = traits.File(
-        name_source=['source_file'],
-        name_template='%s_logdti2.nii.gz',
-        desc='Filename of output logdti map.',
-        argstr='-logdti2 %s')
+        argstr="-syn %s",
+        requires=["bvec_file", "b0_file"],
+    )
+    mdmap_file = File(
+        name_source=["source_file"],
+        name_template="%s_mdmap.nii.gz",
+        desc="Filename of MD map/ADC",
+        argstr="-mdmap %s",
+    )
+    famap_file = File(
+        name_source=["source_file"],
+        name_template="%s_famap.nii.gz",
+        desc="Filename of FA map",
+        argstr="-famap %s",
+    )
+    v1map_file = File(
+        name_source=["source_file"],
+        name_template="%s_v1map.nii.gz",
+        desc="Filename of PDD map [x,y,z]",
+        argstr="-v1map %s",
+    )
+    rgbmap_file = File(
+        name_source=["source_file"],
+        name_template="%s_rgbmap.nii.gz",
+        desc="Filename of colour FA map.",
+        argstr="-rgbmap %s",
+    )
+    logdti_file = File(
+        name_source=["source_file"],
+        name_template="%s_logdti2.nii.gz",
+        desc="Filename of output logdti map.",
+        argstr="-logdti2 %s",
+    )
 
     # Methods options
-    desc = 'Input is a single exponential to non-directional data \
-[default with no b-vectors]'
+    desc = "Input is a single exponential to non-directional data \
+[default with no b-vectors]"
     mono_flag = traits.Bool(
         desc=desc,
         position=6,
-        argstr='-mono',
+        argstr="-mono",
         xor=[
-            'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', 'ballv_flag',
-            'nod_flag', 'nodv_flag'
-        ])
-    desc = 'Inputs is an IVIM model to non-directional data.'
+            "ivim_flag",
+            "dti_flag",
+            "dti_flag2",
+            "ball_flag",
+            "ballv_flag",
+            "nod_flag",
+            "nodv_flag",
+        ],
+    )
+    desc = "Input is an IVIM model to non-directional data."
ivim_flag = traits.Bool( desc=desc, position=6, - argstr='-ivim', + argstr="-ivim", xor=[ - 'mono_flag', 'dti_flag', 'dti_flag2', 'ball_flag', 'ballv_flag', - 'nod_flag', 'nodv_flag' - ]) + "mono_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) dti_flag = traits.Bool( - desc='Input is a tensor model diag/off-diag.', + desc="Input is a tensor model diag/off-diag.", position=6, - argstr='-dti', + argstr="-dti", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag2', 'ball_flag', 'ballv_flag', - 'nod_flag', 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) dti_flag2 = traits.Bool( - desc='Input is a tensor model lower triangular', + desc="Input is a tensor model lower triangular", position=6, - argstr='-dti2', + argstr="-dti2", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', 'ballv_flag', - 'nod_flag', 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) ball_flag = traits.Bool( - desc='Input is a ball and stick model.', + desc="Input is a ball and stick model.", position=6, - argstr='-ball', + argstr="-ball", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ballv_flag', - 'nod_flag', 'nodv_flag' - ]) - desc = 'Input is a ball and stick model with optimised PDD.' + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) + desc = "Input is a ball and stick model with optimised PDD." ballv_flag = traits.Bool( desc=desc, position=6, - argstr='-ballv', + argstr="-ballv", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'nod_flag', 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "nod_flag", + "nodv_flag", + ], + ) nod_flag = traits.Bool( - desc='Input is a NODDI model', + desc="Input is a NODDI model", position=6, - argstr='-nod', + argstr="-nod", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nodv_flag", + ], + ) nodv_flag = traits.Bool( - desc='Input is a NODDI model with optimised PDD', + desc="Input is a NODDI model with optimised PDD", position=6, - argstr='-nodv', + argstr="-nodv", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nod_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + ], + ) # Experimental options diso_val = traits.Float( - desc='Isotropic diffusivity for -nod [3e-3]', argstr='-diso %f') + desc="Isotropic diffusivity for -nod [3e-3]", argstr="-diso %f" + ) dpr_val = traits.Float( - desc='Parallel diffusivity for -nod [1.7e-3].', argstr='-dpr %f') + desc="Parallel diffusivity for -nod [1.7e-3].", argstr="-dpr %f" + ) class DwiToolOutputSpec(TraitedSpec): - """ Output Spec for DwiTool. 
""" - desc = 'Filename of multi-compartment model parameter map \ -(-ivim,-ball,-nod)' + """Output Spec for DwiTool.""" - mcmap_file = traits.File(desc=desc) - syn_file = traits.File(desc='Filename of synthetic image') - mdmap_file = traits.File(desc='Filename of MD map/ADC') - famap_file = traits.File(desc='Filename of FA map') - v1map_file = traits.File(desc='Filename of PDD map [x,y,z]') - rgbmap_file = traits.File(desc='Filename of colour FA map') - logdti_file = traits.File(desc='Filename of output logdti map') + desc = "Filename of multi-compartment model parameter map \ +(-ivim,-ball,-nod)" + + mcmap_file = File(desc=desc) + syn_file = File(desc="Filename of synthetic image") + mdmap_file = File(desc="Filename of MD map/ADC") + famap_file = File(desc="Filename of FA map") + v1map_file = File(desc="Filename of PDD map [x,y,z]") + rgbmap_file = File(desc="Filename of colour FA map") + logdti_file = File(desc="Filename of output logdti map") class DwiTool(NiftyFitCommand): @@ -480,18 +583,21 @@ class DwiTool(NiftyFitCommand): -syn dwi_syn.nii.gz -v1map dwi_v1map.nii.gz' """ - _cmd = get_custom_path('dwi_tool', env_dir='NIFTYFITDIR') + + _cmd = get_custom_path("dwi_tool", env_dir="NIFTYFITDIR") input_spec = DwiToolInputSpec output_spec = DwiToolOutputSpec - _suffix = '_dwi_tool' + _suffix = "_dwi_tool" def _format_arg(self, name, trait_spec, value): - if name == 'syn_file': - if not isdefined(self.inputs.bvec_file) or \ - not isdefined(self.inputs.b0_file): - return '' - if name in ['logdti_file', 'rgbmap_file']: - if not isdefined(self.inputs.dti_flag) and \ - not isdefined(self.inputs.dti_flag2): - return '' - return super(DwiTool, self)._format_arg(name, trait_spec, value) + if name == "syn_file": + if not isdefined(self.inputs.bvec_file) or not isdefined( + self.inputs.b0_file + ): + return "" + if name in ["logdti_file", "rgbmap_file"]: + if not isdefined(self.inputs.dti_flag) and not isdefined( + self.inputs.dti_flag2 + ): + return "" + return super()._format_arg(name, trait_spec, value) diff --git a/nipype/interfaces/niftyfit/qt1.py b/nipype/interfaces/niftyfit/qt1.py index ceefbae281..d868f856ab 100644 --- a/nipype/interfaces/niftyfit/qt1.py +++ b/nipype/interfaces/niftyfit/qt1.py @@ -5,157 +5,156 @@ in NiftyFit. """ -from ..base import TraitedSpec, File, traits, CommandLineInputSpec +from ..base import TraitedSpec, File, traits, Tuple, CommandLineInputSpec from .base import NiftyFitCommand from ..niftyreg.base import get_custom_path class FitQt1InputSpec(CommandLineInputSpec): - """ Input Spec for FitQt1. """ - desc = 'Filename of the 4D Multi-Echo T1 source image.' + """Input Spec for FitQt1.""" + + desc = "Filename of the 4D Multi-Echo T1 source image." source_file = File( - position=1, - exists=True, - desc=desc, - argstr='-source %s', - mandatory=True) + position=1, exists=True, desc=desc, argstr="-source %s", mandatory=True + ) # Output options: t1map_file = File( - name_source=['source_file'], - name_template='%s_t1map.nii.gz', - argstr='-t1map %s', - desc='Filename of the estimated output T1 map (in ms).') + name_source=["source_file"], + name_template="%s_t1map.nii.gz", + argstr="-t1map %s", + desc="Filename of the estimated output T1 map (in ms).", + ) m0map_file = File( - name_source=['source_file'], - name_template='%s_m0map.nii.gz', - argstr='-m0map %s', - desc='Filename of the estimated input M0 map.') - desc = 'Filename of the estimated output multi-parameter map.' 
+ name_source=["source_file"], + name_template="%s_m0map.nii.gz", + argstr="-m0map %s", + desc="Filename of the estimated input M0 map.", + ) + desc = "Filename of the estimated output multi-parameter map." mcmap_file = File( - name_source=['source_file'], - name_template='%s_mcmap.nii.gz', - argstr='-mcmap %s', - desc=desc) + name_source=["source_file"], + name_template="%s_mcmap.nii.gz", + argstr="-mcmap %s", + desc=desc, + ) comp_file = File( - name_source=['source_file'], - name_template='%s_comp.nii.gz', - argstr='-comp %s', - desc='Filename of the estimated multi-component T1 map.') - desc = 'Filename of the error map (symmetric matrix, [Diag,OffDiag]).' + name_source=["source_file"], + name_template="%s_comp.nii.gz", + argstr="-comp %s", + desc="Filename of the estimated multi-component T1 map.", + ) + desc = "Filename of the error map (symmetric matrix, [Diag,OffDiag])." error_file = File( - name_source=['source_file'], - name_template='%s_error.nii.gz', - argstr='-error %s', - desc=desc) + name_source=["source_file"], + name_template="%s_error.nii.gz", + argstr="-error %s", + desc=desc, + ) syn_file = File( - name_source=['source_file'], - name_template='%s_syn.nii.gz', - argstr='-syn %s', - desc='Filename of the synthetic ASL data.') + name_source=["source_file"], + name_template="%s_syn.nii.gz", + argstr="-syn %s", + desc="Filename of the synthetic ASL data.", + ) res_file = File( - name_source=['source_file'], - name_template='%s_res.nii.gz', - argstr='-res %s', - desc='Filename of the model fit residuals') + name_source=["source_file"], + name_template="%s_res.nii.gz", + argstr="-res %s", + desc="Filename of the model fit residuals", + ) # Other options: mask = File( - position=2, - exists=True, - desc='Filename of image mask.', - argstr='-mask %s') + position=2, exists=True, desc="Filename of image mask.", argstr="-mask %s" + ) prior = File( - position=3, - exists=True, - desc='Filename of parameter prior.', - argstr='-prior %s') - te_value = traits.Float( - desc='TE Echo Time [0ms!].', argstr='-TE %f', position=4) + position=3, exists=True, desc="Filename of parameter prior.", argstr="-prior %s" + ) + te_value = traits.Float(desc="TE Echo Time [0ms!].", argstr="-TE %f", position=4) tr_value = traits.Float( - desc='TR Repetition Time [10s!].', argstr='-TR %f', position=5) - desc = 'Number of components to fit [1] (currently IR/SR only)' + desc="TR Repetition Time [10s!].", argstr="-TR %f", position=5 + ) + desc = "Number of components to fit [1] (currently IR/SR only)" # set position to be ahead of TIs - nb_comp = traits.Int(desc=desc, position=6, argstr='-nc %d') - desc = 'Set LM parameters (initial value, decrease rate) [100,1.2].' - lm_val = traits.Tuple( - traits.Float, traits.Float, desc=desc, argstr='-lm %f %f', position=7) - desc = 'Use Gauss-Newton algorithm [Levenberg-Marquardt].' - gn_flag = traits.Bool(desc=desc, argstr='-gn', position=8) + nb_comp = traits.Int(desc=desc, position=6, argstr="-nc %d") + desc = "Set LM parameters (initial value, decrease rate) [100,1.2]." + lm_val = Tuple( + traits.Float, traits.Float, desc=desc, argstr="-lm %f %f", position=7 + ) + desc = "Use Gauss-Newton algorithm [Levenberg-Marquardt]." 
+ gn_flag = traits.Bool(desc=desc, argstr="-gn", position=8) slice_no = traits.Int( - desc='Fit to single slice number.', argstr='-slice %d', position=9) - voxel = traits.Tuple( + desc="Fit to single slice number.", argstr="-slice %d", position=9 + ) + voxel = Tuple( traits.Int, traits.Int, traits.Int, - desc='Fit to single voxel only.', - argstr='-voxel %d %d %d', - position=10) - maxit = traits.Int( - desc='NLSQR iterations [100].', argstr='-maxit %d', position=11) + desc="Fit to single voxel only.", + argstr="-voxel %d %d %d", + position=10, + ) + maxit = traits.Int(desc="NLSQR iterations [100].", argstr="-maxit %d", position=11) # IR options: sr_flag = traits.Bool( - desc='Saturation Recovery fitting [default].', - argstr='-SR', - position=12) + desc="Saturation Recovery fitting [default].", argstr="-SR", position=12 + ) ir_flag = traits.Bool( - desc='Inversion Recovery fitting [default].', - argstr='-IR', - position=13) + desc="Inversion Recovery fitting [default].", argstr="-IR", position=13 + ) tis = traits.List( traits.Float, position=14, - desc='Inversion times for T1 data [1s,2s,5s].', - argstr='-TIs %s', - sep=' ') - tis_list = traits.File( - exists=True, - argstr='-TIlist %s', - desc='Filename of list of pre-defined TIs.') - t1_list = traits.File( - exists=True, - argstr='-T1list %s', - desc='Filename of list of pre-defined T1s') - t1min = traits.Float( - desc='Minimum tissue T1 value [400ms].', argstr='-T1min %f') - t1max = traits.Float( - desc='Maximum tissue T1 value [4000ms].', argstr='-T1max %f') + desc="Inversion times for T1 data [1s,2s,5s].", + argstr="-TIs %s", + sep=" ", + ) + tis_list = File( + exists=True, argstr="-TIlist %s", desc="Filename of list of pre-defined TIs." + ) + t1_list = File( + exists=True, argstr="-T1list %s", desc="Filename of list of pre-defined T1s" + ) + t1min = traits.Float(desc="Minimum tissue T1 value [400ms].", argstr="-T1min %f") + t1max = traits.Float(desc="Maximum tissue T1 value [4000ms].", argstr="-T1max %f") # SPGR options - spgr = traits.Bool(desc='Spoiled Gradient Echo fitting', argstr='-SPGR') - flips = traits.List( - traits.Float, desc='Flip angles', argstr='-flips %s', sep=' ') - desc = 'Filename of list of pre-defined flip angles (deg).' - flips_list = traits.File(exists=True, argstr='-fliplist %s', desc=desc) - desc = 'Filename of B1 estimate for fitting (or include in prior).' - b1map = traits.File(exists=True, argstr='-b1map %s', desc=desc) + spgr = traits.Bool(desc="Spoiled Gradient Echo fitting", argstr="-SPGR") + flips = traits.List(traits.Float, desc="Flip angles", argstr="-flips %s", sep=" ") + desc = "Filename of list of pre-defined flip angles (deg)." + flips_list = File(exists=True, argstr="-fliplist %s", desc=desc) + desc = "Filename of B1 estimate for fitting (or include in prior)." 
+ b1map = File(exists=True, argstr="-b1map %s", desc=desc) # MCMC options: - mcout = traits.File( - exists=True, - desc='Filename of mc samples (ascii text file)', - argstr='-mcout %s') + mcout = File( + exists=True, desc="Filename of mc samples (ascii text file)", argstr="-mcout %s" + ) mcsamples = traits.Int( - desc='Number of samples to keep [100].', argstr='-mcsamples %d') + desc="Number of samples to keep [100].", argstr="-mcsamples %d" + ) mcmaxit = traits.Int( - desc='Number of iterations to run [10,000].', argstr='-mcmaxit %d') + desc="Number of iterations to run [10,000].", argstr="-mcmaxit %d" + ) acceptance = traits.Float( - desc='Fraction of iterations to accept [0.23].', - argstr='-acceptance %f') + desc="Fraction of iterations to accept [0.23].", argstr="-acceptance %f" + ) class FitQt1OutputSpec(TraitedSpec): - """ Output Spec for FitQt1. """ - t1map_file = File(desc='Filename of the estimated output T1 map (in ms)') - m0map_file = File(desc='Filename of the m0 map') - desc = 'Filename of the estimated output multi-parameter map' + """Output Spec for FitQt1.""" + + t1map_file = File(desc="Filename of the estimated output T1 map (in ms)") + m0map_file = File(desc="Filename of the m0 map") + desc = "Filename of the estimated output multi-parameter map" mcmap_file = File(desc=desc) - comp_file = File(desc='Filename of the estimated multi-component T1 map.') - desc = 'Filename of the error map (symmetric matrix, [Diag,OffDiag])' + comp_file = File(desc="Filename of the estimated multi-component T1 map.") + desc = "Filename of the error map (symmetric matrix, [Diag,OffDiag])" error_file = File(desc=desc) - syn_file = File(desc='Filename of the synthetic ASL data') - res_file = File(desc='Filename of the model fit residuals') + syn_file = File(desc="Filename of the synthetic ASL data") + res_file = File(desc="Filename of the model fit residuals") class FitQt1(NiftyFitCommand): @@ -180,7 +179,8 @@ class FitQt1(NiftyFitCommand): -res TI4D_res.nii.gz -syn TI4D_syn.nii.gz -t1map TI4D_t1map.nii.gz' """ - _cmd = get_custom_path('fit_qt1', env_dir='NIFTYFITDIR') + + _cmd = get_custom_path("fit_qt1", env_dir="NIFTYFITDIR") input_spec = FitQt1InputSpec output_spec = FitQt1OutputSpec - _suffix = '_fit_qt1' + _suffix = "_fit_qt1" diff --git a/nipype/interfaces/niftyfit/tests/test_asl.py b/nipype/interfaces/niftyfit/tests/test_asl.py index f703555c16..b919e0a483 100644 --- a/nipype/interfaces/niftyfit/tests/test_asl.py +++ b/nipype/interfaces/niftyfit/tests/test_asl.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -11,15 +10,14 @@ from ...niftyreg.tests.test_regutils import no_nifty_tool -@pytest.mark.skipif( - no_nifty_tool(cmd='fit_asl'), reason="niftyfit is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="fit_asl"), reason="niftyfit is not installed") def test_fit_asl(): - """ Testing FitAsl interface.""" + """Testing FitAsl interface.""" # Create the test node fit_asl = FitAsl() # Check if the command is properly defined - cmd = get_custom_path('fit_asl', env_dir='NIFTYFIT_DIR') + cmd = get_custom_path("fit_asl", env_dir="NIFTYFIT_DIR") assert fit_asl.cmd == cmd # test raising error with mandatory args absent @@ -28,16 +26,16 @@ def test_fit_asl(): # Tests on the interface: # Runs cbf fitting assuming all tissue is GM! 
- in_file = example_data('asl.nii.gz') + in_file = example_data("asl.nii.gz") fit_asl.inputs.source_file = in_file - cmd_tmp = '{cmd} -source {in_file} -cbf {cbf} -error {error} -syn {syn}' + cmd_tmp = "{cmd} -source {in_file} -cbf {cbf} -error {error} -syn {syn}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, - cbf='asl_cbf.nii.gz', - error='asl_error.nii.gz', - syn='asl_syn.nii.gz', + cbf="asl_cbf.nii.gz", + error="asl_error.nii.gz", + syn="asl_syn.nii.gz", ) assert fit_asl.cmdline == expected_cmd @@ -46,24 +44,24 @@ def test_fit_asl(): # the segmentation data to fit tissue specific blood flow parameters # (lambda,transit times,T1) fit_asl2 = FitAsl(sig=True) - in_file = example_data('asl.nii.gz') - t1map = example_data('T1map.nii.gz') - seg = example_data('segmentation0.nii.gz') + in_file = example_data("asl.nii.gz") + t1map = example_data("T1map.nii.gz") + seg = example_data("segmentation0.nii.gz") fit_asl2.inputs.source_file = in_file fit_asl2.inputs.t1map = t1map fit_asl2.inputs.seg = seg - cmd_tmp = '{cmd} -source {in_file} -cbf {cbf} -error {error} \ --seg {seg} -sig -syn {syn} -t1map {t1map}' + cmd_tmp = "{cmd} -source {in_file} -cbf {cbf} -error {error} \ +-seg {seg} -sig -syn {syn} -t1map {t1map}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, t1map=t1map, seg=seg, - cbf='asl_cbf.nii.gz', - error='asl_error.nii.gz', - syn='asl_syn.nii.gz', + cbf="asl_cbf.nii.gz", + error="asl_error.nii.gz", + syn="asl_syn.nii.gz", ) assert fit_asl2.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyfit/tests/test_auto_DwiTool.py b/nipype/interfaces/niftyfit/tests/test_auto_DwiTool.py index 20995e806e..ca14384031 100644 --- a/nipype/interfaces/niftyfit/tests/test_auto_DwiTool.py +++ b/nipype/interfaces/niftyfit/tests/test_auto_DwiTool.py @@ -1,56 +1,84 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dwi import DwiTool def test_DwiTool_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), b0_file=dict( - argstr='-b0 %s', + argstr="-b0 %s", + extensions=None, position=4, ), ball_flag=dict( - argstr='-ball', + argstr="-ball", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', - 'ballv_flag', 'nod_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), ballv_flag=dict( - argstr='-ballv', + argstr="-ballv", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'nod_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "nod_flag", + "nodv_flag", ], ), bval_file=dict( - argstr='-bval %s', + argstr="-bval %s", + extensions=None, mandatory=True, position=2, ), bvec_file=dict( - argstr='-bvec %s', + argstr="-bvec %s", + extensions=None, position=3, ), - diso_val=dict(argstr='-diso %f', ), - dpr_val=dict(argstr='-dpr %f', ), + diso_val=dict( + argstr="-diso %f", + ), + dpr_val=dict( + argstr="-dpr %f", + ), dti_flag=dict( - argstr='-dti', + argstr="-dti", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nod_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), dti_flag2=dict( - argstr='-dti2', + argstr="-dti2", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', - 'ballv_flag', 'nod_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + 
"nodv_flag", ], ), environ=dict( @@ -58,81 +86,110 @@ def test_DwiTool_inputs(): usedefault=True, ), famap_file=dict( - argstr='-famap %s', - name_source=['source_file'], - name_template='%s_famap.nii.gz', + argstr="-famap %s", + extensions=None, + name_source=["source_file"], + name_template="%s_famap.nii.gz", ), ivim_flag=dict( - argstr='-ivim', + argstr="-ivim", position=6, xor=[ - 'mono_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nod_flag', 'nodv_flag' + "mono_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), logdti_file=dict( - argstr='-logdti2 %s', - name_source=['source_file'], - name_template='%s_logdti2.nii.gz', + argstr="-logdti2 %s", + extensions=None, + name_source=["source_file"], + name_template="%s_logdti2.nii.gz", ), mask_file=dict( - argstr='-mask %s', + argstr="-mask %s", + extensions=None, position=5, ), mcmap_file=dict( - argstr='-mcmap %s', - name_source=['source_file'], - name_template='%s_mcmap.nii.gz', + argstr="-mcmap %s", + extensions=None, + name_source=["source_file"], + name_template="%s_mcmap.nii.gz", ), mdmap_file=dict( - argstr='-mdmap %s', - name_source=['source_file'], - name_template='%s_mdmap.nii.gz', + argstr="-mdmap %s", + extensions=None, + name_source=["source_file"], + name_template="%s_mdmap.nii.gz", ), mono_flag=dict( - argstr='-mono', + argstr="-mono", position=6, xor=[ - 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nod_flag', 'nodv_flag' + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), nod_flag=dict( - argstr='-nod', + argstr="-nod", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nodv_flag", ], ), nodv_flag=dict( - argstr='-nodv', + argstr="-nodv", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nod_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", ], ), rgbmap_file=dict( - argstr='-rgbmap %s', - name_source=['source_file'], - name_template='%s_rgbmap.nii.gz', + argstr="-rgbmap %s", + extensions=None, + name_source=["source_file"], + name_template="%s_rgbmap.nii.gz", ), source_file=dict( - argstr='-source %s', + argstr="-source %s", + extensions=None, mandatory=True, position=1, ), syn_file=dict( - argstr='-syn %s', - name_source=['source_file'], - name_template='%s_syn.nii.gz', - requires=['bvec_file', 'b0_file'], + argstr="-syn %s", + extensions=None, + name_source=["source_file"], + name_template="%s_syn.nii.gz", + requires=["bvec_file", "b0_file"], ), v1map_file=dict( - argstr='-v1map %s', - name_source=['source_file'], - name_template='%s_v1map.nii.gz', + argstr="-v1map %s", + extensions=None, + name_source=["source_file"], + name_template="%s_v1map.nii.gz", ), ) inputs = DwiTool.input_spec() @@ -140,15 +197,31 @@ def test_DwiTool_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DwiTool_outputs(): output_map = dict( - famap_file=dict(), - logdti_file=dict(), - mcmap_file=dict(), - mdmap_file=dict(), - rgbmap_file=dict(), - syn_file=dict(), - v1map_file=dict(), + famap_file=dict( + extensions=None, + ), + logdti_file=dict( + extensions=None, + ), + mcmap_file=dict( + extensions=None, + ), + 
mdmap_file=dict( + extensions=None, + ), + rgbmap_file=dict( + extensions=None, + ), + syn_file=dict( + extensions=None, + ), + v1map_file=dict( + extensions=None, + ), ) outputs = DwiTool.output_spec() diff --git a/nipype/interfaces/niftyfit/tests/test_auto_FitAsl.py b/nipype/interfaces/niftyfit/tests/test_auto_FitAsl.py index b2e1bef961..14093322cc 100644 --- a/nipype/interfaces/niftyfit/tests/test_auto_FitAsl.py +++ b/nipype/interfaces/niftyfit/tests/test_auto_FitAsl.py @@ -1,82 +1,166 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..asl import FitAsl def test_FitAsl_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), cbf_file=dict( - argstr='-cbf %s', - name_source=['source_file'], - name_template='%s_cbf.nii.gz', + argstr="-cbf %s", + extensions=None, + name_source=["source_file"], + name_template="%s_cbf.nii.gz", + ), + dpld=dict( + argstr="-dPLD %f", + ), + dt_inv2=dict( + argstr="-dTinv2 %f", + ), + eff=dict( + argstr="-eff %f", ), - dpld=dict(argstr='-dPLD %f', ), - dt_inv2=dict(argstr='-dTinv2 %f', ), - eff=dict(argstr='-eff %f', ), environ=dict( nohash=True, usedefault=True, ), error_file=dict( - argstr='-error %s', - name_source=['source_file'], - name_template='%s_error.nii.gz', - ), - gm_plasma=dict(argstr='-gmL %f', ), - gm_t1=dict(argstr='-gmT1 %f', ), - gm_ttt=dict(argstr='-gmTTT %f', ), - ir_output=dict(argstr='-IRoutput %s', ), - ir_volume=dict(argstr='-IRvolume %s', ), - ldd=dict(argstr='-LDD %f', ), - m0map=dict(argstr='-m0map %s', ), - m0mape=dict(argstr='-m0mape %s', ), + argstr="-error %s", + extensions=None, + name_source=["source_file"], + name_template="%s_error.nii.gz", + ), + gm_plasma=dict( + argstr="-gmL %f", + ), + gm_t1=dict( + argstr="-gmT1 %f", + ), + gm_ttt=dict( + argstr="-gmTTT %f", + ), + ir_output=dict( + argstr="-IRoutput %s", + extensions=None, + ), + ir_volume=dict( + argstr="-IRvolume %s", + extensions=None, + ), + ldd=dict( + argstr="-LDD %f", + ), + m0map=dict( + argstr="-m0map %s", + extensions=None, + ), + m0mape=dict( + argstr="-m0mape %s", + extensions=None, + ), mask=dict( - argstr='-mask %s', + argstr="-mask %s", + extensions=None, position=2, ), - mul=dict(argstr='-mul %f', ), - mulgm=dict(argstr='-sig', ), - out=dict(argstr='-out %f', ), - pasl=dict(argstr='-pasl', ), - pcasl=dict(argstr='-pcasl', ), - plasma_coeff=dict(argstr='-L %f', ), - pld=dict(argstr='-PLD %f', ), - pv0=dict(argstr='-pv0 %d', ), - pv2=dict(argstr='-pv2 %d', ), - pv3=dict(argstr='-pv3 %d %d %d', ), - pv_threshold=dict(argstr='-pvthreshold', ), - seg=dict(argstr='-seg %s', ), - segstyle=dict(argstr='-segstyle', ), - sig=dict(argstr='-sig', ), + mul=dict( + argstr="-mul %f", + ), + mulgm=dict( + argstr="-sig", + ), + out=dict( + argstr="-out %f", + ), + pasl=dict( + argstr="-pasl", + ), + pcasl=dict( + argstr="-pcasl", + ), + plasma_coeff=dict( + argstr="-L %f", + ), + pld=dict( + argstr="-PLD %f", + ), + pv0=dict( + argstr="-pv0 %d", + ), + pv2=dict( + argstr="-pv2 %d", + ), + pv3=dict( + argstr="-pv3 %d %d %d", + ), + pv_threshold=dict( + argstr="-pvthreshold", + ), + seg=dict( + argstr="-seg %s", + extensions=None, + ), + segstyle=dict( + argstr="-segstyle", + ), + sig=dict( + argstr="-sig", + ), source_file=dict( - argstr='-source %s', + argstr="-source %s", + extensions=None, mandatory=True, position=1, ), syn_file=dict( - argstr='-syn %s', - name_source=['source_file'], - name_template='%s_syn.nii.gz', - ), - t1_art_cmp=dict(argstr='-T1a %f', ), - 
t1map=dict(argstr='-t1map %s', ), - t_inv1=dict(argstr='-Tinv1 %f', ), - t_inv2=dict(argstr='-Tinv2 %f', ), - wm_plasma=dict(argstr='-wmL %f', ), - wm_t1=dict(argstr='-wmT1 %f', ), - wm_ttt=dict(argstr='-wmTTT %f', ), + argstr="-syn %s", + extensions=None, + name_source=["source_file"], + name_template="%s_syn.nii.gz", + ), + t1_art_cmp=dict( + argstr="-T1a %f", + ), + t1map=dict( + argstr="-t1map %s", + extensions=None, + ), + t_inv1=dict( + argstr="-Tinv1 %f", + ), + t_inv2=dict( + argstr="-Tinv2 %f", + ), + wm_plasma=dict( + argstr="-wmL %f", + ), + wm_t1=dict( + argstr="-wmT1 %f", + ), + wm_ttt=dict( + argstr="-wmTTT %f", + ), ) inputs = FitAsl.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitAsl_outputs(): output_map = dict( - cbf_file=dict(), - error_file=dict(), - syn_file=dict(), + cbf_file=dict( + extensions=None, + ), + error_file=dict( + extensions=None, + ), + syn_file=dict( + extensions=None, + ), ) outputs = FitAsl.output_spec() diff --git a/nipype/interfaces/niftyfit/tests/test_auto_FitDwi.py b/nipype/interfaces/niftyfit/tests/test_auto_FitDwi.py index 700d9a31c4..87650ffbd6 100644 --- a/nipype/interfaces/niftyfit/tests/test_auto_FitDwi.py +++ b/nipype/interfaces/niftyfit/tests/test_auto_FitDwi.py @@ -1,48 +1,74 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dwi import FitDwi def test_FitDwi_inputs(): input_map = dict( - acceptance=dict(argstr='-accpetance %f', ), - args=dict(argstr='%s', ), + acceptance=dict( + argstr="-accpetance %f", + ), + args=dict( + argstr="%s", + ), ball_flag=dict( - argstr='-ball', + argstr="-ball", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), ballv_flag=dict( - argstr='-ballv', + argstr="-ballv", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', 'nod_flag', - 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "nod_flag", + "nodv_flag", ], ), bval_file=dict( - argstr='-bval %s', + argstr="-bval %s", + extensions=None, mandatory=True, position=2, ), bvec_file=dict( - argstr='-bvec %s', + argstr="-bvec %s", + extensions=None, mandatory=True, position=3, ), - cov_file=dict(argstr='-cov %s', ), - csf_t2_val=dict(argstr='-csfT2 %f', ), - diso_val=dict(argstr='-diso %f', ), - dpr_val=dict(argstr='-dpr %f', ), + cov_file=dict( + argstr="-cov %s", + extensions=None, + ), + csf_t2_val=dict( + argstr="-csfT2 %f", + ), + diso_val=dict( + argstr="-diso %f", + ), + dpr_val=dict( + argstr="-dpr %f", + ), dti_flag=dict( - argstr='-dti', + argstr="-dti", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'ball_flag', 'ballv_flag', - 'nod_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), environ=dict( @@ -50,162 +76,245 @@ def test_FitDwi_inputs(): usedefault=True, ), error_file=dict( - argstr='-error %s', - name_source=['source_file'], - name_template='%s_error.nii.gz', + argstr="-error %s", + extensions=None, + name_source=["source_file"], + name_template="%s_error.nii.gz", ), famap_file=dict( - argstr='-famap %s', - name_source=['source_file'], - name_template='%s_famap.nii.gz', + argstr="-famap %s", + extensions=None, + name_source=["source_file"], + name_template="%s_famap.nii.gz", ), gn_flag=dict( - argstr='-gn', - xor=['wls_flag'], 
+ argstr="-gn", + xor=["wls_flag"], ), ivim_flag=dict( - argstr='-ivim', + argstr="-ivim", position=4, xor=[ - 'mono_flag', 'dti_flag', 'ball_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' + "mono_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), lm_vals=dict( - argstr='-lm %f %f', - requires=['gn_flag'], + argstr="-lm %f %f", + requires=["gn_flag"], + ), + mask_file=dict( + argstr="-mask %s", + extensions=None, ), - mask_file=dict(argstr='-mask %s', ), maxit_val=dict( - argstr='-maxit %d', - requires=['gn_flag'], + argstr="-maxit %d", + requires=["gn_flag"], ), mcmap_file=dict( - argstr='-mcmap %s', - name_source=['source_file'], - name_template='%s_mcmap.nii.gz', - requires=['nodv_flag'], + argstr="-mcmap %s", + extensions=None, + name_source=["source_file"], + name_template="%s_mcmap.nii.gz", + requires=["nodv_flag"], + ), + mcmaxit=dict( + argstr="-mcmaxit %d", ), - mcmaxit=dict(argstr='-mcmaxit %d', ), mcout=dict( - argstr='-mcout %s', - name_source=['source_file'], - name_template='%s_mcout.txt', + argstr="-mcout %s", + extensions=None, + name_source=["source_file"], + name_template="%s_mcout.txt", + ), + mcsamples=dict( + argstr="-mcsamples %d", ), - mcsamples=dict(argstr='-mcsamples %d', ), mdmap_file=dict( - argstr='-mdmap %s', - name_source=['source_file'], - name_template='%s_mdmap.nii.gz', + argstr="-mdmap %s", + extensions=None, + name_source=["source_file"], + name_template="%s_mdmap.nii.gz", ), mono_flag=dict( - argstr='-mono', + argstr="-mono", position=4, xor=[ - 'ivim_flag', 'dti_flag', 'ball_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), nod_flag=dict( - argstr='-nod', + argstr="-nod", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', - 'ballv_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nodv_flag", ], ), nodiff_file=dict( - argstr='-nodiff %s', - name_source=['source_file'], - name_template='%s_no_diff.nii.gz', + argstr="-nodiff %s", + extensions=None, + name_source=["source_file"], + name_template="%s_no_diff.nii.gz", ), nodv_flag=dict( - argstr='-nodv', + argstr="-nodv", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', - 'ballv_flag', 'nod_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", ], ), - perf_thr=dict(argstr='-perfthreshold %f', ), - prior_file=dict(argstr='-prior %s', ), + perf_thr=dict( + argstr="-perfthreshold %f", + ), + prior_file=dict( + argstr="-prior %s", + extensions=None, + ), res_file=dict( - argstr='-res %s', - name_source=['source_file'], - name_template='%s_resmap.nii.gz', + argstr="-res %s", + extensions=None, + name_source=["source_file"], + name_template="%s_resmap.nii.gz", ), rgbmap_file=dict( - argstr='-rgbmap %s', - name_source=['source_file'], - name_template='%s_rgbmap.nii.gz', - requires=['dti_flag'], + argstr="-rgbmap %s", + extensions=None, + name_source=["source_file"], + name_template="%s_rgbmap.nii.gz", + requires=["dti_flag"], + ), + rot_sform_flag=dict( + argstr="-rotsform %d", + ), + slice_no=dict( + argstr="-slice %d", ), - rot_sform_flag=dict(argstr='-rotsform %d', ), - slice_no=dict(argstr='-slice %d', ), source_file=dict( - argstr='-source %s', + argstr="-source %s", + extensions=None, mandatory=True, position=1, ), - swls_val=dict(argstr='-swls %f', ), + swls_val=dict( + argstr="-swls %f", + ), syn_file=dict( - argstr='-syn %s', - 
name_source=['source_file'], - name_template='%s_syn.nii.gz', + argstr="-syn %s", + extensions=None, + name_source=["source_file"], + name_template="%s_syn.nii.gz", ), te_file=dict( - argstr='-TE %s', - xor=['te_file'], + argstr="-TE %s", + extensions=None, + xor=["te_file"], ), te_value=dict( - argstr='-TE %s', - xor=['te_file'], + argstr="-TE %s", + extensions=None, + xor=["te_file"], + ), + ten_type=dict( + usedefault=True, ), - ten_type=dict(usedefault=True, ), tenmap2_file=dict( - argstr='-tenmap2 %s', - name_source=['source_file'], - name_template='%s_tenmap2.nii.gz', - requires=['dti_flag'], + argstr="-tenmap2 %s", + extensions=None, + name_source=["source_file"], + name_template="%s_tenmap2.nii.gz", + requires=["dti_flag"], ), tenmap_file=dict( - argstr='-tenmap %s', - name_source=['source_file'], - name_template='%s_tenmap.nii.gz', - requires=['dti_flag'], + argstr="-tenmap %s", + extensions=None, + name_source=["source_file"], + name_template="%s_tenmap.nii.gz", + requires=["dti_flag"], ), v1map_file=dict( - argstr='-v1map %s', - name_source=['source_file'], - name_template='%s_v1map.nii.gz', + argstr="-v1map %s", + extensions=None, + name_source=["source_file"], + name_template="%s_v1map.nii.gz", + ), + vb_flag=dict( + argstr="-vb", + ), + voxel=dict( + argstr="-voxel %d %d %d", ), - vb_flag=dict(argstr='-vb', ), - voxel=dict(argstr='-voxel %d %d %d', ), wls_flag=dict( - argstr='-wls', - xor=['gn_flag'], + argstr="-wls", + xor=["gn_flag"], + ), + wm_t2_val=dict( + argstr="-wmT2 %f", ), - wm_t2_val=dict(argstr='-wmT2 %f', ), ) inputs = FitDwi.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitDwi_outputs(): output_map = dict( - error_file=dict(), - famap_file=dict(), - mcmap_file=dict(), - mcout=dict(), - mdmap_file=dict(), - nodiff_file=dict(), - res_file=dict(), - rgbmap_file=dict(), - syn_file=dict(), - tenmap2_file=dict(), - tenmap_file=dict(), - v1map_file=dict(), + error_file=dict( + extensions=None, + ), + famap_file=dict( + extensions=None, + ), + mcmap_file=dict( + extensions=None, + ), + mcout=dict( + extensions=None, + ), + mdmap_file=dict( + extensions=None, + ), + nodiff_file=dict( + extensions=None, + ), + res_file=dict( + extensions=None, + ), + rgbmap_file=dict( + extensions=None, + ), + syn_file=dict( + extensions=None, + ), + tenmap2_file=dict( + extensions=None, + ), + tenmap_file=dict( + extensions=None, + ), + v1map_file=dict( + extensions=None, + ), ) outputs = FitDwi.output_spec() diff --git a/nipype/interfaces/niftyfit/tests/test_auto_FitQt1.py b/nipype/interfaces/niftyfit/tests/test_auto_FitQt1.py index 0505ee514c..ee82b5c900 100644 --- a/nipype/interfaces/niftyfit/tests/test_auto_FitQt1.py +++ b/nipype/interfaces/niftyfit/tests/test_auto_FitQt1.py @@ -1,121 +1,159 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..qt1 import FitQt1 def test_FitQt1_inputs(): input_map = dict( - acceptance=dict(argstr='-acceptance %f', ), - args=dict(argstr='%s', ), - b1map=dict(argstr='-b1map %s', ), + acceptance=dict( + argstr="-acceptance %f", + ), + args=dict( + argstr="%s", + ), + b1map=dict( + argstr="-b1map %s", + extensions=None, + ), comp_file=dict( - argstr='-comp %s', - name_source=['source_file'], - name_template='%s_comp.nii.gz', + argstr="-comp %s", + extensions=None, + name_source=["source_file"], + name_template="%s_comp.nii.gz", ), environ=dict( nohash=True, usedefault=True, ), 
error_file=dict( - argstr='-error %s', - name_source=['source_file'], - name_template='%s_error.nii.gz', + argstr="-error %s", + extensions=None, + name_source=["source_file"], + name_template="%s_error.nii.gz", ), flips=dict( - argstr='-flips %s', - sep=' ', + argstr="-flips %s", + sep=" ", + ), + flips_list=dict( + argstr="-fliplist %s", + extensions=None, ), - flips_list=dict(argstr='-fliplist %s', ), gn_flag=dict( - argstr='-gn', + argstr="-gn", position=8, ), ir_flag=dict( - argstr='-IR', + argstr="-IR", position=13, ), lm_val=dict( - argstr='-lm %f %f', + argstr="-lm %f %f", position=7, ), m0map_file=dict( - argstr='-m0map %s', - name_source=['source_file'], - name_template='%s_m0map.nii.gz', + argstr="-m0map %s", + extensions=None, + name_source=["source_file"], + name_template="%s_m0map.nii.gz", ), mask=dict( - argstr='-mask %s', + argstr="-mask %s", + extensions=None, position=2, ), maxit=dict( - argstr='-maxit %d', + argstr="-maxit %d", position=11, ), mcmap_file=dict( - argstr='-mcmap %s', - name_source=['source_file'], - name_template='%s_mcmap.nii.gz', + argstr="-mcmap %s", + extensions=None, + name_source=["source_file"], + name_template="%s_mcmap.nii.gz", + ), + mcmaxit=dict( + argstr="-mcmaxit %d", + ), + mcout=dict( + argstr="-mcout %s", + extensions=None, + ), + mcsamples=dict( + argstr="-mcsamples %d", ), - mcmaxit=dict(argstr='-mcmaxit %d', ), - mcout=dict(argstr='-mcout %s', ), - mcsamples=dict(argstr='-mcsamples %d', ), nb_comp=dict( - argstr='-nc %d', + argstr="-nc %d", position=6, ), prior=dict( - argstr='-prior %s', + argstr="-prior %s", + extensions=None, position=3, ), res_file=dict( - argstr='-res %s', - name_source=['source_file'], - name_template='%s_res.nii.gz', + argstr="-res %s", + extensions=None, + name_source=["source_file"], + name_template="%s_res.nii.gz", ), slice_no=dict( - argstr='-slice %d', + argstr="-slice %d", position=9, ), source_file=dict( - argstr='-source %s', + argstr="-source %s", + extensions=None, mandatory=True, position=1, ), - spgr=dict(argstr='-SPGR', ), + spgr=dict( + argstr="-SPGR", + ), sr_flag=dict( - argstr='-SR', + argstr="-SR", position=12, ), syn_file=dict( - argstr='-syn %s', - name_source=['source_file'], - name_template='%s_syn.nii.gz', + argstr="-syn %s", + extensions=None, + name_source=["source_file"], + name_template="%s_syn.nii.gz", + ), + t1_list=dict( + argstr="-T1list %s", + extensions=None, ), - t1_list=dict(argstr='-T1list %s', ), t1map_file=dict( - argstr='-t1map %s', - name_source=['source_file'], - name_template='%s_t1map.nii.gz', + argstr="-t1map %s", + extensions=None, + name_source=["source_file"], + name_template="%s_t1map.nii.gz", + ), + t1max=dict( + argstr="-T1max %f", + ), + t1min=dict( + argstr="-T1min %f", ), - t1max=dict(argstr='-T1max %f', ), - t1min=dict(argstr='-T1min %f', ), te_value=dict( - argstr='-TE %f', + argstr="-TE %f", position=4, ), tis=dict( - argstr='-TIs %s', + argstr="-TIs %s", position=14, - sep=' ', + sep=" ", + ), + tis_list=dict( + argstr="-TIlist %s", + extensions=None, ), - tis_list=dict(argstr='-TIlist %s', ), tr_value=dict( - argstr='-TR %f', + argstr="-TR %f", position=5, ), voxel=dict( - argstr='-voxel %d %d %d', + argstr="-voxel %d %d %d", position=10, ), ) @@ -124,15 +162,31 @@ def test_FitQt1_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitQt1_outputs(): output_map = dict( - comp_file=dict(), - error_file=dict(), - m0map_file=dict(), - 
mcmap_file=dict(),
-        res_file=dict(),
-        syn_file=dict(),
-        t1map_file=dict(),
+        comp_file=dict(
+            extensions=None,
+        ),
+        error_file=dict(
+            extensions=None,
+        ),
+        m0map_file=dict(
+            extensions=None,
+        ),
+        mcmap_file=dict(
+            extensions=None,
+        ),
+        res_file=dict(
+            extensions=None,
+        ),
+        syn_file=dict(
+            extensions=None,
+        ),
+        t1map_file=dict(
+            extensions=None,
+        ),
     )
     outputs = FitQt1.output_spec()
diff --git a/nipype/interfaces/niftyfit/tests/test_auto_NiftyFitCommand.py b/nipype/interfaces/niftyfit/tests/test_auto_NiftyFitCommand.py
index 6bfeb3c9d3..f12ccad480 100644
--- a/nipype/interfaces/niftyfit/tests/test_auto_NiftyFitCommand.py
+++ b/nipype/interfaces/niftyfit/tests/test_auto_NiftyFitCommand.py
@@ -1,11 +1,12 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..base import NiftyFitCommand
 
 
 def test_NiftyFitCommand_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
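The auto-generated test modules above all follow the same pattern: build an input_map of the expected trait metadata, instantiate the interface's input_spec(), and assert on every (trait, metakey) pair. A minimal hand-rolled spot-check in the same spirit; the trait name and values are taken from the FitAsl hunk above, and it assumes a nipype installation that provides these interfaces:

    from nipype.interfaces.niftyfit.asl import FitAsl

    inputs = FitAsl.input_spec()
    # source_file is declared mandatory, at position 1, with argstr "-source %s"
    assert inputs.traits()["source_file"].mandatory is True
    assert inputs.traits()["source_file"].argstr == "-source %s"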
reason="niftyfit is not installed") def test_dwi_tool(): - """ Testing DwiTool interface.""" + """Testing DwiTool interface.""" # Create a node object dwi_tool = DwiTool() # Check if the command is properly defined - cmd = get_custom_path('dwi_tool', env_dir='NIFTYFITDIR') + cmd = get_custom_path("dwi_tool", env_dir="NIFTYFITDIR") assert dwi_tool.cmd == cmd # test raising error with mandatory args absent @@ -74,11 +73,11 @@ def test_dwi_tool(): dwi_tool.run() # Assign some input data - in_file = example_data('dwi.nii.gz') - bval_file = example_data('bvals') - bvec_file = example_data('bvecs') - b0_file = example_data('b0.nii') - mask_file = example_data('mask.nii.gz') + in_file = example_data("dwi.nii.gz") + bval_file = example_data("bvals") + bvec_file = example_data("bvecs") + b0_file = example_data("b0.nii") + mask_file = example_data("mask.nii.gz") dwi_tool.inputs.source_file = in_file dwi_tool.inputs.mask_file = mask_file dwi_tool.inputs.bval_file = bval_file @@ -86,9 +85,9 @@ def test_dwi_tool(): dwi_tool.inputs.b0_file = b0_file dwi_tool.inputs.dti_flag = True - cmd_tmp = '{cmd} -source {in_file} -bval {bval} -bvec {bvec} -b0 {b0} \ + cmd_tmp = "{cmd} -source {in_file} -bval {bval} -bvec {bvec} -b0 {b0} \ -mask {mask} -dti -famap {fa} -logdti2 {log} -mcmap {mc} -mdmap {md} \ --rgbmap {rgb} -syn {syn} -v1map {v1}' +-rgbmap {rgb} -syn {syn} -v1map {v1}" expected_cmd = cmd_tmp.format( cmd=cmd, @@ -97,12 +96,13 @@ def test_dwi_tool(): bvec=bvec_file, b0=b0_file, mask=mask_file, - fa='dwi_famap.nii.gz', - log='dwi_logdti2.nii.gz', - mc='dwi_mcmap.nii.gz', - md='dwi_mdmap.nii.gz', - rgb='dwi_rgbmap.nii.gz', - syn='dwi_syn.nii.gz', - v1='dwi_v1map.nii.gz') + fa="dwi_famap.nii.gz", + log="dwi_logdti2.nii.gz", + mc="dwi_mcmap.nii.gz", + md="dwi_mdmap.nii.gz", + rgb="dwi_rgbmap.nii.gz", + syn="dwi_syn.nii.gz", + v1="dwi_v1map.nii.gz", + ) assert dwi_tool.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyfit/tests/test_qt1.py b/nipype/interfaces/niftyfit/tests/test_qt1.py index 894017e654..794e6c5130 100644 --- a/nipype/interfaces/niftyfit/tests/test_qt1.py +++ b/nipype/interfaces/niftyfit/tests/test_qt1.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -10,15 +9,14 @@ from ..qt1 import FitQt1 -@pytest.mark.skipif( - no_nifty_tool(cmd='fit_qt1'), reason="niftyfit is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="fit_qt1"), reason="niftyfit is not installed") def test_fit_qt1(): - """ Testing FitQt1 interface.""" + """Testing FitQt1 interface.""" # Create a node object fit_qt1 = FitQt1() # Check if the command is properly defined - cmd = get_custom_path('fit_qt1', env_dir='NIFTYFITDIR') + cmd = get_custom_path("fit_qt1", env_dir="NIFTYFITDIR") assert fit_qt1.cmd == cmd # test raising error with mandatory args absent @@ -26,68 +24,68 @@ def test_fit_qt1(): fit_qt1.run() # Regular test: - in_file = example_data('TI4D.nii.gz') + in_file = example_data("TI4D.nii.gz") fit_qt1.inputs.source_file = in_file - cmd_tmp = '{cmd} -source {in_file} -comp {comp} -error {error} -m0map \ -{map0} -mcmap {cmap} -res {res} -syn {syn} -t1map {t1map}' + cmd_tmp = "{cmd} -source {in_file} -comp {comp} -error {error} -m0map \ +{map0} -mcmap {cmap} -res {res} -syn {syn} -t1map {t1map}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, - comp='TI4D_comp.nii.gz', - map0='TI4D_m0map.nii.gz', - error='TI4D_error.nii.gz', - cmap='TI4D_mcmap.nii.gz', - res='TI4D_res.nii.gz', - 
diff --git a/nipype/interfaces/niftyfit/tests/test_qt1.py b/nipype/interfaces/niftyfit/tests/test_qt1.py
index 894017e654..794e6c5130 100644
--- a/nipype/interfaces/niftyfit/tests/test_qt1.py
+++ b/nipype/interfaces/niftyfit/tests/test_qt1.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 
@@ -10,15 +9,14 @@
 from ..qt1 import FitQt1
 
 
-@pytest.mark.skipif(
-    no_nifty_tool(cmd='fit_qt1'), reason="niftyfit is not installed")
+@pytest.mark.skipif(no_nifty_tool(cmd="fit_qt1"), reason="niftyfit is not installed")
 def test_fit_qt1():
-    """ Testing FitQt1 interface."""
+    """Testing FitQt1 interface."""
     # Create a node object
     fit_qt1 = FitQt1()
 
     # Check if the command is properly defined
-    cmd = get_custom_path('fit_qt1', env_dir='NIFTYFITDIR')
+    cmd = get_custom_path("fit_qt1", env_dir="NIFTYFITDIR")
     assert fit_qt1.cmd == cmd
 
     # test raising error with mandatory args absent
@@ -26,68 +24,68 @@ def test_fit_qt1():
         fit_qt1.run()
 
     # Regular test:
-    in_file = example_data('TI4D.nii.gz')
+    in_file = example_data("TI4D.nii.gz")
     fit_qt1.inputs.source_file = in_file
 
-    cmd_tmp = '{cmd} -source {in_file} -comp {comp} -error {error} -m0map \
-{map0} -mcmap {cmap} -res {res} -syn {syn} -t1map {t1map}'
+    cmd_tmp = "{cmd} -source {in_file} -comp {comp} -error {error} -m0map \
+{map0} -mcmap {cmap} -res {res} -syn {syn} -t1map {t1map}"
     expected_cmd = cmd_tmp.format(
         cmd=cmd,
         in_file=in_file,
-        comp='TI4D_comp.nii.gz',
-        map0='TI4D_m0map.nii.gz',
-        error='TI4D_error.nii.gz',
-        cmap='TI4D_mcmap.nii.gz',
-        res='TI4D_res.nii.gz',
-        t1map='TI4D_t1map.nii.gz',
-        syn='TI4D_syn.nii.gz',
+        comp="TI4D_comp.nii.gz",
+        map0="TI4D_m0map.nii.gz",
+        error="TI4D_error.nii.gz",
+        cmap="TI4D_mcmap.nii.gz",
+        res="TI4D_res.nii.gz",
+        t1map="TI4D_t1map.nii.gz",
+        syn="TI4D_syn.nii.gz",
     )
 
     assert fit_qt1.cmdline == expected_cmd
 
     # Runs T1 fitting to inversion and saturation recovery data (NLSQR)
     fit_qt1_2 = FitQt1(tis=[1, 2, 5], ir_flag=True)
-    in_file = example_data('TI4D.nii.gz')
+    in_file = example_data("TI4D.nii.gz")
     fit_qt1_2.inputs.source_file = in_file
 
-    cmd_tmp = '{cmd} -source {in_file} -IR -TIs 1.0 2.0 5.0 \
+    cmd_tmp = "{cmd} -source {in_file} -IR -TIs 1.0 2.0 5.0 \
 -comp {comp} -error {error} -m0map {map0} -mcmap {cmap} -res {res} \
--syn {syn} -t1map {t1map}'
+-syn {syn} -t1map {t1map}"
     expected_cmd = cmd_tmp.format(
         cmd=cmd,
         in_file=in_file,
-        comp='TI4D_comp.nii.gz',
-        map0='TI4D_m0map.nii.gz',
-        error='TI4D_error.nii.gz',
-        cmap='TI4D_mcmap.nii.gz',
-        res='TI4D_res.nii.gz',
-        t1map='TI4D_t1map.nii.gz',
-        syn='TI4D_syn.nii.gz',
+        comp="TI4D_comp.nii.gz",
+        map0="TI4D_m0map.nii.gz",
+        error="TI4D_error.nii.gz",
+        cmap="TI4D_mcmap.nii.gz",
+        res="TI4D_res.nii.gz",
+        t1map="TI4D_t1map.nii.gz",
+        syn="TI4D_syn.nii.gz",
    )
 
     assert fit_qt1_2.cmdline == expected_cmd
 
     # Runs T1 fitting to spoiled gradient echo (SPGR) data (NLSQR)
     fit_qt1_3 = FitQt1(flips=[2, 4, 8], spgr=True)
-    in_file = example_data('TI4D.nii.gz')
+    in_file = example_data("TI4D.nii.gz")
     fit_qt1_3.inputs.source_file = in_file
 
-    cmd_tmp = '{cmd} -source {in_file} -comp {comp} -error {error} \
+    cmd_tmp = "{cmd} -source {in_file} -comp {comp} -error {error} \
 -flips 2.0 4.0 8.0 -m0map {map0} -mcmap {cmap} -res {res} -SPGR -syn {syn} \
--t1map {t1map}'
+-t1map {t1map}"
     expected_cmd = cmd_tmp.format(
         cmd=cmd,
         in_file=in_file,
-        comp='TI4D_comp.nii.gz',
-        map0='TI4D_m0map.nii.gz',
-        error='TI4D_error.nii.gz',
-        cmap='TI4D_mcmap.nii.gz',
-        res='TI4D_res.nii.gz',
-        t1map='TI4D_t1map.nii.gz',
-        syn='TI4D_syn.nii.gz',
+        comp="TI4D_comp.nii.gz",
+        map0="TI4D_m0map.nii.gz",
+        error="TI4D_error.nii.gz",
+        cmap="TI4D_mcmap.nii.gz",
+        res="TI4D_res.nii.gz",
+        t1map="TI4D_t1map.nii.gz",
+        syn="TI4D_syn.nii.gz",
     )
 
     assert fit_qt1_3.cmdline == expected_cmd
diff --git a/nipype/interfaces/niftyreg/__init__.py b/nipype/interfaces/niftyreg/__init__.py
index 9854ebaea3..e22eae03ed 100644
--- a/nipype/interfaces/niftyreg/__init__.py
+++ b/nipype/interfaces/niftyreg/__init__.py
@@ -1,14 +1,19 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """
+NiftyReg is an open-source software for efficient medical image registration.
+
 The niftyreg module provides classes for interfacing with the `NiftyReg
 `_ command line tools.
 
-Top-level namespace for niftyreg.
 """
-
 from .base import get_custom_path
 from .reg import RegAladin, RegF3D
-from .regutils import (RegResample, RegJacobian, RegAverage, RegTools,
-                       RegTransform, RegMeasure)
+from .regutils import (
+    RegResample,
+    RegJacobian,
+    RegAverage,
+    RegTools,
+    RegTransform,
+    RegMeasure,
+)
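The base.py hunks that follow rewrite the module-level helpers. A short sketch of how the new get_custom_path resolves executables, using only the two-line definition shown below; /opt/niftyreg/bin is a made-up install prefix:

    import os

    def get_custom_path(command, env_dir="NIFTYREGDIR"):
        return os.path.join(os.getenv(env_dir, ""), command)

    os.environ["NIFTYREGDIR"] = "/opt/niftyreg/bin"  # hypothetical install prefix
    assert get_custom_path("reg_aladin") == "/opt/niftyreg/bin/reg_aladin"

    del os.environ["NIFTYREGDIR"]
    # Without the variable, the bare command name is returned and later
    # resolved through $PATH when the command line actually runs.
    assert get_custom_path("reg_aladin") == "reg_aladin"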
""" - -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import property, super -from distutils.version import StrictVersion import os +from packaging.version import Version from ... import logging -from ..base import CommandLine, CommandLineInputSpec, traits, Undefined +from ..base import CommandLine, CommandLineInputSpec, traits, Undefined, PackageInfo from ...utils.filemanip import split_filename -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") + + +def get_custom_path(command, env_dir="NIFTYREGDIR"): + return os.path.join(os.getenv(env_dir, ""), command) -def get_custom_path(command, env_dir='NIFTYREGDIR'): - return os.path.join(os.getenv(env_dir, ''), command) +class Info(PackageInfo): + version_cmd = get_custom_path("reg_aladin") + " --version" + + @staticmethod + def parse_version(raw_info): + return raw_info class NiftyRegCommandInputSpec(CommandLineInputSpec): """Input Spec for niftyreg interfaces.""" + # Set the number of omp thread to use omp_core_val = traits.Int( - int(os.environ.get('OMP_NUM_THREADS', '1')), - desc='Number of openmp thread to use', - argstr='-omp %i', - usedefault=True) + int(os.environ.get("OMP_NUM_THREADS", "1")), + desc="Number of openmp thread to use", + argstr="-omp %i", + usedefault=True, + ) class NiftyRegCommand(CommandLine): """ Base support interface for NiftyReg commands. """ - _suffix = '_nr' - _min_version = '1.5.30' + + _suffix = "_nr" + _min_version = "1.5.30" input_spec = NiftyRegCommandInputSpec def __init__(self, required_version=None, **inputs): self.num_threads = 1 - super(NiftyRegCommand, self).__init__(**inputs) + super().__init__(**inputs) self.required_version = required_version - _version = self.version_from_command() + _version = self.version if _version: - _version = _version.decode("utf-8") - if self._min_version is not None and \ - StrictVersion(_version) < StrictVersion(self._min_version): - msg = 'A later version of Niftyreg is required (%s < %s)' + if self._min_version is not None and Version(_version) < Version( + self._min_version + ): + msg = "A later version of Niftyreg is required (%s < %s)" iflogger.warning(msg, _version, self._min_version) if required_version is not None: - if StrictVersion(_version) != StrictVersion(required_version): - msg = 'The version of NiftyReg differs from the required' - msg += '(%s != %s)' + if Version(_version) != Version(required_version): + msg = "The version of NiftyReg differs from the required" + msg += "(%s != %s)" iflogger.warning(msg, _version, self.required_version) - self.inputs.on_trait_change(self._omp_update, 'omp_core_val') - self.inputs.on_trait_change(self._environ_update, 'environ') + self.inputs.on_trait_change(self._omp_update, "omp_core_val") + self.inputs.on_trait_change(self._environ_update, "environ") self._omp_update() def _omp_update(self): if self.inputs.omp_core_val: - self.inputs.environ['OMP_NUM_THREADS'] = \ - str(self.inputs.omp_core_val) + self.inputs.environ["OMP_NUM_THREADS"] = str(self.inputs.omp_core_val) self.num_threads = self.inputs.omp_core_val else: - if 'OMP_NUM_THREADS' in self.inputs.environ: - del self.inputs.environ['OMP_NUM_THREADS'] + if "OMP_NUM_THREADS" in self.inputs.environ: + del self.inputs.environ["OMP_NUM_THREADS"] self.num_threads = 1 def _environ_update(self): if self.inputs.environ: - if 'OMP_NUM_THREADS' in self.inputs.environ: - self.inputs.omp_core_val = \ - int(self.inputs.environ['OMP_NUM_THREADS']) + if "OMP_NUM_THREADS" in 
self.inputs.environ: + self.inputs.omp_core_val = int(self.inputs.environ["OMP_NUM_THREADS"]) else: self.inputs.omp_core_val = Undefined else: self.inputs.omp_core_val = Undefined def check_version(self): - _version = self.version_from_command() + _version = self.version if not _version: - raise Exception('Niftyreg not found') - # Decoding to string: - _version = _version.decode("utf-8") - if StrictVersion(_version) < StrictVersion(self._min_version): - err = 'A later version of Niftyreg is required (%s < %s)' + raise Exception("Niftyreg not found") + if Version(_version) < Version(self._min_version): + err = "A later version of Niftyreg is required (%s < %s)" raise ValueError(err % (_version, self._min_version)) if self.required_version: - if StrictVersion(_version) != StrictVersion(self.required_version): - err = 'The version of NiftyReg differs from the required' - err += '(%s != %s)' + if Version(_version) != Version(self.required_version): + err = "The version of NiftyReg differs from the required" + err += "(%s != %s)" raise ValueError(err % (_version, self.required_version)) @property def version(self): - return self.version_from_command() + return Info.version() def exists(self): - return self.version_from_command() is not None + return self.version is not None def _format_arg(self, name, spec, value): - if name == 'omp_core_val': + if name == "omp_core_val": self.numthreads = value - return super(NiftyRegCommand, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None): - if basename == '': - msg = 'Unable to generate filename for command %s. ' % self.cmd - msg += 'basename is not set!' + if basename == "": + msg = "Unable to generate filename for command %s. " % self.cmd + msg += "basename is not set!" raise ValueError(msg) _, final_bn, final_ext = split_filename(basename) if out_dir is None: @@ -130,5 +132,5 @@ def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None): if ext is not None: final_ext = ext if suffix is not None: - final_bn = ''.join((final_bn, suffix)) + final_bn = f"{final_bn}{suffix}" return os.path.abspath(os.path.join(out_dir, final_bn + final_ext)) diff --git a/nipype/interfaces/niftyreg/reg.py b/nipype/interfaces/niftyreg/reg.py index f36752b872..2c7657e6ae 100644 --- a/nipype/interfaces/niftyreg/reg.py +++ b/nipype/interfaces/niftyreg/reg.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -7,118 +6,120 @@ The interfaces were written to work with niftyreg version 1.5.10 """ - -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import staticmethod import os -from ..base import TraitedSpec, File, traits, isdefined +from ..base import TraitedSpec, File, traits, Tuple, isdefined from .base import get_custom_path, NiftyRegCommand, NiftyRegCommandInputSpec from ...utils.filemanip import split_filename class RegAladinInputSpec(NiftyRegCommandInputSpec): - """ Input Spec for RegAladin. 
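The version handling above moves from distutils.version.StrictVersion (distutils is gone as of Python 3.12) to packaging.version.Version, and self.version now returns a str via Info.version(), which is why the .decode("utf-8") calls disappear. The comparison semantics the _min_version check relies on, shown in isolation (the version strings are illustrative):

    from packaging.version import Version

    _min_version = "1.5.30"
    assert Version("1.5.30") >= Version(_min_version)
    # Release components compare numerically, so 1.5.4 < 1.5.30, whereas a
    # plain string comparison would get this wrong.
    assert Version("1.5.4") < Version(_min_version)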
""" + """Input Spec for RegAladin.""" + # Input reference file ref_file = File( exists=True, - desc='The input reference/target image', - argstr='-ref %s', - mandatory=True) + desc="The input reference/target image", + argstr="-ref %s", + mandatory=True, + ) # Input floating file flo_file = File( exists=True, - desc='The input floating/source image', - argstr='-flo %s', - mandatory=True) + desc="The input floating/source image", + argstr="-flo %s", + mandatory=True, + ) # No symmetric flag - nosym_flag = traits.Bool( - argstr='-noSym', desc='Turn off symmetric registration') + nosym_flag = traits.Bool(argstr="-noSym", desc="Turn off symmetric registration") # Rigid only registration - rig_only_flag = traits.Bool( - argstr='-rigOnly', desc='Do only a rigid registration') + rig_only_flag = traits.Bool(argstr="-rigOnly", desc="Do only a rigid registration") # Directly optimise affine flag - desc = 'Directly optimise the affine parameters' - aff_direct_flag = traits.Bool(argstr='-affDirect', desc=desc) + desc = "Directly optimise the affine parameters" + aff_direct_flag = traits.Bool(argstr="-affDirect", desc=desc) # Input affine in_aff_file = File( - exists=True, - desc='The input affine transformation', - argstr='-inaff %s') + exists=True, desc="The input affine transformation", argstr="-inaff %s" + ) # Input reference mask - rmask_file = File( - exists=True, desc='The input reference mask', argstr='-rmask %s') + rmask_file = File(exists=True, desc="The input reference mask", argstr="-rmask %s") # Input floating mask - fmask_file = File( - exists=True, desc='The input floating mask', argstr='-fmask %s') + fmask_file = File(exists=True, desc="The input floating mask", argstr="-fmask %s") # Maximum number of iterations maxit_val = traits.Range( - desc='Maximum number of iterations', argstr='-maxit %d', low=0) + desc="Maximum number of iterations", argstr="-maxit %d", low=0 + ) # Multiresolution levels ln_val = traits.Range( - desc='Number of resolution levels to create', argstr='-ln %d', low=0) + desc="Number of resolution levels to create", argstr="-ln %d", low=0 + ) # Number of resolution levels to process lp_val = traits.Range( - desc='Number of resolution levels to perform', argstr='-lp %d', low=0) + desc="Number of resolution levels to perform", argstr="-lp %d", low=0 + ) # Smoothing to apply on reference image - desc = 'Amount of smoothing to apply to reference image' - smoo_r_val = traits.Float(desc=desc, argstr='-smooR %f') + desc = "Amount of smoothing to apply to reference image" + smoo_r_val = traits.Float(desc=desc, argstr="-smooR %f") # Smoothing to apply on floating image - desc = 'Amount of smoothing to apply to floating image' - smoo_f_val = traits.Float(desc=desc, argstr='-smooF %f') + desc = "Amount of smoothing to apply to floating image" + smoo_f_val = traits.Float(desc=desc, argstr="-smooF %f") # Use nifti header to initialise transformation - desc = 'Use nifti header to initialise transformation' - nac_flag = traits.Bool(desc=desc, argstr='-nac') + desc = "Use nifti header to initialise transformation" + nac_flag = traits.Bool(desc=desc, argstr="-nac") # Use the input masks centre of mass to initialise the transformation - desc = 'Use the masks centre of mass to initialise the transformation' - cog_flag = traits.Bool(desc=desc, argstr='-cog') + desc = "Use the masks centre of mass to initialise the transformation" + cog_flag = traits.Bool(desc=desc, argstr="-cog") # Percent of blocks that are considered active. 
v_val = traits.Range( - desc='Percent of blocks that are active', argstr='-pv %d', low=0) + desc="Percent of blocks that are active", argstr="-pv %d", low=0 + ) # Percent of inlier blocks - i_val = traits.Range( - desc='Percent of inlier blocks', argstr='-pi %d', low=0) + i_val = traits.Range(desc="Percent of inlier blocks", argstr="-pi %d", low=0) # Lower threshold on reference image ref_low_val = traits.Float( - desc='Lower threshold value on reference image', - argstr='-refLowThr %f') + desc="Lower threshold value on reference image", argstr="-refLowThr %f" + ) # Upper threshold on reference image ref_up_val = traits.Float( - desc='Upper threshold value on reference image', argstr='-refUpThr %f') + desc="Upper threshold value on reference image", argstr="-refUpThr %f" + ) # Lower threshold on floating image flo_low_val = traits.Float( - desc='Lower threshold value on floating image', argstr='-floLowThr %f') + desc="Lower threshold value on floating image", argstr="-floLowThr %f" + ) # Upper threshold on floating image flo_up_val = traits.Float( - desc='Upper threshold value on floating image', argstr='-floUpThr %f') + desc="Upper threshold value on floating image", argstr="-floUpThr %f" + ) # Platform to use - platform_val = traits.Int(desc='Platform index', argstr='-platf %i') + platform_val = traits.Int(desc="Platform index", argstr="-platf %i") # Platform to use - gpuid_val = traits.Int(desc='Device to use id', argstr='-gpuid %i') + gpuid_val = traits.Int(desc="Device to use id", argstr="-gpuid %i") # Verbosity off - verbosity_off_flag = traits.Bool( - argstr='-voff', desc='Turn off verbose output') + verbosity_off_flag = traits.Bool(argstr="-voff", desc="Turn off verbose output") # Affine output transformation matrix file aff_file = File( - name_source=['flo_file'], - name_template='%s_aff.txt', - desc='The output affine matrix file', - argstr='-aff %s') + name_source=["flo_file"], + name_template="%s_aff.txt", + desc="The output affine matrix file", + argstr="-aff %s", + ) # Result warped image file res_file = File( - name_source=['flo_file'], - name_template='%s_res.nii.gz', - desc='The affine transformed floating image', - argstr='-res %s') + name_source=["flo_file"], + name_template="%s_res.nii.gz", + desc="The affine transformed floating image", + argstr="-res %s", + ) class RegAladinOutputSpec(TraitedSpec): - """ Output Spec for RegAladin. """ - aff_file = File(desc='The output affine file') - res_file = File(desc='The output transformed image') - desc = 'Output string in the format for reg_average' + """Output Spec for RegAladin.""" + + aff_file = File(desc="The output affine file") + res_file = File(desc="The output transformed image") + desc = "Output string in the format for reg_average" avg_output = traits.String(desc=desc) @@ -145,204 +146,209 @@ class RegAladin(NiftyRegCommand): -res im2_res.nii.gz -rmask mask.nii' """ - _cmd = get_custom_path('reg_aladin') + + _cmd = get_custom_path("reg_aladin") input_spec = RegAladinInputSpec output_spec = RegAladinOutputSpec def _list_outputs(self): - outputs = super(RegAladin, self)._list_outputs() + outputs = super()._list_outputs() # Make a list of the linear transformation file and the input image - aff = os.path.abspath(outputs['aff_file']) + aff = os.path.abspath(outputs["aff_file"]) flo = os.path.abspath(self.inputs.flo_file) - outputs['avg_output'] = '%s %s' % (aff, flo) + outputs["avg_output"] = f"{aff} {flo}" return outputs class RegF3DInputSpec(NiftyRegCommandInputSpec): - """ Input Spec for RegF3D. 
""" + """Input Spec for RegF3D.""" + # Input reference file ref_file = File( exists=True, - desc='The input reference/target image', - argstr='-ref %s', - mandatory=True) + desc="The input reference/target image", + argstr="-ref %s", + mandatory=True, + ) # Input floating file flo_file = File( exists=True, - desc='The input floating/source image', - argstr='-flo %s', - mandatory=True) + desc="The input floating/source image", + argstr="-flo %s", + mandatory=True, + ) # Input Affine file aff_file = File( - exists=True, - desc='The input affine transformation file', - argstr='-aff %s') + exists=True, desc="The input affine transformation file", argstr="-aff %s" + ) # Input cpp file incpp_file = File( - exists=True, - desc='The input cpp transformation file', - argstr='-incpp %s') + exists=True, desc="The input cpp transformation file", argstr="-incpp %s" + ) # Reference mask - rmask_file = File( - exists=True, desc='Reference image mask', argstr='-rmask %s') + rmask_file = File(exists=True, desc="Reference image mask", argstr="-rmask %s") # Smoothing kernel for reference - desc = 'Smoothing kernel width for reference image' - ref_smooth_val = traits.Float(desc=desc, argstr='-smooR %f') + desc = "Smoothing kernel width for reference image" + ref_smooth_val = traits.Float(desc=desc, argstr="-smooR %f") # Smoothing kernel for floating - desc = 'Smoothing kernel width for floating image' - flo_smooth_val = traits.Float(desc=desc, argstr='-smooF %f') + desc = "Smoothing kernel width for floating image" + flo_smooth_val = traits.Float(desc=desc, argstr="-smooF %f") # Lower threshold for reference image rlwth_thr_val = traits.Float( - desc='Lower threshold for reference image', argstr='--rLwTh %f') + desc="Lower threshold for reference image", argstr="--rLwTh %f" + ) # Upper threshold for reference image rupth_thr_val = traits.Float( - desc='Upper threshold for reference image', argstr='--rUpTh %f') + desc="Upper threshold for reference image", argstr="--rUpTh %f" + ) # Lower threshold for reference image flwth_thr_val = traits.Float( - desc='Lower threshold for floating image', argstr='--fLwTh %f') + desc="Lower threshold for floating image", argstr="--fLwTh %f" + ) # Upper threshold for reference image fupth_thr_val = traits.Float( - desc='Upper threshold for floating image', argstr='--fUpTh %f') + desc="Upper threshold for floating image", argstr="--fUpTh %f" + ) # Lower threshold for reference image - desc = 'Lower threshold for reference image at the specified time point' - rlwth2_thr_val = traits.Tuple( - traits.Range(low=0), traits.Float, desc=desc, argstr='-rLwTh %d %f') + desc = "Lower threshold for reference image at the specified time point" + rlwth2_thr_val = Tuple( + traits.Range(low=0), traits.Float, desc=desc, argstr="-rLwTh %d %f" + ) # Upper threshold for reference image - desc = 'Upper threshold for reference image at the specified time point' - rupth2_thr_val = traits.Tuple( - traits.Range(low=0), traits.Float, desc=desc, argstr='-rUpTh %d %f') + desc = "Upper threshold for reference image at the specified time point" + rupth2_thr_val = Tuple( + traits.Range(low=0), traits.Float, desc=desc, argstr="-rUpTh %d %f" + ) # Lower threshold for reference image - desc = 'Lower threshold for floating image at the specified time point' - flwth2_thr_val = traits.Tuple( - traits.Range(low=0), traits.Float, desc=desc, argstr='-fLwTh %d %f') + desc = "Lower threshold for floating image at the specified time point" + flwth2_thr_val = Tuple( + traits.Range(low=0), traits.Float, desc=desc, 
argstr="-fLwTh %d %f" + ) # Upper threshold for reference image - desc = 'Upper threshold for floating image at the specified time point' - fupth2_thr_val = traits.Tuple( - traits.Range(low=0), traits.Float, desc=desc, argstr='-fUpTh %d %f') + desc = "Upper threshold for floating image at the specified time point" + fupth2_thr_val = Tuple( + traits.Range(low=0), traits.Float, desc=desc, argstr="-fUpTh %d %f" + ) # Final grid spacing along the 3 axes - sx_val = traits.Float( - desc='Final grid spacing along the x axes', argstr='-sx %f') - sy_val = traits.Float( - desc='Final grid spacing along the y axes', argstr='-sy %f') - sz_val = traits.Float( - desc='Final grid spacing along the z axes', argstr='-sz %f') + sx_val = traits.Float(desc="Final grid spacing along the x axes", argstr="-sx %f") + sy_val = traits.Float(desc="Final grid spacing along the y axes", argstr="-sy %f") + sz_val = traits.Float(desc="Final grid spacing along the z axes", argstr="-sz %f") # Regularisation options - be_val = traits.Float(desc='Bending energy value', argstr='-be %f') - le_val = traits.Float( - desc='Linear elasticity penalty term', argstr='-le %f') + be_val = traits.Float(desc="Bending energy value", argstr="-be %f") + le_val = traits.Float(desc="Linear elasticity penalty term", argstr="-le %f") jl_val = traits.Float( - desc='Log of jacobian of deformation penalty value', argstr='-jl %f') - desc = 'Do not approximate the log of jacobian penalty at control points \ -only' + desc="Log of jacobian of deformation penalty value", argstr="-jl %f" + ) + desc = "Do not approximate the log of jacobian penalty at control points \ +only" - no_app_jl_flag = traits.Bool(argstr='-noAppJL', desc=desc) + no_app_jl_flag = traits.Bool(argstr="-noAppJL", desc=desc) # Similarity measure options - desc = 'use NMI even when other options are specified' - nmi_flag = traits.Bool(argstr='--nmi', desc=desc) - desc = 'Number of bins in the histogram for reference image' - rbn_val = traits.Range(low=0, desc=desc, argstr='--rbn %d') - desc = 'Number of bins in the histogram for reference image' - fbn_val = traits.Range(low=0, desc=desc, argstr='--fbn %d') - desc = 'Number of bins in the histogram for reference image for given \ -time point' - - rbn2_val = traits.Tuple( - traits.Range(low=0), - traits.Range(low=0), - desc=desc, - argstr='-rbn %d %d') - - desc = 'Number of bins in the histogram for reference image for given \ -time point' - - fbn2_val = traits.Tuple( - traits.Range(low=0), - traits.Range(low=0), - desc=desc, - argstr='-fbn %d %d') + desc = "use NMI even when other options are specified" + nmi_flag = traits.Bool(argstr="--nmi", desc=desc) + desc = "Number of bins in the histogram for reference image" + rbn_val = traits.Range(low=0, desc=desc, argstr="--rbn %d") + desc = "Number of bins in the histogram for reference image" + fbn_val = traits.Range(low=0, desc=desc, argstr="--fbn %d") + desc = "Number of bins in the histogram for reference image for given \ +time point" + + rbn2_val = Tuple( + traits.Range(low=0), traits.Range(low=0), desc=desc, argstr="-rbn %d %d" + ) + + desc = "Number of bins in the histogram for reference image for given \ +time point" + + fbn2_val = Tuple( + traits.Range(low=0), traits.Range(low=0), desc=desc, argstr="-fbn %d %d" + ) lncc_val = traits.Float( - desc='SD of the Gaussian for computing LNCC', argstr='--lncc %f') - desc = 'SD of the Gaussian for computing LNCC for a given time point' - lncc2_val = traits.Tuple( - traits.Range(low=0), traits.Float, desc=desc, argstr='-lncc %d %f') - - 
ssd_flag = traits.Bool( - desc='Use SSD as the similarity measure', argstr='--ssd') - desc = 'Use SSD as the similarity measure for a given time point' - ssd2_flag = traits.Range(low=0, desc=desc, argstr='-ssd %d') + desc="SD of the Gaussian for computing LNCC", argstr="--lncc %f" + ) + desc = "SD of the Gaussian for computing LNCC for a given time point" + lncc2_val = Tuple( + traits.Range(low=0), traits.Float, desc=desc, argstr="-lncc %d %f" + ) + + ssd_flag = traits.Bool(desc="Use SSD as the similarity measure", argstr="--ssd") + desc = "Use SSD as the similarity measure for a given time point" + ssd2_flag = traits.Range(low=0, desc=desc, argstr="-ssd %d") kld_flag = traits.Bool( - desc='Use KL divergence as the similarity measure', argstr='--kld') - desc = 'Use KL divergence as the similarity measure for a given time point' - kld2_flag = traits.Range(low=0, desc=desc, argstr='-kld %d') - amc_flag = traits.Bool(desc='Use additive NMI', argstr='-amc') + desc="Use KL divergence as the similarity measure", argstr="--kld" + ) + desc = "Use KL divergence as the similarity measure for a given time point" + kld2_flag = traits.Range(low=0, desc=desc, argstr="-kld %d") + amc_flag = traits.Bool(desc="Use additive NMI", argstr="-amc") - nox_flag = traits.Bool(desc="Don't optimise in x direction", argstr='-nox') - noy_flag = traits.Bool(desc="Don't optimise in y direction", argstr='-noy') - noz_flag = traits.Bool(desc="Don't optimise in z direction", argstr='-noz') + nox_flag = traits.Bool(desc="Don't optimise in x direction", argstr="-nox") + noy_flag = traits.Bool(desc="Don't optimise in y direction", argstr="-noy") + noz_flag = traits.Bool(desc="Don't optimise in z direction", argstr="-noz") # Optimization options maxit_val = traits.Range( - low=0, - argstr='-maxit %d', - desc='Maximum number of iterations per level') + low=0, argstr="-maxit %d", desc="Maximum number of iterations per level" + ) ln_val = traits.Range( - low=0, argstr='-ln %d', desc='Number of resolution levels to create') + low=0, argstr="-ln %d", desc="Number of resolution levels to create" + ) lp_val = traits.Range( - low=0, argstr='-lp %d', desc='Number of resolution levels to perform') + low=0, argstr="-lp %d", desc="Number of resolution levels to perform" + ) nopy_flag = traits.Bool( - desc='Do not use the multiresolution approach', argstr='-nopy') - noconj_flag = traits.Bool( - desc='Use simple GD optimization', argstr='-noConj') - desc = 'Add perturbation steps after each optimization step' - pert_val = traits.Range(low=0, desc=desc, argstr='-pert %d') + desc="Do not use the multiresolution approach", argstr="-nopy" + ) + noconj_flag = traits.Bool(desc="Use simple GD optimization", argstr="-noConj") + desc = "Add perturbation steps after each optimization step" + pert_val = traits.Range(low=0, desc=desc, argstr="-pert %d") # F3d2 options - vel_flag = traits.Bool( - desc='Use velocity field integration', argstr='-vel') - fmask_file = File( - exists=True, desc='Floating image mask', argstr='-fmask %s') + vel_flag = traits.Bool(desc="Use velocity field integration", argstr="-vel") + fmask_file = File(exists=True, desc="Floating image mask", argstr="-fmask %s") # Other options - desc = 'Kernel width for smoothing the metric gradient' - smooth_grad_val = traits.Float(desc=desc, argstr='-smoothGrad %f') + desc = "Kernel width for smoothing the metric gradient" + smooth_grad_val = traits.Float(desc=desc, argstr="-smoothGrad %f") # Padding value - pad_val = traits.Float(desc='Padding value', argstr='-pad %f') + pad_val = 
traits.Float(desc="Padding value", argstr="-pad %f") # verbosity off - verbosity_off_flag = traits.Bool( - argstr='-voff', desc='Turn off verbose output') + verbosity_off_flag = traits.Bool(argstr="-voff", desc="Turn off verbose output") # Output CPP image file cpp_file = File( - name_source=['flo_file'], - name_template='%s_cpp.nii.gz', - desc='The output CPP file', - argstr='-cpp %s') + name_source=["flo_file"], + name_template="%s_cpp.nii.gz", + desc="The output CPP file", + argstr="-cpp %s", + ) # Output warped image file res_file = File( - name_source=['flo_file'], - name_template='%s_res.nii.gz', - desc='The output resampled image', - argstr='-res %s') + name_source=["flo_file"], + name_template="%s_res.nii.gz", + desc="The output resampled image", + argstr="-res %s", + ) class RegF3DOutputSpec(TraitedSpec): - """ Output Spec for RegF3D. """ - cpp_file = File(desc='The output CPP file') - res_file = File(desc='The output resampled image') - invcpp_file = File(desc='The output inverse CPP file') - invres_file = File(desc='The output inverse res file') - desc = 'Output string in the format for reg_average' + """Output Spec for RegF3D.""" + + cpp_file = File(desc="The output CPP file") + res_file = File(desc="The output resampled image") + invcpp_file = File(desc="The output inverse CPP file") + invres_file = File(desc="The output inverse res file") + desc = "Output string in the format for reg_average" avg_output = traits.String(desc=desc) @@ -368,7 +374,8 @@ class RegF3D(NiftyRegCommand): -res im2_res.nii.gz -rmask mask.nii' """ - _cmd = get_custom_path('reg_f3d') + + _cmd = get_custom_path("reg_f3d") input_spec = RegF3DInputSpec output_spec = RegF3DOutputSpec @@ -378,23 +385,26 @@ def _remove_extension(in_file): return os.path.join(dn, bn) def _list_outputs(self): - outputs = super(RegF3D, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.vel_flag is True: - res_name = self._remove_extension(outputs['res_file']) - cpp_name = self._remove_extension(outputs['cpp_file']) - outputs['invres_file'] = '%s_backward.nii.gz' % res_name - outputs['invcpp_file'] = '%s_backward.nii.gz' % cpp_name + res_name = self._remove_extension(outputs["res_file"]) + cpp_name = self._remove_extension(outputs["cpp_file"]) + outputs["invres_file"] = "%s_backward.nii.gz" % res_name + outputs["invcpp_file"] = "%s_backward.nii.gz" % cpp_name # Make a list of the linear transformation file and the input image if self.inputs.vel_flag is True and isdefined(self.inputs.aff_file): - cpp_file = os.path.abspath(outputs['cpp_file']) + cpp_file = os.path.abspath(outputs["cpp_file"]) flo_file = os.path.abspath(self.inputs.flo_file) - outputs['avg_output'] = '%s %s %s' % (self.inputs.aff_file, - cpp_file, flo_file) + outputs["avg_output"] = "{} {} {}".format( + self.inputs.aff_file, + cpp_file, + flo_file, + ) else: - cpp_file = os.path.abspath(outputs['cpp_file']) + cpp_file = os.path.abspath(outputs["cpp_file"]) flo_file = os.path.abspath(self.inputs.flo_file) - outputs['avg_output'] = '%s %s' % (cpp_file, flo_file) + outputs["avg_output"] = f"{cpp_file} {flo_file}" return outputs diff --git a/nipype/interfaces/niftyreg/regutils.py b/nipype/interfaces/niftyreg/regutils.py index c90aa53bed..c69cde5a83 100644 --- a/nipype/interfaces/niftyreg/regutils.py +++ b/nipype/interfaces/niftyreg/regutils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The regutils module provides classes for 
interfacing with the `niftyreg @@ -6,83 +5,81 @@ The interfaces were written to work with niftyreg version 1.5.10 """ - -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import len, open, property, super import os -from ..base import TraitedSpec, File, traits, isdefined +from ..base import TraitedSpec, File, traits, Tuple, isdefined from .base import get_custom_path, NiftyRegCommand, NiftyRegCommandInputSpec from ...utils.filemanip import split_filename class RegResampleInputSpec(NiftyRegCommandInputSpec): - """ Input Spec for RegResample. """ + """Input Spec for RegResample.""" + # Input reference file ref_file = File( exists=True, - desc='The input reference/target image', - argstr='-ref %s', - mandatory=True) + desc="The input reference/target image", + argstr="-ref %s", + mandatory=True, + ) # Input floating file flo_file = File( exists=True, - desc='The input floating/source image', - argstr='-flo %s', - mandatory=True) + desc="The input floating/source image", + argstr="-flo %s", + mandatory=True, + ) # Input deformation field trans_file = File( - exists=True, desc='The input transformation file', argstr='-trans %s') + exists=True, desc="The input transformation file", argstr="-trans %s" + ) type = traits.Enum( - 'res', - 'blank', - argstr='-%s', + "res", + "blank", + argstr="-%s", position=-2, usedefault=True, - desc='Type of output') + desc="Type of output", + ) # Output file name out_file = File( - name_source=['flo_file'], - name_template='%s', - argstr='%s', + name_source=["flo_file"], + name_template="%s", + argstr="%s", position=-1, - desc='The output filename of the transformed image') + desc="The output filename of the transformed image", + ) # Interpolation type inter_val = traits.Enum( - 'NN', - 'LIN', - 'CUB', - 'SINC', - desc='Interpolation type', - argstr='-inter %d') + "NN", "LIN", "CUB", "SINC", desc="Interpolation type", argstr="-inter %d" + ) # Padding value - pad_val = traits.Float(desc='Padding value', argstr='-pad %f') + pad_val = traits.Float(desc="Padding value", argstr="-pad %f") # Tensor flag - tensor_flag = traits.Bool(desc='Resample Tensor Map', argstr='-tensor ') + tensor_flag = traits.Bool(desc="Resample Tensor Map", argstr="-tensor ") # Verbosity off - verbosity_off_flag = traits.Bool( - argstr='-voff', desc='Turn off verbose output') + verbosity_off_flag = traits.Bool(argstr="-voff", desc="Turn off verbose output") # PSF flag - desc = 'Perform the resampling in two steps to resample an image to a \ -lower resolution' + desc = "Perform the resampling in two steps to resample an image to a \ +lower resolution" - psf_flag = traits.Bool(argstr='-psf', desc=desc) - desc = 'Minimise the matrix metric (0) or the determinant (1) when \ -estimating the PSF [0]' + psf_flag = traits.Bool(argstr="-psf", desc=desc) + desc = "Minimise the matrix metric (0) or the determinant (1) when \ +estimating the PSF [0]" - psf_alg = traits.Enum(0, 1, argstr='-psf_alg %d', desc=desc) + psf_alg = traits.Enum(0, 1, argstr="-psf_alg %d", desc=desc) class RegResampleOutputSpec(TraitedSpec): - """ Output Spec for RegResample. 
""" - out_file = File(desc='The output filename of the transformed image') + """Output Spec for RegResample.""" + + out_file = File(desc="The output filename of the transformed image") class RegResample(NiftyRegCommand): @@ -108,54 +105,60 @@ class RegResample(NiftyRegCommand): warpfield.nii -res im2_res.nii.gz' """ - _cmd = get_custom_path('reg_resample') + + _cmd = get_custom_path("reg_resample") input_spec = RegResampleInputSpec output_spec = RegResampleOutputSpec # Need this overload to properly constraint the interpolation type input def _format_arg(self, name, spec, value): - if name == 'inter_val': - inter_val = {'NN': 0, 'LIN': 1, 'CUB': 3, 'SINC': 4} + if name == "inter_val": + inter_val = {"NN": 0, "LIN": 1, "CUB": 3, "SINC": 4} return spec.argstr % inter_val[value] else: - return super(RegResample, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.type - return os.path.join(path, '{0}_{1}.nii.gz'.format(base, suffix)) + return os.path.join(path, f"{base}_{suffix}.nii.gz") class RegJacobianInputSpec(NiftyRegCommandInputSpec): - """ Input Spec for RegJacobian. """ + """Input Spec for RegJacobian.""" + # Reference file name - desc = 'Reference/target file (required if specifying CPP transformations.' - ref_file = File(exists=True, desc=desc, argstr='-ref %s') + desc = "Reference/target file (required if specifying CPP transformations." + ref_file = File(exists=True, desc=desc, argstr="-ref %s") # Input transformation file trans_file = File( exists=True, - desc='The input non-rigid transformation', - argstr='-trans %s', - mandatory=True) + desc="The input non-rigid transformation", + argstr="-trans %s", + mandatory=True, + ) type = traits.Enum( - 'jac', - 'jacL', - 'jacM', + "jac", + "jacL", + "jacM", usedefault=True, - argstr='-%s', + argstr="-%s", position=-2, - desc='Type of jacobian outcome') + desc="Type of jacobian outcome", + ) out_file = File( - name_source=['trans_file'], - name_template='%s', - desc='The output jacobian determinant file name', - argstr='%s', - position=-1) + name_source=["trans_file"], + name_template="%s", + desc="The output jacobian determinant file name", + argstr="%s", + position=-1, + ) class RegJacobianOutputSpec(TraitedSpec): - """ Output Spec for RegJacobian. """ - out_file = File(desc='The output file') + """Output Spec for RegJacobian.""" + + out_file = File(desc="The output file") class RegJacobian(NiftyRegCommand): @@ -178,129 +181,129 @@ class RegJacobian(NiftyRegCommand): warpfield_jac.nii.gz' """ - _cmd = get_custom_path('reg_jacobian') + + _cmd = get_custom_path("reg_jacobian") input_spec = RegJacobianInputSpec output_spec = RegJacobianOutputSpec def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.type - return os.path.join(path, '{0}_{1}.nii.gz'.format(base, suffix)) + return os.path.join(path, f"{base}_{suffix}.nii.gz") class RegToolsInputSpec(NiftyRegCommandInputSpec): - """ Input Spec for RegTools. 
""" + """Input Spec for RegTools.""" + # Input image file in_file = File( - exists=True, - desc='The input image file path', - argstr='-in %s', - mandatory=True) + exists=True, desc="The input image file path", argstr="-in %s", mandatory=True + ) # Output file path out_file = File( - name_source=['in_file'], - name_template='%s_tools.nii.gz', - desc='The output file name', - argstr='-out %s') + name_source=["in_file"], + name_template="%s_tools.nii.gz", + desc="The output file name", + argstr="-out %s", + ) # Make the output image isotropic - iso_flag = traits.Bool(argstr='-iso', desc='Make output image isotropic') + iso_flag = traits.Bool(argstr="-iso", desc="Make output image isotropic") # Set scale, slope to 0 and 1. - noscl_flag = traits.Bool( - argstr='-noscl', desc='Set scale, slope to 0 and 1') + noscl_flag = traits.Bool(argstr="-noscl", desc="Set scale, slope to 0 and 1") # Values outside the mask are set to NaN mask_file = File( - exists=True, - desc='Values outside the mask are set to NaN', - argstr='-nan %s') + exists=True, desc="Values outside the mask are set to NaN", argstr="-nan %s" + ) # Threshold the input image - desc = 'Binarise the input image with the given threshold' - thr_val = traits.Float(desc=desc, argstr='-thr %f') + desc = "Binarise the input image with the given threshold" + thr_val = traits.Float(desc=desc, argstr="-thr %f") # Binarise the input image - bin_flag = traits.Bool(argstr='-bin', desc='Binarise the input image') + bin_flag = traits.Bool(argstr="-bin", desc="Binarise the input image") # Compute the mean RMS between the two images rms_val = File( - exists=True, - desc='Compute the mean RMS between the images', - argstr='-rms %s') + exists=True, desc="Compute the mean RMS between the images", argstr="-rms %s" + ) # Perform division by image or value div_val = traits.Either( traits.Float, File(exists=True), - desc='Divide the input by image or value', - argstr='-div %s') + desc="Divide the input by image or value", + argstr="-div %s", + ) # Perform multiplication by image or value mul_val = traits.Either( traits.Float, File(exists=True), - desc='Multiply the input by image or value', - argstr='-mul %s') + desc="Multiply the input by image or value", + argstr="-mul %s", + ) # Perform addition by image or value add_val = traits.Either( traits.Float, File(exists=True), - desc='Add to the input image or value', - argstr='-add %s') + desc="Add to the input image or value", + argstr="-add %s", + ) # Perform subtraction by image or value sub_val = traits.Either( traits.Float, File(exists=True), - desc='Add to the input image or value', - argstr='-sub %s') + desc="Add to the input image or value", + argstr="-sub %s", + ) # Downsample the image by a factor of 2. 
down_flag = traits.Bool( - desc='Downsample the image by a factor of 2', argstr='-down') + desc="Downsample the image by a factor of 2", argstr="-down" + ) # Smoothing using spline kernel - desc = 'Smooth the input image using a cubic spline kernel' - smo_s_val = traits.Tuple( - traits.Float, - traits.Float, - traits.Float, - desc=desc, - argstr='-smoS %f %f %f') + desc = "Smooth the input image using a cubic spline kernel" + smo_s_val = Tuple( + traits.Float, traits.Float, traits.Float, desc=desc, argstr="-smoS %f %f %f" + ) # Change the resolution of the input image - chg_res_val = traits.Tuple( + chg_res_val = Tuple( traits.Float, traits.Float, traits.Float, - desc='Change the resolution of the input image', - argstr='-chgres %f %f %f') + desc="Change the resolution of the input image", + argstr="-chgres %f %f %f", + ) # Smoothing using Gaussian kernel - desc = 'Smooth the input image using a Gaussian kernel' - smo_g_val = traits.Tuple( - traits.Float, - traits.Float, - traits.Float, - desc=desc, - argstr='-smoG %f %f %f') + desc = "Smooth the input image using a Gaussian kernel" + smo_g_val = Tuple( + traits.Float, traits.Float, traits.Float, desc=desc, argstr="-smoG %f %f %f" + ) # Interpolation type inter_val = traits.Enum( - 'NN', - 'LIN', - 'CUB', - 'SINC', - desc='Interpolation order to use to warp the floating image', - argstr='-interp %d') + "NN", + "LIN", + "CUB", + "SINC", + desc="Interpolation order to use to warp the floating image", + argstr="-interp %d", + ) class RegToolsOutputSpec(TraitedSpec): - """ Output Spec for RegTools. """ - out_file = File(desc='The output file', exists=True) + """Output Spec for RegTools.""" + + out_file = File(desc="The output file", exists=True) class RegTools(NiftyRegCommand): @@ -323,115 +326,144 @@ class RegTools(NiftyRegCommand): 'reg_tools -in im1.nii -mul 4.0 -omp 4 -out im1_tools.nii.gz' """ - _cmd = get_custom_path('reg_tools') + + _cmd = get_custom_path("reg_tools") input_spec = RegToolsInputSpec output_spec = RegToolsOutputSpec - _suffix = '_tools' + _suffix = "_tools" # Need this overload to properly constraint the interpolation type input def _format_arg(self, name, spec, value): - if name == 'inter_val': - inter_val = {'NN': 0, 'LIN': 1, 'CUB': 3, 'SINC': 4} + if name == "inter_val": + inter_val = {"NN": 0, "LIN": 1, "CUB": 3, "SINC": 4} return spec.argstr % inter_val[value] else: - return super(RegTools, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class RegAverageInputSpec(NiftyRegCommandInputSpec): - """ Input Spec for RegAverage. 
""" + """Input Spec for RegAverage.""" + avg_files = traits.List( File(exist=True), position=1, - argstr='-avg %s', - sep=' ', + argstr="-avg %s", + sep=" ", xor=[ - 'avg_lts_files', 'avg_ref_file', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file', 'warp_files' + "avg_lts_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", + "warp_files", ], - desc='Averaging of images/affine transformations') + desc="Averaging of images/affine transformations", + ) - desc = 'Robust average of affine transformations' + desc = "Robust average of affine transformations" avg_lts_files = traits.List( File(exist=True), position=1, - argstr='-avg_lts %s', - sep=' ', + argstr="-avg_lts %s", + sep=" ", xor=[ - 'avg_files', 'avg_ref_file', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file', 'warp_files' + "avg_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", + "warp_files", ], - desc=desc) + desc=desc, + ) - desc = 'All input images are resampled into the space of \ - and averaged. A cubic spline interpolation scheme is used for resampling' + desc = "All input images are resampled into the space of \ + and averaged. A cubic spline interpolation scheme is used for resampling" avg_ref_file = File( position=1, - argstr='-avg_tran %s', + argstr="-avg_tran %s", xor=[ - 'avg_files', 'avg_lts_files', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file' + "avg_files", + "avg_lts_files", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", ], - requires=['warp_files'], - desc=desc) + requires=["warp_files"], + desc=desc, + ) - desc = 'Average images and demean average image that have affine \ -transformations to a common space' + desc = "Average images and demean average image that have affine \ +transformations to a common space" demean1_ref_file = File( position=1, - argstr='-demean1 %s', + argstr="-demean1 %s", xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', 'demean2_ref_file', - 'demean3_ref_file' + "avg_files", + "avg_lts_files", + "avg_ref_file", + "demean2_ref_file", + "demean3_ref_file", ], - requires=['warp_files'], - desc=desc) + requires=["warp_files"], + desc=desc, + ) - desc = 'Average images and demean average image that have non-rigid \ -transformations to a common space' + desc = "Average images and demean average image that have non-rigid \ +transformations to a common space" demean2_ref_file = File( position=1, - argstr='-demean2 %s', + argstr="-demean2 %s", xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', 'demean1_ref_file', - 'demean3_ref_file' + "avg_files", + "avg_lts_files", + "avg_ref_file", + "demean1_ref_file", + "demean3_ref_file", ], - requires=['warp_files'], - desc=desc) + requires=["warp_files"], + desc=desc, + ) - desc = 'Average images and demean average image that have linear and \ -non-rigid transformations to a common space' + desc = "Average images and demean average image that have linear and \ +non-rigid transformations to a common space" demean3_ref_file = File( position=1, - argstr='-demean3 %s', + argstr="-demean3 %s", xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', 'demean1_ref_file', - 'demean2_ref_file' + "avg_files", + "avg_lts_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", ], - requires=['warp_files'], - desc=desc) + requires=["warp_files"], + desc=desc, + ) - desc = 'transformation files and floating image pairs/triplets to the \ -reference space' + desc = "transformation files and floating image pairs/triplets to 
the \ +reference space" warp_files = traits.List( File(exist=True), position=-1, - argstr='%s', - sep=' ', - xor=['avg_files', 'avg_lts_files'], - desc=desc) + argstr="%s", + sep=" ", + xor=["avg_files", "avg_lts_files"], + desc=desc, + ) - out_file = File( - genfile=True, position=0, desc='Output file name', argstr='%s') + out_file = File(genfile=True, position=0, desc="Output file name", argstr="%s") class RegAverageOutputSpec(TraitedSpec): - """ Output Spec for RegAverage. """ - out_file = File(desc='Output file name') + """Output Spec for RegAverage.""" + + out_file = File(desc="Output file name") class RegAverage(NiftyRegCommand): @@ -459,20 +491,21 @@ class RegAverage(NiftyRegCommand): >>> node.cmdline # doctest: +ELLIPSIS 'reg_average --cmd_file .../reg_average_cmd' """ - _cmd = get_custom_path('reg_average') + + _cmd = get_custom_path("reg_average") input_spec = RegAverageInputSpec output_spec = RegAverageOutputSpec - _suffix = 'avg_out' + _suffix = "avg_out" def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": if isdefined(self.inputs.avg_lts_files): - return self._gen_fname(self._suffix, ext='.txt') + return self._gen_fname(self._suffix, ext=".txt") elif isdefined(self.inputs.avg_files): _, _, _ext = split_filename(self.inputs.avg_files[0]) - if _ext not in ['.nii', '.nii.gz', '.hdr', '.img', '.img.gz']: + if _ext not in [".nii", ".nii.gz", ".hdr", ".img", ".img.gz"]: return self._gen_fname(self._suffix, ext=_ext) - return self._gen_fname(self._suffix, ext='.nii.gz') + return self._gen_fname(self._suffix, ext=".nii.gz") return None @@ -480,145 +513,207 @@ def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): - outputs['out_file'] = self.inputs.out_file + outputs["out_file"] = self.inputs.out_file else: - outputs['out_file'] = self._gen_filename('out_file') + outputs["out_file"] = self._gen_filename("out_file") return outputs @property def cmdline(self): - """ Rewrite the cmdline to write options in text_file.""" - argv = super(RegAverage, self).cmdline - reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd') - with open(reg_average_cmd, 'w') as f: + """Rewrite the cmdline to write options in text_file.""" + argv = super().cmdline + reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") + with open(reg_average_cmd, "w") as f: f.write(argv) - return '%s --cmd_file %s' % (self.cmd, reg_average_cmd) + return f"{self.cmd} --cmd_file {reg_average_cmd}" class RegTransformInputSpec(NiftyRegCommandInputSpec): - """ Input Spec for RegTransform. 
""" + """Input Spec for RegTransform.""" + ref1_file = File( exists=True, - desc='The input reference/target image', - argstr='-ref %s', - position=0) + desc="The input reference/target image", + argstr="-ref %s", + position=0, + ) ref2_file = File( exists=True, - desc='The input second reference/target image', - argstr='-ref2 %s', + desc="The input second reference/target image", + argstr="-ref2 %s", position=1, - requires=['ref1_file']) + requires=["ref1_file"], + ) def_input = File( exists=True, - argstr='-def %s', + argstr="-def %s", position=-2, - desc='Compute deformation field from transformation', + desc="Compute deformation field from transformation", xor=[ - 'disp_input', 'flow_input', 'comp_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', 'make_aff_input', - 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) disp_input = File( exists=True, - argstr='-disp %s', + argstr="-disp %s", position=-2, - desc='Compute displacement field from transformation', + desc="Compute displacement field from transformation", xor=[ - 'def_input', 'flow_input', 'comp_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', 'make_aff_input', - 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) flow_input = File( exists=True, - argstr='-flow %s', + argstr="-flow %s", position=-2, - desc='Compute flow field from spline SVF', + desc="Compute flow field from spline SVF", xor=[ - 'def_input', 'disp_input', 'comp_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', 'make_aff_input', - 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) comp_input = File( exists=True, - argstr='-comp %s', + argstr="-comp %s", position=-3, - desc='compose two transformations', + desc="compose two transformations", xor=[ - 'def_input', 'disp_input', 'flow_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', 'make_aff_input', - 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "flow_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], - requires=['comp_input2']) + requires=["comp_input2"], + ) comp_input2 = File( - exists=True, - argstr='%s', - position=-2, - desc='compose two transformations') + exists=True, argstr="%s", position=-2, desc="compose two transformations" + ) - desc = 'Update s-form using the affine transformation' + desc = "Update s-form using the affine transformation" upd_s_form_input = File( exists=True, - argstr='-updSform %s', + argstr="-updSform %s", position=-3, desc=desc, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', 'make_aff_input', - 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "flow_input", + "comp_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], - 
requires=['upd_s_form_input2']) + requires=["upd_s_form_input2"], + ) - desc = 'Update s-form using the affine transformation' + desc = "Update s-form using the affine transformation" upd_s_form_input2 = File( - exists=True, - argstr='%s', - position=-2, - desc=desc, - requires=['upd_s_form_input']) + exists=True, argstr="%s", position=-2, desc=desc, requires=["upd_s_form_input"] + ) inv_aff_input = File( exists=True, - argstr='-invAff %s', + argstr="-invAff %s", position=-2, - desc='Invert an affine transformation', + desc="Invert an affine transformation", xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) - inv_nrr_input = traits.Tuple( + inv_nrr_input = Tuple( File(exists=True), File(exists=True), - desc='Invert a non-linear transformation', - argstr='-invNrr %s %s', + desc="Invert a non-linear transformation", + argstr="-invNrr %s %s", position=-2, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) half_input = File( exists=True, - argstr='-half %s', + argstr="-half %s", position=-2, - desc='Half way to the input transformation', + desc="Half way to the input transformation", xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) - argstr_tmp = '-makeAff %f %f %f %f %f %f %f %f %f %f %f %f' - make_aff_input = traits.Tuple( + argstr_tmp = "-makeAff %f %f %f %f %f %f %f %f %f %f %f %f" + make_aff_input = Tuple( traits.Float, traits.Float, traits.Float, @@ -633,51 +728,74 @@ class RegTransformInputSpec(NiftyRegCommandInputSpec): traits.Float, argstr=argstr_tmp, position=-2, - desc='Make an affine transformation matrix', + desc="Make an affine transformation matrix", xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', 'half_input', - 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) - desc = 'Extract the rigid component from affine transformation' + desc = "Extract the rigid component from affine transformation" aff_2_rig_input = File( exists=True, - argstr='-aff2rig %s', + argstr="-aff2rig %s", position=-2, desc=desc, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "flirt_2_nr_input", + ], + ) - desc = 'Convert a 
FLIRT affine transformation to niftyreg affine \ -transformation' + desc = "Convert a FLIRT affine transformation to niftyreg affine \ +transformation" - flirt_2_nr_input = traits.Tuple( + flirt_2_nr_input = Tuple( File(exists=True), File(exists=True), File(exists=True), - argstr='-flirtAff2NR %s %s %s', + argstr="-flirtAff2NR %s %s %s", position=-2, desc=desc, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + ], + ) out_file = File( - genfile=True, - position=-1, - argstr='%s', - desc='transformation file to write') + genfile=True, position=-1, argstr="%s", desc="transformation file to write" + ) class RegTransformOutputSpec(TraitedSpec): - """ Output Spec for RegTransform. """ - out_file = File(desc='Output File (transformation in any format)') + """Output Spec for RegTransform.""" + + out_file = File(desc="Output File (transformation in any format)") class RegTransform(NiftyRegCommand): @@ -698,62 +816,68 @@ class RegTransform(NiftyRegCommand): 'reg_transform -omp 4 -def warpfield.nii .../warpfield_trans.nii.gz' """ - _cmd = get_custom_path('reg_transform') + + _cmd = get_custom_path("reg_transform") input_spec = RegTransformInputSpec output_spec = RegTransformOutputSpec - _suffix = '_trans' + _suffix = "_trans" def _find_input(self): inputs = [ - self.inputs.def_input, self.inputs.disp_input, - self.inputs.flow_input, self.inputs.comp_input, - self.inputs.comp_input2, self.inputs.upd_s_form_input, - self.inputs.inv_aff_input, self.inputs.inv_nrr_input, - self.inputs.half_input, self.inputs.make_aff_input, - self.inputs.aff_2_rig_input, self.inputs.flirt_2_nr_input + self.inputs.def_input, + self.inputs.disp_input, + self.inputs.flow_input, + self.inputs.comp_input, + self.inputs.comp_input2, + self.inputs.upd_s_form_input, + self.inputs.inv_aff_input, + self.inputs.inv_nrr_input, + self.inputs.half_input, + self.inputs.make_aff_input, + self.inputs.aff_2_rig_input, + self.inputs.flirt_2_nr_input, ] entries = [] for entry in inputs: if isdefined(entry): entries.append(entry) _, _, ext = split_filename(entry) - if ext == '.nii' or ext == '.nii.gz' or ext == '.hdr': + if ext == ".nii" or ext == ".nii.gz" or ext == ".hdr": return entry if len(entries): return entries[0] return None def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": if isdefined(self.inputs.make_aff_input): - return self._gen_fname( - 'matrix', suffix=self._suffix, ext='.txt') + return self._gen_fname("matrix", suffix=self._suffix, ext=".txt") - if isdefined(self.inputs.comp_input) and \ - isdefined(self.inputs.comp_input2): + if isdefined(self.inputs.comp_input) and isdefined(self.inputs.comp_input2): _, bn1, ext1 = split_filename(self.inputs.comp_input) _, _, ext2 = split_filename(self.inputs.comp_input2) - if ext1 in ['.nii', '.nii.gz', '.hdr', '.img', '.img.gz'] or \ - ext2 in ['.nii', '.nii.gz', '.hdr', '.img', '.img.gz']: - return self._gen_fname( - bn1, suffix=self._suffix, ext='.nii.gz') + if ext1 in [".nii", ".nii.gz", ".hdr", ".img", ".img.gz"] or ext2 in [ + ".nii", + ".nii.gz", + ".hdr", + ".img", + ".img.gz", + ]: + return self._gen_fname(bn1, suffix=self._suffix, ext=".nii.gz") else: return self._gen_fname(bn1, suffix=self._suffix, ext=ext1) if 
isdefined(self.inputs.flirt_2_nr_input): return self._gen_fname( - self.inputs.flirt_2_nr_input[0], - suffix=self._suffix, - ext='.txt') + self.inputs.flirt_2_nr_input[0], suffix=self._suffix, ext=".txt" + ) input_to_use = self._find_input() _, _, ext = split_filename(input_to_use) - if ext not in ['.nii', '.nii.gz', '.hdr', '.img', '.img.gz']: - return self._gen_fname( - input_to_use, suffix=self._suffix, ext=ext) + if ext not in [".nii", ".nii.gz", ".hdr", ".img", ".img.gz"]: + return self._gen_fname(input_to_use, suffix=self._suffix, ext=ext) else: - return self._gen_fname( - input_to_use, suffix=self._suffix, ext='.nii.gz') + return self._gen_fname(input_to_use, suffix=self._suffix, ext=".nii.gz") return None @@ -761,45 +885,51 @@ def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): - outputs['out_file'] = self.inputs.out_file + outputs["out_file"] = self.inputs.out_file else: - outputs['out_file'] = self._gen_filename('out_file') + outputs["out_file"] = self._gen_filename("out_file") return outputs class RegMeasureInputSpec(NiftyRegCommandInputSpec): - """ Input Spec for RegMeasure. """ + """Input Spec for RegMeasure.""" + # Input reference file ref_file = File( exists=True, - desc='The input reference/target image', - argstr='-ref %s', - mandatory=True) + desc="The input reference/target image", + argstr="-ref %s", + mandatory=True, + ) # Input floating file flo_file = File( exists=True, - desc='The input floating/source image', - argstr='-flo %s', - mandatory=True) + desc="The input floating/source image", + argstr="-flo %s", + mandatory=True, + ) measure_type = traits.Enum( - 'ncc', - 'lncc', - 'nmi', - 'ssd', + "ncc", + "lncc", + "nmi", + "ssd", mandatory=True, - argstr='-%s', - desc='Measure of similarity to compute') + argstr="-%s", + desc="Measure of similarity to compute", + ) out_file = File( - name_source=['flo_file'], - name_template='%s', - argstr='-out %s', - desc='The output text file containing the measure') + name_source=["flo_file"], + name_template="%s", + argstr="-out %s", + desc="The output text file containing the measure", + ) class RegMeasureOutputSpec(TraitedSpec): - """ Output Spec for RegMeasure. 
""" - out_file = File(desc='The output text file containing the measure') + """Output Spec for RegMeasure.""" + + out_file = File(desc="The output text file containing the measure") class RegMeasure(NiftyRegCommand): @@ -821,11 +951,12 @@ class RegMeasure(NiftyRegCommand): 'reg_measure -flo im2.nii -lncc -omp 4 -out im2_lncc.txt -ref im1.nii' """ - _cmd = get_custom_path('reg_measure') + + _cmd = get_custom_path("reg_measure") input_spec = RegMeasureInputSpec output_spec = RegMeasureOutputSpec def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.measure_type - return os.path.join(path, '{0}_{1}.txt'.format(base, suffix)) + return os.path.join(path, f"{base}_{suffix}.txt") diff --git a/nipype/interfaces/niftyreg/tests/__init__.py b/nipype/interfaces/niftyreg/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/niftyreg/tests/__init__.py +++ b/nipype/interfaces/niftyreg/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/niftyreg/tests/test_auto_NiftyRegCommand.py b/nipype/interfaces/niftyreg/tests/test_auto_NiftyRegCommand.py index 89615b50d7..43f72df69f 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_NiftyRegCommand.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_NiftyRegCommand.py @@ -1,17 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import NiftyRegCommand def test_NiftyRegCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), omp_core_val=dict( - argstr='-omp %i', + argstr="-omp %i", usedefault=True, ), ) diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegAladin.py b/nipype/interfaces/niftyreg/tests/test_auto_RegAladin.py index 9507f53fa9..a4485d0e20 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_RegAladin.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegAladin.py @@ -1,70 +1,128 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..reg import RegAladin def test_RegAladin_inputs(): input_map = dict( - aff_direct_flag=dict(argstr='-affDirect', ), + aff_direct_flag=dict( + argstr="-affDirect", + ), aff_file=dict( - argstr='-aff %s', - name_source=['flo_file'], - name_template='%s_aff.txt', + argstr="-aff %s", + extensions=None, + name_source=["flo_file"], + name_template="%s_aff.txt", + ), + args=dict( + argstr="%s", + ), + cog_flag=dict( + argstr="-cog", ), - args=dict(argstr='%s', ), - cog_flag=dict(argstr='-cog', ), environ=dict( nohash=True, usedefault=True, ), flo_file=dict( - argstr='-flo %s', + argstr="-flo %s", + extensions=None, mandatory=True, ), - flo_low_val=dict(argstr='-floLowThr %f', ), - flo_up_val=dict(argstr='-floUpThr %f', ), - fmask_file=dict(argstr='-fmask %s', ), - gpuid_val=dict(argstr='-gpuid %i', ), - i_val=dict(argstr='-pi %d', ), - in_aff_file=dict(argstr='-inaff %s', ), - ln_val=dict(argstr='-ln %d', ), - lp_val=dict(argstr='-lp %d', ), - maxit_val=dict(argstr='-maxit %d', ), - nac_flag=dict(argstr='-nac', ), - nosym_flag=dict(argstr='-noSym', ), + flo_low_val=dict( + argstr="-floLowThr %f", + ), + flo_up_val=dict( + argstr="-floUpThr %f", + ), + fmask_file=dict( + argstr="-fmask %s", + extensions=None, + ), + gpuid_val=dict( + argstr="-gpuid %i", + ), + i_val=dict( + argstr="-pi %d", + ), + in_aff_file=dict( + argstr="-inaff %s", + extensions=None, + ), + ln_val=dict( + argstr="-ln %d", + ), + lp_val=dict( 
+ argstr="-lp %d", + ), + maxit_val=dict( + argstr="-maxit %d", + ), + nac_flag=dict( + argstr="-nac", + ), + nosym_flag=dict( + argstr="-noSym", + ), omp_core_val=dict( - argstr='-omp %i', + argstr="-omp %i", usedefault=True, ), - platform_val=dict(argstr='-platf %i', ), + platform_val=dict( + argstr="-platf %i", + ), ref_file=dict( - argstr='-ref %s', + argstr="-ref %s", + extensions=None, mandatory=True, ), - ref_low_val=dict(argstr='-refLowThr %f', ), - ref_up_val=dict(argstr='-refUpThr %f', ), + ref_low_val=dict( + argstr="-refLowThr %f", + ), + ref_up_val=dict( + argstr="-refUpThr %f", + ), res_file=dict( - argstr='-res %s', - name_source=['flo_file'], - name_template='%s_res.nii.gz', - ), - rig_only_flag=dict(argstr='-rigOnly', ), - rmask_file=dict(argstr='-rmask %s', ), - smoo_f_val=dict(argstr='-smooF %f', ), - smoo_r_val=dict(argstr='-smooR %f', ), - v_val=dict(argstr='-pv %d', ), - verbosity_off_flag=dict(argstr='-voff', ), + argstr="-res %s", + extensions=None, + name_source=["flo_file"], + name_template="%s_res.nii.gz", + ), + rig_only_flag=dict( + argstr="-rigOnly", + ), + rmask_file=dict( + argstr="-rmask %s", + extensions=None, + ), + smoo_f_val=dict( + argstr="-smooF %f", + ), + smoo_r_val=dict( + argstr="-smooR %f", + ), + v_val=dict( + argstr="-pv %d", + ), + verbosity_off_flag=dict( + argstr="-voff", + ), ) inputs = RegAladin.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegAladin_outputs(): output_map = dict( - aff_file=dict(), + aff_file=dict( + extensions=None, + ), avg_output=dict(), - res_file=dict(), + res_file=dict( + extensions=None, + ), ) outputs = RegAladin.output_spec() diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegAverage.py b/nipype/interfaces/niftyreg/tests/test_auto_RegAverage.py index 2510a46eb8..0077b85faa 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_RegAverage.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegAverage.py @@ -1,63 +1,88 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..regutils import RegAverage def test_RegAverage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), avg_files=dict( - argstr='-avg %s', + argstr="-avg %s", position=1, - sep=' ', + sep=" ", xor=[ - 'avg_lts_files', 'avg_ref_file', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file', 'warp_files' + "avg_lts_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", + "warp_files", ], ), avg_lts_files=dict( - argstr='-avg_lts %s', + argstr="-avg_lts %s", position=1, - sep=' ', + sep=" ", xor=[ - 'avg_files', 'avg_ref_file', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file', 'warp_files' + "avg_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", + "warp_files", ], ), avg_ref_file=dict( - argstr='-avg_tran %s', + argstr="-avg_tran %s", + extensions=None, position=1, - requires=['warp_files'], + requires=["warp_files"], xor=[ - 'avg_files', 'avg_lts_files', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file' + "avg_files", + "avg_lts_files", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", ], ), demean1_ref_file=dict( - argstr='-demean1 %s', + argstr="-demean1 %s", + extensions=None, position=1, - requires=['warp_files'], + requires=["warp_files"], xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', - 
'demean2_ref_file', 'demean3_ref_file' + "avg_files", + "avg_lts_files", + "avg_ref_file", + "demean2_ref_file", + "demean3_ref_file", ], ), demean2_ref_file=dict( - argstr='-demean2 %s', + argstr="-demean2 %s", + extensions=None, position=1, - requires=['warp_files'], + requires=["warp_files"], xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', - 'demean1_ref_file', 'demean3_ref_file' + "avg_files", + "avg_lts_files", + "avg_ref_file", + "demean1_ref_file", + "demean3_ref_file", ], ), demean3_ref_file=dict( - argstr='-demean3 %s', + argstr="-demean3 %s", + extensions=None, position=1, - requires=['warp_files'], + requires=["warp_files"], xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', - 'demean1_ref_file', 'demean2_ref_file' + "avg_files", + "avg_lts_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", ], ), environ=dict( @@ -65,19 +90,20 @@ def test_RegAverage_inputs(): usedefault=True, ), omp_core_val=dict( - argstr='-omp %i', + argstr="-omp %i", usedefault=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=0, ), warp_files=dict( - argstr='%s', + argstr="%s", position=-1, - sep=' ', - xor=['avg_files', 'avg_lts_files'], + sep=" ", + xor=["avg_files", "avg_lts_files"], ), ) inputs = RegAverage.input_spec() @@ -85,8 +111,14 @@ def test_RegAverage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegAverage_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = RegAverage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegF3D.py b/nipype/interfaces/niftyreg/tests/test_auto_RegF3D.py index e9bdab82c0..b760ebb3d1 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_RegF3D.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegF3D.py @@ -1,96 +1,207 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..reg import RegF3D def test_RegF3D_inputs(): input_map = dict( - aff_file=dict(argstr='-aff %s', ), - amc_flag=dict(argstr='-amc', ), - args=dict(argstr='%s', ), - be_val=dict(argstr='-be %f', ), + aff_file=dict( + argstr="-aff %s", + extensions=None, + ), + amc_flag=dict( + argstr="-amc", + ), + args=dict( + argstr="%s", + ), + be_val=dict( + argstr="-be %f", + ), cpp_file=dict( - argstr='-cpp %s', - name_source=['flo_file'], - name_template='%s_cpp.nii.gz', + argstr="-cpp %s", + extensions=None, + name_source=["flo_file"], + name_template="%s_cpp.nii.gz", ), environ=dict( nohash=True, usedefault=True, ), - fbn2_val=dict(argstr='-fbn %d %d', ), - fbn_val=dict(argstr='--fbn %d', ), + fbn2_val=dict( + argstr="-fbn %d %d", + ), + fbn_val=dict( + argstr="--fbn %d", + ), flo_file=dict( - argstr='-flo %s', + argstr="-flo %s", + extensions=None, mandatory=True, ), - flo_smooth_val=dict(argstr='-smooF %f', ), - flwth2_thr_val=dict(argstr='-fLwTh %d %f', ), - flwth_thr_val=dict(argstr='--fLwTh %f', ), - fmask_file=dict(argstr='-fmask %s', ), - fupth2_thr_val=dict(argstr='-fUpTh %d %f', ), - fupth_thr_val=dict(argstr='--fUpTh %f', ), - incpp_file=dict(argstr='-incpp %s', ), - jl_val=dict(argstr='-jl %f', ), - kld2_flag=dict(argstr='-kld %d', ), - kld_flag=dict(argstr='--kld', ), - le_val=dict(argstr='-le %f', ), - ln_val=dict(argstr='-ln %d', ), - lncc2_val=dict(argstr='-lncc %d %f', ), - lncc_val=dict(argstr='--lncc %f', ), - 
lp_val=dict(argstr='-lp %d', ), - maxit_val=dict(argstr='-maxit %d', ), - nmi_flag=dict(argstr='--nmi', ), - no_app_jl_flag=dict(argstr='-noAppJL', ), - noconj_flag=dict(argstr='-noConj', ), - nopy_flag=dict(argstr='-nopy', ), - nox_flag=dict(argstr='-nox', ), - noy_flag=dict(argstr='-noy', ), - noz_flag=dict(argstr='-noz', ), + flo_smooth_val=dict( + argstr="-smooF %f", + ), + flwth2_thr_val=dict( + argstr="-fLwTh %d %f", + ), + flwth_thr_val=dict( + argstr="--fLwTh %f", + ), + fmask_file=dict( + argstr="-fmask %s", + extensions=None, + ), + fupth2_thr_val=dict( + argstr="-fUpTh %d %f", + ), + fupth_thr_val=dict( + argstr="--fUpTh %f", + ), + incpp_file=dict( + argstr="-incpp %s", + extensions=None, + ), + jl_val=dict( + argstr="-jl %f", + ), + kld2_flag=dict( + argstr="-kld %d", + ), + kld_flag=dict( + argstr="--kld", + ), + le_val=dict( + argstr="-le %f", + ), + ln_val=dict( + argstr="-ln %d", + ), + lncc2_val=dict( + argstr="-lncc %d %f", + ), + lncc_val=dict( + argstr="--lncc %f", + ), + lp_val=dict( + argstr="-lp %d", + ), + maxit_val=dict( + argstr="-maxit %d", + ), + nmi_flag=dict( + argstr="--nmi", + ), + no_app_jl_flag=dict( + argstr="-noAppJL", + ), + noconj_flag=dict( + argstr="-noConj", + ), + nopy_flag=dict( + argstr="-nopy", + ), + nox_flag=dict( + argstr="-nox", + ), + noy_flag=dict( + argstr="-noy", + ), + noz_flag=dict( + argstr="-noz", + ), omp_core_val=dict( - argstr='-omp %i', + argstr="-omp %i", usedefault=True, ), - pad_val=dict(argstr='-pad %f', ), - pert_val=dict(argstr='-pert %d', ), - rbn2_val=dict(argstr='-rbn %d %d', ), - rbn_val=dict(argstr='--rbn %d', ), + pad_val=dict( + argstr="-pad %f", + ), + pert_val=dict( + argstr="-pert %d", + ), + rbn2_val=dict( + argstr="-rbn %d %d", + ), + rbn_val=dict( + argstr="--rbn %d", + ), ref_file=dict( - argstr='-ref %s', + argstr="-ref %s", + extensions=None, mandatory=True, ), - ref_smooth_val=dict(argstr='-smooR %f', ), + ref_smooth_val=dict( + argstr="-smooR %f", + ), res_file=dict( - argstr='-res %s', - name_source=['flo_file'], - name_template='%s_res.nii.gz', - ), - rlwth2_thr_val=dict(argstr='-rLwTh %d %f', ), - rlwth_thr_val=dict(argstr='--rLwTh %f', ), - rmask_file=dict(argstr='-rmask %s', ), - rupth2_thr_val=dict(argstr='-rUpTh %d %f', ), - rupth_thr_val=dict(argstr='--rUpTh %f', ), - smooth_grad_val=dict(argstr='-smoothGrad %f', ), - ssd2_flag=dict(argstr='-ssd %d', ), - ssd_flag=dict(argstr='--ssd', ), - sx_val=dict(argstr='-sx %f', ), - sy_val=dict(argstr='-sy %f', ), - sz_val=dict(argstr='-sz %f', ), - vel_flag=dict(argstr='-vel', ), - verbosity_off_flag=dict(argstr='-voff', ), + argstr="-res %s", + extensions=None, + name_source=["flo_file"], + name_template="%s_res.nii.gz", + ), + rlwth2_thr_val=dict( + argstr="-rLwTh %d %f", + ), + rlwth_thr_val=dict( + argstr="--rLwTh %f", + ), + rmask_file=dict( + argstr="-rmask %s", + extensions=None, + ), + rupth2_thr_val=dict( + argstr="-rUpTh %d %f", + ), + rupth_thr_val=dict( + argstr="--rUpTh %f", + ), + smooth_grad_val=dict( + argstr="-smoothGrad %f", + ), + ssd2_flag=dict( + argstr="-ssd %d", + ), + ssd_flag=dict( + argstr="--ssd", + ), + sx_val=dict( + argstr="-sx %f", + ), + sy_val=dict( + argstr="-sy %f", + ), + sz_val=dict( + argstr="-sz %f", + ), + vel_flag=dict( + argstr="-vel", + ), + verbosity_off_flag=dict( + argstr="-voff", + ), ) inputs = RegF3D.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegF3D_outputs(): output_map = dict( 
avg_output=dict(), - cpp_file=dict(), - invcpp_file=dict(), - invres_file=dict(), - res_file=dict(), + cpp_file=dict( + extensions=None, + ), + invcpp_file=dict( + extensions=None, + ), + invres_file=dict( + extensions=None, + ), + res_file=dict( + extensions=None, + ), ) outputs = RegF3D.output_spec() diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegJacobian.py b/nipype/interfaces/niftyreg/tests/test_auto_RegJacobian.py index 63f917c683..60c8ce5c08 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_RegJacobian.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegJacobian.py @@ -1,32 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..regutils import RegJacobian def test_RegJacobian_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), omp_core_val=dict( - argstr='-omp %i', + argstr="-omp %i", usedefault=True, ), out_file=dict( - argstr='%s', - name_source=['trans_file'], - name_template='%s', + argstr="%s", + extensions=None, + name_source=["trans_file"], + name_template="%s", position=-1, ), - ref_file=dict(argstr='-ref %s', ), + ref_file=dict( + argstr="-ref %s", + extensions=None, + ), trans_file=dict( - argstr='-trans %s', + argstr="-trans %s", + extensions=None, mandatory=True, ), type=dict( - argstr='-%s', + argstr="-%s", position=-2, usedefault=True, ), @@ -36,8 +42,14 @@ def test_RegJacobian_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegJacobian_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = RegJacobian.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegMeasure.py b/nipype/interfaces/niftyreg/tests/test_auto_RegMeasure.py index 3321d87afc..8a7e470e6c 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_RegMeasure.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegMeasure.py @@ -1,34 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..regutils import RegMeasure def test_RegMeasure_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), flo_file=dict( - argstr='-flo %s', + argstr="-flo %s", + extensions=None, mandatory=True, ), measure_type=dict( - argstr='-%s', + argstr="-%s", mandatory=True, ), omp_core_val=dict( - argstr='-omp %i', + argstr="-omp %i", usedefault=True, ), out_file=dict( - argstr='-out %s', - name_source=['flo_file'], - name_template='%s', + argstr="-out %s", + extensions=None, + name_source=["flo_file"], + name_template="%s", ), ref_file=dict( - argstr='-ref %s', + argstr="-ref %s", + extensions=None, mandatory=True, ), ) @@ -37,8 +41,14 @@ def test_RegMeasure_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegMeasure_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = RegMeasure.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegResample.py b/nipype/interfaces/niftyreg/tests/test_auto_RegResample.py index 06b2b48401..6d9c9a93e5 100644 
--- a/nipype/interfaces/niftyreg/tests/test_auto_RegResample.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegResample.py @@ -1,53 +1,78 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..regutils import RegResample def test_RegResample_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), flo_file=dict( - argstr='-flo %s', + argstr="-flo %s", + extensions=None, mandatory=True, ), - inter_val=dict(argstr='-inter %d', ), + inter_val=dict( + argstr="-inter %d", + ), omp_core_val=dict( - argstr='-omp %i', + argstr="-omp %i", usedefault=True, ), out_file=dict( - argstr='%s', - name_source=['flo_file'], - name_template='%s', + argstr="%s", + extensions=None, + name_source=["flo_file"], + name_template="%s", position=-1, ), - pad_val=dict(argstr='-pad %f', ), - psf_alg=dict(argstr='-psf_alg %d', ), - psf_flag=dict(argstr='-psf', ), + pad_val=dict( + argstr="-pad %f", + ), + psf_alg=dict( + argstr="-psf_alg %d", + ), + psf_flag=dict( + argstr="-psf", + ), ref_file=dict( - argstr='-ref %s', + argstr="-ref %s", + extensions=None, mandatory=True, ), - tensor_flag=dict(argstr='-tensor ', ), - trans_file=dict(argstr='-trans %s', ), + tensor_flag=dict( + argstr="-tensor ", + ), + trans_file=dict( + argstr="-trans %s", + extensions=None, + ), type=dict( - argstr='-%s', + argstr="-%s", position=-2, usedefault=True, ), - verbosity_off_flag=dict(argstr='-voff', ), + verbosity_off_flag=dict( + argstr="-voff", + ), ) inputs = RegResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegResample_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = RegResample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py b/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py index 5deb4206e6..9abf8184ec 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py @@ -1,51 +1,92 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..regutils import RegTools def test_RegTools_inputs(): input_map = dict( - add_val=dict(argstr='-add %s', ), - args=dict(argstr='%s', ), - bin_flag=dict(argstr='-bin', ), - chg_res_val=dict(argstr='-chgres %f %f %f', ), - div_val=dict(argstr='-div %s', ), - down_flag=dict(argstr='-down', ), + add_val=dict( + argstr="-add %s", + ), + args=dict( + argstr="%s", + ), + bin_flag=dict( + argstr="-bin", + ), + chg_res_val=dict( + argstr="-chgres %f %f %f", + ), + div_val=dict( + argstr="-div %s", + ), + down_flag=dict( + argstr="-down", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, ), - inter_val=dict(argstr='-interp %d', ), - iso_flag=dict(argstr='-iso', ), - mask_file=dict(argstr='-nan %s', ), - mul_val=dict(argstr='-mul %s', ), - noscl_flag=dict(argstr='-noscl', ), + inter_val=dict( + argstr="-interp %d", + ), + iso_flag=dict( + argstr="-iso", + ), + mask_file=dict( + argstr="-nan %s", + extensions=None, + ), + mul_val=dict( + argstr="-mul %s", + ), + noscl_flag=dict( + argstr="-noscl", + ), omp_core_val=dict( - argstr='-omp %i', + argstr="-omp %i", usedefault=True, ), 
out_file=dict( - argstr='-out %s', - name_source=['in_file'], - name_template='%s_tools.nii.gz', - ), - rms_val=dict(argstr='-rms %s', ), - smo_g_val=dict(argstr='-smoG %f %f %f', ), - smo_s_val=dict(argstr='-smoS %f %f %f', ), - sub_val=dict(argstr='-sub %s', ), - thr_val=dict(argstr='-thr %f', ), + argstr="-out %s", + extensions=None, + name_source=["in_file"], + name_template="%s_tools.nii.gz", + ), + rms_val=dict( + argstr="-rms %s", + extensions=None, + ), + smo_g_val=dict( + argstr="-smoG %f %f %f", + ), + smo_s_val=dict( + argstr="-smoS %f %f %f", + ), + sub_val=dict( + argstr="-sub %s", + ), + thr_val=dict( + argstr="-thr %f", + ), ) inputs = RegTools.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegTools_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = RegTools.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegTransform.py b/nipype/interfaces/niftyreg/tests/test_auto_RegTransform.py index 57c1b0ad86..b9ee8bf2af 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_RegTransform.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegTransform.py @@ -1,50 +1,84 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..regutils import RegTransform def test_RegTransform_inputs(): input_map = dict( aff_2_rig_input=dict( - argstr='-aff2rig %s', + argstr="-aff2rig %s", + extensions=None, position=-2, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', - 'half_input', 'make_aff_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "flirt_2_nr_input", ], ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), comp_input=dict( - argstr='-comp %s', + argstr="-comp %s", + extensions=None, position=-3, - requires=['comp_input2'], + requires=["comp_input2"], xor=[ - 'def_input', 'disp_input', 'flow_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "flow_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], ), comp_input2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), def_input=dict( - argstr='-def %s', + argstr="-def %s", + extensions=None, position=-2, xor=[ - 'disp_input', 'flow_input', 'comp_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], ), disp_input=dict( - argstr='-disp %s', + argstr="-disp %s", + extensions=None, position=-2, xor=[ - 'def_input', 'flow_input', 'comp_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + 
"aff_2_rig_input", + "flirt_2_nr_input", ], ), environ=dict( @@ -52,91 +86,148 @@ def test_RegTransform_inputs(): usedefault=True, ), flirt_2_nr_input=dict( - argstr='-flirtAff2NR %s %s %s', + argstr="-flirtAff2NR %s %s %s", position=-2, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', - 'half_input', 'make_aff_input', 'aff_2_rig_input' + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", ], ), flow_input=dict( - argstr='-flow %s', + argstr="-flow %s", + extensions=None, position=-2, xor=[ - 'def_input', 'disp_input', 'comp_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], ), half_input=dict( - argstr='-half %s', + argstr="-half %s", + extensions=None, position=-2, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], ), inv_aff_input=dict( - argstr='-invAff %s', + argstr="-invAff %s", + extensions=None, position=-2, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], ), inv_nrr_input=dict( - argstr='-invNrr %s %s', + argstr="-invNrr %s %s", position=-2, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], ), make_aff_input=dict( - argstr='-makeAff %f %f %f %f %f %f %f %f %f %f %f %f', + argstr="-makeAff %f %f %f %f %f %f %f %f %f %f %f %f", position=-2, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', - 'half_input', 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], ), omp_core_val=dict( - argstr='-omp %i', + argstr="-omp %i", usedefault=True, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, genfile=True, position=-1, ), ref1_file=dict( - argstr='-ref %s', + argstr="-ref %s", + extensions=None, position=0, ), ref2_file=dict( - argstr='-ref2 %s', + argstr="-ref2 %s", + extensions=None, position=1, - requires=['ref1_file'], + requires=["ref1_file"], ), upd_s_form_input=dict( - argstr='-updSform %s', + argstr="-updSform %s", + extensions=None, position=-3, - requires=['upd_s_form_input2'], + requires=["upd_s_form_input2"], xor=[ - 'def_input', 'disp_input', 
'flow_input', 'comp_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "flow_input", + "comp_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], ), upd_s_form_input2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, - requires=['upd_s_form_input'], + requires=["upd_s_form_input"], ), ) inputs = RegTransform.input_spec() @@ -144,8 +235,14 @@ def test_RegTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegTransform_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = RegTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyreg/tests/test_reg.py b/nipype/interfaces/niftyreg/tests/test_reg.py index 862760139e..4af3e15bdb 100644 --- a/nipype/interfaces/niftyreg/tests/test_reg.py +++ b/nipype/interfaces/niftyreg/tests/test_reg.py @@ -1,47 +1,47 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ....testing import example_data -from .. import (get_custom_path, RegAladin, RegF3D) +from .. import get_custom_path, RegAladin, RegF3D from .test_regutils import no_nifty_tool @pytest.mark.skipif( - no_nifty_tool(cmd='reg_aladin'), - reason="niftyreg is not installed. reg_aladin not found.") + no_nifty_tool(cmd="reg_aladin"), + reason="niftyreg is not installed. reg_aladin not found.", +) def test_reg_aladin(): - """ tests for reg_aladin interface""" + """tests for reg_aladin interface""" # Create a reg_aladin object nr_aladin = RegAladin() # Check if the command is properly defined - assert nr_aladin.cmd == get_custom_path('reg_aladin') + assert nr_aladin.cmd == get_custom_path("reg_aladin") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_aladin.run() # Assign some input data - ref_file = example_data('im1.nii') - flo_file = example_data('im2.nii') - rmask_file = example_data('mask.nii') + ref_file = example_data("im1.nii") + flo_file = example_data("im2.nii") + rmask_file = example_data("mask.nii") nr_aladin.inputs.ref_file = ref_file nr_aladin.inputs.flo_file = flo_file nr_aladin.inputs.rmask_file = rmask_file nr_aladin.inputs.omp_core_val = 4 - cmd_tmp = '{cmd} -aff {aff} -flo {flo} -omp 4 -ref {ref} -res {res} \ --rmask {rmask}' + cmd_tmp = "{cmd} -aff {aff} -flo {flo} -omp 4 -ref {ref} -res {res} \ +-rmask {rmask}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_aladin'), - aff='im2_aff.txt', + cmd=get_custom_path("reg_aladin"), + aff="im2_aff.txt", flo=flo_file, ref=ref_file, - res='im2_res.nii.gz', + res="im2_res.nii.gz", rmask=rmask_file, ) @@ -49,24 +49,24 @@ def test_reg_aladin(): @pytest.mark.skipif( - no_nifty_tool(cmd='reg_f3d'), - reason="niftyreg is not installed. reg_f3d not found.") + no_nifty_tool(cmd="reg_f3d"), reason="niftyreg is not installed. reg_f3d not found." 
+) def test_reg_f3d(): - """ tests for reg_f3d interface""" + """tests for reg_f3d interface""" # Create a reg_f3d object nr_f3d = RegF3D() # Check if the command is properly defined - assert nr_f3d.cmd == get_custom_path('reg_f3d') + assert nr_f3d.cmd == get_custom_path("reg_f3d") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_f3d.run() # Assign some input data - ref_file = example_data('im1.nii') - flo_file = example_data('im2.nii') - rmask_file = example_data('mask.nii') + ref_file = example_data("im1.nii") + flo_file = example_data("im2.nii") + rmask_file = example_data("mask.nii") nr_f3d.inputs.ref_file = ref_file nr_f3d.inputs.flo_file = flo_file nr_f3d.inputs.rmask_file = rmask_file @@ -75,15 +75,15 @@ def test_reg_f3d(): nr_f3d.inputs.be_val = 0.1 nr_f3d.inputs.le_val = 0.1 - cmd_tmp = '{cmd} -be 0.100000 -cpp {cpp} -flo {flo} -le 0.100000 -omp 4 \ --ref {ref} -res {res} -rmask {rmask} -vel' + cmd_tmp = "{cmd} -be 0.100000 -cpp {cpp} -flo {flo} -le 0.100000 -omp 4 \ +-ref {ref} -res {res} -rmask {rmask} -vel" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_f3d'), - cpp='im2_cpp.nii.gz', + cmd=get_custom_path("reg_f3d"), + cpp="im2_cpp.nii.gz", flo=flo_file, ref=ref_file, - res='im2_res.nii.gz', + res="im2_res.nii.gz", rmask=rmask_file, ) diff --git a/nipype/interfaces/niftyreg/tests/test_regutils.py b/nipype/interfaces/niftyreg/tests/test_regutils.py index 918d556ab2..86ec9e5d3a 100644 --- a/nipype/interfaces/niftyreg/tests/test_regutils.py +++ b/nipype/interfaces/niftyreg/tests/test_regutils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -6,8 +5,15 @@ from ....utils.filemanip import which from ....testing import example_data -from .. import (get_custom_path, RegAverage, RegResample, RegJacobian, - RegTools, RegMeasure, RegTransform) +from .. import ( + get_custom_path, + RegAverage, + RegResample, + RegJacobian, + RegTools, + RegMeasure, + RegTransform, +) def no_nifty_tool(cmd=None): @@ -15,455 +21,509 @@ def no_nifty_tool(cmd=None): @pytest.mark.skipif( - no_nifty_tool(cmd='reg_resample'), - reason="niftyreg is not installed. reg_resample not found.") + no_nifty_tool(cmd="reg_resample"), + reason="niftyreg is not installed. 
reg_resample not found.", +) def test_reg_resample_res(): - """ tests for reg_resample interface """ + """tests for reg_resample interface""" # Create a reg_resample object nr_resample = RegResample() # Check if the command is properly defined - assert nr_resample.cmd == get_custom_path('reg_resample') + assert nr_resample.cmd == get_custom_path("reg_resample") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_resample.run() # Resample res - ref_file = example_data('im1.nii') - flo_file = example_data('im2.nii') - trans_file = example_data('warpfield.nii') + ref_file = example_data("im1.nii") + flo_file = example_data("im2.nii") + trans_file = example_data("warpfield.nii") nr_resample.inputs.ref_file = ref_file nr_resample.inputs.flo_file = flo_file nr_resample.inputs.trans_file = trans_file - nr_resample.inputs.inter_val = 'LIN' + nr_resample.inputs.inter_val = "LIN" nr_resample.inputs.omp_core_val = 4 - cmd_tmp = '{cmd} -flo {flo} -inter 1 -omp 4 -ref {ref} -trans {trans} \ --res {res}' + cmd_tmp = "{cmd} -flo {flo} -inter 1 -omp 4 -ref {ref} -trans {trans} \ +-res {res}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_resample'), + cmd=get_custom_path("reg_resample"), flo=flo_file, ref=ref_file, trans=trans_file, - res='im2_res.nii.gz') + res="im2_res.nii.gz", + ) assert nr_resample.cmdline == expected_cmd # test_reg_resample_blank() - nr_resample_2 = RegResample(type='blank', inter_val='LIN', omp_core_val=4) - ref_file = example_data('im1.nii') - flo_file = example_data('im2.nii') - trans_file = example_data('warpfield.nii') + nr_resample_2 = RegResample(type="blank", inter_val="LIN", omp_core_val=4) + ref_file = example_data("im1.nii") + flo_file = example_data("im2.nii") + trans_file = example_data("warpfield.nii") nr_resample_2.inputs.ref_file = ref_file nr_resample_2.inputs.flo_file = flo_file nr_resample_2.inputs.trans_file = trans_file - cmd_tmp = '{cmd} -flo {flo} -inter 1 -omp 4 -ref {ref} -trans {trans} \ --blank {blank}' + cmd_tmp = "{cmd} -flo {flo} -inter 1 -omp 4 -ref {ref} -trans {trans} \ +-blank {blank}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_resample'), + cmd=get_custom_path("reg_resample"), flo=flo_file, ref=ref_file, trans=trans_file, - blank='im2_blank.nii.gz') + blank="im2_blank.nii.gz", + ) assert nr_resample_2.cmdline == expected_cmd @pytest.mark.skipif( - no_nifty_tool(cmd='reg_jacobian'), - reason="niftyreg is not installed. reg_jacobian not found.") + no_nifty_tool(cmd="reg_jacobian"), + reason="niftyreg is not installed. 
reg_jacobian not found.", +) def test_reg_jacobian_jac(): - """ Test interface for RegJacobian """ + """Test interface for RegJacobian""" # Create a reg_jacobian object nr_jacobian = RegJacobian() # Check if the command is properly defined - assert nr_jacobian.cmd == get_custom_path('reg_jacobian') + assert nr_jacobian.cmd == get_custom_path("reg_jacobian") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_jacobian.run() # Test Reg Jacobian: jac - ref_file = example_data('im1.nii') - trans_file = example_data('warpfield.nii') + ref_file = example_data("im1.nii") + trans_file = example_data("warpfield.nii") nr_jacobian.inputs.ref_file = ref_file nr_jacobian.inputs.trans_file = trans_file nr_jacobian.inputs.omp_core_val = 4 - cmd_tmp = '{cmd} -omp 4 -ref {ref} -trans {trans} -jac {jac}' + cmd_tmp = "{cmd} -omp 4 -ref {ref} -trans {trans} -jac {jac}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_jacobian'), + cmd=get_custom_path("reg_jacobian"), ref=ref_file, trans=trans_file, - jac='warpfield_jac.nii.gz') + jac="warpfield_jac.nii.gz", + ) assert nr_jacobian.cmdline == expected_cmd # Test Reg Jacobian: jac m - nr_jacobian_2 = RegJacobian(type='jacM', omp_core_val=4) - ref_file = example_data('im1.nii') - trans_file = example_data('warpfield.nii') + nr_jacobian_2 = RegJacobian(type="jacM", omp_core_val=4) + ref_file = example_data("im1.nii") + trans_file = example_data("warpfield.nii") nr_jacobian_2.inputs.ref_file = ref_file nr_jacobian_2.inputs.trans_file = trans_file - cmd_tmp = '{cmd} -omp 4 -ref {ref} -trans {trans} -jacM {jac}' + cmd_tmp = "{cmd} -omp 4 -ref {ref} -trans {trans} -jacM {jac}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_jacobian'), + cmd=get_custom_path("reg_jacobian"), ref=ref_file, trans=trans_file, - jac='warpfield_jacM.nii.gz') + jac="warpfield_jacM.nii.gz", + ) assert nr_jacobian_2.cmdline == expected_cmd # Test Reg Jacobian: jac l - nr_jacobian_3 = RegJacobian(type='jacL', omp_core_val=4) - ref_file = example_data('im1.nii') - trans_file = example_data('warpfield.nii') + nr_jacobian_3 = RegJacobian(type="jacL", omp_core_val=4) + ref_file = example_data("im1.nii") + trans_file = example_data("warpfield.nii") nr_jacobian_3.inputs.ref_file = ref_file nr_jacobian_3.inputs.trans_file = trans_file - cmd_tmp = '{cmd} -omp 4 -ref {ref} -trans {trans} -jacL {jac}' + cmd_tmp = "{cmd} -omp 4 -ref {ref} -trans {trans} -jacL {jac}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_jacobian'), + cmd=get_custom_path("reg_jacobian"), ref=ref_file, trans=trans_file, - jac='warpfield_jacL.nii.gz') + jac="warpfield_jacL.nii.gz", + ) assert nr_jacobian_3.cmdline == expected_cmd @pytest.mark.skipif( - no_nifty_tool(cmd='reg_tools'), - reason="niftyreg is not installed. reg_tools not found.") + no_nifty_tool(cmd="reg_tools"), + reason="niftyreg is not installed. 
reg_tools not found.", +) def test_reg_tools_mul(): - """ tests for reg_tools interface """ + """tests for reg_tools interface""" # Create a reg_tools object nr_tools = RegTools() # Check if the command is properly defined - assert nr_tools.cmd == get_custom_path('reg_tools') + assert nr_tools.cmd == get_custom_path("reg_tools") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_tools.run() # Test reg_tools: mul - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") nr_tools.inputs.in_file = in_file nr_tools.inputs.mul_val = 4 nr_tools.inputs.omp_core_val = 4 - cmd_tmp = '{cmd} -in {in_file} -mul 4.0 -omp 4 -out {out_file}' + cmd_tmp = "{cmd} -in {in_file} -mul 4.0 -omp 4 -out {out_file}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_tools'), - in_file=in_file, - out_file='im1_tools.nii.gz') + cmd=get_custom_path("reg_tools"), in_file=in_file, out_file="im1_tools.nii.gz" + ) assert nr_tools.cmdline == expected_cmd # Test reg_tools: iso nr_tools_2 = RegTools(iso_flag=True, omp_core_val=4) - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") nr_tools_2.inputs.in_file = in_file - cmd_tmp = '{cmd} -in {in_file} -iso -omp 4 -out {out_file}' + cmd_tmp = "{cmd} -in {in_file} -iso -omp 4 -out {out_file}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_tools'), - in_file=in_file, - out_file='im1_tools.nii.gz') + cmd=get_custom_path("reg_tools"), in_file=in_file, out_file="im1_tools.nii.gz" + ) assert nr_tools_2.cmdline == expected_cmd @pytest.mark.skipif( - no_nifty_tool(cmd='reg_average'), - reason="niftyreg is not installed. reg_average not found.") + no_nifty_tool(cmd="reg_average"), + reason="niftyreg is not installed. reg_average not found.", +) def test_reg_average(): - """ tests for reg_average interface """ + """tests for reg_average interface""" # Create a reg_average object nr_average = RegAverage() # Check if the command is properly defined - assert nr_average.cmd == get_custom_path('reg_average') + assert nr_average.cmd == get_custom_path("reg_average") # Average niis - one_file = example_data('im1.nii') - two_file = example_data('im2.nii') - three_file = example_data('im3.nii') + one_file = example_data("im1.nii") + two_file = example_data("im2.nii") + three_file = example_data("im3.nii") nr_average.inputs.avg_files = [one_file, two_file, three_file] nr_average.inputs.omp_core_val = 1 generated_cmd = nr_average.cmdline # Read the reg_average_cmd - reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd') - with open(reg_average_cmd, 'rb') as f_obj: + reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") + with open(reg_average_cmd, "rb") as f_obj: argv = f_obj.read() os.remove(reg_average_cmd) - expected_argv = '%s %s -avg %s %s %s -omp 1' % ( - get_custom_path('reg_average'), - os.path.join(os.getcwd(), 'avg_out.nii.gz'), one_file, two_file, - three_file) + expected_argv = "{} {} -avg {} {} {} -omp 1".format( + get_custom_path("reg_average"), + os.path.join(os.getcwd(), "avg_out.nii.gz"), + one_file, + two_file, + three_file, + ) - assert argv.decode('utf-8') == expected_argv + assert argv.decode("utf-8") == expected_argv # Test command line with text file - expected_cmd = ('%s --cmd_file %s' % (get_custom_path('reg_average'), - reg_average_cmd)) + expected_cmd = "{} --cmd_file {}".format( + get_custom_path("reg_average"), + reg_average_cmd, + ) assert generated_cmd == expected_cmd # Test Reg Average: average txt nr_average_2 = RegAverage() - one_file = 
example_data('TransformParameters.0.txt') - two_file = example_data('ants_Affine.txt') - three_file = example_data('elastix.txt') + one_file = example_data("TransformParameters.0.txt") + two_file = example_data("ants_Affine.txt") + three_file = example_data("elastix.txt") nr_average_2.inputs.avg_files = [one_file, two_file, three_file] nr_average_2.inputs.omp_core_val = 1 generated_cmd = nr_average_2.cmdline # Read the reg_average_cmd - reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd') - with open(reg_average_cmd, 'rb') as f_obj: + reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") + with open(reg_average_cmd, "rb") as f_obj: argv = f_obj.read() os.remove(reg_average_cmd) - expected_argv = '%s %s -avg %s %s %s -omp 1' % ( - get_custom_path('reg_average'), - os.path.join(os.getcwd(), 'avg_out.txt'), one_file, two_file, - three_file) + expected_argv = "{} {} -avg {} {} {} -omp 1".format( + get_custom_path("reg_average"), + os.path.join(os.getcwd(), "avg_out.txt"), + one_file, + two_file, + three_file, + ) - assert argv.decode('utf-8') == expected_argv + assert argv.decode("utf-8") == expected_argv # Test Reg Average: average list nr_average_3 = RegAverage() - one_file = example_data('TransformParameters.0.txt') - two_file = example_data('ants_Affine.txt') - three_file = example_data('elastix.txt') + one_file = example_data("TransformParameters.0.txt") + two_file = example_data("ants_Affine.txt") + three_file = example_data("elastix.txt") nr_average_3.inputs.avg_lts_files = [one_file, two_file, three_file] nr_average_3.inputs.omp_core_val = 1 generated_cmd = nr_average_3.cmdline # Read the reg_average_cmd - reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd') - with open(reg_average_cmd, 'rb') as f_obj: + reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") + with open(reg_average_cmd, "rb") as f_obj: argv = f_obj.read() os.remove(reg_average_cmd) - expected_argv = ('%s %s -avg_lts %s %s %s -omp 1' % - (get_custom_path('reg_average'), - os.path.join(os.getcwd(), 'avg_out.txt'), one_file, - two_file, three_file)) + expected_argv = "{} {} -avg_lts {} {} {} -omp 1".format( + get_custom_path("reg_average"), + os.path.join(os.getcwd(), "avg_out.txt"), + one_file, + two_file, + three_file, + ) - assert argv.decode('utf-8') == expected_argv + assert argv.decode("utf-8") == expected_argv # Test Reg Average: average ref nr_average_4 = RegAverage() - ref_file = example_data('anatomical.nii') - one_file = example_data('im1.nii') - two_file = example_data('im2.nii') - three_file = example_data('im3.nii') - trans1_file = example_data('roi01.nii') - trans2_file = example_data('roi02.nii') - trans3_file = example_data('roi03.nii') + ref_file = example_data("anatomical.nii") + one_file = example_data("im1.nii") + two_file = example_data("im2.nii") + three_file = example_data("im3.nii") + trans1_file = example_data("roi01.nii") + trans2_file = example_data("roi02.nii") + trans3_file = example_data("roi03.nii") nr_average_4.inputs.warp_files = [ - trans1_file, one_file, trans2_file, two_file, trans3_file, three_file + trans1_file, + one_file, + trans2_file, + two_file, + trans3_file, + three_file, ] nr_average_4.inputs.avg_ref_file = ref_file nr_average_4.inputs.omp_core_val = 1 generated_cmd = nr_average_4.cmdline # Read the reg_average_cmd - reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd') - with open(reg_average_cmd, 'rb') as f_obj: + reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") + with open(reg_average_cmd, "rb") as f_obj: argv = 
f_obj.read() os.remove(reg_average_cmd) - expected_argv = ('%s %s -avg_tran %s -omp 1 %s %s %s %s %s %s' % - (get_custom_path('reg_average'), - os.path.join(os.getcwd(), 'avg_out.nii.gz'), ref_file, - trans1_file, one_file, trans2_file, two_file, - trans3_file, three_file)) + expected_argv = "{} {} -avg_tran {} -omp 1 {} {} {} {} {} {}".format( + get_custom_path("reg_average"), + os.path.join(os.getcwd(), "avg_out.nii.gz"), + ref_file, + trans1_file, + one_file, + trans2_file, + two_file, + trans3_file, + three_file, + ) - assert argv.decode('utf-8') == expected_argv + assert argv.decode("utf-8") == expected_argv # Test Reg Average: demean3 nr_average_5 = RegAverage() - ref_file = example_data('anatomical.nii') - one_file = example_data('im1.nii') - two_file = example_data('im2.nii') - three_file = example_data('im3.nii') - aff1_file = example_data('TransformParameters.0.txt') - aff2_file = example_data('ants_Affine.txt') - aff3_file = example_data('elastix.txt') - trans1_file = example_data('roi01.nii') - trans2_file = example_data('roi02.nii') - trans3_file = example_data('roi03.nii') + ref_file = example_data("anatomical.nii") + one_file = example_data("im1.nii") + two_file = example_data("im2.nii") + three_file = example_data("im3.nii") + aff1_file = example_data("TransformParameters.0.txt") + aff2_file = example_data("ants_Affine.txt") + aff3_file = example_data("elastix.txt") + trans1_file = example_data("roi01.nii") + trans2_file = example_data("roi02.nii") + trans3_file = example_data("roi03.nii") nr_average_5.inputs.warp_files = [ - aff1_file, trans1_file, one_file, aff2_file, trans2_file, two_file, - aff3_file, trans3_file, three_file + aff1_file, + trans1_file, + one_file, + aff2_file, + trans2_file, + two_file, + aff3_file, + trans3_file, + three_file, ] nr_average_5.inputs.demean3_ref_file = ref_file nr_average_5.inputs.omp_core_val = 1 generated_cmd = nr_average_5.cmdline # Read the reg_average_cmd - reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd') - with open(reg_average_cmd, 'rb') as f_obj: + reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") + with open(reg_average_cmd, "rb") as f_obj: argv = f_obj.read() os.remove(reg_average_cmd) - expected_argv = ('%s %s -demean3 %s -omp 1 %s %s %s %s %s %s %s %s %s' % - (get_custom_path('reg_average'), - os.path.join(os.getcwd(), 'avg_out.nii.gz'), ref_file, - aff1_file, trans1_file, one_file, aff2_file, trans2_file, - two_file, aff3_file, trans3_file, three_file)) + expected_argv = "{} {} -demean3 {} -omp 1 {} {} {} {} {} {} {} {} {}".format( + get_custom_path("reg_average"), + os.path.join(os.getcwd(), "avg_out.nii.gz"), + ref_file, + aff1_file, + trans1_file, + one_file, + aff2_file, + trans2_file, + two_file, + aff3_file, + trans3_file, + three_file, + ) - assert argv.decode('utf-8') == expected_argv + assert argv.decode("utf-8") == expected_argv @pytest.mark.skipif( - no_nifty_tool(cmd='reg_transform'), - reason="niftyreg is not installed. reg_transform not found.") + no_nifty_tool(cmd="reg_transform"), + reason="niftyreg is not installed. 
reg_transform not found.", +) def test_reg_transform_def(): - """ tests for reg_transform interface """ + """tests for reg_transform interface""" # Create a reg_transform object nr_transform = RegTransform() # Check if the command is properly defined - assert nr_transform.cmd == get_custom_path('reg_transform') + assert nr_transform.cmd == get_custom_path("reg_transform") # Assign some input data - trans_file = example_data('warpfield.nii') + trans_file = example_data("warpfield.nii") nr_transform.inputs.def_input = trans_file nr_transform.inputs.omp_core_val = 4 - cmd_tmp = '{cmd} -omp 4 -def {trans_file} {out_file}' + cmd_tmp = "{cmd} -omp 4 -def {trans_file} {out_file}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_transform'), + cmd=get_custom_path("reg_transform"), trans_file=trans_file, - out_file=os.path.join(os.getcwd(), 'warpfield_trans.nii.gz')) + out_file=os.path.join(os.getcwd(), "warpfield_trans.nii.gz"), + ) assert nr_transform.cmdline == expected_cmd # Test reg_transform: def ref nr_transform_2 = RegTransform(omp_core_val=4) - ref_file = example_data('im1.nii') - trans_file = example_data('warpfield.nii') + ref_file = example_data("im1.nii") + trans_file = example_data("warpfield.nii") nr_transform_2.inputs.ref1_file = ref_file nr_transform_2.inputs.def_input = trans_file - cmd_tmp = '{cmd} -ref {ref_file} -omp 4 -def {trans_file} {out_file}' + cmd_tmp = "{cmd} -ref {ref_file} -omp 4 -def {trans_file} {out_file}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_transform'), + cmd=get_custom_path("reg_transform"), ref_file=ref_file, trans_file=trans_file, - out_file=os.path.join(os.getcwd(), 'warpfield_trans.nii.gz')) + out_file=os.path.join(os.getcwd(), "warpfield_trans.nii.gz"), + ) assert nr_transform_2.cmdline == expected_cmd # Test reg_transform: comp nii nr_transform_3 = RegTransform(omp_core_val=4) - ref_file = example_data('im1.nii') - trans_file = example_data('warpfield.nii') - trans2_file = example_data('anatomical.nii') + ref_file = example_data("im1.nii") + trans_file = example_data("warpfield.nii") + trans2_file = example_data("anatomical.nii") nr_transform_3.inputs.ref1_file = ref_file nr_transform_3.inputs.comp_input2 = trans2_file nr_transform_3.inputs.comp_input = trans_file - cmd_tmp = '{cmd} -ref {ref_file} -omp 4 -comp {trans1} {trans2} {out_file}' + cmd_tmp = "{cmd} -ref {ref_file} -omp 4 -comp {trans1} {trans2} {out_file}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_transform'), + cmd=get_custom_path("reg_transform"), ref_file=ref_file, trans1=trans_file, trans2=trans2_file, - out_file=os.path.join(os.getcwd(), 'warpfield_trans.nii.gz')) + out_file=os.path.join(os.getcwd(), "warpfield_trans.nii.gz"), + ) assert nr_transform_3.cmdline == expected_cmd # Test reg_transform: comp txt nr_transform_4 = RegTransform(omp_core_val=4) - aff1_file = example_data('ants_Affine.txt') - aff2_file = example_data('elastix.txt') + aff1_file = example_data("ants_Affine.txt") + aff2_file = example_data("elastix.txt") nr_transform_4.inputs.comp_input2 = aff2_file nr_transform_4.inputs.comp_input = aff1_file - cmd_tmp = '{cmd} -omp 4 -comp {aff1} {aff2} {out_file}' + cmd_tmp = "{cmd} -omp 4 -comp {aff1} {aff2} {out_file}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_transform'), + cmd=get_custom_path("reg_transform"), aff1=aff1_file, aff2=aff2_file, - out_file=os.path.join(os.getcwd(), 'ants_Affine_trans.txt')) + out_file=os.path.join(os.getcwd(), "ants_Affine_trans.txt"), + ) assert nr_transform_4.cmdline == expected_cmd # 
Test reg_transform: comp nr_transform_5 = RegTransform(omp_core_val=4) - trans_file = example_data('warpfield.nii') - aff_file = example_data('elastix.txt') + trans_file = example_data("warpfield.nii") + aff_file = example_data("elastix.txt") nr_transform_5.inputs.comp_input2 = trans_file nr_transform_5.inputs.comp_input = aff_file - cmd_tmp = '{cmd} -omp 4 -comp {aff} {trans} {out_file}' + cmd_tmp = "{cmd} -omp 4 -comp {aff} {trans} {out_file}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_transform'), + cmd=get_custom_path("reg_transform"), aff=aff_file, trans=trans_file, - out_file=os.path.join(os.getcwd(), 'elastix_trans.nii.gz')) + out_file=os.path.join(os.getcwd(), "elastix_trans.nii.gz"), + ) assert nr_transform_5.cmdline == expected_cmd # Test reg_transform: flirt nr_transform_6 = RegTransform(omp_core_val=4) - aff_file = example_data('elastix.txt') - ref_file = example_data('im1.nii') - in_file = example_data('im2.nii') + aff_file = example_data("elastix.txt") + ref_file = example_data("im1.nii") + in_file = example_data("im2.nii") nr_transform_6.inputs.flirt_2_nr_input = (aff_file, ref_file, in_file) - cmd_tmp = '{cmd} -omp 4 -flirtAff2NR {aff} {ref} {in_file} {out_file}' + cmd_tmp = "{cmd} -omp 4 -flirtAff2NR {aff} {ref} {in_file} {out_file}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_transform'), + cmd=get_custom_path("reg_transform"), aff=aff_file, ref=ref_file, in_file=in_file, - out_file=os.path.join(os.getcwd(), 'elastix_trans.txt')) + out_file=os.path.join(os.getcwd(), "elastix_trans.txt"), + ) assert nr_transform_6.cmdline == expected_cmd @pytest.mark.skipif( - no_nifty_tool(cmd='reg_measure'), - reason="niftyreg is not installed. reg_measure not found.") + no_nifty_tool(cmd="reg_measure"), + reason="niftyreg is not installed. 
reg_measure not found.", +) def test_reg_measure(): - """ tests for reg_measure interface """ + """tests for reg_measure interface""" # Create a reg_measure object nr_measure = RegMeasure() # Check if the command is properly defined - assert nr_measure.cmd == get_custom_path('reg_measure') + assert nr_measure.cmd == get_custom_path("reg_measure") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_measure.run() # Assign some input data - ref_file = example_data('im1.nii') - flo_file = example_data('im2.nii') + ref_file = example_data("im1.nii") + flo_file = example_data("im2.nii") nr_measure.inputs.ref_file = ref_file nr_measure.inputs.flo_file = flo_file - nr_measure.inputs.measure_type = 'lncc' + nr_measure.inputs.measure_type = "lncc" nr_measure.inputs.omp_core_val = 4 - cmd_tmp = '{cmd} -flo {flo} -lncc -omp 4 -out {out} -ref {ref}' + cmd_tmp = "{cmd} -flo {flo} -lncc -omp 4 -out {out} -ref {ref}" expected_cmd = cmd_tmp.format( - cmd=get_custom_path('reg_measure'), + cmd=get_custom_path("reg_measure"), flo=flo_file, - out='im2_lncc.txt', - ref=ref_file) + out="im2_lncc.txt", + ref=ref_file, + ) assert nr_measure.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyseg/__init__.py b/nipype/interfaces/niftyseg/__init__.py index 14b391edd5..f5d908a8ff 100644 --- a/nipype/interfaces/niftyseg/__init__.py +++ b/nipype/interfaces/niftyseg/__init__.py @@ -10,7 +10,6 @@ from .em import EM from .label_fusion import LabelFusion, CalcTopNCC from .lesions import FillLesions -from .maths import (UnaryMaths, BinaryMaths, BinaryMathsInteger, TupleMaths, - Merge) +from .maths import UnaryMaths, BinaryMaths, BinaryMathsInteger, TupleMaths, Merge from .patchmatch import PatchMatch from .stats import UnaryStats, BinaryStats diff --git a/nipype/interfaces/niftyseg/base.py b/nipype/interfaces/niftyseg/base.py index d68bbcc73b..efc6c51721 100644 --- a/nipype/interfaces/niftyseg/base.py +++ b/nipype/interfaces/niftyseg/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -15,8 +14,6 @@ -------- See the docstrings of the individual classes for examples. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) from ..niftyfit.base import NiftyFitCommand @@ -25,12 +22,12 @@ class NiftySegCommand(NiftyFitCommand): """ Base support interface for NiftySeg commands. """ - _suffix = '_ns' + + _suffix = "_ns" _min_version = None def __init__(self, **inputs): - super(NiftySegCommand, self).__init__(**inputs) + super().__init__(**inputs) def get_version(self): - return super(NiftySegCommand, self).version_from_command( - cmd='seg_EM', flag='--version') + return super().version_from_command(cmd="seg_EM", flag="--version") diff --git a/nipype/interfaces/niftyseg/em.py b/nipype/interfaces/niftyseg/em.py index e9c749c282..615fe2e64a 100644 --- a/nipype/interfaces/niftyseg/em.py +++ b/nipype/interfaces/niftyseg/em.py @@ -11,119 +11,135 @@ See the docstrings of the individual classes for examples. 
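For quick orientation, a minimal usage sketch (illustrative values only; `im1.nii` is the test image used throughout these docstrings, and the exact command line is asserted in the EM class docstring below):

>>> from nipype.interfaces import niftyseg
>>> seg = niftyseg.EM()
>>> seg.inputs.in_file = 'im1.nii'
>>> seg.inputs.no_prior = 4
>>> seg.run()  # doctest: +SKIP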
""" -from ..base import (TraitedSpec, File, traits, CommandLineInputSpec, - InputMultiPath) +from ..base import ( + TraitedSpec, + File, + traits, + Tuple, + CommandLineInputSpec, + InputMultiPath, +) from .base import NiftySegCommand from ..niftyreg.base import get_custom_path class EMInputSpec(CommandLineInputSpec): """Input Spec for EM.""" + in_file = File( - argstr='-in %s', + argstr="-in %s", exists=True, mandatory=True, - desc='Input image to segment', - position=4) + desc="Input image to segment", + position=4, + ) mask_file = File( - argstr='-mask %s', - exists=True, - desc='Filename of the ROI for label fusion') + argstr="-mask %s", exists=True, desc="Filename of the ROI for label fusion" + ) # Priors no_prior = traits.Int( - argstr='-nopriors %s', + argstr="-nopriors %s", mandatory=True, - desc='Number of classes to use without prior', - xor=['prior_4D', 'priors']) + desc="Number of classes to use without prior", + xor=["prior_4D", "priors"], + ) prior_4D = File( - argstr='-prior4D %s', + argstr="-prior4D %s", exists=True, mandatory=True, - desc='4D file containing the priors', - xor=['no_prior', 'priors']) + desc="4D file containing the priors", + xor=["no_prior", "priors"], + ) priors = InputMultiPath( - argstr='%s', + argstr="%s", mandatory=True, - desc='List of priors filepaths.', - xor=['no_prior', 'prior_4D']) + desc="List of priors filepaths.", + xor=["no_prior", "prior_4D"], + ) # iterations max_iter = traits.Int( - argstr='-max_iter %s', + argstr="-max_iter %s", default_value=100, usedefault=True, - desc='Maximum number of iterations') + desc="Maximum number of iterations", + ) min_iter = traits.Int( - argstr='-min_iter %s', + argstr="-min_iter %s", default_value=0, usedefault=True, - desc='Minimum number of iterations') + desc="Minimum number of iterations", + ) # other options bc_order_val = traits.Int( - argstr='-bc_order %s', + argstr="-bc_order %s", default_value=3, usedefault=True, - desc='Polynomial order for the bias field') + desc="Polynomial order for the bias field", + ) mrf_beta_val = traits.Float( - argstr='-mrf_beta %s', desc='Weight of the Markov Random Field') + argstr="-mrf_beta %s", desc="Weight of the Markov Random Field" + ) - desc = 'Bias field correction will run only if the ratio of improvement \ -is below bc_thresh. (default=0 [OFF])' + desc = "Bias field correction will run only if the ratio of improvement \ +is below bc_thresh. (default=0 [OFF])" bc_thresh_val = traits.Float( - argstr='-bc_thresh %s', - default_value=0, - usedefault=True, - desc=desc) + argstr="-bc_thresh %s", default_value=0, usedefault=True, desc=desc + ) - desc = 'Amount of regularization over the diagonal of the covariance \ -matrix [above 1]' + desc = "Amount of regularization over the diagonal of the covariance \ +matrix [above 1]" - reg_val = traits.Float(argstr='-reg %s', desc=desc) + reg_val = traits.Float(argstr="-reg %s", desc=desc) - desc = 'Outlier detection as in (Van Leemput TMI 2003). is the \ + desc = "Outlier detection as in (Van Leemput TMI 2003). 
is the \ Mahalanobis threshold [recommended between 3 and 7] is a convergence \ -ratio below which the outlier detection is going to be done [recommended 0.01]' +ratio below which the outlier detection is going to be done [recommended 0.01]" - outlier_val = traits.Tuple( - traits.Float(), traits.Float(), argstr='-outlier %s %s', desc=desc) + outlier_val = Tuple( + traits.Float(), traits.Float(), argstr="-outlier %s %s", desc=desc + ) - desc = 'Relax Priors [relaxation factor: 00 (recommended=2.0)] /only 3D/' + desc = "Relax Priors [relaxation factor: 00 (recommended=2.0)] /only 3D/" - relax_priors = traits.Tuple( - traits.Float(), traits.Float(), argstr='-rf %s %s', desc=desc) + relax_priors = Tuple(traits.Float(), traits.Float(), argstr="-rf %s %s", desc=desc) # outputs out_file = File( - name_source=['in_file'], - name_template='%s_em.nii.gz', - argstr='-out %s', - desc='Output segmentation') + name_source=["in_file"], + name_template="%s_em.nii.gz", + argstr="-out %s", + desc="Output segmentation", + ) out_bc_file = File( - name_source=['in_file'], - name_template='%s_bc_em.nii.gz', - argstr='-bc_out %s', - desc='Output bias corrected image') + name_source=["in_file"], + name_template="%s_bc_em.nii.gz", + argstr="-bc_out %s", + desc="Output bias corrected image", + ) out_outlier_file = File( - name_source=['in_file'], - name_template='%s_outlier_em.nii.gz', - argstr='-out_outlier %s', - desc='Output outlierness image') + name_source=["in_file"], + name_template="%s_outlier_em.nii.gz", + argstr="-out_outlier %s", + desc="Output outlierness image", + ) class EMOutputSpec(TraitedSpec): """Output Spec for EM.""" + out_file = File(desc="Output segmentation") out_bc_file = File(desc="Output bias corrected image") - out_outlier_file = File(desc='Output outlierness image') + out_outlier_file = File(desc="Output outlierness image") class EM(NiftySegCommand): @@ -147,15 +163,16 @@ class EM(NiftySegCommand): -bc_out im1_bc_em.nii.gz -out im1_em.nii.gz -out_outlier im1_outlier_em.nii.gz' """ - _cmd = get_custom_path('seg_EM', env_dir='NIFTYSEGDIR') - _suffix = '_em' + + _cmd = get_custom_path("seg_EM", env_dir="NIFTYSEGDIR") + _suffix = "_em" input_spec = EMInputSpec output_spec = EMOutputSpec def _format_arg(self, opt, spec, val): """Convert input to appropriate format for seg_EM.""" - if opt == 'priors': + if opt == "priors": _nb_priors = len(self.inputs.priors) - return '-priors %d %s' % (_nb_priors, ' '.join(self.inputs.priors)) + return "-priors %d %s" % (_nb_priors, " ".join(self.inputs.priors)) else: - return super(EM, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) diff --git a/nipype/interfaces/niftyseg/label_fusion.py b/nipype/interfaces/niftyseg/label_fusion.py index 1b0237d37c..56af8a0d20 100644 --- a/nipype/interfaces/niftyseg/label_fusion.py +++ b/nipype/interfaces/niftyseg/label_fusion.py @@ -4,112 +4,122 @@ The fusion module provides higher-level interfaces to some of the operations that can be performed with the seg_LabFusion command-line program. 
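A minimal sketch of the call pattern (illustrative values; the STEPS example in the LabelFusion class docstring below shows the exact command line produced):

>>> from nipype.interfaces import niftyseg
>>> fusion = niftyseg.LabelFusion()
>>> fusion.inputs.in_file = 'im1.nii'
>>> fusion.inputs.classifier_type = 'STEPS'
>>> fusion.inputs.kernel_size = 2.0
>>> fusion.inputs.template_num = 2
>>> fusion.inputs.file_to_seg = 'im2.nii'
>>> fusion.inputs.template_file = 'im3.nii'
>>> fusion.run()  # doctest: +SKIP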
""" - -from builtins import str import os import warnings -from ..base import (TraitedSpec, File, traits, isdefined, CommandLineInputSpec, - NipypeInterfaceError) +from ..base import ( + TraitedSpec, + File, + traits, + Tuple, + isdefined, + CommandLineInputSpec, + NipypeInterfaceError, +) from .base import NiftySegCommand from ..niftyreg.base import get_custom_path from ...utils.filemanip import load_json, save_json, split_filename warn = warnings.warn -warnings.filterwarnings('always', category=UserWarning) +warnings.filterwarnings("always", category=UserWarning) class LabelFusionInput(CommandLineInputSpec): """Input Spec for LabelFusion.""" + in_file = File( - argstr='-in %s', + argstr="-in %s", exists=True, mandatory=True, position=1, - desc='Filename of the 4D integer label image.') + desc="Filename of the 4D integer label image.", + ) - template_file = File(exists=True, desc='Registered templates (4D Image)') + template_file = File(exists=True, desc="Registered templates (4D Image)") file_to_seg = File( - exists=True, - mandatory=True, - desc='Original image to segment (3D Image)') + exists=True, mandatory=True, desc="Original image to segment (3D Image)" + ) mask_file = File( - argstr='-mask %s', - exists=True, - desc='Filename of the ROI for label fusion') + argstr="-mask %s", exists=True, desc="Filename of the ROI for label fusion" + ) out_file = File( - argstr='-out %s', - name_source=['in_file'], - name_template='%s', - desc='Output consensus segmentation') + argstr="-out %s", + name_source=["in_file"], + name_template="%s", + desc="Output consensus segmentation", + ) prob_flag = traits.Bool( - desc='Probabilistic/Fuzzy segmented image', argstr='-outProb') + desc="Probabilistic/Fuzzy segmented image", argstr="-outProb" + ) - desc = 'Verbose level [0 = off, 1 = on, 2 = debug] (default = 0)' - verbose = traits.Enum('0', '1', '2', desc=desc, argstr='-v %s') + desc = "Verbose level [0 = off, 1 = on, 2 = debug] (default = 0)" + verbose = traits.Enum("0", "1", "2", desc=desc, argstr="-v %s") - desc = 'Only consider non-consensus voxels to calculate statistics' - unc = traits.Bool(desc=desc, argstr='-unc') + desc = "Only consider non-consensus voxels to calculate statistics" + unc = traits.Bool(desc=desc, argstr="-unc") classifier_type = traits.Enum( - 'STEPS', - 'STAPLE', - 'MV', - 'SBA', - argstr='-%s', + "STEPS", + "STAPLE", + "MV", + "SBA", + argstr="-%s", mandatory=True, position=2, - desc='Type of Classifier Fusion.') + desc="Type of Classifier Fusion.", + ) desc = "Gaussian kernel size in mm to compute the local similarity" kernel_size = traits.Float(desc=desc) - template_num = traits.Int(desc='Number of labels to use') + template_num = traits.Int(desc="Number of labels to use") # STAPLE and MV options sm_ranking = traits.Enum( - 'ALL', - 'GNCC', - 'ROINCC', - 'LNCC', - argstr='-%s', + "ALL", + "GNCC", + "ROINCC", + "LNCC", + argstr="-%s", usedefault=True, position=3, - desc='Ranking for STAPLE and MV') + desc="Ranking for STAPLE and MV", + ) - dilation_roi = traits.Int(desc='Dilation of the ROI ( d>=1 )') + dilation_roi = traits.Int(desc="Dilation of the ROI ( d>=1 )") # STAPLE and STEPS options - desc = 'Proportion of the label (only for single labels).' - proportion = traits.Float(argstr='-prop %s', desc=desc) + desc = "Proportion of the label (only for single labels)." 
+ proportion = traits.Float(argstr="-prop %s", desc=desc) - desc = 'Update label proportions at each iteration' - prob_update_flag = traits.Bool(desc=desc, argstr='-prop_update') + desc = "Update label proportions at each iteration" + prob_update_flag = traits.Bool(desc=desc, argstr="-prop_update") - desc = 'Value of P and Q [ 0 < (P,Q) < 1 ] (default = 0.99 0.99)' - set_pq = traits.Tuple( - traits.Float, traits.Float, argstr='-setPQ %f %f', desc=desc) + desc = "Value of P and Q [ 0 < (P,Q) < 1 ] (default = 0.99 0.99)" + set_pq = Tuple(traits.Float, traits.Float, argstr="-setPQ %f %f", desc=desc) mrf_value = traits.Float( - argstr='-MRF_beta %f', desc='MRF prior strength (between 0 and 5)') + argstr="-MRF_beta %f", desc="MRF prior strength (between 0 and 5)" + ) - desc = 'Maximum number of iterations (default = 15).' - max_iter = traits.Int(argstr='-max_iter %d', desc=desc) + desc = "Maximum number of iterations (default = 15)." + max_iter = traits.Int(argstr="-max_iter %d", desc=desc) - desc = 'If percent of labels agree, then area is not uncertain.' - unc_thresh = traits.Float(argstr='-uncthres %f', desc=desc) + desc = "If percent of labels agree, then area is not uncertain." + unc_thresh = traits.Float(argstr="-uncthres %f", desc=desc) - desc = 'Ratio for convergence (default epsilon = 10^-5).' - conv = traits.Float(argstr='-conv %f', desc=desc) + desc = "Ratio for convergence (default epsilon = 10^-5)." + conv = traits.Float(argstr="-conv %f", desc=desc) class LabelFusionOutput(TraitedSpec): """Output Spec for LabelFusion.""" - out_file = File(exists=True, desc='image written after calculations') + + out_file = File(exists=True, desc="image written after calculations") class LabelFusion(NiftySegCommand): @@ -150,27 +160,34 @@ class LabelFusion(NiftySegCommand): 'seg_LabFusion -in im1.nii -STEPS 2.000000 2 im2.nii im3.nii -out im1_steps.nii' """ - _cmd = get_custom_path('seg_LabFusion', env_dir='NIFTYSEGDIR') + + _cmd = get_custom_path("seg_LabFusion", env_dir="NIFTYSEGDIR") input_spec = LabelFusionInput output_spec = LabelFusionOutput - _suffix = '_label_fused' + _suffix = "_label_fused" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for seg_maths.""" # Remove options if not STAPLE or STEPS as fusion type: - if opt in ['proportion', 'prob_update_flag', 'set_pq', 'mrf_value', - 'max_iter', 'unc_thresh', 'conv'] and\ - self.inputs.classifier_type not in ['STAPLE', 'STEPS']: - return '' - - if opt == 'sm_ranking': + if opt in [ + "proportion", + "prob_update_flag", + "set_pq", + "mrf_value", + "max_iter", + "unc_thresh", + "conv", + ] and self.inputs.classifier_type not in ["STAPLE", "STEPS"]: + return "" + + if opt == "sm_ranking": return self.get_staple_args(val) # Return options string if STEPS: - if opt == 'classifier_type' and val == 'STEPS': + if opt == "classifier_type" and val == "STEPS": return self.get_steps_args() - return super(LabelFusion, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def get_steps_args(self): if not isdefined(self.inputs.template_file): @@ -188,18 +205,20 @@ def get_steps_args(self): 'classifier_type' is set to 'STEPS'." 
raise NipypeInterfaceError(err) - return "-STEPS %f %d %s %s" % (self.inputs.kernel_size, - self.inputs.template_num, - self.inputs.file_to_seg, - self.inputs.template_file) + return "-STEPS %f %d %s %s" % ( + self.inputs.kernel_size, + self.inputs.template_num, + self.inputs.file_to_seg, + self.inputs.template_file, + ) def get_staple_args(self, ranking): classtype = self.inputs.classifier_type - if classtype not in ['STAPLE', 'MV']: + if classtype not in ["STAPLE", "MV"]: return None - if ranking == 'ALL': - return '-ALL' + if ranking == "ALL": + return "-ALL" if not isdefined(self.inputs.template_file): err = "LabelFusion requires a value for input 'tramplate_file' \ @@ -212,18 +231,21 @@ def get_staple_args(self, ranking): raise NipypeInterfaceError(err % (classtype, ranking)) - if ranking == 'GNCC': + if ranking == "GNCC": if not isdefined(self.inputs.template_num): err = "LabelFusion requires a value for input 'template_num' \ when 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'." raise NipypeInterfaceError(err % (classtype, ranking)) - return "-%s %d %s %s" % (ranking, self.inputs.template_num, - self.inputs.file_to_seg, - self.inputs.template_file) + return "-%s %d %s %s" % ( + ranking, + self.inputs.template_num, + self.inputs.file_to_seg, + self.inputs.template_file, + ) - elif ranking == 'ROINCC': + elif ranking == "ROINCC": if not isdefined(self.inputs.dilation_roi): err = "LabelFusion requires a value for input 'dilation_roi' \ when 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'." @@ -236,61 +258,62 @@ def get_staple_args(self, ranking): raise NipypeInterfaceError(err % self.inputs.dilation_roi) - return "-%s %d %d %s %s" % (ranking, self.inputs.dilation_roi, - self.inputs.template_num, - self.inputs.file_to_seg, - self.inputs.template_file) - elif ranking == 'LNCC': + return "-%s %d %d %s %s" % ( + ranking, + self.inputs.dilation_roi, + self.inputs.template_num, + self.inputs.file_to_seg, + self.inputs.template_file, + ) + elif ranking == "LNCC": if not isdefined(self.inputs.kernel_size): err = "LabelFusion requires a value for input 'kernel_size' \ when 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'." 
raise NipypeInterfaceError(err % (classtype, ranking)) - return "-%s %f %d %s %s" % (ranking, self.inputs.kernel_size, - self.inputs.template_num, - self.inputs.file_to_seg, - self.inputs.template_file) + return "-%s %f %d %s %s" % ( + ranking, + self.inputs.kernel_size, + self.inputs.template_num, + self.inputs.file_to_seg, + self.inputs.template_file, + ) def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) _, _, ext = split_filename(self.inputs.in_file) suffix = self.inputs.classifier_type.lower() - return os.path.join(path, '{0}_{1}{2}'.format(base, suffix, ext)) + return os.path.join(path, f"{base}_{suffix}{ext}") class CalcTopNCCInputSpec(CommandLineInputSpec): """Input Spec for CalcTopNCC.""" + in_file = File( - argstr='-target %s', - exists=True, - mandatory=True, - desc='Target file', - position=1) + argstr="-target %s", exists=True, mandatory=True, desc="Target file", position=1 + ) num_templates = traits.Int( - argstr='-templates %s', - mandatory=True, - position=2, - desc='Number of Templates') + argstr="-templates %s", mandatory=True, position=2, desc="Number of Templates" + ) in_templates = traits.List( - File(exists=True), argstr="%s", position=3, mandatory=True) + File(exists=True), argstr="%s", position=3, mandatory=True + ) top_templates = traits.Int( - argstr='-n %s', - mandatory=True, - position=4, - desc='Number of Top Templates') + argstr="-n %s", mandatory=True, position=4, desc="Number of Top Templates" + ) mask_file = File( - argstr='-mask %s', - exists=True, - desc='Filename of the ROI for label fusion') + argstr="-mask %s", exists=True, desc="Filename of the ROI for label fusion" + ) class CalcTopNCCOutputSpec(TraitedSpec): """Output Spec for CalcTopNCC.""" + out_files = traits.Any(File(exists=True)) @@ -309,23 +332,24 @@ class CalcTopNCC(NiftySegCommand): 'seg_CalcTopNCC -target im1.nii -templates 2 im2.nii im3.nii -n 1' """ - _cmd = get_custom_path('seg_CalcTopNCC', env_dir='NIFTYSEGDIR') - _suffix = '_topNCC' + + _cmd = get_custom_path("seg_CalcTopNCC", env_dir="NIFTYSEGDIR") + _suffix = "_topNCC" input_spec = CalcTopNCCInputSpec output_spec = CalcTopNCCOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() # local caching for backward compatibility - outfile = os.path.join(os.getcwd(), 'CalcTopNCC.json') + outfile = os.path.join(os.getcwd(), "CalcTopNCC.json") if runtime is None or not runtime.stdout: try: - out_files = load_json(outfile)['files'] - except IOError: + out_files = load_json(outfile)["files"] + except OSError: return self.run().outputs else: out_files = [] - for line in runtime.stdout.split('\n'): + for line in runtime.stdout.split("\n"): if line: values = line.split() if len(values) > 1: diff --git a/nipype/interfaces/niftyseg/lesions.py b/nipype/interfaces/niftyseg/lesions.py index 14d7f23c6b..0d055a55f2 100644 --- a/nipype/interfaces/niftyseg/lesions.py +++ b/nipype/interfaces/niftyseg/lesions.py @@ -18,85 +18,88 @@ from ..niftyreg.base import get_custom_path warn = warnings.warn -warnings.filterwarnings('always', category=UserWarning) +warnings.filterwarnings("always", category=UserWarning) class FillLesionsInputSpec(CommandLineInputSpec): """Input Spec for FillLesions.""" + # Mandatory input arguments in_file = File( - argstr='-i %s', + argstr="-i %s", exists=True, mandatory=True, - desc='Input image to fill lesions', - position=1) + desc="Input image to fill lesions", + position=1, + ) lesion_mask = File( - argstr='-l %s', - exists=True, - mandatory=True, - 
desc='Lesion mask', - position=2) + argstr="-l %s", exists=True, mandatory=True, desc="Lesion mask", position=2 + ) # Output file name out_file = File( - name_source=['in_file'], - name_template='%s_lesions_filled.nii.gz', - desc='The output filename of the fill lesions results', - argstr='-o %s', - position=3) + name_source=["in_file"], + name_template="%s_lesions_filled.nii.gz", + desc="The output filename of the fill lesions results", + argstr="-o %s", + position=3, + ) # Optional arguments desc = "Dilate the mask <int> times (in voxels, by default 0)" - in_dilation = traits.Int(desc=desc, argstr='-dil %d') + in_dilation = traits.Int(desc=desc, argstr="-dil %d") - desc = 'Percentage of minimum number of voxels between patches \ -(by default 0.5).' + desc = "Percentage of minimum number of voxels between patches \ +(by default 0.5)." - match = traits.Float(desc=desc, argstr='-match %f') + match = traits.Float(desc=desc, argstr="-match %f") - desc = 'Minimum percentage of valid voxels in target patch \ -(by default 0).' + desc = "Minimum percentage of valid voxels in target patch \ +(by default 0)." - search = traits.Float(desc=desc, argstr='-search %f') + search = traits.Float(desc=desc, argstr="-search %f") - desc = 'Smoothing by <float> (in minimal 6-neighbourhood voxels \ -(by default 0.1)).' + desc = "Smoothing by <float> (in minimal 6-neighbourhood voxels \ +(by default 0.1))." - smooth = traits.Float(desc=desc, argstr='-smo %f') + smooth = traits.Float(desc=desc, argstr="-smo %f") - desc = 'Search regions size respect biggest patch size (by default 4).' - size = traits.Int(desc=desc, argstr='-size %d') + desc = "Search regions size respect biggest patch size (by default 4)." + size = traits.Int(desc=desc, argstr="-size %d") - desc = 'Patch cardinality weighting factor (by default 2).' - cwf = traits.Float(desc=desc, argstr='-cwf %f') + desc = "Patch cardinality weighting factor (by default 2)." + cwf = traits.Float(desc=desc, argstr="-cwf %f") - desc = 'Give a binary mask with the valid search areas.' - bin_mask = File(desc=desc, argstr='-mask %s') + desc = "Give a binary mask with the valid search areas." + bin_mask = File(desc=desc, argstr="-mask %s") desc = "Guizard et al. (FIN 2015) method, it doesn't include the \ multiresolution/hierarchical inpainting part, this part needs to be done \ with some external software such as reg_tools and reg_resample from NiftyReg. \ By default it uses the method presented in Prados et al. (Neuroimage 2016)." - other = traits.Bool(desc=desc, argstr='-other') + other = traits.Bool(desc=desc, argstr="-other") use_2d = traits.Bool( - desc='Uses 2D patches in the Z axis, by default 3D.', argstr='-2D') + desc="Uses 2D patches in the Z axis, by default 3D.", argstr="-2D" + ) debug = traits.Bool( - desc='Save all intermidium files (by default OFF).', argstr='-debug') + desc="Save all intermediate files (by default OFF).", argstr="-debug" + ) - desc = 'Set output <datatype> (char, short, int, uchar, ushort, uint, \ -float, double).' + desc = "Set output <datatype> (char, short, int, uchar, ushort, uint, \ +float, double)."
- out_datatype = traits.String(desc=desc, argstr='-odt %s') + out_datatype = traits.String(desc=desc, argstr="-odt %s") - verbose = traits.Bool(desc='Verbose (by default OFF).', argstr='-v') + verbose = traits.Bool(desc="Verbose (by default OFF).", argstr="-v") class FillLesionsOutputSpec(TraitedSpec): """Output Spec for FillLesions.""" + out_file = File(desc="Output segmentation") @@ -118,6 +121,7 @@ class FillLesions(NiftySegCommand): 'seg_FillLesions -i im1.nii -l im2.nii -o im1_lesions_filled.nii.gz' """ - _cmd = get_custom_path('seg_FillLesions', env_dir='NIFTYSEGDIR') + + _cmd = get_custom_path("seg_FillLesions", env_dir="NIFTYSEGDIR") input_spec = FillLesionsInputSpec output_spec = FillLesionsOutputSpec diff --git a/nipype/interfaces/niftyseg/maths.py b/nipype/interfaces/niftyseg/maths.py index d4773f86e8..726dba3e7a 100644 --- a/nipype/interfaces/niftyseg/maths.py +++ b/nipype/interfaces/niftyseg/maths.py @@ -6,15 +6,18 @@ The maths module provides higher-level interfaces to some of the operations that can be performed with the niftysegmaths (seg_maths) command-line program. -Examples --------- -See the docstrings of the individual classes for examples. """ import os -from ..base import (TraitedSpec, File, traits, isdefined, CommandLineInputSpec, - NipypeInterfaceError) +from ..base import ( + TraitedSpec, + File, + traits, + isdefined, + CommandLineInputSpec, + NipypeInterfaceError, +) from .base import NiftySegCommand from ..niftyreg.base import get_custom_path from ...utils.filemanip import split_filename @@ -22,36 +25,37 @@ class MathsInput(CommandLineInputSpec): """Input Spec for seg_maths interfaces.""" + in_file = File( - position=2, - argstr='%s', - exists=True, - mandatory=True, - desc='image to operate on') + position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on" + ) out_file = File( - name_source=['in_file'], - name_template='%s', + name_source=["in_file"], + name_template="%s", position=-2, - argstr='%s', - desc='image to write') + argstr="%s", + desc="image to write", + ) - desc = 'datatype to use for output (default uses input type)' + desc = "datatype to use for output (default uses input type)" output_datatype = traits.Enum( - 'float', - 'char', - 'int', - 'short', - 'double', - 'input', + "float", + "char", + "int", + "short", + "double", + "input", position=-3, - argstr='-odt %s', - desc=desc) + argstr="-odt %s", + desc=desc, + ) class MathsOutput(TraitedSpec): """Output Spec for seg_maths interfaces.""" - out_file = File(desc='image written after calculations') + + out_file = File(desc="image written after calculations") class MathsCommand(NiftySegCommand): @@ -61,7 +65,7 @@ class MathsCommand(NiftySegCommand): The executable seg_maths enables the sequential execution of arithmetic operations, like multiplication (-mul), division (-div) or addition (-add), binarisation (-bin) or thresholding (-thr) operations and - convolution by a Gaussian kernel (-smo). It also alows mathematical + convolution by a Gaussian kernel (-smo). It also allows mathematical morphology based operations like dilation (-dil), erosion (-ero), connected components (-lconcomp) and hole filling (-fill), Euclidean (- euc) and geodesic (-geo) distance transforms, local image similarity @@ -71,113 +75,91 @@ class MathsCommand(NiftySegCommand): into several 3D images, to estimating the maximum, minimum and average over all time-points, etc. 
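As a minimal sketch of how the concrete subclasses below are driven (illustrative values, mirroring the per-class doctests, which assert the exact command lines):

>>> from nipype.interfaces import niftyseg
>>> maths = niftyseg.BinaryMaths()
>>> maths.inputs.in_file = 'im1.nii'
>>> maths.inputs.operation = 'add'
>>> maths.inputs.operand_value = 2.0
>>> maths.run()  # doctest: +SKIP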
""" - _cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + + _cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") input_spec = MathsInput output_spec = MathsOutput - _suffix = '_maths' + _suffix = "_maths" def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) _, _, ext = split_filename(self.inputs.in_file) suffix = self._suffix - if suffix != '_merged' and isdefined(self.inputs.operation): - suffix = '_' + self.inputs.operation + if suffix != "_merged" and isdefined(self.inputs.operation): + suffix = "_" + self.inputs.operation - return os.path.join(path, '{0}{1}{2}'.format(base, suffix, ext)) + return os.path.join(path, f"{base}{suffix}{ext}") class UnaryMathsInput(MathsInput): """Input Spec for seg_maths Unary operations.""" + operation = traits.Enum( - 'sqrt', - 'exp', - 'log', - 'recip', - 'abs', - 'bin', - 'otsu', - 'lconcomp', - 'concomp6', - 'concomp26', - 'fill', - 'euc', - 'tpmax', - 'tmean', - 'tmax', - 'tmin', - 'splitlab', - 'removenan', - 'isnan', - 'subsamp2', - 'scl', - '4to5', - 'range', - argstr='-%s', + "sqrt", + "exp", + "log", + "recip", + "abs", + "bin", + "otsu", + "lconcomp", + "concomp6", + "concomp26", + "fill", + "euc", + "tpmax", + "tmean", + "tmax", + "tmin", + "splitlab", + "removenan", + "isnan", + "subsamp2", + "scl", + "4to5", + "range", + argstr="-%s", position=4, mandatory=True, - desc='operation to perform') + desc="""\ +Operation to perform: + + * sqrt - Square root of the image). + * exp - Exponential root of the image. + * log - Log of the image. + * recip - Reciprocal (1/I) of the image. + * abs - Absolute value of the image. + * bin - Binarise the image. + * otsu - Otsu thresholding of the current image. + * lconcomp - Take the largest connected component + * concomp6 - Label the different connected components with a 6NN kernel + * concomp26 - Label the different connected components with a 26NN kernel + * fill - Fill holes in binary object (e.g. fill ventricle in brain mask). + * euc - Euclidean distance transform + * tpmax - Get the time point with the highest value (binarise 4D probabilities) + * tmean - Mean value of all time points. + * tmax - Max value of all time points. + * tmin - Mean value of all time points. + * splitlab - Split the integer labels into multiple timepoints + * removenan - Remove all NaNs and replace then with 0 + * isnan - Binary image equal to 1 if the value is NaN and 0 otherwise + * subsamp2 - Subsample the image by 2 using NN sampling (qform and sform scaled) + * scl - Reset scale and slope info. + * 4to5 - Flip the 4th and 5th dimension. + * range - Reset the image range to the min max. + +""", + ) class UnaryMaths(MathsCommand): - """Interface for executable seg_maths from NiftySeg platform. - - Interface to use any unary mathematical operations that can be performed - - with the seg_maths command-line program. - - See below for those operations:: - - sqrt - Square root of the image). - - exp - Exponential root of the image. - - log - Log of the image. - - recip - Reciprocal (1/I) of the image. - - abs - Absolute value of the image. - - bin - Binarise the image. - - otsu - Otsu thresholding of the current image. - - lconcomp - Take the largest connected component - - concomp6 - Label the different connected components with a 6NN kernel - - concomp26 - Label the different connected components with a 26NN kernel - - fill - Fill holes in binary object (e.g. fill ventricle in brain mask). 
- - euc - Euclidean distance trasnform - - tpmax - Get the time point with the highest value (binarise 4D \ -probabilities) - - tmean - Mean value of all time points. - - tmax - Max value of all time points. - - tmin - Mean value of all time points. - - splitlab - Split the integer labels into multiple timepoints - - removenan - Remove all NaNs and replace then with 0 + """Unary mathematical operations. - isnan - Binary image equal to 1 if the value is NaN and 0 otherwise - - subsamp2 - Subsample the image by 2 using NN sampling (qform and sform \ -scaled) - - scl - Reset scale and slope info. - - 4to5 - Flip the 4th and 5th dimension. - - range - Reset the image range to the min max. - - `Source code `_ | - `Documentation `_ + See Also + -------- + `Source code `__ -- + `Documentation `__ Examples -------- @@ -186,30 +168,35 @@ class UnaryMaths(MathsCommand): >>> unary = niftyseg.UnaryMaths() >>> unary.inputs.output_datatype = 'float' >>> unary.inputs.in_file = 'im1.nii' + >>> # Test sqrt operation >>> unary_sqrt = copy.deepcopy(unary) >>> unary_sqrt.inputs.operation = 'sqrt' >>> unary_sqrt.cmdline 'seg_maths im1.nii -sqrt -odt float im1_sqrt.nii' >>> unary_sqrt.run() # doctest: +SKIP + >>> # Test abs operation >>> unary_abs = copy.deepcopy(unary) >>> unary_abs.inputs.operation = 'abs' >>> unary_abs.cmdline 'seg_maths im1.nii -abs -odt float im1_abs.nii' >>> unary_abs.run() # doctest: +SKIP + >>> # Test bin operation >>> unary_bin = copy.deepcopy(unary) >>> unary_bin.inputs.operation = 'bin' >>> unary_bin.cmdline 'seg_maths im1.nii -bin -odt float im1_bin.nii' >>> unary_bin.run() # doctest: +SKIP + >>> # Test otsu operation >>> unary_otsu = copy.deepcopy(unary) >>> unary_otsu.inputs.operation = 'otsu' >>> unary_otsu.cmdline 'seg_maths im1.nii -otsu -odt float im1_otsu.nii' >>> unary_otsu.run() # doctest: +SKIP + >>> # Test isnan operation >>> unary_isnan = copy.deepcopy(unary) >>> unary_isnan.inputs.operation = 'isnan' @@ -218,118 +205,100 @@ class UnaryMaths(MathsCommand): >>> unary_isnan.run() # doctest: +SKIP """ + input_spec = UnaryMathsInput class BinaryMathsInput(MathsInput): """Input Spec for seg_maths Binary operations.""" + operation = traits.Enum( - 'mul', - 'div', - 'add', - 'sub', - 'pow', - 'thr', - 'uthr', - 'smo', - 'edge', - 'sobel3', - 'sobel5', - 'min', - 'smol', - 'geo', - 'llsnorm', - 'masknan', - 'hdr_copy', - 'splitinter', + "mul", + "div", + "add", + "sub", + "pow", + "thr", + "uthr", + "smo", + "edge", + "sobel3", + "sobel5", + "min", + "smol", + "geo", + "llsnorm", + "masknan", + "hdr_copy", + "splitinter", mandatory=True, - argstr='-%s', + argstr="-%s", position=4, - desc='operation to perform') + desc="""\ +Operation to perform: + + * mul - <float/file> - Multiply image <float> value or by other image. + * div - <float/file> - Divide image by <float> or by other image. + * add - <float/file> - Add image by <float> or by other image. + * sub - <float/file> - Subtract image by <float> or by other image. + * pow - <float> - Image to the power of <float>. + * thr - <float> - Threshold the image below <float>. + * uthr - <float> - Threshold image above <float>. + * smo - <float> - Gaussian smoothing by std <float> (in voxels and up to 4-D). + * edge - <float> - Calculate the edges of the image using a threshold <float>. + * sobel3 - <float> - Calculate the edges of all timepoints using a Sobel filter + with a 3x3x3 kernel and applying <float> gaussian smoothing. + * sobel5 - <float> - Calculate the edges of all timepoints using a Sobel filter + with a 5x5x5 kernel and applying <float> gaussian smoothing. + * min - <file> - Get the min per voxel between <current> and <file>. + * smol - <float> - Gaussian smoothing of a 3D label image.
+ * geo - <float/file> - Geodesic distance according to the speed function <float/file> + * llsnorm - Linear LS normalisation between current and <file> + * masknan - Assign everything outside the mask (mask==0) with NaNs + * hdr_copy - Copy header from working image to <file> and save in <output>. + * splitinter - Split interleaved slices in direction <x/y/z> + into separate time points + +""", + ) operand_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=5, - xor=['operand_value', 'operand_str'], - desc='second image to perform operation with') + xor=["operand_value", "operand_str"], + desc="second image to perform operation with", + ) operand_value = traits.Float( - argstr='%.8f', + argstr="%.8f", mandatory=True, position=5, - xor=['operand_file', 'operand_str'], - desc='float value to perform operation with') + xor=["operand_file", "operand_str"], + desc="float value to perform operation with", + ) - desc = 'string value to perform operation splitinter' + desc = "string value to perform operation splitinter" operand_str = traits.Enum( - 'x', - 'y', - 'z', - argstr='%s', + "x", + "y", + "z", + argstr="%s", mandatory=True, position=5, - xor=['operand_value', 'operand_file'], - desc=desc) + xor=["operand_value", "operand_file"], + desc=desc, + ) class BinaryMaths(MathsCommand): - """Interface for executable seg_maths from NiftySeg platform. - - Interface to use any binary mathematical operations that can be performed - - with the seg_maths command-line program. - - See below for those operations:: - - mul - <float/file> - Multiply image <float> value or by other image. - - div - <float/file> - Divide image by <float> or by other image. - - add - <float/file> - Add image by <float> or by other image. - - sub - <float/file> - Subtract image by <float> or by other image. - - pow - <float> - Image to the power of <float>. - - thr - <float> - Threshold the image below <float>. - - uthr - <float> - Threshold image above <float>. - - smo - <float> - Gaussian smoothing by std <float> (in voxels and up to \ -4-D). - - edge - <float> - Calculate the edges of the image using a threshold <\ -float>. - - sobel3 - <float> - Calculate the edges of all timepoints using a Sobel \ -filter with a 3x3x3 kernel and applying <float> gaussian smoothing. + """Binary mathematical operations. - sobel5 - <float> - Calculate the edges of all timepoints using a Sobel \ -filter with a 5x5x5 kernel and applying <float> gaussian smoothing. - - min - <file> - Get the min per voxel between <current> and <file>. - - smol - <float> - Gaussian smoothing of a 3D label image. - - geo - <float/file> - Geodesic distance according to the speed function \ -<float/file> - - llsnorm - Linear LS normalisation between current and \ -<file> - - masknan - Assign everything outside the mask (mask==0) \ -with NaNs - - hdr_copy - Copy header from working image to <file> and save in \ -<output>.
-
-    splitinter - Split interleaved slices in direction into \
-separate time points
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

     Examples
     --------
@@ -338,6 +307,7 @@ class BinaryMaths(MathsCommand):
     >>> binary = niftyseg.BinaryMaths()
     >>> binary.inputs.in_file = 'im1.nii'
     >>> binary.inputs.output_datatype = 'float'
+    >>> # Test sub operation
     >>> binary_sub = copy.deepcopy(binary)
     >>> binary_sub.inputs.operation = 'sub'
@@ -345,6 +315,7 @@ class BinaryMaths(MathsCommand):
     >>> binary_sub.cmdline
     'seg_maths im1.nii -sub im2.nii -odt float im1_sub.nii'
     >>> binary_sub.run()  # doctest: +SKIP
+    >>> # Test mul operation
     >>> binary_mul = copy.deepcopy(binary)
     >>> binary_mul.inputs.operation = 'mul'
@@ -352,6 +323,7 @@ class BinaryMaths(MathsCommand):
     >>> binary_mul.cmdline
     'seg_maths im1.nii -mul 2.00000000 -odt float im1_mul.nii'
     >>> binary_mul.run()  # doctest: +SKIP
+    >>> # Test llsnorm operation
     >>> binary_llsnorm = copy.deepcopy(binary)
     >>> binary_llsnorm.inputs.operation = 'llsnorm'
@@ -359,6 +331,7 @@ class BinaryMaths(MathsCommand):
     >>> binary_llsnorm.cmdline
     'seg_maths im1.nii -llsnorm im2.nii -odt float im1_llsnorm.nii'
     >>> binary_llsnorm.run()  # doctest: +SKIP
+    >>> # Test splitinter operation
     >>> binary_splitinter = copy.deepcopy(binary)
     >>> binary_splitinter.inputs.operation = 'splitinter'
@@ -368,95 +341,90 @@ class BinaryMaths(MathsCommand):
     >>> binary_splitinter.run()  # doctest: +SKIP

     """
+
     input_spec = BinaryMathsInput

     def _format_arg(self, opt, spec, val):
         """Convert input to appropriate format for seg_maths."""
-        if opt == 'operand_str' and self.inputs.operation != 'splitinter':
+        if opt == "operand_str" and self.inputs.operation != "splitinter":
             err = 'operand_str set but with an operation different than \
 "splitinter"'
             raise NipypeInterfaceError(err)

-        if opt == 'operation':
+        if opt == "operation":
             # Only float
-            if val in [
-                    'pow', 'thr', 'uthr', 'smo', 'edge', 'sobel3', 'sobel5',
-                    'smol'
-            ]:
+            if val in ["pow", "thr", "uthr", "smo", "edge", "sobel3", "sobel5", "smol"]:
                 if not isdefined(self.inputs.operand_value):
-                    err = 'operand_value not set for {0}.'.format(val)
+                    err = f"operand_value not set for {val}."
                     raise NipypeInterfaceError(err)
             # only files
-            elif val in ['min', 'llsnorm', 'masknan', 'hdr_copy']:
+            elif val in ["min", "llsnorm", "masknan", "hdr_copy"]:
                 if not isdefined(self.inputs.operand_file):
-                    err = 'operand_file not set for {0}.'.format(val)
+                    err = f"operand_file not set for {val}."
                     raise NipypeInterfaceError(err)
             # splitinter:
-            elif val == 'splitinter':
+            elif val == "splitinter":
                 if not isdefined(self.inputs.operand_str):
-                    err = 'operand_str not set for splitinter.'
+                    err = "operand_str not set for splitinter."
                     raise NipypeInterfaceError(err)

-        if opt == 'operand_value' and float(val) == 0.0:
-            return '0'
+        if opt == "operand_value" and float(val) == 0.0:
+            return "0"

-        return super(BinaryMaths, self)._format_arg(opt, spec, val)
+        return super()._format_arg(opt, spec, val)

     def _overload_extension(self, value, name=None):
-        if self.inputs.operation == 'hdr_copy':
+        if self.inputs.operation == "hdr_copy":
             path, base, _ = split_filename(value)
             _, base, ext = split_filename(self.inputs.operand_file)
             suffix = self.inputs.operation
-            return os.path.join(path, '{0}{1}{2}'.format(base, suffix, ext))
+            return os.path.join(path, f"{base}{suffix}{ext}")
         else:
-            return super(BinaryMaths, self)._overload_extension(value, name)
+            return super()._overload_extension(value, name)


 class BinaryMathsInputInteger(MathsInput):
     """Input Spec for seg_maths Binary operations that require integer."""
+
     operation = traits.Enum(
-        'dil',
-        'ero',
-        'tp',
-        'equal',
-        'pad',
-        'crop',
+        "dil",
+        "ero",
+        "tp",
+        "equal",
+        "pad",
+        "crop",
         mandatory=True,
-        argstr='-%s',
+        argstr="-%s",
         position=4,
-        desc='operation to perform')
+        desc="""\
+Operation to perform:
+
+    * equal - <int> - Get voxels equal to <int>
+    * dil - <int> - Dilate the image <int> times (in voxels).
+    * ero - <int> - Erode the image <int> times (in voxels).
+    * tp - <int> - Extract time point <int>
+    * crop - <int> - Crop <int> voxels around each 3D volume.
+    * pad - <int> - Pad <int> voxels with NaN value around each 3D volume.
+
+""",
+    )

     operand_value = traits.Int(
-        argstr='%d',
+        argstr="%d",
         mandatory=True,
         position=5,
-        desc='int value to perform operation with')
+        desc="int value to perform operation with",
+    )


 class BinaryMathsInteger(MathsCommand):
-    """Interface for executable seg_maths from NiftySeg platform.
-
-    Interface to use any integer mathematical operations that can be performed
-
-    with the seg_maths command-line program.
-
-    See below for those operations:: (requiring integer values)
-
-    equal - - Get voxels equal to
-
-    dil - - Dilate the image times (in voxels).
-
-    ero - - Erode the image times (in voxels).
-
-    tp - - Extract time point
-
-    crop - - Crop voxels around each 3D volume.
-
-    pad - - Pad voxels with NaN value around each 3D volume.
+    """Integer mathematical operations.
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

     Examples
     --------
@@ -488,73 +456,66 @@ class BinaryMathsInteger(MathsCommand):
     >>> binaryi_pad.run()  # doctest: +SKIP

     """
+
     input_spec = BinaryMathsInputInteger


 class TupleMathsInput(MathsInput):
     """Input Spec for seg_maths Tuple operations."""
+
     operation = traits.Enum(
-        'lncc',
-        'lssd',
-        'lltsnorm',
+        "lncc",
+        "lssd",
+        "lltsnorm",
         mandatory=True,
-        argstr='-%s',
+        argstr="-%s",
         position=4,
-        desc='operation to perform')
+        desc="""\
+Operation to perform:
+
+    * lncc <file> <std> Local CC between current img and <file> on a kernel with <std>
+    * lssd <file> <std> Local SSD between current img and <file> on a kernel with <std>
+    * lltsnorm <file_norm> <float> Linear LTS normalisation assuming <float> percent outliers
+
+""",
+    )

     operand_file1 = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=5,
-        xor=['operand_value1'],
-        desc='image to perform operation 1 with')
+        xor=["operand_value1"],
+        desc="image to perform operation 1 with",
+    )

-    desc = 'float value to perform operation 1 with'
+    desc = "float value to perform operation 1 with"
     operand_value1 = traits.Float(
-        argstr='%.8f',
-        mandatory=True,
-        position=5,
-        xor=['operand_file1'],
-        desc=desc)
+        argstr="%.8f", mandatory=True, position=5, xor=["operand_file1"], desc=desc
+    )

     operand_file2 = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=6,
-        xor=['operand_value2'],
-        desc='image to perform operation 2 with')
+        xor=["operand_value2"],
+        desc="image to perform operation 2 with",
+    )

-    desc = 'float value to perform operation 2 with'
+    desc = "float value to perform operation 2 with"
     operand_value2 = traits.Float(
-        argstr='%.8f',
-        mandatory=True,
-        position=6,
-        xor=['operand_file2'],
-        desc=desc)
+        argstr="%.8f", mandatory=True, position=6, xor=["operand_file2"], desc=desc
+    )


 class TupleMaths(MathsCommand):
-    """Interface for executable seg_maths from NiftySeg platform.
-
-    Interface to use any tuple mathematical operations that can be performed
-
-    with the seg_maths command-line program.
-
-    See below for those operations::
-
-    lncc Local CC between current img and on a kernel \
-with
+    """Mathematical operations on tuples.

-    lssd Local SSD between current img and on a kernel \
-with
-
-    lltsnorm Linear LTS normalisation assuming \
-percent outliers
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

     Examples
     --------
@@ -588,37 +549,34 @@ class TupleMaths(MathsCommand):
     >>> tuple_lltsnorm.inputs.operand_file1 = 'im2.nii'
     >>> tuple_lltsnorm.inputs.operand_value2 = 0.01
     >>> tuple_lltsnorm.cmdline
-    'seg_maths im1.nii -lltsnorm im2.nii 0.01000000 -odt float \
-im1_lltsnorm.nii'
+    'seg_maths im1.nii -lltsnorm im2.nii 0.01000000 -odt float im1_lltsnorm.nii'
     >>> tuple_lltsnorm.run()  # doctest: +SKIP
+
     """
+
     input_spec = TupleMathsInput


 class MergeInput(MathsInput):
     """Input Spec for seg_maths merge operation."""

-    dimension = traits.Int(
-        mandatory=True, desc='Dimension to merge the images.')
-    desc = 'List of images to merge to the working image .'
+    dimension = traits.Int(mandatory=True, desc="Dimension to merge the images.")
     merge_files = traits.List(
-        File(exists=True), argstr='%s', mandatory=True, position=4, desc=desc)
+        File(exists=True),
+        argstr="%s",
+        mandatory=True,
+        position=4,
+        desc="List of images to merge to the working image .",
+    )


 class Merge(MathsCommand):
-    """Interface for executable seg_maths from NiftySeg platform.
+    """Merge image files.
-    Interface to use the merge operation that can be performed
-
-    with the seg_maths command-line program.
-
-    See below for this option::
-
-    merge  Merge images and the working image in the \
- dimension
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

     Examples
     --------
@@ -633,13 +591,13 @@ class Merge(MathsCommand):
     'seg_maths im1.nii -merge 2 2 im2.nii im3.nii -odt float im1_merged.nii'

     """
+
     input_spec = MergeInput
-    _suffix = '_merged'
+    _suffix = "_merged"

     def _format_arg(self, opt, spec, val):
         """Convert input to appropriate format for seg_maths."""
-        if opt == 'merge_files':
-            return "-merge %d %d %s" % (len(val), self.inputs.dimension,
-                                        ' '.join(val))
+        if opt == "merge_files":
+            return "-merge %d %d %s" % (len(val), self.inputs.dimension, " ".join(val))

-        return super(Merge, self)._format_arg(opt, spec, val)
+        return super()._format_arg(opt, spec, val)
diff --git a/nipype/interfaces/niftyseg/patchmatch.py b/nipype/interfaces/niftyseg/patchmatch.py
index 5732b1ba17..9dd7ddff5e 100644
--- a/nipype/interfaces/niftyseg/patchmatch.py
+++ b/nipype/interfaces/niftyseg/patchmatch.py
@@ -12,61 +12,65 @@
 from ..niftyreg.base import get_custom_path

 warn = warnings.warn
-warnings.filterwarnings('always', category=UserWarning)
+warnings.filterwarnings("always", category=UserWarning)


 class PatchMatchInputSpec(CommandLineInputSpec):
     """Input Spec for PatchMatch."""
+
     # Mandatory input arguments
     in_file = File(
-        argstr='-i %s',
+        argstr="-i %s",
         exists=True,
         mandatory=True,
-        desc='Input image to segment',
-        position=1)
+        desc="Input image to segment",
+        position=1,
+    )

     mask_file = File(
-        argstr='-m %s',
+        argstr="-m %s",
         exists=True,
         mandatory=True,
-        desc='Input mask for the area where applies PatchMatch',
-        position=2)
+        desc="Input mask for the area where PatchMatch applies",
+        position=2,
+    )

     database_file = File(
-        argstr='-db %s',
+        argstr="-db %s",
         exists=True,
         mandatory=True,
-        desc='Database with the segmentations',
-        position=3)
+        desc="Database with the segmentations",
+        position=3,
+    )

     # Output file name
     out_file = File(
-        name_source=['in_file'],
-        name_template='%s_pm.nii.gz',
-        desc='The output filename of the patchmatch results',
-        argstr='-o %s',
-        position=4)
+        name_source=["in_file"],
+        name_template="%s_pm.nii.gz",
+        desc="The output filename of the patchmatch results",
+        argstr="-o %s",
+        position=4,
+    )

     # Optional arguments
-    patch_size = traits.Int(desc="Patch size, #voxels", argstr='-size %i')
+    patch_size = traits.Int(desc="Patch size, #voxels", argstr="-size %i")

     desc = "Constrained search area size, number of times bigger than the \
 patchsize"
-    cs_size = traits.Int(desc=desc, argstr='-cs %i')
+    cs_size = traits.Int(desc=desc, argstr="-cs %i")

-    match_num = traits.Int(
-        desc="Number of better matching", argstr='-match %i')
+    match_num = traits.Int(desc="Number of better matching", argstr="-match %i")

-    pm_num = traits.Int(
-        desc="Number of patchmatch executions", argstr='-pm %i')
+    pm_num = traits.Int(desc="Number of patchmatch executions", argstr="-pm %i")

     desc = "Number of iterations for the patchmatch algorithm"
-    it_num = traits.Int(desc=desc, argstr='-it %i')
+    it_num = traits.Int(desc=desc, argstr="-it %i")


 class PatchMatchOutputSpec(TraitedSpec):
     """OutputSpec for PatchMatch."""
+
     out_file = File(desc="Output segmentation")

@@ -99,7 +103,8 @@ class PatchMatch(NiftySegCommand):
     'seg_PatchMatch -i im1.nii -m im2.nii -db db.xml -o im1_pm.nii.gz'

     """
-    _cmd = get_custom_path('seg_PatchMatch', env_dir='NIFTYSEGDIR')
+
+    _cmd = get_custom_path("seg_PatchMatch", env_dir="NIFTYSEGDIR")
     input_spec = PatchMatchInputSpec
     output_spec = PatchMatchOutputSpec
-    _suffix = '_pm'
+    _suffix = "_pm"
diff --git a/nipype/interfaces/niftyseg/stats.py b/nipype/interfaces/niftyseg/stats.py
index 796e07410c..8db7764fce 100644
--- a/nipype/interfaces/niftyseg/stats.py
+++ b/nipype/interfaces/niftyseg/stats.py
@@ -4,7 +4,6 @@
 The stats module provides higher-level interfaces to some of the operations
 that can be performed with the niftyseg stats (seg_stats) command-line program.
 """
-from __future__ import print_function
 import numpy as np

 from ..base import TraitedSpec, File, traits, CommandLineInputSpec
@@ -14,27 +13,27 @@
 class StatsInput(CommandLineInputSpec):
     """Input Spec for seg_stats interfaces."""
+
     in_file = File(
-        position=2,
-        argstr='%s',
-        exists=True,
-        mandatory=True,
-        desc='image to operate on')
+        position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on"
+    )

     # Constrains
     mask_file = File(
         exists=True,
         position=-2,
-        argstr='-m %s',
-        desc='statistics within the masked area')
+        argstr="-m %s",
+        desc="statistics within the masked area",
+    )

-    desc = 'Only estimate statistics if voxel is larger than '
-    larger_voxel = traits.Float(argstr='-t %f', position=-3, desc=desc)
+    desc = "Only estimate statistics if voxel is larger than <float>"
+    larger_voxel = traits.Float(argstr="-t %f", position=-3, desc=desc)


 class StatsOutput(TraitedSpec):
     """Output Spec for seg_stats interfaces."""
-    output = traits.Array(desc='Output array from seg_stats')
+
+    output = traits.Array(desc="Output array from seg_stats")


 class StatsCommand(NiftySegCommand):
@@ -51,14 +50,15 @@ class StatsCommand(NiftySegCommand):
     robust to the presence of NaNs, and can be constrained by a mask and/or
     thresholded at a certain level.
     """
-    _cmd = get_custom_path('seg_stats', env_dir='NIFTYSEGDIR')
+
+    _cmd = get_custom_path("seg_stats", env_dir="NIFTYSEGDIR")
     input_spec = StatsInput
     output_spec = StatsOutput

     def _parse_stdout(self, stdout):
         out = []
         for string_line in stdout.split("\n"):
-            if string_line.startswith('#'):
+            if string_line.startswith("#"):
                 continue
             if len(string_line) <= 1:
                 continue
@@ -67,91 +67,74 @@ def _parse_stdout(self, stdout):
         return np.array(out).squeeze()

     def _run_interface(self, runtime):
-        new_runtime = super(StatsCommand, self)._run_interface(runtime)
+        new_runtime = super()._run_interface(runtime)
         self.output = self._parse_stdout(new_runtime.stdout)
         return new_runtime

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['output'] = self.output
+        outputs["output"] = self.output
         return outputs


 class UnaryStatsInput(StatsInput):
     """Input Spec for seg_stats unary operations."""
+
     operation = traits.Enum(
-        'r',
-        'R',
-        'a',
-        's',
-        'v',
-        'vl',
-        'vp',
-        'n',
-        'np',
-        'e',
-        'ne',
-        'x',
-        'X',
-        'c',
-        'B',
-        'xvox',
-        'xdim',
-        argstr='-%s',
+        "r",
+        "R",
+        "a",
+        "s",
+        "v",
+        "vl",
+        "vp",
+        "n",
+        "np",
+        "e",
+        "ne",
+        "x",
+        "X",
+        "c",
+        "B",
+        "xvox",
+        "xdim",
+        argstr="-%s",
         position=4,
         mandatory=True,
-        desc='operation to perform')
+        desc="""\
+Operation to perform:
+
+    * r - The range <min max> of all voxels.
+    * R - The robust range (assuming 2% outliers on both sides) of all voxels
+    * a - Average of all voxels
+    * s - Standard deviation of all voxels
+    * v - Volume of all voxels above 0 (<# voxels> * <volume per voxel>)
+    * vl - Volume of each integer label (<# voxels per label> x <volume per voxel>)
+    * vp - Volume of all probabilistic voxels (sum(<in>) x <volume per voxel>)
+    * n - Count of all voxels above 0 (<# voxels>)
+    * np - Sum of all fuzzy voxels (sum(<in>))
+    * e - Entropy of all voxels
+    * ne - Normalized entropy of all voxels
+    * x - Location (i j k x y z) of the smallest value in the image
+    * X - Location (i j k x y z) of the largest value in the image
+    * c - Location (i j k x y z) of the centre of mass of the object
+    * B - Bounding box of all nonzero voxels [ xmin xsize ymin ysize zmin zsize ]
+    * xvox - Output the number of voxels in the x direction.
+      Replace x with y/z for other directions.
+    * xdim - Output the voxel dimension in the x direction.
+      Replace x with y/z for other directions.
+
+""",
+    )


 class UnaryStats(StatsCommand):
-    """
-    Interface for executable seg_stats from NiftySeg platform.
-
-    Interface to use any unary statistical operations that can be performed
-
-    with the seg_stats command-line program.
-
-    See below for those operations::
-
-    r - The range of all voxels.
-
-    R - The robust range (assuming 2% outliers on both sides) of all voxels
-
-    a - Average of all voxels
-
-    s - Standard deviation of all voxels
-
-    v - Volume of all voxels above 0 (<# voxels> * )
-
-    vl - Volume of each integer label (<# voxels per label> * \
-)
-
-    vp - Volume of all probabilsitic voxels (sum() * )
-
-    n - Count of all voxels above 0 (<# voxels>)
-
-    np - Sum of all fuzzy voxels (sum())
-
-    e - Entropy of all voxels
-
-    ne - Normalized entropy of all voxels
-
-    x - Location (i j k x y z) of the smallest value in the image
-
-    X - Location (i j k x y z) of the largest value in the image
-
-    c - Location (i j k x y z) of the centre of mass of the object
-
-    B - Bounding box of all nonzero voxels [ xmin xsize ymin ysize zmin zsize ]
+    """Unary statistical operations.

-    xvox - Output the number of voxels in the x direction. Replace x with \
-y/z for other directions.
-
-    xdim - Output the voxel dimention in the x direction. Replace x with \
-y/z for other directions.
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__

     Examples
     --------
@@ -159,18 +142,21 @@ class UnaryStats(StatsCommand):
     >>> from nipype.interfaces import niftyseg
     >>> unary = niftyseg.UnaryStats()
     >>> unary.inputs.in_file = 'im1.nii'
+    >>> # Test v operation
     >>> unary_v = copy.deepcopy(unary)
     >>> unary_v.inputs.operation = 'v'
     >>> unary_v.cmdline
     'seg_stats im1.nii -v'
     >>> unary_v.run()  # doctest: +SKIP
+    >>> # Test vl operation
     >>> unary_vl = copy.deepcopy(unary)
     >>> unary_vl.inputs.operation = 'vl'
     >>> unary_vl.cmdline
     'seg_stats im1.nii -vl'
     >>> unary_vl.run()  # doctest: +SKIP
+    >>> # Test x operation
     >>> unary_x = copy.deepcopy(unary)
     >>> unary_x.inputs.operation = 'x'
@@ -179,26 +165,43 @@ class UnaryStats(StatsCommand):
     >>> unary_x.run()  # doctest: +SKIP

     """
+
     input_spec = UnaryStatsInput


 class BinaryStatsInput(StatsInput):
     """Input Spec for seg_stats Binary operations."""
+
     operation = traits.Enum(
-        'p',
-        'sa',
-        'ss',
-        'svp',
-        'al',
-        'd',
-        'ncc',
-        'nmi',
-        'Vl',
-        'Nl',
+        "p",
+        "sa",
+        "ss",
+        "svp",
+        "al",
+        "d",
+        "ncc",
+        "nmi",
+        "Vl",
+        "Nl",
         mandatory=True,
-        argstr='-%s',
+        argstr="-%s",
         position=4,
-        desc='operation to perform')
+        desc="""\
+Operation to perform:
+
+    * p - <float> - The <float>th percentile of all voxels intensity (float=[0,100])
+    * sa - <ax> - Average of all voxels
+    * ss - <ax> - Standard deviation of all voxels
+    * svp - <file> - Volume of all probabilistic voxels (sum(<in>) x <volume per voxel>)
+    * al - <in2> - Average value in <in> for each label in <in2>
+    * d - <in2> - Calculate the Dice score between all classes in <in> and <in2>
+    * ncc - <in2> - Normalized cross correlation between <in> and <in2>
+    * nmi - <in2> - Normalized Mutual Information between <in> and <in2>
+    * Vl - <csv> - Volume of each integer label <in>. Save to <csv> file.
+    * Nl - <csv> - Count of each label <in>. Save to <csv> file.
+
+""",
+    )

     operand_file = File(
         exists=True,
@@ -206,51 +209,25 @@ class BinaryStatsInput(StatsInput):
         mandatory=True,
         position=5,
         xor=["operand_value"],
-        desc="second image to perform operation with")
+        desc="second image to perform operation with",
+    )

     operand_value = traits.Float(
-        argstr='%.8f',
+        argstr="%.8f",
         mandatory=True,
         position=5,
         xor=["operand_file"],
-        desc='value to perform operation with')
+        desc="value to perform operation with",
+    )


 class BinaryStats(StatsCommand):
-    """
-    Interface for executable seg_stats from NiftySeg platform.
-
-    Interface to use any binary statistical operations that can be performed
-
-    with the seg_stats command-line program.
-
-    See below for those operations::
+    """Binary statistical operations.

-    p - - The th percentile of all voxels intensity \
-(float=[0,100])
-
-    sa - - Average of all voxels
-
-    ss - - Standard deviation of all voxels
-
-    svp - - Volume of all probabilsitic voxels (sum() * \
-)
-
-    al - - Average value in for each label in
-
-    d - - Calculate the Dice score between all classes in \
-and
-
-    ncc - - Normalized cross correlation between and
-
-    nmi - - Normalized Mutual Information between and
-
-    Vl - - Volume of each integer label . Save to file.
-
-    Nl - - Count of each label . Save to file.
- - `Source code `_ | - `Documentation `_ + See Also + -------- + `Source code `__ -- + `Documentation `__ Examples -------- @@ -281,4 +258,5 @@ class BinaryStats(StatsCommand): >>> binary_nl.run() # doctest: +SKIP """ + input_spec = BinaryStatsInput diff --git a/nipype/interfaces/niftyseg/tests/test_auto_BinaryMaths.py b/nipype/interfaces/niftyseg/tests/test_auto_BinaryMaths.py index cc7bc896c6..4c0a962a21 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_BinaryMaths.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_BinaryMaths.py @@ -1,51 +1,55 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import BinaryMaths def test_BinaryMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), operand_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=5, - xor=['operand_value', 'operand_str'], + xor=["operand_value", "operand_str"], ), operand_str=dict( - argstr='%s', + argstr="%s", mandatory=True, position=5, - xor=['operand_value', 'operand_file'], + xor=["operand_value", "operand_file"], ), operand_value=dict( - argstr='%.8f', + argstr="%.8f", mandatory=True, position=5, - xor=['operand_file', 'operand_str'], + xor=["operand_file", "operand_str"], ), operation=dict( - argstr='-%s', + argstr="-%s", mandatory=True, position=4, ), out_file=dict( - argstr='%s', - name_source=['in_file'], - name_template='%s', + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s", position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-3, ), ) @@ -54,8 +58,14 @@ def test_BinaryMaths_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinaryMaths_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = BinaryMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_BinaryMathsInteger.py b/nipype/interfaces/niftyseg/tests/test_auto_BinaryMathsInteger.py index 464ed6f6c4..440cb92bbc 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_BinaryMathsInteger.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_BinaryMathsInteger.py @@ -1,38 +1,41 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import BinaryMathsInteger def test_BinaryMathsInteger_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), operand_value=dict( - argstr='%d', + argstr="%d", mandatory=True, position=5, ), operation=dict( - argstr='-%s', + argstr="-%s", mandatory=True, position=4, ), out_file=dict( - argstr='%s', - name_source=['in_file'], - name_template='%s', + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s", position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-3, ), ) @@ -41,8 +44,14 @@ def test_BinaryMathsInteger_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == 
value + + def test_BinaryMathsInteger_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = BinaryMathsInteger.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_BinaryStats.py b/nipype/interfaces/niftyseg/tests/test_auto_BinaryStats.py index 0771d72f5c..5a5ac7298b 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_BinaryStats.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_BinaryStats.py @@ -1,42 +1,46 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..stats import BinaryStats def test_BinaryStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), larger_voxel=dict( - argstr='-t %f', + argstr="-t %f", position=-3, ), mask_file=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, position=-2, ), operand_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=5, - xor=['operand_value'], + xor=["operand_value"], ), operand_value=dict( - argstr='%.8f', + argstr="%.8f", mandatory=True, position=5, - xor=['operand_file'], + xor=["operand_file"], ), operation=dict( - argstr='-%s', + argstr="-%s", mandatory=True, position=4, ), @@ -46,8 +50,12 @@ def test_BinaryStats_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinaryStats_outputs(): - output_map = dict(output=dict(), ) + output_map = dict( + output=dict(), + ) outputs = BinaryStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_CalcTopNCC.py b/nipype/interfaces/niftyseg/tests/test_auto_CalcTopNCC.py index dceea837c0..e0943be61e 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_CalcTopNCC.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_CalcTopNCC.py @@ -1,33 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..label_fusion import CalcTopNCC def test_CalcTopNCC_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-target %s', + argstr="-target %s", + extensions=None, mandatory=True, position=1, ), in_templates=dict( - argstr='%s', + argstr="%s", mandatory=True, position=3, ), - mask_file=dict(argstr='-mask %s', ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), num_templates=dict( - argstr='-templates %s', + argstr="-templates %s", mandatory=True, position=2, ), top_templates=dict( - argstr='-n %s', + argstr="-n %s", mandatory=True, position=4, ), @@ -37,8 +42,12 @@ def test_CalcTopNCC_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CalcTopNCC_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict( + out_files=dict(), + ) outputs = CalcTopNCC.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_EM.py b/nipype/interfaces/niftyseg/tests/test_auto_EM.py index fc270ed22f..a0394b174e 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_EM.py +++ 
b/nipype/interfaces/niftyseg/tests/test_auto_EM.py @@ -1,17 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..em import EM def test_EM_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), bc_order_val=dict( - argstr='-bc_order %s', + argstr="-bc_order %s", usedefault=True, ), bc_thresh_val=dict( - argstr='-bc_thresh %s', + argstr="-bc_thresh %s", usedefault=True, ), environ=dict( @@ -19,64 +20,88 @@ def test_EM_inputs(): usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, position=4, ), - mask_file=dict(argstr='-mask %s', ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), max_iter=dict( - argstr='-max_iter %s', + argstr="-max_iter %s", usedefault=True, ), min_iter=dict( - argstr='-min_iter %s', + argstr="-min_iter %s", usedefault=True, ), - mrf_beta_val=dict(argstr='-mrf_beta %s', ), + mrf_beta_val=dict( + argstr="-mrf_beta %s", + ), no_prior=dict( - argstr='-nopriors %s', + argstr="-nopriors %s", mandatory=True, - xor=['prior_4D', 'priors'], + xor=["prior_4D", "priors"], ), out_bc_file=dict( - argstr='-bc_out %s', - name_source=['in_file'], - name_template='%s_bc_em.nii.gz', + argstr="-bc_out %s", + extensions=None, + name_source=["in_file"], + name_template="%s_bc_em.nii.gz", ), out_file=dict( - argstr='-out %s', - name_source=['in_file'], - name_template='%s_em.nii.gz', + argstr="-out %s", + extensions=None, + name_source=["in_file"], + name_template="%s_em.nii.gz", ), out_outlier_file=dict( - argstr='-out_outlier %s', - name_source=['in_file'], - name_template='%s_outlier_em.nii.gz', + argstr="-out_outlier %s", + extensions=None, + name_source=["in_file"], + name_template="%s_outlier_em.nii.gz", + ), + outlier_val=dict( + argstr="-outlier %s %s", ), - outlier_val=dict(argstr='-outlier %s %s', ), prior_4D=dict( - argstr='-prior4D %s', + argstr="-prior4D %s", + extensions=None, mandatory=True, - xor=['no_prior', 'priors'], + xor=["no_prior", "priors"], ), priors=dict( - argstr='%s', + argstr="%s", mandatory=True, - xor=['no_prior', 'prior_4D'], + xor=["no_prior", "prior_4D"], + ), + reg_val=dict( + argstr="-reg %s", + ), + relax_priors=dict( + argstr="-rf %s %s", ), - reg_val=dict(argstr='-reg %s', ), - relax_priors=dict(argstr='-rf %s %s', ), ) inputs = EM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EM_outputs(): output_map = dict( - out_bc_file=dict(), - out_file=dict(), - out_outlier_file=dict(), + out_bc_file=dict( + extensions=None, + ), + out_file=dict( + extensions=None, + ), + out_outlier_file=dict( + extensions=None, + ), ) outputs = EM.output_spec() diff --git a/nipype/interfaces/niftyseg/tests/test_auto_FillLesions.py b/nipype/interfaces/niftyseg/tests/test_auto_FillLesions.py index 7cbfe58c6c..9e1b06a892 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_FillLesions.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_FillLesions.py @@ -1,51 +1,86 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..lesions import FillLesions def test_FillLesions_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bin_mask=dict(argstr='-mask %s', ), - cwf=dict(argstr='-cwf %f', ), - debug=dict(argstr='-debug', ), + args=dict( + argstr="%s", + ), + bin_mask=dict( + argstr="-mask %s", + extensions=None, + ), + cwf=dict( + argstr="-cwf %f", + 
), + debug=dict( + argstr="-debug", + ), environ=dict( nohash=True, usedefault=True, ), - in_dilation=dict(argstr='-dil %d', ), + in_dilation=dict( + argstr="-dil %d", + ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, position=1, ), lesion_mask=dict( - argstr='-l %s', + argstr="-l %s", + extensions=None, mandatory=True, position=2, ), - match=dict(argstr='-match %f', ), - other=dict(argstr='-other', ), - out_datatype=dict(argstr='-odt %s', ), + match=dict( + argstr="-match %f", + ), + other=dict( + argstr="-other", + ), + out_datatype=dict( + argstr="-odt %s", + ), out_file=dict( - argstr='-o %s', - name_source=['in_file'], - name_template='%s_lesions_filled.nii.gz', + argstr="-o %s", + extensions=None, + name_source=["in_file"], + name_template="%s_lesions_filled.nii.gz", position=3, ), - search=dict(argstr='-search %f', ), - size=dict(argstr='-size %d', ), - smooth=dict(argstr='-smo %f', ), - use_2d=dict(argstr='-2D', ), - verbose=dict(argstr='-v', ), + search=dict( + argstr="-search %f", + ), + size=dict( + argstr="-size %d", + ), + smooth=dict( + argstr="-smo %f", + ), + use_2d=dict( + argstr="-2D", + ), + verbose=dict( + argstr="-v", + ), ) inputs = FillLesions.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FillLesions_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = FillLesions.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_LabelFusion.py b/nipype/interfaces/niftyseg/tests/test_auto_LabelFusion.py index eea345e1df..dc4590a15b 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_LabelFusion.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_LabelFusion.py @@ -1,59 +1,96 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..label_fusion import LabelFusion def test_LabelFusion_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), classifier_type=dict( - argstr='-%s', + argstr="-%s", mandatory=True, position=2, ), - conv=dict(argstr='-conv %f', ), + conv=dict( + argstr="-conv %f", + ), dilation_roi=dict(), environ=dict( nohash=True, usedefault=True, ), - file_to_seg=dict(mandatory=True, ), + file_to_seg=dict( + extensions=None, + mandatory=True, + ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, position=1, ), kernel_size=dict(), - mask_file=dict(argstr='-mask %s', ), - max_iter=dict(argstr='-max_iter %d', ), - mrf_value=dict(argstr='-MRF_beta %f', ), + mask_file=dict( + argstr="-mask %s", + extensions=None, + ), + max_iter=dict( + argstr="-max_iter %d", + ), + mrf_value=dict( + argstr="-MRF_beta %f", + ), out_file=dict( - argstr='-out %s', - name_source=['in_file'], - name_template='%s', - ), - prob_flag=dict(argstr='-outProb', ), - prob_update_flag=dict(argstr='-prop_update', ), - proportion=dict(argstr='-prop %s', ), - set_pq=dict(argstr='-setPQ %f %f', ), + argstr="-out %s", + extensions=None, + name_source=["in_file"], + name_template="%s", + ), + prob_flag=dict( + argstr="-outProb", + ), + prob_update_flag=dict( + argstr="-prop_update", + ), + proportion=dict( + argstr="-prop %s", + ), + set_pq=dict( + argstr="-setPQ %f %f", + ), sm_ranking=dict( - argstr='-%s', + argstr="-%s", position=3, usedefault=True, ), - template_file=dict(), + 
template_file=dict( + extensions=None, + ), template_num=dict(), - unc=dict(argstr='-unc', ), - unc_thresh=dict(argstr='-uncthres %f', ), - verbose=dict(argstr='-v %s', ), + unc=dict( + argstr="-unc", + ), + unc_thresh=dict( + argstr="-uncthres %f", + ), + verbose=dict( + argstr="-v %s", + ), ) inputs = LabelFusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LabelFusion_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = LabelFusion.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_MathsCommand.py b/nipype/interfaces/niftyseg/tests/test_auto_MathsCommand.py index 247dc9773e..963ddf96f8 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_MathsCommand.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_MathsCommand.py @@ -1,28 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import MathsCommand def test_MathsCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), out_file=dict( - argstr='%s', - name_source=['in_file'], - name_template='%s', + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s", position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-3, ), ) @@ -31,8 +34,14 @@ def test_MathsCommand_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MathsCommand_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MathsCommand.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_Merge.py b/nipype/interfaces/niftyseg/tests/test_auto_Merge.py index c53e4edf40..de8dc903e6 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_Merge.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_Merge.py @@ -1,34 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import Merge def test_Merge_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict(mandatory=True, ), + args=dict( + argstr="%s", + ), + dimension=dict( + mandatory=True, + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), merge_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=4, ), out_file=dict( - argstr='%s', - name_source=['in_file'], - name_template='%s', + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s", position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-3, ), ) @@ -37,8 +42,14 @@ def test_Merge_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Merge_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Merge.output_spec() for key, metadata in list(output_map.items()): 
diff --git a/nipype/interfaces/niftyseg/tests/test_auto_NiftySegCommand.py b/nipype/interfaces/niftyseg/tests/test_auto_NiftySegCommand.py index d840a40b2a..37a6ee059c 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_NiftySegCommand.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_NiftySegCommand.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import NiftySegCommand def test_NiftySegCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/niftyseg/tests/test_auto_PatchMatch.py b/nipype/interfaces/niftyseg/tests/test_auto_PatchMatch.py index bfeded6eef..c5b9dba115 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_PatchMatch.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_PatchMatch.py @@ -1,14 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..patchmatch import PatchMatch def test_PatchMatch_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cs_size=dict(argstr='-cs %i', ), + args=dict( + argstr="%s", + ), + cs_size=dict( + argstr="-cs %i", + ), database_file=dict( - argstr='-db %s', + argstr="-db %s", + extensions=None, mandatory=True, position=3, ), @@ -17,33 +21,50 @@ def test_PatchMatch_inputs(): usedefault=True, ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, position=1, ), - it_num=dict(argstr='-it %i', ), + it_num=dict( + argstr="-it %i", + ), mask_file=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, mandatory=True, position=2, ), - match_num=dict(argstr='-match %i', ), + match_num=dict( + argstr="-match %i", + ), out_file=dict( - argstr='-o %s', - name_source=['in_file'], - name_template='%s_pm.nii.gz', + argstr="-o %s", + extensions=None, + name_source=["in_file"], + name_template="%s_pm.nii.gz", position=4, ), - patch_size=dict(argstr='-size %i', ), - pm_num=dict(argstr='-pm %i', ), + patch_size=dict( + argstr="-size %i", + ), + pm_num=dict( + argstr="-pm %i", + ), ) inputs = PatchMatch.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PatchMatch_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = PatchMatch.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_StatsCommand.py b/nipype/interfaces/niftyseg/tests/test_auto_StatsCommand.py index e845e18952..6b173663a9 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_StatsCommand.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_StatsCommand.py @@ -1,26 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..stats import StatsCommand def test_StatsCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), larger_voxel=dict( - argstr='-t %f', + argstr="-t %f", position=-3, ), mask_file=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, position=-2, ), ) @@ -29,8 +32,12 @@ def test_StatsCommand_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_StatsCommand_outputs(): - output_map = dict(output=dict(), ) + output_map = dict( + output=dict(), + ) outputs = StatsCommand.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_TupleMaths.py b/nipype/interfaces/niftyseg/tests/test_auto_TupleMaths.py index 7241264eee..ef1d4c401f 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_TupleMaths.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_TupleMaths.py @@ -1,57 +1,62 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import TupleMaths def test_TupleMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), operand_file1=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=5, - xor=['operand_value1'], + xor=["operand_value1"], ), operand_file2=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=6, - xor=['operand_value2'], + xor=["operand_value2"], ), operand_value1=dict( - argstr='%.8f', + argstr="%.8f", mandatory=True, position=5, - xor=['operand_file1'], + xor=["operand_file1"], ), operand_value2=dict( - argstr='%.8f', + argstr="%.8f", mandatory=True, position=6, - xor=['operand_file2'], + xor=["operand_file2"], ), operation=dict( - argstr='-%s', + argstr="-%s", mandatory=True, position=4, ), out_file=dict( - argstr='%s', - name_source=['in_file'], - name_template='%s', + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s", position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-3, ), ) @@ -60,8 +65,14 @@ def test_TupleMaths_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TupleMaths_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = TupleMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_UnaryMaths.py b/nipype/interfaces/niftyseg/tests/test_auto_UnaryMaths.py index 9b1cd7d194..f8189f0f84 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_UnaryMaths.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_UnaryMaths.py @@ -1,33 +1,36 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maths import UnaryMaths def test_UnaryMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), operation=dict( - argstr='-%s', + argstr="-%s", mandatory=True, position=4, ), out_file=dict( - argstr='%s', - name_source=['in_file'], - name_template='%s', + argstr="%s", + extensions=None, + name_source=["in_file"], + name_template="%s", position=-2, ), output_datatype=dict( - argstr='-odt %s', + argstr="-odt %s", position=-3, ), ) @@ -36,8 +39,14 @@ def test_UnaryMaths_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_UnaryMaths_outputs(): - output_map = dict(out_file=dict(), 
) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = UnaryMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_UnaryStats.py b/nipype/interfaces/niftyseg/tests/test_auto_UnaryStats.py index 34edf1a190..117ab819b6 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_UnaryStats.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_UnaryStats.py @@ -1,30 +1,33 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..stats import UnaryStats def test_UnaryStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), larger_voxel=dict( - argstr='-t %f', + argstr="-t %f", position=-3, ), mask_file=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, position=-2, ), operation=dict( - argstr='-%s', + argstr="-%s", mandatory=True, position=4, ), @@ -34,8 +37,12 @@ def test_UnaryStats_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_UnaryStats_outputs(): - output_map = dict(output=dict(), ) + output_map = dict( + output=dict(), + ) outputs = UnaryStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_em_interfaces.py b/nipype/interfaces/niftyseg/tests/test_em_interfaces.py index f4c56da2fe..c90d93a6ba 100644 --- a/nipype/interfaces/niftyseg/tests/test_em_interfaces.py +++ b/nipype/interfaces/niftyseg/tests/test_em_interfaces.py @@ -9,15 +9,13 @@ from .. import EM -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_EM'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_EM"), reason="niftyseg is not installed") def test_seg_em(): - # Create a node object seg_em = EM() # Check if the command is properly defined - cmd = get_custom_path('seg_EM', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_EM", env_dir="NIFTYSEGDIR") assert seg_em.cmd == cmd # test raising error with mandatory args absent @@ -25,19 +23,19 @@ def test_seg_em(): seg_em.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") seg_em.inputs.in_file = in_file seg_em.inputs.no_prior = 4 - cmd_tmp = '{cmd} -in {in_file} -nopriors 4 -bc_out {bc_out} -out \ -{out_file} -out_outlier {out_outlier}' + cmd_tmp = "{cmd} -in {in_file} -nopriors 4 -bc_out {bc_out} -out \ +{out_file} -out_outlier {out_outlier}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, - out_file='im1_em.nii.gz', - bc_out='im1_bc_em.nii.gz', - out_outlier='im1_outlier_em.nii.gz', + out_file="im1_em.nii.gz", + bc_out="im1_bc_em.nii.gz", + out_outlier="im1_outlier_em.nii.gz", ) assert seg_em.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py b/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py index 9fe82ac544..53d5bd4170 100644 --- a/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py +++ b/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py @@ -10,14 +10,14 @@ @pytest.mark.skipif( - no_nifty_tool(cmd='seg_PatchMatch'), reason="niftyseg is not installed") + no_nifty_tool(cmd="seg_PatchMatch"), reason="niftyseg is not installed" +) def test_seg_patchmatch(): - # Create a node object seg_patchmatch = PatchMatch() # Check if the command 
is properly defined - cmd = get_custom_path('seg_PatchMatch', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_PatchMatch", env_dir="NIFTYSEGDIR") assert seg_patchmatch.cmd == cmd # test raising error with mandatory args absent @@ -25,20 +25,20 @@ def test_seg_patchmatch(): seg_patchmatch.run() # Assign some input data - in_file = example_data('im1.nii') - mask_file = example_data('im2.nii') - db_file = example_data('db.xml') + in_file = example_data("im1.nii") + mask_file = example_data("im2.nii") + db_file = example_data("db.xml") seg_patchmatch.inputs.in_file = in_file seg_patchmatch.inputs.mask_file = mask_file seg_patchmatch.inputs.database_file = db_file - cmd_tmp = '{cmd} -i {in_file} -m {mask_file} -db {db} -o {out_file}' + cmd_tmp = "{cmd} -i {in_file} -m {mask_file} -db {db} -o {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, mask_file=mask_file, db=db_file, - out_file='im1_pm.nii.gz', + out_file="im1_pm.nii.gz", ) assert seg_patchmatch.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyseg/tests/test_label_fusion.py b/nipype/interfaces/niftyseg/tests/test_label_fusion.py index fdc186d6c2..824b98c230 100644 --- a/nipype/interfaces/niftyseg/tests/test_label_fusion.py +++ b/nipype/interfaces/niftyseg/tests/test_label_fusion.py @@ -10,14 +10,15 @@ @pytest.mark.skipif( - no_nifty_tool(cmd='seg_LabFusion'), reason="niftyseg is not installed") + no_nifty_tool(cmd="seg_LabFusion"), reason="niftyseg is not installed" +) def test_seg_lab_fusion(): - """ Test interfaces for seg_labfusion""" + """Test interfaces for seg_labfusion""" # Create a node object steps = LabelFusion() # Check if the command is properly defined - cmd = get_custom_path('seg_LabFusion', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_LabFusion", env_dir="NIFTYSEGDIR") assert steps.cmd == cmd # test raising error with mandatory args absent @@ -25,86 +26,84 @@ def test_seg_lab_fusion(): steps.run() # Assign some input data - in_file = example_data('im1.nii') - file_to_seg = example_data('im2.nii') - template_file = example_data('im3.nii') + in_file = example_data("im1.nii") + file_to_seg = example_data("im2.nii") + template_file = example_data("im3.nii") steps.inputs.in_file = in_file steps.inputs.kernel_size = 2.0 steps.inputs.file_to_seg = file_to_seg steps.inputs.template_file = template_file steps.inputs.template_num = 2 - steps.inputs.classifier_type = 'STEPS' + steps.inputs.classifier_type = "STEPS" - cmd_tmp = '{cmd} -in {in_file} -STEPS 2.000000 2 {file_to_seg} \ -{template_file} -out {out_file}' + cmd_tmp = "{cmd} -in {in_file} -STEPS 2.000000 2 {file_to_seg} \ +{template_file} -out {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, file_to_seg=file_to_seg, template_file=template_file, - out_file='im1_steps.nii', + out_file="im1_steps.nii", ) assert steps.cmdline == expected_cmd # Staple - staple = LabelFusion( - kernel_size=2.0, template_num=2, classifier_type='STAPLE') - in_file = example_data('im1.nii') - file_to_seg = example_data('im2.nii') - template_file = example_data('im3.nii') + staple = LabelFusion(kernel_size=2.0, template_num=2, classifier_type="STAPLE") + in_file = example_data("im1.nii") + file_to_seg = example_data("im2.nii") + template_file = example_data("im3.nii") staple.inputs.in_file = in_file staple.inputs.file_to_seg = file_to_seg staple.inputs.template_file = template_file - cmd_tmp = '{cmd} -in {in_file} -STAPLE -ALL -out {out_file}' + cmd_tmp = "{cmd} -in {in_file} -STAPLE -ALL -out {out_file}" expected_cmd = cmd_tmp.format( 
         cmd=cmd,
         in_file=in_file,
         file_to_seg=file_to_seg,
         template_file=template_file,
-        out_file='im1_staple.nii',
+        out_file="im1_staple.nii",
     )

     assert staple.cmdline == expected_cmd

     # Assign some input data
     mv_node = LabelFusion(
-        template_num=2,
-        classifier_type='MV',
-        sm_ranking='ROINCC',
-        dilation_roi=2)
-    in_file = example_data('im1.nii')
-    file_to_seg = example_data('im2.nii')
-    template_file = example_data('im3.nii')
+        template_num=2, classifier_type="MV", sm_ranking="ROINCC", dilation_roi=2
+    )
+    in_file = example_data("im1.nii")
+    file_to_seg = example_data("im2.nii")
+    template_file = example_data("im3.nii")
     mv_node.inputs.in_file = in_file
     mv_node.inputs.file_to_seg = file_to_seg
     mv_node.inputs.template_file = template_file

-    cmd_tmp = '{cmd} -in {in_file} -MV -ROINCC 2 2 {file_to_seg} \
-{template_file} -out {out_file}'
+    cmd_tmp = "{cmd} -in {in_file} -MV -ROINCC 2 2 {file_to_seg} \
+{template_file} -out {out_file}"
     expected_cmd = cmd_tmp.format(
         cmd=cmd,
         in_file=in_file,
         file_to_seg=file_to_seg,
         template_file=template_file,
-        out_file='im1_mv.nii',
+        out_file="im1_mv.nii",
     )

     assert mv_node.cmdline == expected_cmd


 @pytest.mark.skipif(
-    no_nifty_tool(cmd='seg_CalcTopNCC'), reason="niftyseg is not installed")
+    no_nifty_tool(cmd="seg_CalcTopNCC"), reason="niftyseg is not installed"
+)
 def test_seg_calctopncc():
-    """ Test interfaces for seg_CalctoNCC"""
+    """Test interfaces for seg_CalcTopNCC"""
     # Create a node object
     calctopncc = CalcTopNCC()

     # Check if the command is properly defined
-    cmd = get_custom_path('seg_CalcTopNCC', env_dir='NIFTYSEGDIR')
+    cmd = get_custom_path("seg_CalcTopNCC", env_dir="NIFTYSEGDIR")
     assert calctopncc.cmd == cmd

     # test raising error with mandatory args absent
@@ -112,20 +111,15 @@ def test_seg_calctopncc():
         calctopncc.run()

     # Assign some input data
-    in_file = example_data('im1.nii')
-    file1 = example_data('im2.nii')
-    file2 = example_data('im3.nii')
+    in_file = example_data("im1.nii")
+    file1 = example_data("im2.nii")
+    file2 = example_data("im3.nii")
     calctopncc.inputs.in_file = in_file
     calctopncc.inputs.num_templates = 2
     calctopncc.inputs.in_templates = [file1, file2]
     calctopncc.inputs.top_templates = 1

-    cmd_tmp = '{cmd} -target {in_file} -templates 2 {file1} {file2} -n 1'
-    expected_cmd = cmd_tmp.format(
-        cmd=cmd,
-        in_file=in_file,
-        file1=file1,
-        file2=file2,
-    )
+    cmd_tmp = "{cmd} -target {in_file} -templates 2 {file1} {file2} -n 1"
+    expected_cmd = cmd_tmp.format(cmd=cmd, in_file=in_file, file1=file1, file2=file2)

     assert calctopncc.cmdline == expected_cmd
diff --git a/nipype/interfaces/niftyseg/tests/test_lesions.py b/nipype/interfaces/niftyseg/tests/test_lesions.py
index 24b7e82cd7..2daece08cb 100644
--- a/nipype/interfaces/niftyseg/tests/test_lesions.py
+++ b/nipype/interfaces/niftyseg/tests/test_lesions.py
@@ -10,14 +10,14 @@


 @pytest.mark.skipif(
-    no_nifty_tool(cmd='seg_FillLesions'), reason="niftyseg is not installed")
+    no_nifty_tool(cmd="seg_FillLesions"), reason="niftyseg is not installed"
+)
 def test_seg_filllesions():
-
     # Create a node object
     seg_fill = FillLesions()

     # Check if the command is properly defined
-    cmd = get_custom_path('seg_FillLesions', env_dir='NIFTYSEGDIR')
+    cmd = get_custom_path("seg_FillLesions", env_dir="NIFTYSEGDIR")
     assert seg_fill.cmd == cmd

     # test raising error with mandatory args absent
@@ -25,16 +25,16 @@ def test_seg_filllesions():
         seg_fill.run()

     # Assign some input data
-    in_file = example_data('im1.nii')
-    lesion_mask = example_data('im2.nii')
+    in_file = example_data("im1.nii")
+    lesion_mask = 
example_data("im2.nii") seg_fill.inputs.in_file = in_file seg_fill.inputs.lesion_mask = lesion_mask - expected_cmd = '{cmd} -i {in_file} -l {lesion_mask} -o {out_file}'.format( + expected_cmd = "{cmd} -i {in_file} -l {lesion_mask} -o {out_file}".format( cmd=cmd, in_file=in_file, lesion_mask=lesion_mask, - out_file='im1_lesions_filled.nii.gz', + out_file="im1_lesions_filled.nii.gz", ) assert seg_fill.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyseg/tests/test_maths.py b/nipype/interfaces/niftyseg/tests/test_maths.py index 0680a8a481..6c0251d7f5 100644 --- a/nipype/interfaces/niftyseg/tests/test_maths.py +++ b/nipype/interfaces/niftyseg/tests/test_maths.py @@ -6,18 +6,16 @@ from ....testing import example_data from ...niftyreg import get_custom_path from ...niftyreg.tests.test_regutils import no_nifty_tool -from .. import (UnaryMaths, BinaryMaths, BinaryMathsInteger, TupleMaths, Merge) +from .. import UnaryMaths, BinaryMaths, BinaryMathsInteger, TupleMaths, Merge -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_maths'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_unary_maths(): - # Create a node object unarym = UnaryMaths() # Check if the command is properly defined - cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert unarym.cmd == cmd # test raising error with mandatory args absent @@ -25,26 +23,25 @@ def test_unary_maths(): unarym.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") unarym.inputs.in_file = in_file - unarym.inputs.operation = 'otsu' - unarym.inputs.output_datatype = 'float' + unarym.inputs.operation = "otsu" + unarym.inputs.output_datatype = "float" - expected_cmd = '{cmd} {in_file} -otsu -odt float {out_file}'.format( - cmd=cmd, in_file=in_file, out_file='im1_otsu.nii') + expected_cmd = "{cmd} {in_file} -otsu -odt float {out_file}".format( + cmd=cmd, in_file=in_file, out_file="im1_otsu.nii" + ) assert unarym.cmdline == expected_cmd -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_maths'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_binary_maths(): - # Create a node object binarym = BinaryMaths() # Check if the command is properly defined - cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert binarym.cmd == cmd # test raising error with mandatory args absent @@ -52,28 +49,25 @@ def test_binary_maths(): binarym.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") binarym.inputs.in_file = in_file binarym.inputs.operand_value = 2.0 - binarym.inputs.operation = 'sub' - binarym.inputs.output_datatype = 'float' + binarym.inputs.operation = "sub" + binarym.inputs.output_datatype = "float" - cmd_tmp = '{cmd} {in_file} -sub 2.00000000 -odt float {out_file}' - expected_cmd = cmd_tmp.format( - cmd=cmd, in_file=in_file, out_file='im1_sub.nii') + cmd_tmp = "{cmd} {in_file} -sub 2.00000000 -odt float {out_file}" + expected_cmd = cmd_tmp.format(cmd=cmd, in_file=in_file, out_file="im1_sub.nii") assert binarym.cmdline == expected_cmd -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_maths'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_int_binary_maths(): - # Create a node 
object ibinarym = BinaryMathsInteger() # Check if the command is properly defined - cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert ibinarym.cmd == cmd # test raising error with mandatory args absent @@ -81,27 +75,26 @@ def test_int_binary_maths(): ibinarym.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") ibinarym.inputs.in_file = in_file ibinarym.inputs.operand_value = 2 - ibinarym.inputs.operation = 'dil' - ibinarym.inputs.output_datatype = 'float' + ibinarym.inputs.operation = "dil" + ibinarym.inputs.output_datatype = "float" - expected_cmd = '{cmd} {in_file} -dil 2 -odt float {out_file}'.format( - cmd=cmd, in_file=in_file, out_file='im1_dil.nii') + expected_cmd = "{cmd} {in_file} -dil 2 -odt float {out_file}".format( + cmd=cmd, in_file=in_file, out_file="im1_dil.nii" + ) assert ibinarym.cmdline == expected_cmd -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_maths'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_tuple_maths(): - # Create a node object tuplem = TupleMaths() # Check if the command is properly defined - cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert tuplem.cmd == cmd # test raising error with mandatory args absent @@ -109,30 +102,29 @@ def test_tuple_maths(): tuplem.run() # Assign some input data - in_file = example_data('im1.nii') - op_file = example_data('im2.nii') + in_file = example_data("im1.nii") + op_file = example_data("im2.nii") tuplem.inputs.in_file = in_file - tuplem.inputs.operation = 'lncc' + tuplem.inputs.operation = "lncc" tuplem.inputs.operand_file1 = op_file tuplem.inputs.operand_value2 = 2.0 - tuplem.inputs.output_datatype = 'float' + tuplem.inputs.output_datatype = "float" - cmd_tmp = '{cmd} {in_file} -lncc {op} 2.00000000 -odt float {out_file}' + cmd_tmp = "{cmd} {in_file} -lncc {op} 2.00000000 -odt float {out_file}" expected_cmd = cmd_tmp.format( - cmd=cmd, in_file=in_file, op=op_file, out_file='im1_lncc.nii') + cmd=cmd, in_file=in_file, op=op_file, out_file="im1_lncc.nii" + ) assert tuplem.cmdline == expected_cmd -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_maths'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_merge(): - # Create a node object merge = Merge() # Check if the command is properly defined - cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert merge.cmd == cmd # test raising error with mandatory args absent @@ -140,20 +132,17 @@ def test_merge(): merge.run() # Assign some input data - in_file = example_data('im1.nii') - file1 = example_data('im2.nii') - file2 = example_data('im3.nii') + in_file = example_data("im1.nii") + file1 = example_data("im2.nii") + file2 = example_data("im3.nii") merge.inputs.in_file = in_file merge.inputs.merge_files = [file1, file2] merge.inputs.dimension = 2 - merge.inputs.output_datatype = 'float' + merge.inputs.output_datatype = "float" - cmd_tmp = '{cmd} {in_file} -merge 2 2 {f1} {f2} -odt float {out_file}' + cmd_tmp = "{cmd} {in_file} -merge 2 2 {f1} {f2} -odt float {out_file}" expected_cmd = cmd_tmp.format( - cmd=cmd, - in_file=in_file, - f1=file1, - f2=file2, - out_file='im1_merged.nii') + cmd=cmd, in_file=in_file, f1=file1, f2=file2, 
out_file="im1_merged.nii" + ) assert merge.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyseg/tests/test_stats.py b/nipype/interfaces/niftyseg/tests/test_stats.py index 91c234e98d..b3bb9a3bb0 100644 --- a/nipype/interfaces/niftyseg/tests/test_stats.py +++ b/nipype/interfaces/niftyseg/tests/test_stats.py @@ -9,15 +9,14 @@ from .. import UnaryStats, BinaryStats -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_stats'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_stats"), reason="niftyseg is not installed") def test_unary_stats(): - """ Test for the seg_stats interfaces """ + """Test for the seg_stats interfaces""" # Create a node object unarys = UnaryStats() # Check if the command is properly defined - cmd = get_custom_path('seg_stats', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_stats", env_dir="NIFTYSEGDIR") assert unarys.cmd == cmd # test raising error with mandatory args absent @@ -25,24 +24,23 @@ def test_unary_stats(): unarys.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") unarys.inputs.in_file = in_file - unarys.inputs.operation = 'a' + unarys.inputs.operation = "a" - expected_cmd = '{cmd} {in_file} -a'.format(cmd=cmd, in_file=in_file) + expected_cmd = f"{cmd} {in_file} -a" assert unarys.cmdline == expected_cmd -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_stats'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_stats"), reason="niftyseg is not installed") def test_binary_stats(): - """ Test for the seg_stats interfaces """ + """Test for the seg_stats interfaces""" # Create a node object binarys = BinaryStats() # Check if the command is properly defined - cmd = get_custom_path('seg_stats', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_stats", env_dir="NIFTYSEGDIR") assert binarys.cmd == cmd # test raising error with mandatory args absent @@ -50,12 +48,11 @@ def test_binary_stats(): binarys.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") binarys.inputs.in_file = in_file binarys.inputs.operand_value = 2 - binarys.inputs.operation = 'sa' + binarys.inputs.operation = "sa" - expected_cmd = '{cmd} {in_file} -sa 2.00000000'.format( - cmd=cmd, in_file=in_file) + expected_cmd = f"{cmd} {in_file} -sa 2.00000000" assert binarys.cmdline == expected_cmd diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py index 699b7dc8d3..9d78517f79 100644 --- a/nipype/interfaces/nilearn.py +++ b/nipype/interfaces/nilearn.py @@ -1,80 +1,85 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' -Algorithms to compute statistics on :abbr:`fMRI (functional MRI)` -''' -from __future__ import (print_function, division, unicode_literals, - absolute_import) +"""Nilearn is a Python library for fast and easy statistical learning on NeuroImaging data.""" import os import numpy as np import nibabel as nb -from ..interfaces.base import (traits, TraitedSpec, LibraryBaseInterface, - SimpleInterface, BaseInterfaceInputSpec, File, - InputMultiPath) +from ..interfaces.base import ( + traits, + TraitedSpec, + LibraryBaseInterface, + SimpleInterface, + BaseInterfaceInputSpec, + File, + InputMultiPath, +) class NilearnBaseInterface(LibraryBaseInterface): - _pkg = 'nilearn' + _pkg = "nilearn" class SignalExtractionInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc='4-D fMRI nii file') + 
in_file = File(exists=True, mandatory=True, desc="4-D fMRI nii file") label_files = InputMultiPath( File(exists=True), mandatory=True, - desc='a 3-D label image, with 0 denoting ' - 'background, or a list of 3-D probability ' - 'maps (one per label) or the equivalent 4D ' - 'file.') + desc="a 3-D label image, with 0 denoting " + "background, or a list of 3-D probability " + "maps (one per label) or the equivalent 4D " + "file.", + ) class_labels = traits.List( mandatory=True, - desc='Human-readable labels for each segment ' - 'in the label file, in order. The length of ' - 'class_labels must be equal to the number of ' - 'segments (background excluded). This list ' - 'corresponds to the class labels in label_file ' - 'in ascending order') + desc="Human-readable labels for each segment " + "in the label file, in order. The length of " + "class_labels must be equal to the number of " + "segments (background excluded). This list " + "corresponds to the class labels in label_file " + "in ascending order", + ) out_file = File( - 'signals.tsv', + "signals.tsv", usedefault=True, exists=False, - desc='The name of the file to output to. ' - 'signals.tsv by default') + desc="The name of the file to output to. signals.tsv by default", + ) incl_shared_variance = traits.Bool( True, usedefault=True, - desc='By default ' - '(True), returns simple time series calculated from each ' - 'region independently (e.g., for noise regression). If ' - 'False, returns unique signals for each region, discarding ' - 'shared variance (e.g., for connectivity. Only has effect ' - 'with 4D probability maps.') + desc="By default " + "(True), returns simple time series calculated from each " + "region independently (e.g., for noise regression). If " + "False, returns unique signals for each region, discarding " + "shared variance (e.g., for connectivity. Only has effect " + "with 4D probability maps.", + ) include_global = traits.Bool( False, usedefault=True, - desc='If True, include an extra column ' + desc="If True, include an extra column " 'labeled "GlobalSignal", with values calculated from the entire brain ' - '(instead of just regions).') + "(instead of just regions).", + ) detrend = traits.Bool( - False, - usedefault=True, - desc='If True, perform detrending using nilearn.') + False, usedefault=True, desc="If True, perform detrending using nilearn." 
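Note: the traits above define the public inputs of SignalExtraction. A minimal
usage sketch (file and label names are hypothetical placeholders echoing the
interface's doctest, not part of this patch):

    from nipype.interfaces.nilearn import SignalExtraction

    se = SignalExtraction()
    se.inputs.in_file = "functional.nii"          # 4-D fMRI series
    se.inputs.label_files = "segmentation0.nii"   # 3-D labels or 4-D probability maps
    se.inputs.class_labels = ["CSF", "GrayMatter", "WhiteMatter"]
    se.inputs.detrend = True
    se.inputs.include_global = True
    # res = se.run()  # writes signals.tsv (one column per label) in the working dir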
+ ) class SignalExtractionOutputSpec(TraitedSpec): out_file = File( exists=True, - desc='tsv file containing the computed ' - 'signals, with as many columns as there are labels and as ' - 'many rows as there are timepoints in in_file, plus a ' - 'header row with values from class_labels') + desc="tsv file containing the computed " + "signals, with as many columns as there are labels and as " + "many rows as there are timepoints in in_file, plus a " + "header row with values from class_labels", + ) class SignalExtraction(NilearnBaseInterface, SimpleInterface): - ''' + """ Extracts signals over tissue classes or brain regions >>> seinterface = SignalExtraction() @@ -85,32 +90,28 @@ class SignalExtraction(NilearnBaseInterface, SimpleInterface): >>> seinterface.inputs.class_labels = segments >>> seinterface.inputs.detrend = True >>> seinterface.inputs.include_global = True - ''' + """ + input_spec = SignalExtractionInputSpec output_spec = SignalExtractionOutputSpec def _run_interface(self, runtime): maskers = self._process_inputs() - signals = [] - for masker in maskers: - signals.append(masker.fit_transform(self.inputs.in_file)) + signals = [masker.fit_transform(self.inputs.in_file) for masker in maskers] region_signals = np.hstack(signals) - output = np.vstack((self.inputs.class_labels, - region_signals.astype(str))) + output = np.vstack((self.inputs.class_labels, region_signals.astype(str))) # save output - self._results['out_file'] = os.path.join(runtime.cwd, - self.inputs.out_file) - np.savetxt( - self._results['out_file'], output, fmt=b'%s', delimiter='\t') + self._results["out_file"] = os.path.join(runtime.cwd, self.inputs.out_file) + np.savetxt(self._results["out_file"], output, fmt=b"%s", delimiter="\t") return runtime def _process_inputs(self): - ''' validate and process inputs into useful form. + """validate and process inputs into useful form. Returns a list of nilearn maskers and the list of corresponding label - names.''' + names.""" import nilearn.input_data as nl import nilearn.image as nli @@ -118,42 +119,47 @@ def _process_inputs(self): maskers = [] # determine form of label files, choose appropriate nilearn masker - if np.amax(label_data.get_data()) > 1: # 3d label file - n_labels = np.amax(label_data.get_data()) + if np.amax(label_data.dataobj) > 1: # 3d label file + n_labels = np.amax(label_data.dataobj) maskers.append(nl.NiftiLabelsMasker(label_data)) else: # 4d labels - n_labels = label_data.get_data().shape[3] + n_labels = label_data.shape[3] if self.inputs.incl_shared_variance: # independent computation - for img in nli.iter_img(label_data): - maskers.append( - nl.NiftiMapsMasker( - self._4d(img.get_data(), img.affine))) + maskers.extend( + nl.NiftiMapsMasker(self._4d(img.dataobj, img.affine)) + for img in nli.iter_img(label_data) + ) else: # one computation fitting all maskers.append(nl.NiftiMapsMasker(label_data)) # check label list size if not np.isclose(int(n_labels), n_labels): raise ValueError( - 'The label files {} contain invalid value {}. Check input.' - .format(self.inputs.label_files, n_labels)) + "The label files {} contain invalid value {}. 
Check input.".format(
+                    self.inputs.label_files, n_labels
+                )
+            )
         if len(self.inputs.class_labels) != n_labels:
-            raise ValueError('The length of class_labels {} does not '
-                             'match the number of regions {} found in '
-                             'label_files {}'.format(self.inputs.class_labels,
-                                                     n_labels,
-                                                     self.inputs.label_files))
+            raise ValueError(
+                "The length of class_labels {} does not "
+                "match the number of regions {} found in "
+                "label_files {}".format(
+                    self.inputs.class_labels, n_labels, self.inputs.label_files
+                )
+            )

         if self.inputs.include_global:
-            global_label_data = label_data.get_data().sum(
-                axis=3)  # sum across all regions
-            global_label_data = np.rint(global_label_data).astype(int).clip(
-                0, 1)  # binarize
+            global_label_data = label_data.dataobj.sum(axis=3)  # sum across all regions
+            global_label_data = (
+                np.rint(global_label_data).clip(0, 1).astype('u1')
+            )  # binarize
             global_label_data = self._4d(global_label_data, label_data.affine)
             global_masker = nl.NiftiLabelsMasker(
-                global_label_data, detrend=self.inputs.detrend)
+                global_label_data, detrend=self.inputs.detrend
+            )
             maskers.insert(0, global_masker)
-            self.inputs.class_labels.insert(0, 'GlobalSignal')
+            self.inputs.class_labels.insert(0, "GlobalSignal")

         for masker in maskers:
             masker.set_params(detrend=self.inputs.detrend)
@@ -161,6 +167,6 @@ def _process_inputs(self):
         return maskers

     def _4d(self, array, affine):
-        ''' takes a 3-dimensional numpy array and an affine,
-        returns the equivalent 4th dimensional nifti file '''
+        """takes a 3-dimensional numpy array and an affine,
+        returns the equivalent 4-dimensional nifti file"""
         return nb.Nifti1Image(array[:, :, :, np.newaxis], affine)
diff --git a/nipype/interfaces/nipy/__init__.py b/nipype/interfaces/nipy/__init__.py
index 19d030b61a..ad8b66d887 100644
--- a/nipype/interfaces/nipy/__init__.py
+++ b/nipype/interfaces/nipy/__init__.py
@@ -1,4 +1,5 @@
-# -*- coding: utf-8 -*-
+"""NIPY is a python project for analysis of structural and functional neuroimaging data."""
+
 from .model import FitGLM, EstimateContrast
 from .preprocess import ComputeMask, SpaceTimeRealigner
 from .utils import Similarity
diff --git a/nipype/interfaces/nipy/base.py b/nipype/interfaces/nipy/base.py
index 077499c52a..25aef8b873 100644
--- a/nipype/interfaces/nipy/base.py
+++ b/nipype/interfaces/nipy/base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """ Base interface for nipy """
@@ -11,10 +10,10 @@
 # Remove in 2.0
 have_nipy = True
 try:
-    package_check('nipy')
+    package_check("nipy")
 except ImportError:
     have_nipy = False


 class NipyBaseInterface(LibraryBaseInterface):
-    _pkg = 'nipy'
+    _pkg = "nipy"
diff --git a/nipype/interfaces/nipy/model.py b/nipype/interfaces/nipy/model.py
index 0c2ddf4334..c99a4acaea 100644
--- a/nipype/interfaces/nipy/model.py
+++ b/nipype/interfaces/nipy/model.py
@@ -1,15 +1,15 @@
-# -*- coding: utf-8 -*-
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)
-from builtins import range, str, bytes
-
 import os

-from ...utils import NUMPY_MMAP
-
 from .base import NipyBaseInterface
-from ..base import (TraitedSpec, traits, File, OutputMultiPath,
-                    BaseInterfaceInputSpec, isdefined)
+from ..base import (
+    TraitedSpec,
+    traits,
+    Tuple,
+    File,
+    OutputMultiPath,
+    BaseInterfaceInputSpec,
+    isdefined,
+)


 class FitGLMInputSpec(BaseInterfaceInputSpec):
@@ -17,49 +17,61 @@ class FitGLMInputSpec(BaseInterfaceInputSpec):
         minlen=1,
         maxlen=1,
         mandatory=True,
-        desc=('Session specific information generated by'
-              ' ``modelgen.SpecifyModel``, FitGLM does '
-              'not support multiple runs uless they are '
-              'concatenated (see SpecifyModel options)'))
+        desc=(
+            "Session specific information generated by"
+            " ``modelgen.SpecifyModel``, FitGLM does "
+            "not support multiple runs unless they are "
+            "concatenated (see SpecifyModel options)"
+        ),
+    )
     hrf_model = traits.Enum(
-        'Canonical',
-        'Canonical With Derivative',
-        'FIR',
-        desc=("that specifies the hemodynamic reponse "
-              "function it can be 'Canonical', 'Canonical "
-              "With Derivative' or 'FIR'"),
-        usedefault=True)
+        "Canonical",
+        "Canonical With Derivative",
+        "FIR",
+        desc=(
+            "that specifies the hemodynamic response "
+            "function it can be 'Canonical', 'Canonical "
+            "With Derivative' or 'FIR'"
+        ),
+        usedefault=True,
+    )
     drift_model = traits.Enum(
         "Cosine",
         "Polynomial",
         "Blank",
-        desc=("string that specifies the desired drift "
-              "model, to be chosen among 'Polynomial', "
-              "'Cosine', 'Blank'"),
-        usedefault=True)
+        desc=(
+            "string that specifies the desired drift "
+            "model, to be chosen among 'Polynomial', "
+            "'Cosine', 'Blank'"
+        ),
+        usedefault=True,
+    )
     TR = traits.Float(mandatory=True)
     model = traits.Enum(
         "ar1",
         "spherical",
-        desc=("autoregressive mode is available only for the "
-              "kalman method"),
-        usedefault=True)
+        desc=("autoregressive mode is available only for the kalman method"),
+        usedefault=True,
+    )
     method = traits.Enum(
         "kalman",
         "ols",
-        desc=("method to fit the model, ols or kalma; kalman "
-              "is more time consuming but it supports "
-              "autoregressive model"),
-        usedefault=True)
-    mask = traits.File(
+        desc=(
+            "method to fit the model, ols or kalman; kalman "
+            "is more time consuming but it supports "
+            "autoregressive model"
+        ),
+        usedefault=True,
+    )
+    mask = File(
         exists=True,
-        desc=("restrict the fitting only to the region defined "
-              "by this mask"))
+        desc=("restrict the fitting only to the region defined by this mask"),
+    )
     normalize_design_matrix = traits.Bool(
         False,
-        desc=("normalize (zscore) the "
-              "regressors before fitting"),
-        usedefault=True)
+        desc=("normalize (zscore) the regressors before fitting"),
+        usedefault=True,
+    )
     save_residuals = traits.Bool(False, usedefault=True)
     plot_design_matrix = traits.Bool(False, usedefault=True)
@@ -72,14 +84,15 @@ class FitGLMOutputSpec(TraitedSpec):
     constants = traits.Any()
     axis = traits.Any()
     reg_names = traits.List()
-    residuals = traits.File()
+    residuals = File()
     a = File(exists=True)


 class FitGLM(NipyBaseInterface):
-    '''
+    """
     Fit GLM model based on the specified design. Supports only single or
     concatenated runs.
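Note: a sketch of driving FitGLM with the inputs defined above; ``spec_results``
stands in for the result of a previously run ``modelgen.SpecifyModel`` node and
is assumed here, not shown:

    from nipype.interfaces.nipy.model import FitGLM

    fit = FitGLM()
    fit.inputs.session_info = spec_results.outputs.session_info  # from SpecifyModel
    fit.inputs.TR = 2.0
    fit.inputs.hrf_model = "Canonical"
    fit.inputs.drift_model = "Cosine"
    fit.inputs.method = "kalman"  # the only method that supports the "ar1" model
    fit.inputs.model = "ar1"
    fit.inputs.save_residuals = True
    # res = fit.run()  # writes beta.nii (and residuals.nii) in the working dir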
- ''' + """ + input_spec = FitGLMInputSpec output_spec = FitGLMOutputSpec @@ -88,6 +101,7 @@ def _run_interface(self, runtime): import numpy as np import nipy.modalities.fmri.glm as GLM import nipy.modalities.fmri.design_matrix as dm + try: BlockParadigm = dm.BlockParadigm except AttributeError: @@ -95,45 +109,41 @@ def _run_interface(self, runtime): session_info = self.inputs.session_info - functional_runs = self.inputs.session_info[0]['scans'] + functional_runs = self.inputs.session_info[0]["scans"] if isinstance(functional_runs, (str, bytes)): functional_runs = [functional_runs] nii = nb.load(functional_runs[0]) - data = nii.get_data() + data = nii.get_fdata(caching="unchanged") if isdefined(self.inputs.mask): - mask = nb.load(self.inputs.mask).get_data() > 0 + mask = np.asanyarray(nb.load(self.inputs.mask).dataobj) > 0 else: mask = np.ones(nii.shape[:3]) == 1 - timeseries = data.copy()[mask, :] + timeseries = data[mask, :] del data for functional_run in functional_runs[1:]: - nii = nb.load(functional_run, mmap=NUMPY_MMAP) - data = nii.get_data() - npdata = data.copy() - del data + nii = nb.load(functional_run, mmap=False) + npdata = np.asarray(nii.dataobj) timeseries = np.concatenate((timeseries, npdata[mask, :]), axis=1) del npdata nscans = timeseries.shape[1] - if 'hpf' in list(session_info[0].keys()): - hpf = session_info[0]['hpf'] + if "hpf" in list(session_info[0].keys()): + hpf = session_info[0]["hpf"] drift_model = self.inputs.drift_model else: hpf = 0 drift_model = "Blank" - reg_names = [] - for reg in session_info[0]['regress']: - reg_names.append(reg['name']) - + reg_names = [reg["name"] for reg in session_info[0]["regress"]] reg_vals = np.zeros((nscans, len(reg_names))) for i in range(len(reg_names)): - reg_vals[:, i] = np.array( - session_info[0]['regress'][i]['val']).reshape(1, -1) + reg_vals[:, i] = np.array(session_info[0]["regress"][i]["val"]).reshape( + 1, -1 + ) frametimes = np.linspace(0, (nscans - 1) * self.inputs.TR, nscans) @@ -141,17 +151,16 @@ def _run_interface(self, runtime): onsets = [] duration = [] - for i, cond in enumerate(session_info[0]['cond']): - onsets += cond['onset'] - conditions += [cond['name']] * len(cond['onset']) - if len(cond['duration']) == 1: - duration += cond['duration'] * len(cond['onset']) + for i, cond in enumerate(session_info[0]["cond"]): + onsets += cond["onset"] + conditions += [cond["name"]] * len(cond["onset"]) + if len(cond["duration"]) == 1: + duration += cond["duration"] * len(cond["onset"]) else: - duration += cond['duration'] + duration += cond["duration"] if conditions: - paradigm = BlockParadigm( - con_id=conditions, onset=onsets, duration=duration) + paradigm = BlockParadigm(con_id=conditions, onset=onsets, duration=duration) else: paradigm = None design_matrix, self._reg_names = dm.dmtx_light( @@ -161,15 +170,17 @@ def _run_interface(self, runtime): hfcut=hpf, hrf_model=self.inputs.hrf_model, add_regs=reg_vals, - add_reg_names=reg_names) + add_reg_names=reg_names, + ) if self.inputs.normalize_design_matrix: for i in range(len(self._reg_names) - 1): - design_matrix[:, i] = (( - design_matrix[:, i] - design_matrix[:, i].mean()) / - design_matrix[:, i].std()) + design_matrix[:, i] = ( + design_matrix[:, i] - design_matrix[:, i].mean() + ) / design_matrix[:, i].std() if self.inputs.plot_design_matrix: import pylab + pylab.pcolor(design_matrix) pylab.savefig("design_matrix.pdf") pylab.close() @@ -180,10 +191,11 @@ def _run_interface(self, runtime): timeseries.T, design_matrix, method=self.inputs.method, - 
model=self.inputs.model) + model=self.inputs.model, + ) self._beta_file = os.path.abspath("beta.nii") - beta = np.zeros(mask.shape + (glm.beta.shape[0], )) + beta = np.zeros(mask.shape + (glm.beta.shape[0],)) beta[mask, :] = glm.beta.T nb.save(nb.Nifti1Image(beta, nii.affine), self._beta_file) @@ -194,11 +206,10 @@ def _run_interface(self, runtime): if self.inputs.save_residuals: explained = np.dot(design_matrix, glm.beta) - residuals = np.zeros(mask.shape + (nscans, )) + residuals = np.zeros(mask.shape + (nscans,)) residuals[mask, :] = timeseries - explained.T self._residuals_file = os.path.abspath("residuals.nii") - nb.save( - nb.Nifti1Image(residuals, nii.affine), self._residuals_file) + nb.save(nb.Nifti1Image(residuals, nii.affine), self._residuals_file) self._nvbeta = glm.nvbeta self._dof = glm.dof @@ -233,38 +244,58 @@ def _list_outputs(self): class EstimateContrastInputSpec(BaseInterfaceInputSpec): contrasts = traits.List( traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float), traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('F'), - traits.List( - traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float), - traits.List(traits.Float)))))), + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + Tuple( + traits.Str, + traits.Enum("F"), + traits.List( + traits.Either( + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + ) + ), + ), + ), desc="""List of contrasts with each contrast being a list of the form: [('name', 'stat', [condition list], [weight list], [session list])]. if session list is None or not provided, all sessions are used. For F contrasts, the condition list should contain previously defined T-contrasts.""", - mandatory=True) + mandatory=True, + ) beta = File( - exists=True, - desc="beta coefficients of the fitted model", - mandatory=True) + exists=True, desc="beta coefficients of the fitted model", mandatory=True + ) nvbeta = traits.Any(mandatory=True) - s2 = File( - exists=True, desc="squared variance of the residuals", mandatory=True) + s2 = File(exists=True, desc="squared variance of the residuals", mandatory=True) dof = traits.Any(desc="degrees of freedom", mandatory=True) constants = traits.Any(mandatory=True) axis = traits.Any(mandatory=True) reg_names = traits.List(mandatory=True) - mask = traits.File(exists=True) + mask = File(exists=True) class EstimateContrastOutputSpec(TraitedSpec): @@ -274,9 +305,10 @@ class EstimateContrastOutputSpec(TraitedSpec): class EstimateContrast(NipyBaseInterface): - ''' + """ Estimate contrast of a fitted model. 
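Note: the ``contrasts`` trait above accepts T- and F-contrast tuples of the
documented form; a hedged sketch with hypothetical condition names:

    cont1 = ("task>rest", "T", ["task", "rest"], [1.0, -1.0])
    cont2 = ("any_effect", "F", [cont1])

    est = EstimateContrast()
    est.inputs.contrasts = [cont1, cont2]
    # beta, s2, dof, nvbeta, constants, axis and reg_names are mandatory and are
    # normally wired in from the corresponding FitGLM outputs.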
-    '''
+    """
+
     input_spec = EstimateContrastInputSpec
     output_spec = EstimateContrastOutputSpec
@@ -287,15 +319,14 @@ def _run_interface(self, runtime):
         beta_nii = nb.load(self.inputs.beta)
         if isdefined(self.inputs.mask):
-            mask = nb.load(self.inputs.mask).get_data() > 0
+            mask = np.asanyarray(nb.load(self.inputs.mask).dataobj) > 0
         else:
             mask = np.ones(beta_nii.shape[:3]) == 1

         glm = GLM.GeneralLinearModel()
-        nii = nb.load(self.inputs.beta)
-        glm.beta = beta_nii.get_data().copy()[mask, :].T
+        glm.beta = np.array(beta_nii.dataobj)[mask, :].T
         glm.nvbeta = self.inputs.nvbeta
-        glm.s2 = nb.load(self.inputs.s2).get_data().copy()[mask]
+        glm.s2 = np.array(nb.load(self.inputs.s2).dataobj)[mask]
         glm.dof = self.inputs.dof
         glm._axis = self.inputs.axis
         glm._constants = self.inputs.constants
@@ -319,7 +350,7 @@ def _run_interface(self, runtime):
             stat_map = np.zeros(mask.shape)
             stat_map[mask] = est_contrast.stat().T
             stat_map_file = os.path.abspath(name + "_stat_map.nii")
-            nb.save(nb.Nifti1Image(stat_map, nii.affine), stat_map_file)
+            nb.save(nb.Nifti1Image(stat_map, beta_nii.affine), stat_map_file)
             self._stat_maps.append(stat_map_file)

             p_map = np.zeros(mask.shape)
diff --git a/nipype/interfaces/nipy/preprocess.py b/nipype/interfaces/nipy/preprocess.py
index 666aa27837..a85c8d3ad4 100644
--- a/nipype/interfaces/nipy/preprocess.py
+++ b/nipype/interfaces/nipy/preprocess.py
@@ -1,30 +1,35 @@
-# -*- coding: utf-8 -*-
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)
-from builtins import open
-
 import os

 import nibabel as nb
 import numpy as np

-from ...utils import NUMPY_MMAP
 from ...utils.filemanip import split_filename, fname_presuffix
-from .base import NipyBaseInterface, have_nipy
-from ..base import (TraitedSpec, traits, BaseInterfaceInputSpec,
-                    isdefined, File, InputMultiPath, OutputMultiPath)
+from .base import NipyBaseInterface
+from ..base import (
+    TraitedSpec,
+    traits,
+    BaseInterfaceInputSpec,
+    isdefined,
+    File,
+    InputMultiPath,
+    OutputMultiPath,
+)


 class ComputeMaskInputSpec(BaseInterfaceInputSpec):
     mean_volume = File(
         exists=True,
         mandatory=True,
-        desc="mean EPI image, used to compute the threshold for the mask")
+        desc="mean EPI image, used to compute the threshold for the mask",
+    )
     reference_volume = File(
         exists=True,
-        desc=("reference volume used to compute the mask. "
-              "If none is give, the mean volume is used."))
+        desc=(
+            "reference volume used to compute the mask. "
+            "If none is given, the mean volume is used."
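Note: the hunks in this file follow the nibabel migration pattern used
throughout this patch, replacing the deprecated ``get_data()``; a small sketch
of the pattern ("mask.nii" is a placeholder):

    import numpy as np
    import nibabel as nb

    img = nb.load("mask.nii")
    # old: data = img.get_data()               # deprecated, removed in nibabel 5.0
    mask = np.asanyarray(img.dataobj) > 0      # raw array access, no float cast
    data = img.get_fdata(caching="unchanged")  # float64, skips the instance cache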
+        ),
+    )
     m = traits.Float(desc="lower fraction of the histogram to be discarded")
     M = traits.Float(desc="upper fraction of the histogram to be discarded")
     cc = traits.Bool(desc="Keep only the largest connected component")
@@ -40,24 +45,27 @@ class ComputeMask(NipyBaseInterface):

     def _run_interface(self, runtime):
         from nipy.labs.mask import compute_mask
+
         args = {}
         for key in [
-                k for k, _ in list(self.inputs.items())
-                if k not in BaseInterfaceInputSpec().trait_names()
+            k
+            for k, _ in list(self.inputs.items())
+            if k not in BaseInterfaceInputSpec().trait_names()
         ]:
             value = getattr(self.inputs, key)
             if isdefined(value):
-                if key in ['mean_volume', 'reference_volume']:
-                    nii = nb.load(value, mmap=NUMPY_MMAP)
-                    value = nii.get_data()
+                if key in ["mean_volume", "reference_volume"]:
+                    nii = nb.load(value)
+                    value = np.asanyarray(nii.dataobj)
                 args[key] = value

         brain_mask = compute_mask(**args)
         _, name, ext = split_filename(self.inputs.mean_volume)
-        self._brain_mask_path = os.path.abspath("%s_mask.%s" % (name, ext))
+        self._brain_mask_path = os.path.abspath(f"{name}_mask.{ext}")
         nb.save(
             nb.Nifti1Image(brain_mask.astype(np.uint8), nii.affine),
-            self._brain_mask_path)
+            self._brain_mask_path,
+        )

         return runtime
@@ -69,34 +76,42 @@ def _list_outputs(self):


 class SpaceTimeRealignerInputSpec(BaseInterfaceInputSpec):
     in_file = InputMultiPath(
-        File(exists=True),
-        mandatory=True,
-        min_ver='0.4.0.dev',
-        desc="File to realign")
-    tr = traits.Float(desc="TR in seconds", requires=['slice_times'])
+        File(exists=True), mandatory=True, min_ver="0.4.0.dev", desc="File to realign"
+    )
+    tr = traits.Float(desc="TR in seconds", requires=["slice_times"])
     slice_times = traits.Either(
         traits.List(traits.Float()),
-        traits.Enum('asc_alt_2', 'asc_alt_2_1', 'asc_alt_half',
-                    'asc_alt_siemens', 'ascending', 'desc_alt_2',
-                    'desc_alt_half', 'descending'),
-        desc=('Actual slice acquisition times.'))
+        traits.Enum(
+            "asc_alt_2",
+            "asc_alt_2_1",
+            "asc_alt_half",
+            "asc_alt_siemens",
+            "ascending",
+            "desc_alt_2",
+            "desc_alt_half",
+            "descending",
+        ),
+        desc=("Actual slice acquisition times."),
+    )
     slice_info = traits.Either(
         traits.Int,
         traits.List(min_len=2, max_len=2),
-        desc=('Single integer or length 2 sequence '
-              'If int, the axis in `images` that is the '
-              'slice axis. In a 4D image, this will '
-              'often be axis = 2. If a 2 sequence, then'
-              ' elements are ``(slice_axis, '
-              'slice_direction)``, where ``slice_axis`` '
-              'is the slice axis in the image as above, '
-              'and ``slice_direction`` is 1 if the '
-              'slices were acquired slice 0 first, slice'
-              ' -1 last, or -1 if acquired slice -1 '
-              'first, slice 0 last. If `slice_info` is '
-              'an int, assume '
-              '``slice_direction`` == 1.'),
-        requires=['slice_times'],
+        desc=(
+            "Single integer or length 2 sequence "
+            "If int, the axis in `images` that is the "
+            "slice axis. In a 4D image, this will "
+            "often be axis = 2. If a 2 sequence, then"
+            " elements are ``(slice_axis, "
+            "slice_direction)``, where ``slice_axis`` "
+            "is the slice axis in the image as above, "
+            "and ``slice_direction`` is 1 if the "
+            "slices were acquired slice 0 first, slice"
+            " -1 last, or -1 if acquired slice -1 "
+            "first, slice 0 last. If `slice_info` is "
+            "an int, assume "
+            "``slice_direction`` == 1."
+        ),
+        requires=["slice_times"],
     )
@@ -104,8 +119,8 @@ class SpaceTimeRealignerOutputSpec(TraitedSpec):
     out_file = OutputMultiPath(File(exists=True), desc="Realigned files")
     par_file = OutputMultiPath(
         File(exists=True),
-        desc=("Motion parameter files. 
Angles are not " - "euler angles")) + desc=("Motion parameter files. Angles are not euler angles"), + ) class SpaceTimeRealigner(NipyBaseInterface): @@ -145,18 +160,20 @@ class SpaceTimeRealigner(NipyBaseInterface): input_spec = SpaceTimeRealignerInputSpec output_spec = SpaceTimeRealignerOutputSpec - keywords = ['slice timing', 'motion correction'] + keywords = ["slice timing", "motion correction"] def _run_interface(self, runtime): from nipy import save_image, load_image + all_ims = [load_image(fname) for fname in self.inputs.in_file] if not isdefined(self.inputs.slice_times): - from nipy.algorithms.registration.groupwise_registration import \ - SpaceRealign + from nipy.algorithms.registration.groupwise_registration import SpaceRealign + R = SpaceRealign(all_ims) else: from nipy.algorithms.registration import SpaceTimeRealign + R = SpaceTimeRealign( all_ims, tr=self.inputs.tr, @@ -172,24 +189,25 @@ def _run_interface(self, runtime): for j, corr in enumerate(corr_run): self._out_file_path.append( - os.path.abspath('corr_%s.nii.gz' % - (split_filename(self.inputs.in_file[j])[1]))) + os.path.abspath( + "corr_%s.nii.gz" % (split_filename(self.inputs.in_file[j])[1]) + ) + ) save_image(corr, self._out_file_path[j]) self._par_file_path.append( - os.path.abspath('%s.par' % - (os.path.split(self.inputs.in_file[j])[1]))) - mfile = open(self._par_file_path[j], 'w') + os.path.abspath("%s.par" % (os.path.split(self.inputs.in_file[j])[1])) + ) + mfile = open(self._par_file_path[j], "w") motion = R._transforms[j] # nipy does not encode euler angles. return in original form of # translation followed by rotation vector see: # http://en.wikipedia.org/wiki/Rodrigues'_rotation_formula - for i, mo in enumerate(motion): + for mo in motion: params = [ - '%.10f' % item - for item in np.hstack((mo.translation, mo.rotation)) + "%.10f" % item for item in np.hstack((mo.translation, mo.rotation)) ] - string = ' '.join(params) + '\n' + string = " ".join(params) + "\n" mfile.write(string) mfile.close() @@ -197,23 +215,23 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self._out_file_path - outputs['par_file'] = self._par_file_path + outputs["out_file"] = self._out_file_path + outputs["par_file"] = self._par_file_path return outputs class TrimInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="EPI image to trim") - begin_index = traits.Int(0, usedefault=True, desc='first volume') + begin_index = traits.Int(0, usedefault=True, desc="first volume") end_index = traits.Int( - 0, - usedefault=True, - desc='last volume indexed as in python (and 0 for last)') - out_file = File(desc='output filename') + 0, usedefault=True, desc="last volume indexed as in python (and 0 for last)" + ) + out_file = File(desc="output filename") suffix = traits.Str( - '_trim', + "_trim", usedefault=True, - desc='suffix for out_file to use if no out_file provided') + desc="suffix for out_file to use if no out_file provided", + ) class TrimOutputSpec(TraitedSpec): @@ -221,7 +239,7 @@ class TrimOutputSpec(TraitedSpec): class Trim(NipyBaseInterface): - """ Simple interface to trim a few volumes from a 4d fmri nifti file + """Simple interface to trim a few volumes from a 4d fmri nifti file Examples -------- @@ -237,23 +255,22 @@ class Trim(NipyBaseInterface): output_spec = TrimOutputSpec def _run_interface(self, runtime): - out_file = self._list_outputs()['out_file'] + out_file = self._list_outputs()["out_file"] nii = nb.load(self.inputs.in_file) if 
self.inputs.end_index == 0: s = slice(self.inputs.begin_index, nii.shape[3]) else: s = slice(self.inputs.begin_index, self.inputs.end_index) - nii2 = nb.Nifti1Image(nii.get_data()[..., s], nii.affine, nii.header) + nii2 = nb.Nifti1Image(nii.dataobj[..., s], nii.affine, nii.header) nb.save(nii2, out_file) return runtime def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']): - outputs['out_file'] = fname_presuffix( - self.inputs.in_file, - newpath=os.getcwd(), - suffix=self.inputs.suffix) - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): + outputs["out_file"] = fname_presuffix( + self.inputs.in_file, newpath=os.getcwd(), suffix=self.inputs.suffix + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs diff --git a/nipype/interfaces/nipy/tests/__init__.py b/nipype/interfaces/nipy/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/nipy/tests/__init__.py +++ b/nipype/interfaces/nipy/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/nipy/tests/test_auto_ComputeMask.py b/nipype/interfaces/nipy/tests/test_auto_ComputeMask.py index f0f2453274..db1b784a03 100644 --- a/nipype/interfaces/nipy/tests/test_auto_ComputeMask.py +++ b/nipype/interfaces/nipy/tests/test_auto_ComputeMask.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ComputeMask @@ -8,16 +7,27 @@ def test_ComputeMask_inputs(): M=dict(), cc=dict(), m=dict(), - mean_volume=dict(mandatory=True, ), - reference_volume=dict(), + mean_volume=dict( + extensions=None, + mandatory=True, + ), + reference_volume=dict( + extensions=None, + ), ) inputs = ComputeMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeMask_outputs(): - output_map = dict(brain_mask=dict(), ) + output_map = dict( + brain_mask=dict( + extensions=None, + ), + ) outputs = ComputeMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/nipy/tests/test_auto_EstimateContrast.py b/nipype/interfaces/nipy/tests/test_auto_EstimateContrast.py index 383d1cbea3..172f2205fd 100644 --- a/nipype/interfaces/nipy/tests/test_auto_EstimateContrast.py +++ b/nipype/interfaces/nipy/tests/test_auto_EstimateContrast.py @@ -1,25 +1,46 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import EstimateContrast def test_EstimateContrast_inputs(): input_map = dict( - axis=dict(mandatory=True, ), - beta=dict(mandatory=True, ), - constants=dict(mandatory=True, ), - contrasts=dict(mandatory=True, ), - dof=dict(mandatory=True, ), - mask=dict(), - nvbeta=dict(mandatory=True, ), - reg_names=dict(mandatory=True, ), - s2=dict(mandatory=True, ), + axis=dict( + mandatory=True, + ), + beta=dict( + extensions=None, + mandatory=True, + ), + constants=dict( + mandatory=True, + ), + contrasts=dict( + mandatory=True, + ), + dof=dict( + mandatory=True, + ), + mask=dict( + extensions=None, + ), + nvbeta=dict( + mandatory=True, + ), + reg_names=dict( + mandatory=True, + ), + s2=dict( + extensions=None, + mandatory=True, + ), ) inputs = EstimateContrast.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateContrast_outputs(): output_map = dict( p_maps=dict(), diff --git a/nipype/interfaces/nipy/tests/test_auto_FitGLM.py b/nipype/interfaces/nipy/tests/test_auto_FitGLM.py index 4ac3aa720e..f04081214c 100644 --- a/nipype/interfaces/nipy/tests/test_auto_FitGLM.py +++ b/nipype/interfaces/nipy/tests/test_auto_FitGLM.py @@ -1,37 +1,66 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import FitGLM def test_FitGLM_inputs(): input_map = dict( - TR=dict(mandatory=True, ), - drift_model=dict(usedefault=True, ), - hrf_model=dict(usedefault=True, ), - mask=dict(), - method=dict(usedefault=True, ), - model=dict(usedefault=True, ), - normalize_design_matrix=dict(usedefault=True, ), - plot_design_matrix=dict(usedefault=True, ), - save_residuals=dict(usedefault=True, ), - session_info=dict(mandatory=True, ), + TR=dict( + mandatory=True, + ), + drift_model=dict( + usedefault=True, + ), + hrf_model=dict( + usedefault=True, + ), + mask=dict( + extensions=None, + ), + method=dict( + usedefault=True, + ), + model=dict( + usedefault=True, + ), + normalize_design_matrix=dict( + usedefault=True, + ), + plot_design_matrix=dict( + usedefault=True, + ), + save_residuals=dict( + usedefault=True, + ), + session_info=dict( + mandatory=True, + ), ) inputs = FitGLM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitGLM_outputs(): output_map = dict( - a=dict(), + a=dict( + extensions=None, + ), axis=dict(), - beta=dict(), + beta=dict( + extensions=None, + ), constants=dict(), dof=dict(), nvbeta=dict(), reg_names=dict(), - residuals=dict(), - s2=dict(), + residuals=dict( + extensions=None, + ), + s2=dict( + extensions=None, + ), ) outputs = FitGLM.output_spec() diff --git a/nipype/interfaces/nipy/tests/test_auto_NipyBaseInterface.py b/nipype/interfaces/nipy/tests/test_auto_NipyBaseInterface.py index ca91500995..64df6d9636 100644 --- a/nipype/interfaces/nipy/tests/test_auto_NipyBaseInterface.py +++ b/nipype/interfaces/nipy/tests/test_auto_NipyBaseInterface.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import NipyBaseInterface diff --git a/nipype/interfaces/nipy/tests/test_auto_Similarity.py b/nipype/interfaces/nipy/tests/test_auto_Similarity.py index fbdb6fffa6..81e8622078 100644 --- a/nipype/interfaces/nipy/tests/test_auto_Similarity.py +++ b/nipype/interfaces/nipy/tests/test_auto_Similarity.py @@ -1,23 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Similarity def test_Similarity_inputs(): input_map = dict( - mask1=dict(), - mask2=dict(), - metric=dict(usedefault=True, ), - volume1=dict(mandatory=True, ), - volume2=dict(mandatory=True, ), + mask1=dict( + extensions=None, + ), + mask2=dict( + extensions=None, + ), + metric=dict( + usedefault=True, + ), + volume1=dict( + extensions=None, + mandatory=True, + ), + volume2=dict( + extensions=None, + mandatory=True, + ), ) inputs = Similarity.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Similarity_outputs(): - output_map = dict(similarity=dict(), ) + output_map = dict( + similarity=dict(), + ) outputs = 
Similarity.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py b/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py index fca6664a49..fd65848f72 100644 --- a/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py +++ b/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import SpaceTimeRealigner @@ -7,17 +6,23 @@ def test_SpaceTimeRealigner_inputs(): input_map = dict( in_file=dict( mandatory=True, - min_ver='0.4.0.dev', + min_ver="0.4.0.dev", + ), + slice_info=dict( + requires=["slice_times"], ), - slice_info=dict(requires=['slice_times'], ), slice_times=dict(), - tr=dict(requires=['slice_times'], ), + tr=dict( + requires=["slice_times"], + ), ) inputs = SpaceTimeRealigner.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SpaceTimeRealigner_outputs(): output_map = dict( out_file=dict(), diff --git a/nipype/interfaces/nipy/tests/test_auto_Trim.py b/nipype/interfaces/nipy/tests/test_auto_Trim.py index 6073c9082b..c4ecee3007 100644 --- a/nipype/interfaces/nipy/tests/test_auto_Trim.py +++ b/nipype/interfaces/nipy/tests/test_auto_Trim.py @@ -1,23 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Trim def test_Trim_inputs(): input_map = dict( - begin_index=dict(usedefault=True, ), - end_index=dict(usedefault=True, ), - in_file=dict(mandatory=True, ), - out_file=dict(), - suffix=dict(usedefault=True, ), + begin_index=dict( + usedefault=True, + ), + end_index=dict( + usedefault=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + out_file=dict( + extensions=None, + ), + suffix=dict( + usedefault=True, + ), ) inputs = Trim.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Trim_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Trim.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/nipy/utils.py b/nipype/interfaces/nipy/utils.py index 406960fc6d..96773f27dc 100644 --- a/nipype/interfaces/nipy/utils.py +++ b/nipype/interfaces/nipy/utils.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - import warnings +import numpy as np import nibabel as nb -from .base import NipyBaseInterface, have_nipy -from ..base import (TraitedSpec, traits, BaseInterfaceInputSpec, - File, isdefined) +from .base import NipyBaseInterface +from ..base import TraitedSpec, traits, BaseInterfaceInputSpec, File, isdefined class SimilarityInputSpec(BaseInterfaceInputSpec): @@ -16,7 +12,7 @@ class SimilarityInputSpec(BaseInterfaceInputSpec): mask1 = File(exists=True, desc="3D volume") mask2 = File(exists=True, desc="3D volume") metric = traits.Either( - traits.Enum('cc', 'cr', 'crl1', 'mi', 'nmi', 'slr'), + traits.Enum("cc", "cr", "crl1", "mi", "nmi", "slr"), traits.Callable(), desc="""str or callable Cost-function for assessing image similarity. If a string, @@ -26,7 +22,8 @@ class SimilarityInputSpec(BaseInterfaceInputSpec): supervised log-likelihood ratio. 
If a callable, it should take a two-dimensional array representing the image joint histogram as an input and return a float.""", - usedefault=True) + usedefault=True, + ) class SimilarityOutputSpec(TraitedSpec): @@ -57,25 +54,31 @@ class Similarity(NipyBaseInterface): output_spec = SimilarityOutputSpec def __init__(self, **inputs): - warnings.warn(("This interface is deprecated since 0.10.0." - " Please use nipype.algorithms.metrics.Similarity"), - DeprecationWarning) - super(Similarity, self).__init__(**inputs) + warnings.warn( + ( + "This interface is deprecated since 0.10.0." + " Please use nipype.algorithms.metrics.Similarity" + ), + DeprecationWarning, + ) + super().__init__(**inputs) def _run_interface(self, runtime): - from nipy.algorithms.registration.histogram_registration import HistogramRegistration + from nipy.algorithms.registration.histogram_registration import ( + HistogramRegistration, + ) from nipy.algorithms.registration.affine import Affine vol1_nii = nb.load(self.inputs.volume1) vol2_nii = nb.load(self.inputs.volume2) if isdefined(self.inputs.mask1): - mask1 = nb.load(self.inputs.mask1).get_data() == 1 + mask1 = np.asanyarray(nb.load(self.inputs.mask1).dataobj) == 1 else: mask1 = None if isdefined(self.inputs.mask2): - mask2 = nb.load(self.inputs.mask2).get_data() == 1 + mask2 = np.asanyarray(nb.load(self.inputs.mask2).dataobj) == 1 else: mask2 = None @@ -84,12 +87,13 @@ def _run_interface(self, runtime): to_img=vol2_nii, similarity=self.inputs.metric, from_mask=mask1, - to_mask=mask2) + to_mask=mask2, + ) self._similarity = histreg.eval(Affine()) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['similarity'] = self._similarity + outputs["similarity"] = self._similarity return outputs diff --git a/nipype/interfaces/nitime/__init__.py b/nipype/interfaces/nitime/__init__.py index 656d601b3e..883d417ba0 100644 --- a/nipype/interfaces/nitime/__init__.py +++ b/nipype/interfaces/nitime/__init__.py @@ -1,6 +1,8 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: - -from .analysis import (CoherenceAnalyzerInputSpec, CoherenceAnalyzerOutputSpec, - CoherenceAnalyzer) +"""Nitime is a library for time-series analysis of data from neuroscience experiments.""" +from .analysis import ( + CoherenceAnalyzerInputSpec, + CoherenceAnalyzerOutputSpec, + CoherenceAnalyzer, +) diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py index 6e8ff17d0f..7c936e4b03 100644 --- a/nipype/interfaces/nitime/analysis.py +++ b/nipype/interfaces/nitime/analysis.py @@ -1,8 +1,6 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ - Interfaces to functionality from nitime for time-series analysis of fmri data - nitime.analysis.CoherenceAnalyzer: Coherence/y @@ -10,110 +8,124 @@ - nitime.viz.drawmatrix_channels """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import zip, object, open - import numpy as np import tempfile from ...utils.misc import package_check from ...utils.filemanip import fname_presuffix from .base import NitimeBaseInterface -from ..base import (TraitedSpec, File, Undefined, traits, - isdefined, BaseInterfaceInputSpec) +from ..base import ( + TraitedSpec, + File, + Undefined, + traits, + isdefined, + BaseInterfaceInputSpec, +) have_nitime = True try: - package_check('nitime') + 
package_check("nitime") except ImportError: have_nitime = False class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): - # Input either csv file, or time-series object and use _xor_inputs to # discriminate - _xor_inputs = ('in_file', 'in_TS') + _xor_inputs = ("in_file", "in_TS") in_file = File( - desc=('csv file with ROIs on the columns and ' - 'time-points on the rows. ROI names at the top row'), + desc=( + "csv file with ROIs on the columns and " + "time-points on the rows. ROI names at the top row" + ), exists=True, - requires=('TR', )) + requires=("TR",), + ) # If you gave just a file name, you need to specify the sampling_rate: TR = traits.Float( - desc=('The TR used to collect the data' - 'in your csv file ')) + desc=("The TR used to collect the data in your csv file ") + ) - in_TS = traits.Any(desc='a nitime TimeSeries object') + in_TS = traits.Any(desc="a nitime TimeSeries object") NFFT = traits.Range( low=32, value=64, usedefault=True, - desc=('This is the size of the window used for ' - 'the spectral estimation. Use values between ' - '32 and the number of samples in your time-series.' - '(Defaults to 64.)')) + desc=( + "This is the size of the window used for " + "the spectral estimation. Use values between " + "32 and the number of samples in your time-series." + "(Defaults to 64.)" + ), + ) n_overlap = traits.Range( low=0, value=0, usedefault=True, - desc=('The number of samples which overlap' - 'between subsequent windows.(Defaults to 0)')) + desc=( + "The number of samples which overlap" + "between subsequent windows.(Defaults to 0)" + ), + ) frequency_range = traits.List( value=[0.02, 0.15], usedefault=True, minlen=2, maxlen=2, - desc=('The range of frequencies over' - 'which the analysis will average.' - '[low,high] (Default [0.02,0.15]')) + desc=( + "The range of frequencies over" + "which the analysis will average." + "[low,high] (Default [0.02,0.15]" + ), + ) output_csv_file = File( - desc= - 'File to write outputs (coherence,time-delay) with file-names: file_name_ {coherence,timedelay}' + desc="File to write outputs (coherence,time-delay) with file-names: " + "``file_name_{coherence,timedelay}``" ) output_figure_file = File( - desc= - 'File to write output figures (coherence,time-delay) with file-names: file_name_{coherence,timedelay}. Possible formats: .png,.svg,.pdf,.jpg,...' + desc="""\ +File to write output figures (coherence,time-delay) with file-names: +``file_name_{coherence,timedelay}``. Possible formats: .png,.svg,.pdf,.jpg,...""" ) figure_type = traits.Enum( - 'matrix', - 'network', + "matrix", + "network", usedefault=True, - desc=("The type of plot to generate, where " - "'matrix' denotes a matrix image and" - "'network' denotes a graph representation." - " Default: 'matrix'")) + desc=( + "The type of plot to generate, where " + "'matrix' denotes a matrix image and" + "'network' denotes a graph representation." 
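Note: a minimal usage sketch for this interface; the CSV layout matches the
``in_file`` description, and the TR and file name echo the tests further below:

    from nipype.interfaces import nitime

    ca = nitime.CoherenceAnalyzer()
    ca.inputs.TR = 1.89
    ca.inputs.in_file = "fmri_timeseries.csv"  # ROI names in the header row
    ca.inputs.NFFT = 64
    ca.inputs.frequency_range = [0.02, 0.15]
    ca.inputs.output_csv_file = "coh.csv"      # emits coh_coherence.csv / coh_delay.csv
    # res = ca.run()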
+ " Default: 'matrix'" + ), + ) class CoherenceAnalyzerOutputSpec(TraitedSpec): coherence_array = traits.Array( - desc=('The pairwise coherence values' - 'between the ROIs')) + desc=("The pairwise coherence values between the ROIs") + ) timedelay_array = traits.Array( - desc=('The pairwise time delays between the' - 'ROIs (in seconds)')) + desc=("The pairwise time delays between the ROIs (in seconds)") + ) - coherence_csv = File( - desc=('A csv file containing the pairwise ' - 'coherence values')) + coherence_csv = File(desc=("A csv file containing the pairwise coherence values")) - timedelay_csv = File( - desc=('A csv file containing the pairwise ' - 'time delay values')) + timedelay_csv = File(desc=("A csv file containing the pairwise time delay values")) - coherence_fig = File(desc=('Figure representing coherence values')) - timedelay_fig = File(desc=('Figure representing coherence values')) + coherence_fig = File(desc=("Figure representing coherence values")) + timedelay_fig = File(desc=("Figure representing coherence values")) class CoherenceAnalyzer(NitimeBaseInterface): + """Wraps nitime.analysis.CoherenceAnalyzer: Coherence/y""" input_spec = CoherenceAnalyzerInputSpec output_spec = CoherenceAnalyzerOutputSpec @@ -130,26 +142,25 @@ def _read_csv(self): """ # Check that input conforms to expectations: - first_row = open(self.inputs.in_file).readline() + with open(self.inputs.in_file) as f: + first_row = f.readline() if not first_row[1].isalpha(): raise ValueError( "First row of in_file should contain ROI names as strings of characters" ) - - roi_names = open(self.inputs.in_file).readline().replace( - '\"', '').strip('\n').split(',') + roi_names = first_row.replace('"', "").strip("\n").split(",") # Transpose, so that the time is the last dimension: - data = np.loadtxt(self.inputs.in_file, skiprows=1, delimiter=',').T + data = np.loadtxt(self.inputs.in_file, skiprows=1, delimiter=",").T return data, roi_names def _csv2ts(self): - """ Read data from the in_file and generate a nitime TimeSeries object""" + """Read data from the in_file and generate a nitime TimeSeries object""" from nitime.timeseries import TimeSeries + data, roi_names = self._read_csv() - TS = TimeSeries( - data=data, sampling_interval=self.inputs.TR, time_unit='s') + TS = TimeSeries(data=data, sampling_interval=self.inputs.TR, time_unit="s") TS.metadata = dict(ROIs=roi_names) @@ -158,6 +169,7 @@ def _csv2ts(self): # Rewrite _run_interface, but not run def _run_interface(self, runtime): import nitime.analysis as nta + lb, ub = self.inputs.frequency_range if self.inputs.in_TS is Undefined: @@ -169,21 +181,24 @@ def _run_interface(self, runtime): TS = self.inputs.in_TS # deal with creating or storing ROI names: - if 'ROIs' not in TS.metadata: - self.ROIs = ['roi_%d' % x for x, _ in enumerate(TS.data)] + if "ROIs" not in TS.metadata: + self.ROIs = ["roi_%d" % x for x, _ in enumerate(TS.data)] else: - self.ROIs = TS.metadata['ROIs'] + self.ROIs = TS.metadata["ROIs"] A = nta.CoherenceAnalyzer( TS, method=dict( - this_method='welch', + this_method="welch", NFFT=self.inputs.NFFT, - n_overlap=self.inputs.n_overlap)) + n_overlap=self.inputs.n_overlap, + ), + ) freq_idx = np.where( - (A.frequencies > self.inputs.frequency_range[0]) * - (A.frequencies < self.inputs.frequency_range[1]))[0] + (A.frequencies > self.inputs.frequency_range[0]) + * (A.frequencies < self.inputs.frequency_range[1]) + )[0] # Get the coherence matrix from the analyzer, averaging on the last # (frequency) dimension: (roi X roi array) @@ -202,29 +217,31 @@ 
def _list_outputs(self): # file name + path) # Always defined (the arrays): - outputs['coherence_array'] = self.coherence - outputs['timedelay_array'] = self.delay + outputs["coherence_array"] = self.coherence + outputs["timedelay_array"] = self.delay # Conditional - if isdefined(self.inputs.output_csv_file) and hasattr( - self, 'coherence'): + if isdefined(self.inputs.output_csv_file) and hasattr(self, "coherence"): # we need to make a function that we call here that writes the # coherence values to this file "coherence_csv" and makes the # time_delay csv file?? self._make_output_files() - outputs['coherence_csv'] = fname_presuffix( - self.inputs.output_csv_file, suffix='_coherence') + outputs["coherence_csv"] = fname_presuffix( + self.inputs.output_csv_file, suffix="_coherence" + ) - outputs['timedelay_csv'] = fname_presuffix( - self.inputs.output_csv_file, suffix='_delay') + outputs["timedelay_csv"] = fname_presuffix( + self.inputs.output_csv_file, suffix="_delay" + ) - if isdefined(self.inputs.output_figure_file) and hasattr( - self, 'coherence'): + if isdefined(self.inputs.output_figure_file) and hasattr(self, "coherence"): self._make_output_figures() - outputs['coherence_fig'] = fname_presuffix( - self.inputs.output_figure_file, suffix='_coherence') - outputs['timedelay_fig'] = fname_presuffix( - self.inputs.output_figure_file, suffix='_delay') + outputs["coherence_fig"] = fname_presuffix( + self.inputs.output_figure_file, suffix="_coherence" + ) + outputs["timedelay_fig"] = fname_presuffix( + self.inputs.output_figure_file, suffix="_delay" + ) return outputs @@ -232,19 +249,19 @@ def _make_output_files(self): """ Generate the output csv files. """ - for this in zip([self.coherence, self.delay], ['coherence', 'delay']): + for this in zip([self.coherence, self.delay], ["coherence", "delay"]): tmp_f = tempfile.mkstemp()[1] - np.savetxt(tmp_f, this[0], delimiter=',') - - fid = open( - fname_presuffix( - self.inputs.output_csv_file, suffix='_%s' % this[1]), 'w+') - # this writes ROIs as header line - fid.write(',' + ','.join(self.ROIs) + '\n') - # this writes ROI and data to a line - for r, line in zip(self.ROIs, open(tmp_f)): - fid.write('%s,%s' % (r, line)) - fid.close() + np.savetxt(tmp_f, this[0], delimiter=",") + + with open( + fname_presuffix(self.inputs.output_csv_file, suffix="_%s" % this[1]), + "w+", + ) as fid: + # this writes ROIs as header line + fid.write("," + ",".join(self.ROIs) + "\n") + # this writes ROI and data to a line + for r, line in zip(self.ROIs, open(tmp_f)): + fid.write(f"{r},{line}") def _make_output_figures(self): """ @@ -253,31 +270,32 @@ def _make_output_figures(self): """ import nitime.viz as viz - if self.inputs.figure_type == 'matrix': + + if self.inputs.figure_type == "matrix": fig_coh = viz.drawmatrix_channels( - self.coherence, channel_names=self.ROIs, color_anchor=0) + self.coherence, channel_names=self.ROIs, color_anchor=0 + ) fig_coh.savefig( - fname_presuffix( - self.inputs.output_figure_file, suffix='_coherence')) + fname_presuffix(self.inputs.output_figure_file, suffix="_coherence") + ) fig_dt = viz.drawmatrix_channels( - self.delay, channel_names=self.ROIs, color_anchor=0) + self.delay, channel_names=self.ROIs, color_anchor=0 + ) fig_dt.savefig( - fname_presuffix( - self.inputs.output_figure_file, suffix='_delay')) + fname_presuffix(self.inputs.output_figure_file, suffix="_delay") + ) else: - fig_coh = viz.drawgraph_channels( - self.coherence, channel_names=self.ROIs) + fig_coh = viz.drawgraph_channels(self.coherence, channel_names=self.ROIs) 
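Note: the ``fname_presuffix`` calls in this file derive the output names
reported by ``_list_outputs``; for reference (paths hypothetical):

    from nipype.utils.filemanip import fname_presuffix

    fname_presuffix("/tmp/coh.csv", suffix="_coherence")
    # -> '/tmp/coh_coherence.csv'
    fname_presuffix("/data/func.nii.gz", suffix="_trim", newpath="/work")
    # -> '/work/func_trim.nii.gz' (double extensions like .nii.gz are preserved)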
fig_coh.savefig( - fname_presuffix( - self.inputs.output_figure_file, suffix='_coherence')) + fname_presuffix(self.inputs.output_figure_file, suffix="_coherence") + ) - fig_dt = viz.drawgraph_channels( - self.delay, channel_names=self.ROIs) + fig_dt = viz.drawgraph_channels(self.delay, channel_names=self.ROIs) fig_dt.savefig( - fname_presuffix( - self.inputs.output_figure_file, suffix='_delay')) + fname_presuffix(self.inputs.output_figure_file, suffix="_delay") + ) diff --git a/nipype/interfaces/nitime/base.py b/nipype/interfaces/nitime/base.py index d9139f2c71..7e434f1d3e 100644 --- a/nipype/interfaces/nitime/base.py +++ b/nipype/interfaces/nitime/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Base interface for nitime """ @@ -7,4 +6,4 @@ class NitimeBaseInterface(LibraryBaseInterface): - _pkg = 'nitime' + _pkg = "nitime" diff --git a/nipype/interfaces/nitime/tests/__init__.py b/nipype/interfaces/nitime/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/nitime/tests/__init__.py +++ b/nipype/interfaces/nitime/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py b/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py index 4a641ebb50..8c70d059ab 100644 --- a/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py +++ b/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py @@ -1,33 +1,57 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..analysis import CoherenceAnalyzer def test_CoherenceAnalyzer_inputs(): input_map = dict( - NFFT=dict(usedefault=True, ), + NFFT=dict( + usedefault=True, + ), TR=dict(), - figure_type=dict(usedefault=True, ), - frequency_range=dict(usedefault=True, ), + figure_type=dict( + usedefault=True, + ), + frequency_range=dict( + usedefault=True, + ), in_TS=dict(), - in_file=dict(requires=('TR', ), ), - n_overlap=dict(usedefault=True, ), - output_csv_file=dict(), - output_figure_file=dict(), + in_file=dict( + extensions=None, + requires=("TR",), + ), + n_overlap=dict( + usedefault=True, + ), + output_csv_file=dict( + extensions=None, + ), + output_figure_file=dict( + extensions=None, + ), ) inputs = CoherenceAnalyzer.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CoherenceAnalyzer_outputs(): output_map = dict( coherence_array=dict(), - coherence_csv=dict(), - coherence_fig=dict(), + coherence_csv=dict( + extensions=None, + ), + coherence_fig=dict( + extensions=None, + ), timedelay_array=dict(), - timedelay_csv=dict(), - timedelay_fig=dict(), + timedelay_csv=dict( + extensions=None, + ), + timedelay_fig=dict( + extensions=None, + ), ) outputs = CoherenceAnalyzer.output_spec() diff --git a/nipype/interfaces/nitime/tests/test_auto_NitimeBaseInterface.py b/nipype/interfaces/nitime/tests/test_auto_NitimeBaseInterface.py index bf827b81b2..56b6b5d923 100644 --- a/nipype/interfaces/nitime/tests/test_auto_NitimeBaseInterface.py +++ b/nipype/interfaces/nitime/tests/test_auto_NitimeBaseInterface.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import NitimeBaseInterface diff --git a/nipype/interfaces/nitime/tests/test_nitime.py b/nipype/interfaces/nitime/tests/test_nitime.py index 
daea1a177b..64bb8366a0 100644 --- a/nipype/interfaces/nitime/tests/test_nitime.py +++ b/nipype/interfaces/nitime/tests/test_nitime.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -11,28 +10,28 @@ import nipype.interfaces.nitime as nitime no_nitime = not nitime.analysis.have_nitime -display_available = 'DISPLAY' in os.environ and os.environ['DISPLAY'] +display_available = "DISPLAY" in os.environ and os.environ["DISPLAY"] @pytest.mark.skipif(no_nitime, reason="nitime is not installed") def test_read_csv(): """Test that reading the data from csv file gives you back a reasonable - time-series object """ + time-series object""" CA = nitime.CoherenceAnalyzer() CA.inputs.TR = 1.89 # bogus value just to pass traits test - CA.inputs.in_file = example_data('fmri_timeseries_nolabels.csv') + CA.inputs.in_file = example_data("fmri_timeseries_nolabels.csv") with pytest.raises(ValueError): CA._read_csv() - CA.inputs.in_file = example_data('fmri_timeseries.csv') + CA.inputs.in_file = example_data("fmri_timeseries.csv") data, roi_names = CA._read_csv() assert data[0][0] == 10125.9 - assert roi_names[0] == 'WM' + assert roi_names[0] == "WM" @pytest.mark.skipif(no_nitime, reason="nitime is not installed") def test_coherence_analysis(tmpdir): - """Test that the coherence analyzer works """ + """Test that the coherence analyzer works""" import nitime.analysis as nta import nitime.timeseries as ts @@ -40,11 +39,11 @@ def test_coherence_analysis(tmpdir): # This is the nipype interface analysis: CA = nitime.CoherenceAnalyzer() CA.inputs.TR = 1.89 - CA.inputs.in_file = example_data('fmri_timeseries.csv') + CA.inputs.in_file = example_data("fmri_timeseries.csv") if display_available: - tmp_png = tempfile.mkstemp(suffix='.png')[1] + tmp_png = tempfile.mkstemp(suffix=".png")[1] CA.inputs.output_figure_file = tmp_png - tmp_csv = tempfile.mkstemp(suffix='.csv')[1] + tmp_csv = tempfile.mkstemp(suffix=".csv")[1] CA.inputs.output_csv_file = tmp_csv o = CA.run() @@ -52,7 +51,7 @@ def test_coherence_analysis(tmpdir): # This is the nitime analysis: TR = 1.89 - data_rec = np.recfromcsv(example_data('fmri_timeseries.csv')) + data_rec = np.recfromcsv(example_data("fmri_timeseries.csv")) roi_names = np.array(data_rec.dtype.names) n_samples = data_rec.shape[0] data = np.zeros((len(roi_names), n_samples)) @@ -64,16 +63,18 @@ def test_coherence_analysis(tmpdir): assert (CA._csv2ts().data == T.data).all() - T.metadata['roi'] = roi_names + T.metadata["roi"] = roi_names C = nta.CoherenceAnalyzer( T, method=dict( - this_method='welch', - NFFT=CA.inputs.NFFT, - n_overlap=CA.inputs.n_overlap)) - - freq_idx = np.where((C.frequencies > CA.inputs.frequency_range[0]) * - (C.frequencies < CA.inputs.frequency_range[1]))[0] + this_method="welch", NFFT=CA.inputs.NFFT, n_overlap=CA.inputs.n_overlap + ), + ) + + freq_idx = np.where( + (C.frequencies > CA.inputs.frequency_range[0]) + * (C.frequencies < CA.inputs.frequency_range[1]) + )[0] # Extract the coherence and average across these frequency bands: # Averaging is done on the last dimension diff --git a/nipype/interfaces/petpvc.py b/nipype/interfaces/petpvc.py index c73b408bf3..a1f042eed9 100644 --- a/nipype/interfaces/petpvc.py +++ b/nipype/interfaces/petpvc.py @@ -1,62 +1,121 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, 
unicode_literals, - absolute_import) - +"""PETPVC is a toolbox for partial volume correction in positron emission tomography.""" import os -from .base import TraitedSpec, CommandLineInputSpec, CommandLine, File, isdefined, traits +from .base import ( + TraitedSpec, + CommandLineInputSpec, + CommandLine, + File, + isdefined, + traits, +) from ..utils.filemanip import fname_presuffix from ..external.due import BibTeX pvc_methods = [ - 'GTM', 'IY', 'IY+RL', 'IY+VC', 'LABBE', 'LABBE+MTC', 'LABBE+MTC+RL', - 'LABBE+MTC+VC', 'LABBE+RBV', 'LABBE+RBV+RL', 'LABBE+RBV+VC', 'MG', 'MG+RL', - 'MG+VC', 'MTC', 'MTC+RL', 'MTC+VC', 'RBV', 'RBV+RL', 'RBV+VC', 'RL', 'VC' + "GTM", + "IY", + "IY+RL", + "IY+VC", + "LABBE", + "LABBE+MTC", + "LABBE+MTC+RL", + "LABBE+MTC+VC", + "LABBE+RBV", + "LABBE+RBV+RL", + "LABBE+RBV+VC", + "MG", + "MG+RL", + "MG+VC", + "MTC", + "MTC+RL", + "MTC+VC", + "RBV", + "RBV+RL", + "RBV+VC", + "RL", + "VC", + "STC", ] class PETPVCInputSpec(CommandLineInputSpec): - in_file = File( - desc="PET image file", exists=True, mandatory=True, argstr="-i %s") - out_file = File( - desc="Output file", genfile=True, hash_files=False, argstr="-o %s") + in_file = File(desc="PET image file", exists=True, mandatory=True, argstr="-i %s") + out_file = File(desc="Output file", genfile=True, hash_files=False, argstr="-o %s") mask_file = File( - desc="Mask image file", exists=True, mandatory=True, argstr="-m %s") + desc="Mask image file", exists=True, mandatory=True, argstr="-m %s" + ) pvc = traits.Enum( - pvc_methods, desc="Desired PVC method", mandatory=True, argstr="-p %s") + pvc_methods, + mandatory=True, + argstr="-p %s", + desc="""\ +Desired PVC method: + + * Geometric transfer matrix -- ``GTM`` + * Labbe approach -- ``LABBE`` + * Richardson-Lucy -- ``RL`` + * Van-Cittert -- ``VC`` + * Region-based voxel-wise correction -- ``RBV`` + * RBV with Labbe -- ``LABBE+RBV`` + * RBV with Van-Cittert -- ``RBV+VC`` + * RBV with Richardson-Lucy -- ``RBV+RL`` + * RBV with Labbe and Van-Cittert -- ``LABBE+RBV+VC`` + * RBV with Labbe and Richardson-Lucy -- ``LABBE+RBV+RL`` + * Multi-target correction -- ``MTC`` + * MTC with Labbe -- ``LABBE+MTC`` + * MTC with Van-Cittert -- ``MTC+VC`` + * MTC with Richardson-Lucy -- ``MTC+RL`` + * MTC with Labbe and Van-Cittert -- ``LABBE+MTC+VC`` + * MTC with Labbe and Richardson-Lucy -- ``LABBE+MTC+RL`` + * Iterative Yang -- ``IY`` + * Iterative Yang with Van-Cittert -- ``IY+VC`` + * Iterative Yang with Richardson-Lucy -- ``IY+RL`` + * Muller Gartner -- ``MG`` + * Muller Gartner with Van-Cittert -- ``MG+VC`` + * Muller Gartner with Richardson-Lucy -- ``MG+RL`` + * Single-target correction -- ``STC`` + +""", + ) fwhm_x = traits.Float( desc="The full-width at half maximum in mm along x-axis", mandatory=True, - argstr="-x %.4f") + argstr="-x %.4f", + ) fwhm_y = traits.Float( desc="The full-width at half maximum in mm along y-axis", mandatory=True, - argstr="-y %.4f") + argstr="-y %.4f", + ) fwhm_z = traits.Float( desc="The full-width at half maximum in mm along z-axis", mandatory=True, - argstr="-z %.4f") + argstr="-z %.4f", + ) debug = traits.Bool( desc="Prints debug information", usedefault=True, default_value=False, - argstr="-d") + argstr="-d", + ) n_iter = traits.Int( - desc="Number of iterations", default_value=10, usedefault=True, - argstr="-n %d") + desc="Number of iterations", default_value=10, usedefault=True, argstr="-n %d" + ) n_deconv = traits.Int( desc="Number of deconvolution iterations", default_value=10, usedefault=True, - argstr="-k %d") + argstr="-k %d", + ) alpha = 
traits.Float( - desc="Alpha value", default_value=1.5, usedefault=True, - argstr="-a %.4f") + desc="Alpha value", default_value=1.5, usedefault=True, argstr="-a %.4f" + ) stop_crit = traits.Float( - desc="Stopping criterion", default_value=0.01, usedefault=True, - argstr="-a %.4f") + desc="Stopping criterion", default_value=0.01, usedefault=True, argstr="-s %.4f" + ) class PETPVCOutputSpec(TraitedSpec): @@ -64,75 +123,11 @@ class PETPVCOutputSpec(TraitedSpec): class PETPVC(CommandLine): - """ Use PETPVC for partial volume correction of PET images. + """Use PETPVC for partial volume correction of PET images. - PETPVC is a software from the Nuclear Medicine Department + PETPVC ([1]_, [2]_) is a software from the Nuclear Medicine Department of the UCL University Hospital, London, UK. - Its source code is here: https://github.com/UCL/PETPVC - - The methods that it implement are explained here: - K. Erlandsson, I. Buvat, P. H. Pretorius, B. A. Thomas, and B. F. Hutton, - "A review of partial volume correction techniques for emission tomography - and their applications in neurology, cardiology and oncology," Phys. Med. - Biol., vol. 57, no. 21, p. R119, 2012. - - Its command line help shows this: - - -i --input < filename > - = PET image file - -o --output < filename > - = Output file - [ -m --mask < filename > ] - = Mask image file - -p --pvc < keyword > - = Desired PVC method - -x < X > - = The full-width at half maximum in mm along x-axis - -y < Y > - = The full-width at half maximum in mm along y-axis - -z < Z > - = The full-width at half maximum in mm along z-axis - [ -d --debug ] - = Prints debug information - [ -n --iter [ Val ] ] - = Number of iterations - With: Val (Default = 10) - [ -k [ Val ] ] - = Number of deconvolution iterations - With: Val (Default = 10) - [ -a --alpha [ aval ] ] - = Alpha value - With: aval (Default = 1.5) - [ -s --stop [ stopval ] ] - = Stopping criterion - With: stopval (Default = 0.01) - - Technique - keyword - ------------------- - - Geometric transfer matrix - "GTM" - - Labbe approach - "LABBE" - - Richardson-Lucy - "RL" - - Van-Cittert - "VC" - - Region-based voxel-wise correction - "RBV" - - RBV with Labbe - "LABBE+RBV" - - RBV with Van-Cittert - "RBV+VC" - - RBV with Richardson-Lucy - "RBV+RL" - - RBV with Labbe and Van-Cittert - "LABBE+RBV+VC" - - RBV with Labbe and Richardson-Lucy- "LABBE+RBV+RL" - - Multi-target correction - "MTC" - - MTC with Labbe - "LABBE+MTC" - - MTC with Van-Cittert - "MTC+VC" - - MTC with Richardson-Lucy - "MTC+RL" - - MTC with Labbe and Van-Cittert - "LABBE+MTC+VC" - - MTC with Labbe and Richardson-Lucy- "LABBE+MTC+RL" - - Iterative Yang - "IY" - - Iterative Yang with Van-Cittert - "IY+VC" - - Iterative Yang with Richardson-Lucy - "IY+RL" - - Muller Gartner - "MG" - - Muller Gartner with Van-Cittert - "MG+VC" - - Muller Gartner with Richardson-Lucy - "MG+RL" - Examples -------- >>> from ..testing import example_data @@ -146,54 +141,63 @@ class PETPVC(CommandLine): >>> pvc.inputs.fwhm_y = 2.0 >>> pvc.inputs.fwhm_z = 2.0 >>> outs = pvc.run() #doctest: +SKIP + + References + ---------- + .. [1] K. Erlandsson, I. Buvat, P. H. Pretorius, B. A. Thomas, and B. F. Hutton, + "A review of partial volume correction techniques for emission tomography + and their applications in neurology, cardiology and oncology," Phys. Med. + Biol., vol. 57, no. 21, p. R119, 2012. + .. 
[2] https://github.com/UCL/PETPVC + """ + input_spec = PETPVCInputSpec output_spec = PETPVCOutputSpec - _cmd = 'petpvc' - - references_ = [{ - 'entry': - BibTeX( - "@article{0031-9155-61-22-7975," - "author={Benjamin A Thomas and Vesna Cuplov and Alexandre Bousse and " - "Adriana Mendes and Kris Thielemans and Brian F Hutton and Kjell Erlandsson}," - "title={PETPVC: a toolbox for performing partial volume correction " - "techniques in positron emission tomography}," - "journal={Physics in Medicine and Biology}," - "volume={61}," - "number={22}," - "pages={7975}," - "url={http://stacks.iop.org/0031-9155/61/i=22/a=7975}," - "doi={https://doi.org/10.1088/0031-9155/61/22/7975}," - "year={2016}," - "}"), - 'description': - 'PETPVC software implementation publication', - 'tags': ['implementation'], - }] + _cmd = "petpvc" + + _references = [ + { + "entry": BibTeX( + "@article{0031-9155-61-22-7975," + "author={Benjamin A Thomas and Vesna Cuplov and Alexandre Bousse and " + "Adriana Mendes and Kris Thielemans and Brian F Hutton and Kjell Erlandsson}," + "title={PETPVC: a toolbox for performing partial volume correction " + "techniques in positron emission tomography}," + "journal={Physics in Medicine and Biology}," + "volume={61}," + "number={22}," + "pages={7975}," + "url={http://stacks.iop.org/0031-9155/61/i=22/a=7975}," + "doi={https://doi.org/10.1088/0031-9155/61/22/7975}," + "year={2016}," + "}" + ), + "description": "PETPVC software implementation publication", + "tags": ["implementation"], + } + ] def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']): + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): method_name = self.inputs.pvc.lower() - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix='_{}_pvc'.format(method_name)) + outputs["out_file"] = self._gen_fname( + self.inputs.in_file, suffix=f"_{method_name}_pvc" + ) - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs - def _gen_fname(self, - basename, - cwd=None, - suffix=None, - change_ext=True, - ext='.nii.gz'): + def _gen_fname( + self, basename, cwd=None, suffix=None, change_ext=True, ext=".nii.gz" + ): """Generate a filename based on the given parameters. The filename will take the form: cwd/basename. - If change_ext is True, it will use the extentions specified in - intputs.output_type. + If change_ext is True, it will use the extensions specified in + inputs.output_type. Parameters ---------- @@ -213,24 +217,23 @@ def _gen_fname(self, New filename based on given parameters. """ - if basename == '': - msg = 'Unable to generate filename for command %s. ' % self.cmd - msg += 'basename is not set!' + if basename == "": + msg = "Unable to generate filename for command %s. " % self.cmd + msg += "basename is not set!" 
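+ # fail fast: without a basename there is nothing to derive an output filename from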
raise ValueError(msg) if cwd is None: cwd = os.getcwd() if change_ext: if suffix: - suffix = ''.join((suffix, ext)) + suffix = f"{suffix}{ext}" else: suffix = ext if suffix is None: - suffix = '' - fname = fname_presuffix( - basename, suffix=suffix, use_ext=False, newpath=cwd) + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) return fname def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None diff --git a/nipype/interfaces/quickshear.py b/nipype/interfaces/quickshear.py index b1317c3599..8dee91e2c2 100644 --- a/nipype/interfaces/quickshear.py +++ b/nipype/interfaces/quickshear.py @@ -1,7 +1,4 @@ -# -*- coding: utf-8 -*- -""" Quickshear is a simple geometric defacing algorithm -""" -from __future__ import unicode_literals +"""Quickshear is a simple geometric defacing algorithm.""" from .base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File from ..external.due import BibTeX @@ -11,27 +8,26 @@ class QuickshearInputSpec(CommandLineInputSpec): in_file = File( exists=True, position=1, - argstr='%s', + argstr="%s", mandatory=True, - desc="neuroimage to deface") + desc="neuroimage to deface", + ) mask_file = File( - exists=True, - position=2, - argstr='%s', - desc="brain mask", - mandatory=True) + exists=True, position=2, argstr="%s", desc="brain mask", mandatory=True + ) out_file = File( name_template="%s_defaced", - name_source='in_file', + name_source="in_file", position=3, - argstr='%s', + argstr="%s", desc="defaced output image", - keep_extension=True) + keep_extension=True, + ) buff = traits.Int( position=4, - argstr='%d', - desc='buffer size (in voxels) between shearing ' - 'plane and the brain') + argstr="%d", + desc="buffer size (in voxels) between shearing plane and the brain", + ) class QuickshearOutputSpec(TraitedSpec): @@ -73,19 +69,23 @@ class Quickshear(CommandLine): >>> inputnode.inputs.in_file = 'T1.nii' >>> res = deface_wf.run() # doctest: +SKIP """ - _cmd = 'quickshear' + + _cmd = "quickshear" input_spec = QuickshearInputSpec output_spec = QuickshearOutputSpec - references_ = [{ - 'entry': - BibTeX('@inproceedings{Schimke2011,' - 'address = {San Francisco},' - 'author = {Schimke, Nakeisha and Hale, John},' - 'booktitle = {Proceedings of the 2nd USENIX Conference on ' - 'Health Security and Privacy},' - 'title = {{Quickshear Defacing for Neuroimages}},' - 'year = {2011},' - 'month = sep}'), - 'tags': ['implementation'], - }] + _references = [ + { + "entry": BibTeX( + "@inproceedings{Schimke2011," + "address = {San Francisco}," + "author = {Schimke, Nakeisha and Hale, John}," + "booktitle = {Proceedings of the 2nd USENIX Conference on " + "Health Security and Privacy}," + "title = {{Quickshear Defacing for Neuroimages}}," + "year = {2011}," + "month = sep}" + ), + "tags": ["implementation"], + } + ] diff --git a/nipype/interfaces/r.py b/nipype/interfaces/r.py new file mode 100644 index 0000000000..1894d00960 --- /dev/null +++ b/nipype/interfaces/r.py @@ -0,0 +1,113 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Interfaces to run R scripts.""" +import os +from shutil import which + +from .base import ( + CommandLineInputSpec, + isdefined, + CommandLine, + traits, + File, +) + + +def get_r_command(): + if "NIPYPE_NO_R" in os.environ: + return None + r_cmd = os.getenv("RCMD", default="R") + + return r_cmd if which(r_cmd) else 
None + + +no_r = get_r_command() is None + + +class RInputSpec(CommandLineInputSpec): + """Basic expected inputs to R interface""" + + script = traits.Str( + argstr='-e "%s"', desc="R code to run", mandatory=True, position=-1 + ) + # non-commandline options + rfile = traits.Bool(True, desc="Run R using R script", usedefault=True) + script_file = File( + "pyscript.R", usedefault=True, desc="Name of file to write R code to" + ) + + +class RCommand(CommandLine): + """Interface that runs R code + + >>> import nipype.interfaces.r as r + >>> r = r.RCommand(rfile=False) # doctest: +SKIP + >>> r.inputs.script = "Sys.getenv('USER')" # doctest: +SKIP + >>> out = r.run() # doctest: +SKIP + """ + + _cmd = get_r_command() + input_spec = RInputSpec + + def __init__(self, r_cmd=None, **inputs): + """initializes interface to r + (default 'R') + """ + super().__init__(**inputs) + if r_cmd and isdefined(r_cmd): + self._cmd = r_cmd + + # For r commands force all output to be returned since r + # does not have a clean way of notifying an error + self.terminal_output = "allatonce" + + def set_default_r_cmd(self, r_cmd): + """Set the default R command line for R classes. + + This method is used to set values for all R + subclasses. + """ + self._cmd = r_cmd + + def set_default_rfile(self, rfile): + """Set the default R script file format for R classes. + + This method is used to set values for all R + subclasses. + """ + self._rfile = rfile + + def _run_interface(self, runtime): + self.terminal_output = "allatonce" + runtime = super()._run_interface(runtime) + if "R code threw an exception" in runtime.stderr: + self.raise_exception(runtime) + return runtime + + def _format_arg(self, name, trait_spec, value): + if name in ["script"]: + argstr = trait_spec.argstr + return self._gen_r_command(argstr, value) + return super()._format_arg(name, trait_spec, value) + + def _gen_r_command(self, argstr, script_lines): + """Generates commands and, if rfile specified, writes it to disk.""" + if not self.inputs.rfile: + # replace newlines with ;, strip comments + script = "; ".join( + [ + line + for line in script_lines.split("\n") + if not line.strip().startswith("#") + ] + ) + # escape " and $ + script = script.replace('"', '\\"') + script = script.replace("$", "\\$") + else: + script_path = os.path.join(os.getcwd(), self.inputs.script_file) + with open(script_path, "w") as rfile: + rfile.write(script_lines) + script = "source('%s')" % script_path + + return argstr % script diff --git a/examples/fmri_openfmri.py b/nipype/interfaces/robex/__init__.py old mode 100755 new mode 100644 similarity index 100% rename from examples/fmri_openfmri.py rename to nipype/interfaces/robex/__init__.py diff --git a/nipype/interfaces/robex/preprocess.py b/nipype/interfaces/robex/preprocess.py new file mode 100644 index 0000000000..b2e92e94cb --- /dev/null +++ b/nipype/interfaces/robex/preprocess.py @@ -0,0 +1,64 @@ +from nipype.interfaces.base import ( + TraitedSpec, + CommandLineInputSpec, + CommandLine, + File, + traits, +) + + +class RobexInputSpec(CommandLineInputSpec): + in_file = File( + desc="Input volume", exists=True, mandatory=True, position=0, argstr="%s" + ) + out_file = File( + desc="Output volume", + position=1, + argstr="%s", + hash_files=False, + name_template='%s_brain', + name_source=["in_file"], + keep_extension=True, + ) + out_mask = File( + desc="Output mask", + position=2, + argstr="%s", + hash_files=False, + name_template='%s_brainmask', + name_source=["in_file"], + keep_extension=True, + ) + seed = 
traits.Int(desc="Seed for random number generator", position=3, argstr="%i") + + +class RobexOutputSpec(TraitedSpec): + out_file = File(desc="Output volume") + out_mask = File(desc="Output mask") + + +class RobexSegment(CommandLine): + """ + + ROBEX is an automatic whole-brain extraction tool for T1-weighted MRI data (commonly known as skull stripping). + ROBEX aims for robust skull-stripping across datasets with no parameter settings. It fits a triangular mesh, + constrained by a shape model, to the probabilistic output of a supervised brain boundary classifier. + Because the shape model cannot perfectly accommodate unseen cases, a small free deformation is subsequently allowed. + The deformation is optimized using graph cuts. + The method ROBEX is based on was published in IEEE Transactions on Medical Imaging; + please visit the website http://www.jeiglesias.com to download the paper. + + Examples + -------- + >>> from nipype.interfaces.robex.preprocess import RobexSegment + >>> robex = RobexSegment() + >>> robex.inputs.in_file = 'structural.nii' + >>> robex.cmdline + 'runROBEX.sh structural.nii structural_brain.nii structural_brainmask.nii' + >>> robex.run() # doctest: +SKIP + + """ + + input_spec = RobexInputSpec + output_spec = RobexOutputSpec + _cmd = 'runROBEX.sh' diff --git a/nipype/interfaces/robex/tests/__init__.py b/nipype/interfaces/robex/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/interfaces/robex/tests/test_auto_RobexSegment.py b/nipype/interfaces/robex/tests/test_auto_RobexSegment.py new file mode 100644 index 0000000000..caccd469e3 --- /dev/null +++ b/nipype/interfaces/robex/tests/test_auto_RobexSegment.py @@ -0,0 +1,63 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..preprocess import RobexSegment + + +def test_RobexSegment_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=0, + ), + out_file=dict( + argstr="%s", + extensions=None, + hash_files=False, + keep_extension=True, + name_source=["in_file"], + name_template="%s_brain", + position=1, + ), + out_mask=dict( + argstr="%s", + extensions=None, + hash_files=False, + keep_extension=True, + name_source=["in_file"], + name_template="%s_brainmask", + position=2, + ), + seed=dict( + argstr="%i", + position=3, + ), + ) + inputs = RobexSegment.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_RobexSegment_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + out_mask=dict( + extensions=None, + ), + ) + outputs = RobexSegment.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/semtools/__init__.py b/nipype/interfaces/semtools/__init__.py index 14473b8381..317273cfd8 100644 --- a/nipype/interfaces/semtools/__init__.py +++ b/nipype/interfaces/semtools/__init__.py @@ -1,5 +1,5 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import +"""SEM Tools are useful tools for Structural Equation Modeling.""" + from .diffusion import * from .featurecreator import GenerateCsfClippedFromClassifiedImage from .segmentation import * diff --git a/nipype/interfaces/semtools/brains/__init__.py 
b/nipype/interfaces/semtools/brains/__init__.py index ebfab84bf3..9177db7e3d 100644 --- a/nipype/interfaces/semtools/brains/__init__.py +++ b/nipype/interfaces/semtools/brains/__init__.py @@ -1,6 +1,7 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .segmentation import SimilarityIndex, BRAINSTalairach, BRAINSTalairachMask -from .utilities import (HistogramMatchingFilter, GenerateEdgeMapImage, - GeneratePurePlugMask) +from .utilities import ( + HistogramMatchingFilter, + GenerateEdgeMapImage, + GeneratePurePlugMask, +) from .classify import BRAINSPosteriorToContinuousClass diff --git a/nipype/interfaces/semtools/brains/classify.py b/nipype/interfaces/semtools/brains/classify.py index 89bb74f039..149e63c95e 100644 --- a/nipype/interfaces/semtools/brains/classify.py +++ b/nipype/interfaces/semtools/brains/classify.py @@ -1,76 +1,80 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class BRAINSPosteriorToContinuousClassInputSpec(CommandLineInputSpec): inputWhiteVolume = File( desc="White Matter Posterior Volume", exists=True, - argstr="--inputWhiteVolume %s") + argstr="--inputWhiteVolume %s", + ) inputBasalGmVolume = File( desc="Basal Grey Matter Posterior Volume", exists=True, - argstr="--inputBasalGmVolume %s") + argstr="--inputBasalGmVolume %s", + ) inputSurfaceGmVolume = File( desc="Surface Grey Matter Posterior Volume", exists=True, - argstr="--inputSurfaceGmVolume %s") + argstr="--inputSurfaceGmVolume %s", + ) inputCsfVolume = File( - desc="CSF Posterior Volume", exists=True, argstr="--inputCsfVolume %s") + desc="CSF Posterior Volume", exists=True, argstr="--inputCsfVolume %s" + ) inputVbVolume = File( - desc="Venous Blood Posterior Volume", - exists=True, - argstr="--inputVbVolume %s") + desc="Venous Blood Posterior Volume", exists=True, argstr="--inputVbVolume %s" + ) inputCrblGmVolume = File( desc="Cerebellum Grey Matter Posterior Volume", exists=True, - argstr="--inputCrblGmVolume %s") + argstr="--inputCrblGmVolume %s", + ) inputCrblWmVolume = File( desc="Cerebellum White Matter Posterior Volume", exists=True, - argstr="--inputCrblWmVolume %s") + argstr="--inputCrblWmVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Continuous Tissue Classified Image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class BRAINSPosteriorToContinuousClassOutputSpec(TraitedSpec): - outputVolume = File( - desc="Output Continuous Tissue Classified Image", exists=True) + outputVolume = File(desc="Output Continuous Tissue Classified Image", exists=True) class BRAINSPosteriorToContinuousClass(SEMLikeCommandLine): """title: Tissue Classification -category: BRAINS.Classify - -description: This program will generate an 8-bit continuous tissue classified image based on BRAINSABC posterior images. + category: BRAINS.Classify -version: 3.0 + description: This program will generate an 8-bit continuous tissue classified image based on BRAINSABC posterior images. 
-documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/brains:BRAINSClassify + version: 3.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/brains:BRAINSClassify -contributor: Vincent A. Magnotta + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: Funding for this work was provided by NIH/NINDS award NS050568 + contributor: Vincent A. Magnotta -""" + acknowledgements: Funding for this work was provided by NIH/NINDS award NS050568 + """ input_spec = BRAINSPosteriorToContinuousClassInputSpec output_spec = BRAINSPosteriorToContinuousClassOutputSpec _cmd = " BRAINSPosteriorToContinuousClass " - _outputs_filenames = {'outputVolume': 'outputVolume'} + _outputs_filenames = {"outputVolume": "outputVolume"} _redirect_x = False diff --git a/nipype/interfaces/semtools/brains/segmentation.py b/nipype/interfaces/semtools/brains/segmentation.py index fae5e4f1a2..a6dbfbd449 100644 --- a/nipype/interfaces/semtools/brains/segmentation.py +++ b/nipype/interfaces/semtools/brains/segmentation.py @@ -1,32 +1,34 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class SimilarityIndexInputSpec(CommandLineInputSpec): outputCSVFilename = File( - desc="output CSV Filename", - exists=True, - argstr="--outputCSVFilename %s") + desc="output CSV Filename", exists=True, argstr="--outputCSVFilename %s" + ) ANNContinuousVolume = File( desc="ANN Continuous volume to be compared to the manual volume", exists=True, - argstr="--ANNContinuousVolume %s") + argstr="--ANNContinuousVolume %s", + ) inputManualVolume = File( desc="input manual(reference) volume", exists=True, - argstr="--inputManualVolume %s") + argstr="--inputManualVolume %s", + ) thresholdInterval = traits.Float( - desc= - "Threshold interval to compute similarity index between zero and one", - argstr="--thresholdInterval %f") + desc="Threshold interval to compute similarity index between zero and one", + argstr="--thresholdInterval %f", + ) class SimilarityIndexOutputSpec(TraitedSpec): @@ -36,17 +38,16 @@ class SimilarityIndexOutputSpec(TraitedSpec): class SimilarityIndex(SEMLikeCommandLine): """title: BRAINSCut:SimilarityIndexComputation -category: BRAINS.Segmentation - -description: Automatic analysis of BRAINSCut Output + category: BRAINS.Segmentation -version: 1.0 + description: Automatic analysis of BRAINSCut Output -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + version: 1.0 -contributor: Eunyoung Regin Kim + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: Eunyoung Regin Kim + """ input_spec = SimilarityIndexInputSpec output_spec = SimilarityIndexOutputSpec @@ -57,71 +58,71 @@ class SimilarityIndex(SEMLikeCommandLine): class BRAINSTalairachInputSpec(CommandLineInputSpec): AC = InputMultiPath( - traits.Float, desc="Location of AC Point ", sep=",", argstr="--AC %s") + traits.Float, desc="Location of AC Point ", sep=",", argstr="--AC %s" + ) ACisIndex = traits.Bool(desc="AC Point is Index", argstr="--ACisIndex ") PC = 
InputMultiPath( - traits.Float, desc="Location of PC Point ", sep=",", argstr="--PC %s") + traits.Float, desc="Location of PC Point ", sep=",", argstr="--PC %s" + ) PCisIndex = traits.Bool(desc="PC Point is Index", argstr="--PCisIndex ") SLA = InputMultiPath( - traits.Float, - desc="Location of SLA Point ", - sep=",", - argstr="--SLA %s") + traits.Float, desc="Location of SLA Point ", sep=",", argstr="--SLA %s" + ) SLAisIndex = traits.Bool(desc="SLA Point is Index", argstr="--SLAisIndex ") IRP = InputMultiPath( - traits.Float, - desc="Location of IRP Point ", - sep=",", - argstr="--IRP %s") + traits.Float, desc="Location of IRP Point ", sep=",", argstr="--IRP %s" + ) IRPisIndex = traits.Bool(desc="IRP Point is Index", argstr="--IRPisIndex ") inputVolume = File( desc="Input image used to define physical space of images", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputBox = traits.Either( traits.Bool, File(), hash_files=False, desc="Name of the resulting Talairach Bounding Box file", - argstr="--outputBox %s") + argstr="--outputBox %s", + ) outputGrid = traits.Either( traits.Bool, File(), hash_files=False, desc="Name of the resulting Talairach Grid file", - argstr="--outputGrid %s") + argstr="--outputGrid %s", + ) class BRAINSTalairachOutputSpec(TraitedSpec): outputBox = File( - desc="Name of the resulting Talairach Bounding Box file", exists=True) - outputGrid = File( - desc="Name of the resulting Talairach Grid file", exists=True) + desc="Name of the resulting Talairach Bounding Box file", exists=True + ) + outputGrid = File(desc="Name of the resulting Talairach Grid file", exists=True) class BRAINSTalairach(SEMLikeCommandLine): """title: BRAINS Talairach -category: BRAINS.Segmentation + category: BRAINS.Segmentation -description: This program creates a VTK structured grid defining the Talairach coordinate system based on four points: AC, PC, IRP, and SLA. The resulting structred grid can be written as either a classic VTK file or the new VTK XML file format. Two representations of the resulting grid can be written. The first is a bounding box representation that also contains the location of the AC and PC points. The second representation is the full Talairach grid representation that includes the additional rows of boxes added to the inferior allowing full coverage of the cerebellum. + description: This program creates a VTK structured grid defining the Talairach coordinate system based on four points: AC, PC, IRP, and SLA. The resulting structured grid can be written as either a classic VTK file or the new VTK XML file format. Two representations of the resulting grid can be written. The first is a bounding box representation that also contains the location of the AC and PC points. The second representation is the full Talairach grid representation that includes the additional rows of boxes added to the inferior allowing full coverage of the cerebellum. 
-version: 0.1 + version: 0.1 -documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/brains:BRAINSTalairach + documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/brains:BRAINSTalairach -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -contributor: Steven Dunn and Vincent Magnotta + contributor: Steven Dunn and Vincent Magnotta -acknowledgements: Funding for this work was provided by NIH/NINDS award NS050568 - -""" + acknowledgements: Funding for this work was provided by NIH/NINDS award NS050568 + """ input_spec = BRAINSTalairachInputSpec output_spec = BRAINSTalairachOutputSpec _cmd = " BRAINSTalairach " - _outputs_filenames = {'outputGrid': 'outputGrid', 'outputBox': 'outputBox'} + _outputs_filenames = {"outputGrid": "outputGrid", "outputBox": "outputBox"} _redirect_x = False @@ -129,57 +130,61 @@ class BRAINSTalairachMaskInputSpec(CommandLineInputSpec): inputVolume = File( desc="Input image used to define physical space of resulting mask", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) talairachParameters = File( desc="Name of the Talairach parameter file.", exists=True, - argstr="--talairachParameters %s") + argstr="--talairachParameters %s", + ) talairachBox = File( - desc="Name of the Talairach box file.", - exists=True, - argstr="--talairachBox %s") + desc="Name of the Talairach box file.", exists=True, argstr="--talairachBox %s" + ) hemisphereMode = traits.Enum( "left", "right", "both", desc="Mode for box creation: left, right, both", - argstr="--hemisphereMode %s") + argstr="--hemisphereMode %s", + ) expand = traits.Bool( - desc="Expand exterior box to include surface CSF", argstr="--expand ") + desc="Expand exterior box to include surface CSF", argstr="--expand " + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output filename for the resulting binary image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class BRAINSTalairachMaskOutputSpec(TraitedSpec): outputVolume = File( - desc="Output filename for the resulting binary image", exists=True) + desc="Output filename for the resulting binary image", exists=True + ) class BRAINSTalairachMask(SEMLikeCommandLine): """title: Talairach Mask -category: BRAINS.Segmentation - -description: This program creates a binary image representing the specified Talairach region. The input is an example image to define the physical space for the resulting image, the Talairach grid representation in VTK format, and the file containing the Talairach box definitions to be generated. These can be combined in BRAINS to create a label map using the procedure Brains::WorkupUtils::CreateLabelMapFromBinaryImages. + category: BRAINS.Segmentation -version: 0.1 + description: This program creates a binary image representing the specified Talairach region. The input is an example image to define the physical space for the resulting image, the Talairach grid representation in VTK format, and the file containing the Talairach box definitions to be generated. These can be combined in BRAINS to create a label map using the procedure Brains::WorkupUtils::CreateLabelMapFromBinaryImages. 
-documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/brains:BRAINSTalairachMask + version: 0.1 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/brains:BRAINSTalairachMask -contributor: Steven Dunn and Vincent Magnotta + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: Funding for this work was provided by NIH/NINDS award NS050568 + contributor: Steven Dunn and Vincent Magnotta -""" + acknowledgements: Funding for this work was provided by NIH/NINDS award NS050568 + """ input_spec = BRAINSTalairachMaskInputSpec output_spec = BRAINSTalairachMaskOutputSpec _cmd = " BRAINSTalairachMask " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/brains/tests/__init__.py b/nipype/interfaces/semtools/brains/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/brains/tests/__init__.py +++ b/nipype/interfaces/semtools/brains/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py index 81a22bfe38..9098ee2640 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py @@ -1,24 +1,46 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..classify import BRAINSPosteriorToContinuousClass def test_BRAINSPosteriorToContinuousClass_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputBasalGmVolume=dict(argstr='--inputBasalGmVolume %s', ), - inputCrblGmVolume=dict(argstr='--inputCrblGmVolume %s', ), - inputCrblWmVolume=dict(argstr='--inputCrblWmVolume %s', ), - inputCsfVolume=dict(argstr='--inputCsfVolume %s', ), - inputSurfaceGmVolume=dict(argstr='--inputSurfaceGmVolume %s', ), - inputVbVolume=dict(argstr='--inputVbVolume %s', ), - inputWhiteVolume=dict(argstr='--inputWhiteVolume %s', ), + inputBasalGmVolume=dict( + argstr="--inputBasalGmVolume %s", + extensions=None, + ), + inputCrblGmVolume=dict( + argstr="--inputCrblGmVolume %s", + extensions=None, + ), + inputCrblWmVolume=dict( + argstr="--inputCrblWmVolume %s", + extensions=None, + ), + inputCsfVolume=dict( + argstr="--inputCsfVolume %s", + extensions=None, + ), + inputSurfaceGmVolume=dict( + argstr="--inputSurfaceGmVolume %s", + extensions=None, + ), + inputVbVolume=dict( + argstr="--inputVbVolume %s", + extensions=None, + ), + inputWhiteVolume=dict( + argstr="--inputWhiteVolume %s", + extensions=None, + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -27,8 +49,14 @@ def test_BRAINSPosteriorToContinuousClass_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSPosteriorToContinuousClass_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = BRAINSPosteriorToContinuousClass.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairach.py b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairach.py index ac589ad6dc..195ebdcad0 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairach.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairach.py @@ -1,42 +1,54 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..segmentation import BRAINSTalairach def test_BRAINSTalairach_inputs(): input_map = dict( AC=dict( - argstr='--AC %s', - sep=',', + argstr="--AC %s", + sep=",", + ), + ACisIndex=dict( + argstr="--ACisIndex ", ), - ACisIndex=dict(argstr='--ACisIndex ', ), IRP=dict( - argstr='--IRP %s', - sep=',', + argstr="--IRP %s", + sep=",", + ), + IRPisIndex=dict( + argstr="--IRPisIndex ", ), - IRPisIndex=dict(argstr='--IRPisIndex ', ), PC=dict( - argstr='--PC %s', - sep=',', + argstr="--PC %s", + sep=",", + ), + PCisIndex=dict( + argstr="--PCisIndex ", ), - PCisIndex=dict(argstr='--PCisIndex ', ), SLA=dict( - argstr='--SLA %s', - sep=',', + argstr="--SLA %s", + sep=",", + ), + SLAisIndex=dict( + argstr="--SLAisIndex ", + ), + args=dict( + argstr="%s", ), - SLAisIndex=dict(argstr='--SLAisIndex ', ), - args=dict(argstr='%s', ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), outputBox=dict( - argstr='--outputBox %s', + argstr="--outputBox %s", hash_files=False, ), outputGrid=dict( - argstr='--outputGrid %s', + argstr="--outputGrid %s", hash_files=False, ), ) @@ -45,10 +57,16 @@ def test_BRAINSTalairach_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSTalairach_outputs(): output_map = dict( - outputBox=dict(), - outputGrid=dict(), + outputBox=dict( + extensions=None, + ), + outputGrid=dict( + extensions=None, + ), ) outputs = BRAINSTalairach.output_spec() diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairachMask.py b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairachMask.py index 5ada1576e7..2470e42f47 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairachMask.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairachMask.py @@ -1,32 +1,52 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..segmentation import BRAINSTalairachMask def test_BRAINSTalairachMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - expand=dict(argstr='--expand ', ), - hemisphereMode=dict(argstr='--hemisphereMode %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), + expand=dict( + argstr="--expand ", + ), + hemisphereMode=dict( + argstr="--hemisphereMode %s", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - talairachBox=dict(argstr='--talairachBox %s', ), - talairachParameters=dict(argstr='--talairachParameters %s', ), + talairachBox=dict( + argstr="--talairachBox %s", + extensions=None, + ), + talairachParameters=dict( + argstr="--talairachParameters %s", + extensions=None, + ), ) inputs = BRAINSTalairachMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSTalairachMask_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = BRAINSTalairachMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_GenerateEdgeMapImage.py b/nipype/interfaces/semtools/brains/tests/test_auto_GenerateEdgeMapImage.py index eb4bdcffae..218c67a4b0 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_GenerateEdgeMapImage.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_GenerateEdgeMapImage.py @@ -1,40 +1,62 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utilities import GenerateEdgeMapImage def test_GenerateEdgeMapImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputMRVolumes=dict(argstr='--inputMRVolumes %s...', ), - inputMask=dict(argstr='--inputMask %s', ), - lowerPercentileMatching=dict(argstr='--lowerPercentileMatching %f', ), - maximumOutputRange=dict(argstr='--maximumOutputRange %d', ), - minimumOutputRange=dict(argstr='--minimumOutputRange %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputMRVolumes=dict( + argstr="--inputMRVolumes %s...", + ), + inputMask=dict( + argstr="--inputMask %s", + extensions=None, + ), + lowerPercentileMatching=dict( + argstr="--lowerPercentileMatching %f", + ), + maximumOutputRange=dict( + argstr="--maximumOutputRange %d", + ), + minimumOutputRange=dict( + argstr="--minimumOutputRange %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputEdgeMap=dict( - argstr='--outputEdgeMap %s', + argstr="--outputEdgeMap %s", hash_files=False, ), outputMaximumGradientImage=dict( - argstr='--outputMaximumGradientImage %s', + argstr="--outputMaximumGradientImage %s", hash_files=False, ), - upperPercentileMatching=dict(argstr='--upperPercentileMatching %f', ), + upperPercentileMatching=dict( + argstr="--upperPercentileMatching %f", + ), ) inputs = GenerateEdgeMapImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateEdgeMapImage_outputs(): output_map = dict( - outputEdgeMap=dict(), - outputMaximumGradientImage=dict(), + outputEdgeMap=dict( + extensions=None, + ), + outputMaximumGradientImage=dict( + extensions=None, + ), ) outputs = GenerateEdgeMapImage.output_spec() diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_GeneratePurePlugMask.py b/nipype/interfaces/semtools/brains/tests/test_auto_GeneratePurePlugMask.py index ae16bc8fab..e68b03dcf9 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_GeneratePurePlugMask.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_GeneratePurePlugMask.py @@ -1,33 +1,44 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utilities import GeneratePurePlugMask def test_GeneratePurePlugMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputImageModalities=dict(argstr='--inputImageModalities %s...', ), + inputImageModalities=dict( + argstr="--inputImageModalities %s...", + ), numberOfSubSamples=dict( - argstr='--numberOfSubSamples %s', - sep=',', + 
argstr="--numberOfSubSamples %s", + sep=",", ), outputMaskFile=dict( - argstr='--outputMaskFile %s', + argstr="--outputMaskFile %s", hash_files=False, ), - threshold=dict(argstr='--threshold %f', ), + threshold=dict( + argstr="--threshold %f", + ), ) inputs = GeneratePurePlugMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GeneratePurePlugMask_outputs(): - output_map = dict(outputMaskFile=dict(), ) + output_map = dict( + outputMaskFile=dict( + extensions=None, + ), + ) outputs = GeneratePurePlugMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_HistogramMatchingFilter.py b/nipype/interfaces/semtools/brains/tests/test_auto_HistogramMatchingFilter.py index 1377072149..110aec4891 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_HistogramMatchingFilter.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_HistogramMatchingFilter.py @@ -1,36 +1,65 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utilities import HistogramMatchingFilter def test_HistogramMatchingFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - histogramAlgorithm=dict(argstr='--histogramAlgorithm %s', ), - inputBinaryVolume=dict(argstr='--inputBinaryVolume %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), + histogramAlgorithm=dict( + argstr="--histogramAlgorithm %s", + ), + inputBinaryVolume=dict( + argstr="--inputBinaryVolume %s", + extensions=None, + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfHistogramBins=dict( + argstr="--numberOfHistogramBins %d", + ), + numberOfMatchPoints=dict( + argstr="--numberOfMatchPoints %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - referenceBinaryVolume=dict(argstr='--referenceBinaryVolume %s', ), - referenceVolume=dict(argstr='--referenceVolume %s', ), - verbose=dict(argstr='--verbose ', ), - writeHistogram=dict(argstr='--writeHistogram %s', ), + referenceBinaryVolume=dict( + argstr="--referenceBinaryVolume %s", + extensions=None, + ), + referenceVolume=dict( + argstr="--referenceVolume %s", + extensions=None, + ), + verbose=dict( + argstr="--verbose ", + ), + writeHistogram=dict( + argstr="--writeHistogram %s", + ), ) inputs = HistogramMatchingFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_HistogramMatchingFilter_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = HistogramMatchingFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_SimilarityIndex.py b/nipype/interfaces/semtools/brains/tests/test_auto_SimilarityIndex.py index 534488ad10..881e3379de 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_SimilarityIndex.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_SimilarityIndex.py @@ -1,25 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ 
import unicode_literals from ..segmentation import SimilarityIndex def test_SimilarityIndex_inputs(): input_map = dict( - ANNContinuousVolume=dict(argstr='--ANNContinuousVolume %s', ), - args=dict(argstr='%s', ), + ANNContinuousVolume=dict( + argstr="--ANNContinuousVolume %s", + extensions=None, + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputManualVolume=dict(argstr='--inputManualVolume %s', ), - outputCSVFilename=dict(argstr='--outputCSVFilename %s', ), - thresholdInterval=dict(argstr='--thresholdInterval %f', ), + inputManualVolume=dict( + argstr="--inputManualVolume %s", + extensions=None, + ), + outputCSVFilename=dict( + argstr="--outputCSVFilename %s", + extensions=None, + ), + thresholdInterval=dict( + argstr="--thresholdInterval %f", + ), ) inputs = SimilarityIndex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SimilarityIndex_outputs(): output_map = dict() outputs = SimilarityIndex.output_spec() diff --git a/nipype/interfaces/semtools/brains/utilities.py b/nipype/interfaces/semtools/brains/utilities.py index d794c9c587..3b5596e6d7 100644 --- a/nipype/interfaces/semtools/brains/utilities.py +++ b/nipype/interfaces/semtools/brains/utilities.py @@ -1,51 +1,58 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class HistogramMatchingFilterInputSpec(CommandLineInputSpec): inputVolume = File( desc="The Input image to be computed for statistics", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) referenceVolume = File( desc="The Input image to be computed for statistics", exists=True, - argstr="--referenceVolume %s") + argstr="--referenceVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Image File Name", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) referenceBinaryVolume = File( - desc="referenceBinaryVolume", - exists=True, - argstr="--referenceBinaryVolume %s") + desc="referenceBinaryVolume", exists=True, argstr="--referenceBinaryVolume %s" + ) inputBinaryVolume = File( - desc="inputBinaryVolume", exists=True, argstr="--inputBinaryVolume %s") + desc="inputBinaryVolume", exists=True, argstr="--inputBinaryVolume %s" + ) numberOfMatchPoints = traits.Int( - desc=" number of histogram matching points", - argstr="--numberOfMatchPoints %d") + desc=" number of histogram matching points", argstr="--numberOfMatchPoints %d" + ) numberOfHistogramBins = traits.Int( - desc=" number of histogram bin", argstr="--numberOfHistogramBins %d") + desc=" number of histogram bin", argstr="--numberOfHistogramBins %d" + ) writeHistogram = traits.Str( - desc= - " decide if histogram data would be written with prefixe of the file name", - argstr="--writeHistogram %s") + desc=" decide if histogram data would be written with prefix of the file name", + argstr="--writeHistogram %s", + ) histogramAlgorithm = traits.Enum( "OtsuHistogramMatching", - desc=" histogram algrithm selection", - argstr="--histogramAlgorithm %s") + desc=" histogram 
algorithm selection", + argstr="--histogramAlgorithm %s", + ) verbose = traits.Bool( - desc=" verbose mode running for debbuging", argstr="--verbose ") + desc=" verbose mode running for debugging", argstr="--verbose " + ) class HistogramMatchingFilterOutputSpec(TraitedSpec): @@ -55,92 +62,94 @@ class HistogramMatchingFilterOutputSpec(TraitedSpec): class HistogramMatchingFilter(SEMLikeCommandLine): """title: Write Out Image Intensities -category: BRAINS.Utilities - -description: For Analysis + category: BRAINS.Utilities -version: 0.1 + description: For Analysis -contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu + version: 0.1 -""" + contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu + """ input_spec = HistogramMatchingFilterInputSpec output_spec = HistogramMatchingFilterOutputSpec _cmd = " HistogramMatchingFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GenerateEdgeMapImageInputSpec(CommandLineInputSpec): inputMRVolumes = InputMultiPath( File(exists=True), - desc= - "List of input structural MR volumes to create the maximum edgemap", - argstr="--inputMRVolumes %s...") + desc="List of input structural MR volumes to create the maximum edgemap", + argstr="--inputMRVolumes %s...", + ) inputMask = File( - desc= - "Input mask file name. If set, image histogram percentiles will be calculated within the mask", + desc="Input mask file name. If set, image histogram percentiles will be calculated within the mask", exists=True, - argstr="--inputMask %s") + argstr="--inputMask %s", + ) minimumOutputRange = traits.Int( - desc= - "Map lower quantile and below to minimum output range. It should be a small number greater than zero. Default is 1", - argstr="--minimumOutputRange %d") + desc="Map lower quantile and below to minimum output range. It should be a small number greater than zero. Default is 1", + argstr="--minimumOutputRange %d", + ) maximumOutputRange = traits.Int( - desc= - "Map upper quantile and above to maximum output range. Default is 255 that is the maximum range of unsigned char", - argstr="--maximumOutputRange %d") + desc="Map upper quantile and above to maximum output range. Default is 255 that is the maximum range of unsigned char", + argstr="--maximumOutputRange %d", + ) lowerPercentileMatching = traits.Float( - desc= - "Map lower quantile and below to minOutputRange. It should be a value between zero and one", - argstr="--lowerPercentileMatching %f") + desc="Map lower quantile and below to minOutputRange. It should be a value between zero and one", + argstr="--lowerPercentileMatching %f", + ) upperPercentileMatching = traits.Float( - desc= - "Map upper quantile and above to maxOutputRange. It should be a value between zero and one", - argstr="--upperPercentileMatching %f") + desc="Map upper quantile and above to maxOutputRange. 
It should be a value between zero and one", + argstr="--upperPercentileMatching %f", + ) outputEdgeMap = traits.Either( traits.Bool, File(), hash_files=False, desc="output edgemap file name", - argstr="--outputEdgeMap %s") + argstr="--outputEdgeMap %s", + ) outputMaximumGradientImage = traits.Either( traits.Bool, File(), hash_files=False, desc="output gradient image file name", - argstr="--outputMaximumGradientImage %s") + argstr="--outputMaximumGradientImage %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class GenerateEdgeMapImageOutputSpec(TraitedSpec): outputEdgeMap = File(desc="(required) output file name", exists=True) outputMaximumGradientImage = File( - desc="output gradient image file name", exists=True) + desc="output gradient image file name", exists=True + ) class GenerateEdgeMapImage(SEMLikeCommandLine): """title: GenerateEdgeMapImage -category: BRAINS.Utilities + category: BRAINS.Utilities -description: Automatic edgemap generation for edge-guided super-resolution reconstruction + description: Automatic edgemap generation for edge-guided super-resolution reconstruction -version: 1.0 + version: 1.0 -contributor: Ali Ghayoor - -""" + contributor: Ali Ghayoor + """ input_spec = GenerateEdgeMapImageInputSpec output_spec = GenerateEdgeMapImageOutputSpec _cmd = " GenerateEdgeMapImage " _outputs_filenames = { - 'outputEdgeMap': 'outputEdgeMap', - 'outputMaximumGradientImage': 'outputMaximumGradientImage' + "outputEdgeMap": "outputEdgeMap", + "outputMaximumGradientImage": "outputMaximumGradientImage", } _redirect_x = False @@ -149,44 +158,44 @@ class GeneratePurePlugMaskInputSpec(CommandLineInputSpec): inputImageModalities = InputMultiPath( File(exists=True), desc="List of input image file names to create pure plugs mask", - argstr="--inputImageModalities %s...") + argstr="--inputImageModalities %s...", + ) threshold = traits.Float( - desc="threshold value to define class membership", - argstr="--threshold %f") + desc="threshold value to define class membership", argstr="--threshold %f" + ) numberOfSubSamples = InputMultiPath( traits.Int, - desc= - "Number of continous index samples taken at each direction of lattice space for each plug volume", + desc="Number of continuous index samples taken at each direction of lattice space for each plug volume", sep=",", - argstr="--numberOfSubSamples %s") + argstr="--numberOfSubSamples %s", + ) outputMaskFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Output binary mask file name", - argstr="--outputMaskFile %s") + argstr="--outputMaskFile %s", + ) class GeneratePurePlugMaskOutputSpec(TraitedSpec): - outputMaskFile = File( - desc="(required) Output binary mask file name", exists=True) + outputMaskFile = File(desc="(required) Output binary mask file name", exists=True) class GeneratePurePlugMask(SEMLikeCommandLine): """title: GeneratePurePlugMask -category: BRAINS.Utilities - -description: This program gets several modality image files and returns a binary mask that defines the pure plugs + category: BRAINS.Utilities -version: 1.0 + description: This program gets several modality image files and returns a binary mask that defines the pure plugs -contributor: Ali Ghayoor + version: 1.0 -""" + contributor: Ali Ghayoor + """ input_spec = GeneratePurePlugMaskInputSpec output_spec = GeneratePurePlugMaskOutputSpec _cmd = " GeneratePurePlugMask " - _outputs_filenames = {'outputMaskFile': 'outputMaskFile'} + 
_outputs_filenames = {"outputMaskFile": "outputMaskFile"} _redirect_x = False diff --git a/nipype/interfaces/semtools/converters.py b/nipype/interfaces/semtools/converters.py index de638935e5..091f832b2b 100644 --- a/nipype/interfaces/semtools/converters.py +++ b/nipype/interfaces/semtools/converters.py @@ -1,27 +1,30 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ..base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ..base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class DWISimpleCompareInputSpec(CommandLineInputSpec): inputVolume1 = File( desc="First input volume (.nhdr or .nrrd)", exists=True, - argstr="--inputVolume1 %s") + argstr="--inputVolume1 %s", + ) inputVolume2 = File( desc="Second input volume (.nhdr or .nrrd)", exists=True, - argstr="--inputVolume2 %s") + argstr="--inputVolume2 %s", + ) checkDWIData = traits.Bool( desc="check for existence of DWI data, and if present, compare it", - argstr="--checkDWIData ") + argstr="--checkDWIData ", + ) class DWISimpleCompareOutputSpec(TraitedSpec): @@ -31,21 +34,20 @@ class DWISimpleCompareOutputSpec(TraitedSpec): class DWISimpleCompare(SEMLikeCommandLine): """title: Nrrd DWI comparison -category: Converters - -description: Compares two nrrd format DWI images and verifies that gradient magnitudes, gradient directions, measurement frame, and max B0 value are identicle. Used for testing DWIConvert. + category: Converters -version: 0.1.0.$Revision: 916 $(alpha) + description: Compares two nrrd format DWI images and verifies that gradient magnitudes, gradient directions, measurement frame, and max B0 value are identical. Used for testing DWIConvert. -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DWIConvert + version: 0.1.0.$Revision: 916 $(alpha) -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DWIConvert -contributor: Mark Scully (UIowa) + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. + contributor: Mark Scully (UIowa) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. 
+ """ input_spec = DWISimpleCompareInputSpec output_spec = DWISimpleCompareOutputSpec @@ -58,11 +60,13 @@ class DWICompareInputSpec(CommandLineInputSpec): inputVolume1 = File( desc="First input volume (.nhdr or .nrrd)", exists=True, - argstr="--inputVolume1 %s") + argstr="--inputVolume1 %s", + ) inputVolume2 = File( desc="Second input volume (.nhdr or .nrrd)", exists=True, - argstr="--inputVolume2 %s") + argstr="--inputVolume2 %s", + ) class DWICompareOutputSpec(TraitedSpec): @@ -72,21 +76,20 @@ class DWICompareOutputSpec(TraitedSpec): class DWICompare(SEMLikeCommandLine): """title: Nrrd DWI comparison -category: Converters - -description: Compares two nrrd format DWI images and verifies that gradient magnitudes, gradient directions, measurement frame, and max B0 value are identicle. Used for testing DWIConvert. + category: Converters -version: 0.1.0.$Revision: 916 $(alpha) + description: Compares two nrrd format DWI images and verifies that gradient magnitudes, gradient directions, measurement frame, and max B0 value are identical. Used for testing DWIConvert. -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DWIConvert + version: 0.1.0.$Revision: 916 $(alpha) -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DWIConvert -contributor: Mark Scully (UIowa) + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. + contributor: Mark Scully (UIowa) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. 
+ """ input_spec = DWICompareInputSpec output_spec = DWICompareOutputSpec diff --git a/nipype/interfaces/semtools/diffusion/__init__.py b/nipype/interfaces/semtools/diffusion/__init__.py index 215cfa41d7..1f56f11145 100644 --- a/nipype/interfaces/semtools/diffusion/__init__.py +++ b/nipype/interfaces/semtools/diffusion/__init__.py @@ -1,15 +1,29 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .diffusion import dtiaverage, dtiestim, dtiprocess, DWIConvert from .tractography import * from .gtract import ( - gtractTransformToDisplacementField, gtractInvertBSplineTransform, - gtractConcatDwi, gtractAverageBvalues, gtractCoregBvalues, - gtractResampleAnisotropy, gtractResampleCodeImage, - gtractCopyImageOrientation, gtractCreateGuideFiber, gtractAnisotropyMap, - gtractClipAnisotropy, gtractResampleB0, gtractInvertRigidTransform, - gtractImageConformity, compareTractInclusion, gtractFastMarchingTracking, - gtractInvertDisplacementField, gtractCoRegAnatomy, - gtractResampleDWIInPlace, gtractCostFastMarching, gtractFiberTracking, - extractNrrdVectorIndex, gtractResampleFibers, gtractTensor) + gtractTransformToDisplacementField, + gtractInvertBSplineTransform, + gtractConcatDwi, + gtractAverageBvalues, + gtractCoregBvalues, + gtractResampleAnisotropy, + gtractResampleCodeImage, + gtractCopyImageOrientation, + gtractCreateGuideFiber, + gtractAnisotropyMap, + gtractClipAnisotropy, + gtractResampleB0, + gtractInvertRigidTransform, + gtractImageConformity, + compareTractInclusion, + gtractFastMarchingTracking, + gtractInvertDisplacementField, + gtractCoRegAnatomy, + gtractResampleDWIInPlace, + gtractCostFastMarching, + gtractFiberTracking, + extractNrrdVectorIndex, + gtractResampleFibers, + gtractTensor, +) from .maxcurvature import maxcurvature diff --git a/nipype/interfaces/semtools/diffusion/diffusion.py b/nipype/interfaces/semtools/diffusion/diffusion.py index af943a04fb..6a85d5cd7a 100644 --- a/nipype/interfaces/semtools/diffusion/diffusion.py +++ b/nipype/interfaces/semtools/diffusion/diffusion.py @@ -1,30 +1,34 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + InputMultiPath, +) class dtiaverageInputSpec(CommandLineInputSpec): inputs = InputMultiPath( File(exists=True), desc="List of all the tensor fields to be averaged", - argstr="--inputs %s...") + argstr="--inputs %s...", + ) tensor_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Averaged tensor volume", - argstr="--tensor_output %s") + argstr="--tensor_output %s", + ) DTI_double = traits.Bool( - desc= - "Tensor components are saved as doubles (cannot be visualized in Slicer)", - argstr="--DTI_double ") + desc="Tensor components are saved as doubles (cannot be visualized in Slicer)", + argstr="--DTI_double ", + ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") @@ -35,342 +39,352 @@ class dtiaverageOutputSpec(TraitedSpec): class dtiaverage(SEMLikeCommandLine): """title: DTIAverage (DTIProcess) -category: Diffusion.Diffusion Tensor Images.CommandLineOnly - -description: dtiaverage is a program that allows to compute the average of an arbitrary number of tensor 
fields (listed after the --inputs option) This program is used in our pipeline as the last step of the atlas building processing. When all the tensor fields have been deformed in the same space, to create the average tensor field (--tensor_output) we use dtiaverage. - Several average method can be used (specified by the --method option): euclidian, log-euclidian and pga. The default being euclidian. + category: Diffusion.Diffusion Tensor Images.CommandLineOnly -version: 1.0.0 + description: dtiaverage is a program that allows one to compute the average of an arbitrary number of tensor fields (listed after the --inputs option). This program is used in our pipeline as the last step of the atlas building processing. When all the tensor fields have been deformed in the same space, to create the average tensor field (--tensor_output) we use dtiaverage. + Several averaging methods can be used (specified by the --method option): euclidean, log-euclidean and pga, the default being euclidean. -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess + version: 1.0.0 -license: Copyright (c) Casey Goodlett. All rights reserved. - See http://www.ia.unc.edu/dev/Copyright.htm for details. - This software is distributed WITHOUT ANY WARRANTY; without even - the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR - PURPOSE. See the above copyright notices for more information. + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess -contributor: Casey Goodlett + license: Copyright (c) Casey Goodlett. All rights reserved. + See http://www.ia.unc.edu/dev/Copyright.htm for details. + This software is distributed WITHOUT ANY WARRANTY; without even + the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + PURPOSE. See the above copyright notices for more information. -""" + contributor: Casey Goodlett + """ input_spec = dtiaverageInputSpec output_spec = dtiaverageOutputSpec _cmd = " dtiaverage " - _outputs_filenames = {'tensor_output': 'tensor_output.nii'} + _outputs_filenames = {"tensor_output": "tensor_output.nii"} _redirect_x = False class dtiestimInputSpec(CommandLineInputSpec): dwi_image = File( - desc="DWI image volume (required)", - exists=True, - argstr="--dwi_image %s") + desc="DWI image volume (required)", exists=True, argstr="--dwi_image %s" + ) tensor_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Tensor OutputImage", - argstr="--tensor_output %s") + argstr="--tensor_output %s", + ) B0 = traits.Either( traits.Bool, File(), hash_files=False, desc="Baseline image, average of all baseline images", - argstr="--B0 %s") + argstr="--B0 %s", + ) idwi = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "idwi output image. Image with isotropic diffusion-weighted information = geometric mean of diffusion images", - argstr="--idwi %s") + desc="idwi output image. Image with isotropic diffusion-weighted information = geometric mean of diffusion images", + argstr="--idwi %s", + ) B0_mask_output = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "B0 mask used for the estimation. B0 thresholded either with the -t option value or the automatic OTSU value", - argstr="--B0_mask_output %s") + desc="B0 mask used for the estimation. B0 thresholded either with the -t option value or the automatic OTSU value", + argstr="--B0_mask_output %s", + ) brain_mask = File( - desc= - "Brain mask. Image where for every voxel == 0 the tensors are not estimated. 
Be aware that in addition a threshold based masking will be performed by default. If such an additional threshold masking is NOT desired, then use option -t 0.", + desc="Brain mask. Image where for every voxel == 0 the tensors are not estimated. Be aware that in addition a threshold based masking will be performed by default. If such an additional threshold masking is NOT desired, then use option -t 0.", exists=True, - argstr="--brain_mask %s") + argstr="--brain_mask %s", + ) bad_region_mask = File( - desc= - "Bad region mask. Image where for every voxel > 0 the tensors are not estimated", + desc="Bad region mask. Image where for every voxel > 0 the tensors are not estimated", exists=True, - argstr="--bad_region_mask %s") + argstr="--bad_region_mask %s", + ) method = traits.Enum( "lls", "wls", "nls", "ml", - desc= - "Esitmation method (lls:linear least squares, wls:weighted least squares, nls:non-linear least squares, ml:maximum likelihood)", - argstr="--method %s") + desc="Estimation method (lls:linear least squares, wls:weighted least squares, nls:non-linear least squares, ml:maximum likelihood)", + argstr="--method %s", + ) correction = traits.Enum( "none", "zero", "abs", "nearest", - desc= - "Correct the tensors if computed tensor is not semi-definite positive", - argstr="--correction %s") + desc="Correct the tensors if computed tensor is not semi-definite positive", + argstr="--correction %s", + ) threshold = traits.Int( - desc= - "Baseline threshold for estimation. If not specified calculated using an OTSU threshold on the baseline image.", - argstr="--threshold %d") + desc="Baseline threshold for estimation. If not specified calculated using an OTSU threshold on the baseline image.", + argstr="--threshold %d", + ) weight_iterations = traits.Int( - desc= - "Number of iterations to recaluate weightings from tensor estimate", - argstr="--weight_iterations %d") + desc="Number of iterations to recalculate weightings from tensor estimate", + argstr="--weight_iterations %d", + ) step = traits.Float( - desc="Gradient descent step size (for nls and ml methods)", - argstr="--step %f") + desc="Gradient descent step size (for nls and ml methods)", argstr="--step %f" + ) sigma = traits.Float(argstr="--sigma %f") DTI_double = traits.Bool( - desc= - "Tensor components are saved as doubles (cannot be visualized in Slicer)", - argstr="--DTI_double ") + desc="Tensor components are saved as doubles (cannot be visualized in Slicer)", + argstr="--DTI_double ", + ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") defaultTensor = InputMultiPath( traits.Float, - desc= - "Default tensor used if estimated tensor is below a given threshold", + desc="Default tensor used if estimated tensor is below a given threshold", sep=",", - argstr="--defaultTensor %s") + argstr="--defaultTensor %s", + ) shiftNeg = traits.Bool( - desc= - "Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error). This is the same option as the one available in DWIToDTIEstimation in Slicer (but instead of just adding the minimum eigenvalue to all the eigenvalues if it is smaller than 0, we use a coefficient to have stictly positive eigenvalues", - argstr="--shiftNeg ") + desc="Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error). 
This is the same option as the one available in DWIToDTIEstimation in Slicer (but instead of just adding the minimum eigenvalue to all the eigenvalues if it is smaller than 0, we use a coefficient to have strictly positive eigenvalues", + argstr="--shiftNeg ", + ) shiftNegCoeff = traits.Float( - desc= - "Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error). Instead of just adding the minimum eigenvalue to all the eigenvalues if it is smaller than 0, we use a coefficient to have stictly positive eigenvalues. Coefficient must be between 1.0 and 1.001 (included).", - argstr="--shiftNegCoeff %f") + desc="Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error). Instead of just adding the minimum eigenvalue to all the eigenvalues if it is smaller than 0, we use a coefficient to have strictly positive eigenvalues. Coefficient must be between 1.0 and 1.001 (inclusive).", + argstr="--shiftNegCoeff %f", + ) class dtiestimOutputSpec(TraitedSpec): tensor_output = File(desc="Tensor OutputImage", exists=True) - B0 = File( - desc="Baseline image, average of all baseline images", exists=True) + B0 = File(desc="Baseline image, average of all baseline images", exists=True) idwi = File( - desc= - "idwi output image. Image with isotropic diffusion-weighted information = geometric mean of diffusion images", - exists=True) + desc="idwi output image. Image with isotropic diffusion-weighted information = geometric mean of diffusion images", + exists=True, + ) B0_mask_output = File( - desc= - "B0 mask used for the estimation. B0 thresholded either with the -t option value or the automatic OTSU value", - exists=True) + desc="B0 mask used for the estimation. B0 thresholded either with the -t option value or the automatic OTSU value", + exists=True, + ) class dtiestim(SEMLikeCommandLine): """title: DTIEstim (DTIProcess) -category: Diffusion.Diffusion Weighted Images - -description: dtiestim is a tool that takes in a set of DWIs (with --dwi_image option) in nrrd format and estimates a tensor field out of it. The output tensor file name is specified with the --tensor_output option -There are several methods to estimate the tensors which you can specify with the option --method lls|wls|nls|ml . Here is a short description of the different methods: + category: Diffusion.Diffusion Weighted Images -lls - Linear least squares. Standard estimation technique that recovers the tensor parameters by multiplying the log of the normalized signal intensities by the pseudo-inverse of the gradient matrix. Default option. + description: dtiestim is a tool that takes in a set of DWIs (with --dwi_image option) in nrrd format and estimates a tensor field out of it. The output tensor file name is specified with the --tensor_output option + There are several methods to estimate the tensors which you can specify with the option --method lls|wls|nls|ml . Here is a short description of the different methods: -wls - Weighted least squares. This method is similar to the linear least squares method except that the gradient matrix is weighted by the original lls estimate. (See Salvador, R., Pena, A., Menon, D. K., Carpenter, T. A., Pickard, J. D., and Bullmore, E. T. Formal characterization and extension of the linearized diffusion tensor model. Human Brain Mapping 24, 2 (Feb. 2005), 144-155. for more information on this method). This method is recommended for most applications. The weight for each iteration can be specified with the --weight_iterations. 
It is not currently the default due to occasional matrix singularities. -nls - Non-linear least squares. This method does not take the log of the signal and requires an optimization based on levenberg-marquadt to optimize the parameters of the signal. The lls estimate is used as an initialization. For this method the step size can be specified with the --step option. -ml - Maximum likelihood estimation. This method is experimental and is not currently recommended. For this ml method the sigma can be specified with the option --sigma and the step size can be specified with the --step option. + lls + Linear least squares. Standard estimation technique that recovers the tensor parameters by multiplying the log of the normalized signal intensities by the pseudo-inverse of the gradient matrix. Default option. -You can set a threshold (--threshold) to have the tensor estimated to only a subset of voxels. All the baseline voxel value higher than the threshold define the voxels where the tensors are computed. If not specified the threshold is calculated using an OTSU threshold on the baseline image.The masked generated by the -t option or by the otsu value can be saved with the --B0_mask_output option. + wls + Weighted least squares. This method is similar to the linear least squares method except that the gradient matrix is weighted by the original lls estimate. (See Salvador, R., Pena, A., Menon, D. K., Carpenter, T. A., Pickard, J. D., and Bullmore, E. T. Formal characterization and extension of the linearized diffusion tensor model. Human Brain Mapping 24, 2 (Feb. 2005), 144-155. for more information on this method). This method is recommended for most applications. The weight for each iteration can be specified with the --weight_iterations. It is not currently the default due to occasional matrix singularities. + nls + Non-linear least squares. This method does not take the log of the signal and requires an optimization based on Levenberg-Marquardt to optimize the parameters of the signal. The lls estimate is used as an initialization. For this method the step size can be specified with the --step option. + ml + Maximum likelihood estimation. This method is experimental and is not currently recommended. For this ml method the sigma can be specified with the option --sigma and the step size can be specified with the --step option. -dtiestim also can extract a few scalar images out of the DWI set of images: + You can set a threshold (--threshold) to have the tensor estimated to only a subset of voxels. All the baseline voxel values higher than the threshold define the voxels where the tensors are computed. If not specified, the threshold is calculated using an OTSU threshold on the baseline image. The mask generated by the -t option or by the OTSU value can be saved with the --B0_mask_output option. - - the average baseline image (--B0) which is the average of all the B0s. - - the IDWI (--idwi)which is the geometric mean of the diffusion images. + dtiestim can also extract a few scalar images out of the DWI set of images: - the average baseline image (--B0) which is the average of all the B0s. - the IDWI (--idwi) which is the geometric mean of the diffusion images. 
-version: 1.2.0 + You can also load a mask if you want to compute the tensors only where the voxels are non-zero (--brain_mask) or a negative mask and the tensors will be estimated where the negative mask has zero values (--bad_region_mask). -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess + version: 1.2.0 -license: Copyright (c) Casey Goodlett. All rights reserved. - See http://www.ia.unc.edu/dev/Copyright.htm for details. - This software is distributed WITHOUT ANY WARRANTY; without even - the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR - PURPOSE. See the above copyright notices for more information. + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess -contributor: Casey Goodlett, Francois Budin + license: Copyright (c) Casey Goodlett. All rights reserved. + See http://www.ia.unc.edu/dev/Copyright.htm for details. + This software is distributed WITHOUT ANY WARRANTY; without even + the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + PURPOSE. See the above copyright notices for more information. -acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler. + contributor: Casey Goodlett, Francois Budin -""" + acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependencies on boost and a Fortran compiler. 
+ """ input_spec = dtiestimInputSpec output_spec = dtiestimOutputSpec _cmd = " dtiestim " _outputs_filenames = { - 'B0': 'B0.nii', - 'idwi': 'idwi.nii', - 'tensor_output': 'tensor_output.nii', - 'B0_mask_output': 'B0_mask_output.nii' + "B0": "B0.nii", + "idwi": "idwi.nii", + "tensor_output": "tensor_output.nii", + "B0_mask_output": "B0_mask_output.nii", } _redirect_x = False class dtiprocessInputSpec(CommandLineInputSpec): - dti_image = File( - desc="DTI tensor volume", exists=True, argstr="--dti_image %s") + dti_image = File(desc="DTI tensor volume", exists=True, argstr="--dti_image %s") fa_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Fractional Anisotropy output file", - argstr="--fa_output %s") + argstr="--fa_output %s", + ) md_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Mean Diffusivity output file", - argstr="--md_output %s") + argstr="--md_output %s", + ) sigma = traits.Float(desc="Scale of gradients", argstr="--sigma %f") fa_gradient_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Fractional Anisotropy Gradient output file", - argstr="--fa_gradient_output %s") + argstr="--fa_gradient_output %s", + ) fa_gradmag_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Fractional Anisotropy Gradient Magnitude output file", - argstr="--fa_gradmag_output %s") + argstr="--fa_gradmag_output %s", + ) color_fa_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Color Fractional Anisotropy output file", - argstr="--color_fa_output %s") + argstr="--color_fa_output %s", + ) principal_eigenvector_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Principal Eigenvectors Output", - argstr="--principal_eigenvector_output %s") + argstr="--principal_eigenvector_output %s", + ) negative_eigenvector_output = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Negative Eigenvectors Output: create a binary image where if any of the eigen value is below zero, the voxel is set to 1, otherwise 0.", - argstr="--negative_eigenvector_output %s") + desc="Negative Eigenvectors Output: create a binary image where if any of the eigen value is below zero, the voxel is set to 1, otherwise 0.", + argstr="--negative_eigenvector_output %s", + ) frobenius_norm_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Frobenius Norm Output", - argstr="--frobenius_norm_output %s") + argstr="--frobenius_norm_output %s", + ) lambda1_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Axial Diffusivity - Lambda 1 (largest eigenvalue) output", - argstr="--lambda1_output %s") + argstr="--lambda1_output %s", + ) lambda2_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Lambda 2 (middle eigenvalue) output", - argstr="--lambda2_output %s") + argstr="--lambda2_output %s", + ) lambda3_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Lambda 3 (smallest eigenvalue) output", - argstr="--lambda3_output %s") + argstr="--lambda3_output %s", + ) RD_output = traits.Either( traits.Bool, File(), hash_files=False, desc="RD (Radial Diffusivity 1/2*(lambda2+lambda3)) output", - argstr="--RD_output %s") + argstr="--RD_output %s", + ) rot_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Rotated tensor output file. Must also specify the dof file.", - argstr="--rot_output %s") + argstr="--rot_output %s", + ) affineitk_file = File( desc="Transformation file for affine transformation. 
ITK format.", exists=True, - argstr="--affineitk_file %s") + argstr="--affineitk_file %s", + ) dof_file = File( - desc= - "Transformation file for affine transformation. This can be ITK format (or the outdated RView).", + desc="Transformation file for affine transformation. This can be ITK format (or the outdated RView).", exists=True, - argstr="--dof_file %s") + argstr="--dof_file %s", + ) newdof_file = File( - desc= - "Transformation file for affine transformation. RView NEW format. (txt file output of dof2mat)", + desc="Transformation file for affine transformation. RView NEW format. (txt file output of dof2mat)", exists=True, - argstr="--newdof_file %s") + argstr="--newdof_file %s", + ) mask = File( - desc= - "Mask tensors. Specify --outmask if you want to save the masked tensor field, otherwise the mask is applied just for the current processing ", + desc="Mask tensors. Specify --outmask if you want to save the masked tensor field, otherwise the mask is applied just for the current processing ", exists=True, - argstr="--mask %s") + argstr="--mask %s", + ) outmask = traits.Either( traits.Bool, File(), hash_files=False, desc="Name of the masked tensor field.", - argstr="--outmask %s") + argstr="--outmask %s", + ) hField = traits.Bool( - desc= - "forward and inverse transformations are h-fields instead of displacement fields", - argstr="--hField ") + desc="forward and inverse transformations are h-fields instead of displacement fields", + argstr="--hField ", + ) forward = File( - desc= - "Forward transformation. Assumed to be a deformation field in world coordinates, unless the --h-field option is specified.", + desc="Forward transformation. Assumed to be a deformation field in world coordinates, unless the --h-field option is specified.", exists=True, - argstr="--forward %s") + argstr="--forward %s", + ) deformation_output = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Warped tensor field based on a deformation field. This option requires the --forward,-F transformation to be specified.", - argstr="--deformation_output %s") + desc="Warped tensor field based on a deformation field. This option requires the --forward,-F transformation to be specified.", + argstr="--deformation_output %s", + ) interpolation = traits.Enum( "nearestneighbor", "linear", "cubic", desc="Interpolation type (nearestneighbor, linear, cubic)", - argstr="--interpolation %s") + argstr="--interpolation %s", + ) reorientation = traits.Enum( - "fs", - "ppd", - desc="Reorientation type (fs, ppd)", - argstr="--reorientation %s") + "fs", "ppd", desc="Reorientation type (fs, ppd)", argstr="--reorientation %s" + ) correction = traits.Enum( "none", "zero", "abs", "nearest", - desc= - "Correct the tensors if computed tensor is not semi-definite positive", - argstr="--correction %s") + desc="Correct the tensors if computed tensor is not semi-definite positive", + argstr="--correction %s", + ) scalar_float = traits.Bool( - desc= - "Write scalar [FA,MD] as unscaled float (with their actual values, otherwise scaled by 10 000). Also causes FA to be unscaled [0..1].", - argstr="--scalar_float ") + desc="Write scalar [FA,MD] as unscaled float (with their actual values, otherwise scaled by 10 000). 
Also causes FA to be unscaled [0..1].", + argstr="--scalar_float ", + ) DTI_double = traits.Bool( - desc= - "Tensor components are saved as doubles (cannot be visualized in Slicer)", - argstr="--DTI_double ") + desc="Tensor components are saved as doubles (cannot be visualized in Slicer)", + argstr="--DTI_double ", + ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") @@ -378,91 +392,89 @@ class dtiprocessOutputSpec(TraitedSpec): fa_output = File(desc="Fractional Anisotropy output file", exists=True) md_output = File(desc="Mean Diffusivity output file", exists=True) fa_gradient_output = File( - desc="Fractional Anisotropy Gradient output file", exists=True) + desc="Fractional Anisotropy Gradient output file", exists=True + ) fa_gradmag_output = File( - desc="Fractional Anisotropy Gradient Magnitude output file", - exists=True) - color_fa_output = File( - desc="Color Fractional Anisotropy output file", exists=True) + desc="Fractional Anisotropy Gradient Magnitude output file", exists=True + ) + color_fa_output = File(desc="Color Fractional Anisotropy output file", exists=True) principal_eigenvector_output = File( - desc="Principal Eigenvectors Output", exists=True) + desc="Principal Eigenvectors Output", exists=True + ) negative_eigenvector_output = File( - desc= - "Negative Eigenvectors Output: create a binary image where if any of the eigen value is below zero, the voxel is set to 1, otherwise 0.", - exists=True) + desc="Negative Eigenvectors Output: create a binary image where if any of the eigen value is below zero, the voxel is set to 1, otherwise 0.", + exists=True, + ) frobenius_norm_output = File(desc="Frobenius Norm Output", exists=True) lambda1_output = File( - desc="Axial Diffusivity - Lambda 1 (largest eigenvalue) output", - exists=True) - lambda2_output = File( - desc="Lambda 2 (middle eigenvalue) output", exists=True) - lambda3_output = File( - desc="Lambda 3 (smallest eigenvalue) output", exists=True) + desc="Axial Diffusivity - Lambda 1 (largest eigenvalue) output", exists=True + ) + lambda2_output = File(desc="Lambda 2 (middle eigenvalue) output", exists=True) + lambda3_output = File(desc="Lambda 3 (smallest eigenvalue) output", exists=True) RD_output = File( - desc="RD (Radial Diffusivity 1/2*(lambda2+lambda3)) output", - exists=True) + desc="RD (Radial Diffusivity 1/2*(lambda2+lambda3)) output", exists=True + ) rot_output = File( - desc="Rotated tensor output file. Must also specify the dof file.", - exists=True) + desc="Rotated tensor output file. Must also specify the dof file.", exists=True + ) outmask = File(desc="Name of the masked tensor field.", exists=True) deformation_output = File( - desc= - "Warped tensor field based on a deformation field. This option requires the --forward,-F transformation to be specified.", - exists=True) + desc="Warped tensor field based on a deformation field. This option requires the --forward,-F transformation to be specified.", + exists=True, + ) class dtiprocess(SEMLikeCommandLine): """title: DTIProcess (DTIProcess) -category: Diffusion.Diffusion Tensor Images + category: Diffusion.Diffusion Tensor Images -description: dtiprocess is a tool that handles tensor fields. It takes as an input a tensor field in nrrd format. 
-It can generate diffusion scalar properties out of the tensor field such as : FA (--fa_output), Gradient FA image (--fa_gradient_output), color FA (--color_fa_output), MD (--md_output), Frobenius norm (--frobenius_norm_output), lbd1, lbd2, lbd3 (--lambda{1,2,3}_output), binary map of voxel where if any of the eigenvalue is negative, the voxel is set to 1 (--negative_eigenvector_output) + description: dtiprocess is a tool that handles tensor fields. It takes as an input a tensor field in nrrd format. + It can generate diffusion scalar properties out of the tensor field such as: FA (--fa_output), Gradient FA image (--fa_gradient_output), color FA (--color_fa_output), MD (--md_output), Frobenius norm (--frobenius_norm_output), lbd1, lbd2, lbd3 (--lambda{1,2,3}_output), binary map of voxels where if any of the eigenvalues is negative, the voxel is set to 1 (--negative_eigenvector_output) -It also creates 4D images out of the tensor field such as: Highest eigenvector map (highest eigenvector at each voxel) (--principal_eigenvector_output) + It also creates 4D images out of the tensor field such as: Highest eigenvector map (highest eigenvector at each voxel) (--principal_eigenvector_output) -Masking capabilities: For any of the processing done with dtiprocess, it's possible to apply it on a masked region of the tensor field. You need to use the --mask option for any of the option to be applied on that tensor field sub-region only. If you want to save the masked tensor field use the option --outmask and specify the new masked tensor field file name. -dtiprocess also allows a range of transformations on the tensor fields. The transformed tensor field file name is specified with the option --deformation_output. There are 3 resampling interpolation methods specified with the tag --interpolation followed by the type to use (nearestneighbor, linear, cubic) Then you have several transformations possible to apply: + Masking capabilities: For any of the processing done with dtiprocess, it's possible to apply it on a masked region of the tensor field. You need to use the --mask option for any of the options to be applied on that tensor field sub-region only. If you want to save the masked tensor field use the option --outmask and specify the new masked tensor field file name. + dtiprocess also allows a range of transformations on the tensor fields. The transformed tensor field file name is specified with the option --deformation_output. There are 3 resampling interpolation methods specified with the tag --interpolation followed by the type to use (nearestneighbor, linear, cubic). Then you have several transformations possible to apply: - - Affine transformations using as an input - - itk affine transformation file (based on the itkAffineTransform class) - - Affine transformations using rview (details and download at http://www.doc.ic.ac.uk/~dr/software/). There are 2 versions of rview both creating transformation files called dof files. The old version of rview outputs text files containing the transformation parameters. It can be read in with the --dof_file option. The new version outputs binary dof files. These dof files can be transformed into human readable file with the dof2mat tool which is part of the rview package. So you need to save the output of dof2mat into a text file which can then be used with the -- newdof_file option. 
Usage example: dof2mat mynewdoffile.dof >> mynewdoffile.txt dtiprocess --dti_image mytensorfield.nhdr --newdof_file mynewdoffile.txt --rot_output myaffinetensorfield.nhdr + - Affine transformations using as an input + - itk affine transformation file (based on the itkAffineTransform class) + - Affine transformations using rview (details and download at http://www.doc.ic.ac.uk/~dr/software/). There are 2 versions of rview both creating transformation files called dof files. The old version of rview outputs text files containing the transformation parameters. It can be read in with the --dof_file option. The new version outputs binary dof files. These dof files can be transformed into a human-readable file with the dof2mat tool which is part of the rview package. So you need to save the output of dof2mat into a text file which can then be used with the --newdof_file option. Usage example: dof2mat mynewdoffile.dof >> mynewdoffile.txt dtiprocess --dti_image mytensorfield.nhdr --newdof_file mynewdoffile.txt --rot_output myaffinetensorfield.nhdr -Non linear transformations as an input: The default transformation file type is d-field (displacement field) in nrrd format. The option to use is --forward with the name of the file. If the transformation file is a h-field you have to add the option --hField. + Non-linear transformations as an input: The default transformation file type is d-field (displacement field) in nrrd format. The option to use is --forward with the name of the file. If the transformation file is an h-field you have to add the option --hField. -version: 1.0.1 + version: 1.0.1 -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess -license: Copyright (c) Casey Goodlett. All rights reserved. - See http://www.ia.unc.edu/dev/Copyright.htm for details. - This software is distributed WITHOUT ANY WARRANTY; without even - the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR - PURPOSE. See the above copyright notices for more information. + license: Copyright (c) Casey Goodlett. All rights reserved. + See http://www.ia.unc.edu/dev/Copyright.htm for details. + This software is distributed WITHOUT ANY WARRANTY; without even + the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + PURPOSE. See the above copyright notices for more information. 
-contributor: Casey Goodlett - -""" + contributor: Casey Goodlett + """ input_spec = dtiprocessInputSpec output_spec = dtiprocessOutputSpec _cmd = " dtiprocess " _outputs_filenames = { - 'fa_gradmag_output': 'fa_gradmag_output.nii', - 'fa_gradient_output': 'fa_gradient_output.nii', - 'lambda1_output': 'lambda1_output.nii', - 'lambda2_output': 'lambda2_output.nii', - 'color_fa_output': 'color_fa_output.nii', - 'fa_output': 'fa_output.nii', - 'frobenius_norm_output': 'frobenius_norm_output.nii', - 'principal_eigenvector_output': 'principal_eigenvector_output.nii', - 'outmask': 'outmask.nii', - 'lambda3_output': 'lambda3_output.nii', - 'negative_eigenvector_output': 'negative_eigenvector_output.nii', - 'md_output': 'md_output.nii', - 'RD_output': 'RD_output.nii', - 'deformation_output': 'deformation_output.nii', - 'rot_output': 'rot_output.nii' + "fa_gradmag_output": "fa_gradmag_output.nii", + "fa_gradient_output": "fa_gradient_output.nii", + "lambda1_output": "lambda1_output.nii", + "lambda2_output": "lambda2_output.nii", + "color_fa_output": "color_fa_output.nii", + "fa_output": "fa_output.nii", + "frobenius_norm_output": "frobenius_norm_output.nii", + "principal_eigenvector_output": "principal_eigenvector_output.nii", + "outmask": "outmask.nii", + "lambda3_output": "lambda3_output.nii", + "negative_eigenvector_output": "negative_eigenvector_output.nii", + "md_output": "md_output.nii", + "RD_output": "RD_output.nii", + "deformation_output": "deformation_output.nii", + "rot_output": "rot_output.nii", } _redirect_x = False @@ -473,132 +485,140 @@ class DWIConvertInputSpec(CommandLineInputSpec): "DicomToFSL", "NrrdToFSL", "FSLToNrrd", - desc= - "Determine which conversion to perform. DicomToNrrd (default): Convert DICOM series to NRRD DicomToFSL: Convert DICOM series to NIfTI File + gradient/bvalue text files NrrdToFSL: Convert DWI NRRD file to NIfTI File + gradient/bvalue text files FSLToNrrd: Convert NIfTI File + gradient/bvalue text files to NRRD file.", - argstr="--conversionMode %s") + desc="Determine which conversion to perform. 
DicomToNrrd (default): Convert DICOM series to NRRD DicomToFSL: Convert DICOM series to NIfTI File + gradient/bvalue text files NrrdToFSL: Convert DWI NRRD file to NIfTI File + gradient/bvalue text files FSLToNrrd: Convert NIfTI File + gradient/bvalue text files to NRRD file.", + argstr="--conversionMode %s", + ) inputVolume = File( desc="Input DWI volume -- not used for DicomToNrrd mode.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output filename (.nhdr or .nrrd)", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) inputDicomDirectory = Directory( desc="Directory holding Dicom series", exists=True, - argstr="--inputDicomDirectory %s") + argstr="--inputDicomDirectory %s", + ) fslNIFTIFile = File( desc="4D NIfTI file containing gradient volumes", exists=True, - argstr="--fslNIFTIFile %s") + argstr="--fslNIFTIFile %s", + ) inputBValues = File( desc="The B Values are stored in FSL .bval text file format", exists=True, - argstr="--inputBValues %s") + argstr="--inputBValues %s", + ) inputBVectors = File( desc="The Gradient Vectors are stored in FSL .bvec text file format", exists=True, - argstr="--inputBVectors %s") + argstr="--inputBVectors %s", + ) outputBValues = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The B Values are stored in FSL .bval text file format (defaults to .bval)", - argstr="--outputBValues %s") + desc="The B Values are stored in FSL .bval text file format (defaults to .bval)", + argstr="--outputBValues %s", + ) outputBVectors = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The Gradient Vectors are stored in FSL .bvec text file format (defaults to .bvec)", - argstr="--outputBVectors %s") + desc="The Gradient Vectors are stored in FSL .bvec text file format (defaults to .bvec)", + argstr="--outputBVectors %s", + ) fMRI = traits.Bool( - desc="Output a NRRD file, but without gradients", argstr="--fMRI ") + desc="Output a NRRD file, but without gradients", argstr="--fMRI " + ) writeProtocolGradientsFile = traits.Bool( - desc= - "Write the protocol gradients to a file suffixed by \'.txt\' as they were specified in the procol by multiplying each diffusion gradient direction by the measurement frame. This file is for debugging purposes only, the format is not fixed, and will likely change as debugging of new dicom formats is necessary.", - argstr="--writeProtocolGradientsFile ") + desc="Write the protocol gradients to a file suffixed by '.txt' as they were specified in the protocol by multiplying each diffusion gradient direction by the measurement frame. This file is for debugging purposes only, the format is not fixed, and will likely change as debugging of new dicom formats is necessary.", + argstr="--writeProtocolGradientsFile ", + ) useIdentityMeaseurementFrame = traits.Bool( - desc= - "Adjust all the gradients so that the measurement frame is an identity matrix.", - argstr="--useIdentityMeaseurementFrame ") + desc="Adjust all the gradients so that the measurement frame is an identity matrix.", + argstr="--useIdentityMeaseurementFrame ", + ) useBMatrixGradientDirections = traits.Bool( - desc= - "Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. Only changes behavior for Siemens data. In some cases the standard public gradients are not properly computed. The gradients can emperically computed from the private BMatrix fields. 
In some cases the private BMatrix is consistent with the public grandients, but not in all cases, when it exists BMatrix is usually most robust.", - argstr="--useBMatrixGradientDirections ") + desc="Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. Only changes behavior for Siemens data. In some cases the standard public gradients are not properly computed. The gradients can be empirically computed from the private BMatrix fields. In some cases the private BMatrix is consistent with the public gradients, but not in all cases; when it exists, BMatrix is usually most robust.", + argstr="--useBMatrixGradientDirections ", + ) outputDirectory = traits.Either( traits.Bool, Directory(), hash_files=False, desc="Directory holding the output NRRD file", - argstr="--outputDirectory %s") + argstr="--outputDirectory %s", + ) gradientVectorFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Text file giving gradient vectors", - argstr="--gradientVectorFile %s") + argstr="--gradientVectorFile %s", + ) smallGradientThreshold = traits.Float( - desc= - "If a gradient magnitude is greater than 0 and less than smallGradientThreshold, then DWIConvert will display an error message and quit, unless the useBMatrixGradientDirections option is set.", - argstr="--smallGradientThreshold %f") + desc="If a gradient magnitude is greater than 0 and less than smallGradientThreshold, then DWIConvert will display an error message and quit, unless the useBMatrixGradientDirections option is set.", + argstr="--smallGradientThreshold %f", + ) allowLossyConversion = traits.Bool( - desc= - "The only supported output type is \'short\'. Conversion from images of a different type may cause data loss due to rounding or truncation. Use with caution!", - argstr="--allowLossyConversion ") + desc="The only supported output type is 'short'. Conversion from images of a different type may cause data loss due to rounding or truncation. Use with caution!", + argstr="--allowLossyConversion ", + ) transposeInputBVectors = traits.Bool( - desc= - "FSL input BVectors are expected to be encoded in the input file as one vector per line. If it is not the case, use this option to transpose the file as it is read.", - argstr="--transposeInputBVectors ") + desc="FSL input BVectors are expected to be encoded in the input file as one vector per line. 
If it is not the case, use this option to transpose the file as it is read.", + argstr="--transposeInputBVectors ", + ) class DWIConvertOutputSpec(TraitedSpec): outputVolume = File(desc="Output filename (.nhdr or .nrrd)", exists=True) outputBValues = File( - desc= - "The B Values are stored in FSL .bval text file format (defaults to .bval)", - exists=True) + desc="The B Values are stored in FSL .bval text file format (defaults to .bval)", + exists=True, + ) outputBVectors = File( - desc= - "The Gradient Vectors are stored in FSL .bvec text file format (defaults to .bvec)", - exists=True) + desc="The Gradient Vectors are stored in FSL .bvec text file format (defaults to .bvec)", + exists=True, + ) outputDirectory = Directory( - desc="Directory holding the output NRRD file", exists=True) - gradientVectorFile = File( - desc="Text file giving gradient vectors", exists=True) + desc="Directory holding the output NRRD file", exists=True + ) + gradientVectorFile = File(desc="Text file giving gradient vectors", exists=True) class DWIConvert(SEMLikeCommandLine): """title: DWIConverter -category: Diffusion.Diffusion Data Conversion - -description: Converts diffusion weighted MR images in dicom series into Nrrd format for analysis in Slicer. This program has been tested on only a limited subset of DTI dicom formats available from Siemens, GE, and Phillips scanners. Work in progress to support dicom multi-frame data. The program parses dicom header to extract necessary information about measurement frame, diffusion weighting directions, b-values, etc, and write out a nrrd image. For non-diffusion weighted dicom images, it loads in an entire dicom series and writes out a single dicom volume in a .nhdr/.raw pair. + category: Diffusion.Diffusion Data Conversion -version: Version 1.0 + description: Converts diffusion weighted MR images in dicom series into Nrrd format for analysis in Slicer. This program has been tested on only a limited subset of DTI dicom formats available from Siemens, GE, and Philips scanners. Work in progress to support dicom multi-frame data. The program parses the dicom header to extract necessary information about measurement frame, diffusion weighting directions, b-values, etc, and writes out a nrrd image. For non-diffusion weighted dicom images, it loads in an entire dicom series and writes out a single dicom volume in a .nhdr/.raw pair. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DWIConverter + version: Version 1.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DWIConverter -contributor: Vince Magnotta (UIowa), Hans Johnson (UIowa), Joy Matsui (UIowa), Kent Williams (UIowa), Mark Scully (Uiowa), Xiaodong Tao (GE) + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. 
+ contributor: Vince Magnotta (UIowa), Hans Johnson (UIowa), Joy Matsui (UIowa), Kent Williams (UIowa), Mark Scully (Uiowa), Xiaodong Tao (GE) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. + """ input_spec = DWIConvertInputSpec output_spec = DWIConvertOutputSpec _cmd = " DWIConvert " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputDirectory': 'outputDirectory', - 'outputBValues': 'outputBValues.bval', - 'gradientVectorFile': 'gradientVectorFile', - 'outputBVectors': 'outputBVectors.bvec' + "outputVolume": "outputVolume.nii", + "outputDirectory": "outputDirectory", + "outputBValues": "outputBValues.bval", + "gradientVectorFile": "gradientVectorFile", + "outputBVectors": "outputBVectors.bvec", } _redirect_x = False diff --git a/nipype/interfaces/semtools/diffusion/gtract.py b/nipype/interfaces/semtools/diffusion/gtract.py index 999c898599..58d7264864 100644 --- a/nipype/interfaces/semtools/diffusion/gtract.py +++ b/nipype/interfaces/semtools/diffusion/gtract.py @@ -1,346 +1,350 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class gtractTransformToDisplacementFieldInputSpec(CommandLineInputSpec): inputTransform = File( - desc="Input Transform File Name", - exists=True, - argstr="--inputTransform %s") + desc="Input Transform File Name", exists=True, argstr="--inputTransform %s" + ) inputReferenceVolume = File( - desc= - "Required: input image file name to exemplify the anatomical space over which to vcl_express the transform as a displacement field.", + desc="Required: input image file name to exemplify the anatomical space over which to vcl_express the transform as a displacement field.", exists=True, - argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) outputDeformationFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output deformation field", - argstr="--outputDeformationFieldVolume %s") + argstr="--outputDeformationFieldVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractTransformToDisplacementFieldOutputSpec(TraitedSpec): - outputDeformationFieldVolume = File( - desc="Output deformation field", exists=True) + outputDeformationFieldVolume = File(desc="Output deformation field", exists=True) class gtractTransformToDisplacementField(SEMLikeCommandLine): """title: Create Displacement Field -category: Diffusion.GTRACT - -description: This program will compute forward deformation from the given Transform. The size of the DF is equal to MNI space + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will compute forward deformation from the given Transform. 
The size of the DF is equal to MNI space -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta, Madhura Ingalhalikar, and Greg Harris + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta, Madhura Ingalhalikar, and Greg Harris -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractTransformToDisplacementFieldInputSpec output_spec = gtractTransformToDisplacementFieldOutputSpec _cmd = " gtractTransformToDisplacementField " _outputs_filenames = { - 'outputDeformationFieldVolume': 'outputDeformationFieldVolume.nii' + "outputDeformationFieldVolume": "outputDeformationFieldVolume.nii" } _redirect_x = False class gtractInvertBSplineTransformInputSpec(CommandLineInputSpec): inputReferenceVolume = File( - desc= - "Required: input image file name to exemplify the anatomical space to interpolate over.", + desc="Required: input image file name to exemplify the anatomical space to interpolate over.", exists=True, - argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) inputTransform = File( desc="Required: input B-Spline transform file name", exists=True, - argstr="--inputTransform %s") + argstr="--inputTransform %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output transform file name", - argstr="--outputTransform %s") + argstr="--outputTransform %s", + ) landmarkDensity = InputMultiPath( traits.Int, desc="Number of landmark subdivisions in all 3 directions", sep=",", - argstr="--landmarkDensity %s") + argstr="--landmarkDensity %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractInvertBSplineTransformOutputSpec(TraitedSpec): - outputTransform = File( - desc="Required: output transform file name", exists=True) + outputTransform = File(desc="Required: output transform file name", exists=True) class gtractInvertBSplineTransform(SEMLikeCommandLine): """title: B-Spline Transform Inversion -category: Diffusion.GTRACT - -description: This program will invert a B-Spline transform using a thin-plate spline approximation. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will invert a B-Spline transform using a thin-plate spline approximation. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
-""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractInvertBSplineTransformInputSpec output_spec = gtractInvertBSplineTransformOutputSpec _cmd = " gtractInvertBSplineTransform " - _outputs_filenames = {'outputTransform': 'outputTransform.h5'} + _outputs_filenames = {"outputTransform": "outputTransform.h5"} _redirect_x = False class gtractConcatDwiInputSpec(CommandLineInputSpec): inputVolume = InputMultiPath( File(exists=True), - desc= - "Required: input file containing the first diffusion weighted image", - argstr="--inputVolume %s...") + desc="Required: input file containing the first diffusion weighted image", + argstr="--inputVolume %s...", + ) ignoreOrigins = traits.Bool( - desc= - "If image origins are different force all images to origin of first image", - argstr="--ignoreOrigins ") + desc="If image origins are different force all images to origin of first image", + argstr="--ignoreOrigins ", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the combined diffusion weighted images.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the combined diffusion weighted images.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractConcatDwiOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the combined diffusion weighted images.", - exists=True) + desc="Required: name of output NRRD file containing the combined diffusion weighted images.", + exists=True, + ) class gtractConcatDwi(SEMLikeCommandLine): """title: Concat DWI Images -category: Diffusion.GTRACT + category: Diffusion.GTRACT -description: This program will concatenate two DTI runs together. + description: This program will concatenate two DTI runs together. -version: 4.0.0 + version: 4.0.0 -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
-acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 - -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractConcatDwiInputSpec output_spec = gtractConcatDwiOutputSpec _cmd = " gtractConcatDwi " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractAverageBvaluesInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input image file name containing multiple baseline gradients to average", + desc="Required: input image file name containing multiple baseline gradients to average", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing directly averaged baseline images", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing directly averaged baseline images", + argstr="--outputVolume %s", + ) directionsTolerance = traits.Float( desc="Tolerance for matching identical gradient direction pairs", - argstr="--directionsTolerance %f") + argstr="--directionsTolerance %f", + ) averageB0only = traits.Bool( - desc= - "Average only baseline gradients. All other gradient directions are not averaged, but retained in the outputVolume", - argstr="--averageB0only ") + desc="Average only baseline gradients. All other gradient directions are not averaged, but retained in the outputVolume", + argstr="--averageB0only ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractAverageBvaluesOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing directly averaged baseline images", - exists=True) + desc="Required: name of output NRRD file containing directly averaged baseline images", + exists=True, + ) class gtractAverageBvalues(SEMLikeCommandLine): """title: Average B-Values -category: Diffusion.GTRACT - -description: This program will directly average together the baseline gradients (b value equals 0) within a DWI scan. This is usually used after gtractCoregBvalues. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will directly average together the baseline gradients (b value equals 0) within a DWI scan. This is usually used after gtractCoregBvalues. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
-""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractAverageBvaluesInputSpec output_spec = gtractAverageBvaluesOutputSpec _cmd = " gtractAverageBvalues " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractCoregBvaluesInputSpec(CommandLineInputSpec): movingVolume = File( - desc= - "Required: input moving image file name. In order to register gradients within a scan to its first gradient, set the movingVolume and fixedVolume as the same image.", + desc="Required: input moving image file name. In order to register gradients within a scan to its first gradient, set the movingVolume and fixedVolume as the same image.", exists=True, - argstr="--movingVolume %s") + argstr="--movingVolume %s", + ) fixedVolume = File( - desc= - "Required: input fixed image file name. It is recommended that this image should either contain or be a b0 image.", + desc="Required: input fixed image file name. It is recommended that this image should either contain or be a b0 image.", exists=True, - argstr="--fixedVolume %s") + argstr="--fixedVolume %s", + ) fixedVolumeIndex = traits.Int( - desc= - "Index in the fixed image for registration. It is recommended that this image should be a b0 image.", - argstr="--fixedVolumeIndex %d") + desc="Index in the fixed image for registration. It is recommended that this image should be a b0 image.", + argstr="--fixedVolumeIndex %d", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", + argstr="--outputVolume %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Registration 3D transforms concatenated in a single output file. There are no tools that can use this, but can be used for debugging purposes.", - argstr="--outputTransform %s") + desc="Registration 3D transforms concatenated in a single output file. There are no tools that can use this, but can be used for debugging purposes.", + argstr="--outputTransform %s", + ) eddyCurrentCorrection = traits.Bool( - desc= - "Flag to perform eddy current corection in addition to motion correction (recommended)", - argstr="--eddyCurrentCorrection ") + desc="Flag to perform eddy current correction in addition to motion correction (recommended)", + argstr="--eddyCurrentCorrection ", + ) numberOfIterations = traits.Int( - desc="Number of iterations in each 3D fit", - argstr="--numberOfIterations %d") + desc="Number of iterations in each 3D fit", argstr="--numberOfIterations %d" + ) numberOfSpatialSamples = traits.Int( - desc= - "The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. However, if set, it overwrites the samplingPercentage option. ", - argstr="--numberOfSpatialSamples %d") + desc="The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. However, if set, it overwrites the samplingPercentage option. 
", + argstr="--numberOfSpatialSamples %d", + ) samplingPercentage = traits.Float( - desc= - "This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). Typical values range from 1% for low detail images to 20% for high detail images.", - argstr="--samplingPercentage %f") + desc="This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). Typical values range from 1% for low detail images to 20% for high detail images.", + argstr="--samplingPercentage %f", + ) relaxationFactor = traits.Float( - desc= - "Fraction of gradient from Jacobian to attempt to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.25)", - argstr="--relaxationFactor %f") + desc="Fraction of gradient from Jacobian to attempt to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.25)", + argstr="--relaxationFactor %f", + ) maximumStepSize = traits.Float( - desc= - "Maximum permitted step size to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.1)", - argstr="--maximumStepSize %f") + desc="Maximum permitted step size to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.1)", + argstr="--maximumStepSize %f", + ) minimumStepSize = traits.Float( - desc= - "Minimum required step size to move in each 3D fit step without converging -- decrease this to make the fit more exacting", - argstr="--minimumStepSize %f") + desc="Minimum required step size to move in each 3D fit step without converging -- decrease this to make the fit more exacting", + argstr="--minimumStepSize %f", + ) spatialScale = traits.Float( - desc= - "How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more rotation in the fit", - argstr="--spatialScale %f") + desc="How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more rotation in the fit", + argstr="--spatialScale %f", + ) registerB0Only = traits.Bool( - desc="Register the B0 images only", argstr="--registerB0Only ") + desc="Register the B0 images only", argstr="--registerB0Only " + ) debugLevel = traits.Int( - desc= - "Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", - argstr="--debugLevel %d") + desc="Display debug messages, and produce debug intermediate results. 
0=OFF, 1=Minimal, 10=Maximum debugging.", + argstr="--debugLevel %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractCoregBvaluesOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", - exists=True) + desc="Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", + exists=True, + ) outputTransform = File( - desc= - "Registration 3D transforms concatenated in a single output file. There are no tools that can use this, but can be used for debugging purposes.", - exists=True) + desc="Registration 3D transforms concatenated in a single output file. There are no tools that can use this, but can be used for debugging purposes.", + exists=True, + ) class gtractCoregBvalues(SEMLikeCommandLine): """title: Coregister B-Values -category: Diffusion.GTRACT - -description: This step should be performed after converting DWI scans from DICOM to NRRD format. This program will register all gradients in a NRRD diffusion weighted 4D vector image (moving image) to a specified index in a fixed image. It also supports co-registration with a T2 weighted image or field map in the same plane as the DWI data. The fixed image for the registration should be a b0 image. A mutual information metric cost function is used for the registration because of the differences in signal intensity as a result of the diffusion gradients. The full affine allows the registration procedure to correct for eddy current distortions that may exist in the data. If the eddyCurrentCorrection is enabled, relaxationFactor (0.25) and maximumStepSize (0.1) should be adjusted. + category: Diffusion.GTRACT -version: 4.0.0 + description: This step should be performed after converting DWI scans from DICOM to NRRD format. This program will register all gradients in a NRRD diffusion weighted 4D vector image (moving image) to a specified index in a fixed image. It also supports co-registration with a T2 weighted image or field map in the same plane as the DWI data. The fixed image for the registration should be a b0 image. A mutual information metric cost function is used for the registration because of the differences in signal intensity as a result of the diffusion gradients. The full affine allows the registration procedure to correct for eddy current distortions that may exist in the data. If the eddyCurrentCorrection is enabled, relaxationFactor (0.25) and maximumStepSize (0.1) should be adjusted. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
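The gtractCoregBvalues description above translates into a short sketch (hypothetical filenames; assumes the executable is on PATH). Setting movingVolume and fixedVolume to the same scan registers every gradient to the chosen b0 index, and the suggested relaxationFactor and maximumStepSize values apply once eddy current correction is enabled:

    from nipype.interfaces.semtools.diffusion.gtract import gtractCoregBvalues

    coreg = gtractCoregBvalues()
    coreg.inputs.fixedVolume = "dwi.nrrd"   # hypothetical; should contain (or be) a b0 image
    coreg.inputs.movingVolume = "dwi.nrrd"  # same scan: register gradients to fixedVolumeIndex
    coreg.inputs.fixedVolumeIndex = 0
    coreg.inputs.eddyCurrentCorrection = True
    coreg.inputs.relaxationFactor = 0.25    # suggested value with eddy current correction
    coreg.inputs.maximumStepSize = 0.1      # suggested value with eddy current correction
    coreg.inputs.outputVolume = True        # auto-named outputVolume.nrrd
    result = coreg.run()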
-""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractCoregBvaluesInputSpec output_spec = gtractCoregBvaluesOutputSpec _cmd = " gtractCoregBvalues " _outputs_filenames = { - 'outputVolume': 'outputVolume.nrrd', - 'outputTransform': 'outputTransform.h5' + "outputVolume": "outputVolume.nrrd", + "outputTransform": "outputTransform.h5", } _redirect_x = False @@ -349,63 +353,66 @@ class gtractResampleAnisotropyInputSpec(CommandLineInputSpec): inputAnisotropyVolume = File( desc="Required: input file containing the anisotropy image", exists=True, - argstr="--inputAnisotropyVolume %s") + argstr="--inputAnisotropyVolume %s", + ) inputAnatomicalVolume = File( - desc= - "Required: input file containing the anatomical image whose characteristics will be cloned.", + desc="Required: input file containing the anatomical image whose characteristics will be cloned.", exists=True, - argstr="--inputAnatomicalVolume %s") + argstr="--inputAnatomicalVolume %s", + ) inputTransform = File( desc="Required: input Rigid OR Bspline transform file name", exists=True, - argstr="--inputTransform %s") + argstr="--inputTransform %s", + ) transformType = traits.Enum( "Rigid", "B-Spline", desc="Transform type: Rigid, B-Spline", - argstr="--transformType %s") + argstr="--transformType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the resampled transformed anisotropy image.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the resampled transformed anisotropy image.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractResampleAnisotropyOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the resampled transformed anisotropy image.", - exists=True) + desc="Required: name of output NRRD file containing the resampled transformed anisotropy image.", + exists=True, + ) class gtractResampleAnisotropy(SEMLikeCommandLine): """title: Resample Anisotropy -category: Diffusion.GTRACT - -description: This program will resample a floating point image using either the Rigid or B-Spline transform. You may want to save the aligned B0 image after each of the anisotropy map co-registration steps with the anatomical image to check the registration quality with another tool. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will resample a floating point image using either the Rigid or B-Spline transform. You may want to save the aligned B0 image after each of the anisotropy map co-registration steps with the anatomical image to check the registration quality with another tool. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
-""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractResampleAnisotropyInputSpec output_spec = gtractResampleAnisotropyOutputSpec _cmd = " gtractResampleAnisotropy " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False @@ -413,16 +420,18 @@ class gtractResampleCodeImageInputSpec(CommandLineInputSpec): inputCodeVolume = File( desc="Required: input file containing the code image", exists=True, - argstr="--inputCodeVolume %s") + argstr="--inputCodeVolume %s", + ) inputReferenceVolume = File( - desc= - "Required: input file containing the standard image to clone the characteristics of.", + desc="Required: input file containing the standard image to clone the characteristics of.", exists=True, - argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) inputTransform = File( desc="Required: input Rigid or Inverse-B-Spline transform file name", exists=True, - argstr="--inputTransform %s") + argstr="--inputTransform %s", + ) transformType = traits.Enum( "Rigid", "Affine", @@ -430,104 +439,106 @@ class gtractResampleCodeImageInputSpec(CommandLineInputSpec): "Inverse-B-Spline", "None", desc="Transform type: Rigid or Inverse-B-Spline", - argstr="--transformType %s") + argstr="--transformType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the resampled code image in acquisition space.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the resampled code image in acquisition space.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractResampleCodeImageOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the resampled code image in acquisition space.", - exists=True) + desc="Required: name of output NRRD file containing the resampled code image in acquisition space.", + exists=True, + ) class gtractResampleCodeImage(SEMLikeCommandLine): """title: Resample Code Image -category: Diffusion.GTRACT + category: Diffusion.GTRACT -description: This program will resample a short integer code image using either the Rigid or Inverse-B-Spline transform. The reference image is the DTI tensor anisotropy image space, and the input code image is in anatomical space. + description: This program will resample a short integer code image using either the Rigid or Inverse-B-Spline transform. The reference image is the DTI tensor anisotropy image space, and the input code image is in anatomical space. -version: 4.0.0 + version: 4.0.0 -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
-acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 - -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractResampleCodeImageInputSpec output_spec = gtractResampleCodeImageOutputSpec _cmd = " gtractResampleCodeImage " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractCopyImageOrientationInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input file containing the signed short image to reorient without resampling.", + desc="Required: input file containing the signed short image to reorient without resampling.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) inputReferenceVolume = File( - desc="Required: input file containing orietation that will be cloned.", + desc="Required: input file containing orientation that will be cloned.", exists=True, - argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractCopyImageOrientationOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", - exists=True) + desc="Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", + exists=True, + ) class gtractCopyImageOrientation(SEMLikeCommandLine): """title: Copy Image Orientation -category: Diffusion.GTRACT - -description: This program will copy the orientation from the reference image into the moving image. Currently, the registration process requires that the diffusion weighted images and the anatomical images have the same image orientation (i.e. Axial, Coronal, Sagittal). It is suggested that you copy the image orientation from the diffusion weighted images and apply this to the anatomical image. This image can be subsequently removed after the registration step is complete. We anticipate that this limitation will be removed in future versions of the registration programs. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will copy the orientation from the reference image into the moving image. Currently, the registration process requires that the diffusion weighted images and the anatomical images have the same image orientation (i.e. Axial, Coronal, Sagittal). It is suggested that you copy the image orientation from the diffusion weighted images and apply this to the anatomical image. This image can be subsequently removed after the registration step is complete. We anticipate that this limitation will be removed in future versions of the registration programs. 
-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractCopyImageOrientationInputSpec output_spec = gtractCopyImageOrientationOutputSpec _cmd = " gtractCopyImageOrientation " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False @@ -535,53 +546,54 @@ class gtractCreateGuideFiberInputSpec(CommandLineInputSpec): inputFiber = File( desc="Required: input fiber tract file name", exists=True, - argstr="--inputFiber %s") + argstr="--inputFiber %s", + ) numberOfPoints = traits.Int( - desc="Number of points in output guide fiber", - argstr="--numberOfPoints %d") + desc="Number of points in output guide fiber", argstr="--numberOfPoints %d" + ) outputFiber = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output guide fiber file name", - argstr="--outputFiber %s") + argstr="--outputFiber %s", + ) writeXMLPolyDataFile = traits.Bool( - desc= - "Flag to make use of XML files when reading and writing vtkPolyData.", - argstr="--writeXMLPolyDataFile ") + desc="Flag to make use of XML files when reading and writing vtkPolyData.", + argstr="--writeXMLPolyDataFile ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractCreateGuideFiberOutputSpec(TraitedSpec): - outputFiber = File( - desc="Required: output guide fiber file name", exists=True) + outputFiber = File(desc="Required: output guide fiber file name", exists=True) class gtractCreateGuideFiber(SEMLikeCommandLine): """title: Create Guide Fiber -category: Diffusion.GTRACT + category: Diffusion.GTRACT -description: This program will create a guide fiber by averaging fibers from a previously generated tract. + description: This program will create a guide fiber by averaging fibers from a previously generated tract. -version: 4.0.0 + version: 4.0.0 -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
-acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 - -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractCreateGuideFiberInputSpec output_spec = gtractCreateGuideFiberOutputSpec _cmd = " gtractCreateGuideFiber " - _outputs_filenames = {'outputFiber': 'outputFiber.vtk'} + _outputs_filenames = {"outputFiber": "outputFiber.vtk"} _redirect_x = False @@ -589,7 +601,8 @@ class gtractAnisotropyMapInputSpec(CommandLineInputSpec): inputTensorVolume = File( desc="Required: input file containing the diffusion tensor image", exists=True, - argstr="--inputTensorVolume %s") + argstr="--inputTensorVolume %s", + ) anisotropyType = traits.Enum( "ADC", "FA", @@ -599,105 +612,105 @@ class gtractAnisotropyMapInputSpec(CommandLineInputSpec): "RD", "LI", desc="Anisotropy Mapping Type: ADC, FA, RA, VR, AD, RD, LI", - argstr="--anisotropyType %s") + argstr="--anisotropyType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the selected kind of anisotropy scalar.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the selected kind of anisotropy scalar.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractAnisotropyMapOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the selected kind of anisotropy scalar.", - exists=True) + desc="Required: name of output NRRD file containing the selected kind of anisotropy scalar.", + exists=True, + ) class gtractAnisotropyMap(SEMLikeCommandLine): """title: Anisotropy Map -category: Diffusion.GTRACT - -description: This program will generate a scalar map of anisotropy, given a tensor representation. Anisotropy images are used for fiber tracking, but the anisotropy scalars are not defined along the path. Instead, the tensor representation is included as point data allowing all of these metrics to be computed using only the fiber tract point data. The images can be saved in any ITK supported format, but it is suggested that you use an image format that supports the definition of the image origin. This includes NRRD, NifTI, and Meta formats. These images can also be used for scalar analysis including regional anisotropy measures or VBM style analysis. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will generate a scalar map of anisotropy, given a tensor representation. Anisotropy images are used for fiber tracking, but the anisotropy scalars are not defined along the path. Instead, the tensor representation is included as point data allowing all of these metrics to be computed using only the fiber tract point data. The images can be saved in any ITK supported format, but it is suggested that you use an image format that supports the definition of the image origin. This includes NRRD, NifTI, and Meta formats. These images can also be used for scalar analysis including regional anisotropy measures or VBM style analysis. 
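A minimal sketch for gtractAnisotropyMap (hypothetical filenames; assumes the executable is on PATH), selecting one of the seven mapping types from the enum above:

    from nipype.interfaces.semtools.diffusion.gtract import gtractAnisotropyMap

    fa_map = gtractAnisotropyMap()
    fa_map.inputs.inputTensorVolume = "tensor.nrrd"  # hypothetical tensor image
    fa_map.inputs.anisotropyType = "FA"  # any of ADC, FA, RA, VR, AD, RD, LI
    fa_map.inputs.outputVolume = True    # auto-named outputVolume.nrrd
    result = fa_map.run()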
-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractAnisotropyMapInputSpec output_spec = gtractAnisotropyMapOutputSpec _cmd = " gtractAnisotropyMap " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractClipAnisotropyInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image file name", - exists=True, - argstr="--inputVolume %s") + desc="Required: input image file name", exists=True, argstr="--inputVolume %s" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the clipped anisotropy image", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the clipped anisotropy image", + argstr="--outputVolume %s", + ) clipFirstSlice = traits.Bool( - desc="Clip the first slice of the anisotropy image", - argstr="--clipFirstSlice ") + desc="Clip the first slice of the anisotropy image", argstr="--clipFirstSlice " + ) clipLastSlice = traits.Bool( - desc="Clip the last slice of the anisotropy image", - argstr="--clipLastSlice ") + desc="Clip the last slice of the anisotropy image", argstr="--clipLastSlice " + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractClipAnisotropyOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the clipped anisotropy image", - exists=True) + desc="Required: name of output NRRD file containing the clipped anisotropy image", + exists=True, + ) class gtractClipAnisotropy(SEMLikeCommandLine): """title: Clip Anisotropy -category: Diffusion.GTRACT - -description: This program will zero the first and/or last slice of an anisotropy image, creating a clipped anisotropy image. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will zero the first and/or last slice of an anisotropy image, creating a clipped anisotropy image. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
-""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractClipAnisotropyInputSpec output_spec = gtractClipAnisotropyOutputSpec _cmd = " gtractClipAnisotropy " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False @@ -705,66 +718,70 @@ class gtractResampleB0InputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input file containing the 4D image", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) inputAnatomicalVolume = File( - desc= - "Required: input file containing the anatomical image defining the origin, spacing and size of the resampled image (template)", + desc="Required: input file containing the anatomical image defining the origin, spacing and size of the resampled image (template)", exists=True, - argstr="--inputAnatomicalVolume %s") + argstr="--inputAnatomicalVolume %s", + ) inputTransform = File( desc="Required: input Rigid OR Bspline transform file name", exists=True, - argstr="--inputTransform %s") + argstr="--inputTransform %s", + ) vectorIndex = traits.Int( desc="Index in the diffusion weighted image set for the B0 image", - argstr="--vectorIndex %d") + argstr="--vectorIndex %d", + ) transformType = traits.Enum( "Rigid", "B-Spline", desc="Transform type: Rigid, B-Spline", - argstr="--transformType %s") + argstr="--transformType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the resampled input image.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the resampled input image.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractResampleB0OutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the resampled input image.", - exists=True) + desc="Required: name of output NRRD file containing the resampled input image.", + exists=True, + ) class gtractResampleB0(SEMLikeCommandLine): """title: Resample B0 -category: Diffusion.GTRACT - -description: This program will resample a signed short image using either a Rigid or B-Spline transform. The user must specify a template image that will be used to define the origin, orientation, spacing, and size of the resampled image. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will resample a signed short image using either a Rigid or B-Spline transform. The user must specify a template image that will be used to define the origin, orientation, spacing, and size of the resampled image. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
-""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractResampleB0InputSpec output_spec = gtractResampleB0OutputSpec _cmd = " gtractResampleB0 " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False @@ -772,102 +789,103 @@ class gtractInvertRigidTransformInputSpec(CommandLineInputSpec): inputTransform = File( desc="Required: input rigid transform file name", exists=True, - argstr="--inputTransform %s") + argstr="--inputTransform %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output transform file name", - argstr="--outputTransform %s") + argstr="--outputTransform %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractInvertRigidTransformOutputSpec(TraitedSpec): - outputTransform = File( - desc="Required: output transform file name", exists=True) + outputTransform = File(desc="Required: output transform file name", exists=True) class gtractInvertRigidTransform(SEMLikeCommandLine): """title: Rigid Transform Inversion -category: Diffusion.GTRACT + category: Diffusion.GTRACT -description: This program will invert a Rigid transform. + description: This program will invert a Rigid transform. -version: 4.0.0 + version: 4.0.0 -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
-acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 - -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractInvertRigidTransformInputSpec output_spec = gtractInvertRigidTransformOutputSpec _cmd = " gtractInvertRigidTransform " - _outputs_filenames = {'outputTransform': 'outputTransform.h5'} + _outputs_filenames = {"outputTransform": "outputTransform.h5"} _redirect_x = False class gtractImageConformityInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input file containing the signed short image to reorient without resampling.", + desc="Required: input file containing the signed short image to reorient without resampling.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) inputReferenceVolume = File( - desc= - "Required: input file containing the standard image to clone the characteristics of.", + desc="Required: input file containing the standard image to clone the characteristics of.", exists=True, - argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", - argstr="--outputVolume %s") + desc="Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractImageConformityOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", - exists=True) + desc="Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", + exists=True, + ) class gtractImageConformity(SEMLikeCommandLine): """title: Image Conformity -category: Diffusion.GTRACT - -description: This program will straighten out the Direction and Origin to match the Reference Image. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will straighten out the Direction and Origin to match the Reference Image. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
-""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractImageConformityInputSpec output_spec = gtractImageConformityOutputSpec _cmd = " gtractImageConformity " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False @@ -875,31 +893,36 @@ class compareTractInclusionInputSpec(CommandLineInputSpec): testFiber = File( desc="Required: test fiber tract file name", exists=True, - argstr="--testFiber %s") + argstr="--testFiber %s", + ) standardFiber = File( desc="Required: standard fiber tract file name", exists=True, - argstr="--standardFiber %s") + argstr="--standardFiber %s", + ) closeness = traits.Float( - desc= - "Closeness of every test fiber to some fiber in the standard tract, computed as a sum of squares of spatial differences of standard points", - argstr="--closeness %f") + desc="Closeness of every test fiber to some fiber in the standard tract, computed as a sum of squares of spatial differences of standard points", + argstr="--closeness %f", + ) numberOfPoints = traits.Int( - desc="Number of points in comparison fiber pairs", - argstr="--numberOfPoints %d") + desc="Number of points in comparison fiber pairs", argstr="--numberOfPoints %d" + ) testForBijection = traits.Bool( desc="Flag to apply the closeness criterion both ways", - argstr="--testForBijection ") + argstr="--testForBijection ", + ) testForFiberCardinality = traits.Bool( desc="Flag to require the same number of fibers in both tracts", - argstr="--testForFiberCardinality ") + argstr="--testForFiberCardinality ", + ) writeXMLPolyDataFile = traits.Bool( - desc= - "Flag to make use of XML files when reading and writing vtkPolyData.", - argstr="--writeXMLPolyDataFile ") + desc="Flag to make use of XML files when reading and writing vtkPolyData.", + argstr="--writeXMLPolyDataFile ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class compareTractInclusionOutputSpec(TraitedSpec): @@ -909,21 +932,20 @@ class compareTractInclusionOutputSpec(TraitedSpec): class compareTractInclusion(SEMLikeCommandLine): """title: Compare Tracts -category: Diffusion.GTRACT + category: Diffusion.GTRACT -description: This program will halt with a status code indicating whether a test tract is nearly enough included in a standard tract in the sense that every fiber in the test tract has a low enough sum of squares distance to some fiber in the standard tract modulo spline resampling of every fiber to a fixed number of points. + description: This program will halt with a status code indicating whether a test tract is nearly enough included in a standard tract in the sense that every fiber in the test tract has a low enough sum of squares distance to some fiber in the standard tract modulo spline resampling of every fiber to a fixed number of points. -version: 4.0.0 + version: 4.0.0 -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
-acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 - -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = compareTractInclusionInputSpec output_spec = compareTractInclusionOutputSpec @@ -936,109 +958,121 @@ class gtractFastMarchingTrackingInputSpec(CommandLineInputSpec): inputTensorVolume = File( desc="Required: input tensor image file name", exists=True, - argstr="--inputTensorVolume %s") + argstr="--inputTensorVolume %s", + ) inputAnisotropyVolume = File( desc="Required: input anisotropy image file name", exists=True, - argstr="--inputAnisotropyVolume %s") + argstr="--inputAnisotropyVolume %s", + ) inputCostVolume = File( desc="Required: input vcl_cost image file name", exists=True, - argstr="--inputCostVolume %s") + argstr="--inputCostVolume %s", + ) inputStartingSeedsLabelMapVolume = File( desc="Required: input starting seeds LabelMap image file name", exists=True, - argstr="--inputStartingSeedsLabelMapVolume %s") + argstr="--inputStartingSeedsLabelMapVolume %s", + ) startingSeedsLabel = traits.Int( - desc="Label value for Starting Seeds", - argstr="--startingSeedsLabel %d") + desc="Label value for Starting Seeds", argstr="--startingSeedsLabel %d" + ) outputTract = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", - argstr="--outputTract %s") + desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", + argstr="--outputTract %s", + ) writeXMLPolyDataFile = traits.Bool( desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", - argstr="--writeXMLPolyDataFile ") + argstr="--writeXMLPolyDataFile ", + ) numberOfIterations = traits.Int( desc="Number of iterations used for the optimization", - argstr="--numberOfIterations %d") + argstr="--numberOfIterations %d", + ) seedThreshold = traits.Float( - desc="Anisotropy threshold used for seed selection", - argstr="--seedThreshold %f") + desc="Anisotropy threshold used for seed selection", argstr="--seedThreshold %f" + ) trackingThreshold = traits.Float( desc="Anisotropy threshold used for fiber tracking", - argstr="--trackingThreshold %f") + argstr="--trackingThreshold %f", + ) costStepSize = traits.Float( - desc="Cost image sub-voxel sampling", argstr="--costStepSize %f") + desc="Cost image sub-voxel sampling", argstr="--costStepSize %f" + ) maximumStepSize = traits.Float( - desc="Maximum step size to move when tracking", - argstr="--maximumStepSize %f") + desc="Maximum step size to move when tracking", argstr="--maximumStepSize %f" + ) minimumStepSize = traits.Float( - desc="Minimum step size to move when tracking", - argstr="--minimumStepSize %f") + desc="Minimum step size to move when tracking", argstr="--minimumStepSize %f" + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractFastMarchingTrackingOutputSpec(TraitedSpec): outputTract = File( - desc= - "Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", - exists=True) + desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", + exists=True, + ) class gtractFastMarchingTracking(SEMLikeCommandLine): """title: Fast Marching 
Tracking -category: Diffusion.GTRACT - -description: This program will use a fast marching fiber tracking algorithm to identify fiber tracts from a tensor image. This program is the second portion of the algorithm. The user must first run gtractCostFastMarching to generate the vcl_cost image. The second step of the algorithm implemented here is a gradient descent soplution from the defined ending region back to the seed points specified in gtractCostFastMarching. This algorithm is roughly based on the work by G. Parker et al. from IEEE Transactions On Medical Imaging, 21(5): 505-512, 2002. An additional feature of including anisotropy into the vcl_cost function calculation is included. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will use a fast marching fiber tracking algorithm to identify fiber tracts from a tensor image. This program is the second portion of the algorithm. The user must first run gtractCostFastMarching to generate the vcl_cost image. The second step of the algorithm implemented here is a gradient descent solution from the defined ending region back to the seed points specified in gtractCostFastMarching. This algorithm is roughly based on the work by G. Parker et al. from IEEE Transactions On Medical Imaging, 21(5): 505-512, 2002. An additional feature of including anisotropy into the vcl_cost function calculation is included. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. The original code here was developed by Daisy Espino. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. The original code here was developed by Daisy Espino. 
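A sketch of the two-stage pipeline the description above refers to (gtractCostFastMarching, then gtractFastMarchingTracking) -- illustrative only and not part of this diff. All input file names are hypothetical placeholders that must exist on disk (exists=True), and .run() requires the GTRACT binaries to be installed.

from nipype.interfaces.semtools.diffusion.gtract import (
    gtractCostFastMarching,
    gtractFastMarchingTracking,
)

# Stage 1: estimate the vcl_cost image from the tensor and anisotropy volumes.
cost = gtractCostFastMarching()
cost.inputs.inputTensorVolume = "tensor.nrrd"                # hypothetical; must exist
cost.inputs.inputAnisotropyVolume = "anisotropy.nrrd"        # hypothetical; must exist
cost.inputs.inputStartingSeedsLabelMapVolume = "seeds.nrrd"  # hypothetical; must exist
cost.inputs.outputCostVolume = True  # True -> auto-named outputCostVolume.nrrd
cost_result = cost.run()

# Stage 2: gradient descent tracking back through the cost image.
track = gtractFastMarchingTracking()
track.inputs.inputTensorVolume = "tensor.nrrd"
track.inputs.inputAnisotropyVolume = "anisotropy.nrrd"
track.inputs.inputCostVolume = cost_result.outputs.outputCostVolume
track.inputs.inputStartingSeedsLabelMapVolume = "seeds.nrrd"
track.inputs.outputTract = True      # True -> auto-named outputTract.vtk
track.run()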
-""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractFastMarchingTrackingInputSpec output_spec = gtractFastMarchingTrackingOutputSpec _cmd = " gtractFastMarchingTracking " - _outputs_filenames = {'outputTract': 'outputTract.vtk'} + _outputs_filenames = {"outputTract": "outputTract.vtk"} _redirect_x = False class gtractInvertDisplacementFieldInputSpec(CommandLineInputSpec): baseImage = File( - desc= - "Required: base image used to define the size of the inverse field", + desc="Required: base image used to define the size of the inverse field", exists=True, - argstr="--baseImage %s") + argstr="--baseImage %s", + ) deformationImage = File( desc="Required: Displacement field image", exists=True, - argstr="--deformationImage %s") + argstr="--deformationImage %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: Output deformation field", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) subsamplingFactor = traits.Int( desc="Subsampling factor for the deformation field", - argstr="--subsamplingFactor %d") + argstr="--subsamplingFactor %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractInvertDisplacementFieldOutputSpec(TraitedSpec): @@ -1048,151 +1082,157 @@ class gtractInvertDisplacementFieldOutputSpec(TraitedSpec): class gtractInvertDisplacementField(SEMLikeCommandLine): """title: Invert Displacement Field -category: Diffusion.GTRACT + category: Diffusion.GTRACT -description: This program will invert a deformatrion field. The size of the deformation field is defined by an example image provided by the user + description: This program will invert a deformatrion field. The size of the deformation field is defined by an example image provided by the user -version: 4.0.0 + version: 4.0.0 -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -contributor: This tool was developed by Vincent Magnotta. + contributor: This tool was developed by Vincent Magnotta. -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 - -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractInvertDisplacementFieldInputSpec output_spec = gtractInvertDisplacementFieldOutputSpec _cmd = " gtractInvertDisplacementField " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractCoRegAnatomyInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input vector image file name. It is recommended that the input volume is the skull stripped baseline image of the DWI scan.", + desc="Required: input vector image file name. It is recommended that the input volume is the skull stripped baseline image of the DWI scan.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) inputAnatomicalVolume = File( - desc= - "Required: input anatomical image file name. 
It is recommended that that the input anatomical image has been skull stripped and has the same orientation as the DWI scan.", + desc="Required: input anatomical image file name. It is recommended that the input anatomical image has been skull stripped and has the same orientation as the DWI scan.", exists=True, - argstr="--inputAnatomicalVolume %s") + argstr="--inputAnatomicalVolume %s", + ) vectorIndex = traits.Int( - desc= - "Vector image index in the moving image (within the DWI) to be used for registration.", - argstr="--vectorIndex %d") + desc="Vector image index in the moving image (within the DWI) to be used for registration.", + argstr="--vectorIndex %d", + ) inputRigidTransform = File( - desc= - "Required (for B-Spline type co-registration): input rigid transform file name. Used as a starting point for the anatomical B-Spline registration.", + desc="Required (for B-Spline type co-registration): input rigid transform file name. Used as a starting point for the anatomical B-Spline registration.", exists=True, - argstr="--inputRigidTransform %s") + argstr="--inputRigidTransform %s", + ) outputTransformName = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: filename for the fit transform.", - argstr="--outputTransformName %s") + argstr="--outputTransformName %s", + ) transformType = traits.Enum( "Rigid", "Bspline", desc="Transform Type: Rigid|Bspline", - argstr="--transformType %s") + argstr="--transformType %s", + ) numberOfIterations = traits.Int( desc="Number of iterations in the selected 3D fit", - argstr="--numberOfIterations %d") + argstr="--numberOfIterations %d", + ) gridSize = InputMultiPath( traits.Int, desc="Number of grid subdivisions in all 3 directions", sep=",", - argstr="--gridSize %s") + argstr="--gridSize %s", + ) borderSize = traits.Int(desc="Size of border", argstr="--borderSize %d") numberOfHistogramBins = traits.Int( - desc="Number of histogram bins", argstr="--numberOfHistogramBins %d") + desc="Number of histogram bins", argstr="--numberOfHistogramBins %d" + ) spatialScale = traits.Int( - desc= - "Scales the number of voxels in the image by this value to specify the number of voxels used in the registration", - argstr="--spatialScale %d") - convergence = traits.Float( - desc="Convergence Factor", argstr="--convergence %f") + desc="Scales the number of voxels in the image by this value to specify the number of voxels used in the registration", + argstr="--spatialScale %d", + ) + convergence = traits.Float(desc="Convergence Factor", argstr="--convergence %f") gradientTolerance = traits.Float( - desc="Gradient Tolerance", argstr="--gradientTolerance %f") + desc="Gradient Tolerance", argstr="--gradientTolerance %f" + ) maxBSplineDisplacement = traits.Float( - desc= - " Sets the maximum allowed displacements in image physical coordinates for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", - argstr="--maxBSplineDisplacement %f") + desc=" Sets the maximum allowed displacements in image physical coordinates for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. 
This can lead to a substantial reduction in computation time in the BSpline optimizer., ", + argstr="--maxBSplineDisplacement %f", + ) maximumStepSize = traits.Float( desc="Maximum permitted step size to move in the selected 3D fit", - argstr="--maximumStepSize %f") + argstr="--maximumStepSize %f", + ) minimumStepSize = traits.Float( - desc= - "Minimum required step size to move in the selected 3D fit without converging -- decrease this to make the fit more exacting", - argstr="--minimumStepSize %f") + desc="Minimum required step size to move in the selected 3D fit without converging -- decrease this to make the fit more exacting", + argstr="--minimumStepSize %f", + ) translationScale = traits.Float( - desc= - "How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more translation in the fit", - argstr="--translationScale %f") + desc="How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more translation in the fit", + argstr="--translationScale %f", + ) relaxationFactor = traits.Float( - desc= - "Fraction of gradient from Jacobian to attempt to move in the selected 3D fit", - argstr="--relaxationFactor %f") + desc="Fraction of gradient from Jacobian to attempt to move in the selected 3D fit", + argstr="--relaxationFactor %f", + ) numberOfSamples = traits.Int( - desc= - "The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. However, if set, it overwrites the samplingPercentage option. ", - argstr="--numberOfSamples %d") + desc="The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. However, if set, it overwrites the samplingPercentage option. ", + argstr="--numberOfSamples %d", + ) samplingPercentage = traits.Float( - desc= - "This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). Typical values range from 1% for low detail images to 20% for high detail images.", - argstr="--samplingPercentage %f") + desc="This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). Typical values range from 1% for low detail images to 20% for high detail images.", + argstr="--samplingPercentage %f", + ) useMomentsAlign = traits.Bool( - desc= - "MomentsAlign assumes that the center of mass of the images represent similar structures. Perform a MomentsAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either CenterOfHeadLAlign, GeometryAlign, or initialTransform file. 
This family of options superceeds the use of transformType if any of them are set.", - argstr="--useMomentsAlign ") + desc="MomentsAlign assumes that the center of mass of the images represent similar structures. Perform a MomentsAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either CenterOfHeadAlign, GeometryAlign, or initialTransform file. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useMomentsAlign ", + ) useGeometryAlign = traits.Bool( - desc= - "GeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. Perform a GeometryCenterAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, CenterOfHeadAlign, or initialTransform file. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useGeometryAlign ") + desc="GeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. Perform a GeometryCenterAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, CenterOfHeadAlign, or initialTransform file. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useGeometryAlign ", + ) useCenterOfHeadAlign = traits.Bool( - desc= - "CenterOfHeadAlign attempts to find a hemisphere full of foreground voxels from the superior direction as an estimate of where the center of a head shape would be to drive a center of mass estimate. Perform a CenterOfHeadAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, GeometryAlign, or initialTransform file. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useCenterOfHeadAlign ") + desc="CenterOfHeadAlign attempts to find a hemisphere full of foreground voxels from the superior direction as an estimate of where the center of a head shape would be to drive a center of mass estimate. Perform a CenterOfHeadAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, GeometryAlign, or initialTransform file. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useCenterOfHeadAlign ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractCoRegAnatomyOutputSpec(TraitedSpec): outputTransformName = File( - desc="Required: filename for the fit transform.", exists=True) + desc="Required: filename for the fit transform.", exists=True + ) class gtractCoRegAnatomy(SEMLikeCommandLine): """title: Coregister B0 to Anatomy B-Spline -category: Diffusion.GTRACT - -description: This program will register a Nrrd diffusion weighted 4D vector image to a fixed anatomical image. Two registration methods are supported for alignment with anatomical images: Rigid and B-Spline. The rigid registration performs a rigid body registration with the anatomical images and should be done as well to initialize the B-Spline transform. 
The B-SPline transform is the deformable transform, where the user can control the amount of deformation based on the number of control points as well as the maximum distance that these points can move. The B-Spline registration places a low dimensional grid in the image, which is deformed. This allows for some susceptibility related distortions to be removed from the diffusion weighted images. In general the amount of motion in the slice selection and read-out directions direction should be kept low. The distortion is in the phase encoding direction in the images. It is recommended that skull stripped (i.e. image containing only brain with skull removed) images shoud be used for image co-registration with the B-Spline transform. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will register a Nrrd diffusion weighted 4D vector image to a fixed anatomical image. Two registration methods are supported for alignment with anatomical images: Rigid and B-Spline. The rigid registration performs a rigid body registration with the anatomical images and should be done as well to initialize the B-Spline transform. The B-Spline transform is the deformable transform, where the user can control the amount of deformation based on the number of control points as well as the maximum distance that these points can move. The B-Spline registration places a low dimensional grid in the image, which is deformed. This allows for some susceptibility related distortions to be removed from the diffusion weighted images. In general the amount of motion in the slice selection and read-out directions should be kept low. The distortion is in the phase encoding direction in the images. It is recommended that skull stripped (i.e. image containing only brain with skull removed) images should be used for image co-registration with the B-Spline transform. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
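A sketch of the rigid-then-B-Spline workflow this description lays out -- illustrative only and not part of this diff. The image and transform file names are hypothetical; since the File inputs carry exists=True, in a real run the rigid pass must complete (producing rigid.h5) before the B-Spline interface will accept it.

from nipype.interfaces.semtools.diffusion.gtract import gtractCoRegAnatomy

# Rigid pass first; its transform then initializes the B-Spline pass.
rigid = gtractCoRegAnatomy(transformType="Rigid")
rigid.inputs.inputVolume = "dwi.nrrd"                 # hypothetical; must exist
rigid.inputs.inputAnatomicalVolume = "t1_brain.nrrd"  # skull stripped; hypothetical
rigid.inputs.vectorIndex = 0                          # register against the b0 volume
rigid.inputs.outputTransformName = "rigid.h5"

bspline = gtractCoRegAnatomy(transformType="Bspline")
bspline.inputs.inputVolume = "dwi.nrrd"
bspline.inputs.inputAnatomicalVolume = "t1_brain.nrrd"
bspline.inputs.vectorIndex = 0
bspline.inputs.inputRigidTransform = "rigid.h5"       # produced by the rigid pass
bspline.inputs.gridSize = [18, 18, 18]                # hypothetical grid subdivisions
bspline.inputs.outputTransformName = "bspline.h5"
print(rigid.cmdline)
print(bspline.cmdline)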
-""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractCoRegAnatomyInputSpec output_spec = gtractCoRegAnatomyOutputSpec _cmd = " gtractCoRegAnatomy " - _outputs_filenames = {'outputTransformName': 'outputTransformName.h5'} + _outputs_filenames = {"outputTransformName": "outputTransformName.h5"} _redirect_x = False @@ -1200,86 +1240,88 @@ class gtractResampleDWIInPlaceInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image is a 4D NRRD image.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) referenceVolume = File( - desc= - "If provided, resample to the final space of the referenceVolume 3D data set.", + desc="If provided, resample to the final space of the referenceVolume 3D data set.", exists=True, - argstr="--referenceVolume %s") + argstr="--referenceVolume %s", + ) outputResampledB0 = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Convenience function for extracting the first index location (assumed to be the B0)", - argstr="--outputResampledB0 %s") + desc="Convenience function for extracting the first index location (assumed to be the B0)", + argstr="--outputResampledB0 %s", + ) inputTransform = File( - desc= - "Required: transform file derived from rigid registration of b0 image to reference structural image.", + desc="Required: transform file derived from rigid registration of b0 image to reference structural image.", exists=True, - argstr="--inputTransform %s") + argstr="--inputTransform %s", + ) warpDWITransform = File( desc="Optional: transform file to warp gradient volumes.", exists=True, - argstr="--warpDWITransform %s") + argstr="--warpDWITransform %s", + ) debugLevel = traits.Int( - desc= - "Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", - argstr="--debugLevel %d") + desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", + argstr="--debugLevel %d", + ) imageOutputSize = InputMultiPath( traits.Int, - desc= - "The voxel lattice for the output image, padding is added if necessary. NOTE: if 0,0,0, then the inputVolume size is used.", + desc="The voxel lattice for the output image, padding is added if necessary. 
NOTE: if 0,0,0, then the inputVolume size is used.", sep=",", - argstr="--imageOutputSize %s") + argstr="--imageOutputSize %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.", - argstr="--outputVolume %s") + desc="Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractResampleDWIInPlaceOutputSpec(TraitedSpec): outputResampledB0 = File( - desc= - "Convenience function for extracting the first index location (assumed to be the B0)", - exists=True) + desc="Convenience function for extracting the first index location (assumed to be the B0)", + exists=True, + ) outputVolume = File( - desc= - "Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.", - exists=True) + desc="Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.", + exists=True, + ) class gtractResampleDWIInPlace(SEMLikeCommandLine): """title: Resample DWI In Place -category: Diffusion.GTRACT - -description: Resamples DWI image to structural image. + category: Diffusion.GTRACT -version: 4.0.0 + description: Resamples DWI image to structural image. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta, Greg Harris, Hans Johnson, and Joy Matsui. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta, Greg Harris, Hans Johnson, and Joy Matsui. 
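A minimal sketch of gtractResampleDWIInPlace, the step that carries the rigid b0-to-structural transform back onto the 4D DWI -- illustrative only and not part of this diff; file names are hypothetical and must exist for the exists=True inputs.

from nipype.interfaces.semtools.diffusion.gtract import gtractResampleDWIInPlace

resample = gtractResampleDWIInPlace()
resample.inputs.inputVolume = "dwi.nrrd"      # 4D NRRD; hypothetical, must exist
resample.inputs.inputTransform = "rigid.h5"   # b0-to-structural rigid transform
resample.inputs.outputVolume = True           # True -> auto-named outputVolume.nii
resample.inputs.outputResampledB0 = True      # True -> auto-named outputResampledB0.nii
print(resample.cmdline)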
-""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractResampleDWIInPlaceInputSpec output_spec = gtractResampleDWIInPlaceOutputSpec _cmd = " gtractResampleDWIInPlace " _outputs_filenames = { - 'outputResampledB0': 'outputResampledB0.nii', - 'outputVolume': 'outputVolume.nii' + "outputResampledB0": "outputResampledB0.nii", + "outputVolume": "outputVolume.nii", } _redirect_x = False @@ -1288,42 +1330,50 @@ class gtractCostFastMarchingInputSpec(CommandLineInputSpec): inputTensorVolume = File( desc="Required: input tensor image file name", exists=True, - argstr="--inputTensorVolume %s") + argstr="--inputTensorVolume %s", + ) inputAnisotropyVolume = File( desc="Required: input anisotropy image file name", exists=True, - argstr="--inputAnisotropyVolume %s") + argstr="--inputAnisotropyVolume %s", + ) inputStartingSeedsLabelMapVolume = File( desc="Required: input starting seeds LabelMap image file name", exists=True, - argstr="--inputStartingSeedsLabelMapVolume %s") + argstr="--inputStartingSeedsLabelMapVolume %s", + ) startingSeedsLabel = traits.Int( - desc="Label value for Starting Seeds", - argstr="--startingSeedsLabel %d") + desc="Label value for Starting Seeds", argstr="--startingSeedsLabel %d" + ) outputCostVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output vcl_cost image", - argstr="--outputCostVolume %s") + argstr="--outputCostVolume %s", + ) outputSpeedVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output speed image", - argstr="--outputSpeedVolume %s") + argstr="--outputSpeedVolume %s", + ) anisotropyWeight = traits.Float( desc="Anisotropy weight used for vcl_cost function calculations", - argstr="--anisotropyWeight %f") + argstr="--anisotropyWeight %f", + ) stoppingValue = traits.Float( - desc="Terminiating value for vcl_cost function estimation", - argstr="--stoppingValue %f") + desc="Terminating value for vcl_cost function estimation", + argstr="--stoppingValue %f", + ) seedThreshold = traits.Float( - desc="Anisotropy threshold used for seed selection", - argstr="--seedThreshold %f") + desc="Anisotropy threshold used for seed selection", argstr="--seedThreshold %f" + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractCostFastMarchingOutputSpec(TraitedSpec): @@ -1334,228 +1384,235 @@ class gtractCostFastMarchingOutputSpec(TraitedSpec): class gtractCostFastMarching(SEMLikeCommandLine): """title: Cost Fast Marching -category: Diffusion.GTRACT - -description: This program will use a fast marching fiber tracking algorithm to identify fiber tracts from a tensor image. This program is the first portion of the algorithm. The user must first run gtractFastMarchingTracking to generate the actual fiber tracts. This algorithm is roughly based on the work by G. Parker et al. from IEEE Transactions On Medical Imaging, 21(5): 505-512, 2002. An additional feature of including anisotropy into the vcl_cost function calculation is included. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will use a fast marching fiber tracking algorithm to identify fiber tracts from a tensor image. This program is the first portion of the algorithm. The user must first run gtractFastMarchingTracking to generate the actual fiber tracts. This algorithm is roughly based on the work by G. Parker et al. 
from IEEE Transactions On Medical Imaging, 21(5): 505-512, 2002. An additional feature of including anisotropy into the vcl_cost function calculation is included. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. The original code here was developed by Daisy Espino. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. The original code here was developed by Daisy Espino. -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractCostFastMarchingInputSpec output_spec = gtractCostFastMarchingOutputSpec _cmd = " gtractCostFastMarching " _outputs_filenames = { - 'outputCostVolume': 'outputCostVolume.nrrd', - 'outputSpeedVolume': 'outputSpeedVolume.nrrd' + "outputCostVolume": "outputCostVolume.nrrd", + "outputSpeedVolume": "outputSpeedVolume.nrrd", } _redirect_x = False class gtractFiberTrackingInputSpec(CommandLineInputSpec): inputTensorVolume = File( - desc= - "Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input tensor image file name", + desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input tensor image file name", exists=True, - argstr="--inputTensorVolume %s") + argstr="--inputTensorVolume %s", + ) inputAnisotropyVolume = File( - desc= - "Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input anisotropy image file name", + desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input anisotropy image file name", exists=True, - argstr="--inputAnisotropyVolume %s") + argstr="--inputAnisotropyVolume %s", + ) inputStartingSeedsLabelMapVolume = File( - desc= - "Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input starting seeds LabelMap image file name", + desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input starting seeds LabelMap image file name", exists=True, - argstr="--inputStartingSeedsLabelMapVolume %s") + argstr="--inputStartingSeedsLabelMapVolume %s", + ) startingSeedsLabel = traits.Int( - desc= - "Label value for Starting Seeds (required if Label number used to create seed point in Slicer was not 1)", - argstr="--startingSeedsLabel %d") + desc="Label value for Starting Seeds (required if Label number used to create seed point in Slicer was not 1)", + argstr="--startingSeedsLabel %d", + ) inputEndingSeedsLabelMapVolume = File( - desc= - "Required (for Streamline, GraphSearch, and Guided fiber tracking methods): input ending seeds LabelMap image file name", + desc="Required (for Streamline, GraphSearch, and Guided fiber tracking methods): input ending seeds LabelMap image file name", exists=True, - argstr="--inputEndingSeedsLabelMapVolume %s") + argstr="--inputEndingSeedsLabelMapVolume %s", + ) endingSeedsLabel = traits.Int( - desc= - "Label value for Ending Seeds (required if Label number used to create seed point in Slicer was not 1)", - argstr="--endingSeedsLabel %d") + desc="Label value for Ending Seeds (required if Label 
number used to create seed point in Slicer was not 1)", + argstr="--endingSeedsLabel %d", + ) inputTract = File( - desc= - "Required (for Guided fiber tracking method): guide fiber in vtkPolydata file containing one tract line.", + desc="Required (for Guided fiber tracking method): guide fiber in vtkPolydata file containing one tract line.", exists=True, - argstr="--inputTract %s") + argstr="--inputTract %s", + ) outputTract = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and the point data collected along them.", - argstr="--outputTract %s") + desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and the point data collected along them.", + argstr="--outputTract %s", + ) writeXMLPolyDataFile = traits.Bool( desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", - argstr="--writeXMLPolyDataFile ") + argstr="--writeXMLPolyDataFile ", + ) trackingMethod = traits.Enum( "Guided", "Free", "Streamline", "GraphSearch", desc="Fiber tracking Filter Type: Guided|Free|Streamline|GraphSearch", - argstr="--trackingMethod %s") + argstr="--trackingMethod %s", + ) guidedCurvatureThreshold = traits.Float( desc="Guided Curvature Threshold (Degrees)", - argstr="--guidedCurvatureThreshold %f") + argstr="--guidedCurvatureThreshold %f", + ) maximumGuideDistance = traits.Float( desc="Maximum distance for using the guide fiber direction", - argstr="--maximumGuideDistance %f") + argstr="--maximumGuideDistance %f", + ) seedThreshold = traits.Float( - desc= - "Anisotropy threshold for seed selection (recommended for Free fiber tracking)", - argstr="--seedThreshold %f") + desc="Anisotropy threshold for seed selection (recommended for Free fiber tracking)", + argstr="--seedThreshold %f", + ) trackingThreshold = traits.Float( - desc= - "Anisotropy threshold for fiber tracking (anisotropy values of the next point along the path)", - argstr="--trackingThreshold %f") + desc="Anisotropy threshold for fiber tracking (anisotropy values of the next point along the path)", + argstr="--trackingThreshold %f", + ) curvatureThreshold = traits.Float( - desc= - "Curvature threshold in degrees (recommended for Free fiber tracking)", - argstr="--curvatureThreshold %f") + desc="Curvature threshold in degrees (recommended for Free fiber tracking)", + argstr="--curvatureThreshold %f", + ) branchingThreshold = traits.Float( - desc= - "Anisotropy Branching threshold (recommended for GraphSearch fiber tracking method)", - argstr="--branchingThreshold %f") + desc="Anisotropy Branching threshold (recommended for GraphSearch fiber tracking method)", + argstr="--branchingThreshold %f", + ) maximumBranchPoints = traits.Int( - desc= - "Maximum branch points (recommended for GraphSearch fiber tracking method)", - argstr="--maximumBranchPoints %d") + desc="Maximum branch points (recommended for GraphSearch fiber tracking method)", + argstr="--maximumBranchPoints %d", + ) useRandomWalk = traits.Bool( - desc="Flag to use random walk.", argstr="--useRandomWalk ") + desc="Flag to use random walk.", argstr="--useRandomWalk " + ) randomSeed = traits.Int( - desc="Random number generator seed", argstr="--randomSeed %d") + desc="Random number generator seed", argstr="--randomSeed %d" + ) branchingAngle = traits.Float( - desc= - "Branching angle in degrees (recommended for GraphSearch fiber tracking 
method)", - argstr="--branchingAngle %f") + desc="Branching angle in degrees (recommended for GraphSearch fiber tracking method)", + argstr="--branchingAngle %f", + ) minimumLength = traits.Float( desc="Minimum fiber length. Helpful for filtering invalid tracts.", - argstr="--minimumLength %f") + argstr="--minimumLength %f", + ) maximumLength = traits.Float( - desc="Maximum fiber length (voxels)", argstr="--maximumLength %f") - stepSize = traits.Float( - desc="Fiber tracking step size", argstr="--stepSize %f") + desc="Maximum fiber length (voxels)", argstr="--maximumLength %f" + ) + stepSize = traits.Float(desc="Fiber tracking step size", argstr="--stepSize %f") useLoopDetection = traits.Bool( - desc="Flag to make use of loop detection.", - argstr="--useLoopDetection ") + desc="Flag to make use of loop detection.", argstr="--useLoopDetection " + ) useTend = traits.Bool( - desc="Flag to make use of Tend F and Tend G parameters.", - argstr="--useTend ") + desc="Flag to make use of Tend F and Tend G parameters.", argstr="--useTend " + ) tendF = traits.Float(desc="Tend F parameter", argstr="--tendF %f") tendG = traits.Float(desc="Tend G parameter", argstr="--tendG %f") numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractFiberTrackingOutputSpec(TraitedSpec): outputTract = File( - desc= - "Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and the point data collected along them.", - exists=True) + desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and the point data collected along them.", + exists=True, + ) class gtractFiberTracking(SEMLikeCommandLine): """title: Fiber Tracking -category: Diffusion.GTRACT - -description: This program implements four fiber tracking methods (Free, Streamline, GraphSearch, Guided). The output of the fiber tracking is vtkPolyData (i.e. Polylines) that can be loaded into Slicer3 for visualization. The poly data can be saved in either old VTK format files (.vtk) or in the new VTK XML format (.xml). The polylines contain point data that defines ther Tensor at each point along the fiber tract. This can then be used to rendered as glyphs in Slicer3 and can be used to define severeal scalar measures without referencing back to the anisotropy images. (1) Free tracking is a basic streamlines algorithm. This is a direct implementation of the method original proposed by Basser et al. The tracking follows the primarty eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either as a result of maximum fiber length, low ansiotropy, or large curvature. This is a great way to explore your data. (2) The streamlines algorithm is a direct implementation of the method originally proposed by Basser et al. The tracking follows the primary eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either by reaching the ending region or reaching some stopping criteria. Stopping criteria are specified using the following parameters: tracking threshold, curvature threshold, and max length. 
Only paths terminating in the ending region are kept in this method. The TEND algorithm proposed by Lazar et al. (Human Brain Mapping 18:306-321, 2003) has been instrumented. This can be enabled using the --useTend option while performing Streamlines tracking. This utilizes the entire diffusion tensor to deflect the incoming vector instead of simply following the primary eigenvector. The TEND parameters are set using the --tendF and --tendG options. (3) Graph Search tracking is the first step in the full GTRACT algorithm developed by Cheng et al. (NeuroImage 31(3): 1075-1085, 2006) for finding the tracks in a tensor image. This method was developed to generate fibers in a Tensor representation where crossing fibers occur. The graph search algorithm follows the primary eigenvector in non-ambigous regions and utilizes branching and a graph search algorithm in ambigous regions. Ambiguous tracking regions are defined based on two criteria: Branching Al Threshold (anisotropy values below this value and above the traching threshold) and Curvature Major Eigen (angles of the primary eigenvector direction and the current tracking direction). In regions that meet this criteria, two or three tracking paths are considered. The first is the standard primary eigenvector direction. The second is the seconadary eigenvector direction. This is based on the assumption that these regions may be prolate regions. If the Random Walk option is selected then a third direction is also considered. This direction is defined by a cone pointing from the current position to the centroid of the ending region. The interior angle of the cone is specified by the user with the Branch/Guide Angle parameter. A vector contained inside of the cone is selected at random and used as the third direction. This method can also utilize the TEND option where the primary tracking direction is that specified by the TEND method instead of the primary eigenvector. The parameter '--maximumBranchPoints' allows the tracking to have this number of branches being considered at a time. If this number of branch points is exceeded at any time, then the algorithm will revert back to a streamline alogrithm until the number of branches is reduced. This allows the user to constrain the computational complexity of the algorithm. (4) The second phase of the GTRACT algorithm is Guided Tracking. This method incorporates anatomical information about the track orientation using an initial guess of the fiber track. In the originally proposed GTRACT method, this would be created from the fibers resulting from the Graph Search tracking. However, in practice this can be created using any method and could be defined manually. To create the guide fiber the program gtractCreateGuideFiber can be used. This program will load a fiber tract that has been generated and create a centerline representation of the fiber tract (i.e. a single fiber). In this method, the fiber tracking follows the primary eigenvector direction unless it deviates from the guide fiber track by a angle greater than that specified by the '--guidedCurvatureThreshold' parameter. The user must specify the guide fiber when running this program. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program implements four fiber tracking methods (Free, Streamline, GraphSearch, Guided). The output of the fiber tracking is vtkPolyData (i.e. Polylines) that can be loaded into Slicer3 for visualization. 
The poly data can be saved in either old VTK format files (.vtk) or in the new VTK XML format (.xml). The polylines contain point data that defines the Tensor at each point along the fiber tract. This can then be rendered as glyphs in Slicer3 and can be used to define several scalar measures without referencing back to the anisotropy images. (1) Free tracking is a basic streamlines algorithm. This is a direct implementation of the method originally proposed by Basser et al. The tracking follows the primary eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either as a result of maximum fiber length, low anisotropy, or large curvature. This is a great way to explore your data. (2) The streamlines algorithm is a direct implementation of the method originally proposed by Basser et al. The tracking follows the primary eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either by reaching the ending region or reaching some stopping criteria. Stopping criteria are specified using the following parameters: tracking threshold, curvature threshold, and max length. Only paths terminating in the ending region are kept in this method. The TEND algorithm proposed by Lazar et al. (Human Brain Mapping 18:306-321, 2003) has been instrumented. This can be enabled using the --useTend option while performing Streamlines tracking. This utilizes the entire diffusion tensor to deflect the incoming vector instead of simply following the primary eigenvector. The TEND parameters are set using the --tendF and --tendG options. (3) Graph Search tracking is the first step in the full GTRACT algorithm developed by Cheng et al. (NeuroImage 31(3): 1075-1085, 2006) for finding the tracks in a tensor image. This method was developed to generate fibers in a Tensor representation where crossing fibers occur. The graph search algorithm follows the primary eigenvector in non-ambiguous regions and utilizes branching and a graph search algorithm in ambiguous regions. Ambiguous tracking regions are defined based on two criteria: Branching Al Threshold (anisotropy values below this value and above the tracking threshold) and Curvature Major Eigen (angles of the primary eigenvector direction and the current tracking direction). In regions that meet these criteria, two or three tracking paths are considered. The first is the standard primary eigenvector direction. The second is the secondary eigenvector direction. This is based on the assumption that these regions may be prolate regions. If the Random Walk option is selected then a third direction is also considered. This direction is defined by a cone pointing from the current position to the centroid of the ending region. The interior angle of the cone is specified by the user with the Branch/Guide Angle parameter. A vector contained inside of the cone is selected at random and used as the third direction. This method can also utilize the TEND option where the primary tracking direction is that specified by the TEND method instead of the primary eigenvector. The parameter '--maximumBranchPoints' allows the tracking to have this number of branches being considered at a time. 
If this number of branch points is exceeded at any time, then the algorithm will revert back to a streamline algorithm until the number of branches is reduced. This allows the user to constrain the computational complexity of the algorithm. (4) The second phase of the GTRACT algorithm is Guided Tracking. This method incorporates anatomical information about the track orientation using an initial guess of the fiber track. In the originally proposed GTRACT method, this would be created from the fibers resulting from the Graph Search tracking. However, in practice this can be created using any method and could be defined manually. To create the guide fiber the program gtractCreateGuideFiber can be used. This program will load a fiber tract that has been generated and create a centerline representation of the fiber tract (i.e. a single fiber). In this method, the fiber tracking follows the primary eigenvector direction unless it deviates from the guide fiber track by an angle greater than that specified by the '--guidedCurvatureThreshold' parameter. The user must specify the guide fiber when running this program. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta, Greg Harris and Yongqiang Zhao. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta, Greg Harris and Yongqiang Zhao. -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractFiberTrackingInputSpec output_spec = gtractFiberTrackingOutputSpec _cmd = " gtractFiberTracking " - _outputs_filenames = {'outputTract': 'outputTract.vtk'} + _outputs_filenames = {"outputTract": "outputTract.vtk"} _redirect_x = False class extractNrrdVectorIndexInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input file containing the vector that will be extracted", + desc="Required: input file containing the vector that will be extracted", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) vectorIndex = traits.Int( - desc="Index in the vector image to extract", argstr="--vectorIndex %d") + desc="Index in the vector image to extract", argstr="--vectorIndex %d" + ) setImageOrientation = traits.Enum( "AsAcquired", "Axial", "Coronal", "Sagittal", - desc= - "Sets the image orientation of the extracted vector (Axial, Coronal, Sagittal)", - argstr="--setImageOrientation %s") + desc="Sets the image orientation of the extracted vector (Axial, Coronal, Sagittal)", + argstr="--setImageOrientation %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the vector image at the given index", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the vector image at the given index", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class extractNrrdVectorIndexOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file 
containing the vector image at the given index", - exists=True) + desc="Required: name of output NRRD file containing the vector image at the given index", + exists=True, + ) class extractNrrdVectorIndex(SEMLikeCommandLine): """title: Extract Nrrd Index -category: Diffusion.GTRACT + category: Diffusion.GTRACT -description: This program will extract a 3D image (single vector) from a vector 3D image at a given vector index. + description: This program will extract a 3D image (single vector) from a vector 3D image at a given vector index. -version: 4.0.0 + version: 4.0.0 -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + contributor: This tool was developed by Vincent Magnotta and Greg Harris. -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 - -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = extractNrrdVectorIndexInputSpec output_spec = extractNrrdVectorIndexOutputSpec _cmd = " extractNrrdVectorIndex " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False @@ -1563,146 +1620,153 @@ class gtractResampleFibersInputSpec(CommandLineInputSpec): inputForwardDeformationFieldVolume = File( desc="Required: input forward deformation field image file name", exists=True, - argstr="--inputForwardDeformationFieldVolume %s") + argstr="--inputForwardDeformationFieldVolume %s", + ) inputReverseDeformationFieldVolume = File( desc="Required: input reverse deformation field image file name", exists=True, - argstr="--inputReverseDeformationFieldVolume %s") + argstr="--inputReverseDeformationFieldVolume %s", + ) inputTract = File( desc="Required: name of input vtkPolydata file containing tract lines.", exists=True, - argstr="--inputTract %s") + argstr="--inputTract %s", + ) outputTract = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", - argstr="--outputTract %s") + desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", + argstr="--outputTract %s", + ) writeXMLPolyDataFile = traits.Bool( desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", - argstr="--writeXMLPolyDataFile ") + argstr="--writeXMLPolyDataFile ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractResampleFibersOutputSpec(TraitedSpec): outputTract = File( - desc= - "Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", - exists=True) + desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", + exists=True, + ) class gtractResampleFibers(SEMLikeCommandLine): """title: Resample Fibers -category: Diffusion.GTRACT - -description: This program will resample a fiber tract with respect to a pair of deformation fields that represent the forward and reverse deformation 
fields. + category: Diffusion.GTRACT -version: 4.0.0 + description: This program will resample a fiber tract with respect to a pair of deformation fields that represent the forward and reverse deformation fields. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractResampleFibersInputSpec output_spec = gtractResampleFibersOutputSpec _cmd = " gtractResampleFibers " - _outputs_filenames = {'outputTract': 'outputTract.vtk'} + _outputs_filenames = {"outputTract": "outputTract.vtk"} _redirect_x = False class gtractTensorInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input image 4D NRRD image. Must contain data based on at least 6 distinct diffusion directions. The inputVolume is allowed to have multiple b0 and gradient direction images. Averaging of the b0 image is done internally in this step. Prior averaging of the DWIs is not required.", + desc="Required: input image 4D NRRD image. Must contain data based on at least 6 distinct diffusion directions. The inputVolume is allowed to have multiple b0 and gradient direction images. Averaging of the b0 image is done internally in this step. Prior averaging of the DWIs is not required.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the Tensor vector image", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the Tensor vector image", + argstr="--outputVolume %s", + ) medianFilterSize = InputMultiPath( traits.Int, desc="Median filter radius in all 3 directions", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", - desc= - "ROIAUTO: mask is implicitly defined using a otsu forground and hole filling algorithm. ROI: Uses the masks to define what parts of the image should be used for computing the transform. NOMASK: no mask used", - argstr="--maskProcessingMode %s") + desc="ROIAUTO: mask is implicitly defined using an Otsu foreground and hole filling algorithm. ROI: Uses the masks to define what parts of the image should be used for computing the transform. NOMASK: no mask used", + argstr="--maskProcessingMode %s", + ) maskVolume = File( desc="Mask Image, if maskProcessingMode is ROI", exists=True, - argstr="--maskVolume %s") + argstr="--maskVolume %s", + ) backgroundSuppressingThreshold = traits.Int( - desc= - "Image threshold to suppress background. This sets a threshold used on the b0 image to remove background voxels from processing. Typically, values of 100 and 500 work well for Siemens and GE DTI data, respectively. 
Check your data particularly in the globus pallidus to make sure the brain tissue is not being eliminated with this threshold.", - argstr="--backgroundSuppressingThreshold %d") + desc="Image threshold to suppress background. This sets a threshold used on the b0 image to remove background voxels from processing. Typically, values of 100 and 500 work well for Siemens and GE DTI data, respectively. Check your data particularly in the globus pallidus to make sure the brain tissue is not being eliminated with this threshold.", + argstr="--backgroundSuppressingThreshold %d", + ) resampleIsotropic = traits.Bool( - desc= - "Flag to resample to isotropic voxels. Enabling this feature is recommended if fiber tracking will be performed.", - argstr="--resampleIsotropic ") + desc="Flag to resample to isotropic voxels. Enabling this feature is recommended if fiber tracking will be performed.", + argstr="--resampleIsotropic ", + ) size = traits.Float( - desc="Isotropic voxel size to resample to", argstr="--size %f") + size = traits.Float(desc="Isotropic voxel size to resample to", argstr="--size %f") b0Index = traits.Int( - desc="Index in input vector index to extract", argstr="--b0Index %d") + desc="Index in input vector index to extract", argstr="--b0Index %d" + ) applyMeasurementFrame = traits.Bool( desc="Flag to apply the measurement frame to the gradient directions", - argstr="--applyMeasurementFrame ") + argstr="--applyMeasurementFrame ", + ) ignoreIndex = InputMultiPath( traits.Int, - desc= - "Ignore diffusion gradient index. Used to remove specific gradient directions with artifacts.", + desc="Ignore diffusion gradient index. Used to remove specific gradient directions with artifacts.", sep=",", - argstr="--ignoreIndex %s") + argstr="--ignoreIndex %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractTensorOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the Tensor vector image", - exists=True) + desc="Required: name of output NRRD file containing the Tensor vector image", + exists=True, + ) class gtractTensor(SEMLikeCommandLine): """title: Tensor Estimation -category: Diffusion.GTRACT - -description: This step will convert a b-value averaged diffusion tensor image to a 3x3 tensor voxel image. This step takes the diffusion tensor image data and generates a tensor representation of the data based on the signal intensity decay, b values applied, and the diffusion difrections. The apparent diffusion coefficient for a given orientation is computed on a pixel-by-pixel basis by fitting the image data (voxel intensities) to the Stejskal-Tanner equation. + category: Diffusion.GTRACT -version: 4.0.0 + description: This step will convert a b-value averaged diffusion tensor image to a 3x3 tensor voxel image. This step takes the diffusion tensor image data and generates a tensor representation of the data based on the signal intensity decay, b values applied, and the diffusion directions. The apparent diffusion coefficient for a given orientation is computed on a pixel-by-pixel basis by fitting the image data (voxel intensities) to the Stejskal-Tanner equation. 
If at least 6 diffusion directions are used, then the diffusion tensor can be computed. This program uses itk::DiffusionTensor3DReconstructionImageFilter. The user can adjust background threshold, median filter, and isotropic resampling. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT + version: 4.0.0 -license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT -contributor: This tool was developed by Vincent Magnotta and Greg Harris. + license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt -acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + contributor: This tool was developed by Vincent Magnotta and Greg Harris. -""" + acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 + """ input_spec = gtractTensorInputSpec output_spec = gtractTensorOutputSpec _cmd = " gtractTensor " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False diff --git a/nipype/interfaces/semtools/diffusion/maxcurvature.py b/nipype/interfaces/semtools/diffusion/maxcurvature.py index 570109eb1b..86a7580f2d 100644 --- a/nipype/interfaces/semtools/diffusion/maxcurvature.py +++ b/nipype/interfaces/semtools/diffusion/maxcurvature.py @@ -1,23 +1,20 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class maxcurvatureInputSpec(CommandLineInputSpec): image = File(desc="FA Image", exists=True, argstr="--image %s") output = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="Output File", - argstr="--output %s") + traits.Bool, File(), hash_files=False, desc="Output File", argstr="--output %s" + ) sigma = traits.Float(desc="Scale of Gradients", argstr="--sigma %f") verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") @@ -29,28 +26,27 @@ class maxcurvatureOutputSpec(TraitedSpec): class maxcurvature(SEMLikeCommandLine): """title: MaxCurvature-Hessian (DTIProcess) -category: Diffusion - -description: This program computes the Hessian of the FA image (--image). We use this scalar image as a registration input when doing DTI atlas building. For most adult FA we use a sigma of 2 whereas for neonate or primate images and sigma of 1 or 1.5 is more appropriate. For really noisy images, 2.5 - 4 can be considered. The final image (--output) shows the main feature of the input image. + category: Diffusion -version: 1.1.0 + description: This program computes the Hessian of the FA image (--image). We use this scalar image as a registration input when doing DTI atlas building. For most adult FA we use a sigma of 2 whereas for neonate or primate images a sigma of 1 or 1.5 is more appropriate. For really noisy images, 2.5 - 4 can be considered. The final image (--output) shows the main feature of the input image. -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess + version: 1.1.0 -license: Copyright (c) Casey Goodlett. All rights reserved. - See http://www.ia.unc.edu/dev/Copyright.htm for details. - This software is distributed WITHOUT ANY WARRANTY; without even - the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR - PURPOSE. See the above copyright notices for more information. + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess -contributor: Casey Goodlett + license: Copyright (c) Casey Goodlett. All rights reserved. + See http://www.ia.unc.edu/dev/Copyright.htm for details. + This software is distributed WITHOUT ANY WARRANTY; without even + the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + PURPOSE. See the above copyright notices for more information. -acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler. + contributor: Casey Goodlett -""" + acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependencies on boost and a fortran compiler. + """ input_spec = maxcurvatureInputSpec output_spec = maxcurvatureOutputSpec _cmd = " maxcurvature " - _outputs_filenames = {'output': 'output.nii'} + _outputs_filenames = {"output": "output.nii"} _redirect_x = False
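maxcurvature follows the same pattern; a comparable sketch, again with a hypothetical input file and the sigma guidance taken from the description above:

# Sketch only: "fa.nrrd" is a placeholder FA image, not part of this changeset.
from nipype.interfaces.semtools import maxcurvature

mc = maxcurvature()
mc.inputs.image = "fa.nrrd"
mc.inputs.sigma = 2.0     # suggested scale for adult FA per the docstring
mc.inputs.output = True   # a bare True defers to _outputs_filenames ("output.nii")
print(mc.cmdline)         # prints the assembled " maxcurvature ..." command line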
inputVolume=dict(argstr='--inputVolume %s', ), + inputBValues=dict( + argstr="--inputBValues %s", + extensions=None, + ), + inputBVectors=dict( + argstr="--inputBVectors %s", + extensions=None, + ), + inputDicomDirectory=dict( + argstr="--inputDicomDirectory %s", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), outputBValues=dict( - argstr='--outputBValues %s', + argstr="--outputBValues %s", hash_files=False, ), outputBVectors=dict( - argstr='--outputBVectors %s', + argstr="--outputBVectors %s", hash_files=False, ), outputDirectory=dict( - argstr='--outputDirectory %s', + argstr="--outputDirectory %s", hash_files=False, ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - smallGradientThreshold=dict(argstr='--smallGradientThreshold %f', ), - transposeInputBVectors=dict(argstr='--transposeInputBVectors ', ), + smallGradientThreshold=dict( + argstr="--smallGradientThreshold %f", + ), + transposeInputBVectors=dict( + argstr="--transposeInputBVectors ", + ), useBMatrixGradientDirections=dict( - argstr='--useBMatrixGradientDirections ', ), + argstr="--useBMatrixGradientDirections ", + ), useIdentityMeaseurementFrame=dict( - argstr='--useIdentityMeaseurementFrame ', ), + argstr="--useIdentityMeaseurementFrame ", + ), writeProtocolGradientsFile=dict( - argstr='--writeProtocolGradientsFile ', ), + argstr="--writeProtocolGradientsFile ", + ), ) inputs = DWIConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIConvert_outputs(): output_map = dict( - gradientVectorFile=dict(), - outputBValues=dict(), - outputBVectors=dict(), + gradientVectorFile=dict( + extensions=None, + ), + outputBValues=dict( + extensions=None, + ), + outputBVectors=dict( + extensions=None, + ), outputDirectory=dict(), - outputVolume=dict(), + outputVolume=dict( + extensions=None, + ), ) outputs = DWIConvert.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_compareTractInclusion.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_compareTractInclusion.py index eb9d8c6184..38e8f92b0b 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_compareTractInclusion.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_compareTractInclusion.py @@ -1,29 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import compareTractInclusion def test_compareTractInclusion_inputs(): input_map = dict( - args=dict(argstr='%s', ), - closeness=dict(argstr='--closeness %f', ), + args=dict( + argstr="%s", + ), + closeness=dict( + argstr="--closeness %f", + ), environ=dict( nohash=True, usedefault=True, ), - numberOfPoints=dict(argstr='--numberOfPoints %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - standardFiber=dict(argstr='--standardFiber %s', ), - testFiber=dict(argstr='--testFiber %s', ), - testForBijection=dict(argstr='--testForBijection ', ), - testForFiberCardinality=dict(argstr='--testForFiberCardinality ', ), - writeXMLPolyDataFile=dict(argstr='--writeXMLPolyDataFile ', ), + numberOfPoints=dict( + argstr="--numberOfPoints %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), + standardFiber=dict( + argstr="--standardFiber %s", + extensions=None, + ), + testFiber=dict( + argstr="--testFiber %s", + extensions=None, + ), + testForBijection=dict( + argstr="--testForBijection ", + ), + 
testForFiberCardinality=dict( + argstr="--testForFiberCardinality ", + ), + writeXMLPolyDataFile=dict( + argstr="--writeXMLPolyDataFile ", + ), ) inputs = compareTractInclusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_compareTractInclusion_outputs(): output_map = dict() outputs = compareTractInclusion.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiaverage.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiaverage.py index 92c00853f4..17d4d19b4c 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiaverage.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiaverage.py @@ -1,30 +1,43 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..diffusion import dtiaverage def test_dtiaverage_inputs(): input_map = dict( - DTI_double=dict(argstr='--DTI_double ', ), - args=dict(argstr='%s', ), + DTI_double=dict( + argstr="--DTI_double ", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputs=dict(argstr='--inputs %s...', ), + inputs=dict( + argstr="--inputs %s...", + ), tensor_output=dict( - argstr='--tensor_output %s', + argstr="--tensor_output %s", hash_files=False, ), - verbose=dict(argstr='--verbose ', ), + verbose=dict( + argstr="--verbose ", + ), ) inputs = dtiaverage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_dtiaverage_outputs(): - output_map = dict(tensor_output=dict(), ) + output_map = dict( + tensor_output=dict( + extensions=None, + ), + ) outputs = dtiaverage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiestim.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiestim.py index 440fd4df07..0a36716e87 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiestim.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiestim.py @@ -1,60 +1,100 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..diffusion import dtiestim def test_dtiestim_inputs(): input_map = dict( B0=dict( - argstr='--B0 %s', + argstr="--B0 %s", hash_files=False, ), B0_mask_output=dict( - argstr='--B0_mask_output %s', + argstr="--B0_mask_output %s", hash_files=False, ), - DTI_double=dict(argstr='--DTI_double ', ), - args=dict(argstr='%s', ), - bad_region_mask=dict(argstr='--bad_region_mask %s', ), - brain_mask=dict(argstr='--brain_mask %s', ), - correction=dict(argstr='--correction %s', ), + DTI_double=dict( + argstr="--DTI_double ", + ), + args=dict( + argstr="%s", + ), + bad_region_mask=dict( + argstr="--bad_region_mask %s", + extensions=None, + ), + brain_mask=dict( + argstr="--brain_mask %s", + extensions=None, + ), + correction=dict( + argstr="--correction %s", + ), defaultTensor=dict( - argstr='--defaultTensor %s', - sep=',', + argstr="--defaultTensor %s", + sep=",", + ), + dwi_image=dict( + argstr="--dwi_image %s", + extensions=None, ), - dwi_image=dict(argstr='--dwi_image %s', ), environ=dict( nohash=True, usedefault=True, ), idwi=dict( - argstr='--idwi %s', + argstr="--idwi %s", hash_files=False, ), - method=dict(argstr='--method %s', ), - shiftNeg=dict(argstr='--shiftNeg ', ), - shiftNegCoeff=dict(argstr='--shiftNegCoeff %f', ), - 
sigma=dict(argstr='--sigma %f', ), - step=dict(argstr='--step %f', ), + method=dict( + argstr="--method %s", + ), + shiftNeg=dict( + argstr="--shiftNeg ", + ), + shiftNegCoeff=dict( + argstr="--shiftNegCoeff %f", + ), + sigma=dict( + argstr="--sigma %f", + ), + step=dict( + argstr="--step %f", + ), tensor_output=dict( - argstr='--tensor_output %s', + argstr="--tensor_output %s", hash_files=False, ), - threshold=dict(argstr='--threshold %d', ), - verbose=dict(argstr='--verbose ', ), - weight_iterations=dict(argstr='--weight_iterations %d', ), + threshold=dict( + argstr="--threshold %d", + ), + verbose=dict( + argstr="--verbose ", + ), + weight_iterations=dict( + argstr="--weight_iterations %d", + ), ) inputs = dtiestim.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_dtiestim_outputs(): output_map = dict( - B0=dict(), - B0_mask_output=dict(), - idwi=dict(), - tensor_output=dict(), + B0=dict( + extensions=None, + ), + B0_mask_output=dict( + extensions=None, + ), + idwi=dict( + extensions=None, + ), + tensor_output=dict( + extensions=None, + ), ) outputs = dtiestim.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiprocess.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiprocess.py index 3148d1edb5..24352abbe3 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiprocess.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiprocess.py @@ -1,112 +1,179 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..diffusion import dtiprocess def test_dtiprocess_inputs(): input_map = dict( - DTI_double=dict(argstr='--DTI_double ', ), + DTI_double=dict( + argstr="--DTI_double ", + ), RD_output=dict( - argstr='--RD_output %s', + argstr="--RD_output %s", hash_files=False, ), - affineitk_file=dict(argstr='--affineitk_file %s', ), - args=dict(argstr='%s', ), + affineitk_file=dict( + argstr="--affineitk_file %s", + extensions=None, + ), + args=dict( + argstr="%s", + ), color_fa_output=dict( - argstr='--color_fa_output %s', + argstr="--color_fa_output %s", hash_files=False, ), - correction=dict(argstr='--correction %s', ), + correction=dict( + argstr="--correction %s", + ), deformation_output=dict( - argstr='--deformation_output %s', + argstr="--deformation_output %s", hash_files=False, ), - dof_file=dict(argstr='--dof_file %s', ), - dti_image=dict(argstr='--dti_image %s', ), + dof_file=dict( + argstr="--dof_file %s", + extensions=None, + ), + dti_image=dict( + argstr="--dti_image %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), fa_gradient_output=dict( - argstr='--fa_gradient_output %s', + argstr="--fa_gradient_output %s", hash_files=False, ), fa_gradmag_output=dict( - argstr='--fa_gradmag_output %s', + argstr="--fa_gradmag_output %s", hash_files=False, ), fa_output=dict( - argstr='--fa_output %s', + argstr="--fa_output %s", hash_files=False, ), - forward=dict(argstr='--forward %s', ), + forward=dict( + argstr="--forward %s", + extensions=None, + ), frobenius_norm_output=dict( - argstr='--frobenius_norm_output %s', + argstr="--frobenius_norm_output %s", hash_files=False, ), - hField=dict(argstr='--hField ', ), - interpolation=dict(argstr='--interpolation %s', ), + hField=dict( + argstr="--hField ", + ), + interpolation=dict( + argstr="--interpolation %s", + ), lambda1_output=dict( - argstr='--lambda1_output %s', + argstr="--lambda1_output 
%s", hash_files=False, ), lambda2_output=dict( - argstr='--lambda2_output %s', + argstr="--lambda2_output %s", hash_files=False, ), lambda3_output=dict( - argstr='--lambda3_output %s', + argstr="--lambda3_output %s", hash_files=False, ), - mask=dict(argstr='--mask %s', ), + mask=dict( + argstr="--mask %s", + extensions=None, + ), md_output=dict( - argstr='--md_output %s', + argstr="--md_output %s", hash_files=False, ), negative_eigenvector_output=dict( - argstr='--negative_eigenvector_output %s', + argstr="--negative_eigenvector_output %s", hash_files=False, ), - newdof_file=dict(argstr='--newdof_file %s', ), + newdof_file=dict( + argstr="--newdof_file %s", + extensions=None, + ), outmask=dict( - argstr='--outmask %s', + argstr="--outmask %s", hash_files=False, ), principal_eigenvector_output=dict( - argstr='--principal_eigenvector_output %s', + argstr="--principal_eigenvector_output %s", hash_files=False, ), - reorientation=dict(argstr='--reorientation %s', ), + reorientation=dict( + argstr="--reorientation %s", + ), rot_output=dict( - argstr='--rot_output %s', + argstr="--rot_output %s", hash_files=False, ), - scalar_float=dict(argstr='--scalar_float ', ), - sigma=dict(argstr='--sigma %f', ), - verbose=dict(argstr='--verbose ', ), + scalar_float=dict( + argstr="--scalar_float ", + ), + sigma=dict( + argstr="--sigma %f", + ), + verbose=dict( + argstr="--verbose ", + ), ) inputs = dtiprocess.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_dtiprocess_outputs(): output_map = dict( - RD_output=dict(), - color_fa_output=dict(), - deformation_output=dict(), - fa_gradient_output=dict(), - fa_gradmag_output=dict(), - fa_output=dict(), - frobenius_norm_output=dict(), - lambda1_output=dict(), - lambda2_output=dict(), - lambda3_output=dict(), - md_output=dict(), - negative_eigenvector_output=dict(), - outmask=dict(), - principal_eigenvector_output=dict(), - rot_output=dict(), + RD_output=dict( + extensions=None, + ), + color_fa_output=dict( + extensions=None, + ), + deformation_output=dict( + extensions=None, + ), + fa_gradient_output=dict( + extensions=None, + ), + fa_gradmag_output=dict( + extensions=None, + ), + fa_output=dict( + extensions=None, + ), + frobenius_norm_output=dict( + extensions=None, + ), + lambda1_output=dict( + extensions=None, + ), + lambda2_output=dict( + extensions=None, + ), + lambda3_output=dict( + extensions=None, + ), + md_output=dict( + extensions=None, + ), + negative_eigenvector_output=dict( + extensions=None, + ), + outmask=dict( + extensions=None, + ), + principal_eigenvector_output=dict( + extensions=None, + ), + rot_output=dict( + extensions=None, + ), ) outputs = dtiprocess.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_extractNrrdVectorIndex.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_extractNrrdVectorIndex.py index ac5784f1c7..aaa516e9dc 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_extractNrrdVectorIndex.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_extractNrrdVectorIndex.py @@ -1,31 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import extractNrrdVectorIndex def test_extractNrrdVectorIndex_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), - 
numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - setImageOrientation=dict(argstr='--setImageOrientation %s', ), - vectorIndex=dict(argstr='--vectorIndex %d', ), + setImageOrientation=dict( + argstr="--setImageOrientation %s", + ), + vectorIndex=dict( + argstr="--vectorIndex %d", + ), ) inputs = extractNrrdVectorIndex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_extractNrrdVectorIndex_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = extractNrrdVectorIndex.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAnisotropyMap.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAnisotropyMap.py index 195d7dc3e1..da3e02c37b 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAnisotropyMap.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAnisotropyMap.py @@ -1,20 +1,28 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractAnisotropyMap def test_gtractAnisotropyMap_inputs(): input_map = dict( - anisotropyType=dict(argstr='--anisotropyType %s', ), - args=dict(argstr='%s', ), + anisotropyType=dict( + argstr="--anisotropyType %s", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputTensorVolume=dict(argstr='--inputTensorVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputTensorVolume=dict( + argstr="--inputTensorVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -23,8 +31,14 @@ def test_gtractAnisotropyMap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractAnisotropyMap_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = gtractAnisotropyMap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAverageBvalues.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAverageBvalues.py index fbb5bd6a55..a37b0e65ce 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAverageBvalues.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAverageBvalues.py @@ -1,21 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractAverageBvalues def test_gtractAverageBvalues_inputs(): input_map = dict( - args=dict(argstr='%s', ), - averageB0only=dict(argstr='--averageB0only ', ), - directionsTolerance=dict(argstr='--directionsTolerance %f', ), + args=dict( + argstr="%s", + ), + averageB0only=dict( + argstr="--averageB0only ", + ), + directionsTolerance=dict( + argstr="--directionsTolerance %f", + ), environ=dict( nohash=True, usedefault=True, ), - 
inputVolume=dict(argstr='--inputVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -24,8 +34,14 @@ def test_gtractAverageBvalues_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractAverageBvalues_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = gtractAverageBvalues.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractClipAnisotropy.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractClipAnisotropy.py index fed980c463..3d6e24aee3 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractClipAnisotropy.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractClipAnisotropy.py @@ -1,21 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractClipAnisotropy def test_gtractClipAnisotropy_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clipFirstSlice=dict(argstr='--clipFirstSlice ', ), - clipLastSlice=dict(argstr='--clipLastSlice ', ), + args=dict( + argstr="%s", + ), + clipFirstSlice=dict( + argstr="--clipFirstSlice ", + ), + clipLastSlice=dict( + argstr="--clipLastSlice ", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -24,8 +34,14 @@ def test_gtractClipAnisotropy_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractClipAnisotropy_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = gtractClipAnisotropy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoRegAnatomy.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoRegAnatomy.py index 456e1e79fa..1ab780c1b9 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoRegAnatomy.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoRegAnatomy.py @@ -1,53 +1,107 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractCoRegAnatomy def test_gtractCoRegAnatomy_inputs(): input_map = dict( - args=dict(argstr='%s', ), - borderSize=dict(argstr='--borderSize %d', ), - convergence=dict(argstr='--convergence %f', ), + args=dict( + argstr="%s", + ), + borderSize=dict( + argstr="--borderSize %d", + ), + convergence=dict( + argstr="--convergence %f", + ), environ=dict( nohash=True, usedefault=True, ), - gradientTolerance=dict(argstr='--gradientTolerance %f', ), + gradientTolerance=dict( + argstr="--gradientTolerance %f", + ), gridSize=dict( - argstr='--gridSize 
%s', - sep=',', - ), - inputAnatomicalVolume=dict(argstr='--inputAnatomicalVolume %s', ), - inputRigidTransform=dict(argstr='--inputRigidTransform %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), - maxBSplineDisplacement=dict(argstr='--maxBSplineDisplacement %f', ), - maximumStepSize=dict(argstr='--maximumStepSize %f', ), - minimumStepSize=dict(argstr='--minimumStepSize %f', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfIterations=dict(argstr='--numberOfIterations %d', ), - numberOfSamples=dict(argstr='--numberOfSamples %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--gridSize %s", + sep=",", + ), + inputAnatomicalVolume=dict( + argstr="--inputAnatomicalVolume %s", + extensions=None, + ), + inputRigidTransform=dict( + argstr="--inputRigidTransform %s", + extensions=None, + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + maxBSplineDisplacement=dict( + argstr="--maxBSplineDisplacement %f", + ), + maximumStepSize=dict( + argstr="--maximumStepSize %f", + ), + minimumStepSize=dict( + argstr="--minimumStepSize %f", + ), + numberOfHistogramBins=dict( + argstr="--numberOfHistogramBins %d", + ), + numberOfIterations=dict( + argstr="--numberOfIterations %d", + ), + numberOfSamples=dict( + argstr="--numberOfSamples %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputTransformName=dict( - argstr='--outputTransformName %s', + argstr="--outputTransformName %s", hash_files=False, ), - relaxationFactor=dict(argstr='--relaxationFactor %f', ), - samplingPercentage=dict(argstr='--samplingPercentage %f', ), - spatialScale=dict(argstr='--spatialScale %d', ), - transformType=dict(argstr='--transformType %s', ), - translationScale=dict(argstr='--translationScale %f', ), - useCenterOfHeadAlign=dict(argstr='--useCenterOfHeadAlign ', ), - useGeometryAlign=dict(argstr='--useGeometryAlign ', ), - useMomentsAlign=dict(argstr='--useMomentsAlign ', ), - vectorIndex=dict(argstr='--vectorIndex %d', ), + relaxationFactor=dict( + argstr="--relaxationFactor %f", + ), + samplingPercentage=dict( + argstr="--samplingPercentage %f", + ), + spatialScale=dict( + argstr="--spatialScale %d", + ), + transformType=dict( + argstr="--transformType %s", + ), + translationScale=dict( + argstr="--translationScale %f", + ), + useCenterOfHeadAlign=dict( + argstr="--useCenterOfHeadAlign ", + ), + useGeometryAlign=dict( + argstr="--useGeometryAlign ", + ), + useMomentsAlign=dict( + argstr="--useMomentsAlign ", + ), + vectorIndex=dict( + argstr="--vectorIndex %d", + ), ) inputs = gtractCoRegAnatomy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractCoRegAnatomy_outputs(): - output_map = dict(outputTransformName=dict(), ) + output_map = dict( + outputTransformName=dict( + extensions=None, + ), + ) outputs = gtractCoRegAnatomy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractConcatDwi.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractConcatDwi.py index 7b75858eff..d2a6ca3288 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractConcatDwi.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractConcatDwi.py @@ -1,20 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractConcatDwi def 
test_gtractConcatDwi_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - ignoreOrigins=dict(argstr='--ignoreOrigins ', ), - inputVolume=dict(argstr='--inputVolume %s...', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + ignoreOrigins=dict( + argstr="--ignoreOrigins ", + ), + inputVolume=dict( + argstr="--inputVolume %s...", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -23,8 +30,14 @@ def test_gtractConcatDwi_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractConcatDwi_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = gtractConcatDwi.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCopyImageOrientation.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCopyImageOrientation.py index d9e0b725c8..ba03837015 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCopyImageOrientation.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCopyImageOrientation.py @@ -1,20 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractCopyImageOrientation def test_gtractCopyImageOrientation_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputReferenceVolume=dict(argstr='--inputReferenceVolume %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputReferenceVolume=dict( + argstr="--inputReferenceVolume %s", + extensions=None, + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -23,8 +32,14 @@ def test_gtractCopyImageOrientation_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractCopyImageOrientation_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = gtractCopyImageOrientation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoregBvalues.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoregBvalues.py index 3143b16dfb..0122bf7636 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoregBvalues.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoregBvalues.py @@ -1,47 +1,84 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractCoregBvalues def test_gtractCoregBvalues_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debugLevel=dict(argstr='--debugLevel %d', ), - eddyCurrentCorrection=dict(argstr='--eddyCurrentCorrection ', ), + args=dict( + argstr="%s", + ), + debugLevel=dict( + argstr="--debugLevel %d", + ), + 
eddyCurrentCorrection=dict( + argstr="--eddyCurrentCorrection ", + ), environ=dict( nohash=True, usedefault=True, ), - fixedVolume=dict(argstr='--fixedVolume %s', ), - fixedVolumeIndex=dict(argstr='--fixedVolumeIndex %d', ), - maximumStepSize=dict(argstr='--maximumStepSize %f', ), - minimumStepSize=dict(argstr='--minimumStepSize %f', ), - movingVolume=dict(argstr='--movingVolume %s', ), - numberOfIterations=dict(argstr='--numberOfIterations %d', ), - numberOfSpatialSamples=dict(argstr='--numberOfSpatialSamples %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + fixedVolume=dict( + argstr="--fixedVolume %s", + extensions=None, + ), + fixedVolumeIndex=dict( + argstr="--fixedVolumeIndex %d", + ), + maximumStepSize=dict( + argstr="--maximumStepSize %f", + ), + minimumStepSize=dict( + argstr="--minimumStepSize %f", + ), + movingVolume=dict( + argstr="--movingVolume %s", + extensions=None, + ), + numberOfIterations=dict( + argstr="--numberOfIterations %d", + ), + numberOfSpatialSamples=dict( + argstr="--numberOfSpatialSamples %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputTransform=dict( - argstr='--outputTransform %s', + argstr="--outputTransform %s", hash_files=False, ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - registerB0Only=dict(argstr='--registerB0Only ', ), - relaxationFactor=dict(argstr='--relaxationFactor %f', ), - samplingPercentage=dict(argstr='--samplingPercentage %f', ), - spatialScale=dict(argstr='--spatialScale %f', ), + registerB0Only=dict( + argstr="--registerB0Only ", + ), + relaxationFactor=dict( + argstr="--relaxationFactor %f", + ), + samplingPercentage=dict( + argstr="--samplingPercentage %f", + ), + spatialScale=dict( + argstr="--spatialScale %f", + ), ) inputs = gtractCoregBvalues.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractCoregBvalues_outputs(): output_map = dict( - outputTransform=dict(), - outputVolume=dict(), + outputTransform=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), ) outputs = gtractCoregBvalues.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCostFastMarching.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCostFastMarching.py index 32d1e68898..7d086cd7c0 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCostFastMarching.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCostFastMarching.py @@ -1,42 +1,67 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractCostFastMarching def test_gtractCostFastMarching_inputs(): input_map = dict( - anisotropyWeight=dict(argstr='--anisotropyWeight %f', ), - args=dict(argstr='%s', ), + anisotropyWeight=dict( + argstr="--anisotropyWeight %f", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputAnisotropyVolume=dict(argstr='--inputAnisotropyVolume %s', ), + inputAnisotropyVolume=dict( + argstr="--inputAnisotropyVolume %s", + extensions=None, + ), inputStartingSeedsLabelMapVolume=dict( - argstr='--inputStartingSeedsLabelMapVolume %s', ), - inputTensorVolume=dict(argstr='--inputTensorVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--inputStartingSeedsLabelMapVolume %s", + extensions=None, + ), + inputTensorVolume=dict( + 
argstr="--inputTensorVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputCostVolume=dict( - argstr='--outputCostVolume %s', + argstr="--outputCostVolume %s", hash_files=False, ), outputSpeedVolume=dict( - argstr='--outputSpeedVolume %s', + argstr="--outputSpeedVolume %s", hash_files=False, ), - seedThreshold=dict(argstr='--seedThreshold %f', ), - startingSeedsLabel=dict(argstr='--startingSeedsLabel %d', ), - stoppingValue=dict(argstr='--stoppingValue %f', ), + seedThreshold=dict( + argstr="--seedThreshold %f", + ), + startingSeedsLabel=dict( + argstr="--startingSeedsLabel %d", + ), + stoppingValue=dict( + argstr="--stoppingValue %f", + ), ) inputs = gtractCostFastMarching.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractCostFastMarching_outputs(): output_map = dict( - outputCostVolume=dict(), - outputSpeedVolume=dict(), + outputCostVolume=dict( + extensions=None, + ), + outputSpeedVolume=dict( + extensions=None, + ), ) outputs = gtractCostFastMarching.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCreateGuideFiber.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCreateGuideFiber.py index bbe375bbb9..1990cc2057 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCreateGuideFiber.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCreateGuideFiber.py @@ -1,31 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractCreateGuideFiber def test_gtractCreateGuideFiber_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputFiber=dict(argstr='--inputFiber %s', ), - numberOfPoints=dict(argstr='--numberOfPoints %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputFiber=dict( + argstr="--inputFiber %s", + extensions=None, + ), + numberOfPoints=dict( + argstr="--numberOfPoints %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputFiber=dict( - argstr='--outputFiber %s', + argstr="--outputFiber %s", hash_files=False, ), - writeXMLPolyDataFile=dict(argstr='--writeXMLPolyDataFile ', ), + writeXMLPolyDataFile=dict( + argstr="--writeXMLPolyDataFile ", + ), ) inputs = gtractCreateGuideFiber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractCreateGuideFiber_outputs(): - output_map = dict(outputFiber=dict(), ) + output_map = dict( + outputFiber=dict( + extensions=None, + ), + ) outputs = gtractCreateGuideFiber.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFastMarchingTracking.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFastMarchingTracking.py index ec3a99b91e..4059d45f6a 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFastMarchingTracking.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFastMarchingTracking.py @@ -1,41 +1,77 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractFastMarchingTracking def test_gtractFastMarchingTracking_inputs(): input_map = dict( - args=dict(argstr='%s', ), - 
costStepSize=dict(argstr='--costStepSize %f', ), + args=dict( + argstr="%s", + ), + costStepSize=dict( + argstr="--costStepSize %f", + ), environ=dict( nohash=True, usedefault=True, ), - inputAnisotropyVolume=dict(argstr='--inputAnisotropyVolume %s', ), - inputCostVolume=dict(argstr='--inputCostVolume %s', ), + inputAnisotropyVolume=dict( + argstr="--inputAnisotropyVolume %s", + extensions=None, + ), + inputCostVolume=dict( + argstr="--inputCostVolume %s", + extensions=None, + ), inputStartingSeedsLabelMapVolume=dict( - argstr='--inputStartingSeedsLabelMapVolume %s', ), - inputTensorVolume=dict(argstr='--inputTensorVolume %s', ), - maximumStepSize=dict(argstr='--maximumStepSize %f', ), - minimumStepSize=dict(argstr='--minimumStepSize %f', ), - numberOfIterations=dict(argstr='--numberOfIterations %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--inputStartingSeedsLabelMapVolume %s", + extensions=None, + ), + inputTensorVolume=dict( + argstr="--inputTensorVolume %s", + extensions=None, + ), + maximumStepSize=dict( + argstr="--maximumStepSize %f", + ), + minimumStepSize=dict( + argstr="--minimumStepSize %f", + ), + numberOfIterations=dict( + argstr="--numberOfIterations %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputTract=dict( - argstr='--outputTract %s', + argstr="--outputTract %s", hash_files=False, ), - seedThreshold=dict(argstr='--seedThreshold %f', ), - startingSeedsLabel=dict(argstr='--startingSeedsLabel %d', ), - trackingThreshold=dict(argstr='--trackingThreshold %f', ), - writeXMLPolyDataFile=dict(argstr='--writeXMLPolyDataFile ', ), + seedThreshold=dict( + argstr="--seedThreshold %f", + ), + startingSeedsLabel=dict( + argstr="--startingSeedsLabel %d", + ), + trackingThreshold=dict( + argstr="--trackingThreshold %f", + ), + writeXMLPolyDataFile=dict( + argstr="--writeXMLPolyDataFile ", + ), ) inputs = gtractFastMarchingTracking.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractFastMarchingTracking_outputs(): - output_map = dict(outputTract=dict(), ) + output_map = dict( + outputTract=dict( + extensions=None, + ), + ) outputs = gtractFastMarchingTracking.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFiberTracking.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFiberTracking.py index 96c93b8b64..9837774d3e 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFiberTracking.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFiberTracking.py @@ -1,57 +1,120 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractFiberTracking def test_gtractFiberTracking_inputs(): input_map = dict( - args=dict(argstr='%s', ), - branchingAngle=dict(argstr='--branchingAngle %f', ), - branchingThreshold=dict(argstr='--branchingThreshold %f', ), - curvatureThreshold=dict(argstr='--curvatureThreshold %f', ), - endingSeedsLabel=dict(argstr='--endingSeedsLabel %d', ), + args=dict( + argstr="%s", + ), + branchingAngle=dict( + argstr="--branchingAngle %f", + ), + branchingThreshold=dict( + argstr="--branchingThreshold %f", + ), + curvatureThreshold=dict( + argstr="--curvatureThreshold %f", + ), + endingSeedsLabel=dict( + argstr="--endingSeedsLabel %d", + ), environ=dict( nohash=True, usedefault=True, ), 
guidedCurvatureThreshold=dict( - argstr='--guidedCurvatureThreshold %f', ), - inputAnisotropyVolume=dict(argstr='--inputAnisotropyVolume %s', ), + argstr="--guidedCurvatureThreshold %f", + ), + inputAnisotropyVolume=dict( + argstr="--inputAnisotropyVolume %s", + extensions=None, + ), inputEndingSeedsLabelMapVolume=dict( - argstr='--inputEndingSeedsLabelMapVolume %s', ), + argstr="--inputEndingSeedsLabelMapVolume %s", + extensions=None, + ), inputStartingSeedsLabelMapVolume=dict( - argstr='--inputStartingSeedsLabelMapVolume %s', ), - inputTensorVolume=dict(argstr='--inputTensorVolume %s', ), - inputTract=dict(argstr='--inputTract %s', ), - maximumBranchPoints=dict(argstr='--maximumBranchPoints %d', ), - maximumGuideDistance=dict(argstr='--maximumGuideDistance %f', ), - maximumLength=dict(argstr='--maximumLength %f', ), - minimumLength=dict(argstr='--minimumLength %f', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--inputStartingSeedsLabelMapVolume %s", + extensions=None, + ), + inputTensorVolume=dict( + argstr="--inputTensorVolume %s", + extensions=None, + ), + inputTract=dict( + argstr="--inputTract %s", + extensions=None, + ), + maximumBranchPoints=dict( + argstr="--maximumBranchPoints %d", + ), + maximumGuideDistance=dict( + argstr="--maximumGuideDistance %f", + ), + maximumLength=dict( + argstr="--maximumLength %f", + ), + minimumLength=dict( + argstr="--minimumLength %f", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputTract=dict( - argstr='--outputTract %s', + argstr="--outputTract %s", hash_files=False, ), - randomSeed=dict(argstr='--randomSeed %d', ), - seedThreshold=dict(argstr='--seedThreshold %f', ), - startingSeedsLabel=dict(argstr='--startingSeedsLabel %d', ), - stepSize=dict(argstr='--stepSize %f', ), - tendF=dict(argstr='--tendF %f', ), - tendG=dict(argstr='--tendG %f', ), - trackingMethod=dict(argstr='--trackingMethod %s', ), - trackingThreshold=dict(argstr='--trackingThreshold %f', ), - useLoopDetection=dict(argstr='--useLoopDetection ', ), - useRandomWalk=dict(argstr='--useRandomWalk ', ), - useTend=dict(argstr='--useTend ', ), - writeXMLPolyDataFile=dict(argstr='--writeXMLPolyDataFile ', ), + randomSeed=dict( + argstr="--randomSeed %d", + ), + seedThreshold=dict( + argstr="--seedThreshold %f", + ), + startingSeedsLabel=dict( + argstr="--startingSeedsLabel %d", + ), + stepSize=dict( + argstr="--stepSize %f", + ), + tendF=dict( + argstr="--tendF %f", + ), + tendG=dict( + argstr="--tendG %f", + ), + trackingMethod=dict( + argstr="--trackingMethod %s", + ), + trackingThreshold=dict( + argstr="--trackingThreshold %f", + ), + useLoopDetection=dict( + argstr="--useLoopDetection ", + ), + useRandomWalk=dict( + argstr="--useRandomWalk ", + ), + useTend=dict( + argstr="--useTend ", + ), + writeXMLPolyDataFile=dict( + argstr="--writeXMLPolyDataFile ", + ), ) inputs = gtractFiberTracking.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractFiberTracking_outputs(): - output_map = dict(outputTract=dict(), ) + output_map = dict( + outputTract=dict( + extensions=None, + ), + ) outputs = gtractFiberTracking.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractImageConformity.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractImageConformity.py index 9ddde832b2..64b896e0ca 100644 --- 
a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractImageConformity.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractImageConformity.py @@ -1,20 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractImageConformity def test_gtractImageConformity_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputReferenceVolume=dict(argstr='--inputReferenceVolume %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputReferenceVolume=dict( + argstr="--inputReferenceVolume %s", + extensions=None, + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -23,8 +32,14 @@ def test_gtractImageConformity_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractImageConformity_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = gtractImageConformity.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertBSplineTransform.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertBSplineTransform.py index ca642ceb66..ed43c90dc6 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertBSplineTransform.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertBSplineTransform.py @@ -1,24 +1,33 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractInvertBSplineTransform def test_gtractInvertBSplineTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputReferenceVolume=dict(argstr='--inputReferenceVolume %s', ), - inputTransform=dict(argstr='--inputTransform %s', ), + inputReferenceVolume=dict( + argstr="--inputReferenceVolume %s", + extensions=None, + ), + inputTransform=dict( + argstr="--inputTransform %s", + extensions=None, + ), landmarkDensity=dict( - argstr='--landmarkDensity %s', - sep=',', + argstr="--landmarkDensity %s", + sep=",", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), outputTransform=dict( - argstr='--outputTransform %s', + argstr="--outputTransform %s", hash_files=False, ), ) @@ -27,8 +36,14 @@ def test_gtractInvertBSplineTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractInvertBSplineTransform_outputs(): - output_map = dict(outputTransform=dict(), ) + output_map = dict( + outputTransform=dict( + extensions=None, + ), + ) outputs = gtractInvertBSplineTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertDisplacementField.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertDisplacementField.py index e4fd213d39..83129902aa 100644 --- 
a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertDisplacementField.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertDisplacementField.py @@ -1,31 +1,48 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractInvertDisplacementField def test_gtractInvertDisplacementField_inputs(): input_map = dict( - args=dict(argstr='%s', ), - baseImage=dict(argstr='--baseImage %s', ), - deformationImage=dict(argstr='--deformationImage %s', ), + args=dict( + argstr="%s", + ), + baseImage=dict( + argstr="--baseImage %s", + extensions=None, + ), + deformationImage=dict( + argstr="--deformationImage %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - subsamplingFactor=dict(argstr='--subsamplingFactor %d', ), + subsamplingFactor=dict( + argstr="--subsamplingFactor %d", + ), ) inputs = gtractInvertDisplacementField.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractInvertDisplacementField_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = gtractInvertDisplacementField.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertRigidTransform.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertRigidTransform.py index c035862663..73ba9c576f 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertRigidTransform.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertRigidTransform.py @@ -1,19 +1,25 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractInvertRigidTransform def test_gtractInvertRigidTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputTransform=dict(argstr='--inputTransform %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputTransform=dict( + argstr="--inputTransform %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputTransform=dict( - argstr='--outputTransform %s', + argstr="--outputTransform %s", hash_files=False, ), ) @@ -22,8 +28,14 @@ def test_gtractInvertRigidTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractInvertRigidTransform_outputs(): - output_map = dict(outputTransform=dict(), ) + output_map = dict( + outputTransform=dict( + extensions=None, + ), + ) outputs = gtractInvertRigidTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleAnisotropy.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleAnisotropy.py index 43f428b233..7b38abe0b5 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleAnisotropy.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleAnisotropy.py @@ 
-1,32 +1,52 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractResampleAnisotropy def test_gtractResampleAnisotropy_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputAnatomicalVolume=dict(argstr='--inputAnatomicalVolume %s', ), - inputAnisotropyVolume=dict(argstr='--inputAnisotropyVolume %s', ), - inputTransform=dict(argstr='--inputTransform %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputAnatomicalVolume=dict( + argstr="--inputAnatomicalVolume %s", + extensions=None, + ), + inputAnisotropyVolume=dict( + argstr="--inputAnisotropyVolume %s", + extensions=None, + ), + inputTransform=dict( + argstr="--inputTransform %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - transformType=dict(argstr='--transformType %s', ), + transformType=dict( + argstr="--transformType %s", + ), ) inputs = gtractResampleAnisotropy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractResampleAnisotropy_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = gtractResampleAnisotropy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleB0.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleB0.py index 812afca5c0..7271e8a42a 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleB0.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleB0.py @@ -1,33 +1,55 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractResampleB0 def test_gtractResampleB0_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputAnatomicalVolume=dict(argstr='--inputAnatomicalVolume %s', ), - inputTransform=dict(argstr='--inputTransform %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputAnatomicalVolume=dict( + argstr="--inputAnatomicalVolume %s", + extensions=None, + ), + inputTransform=dict( + argstr="--inputTransform %s", + extensions=None, + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - transformType=dict(argstr='--transformType %s', ), - vectorIndex=dict(argstr='--vectorIndex %d', ), + transformType=dict( + argstr="--transformType %s", + ), + vectorIndex=dict( + argstr="--vectorIndex %d", + ), ) inputs = gtractResampleB0.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractResampleB0_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = gtractResampleB0.output_spec() for key, metadata in list(output_map.items()):
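Every regenerated test module in this series has the same shape: a literal metadata map, then a nested loop asserting each entry against the live trait objects; the new extensions=None entries record the File trait's default (no extension restriction), which checkspecs now serializes. Condensed to its core, with an illustrative interface and trait, the pattern is:

# Condensed form of the generated tests; interface and trait chosen for illustration.
from nipype.interfaces.semtools import gtractResampleB0

input_map = dict(
    inputVolume=dict(
        argstr="--inputVolume %s",
        extensions=None,  # File trait default: accept any extension
    ),
)
inputs = gtractResampleB0.input_spec()
for key, metadata in list(input_map.items()):
    for metakey, value in list(metadata.items()):
        # e.g. inputs.traits()["inputVolume"].argstr == "--inputVolume %s"
        assert getattr(inputs.traits()[key], metakey) == value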
a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleCodeImage.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleCodeImage.py index decc017c60..6649ecfc1f 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleCodeImage.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleCodeImage.py @@ -1,32 +1,52 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractResampleCodeImage def test_gtractResampleCodeImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputCodeVolume=dict(argstr='--inputCodeVolume %s', ), - inputReferenceVolume=dict(argstr='--inputReferenceVolume %s', ), - inputTransform=dict(argstr='--inputTransform %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputCodeVolume=dict( + argstr="--inputCodeVolume %s", + extensions=None, + ), + inputReferenceVolume=dict( + argstr="--inputReferenceVolume %s", + extensions=None, + ), + inputTransform=dict( + argstr="--inputTransform %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - transformType=dict(argstr='--transformType %s', ), + transformType=dict( + argstr="--transformType %s", + ), ) inputs = gtractResampleCodeImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractResampleCodeImage_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = gtractResampleCodeImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleDWIInPlace.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleDWIInPlace.py index b815d3d3d6..3b61312e54 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleDWIInPlace.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleDWIInPlace.py @@ -1,43 +1,66 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractResampleDWIInPlace def test_gtractResampleDWIInPlace_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debugLevel=dict(argstr='--debugLevel %d', ), + args=dict( + argstr="%s", + ), + debugLevel=dict( + argstr="--debugLevel %d", + ), environ=dict( nohash=True, usedefault=True, ), imageOutputSize=dict( - argstr='--imageOutputSize %s', - sep=',', + argstr="--imageOutputSize %s", + sep=",", + ), + inputTransform=dict( + argstr="--inputTransform %s", + extensions=None, + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", ), - inputTransform=dict(argstr='--inputTransform %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), outputResampledB0=dict( - argstr='--outputResampledB0 %s', + argstr="--outputResampledB0 %s", hash_files=False, ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - referenceVolume=dict(argstr='--referenceVolume %s', ), - warpDWITransform=dict(argstr='--warpDWITransform %s', ), + 
referenceVolume=dict( + argstr="--referenceVolume %s", + extensions=None, + ), + warpDWITransform=dict( + argstr="--warpDWITransform %s", + extensions=None, + ), ) inputs = gtractResampleDWIInPlace.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractResampleDWIInPlace_outputs(): output_map = dict( - outputResampledB0=dict(), - outputVolume=dict(), + outputResampledB0=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), ) outputs = gtractResampleDWIInPlace.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleFibers.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleFibers.py index d0d2cd5664..d64d2d8581 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleFibers.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleFibers.py @@ -1,34 +1,52 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractResampleFibers def test_gtractResampleFibers_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputForwardDeformationFieldVolume=dict( - argstr='--inputForwardDeformationFieldVolume %s', ), + argstr="--inputForwardDeformationFieldVolume %s", + extensions=None, + ), inputReverseDeformationFieldVolume=dict( - argstr='--inputReverseDeformationFieldVolume %s', ), - inputTract=dict(argstr='--inputTract %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--inputReverseDeformationFieldVolume %s", + extensions=None, + ), + inputTract=dict( + argstr="--inputTract %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputTract=dict( - argstr='--outputTract %s', + argstr="--outputTract %s", hash_files=False, ), - writeXMLPolyDataFile=dict(argstr='--writeXMLPolyDataFile ', ), + writeXMLPolyDataFile=dict( + argstr="--writeXMLPolyDataFile ", + ), ) inputs = gtractResampleFibers.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractResampleFibers_outputs(): - output_map = dict(outputTract=dict(), ) + output_map = dict( + outputTract=dict( + extensions=None, + ), + ) outputs = gtractResampleFibers.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTensor.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTensor.py index ead96d1c71..eabe7c6f50 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTensor.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTensor.py @@ -1,45 +1,71 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractTensor def test_gtractTensor_inputs(): input_map = dict( - applyMeasurementFrame=dict(argstr='--applyMeasurementFrame ', ), - args=dict(argstr='%s', ), - b0Index=dict(argstr='--b0Index %d', ), + applyMeasurementFrame=dict( + argstr="--applyMeasurementFrame ", + ), + args=dict( + argstr="%s", + ), + b0Index=dict( + argstr="--b0Index %d", + ), backgroundSuppressingThreshold=dict( - argstr='--backgroundSuppressingThreshold %d', ), + argstr="--backgroundSuppressingThreshold %d", + ), environ=dict( 
nohash=True, usedefault=True, ), ignoreIndex=dict( - argstr='--ignoreIndex %s', - sep=',', + argstr="--ignoreIndex %s", + sep=",", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + maskProcessingMode=dict( + argstr="--maskProcessingMode %s", + ), + maskVolume=dict( + argstr="--maskVolume %s", + extensions=None, ), - inputVolume=dict(argstr='--inputVolume %s', ), - maskProcessingMode=dict(argstr='--maskProcessingMode %s', ), - maskVolume=dict(argstr='--maskVolume %s', ), medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', + argstr="--medianFilterSize %s", + sep=",", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - resampleIsotropic=dict(argstr='--resampleIsotropic ', ), - size=dict(argstr='--size %f', ), + resampleIsotropic=dict( + argstr="--resampleIsotropic ", + ), + size=dict( + argstr="--size %f", + ), ) inputs = gtractTensor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractTensor_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = gtractTensor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTransformToDisplacementField.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTransformToDisplacementField.py index 2dfde189e2..64daec32fb 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTransformToDisplacementField.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTransformToDisplacementField.py @@ -1,20 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..gtract import gtractTransformToDisplacementField def test_gtractTransformToDisplacementField_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputReferenceVolume=dict(argstr='--inputReferenceVolume %s', ), - inputTransform=dict(argstr='--inputTransform %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputReferenceVolume=dict( + argstr="--inputReferenceVolume %s", + extensions=None, + ), + inputTransform=dict( + argstr="--inputTransform %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputDeformationFieldVolume=dict( - argstr='--outputDeformationFieldVolume %s', + argstr="--outputDeformationFieldVolume %s", hash_files=False, ), ) @@ -23,8 +32,14 @@ def test_gtractTransformToDisplacementField_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractTransformToDisplacementField_outputs(): - output_map = dict(outputDeformationFieldVolume=dict(), ) + output_map = dict( + outputDeformationFieldVolume=dict( + extensions=None, + ), + ) outputs = gtractTransformToDisplacementField.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_maxcurvature.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_maxcurvature.py index cc06c5ede5..6638ef34cd 100644 --- 
a/nipype/interfaces/semtools/diffusion/tests/test_auto_maxcurvature.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_maxcurvature.py @@ -1,30 +1,44 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..maxcurvature import maxcurvature def test_maxcurvature_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - image=dict(argstr='--image %s', ), + image=dict( + argstr="--image %s", + extensions=None, + ), output=dict( - argstr='--output %s', + argstr="--output %s", hash_files=False, ), - sigma=dict(argstr='--sigma %f', ), - verbose=dict(argstr='--verbose ', ), + sigma=dict( + argstr="--sigma %f", + ), + verbose=dict( + argstr="--verbose ", + ), ) inputs = maxcurvature.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_maxcurvature_outputs(): - output_map = dict(output=dict(), ) + output_map = dict( + output=dict( + extensions=None, + ), + ) outputs = maxcurvature.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tractography/__init__.py b/nipype/interfaces/semtools/diffusion/tractography/__init__.py index f846b7fde5..809910cf28 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/__init__.py +++ b/nipype/interfaces/semtools/diffusion/tractography/__init__.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .commandlineonly import fiberstats from .fiberprocess import fiberprocess from .fibertrack import fibertrack diff --git a/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py b/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py index 19adc2a817..e03c8fde9e 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py +++ b/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py @@ -1,18 +1,17 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ....base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ....base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class fiberstatsInputSpec(CommandLineInputSpec): - fiber_file = File( - desc="DTI Fiber File", exists=True, argstr="--fiber_file %s") + fiber_file = File(desc="DTI Fiber File", exists=True, argstr="--fiber_file %s") verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") @@ -23,25 +22,24 @@ class fiberstatsOutputSpec(TraitedSpec): class fiberstats(SEMLikeCommandLine): """title: FiberStats (DTIProcess) -category: Diffusion.Tractography.CommandLineOnly - -description: Obsolete tool - Not used anymore + category: Diffusion.Tractography.CommandLineOnly -version: 1.1.0 + description: Obsolete tool - Not used anymore -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess + version: 1.1.0 -license: Copyright (c) Casey Goodlett. All rights reserved. - See http://www.ia.unc.edu/dev/Copyright.htm for details. - This software is distributed WITHOUT ANY WARRANTY; without even - the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR - PURPOSE. 
See the above copyright notices for more information. + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess -contributor: Casey Goodlett + license: Copyright (c) Casey Goodlett. All rights reserved. + See http://www.ia.unc.edu/dev/Copyright.htm for details. + This software is distributed WITHOUT ANY WARRANTY; without even + the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + PURPOSE. See the above copyright notices for more information. -acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler. + contributor: Casey Goodlett -""" + acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependencies on boost and a fortran compiler. + """ input_spec = fiberstatsInputSpec output_spec = fiberstatsOutputSpec diff --git a/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py b/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py index c0e9dcbbaf..7efb9c9e23 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py +++ b/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py @@ -1,110 +1,110 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ....base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ....base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class fiberprocessInputSpec(CommandLineInputSpec): - fiber_file = File( - desc="DTI fiber file", exists=True, argstr="--fiber_file %s") + fiber_file = File(desc="DTI fiber file", exists=True, argstr="--fiber_file %s") fiber_output = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output fiber file. May be warped or updated with new data depending on other options used.", - argstr="--fiber_output %s") + desc="Output fiber file. May be warped or updated with new data depending on other options used.", + argstr="--fiber_output %s", + ) tensor_volume = File( desc="Interpolate tensor values from the given field", exists=True, - argstr="--tensor_volume %s") + argstr="--tensor_volume %s", + ) h_field = File( - desc= - "HField for warp and statistics lookup. If this option is used tensor-volume must also be specified.", + desc="HField for warp and statistics lookup. If this option is used tensor-volume must also be specified.", exists=True, - argstr="--h_field %s") + argstr="--h_field %s", + ) displacement_field = File( - desc= - "Displacement Field for warp and statistics lookup. If this option is used tensor-volume must also be specified.", + desc="Displacement Field for warp and statistics lookup. 
If this option is used tensor-volume must also be specified.", exists=True, - argstr="--displacement_field %s") + argstr="--displacement_field %s", + ) saveProperties = traits.Bool( - desc= - "save the tensor property as scalar data into the vtk (only works for vtk fiber files). ", - argstr="--saveProperties ") + desc="save the tensor property as scalar data into the vtk (only works for vtk fiber files). ", + argstr="--saveProperties ", + ) no_warp = traits.Bool( - desc= - "Do not warp the geometry of the tensors only obtain the new statistics.", - argstr="--no_warp ") + desc="Do not warp the geometry of the tensors only obtain the new statistics.", + argstr="--no_warp ", + ) fiber_radius = traits.Float( - desc="set radius of all fibers to this value", - argstr="--fiber_radius %f") + desc="set radius of all fibers to this value", argstr="--fiber_radius %f" + ) index_space = traits.Bool( - desc= - "Use index-space for fiber output coordinates, otherwise us world space for fiber output coordinates (from tensor file).", - argstr="--index_space ") + desc="Use index-space for fiber output coordinates, otherwise us world space for fiber output coordinates (from tensor file).", + argstr="--index_space ", + ) voxelize = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Voxelize fiber into a label map (the labelmap filename is the argument of -V). The tensor file must be specified using -T for information about the size, origin, spacing of the image. The deformation is applied before the voxelization ", - argstr="--voxelize %s") + desc="Voxelize fiber into a label map (the labelmap filename is the argument of -V). The tensor file must be specified using -T for information about the size, origin, spacing of the image. The deformation is applied before the voxelization ", + argstr="--voxelize %s", + ) voxelize_count_fibers = traits.Bool( desc="Count number of fibers per-voxel instead of just setting to 1", - argstr="--voxelize_count_fibers ") + argstr="--voxelize_count_fibers ", + ) voxel_label = traits.Int( - desc="Label for voxelized fiber", argstr="--voxel_label %d") + desc="Label for voxelized fiber", argstr="--voxel_label %d" + ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") - noDataChange = traits.Bool( - desc="Do not change data ??? ", argstr="--noDataChange ") + noDataChange = traits.Bool(desc="Do not change data ??? ", argstr="--noDataChange ") class fiberprocessOutputSpec(TraitedSpec): fiber_output = File( - desc= - "Output fiber file. May be warped or updated with new data depending on other options used.", - exists=True) + desc="Output fiber file. May be warped or updated with new data depending on other options used.", + exists=True, + ) voxelize = File( - desc= - "Voxelize fiber into a label map (the labelmap filename is the argument of -V). The tensor file must be specified using -T for information about the size, origin, spacing of the image. The deformation is applied before the voxelization ", - exists=True) + desc="Voxelize fiber into a label map (the labelmap filename is the argument of -V). The tensor file must be specified using -T for information about the size, origin, spacing of the image. The deformation is applied before the voxelization ", + exists=True, + ) class fiberprocess(SEMLikeCommandLine): """title: FiberProcess (DTIProcess) -category: Diffusion.Tractography - -description: fiberprocess is a tool that manage fiber files extracted from the fibertrack tool or any fiber tracking algorithm. 
It takes as an input .fib and .vtk files (--fiber_file) and saves the changed fibers (--fiber_output) into the 2 same formats. The main purpose of this tool is to deform the fiber file with a transformation field as an input (--displacement_field or --h_field depending if you deal with dfield or hfield). To use that option you need to specify the tensor field from which the fiber file was extracted with the option --tensor_volume. The transformation applied on the fiber file is the inverse of the one input. If the transformation is from one case to an atlas, fiberprocess assumes that the fiber file is in the atlas space and you want it in the original case space, so it's the inverse of the transformation which has been computed. -You have 2 options for fiber modification. You can either deform the fibers (their geometry) into the space OR you can keep the same geometry but map the diffusion properties (fa, md, lbd's...) of the original tensor field along the fibers at the corresponding locations. This is triggered by the --no_warp option. To use the previous example: when you have a tensor field in the original space and the deformed tensor field in the atlas space, you want to track the fibers in the atlas space, keeping this geometry but with the original case diffusion properties. Then you can specify the transformations field (from original case -> atlas) and the original tensor field with the --tensor_volume option. -With fiberprocess you can also binarize a fiber file. Using the --voxelize option will create an image where each voxel through which a fiber is passing is set to 1. The output is going to be a binary image with the values 0 or 1 by default but the 1 value voxel can be set to any number with the --voxel_label option. Finally you can create an image where the value at the voxel is the number of fiber passing through. (--voxelize_count_fibers) + category: Diffusion.Tractography -version: 1.0.0 + description: fiberprocess is a tool that manages fiber files extracted from the fibertrack tool or any fiber tracking algorithm. It takes as an input .fib and .vtk files (--fiber_file) and saves the changed fibers (--fiber_output) into the 2 same formats. The main purpose of this tool is to deform the fiber file with a transformation field as an input (--displacement_field or --h_field depending if you deal with dfield or hfield). To use that option you need to specify the tensor field from which the fiber file was extracted with the option --tensor_volume. The transformation applied on the fiber file is the inverse of the one input. If the transformation is from one case to an atlas, fiberprocess assumes that the fiber file is in the atlas space and you want it in the original case space, so it's the inverse of the transformation which has been computed. + You have 2 options for fiber modification. You can either deform the fibers (their geometry) into the space OR you can keep the same geometry but map the diffusion properties (fa, md, lbd's...) of the original tensor field along the fibers at the corresponding locations. This is triggered by the --no_warp option. To use the previous example: when you have a tensor field in the original space and the deformed tensor field in the atlas space, you want to track the fibers in the atlas space, keeping this geometry but with the original case diffusion properties. Then you can specify the transformations field (from original case -> atlas) and the original tensor field with the --tensor_volume option.
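To make the deformation workflow described above concrete, a hypothetical invocation could look like the following; every path is a placeholder, and both nipype and the DTIProcess binaries are assumed to be installed:

    # Warp atlas-space fibers back into a case's native space, per the
    # description above. Placeholder files are created first because traits
    # declared with exists=True validate the path at assignment time.
    from pathlib import Path
    from nipype.interfaces.semtools.diffusion.tractography.fiberprocess import (
        fiberprocess,
    )

    for name in ("tracts_atlas.vtk", "case_tensors.nrrd", "dfield.nrrd"):
        Path(name).touch()

    fp = fiberprocess()
    fp.inputs.fiber_file = "tracts_atlas.vtk"      # fibers traced in atlas space
    fp.inputs.tensor_volume = "case_tensors.nrrd"  # tensor field they came from
    fp.inputs.displacement_field = "dfield.nrrd"   # case -> atlas transformation
    fp.inputs.fiber_output = "tracts_case.vtk"
    print(fp.cmdline)  # inspect the generated command without running the binary
    # fp.run()  # requires the DTIProcess fiberprocess executable on PATH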
+ With fiberprocess you can also binarize a fiber file. Using the --voxelize option will create an image where each voxel through which a fiber is passing is set to 1. The output is going to be a binary image with the values 0 or 1 by default but the 1 value voxel can be set to any number with the --voxel_label option. Finally you can create an image where the value at the voxel is the number of fiber passing through. (--voxelize_count_fibers) -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess + version: 1.0.0 -license: Copyright (c) Casey Goodlett. All rights reserved. - See http://www.ia.unc.edu/dev/Copyright.htm for details. - This software is distributed WITHOUT ANY WARRANTY; without even - the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR - PURPOSE. See the above copyright notices for more information. + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess -contributor: Casey Goodlett + license: Copyright (c) Casey Goodlett. All rights reserved. + See http://www.ia.unc.edu/dev/Copyright.htm for details. + This software is distributed WITHOUT ANY WARRANTY; without even + the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + PURPOSE. See the above copyright notices for more information. -""" + contributor: Casey Goodlett + """ input_spec = fiberprocessInputSpec output_spec = fiberprocessOutputSpec _cmd = " fiberprocess " _outputs_filenames = { - 'fiber_output': 'fiber_output.vtk', - 'voxelize': 'voxelize.nii' + "fiber_output": "fiber_output.vtk", + "voxelize": "voxelize.nii", } _redirect_x = False diff --git a/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py b/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py index 498cb2579d..1fa64180d5 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py +++ b/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py @@ -1,94 +1,94 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ....base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ....base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class fibertrackInputSpec(CommandLineInputSpec): input_tensor_file = File( - desc="Tensor Image", exists=True, argstr="--input_tensor_file %s") + desc="Tensor Image", exists=True, argstr="--input_tensor_file %s" + ) input_roi_file = File( - desc= - "The filename of the image which contains the labels used for seeding and constraining the algorithm.", + desc="The filename of the image which contains the labels used for seeding and constraining the algorithm.", exists=True, - argstr="--input_roi_file %s") + argstr="--input_roi_file %s", + ) output_fiber_file = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The filename for the fiber file produced by the algorithm. This file must end in a .fib or .vtk extension for ITK spatial object and vtkPolyData formats respectively.", - argstr="--output_fiber_file %s") + desc="The filename for the fiber file produced by the algorithm. 
This file must end in a .fib or .vtk extension for ITK spatial object and vtkPolyData formats respectively.", + argstr="--output_fiber_file %s", + ) source_label = traits.Int( - desc= - "The label of voxels in the labelfile to use for seeding tractography. One tract is seeded from the center of each voxel with this label", - argstr="--source_label %d") + desc="The label of voxels in the labelfile to use for seeding tractography. One tract is seeded from the center of each voxel with this label", + argstr="--source_label %d", + ) target_label = traits.Int( - desc= - "The label of voxels in the labelfile used to constrain tractography. Tracts that do not pass through a voxel with this label are rejected. Set this keep all tracts.", - argstr="--target_label %d") - forbidden_label = traits.Int( - desc="Forbidden label", argstr="--forbidden_label %d") + desc="The label of voxels in the labelfile used to constrain tractography. Tracts that do not pass through a voxel with this label are rejected. Set this to keep all tracts.", + argstr="--target_label %d", + ) + forbidden_label = traits.Int(desc="Forbidden label", argstr="--forbidden_label %d") whole_brain = traits.Bool( - desc= - "If this option is enabled all voxels in the image are used to seed tractography. When this option is enabled both source and target labels function as target labels", - argstr="--whole_brain ") + desc="If this option is enabled all voxels in the image are used to seed tractography. When this option is enabled both source and target labels function as target labels", + argstr="--whole_brain ", + ) max_angle = traits.Float( - desc="Maximum angle of change in radians", argstr="--max_angle %f") + desc="Maximum angle of change in radians", argstr="--max_angle %f" + ) step_size = traits.Float( - desc="Step size in mm for the tracking algorithm", - argstr="--step_size %f") + desc="Step size in mm for the tracking algorithm", argstr="--step_size %f" + ) min_fa = traits.Float( - desc="The minimum FA threshold to continue tractography", - argstr="--min_fa %f") + desc="The minimum FA threshold to continue tractography", argstr="--min_fa %f" + ) force = traits.Bool(desc="Ignore sanity checks.", argstr="--force ") verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") really_verbose = traits.Bool( - desc="Follow detail of fiber tracking algorithm", - argstr="--really_verbose ") + desc="Follow detail of fiber tracking algorithm", argstr="--really_verbose " + ) class fibertrackOutputSpec(TraitedSpec): output_fiber_file = File( - desc= - "The filename for the fiber file produced by the algorithm. This file must end in a .fib or .vtk extension for ITK spatial object and vtkPolyData formats respectively.", - exists=True) + desc="The filename for the fiber file produced by the algorithm. This file must end in a .fib or .vtk extension for ITK spatial object and vtkPolyData formats respectively.", + exists=True, + ) class fibertrack(SEMLikeCommandLine): """title: FiberTrack (DTIProcess) -category: Diffusion.Tractography - -description: This program implements a simple streamline tractography method based on the principal eigenvector of the tensor field. A fourth order Runge-Kutta integration rule used to advance the streamlines. -As a first parameter you have to input the tensor field (with the --input_tensor_file option). Then the region of interest image file is set with the --input_roi_file. Next you want to set the output fiber file name after the --output_fiber_file option.
-You can specify the label value in the input_roi_file with the --target_label, --source_label and --fobidden_label options. By default target label is 1, source label is 2 and forbidden label is 0. The source label is where the streamlines are seeded, the target label defines the voxels through which the fibers must pass by to be kept in the final fiber file and the forbidden label defines the voxels where the streamlines are stopped if they pass through it. There is also a --whole_brain option which, if enabled, consider both target and source labels of the roi image as target labels and all the voxels of the image are considered as sources. -During the tractography, the --fa_min parameter is used as the minimum value needed at different voxel for the tracking to keep going along a streamline. The --step_size parameter is used for each iteration of the tracking algorithm and defines the length of each step. The --max_angle option defines the maximum angle allowed between two successive segments along the tracked fiber. + category: Diffusion.Tractography -version: 1.1.0 + description: This program implements a simple streamline tractography method based on the principal eigenvector of the tensor field. A fourth order Runge-Kutta integration rule is used to advance the streamlines. + As a first parameter you have to input the tensor field (with the --input_tensor_file option). Then the region of interest image file is set with the --input_roi_file. Next you want to set the output fiber file name after the --output_fiber_file option. + You can specify the label value in the input_roi_file with the --target_label, --source_label and --forbidden_label options. By default target label is 1, source label is 2 and forbidden label is 0. The source label is where the streamlines are seeded, the target label defines the voxels through which the fibers must pass by to be kept in the final fiber file and the forbidden label defines the voxels where the streamlines are stopped if they pass through it. There is also a --whole_brain option which, if enabled, considers both target and source labels of the roi image as target labels and all the voxels of the image are considered as sources. + During the tractography, the --min_fa parameter is used as the minimum value needed at each voxel for the tracking to keep going along a streamline. The --step_size parameter is used for each iteration of the tracking algorithm and defines the length of each step. The --max_angle option defines the maximum angle allowed between two successive segments along the tracked fiber. -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess + version: 1.1.0 -license: Copyright (c) Casey Goodlett. All rights reserved. - See http://www.ia.unc.edu/dev/Copyright.htm for details. - This software is distributed WITHOUT ANY WARRANTY; without even - the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR - PURPOSE. See the above copyright notices for more information. + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess -contributor: Casey Goodlett + license: Copyright (c) Casey Goodlett. All rights reserved. + See http://www.ia.unc.edu/dev/Copyright.htm for details. + This software is distributed WITHOUT ANY WARRANTY; without even + the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + PURPOSE. See the above copyright notices for more information.
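A hypothetical fibertrack setup following the parameters documented above; the paths are placeholders and nipype plus the DTIProcess binaries are assumed to be installed:

    # Seed from label 2, keep tracts that reach label 1, per the docstring above.
    # Empty placeholder files satisfy the exists=True input traits.
    from pathlib import Path
    from nipype.interfaces.semtools.diffusion.tractography.fibertrack import fibertrack

    for name in ("dti.nrrd", "labels.nrrd"):
        Path(name).touch()

    ft = fibertrack()
    ft.inputs.input_tensor_file = "dti.nrrd"
    ft.inputs.input_roi_file = "labels.nrrd"
    ft.inputs.source_label = 2        # seed voxels (the documented default)
    ft.inputs.target_label = 1        # tracts must cross these voxels
    ft.inputs.min_fa = 0.2            # stop tracking below this FA
    ft.inputs.step_size = 0.5         # mm advanced per Runge-Kutta step
    ft.inputs.output_fiber_file = "tracts.vtk"
    print(ft.cmdline)  # inspect the generated command without running the binary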
-acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler. + contributor: Casey Goodlett -""" + acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependencies on boost and a fortran compiler. + """ input_spec = fibertrackInputSpec output_spec = fibertrackOutputSpec _cmd = " fibertrack " - _outputs_filenames = {'output_fiber_file': 'output_fiber_file.vtk'} + _outputs_filenames = {"output_fiber_file": "output_fiber_file.vtk"} _redirect_x = False diff --git a/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py b/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py +++ b/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_UKFTractography.py b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_UKFTractography.py index 7c61974ef3..92050c6e43 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_UKFTractography.py +++ b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_UKFTractography.py @@ -1,67 +1,137 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..ukftractography import UKFTractography def test_UKFTractography_inputs(): input_map = dict( - Ql=dict(argstr='--Ql %f', ), - Qm=dict(argstr='--Qm %f', ), - Qw=dict(argstr='--Qw %f', ), - Rs=dict(argstr='--Rs %f', ), - args=dict(argstr='%s', ), - dwiFile=dict(argstr='--dwiFile %s', ), + Ql=dict( + argstr="--Ql %f", + ), + Qm=dict( + argstr="--Qm %f", + ), + Qw=dict( + argstr="--Qw %f", + ), + Rs=dict( + argstr="--Rs %f", + ), + args=dict( + argstr="%s", + ), + dwiFile=dict( + argstr="--dwiFile %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), - freeWater=dict(argstr='--freeWater ', ), - fullTensorModel=dict(argstr='--fullTensorModel ', ), + freeWater=dict( + argstr="--freeWater ", + ), + fullTensorModel=dict( + argstr="--fullTensorModel ", + ), labels=dict( - argstr='--labels %s', - sep=',', - ), - maskFile=dict(argstr='--maskFile %s', ), - maxBranchingAngle=dict(argstr='--maxBranchingAngle %f', ), - maxHalfFiberLength=dict(argstr='--maxHalfFiberLength %f', ), - minBranchingAngle=dict(argstr='--minBranchingAngle %f', ), - minFA=dict(argstr='--minFA %f', ), - minGA=dict(argstr='--minGA %f', ), - numTensor=dict(argstr='--numTensor %s', ), - numThreads=dict(argstr='--numThreads %d', ), - recordCovariance=dict(argstr='--recordCovariance ', ), - recordFA=dict(argstr='--recordFA ', ), - recordFreeWater=dict(argstr='--recordFreeWater ', ), - recordLength=dict(argstr='--recordLength %f', ), - recordNMSE=dict(argstr='--recordNMSE ', ), - recordState=dict(argstr='--recordState 
', ), - recordTensors=dict(argstr='--recordTensors ', ), - recordTrace=dict(argstr='--recordTrace ', ), - seedFALimit=dict(argstr='--seedFALimit %f', ), - seedsFile=dict(argstr='--seedsFile %s', ), - seedsPerVoxel=dict(argstr='--seedsPerVoxel %d', ), - stepLength=dict(argstr='--stepLength %f', ), - storeGlyphs=dict(argstr='--storeGlyphs ', ), + argstr="--labels %s", + sep=",", + ), + maskFile=dict( + argstr="--maskFile %s", + extensions=None, + ), + maxBranchingAngle=dict( + argstr="--maxBranchingAngle %f", + ), + maxHalfFiberLength=dict( + argstr="--maxHalfFiberLength %f", + ), + minBranchingAngle=dict( + argstr="--minBranchingAngle %f", + ), + minFA=dict( + argstr="--minFA %f", + ), + minGA=dict( + argstr="--minGA %f", + ), + numTensor=dict( + argstr="--numTensor %s", + ), + numThreads=dict( + argstr="--numThreads %d", + ), + recordCovariance=dict( + argstr="--recordCovariance ", + ), + recordFA=dict( + argstr="--recordFA ", + ), + recordFreeWater=dict( + argstr="--recordFreeWater ", + ), + recordLength=dict( + argstr="--recordLength %f", + ), + recordNMSE=dict( + argstr="--recordNMSE ", + ), + recordState=dict( + argstr="--recordState ", + ), + recordTensors=dict( + argstr="--recordTensors ", + ), + recordTrace=dict( + argstr="--recordTrace ", + ), + seedFALimit=dict( + argstr="--seedFALimit %f", + ), + seedsFile=dict( + argstr="--seedsFile %s", + extensions=None, + ), + seedsPerVoxel=dict( + argstr="--seedsPerVoxel %d", + ), + stepLength=dict( + argstr="--stepLength %f", + ), + storeGlyphs=dict( + argstr="--storeGlyphs ", + ), tracts=dict( - argstr='--tracts %s', + argstr="--tracts %s", hash_files=False, ), tractsWithSecondTensor=dict( - argstr='--tractsWithSecondTensor %s', + argstr="--tractsWithSecondTensor %s", hash_files=False, ), - writeAsciiTracts=dict(argstr='--writeAsciiTracts ', ), - writeUncompressedTracts=dict(argstr='--writeUncompressedTracts ', ), + writeAsciiTracts=dict( + argstr="--writeAsciiTracts ", + ), + writeUncompressedTracts=dict( + argstr="--writeUncompressedTracts ", + ), ) inputs = UKFTractography.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_UKFTractography_outputs(): output_map = dict( - tracts=dict(), - tractsWithSecondTensor=dict(), + tracts=dict( + extensions=None, + ), + tractsWithSecondTensor=dict( + extensions=None, + ), ) outputs = UKFTractography.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberprocess.py b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberprocess.py index b4756a9406..506d3f8f90 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberprocess.py +++ b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberprocess.py @@ -1,45 +1,80 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..fiberprocess import fiberprocess def test_fiberprocess_inputs(): input_map = dict( - args=dict(argstr='%s', ), - displacement_field=dict(argstr='--displacement_field %s', ), + args=dict( + argstr="%s", + ), + displacement_field=dict( + argstr="--displacement_field %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), - fiber_file=dict(argstr='--fiber_file %s', ), + fiber_file=dict( + argstr="--fiber_file %s", + extensions=None, + ), fiber_output=dict( - argstr='--fiber_output %s', + argstr="--fiber_output %s", hash_files=False, ), - 
fiber_radius=dict(argstr='--fiber_radius %f', ), - h_field=dict(argstr='--h_field %s', ), - index_space=dict(argstr='--index_space ', ), - noDataChange=dict(argstr='--noDataChange ', ), - no_warp=dict(argstr='--no_warp ', ), - saveProperties=dict(argstr='--saveProperties ', ), - tensor_volume=dict(argstr='--tensor_volume %s', ), - verbose=dict(argstr='--verbose ', ), - voxel_label=dict(argstr='--voxel_label %d', ), + fiber_radius=dict( + argstr="--fiber_radius %f", + ), + h_field=dict( + argstr="--h_field %s", + extensions=None, + ), + index_space=dict( + argstr="--index_space ", + ), + noDataChange=dict( + argstr="--noDataChange ", + ), + no_warp=dict( + argstr="--no_warp ", + ), + saveProperties=dict( + argstr="--saveProperties ", + ), + tensor_volume=dict( + argstr="--tensor_volume %s", + extensions=None, + ), + verbose=dict( + argstr="--verbose ", + ), + voxel_label=dict( + argstr="--voxel_label %d", + ), voxelize=dict( - argstr='--voxelize %s', + argstr="--voxelize %s", hash_files=False, ), - voxelize_count_fibers=dict(argstr='--voxelize_count_fibers ', ), + voxelize_count_fibers=dict( + argstr="--voxelize_count_fibers ", + ), ) inputs = fiberprocess.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_fiberprocess_outputs(): output_map = dict( - fiber_output=dict(), - voxelize=dict(), + fiber_output=dict( + extensions=None, + ), + voxelize=dict( + extensions=None, + ), ) outputs = fiberprocess.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberstats.py b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberstats.py index d6f3a5cd50..a9df738d28 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberstats.py +++ b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberstats.py @@ -1,23 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..commandlineonly import fiberstats def test_fiberstats_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - fiber_file=dict(argstr='--fiber_file %s', ), - verbose=dict(argstr='--verbose ', ), + fiber_file=dict( + argstr="--fiber_file %s", + extensions=None, + ), + verbose=dict( + argstr="--verbose ", + ), ) inputs = fiberstats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_fiberstats_outputs(): output_map = dict() outputs = fiberstats.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fibertrack.py b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fibertrack.py index 33e57c0ca3..28798b14ff 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fibertrack.py +++ b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fibertrack.py @@ -1,39 +1,72 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..fibertrack import fibertrack def test_fibertrack_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - forbidden_label=dict(argstr='--forbidden_label %d', ), - force=dict(argstr='--force ', ), - input_roi_file=dict(argstr='--input_roi_file %s', 
), - input_tensor_file=dict(argstr='--input_tensor_file %s', ), - max_angle=dict(argstr='--max_angle %f', ), - min_fa=dict(argstr='--min_fa %f', ), + forbidden_label=dict( + argstr="--forbidden_label %d", + ), + force=dict( + argstr="--force ", + ), + input_roi_file=dict( + argstr="--input_roi_file %s", + extensions=None, + ), + input_tensor_file=dict( + argstr="--input_tensor_file %s", + extensions=None, + ), + max_angle=dict( + argstr="--max_angle %f", + ), + min_fa=dict( + argstr="--min_fa %f", + ), output_fiber_file=dict( - argstr='--output_fiber_file %s', + argstr="--output_fiber_file %s", hash_files=False, ), - really_verbose=dict(argstr='--really_verbose ', ), - source_label=dict(argstr='--source_label %d', ), - step_size=dict(argstr='--step_size %f', ), - target_label=dict(argstr='--target_label %d', ), - verbose=dict(argstr='--verbose ', ), - whole_brain=dict(argstr='--whole_brain ', ), + really_verbose=dict( + argstr="--really_verbose ", + ), + source_label=dict( + argstr="--source_label %d", + ), + step_size=dict( + argstr="--step_size %f", + ), + target_label=dict( + argstr="--target_label %d", + ), + verbose=dict( + argstr="--verbose ", + ), + whole_brain=dict( + argstr="--whole_brain ", + ), ) inputs = fibertrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_fibertrack_outputs(): - output_map = dict(output_fiber_file=dict(), ) + output_map = dict( + output_fiber_file=dict( + extensions=None, + ), + ) outputs = fibertrack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py b/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py index 11971dbb6d..fc8035762d 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py +++ b/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py @@ -1,167 +1,172 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ....base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ....base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class UKFTractographyInputSpec(CommandLineInputSpec): dwiFile = File(desc="Input DWI volume", exists=True, argstr="--dwiFile %s") seedsFile = File( - desc= - "Seeds for diffusion. If not specified, full brain tractography will be performed, and the algorithm will start from every voxel in the brain mask where the Generalized Anisotropy is bigger than 0.18", + desc="Seeds for diffusion. 
If not specified, full brain tractography will be performed, and the algorithm will start from every voxel in the brain mask where the Generalized Anisotropy is bigger than 0.18", exists=True, - argstr="--seedsFile %s") + argstr="--seedsFile %s", + ) labels = InputMultiPath( traits.Int, desc="A vector of the ROI labels to be used", sep=",", - argstr="--labels %s") + argstr="--labels %s", + ) maskFile = File( - desc="Mask for diffusion tractography", - exists=True, - argstr="--maskFile %s") + desc="Mask for diffusion tractography", exists=True, argstr="--maskFile %s" + ) tracts = traits.Either( traits.Bool, File(), hash_files=False, desc="Tracts generated, with first tensor output", - argstr="--tracts %s") + argstr="--tracts %s", + ) writeAsciiTracts = traits.Bool( - desc="Write tract file as a VTK binary data file", - argstr="--writeAsciiTracts ") + desc="Write tract file as a VTK binary data file", argstr="--writeAsciiTracts " + ) writeUncompressedTracts = traits.Bool( desc="Write tract file as a VTK uncompressed data file", - argstr="--writeUncompressedTracts ") + argstr="--writeUncompressedTracts ", + ) seedsPerVoxel = traits.Int( - desc= - " Each seed generates a fiber, thus using more seeds generates more fibers. In general use 1 or 2 seeds, and for a more thorough result use 5 or 10 (depending on your machine this may take up to 2 days to run)., ", - argstr="--seedsPerVoxel %d") + desc=" Each seed generates a fiber, thus using more seeds generates more fibers. In general use 1 or 2 seeds, and for a more thorough result use 5 or 10 (depending on your machine this may take up to 2 days to run)., ", + argstr="--seedsPerVoxel %d", + ) numTensor = traits.Enum( - "1", "2", desc="Number of tensors used", argstr="--numTensor %s") + "1", "2", desc="Number of tensors used", argstr="--numTensor %s" + ) freeWater = traits.Bool( - desc= - "Adds a term for free water difusion to the model. (Note for experts: if checked, the 1T simple model is forced) ", - argstr="--freeWater ") + desc="Adds a term for free water diffusion to the model. (Note for experts: if checked, the 1T simple model is forced) ", + argstr="--freeWater ", + ) recordFA = traits.Bool( - desc= - "Whether to store FA. Attaches field 'FA', and 'FA2' for 2-tensor case to fiber. ", - argstr="--recordFA ") + desc="Whether to store FA. Attaches field 'FA', and 'FA2' for 2-tensor case to fiber. ", + argstr="--recordFA ", + ) recordFreeWater = traits.Bool( - desc= - "Whether to store the fraction of free water. Attaches field 'FreeWater' to fiber.", - argstr="--recordFreeWater ") + desc="Whether to store the fraction of free water. Attaches field 'FreeWater' to fiber.", + argstr="--recordFreeWater ", + ) recordTrace = traits.Bool( - desc= - "Whether to store Trace. Attaches field 'Trace', and 'Trace2' for 2-tensor case to fiber.", - argstr="--recordTrace ") + desc="Whether to store Trace. Attaches field 'Trace', and 'Trace2' for 2-tensor case to fiber.", + argstr="--recordTrace ", + ) recordTensors = traits.Bool( - desc= - "Recording the tensors enables Slicer to color the fiber bundles by FA, orientation, and so on. The fields will be called 'TensorN', where N is the tensor number. ", - argstr="--recordTensors ") + desc="Recording the tensors enables Slicer to color the fiber bundles by FA, orientation, and so on. The fields will be called 'TensorN', where N is the tensor number. ", + argstr="--recordTensors ", + ) recordNMSE = traits.Bool( desc="Whether to store NMSE. Attaches field 'NMSE' to fiber. 
", - argstr="--recordNMSE ") + argstr="--recordNMSE ", + ) recordState = traits.Bool( - desc= - "Whether to attach the states to the fiber. Will generate field 'state'.", - argstr="--recordState ") + desc="Whether to attach the states to the fiber. Will generate field 'state'.", + argstr="--recordState ", + ) recordCovariance = traits.Bool( - desc= - "Whether to store the covariance. Will generate field 'covariance' in fiber.", - argstr="--recordCovariance ") + desc="Whether to store the covariance. Will generate field 'covariance' in fiber.", + argstr="--recordCovariance ", + ) recordLength = traits.Float( - desc="Record length of tractography, in millimeters", - argstr="--recordLength %f") + desc="Record length of tractography, in millimeters", argstr="--recordLength %f" + ) minFA = traits.Float( - desc= - "Abort the tractography when the Fractional Anisotropy is less than this value", - argstr="--minFA %f") + desc="Abort the tractography when the Fractional Anisotropy is less than this value", + argstr="--minFA %f", + ) minGA = traits.Float( - desc= - "Abort the tractography when the Generalized Anisotropy is less than this value", - argstr="--minGA %f") + desc="Abort the tractography when the Generalized Anisotropy is less than this value", + argstr="--minGA %f", + ) fullTensorModel = traits.Bool( - desc= - "Whether to use the full tensor model. If unchecked, use the default simple tensor model", - argstr="--fullTensorModel ") + desc="Whether to use the full tensor model. If unchecked, use the default simple tensor model", + argstr="--fullTensorModel ", + ) numThreads = traits.Int( - desc= - "Number of threads used during computation. Set to the number of cores on your workstation for optimal speed. If left undefined the number of cores detected will be used. ", - argstr="--numThreads %d") + desc="Number of threads used during computation. Set to the number of cores on your workstation for optimal speed. If left undefined the number of cores detected will be used. ", + argstr="--numThreads %d", + ) stepLength = traits.Float( - desc="Step length of tractography, in millimeters", - argstr="--stepLength %f") + desc="Step length of tractography, in millimeters", argstr="--stepLength %f" + ) maxHalfFiberLength = traits.Float( - desc= - "The max length limit of the half fibers generated during tractography. Here the fiber is \'half\' because the tractography goes in only one direction from one seed point at a time", - argstr="--maxHalfFiberLength %f") + desc="The max length limit of the half fibers generated during tractography. Here the fiber is 'half' because the tractography goes in only one direction from one seed point at a time", + argstr="--maxHalfFiberLength %f", + ) seedFALimit = traits.Float( desc="Seed points whose FA are below this value are excluded", - argstr="--seedFALimit %f") - Qm = traits.Float( - desc="Process noise for angles/direction", argstr="--Qm %f") + argstr="--seedFALimit %f", + ) + Qm = traits.Float(desc="Process noise for angles/direction", argstr="--Qm %f") Ql = traits.Float(desc="Process noise for eigenvalues", argstr="--Ql %f") Qw = traits.Float( - desc= - "Process noise for free water weights, ignored if no free water estimation", - argstr="--Qw %f") + desc="Process noise for free water weights, ignored if no free water estimation", + argstr="--Qw %f", + ) Rs = traits.Float(desc="Measurement noise", argstr="--Rs %f") maxBranchingAngle = traits.Float( - desc= - "Maximum branching angle, in degrees. 
When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle). Branching is supressed when this maxBranchingAngle is set to 0.0", - argstr="--maxBranchingAngle %f") + desc="Maximum branching angle, in degrees. When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle). Branching is suppressed when this maxBranchingAngle is set to 0.0", + argstr="--maxBranchingAngle %f", + ) minBranchingAngle = traits.Float( - desc= - "Minimum branching angle, in degrees. When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle)", - argstr="--minBranchingAngle %f") + desc="Minimum branching angle, in degrees. When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle)", + argstr="--minBranchingAngle %f", + ) tractsWithSecondTensor = traits.Either( traits.Bool, File(), hash_files=False, desc="Tracts generated, with second tensor output (if there is one)", - argstr="--tractsWithSecondTensor %s") + argstr="--tractsWithSecondTensor %s", + ) storeGlyphs = traits.Bool( - desc= - "Store tensors' main directions as two-point lines in a separate file named glyphs_{tracts}. When using multiple tensors, only the major tensors' main directions are stored", - argstr="--storeGlyphs ") + desc="Store tensors' main directions as two-point lines in a separate file named glyphs_{tracts}. When using multiple tensors, only the major tensors' main directions are stored", + argstr="--storeGlyphs ", + ) class UKFTractographyOutputSpec(TraitedSpec): - tracts = File( - desc="Tracts generated, with first tensor output", exists=True) + tracts = File(desc="Tracts generated, with first tensor output", exists=True) tractsWithSecondTensor = File( desc="Tracts generated, with second tensor output (if there is one)", - exists=True) + exists=True, + ) class UKFTractography(SEMLikeCommandLine): """title: UKF Tractography -category: Diffusion.Tractography - -description: This module traces fibers in a DWI Volume using the multiple tensor unscented Kalman Filter methology. For more informations check the documentation. + category: Diffusion.Tractography -version: 1.0 + description: This module traces fibers in a DWI Volume using the multiple tensor unscented Kalman Filter methodology. For more information check the documentation. -documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/ukftractography:MainPage + version: 1.0 -contributor: Yogesh Rathi, Stefan Lienhard, Yinpeng Li, Martin Styner, Ipek Oguz, Yundi Shi, Christian Baumgartner, Kent Williams, Hans Johnson, Peter Savadjiev, Carl-Fredrik Westin. + documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/ukftractography:MainPage -acknowledgements: The development of this module was supported by NIH grants R01 MH097979 (PI Rathi), R01 MH092862 (PIs Westin and Verma), U01 NS083223 (PI Westin), R01 MH074794 (PI Westin) and P41 EB015902 (PI Kikinis). + contributor: Yogesh Rathi, Stefan Lienhard, Yinpeng Li, Martin Styner, Ipek Oguz, Yundi Shi, Christian Baumgartner, Kent Williams, Hans Johnson, Peter Savadjiev, Carl-Fredrik Westin.
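A hypothetical two-tensor UKFTractography configuration built from the traits defined above; the paths are placeholders, and nipype plus the UKFTractography binary are assumed to be installed:

    # Two-tensor UKF tracking with FA recording, per the spec above.
    # Empty placeholder files satisfy the exists=True input traits.
    from pathlib import Path
    from nipype.interfaces.semtools.diffusion.tractography.ukftractography import (
        UKFTractography,
    )

    for name in ("dwi.nhdr", "brain_mask.nhdr"):
        Path(name).touch()

    ukf = UKFTractography()
    ukf.inputs.dwiFile = "dwi.nhdr"
    ukf.inputs.maskFile = "brain_mask.nhdr"
    ukf.inputs.numTensor = "2"        # the enum takes the strings "1" or "2"
    ukf.inputs.seedsPerVoxel = 1
    ukf.inputs.recordFA = True        # attach 'FA'/'FA2' fields to the fibers
    ukf.inputs.tracts = "tracts.vtp"  # or True to use the default tracts.vtp
    print(ukf.cmdline)  # inspect the generated command without running the binary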
-""" + acknowledgements: The development of this module was supported by NIH grants R01 MH097979 (PI Rathi), R01 MH092862 (PIs Westin and Verma), U01 NS083223 (PI Westin), R01 MH074794 (PI Westin) and P41 EB015902 (PI Kikinis). + """ input_spec = UKFTractographyInputSpec output_spec = UKFTractographyOutputSpec _cmd = " UKFTractography " _outputs_filenames = { - 'tracts': 'tracts.vtp', - 'tractsWithSecondTensor': 'tractsWithSecondTensor.vtp' + "tracts": "tracts.vtp", + "tractsWithSecondTensor": "tractsWithSecondTensor.vtp", } _redirect_x = False diff --git a/nipype/interfaces/semtools/featurecreator.py b/nipype/interfaces/semtools/featurecreator.py index 69ff2d675c..08482853a5 100644 --- a/nipype/interfaces/semtools/featurecreator.py +++ b/nipype/interfaces/semtools/featurecreator.py @@ -1,26 +1,28 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ..base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ..base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class GenerateCsfClippedFromClassifiedImageInputSpec(CommandLineInputSpec): inputCassifiedVolume = File( desc="Required: input tissue label image", exists=True, - argstr="--inputCassifiedVolume %s") + argstr="--inputCassifiedVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class GenerateCsfClippedFromClassifiedImageOutputSpec(TraitedSpec): @@ -30,22 +32,21 @@ class GenerateCsfClippedFromClassifiedImageOutputSpec(TraitedSpec): class GenerateCsfClippedFromClassifiedImage(SEMLikeCommandLine): """title: GenerateCsfClippedFromClassifiedImage -category: FeatureCreator - -description: Get the distance from a voxel to the nearest voxel of a given tissue type. + category: FeatureCreator -version: 0.1.0.$Revision: 1 $(alpha) + description: Get the distance from a voxel to the nearest voxel of a given tissue type. -documentation-url: http:://www.na-mic.org/ + version: 0.1.0.$Revision: 1 $(alpha) -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http:://www.na-mic.org/ -contributor: This tool was written by Hans J. Johnson. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: This tool was written by Hans J. Johnson. 
+ """ input_spec = GenerateCsfClippedFromClassifiedImageInputSpec output_spec = GenerateCsfClippedFromClassifiedImageOutputSpec _cmd = " GenerateCsfClippedFromClassifiedImage " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/filtering/__init__.py b/nipype/interfaces/semtools/filtering/__init__.py index 1e69233303..159dc2c490 100644 --- a/nipype/interfaces/semtools/filtering/__init__.py +++ b/nipype/interfaces/semtools/filtering/__init__.py @@ -1,10 +1,21 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .denoising import UnbiasedNonLocalMeans from .featuredetection import ( - GenerateSummedGradientImage, CannySegmentationLevelSetImageFilter, - DilateImage, TextureFromNoiseImageFilter, FlippedDifference, ErodeImage, - GenerateBrainClippedImage, NeighborhoodMedian, GenerateTestImage, - NeighborhoodMean, HammerAttributeCreator, TextureMeasureFilter, DilateMask, - DumpBinaryTrainingVectors, DistanceMaps, STAPLEAnalysis, - GradientAnisotropicDiffusionImageFilter, CannyEdge) + GenerateSummedGradientImage, + CannySegmentationLevelSetImageFilter, + DilateImage, + TextureFromNoiseImageFilter, + FlippedDifference, + ErodeImage, + GenerateBrainClippedImage, + NeighborhoodMedian, + GenerateTestImage, + NeighborhoodMean, + HammerAttributeCreator, + TextureMeasureFilter, + DilateMask, + DumpBinaryTrainingVectors, + DistanceMaps, + STAPLEAnalysis, + GradientAnisotropicDiffusionImageFilter, + CannyEdge, +) diff --git a/nipype/interfaces/semtools/filtering/denoising.py b/nipype/interfaces/semtools/filtering/denoising.py index 97d687c512..9afd9184fa 100644 --- a/nipype/interfaces/semtools/filtering/denoising.py +++ b/nipype/interfaces/semtools/filtering/denoising.py @@ -1,83 +1,82 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class UnbiasedNonLocalMeansInputSpec(CommandLineInputSpec): sigma = traits.Float( - desc= - "The root power of noise (sigma) in the complex Gaussian process the Rician comes from. If it is underestimated, the algorithm fails to remove the noise. If it is overestimated, over-blurring is likely to occur.", - argstr="--sigma %f") + desc="The root power of noise (sigma) in the complex Gaussian process the Rician comes from. If it is underestimated, the algorithm fails to remove the noise. If it is overestimated, over-blurring is likely to occur.", + argstr="--sigma %f", + ) rs = InputMultiPath( traits.Int, - desc= - "The algorithm search for similar voxels in a neighborhood of this radius (radii larger than 5,5,5 are very slow, and the results can be only marginally better. Small radii may fail to effectively remove the noise).", + desc="The algorithm search for similar voxels in a neighborhood of this radius (radii larger than 5,5,5 are very slow, and the results can be only marginally better. 
Small radii may fail to effectively remove the noise).", sep=",", - argstr="--rs %s") + argstr="--rs %s", + ) rc = InputMultiPath( traits.Int, - desc= - "Similarity between blocks is computed as the difference between mean values and gradients. These parameters are computed fitting a hyperplane with LS inside a neighborhood of this size", + desc="Similarity between blocks is computed as the difference between mean values and gradients. These parameters are computed fitting a hyperplane with LS inside a neighborhood of this size", sep=",", - argstr="--rc %s") + argstr="--rc %s", + ) hp = traits.Float( - desc= - "This parameter is related to noise; the larger the parameter, the more aggressive the filtering. Should be near 1, and only values between 0.8 and 1.2 are allowed", - argstr="--hp %f") + desc="This parameter is related to noise; the larger the parameter, the more aggressive the filtering. Should be near 1, and only values between 0.8 and 1.2 are allowed", + argstr="--hp %f", + ) ps = traits.Float( - desc= - "To accelerate computations, preselection is used: if the normalized difference is above this threshold, the voxel will be discarded (non used for average)", - argstr="--ps %f") - inputVolume = File( - position=-2, desc="Input MRI volume.", exists=True, argstr="%s") + desc="To accelerate computations, preselection is used: if the normalized difference is above this threshold, the voxel will be discarded (non used for average)", + argstr="--ps %f", + ) + inputVolume = File(position=-2, desc="Input MRI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output (filtered) MRI volume.", - argstr="%s") + argstr="%s", + ) class UnbiasedNonLocalMeansOutputSpec(TraitedSpec): - outputVolume = File( - position=-1, desc="Output (filtered) MRI volume.", exists=True) + outputVolume = File(position=-1, desc="Output (filtered) MRI volume.", exists=True) class UnbiasedNonLocalMeans(SEMLikeCommandLine): """title: Unbiased NLM for MRI -category: Filtering.Denoising - -description: This module implements a fast version of the popular Non-Local Means filter for image denoising. This algorithm filters each pixel as a weighted average of its neighbors in a large vicinity. The weights are computed based on the similarity of each neighbor with the voxel to be denoised. - In the original formulation a patch with a certain radius is centered in each of the voxels, and the Mean Squared Error between each pair of corresponding voxels is computed. In this implementation, only the mean value and gradient components are compared. This, together with an efficient memory management, can attain a speed-up of nearly 20x. Besides, the filtering is more accurate than the original with poor SNR. - This code is intended for its use with MRI (or any other Rician-distributed modality): the second order moment is estimated, then we subtract twice the squared power of noise, and finally we take the square root of the result to remove the Rician bias. - The original implementation of the NLM filter may be found in: - A. Buades, B. Coll, J. Morel, "A review of image denoising algorithms, with a new one", Multiscale Modelling and Simulation 4(2): 490-530. 2005. - The correction of the Rician bias is described in the following reference (among others): - S. Aja-Fernandez, K. Krissian, "An unbiased Non-Local Means scheme for DWI filtering", in: Proceedings of the MICCAI Workshop on Computational Diffusion MRI, 2008, pp. 277-284. 
- The whole description of this version may be found in the following paper (please, cite it if you are willing to use this software): - A. Tristan-Vega, V. Garcia Perez, S. Aja-Fenandez, and C.-F. Westin, "Efficient and Robust Nonlocal Means Denoising of MR Data Based on Salient Features Matching", Computer Methods and Programs in Biomedicine. (Accepted for publication) 2011. + category: Filtering.Denoising -version: 0.0.1.$Revision: 1 $(beta) + description: This module implements a fast version of the popular Non-Local Means filter for image denoising. This algorithm filters each pixel as a weighted average of its neighbors in a large vicinity. The weights are computed based on the similarity of each neighbor with the voxel to be denoised. + In the original formulation a patch with a certain radius is centered in each of the voxels, and the Mean Squared Error between each pair of corresponding voxels is computed. In this implementation, only the mean value and gradient components are compared. This, together with an efficient memory management, can attain a speed-up of nearly 20x. Besides, the filtering is more accurate than the original with poor SNR. + This code is intended for its use with MRI (or any other Rician-distributed modality): the second order moment is estimated, then we subtract twice the squared power of noise, and finally we take the square root of the result to remove the Rician bias. + The original implementation of the NLM filter may be found in: + A. Buades, B. Coll, J. Morel, "A review of image denoising algorithms, with a new one", Multiscale Modelling and Simulation 4(2): 490-530. 2005. + The correction of the Rician bias is described in the following reference (among others): + S. Aja-Fernandez, K. Krissian, "An unbiased Non-Local Means scheme for DWI filtering", in: Proceedings of the MICCAI Workshop on Computational Diffusion MRI, 2008, pp. 277-284. + The whole description of this version may be found in the following paper (please, cite it if you are willing to use this software): + A. Tristan-Vega, V. Garcia Perez, S. Aja-Fernandez, and C.-F. Westin, "Efficient and Robust Nonlocal Means Denoising of MR Data Based on Salient Features Matching", Computer Methods and Programs in Biomedicine. (Accepted for publication) 2011.
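A hedged usage sketch for the denoiser described above; file names are hypothetical, the class is assumed to be re-exported from nipype.interfaces.semtools, and the UnbiasedNonLocalMeans executable (a Slicer CLI) must be installed:

from nipype.interfaces.semtools import UnbiasedNonLocalMeans  # assumed re-export

nlm = UnbiasedNonLocalMeans()
nlm.inputs.inputVolume = "t1.nii"    # hypothetical MRI volume
nlm.inputs.sigma = 25.0              # noise power: too low leaves noise, too high blurs
nlm.inputs.rs = [3, 3, 3]            # search radius; larger than 5,5,5 is very slow
nlm.inputs.rc = [1, 1, 1]            # radius for the hyperplane (mean/gradient) fit
nlm.inputs.hp = 1.0                  # aggressiveness, must stay within 0.8-1.2
nlm.inputs.outputVolume = "t1_denoised.nii"
print(nlm.cmdline)                   # positional inputVolume/outputVolume come last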
-documentation-url: http://www.slicer.org/slicerWiki/index.php/Modules:UnbiasedNonLocalMeans-Documentation-3.6 + version: 0.0.1.$Revision: 1 $(beta) -contributor: Antonio Tristan Vega, Veronica Garcia-Perez, Santiago Aja-Fernandez, Carl-Fredrik Westin + documentation-url: http://www.slicer.org/slicerWiki/index.php/Modules:UnbiasedNonLocalMeans-Documentation-3.6 -acknowledgements: Supported by grant number FMECD-2010/71131616E from the Spanish Ministry of Education/Fulbright Committee + contributor: Antonio Tristan Vega, Veronica Garcia-Perez, Santiago Aja-Fernandez, Carl-Fredrik Westin -""" + acknowledgements: Supported by grant number FMECD-2010/71131616E from the Spanish Ministry of Education/Fulbright Committee + """ input_spec = UnbiasedNonLocalMeansInputSpec output_spec = UnbiasedNonLocalMeansOutputSpec _cmd = " UnbiasedNonLocalMeans " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/filtering/featuredetection.py b/nipype/interfaces/semtools/filtering/featuredetection.py index ca4973ab43..b61cf59a11 100644 --- a/nipype/interfaces/semtools/filtering/featuredetection.py +++ b/nipype/interfaces/semtools/filtering/featuredetection.py @@ -1,37 +1,38 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class GenerateSummedGradientImageInputSpec(CommandLineInputSpec): inputVolume1 = File( - desc="input volume 1, usally t1 image", - exists=True, - argstr="--inputVolume1 %s") + desc="input volume 1, usually t1 image", exists=True, argstr="--inputVolume1 %s" + ) inputVolume2 = File( - desc="input volume 2, usally t2 image", - exists=True, - argstr="--inputVolume2 %s") + desc="input volume 2, usually t2 image", exists=True, argstr="--inputVolume2 %s" + ) outputFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="(required) output file name", - argstr="--outputFileName %s") + argstr="--outputFileName %s", + ) MaximumGradient = traits.Bool( - desc= - "If set this flag, it will compute maximum gradient between two input volumes instead of sum of it.", - argstr="--MaximumGradient ") + desc="If set this flag, it will compute maximum gradient between two input volumes instead of sum of it.", + argstr="--MaximumGradient ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class GenerateSummedGradientImageOutputSpec(TraitedSpec): @@ -41,22 +42,21 @@ class GenerateSummedGradientImageOutputSpec(TraitedSpec): class GenerateSummedGradientImage(SEMLikeCommandLine): """title: GenerateSummedGradient -category: Filtering.FeatureDetection + category: Filtering.FeatureDetection -description: Automatic FeatureImages using neural networks + description: Automatic FeatureImages using neural networks -version: 1.0 + version: 1.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -contributor: Greg Harris, Eun Young Kim - -""" + contributor: 
Greg Harris, Eun Young Kim + """ input_spec = GenerateSummedGradientImageInputSpec output_spec = GenerateSummedGradientImageOutputSpec _cmd = " GenerateSummedGradientImage " - _outputs_filenames = {'outputFileName': 'outputFileName'} + _outputs_filenames = {"outputFileName": "outputFileName"} _redirect_x = False @@ -64,21 +64,23 @@ class CannySegmentationLevelSetImageFilterInputSpec(CommandLineInputSpec): inputVolume = File(exists=True, argstr="--inputVolume %s") initialModel = File(exists=True, argstr="--initialModel %s") outputVolume = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--outputVolume %s") + traits.Bool, File(), hash_files=False, argstr="--outputVolume %s" + ) outputSpeedVolume = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--outputSpeedVolume %s") + traits.Bool, File(), hash_files=False, argstr="--outputSpeedVolume %s" + ) cannyThreshold = traits.Float( - desc="Canny Threshold Value", argstr="--cannyThreshold %f") - cannyVariance = traits.Float( - desc="Canny variance", argstr="--cannyVariance %f") + desc="Canny Threshold Value", argstr="--cannyThreshold %f" + ) + cannyVariance = traits.Float(desc="Canny variance", argstr="--cannyVariance %f") advectionWeight = traits.Float( - desc= - "Controls the smoothness of the resulting mask, small number are more smooth, large numbers allow more sharp corners. ", - argstr="--advectionWeight %f") + desc="Controls the smoothness of the resulting mask, small number are more smooth, large numbers allow more sharp corners. ", + argstr="--advectionWeight %f", + ) initialModelIsovalue = traits.Float( - desc= - "The identification of the input model iso-surface. (for a binary image with 0s and 1s use 0.5) (for a binary image with 0s and 255's use 127.5).", - argstr="--initialModelIsovalue %f") + desc="The identification of the input model iso-surface. (for a binary image with 0s and 1s use 0.5) (for a binary image with 0s and 255's use 127.5).", + argstr="--initialModelIsovalue %f", + ) maxIterations = traits.Int(desc="The", argstr="--maxIterations %d") @@ -90,45 +92,48 @@ class CannySegmentationLevelSetImageFilterOutputSpec(TraitedSpec): class CannySegmentationLevelSetImageFilter(SEMLikeCommandLine): """title: Canny Level Set Image Filter -category: Filtering.FeatureDetection - -description: The CannySegmentationLevelSet is commonly used to refine a manually generated manual mask. + category: Filtering.FeatureDetection -version: 0.3.0 + description: The CannySegmentationLevelSet is commonly used to refine a manually generated manual mask. -license: CC + version: 0.3.0 -contributor: Regina Kim + license: CC -acknowledgements: This command module was derived from Insight/Examples/Segmentation/CannySegmentationLevelSetImageFilter.cxx (copyright) Insight Software Consortium. See http://wiki.na-mic.org/Wiki/index.php/Slicer3:Execution_Model_Documentation for more detailed descriptions. + contributor: Regina Kim -""" + acknowledgements: This command module was derived from Insight/Examples/Segmentation/CannySegmentationLevelSetImageFilter.cxx (copyright) Insight Software Consortium. See http://wiki.na-mic.org/Wiki/index.php/Slicer3:Execution_Model_Documentation for more detailed descriptions. 
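As a sketch of the two-volume feature generators reformatted here (GenerateSummedGradientImage above), with hypothetical files and an assumed package-level re-export:

from nipype.interfaces.semtools import GenerateSummedGradientImage

grad = GenerateSummedGradientImage()
grad.inputs.inputVolume1 = "t1.nii"               # usually the T1 image, per the desc
grad.inputs.inputVolume2 = "t2.nii"               # usually the T2 image
grad.inputs.MaximumGradient = True                # voxelwise maximum instead of the sum
grad.inputs.outputFileName = "summed_gradient.nii"
print(grad.cmdline)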
+ """ input_spec = CannySegmentationLevelSetImageFilterInputSpec output_spec = CannySegmentationLevelSetImageFilterOutputSpec _cmd = " CannySegmentationLevelSetImageFilter " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputSpeedVolume': 'outputSpeedVolume.nii' + "outputVolume": "outputVolume.nii", + "outputSpeedVolume": "outputSpeedVolume.nii", } _redirect_x = False class DilateImageInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) inputRadius = traits.Int( - desc="Required: input neighborhood radius", argstr="--inputRadius %d") + desc="Required: input neighborhood radius", argstr="--inputRadius %d" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class DilateImageOutputSpec(TraitedSpec): @@ -138,38 +143,40 @@ class DilateImageOutputSpec(TraitedSpec): class DilateImage(SEMLikeCommandLine): """title: Dilate Image -category: Filtering.FeatureDetection - -description: Uses mathematical morphology to dilate the input images. + category: Filtering.FeatureDetection -version: 0.1.0.$Revision: 1 $(alpha) + description: Uses mathematical morphology to dilate the input images. -documentation-url: http:://www.na-mic.org/ + version: 0.1.0.$Revision: 1 $(alpha) -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http:://www.na-mic.org/ -contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + """ input_spec = DilateImageInputSpec output_spec = DilateImageOutputSpec _cmd = " DilateImage " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class TextureFromNoiseImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputRadius = traits.Int( - desc="Required: input neighborhood radius", argstr="--inputRadius %d") + desc="Required: input neighborhood radius", argstr="--inputRadius %d" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class TextureFromNoiseImageFilterOutputSpec(TraitedSpec): @@ -179,40 +186,42 @@ class TextureFromNoiseImageFilterOutputSpec(TraitedSpec): class TextureFromNoiseImageFilter(SEMLikeCommandLine): """title: TextureFromNoiseImageFilter -category: Filtering.FeatureDetection + category: Filtering.FeatureDetection -description: Calculate the local noise in an image. + description: Calculate the local noise in an image. 
-version: 0.1.0.$Revision: 1 $(alpha) + version: 0.1.0.$Revision: 1 $(alpha) -documentation-url: http:://www.na-mic.org/ + documentation-url: http:://www.na-mic.org/ -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -contributor: This tool was developed by Eunyoung Regina Kim - -""" + contributor: This tool was developed by Eunyoung Regina Kim + """ input_spec = TextureFromNoiseImageFilterInputSpec output_spec = TextureFromNoiseImageFilterOutputSpec _cmd = " TextureFromNoiseImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class FlippedDifferenceInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class FlippedDifferenceOutputSpec(TraitedSpec): @@ -222,42 +231,45 @@ class FlippedDifferenceOutputSpec(TraitedSpec): class FlippedDifference(SEMLikeCommandLine): """title: Flip Image -category: Filtering.FeatureDetection - -description: Difference between an image and the axially flipped version of that image. + category: Filtering.FeatureDetection -version: 0.1.0.$Revision: 1 $(alpha) + description: Difference between an image and the axially flipped version of that image. -documentation-url: http:://www.na-mic.org/ + version: 0.1.0.$Revision: 1 $(alpha) -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http:://www.na-mic.org/ -contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + """ input_spec = FlippedDifferenceInputSpec output_spec = FlippedDifferenceOutputSpec _cmd = " FlippedDifference " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class ErodeImageInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) inputRadius = traits.Int( - desc="Required: input neighborhood radius", argstr="--inputRadius %d") + desc="Required: input neighborhood radius", argstr="--inputRadius %d" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class ErodeImageOutputSpec(TraitedSpec): @@ -267,45 +279,44 @@ class ErodeImageOutputSpec(TraitedSpec): class ErodeImage(SEMLikeCommandLine): """title: Erode Image -category: Filtering.FeatureDetection - -description: Uses mathematical morphology to erode the input images. 
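DilateImage, ErodeImage, NeighborhoodMedian, and NeighborhoodMean in this file all share the inputVolume/inputMaskVolume/inputRadius pattern, so one hedged example covers the family (hypothetical files; the DilateImage binary is assumed to be on $PATH):

from nipype.interfaces.semtools import DilateImage  # assumed re-export

dilate = DilateImage()
dilate.inputs.inputVolume = "labels.nii"        # hypothetical input image
dilate.inputs.inputMaskVolume = "brain_mask.nii"
dilate.inputs.inputRadius = 2                   # neighborhood radius in voxels
# Passing True makes SEMLikeCommandLine fall back to _outputs_filenames,
# i.e. 'outputVolume.nii' in the working directory.
dilate.inputs.outputVolume = True
print(dilate.cmdline)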
+ category: Filtering.FeatureDetection -version: 0.1.0.$Revision: 1 $(alpha) + description: Uses mathematical morphology to erode the input images. -documentation-url: http:://www.na-mic.org/ + version: 0.1.0.$Revision: 1 $(alpha) -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http:://www.na-mic.org/ -contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + """ input_spec = ErodeImageInputSpec output_spec = ErodeImageOutputSpec _cmd = " ErodeImage " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GenerateBrainClippedImageInputSpec(CommandLineInputSpec): inputImg = File( - desc="input volume 1, usally t1 image", - exists=True, - argstr="--inputImg %s") + desc="input volume 1, usually t1 image", exists=True, argstr="--inputImg %s" + ) inputMsk = File( - desc="input volume 2, usally t2 image", - exists=True, - argstr="--inputMsk %s") + desc="input volume 2, usually t2 image", exists=True, argstr="--inputMsk %s" + ) outputFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="(required) output file name", - argstr="--outputFileName %s") + argstr="--outputFileName %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class GenerateBrainClippedImageOutputSpec(TraitedSpec): @@ -315,40 +326,43 @@ class GenerateBrainClippedImageOutputSpec(TraitedSpec): class GenerateBrainClippedImage(SEMLikeCommandLine): """title: GenerateBrainClippedImage -category: Filtering.FeatureDetection - -description: Automatic FeatureImages using neural networks + category: Filtering.FeatureDetection -version: 1.0 + description: Automatic FeatureImages using neural networks -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + version: 1.0 -contributor: Eun Young Kim + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: Eun Young Kim + """ input_spec = GenerateBrainClippedImageInputSpec output_spec = GenerateBrainClippedImageOutputSpec _cmd = " GenerateBrainClippedImage " - _outputs_filenames = {'outputFileName': 'outputFileName'} + _outputs_filenames = {"outputFileName": "outputFileName"} _redirect_x = False class NeighborhoodMedianInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) inputRadius = traits.Int( - desc="Required: input neighborhood radius", argstr="--inputRadius %d") + desc="Required: input neighborhood radius", argstr="--inputRadius %d" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class NeighborhoodMedianOutputSpec(TraitedSpec): @@ -358,44 +372,42 @@ class NeighborhoodMedianOutputSpec(TraitedSpec): class NeighborhoodMedian(SEMLikeCommandLine): """title: Neighborhood Median -category: Filtering.FeatureDetection + category: Filtering.FeatureDetection -description: Calculates the median, for 
the given neighborhood size, at each voxel of the input image. + description: Calculates the median, for the given neighborhood size, at each voxel of the input image. -version: 0.1.0.$Revision: 1 $(alpha) + version: 0.1.0.$Revision: 1 $(alpha) -documentation-url: http:://www.na-mic.org/ + documentation-url: http:://www.na-mic.org/ -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -contributor: This tool was developed by Mark Scully and Jeremy Bockholt. - -""" + contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + """ input_spec = NeighborhoodMedianInputSpec output_spec = NeighborhoodMedianOutputSpec _cmd = " NeighborhoodMedian " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GenerateTestImageInputSpec(CommandLineInputSpec): inputVolume = File( - desc="input volume 1, usally t1 image", - exists=True, - argstr="--inputVolume %s") + desc="input volume 1, usually t1 image", exists=True, argstr="--inputVolume %s" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="(required) output file name", - argstr="--outputVolume %s") - lowerBoundOfOutputVolume = traits.Float( - argstr="--lowerBoundOfOutputVolume %f") - upperBoundOfOutputVolume = traits.Float( - argstr="--upperBoundOfOutputVolume %f") + argstr="--outputVolume %s", + ) + lowerBoundOfOutputVolume = traits.Float(argstr="--lowerBoundOfOutputVolume %f") + upperBoundOfOutputVolume = traits.Float(argstr="--upperBoundOfOutputVolume %f") outputVolumeSize = traits.Float( - desc="output Volume Size", argstr="--outputVolumeSize %f") + desc="output Volume Size", argstr="--outputVolumeSize %f" + ) class GenerateTestImageOutputSpec(TraitedSpec): @@ -405,40 +417,43 @@ class GenerateTestImageOutputSpec(TraitedSpec): class GenerateTestImage(SEMLikeCommandLine): """title: DownSampleImage -category: Filtering.FeatureDetection - -description: Down sample image for testing + category: Filtering.FeatureDetection -version: 1.0 + description: Down sample image for testing -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + version: 1.0 -contributor: Eun Young Kim + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: Eun Young Kim + """ input_spec = GenerateTestImageInputSpec output_spec = GenerateTestImageOutputSpec _cmd = " GenerateTestImage " - _outputs_filenames = {'outputVolume': 'outputVolume'} + _outputs_filenames = {"outputVolume": "outputVolume"} _redirect_x = False class NeighborhoodMeanInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) inputRadius = traits.Int( - desc="Required: input neighborhood radius", argstr="--inputRadius %d") + desc="Required: input neighborhood radius", argstr="--inputRadius %d" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class NeighborhoodMeanOutputSpec(TraitedSpec): @@ -448,47 +463,48 @@ class NeighborhoodMeanOutputSpec(TraitedSpec): class NeighborhoodMean(SEMLikeCommandLine): """title: Neighborhood Mean 
-category: Filtering.FeatureDetection + category: Filtering.FeatureDetection -description: Calculates the mean, for the given neighborhood size, at each voxel of the T1, T2, and FLAIR. + description: Calculates the mean, for the given neighborhood size, at each voxel of the T1, T2, and FLAIR. -version: 0.1.0.$Revision: 1 $(alpha) + version: 0.1.0.$Revision: 1 $(alpha) -documentation-url: http:://www.na-mic.org/ + documentation-url: http:://www.na-mic.org/ -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -contributor: This tool was developed by Mark Scully and Jeremy Bockholt. - -""" + contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + """ input_spec = NeighborhoodMeanInputSpec output_spec = NeighborhoodMeanOutputSpec _cmd = " NeighborhoodMean " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class HammerAttributeCreatorInputSpec(CommandLineInputSpec): Scale = traits.Int(desc="Determine Scale of Ball", argstr="--Scale %d") - Strength = traits.Float( - desc="Determine Strength of Edges", argstr="--Strength %f") + Strength = traits.Float(desc="Determine Strength of Edges", argstr="--Strength %f") inputGMVolume = File( desc="Required: input grey matter posterior image", exists=True, - argstr="--inputGMVolume %s") + argstr="--inputGMVolume %s", + ) inputWMVolume = File( desc="Required: input white matter posterior image", exists=True, - argstr="--inputWMVolume %s") + argstr="--inputWMVolume %s", + ) inputCSFVolume = File( desc="Required: input CSF posterior image", exists=True, - argstr="--inputCSFVolume %s") + argstr="--inputCSFVolume %s", + ) outputVolumeBase = traits.Str( - desc= - "Required: output image base name to be appended for each feature vector.", - argstr="--outputVolumeBase %s") + desc="Required: output image base name to be appended for each feature vector.", + argstr="--outputVolumeBase %s", + ) class HammerAttributeCreatorOutputSpec(TraitedSpec): @@ -498,19 +514,18 @@ class HammerAttributeCreatorOutputSpec(TraitedSpec): class HammerAttributeCreator(SEMLikeCommandLine): """title: HAMMER Feature Vectors -category: Filtering.FeatureDetection - -description: Create the feature vectors used by HAMMER. + category: Filtering.FeatureDetection -version: 0.1.0.$Revision: 1 $(alpha) + description: Create the feature vectors used by HAMMER. -documentation-url: http:://www.na-mic.org/ + version: 0.1.0.$Revision: 1 $(alpha) -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http:://www.na-mic.org/ -contributor: This was extracted from the Hammer Registration source code, and wrapped up by Hans J. Johnson. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: This was extracted from the Hammer Registration source code, and wrapped up by Hans J. Johnson. 
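Since every class here subclasses SEMLikeCommandLine, each drops straight into a pipeline. A hedged sketch wrapping the NeighborhoodMean interface defined just above as a Node (hypothetical files, assumed re-export):

import nipype.pipeline.engine as pe
from nipype.interfaces.semtools import NeighborhoodMean  # assumed re-export

mean = pe.Node(NeighborhoodMean(), name="neighborhood_mean")
mean.inputs.inputVolume = "t1.nii"      # hypothetical file
mean.inputs.inputMaskVolume = "mask.nii"
mean.inputs.inputRadius = 2
mean.inputs.outputVolume = True         # default name from _outputs_filenames
result = mean.run()                     # needs the NeighborhoodMean binary on $PATH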
+ """ input_spec = HammerAttributeCreatorInputSpec output_spec = HammerAttributeCreatorOutputSpec @@ -525,7 +540,8 @@ class TextureMeasureFilterInputSpec(CommandLineInputSpec): distance = traits.Int(argstr="--distance %d") insideROIValue = traits.Float(argstr="--insideROIValue %f") outputFilename = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--outputFilename %s") + traits.Bool, File(), hash_files=False, argstr="--outputFilename %s" + ) class TextureMeasureFilterOutputSpec(TraitedSpec): @@ -535,46 +551,49 @@ class TextureMeasureFilterOutputSpec(TraitedSpec): class TextureMeasureFilter(SEMLikeCommandLine): """title: Canny Level Set Image Filter -category: Filtering.FeatureDetection - -description: The CannySegmentationLevelSet is commonly used to refine a manually generated manual mask. + category: Filtering.FeatureDetection -version: 0.3.0 + description: The CannySegmentationLevelSet is commonly used to refine a manually generated manual mask. -license: CC + version: 0.3.0 -contributor: Regina Kim + license: CC -acknowledgements: This command module was derived from Insight/Examples/Segmentation/CannySegmentationLevelSetImageFilter.cxx (copyright) Insight Software Consortium. See http://wiki.na-mic.org/Wiki/index.php/Slicer3:Execution_Model_Documentation for more detailed descriptions. + contributor: Regina Kim -""" + acknowledgements: This command module was derived from Insight/Examples/Segmentation/CannySegmentationLevelSetImageFilter.cxx (copyright) Insight Software Consortium. See http://wiki.na-mic.org/Wiki/index.php/Slicer3:Execution_Model_Documentation for more detailed descriptions. + """ input_spec = TextureMeasureFilterInputSpec output_spec = TextureMeasureFilterOutputSpec _cmd = " TextureMeasureFilter " - _outputs_filenames = {'outputFilename': 'outputFilename'} + _outputs_filenames = {"outputFilename": "outputFilename"} _redirect_x = False class DilateMaskInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputBinaryVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputBinaryVolume %s") + argstr="--inputBinaryVolume %s", + ) sizeStructuralElement = traits.Int( - desc= - "size of structural element. sizeStructuralElement=1 means that 3x3x3 structuring element for 3D", - argstr="--sizeStructuralElement %d") + desc="size of structural element. sizeStructuralElement=1 means that 3x3x3 structuring element for 3D", + argstr="--sizeStructuralElement %d", + ) lowerThreshold = traits.Float( - desc="Required: lowerThreshold value", argstr="--lowerThreshold %f") + desc="Required: lowerThreshold value", argstr="--lowerThreshold %f" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class DilateMaskOutputSpec(TraitedSpec): @@ -584,24 +603,23 @@ class DilateMaskOutputSpec(TraitedSpec): class DilateMask(SEMLikeCommandLine): """title: Dilate Image -category: Filtering.FeatureDetection + category: Filtering.FeatureDetection -description: Uses mathematical morphology to dilate the input images. + description: Uses mathematical morphology to dilate the input images. 
-version: 0.1.0.$Revision: 1 $(alpha) + version: 0.1.0.$Revision: 1 $(alpha) -documentation-url: http:://www.na-mic.org/ + documentation-url: http:://www.na-mic.org/ -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -contributor: This tool was developed by Mark Scully and Jeremy Bockholt. - -""" + contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + """ input_spec = DilateMaskInputSpec output_spec = DilateMaskOutputSpec _cmd = " DilateMask " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False @@ -609,11 +627,13 @@ class DumpBinaryTrainingVectorsInputSpec(CommandLineInputSpec): inputHeaderFilename = File( desc="Required: input header file name", exists=True, - argstr="--inputHeaderFilename %s") + argstr="--inputHeaderFilename %s", + ) inputVectorFilename = File( desc="Required: input vector filename", exists=True, - argstr="--inputVectorFilename %s") + argstr="--inputVectorFilename %s", + ) class DumpBinaryTrainingVectorsOutputSpec(TraitedSpec): @@ -623,19 +643,18 @@ class DumpBinaryTrainingVectorsOutputSpec(TraitedSpec): class DumpBinaryTrainingVectors(SEMLikeCommandLine): """title: Erode Image -category: Filtering.FeatureDetection - -description: Uses mathematical morphology to erode the input images. + category: Filtering.FeatureDetection -version: 0.1.0.$Revision: 1 $(alpha) + description: Uses mathematical morphology to erode the input images. -documentation-url: http:://www.na-mic.org/ + version: 0.1.0.$Revision: 1 $(alpha) -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http:://www.na-mic.org/ -contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + """ input_spec = DumpBinaryTrainingVectorsInputSpec output_spec = DumpBinaryTrainingVectorsOutputSpec @@ -648,21 +667,24 @@ class DistanceMapsInputSpec(CommandLineInputSpec): inputLabelVolume = File( desc="Required: input tissue label image", exists=True, - argstr="--inputLabelVolume %s") + argstr="--inputLabelVolume %s", + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) inputTissueLabel = traits.Int( - desc= - "Required: input integer value of tissue type used to calculate distance", - argstr="--inputTissueLabel %d") + desc="Required: input integer value of tissue type used to calculate distance", + argstr="--inputTissueLabel %d", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class DistanceMapsOutputSpec(TraitedSpec): @@ -672,41 +694,42 @@ class DistanceMapsOutputSpec(TraitedSpec): class DistanceMaps(SEMLikeCommandLine): """title: Mauerer Distance -category: Filtering.FeatureDetection - -description: Get the distance from a voxel to the nearest voxel of a given tissue type. + category: Filtering.FeatureDetection -version: 0.1.0.$Revision: 1 $(alpha) + description: Get the distance from a voxel to the nearest voxel of a given tissue type. 
-documentation-url: http:://www.na-mic.org/ + version: 0.1.0.$Revision: 1 $(alpha) -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http:://www.na-mic.org/ -contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: This tool was developed by Mark Scully and Jeremy Bockholt. + """ input_spec = DistanceMapsInputSpec output_spec = DistanceMapsOutputSpec _cmd = " DistanceMaps " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class STAPLEAnalysisInputSpec(CommandLineInputSpec): inputDimension = traits.Int( - desc="Required: input image Dimension 2 or 3", - argstr="--inputDimension %d") + desc="Required: input image Dimension 2 or 3", argstr="--inputDimension %d" + ) inputLabelVolume = InputMultiPath( File(exists=True), desc="Required: input label volume", - argstr="--inputLabelVolume %s...") + argstr="--inputLabelVolume %s...", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class STAPLEAnalysisOutputSpec(TraitedSpec): @@ -716,43 +739,46 @@ class STAPLEAnalysisOutputSpec(TraitedSpec): class STAPLEAnalysis(SEMLikeCommandLine): """title: Dilate Image -category: Filtering.FeatureDetection + category: Filtering.FeatureDetection -description: Uses mathematical morphology to dilate the input images. + description: Uses mathematical morphology to dilate the input images. -version: 0.1.0.$Revision: 1 $(alpha) + version: 0.1.0.$Revision: 1 $(alpha) -documentation-url: http:://www.na-mic.org/ + documentation-url: http:://www.na-mic.org/ -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -contributor: This tool was developed by Mark Scully and Jeremy Bockholt. - -""" + contributor: This tool was developed by Mark Scully and Jeremy Bockholt. 
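STAPLEAnalysis is the one interface in this hunk that takes a repeated input (an InputMultiPath with the "%s..." argstr), so the flag is emitted once per file. A hedged sketch over hypothetical rater segmentations:

from nipype.interfaces.semtools import STAPLEAnalysis  # assumed re-export

staple = STAPLEAnalysis()
staple.inputs.inputDimension = 3
staple.inputs.inputLabelVolume = ["rater1.nii", "rater2.nii", "rater3.nii"]
staple.inputs.outputVolume = True   # default name from _outputs_filenames
print(staple.cmdline)               # --inputLabelVolume repeats for each listed file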
+ """ input_spec = STAPLEAnalysisInputSpec output_spec = STAPLEAnalysisOutputSpec _cmd = " STAPLEAnalysis " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GradientAnisotropicDiffusionImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) numberOfIterations = traits.Int( - desc="Optional value for number of Iterations", - argstr="--numberOfIterations %d") + desc="Optional value for number of Iterations", argstr="--numberOfIterations %d" + ) timeStep = traits.Float( - desc="Time step for diffusion process", argstr="--timeStep %f") + desc="Time step for diffusion process", argstr="--timeStep %f" + ) conductance = traits.Float( - desc="Conductance for diffusion process", argstr="--conductance %f") + desc="Conductance for diffusion process", argstr="--conductance %f" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class GradientAnisotropicDiffusionImageFilterOutputSpec(TraitedSpec): @@ -762,18 +788,17 @@ class GradientAnisotropicDiffusionImageFilterOutputSpec(TraitedSpec): class GradientAnisotropicDiffusionImageFilter(SEMLikeCommandLine): """title: GradientAnisopropicDiffusionFilter -category: Filtering.FeatureDetection - -description: Image Smoothing using Gradient Anisotropic Diffuesion Filer + category: Filtering.FeatureDetection -contributor: This tool was developed by Eun Young Kim by modifying ITK Example + description: Image Smoothing using Gradient Anisotropic Diffuesion Filer -""" + contributor: This tool was developed by Eun Young Kim by modifying ITK Example + """ input_spec = GradientAnisotropicDiffusionImageFilterInputSpec output_spec = GradientAnisotropicDiffusionImageFilterOutputSpec _cmd = " GradientAnisotropicDiffusionImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False @@ -781,25 +806,27 @@ class CannyEdgeInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input tissue label image", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) variance = traits.Float( - desc= - "Variance and Maximum error are used in the Gaussian smoothing of the input image. See itkDiscreteGaussianImageFilter for information on these parameters.", - argstr="--variance %f") + desc="Variance and Maximum error are used in the Gaussian smoothing of the input image. See itkDiscreteGaussianImageFilter for information on these parameters.", + argstr="--variance %f", + ) upperThreshold = traits.Float( - desc= - "Threshold is the lowest allowed value in the output image. Its data type is the same as the data type of the output image. Any values below the Threshold level will be replaced with the OutsideValue parameter value, whose default is zero. ", - argstr="--upperThreshold %f") + desc="Threshold is the lowest allowed value in the output image. Its data type is the same as the data type of the output image. Any values below the Threshold level will be replaced with the OutsideValue parameter value, whose default is zero. ", + argstr="--upperThreshold %f", + ) lowerThreshold = traits.Float( - desc= - "Threshold is the lowest allowed value in the output image. 
Its data type is the same as the data type of the output image. Any values below the Threshold level will be replaced with the OutsideValue parameter value, whose default is zero. ", - argstr="--lowerThreshold %f") + desc="Threshold is the lowest allowed value in the output image. Its data type is the same as the data type of the output image. Any values below the Threshold level will be replaced with the OutsideValue parameter value, whose default is zero. ", + argstr="--lowerThreshold %f", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class CannyEdgeOutputSpec(TraitedSpec): @@ -809,22 +836,21 @@ class CannyEdgeOutputSpec(TraitedSpec): class CannyEdge(SEMLikeCommandLine): """title: Canny Edge Detection -category: Filtering.FeatureDetection - -description: Get the distance from a voxel to the nearest voxel of a given tissue type. + category: Filtering.FeatureDetection -version: 0.1.0.(alpha) + description: Get the distance from a voxel to the nearest voxel of a given tissue type. -documentation-url: http:://www.na-mic.org/ + version: 0.1.0.(alpha) -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http:://www.na-mic.org/ -contributor: This tool was written by Hans J. Johnson. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: This tool was written by Hans J. Johnson. + """ input_spec = CannyEdgeInputSpec output_spec = CannyEdgeOutputSpec _cmd = " CannyEdge " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/filtering/tests/__init__.py b/nipype/interfaces/semtools/filtering/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/filtering/tests/__init__.py +++ b/nipype/interfaces/semtools/filtering/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_CannyEdge.py b/nipype/interfaces/semtools/filtering/tests/test_auto_CannyEdge.py index cfbee7449c..222c460279 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_CannyEdge.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_CannyEdge.py @@ -1,31 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import CannyEdge def test_CannyEdge_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), - lowerThreshold=dict(argstr='--lowerThreshold %f', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + lowerThreshold=dict( + argstr="--lowerThreshold %f", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - upperThreshold=dict(argstr='--upperThreshold %f', ), - variance=dict(argstr='--variance %f', ), + upperThreshold=dict( + argstr="--upperThreshold %f", + ), + variance=dict( + argstr="--variance %f", + ), ) inputs = CannyEdge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CannyEdge_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + 
), + ) outputs = CannyEdge.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_CannySegmentationLevelSetImageFilter.py b/nipype/interfaces/semtools/filtering/tests/test_auto_CannySegmentationLevelSetImageFilter.py index b4196db823..21f8e5da6f 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_CannySegmentationLevelSetImageFilter.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_CannySegmentationLevelSetImageFilter.py @@ -1,28 +1,45 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import CannySegmentationLevelSetImageFilter def test_CannySegmentationLevelSetImageFilter_inputs(): input_map = dict( - advectionWeight=dict(argstr='--advectionWeight %f', ), - args=dict(argstr='%s', ), - cannyThreshold=dict(argstr='--cannyThreshold %f', ), - cannyVariance=dict(argstr='--cannyVariance %f', ), + advectionWeight=dict( + argstr="--advectionWeight %f", + ), + args=dict( + argstr="%s", + ), + cannyThreshold=dict( + argstr="--cannyThreshold %f", + ), + cannyVariance=dict( + argstr="--cannyVariance %f", + ), environ=dict( nohash=True, usedefault=True, ), - initialModel=dict(argstr='--initialModel %s', ), - initialModelIsovalue=dict(argstr='--initialModelIsovalue %f', ), - inputVolume=dict(argstr='--inputVolume %s', ), - maxIterations=dict(argstr='--maxIterations %d', ), + initialModel=dict( + argstr="--initialModel %s", + extensions=None, + ), + initialModelIsovalue=dict( + argstr="--initialModelIsovalue %f", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + maxIterations=dict( + argstr="--maxIterations %d", + ), outputSpeedVolume=dict( - argstr='--outputSpeedVolume %s', + argstr="--outputSpeedVolume %s", hash_files=False, ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -31,10 +48,16 @@ def test_CannySegmentationLevelSetImageFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CannySegmentationLevelSetImageFilter_outputs(): output_map = dict( - outputSpeedVolume=dict(), - outputVolume=dict(), + outputSpeedVolume=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), ) outputs = CannySegmentationLevelSetImageFilter.output_spec() diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_DilateImage.py b/nipype/interfaces/semtools/filtering/tests/test_auto_DilateImage.py index 7945ec8ade..2be2940aeb 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_DilateImage.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_DilateImage.py @@ -1,20 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import DilateImage def test_DilateImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputMaskVolume=dict(argstr='--inputMaskVolume %s', ), - inputRadius=dict(argstr='--inputRadius %d', ), - inputVolume=dict(argstr='--inputVolume %s', ), + inputMaskVolume=dict( + argstr="--inputMaskVolume %s", + extensions=None, + ), + inputRadius=dict( + argstr="--inputRadius %d", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", 
hash_files=False, ), ) @@ -23,8 +32,14 @@ def test_DilateImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DilateImage_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = DilateImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_DilateMask.py b/nipype/interfaces/semtools/filtering/tests/test_auto_DilateMask.py index 6bd6235e80..66cc444a94 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_DilateMask.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_DilateMask.py @@ -1,31 +1,48 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import DilateMask def test_DilateMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputBinaryVolume=dict(argstr='--inputBinaryVolume %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), - lowerThreshold=dict(argstr='--lowerThreshold %f', ), + inputBinaryVolume=dict( + argstr="--inputBinaryVolume %s", + extensions=None, + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + lowerThreshold=dict( + argstr="--lowerThreshold %f", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - sizeStructuralElement=dict(argstr='--sizeStructuralElement %d', ), + sizeStructuralElement=dict( + argstr="--sizeStructuralElement %d", + ), ) inputs = DilateMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DilateMask_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = DilateMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_DistanceMaps.py b/nipype/interfaces/semtools/filtering/tests/test_auto_DistanceMaps.py index f7aaf722c4..255145cfc6 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_DistanceMaps.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_DistanceMaps.py @@ -1,20 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import DistanceMaps def test_DistanceMaps_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputLabelVolume=dict(argstr='--inputLabelVolume %s', ), - inputMaskVolume=dict(argstr='--inputMaskVolume %s', ), - inputTissueLabel=dict(argstr='--inputTissueLabel %d', ), + inputLabelVolume=dict( + argstr="--inputLabelVolume %s", + extensions=None, + ), + inputMaskVolume=dict( + argstr="--inputMaskVolume %s", + extensions=None, + ), + inputTissueLabel=dict( + argstr="--inputTissueLabel %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -23,8 +32,14 @@ def test_DistanceMaps_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DistanceMaps_outputs(): - output_map = 
dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = DistanceMaps.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_DumpBinaryTrainingVectors.py b/nipype/interfaces/semtools/filtering/tests/test_auto_DumpBinaryTrainingVectors.py index 320bb76b77..b62a21fdbe 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_DumpBinaryTrainingVectors.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_DumpBinaryTrainingVectors.py @@ -1,23 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import DumpBinaryTrainingVectors def test_DumpBinaryTrainingVectors_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputHeaderFilename=dict(argstr='--inputHeaderFilename %s', ), - inputVectorFilename=dict(argstr='--inputVectorFilename %s', ), + inputHeaderFilename=dict( + argstr="--inputHeaderFilename %s", + extensions=None, + ), + inputVectorFilename=dict( + argstr="--inputVectorFilename %s", + extensions=None, + ), ) inputs = DumpBinaryTrainingVectors.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DumpBinaryTrainingVectors_outputs(): output_map = dict() outputs = DumpBinaryTrainingVectors.output_spec() diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_ErodeImage.py b/nipype/interfaces/semtools/filtering/tests/test_auto_ErodeImage.py index 9e096b0062..ae7140754f 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_ErodeImage.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_ErodeImage.py @@ -1,20 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import ErodeImage def test_ErodeImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputMaskVolume=dict(argstr='--inputMaskVolume %s', ), - inputRadius=dict(argstr='--inputRadius %d', ), - inputVolume=dict(argstr='--inputVolume %s', ), + inputMaskVolume=dict( + argstr="--inputMaskVolume %s", + extensions=None, + ), + inputRadius=dict( + argstr="--inputRadius %d", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -23,8 +32,14 @@ def test_ErodeImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ErodeImage_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = ErodeImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_FlippedDifference.py b/nipype/interfaces/semtools/filtering/tests/test_auto_FlippedDifference.py index 539660f73b..704bc01820 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_FlippedDifference.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_FlippedDifference.py @@ -1,19 +1,26 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from 
..featuredetection import FlippedDifference def test_FlippedDifference_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputMaskVolume=dict(argstr='--inputMaskVolume %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), + inputMaskVolume=dict( + argstr="--inputMaskVolume %s", + extensions=None, + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -22,8 +29,14 @@ def test_FlippedDifference_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FlippedDifference_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = FlippedDifference.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateBrainClippedImage.py b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateBrainClippedImage.py index 0a211f23d2..e48d622ab3 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateBrainClippedImage.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateBrainClippedImage.py @@ -1,20 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import GenerateBrainClippedImage def test_GenerateBrainClippedImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputImg=dict(argstr='--inputImg %s', ), - inputMsk=dict(argstr='--inputMsk %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputImg=dict( + argstr="--inputImg %s", + extensions=None, + ), + inputMsk=dict( + argstr="--inputMsk %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputFileName=dict( - argstr='--outputFileName %s', + argstr="--outputFileName %s", hash_files=False, ), ) @@ -23,8 +32,14 @@ def test_GenerateBrainClippedImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateBrainClippedImage_outputs(): - output_map = dict(outputFileName=dict(), ) + output_map = dict( + outputFileName=dict( + extensions=None, + ), + ) outputs = GenerateBrainClippedImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateSummedGradientImage.py b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateSummedGradientImage.py index 58d3f35c5a..b53396f396 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateSummedGradientImage.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateSummedGradientImage.py @@ -1,21 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import GenerateSummedGradientImage def test_GenerateSummedGradientImage_inputs(): input_map = dict( - MaximumGradient=dict(argstr='--MaximumGradient ', ), - args=dict(argstr='%s', ), + MaximumGradient=dict( + argstr="--MaximumGradient ", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - 
inputVolume1=dict(argstr='--inputVolume1 %s', ), - inputVolume2=dict(argstr='--inputVolume2 %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputVolume1=dict( + argstr="--inputVolume1 %s", + extensions=None, + ), + inputVolume2=dict( + argstr="--inputVolume2 %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputFileName=dict( - argstr='--outputFileName %s', + argstr="--outputFileName %s", hash_files=False, ), ) @@ -24,8 +35,14 @@ def test_GenerateSummedGradientImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateSummedGradientImage_outputs(): - output_map = dict(outputFileName=dict(), ) + output_map = dict( + outputFileName=dict( + extensions=None, + ), + ) outputs = GenerateSummedGradientImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateTestImage.py b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateTestImage.py index 1348e61d4b..c6e8fdfb12 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateTestImage.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateTestImage.py @@ -1,33 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import GenerateTestImage def test_GenerateTestImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), lowerBoundOfOutputVolume=dict( - argstr='--lowerBoundOfOutputVolume %f', ), + argstr="--lowerBoundOfOutputVolume %f", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - outputVolumeSize=dict(argstr='--outputVolumeSize %f', ), + outputVolumeSize=dict( + argstr="--outputVolumeSize %f", + ), upperBoundOfOutputVolume=dict( - argstr='--upperBoundOfOutputVolume %f', ), + argstr="--upperBoundOfOutputVolume %f", + ), ) inputs = GenerateTestImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateTestImage_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = GenerateTestImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_GradientAnisotropicDiffusionImageFilter.py b/nipype/interfaces/semtools/filtering/tests/test_auto_GradientAnisotropicDiffusionImageFilter.py index 27ebb18d29..c7828aaccc 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_GradientAnisotropicDiffusionImageFilter.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_GradientAnisotropicDiffusionImageFilter.py @@ -1,31 +1,47 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import GradientAnisotropicDiffusionImageFilter def test_GradientAnisotropicDiffusionImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - conductance=dict(argstr='--conductance %f', ), + args=dict( + argstr="%s", + ), + conductance=dict( + argstr="--conductance %f", + ), environ=dict( 
nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfIterations=dict(argstr='--numberOfIterations %d', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfIterations=dict( + argstr="--numberOfIterations %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - timeStep=dict(argstr='--timeStep %f', ), + timeStep=dict( + argstr="--timeStep %f", + ), ) inputs = GradientAnisotropicDiffusionImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GradientAnisotropicDiffusionImageFilter_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = GradientAnisotropicDiffusionImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_HammerAttributeCreator.py b/nipype/interfaces/semtools/filtering/tests/test_auto_HammerAttributeCreator.py index 9ad82598bb..8188ad0432 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_HammerAttributeCreator.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_HammerAttributeCreator.py @@ -1,27 +1,45 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import HammerAttributeCreator def test_HammerAttributeCreator_inputs(): input_map = dict( - Scale=dict(argstr='--Scale %d', ), - Strength=dict(argstr='--Strength %f', ), - args=dict(argstr='%s', ), + Scale=dict( + argstr="--Scale %d", + ), + Strength=dict( + argstr="--Strength %f", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputCSFVolume=dict(argstr='--inputCSFVolume %s', ), - inputGMVolume=dict(argstr='--inputGMVolume %s', ), - inputWMVolume=dict(argstr='--inputWMVolume %s', ), - outputVolumeBase=dict(argstr='--outputVolumeBase %s', ), + inputCSFVolume=dict( + argstr="--inputCSFVolume %s", + extensions=None, + ), + inputGMVolume=dict( + argstr="--inputGMVolume %s", + extensions=None, + ), + inputWMVolume=dict( + argstr="--inputWMVolume %s", + extensions=None, + ), + outputVolumeBase=dict( + argstr="--outputVolumeBase %s", + ), ) inputs = HammerAttributeCreator.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_HammerAttributeCreator_outputs(): output_map = dict() outputs = HammerAttributeCreator.output_spec() diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMean.py b/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMean.py index c6b1ef20a9..839bfe5f11 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMean.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMean.py @@ -1,20 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import NeighborhoodMean def test_NeighborhoodMean_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputMaskVolume=dict(argstr='--inputMaskVolume %s', ), - inputRadius=dict(argstr='--inputRadius %d', ), - inputVolume=dict(argstr='--inputVolume %s', ), + inputMaskVolume=dict( + 
argstr="--inputMaskVolume %s", + extensions=None, + ), + inputRadius=dict( + argstr="--inputRadius %d", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -23,8 +32,14 @@ def test_NeighborhoodMean_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NeighborhoodMean_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = NeighborhoodMean.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMedian.py b/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMedian.py index 2c2b2d585f..476a93595e 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMedian.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMedian.py @@ -1,20 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import NeighborhoodMedian def test_NeighborhoodMedian_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputMaskVolume=dict(argstr='--inputMaskVolume %s', ), - inputRadius=dict(argstr='--inputRadius %d', ), - inputVolume=dict(argstr='--inputVolume %s', ), + inputMaskVolume=dict( + argstr="--inputMaskVolume %s", + extensions=None, + ), + inputRadius=dict( + argstr="--inputRadius %d", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -23,8 +32,14 @@ def test_NeighborhoodMedian_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NeighborhoodMedian_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = NeighborhoodMedian.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_STAPLEAnalysis.py b/nipype/interfaces/semtools/filtering/tests/test_auto_STAPLEAnalysis.py index 75bb83315f..f6ee369414 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_STAPLEAnalysis.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_STAPLEAnalysis.py @@ -1,19 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import STAPLEAnalysis def test_STAPLEAnalysis_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputDimension=dict(argstr='--inputDimension %d', ), - inputLabelVolume=dict(argstr='--inputLabelVolume %s...', ), + inputDimension=dict( + argstr="--inputDimension %d", + ), + inputLabelVolume=dict( + argstr="--inputLabelVolume %s...", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -22,8 +27,14 @@ def test_STAPLEAnalysis_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_STAPLEAnalysis_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = STAPLEAnalysis.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_TextureFromNoiseImageFilter.py b/nipype/interfaces/semtools/filtering/tests/test_auto_TextureFromNoiseImageFilter.py index cd4b4a1af2..e16d7a9522 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_TextureFromNoiseImageFilter.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_TextureFromNoiseImageFilter.py @@ -1,19 +1,25 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import TextureFromNoiseImageFilter def test_TextureFromNoiseImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputRadius=dict(argstr='--inputRadius %d', ), - inputVolume=dict(argstr='--inputVolume %s', ), + inputRadius=dict( + argstr="--inputRadius %d", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -22,8 +28,14 @@ def test_TextureFromNoiseImageFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TextureFromNoiseImageFilter_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = TextureFromNoiseImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_TextureMeasureFilter.py b/nipype/interfaces/semtools/filtering/tests/test_auto_TextureMeasureFilter.py index 0ad523c7d0..7e6c4f6263 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_TextureMeasureFilter.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_TextureMeasureFilter.py @@ -1,21 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..featuredetection import TextureMeasureFilter def test_TextureMeasureFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - distance=dict(argstr='--distance %d', ), + args=dict( + argstr="%s", + ), + distance=dict( + argstr="--distance %d", + ), environ=dict( nohash=True, usedefault=True, ), - inputMaskVolume=dict(argstr='--inputMaskVolume %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), - insideROIValue=dict(argstr='--insideROIValue %f', ), + inputMaskVolume=dict( + argstr="--inputMaskVolume %s", + extensions=None, + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + insideROIValue=dict( + argstr="--insideROIValue %f", + ), outputFilename=dict( - argstr='--outputFilename %s', + argstr="--outputFilename %s", hash_files=False, ), ) @@ -24,8 +35,14 @@ def test_TextureMeasureFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TextureMeasureFilter_outputs(): - output_map = dict(outputFilename=dict(), ) + output_map = dict( + outputFilename=dict( + extensions=None, + ), + ) outputs = TextureMeasureFilter.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/semtools/filtering/tests/test_auto_UnbiasedNonLocalMeans.py b/nipype/interfaces/semtools/filtering/tests/test_auto_UnbiasedNonLocalMeans.py index b84cda1fc3..194d556a8e 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_UnbiasedNonLocalMeans.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_UnbiasedNonLocalMeans.py @@ -1,43 +1,58 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..denoising import UnbiasedNonLocalMeans def test_UnbiasedNonLocalMeans_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - hp=dict(argstr='--hp %f', ), + hp=dict( + argstr="--hp %f", + ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - ps=dict(argstr='--ps %f', ), + ps=dict( + argstr="--ps %f", + ), rc=dict( - argstr='--rc %s', - sep=',', + argstr="--rc %s", + sep=",", ), rs=dict( - argstr='--rs %s', - sep=',', + argstr="--rs %s", + sep=",", + ), + sigma=dict( + argstr="--sigma %f", ), - sigma=dict(argstr='--sigma %f', ), ) inputs = UnbiasedNonLocalMeans.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_UnbiasedNonLocalMeans_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = UnbiasedNonLocalMeans.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/legacy/__init__.py b/nipype/interfaces/semtools/legacy/__init__.py index 3309d49d62..de11d37760 100644 --- a/nipype/interfaces/semtools/legacy/__init__.py +++ b/nipype/interfaces/semtools/legacy/__init__.py @@ -1,3 +1 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .registration import scalartransform diff --git a/nipype/interfaces/semtools/legacy/registration.py b/nipype/interfaces/semtools/legacy/registration.py index 04bb425e3d..b0755aff40 100644 --- a/nipype/interfaces/semtools/legacy/registration.py +++ b/nipype/interfaces/semtools/legacy/registration.py @@ -1,74 +1,75 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class scalartransformInputSpec(CommandLineInputSpec): input_image = File( - desc="Image to tranform", exists=True, argstr="--input_image %s") + desc="Image to transform", exists=True, argstr="--input_image %s" + ) output_image = traits.Either( traits.Bool, File(), hash_files=False, desc="The transformed image", - argstr="--output_image %s") + argstr="--output_image %s", + ) transformation = traits.Either( traits.Bool, File(), hash_files=False, desc="Output file for transformation parameters", - argstr="--transformation %s") - invert = traits.Bool( - desc="Invert tranform before applying.", argstr="--invert ") + argstr="--transformation %s", + ) + invert = traits.Bool(desc="Invert transform before applying.", argstr="--invert ") deformation = File( - 
desc="Deformation field.", exists=True, argstr="--deformation %s") - h_field = traits.Bool( - desc="The deformation is an h-field.", argstr="--h_field ") + desc="Deformation field.", exists=True, argstr="--deformation %s" + ) + h_field = traits.Bool(desc="The deformation is an h-field.", argstr="--h_field ") interpolation = traits.Enum( "nearestneighbor", "linear", "cubic", desc="Interpolation type (nearestneighbor, linear, cubic)", - argstr="--interpolation %s") + argstr="--interpolation %s", + ) class scalartransformOutputSpec(TraitedSpec): output_image = File(desc="The transformed image", exists=True) - transformation = File( - desc="Output file for transformation parameters", exists=True) + transformation = File(desc="Output file for transformation parameters", exists=True) class scalartransform(SEMLikeCommandLine): """title: ScalarTransform (DTIProcess) -category: Legacy.Registration - -version: 1.0.0 + category: Legacy.Registration -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess + version: 1.0.0 -license: Copyright (c) Casey Goodlett. All rights reserved. - See http://www.ia.unc.edu/dev/Copyright.htm for details. - This software is distributed WITHOUT ANY WARRANTY; without even - the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR - PURPOSE. See the above copyright notices for more information. + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess -contributor: Casey Goodlett + license: Copyright (c) Casey Goodlett. All rights reserved. + See http://www.ia.unc.edu/dev/Copyright.htm for details. + This software is distributed WITHOUT ANY WARRANTY; without even + the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + PURPOSE. See the above copyright notices for more information. 
-""" + contributor: Casey Goodlett + """ input_spec = scalartransformInputSpec output_spec = scalartransformOutputSpec _cmd = " scalartransform " _outputs_filenames = { - 'output_image': 'output_image.nii', - 'transformation': 'transformation' + "output_image": "output_image.nii", + "transformation": "transformation", } _redirect_x = False diff --git a/nipype/interfaces/semtools/legacy/tests/__init__.py b/nipype/interfaces/semtools/legacy/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/legacy/tests/__init__.py +++ b/nipype/interfaces/semtools/legacy/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/legacy/tests/test_auto_scalartransform.py b/nipype/interfaces/semtools/legacy/tests/test_auto_scalartransform.py index f9ff60cfd9..6af2e14039 100644 --- a/nipype/interfaces/semtools/legacy/tests/test_auto_scalartransform.py +++ b/nipype/interfaces/semtools/legacy/tests/test_auto_scalartransform.py @@ -1,26 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import scalartransform def test_scalartransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - deformation=dict(argstr='--deformation %s', ), + args=dict( + argstr="%s", + ), + deformation=dict( + argstr="--deformation %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), - h_field=dict(argstr='--h_field ', ), - input_image=dict(argstr='--input_image %s', ), - interpolation=dict(argstr='--interpolation %s', ), - invert=dict(argstr='--invert ', ), + h_field=dict( + argstr="--h_field ", + ), + input_image=dict( + argstr="--input_image %s", + extensions=None, + ), + interpolation=dict( + argstr="--interpolation %s", + ), + invert=dict( + argstr="--invert ", + ), output_image=dict( - argstr='--output_image %s', + argstr="--output_image %s", hash_files=False, ), transformation=dict( - argstr='--transformation %s', + argstr="--transformation %s", hash_files=False, ), ) @@ -29,10 +42,16 @@ def test_scalartransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_scalartransform_outputs(): output_map = dict( - output_image=dict(), - transformation=dict(), + output_image=dict( + extensions=None, + ), + transformation=dict( + extensions=None, + ), ) outputs = scalartransform.output_spec() diff --git a/nipype/interfaces/semtools/registration/__init__.py b/nipype/interfaces/semtools/registration/__init__.py index 33bd60ad59..808e0f67eb 100644 --- a/nipype/interfaces/semtools/registration/__init__.py +++ b/nipype/interfaces/semtools/registration/__init__.py @@ -1,7 +1,4 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -from .specialized import (VBRAINSDemonWarp, BRAINSDemonWarp, - BRAINSTransformFromFiducials) +from .specialized import VBRAINSDemonWarp, BRAINSDemonWarp, BRAINSTransformFromFiducials from .brainsresample import BRAINSResample from .brainsfit import BRAINSFit from .brainsresize import BRAINSResize diff --git a/nipype/interfaces/semtools/registration/brainsfit.py b/nipype/interfaces/semtools/registration/brainsfit.py index 6142aac418..0ed5dd2be9 100644 --- a/nipype/interfaces/semtools/registration/brainsfit.py +++ b/nipype/interfaces/semtools/registration/brainsfit.py @@ -1,158 +1,159 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the 
mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class BRAINSFitInputSpec(CommandLineInputSpec): fixedVolume = File( - desc= - "Input fixed image (the moving image will be transformed into this image space).", + desc="Input fixed image (the moving image will be transformed into this image space).", exists=True, - argstr="--fixedVolume %s") + argstr="--fixedVolume %s", + ) movingVolume = File( - desc= - "Input moving image (this image will be transformed into the fixed image space).", + desc="Input moving image (this image will be transformed into the fixed image space).", exists=True, - argstr="--movingVolume %s") + argstr="--movingVolume %s", + ) samplingPercentage = traits.Float( - desc= - "Fraction of voxels of the fixed image that will be used for registration. The number has to be larger than zero and less or equal to one. Higher values increase the computation time but may give more accurate results. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is 0.002 (use approximately 0.2% of voxels, resulting in 100000 samples in a 512x512x192 volume) to provide a very fast registration in most cases. Typical values range from 0.01 (1%) for low detail images to 0.2 (20%) for high detail images.", - argstr="--samplingPercentage %f") + desc="Fraction of voxels of the fixed image that will be used for registration. The number has to be larger than zero and less or equal to one. Higher values increase the computation time but may give more accurate results. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is 0.002 (use approximately 0.2% of voxels, resulting in 100000 samples in a 512x512x192 volume) to provide a very fast registration in most cases. Typical values range from 0.01 (1%) for low detail images to 0.2 (20%) for high detail images.", + argstr="--samplingPercentage %f", + ) splineGridSize = InputMultiPath( traits.Int, - desc= - "Number of BSpline grid subdivisions along each axis of the fixed image, centered on the image space. Values must be 3 or higher for the BSpline to be correctly computed.", + desc="Number of BSpline grid subdivisions along each axis of the fixed image, centered on the image space. Values must be 3 or higher for the BSpline to be correctly computed.", sep=",", - argstr="--splineGridSize %s") + argstr="--splineGridSize %s", + ) linearTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Output estimated transform - in case the computed transform is not BSpline. NOTE: You must set at least one output object (transform and/or output volume).", - argstr="--linearTransform %s") + desc="(optional) Output estimated transform - in case the computed transform is not BSpline. NOTE: You must set at least one output object (transform and/or output volume).", + argstr="--linearTransform %s", + ) bsplineTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Output estimated transform - in case the computed transform is BSpline. NOTE: You must set at least one output object (transform and/or output volume).", - argstr="--bsplineTransform %s") + desc="(optional) Output estimated transform - in case the computed transform is BSpline. 
NOTE: You must set at least one output object (transform and/or output volume).", + argstr="--bsplineTransform %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Output image: the moving image warped to the fixed image space. NOTE: You must set at least one output object (transform and/or output volume).", - argstr="--outputVolume %s") + desc="(optional) Output image: the moving image warped to the fixed image space. NOTE: You must set at least one output object (transform and/or output volume).", + argstr="--outputVolume %s", + ) initialTransform = File( - desc= - "Transform to be applied to the moving image to initialize the registration. This can only be used if Initialize Transform Mode is Off.", + desc="Transform to be applied to the moving image to initialize the registration. This can only be used if Initialize Transform Mode is Off.", exists=True, - argstr="--initialTransform %s") + argstr="--initialTransform %s", + ) initializeTransformMode = traits.Enum( "Off", "useMomentsAlign", "useCenterOfHeadAlign", "useGeometryAlign", "useCenterOfROIAlign", - desc= - "Determine how to initialize the transform center. useMomentsAlign assumes that the center of mass of the images represent similar structures. useCenterOfHeadAlign attempts to use the top of head and shape of neck to drive a center of mass estimate. useGeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. Off assumes that the physical space of the images are close. This flag is mutually exclusive with the Initialization transform.", - argstr="--initializeTransformMode %s") + desc="Determine how to initialize the transform center. useMomentsAlign assumes that the center of mass of the images represent similar structures. useCenterOfHeadAlign attempts to use the top of head and shape of neck to drive a center of mass estimate. useGeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. Off assumes that the physical space of the images are close. This flag is mutually exclusive with the Initialization transform.", + argstr="--initializeTransformMode %s", + ) useRigid = traits.Bool( - desc= - "Perform a rigid registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useRigid ") + desc="Perform a rigid registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useRigid ", + ) useScaleVersor3D = traits.Bool( - desc= - "Perform a ScaleVersor3D registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useScaleVersor3D ") + desc="Perform a ScaleVersor3D registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useScaleVersor3D ", + ) useScaleSkewVersor3D = traits.Bool( - desc= - "Perform a ScaleSkewVersor3D registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useScaleSkewVersor3D ") + desc="Perform a ScaleSkewVersor3D registration as part of the sequential registration steps. 
This family of options overrides the use of transformType if any of them are set.", + argstr="--useScaleSkewVersor3D ", + ) useAffine = traits.Bool( - desc= - "Perform an Affine registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useAffine ") + desc="Perform an Affine registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useAffine ", + ) useBSpline = traits.Bool( - desc= - "Perform a BSpline registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useBSpline ") + desc="Perform a BSpline registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useBSpline ", + ) useSyN = traits.Bool( - desc= - "Perform a SyN registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useSyN ") + desc="Perform a SyN registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useSyN ", + ) useComposite = traits.Bool( - desc= - "Perform a Composite registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useComposite ") + desc="Perform a Composite registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useComposite ", + ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", - desc= - "Specifies a mask to only consider a certain image region for the registration. If ROIAUTO is chosen, then the mask is computed using Otsu thresholding and hole filling. If ROI is chosen then the mask has to be specified as in input.", - argstr="--maskProcessingMode %s") + desc="Specifies a mask to only consider a certain image region for the registration. If ROIAUTO is chosen, then the mask is computed using Otsu thresholding and hole filling. If ROI is chosen then the mask has to be specified as in input.", + argstr="--maskProcessingMode %s", + ) fixedBinaryVolume = File( - desc= - "Fixed Image binary mask volume, required if Masking Option is ROI. Image areas where the mask volume has zero value are ignored during the registration.", + desc="Fixed Image binary mask volume, required if Masking Option is ROI. Image areas where the mask volume has zero value are ignored during the registration.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( - desc= - "Moving Image binary mask volume, required if Masking Option is ROI. Image areas where the mask volume has zero value are ignored during the registration.", + desc="Moving Image binary mask volume, required if Masking Option is ROI. Image areas where the mask volume has zero value are ignored during the registration.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) outputFixedVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "ROI that is automatically computed from the fixed image. Only available if Masking Option is ROIAUTO. 
Image areas where the mask volume has zero value are ignored during the registration.", - argstr="--outputFixedVolumeROI %s") + desc="ROI that is automatically computed from the fixed image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", + argstr="--outputFixedVolumeROI %s", + ) outputMovingVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "ROI that is automatically computed from the moving image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", - argstr="--outputMovingVolumeROI %s") + desc="ROI that is automatically computed from the moving image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", + argstr="--outputMovingVolumeROI %s", + ) useROIBSpline = traits.Bool( - desc= - "If enabled then the bounding box of the input ROIs defines the BSpline grid support region. Otherwise the BSpline grid support region is the whole fixed image.", - argstr="--useROIBSpline ") + desc="If enabled then the bounding box of the input ROIs defines the BSpline grid support region. Otherwise the BSpline grid support region is the whole fixed image.", + argstr="--useROIBSpline ", + ) histogramMatch = traits.Bool( - desc= - "Apply histogram matching operation for the input images to make them more similar. This is suitable for images of the same modality that may have different brightness or contrast, but the same overall intensity profile. Do NOT use if registering images from different modalities.", - argstr="--histogramMatch ") + desc="Apply histogram matching operation for the input images to make them more similar. This is suitable for images of the same modality that may have different brightness or contrast, but the same overall intensity profile. Do NOT use if registering images from different modalities.", + argstr="--histogramMatch ", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "Apply median filtering to reduce noise in the input volumes. The 3 values specify the radius for the optional MedianImageFilter preprocessing in all 3 directions (in voxels).", + desc="Apply median filtering to reduce noise in the input volumes. The 3 values specify the radius for the optional MedianImageFilter preprocessing in all 3 directions (in voxels).", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) removeIntensityOutliers = traits.Float( - desc= - "Remove very high and very low intensity voxels from the input volumes. The parameter specifies the half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. If the value of 0.005 is given, the 0.005% of both tails will be thrown away, so 0.01% of intensities in total would be ignored in the statistic calculation.", - argstr="--removeIntensityOutliers %f") + desc="Remove very high and very low intensity voxels from the input volumes. The parameter specifies the half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. If the value of 0.005 is given, the 0.005% of both tails will be thrown away, so 0.01% of intensities in total would be ignored in the statistic calculation.", + argstr="--removeIntensityOutliers %f", + ) fixedVolume2 = File( - desc= - "Input fixed image that will be used for multimodal registration. 
(the moving image will be transformed into this image space).", + desc="Input fixed image that will be used for multimodal registration. (the moving image will be transformed into this image space).", exists=True, - argstr="--fixedVolume2 %s") + argstr="--fixedVolume2 %s", + ) movingVolume2 = File( - desc= - "Input moving image that will be used for multimodal registration(this image will be transformed into the fixed image space).", + desc="Input moving image that will be used for multimodal registration (this image will be transformed into the fixed image space).", exists=True, - argstr="--movingVolume2 %s") + argstr="--movingVolume2 %s", + ) outputVolumePixelType = traits.Enum( "float", "short", @@ -161,15 +162,16 @@ class BRAINSFitInputSpec(CommandLineInputSpec): "uint", "uchar", desc="Data type for representing a voxel of the Output Volume.", - argstr="--outputVolumePixelType %s") + argstr="--outputVolumePixelType %s", + ) backgroundFillValue = traits.Float( - desc= - "This value will be used for filling those areas of the output image that have no corresponding voxels in the input moving image.", - argstr="--backgroundFillValue %f") + desc="This value will be used for filling those areas of the output image that have no corresponding voxels in the input moving image.", + argstr="--backgroundFillValue %f", + ) scaleOutputValues = traits.Bool( - desc= - "If true, and the voxel values do not fit within the minimum and maximum values of the desired outputVolumePixelType, then linearly scale the min/max output image voxel values to fit within the min/max range of the outputVolumePixelType.", - argstr="--scaleOutputValues ") + desc="If true, and the voxel values do not fit within the minimum and maximum values of the desired outputVolumePixelType, then linearly scale the min/max output image voxel values to fit within the min/max range of the outputVolumePixelType.", + argstr="--scaleOutputValues ", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -181,231 +183,231 @@ class BRAINSFitInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, Hamming, Cosine, Welch, Lanczos, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, Hamming, Cosine, Welch, Lanczos, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", + argstr="--interpolationMode %s", + ) numberOfIterations = InputMultiPath( traits.Int, - desc= - "The maximum number of iterations to try before stopping the optimization. 
When using a lower value (500-1000) then the registration is forced to terminate earlier but there is a higher risk of stopping before an optimal solution is reached.", sep=",", - argstr="--numberOfIterations %s") + argstr="--numberOfIterations %s", + ) maximumStepLength = traits.Float( - desc= - "Starting step length of the optimizer. In general, higher values allow for recovering larger initial misalignments but there is an increased chance that the registration will not converge.", - argstr="--maximumStepLength %f") + desc="Starting step length of the optimizer. In general, higher values allow for recovering larger initial misalignments but there is an increased chance that the registration will not converge.", + argstr="--maximumStepLength %f", + ) minimumStepLength = InputMultiPath( traits.Float, - desc= - "Each step in the optimization takes steps at least this big. When none are possible, registration is complete. Smaller values allows the optimizer to make smaller adjustments, but the registration time may increase.", + desc="Each step in the optimization takes steps at least this big. When none are possible, registration is complete. Smaller values allow the optimizer to make smaller adjustments, but the registration time may increase.", sep=",", - argstr="--minimumStepLength %s") + argstr="--minimumStepLength %s", + ) relaxationFactor = traits.Float( - desc= - "Specifies how quickly the optimization step length is decreased during registration. The value must be larger than 0 and smaller than 1. Larger values result in slower step size decrease, which allow for recovering larger initial misalignments but it increases the registration time and the chance that the registration will not converge.", - argstr="--relaxationFactor %f") + desc="Specifies how quickly the optimization step length is decreased during registration. The value must be larger than 0 and smaller than 1. Larger values result in slower step size decrease, which allows for recovering larger initial misalignments but it increases the registration time and the chance that the registration will not converge.", + argstr="--relaxationFactor %f", + ) translationScale = traits.Float( - desc= - "How much to scale up changes in position (in mm) compared to unit rotational changes (in radians) -- decrease this to allow for more rotation in the search pattern.", - argstr="--translationScale %f") + desc="How much to scale up changes in position (in mm) compared to unit rotational changes (in radians) -- decrease this to allow for more rotation in the search pattern.", + argstr="--translationScale %f", + ) reproportionScale = traits.Float( - desc= - "ScaleVersor3D 'Scale' compensation factor. Increase this to allow for more rescaling in a ScaleVersor3D or ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", - argstr="--reproportionScale %f") + desc="ScaleVersor3D 'Scale' compensation factor. Increase this to allow for more rescaling in a ScaleVersor3D or ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", + argstr="--reproportionScale %f", + ) skewScale = traits.Float( - desc= - "ScaleSkewVersor3D Skew compensation factor. Increase this to allow for more skew in a ScaleSkewVersor3D search pattern. 
1.0 works well with a translationScale of 1000.0", + argstr="--skewScale %f", + ) maxBSplineDisplacement = traits.Float( - desc= - "Maximum allowed displacements in image physical coordinates (mm) for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", - argstr="--maxBSplineDisplacement %f") + desc="Maximum allowed displacements in image physical coordinates (mm) for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", + argstr="--maxBSplineDisplacement %f", + ) fixedVolumeTimeIndex = traits.Int( - desc= - "The index in the time series for the 3D fixed image to fit. Only allowed if the fixed input volume is 4-dimensional.", - argstr="--fixedVolumeTimeIndex %d") + desc="The index in the time series for the 3D fixed image to fit. Only allowed if the fixed input volume is 4-dimensional.", + argstr="--fixedVolumeTimeIndex %d", + ) movingVolumeTimeIndex = traits.Int( - desc= - "The index in the time series for the 3D moving image to fit. Only allowed if the moving input volume is 4-dimensional", - argstr="--movingVolumeTimeIndex %d") + desc="The index in the time series for the 3D moving image to fit. Only allowed if the moving input volume is 4-dimensional", + argstr="--movingVolumeTimeIndex %d", + ) numberOfHistogramBins = traits.Int( - desc= - "The number of histogram levels used for mutual information metric estimation.", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels used for mutual information metric estimation.", + argstr="--numberOfHistogramBins %d", + ) numberOfMatchPoints = traits.Int( - desc= - "Number of histogram match points used for mutual information metric estimation.", - argstr="--numberOfMatchPoints %d") + desc="Number of histogram match points used for mutual information metric estimation.", + argstr="--numberOfMatchPoints %d", + ) costMetric = traits.Enum( "MMI", "MSE", "NC", "MIH", - desc= - "The cost metric to be used during fitting. Defaults to MMI. Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC (Normalized Correlation), MC (Match Cardinality for binary images)", - argstr="--costMetric %s") + desc="The cost metric to be used during fitting. Defaults to MMI. Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC (Normalized Correlation), MC (Match Cardinality for binary images)", + argstr="--costMetric %s", + ) maskInferiorCutOffFromCenter = traits.Float( - desc= - "If Initialize Transform Mode is set to useCenterOfHeadAlign or Masking Option is ROIAUTO then this value defines the how much is cut of from the inferior part of the image. The cut-off distance is specified in millimeters, relative to the image center. If the value is 1000 or larger then no cut-off performed.", - argstr="--maskInferiorCutOffFromCenter %f") + desc="If Initialize Transform Mode is set to useCenterOfHeadAlign or Masking Option is ROIAUTO then this value defines the how much is cut of from the inferior part of the image. The cut-off distance is specified in millimeters, relative to the image center. 
If the value is 1000 or larger then no cut-off performed.", + argstr="--maskInferiorCutOffFromCenter %f", + ) ROIAutoDilateSize = traits.Float( - desc= - "This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. A setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", - argstr="--ROIAutoDilateSize %f") + desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. A setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", + argstr="--ROIAutoDilateSize %f", + ) ROIAutoClosingSize = traits.Float( - desc= - "This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the hole closing size in mm. It is rounded up to the nearest whole pixel size in each direction. The default is to use a closing size of 9mm. For mouse data this value may need to be reset to 0.9 or smaller.", - argstr="--ROIAutoClosingSize %f") + desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the hole closing size in mm. It is rounded up to the nearest whole pixel size in each direction. The default is to use a closing size of 9mm. For mouse data this value may need to be reset to 0.9 or smaller.", + argstr="--ROIAutoClosingSize %f", + ) numberOfSamples = traits.Int( - desc= - "The number of voxels sampled for mutual information computation. Increase this for higher accuracy, at the cost of longer computation time., NOTE that it is suggested to use samplingPercentage instead of this option. However, if set to non-zero, numberOfSamples overwrites the samplingPercentage option. ", - argstr="--numberOfSamples %d") + desc="The number of voxels sampled for mutual information computation. Increase this for higher accuracy, at the cost of longer computation time., NOTE that it is suggested to use samplingPercentage instead of this option. However, if set to non-zero, numberOfSamples overwrites the samplingPercentage option. ", + argstr="--numberOfSamples %d", + ) strippedOutputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overridden if either bsplineTransform or linearTransform is set.", - argstr="--strippedOutputTransform %s") + desc="Rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overridden if either bsplineTransform or linearTransform is set.", + argstr="--strippedOutputTransform %s", + ) transformType = InputMultiPath( traits.Str, - desc= - "Specifies a list of registration types to be used. The valid types are, Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, BSpline and SyN. Specifying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.", + desc="Specifies a list of registration types to be used. The valid types are, Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, BSpline and SyN. 
Specifying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.", sep=",", - argstr="--transformType %s") + argstr="--transformType %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", - argstr="--outputTransform %s") + desc="(optional) Filename to which to save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", + argstr="--outputTransform %s", + ) initializeRegistrationByCurrentGenericTransform = traits.Bool( - desc= - "If this flag is ON, the current generic composite transform, resulted from the linear registration stages, is set to initialize the follow nonlinear registration process. However, by the default behaviour, the moving image is first warped based on the existant transform before it is passed to the BSpline registration filter. It is done to speed up the BSpline registration by reducing the computations of composite transform Jacobian.", - argstr="--initializeRegistrationByCurrentGenericTransform ") + desc="If this flag is ON, the current generic composite transform, resulting from the linear registration stages, is set to initialize the following nonlinear registration process. However, by the default behaviour, the moving image is first warped based on the existent transform before it is passed to the BSpline registration filter. It is done to speed up the BSpline registration by reducing the computations of composite transform Jacobian.", + argstr="--initializeRegistrationByCurrentGenericTransform ", + ) failureExitCode = traits.Int( - desc= - "If the fit fails, exit with this status code. (It can be used to force a successfult exit status of (0) if the registration fails due to reaching the maximum number of iterations.", - argstr="--failureExitCode %d") + desc="If the fit fails, exit with this status code. It can be used to force a successful exit status of (0) if the registration fails due to reaching the maximum number of iterations.", + argstr="--failureExitCode %d", + ) writeTransformOnFailure = traits.Bool( - desc= - "Flag to save the final transform even if the numberOfIterations are reached without convergence. (Intended for use when --failureExitCode 0 )", - argstr="--writeTransformOnFailure ") + desc="Flag to save the final transform even if the numberOfIterations are reached without convergence. (Intended for use when --failureExitCode 0 )", + argstr="--writeTransformOnFailure ", + ) numberOfThreads = traits.Int( - desc= - "Explicitly specify the maximum number of threads to use. (default is auto-detected)", - argstr="--numberOfThreads %d") + desc="Explicitly specify the maximum number of threads to use. (default is auto-detected)", + argstr="--numberOfThreads %d", + ) debugLevel = traits.Int( - desc= - "Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", - argstr="--debugLevel %d") + desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", + argstr="--debugLevel %d", + ) costFunctionConvergenceFactor = traits.Float( - desc= - "From itkLBFGSBOptimizer.h: Set/Get the CostFunctionConvergenceFactor.
Algorithm terminates when the reduction in cost function is less than (factor * epsmcj) where epsmch is the machine precision. Typical values for factor: 1e+12 for low accuracy; 1e+7 for moderate accuracy and 1e+1 for extremely high accuracy. 1e+9 seems to work well., ", - argstr="--costFunctionConvergenceFactor %f") + desc="From itkLBFGSBOptimizer.h: Set/Get the CostFunctionConvergenceFactor. Algorithm terminates when the reduction in cost function is less than (factor * epsmch) where epsmch is the machine precision. Typical values for factor: 1e+12 for low accuracy; 1e+7 for moderate accuracy and 1e+1 for extremely high accuracy. 1e+9 seems to work well., ", + argstr="--costFunctionConvergenceFactor %f", + ) projectedGradientTolerance = traits.Float( - desc= - "From itkLBFGSBOptimizer.h: Set/Get the ProjectedGradientTolerance. Algorithm terminates when the project gradient is below the tolerance. Default lbfgsb value is 1e-5, but 1e-4 seems to work well., ", - argstr="--projectedGradientTolerance %f") + desc="From itkLBFGSBOptimizer.h: Set/Get the ProjectedGradientTolerance. Algorithm terminates when the projected gradient is below the tolerance. Default lbfgsb value is 1e-5, but 1e-4 seems to work well., ", + argstr="--projectedGradientTolerance %f", + ) maximumNumberOfEvaluations = traits.Int( - desc= - "Maximum number of evaluations for line search in lbfgsb optimizer.", - argstr="--maximumNumberOfEvaluations %d") + desc="Maximum number of evaluations for line search in lbfgsb optimizer.", + argstr="--maximumNumberOfEvaluations %d", + ) maximumNumberOfCorrections = traits.Int( desc="Maximum number of corrections in lbfgsb optimizer.", - argstr="--maximumNumberOfCorrections %d") + argstr="--maximumNumberOfCorrections %d", + ) gui = traits.Bool( - desc= - "Display intermediate image volumes for debugging. NOTE: This is not part of the standard build sytem, and probably does nothing on your installation.", - argstr="--gui ") + desc="Display intermediate image volumes for debugging. NOTE: This is not part of the standard build system, and probably does nothing on your installation.", + argstr="--gui ", + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) metricSamplingStrategy = traits.Enum( "Random", - desc= - "It defines the method that registration filter uses to sample the input fixed image. Only Random is supported for now.", - argstr="--metricSamplingStrategy %s") + desc="It defines the method that registration filter uses to sample the input fixed image. Only Random is supported for now.", + argstr="--metricSamplingStrategy %s", + ) logFileReport = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "A file to write out final information report in CSV file: MetricName,MetricValue,FixedImageName,FixedMaskName,MovingImageName,MovingMaskName", - argstr="--logFileReport %s") + desc="A file to write out final information report in CSV file: MetricName,MetricValue,FixedImageName,FixedMaskName,MovingImageName,MovingMaskName", + argstr="--logFileReport %s", + ) writeOutputTransformInFloat = traits.Bool( - desc= - "By default, the output registration transforms (either the output composite transform or each transform component) are written to the disk in double precision. If this flag is ON, the output transforms will be written in single (float) precision.
It is especially important if the output transform is a displacement field transform, or it is a composite transform that includes several displacement fields.", - argstr="--writeOutputTransformInFloat ") + desc="By default, the output registration transforms (either the output composite transform or each transform component) are written to the disk in double precision. If this flag is ON, the output transforms will be written in single (float) precision. It is especially important if the output transform is a displacement field transform, or it is a composite transform that includes several displacement fields.", + argstr="--writeOutputTransformInFloat ", + ) class BRAINSFitOutputSpec(TraitedSpec): linearTransform = File( - desc= - "(optional) Output estimated transform - in case the computed transform is not BSpline. NOTE: You must set at least one output object (transform and/or output volume).", - exists=True) + desc="(optional) Output estimated transform - in case the computed transform is not BSpline. NOTE: You must set at least one output object (transform and/or output volume).", + exists=True, + ) bsplineTransform = File( - desc= - "(optional) Output estimated transform - in case the computed transform is BSpline. NOTE: You must set at least one output object (transform and/or output volume).", - exists=True) + desc="(optional) Output estimated transform - in case the computed transform is BSpline. NOTE: You must set at least one output object (transform and/or output volume).", + exists=True, + ) outputVolume = File( - desc= - "(optional) Output image: the moving image warped to the fixed image space. NOTE: You must set at least one output object (transform and/or output volume).", - exists=True) + desc="(optional) Output image: the moving image warped to the fixed image space. NOTE: You must set at least one output object (transform and/or output volume).", + exists=True, + ) outputFixedVolumeROI = File( - desc= - "ROI that is automatically computed from the fixed image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", - exists=True) + desc="ROI that is automatically computed from the fixed image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", + exists=True, + ) outputMovingVolumeROI = File( - desc= - "ROI that is automatically computed from the moving image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", - exists=True) + desc="ROI that is automatically computed from the moving image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", + exists=True, + ) strippedOutputTransform = File( - desc= - "Rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overridden if either bsplineTransform or linearTransform is set.", - exists=True) + desc="Rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overridden if either bsplineTransform or linearTransform is set.", + exists=True, + ) outputTransform = File( - desc= - "(optional) Filename to which save the (optional) estimated transform. 
NOTE: You must select either the outputTransform or the outputVolume option.", - exists=True) + desc="(optional) Filename to which to save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", + exists=True, + ) logFileReport = File( - desc= - "A file to write out final information report in CSV file: MetricName,MetricValue,FixedImageName,FixedMaskName,MovingImageName,MovingMaskName", - exists=True) + desc="A file to write out final information report in CSV file: MetricName,MetricValue,FixedImageName,FixedMaskName,MovingImageName,MovingMaskName", + exists=True, + ) class BRAINSFit(SEMLikeCommandLine): """title: General Registration (BRAINS) -category: Registration - -description: Register a three-dimensional volume to a reference volume (Mattes Mutual Information by default). Full documentation avalable here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSFit. Method described in BRAINSFit: Mutual Information Registrations of Whole-Brain 3D Images, Using the Insight Toolkit, Johnson H.J., Harris G., Williams K., The Insight Journal, 2007. http://hdl.handle.net/1926/1291 + category: Registration -version: 3.0.0 + description: Register a three-dimensional volume to a reference volume (Mattes Mutual Information by default). Full documentation available here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSFit. Method described in BRAINSFit: Mutual Information Registrations of Whole-Brain 3D Images, Using the Insight Toolkit, Johnson H.J., Harris G., Williams K., The Insight Journal, 2007. http://hdl.handle.net/1926/1291 -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSFit + version: 3.0.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSFit -contributor: Hans J. Johnson, hans-johnson -at- uiowa.edu, http://www.psychiatry.uiowa.edu + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5) 1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard + contributor: Hans J.
Johnson, hans-johnson -at- uiowa.edu, http://www.psychiatry.uiowa.edu + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5) 1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard + """ input_spec = BRAINSFitInputSpec output_spec = BRAINSFitOutputSpec _cmd = " BRAINSFit " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'bsplineTransform': 'bsplineTransform.h5', - 'outputTransform': 'outputTransform.h5', - 'outputFixedVolumeROI': 'outputFixedVolumeROI.nii', - 'strippedOutputTransform': 'strippedOutputTransform.h5', - 'outputMovingVolumeROI': 'outputMovingVolumeROI.nii', - 'linearTransform': 'linearTransform.h5', - 'logFileReport': 'logFileReport' + "outputVolume": "outputVolume.nii", + "bsplineTransform": "bsplineTransform.h5", + "outputTransform": "outputTransform.h5", + "outputFixedVolumeROI": "outputFixedVolumeROI.nii", + "strippedOutputTransform": "strippedOutputTransform.h5", + "outputMovingVolumeROI": "outputMovingVolumeROI.nii", + "linearTransform": "linearTransform.h5", + "logFileReport": "logFileReport", } _redirect_x = False
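For orientation, a minimal usage sketch of the BRAINSFit interface reformatted above; this is not part of the patch, the file names and option values are hypothetical, and a working BRAINSFit executable on PATH is assumed:

from nipype.interfaces.semtools import BRAINSFit

fit = BRAINSFit()
fit.inputs.fixedVolume = "fixed.nii"    # hypothetical input image
fit.inputs.movingVolume = "moving.nii"  # hypothetical input image
fit.inputs.transformType = ["Rigid", "Affine"]  # joined with sep="," into --transformType
fit.inputs.outputTransform = True  # True lets nipype auto-name it per _outputs_filenames (outputTransform.h5)
print(fit.cmdline)   # inspect the generated BRAINSFit command line without running it
# result = fit.run() # would execute the tool; see result.outputs.outputTransform
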
diff --git a/nipype/interfaces/semtools/registration/brainsresample.py b/nipype/interfaces/semtools/registration/brainsresample.py index f9ea80acbd..37f2eeb69b 100644 --- a/nipype/interfaces/semtools/registration/brainsresample.py +++ b/nipype/interfaces/semtools/registration/brainsresample.py @@ -1,29 +1,30 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class BRAINSResampleInputSpec(CommandLineInputSpec): - inputVolume = File( - desc="Image To Warp", exists=True, argstr="--inputVolume %s") + inputVolume = File(desc="Image To Warp", exists=True, argstr="--inputVolume %s") referenceVolume = File( - desc= - "Reference image used only to define the output space. If not specified, the warping is done in the same space as the image to warp.", + desc="Reference image used only to define the output space. If not specified, the warping is done in the same space as the image to warp.", exists=True, - argstr="--referenceVolume %s") + argstr="--referenceVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Resulting deformed image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) pixelType = traits.Enum( "float", "short", @@ -32,19 +33,19 @@ class BRAINSResampleInputSpec(CommandLineInputSpec): "uint", "uchar", "binary", - desc= - "Specifies the pixel type for the input/output images. The \'binary\' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", - argstr="--pixelType %s") + desc="Specifies the pixel type for the input/output images. The 'binary' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", + argstr="--pixelType %s", + ) deformationVolume = File( - desc= - "Displacement Field to be used to warp the image (ITKv3 or earlier)", + desc="Displacement Field to be used to warp the image (ITKv3 or earlier)", exists=True, - argstr="--deformationVolume %s") + argstr="--deformationVolume %s", + ) warpTransform = File( - desc= - "Filename for the BRAINSFit transform (ITKv3 or earlier) or composite transform file (ITKv4)", + desc="Filename for the BRAINSFit transform (ITKv3 or earlier) or composite transform file (ITKv4)", exists=True, - argstr="--warpTransform %s") + argstr="--warpTransform %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -56,24 +57,24 @@ class BRAINSResampleInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) inverseTransform = traits.Bool( - desc= - "True/False is to compute inverse of given transformation. Default is false", - argstr="--inverseTransform ") - defaultValue = traits.Float( - desc="Default voxel value", argstr="--defaultValue %f") + desc="If true, compute the inverse of the given transformation. Default is false", + argstr="--inverseTransform ", + ) + defaultValue = traits.Float(desc="Default voxel value", argstr="--defaultValue %f") gridSpacing = InputMultiPath( traits.Int, - desc= - "Add warped grid to output image to help show the deformation that occured with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). This is useful for makeing a 2D image of grid lines from the 3D space", + desc="Add warped grid to output image to help show the deformation that occurred with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). This is useful for making a 2D image of grid lines from the 3D space", sep=",", - argstr="--gridSpacing %s") + argstr="--gridSpacing %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSResampleOutputSpec(TraitedSpec): @@ -83,24 +84,23 @@ class BRAINSResampleOutputSpec(TraitedSpec): class BRAINSResample(SEMLikeCommandLine): """title: Resample Image (BRAINS) -category: Registration - -description: This program collects together three common image processing tasks that all involve resampling an image volume: Resampling to a new resolution and spacing, applying a transformation (using an ITK transform IO mechanisms) and Warping (using a vector image deformation field). Full documentation available here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSResample.
+ category: Registration -version: 3.0.0 + description: This program collects together three common image processing tasks that all involve resampling an image volume: Resampling to a new resolution and spacing, applying a transformation (using ITK transform IO mechanisms) and Warping (using a vector image deformation field). Full documentation available here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSResample. -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSResample + version: 3.0.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSResample -contributor: This tool was developed by Vincent Magnotta, Greg Harris, and Hans Johnson. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. + contributor: This tool was developed by Vincent Magnotta, Greg Harris, and Hans Johnson. -""" + acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. + """ input_spec = BRAINSResampleInputSpec output_spec = BRAINSResampleOutputSpec _cmd = " BRAINSResample " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False
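Again for orientation only (not part of the patch), a sketch of applying a saved transform with the BRAINSResample interface; the paths are hypothetical:

from nipype.interfaces.semtools import BRAINSResample

resample = BRAINSResample()
resample.inputs.inputVolume = "moving.nii"            # image to warp (hypothetical)
resample.inputs.referenceVolume = "fixed.nii"         # defines the output space (hypothetical)
resample.inputs.warpTransform = "outputTransform.h5"  # e.g. a transform written by BRAINSFit
resample.inputs.interpolationMode = "Linear"
resample.inputs.outputVolume = True  # auto-named outputVolume.nii per _outputs_filenames
print(resample.cmdline)  # show the BRAINSResample call without executing it
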
diff --git a/nipype/interfaces/semtools/registration/brainsresize.py b/nipype/interfaces/semtools/registration/brainsresize.py index 11238dd914..f86d7d9e62 100644 --- a/nipype/interfaces/semtools/registration/brainsresize.py +++ b/nipype/interfaces/semtools/registration/brainsresize.py @@ -1,24 +1,24 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class BRAINSResizeInputSpec(CommandLineInputSpec): - inputVolume = File( - desc="Image To Scale", exists=True, argstr="--inputVolume %s") + inputVolume = File(desc="Image To Scale", exists=True, argstr="--inputVolume %s") outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Resulting scaled image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) pixelType = traits.Enum( "float", "short", @@ -27,12 +27,12 @@ class BRAINSResizeInputSpec(CommandLineInputSpec): "uint", "uchar", "binary", - desc= - "Specifies the pixel type for the input/output images. The \'binary\' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", - argstr="--pixelType %s") + desc="Specifies the pixel type for the input/output images. The 'binary' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", + argstr="--pixelType %s", + ) scaleFactor = traits.Float( - desc="The scale factor for the image spacing.", - argstr="--scaleFactor %f") + desc="The scale factor for the image spacing.", argstr="--scaleFactor %f" + ) class BRAINSResizeOutputSpec(TraitedSpec): @@ -42,22 +42,21 @@ class BRAINSResizeOutputSpec(TraitedSpec): class BRAINSResize(SEMLikeCommandLine): """title: Resize Image (BRAINS) -category: Registration - -description: This program is useful for downsampling an image by a constant scale factor. + category: Registration -version: 3.0.0 + description: This program is useful for downsampling an image by a constant scale factor. -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + version: 3.0.0 -contributor: This tool was developed by Hans Johnson. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. + contributor: This tool was developed by Hans Johnson. -""" + acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. + """ input_spec = BRAINSResizeInputSpec output_spec = BRAINSResizeOutputSpec _cmd = " BRAINSResize " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/registration/specialized.py b/nipype/interfaces/semtools/registration/specialized.py index 2cc08e3ec7..68a6a0b993 100644 --- a/nipype/interfaces/semtools/registration/specialized.py +++ b/nipype/interfaces/semtools/registration/specialized.py @@ -1,56 +1,59 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class VBRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = InputMultiPath( File(exists=True), desc="Required: input moving image", - argstr="--movingVolume %s...") + argstr="--movingVolume %s...", + ) fixedVolume = InputMultiPath( File(exists=True), desc="Required: input fixed (target) image", - argstr="--fixedVolume %s...") + argstr="--fixedVolume %s...", + ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "Input volumes will be typecast to this format: float|short|ushort|int|uchar", - argstr="--inputPixelType %s") + desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", + argstr="--inputPixelType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", -
argstr="--outputVolume %s") + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + argstr="--outputVolume %s", + ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - argstr="--outputDisplacementFieldVolume %s") + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + argstr="--outputDisplacementFieldVolume %s", + ) outputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "outputVolume will be typecast to this format: float|short|ushort|int|uchar", - argstr="--outputPixelType %s") + desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", + argstr="--outputPixelType %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -62,244 +65,257 @@ class VBRAINSDemonWarpInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", "LogDemons", "SymmetricLogDemons", - desc= - "Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic|LogDemons|SymmetricLogDemons", - argstr="--registrationFilterType %s") + desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic|LogDemons|SymmetricLogDemons", + argstr="--registrationFilterType %s", + ) smoothDisplacementFieldSigma = traits.Float( - desc= - "A gaussian smoothing value to be applied to the deformation feild at each iteration.", - argstr="--smoothDisplacementFieldSigma %f") + desc="A gaussian smoothing value to be applied to the deformation field at each iteration.", + argstr="--smoothDisplacementFieldSigma %f", + ) numberOfPyramidLevels = traits.Int( - desc= - "Number of image pyramid levels to use in the multi-resolution registration.", - argstr="--numberOfPyramidLevels %d") + desc="Number of image pyramid levels to use in the multi-resolution registration.", + argstr="--numberOfPyramidLevels %d", + ) minimumFixedPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumFixedPyramid %s") + argstr="--minimumFixedPyramid %s", + ) minimumMovingPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the moving image pyramid. (i.e. 
start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumMovingPyramid %s") + argstr="--minimumMovingPyramid %s", + ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", - argstr="--arrayOfPyramidLevelIterations %s") + argstr="--arrayOfPyramidLevelIterations %s", + ) histogramMatch = traits.Bool( - desc= - "Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", - argstr="--histogramMatch ") + desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", + argstr="--histogramMatch ", + ) numberOfHistogramBins = traits.Int( - desc="The number of histogram levels", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" + ) numberOfMatchPoints = traits.Int( desc="The number of match points for histogramMatch", - argstr="--numberOfMatchPoints %d") + argstr="--numberOfMatchPoints %d", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "Median filter radius in all 3 directions. When images have a lot of salt and pepper noise, this step can improve the registration.", + desc="Median filter radius in all 3 directions. When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, - argstr="--initializeWithDisplacementField %s") + argstr="--initializeWithDisplacementField %s", + ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, - argstr="--initializeWithTransform %s") + argstr="--initializeWithTransform %s", + ) makeBOBF = traits.Bool( - desc= - "Flag to make Brain-Only Background-Filled versions of the input and target volumes.", - argstr="--makeBOBF ") + desc="Flag to make Brain-Only Background-Filled versions of the input and target volumes.", + argstr="--makeBOBF ", + ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( - desc= - "Mask filename for desired region of interest in the Moving image.", + desc="Mask filename for desired region of interest in the Moving image.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) lowerThresholdForBOBF = traits.Int( - desc="Lower threshold for performing BOBF", - argstr="--lowerThresholdForBOBF %d") + desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" + ) upperThresholdForBOBF = traits.Int( - desc="Upper threshold for performing BOBF", - argstr="--upperThresholdForBOBF %d") + desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" + ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", - argstr="--backgroundFillValue %d") + argstr="--backgroundFillValue %d", + ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", - argstr="--seedForBOBF %s") + argstr="--seedForBOBF %s", + ) neighborhoodForBOBF = InputMultiPath( traits.Int, - desc= -
"neighborhood in all 3 directions to be included when performing BOBF", + desc="neighborhood in all 3 directions to be included when performing BOBF", sep=",", - argstr="--neighborhoodForBOBF %s") + argstr="--neighborhoodForBOBF %s", + ) outputDisplacementFieldPrefix = traits.Str( - desc= - "Displacement field filename prefix for writing separate x, y, and z component images", - argstr="--outputDisplacementFieldPrefix %s") + desc="Displacement field filename prefix for writing separate x, y, and z component images", + argstr="--outputDisplacementFieldPrefix %s", + ) outputCheckerboardVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - argstr="--outputCheckerboardVolume %s") + desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + argstr="--outputCheckerboardVolume %s", + ) checkerboardPatternSubdivisions = InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", - argstr="--checkerboardPatternSubdivisions %s") + argstr="--checkerboardPatternSubdivisions %s", + ) outputNormalized = traits.Bool( - desc= - "Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", - argstr="--outputNormalized ") + desc="Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", + argstr="--outputNormalized ", + ) outputDebug = traits.Bool( - desc="Flag to write debugging images after each step.", - argstr="--outputDebug ") + desc="Flag to write debugging images after each step.", argstr="--outputDebug " + ) weightFactors = InputMultiPath( traits.Float, desc="Weight fatctors for each input images", sep=",", - argstr="--weightFactors %s") + argstr="--weightFactors %s", + ) gradient_type = traits.Enum( "0", "1", "2", - desc= - "Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", - argstr="--gradient_type %s") + desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", + argstr="--gradient_type %s", + ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", - argstr="--upFieldSmoothing %f") + argstr="--upFieldSmoothing %f", + ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", - argstr="--max_step_length %f") + argstr="--max_step_length %f", + ) use_vanilla_dem = traits.Bool( - desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem ") + desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " + ) gui = traits.Bool( - desc="Display intermediate image volumes for debugging", - argstr="--gui ") + desc="Display intermediate image volumes for debugging", argstr="--gui " + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", - argstr="--numberOfBCHApproximationTerms %d") + argstr="--numberOfBCHApproximationTerms %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of 
threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class VBRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputDisplacementFieldVolume = File( - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputCheckerboardVolume = File( - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - exists=True) + desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + exists=True, + ) class VBRAINSDemonWarp(SEMLikeCommandLine): """title: Vector Demon Registration (BRAINS) -category: Registration.Specialized - -description: This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://www.nitrc.org/projects/brainsdemonwarp. + category: Registration.Specialized -version: 3.0.0 + description: This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://www.nitrc.org/projects/brainsdemonwarp. -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSDemonWarp + version: 3.0.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSDemonWarp -contributor: This tool was developed by Hans J. Johnson and Greg Harris. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. + contributor: This tool was developed by Hans J. Johnson and Greg Harris. -""" + acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. 
+ """ input_spec = VBRAINSDemonWarpInputSpec output_spec = VBRAINSDemonWarpOutputSpec _cmd = " VBRAINSDemonWarp " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputCheckerboardVolume': 'outputCheckerboardVolume.nii', - 'outputDisplacementFieldVolume': 'outputDisplacementFieldVolume.nrrd' + "outputVolume": "outputVolume.nii", + "outputCheckerboardVolume": "outputCheckerboardVolume.nii", + "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } _redirect_x = False class BRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = File( - desc="Required: input moving image", - exists=True, - argstr="--movingVolume %s") + desc="Required: input moving image", exists=True, argstr="--movingVolume %s" + ) fixedVolume = File( desc="Required: input fixed (target) image", exists=True, - argstr="--fixedVolume %s") + argstr="--fixedVolume %s", + ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "Input volumes will be typecast to this format: float|short|ushort|int|uchar", - argstr="--inputPixelType %s") + desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", + argstr="--inputPixelType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - argstr="--outputVolume %s") + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + argstr="--outputVolume %s", + ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - argstr="--outputDisplacementFieldVolume %s") + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + argstr="--outputDisplacementFieldVolume %s", + ) outputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "outputVolume will be typecast to this format: float|short|ushort|int|uchar", - argstr="--outputPixelType %s") + desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", + argstr="--outputPixelType %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -311,196 +327,208 @@ class BRAINSDemonWarpInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. 
class BRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = File( - desc="Required: input moving image", - exists=True, - argstr="--movingVolume %s") + desc="Required: input moving image", exists=True, argstr="--movingVolume %s" + ) fixedVolume = File( desc="Required: input fixed (target) image", exists=True, - argstr="--fixedVolume %s") + argstr="--fixedVolume %s", + ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "Input volumes will be typecast to this format: float|short|ushort|int|uchar", - argstr="--inputPixelType %s") + desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", + argstr="--inputPixelType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - argstr="--outputVolume %s") + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + argstr="--outputVolume %s", + ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - argstr="--outputDisplacementFieldVolume %s") + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + argstr="--outputDisplacementFieldVolume %s", + ) outputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "outputVolume will be typecast to this format: float|short|ushort|int|uchar", - argstr="--outputPixelType %s") + desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", + argstr="--outputPixelType %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -311,196 +327,208 @@ class BRAINSDemonWarpInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", - desc= - "Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic", - argstr="--registrationFilterType %s") + desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic", + argstr="--registrationFilterType %s", + ) smoothDisplacementFieldSigma = traits.Float( - desc= - "A gaussian smoothing value to be applied to the deformation feild at each iteration.", - argstr="--smoothDisplacementFieldSigma %f") + desc="A gaussian smoothing value to be applied to the deformation field at each iteration.", + argstr="--smoothDisplacementFieldSigma %f", + ) numberOfPyramidLevels = traits.Int( - desc= - "Number of image pyramid levels to use in the multi-resolution registration.", - argstr="--numberOfPyramidLevels %d") + desc="Number of image pyramid levels to use in the multi-resolution registration.", + argstr="--numberOfPyramidLevels %d", + ) minimumFixedPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumFixedPyramid %s") + argstr="--minimumFixedPyramid %s", + ) minimumMovingPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumMovingPyramid %s") + argstr="--minimumMovingPyramid %s", + ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", - argstr="--arrayOfPyramidLevelIterations %s") + argstr="--arrayOfPyramidLevelIterations %s", + ) histogramMatch = traits.Bool( - desc= - "Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", - argstr="--histogramMatch ") + desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", + argstr="--histogramMatch ", + ) numberOfHistogramBins = traits.Int( - desc="The number of histogram levels", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" + ) numberOfMatchPoints = traits.Int( desc="The number of match points for histogramMatch", - argstr="--numberOfMatchPoints %d") + argstr="--numberOfMatchPoints %d", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "Median filter radius in all 3 directions. When images have a lot of salt and pepper noise, this step can improve the registration.", + desc="Median filter radius in all 3 directions.
When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, - argstr="--initializeWithDisplacementField %s") + argstr="--initializeWithDisplacementField %s", + ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, - argstr="--initializeWithTransform %s") + argstr="--initializeWithTransform %s", + ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", "BOBF", - desc= - "What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is choosen, then the mask is implicitly defined using a otsu forground and hole filling algorithm. Where the Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.", - argstr="--maskProcessingMode %s") + desc="What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is chosen, then the mask is implicitly defined using an Otsu foreground and hole filling algorithm. The Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.", + argstr="--maskProcessingMode %s", + ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( - desc= - "Mask filename for desired region of interest in the Moving image.", + desc="Mask filename for desired region of interest in the Moving image.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) lowerThresholdForBOBF = traits.Int( - desc="Lower threshold for performing BOBF", - argstr="--lowerThresholdForBOBF %d") + desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" + ) upperThresholdForBOBF = traits.Int( - desc="Upper threshold for performing BOBF", - argstr="--upperThresholdForBOBF %d") + desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" + ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", - argstr="--backgroundFillValue %d") + argstr="--backgroundFillValue %d", + ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", - argstr="--seedForBOBF %s") + argstr="--seedForBOBF %s", + ) neighborhoodForBOBF = InputMultiPath( traits.Int, - desc= - "neighborhood in all 3 directions to be included when performing BOBF", + desc="neighborhood in all 3 directions to be included when performing BOBF", sep=",", - argstr="--neighborhoodForBOBF %s") + argstr="--neighborhoodForBOBF %s", + ) outputDisplacementFieldPrefix = traits.Str( - desc= - "Displacement field filename prefix for writing separate x, y, and z component images", - argstr="--outputDisplacementFieldPrefix %s") + desc="Displacement field filename prefix for writing separate x, y, and z component images", + argstr="--outputDisplacementFieldPrefix %s", + ) outputCheckerboardVolume = traits.Either( traits.Bool, File(),
hash_files=False, - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - argstr="--outputCheckerboardVolume %s") + desc="Generate a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + argstr="--outputCheckerboardVolume %s", + ) checkerboardPatternSubdivisions = InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", - argstr="--checkerboardPatternSubdivisions %s") + argstr="--checkerboardPatternSubdivisions %s", + ) outputNormalized = traits.Bool( - desc= - "Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", - argstr="--outputNormalized ") + desc="Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", + argstr="--outputNormalized ", + ) outputDebug = traits.Bool( - desc="Flag to write debugging images after each step.", - argstr="--outputDebug ") + desc="Flag to write debugging images after each step.", argstr="--outputDebug " + ) gradient_type = traits.Enum( "0", "1", "2", - desc= - "Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", - argstr="--gradient_type %s") + desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", + argstr="--gradient_type %s", + ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", - argstr="--upFieldSmoothing %f") + argstr="--upFieldSmoothing %f", + ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", - argstr="--max_step_length %f") + argstr="--max_step_length %f", + ) use_vanilla_dem = traits.Bool( - desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem ") + desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " + ) gui = traits.Bool( - desc="Display intermediate image volumes for debugging", - argstr="--gui ") + desc="Display intermediate image volumes for debugging", argstr="--gui " + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", - argstr="--numberOfBCHApproximationTerms %d") + argstr="--numberOfBCHApproximationTerms %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputDisplacementFieldVolume = File( - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputCheckerboardVolume = File( - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed
movingVolume.", - exists=True) + desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + exists=True, + ) class BRAINSDemonWarp(SEMLikeCommandLine): """title: Demon Registration (BRAINS) -category: Registration.Specialized - -description: This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSDemonWarp. + category: Registration.Specialized -version: 3.0.0 + description: This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSDemonWarp. -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSDemonWarp + version: 3.0.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSDemonWarp -contributor: This tool was developed by Hans J. Johnson and Greg Harris. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. + contributor: This tool was developed by Hans J. Johnson and Greg Harris. -""" + acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. 
+ """ input_spec = BRAINSDemonWarpInputSpec output_spec = BRAINSDemonWarpOutputSpec _cmd = " BRAINSDemonWarp " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputCheckerboardVolume': 'outputCheckerboardVolume.nii', - 'outputDisplacementFieldVolume': 'outputDisplacementFieldVolume.nrrd' + "outputVolume": "outputVolume.nii", + "outputCheckerboardVolume": "outputCheckerboardVolume.nii", + "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } _redirect_x = False @@ -509,60 +537,67 @@ class BRAINSTransformFromFiducialsInputSpec(CommandLineInputSpec): fixedLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the fixed image", - argstr="--fixedLandmarks %s...") + argstr="--fixedLandmarks %s...", + ) movingLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the moving image", - argstr="--movingLandmarks %s...") + argstr="--movingLandmarks %s...", + ) saveTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Save the transform that results from registration", - argstr="--saveTransform %s") + argstr="--saveTransform %s", + ) transformType = traits.Enum( "Translation", "Rigid", "Similarity", desc="Type of transform to produce", - argstr="--transformType %s") + argstr="--transformType %s", + ) fixedLandmarksFile = File( desc="An fcsv formatted file with a list of landmark points.", exists=True, - argstr="--fixedLandmarksFile %s") + argstr="--fixedLandmarksFile %s", + ) movingLandmarksFile = File( desc="An fcsv formatted file with a list of landmark points.", exists=True, - argstr="--movingLandmarksFile %s") + argstr="--movingLandmarksFile %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSTransformFromFiducialsOutputSpec(TraitedSpec): saveTransform = File( - desc="Save the transform that results from registration", exists=True) + desc="Save the transform that results from registration", exists=True + ) class BRAINSTransformFromFiducials(SEMLikeCommandLine): """title: Fiducial Registration (BRAINS) -category: Registration.Specialized - -description: Computes a rigid, similarity or affine transform from a matched list of fiducials + category: Registration.Specialized -version: 0.1.0.$Revision$ + description: Computes a rigid, similarity or affine transform from a matched list of fiducials -documentation-url: http://www.slicer.org/slicerWiki/index.php/Modules:TransformFromFiducials-Documentation-3.6 + version: 0.1.0.$Revision$ -contributor: Casey B Goodlett + documentation-url: http://www.slicer.org/slicerWiki/index.php/Modules:TransformFromFiducials-Documentation-3.6 -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Casey B Goodlett -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = BRAINSTransformFromFiducialsInputSpec output_spec = BRAINSTransformFromFiducialsOutputSpec _cmd = " BRAINSTransformFromFiducials " - _outputs_filenames = {'saveTransform': 'saveTransform.h5'} + _outputs_filenames = {"saveTransform": "saveTransform.h5"} _redirect_x = False diff --git a/nipype/interfaces/semtools/registration/tests/__init__.py b/nipype/interfaces/semtools/registration/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/registration/tests/__init__.py +++ b/nipype/interfaces/semtools/registration/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSDemonWarp.py b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSDemonWarp.py index c631f9b96e..d1c8055df3 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSDemonWarp.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSDemonWarp.py @@ -1,100 +1,171 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import BRAINSDemonWarp def test_BRAINSDemonWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), arrayOfPyramidLevelIterations=dict( - argstr='--arrayOfPyramidLevelIterations %s', - sep=',', + argstr="--arrayOfPyramidLevelIterations %s", + sep=",", + ), + backgroundFillValue=dict( + argstr="--backgroundFillValue %d", ), - backgroundFillValue=dict(argstr='--backgroundFillValue %d', ), checkerboardPatternSubdivisions=dict( - argstr='--checkerboardPatternSubdivisions %s', - sep=',', + argstr="--checkerboardPatternSubdivisions %s", + sep=",", ), environ=dict( nohash=True, usedefault=True, ), - fixedBinaryVolume=dict(argstr='--fixedBinaryVolume %s', ), - fixedVolume=dict(argstr='--fixedVolume %s', ), - gradient_type=dict(argstr='--gradient_type %s', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), + fixedBinaryVolume=dict( + argstr="--fixedBinaryVolume %s", + extensions=None, + ), + fixedVolume=dict( + argstr="--fixedVolume %s", + extensions=None, + ), + gradient_type=dict( + argstr="--gradient_type %s", + ), + gui=dict( + argstr="--gui ", + ), + histogramMatch=dict( + argstr="--histogramMatch ", + ), initializeWithDisplacementField=dict( - argstr='--initializeWithDisplacementField %s', ), - initializeWithTransform=dict(argstr='--initializeWithTransform %s', ), - inputPixelType=dict(argstr='--inputPixelType %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - lowerThresholdForBOBF=dict(argstr='--lowerThresholdForBOBF %d', ), - maskProcessingMode=dict(argstr='--maskProcessingMode %s', ), - max_step_length=dict(argstr='--max_step_length %f', ), + argstr="--initializeWithDisplacementField %s", + extensions=None, + ), + initializeWithTransform=dict( + argstr="--initializeWithTransform %s", + extensions=None, + ), + inputPixelType=dict( + argstr="--inputPixelType %s", + ), + interpolationMode=dict( + argstr="--interpolationMode %s", + ), + lowerThresholdForBOBF=dict( + argstr="--lowerThresholdForBOBF %d", + ), + maskProcessingMode=dict( + argstr="--maskProcessingMode %s", + ), + max_step_length=dict( + argstr="--max_step_length %f", + ), medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', + argstr="--medianFilterSize %s", + sep=",", ), minimumFixedPyramid=dict( - argstr='--minimumFixedPyramid %s', - sep=',', + argstr="--minimumFixedPyramid %s", + sep=",", ), minimumMovingPyramid=dict( - 
argstr='--minimumMovingPyramid %s', - sep=',', + argstr="--minimumMovingPyramid %s", + sep=",", + ), + movingBinaryVolume=dict( + argstr="--movingBinaryVolume %s", + extensions=None, + ), + movingVolume=dict( + argstr="--movingVolume %s", + extensions=None, ), - movingBinaryVolume=dict(argstr='--movingBinaryVolume %s', ), - movingVolume=dict(argstr='--movingVolume %s', ), neighborhoodForBOBF=dict( - argstr='--neighborhoodForBOBF %s', - sep=',', + argstr="--neighborhoodForBOBF %s", + sep=",", ), numberOfBCHApproximationTerms=dict( - argstr='--numberOfBCHApproximationTerms %d', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfPyramidLevels=dict(argstr='--numberOfPyramidLevels %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--numberOfBCHApproximationTerms %d", + ), + numberOfHistogramBins=dict( + argstr="--numberOfHistogramBins %d", + ), + numberOfMatchPoints=dict( + argstr="--numberOfMatchPoints %d", + ), + numberOfPyramidLevels=dict( + argstr="--numberOfPyramidLevels %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputCheckerboardVolume=dict( - argstr='--outputCheckerboardVolume %s', + argstr="--outputCheckerboardVolume %s", hash_files=False, ), - outputDebug=dict(argstr='--outputDebug ', ), + outputDebug=dict( + argstr="--outputDebug ", + ), outputDisplacementFieldPrefix=dict( - argstr='--outputDisplacementFieldPrefix %s', ), + argstr="--outputDisplacementFieldPrefix %s", + ), outputDisplacementFieldVolume=dict( - argstr='--outputDisplacementFieldVolume %s', + argstr="--outputDisplacementFieldVolume %s", hash_files=False, ), - outputNormalized=dict(argstr='--outputNormalized ', ), - outputPixelType=dict(argstr='--outputPixelType %s', ), + outputNormalized=dict( + argstr="--outputNormalized ", + ), + outputPixelType=dict( + argstr="--outputPixelType %s", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - promptUser=dict(argstr='--promptUser ', ), - registrationFilterType=dict(argstr='--registrationFilterType %s', ), + promptUser=dict( + argstr="--promptUser ", + ), + registrationFilterType=dict( + argstr="--registrationFilterType %s", + ), seedForBOBF=dict( - argstr='--seedForBOBF %s', - sep=',', + argstr="--seedForBOBF %s", + sep=",", ), smoothDisplacementFieldSigma=dict( - argstr='--smoothDisplacementFieldSigma %f', ), - upFieldSmoothing=dict(argstr='--upFieldSmoothing %f', ), - upperThresholdForBOBF=dict(argstr='--upperThresholdForBOBF %d', ), - use_vanilla_dem=dict(argstr='--use_vanilla_dem ', ), + argstr="--smoothDisplacementFieldSigma %f", + ), + upFieldSmoothing=dict( + argstr="--upFieldSmoothing %f", + ), + upperThresholdForBOBF=dict( + argstr="--upperThresholdForBOBF %d", + ), + use_vanilla_dem=dict( + argstr="--use_vanilla_dem ", + ), ) inputs = BRAINSDemonWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSDemonWarp_outputs(): output_map = dict( - outputCheckerboardVolume=dict(), - outputDisplacementFieldVolume=dict(), - outputVolume=dict(), + outputCheckerboardVolume=dict( + extensions=None, + ), + outputDisplacementFieldVolume=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), ) outputs = BRAINSDemonWarp.output_spec() diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSFit.py 
b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSFit.py index 18059c4f57..69111d9212 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSFit.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSFit.py @@ -1,142 +1,265 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsfit import BRAINSFit def test_BRAINSFit_inputs(): input_map = dict( - ROIAutoClosingSize=dict(argstr='--ROIAutoClosingSize %f', ), - ROIAutoDilateSize=dict(argstr='--ROIAutoDilateSize %f', ), - args=dict(argstr='%s', ), - backgroundFillValue=dict(argstr='--backgroundFillValue %f', ), + ROIAutoClosingSize=dict( + argstr="--ROIAutoClosingSize %f", + ), + ROIAutoDilateSize=dict( + argstr="--ROIAutoDilateSize %f", + ), + args=dict( + argstr="%s", + ), + backgroundFillValue=dict( + argstr="--backgroundFillValue %f", + ), bsplineTransform=dict( - argstr='--bsplineTransform %s', + argstr="--bsplineTransform %s", hash_files=False, ), costFunctionConvergenceFactor=dict( - argstr='--costFunctionConvergenceFactor %f', ), - costMetric=dict(argstr='--costMetric %s', ), - debugLevel=dict(argstr='--debugLevel %d', ), + argstr="--costFunctionConvergenceFactor %f", + ), + costMetric=dict( + argstr="--costMetric %s", + ), + debugLevel=dict( + argstr="--debugLevel %d", + ), environ=dict( nohash=True, usedefault=True, ), - failureExitCode=dict(argstr='--failureExitCode %d', ), - fixedBinaryVolume=dict(argstr='--fixedBinaryVolume %s', ), - fixedVolume=dict(argstr='--fixedVolume %s', ), - fixedVolume2=dict(argstr='--fixedVolume2 %s', ), - fixedVolumeTimeIndex=dict(argstr='--fixedVolumeTimeIndex %d', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), - initialTransform=dict(argstr='--initialTransform %s', ), + failureExitCode=dict( + argstr="--failureExitCode %d", + ), + fixedBinaryVolume=dict( + argstr="--fixedBinaryVolume %s", + extensions=None, + ), + fixedVolume=dict( + argstr="--fixedVolume %s", + extensions=None, + ), + fixedVolume2=dict( + argstr="--fixedVolume2 %s", + extensions=None, + ), + fixedVolumeTimeIndex=dict( + argstr="--fixedVolumeTimeIndex %d", + ), + gui=dict( + argstr="--gui ", + ), + histogramMatch=dict( + argstr="--histogramMatch ", + ), + initialTransform=dict( + argstr="--initialTransform %s", + extensions=None, + ), initializeRegistrationByCurrentGenericTransform=dict( - argstr='--initializeRegistrationByCurrentGenericTransform ', ), - initializeTransformMode=dict(argstr='--initializeTransformMode %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), + argstr="--initializeRegistrationByCurrentGenericTransform ", + ), + initializeTransformMode=dict( + argstr="--initializeTransformMode %s", + ), + interpolationMode=dict( + argstr="--interpolationMode %s", + ), linearTransform=dict( - argstr='--linearTransform %s', + argstr="--linearTransform %s", hash_files=False, ), logFileReport=dict( - argstr='--logFileReport %s', + argstr="--logFileReport %s", hash_files=False, ), maskInferiorCutOffFromCenter=dict( - argstr='--maskInferiorCutOffFromCenter %f', ), - maskProcessingMode=dict(argstr='--maskProcessingMode %s', ), - maxBSplineDisplacement=dict(argstr='--maxBSplineDisplacement %f', ), + argstr="--maskInferiorCutOffFromCenter %f", + ), + maskProcessingMode=dict( + argstr="--maskProcessingMode %s", + ), + maxBSplineDisplacement=dict( + argstr="--maxBSplineDisplacement %f", + ), maximumNumberOfCorrections=dict( - argstr='--maximumNumberOfCorrections %d', ), + 
argstr="--maximumNumberOfCorrections %d", + ), maximumNumberOfEvaluations=dict( - argstr='--maximumNumberOfEvaluations %d', ), - maximumStepLength=dict(argstr='--maximumStepLength %f', ), + argstr="--maximumNumberOfEvaluations %d", + ), + maximumStepLength=dict( + argstr="--maximumStepLength %f", + ), medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', + argstr="--medianFilterSize %s", + sep=",", + ), + metricSamplingStrategy=dict( + argstr="--metricSamplingStrategy %s", ), - metricSamplingStrategy=dict(argstr='--metricSamplingStrategy %s', ), minimumStepLength=dict( - argstr='--minimumStepLength %s', - sep=',', - ), - movingBinaryVolume=dict(argstr='--movingBinaryVolume %s', ), - movingVolume=dict(argstr='--movingVolume %s', ), - movingVolume2=dict(argstr='--movingVolume2 %s', ), - movingVolumeTimeIndex=dict(argstr='--movingVolumeTimeIndex %d', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), + argstr="--minimumStepLength %s", + sep=",", + ), + movingBinaryVolume=dict( + argstr="--movingBinaryVolume %s", + extensions=None, + ), + movingVolume=dict( + argstr="--movingVolume %s", + extensions=None, + ), + movingVolume2=dict( + argstr="--movingVolume2 %s", + extensions=None, + ), + movingVolumeTimeIndex=dict( + argstr="--movingVolumeTimeIndex %d", + ), + numberOfHistogramBins=dict( + argstr="--numberOfHistogramBins %d", + ), numberOfIterations=dict( - argstr='--numberOfIterations %s', - sep=',', + argstr="--numberOfIterations %s", + sep=",", + ), + numberOfMatchPoints=dict( + argstr="--numberOfMatchPoints %d", + ), + numberOfSamples=dict( + argstr="--numberOfSamples %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfSamples=dict(argstr='--numberOfSamples %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), outputFixedVolumeROI=dict( - argstr='--outputFixedVolumeROI %s', + argstr="--outputFixedVolumeROI %s", hash_files=False, ), outputMovingVolumeROI=dict( - argstr='--outputMovingVolumeROI %s', + argstr="--outputMovingVolumeROI %s", hash_files=False, ), outputTransform=dict( - argstr='--outputTransform %s', + argstr="--outputTransform %s", hash_files=False, ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - outputVolumePixelType=dict(argstr='--outputVolumePixelType %s', ), + outputVolumePixelType=dict( + argstr="--outputVolumePixelType %s", + ), projectedGradientTolerance=dict( - argstr='--projectedGradientTolerance %f', ), - promptUser=dict(argstr='--promptUser ', ), - relaxationFactor=dict(argstr='--relaxationFactor %f', ), - removeIntensityOutliers=dict(argstr='--removeIntensityOutliers %f', ), - reproportionScale=dict(argstr='--reproportionScale %f', ), - samplingPercentage=dict(argstr='--samplingPercentage %f', ), - scaleOutputValues=dict(argstr='--scaleOutputValues ', ), - skewScale=dict(argstr='--skewScale %f', ), + argstr="--projectedGradientTolerance %f", + ), + promptUser=dict( + argstr="--promptUser ", + ), + relaxationFactor=dict( + argstr="--relaxationFactor %f", + ), + removeIntensityOutliers=dict( + argstr="--removeIntensityOutliers %f", + ), + reproportionScale=dict( + argstr="--reproportionScale %f", + ), + samplingPercentage=dict( + argstr="--samplingPercentage %f", + ), + scaleOutputValues=dict( + argstr="--scaleOutputValues ", + ), + skewScale=dict( + argstr="--skewScale %f", + ), splineGridSize=dict( - argstr='--splineGridSize %s', - sep=',', + argstr="--splineGridSize %s", + 
sep=",", ), strippedOutputTransform=dict( - argstr='--strippedOutputTransform %s', + argstr="--strippedOutputTransform %s", hash_files=False, ), transformType=dict( - argstr='--transformType %s', - sep=',', - ), - translationScale=dict(argstr='--translationScale %f', ), - useAffine=dict(argstr='--useAffine ', ), - useBSpline=dict(argstr='--useBSpline ', ), - useComposite=dict(argstr='--useComposite ', ), - useROIBSpline=dict(argstr='--useROIBSpline ', ), - useRigid=dict(argstr='--useRigid ', ), - useScaleSkewVersor3D=dict(argstr='--useScaleSkewVersor3D ', ), - useScaleVersor3D=dict(argstr='--useScaleVersor3D ', ), - useSyN=dict(argstr='--useSyN ', ), + argstr="--transformType %s", + sep=",", + ), + translationScale=dict( + argstr="--translationScale %f", + ), + useAffine=dict( + argstr="--useAffine ", + ), + useBSpline=dict( + argstr="--useBSpline ", + ), + useComposite=dict( + argstr="--useComposite ", + ), + useROIBSpline=dict( + argstr="--useROIBSpline ", + ), + useRigid=dict( + argstr="--useRigid ", + ), + useScaleSkewVersor3D=dict( + argstr="--useScaleSkewVersor3D ", + ), + useScaleVersor3D=dict( + argstr="--useScaleVersor3D ", + ), + useSyN=dict( + argstr="--useSyN ", + ), writeOutputTransformInFloat=dict( - argstr='--writeOutputTransformInFloat ', ), - writeTransformOnFailure=dict(argstr='--writeTransformOnFailure ', ), + argstr="--writeOutputTransformInFloat ", + ), + writeTransformOnFailure=dict( + argstr="--writeTransformOnFailure ", + ), ) inputs = BRAINSFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSFit_outputs(): output_map = dict( - bsplineTransform=dict(), - linearTransform=dict(), - logFileReport=dict(), - outputFixedVolumeROI=dict(), - outputMovingVolumeROI=dict(), - outputTransform=dict(), - outputVolume=dict(), - strippedOutputTransform=dict(), + bsplineTransform=dict( + extensions=None, + ), + linearTransform=dict( + extensions=None, + ), + logFileReport=dict( + extensions=None, + ), + outputFixedVolumeROI=dict( + extensions=None, + ), + outputMovingVolumeROI=dict( + extensions=None, + ), + outputTransform=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), + strippedOutputTransform=dict( + extensions=None, + ), ) outputs = BRAINSFit.output_spec() diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResample.py b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResample.py index 98ec5f4ff3..46d175da07 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResample.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResample.py @@ -1,40 +1,69 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsresample import BRAINSResample def test_BRAINSResample_inputs(): input_map = dict( - args=dict(argstr='%s', ), - defaultValue=dict(argstr='--defaultValue %f', ), - deformationVolume=dict(argstr='--deformationVolume %s', ), + args=dict( + argstr="%s", + ), + defaultValue=dict( + argstr="--defaultValue %f", + ), + deformationVolume=dict( + argstr="--deformationVolume %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), gridSpacing=dict( - argstr='--gridSpacing %s', - sep=',', + argstr="--gridSpacing %s", + sep=",", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + interpolationMode=dict( + argstr="--interpolationMode %s", + ), + 
inverseTransform=dict( + argstr="--inverseTransform ", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", ), - inputVolume=dict(argstr='--inputVolume %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - inverseTransform=dict(argstr='--inverseTransform ', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - pixelType=dict(argstr='--pixelType %s', ), - referenceVolume=dict(argstr='--referenceVolume %s', ), - warpTransform=dict(argstr='--warpTransform %s', ), + pixelType=dict( + argstr="--pixelType %s", + ), + referenceVolume=dict( + argstr="--referenceVolume %s", + extensions=None, + ), + warpTransform=dict( + argstr="--warpTransform %s", + extensions=None, + ), ) inputs = BRAINSResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSResample_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = BRAINSResample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResize.py b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResize.py index 4ce0d7159c..52d24e6fff 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResize.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResize.py @@ -1,30 +1,44 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsresize import BRAINSResize def test_BRAINSResize_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - pixelType=dict(argstr='--pixelType %s', ), - scaleFactor=dict(argstr='--scaleFactor %f', ), + pixelType=dict( + argstr="--pixelType %s", + ), + scaleFactor=dict( + argstr="--scaleFactor %f", + ), ) inputs = BRAINSResize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSResize_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = BRAINSResize.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSTransformFromFiducials.py b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSTransformFromFiducials.py index 2a700eaac7..bc98fc2763 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSTransformFromFiducials.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSTransformFromFiducials.py @@ -1,33 +1,54 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import BRAINSTransformFromFiducials def test_BRAINSTransformFromFiducials_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - fixedLandmarks=dict(argstr='--fixedLandmarks %s...', ), - 
fixedLandmarksFile=dict(argstr='--fixedLandmarksFile %s', ), - movingLandmarks=dict(argstr='--movingLandmarks %s...', ), - movingLandmarksFile=dict(argstr='--movingLandmarksFile %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + fixedLandmarks=dict( + argstr="--fixedLandmarks %s...", + ), + fixedLandmarksFile=dict( + argstr="--fixedLandmarksFile %s", + extensions=None, + ), + movingLandmarks=dict( + argstr="--movingLandmarks %s...", + ), + movingLandmarksFile=dict( + argstr="--movingLandmarksFile %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), saveTransform=dict( - argstr='--saveTransform %s', + argstr="--saveTransform %s", hash_files=False, ), - transformType=dict(argstr='--transformType %s', ), + transformType=dict( + argstr="--transformType %s", + ), ) inputs = BRAINSTransformFromFiducials.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSTransformFromFiducials_outputs(): - output_map = dict(saveTransform=dict(), ) + output_map = dict( + saveTransform=dict( + extensions=None, + ), + ) outputs = BRAINSTransformFromFiducials.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_VBRAINSDemonWarp.py b/nipype/interfaces/semtools/registration/tests/test_auto_VBRAINSDemonWarp.py index b3255da1d3..af8bac8680 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_VBRAINSDemonWarp.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_VBRAINSDemonWarp.py @@ -1,92 +1,153 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import VBRAINSDemonWarp def test_VBRAINSDemonWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), arrayOfPyramidLevelIterations=dict( - argstr='--arrayOfPyramidLevelIterations %s', - sep=',', + argstr="--arrayOfPyramidLevelIterations %s", + sep=",", + ), + backgroundFillValue=dict( + argstr="--backgroundFillValue %d", ), - backgroundFillValue=dict(argstr='--backgroundFillValue %d', ), checkerboardPatternSubdivisions=dict( - argstr='--checkerboardPatternSubdivisions %s', - sep=',', + argstr="--checkerboardPatternSubdivisions %s", + sep=",", ), environ=dict( nohash=True, usedefault=True, ), - fixedBinaryVolume=dict(argstr='--fixedBinaryVolume %s', ), - fixedVolume=dict(argstr='--fixedVolume %s...', ), - gradient_type=dict(argstr='--gradient_type %s', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), + fixedBinaryVolume=dict( + argstr="--fixedBinaryVolume %s", + extensions=None, + ), + fixedVolume=dict( + argstr="--fixedVolume %s...", + ), + gradient_type=dict( + argstr="--gradient_type %s", + ), + gui=dict( + argstr="--gui ", + ), + histogramMatch=dict( + argstr="--histogramMatch ", + ), initializeWithDisplacementField=dict( - argstr='--initializeWithDisplacementField %s', ), - initializeWithTransform=dict(argstr='--initializeWithTransform %s', ), - inputPixelType=dict(argstr='--inputPixelType %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - lowerThresholdForBOBF=dict(argstr='--lowerThresholdForBOBF %d', ), - makeBOBF=dict(argstr='--makeBOBF ', ), - max_step_length=dict(argstr='--max_step_length %f', ), + argstr="--initializeWithDisplacementField %s", + extensions=None, + ), + initializeWithTransform=dict( + 
argstr="--initializeWithTransform %s", + extensions=None, + ), + inputPixelType=dict( + argstr="--inputPixelType %s", + ), + interpolationMode=dict( + argstr="--interpolationMode %s", + ), + lowerThresholdForBOBF=dict( + argstr="--lowerThresholdForBOBF %d", + ), + makeBOBF=dict( + argstr="--makeBOBF ", + ), + max_step_length=dict( + argstr="--max_step_length %f", + ), medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', + argstr="--medianFilterSize %s", + sep=",", ), minimumFixedPyramid=dict( - argstr='--minimumFixedPyramid %s', - sep=',', + argstr="--minimumFixedPyramid %s", + sep=",", ), minimumMovingPyramid=dict( - argstr='--minimumMovingPyramid %s', - sep=',', + argstr="--minimumMovingPyramid %s", + sep=",", + ), + movingBinaryVolume=dict( + argstr="--movingBinaryVolume %s", + extensions=None, + ), + movingVolume=dict( + argstr="--movingVolume %s...", ), - movingBinaryVolume=dict(argstr='--movingBinaryVolume %s', ), - movingVolume=dict(argstr='--movingVolume %s...', ), neighborhoodForBOBF=dict( - argstr='--neighborhoodForBOBF %s', - sep=',', + argstr="--neighborhoodForBOBF %s", + sep=",", ), numberOfBCHApproximationTerms=dict( - argstr='--numberOfBCHApproximationTerms %d', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfPyramidLevels=dict(argstr='--numberOfPyramidLevels %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--numberOfBCHApproximationTerms %d", + ), + numberOfHistogramBins=dict( + argstr="--numberOfHistogramBins %d", + ), + numberOfMatchPoints=dict( + argstr="--numberOfMatchPoints %d", + ), + numberOfPyramidLevels=dict( + argstr="--numberOfPyramidLevels %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputCheckerboardVolume=dict( - argstr='--outputCheckerboardVolume %s', + argstr="--outputCheckerboardVolume %s", hash_files=False, ), - outputDebug=dict(argstr='--outputDebug ', ), + outputDebug=dict( + argstr="--outputDebug ", + ), outputDisplacementFieldPrefix=dict( - argstr='--outputDisplacementFieldPrefix %s', ), + argstr="--outputDisplacementFieldPrefix %s", + ), outputDisplacementFieldVolume=dict( - argstr='--outputDisplacementFieldVolume %s', + argstr="--outputDisplacementFieldVolume %s", hash_files=False, ), - outputNormalized=dict(argstr='--outputNormalized ', ), - outputPixelType=dict(argstr='--outputPixelType %s', ), + outputNormalized=dict( + argstr="--outputNormalized ", + ), + outputPixelType=dict( + argstr="--outputPixelType %s", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - promptUser=dict(argstr='--promptUser ', ), - registrationFilterType=dict(argstr='--registrationFilterType %s', ), + promptUser=dict( + argstr="--promptUser ", + ), + registrationFilterType=dict( + argstr="--registrationFilterType %s", + ), seedForBOBF=dict( - argstr='--seedForBOBF %s', - sep=',', + argstr="--seedForBOBF %s", + sep=",", ), smoothDisplacementFieldSigma=dict( - argstr='--smoothDisplacementFieldSigma %f', ), - upFieldSmoothing=dict(argstr='--upFieldSmoothing %f', ), - upperThresholdForBOBF=dict(argstr='--upperThresholdForBOBF %d', ), - use_vanilla_dem=dict(argstr='--use_vanilla_dem ', ), + argstr="--smoothDisplacementFieldSigma %f", + ), + upFieldSmoothing=dict( + argstr="--upFieldSmoothing %f", + ), + upperThresholdForBOBF=dict( + argstr="--upperThresholdForBOBF %d", + ), + use_vanilla_dem=dict( + argstr="--use_vanilla_dem ", + ), weightFactors=dict( - 
argstr='--weightFactors %s', - sep=',', + argstr="--weightFactors %s", + sep=",", ), ) inputs = VBRAINSDemonWarp.input_spec() @@ -94,11 +155,19 @@ def test_VBRAINSDemonWarp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VBRAINSDemonWarp_outputs(): output_map = dict( - outputCheckerboardVolume=dict(), - outputDisplacementFieldVolume=dict(), - outputVolume=dict(), + outputCheckerboardVolume=dict( + extensions=None, + ), + outputDisplacementFieldVolume=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), ) outputs = VBRAINSDemonWarp.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/__init__.py b/nipype/interfaces/semtools/segmentation/__init__.py index d0cd69e0c9..8d11e465ef 100644 --- a/nipype/interfaces/semtools/segmentation/__init__.py +++ b/nipype/interfaces/semtools/segmentation/__init__.py @@ -1,6 +1,10 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .specialized import ( - BRAINSCut, BRAINSROIAuto, BRAINSConstellationDetector, - BRAINSCreateLabelMapFromProbabilityMaps, BinaryMaskEditorBasedOnLandmarks, - BRAINSMultiSTAPLE, BRAINSABC, ESLR) + BRAINSCut, + BRAINSROIAuto, + BRAINSConstellationDetector, + BRAINSCreateLabelMapFromProbabilityMaps, + BinaryMaskEditorBasedOnLandmarks, + BRAINSMultiSTAPLE, + BRAINSABC, + ESLR, +) diff --git a/nipype/interfaces/semtools/segmentation/specialized.py b/nipype/interfaces/semtools/segmentation/specialized.py index fa08b8e260..d4f1ab1b07 100644 --- a/nipype/interfaces/semtools/segmentation/specialized.py +++ b/nipype/interfaces/semtools/segmentation/specialized.py @@ -1,70 +1,76 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + InputMultiPath, + OutputMultiPath, +) class BRAINSCutInputSpec(CommandLineInputSpec): netConfiguration = File( - desc= - "XML File defining BRAINSCut parameters. OLD NAME. PLEASE USE modelConfigurationFilename instead.", + desc="XML File defining BRAINSCut parameters. OLD NAME. 
PLEASE USE modelConfigurationFilename instead.", exists=True, - argstr="--netConfiguration %s") + argstr="--netConfiguration %s", + ) modelConfigurationFilename = File( desc="XML File defining BRAINSCut parameters", exists=True, - argstr="--modelConfigurationFilename %s") + argstr="--modelConfigurationFilename %s", + ) trainModelStartIndex = traits.Int( - desc="Starting iteration for training", - argstr="--trainModelStartIndex %d") + desc="Starting iteration for training", argstr="--trainModelStartIndex %d" + ) verbose = traits.Int( - desc="print out some debugging information", argstr="--verbose %d") + desc="print out some debugging information", argstr="--verbose %d" + ) multiStructureThreshold = traits.Bool( - desc="multiStructureThreshold module to deal with overlaping area", - argstr="--multiStructureThreshold ") + desc="multiStructureThreshold module to deal with overlapping area", + argstr="--multiStructureThreshold ", + ) histogramEqualization = traits.Bool( - desc= - "A Histogram Equalization process could be added to the creating/applying process from Subject To Atlas. Default is false, which genreate input vectors without Histogram Equalization. ", - argstr="--histogramEqualization ") + desc="A Histogram Equalization process could be added to the creating/applying process from Subject To Atlas. Default is false, which generates input vectors without Histogram Equalization. ", + argstr="--histogramEqualization ", + ) computeSSEOn = traits.Bool( - desc= - "compute Sum of Square Error (SSE) along the trained model until the number of iteration given in the modelConfigurationFilename file", - argstr="--computeSSEOn ") + desc="compute Sum of Square Error (SSE) along the trained model until the number of iterations given in the modelConfigurationFilename file", + argstr="--computeSSEOn ", + ) generateProbability = traits.Bool( - desc="Generate probability map", argstr="--generateProbability ") + desc="Generate probability map", argstr="--generateProbability " + ) createVectors = traits.Bool( - desc="create vectors for training neural net", - argstr="--createVectors ") - trainModel = traits.Bool( - desc="train the neural net", argstr="--trainModel ") + desc="create vectors for training neural net", argstr="--createVectors " + ) + trainModel = traits.Bool(desc="train the neural net", argstr="--trainModel ") NoTrainingVectorShuffling = traits.Bool( desc="If this flag is on, there will be no shuffling.", - argstr="--NoTrainingVectorShuffling ") - applyModel = traits.Bool( - desc="apply the neural net", argstr="--applyModel ") + argstr="--NoTrainingVectorShuffling ", + ) + applyModel = traits.Bool(desc="apply the neural net", argstr="--applyModel ") validate = traits.Bool( - desc= - "validate data set.Just need for the first time run ( This is for validation of xml file and not working yet )", - argstr="--validate ") + desc="validate data set. Just needed for the first time run (this is for validation of the xml file and not working yet)", + argstr="--validate ", + ) method = traits.Enum("RandomForest", "ANN", argstr="--method %s") numberOfTrees = traits.Int( - desc= - " Random tree: number of trees. This is to be used when only one model with specified depth wish to be created. ", - argstr="--numberOfTrees %d") + desc=" Random tree: number of trees. This is to be used when only one model with a specified depth is to be created. ", + argstr="--numberOfTrees %d", + ) randomTreeDepth = traits.Int( - desc= - " Random tree depth. 
This is to be used when only one model with specified depth wish to be created. ", - argstr="--randomTreeDepth %d") + desc=" Random tree depth. This is to be used when only one model with a specified depth is to be created. ", + argstr="--randomTreeDepth %d", + ) modelFilename = traits.Str( - desc= - " model file name given from user (not by xml configuration file) ", - argstr="--modelFilename %s") + desc=" model file name given from user (not by xml configuration file) ", + argstr="--modelFilename %s", + ) class BRAINSCutOutputSpec(TraitedSpec): @@ -74,17 +80,16 @@ class BRAINSCutOutputSpec(TraitedSpec): class BRAINSCut(SEMLikeCommandLine): """title: BRAINSCut (BRAINS) -category: Segmentation.Specialized - -description: Automatic Segmentation using neural networks + category: Segmentation.Specialized -version: 1.0 + description: Automatic Segmentation using neural networks -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + version: 1.0 -contributor: Vince Magnotta, Hans Johnson, Greg Harris, Kent Williams, Eunyoung Regina Kim + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: Vince Magnotta, Hans Johnson, Greg Harris, Kent Williams, Eunyoung Regina Kim + """ input_spec = BRAINSCutInputSpec output_spec = BRAINSCutOutputSpec @@ -97,41 +102,45 @@ class BRAINSROIAutoInputSpec(CommandLineInputSpec): inputVolume = File( desc="The input image for finding the largest region filled mask.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputROIMaskVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The ROI automatically found from the input image.", - argstr="--outputROIMaskVolume %s") + argstr="--outputROIMaskVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The inputVolume with optional [maskOutput|cropOutput] to the region of the brain mask.", - argstr="--outputVolume %s") + desc="The inputVolume with optional [maskOutput|cropOutput] to the region of the brain mask.", + argstr="--outputVolume %s", + ) maskOutput = traits.Bool( - desc="The inputVolume multiplied by the ROI mask.", - argstr="--maskOutput ") + desc="The inputVolume multiplied by the ROI mask.", argstr="--maskOutput " + ) cropOutput = traits.Bool( desc="The inputVolume cropped to the region of the ROI mask.", - argstr="--cropOutput ") + argstr="--cropOutput ", + ) otsuPercentileThreshold = traits.Float( desc="Parameter to the Otsu threshold algorithm.", - argstr="--otsuPercentileThreshold %f") + argstr="--otsuPercentileThreshold %f", + ) thresholdCorrectionFactor = traits.Float( - desc= - "A factor to scale the Otsu algorithm's result threshold, in case clipping mangles the image.", - argstr="--thresholdCorrectionFactor %f") + desc="A factor to scale the Otsu algorithm's result threshold, in case clipping mangles the image.", + argstr="--thresholdCorrectionFactor %f", + ) closingSize = traits.Float( - desc= - "The Closing Size (in millimeters) for largest connected filled mask. This value is divided by image spacing and rounded to the next largest voxel number.", - argstr="--closingSize %f") + desc="The Closing Size (in millimeters) for largest connected filled mask. This value is divided by image spacing and rounded to the next largest voxel number.", + argstr="--closingSize %f", + ) ROIAutoDilateSize = traits.Float( - desc= - "This flag is only relavent when using ROIAUTO mode for initializing masks. 
It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", - argstr="--ROIAutoDilateSize %f") + desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. A setting of 10mm has been shown to help regularize a BSpline registration type so that there are some background constraints to match the edges of the head better.", + argstr="--ROIAutoDilateSize %f", + ) outputVolumePixelType = traits.Enum( "float", "short", @@ -139,157 +148,161 @@ class BRAINSROIAutoInputSpec(CommandLineInputSpec): "int", "uint", "uchar", - desc= - "The output image Pixel Type is the scalar datatype for representation of the Output Volume.", - argstr="--outputVolumePixelType %s") + desc="The output image Pixel Type is the scalar datatype for representation of the Output Volume.", + argstr="--outputVolumePixelType %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSROIAutoOutputSpec(TraitedSpec): outputROIMaskVolume = File( - desc="The ROI automatically found from the input image.", exists=True) + desc="The ROI automatically found from the input image.", exists=True + ) outputVolume = File( - desc= - "The inputVolume with optional [maskOutput|cropOutput] to the region of the brain mask.", - exists=True) + desc="The inputVolume with optional [maskOutput|cropOutput] to the region of the brain mask.", + exists=True, + ) class BRAINSROIAuto(SEMLikeCommandLine): """title: Foreground masking (BRAINS) -category: Segmentation.Specialized - -description: This program is used to create a mask over the most prominant forground region in an image. This is accomplished via a combination of otsu thresholding and a closing operation. More documentation is available here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ForegroundMasking. + category: Segmentation.Specialized -version: 2.4.1 + description: This program is used to create a mask over the most prominent foreground region in an image. This is accomplished via a combination of otsu thresholding and a closing operation. More documentation is available here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ForegroundMasking. -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + version: 2.4.1 -contributor: Hans J. Johnson, hans-johnson -at- uiowa.edu, http://www.psychiatry.uiowa.edu + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5), fedorov -at- bwh.harvard.edu (Slicer integration); (1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard) + contributor: Hans J. 
Johnson, hans-johnson -at- uiowa.edu, http://www.psychiatry.uiowa.edu + acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5), fedorov -at- bwh.harvard.edu (Slicer integration); (1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard) + """ input_spec = BRAINSROIAutoInputSpec output_spec = BRAINSROIAutoOutputSpec _cmd = " BRAINSROIAuto " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputROIMaskVolume': 'outputROIMaskVolume.nii' + "outputVolume": "outputVolume.nii", + "outputROIMaskVolume": "outputROIMaskVolume.nii", } _redirect_x = False class BRAINSConstellationDetectorInputSpec(CommandLineInputSpec): houghEyeDetectorMode = traits.Int( - desc= - ", This flag controls the mode of Hough eye detector. By default, value of 1 is for T1W images, while the value of 0 is for T2W and PD images., ", - argstr="--houghEyeDetectorMode %d") + desc=", This flag controls the mode of Hough eye detector. By default, value of 1 is for T1W images, while the value of 0 is for T2W and PD images., ", + argstr="--houghEyeDetectorMode %d", + ) inputTemplateModel = File( desc="User-specified template model., ", exists=True, - argstr="--inputTemplateModel %s") + argstr="--inputTemplateModel %s", + ) LLSModel = File( desc="Linear least squares model filename in HD5 format", exists=True, - argstr="--LLSModel %s") + argstr="--LLSModel %s", + ) inputVolume = File( desc="Input image in which to find ACPC points", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "ACPC-aligned output image with the same voxels, but updated origin, and direction cosign so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagital plane is the plane where physical L/R coordinate is 0.0.", - argstr="--outputVolume %s") + desc="ACPC-aligned output image with the same voxels, but updated origin, and direction cosine so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagittal plane is the plane where physical L/R coordinate is 0.0.", + argstr="--outputVolume %s", + ) outputResampledVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "ACPC-aligned output image in a resampled unifor space. Currently this is a 1mm, 256^3, Identity direction image.", - argstr="--outputResampledVolume %s") + desc="ACPC-aligned output image in a resampled uniform space. 
Currently this is a 1mm, 256^3, Identity direction image.", + argstr="--outputResampledVolume %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The filename for the original space to ACPC alignment to be written (in .h5 format)., ", - argstr="--outputTransform %s") + desc="The filename for the original space to ACPC alignment to be written (in .h5 format)., ", + argstr="--outputTransform %s", + ) outputLandmarksInInputSpace = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the original image space (the detected RP, AC, PC, and VN4) in it to be written., ", - argstr="--outputLandmarksInInputSpace %s") + desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the original image space (the detected RP, AC, PC, and VN4) in it to be written., ", + argstr="--outputLandmarksInInputSpace %s", + ) outputLandmarksInACPCAlignedSpace = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the output image space (the detected RP, AC, PC, and VN4) in it to be written., ", - argstr="--outputLandmarksInACPCAlignedSpace %s") + desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the output image space (the detected RP, AC, PC, and VN4) in it to be written., ", + argstr="--outputLandmarksInACPCAlignedSpace %s", + ) outputMRML = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename for the new subject-specific scene definition file in the same format produced by Slicer3 (in .mrml format). Only the components that were specified by the user on command line would be generated. Compatible components include inputVolume, outputVolume, outputLandmarksInInputSpace, outputLandmarksInACPCAlignedSpace, and outputTransform., ", - argstr="--outputMRML %s") + desc=", The filename for the new subject-specific scene definition file in the same format produced by Slicer3 (in .mrml format). Only the components that were specified by the user on command line would be generated. Compatible components include inputVolume, outputVolume, outputLandmarksInInputSpace, outputLandmarksInACPCAlignedSpace, and outputTransform., ", + argstr="--outputMRML %s", + ) outputVerificationScript = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename for the Slicer3 script that verifies the aligned landmarks against the aligned image file. This will happen only in conjunction with saveOutputLandmarks and an outputVolume., ", - argstr="--outputVerificationScript %s") + desc=", The filename for the Slicer3 script that verifies the aligned landmarks against the aligned image file. This will happen only in conjunction with saveOutputLandmarks and an outputVolume., ", + argstr="--outputVerificationScript %s", + ) mspQualityLevel = traits.Int( - desc= - ", Flag cotrols how agressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds), NOTE: -1= Prealigned so no estimate!., ", - argstr="--mspQualityLevel %d") + desc=", Flag controls how aggressive the MSP is estimated. 
0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds), NOTE: -1= Prealigned so no estimate!., ", + argstr="--mspQualityLevel %d", + ) otsuPercentileThreshold = traits.Float( - desc= - ", This is a parameter to FindLargestForegroundFilledMask, which is employed when acLowerBound is set and an outputUntransformedClippedVolume is requested., ", - argstr="--otsuPercentileThreshold %f") + desc=", This is a parameter to FindLargestForegroundFilledMask, which is employed when acLowerBound is set and an outputUntransformedClippedVolume is requested., ", + argstr="--otsuPercentileThreshold %f", + ) acLowerBound = traits.Float( - desc= - ", When generating a resampled output image, replace the image with the BackgroundFillValue everywhere below the plane This Far in physical units (millimeters) below (inferior to) the AC point (as found by the model.) The oversize default was chosen to have no effect. Based on visualizing a thousand masks in the IPIG study, we recommend a limit no smaller than 80.0 mm., ", - argstr="--acLowerBound %f") + desc=", When generating a resampled output image, replace the image with the BackgroundFillValue everywhere below the plane This Far in physical units (millimeters) below (inferior to) the AC point (as found by the model.) The oversize default was chosen to have no effect. Based on visualizing a thousand masks in the IPIG study, we recommend a limit no smaller than 80.0 mm., ", + argstr="--acLowerBound %f", + ) cutOutHeadInOutputVolume = traits.Bool( - desc= - ", Flag to cut out just the head tissue when producing an (un)transformed clipped volume., ", - argstr="--cutOutHeadInOutputVolume ") + desc=", Flag to cut out just the head tissue when producing an (un)transformed clipped volume., ", + argstr="--cutOutHeadInOutputVolume ", + ) outputUntransformedClippedVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output image in which to store neck-clipped input image, with the use of --acLowerBound and maybe --cutOutHeadInUntransformedVolume.", - argstr="--outputUntransformedClippedVolume %s") + desc="Output image in which to store neck-clipped input image, with the use of --acLowerBound and maybe --cutOutHeadInUntransformedVolume.", + argstr="--outputUntransformedClippedVolume %s", + ) rescaleIntensities = traits.Bool( - desc= - ", Flag to turn on rescaling image intensities on input., ", - argstr="--rescaleIntensities ") + desc=", Flag to turn on rescaling image intensities on input., ", + argstr="--rescaleIntensities ", + ) trimRescaledIntensities = traits.Float( - desc= - ", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", - argstr="--trimRescaledIntensities %f") + desc=", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", + argstr="--trimRescaledIntensities %f", + ) rescaleIntensitiesOutputRange = InputMultiPath( traits.Int, - desc= - ", This pair of integers gives the lower and upper bounds on the signal portion of the output image. 
Out-of-field voxels are taken from BackgroundFillValue., ", + desc=", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ", sep=",", - argstr="--rescaleIntensitiesOutputRange %s") + argstr="--rescaleIntensitiesOutputRange %s", + ) BackgroundFillValue = traits.Str( - desc= - "Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", - argstr="--BackgroundFillValue %s") + desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", + argstr="--BackgroundFillValue %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -301,180 +314,174 @@ class BRAINSConstellationDetectorInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) forceACPoint = InputMultiPath( traits.Float, - desc= - ", Use this flag to manually specify the AC point from the original image on the command line., ", + desc=", Use this flag to manually specify the AC point from the original image on the command line., ", sep=",", - argstr="--forceACPoint %s") + argstr="--forceACPoint %s", + ) forcePCPoint = InputMultiPath( traits.Float, - desc= - ", Use this flag to manually specify the PC point from the original image on the command line., ", + desc=", Use this flag to manually specify the PC point from the original image on the command line., ", sep=",", - argstr="--forcePCPoint %s") + argstr="--forcePCPoint %s", + ) forceVN4Point = InputMultiPath( traits.Float, - desc= - ", Use this flag to manually specify the VN4 point from the original image on the command line., ", + desc=", Use this flag to manually specify the VN4 point from the original image on the command line., ", sep=",", - argstr="--forceVN4Point %s") + argstr="--forceVN4Point %s", + ) forceRPPoint = InputMultiPath( traits.Float, - desc= - ", Use this flag to manually specify the RP point from the original image on the command line., ", + desc=", Use this flag to manually specify the RP point from the original image on the command line., ", sep=",", - argstr="--forceRPPoint %s") + argstr="--forceRPPoint %s", + ) inputLandmarksEMSP = File( - desc= - ", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (in .fcsv) with the landmarks in the estimated MSP aligned space to be loaded. The detector will only process landmarks not enlisted on the file., ", + desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (in .fcsv) with the landmarks in the estimated MSP aligned space to be loaded. 
The detector will only process landmarks not listed in the file., ", exists=True, - argstr="--inputLandmarksEMSP %s") + argstr="--inputLandmarksEMSP %s", + ) forceHoughEyeDetectorReportFailure = traits.Bool( - desc= - ", Flag indicates whether the Hough eye detector should report failure, ", - argstr="--forceHoughEyeDetectorReportFailure ") + desc=", Flag indicates whether the Hough eye detector should report failure, ", + argstr="--forceHoughEyeDetectorReportFailure ", + ) rmpj = traits.Float( - desc= - ", Search radius for MPJ in unit of mm, ", - argstr="--rmpj %f") + desc=", Search radius for MPJ in unit of mm, ", + argstr="--rmpj %f", + ) rac = traits.Float( desc=", Search radius for AC in unit of mm, ", - argstr="--rac %f") + argstr="--rac %f", + ) rpc = traits.Float( desc=", Search radius for PC in unit of mm, ", - argstr="--rpc %f") + argstr="--rpc %f", + ) rVN4 = traits.Float( - desc= - ", Search radius for VN4 in unit of mm, ", - argstr="--rVN4 %f") + desc=", Search radius for VN4 in unit of mm, ", + argstr="--rVN4 %f", + ) debug = traits.Bool( - desc= - ", Show internal debugging information., ", - argstr="--debug ") + desc=", Show internal debugging information., ", + argstr="--debug ", + ) verbose = traits.Bool( desc=", Show more verbose output, ", - argstr="--verbose ") + argstr="--verbose ", + ) writeBranded2DImage = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename for the 2D .png branded midline debugging image. This will happen only in conjunction with requesting an outputVolume., ", - argstr="--writeBranded2DImage %s") + desc=", The filename for the 2D .png branded midline debugging image. This will happen only in conjunction with requesting an outputVolume., ", + argstr="--writeBranded2DImage %s", + ) resultsDir = traits.Either( traits.Bool, Directory(), hash_files=False, - desc= - ", The directory for the debuging images to be written., ", - argstr="--resultsDir %s") + desc=", The directory for the debugging images to be written., ", + argstr="--resultsDir %s", + ) writedebuggingImagesLevel = traits.Int( - desc= - ", This flag controls if debugging images are produced. By default value of 0 is no images. 
Anything greater than zero will be increasing level of debugging images., ", + argstr="--writedebuggingImagesLevel %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) atlasVolume = File( desc="Atlas volume image to be used for BRAINSFit registration", exists=True, - argstr="--atlasVolume %s") + argstr="--atlasVolume %s", + ) atlasLandmarks = File( - desc= - "Atlas landmarks to be used for BRAINSFit registration initialization, ", + desc="Atlas landmarks to be used for BRAINSFit registration initialization, ", exists=True, - argstr="--atlasLandmarks %s") + argstr="--atlasLandmarks %s", + ) atlasLandmarkWeights = File( - desc= - "Weights associated with atlas landmarks to be used for BRAINSFit registration initialization, ", + desc="Weights associated with atlas landmarks to be used for BRAINSFit registration initialization, ", exists=True, - argstr="--atlasLandmarkWeights %s") + argstr="--atlasLandmarkWeights %s", + ) class BRAINSConstellationDetectorOutputSpec(TraitedSpec): outputVolume = File( - desc= - "ACPC-aligned output image with the same voxels, but updated origin, and direction cosign so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagital plane is the plane where physical L/R coordinate is 0.0.", - exists=True) + desc="ACPC-aligned output image with the same voxels, but updated origin, and direction cosine so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagittal plane is the plane where physical L/R coordinate is 0.0.", + exists=True, + ) outputResampledVolume = File( - desc= - "ACPC-aligned output image in a resampled unifor space. Currently this is a 1mm, 256^3, Identity direction image.", - exists=True) + desc="ACPC-aligned output image in a resampled uniform space. Currently this is a 1mm, 256^3, Identity direction image.", + exists=True, + ) outputTransform = File( - desc= - "The filename for the original space to ACPC alignment to be written (in .h5 format)., ", - exists=True) + desc="The filename for the original space to ACPC alignment to be written (in .h5 format)., ", + exists=True, + ) outputLandmarksInInputSpace = File( - desc= - ", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the original image space (the detected RP, AC, PC, and VN4) in it to be written., ", - exists=True) + desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the original image space (the detected RP, AC, PC, and VN4) in it to be written., ", + exists=True, + ) outputLandmarksInACPCAlignedSpace = File( - desc= - ", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the output image space (the detected RP, AC, PC, and VN4) in it to be written., ", - exists=True) + desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the output image space (the detected RP, AC, PC, and VN4) in it to be written., ", + exists=True, + ) outputMRML = File( - desc= - ", The filename for the new subject-specific scene definition file in the same format produced by Slicer3 (in .mrml format). Only the components that were specified by the user on command line would be generated. 
Compatible components include inputVolume, outputVolume, outputLandmarksInInputSpace, outputLandmarksInACPCAlignedSpace, and outputTransform., ", - exists=True) + desc=", The filename for the new subject-specific scene definition file in the same format produced by Slicer3 (in .mrml format). Only the components that were specified by the user on command line would be generated. Compatible components include inputVolume, outputVolume, outputLandmarksInInputSpace, outputLandmarksInACPCAlignedSpace, and outputTransform., ", + exists=True, + ) outputVerificationScript = File( - desc= - ", The filename for the Slicer3 script that verifies the aligned landmarks against the aligned image file. This will happen only in conjunction with saveOutputLandmarks and an outputVolume., ", - exists=True) + desc=", The filename for the Slicer3 script that verifies the aligned landmarks against the aligned image file. This will happen only in conjunction with saveOutputLandmarks and an outputVolume., ", + exists=True, + ) outputUntransformedClippedVolume = File( - desc= - "Output image in which to store neck-clipped input image, with the use of --acLowerBound and maybe --cutOutHeadInUntransformedVolume.", - exists=True) + desc="Output image in which to store neck-clipped input image, with the use of --acLowerBound and maybe --cutOutHeadInUntransformedVolume.", + exists=True, + ) writeBranded2DImage = File( - desc= - ", The filename for the 2D .png branded midline debugging image. This will happen only in conjunction with requesting an outputVolume., ", - exists=True) + desc=", The filename for the 2D .png branded midline debugging image. This will happen only in conjunction with requesting an outputVolume., ", + exists=True, + ) resultsDir = Directory( - desc= - ", The directory for the debuging images to be written., ", - exists=True) + desc=", The directory for the debugging images to be written., ", + exists=True, + ) class BRAINSConstellationDetector(SEMLikeCommandLine): """title: Brain Landmark Constellation Detector (BRAINS) -category: Segmentation.Specialized + category: Segmentation.Specialized -description: This program will find the mid-sagittal plane, a constellation of landmarks in a volume, and create an AC/PC aligned data set with the AC point at the center of the voxel lattice (labeled at the origin of the image physical space.) Part of this work is an extention of the algorithms originally described by Dr. Babak A. Ardekani, Alvin H. Bachman, Model-based automatic detection of the anterior and posterior commissures on MRI scans, NeuroImage, Volume 46, Issue 3, 1 July 2009, Pages 677-682, ISSN 1053-8119, DOI: 10.1016/j.neuroimage.2009.02.030. (http://www.sciencedirect.com/science/article/B6WNP-4VRP25C-4/2/8207b962a38aa83c822c6379bc43fe4c) + description: This program will find the mid-sagittal plane, a constellation of landmarks in a volume, and create an AC/PC aligned data set with the AC point at the center of the voxel lattice (labeled at the origin of the image physical space.) Part of this work is an extension of the algorithms originally described by Dr. Babak A. Ardekani, Alvin H. Bachman, Model-based automatic detection of the anterior and posterior commissures on MRI scans, NeuroImage, Volume 46, Issue 3, 1 July 2009, Pages 677-682, ISSN 1053-8119, DOI: 10.1016/j.neuroimage.2009.02.030. 
(http://www.sciencedirect.com/science/article/B6WNP-4VRP25C-4/2/8207b962a38aa83c822c6379bc43fe4c) -version: 1.0 + version: 1.0 -documentation-url: http://www.nitrc.org/projects/brainscdetector/ - -""" + documentation-url: http://www.nitrc.org/projects/brainscdetector/ + """ input_spec = BRAINSConstellationDetectorInputSpec output_spec = BRAINSConstellationDetectorOutputSpec _cmd = " BRAINSConstellationDetector " _outputs_filenames = { - 'outputVolume': - 'outputVolume.nii.gz', - 'outputMRML': - 'outputMRML.mrml', - 'resultsDir': - 'resultsDir', - 'outputResampledVolume': - 'outputResampledVolume.nii.gz', - 'outputTransform': - 'outputTransform.h5', - 'writeBranded2DImage': - 'writeBranded2DImage.png', - 'outputLandmarksInACPCAlignedSpace': - 'outputLandmarksInACPCAlignedSpace.fcsv', - 'outputLandmarksInInputSpace': - 'outputLandmarksInInputSpace.fcsv', - 'outputUntransformedClippedVolume': - 'outputUntransformedClippedVolume.nii.gz', - 'outputVerificationScript': - 'outputVerificationScript.sh' + "outputVolume": "outputVolume.nii.gz", + "outputMRML": "outputMRML.mrml", + "resultsDir": "resultsDir", + "outputResampledVolume": "outputResampledVolume.nii.gz", + "outputTransform": "outputTransform.h5", + "writeBranded2DImage": "writeBranded2DImage.png", + "outputLandmarksInACPCAlignedSpace": "outputLandmarksInACPCAlignedSpace.fcsv", + "outputLandmarksInInputSpace": "outputLandmarksInInputSpace.fcsv", + "outputUntransformedClippedVolume": "outputUntransformedClippedVolume.nii.gz", + "outputVerificationScript": "outputVerificationScript.sh", } _redirect_x = False @@ -483,37 +490,42 @@ class BRAINSCreateLabelMapFromProbabilityMapsInputSpec(CommandLineInputSpec): inputProbabilityVolume = InputMultiPath( File(exists=True), desc="The list of proobabilityimages.", - argstr="--inputProbabilityVolume %s...") + argstr="--inputProbabilityVolume %s...", + ) priorLabelCodes = InputMultiPath( traits.Int, - desc= - "A list of PriorLabelCode values used for coding the output label images", + desc="A list of PriorLabelCode values used for coding the output label images", sep=",", - argstr="--priorLabelCodes %s") + argstr="--priorLabelCodes %s", + ) foregroundPriors = InputMultiPath( traits.Int, desc="A list: For each Prior Label, 1 if foreground, 0 if background", sep=",", - argstr="--foregroundPriors %s") + argstr="--foregroundPriors %s", + ) nonAirRegionMask = File( - desc= - "a mask representing the \'NonAirRegion\' -- Just force pixels in this region to zero", + desc="a mask representing the 'NonAirRegion' -- Just force pixels in this region to zero", exists=True, - argstr="--nonAirRegionMask %s") + argstr="--nonAirRegionMask %s", + ) inclusionThreshold = traits.Float( - desc="tolerance for inclusion", argstr="--inclusionThreshold %f") + desc="tolerance for inclusion", argstr="--inclusionThreshold %f" + ) dirtyLabelVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="the labels prior to cleaning", - argstr="--dirtyLabelVolume %s") + argstr="--dirtyLabelVolume %s", + ) cleanLabelVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="the foreground labels volume", - argstr="--cleanLabelVolume %s") + argstr="--cleanLabelVolume %s", + ) class BRAINSCreateLabelMapFromProbabilityMapsOutputSpec(TraitedSpec): @@ -524,18 +536,17 @@ class BRAINSCreateLabelMapFromProbabilityMapsOutputSpec(TraitedSpec): class BRAINSCreateLabelMapFromProbabilityMaps(SEMLikeCommandLine): """title: Create Label Map From Probability Maps (BRAINS) -category: Segmentation.Specialized - -description: Given A 
list of Probability Maps, generate a LabelMap. + category: Segmentation.Specialized -""" + description: Given a list of Probability Maps, generate a LabelMap. + """ input_spec = BRAINSCreateLabelMapFromProbabilityMapsInputSpec output_spec = BRAINSCreateLabelMapFromProbabilityMapsOutputSpec _cmd = " BRAINSCreateLabelMapFromProbabilityMaps " _outputs_filenames = { - 'dirtyLabelVolume': 'dirtyLabelVolume.nii', - 'cleanLabelVolume': 'cleanLabelVolume.nii' + "dirtyLabelVolume": "dirtyLabelVolume.nii", + "cleanLabelVolume": "cleanLabelVolume.nii", } _redirect_x = False @@ -544,125 +555,132 @@ class BinaryMaskEditorBasedOnLandmarksInputSpec(CommandLineInputSpec): inputBinaryVolume = File( desc="Input binary image in which to be edited", exists=True, - argstr="--inputBinaryVolume %s") + argstr="--inputBinaryVolume %s", + ) outputBinaryVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output binary image in which to be edited", - argstr="--outputBinaryVolume %s") + argstr="--outputBinaryVolume %s", + ) inputLandmarksFilename = File( - desc= - " The filename for the landmark definition file in the same format produced by Slicer3 (.fcsv). ", + desc=" The filename for the landmark definition file in the same format produced by Slicer3 (.fcsv). ", exists=True, - argstr="--inputLandmarksFilename %s") + argstr="--inputLandmarksFilename %s", + ) inputLandmarkNames = InputMultiPath( traits.Str, - desc= - " A target input landmark name to be edited. This should be listed in the inputLandmakrFilename Given. ", + desc=" A target input landmark name to be edited. This should be listed in the inputLandmarksFilename given. ", sep=",", - argstr="--inputLandmarkNames %s") + argstr="--inputLandmarkNames %s", + ) setCutDirectionForLandmark = InputMultiPath( traits.Str, - desc= - "Setting the cutting out direction of the input binary image to the one of anterior, posterior, left, right, superior or posterior. (ENUMERATION: ANTERIOR, POSTERIOR, LEFT, RIGHT, SUPERIOR, POSTERIOR) ", + desc="Setting the cutting out direction of the input binary image to the one of anterior, posterior, left, right, superior or posterior. (ENUMERATION: ANTERIOR, POSTERIOR, LEFT, RIGHT, SUPERIOR, POSTERIOR) ", sep=",", - argstr="--setCutDirectionForLandmark %s") + argstr="--setCutDirectionForLandmark %s", + ) setCutDirectionForObliquePlane = InputMultiPath( traits.Str, - desc= - "If this is true, the mask will be thresholded out to the direction of inferior, posterior, and/or left. Default behavrior is that cutting out to the direction of superior, anterior and/or right. ", + desc="If this is true, the mask will be thresholded out to the direction of inferior, posterior, and/or left. Default behavior is cutting out to the direction of superior, anterior and/or right. ", sep=",", - argstr="--setCutDirectionForObliquePlane %s") + argstr="--setCutDirectionForObliquePlane %s", + ) inputLandmarkNamesForObliquePlane = InputMultiPath( traits.Str, - desc= - " Three subset landmark names of inputLandmarksFilename for a oblique plane computation. The plane computed for binary volume editing. 
", sep=",", - argstr="--inputLandmarkNamesForObliquePlane %s") + argstr="--inputLandmarkNamesForObliquePlane %s", + ) class BinaryMaskEditorBasedOnLandmarksOutputSpec(TraitedSpec): outputBinaryVolume = File( - desc="Output binary image in which to be edited", exists=True) + desc="Output binary image in which to be edited", exists=True + ) class BinaryMaskEditorBasedOnLandmarks(SEMLikeCommandLine): """title: BRAINS Binary Mask Editor Based On Landmarks(BRAINS) -category: Segmentation.Specialized + category: Segmentation.Specialized -version: 1.0 + version: 1.0 -documentation-url: http://www.nitrc.org/projects/brainscdetector/ - -""" + documentation-url: http://www.nitrc.org/projects/brainscdetector/ + """ input_spec = BinaryMaskEditorBasedOnLandmarksInputSpec output_spec = BinaryMaskEditorBasedOnLandmarksOutputSpec _cmd = " BinaryMaskEditorBasedOnLandmarks " - _outputs_filenames = {'outputBinaryVolume': 'outputBinaryVolume.nii'} + _outputs_filenames = {"outputBinaryVolume": "outputBinaryVolume.nii"} _redirect_x = False class BRAINSMultiSTAPLEInputSpec(CommandLineInputSpec): inputCompositeT1Volume = File( - desc= - "Composite T1, all label maps transofrmed into the space for this image.", + desc="Composite T1, all label maps transformed into the space for this image.", exists=True, - argstr="--inputCompositeT1Volume %s") + argstr="--inputCompositeT1Volume %s", + ) inputLabelVolume = InputMultiPath( File(exists=True), desc="The list of proobabilityimages.", - argstr="--inputLabelVolume %s...") + argstr="--inputLabelVolume %s...", + ) inputTransform = InputMultiPath( File(exists=True), desc="transforms to apply to label volumes", - argstr="--inputTransform %s...") + argstr="--inputTransform %s...", + ) labelForUndecidedPixels = traits.Int( - desc="Label for undecided pixels", - argstr="--labelForUndecidedPixels %d") + desc="Label for undecided pixels", argstr="--labelForUndecidedPixels %d" + ) resampledVolumePrefix = traits.Str( desc="if given, write out resampled volumes with this prefix", - argstr="--resampledVolumePrefix %s") + argstr="--resampledVolumePrefix %s", + ) skipResampling = traits.Bool( - desc="Omit resampling images into reference space", - argstr="--skipResampling ") + desc="Omit resampling images into reference space", argstr="--skipResampling " + ) outputMultiSTAPLE = traits.Either( traits.Bool, File(), hash_files=False, desc="the MultiSTAPLE average of input label volumes", - argstr="--outputMultiSTAPLE %s") + argstr="--outputMultiSTAPLE %s", + ) outputConfusionMatrix = traits.Either( traits.Bool, File(), hash_files=False, desc="Confusion Matrix", - argstr="--outputConfusionMatrix %s") + argstr="--outputConfusionMatrix %s", + ) class BRAINSMultiSTAPLEOutputSpec(TraitedSpec): outputMultiSTAPLE = File( - desc="the MultiSTAPLE average of input label volumes", exists=True) + desc="the MultiSTAPLE average of input label volumes", exists=True + ) outputConfusionMatrix = File(desc="Confusion Matrix", exists=True) class BRAINSMultiSTAPLE(SEMLikeCommandLine): """title: Create best representative label map) -category: Segmentation.Specialized + category: Segmentation.Specialized -description: given a list of label map images, create a representative/average label map. - -""" + description: given a list of label map images, create a representative/average label map. 
+ """ input_spec = BRAINSMultiSTAPLEInputSpec output_spec = BRAINSMultiSTAPLEOutputSpec _cmd = " BRAINSMultiSTAPLE " _outputs_filenames = { - 'outputMultiSTAPLE': 'outputMultiSTAPLE.nii', - 'outputConfusionMatrix': 'outputConfusionMatrixh5|mat|txt' + "outputMultiSTAPLE": "outputMultiSTAPLE.nii", + "outputConfusionMatrix": "outputConfusionMatrixh5|mat|txt", } _redirect_x = False @@ -671,90 +689,97 @@ class BRAINSABCInputSpec(CommandLineInputSpec): inputVolumes = InputMultiPath( File(exists=True), desc="The list of input image files to be segmented.", - argstr="--inputVolumes %s...") + argstr="--inputVolumes %s...", + ) atlasDefinition = File( desc="Contains all parameters for Atlas", exists=True, - argstr="--atlasDefinition %s") + argstr="--atlasDefinition %s", + ) restoreState = File( desc="The initial state for the registration process", exists=True, - argstr="--restoreState %s") + argstr="--restoreState %s", + ) saveState = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Filename to which save the final state of the registration", - argstr="--saveState %s") + desc="(optional) Filename to which save the final state of the registration", + argstr="--saveState %s", + ) inputVolumeTypes = InputMultiPath( traits.Str, desc="The list of input image types corresponding to the inputVolumes.", sep=",", - argstr="--inputVolumeTypes %s") + argstr="--inputVolumeTypes %s", + ) outputDir = traits.Either( traits.Bool, Directory(), hash_files=False, - desc="Ouput directory", - argstr="--outputDir %s") + desc="Output directory", + argstr="--outputDir %s", + ) atlasToSubjectTransformType = traits.Enum( "Identity", "Rigid", "Affine", "BSpline", "SyN", - desc= - " What type of linear transform type do you want to use to register the atlas to the reference subject image.", - argstr="--atlasToSubjectTransformType %s") + desc=" What type of linear transform type do you want to use to register the atlas to the reference subject image.", + argstr="--atlasToSubjectTransformType %s", + ) atlasToSubjectTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="The transform from atlas to the subject", - argstr="--atlasToSubjectTransform %s") + argstr="--atlasToSubjectTransform %s", + ) atlasToSubjectInitialTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="The initial transform from atlas to the subject", - argstr="--atlasToSubjectInitialTransform %s") + argstr="--atlasToSubjectInitialTransform %s", + ) subjectIntermodeTransformType = traits.Enum( "Identity", "Rigid", "Affine", "BSpline", - desc= - " What type of linear transform type do you want to use to register the atlas to the reference subject image.", - argstr="--subjectIntermodeTransformType %s") + desc=" What type of linear transform type do you want to use to register the atlas to the reference subject image.", + argstr="--subjectIntermodeTransformType %s", + ) outputVolumes = traits.Either( traits.Bool, - InputMultiPath(File(), ), + InputMultiPath(File()), hash_files=False, - desc= - "Corrected Output Images: should specify the same number of images as inputVolume, if only one element is given, then it is used as a file pattern where %s is replaced by the imageVolumeType, and %d by the index list location.", - argstr="--outputVolumes %s...") + desc="Corrected Output Images: should specify the same number of images as inputVolume, if only one element is given, then it is used as a file pattern where %s is replaced by the imageVolumeType, and %d by the index list location.", + 
argstr="--outputVolumes %s...", + ) outputLabels = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Label Image", - argstr="--outputLabels %s") + argstr="--outputLabels %s", + ) outputDirtyLabels = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Dirty Label Image", - argstr="--outputDirtyLabels %s") + argstr="--outputDirtyLabels %s", + ) posteriorTemplate = traits.Str( desc="filename template for Posterior output files", - argstr="--posteriorTemplate %s") + argstr="--posteriorTemplate %s", + ) outputFormat = traits.Enum( - "NIFTI", - "Meta", - "Nrrd", - desc="Output format", - argstr="--outputFormat %s") + "NIFTI", "Meta", "Nrrd", desc="Output format", argstr="--outputFormat %s" + ) interpolationMode = traits.Enum( "BSpline", "NearestNeighbor", @@ -766,147 +791,158 @@ class BRAINSABCInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", - argstr="--interpolationMode %s") - maxIterations = traits.Int( - desc="Filter iterations", argstr="--maxIterations %d") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", + argstr="--interpolationMode %s", + ) + maxIterations = traits.Int(desc="Filter iterations", argstr="--maxIterations %d") medianFilterSize = InputMultiPath( traits.Int, - desc= - "The radius for the optional MedianImageFilter preprocessing in all 3 directions.", + desc="The radius for the optional MedianImageFilter preprocessing in all 3 directions.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) filterIteration = traits.Int( - desc="Filter iterations", argstr="--filterIteration %d") + desc="Filter iterations", argstr="--filterIteration %d" + ) filterTimeStep = traits.Float( - desc= - "Filter time step should be less than (PixelSpacing/(1^(DIM+1)), value is set to negative, then allow automatic setting of this value. ", - argstr="--filterTimeStep %f") + desc="Filter time step should be less than (PixelSpacing/(1^(DIM+1)), value is set to negative, then allow automatic setting of this value. ", + argstr="--filterTimeStep %f", + ) filterMethod = traits.Enum( "None", "CurvatureFlow", "GradientAnisotropicDiffusion", "Median", desc="Filter method for preprocessing of registration", - argstr="--filterMethod %s") - maxBiasDegree = traits.Int( - desc="Maximum bias degree", argstr="--maxBiasDegree %d") + argstr="--filterMethod %s", + ) + maxBiasDegree = traits.Int(desc="Maximum bias degree", argstr="--maxBiasDegree %d") useKNN = traits.Bool( - desc="Use the KNN stage of estimating posteriors.", argstr="--useKNN ") + desc="Use the KNN stage of estimating posteriors.", argstr="--useKNN " + ) purePlugsThreshold = traits.Float( - desc= - "If this threshold value is greater than zero, only pure samples are used to compute the distributions in EM classification, and only pure samples are used for KNN training. The default value is set to 0, that means not using pure plugs. 
However, a value of 0.2 is suggested if you want to activate using pure plugs option.", - argstr="--purePlugsThreshold %f") + desc="If this threshold value is greater than zero, only pure samples are used to compute the distributions in EM classification, and only pure samples are used for KNN training. The default value is set to 0, which means not using pure plugs. However, a value of 0.2 is suggested if you want to activate the pure plugs option.", + argstr="--purePlugsThreshold %f", + ) numberOfSubSamplesInEachPlugArea = InputMultiPath( traits.Int, - desc= - "Number of continous index samples taken at each direction of lattice space for each plug volume.", + desc="Number of continuous index samples taken at each direction of lattice space for each plug volume.", sep=",", - argstr="--numberOfSubSamplesInEachPlugArea %s") + argstr="--numberOfSubSamplesInEachPlugArea %s", + ) atlasWarpingOff = traits.Bool( - desc="Deformable registration of atlas to subject", - argstr="--atlasWarpingOff ") + desc="Deformable registration of atlas to subject", argstr="--atlasWarpingOff " + ) gridSize = InputMultiPath( traits.Int, desc="Grid size for atlas warping with BSplines", sep=",", - argstr="--gridSize %s") + argstr="--gridSize %s", + ) defaultSuffix = traits.Str(argstr="--defaultSuffix %s") implicitOutputs = traits.Either( traits.Bool, - InputMultiPath(File(), ), + InputMultiPath(File()), hash_files=False, - desc= - "Outputs to be made available to NiPype. Needed because not all BRAINSABC outputs have command line arguments.", - argstr="--implicitOutputs %s...") + desc="Outputs to be made available to NiPype. Needed because not all BRAINSABC outputs have command line arguments.", + argstr="--implicitOutputs %s...", + ) debuglevel = traits.Int( - desc= - "Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", - argstr="--debuglevel %d") + desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", + argstr="--debuglevel %d", + ) writeLess = traits.Bool( desc="Does not write posteriors and filtered, bias corrected images", - argstr="--writeLess ") + argstr="--writeLess ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSABCOutputSpec(TraitedSpec): saveState = File( - desc= - "(optional) Filename to which save the final state of the registration", - exists=True) - outputDir = Directory(desc="Ouput directory", exists=True) + desc="(optional) Filename to which to save the final state of the registration", + exists=True, + ) + outputDir = Directory(desc="Output directory", exists=True) atlasToSubjectTransform = File( - desc="The transform from atlas to the subject", exists=True) + desc="The transform from atlas to the subject", exists=True + ) atlasToSubjectInitialTransform = File( - desc="The initial transform from atlas to the subject", exists=True) + desc="The initial transform from atlas to the subject", exists=True + ) outputVolumes = OutputMultiPath( File(exists=True), - desc= - "Corrected Output Images: should specify the same number of images as inputVolume, if only one element is given, then it is used as a file pattern where %s is replaced by the imageVolumeType, and %d by the index list location." 
+ desc="Corrected Output Images: should specify the same number of images as inputVolume, if only one element is given, then it is used as a file pattern where %s is replaced by the imageVolumeType, and %d by the index list location.", ) outputLabels = File(desc="Output Label Image", exists=True) outputDirtyLabels = File(desc="Output Dirty Label Image", exists=True) implicitOutputs = OutputMultiPath( File(exists=True), - desc= - "Outputs to be made available to NiPype. Needed because not all BRAINSABC outputs have command line arguments." + desc="Outputs to be made available to NiPype. Needed because not all BRAINSABC outputs have command line arguments.", ) class BRAINSABC(SEMLikeCommandLine): """title: Intra-subject registration, bias Correction, and tissue classification (BRAINS) -category: Segmentation.Specialized - -description: Atlas-based tissue segmentation method. This is an algorithmic extension of work done by XXXX at UNC and Utah XXXX need more description here. + category: Segmentation.Specialized -""" + description: Atlas-based tissue segmentation method. This is an algorithmic extension of work done by XXXX at UNC and Utah XXXX need more description here. + """ input_spec = BRAINSABCInputSpec output_spec = BRAINSABCOutputSpec _cmd = " BRAINSABC " _outputs_filenames = { - 'saveState': 'saveState.h5', - 'outputLabels': 'outputLabels.nii.gz', - 'atlasToSubjectTransform': 'atlasToSubjectTransform.h5', - 'atlasToSubjectInitialTransform': 'atlasToSubjectInitialTransform.h5', - 'outputDirtyLabels': 'outputDirtyLabels.nii.gz', - 'outputVolumes': 'outputVolumes.nii.gz', - 'outputDir': 'outputDir', - 'implicitOutputs': 'implicitOutputs.nii.gz' + "saveState": "saveState.h5", + "outputLabels": "outputLabels.nii.gz", + "atlasToSubjectTransform": "atlasToSubjectTransform.h5", + "atlasToSubjectInitialTransform": "atlasToSubjectInitialTransform.h5", + "outputDirtyLabels": "outputDirtyLabels.nii.gz", + "outputVolumes": "outputVolumes.nii.gz", + "outputDir": "outputDir", + "implicitOutputs": "implicitOutputs.nii.gz", } _redirect_x = False class ESLRInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Input Label Volume", exists=True, argstr="--inputVolume %s") + desc="Input Label Volume", exists=True, argstr="--inputVolume %s" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Label Volume", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) low = traits.Int( - desc="The lower bound of the labels to be used.", argstr="--low %d") + desc="The lower bound of the labels to be used.", argstr="--low %d" + ) high = traits.Int( - desc="The higher bound of the labels to be used.", argstr="--high %d") + desc="The higher bound of the labels to be used.", argstr="--high %d" + ) closingSize = traits.Int( - desc="The closing size for hole filling.", argstr="--closingSize %d") + desc="The closing size for hole filling.", argstr="--closingSize %d" + ) openingSize = traits.Int( - desc="The opening size for hole filling.", argstr="--openingSize %d") + desc="The opening size for hole filling.", argstr="--openingSize %d" + ) safetySize = traits.Int( - desc="The safetySize size for the clipping region.", - argstr="--safetySize %d") + desc="The safetySize size for the clipping region.", argstr="--safetySize %d" + ) preserveOutside = traits.Bool( desc="For values outside the specified range, preserve those values.", - argstr="--preserveOutside ") + argstr="--preserveOutside ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of 
threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class ESLROutputSpec(TraitedSpec): @@ -916,14 +952,13 @@ class ESLROutputSpec(TraitedSpec): class ESLR(SEMLikeCommandLine): """title: Clean Contiguous Label Map (BRAINS) -category: Segmentation.Specialized - -description: From a range of label map values, extract the largest contiguous region of those labels + category: Segmentation.Specialized -""" + description: From a range of label map values, extract the largest contiguous region of those labels + """ input_spec = ESLRInputSpec output_spec = ESLROutputSpec _cmd = " ESLR " - _outputs_filenames = {'outputVolume': 'outputVolume.nii.gz'} + _outputs_filenames = {"outputVolume": "outputVolume.nii.gz"} _redirect_x = False diff --git a/nipype/interfaces/semtools/segmentation/tests/__init__.py b/nipype/interfaces/semtools/segmentation/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/segmentation/tests/__init__.py +++ b/nipype/interfaces/semtools/segmentation/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSABC.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSABC.py index 2d66884522..5e66b2d82c 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSABC.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSABC.py @@ -1,101 +1,154 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import BRAINSABC def test_BRAINSABC_inputs(): input_map = dict( - args=dict(argstr='%s', ), - atlasDefinition=dict(argstr='--atlasDefinition %s', ), + args=dict( + argstr="%s", + ), + atlasDefinition=dict( + argstr="--atlasDefinition %s", + extensions=None, + ), atlasToSubjectInitialTransform=dict( - argstr='--atlasToSubjectInitialTransform %s', + argstr="--atlasToSubjectInitialTransform %s", hash_files=False, ), atlasToSubjectTransform=dict( - argstr='--atlasToSubjectTransform %s', + argstr="--atlasToSubjectTransform %s", hash_files=False, ), atlasToSubjectTransformType=dict( - argstr='--atlasToSubjectTransformType %s', ), - atlasWarpingOff=dict(argstr='--atlasWarpingOff ', ), - debuglevel=dict(argstr='--debuglevel %d', ), - defaultSuffix=dict(argstr='--defaultSuffix %s', ), + argstr="--atlasToSubjectTransformType %s", + ), + atlasWarpingOff=dict( + argstr="--atlasWarpingOff ", + ), + debuglevel=dict( + argstr="--debuglevel %d", + ), + defaultSuffix=dict( + argstr="--defaultSuffix %s", + ), environ=dict( nohash=True, usedefault=True, ), - filterIteration=dict(argstr='--filterIteration %d', ), - filterMethod=dict(argstr='--filterMethod %s', ), - filterTimeStep=dict(argstr='--filterTimeStep %f', ), + filterIteration=dict( + argstr="--filterIteration %d", + ), + filterMethod=dict( + argstr="--filterMethod %s", + ), + filterTimeStep=dict( + argstr="--filterTimeStep %f", + ), gridSize=dict( - argstr='--gridSize %s', - sep=',', + argstr="--gridSize %s", + sep=",", ), implicitOutputs=dict( - argstr='--implicitOutputs %s...', + argstr="--implicitOutputs %s...", hash_files=False, ), inputVolumeTypes=dict( - argstr='--inputVolumeTypes %s', - sep=',', + argstr="--inputVolumeTypes %s", + sep=",", + ), + inputVolumes=dict( + argstr="--inputVolumes %s...", + ), + interpolationMode=dict( + argstr="--interpolationMode %s", + ), + maxBiasDegree=dict( + argstr="--maxBiasDegree %d", + ), + maxIterations=dict( + argstr="--maxIterations %d", ), - 
inputVolumes=dict(argstr='--inputVolumes %s...', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - maxBiasDegree=dict(argstr='--maxBiasDegree %d', ), - maxIterations=dict(argstr='--maxIterations %d', ), medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', + argstr="--medianFilterSize %s", + sep=",", ), numberOfSubSamplesInEachPlugArea=dict( - argstr='--numberOfSubSamplesInEachPlugArea %s', - sep=',', + argstr="--numberOfSubSamplesInEachPlugArea %s", + sep=",", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), outputDir=dict( - argstr='--outputDir %s', + argstr="--outputDir %s", hash_files=False, ), outputDirtyLabels=dict( - argstr='--outputDirtyLabels %s', + argstr="--outputDirtyLabels %s", hash_files=False, ), - outputFormat=dict(argstr='--outputFormat %s', ), + outputFormat=dict( + argstr="--outputFormat %s", + ), outputLabels=dict( - argstr='--outputLabels %s', + argstr="--outputLabels %s", hash_files=False, ), outputVolumes=dict( - argstr='--outputVolumes %s...', + argstr="--outputVolumes %s...", hash_files=False, ), - posteriorTemplate=dict(argstr='--posteriorTemplate %s', ), - purePlugsThreshold=dict(argstr='--purePlugsThreshold %f', ), - restoreState=dict(argstr='--restoreState %s', ), + posteriorTemplate=dict( + argstr="--posteriorTemplate %s", + ), + purePlugsThreshold=dict( + argstr="--purePlugsThreshold %f", + ), + restoreState=dict( + argstr="--restoreState %s", + extensions=None, + ), saveState=dict( - argstr='--saveState %s', + argstr="--saveState %s", hash_files=False, ), subjectIntermodeTransformType=dict( - argstr='--subjectIntermodeTransformType %s', ), - useKNN=dict(argstr='--useKNN ', ), - writeLess=dict(argstr='--writeLess ', ), + argstr="--subjectIntermodeTransformType %s", + ), + useKNN=dict( + argstr="--useKNN ", + ), + writeLess=dict( + argstr="--writeLess ", + ), ) inputs = BRAINSABC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSABC_outputs(): output_map = dict( - atlasToSubjectInitialTransform=dict(), - atlasToSubjectTransform=dict(), + atlasToSubjectInitialTransform=dict( + extensions=None, + ), + atlasToSubjectTransform=dict( + extensions=None, + ), implicitOutputs=dict(), outputDir=dict(), - outputDirtyLabels=dict(), - outputLabels=dict(), + outputDirtyLabels=dict( + extensions=None, + ), + outputLabels=dict( + extensions=None, + ), outputVolumes=dict(), - saveState=dict(), + saveState=dict( + extensions=None, + ), ) outputs = BRAINSABC.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSConstellationDetector.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSConstellationDetector.py index 9b2d10061c..2f77f419e7 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSConstellationDetector.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSConstellationDetector.py @@ -1,120 +1,196 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import BRAINSConstellationDetector def test_BRAINSConstellationDetector_inputs(): input_map = dict( - BackgroundFillValue=dict(argstr='--BackgroundFillValue %s', ), - LLSModel=dict(argstr='--LLSModel %s', ), - acLowerBound=dict(argstr='--acLowerBound %f', ), - args=dict(argstr='%s', ), - atlasLandmarkWeights=dict(argstr='--atlasLandmarkWeights %s', 
), - atlasLandmarks=dict(argstr='--atlasLandmarks %s', ), - atlasVolume=dict(argstr='--atlasVolume %s', ), - cutOutHeadInOutputVolume=dict(argstr='--cutOutHeadInOutputVolume ', ), - debug=dict(argstr='--debug ', ), + BackgroundFillValue=dict( + argstr="--BackgroundFillValue %s", + ), + LLSModel=dict( + argstr="--LLSModel %s", + extensions=None, + ), + acLowerBound=dict( + argstr="--acLowerBound %f", + ), + args=dict( + argstr="%s", + ), + atlasLandmarkWeights=dict( + argstr="--atlasLandmarkWeights %s", + extensions=None, + ), + atlasLandmarks=dict( + argstr="--atlasLandmarks %s", + extensions=None, + ), + atlasVolume=dict( + argstr="--atlasVolume %s", + extensions=None, + ), + cutOutHeadInOutputVolume=dict( + argstr="--cutOutHeadInOutputVolume ", + ), + debug=dict( + argstr="--debug ", + ), environ=dict( nohash=True, usedefault=True, ), forceACPoint=dict( - argstr='--forceACPoint %s', - sep=',', + argstr="--forceACPoint %s", + sep=",", ), forceHoughEyeDetectorReportFailure=dict( - argstr='--forceHoughEyeDetectorReportFailure ', ), + argstr="--forceHoughEyeDetectorReportFailure ", + ), forcePCPoint=dict( - argstr='--forcePCPoint %s', - sep=',', + argstr="--forcePCPoint %s", + sep=",", ), forceRPPoint=dict( - argstr='--forceRPPoint %s', - sep=',', + argstr="--forceRPPoint %s", + sep=",", ), forceVN4Point=dict( - argstr='--forceVN4Point %s', - sep=',', - ), - houghEyeDetectorMode=dict(argstr='--houghEyeDetectorMode %d', ), - inputLandmarksEMSP=dict(argstr='--inputLandmarksEMSP %s', ), - inputTemplateModel=dict(argstr='--inputTemplateModel %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - mspQualityLevel=dict(argstr='--mspQualityLevel %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - otsuPercentileThreshold=dict(argstr='--otsuPercentileThreshold %f', ), + argstr="--forceVN4Point %s", + sep=",", + ), + houghEyeDetectorMode=dict( + argstr="--houghEyeDetectorMode %d", + ), + inputLandmarksEMSP=dict( + argstr="--inputLandmarksEMSP %s", + extensions=None, + ), + inputTemplateModel=dict( + argstr="--inputTemplateModel %s", + extensions=None, + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + interpolationMode=dict( + argstr="--interpolationMode %s", + ), + mspQualityLevel=dict( + argstr="--mspQualityLevel %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), + otsuPercentileThreshold=dict( + argstr="--otsuPercentileThreshold %f", + ), outputLandmarksInACPCAlignedSpace=dict( - argstr='--outputLandmarksInACPCAlignedSpace %s', + argstr="--outputLandmarksInACPCAlignedSpace %s", hash_files=False, ), outputLandmarksInInputSpace=dict( - argstr='--outputLandmarksInInputSpace %s', + argstr="--outputLandmarksInInputSpace %s", hash_files=False, ), outputMRML=dict( - argstr='--outputMRML %s', + argstr="--outputMRML %s", hash_files=False, ), outputResampledVolume=dict( - argstr='--outputResampledVolume %s', + argstr="--outputResampledVolume %s", hash_files=False, ), outputTransform=dict( - argstr='--outputTransform %s', + argstr="--outputTransform %s", hash_files=False, ), outputUntransformedClippedVolume=dict( - argstr='--outputUntransformedClippedVolume %s', + argstr="--outputUntransformedClippedVolume %s", hash_files=False, ), outputVerificationScript=dict( - argstr='--outputVerificationScript %s', + argstr="--outputVerificationScript %s", hash_files=False, ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - 
rVN4=dict(argstr='--rVN4 %f', ), - rac=dict(argstr='--rac %f', ), - rescaleIntensities=dict(argstr='--rescaleIntensities ', ), + rVN4=dict( + argstr="--rVN4 %f", + ), + rac=dict( + argstr="--rac %f", + ), + rescaleIntensities=dict( + argstr="--rescaleIntensities ", + ), rescaleIntensitiesOutputRange=dict( - argstr='--rescaleIntensitiesOutputRange %s', - sep=',', + argstr="--rescaleIntensitiesOutputRange %s", + sep=",", ), resultsDir=dict( - argstr='--resultsDir %s', + argstr="--resultsDir %s", hash_files=False, ), - rmpj=dict(argstr='--rmpj %f', ), - rpc=dict(argstr='--rpc %f', ), - trimRescaledIntensities=dict(argstr='--trimRescaledIntensities %f', ), - verbose=dict(argstr='--verbose ', ), + rmpj=dict( + argstr="--rmpj %f", + ), + rpc=dict( + argstr="--rpc %f", + ), + trimRescaledIntensities=dict( + argstr="--trimRescaledIntensities %f", + ), + verbose=dict( + argstr="--verbose ", + ), writeBranded2DImage=dict( - argstr='--writeBranded2DImage %s', + argstr="--writeBranded2DImage %s", hash_files=False, ), writedebuggingImagesLevel=dict( - argstr='--writedebuggingImagesLevel %d', ), + argstr="--writedebuggingImagesLevel %d", + ), ) inputs = BRAINSConstellationDetector.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSConstellationDetector_outputs(): output_map = dict( - outputLandmarksInACPCAlignedSpace=dict(), - outputLandmarksInInputSpace=dict(), - outputMRML=dict(), - outputResampledVolume=dict(), - outputTransform=dict(), - outputUntransformedClippedVolume=dict(), - outputVerificationScript=dict(), - outputVolume=dict(), + outputLandmarksInACPCAlignedSpace=dict( + extensions=None, + ), + outputLandmarksInInputSpace=dict( + extensions=None, + ), + outputMRML=dict( + extensions=None, + ), + outputResampledVolume=dict( + extensions=None, + ), + outputTransform=dict( + extensions=None, + ), + outputUntransformedClippedVolume=dict( + extensions=None, + ), + outputVerificationScript=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), resultsDir=dict(), - writeBranded2DImage=dict(), + writeBranded2DImage=dict( + extensions=None, + ), ) outputs = BRAINSConstellationDetector.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCreateLabelMapFromProbabilityMaps.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCreateLabelMapFromProbabilityMaps.py index 9f2b00c311..4a395fbc14 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCreateLabelMapFromProbabilityMaps.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCreateLabelMapFromProbabilityMaps.py @@ -1,17 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import BRAINSCreateLabelMapFromProbabilityMaps def test_BRAINSCreateLabelMapFromProbabilityMaps_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), cleanLabelVolume=dict( - argstr='--cleanLabelVolume %s', + argstr="--cleanLabelVolume %s", hash_files=False, ), dirtyLabelVolume=dict( - argstr='--dirtyLabelVolume %s', + argstr="--dirtyLabelVolume %s", hash_files=False, ), environ=dict( @@ -19,15 +20,22 @@ def test_BRAINSCreateLabelMapFromProbabilityMaps_inputs(): usedefault=True, ), foregroundPriors=dict( - argstr='--foregroundPriors %s', - sep=',', + argstr="--foregroundPriors %s", + sep=",", + ), + inclusionThreshold=dict( + 
argstr="--inclusionThreshold %f", + ), + inputProbabilityVolume=dict( + argstr="--inputProbabilityVolume %s...", + ), + nonAirRegionMask=dict( + argstr="--nonAirRegionMask %s", + extensions=None, ), - inclusionThreshold=dict(argstr='--inclusionThreshold %f', ), - inputProbabilityVolume=dict(argstr='--inputProbabilityVolume %s...', ), - nonAirRegionMask=dict(argstr='--nonAirRegionMask %s', ), priorLabelCodes=dict( - argstr='--priorLabelCodes %s', - sep=',', + argstr="--priorLabelCodes %s", + sep=",", ), ) inputs = BRAINSCreateLabelMapFromProbabilityMaps.input_spec() @@ -35,10 +43,16 @@ def test_BRAINSCreateLabelMapFromProbabilityMaps_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSCreateLabelMapFromProbabilityMaps_outputs(): output_map = dict( - cleanLabelVolume=dict(), - dirtyLabelVolume=dict(), + cleanLabelVolume=dict( + extensions=None, + ), + dirtyLabelVolume=dict( + extensions=None, + ), ) outputs = BRAINSCreateLabelMapFromProbabilityMaps.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCut.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCut.py index 9d66d4a463..e101cde2cc 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCut.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCut.py @@ -1,40 +1,77 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import BRAINSCut def test_BRAINSCut_inputs(): input_map = dict( NoTrainingVectorShuffling=dict( - argstr='--NoTrainingVectorShuffling ', ), - applyModel=dict(argstr='--applyModel ', ), - args=dict(argstr='%s', ), - computeSSEOn=dict(argstr='--computeSSEOn ', ), - createVectors=dict(argstr='--createVectors ', ), + argstr="--NoTrainingVectorShuffling ", + ), + applyModel=dict( + argstr="--applyModel ", + ), + args=dict( + argstr="%s", + ), + computeSSEOn=dict( + argstr="--computeSSEOn ", + ), + createVectors=dict( + argstr="--createVectors ", + ), environ=dict( nohash=True, usedefault=True, ), - generateProbability=dict(argstr='--generateProbability ', ), - histogramEqualization=dict(argstr='--histogramEqualization ', ), - method=dict(argstr='--method %s', ), + generateProbability=dict( + argstr="--generateProbability ", + ), + histogramEqualization=dict( + argstr="--histogramEqualization ", + ), + method=dict( + argstr="--method %s", + ), modelConfigurationFilename=dict( - argstr='--modelConfigurationFilename %s', ), - modelFilename=dict(argstr='--modelFilename %s', ), - multiStructureThreshold=dict(argstr='--multiStructureThreshold ', ), - netConfiguration=dict(argstr='--netConfiguration %s', ), - numberOfTrees=dict(argstr='--numberOfTrees %d', ), - randomTreeDepth=dict(argstr='--randomTreeDepth %d', ), - trainModel=dict(argstr='--trainModel ', ), - trainModelStartIndex=dict(argstr='--trainModelStartIndex %d', ), - validate=dict(argstr='--validate ', ), - verbose=dict(argstr='--verbose %d', ), + argstr="--modelConfigurationFilename %s", + extensions=None, + ), + modelFilename=dict( + argstr="--modelFilename %s", + ), + multiStructureThreshold=dict( + argstr="--multiStructureThreshold ", + ), + netConfiguration=dict( + argstr="--netConfiguration %s", + extensions=None, + ), + numberOfTrees=dict( + argstr="--numberOfTrees %d", + ), + randomTreeDepth=dict( + argstr="--randomTreeDepth %d", + ), + trainModel=dict( + argstr="--trainModel ", + ), + 
trainModelStartIndex=dict( + argstr="--trainModelStartIndex %d", + ), + validate=dict( + argstr="--validate ", + ), + verbose=dict( + argstr="--verbose %d", + ), ) inputs = BRAINSCut.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSCut_outputs(): output_map = dict() outputs = BRAINSCut.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSMultiSTAPLE.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSMultiSTAPLE.py index 826ec19f0e..6ff468ba47 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSMultiSTAPLE.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSMultiSTAPLE.py @@ -1,39 +1,59 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import BRAINSMultiSTAPLE def test_BRAINSMultiSTAPLE_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputCompositeT1Volume=dict(argstr='--inputCompositeT1Volume %s', ), - inputLabelVolume=dict(argstr='--inputLabelVolume %s...', ), - inputTransform=dict(argstr='--inputTransform %s...', ), - labelForUndecidedPixels=dict(argstr='--labelForUndecidedPixels %d', ), + inputCompositeT1Volume=dict( + argstr="--inputCompositeT1Volume %s", + extensions=None, + ), + inputLabelVolume=dict( + argstr="--inputLabelVolume %s...", + ), + inputTransform=dict( + argstr="--inputTransform %s...", + ), + labelForUndecidedPixels=dict( + argstr="--labelForUndecidedPixels %d", + ), outputConfusionMatrix=dict( - argstr='--outputConfusionMatrix %s', + argstr="--outputConfusionMatrix %s", hash_files=False, ), outputMultiSTAPLE=dict( - argstr='--outputMultiSTAPLE %s', + argstr="--outputMultiSTAPLE %s", hash_files=False, ), - resampledVolumePrefix=dict(argstr='--resampledVolumePrefix %s', ), - skipResampling=dict(argstr='--skipResampling ', ), + resampledVolumePrefix=dict( + argstr="--resampledVolumePrefix %s", + ), + skipResampling=dict( + argstr="--skipResampling ", + ), ) inputs = BRAINSMultiSTAPLE.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSMultiSTAPLE_outputs(): output_map = dict( - outputConfusionMatrix=dict(), - outputMultiSTAPLE=dict(), + outputConfusionMatrix=dict( + extensions=None, + ), + outputMultiSTAPLE=dict( + extensions=None, + ), ) outputs = BRAINSMultiSTAPLE.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSROIAuto.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSROIAuto.py index 3e70b75883..f65c8c9b88 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSROIAuto.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSROIAuto.py @@ -1,43 +1,68 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import BRAINSROIAuto def test_BRAINSROIAuto_inputs(): input_map = dict( - ROIAutoDilateSize=dict(argstr='--ROIAutoDilateSize %f', ), - args=dict(argstr='%s', ), - closingSize=dict(argstr='--closingSize %f', ), - cropOutput=dict(argstr='--cropOutput ', ), + ROIAutoDilateSize=dict( + argstr="--ROIAutoDilateSize %f", + ), + args=dict( + argstr="%s", + ), + closingSize=dict( + argstr="--closingSize %f", + ), + 
cropOutput=dict( + argstr="--cropOutput ", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), - maskOutput=dict(argstr='--maskOutput ', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - otsuPercentileThreshold=dict(argstr='--otsuPercentileThreshold %f', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + maskOutput=dict( + argstr="--maskOutput ", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), + otsuPercentileThreshold=dict( + argstr="--otsuPercentileThreshold %f", + ), outputROIMaskVolume=dict( - argstr='--outputROIMaskVolume %s', + argstr="--outputROIMaskVolume %s", hash_files=False, ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - outputVolumePixelType=dict(argstr='--outputVolumePixelType %s', ), + outputVolumePixelType=dict( + argstr="--outputVolumePixelType %s", + ), thresholdCorrectionFactor=dict( - argstr='--thresholdCorrectionFactor %f', ), + argstr="--thresholdCorrectionFactor %f", + ), ) inputs = BRAINSROIAuto.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSROIAuto_outputs(): output_map = dict( - outputROIMaskVolume=dict(), - outputVolume=dict(), + outputROIMaskVolume=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), ) outputs = BRAINSROIAuto.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BinaryMaskEditorBasedOnLandmarks.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BinaryMaskEditorBasedOnLandmarks.py index d71e7003ff..1dd20adecd 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BinaryMaskEditorBasedOnLandmarks.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BinaryMaskEditorBasedOnLandmarks.py @@ -1,36 +1,43 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import BinaryMaskEditorBasedOnLandmarks def test_BinaryMaskEditorBasedOnLandmarks_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputBinaryVolume=dict(argstr='--inputBinaryVolume %s', ), + inputBinaryVolume=dict( + argstr="--inputBinaryVolume %s", + extensions=None, + ), inputLandmarkNames=dict( - argstr='--inputLandmarkNames %s', - sep=',', + argstr="--inputLandmarkNames %s", + sep=",", ), inputLandmarkNamesForObliquePlane=dict( - argstr='--inputLandmarkNamesForObliquePlane %s', - sep=',', + argstr="--inputLandmarkNamesForObliquePlane %s", + sep=",", + ), + inputLandmarksFilename=dict( + argstr="--inputLandmarksFilename %s", + extensions=None, ), - inputLandmarksFilename=dict(argstr='--inputLandmarksFilename %s', ), outputBinaryVolume=dict( - argstr='--outputBinaryVolume %s', + argstr="--outputBinaryVolume %s", hash_files=False, ), setCutDirectionForLandmark=dict( - argstr='--setCutDirectionForLandmark %s', - sep=',', + argstr="--setCutDirectionForLandmark %s", + sep=",", ), setCutDirectionForObliquePlane=dict( - argstr='--setCutDirectionForObliquePlane %s', - sep=',', + argstr="--setCutDirectionForObliquePlane %s", + sep=",", ), ) inputs = BinaryMaskEditorBasedOnLandmarks.input_spec() @@ -38,8 +45,14 @@ def test_BinaryMaskEditorBasedOnLandmarks_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_BinaryMaskEditorBasedOnLandmarks_outputs(): - output_map = dict(outputBinaryVolume=dict(), ) + output_map = dict( + outputBinaryVolume=dict( + extensions=None, + ), + ) outputs = BinaryMaskEditorBasedOnLandmarks.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_ESLR.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_ESLR.py index 2e099b4f56..272327f4db 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_ESLR.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_ESLR.py @@ -1,35 +1,59 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import ESLR def test_ESLR_inputs(): input_map = dict( - args=dict(argstr='%s', ), - closingSize=dict(argstr='--closingSize %d', ), + args=dict( + argstr="%s", + ), + closingSize=dict( + argstr="--closingSize %d", + ), environ=dict( nohash=True, usedefault=True, ), - high=dict(argstr='--high %d', ), - inputVolume=dict(argstr='--inputVolume %s', ), - low=dict(argstr='--low %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - openingSize=dict(argstr='--openingSize %d', ), + high=dict( + argstr="--high %d", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + low=dict( + argstr="--low %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), + openingSize=dict( + argstr="--openingSize %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - preserveOutside=dict(argstr='--preserveOutside ', ), - safetySize=dict(argstr='--safetySize %d', ), + preserveOutside=dict( + argstr="--preserveOutside ", + ), + safetySize=dict( + argstr="--safetySize %d", + ), ) inputs = ESLR.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ESLR_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = ESLR.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/testing/__init__.py b/nipype/interfaces/semtools/testing/__init__.py index 66a4a2262e..aa4b5619c8 100644 --- a/nipype/interfaces/semtools/testing/__init__.py +++ b/nipype/interfaces/semtools/testing/__init__.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .featuredetection import SphericalCoordinateGeneration from .landmarkscompare import LandmarksCompare from .generateaveragelmkfile import GenerateAverageLmkFile diff --git a/nipype/interfaces/semtools/testing/featuredetection.py b/nipype/interfaces/semtools/testing/featuredetection.py index e8f332c0a6..67aa38746d 100644 --- a/nipype/interfaces/semtools/testing/featuredetection.py +++ b/nipype/interfaces/semtools/testing/featuredetection.py @@ -1,18 +1,22 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class 
SphericalCoordinateGenerationInputSpec(CommandLineInputSpec): inputAtlasImage = File( - desc="Input atlas image", exists=True, argstr="--inputAtlasImage %s") + desc="Input atlas image", exists=True, argstr="--inputAtlasImage %s" + ) outputPath = traits.Str( - desc="Output path for rho, phi and theta images", - argstr="--outputPath %s") + desc="Output path for rho, phi and theta images", argstr="--outputPath %s" + ) class SphericalCoordinateGenerationOutputSpec(TraitedSpec): @@ -22,15 +26,14 @@ class SphericalCoordinateGeneration(SEMLikeCommandLine): """title: Spherical Coordinate Generation -category: Testing.FeatureDetection + category: Testing.FeatureDetection -description: get the atlas image as input and generates the rho, phi and theta images. + description: Gets the atlas image as input and generates the rho, phi and theta images. -version: 0.1.0.$Revision: 1 $(alpha) + version: 0.1.0.$Revision: 1 $(alpha) -contributor: Ali Ghayoor - -""" + contributor: Ali Ghayoor + """ input_spec = SphericalCoordinateGenerationInputSpec output_spec = SphericalCoordinateGenerationOutputSpec diff --git a/nipype/interfaces/semtools/testing/generateaveragelmkfile.py b/nipype/interfaces/semtools/testing/generateaveragelmkfile.py index bbb414c366..9235e63dd2 100644 --- a/nipype/interfaces/semtools/testing/generateaveragelmkfile.py +++ b/nipype/interfaces/semtools/testing/generateaveragelmkfile.py @@ -1,10 +1,14 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class GenerateAverageLmkFileInputSpec(CommandLineInputSpec): @@ -12,36 +16,36 @@ traits.Str, desc="Input landmark files names (.fcsv or .wts)", sep=",", - argstr="--inputLandmarkFiles %s") + argstr="--inputLandmarkFiles %s", + ) outputLandmarkFile = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Ouput landmark file name that includes average values for landmarks (.fcsv or .wts)", - argstr="--outputLandmarkFile %s") + desc="Output landmark file name that includes average values for landmarks (.fcsv or .wts)", + argstr="--outputLandmarkFile %s", + ) class GenerateAverageLmkFileOutputSpec(TraitedSpec): outputLandmarkFile = File( - desc= - "Ouput landmark file name that includes average values for landmarks (.fcsv or .wts)", - exists=True) + desc="Output landmark file name that includes average values for landmarks (.fcsv or .wts)", + exists=True, + ) class GenerateAverageLmkFile(SEMLikeCommandLine): """title: Average Fiducials -category: Testing + category: Testing -description: This program gets several fcsv file each one contains several landmarks with the same name but slightly different coordinates. For EACH landmark we compute the average coordination. + description: This program gets several fcsv files, each one containing several landmarks with the same name but slightly different coordinates. For EACH landmark we compute the average coordinate. 
-contributor: Ali Ghayoor - -""" + contributor: Ali Ghayoor + """ input_spec = GenerateAverageLmkFileInputSpec output_spec = GenerateAverageLmkFileOutputSpec _cmd = " GenerateAverageLmkFile " - _outputs_filenames = {'outputLandmarkFile': 'outputLandmarkFile'} + _outputs_filenames = {"outputLandmarkFile": "outputLandmarkFile"} _redirect_x = False diff --git a/nipype/interfaces/semtools/testing/landmarkscompare.py b/nipype/interfaces/semtools/testing/landmarkscompare.py index 872d6d0df0..7d628518a8 100644 --- a/nipype/interfaces/semtools/testing/landmarkscompare.py +++ b/nipype/interfaces/semtools/testing/landmarkscompare.py @@ -1,25 +1,30 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class LandmarksCompareInputSpec(CommandLineInputSpec): inputLandmarkFile1 = File( desc="First input landmark file (.fcsv or .wts)", exists=True, - argstr="--inputLandmarkFile1 %s") + argstr="--inputLandmarkFile1 %s", + ) inputLandmarkFile2 = File( desc="Second input landmark file (.fcsv or .wts)", exists=True, - argstr="--inputLandmarkFile2 %s") + argstr="--inputLandmarkFile2 %s", + ) tolerance = traits.Float( - desc= - "The maximum error (in mm) allowed in each direction of a landmark", - argstr="--tolerance %f") + desc="The maximum error (in mm) allowed in each direction of a landmark", + argstr="--tolerance %f", + ) class LandmarksCompareOutputSpec(TraitedSpec): @@ -29,13 +34,12 @@ class LandmarksCompareOutputSpec(TraitedSpec): class LandmarksCompare(SEMLikeCommandLine): """title: Compare Fiducials -category: Testing + category: Testing -description: Compares two .fcsv or .wts text files and verifies that they are identicle. Used for testing landmarks files. + description: Compares two .fcsv or .wts text files and verifies that they are identicle. Used for testing landmarks files. 
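Like every SEMLikeCommandLine subclass touched by this patch, the testing interfaces can be driven from Python once nipype is installed; the trait assignments below mirror the argstr metadata declared above. A minimal usage sketch with hypothetical file names (actually running it would also require the GenerateAverageLmkFile executable on $PATH):

    from nipype.interfaces.semtools.testing.generateaveragelmkfile import (
        GenerateAverageLmkFile,
    )

    avg = GenerateAverageLmkFile()
    avg.inputs.inputLandmarkFiles = ["subj01.fcsv", "subj02.fcsv"]  # hypothetical inputs
    avg.inputs.outputLandmarkFile = "average.fcsv"
    print(avg.cmdline)  # inspect the generated command line without executing
    # avg.run()  # would invoke the underlying SEM command-line tool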
diff --git a/nipype/interfaces/semtools/testing/landmarkscompare.py b/nipype/interfaces/semtools/testing/landmarkscompare.py
index 872d6d0df0..7d628518a8 100644
--- a/nipype/interfaces/semtools/testing/landmarkscompare.py
+++ b/nipype/interfaces/semtools/testing/landmarkscompare.py
@@ -1,25 +1,30 @@
-# -*- coding: utf-8 -*-
-# -*- coding: utf8 -*-
 """Autogenerated file - DO NOT EDIT
 If you spot a bug, please report it on the mailing list and/or change the generator."""

-from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath
-import os
+from nipype.interfaces.base import (
+    CommandLineInputSpec,
+    SEMLikeCommandLine,
+    TraitedSpec,
+    File,
+    traits,
+)


 class LandmarksCompareInputSpec(CommandLineInputSpec):
     inputLandmarkFile1 = File(
         desc="First input landmark file (.fcsv or .wts)",
         exists=True,
-        argstr="--inputLandmarkFile1 %s")
+        argstr="--inputLandmarkFile1 %s",
+    )
     inputLandmarkFile2 = File(
         desc="Second input landmark file (.fcsv or .wts)",
         exists=True,
-        argstr="--inputLandmarkFile2 %s")
+        argstr="--inputLandmarkFile2 %s",
+    )
     tolerance = traits.Float(
-        desc=
-        "The maximum error (in mm) allowed in each direction of a landmark",
-        argstr="--tolerance %f")
+        desc="The maximum error (in mm) allowed in each direction of a landmark",
+        argstr="--tolerance %f",
+    )


 class LandmarksCompareOutputSpec(TraitedSpec):
@@ -29,13 +34,12 @@ class LandmarksCompareOutputSpec(TraitedSpec):
 class LandmarksCompare(SEMLikeCommandLine):
     """title: Compare Fiducials

-category: Testing
+    category: Testing

-description: Compares two .fcsv or .wts text files and verifies that they are identicle. Used for testing landmarks files.
+    description: Compares two .fcsv or .wts text files and verifies that they are identical. Used for testing landmarks files.

-contributor: Ali Ghayoor
-
-"""
+    contributor: Ali Ghayoor
+    """

     input_spec = LandmarksCompareInputSpec
     output_spec = LandmarksCompareOutputSpec
diff --git a/nipype/interfaces/semtools/tests/__init__.py b/nipype/interfaces/semtools/tests/__init__.py
index 40a96afc6f..e69de29bb2 100644
--- a/nipype/interfaces/semtools/tests/__init__.py
+++ b/nipype/interfaces/semtools/tests/__init__.py
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-
diff --git a/nipype/interfaces/semtools/tests/test_auto_DWICompare.py b/nipype/interfaces/semtools/tests/test_auto_DWICompare.py
index c00b0cc36f..b691c097a8 100644
--- a/nipype/interfaces/semtools/tests/test_auto_DWICompare.py
+++ b/nipype/interfaces/semtools/tests/test_auto_DWICompare.py
@@ -1,23 +1,32 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..converters import DWICompare


 def test_DWICompare_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        inputVolume1=dict(argstr='--inputVolume1 %s', ),
-        inputVolume2=dict(argstr='--inputVolume2 %s', ),
+        inputVolume1=dict(
+            argstr="--inputVolume1 %s",
+            extensions=None,
+        ),
+        inputVolume2=dict(
+            argstr="--inputVolume2 %s",
+            extensions=None,
+        ),
     )
     inputs = DWICompare.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_DWICompare_outputs():
     output_map = dict()
     outputs = DWICompare.output_spec()
diff --git a/nipype/interfaces/semtools/tests/test_auto_DWISimpleCompare.py b/nipype/interfaces/semtools/tests/test_auto_DWISimpleCompare.py
index e515bc613c..ad4dcb12d4 100644
--- a/nipype/interfaces/semtools/tests/test_auto_DWISimpleCompare.py
+++ b/nipype/interfaces/semtools/tests/test_auto_DWISimpleCompare.py
@@ -1,24 +1,35 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..converters import DWISimpleCompare


 def test_DWISimpleCompare_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        checkDWIData=dict(argstr='--checkDWIData ', ),
+        args=dict(
+            argstr="%s",
+        ),
+        checkDWIData=dict(
+            argstr="--checkDWIData ",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        inputVolume1=dict(argstr='--inputVolume1 %s', ),
-        inputVolume2=dict(argstr='--inputVolume2 %s', ),
+        inputVolume1=dict(
+            argstr="--inputVolume1 %s",
+            extensions=None,
+        ),
+        inputVolume2=dict(
+            argstr="--inputVolume2 %s",
+            extensions=None,
+        ),
     )
     inputs = DWISimpleCompare.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_DWISimpleCompare_outputs():
     output_map = dict()
     outputs = DWISimpleCompare.output_spec()
diff --git a/nipype/interfaces/semtools/tests/test_auto_GenerateCsfClippedFromClassifiedImage.py b/nipype/interfaces/semtools/tests/test_auto_GenerateCsfClippedFromClassifiedImage.py
index 63f45831af..3f857d8085 100644
--- a/nipype/interfaces/semtools/tests/test_auto_GenerateCsfClippedFromClassifiedImage.py
+++ b/nipype/interfaces/semtools/tests/test_auto_GenerateCsfClippedFromClassifiedImage.py
@@ -1,18 +1,22 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..featurecreator import GenerateCsfClippedFromClassifiedImage


 def test_GenerateCsfClippedFromClassifiedImage_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         environ=dict(
nohash=True, usedefault=True, ), - inputCassifiedVolume=dict(argstr='--inputCassifiedVolume %s', ), + inputCassifiedVolume=dict( + argstr="--inputCassifiedVolume %s", + extensions=None, + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -21,8 +25,14 @@ def test_GenerateCsfClippedFromClassifiedImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateCsfClippedFromClassifiedImage_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = GenerateCsfClippedFromClassifiedImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/__init__.py b/nipype/interfaces/semtools/utilities/__init__.py index 698b76a534..02a5540951 100644 --- a/nipype/interfaces/semtools/utilities/__init__.py +++ b/nipype/interfaces/semtools/utilities/__init__.py @@ -1,11 +1,22 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .brains import ( - BRAINSConstellationModeler, landmarksConstellationWeights, - BRAINSTrimForegroundInDirection, BRAINSLmkTransform, BRAINSMush, - BRAINSTransformConvert, landmarksConstellationAligner, BRAINSEyeDetector, - BRAINSLinearModelerEPCA, BRAINSInitializedControlPoints, - CleanUpOverlapLabels, BRAINSClipInferior, - GenerateLabelMapFromProbabilityMap, BRAINSAlignMSP, - BRAINSLandmarkInitializer, insertMidACPCpoint, BRAINSSnapShotWriter, - JointHistogram, ShuffleVectorsModule, ImageRegionPlotter) + BRAINSConstellationModeler, + landmarksConstellationWeights, + BRAINSTrimForegroundInDirection, + BRAINSLmkTransform, + BRAINSMush, + BRAINSTransformConvert, + landmarksConstellationAligner, + BRAINSEyeDetector, + BRAINSLinearModelerEPCA, + BRAINSInitializedControlPoints, + CleanUpOverlapLabels, + BRAINSClipInferior, + GenerateLabelMapFromProbabilityMap, + BRAINSAlignMSP, + BRAINSLandmarkInitializer, + insertMidACPCpoint, + BRAINSSnapShotWriter, + JointHistogram, + ShuffleVectorsModule, + ImageRegionPlotter, +) diff --git a/nipype/interfaces/semtools/utilities/brains.py b/nipype/interfaces/semtools/utilities/brains.py index abc696b5d9..de38a52fc2 100644 --- a/nipype/interfaces/semtools/utilities/brains.py +++ b/nipype/interfaces/semtools/utilities/brains.py @@ -1,150 +1,152 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -import os - -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + InputMultiPath, + OutputMultiPath, +) class BRAINSConstellationModelerInputSpec(CommandLineInputSpec): verbose = traits.Bool( desc=", Show more verbose output, ", - argstr="--verbose ") + argstr="--verbose ", + ) inputTrainingList = File( - desc= - ", Setup file, giving all parameters for training up a template model for each landmark., ", + desc=", Setup file, giving all parameters for training up a template model for each landmark., ", exists=True, - argstr="--inputTrainingList %s") + argstr="--inputTrainingList %s", + ) outputModel = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The full filename of the output 
model file., ", - argstr="--outputModel %s") + desc=", The full filename of the output model file., ", + argstr="--outputModel %s", + ) saveOptimizedLandmarks = traits.Bool( - desc= - ", Flag to make a new subject-specific landmark definition file in the same format produced by Slicer3 with the optimized landmark (the detected RP, AC, and PC) in it. Useful to tighten the variances in the ConstellationModeler., ", - argstr="--saveOptimizedLandmarks ") + desc=", Flag to make a new subject-specific landmark definition file in the same format produced by Slicer3 with the optimized landmark (the detected RP, AC, and PC) in it. Useful to tighten the variances in the ConstellationModeler., ", + argstr="--saveOptimizedLandmarks ", + ) optimizedLandmarksFilenameExtender = traits.Str( - desc= - ", If the trainingList is (indexFullPathName) and contains landmark data filenames [path]/[filename].fcsv , make the optimized landmarks filenames out of [path]/[filename](thisExtender) and the optimized version of the input trainingList out of (indexFullPathName)(thisExtender) , when you rewrite all the landmarks according to the saveOptimizedLandmarks flag., ", - argstr="--optimizedLandmarksFilenameExtender %s") + desc=", If the trainingList is (indexFullPathName) and contains landmark data filenames [path]/[filename].fcsv , make the optimized landmarks filenames out of [path]/[filename](thisExtender) and the optimized version of the input trainingList out of (indexFullPathName)(thisExtender) , when you rewrite all the landmarks according to the saveOptimizedLandmarks flag., ", + argstr="--optimizedLandmarksFilenameExtender %s", + ) resultsDir = traits.Either( traits.Bool, Directory(), hash_files=False, - desc= - ", The directory for the results to be written., ", - argstr="--resultsDir %s") + desc=", The directory for the results to be written., ", + argstr="--resultsDir %s", + ) mspQualityLevel = traits.Int( - desc= - ", Flag cotrols how agressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ", - argstr="--mspQualityLevel %d") + desc=", Flag controls how aggressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ", + argstr="--mspQualityLevel %d", + ) rescaleIntensities = traits.Bool( - desc= - ", Flag to turn on rescaling image intensities on input., ", - argstr="--rescaleIntensities ") + desc=", Flag to turn on rescaling image intensities on input., ", + argstr="--rescaleIntensities ", + ) trimRescaledIntensities = traits.Float( - desc= - ", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", - argstr="--trimRescaledIntensities %f") + desc=", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", + argstr="--trimRescaledIntensities %f", + ) rescaleIntensitiesOutputRange = InputMultiPath( traits.Int, - desc= - ", This pair of integers gives the lower and upper bounds on the signal portion of the output image. 
Out-of-field voxels are taken from BackgroundFillValue., ", + desc=", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ", sep=",", - argstr="--rescaleIntensitiesOutputRange %s") + argstr="--rescaleIntensitiesOutputRange %s", + ) BackgroundFillValue = traits.Str( - desc= - "Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", - argstr="--BackgroundFillValue %s") + desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", + argstr="--BackgroundFillValue %s", + ) writedebuggingImagesLevel = traits.Int( - desc= - ", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ", - argstr="--writedebuggingImagesLevel %d") + desc=", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ", + argstr="--writedebuggingImagesLevel %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSConstellationModelerOutputSpec(TraitedSpec): outputModel = File( - desc= - ", The full filename of the output model file., ", - exists=True) + desc=", The full filename of the output model file., ", + exists=True, + ) resultsDir = Directory( - desc= - ", The directory for the results to be written., ", - exists=True) + desc=", The directory for the results to be written., ", + exists=True, + ) class BRAINSConstellationModeler(SEMLikeCommandLine): """title: Generate Landmarks Model (BRAINS) -category: Utilities.BRAINS - -description: Train up a model for BRAINSConstellationDetector + category: Utilities.BRAINS -""" + description: Train up a model for BRAINSConstellationDetector + """ input_spec = BRAINSConstellationModelerInputSpec output_spec = BRAINSConstellationModelerOutputSpec _cmd = " BRAINSConstellationModeler " - _outputs_filenames = { - 'outputModel': 'outputModel.mdl', - 'resultsDir': 'resultsDir' - } + _outputs_filenames = {"outputModel": "outputModel.mdl", "resultsDir": "resultsDir"} _redirect_x = False class landmarksConstellationWeightsInputSpec(CommandLineInputSpec): inputTrainingList = File( - desc= - ", Setup file, giving all parameters for training up a Weight list for landmark., ", + desc=", Setup file, giving all parameters for training up a Weight list for landmark., ", exists=True, - argstr="--inputTrainingList %s") + argstr="--inputTrainingList %s", + ) inputTemplateModel = File( desc="User-specified template model., ", exists=True, - argstr="--inputTemplateModel %s") + argstr="--inputTemplateModel %s", + ) LLSModel = File( desc="Linear least squares model filename in HD5 format", exists=True, - argstr="--LLSModel %s") + argstr="--LLSModel %s", + ) outputWeightsList = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename of a csv file which is a list of landmarks and their corresponding weights., ", - argstr="--outputWeightsList %s") + desc=", The filename of a csv file which is a list of landmarks and their corresponding weights., ", + argstr="--outputWeightsList %s", + ) class landmarksConstellationWeightsOutputSpec(TraitedSpec): outputWeightsList = File( - desc= - ", The filename of a csv file which 
is a list of landmarks and their corresponding weights., ", - exists=True) + desc=", The filename of a csv file which is a list of landmarks and their corresponding weights., ", + exists=True, + ) class landmarksConstellationWeights(SEMLikeCommandLine): """title: Generate Landmarks Weights (BRAINS) -category: Utilities.BRAINS + category: Utilities.BRAINS -description: Train up a list of Weights for the Landmarks in BRAINSConstellationDetector - -""" + description: Train up a list of Weights for the Landmarks in BRAINSConstellationDetector + """ input_spec = landmarksConstellationWeightsInputSpec output_spec = landmarksConstellationWeightsOutputSpec _cmd = " landmarksConstellationWeights " - _outputs_filenames = {'outputWeightsList': 'outputWeightsList.wts'} + _outputs_filenames = {"outputWeightsList": "outputWeightsList.wts"} _redirect_x = False @@ -152,63 +154,64 @@ class BRAINSTrimForegroundInDirectionInputSpec(CommandLineInputSpec): inputVolume = File( desc="Input image to trim off the neck (and also air-filling noise.)", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", - argstr="--outputVolume %s") + desc="Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", + argstr="--outputVolume %s", + ) directionCode = traits.Int( - desc= - ", This flag chooses which dimension to compare. The sign lets you flip direction., ", - argstr="--directionCode %d") + desc=", This flag chooses which dimension to compare. The sign lets you flip direction., ", + argstr="--directionCode %d", + ) otsuPercentileThreshold = traits.Float( - desc= - ", This is a parameter to FindLargestForegroundFilledMask, which is employed to trim off air-filling noise., ", - argstr="--otsuPercentileThreshold %f") + desc=", This is a parameter to FindLargestForegroundFilledMask, which is employed to trim off air-filling noise., ", + argstr="--otsuPercentileThreshold %f", + ) closingSize = traits.Int( - desc= - ", This is a parameter to FindLargestForegroundFilledMask, ", - argstr="--closingSize %d") + desc=", This is a parameter to FindLargestForegroundFilledMask, ", + argstr="--closingSize %d", + ) headSizeLimit = traits.Float( - desc= - ", Use this to vary from the command line our search for how much upper tissue is head for the center-of-mass calculation. Units are CCs, not cubic millimeters., ", - argstr="--headSizeLimit %f") + desc=", Use this to vary from the command line our search for how much upper tissue is head for the center-of-mass calculation. Units are CCs, not cubic millimeters., ", + argstr="--headSizeLimit %f", + ) BackgroundFillValue = traits.Str( - desc= - "Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", - argstr="--BackgroundFillValue %s") + desc="Fill the background of image with specified short int value. 
Enter number or use BIGNEG for a large negative number.", + argstr="--BackgroundFillValue %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSTrimForegroundInDirectionOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", - exists=True) + desc="Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", + exists=True, + ) class BRAINSTrimForegroundInDirection(SEMLikeCommandLine): """title: Trim Foreground In Direction (BRAINS) -category: Utilities.BRAINS + category: Utilities.BRAINS -description: This program will trim off the neck and also air-filling noise from the inputImage. + description: This program will trim off the neck and also air-filling noise from the inputImage. -version: 0.1 + version: 0.1 -documentation-url: http://www.nitrc.org/projects/art/ - -""" + documentation-url: http://www.nitrc.org/projects/art/ + """ input_spec = BRAINSTrimForegroundInDirectionInputSpec output_spec = BRAINSTrimForegroundInDirectionOutputSpec _cmd = " BRAINSTrimForegroundInDirection " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False @@ -216,63 +219,71 @@ class BRAINSLmkTransformInputSpec(CommandLineInputSpec): inputMovingLandmarks = File( desc="Input Moving Landmark list file in fcsv, ", exists=True, - argstr="--inputMovingLandmarks %s") + argstr="--inputMovingLandmarks %s", + ) inputFixedLandmarks = File( desc="Input Fixed Landmark list file in fcsv, ", exists=True, - argstr="--inputFixedLandmarks %s") + argstr="--inputFixedLandmarks %s", + ) outputAffineTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="The filename for the estimated affine transform, ", - argstr="--outputAffineTransform %s") + argstr="--outputAffineTransform %s", + ) inputMovingVolume = File( desc="The filename of input moving volume", exists=True, - argstr="--inputMovingVolume %s") + argstr="--inputMovingVolume %s", + ) inputReferenceVolume = File( desc="The filename of the reference volume", exists=True, - argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) outputResampledVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The filename of the output resampled volume", - argstr="--outputResampledVolume %s") + argstr="--outputResampledVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSLmkTransformOutputSpec(TraitedSpec): outputAffineTransform = File( desc="The filename for the estimated affine transform, ", - exists=True) + exists=True, + ) outputResampledVolume = File( - desc="The filename of the output resampled volume", exists=True) + desc="The filename of the output resampled volume", exists=True + ) class BRAINSLmkTransform(SEMLikeCommandLine): """title: Landmark Transform (BRAINS) -category: Utilities.BRAINS - -description: This utility program estimates the affine transform to align the fixed landmarks to the moving landmarks, and then generate the resampled moving image to the same physical space as that of the reference image. 
+ category: Utilities.BRAINS -version: 1.0 + description: This utility program estimates the affine transform to align the fixed landmarks to the moving landmarks, and then generate the resampled moving image to the same physical space as that of the reference image. -documentation-url: http://www.nitrc.org/projects/brainscdetector/ + version: 1.0 -""" + documentation-url: http://www.nitrc.org/projects/brainscdetector/ + """ input_spec = BRAINSLmkTransformInputSpec output_spec = BRAINSLmkTransformOutputSpec _cmd = " BRAINSLmkTransform " _outputs_filenames = { - 'outputResampledVolume': 'outputResampledVolume.nii', - 'outputAffineTransform': 'outputAffineTransform.h5' + "outputResampledVolume": "outputResampledVolume.nii", + "outputAffineTransform": "outputAffineTransform.h5", } _redirect_x = False @@ -281,110 +292,122 @@ class BRAINSMushInputSpec(CommandLineInputSpec): inputFirstVolume = File( desc="Input image (1) for mixture optimization", exists=True, - argstr="--inputFirstVolume %s") + argstr="--inputFirstVolume %s", + ) inputSecondVolume = File( desc="Input image (2) for mixture optimization", exists=True, - argstr="--inputSecondVolume %s") + argstr="--inputSecondVolume %s", + ) inputMaskVolume = File( desc="Input label image for mixture optimization", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) outputWeightsFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Weights File", - argstr="--outputWeightsFile %s") + argstr="--outputWeightsFile %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The MUSH image produced from the T1 and T2 weighted images", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) outputMask = traits.Either( traits.Bool, File(), hash_files=False, desc="The brain volume mask generated from the MUSH image", - argstr="--outputMask %s") + argstr="--outputMask %s", + ) seed = InputMultiPath( traits.Int, desc="Seed Point for Brain Region Filling", sep=",", - argstr="--seed %s") + argstr="--seed %s", + ) desiredMean = traits.Float( desc="Desired mean within the mask for weighted sum of both images.", - argstr="--desiredMean %f") + argstr="--desiredMean %f", + ) desiredVariance = traits.Float( - desc= - "Desired variance within the mask for weighted sum of both images.", - argstr="--desiredVariance %f") + desc="Desired variance within the mask for weighted sum of both images.", + argstr="--desiredVariance %f", + ) lowerThresholdFactorPre = traits.Float( desc="Lower threshold factor for finding an initial brain mask", - argstr="--lowerThresholdFactorPre %f") + argstr="--lowerThresholdFactorPre %f", + ) upperThresholdFactorPre = traits.Float( desc="Upper threshold factor for finding an initial brain mask", - argstr="--upperThresholdFactorPre %f") + argstr="--upperThresholdFactorPre %f", + ) lowerThresholdFactor = traits.Float( desc="Lower threshold factor for defining the brain mask", - argstr="--lowerThresholdFactor %f") + argstr="--lowerThresholdFactor %f", + ) upperThresholdFactor = traits.Float( desc="Upper threshold factor for defining the brain mask", - argstr="--upperThresholdFactor %f") + argstr="--upperThresholdFactor %f", + ) boundingBoxSize = InputMultiPath( traits.Int, - desc= - "Size of the cubic bounding box mask used when no brain mask is present", + desc="Size of the cubic bounding box mask used when no brain mask is present", sep=",", - argstr="--boundingBoxSize %s") + argstr="--boundingBoxSize %s", + ) boundingBoxStart = InputMultiPath( traits.Int, - 
desc= - "XYZ point-coordinate for the start of the cubic bounding box mask used when no brain mask is present", + desc="XYZ point-coordinate for the start of the cubic bounding box mask used when no brain mask is present", sep=",", - argstr="--boundingBoxStart %s") + argstr="--boundingBoxStart %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSMushOutputSpec(TraitedSpec): outputWeightsFile = File(desc="Output Weights File", exists=True) outputVolume = File( - desc="The MUSH image produced from the T1 and T2 weighted images", - exists=True) + desc="The MUSH image produced from the T1 and T2 weighted images", exists=True + ) outputMask = File( - desc="The brain volume mask generated from the MUSH image", - exists=True) + desc="The brain volume mask generated from the MUSH image", exists=True + ) class BRAINSMush(SEMLikeCommandLine): """title: Brain Extraction from T1/T2 image (BRAINS) -category: Utilities.BRAINS - -description: This program: 1) generates a weighted mixture image optimizing the mean and variance and 2) produces a mask of the brain volume + category: Utilities.BRAINS -version: 0.1.0.$Revision: 1.4 $(alpha) + description: This program: 1) generates a weighted mixture image optimizing the mean and variance and 2) produces a mask of the brain volume -documentation-url: http:://mri.radiology.uiowa.edu + version: 0.1.0.$Revision: 1.4 $(alpha) -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http:://mri.radiology.uiowa.edu -contributor: This tool is a modification by Steven Dunn of a program developed by Greg Harris and Ron Pierson. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: This work was developed by the University of Iowa Departments of Radiology and Psychiatry. This software was supported in part of NIH/NINDS award NS050568. + contributor: This tool is a modification by Steven Dunn of a program developed by Greg Harris and Ron Pierson. -""" + acknowledgements: This work was developed by the University of Iowa Departments of Radiology and Psychiatry. This software was supported in part of NIH/NINDS award NS050568. + """ input_spec = BRAINSMushInputSpec output_spec = BRAINSMushOutputSpec _cmd = " BRAINSMush " _outputs_filenames = { - 'outputMask': 'outputMask.nii.gz', - 'outputWeightsFile': 'outputWeightsFile.txt', - 'outputVolume': 'outputVolume.nii.gz' + "outputMask": "outputMask.nii.gz", + "outputWeightsFile": "outputWeightsFile.txt", + "outputVolume": "outputVolume.nii.gz", } _redirect_x = False @@ -399,22 +422,21 @@ class BRAINSTransformConvertInputSpec(CommandLineInputSpec): "ScaleSkewVersor", "DisplacementField", "Same", - desc= - "The target transformation type. Must be conversion-compatible with the input transform type", - argstr="--outputTransformType %s") + desc="The target transformation type. Must be conversion-compatible with the input transform type", + argstr="--outputTransformType %s", + ) outputPrecisionType = traits.Enum( "double", "float", - desc= - "Precision type of the output transform. It can be either single precision or double precision", - argstr="--outputPrecisionType %s") + desc="Precision type of the output transform. 
It can be either single precision or double precision", + argstr="--outputPrecisionType %s", + ) displacementVolume = traits.Either( - traits.Bool, - File(), - hash_files=False, - argstr="--displacementVolume %s") + traits.Bool, File(), hash_files=False, argstr="--displacementVolume %s" + ) outputTransform = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--outputTransform %s") + traits.Bool, File(), hash_files=False, argstr="--outputTransform %s" + ) class BRAINSTransformConvertOutputSpec(TraitedSpec): @@ -425,26 +447,25 @@ class BRAINSTransformConvertOutputSpec(TraitedSpec): class BRAINSTransformConvert(SEMLikeCommandLine): """title: BRAINS Transform Convert -category: Utilities.BRAINS - -description: Convert ITK transforms to higher order transforms + category: Utilities.BRAINS -version: 1.0 + description: Convert ITK transforms to higher order transforms -documentation-url: A utility to convert between transform file formats. + version: 1.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: A utility to convert between transform file formats. -contributor: Hans J. Johnson,Kent Williams, Ali Ghayoor + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: Hans J. Johnson,Kent Williams, Ali Ghayoor + """ input_spec = BRAINSTransformConvertInputSpec output_spec = BRAINSTransformConvertOutputSpec _cmd = " BRAINSTransformConvert " _outputs_filenames = { - 'displacementVolume': 'displacementVolume.nii', - 'outputTransform': 'outputTransform.mat' + "displacementVolume": "displacementVolume.nii", + "outputTransform": "outputTransform.mat", } _redirect_x = False @@ -453,52 +474,52 @@ class landmarksConstellationAlignerInputSpec(CommandLineInputSpec): inputLandmarksPaired = File( desc="Input landmark file (.fcsv)", exists=True, - argstr="--inputLandmarksPaired %s") + argstr="--inputLandmarksPaired %s", + ) outputLandmarksPaired = traits.Either( traits.Bool, File(), hash_files=False, desc="Output landmark file (.fcsv)", - argstr="--outputLandmarksPaired %s") + argstr="--outputLandmarksPaired %s", + ) class landmarksConstellationAlignerOutputSpec(TraitedSpec): - outputLandmarksPaired = File( - desc="Output landmark file (.fcsv)", exists=True) + outputLandmarksPaired = File(desc="Output landmark file (.fcsv)", exists=True) class landmarksConstellationAligner(SEMLikeCommandLine): """title: MidACPC Landmark Insertion -category: Utilities.BRAINS - -description: This program converts the original landmark files to the acpc-aligned landmark files + category: Utilities.BRAINS -contributor: Ali Ghayoor + description: This program converts the original landmark files to the acpc-aligned landmark files -""" + contributor: Ali Ghayoor + """ input_spec = landmarksConstellationAlignerInputSpec output_spec = landmarksConstellationAlignerOutputSpec _cmd = " landmarksConstellationAligner " - _outputs_filenames = {'outputLandmarksPaired': 'outputLandmarksPaired'} + _outputs_filenames = {"outputLandmarksPaired": "outputLandmarksPaired"} _redirect_x = False class BRAINSEyeDetectorInputSpec(CommandLineInputSpec): numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") - inputVolume = File( - desc="The input volume", exists=True, argstr="--inputVolume %s") + argstr="--numberOfThreads %d", + ) + inputVolume = File(desc="The input volume", exists=True, argstr="--inputVolume %s") outputVolume = traits.Either( traits.Bool, File(), hash_files=False, 
desc="The output volume", - argstr="--outputVolume %s") - debugDir = traits.Str( - desc="A place for debug information", argstr="--debugDir %s") + argstr="--outputVolume %s", + ) + debugDir = traits.Str(desc="A place for debug information", argstr="--debugDir %s") class BRAINSEyeDetectorOutputSpec(TraitedSpec): @@ -508,18 +529,17 @@ class BRAINSEyeDetectorOutputSpec(TraitedSpec): class BRAINSEyeDetector(SEMLikeCommandLine): """title: Eye Detector (BRAINS) -category: Utilities.BRAINS + category: Utilities.BRAINS -version: 1.0 + version: 1.0 -documentation-url: http://www.nitrc.org/projects/brainscdetector/ - -""" + documentation-url: http://www.nitrc.org/projects/brainscdetector/ + """ input_spec = BRAINSEyeDetectorInputSpec output_spec = BRAINSEyeDetectorOutputSpec _cmd = " BRAINSEyeDetector " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False @@ -527,10 +547,12 @@ class BRAINSLinearModelerEPCAInputSpec(CommandLineInputSpec): inputTrainingList = File( desc="Input Training Landmark List Filename, ", exists=True, - argstr="--inputTrainingList %s") + argstr="--inputTrainingList %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSLinearModelerEPCAOutputSpec(TraitedSpec): @@ -540,15 +562,14 @@ class BRAINSLinearModelerEPCAOutputSpec(TraitedSpec): class BRAINSLinearModelerEPCA(SEMLikeCommandLine): """title: Landmark Linear Modeler (BRAINS) -category: Utilities.BRAINS + category: Utilities.BRAINS -description: Training linear model using EPCA. Implementation based on my MS thesis, "A METHOD FOR AUTOMATED LANDMARK CONSTELLATION DETECTION USING EVOLUTIONARY PRINCIPAL COMPONENTS AND STATISTICAL SHAPE MODELS" + description: Training linear model using EPCA. Implementation based on my MS thesis, "A METHOD FOR AUTOMATED LANDMARK CONSTELLATION DETECTION USING EVOLUTIONARY PRINCIPAL COMPONENTS AND STATISTICAL SHAPE MODELS" -version: 1.0 + version: 1.0 -documentation-url: http://www.nitrc.org/projects/brainscdetector/ - -""" + documentation-url: http://www.nitrc.org/projects/brainscdetector/ + """ input_spec = BRAINSLinearModelerEPCAInputSpec output_spec = BRAINSLinearModelerEPCAOutputSpec @@ -558,31 +579,33 @@ class BRAINSLinearModelerEPCA(SEMLikeCommandLine): class BRAINSInitializedControlPointsInputSpec(CommandLineInputSpec): - inputVolume = File( - desc="Input Volume", exists=True, argstr="--inputVolume %s") + inputVolume = File(desc="Input Volume", exists=True, argstr="--inputVolume %s") outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Volume", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) splineGridSize = InputMultiPath( traits.Int, - desc= - "The number of subdivisions of the BSpline Grid to be centered on the image space. Each dimension must have at least 3 subdivisions for the BSpline to be correctly computed. ", + desc="The number of subdivisions of the BSpline Grid to be centered on the image space. Each dimension must have at least 3 subdivisions for the BSpline to be correctly computed. ", sep=",", - argstr="--splineGridSize %s") + argstr="--splineGridSize %s", + ) permuteOrder = InputMultiPath( traits.Int, - desc= - "The permutation order for the images. The default is 0,1,2 (i.e. no permutation)", + desc="The permutation order for the images. The default is 0,1,2 (i.e. 
no permutation)", sep=",", - argstr="--permuteOrder %s") + argstr="--permuteOrder %s", + ) outputLandmarksFile = traits.Str( - desc="Output filename", argstr="--outputLandmarksFile %s") + desc="Output filename", argstr="--outputLandmarksFile %s" + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSInitializedControlPointsOutputSpec(TraitedSpec): @@ -592,67 +615,64 @@ class BRAINSInitializedControlPointsOutputSpec(TraitedSpec): class BRAINSInitializedControlPoints(SEMLikeCommandLine): """title: Initialized Control Points (BRAINS) -category: Utilities.BRAINS - -description: Outputs bspline control points as landmarks + category: Utilities.BRAINS -version: 0.1.0.$Revision: 916 $(alpha) + description: Outputs bspline control points as landmarks -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + version: 0.1.0.$Revision: 916 $(alpha) -contributor: Mark Scully + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for Mark Scully and Hans Johnson at the University of Iowa. + contributor: Mark Scully -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for Mark Scully and Hans Johnson at the University of Iowa. + """ input_spec = BRAINSInitializedControlPointsInputSpec output_spec = BRAINSInitializedControlPointsOutputSpec _cmd = " BRAINSInitializedControlPoints " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class CleanUpOverlapLabelsInputSpec(CommandLineInputSpec): inputBinaryVolumes = InputMultiPath( File(exists=True), - desc= - "The list of binary images to be checked and cleaned up. Order is important. Binary volume given first always wins out. ", - argstr="--inputBinaryVolumes %s...") + desc="The list of binary images to be checked and cleaned up. Order is important. Binary volume given first always wins out. ", + argstr="--inputBinaryVolumes %s...", + ) outputBinaryVolumes = traits.Either( traits.Bool, - InputMultiPath(File(), ), + InputMultiPath(File()), hash_files=False, - desc= - "The output label map images, with integer values in it. Each label value specified in the inputLabels is combined into this output label map volume", - argstr="--outputBinaryVolumes %s...") + desc="The output label map images, with integer values in it. Each label value specified in the inputLabels is combined into this output label map volume", + argstr="--outputBinaryVolumes %s...", + ) class CleanUpOverlapLabelsOutputSpec(TraitedSpec): outputBinaryVolumes = OutputMultiPath( File(exists=True), - desc= - "The output label map images, with integer values in it. Each label value specified in the inputLabels is combined into this output label map volume" + desc="The output label map images, with integer values in it. 
Each label value specified in the inputLabels is combined into this output label map volume",
     )


 class CleanUpOverlapLabels(SEMLikeCommandLine):
     """title: Clean Up Overlap Labels

-category: Utilities.BRAINS
-
-description: Take a series of input binary images and clean up for those overlapped area. Binary volumes given first always wins out
+    category: Utilities.BRAINS

-version: 0.1.0
+    description: Take a series of input binary images and clean up those overlapped areas. The binary volume given first always wins out

-contributor: Eun Young Kim
+    version: 0.1.0

-"""
+    contributor: Eun Young Kim
+    """

     input_spec = CleanUpOverlapLabelsInputSpec
     output_spec = CleanUpOverlapLabelsOutputSpec
     _cmd = " CleanUpOverlapLabels "
-    _outputs_filenames = {'outputBinaryVolumes': 'outputBinaryVolumes.nii'}
+    _outputs_filenames = {"outputBinaryVolumes": "outputBinaryVolumes.nii"}
     _redirect_x = False


@@ -660,90 +680,94 @@ class BRAINSClipInferiorInputSpec(CommandLineInputSpec):
     inputVolume = File(
         desc="Input image to make a clipped short int copy from.",
         exists=True,
-        argstr="--inputVolume %s")
+        argstr="--inputVolume %s",
+    )
     outputVolume = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.",
-        argstr="--outputVolume %s")
+        desc="Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.",
+        argstr="--outputVolume %s",
+    )
     acLowerBound = traits.Float(
-        desc=
-        ", When the input image to the output image, replace the image with the BackgroundFillValue everywhere below the plane This Far in physical units (millimeters) below (inferior to) the AC point (assumed to be the voxel field middle.) The oversize default was chosen to have no effect. Based on visualizing a thousand masks in the IPIG study, we recommend a limit no smaller than 80.0 mm., ",
-        argstr="--acLowerBound %f")
+        desc=", When copying the input image to the output image, replace the image with the BackgroundFillValue everywhere below the plane this far in physical units (millimeters) below (inferior to) the AC point (assumed to be the voxel field middle.) The oversize default was chosen to have no effect. Based on visualizing a thousand masks in the IPIG study, we recommend a limit no smaller than 80.0 mm., ",
+        argstr="--acLowerBound %f",
+    )
     BackgroundFillValue = traits.Str(
-        desc=
-        "Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.",
-        argstr="--BackgroundFillValue %s")
+        desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.",
+        argstr="--BackgroundFillValue %s",
+    )
     numberOfThreads = traits.Int(
         desc="Explicitly specify the maximum number of threads to use.",
-        argstr="--numberOfThreads %d")
+        argstr="--numberOfThreads %d",
+    )


 class BRAINSClipInferiorOutputSpec(TraitedSpec):
     outputVolume = File(
-        desc=
-        "Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.",
-        exists=True)
+        desc="Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.",
+        exists=True,
+    )


 class BRAINSClipInferior(SEMLikeCommandLine):
     """title: Clip Inferior of Center of Brain (BRAINS)

-category: Utilities.BRAINS
-
-description: This program will read the inputVolume as a short int image, write the BackgroundFillValue everywhere inferior to the lower bound, and write the resulting clipped short int image in the outputVolume.
+    category: Utilities.BRAINS

-version: 1.0
+    description: This program will read the inputVolume as a short int image, write the BackgroundFillValue everywhere inferior to the lower bound, and write the resulting clipped short int image in the outputVolume.

-"""
+    version: 1.0
+    """

     input_spec = BRAINSClipInferiorInputSpec
     output_spec = BRAINSClipInferiorOutputSpec
     _cmd = " BRAINSClipInferior "
-    _outputs_filenames = {'outputVolume': 'outputVolume.nii'}
+    _outputs_filenames = {"outputVolume": "outputVolume.nii"}
     _redirect_x = False


 class GenerateLabelMapFromProbabilityMapInputSpec(CommandLineInputSpec):
     inputVolumes = InputMultiPath(
         File(exists=True),
-        desc="The Input probaiblity images to be computed for lable maps",
-        argstr="--inputVolumes %s...")
+        desc="The Input probability images to be computed for label maps",
+        argstr="--inputVolumes %s...",
+    )
     outputLabelVolume = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc="The Input binary image for region of interest",
-        argstr="--outputLabelVolume %s")
+        argstr="--outputLabelVolume %s",
+    )
     numberOfThreads = traits.Int(
         desc="Explicitly specify the maximum number of threads to use.",
-        argstr="--numberOfThreads %d")
+        argstr="--numberOfThreads %d",
+    )


 class GenerateLabelMapFromProbabilityMapOutputSpec(TraitedSpec):
     outputLabelVolume = File(
-        desc="The Input binary image for region of interest", exists=True)
+        desc="The Input binary image for region of interest", exists=True
+    )


 class GenerateLabelMapFromProbabilityMap(SEMLikeCommandLine):
     """title: Label Map from Probability Images

-category: Utilities.BRAINS
-
-description: Given a list of probability maps for labels, create a discrete label map where only the highest probability region is used for the labeling.
+    category: Utilities.BRAINS

-version: 0.1
+    description: Given a list of probability maps for labels, create a discrete label map where only the highest probability region is used for the labeling.

-contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu
+    version: 0.1

-"""
+    contributor: University of Iowa Department of Psychiatry, http://www.psychiatry.uiowa.edu
+    """

     input_spec = GenerateLabelMapFromProbabilityMapInputSpec
     output_spec = GenerateLabelMapFromProbabilityMapOutputSpec
     _cmd = " GenerateLabelMapFromProbabilityMap "
-    _outputs_filenames = {'outputLabelVolume': 'outputLabelVolume.nii.gz'}
+    _outputs_filenames = {"outputLabelVolume": "outputLabelVolume.nii.gz"}
     _redirect_x = False
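For orientation, the reformatted interface is used exactly as before; a minimal sketch with hypothetical file names (assumes nipype plus the BRAINS GenerateLabelMapFromProbabilityMap executable on $PATH):

    from nipype.interfaces.semtools.utilities.brains import (
        GenerateLabelMapFromProbabilityMap,
    )

    gen = GenerateLabelMapFromProbabilityMap()
    # One probability map per tissue class; the highest-probability label wins.
    gen.inputs.inputVolumes = ["csf_prob.nii.gz", "gm_prob.nii.gz", "wm_prob.nii.gz"]
    gen.inputs.outputLabelVolume = "labels.nii.gz"
    print(gen.cmdline)  # inspect the command line without running the tool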
@@ -751,47 +775,51 @@ class BRAINSAlignMSPInputSpec(CommandLineInputSpec):
     inputVolume = File(
         desc=", The Image to be resampled, ",
         exists=True,
-        argstr="--inputVolume %s")
+        argstr="--inputVolume %s",
+    )
     OutputresampleMSP = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc=", The image to be output., ",
-        argstr="--OutputresampleMSP %s")
+        argstr="--OutputresampleMSP %s",
+    )
     verbose = traits.Bool(
-        desc=", Show more verbose output, ", argstr="--verbose ")
+        desc=", Show more verbose output, ", argstr="--verbose "
+    )
     resultsDir = traits.Either(
         traits.Bool,
         Directory(),
         hash_files=False,
         desc=", The directory for the results to be written., ",
-        argstr="--resultsDir %s")
+        argstr="--resultsDir %s",
+    )
     writedebuggingImagesLevel = traits.Int(
-        desc=
-        ", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ",
-        argstr="--writedebuggingImagesLevel %d")
+        desc=", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ",
+        argstr="--writedebuggingImagesLevel %d",
+    )
     mspQualityLevel = traits.Int(
-        desc=
-        ", Flag cotrols how agressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ",
-        argstr="--mspQualityLevel %d")
+        desc=", Flag controls how aggressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ",
+        argstr="--mspQualityLevel %d",
+    )
     rescaleIntensities = traits.Bool(
-        desc=
-        ", Flag to turn on rescaling image intensities on input., ",
-        argstr="--rescaleIntensities ")
+        desc=", Flag to turn on rescaling image intensities on input., ",
+        argstr="--rescaleIntensities ",
+    )
     trimRescaledIntensities = traits.Float(
-        desc=
-        ", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ",
-        argstr="--trimRescaledIntensities %f")
+        desc=", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ",
+        argstr="--trimRescaledIntensities %f",
+    )
     rescaleIntensitiesOutputRange = InputMultiPath(
         traits.Int,
-        desc=
-        ", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ",
+        desc=", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ",
         sep=",",
-        argstr="--rescaleIntensitiesOutputRange %s")
+        argstr="--rescaleIntensitiesOutputRange %s",
+    )
     BackgroundFillValue = traits.Str(
-        desc=
-        "Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.",
-        argstr="--BackgroundFillValue %s")
+        desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.",
+        argstr="--BackgroundFillValue %s",
+    )
     interpolationMode = traits.Enum(
         "NearestNeighbor",
         "Linear",
@@ -803,37 +831,39 @@ class BRAINSAlignMSPInputSpec(CommandLineInputSpec):
         "Welch",
         "Lanczos",
         "Blackman",
-        desc=
-        "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc",
-        argstr="--interpolationMode %s")
+        desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc",
+        argstr="--interpolationMode %s",
+    )
     numberOfThreads = traits.Int(
         desc="Explicitly specify the maximum number of threads to use.",
-        argstr="--numberOfThreads %d")
+        argstr="--numberOfThreads %d",
+    )


 class BRAINSAlignMSPOutputSpec(TraitedSpec):
     OutputresampleMSP = File(
-        desc=", The image to be output., ", exists=True)
+        desc=", The image to be output., ", exists=True
+    )
     resultsDir = Directory(
         desc=", The directory for the results to be written., ",
-        exists=True)
+        exists=True,
+    )


 class BRAINSAlignMSP(SEMLikeCommandLine):
-    """title: Align Mid Saggital Brain (BRAINS)
+    """title: Align Mid Sagittal Brain (BRAINS)

-category: Utilities.BRAINS
+    category: Utilities.BRAINS

-description: Resample an image into ACPC alignement ACPCDetect
-
-"""
+    description: Resample an image into ACPC alignment ACPCDetect
+    """

     input_spec = BRAINSAlignMSPInputSpec
     output_spec = BRAINSAlignMSPOutputSpec
     _cmd = " BRAINSAlignMSP "
     _outputs_filenames = {
-        'OutputresampleMSP': 'OutputresampleMSP.nii',
-        'resultsDir': 'resultsDir'
+        "OutputresampleMSP": "OutputresampleMSP.nii",
+        "resultsDir": "resultsDir",
     }
     _redirect_x = False


@@ -842,63 +872,65 @@ class BRAINSLandmarkInitializerInputSpec(CommandLineInputSpec):
     inputFixedLandmarkFilename = File(
         desc="input fixed landmark. *.fcsv",
         exists=True,
-        argstr="--inputFixedLandmarkFilename %s")
+        argstr="--inputFixedLandmarkFilename %s",
+    )
     inputMovingLandmarkFilename = File(
         desc="input moving landmark. *.fcsv",
         exists=True,
-        argstr="--inputMovingLandmarkFilename %s")
+        argstr="--inputMovingLandmarkFilename %s",
+    )
     inputWeightFilename = File(
-        desc=
-        "Input weight file name for landmarks. Higher weighted landmark will be considered more heavily. Weights are propotional, that is the magnitude of weights will be normalized by its minimum and maximum value. ",
+        desc="Input weight file name for landmarks. Higher weighted landmark will be considered more heavily. Weights are proportional, that is the magnitude of weights will be normalized by its minimum and maximum value. 
", exists=True, - argstr="--inputWeightFilename %s") + argstr="--inputWeightFilename %s", + ) outputTransformFilename = traits.Either( traits.Bool, File(), hash_files=False, desc="output transform file name (ex: ./outputTransform.mat) ", - argstr="--outputTransformFilename %s") + argstr="--outputTransformFilename %s", + ) class BRAINSLandmarkInitializerOutputSpec(TraitedSpec): outputTransformFilename = File( - desc="output transform file name (ex: ./outputTransform.mat) ", - exists=True) + desc="output transform file name (ex: ./outputTransform.mat) ", exists=True + ) class BRAINSLandmarkInitializer(SEMLikeCommandLine): """title: BRAINSLandmarkInitializer -category: Utilities.BRAINS - -description: Create transformation file (*mat) from a pair of landmarks (*fcsv) files. + category: Utilities.BRAINS -version: 1.0 + description: Create transformation file (*mat) from a pair of landmarks (*fcsv) files. -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + version: 1.0 -contributor: Eunyoung Regina Kim + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -""" + contributor: Eunyoung Regina Kim + """ input_spec = BRAINSLandmarkInitializerInputSpec output_spec = BRAINSLandmarkInitializerOutputSpec _cmd = " BRAINSLandmarkInitializer " - _outputs_filenames = {'outputTransformFilename': 'outputTransformFilename'} + _outputs_filenames = {"outputTransformFilename": "outputTransformFilename"} _redirect_x = False class insertMidACPCpointInputSpec(CommandLineInputSpec): inputLandmarkFile = File( - desc="Input landmark file (.fcsv)", - exists=True, - argstr="--inputLandmarkFile %s") + desc="Input landmark file (.fcsv)", exists=True, argstr="--inputLandmarkFile %s" + ) outputLandmarkFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Output landmark file (.fcsv)", - argstr="--outputLandmarkFile %s") + argstr="--outputLandmarkFile %s", + ) class insertMidACPCpointOutputSpec(TraitedSpec): @@ -908,88 +940,86 @@ class insertMidACPCpointOutputSpec(TraitedSpec): class insertMidACPCpoint(SEMLikeCommandLine): """title: MidACPC Landmark Insertion -category: Utilities.BRAINS - -description: This program gets a landmark fcsv file and adds a new landmark as the midpoint between AC and PC points to the output landmark fcsv file + category: Utilities.BRAINS -contributor: Ali Ghayoor + description: This program gets a landmark fcsv file and adds a new landmark as the midpoint between AC and PC points to the output landmark fcsv file -""" + contributor: Ali Ghayoor + """ input_spec = insertMidACPCpointInputSpec output_spec = insertMidACPCpointOutputSpec _cmd = " insertMidACPCpoint " - _outputs_filenames = {'outputLandmarkFile': 'outputLandmarkFile'} + _outputs_filenames = {"outputLandmarkFile": "outputLandmarkFile"} _redirect_x = False class BRAINSSnapShotWriterInputSpec(CommandLineInputSpec): inputVolumes = InputMultiPath( File(exists=True), - desc= - "Input image volume list to be extracted as 2D image. Multiple input is possible. At least one input is required.", - argstr="--inputVolumes %s...") + desc="Input image volume list to be extracted as 2D image. Multiple input is possible. At least one input is required.", + argstr="--inputVolumes %s...", + ) inputBinaryVolumes = InputMultiPath( File(exists=True), - desc= - "Input mask (binary) volume list to be extracted as 2D image. Multiple input is possible.", - argstr="--inputBinaryVolumes %s...") + desc="Input mask (binary) volume list to be extracted as 2D image. 
Multiple input is possible.", + argstr="--inputBinaryVolumes %s...", + ) inputSliceToExtractInPhysicalPoint = InputMultiPath( traits.Float, - desc= - "2D slice number of input images. For autoWorkUp output, which AC-PC aligned, 0,0,0 will be the center.", + desc="2D slice number of input images. For autoWorkUp output, which AC-PC aligned, 0,0,0 will be the center.", sep=",", - argstr="--inputSliceToExtractInPhysicalPoint %s") + argstr="--inputSliceToExtractInPhysicalPoint %s", + ) inputSliceToExtractInIndex = InputMultiPath( traits.Int, - desc= - "2D slice number of input images. For size of 256*256*256 image, 128 is usually used.", + desc="2D slice number of input images. For size of 256*256*256 image, 128 is usually used.", sep=",", - argstr="--inputSliceToExtractInIndex %s") + argstr="--inputSliceToExtractInIndex %s", + ) inputSliceToExtractInPercent = InputMultiPath( traits.Int, - desc= - "2D slice number of input images. Percentage input from 0%-100%. (ex. --inputSliceToExtractInPercent 50,50,50", + desc="2D slice number of input images. Percentage input from 0%-100%. (ex. --inputSliceToExtractInPercent 50,50,50", sep=",", - argstr="--inputSliceToExtractInPercent %s") + argstr="--inputSliceToExtractInPercent %s", + ) inputPlaneDirection = InputMultiPath( traits.Int, - desc= - "Plane to display. In general, 0=saggital, 1=coronal, and 2=axial plane.", + desc="Plane to display. In general, 0=sagittal, 1=coronal, and 2=axial plane.", sep=",", - argstr="--inputPlaneDirection %s") + argstr="--inputPlaneDirection %s", + ) outputFilename = traits.Either( traits.Bool, File(), hash_files=False, desc="2D file name of input images. Required.", - argstr="--outputFilename %s") + argstr="--outputFilename %s", + ) class BRAINSSnapShotWriterOutputSpec(TraitedSpec): - outputFilename = File( - desc="2D file name of input images. Required.", exists=True) + outputFilename = File(desc="2D file name of input images. Required.", exists=True) class BRAINSSnapShotWriter(SEMLikeCommandLine): """title: BRAINSSnapShotWriter -category: Utilities.BRAINS + category: Utilities.BRAINS -description: Create 2D snapshot of input images. Mask images are color-coded + description: Create 2D snapshot of input images. Mask images are color-coded -version: 1.0 + version: 1.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -contributor: Eunyoung Regina Kim - -""" + contributor: Eunyoung Regina Kim + """ input_spec = BRAINSSnapShotWriterInputSpec output_spec = BRAINSSnapShotWriterOutputSpec _cmd = " BRAINSSnapShotWriter " - _outputs_filenames = {'outputFilename': 'outputFilename'} + _outputs_filenames = {"outputFilename": "outputFilename"} _redirect_x = False @@ -997,27 +1027,30 @@ class JointHistogramInputSpec(CommandLineInputSpec): inputVolumeInXAxis = File( desc="The Input image to be computed for statistics", exists=True, - argstr="--inputVolumeInXAxis %s") + argstr="--inputVolumeInXAxis %s", + ) inputVolumeInYAxis = File( desc="The Input image to be computed for statistics", exists=True, - argstr="--inputVolumeInYAxis %s") + argstr="--inputVolumeInYAxis %s", + ) inputMaskVolumeInXAxis = File( - desc= - "Input mask volume for inputVolumeInXAxis. Histogram will be computed just for the masked region", + desc="Input mask volume for inputVolumeInXAxis. 
Histogram will be computed just for the masked region", exists=True, - argstr="--inputMaskVolumeInXAxis %s") + argstr="--inputMaskVolumeInXAxis %s", + ) inputMaskVolumeInYAxis = File( - desc= - "Input mask volume for inputVolumeInYAxis. Histogram will be computed just for the masked region", + desc="Input mask volume for inputVolumeInYAxis. Histogram will be computed just for the masked region", exists=True, - argstr="--inputMaskVolumeInYAxis %s") + argstr="--inputMaskVolumeInYAxis %s", + ) outputJointHistogramImage = traits.Str( - desc= - " output joint histogram image file name. Histogram is usually 2D image. ", - argstr="--outputJointHistogramImage %s") + desc=" output joint histogram image file name. Histogram is usually 2D image. ", + argstr="--outputJointHistogramImage %s", + ) verbose = traits.Bool( - desc=" print debugging information, ", argstr="--verbose ") + desc=" print debugging information, ", argstr="--verbose " + ) class JointHistogramOutputSpec(TraitedSpec): @@ -1027,15 +1060,14 @@ class JointHistogramOutputSpec(TraitedSpec): class JointHistogram(SEMLikeCommandLine): """title: Write Out Image Intensities -category: Utilities.BRAINS + category: Utilities.BRAINS -description: For Analysis + description: For Analysis -version: 0.1 + version: 0.1 -contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu - -""" + contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu + """ input_spec = JointHistogramInputSpec output_spec = JointHistogramOutputSpec @@ -1046,51 +1078,48 @@ class JointHistogram(SEMLikeCommandLine): class ShuffleVectorsModuleInputSpec(CommandLineInputSpec): inputVectorFileBaseName = File( - desc= - "input vector file name prefix. Usually end with .txt and header file has prost fix of .txt.hdr", + desc="input vector file name prefix. Usually ends with .txt and the header file has a postfix of .txt.hdr", exists=True, - argstr="--inputVectorFileBaseName %s") + argstr="--inputVectorFileBaseName %s", + ) outputVectorFileBaseName = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "output vector file name prefix. Usually end with .txt and header file has prost fix of .txt.hdr", - argstr="--outputVectorFileBaseName %s") + desc="output vector file name prefix. Usually ends with .txt and the header file has a postfix of .txt.hdr", + argstr="--outputVectorFileBaseName %s", + ) resampleProportion = traits.Float( - desc= - "downsample size of 1 will be the same size as the input images, downsample size of 3 will throw 2/3 the vectors away.", - argstr="--resampleProportion %f") + desc="downsample size of 1 will be the same size as the input images, downsample size of 3 will throw 2/3 the vectors away.", + argstr="--resampleProportion %f", + ) class ShuffleVectorsModuleOutputSpec(TraitedSpec): outputVectorFileBaseName = File( - desc= - "output vector file name prefix. 
Usually ends with .txt and the header file has a postfix of .txt.hdr", + exists=True, + ) class ShuffleVectorsModule(SEMLikeCommandLine): """title: ShuffleVectors -category: Utilities.BRAINS + category: Utilities.BRAINS -description: Automatic Segmentation using neural networks + description: Automatic Segmentation using neural networks -version: 1.0 + version: 1.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -contributor: Hans Johnson - -""" + contributor: Hans Johnson + """ input_spec = ShuffleVectorsModuleInputSpec output_spec = ShuffleVectorsModuleOutputSpec _cmd = " ShuffleVectorsModule " - _outputs_filenames = { - 'outputVectorFileBaseName': 'outputVectorFileBaseName' - } + _outputs_filenames = {"outputVectorFileBaseName": "outputVectorFileBaseName"} _redirect_x = False @@ -1098,32 +1127,38 @@ class ImageRegionPlotterInputSpec(CommandLineInputSpec): inputVolume1 = File( desc="The Input image to be computed for statistics", exists=True, - argstr="--inputVolume1 %s") + argstr="--inputVolume1 %s", + ) inputVolume2 = File( desc="The Input image to be computed for statistics", exists=True, - argstr="--inputVolume2 %s") + argstr="--inputVolume2 %s", + ) inputBinaryROIVolume = File( desc="The Input binary image for region of interest", exists=True, - argstr="--inputBinaryROIVolume %s") + argstr="--inputBinaryROIVolume %s", + ) inputLabelVolume = File( - desc="The Label Image", exists=True, argstr="--inputLabelVolume %s") + desc="The Label Image", exists=True, argstr="--inputLabelVolume %s" + ) numberOfHistogramBins = traits.Int( - desc=" the number of histogram levels", - argstr="--numberOfHistogramBins %d") + desc=" the number of histogram levels", argstr="--numberOfHistogramBins %d" + ) outputJointHistogramData = traits.Str( - desc=" output data file name", argstr="--outputJointHistogramData %s") + desc=" output data file name", argstr="--outputJointHistogramData %s" + ) useROIAUTO = traits.Bool( - desc= - " Use ROIAUTO to compute region of interest. This cannot be used with inputLabelVolume", - argstr="--useROIAUTO ") + desc=" Use ROIAUTO to compute region of interest. This cannot be used with inputLabelVolume", + argstr="--useROIAUTO ", + ) useIntensityForHistogram = traits.Bool( - desc= - " Create Intensity Joint Histogram instead of Quantile Joint Histogram", - argstr="--useIntensityForHistogram ") + desc=" Create Intensity Joint Histogram instead of Quantile Joint Histogram", + argstr="--useIntensityForHistogram ", + ) verbose = traits.Bool( - desc=" print debugging information, ", argstr="--verbose ") + desc=" print debugging information, ", argstr="--verbose " + ) class ImageRegionPlotterOutputSpec(TraitedSpec): @@ -1133,15 +1168,14 @@ class ImageRegionPlotterOutputSpec(TraitedSpec): class ImageRegionPlotter(SEMLikeCommandLine): """title: Write Out Image Intensities -category: Utilities.BRAINS + category: Utilities.BRAINS -description: For Analysis + description: For Analysis -version: 0.1 + version: 0.1 -contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu - -""" + contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu + """ input_spec = ImageRegionPlotterInputSpec output_spec = ImageRegionPlotterOutputSpec @@ -1152,59 +1186,61 @@ class ImageRegionPlotter(SEMLikeCommandLine): class fcsv_to_hdf5InputSpec(CommandLineInputSpec): versionID = traits.Str( - desc= - ", Current version ID. 
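The recurring output idiom in these specs is traits.Either(traits.Bool, File(), hash_files=False) paired with the class-level _outputs_filenames map: True requests the default output name, a string overrides it, and hash_files=False keeps the not-yet-existing path out of the input hash. A sketch using ShuffleVectorsModule, with a placeholder input as before:

from nipype.interfaces.semtools.utilities.brains import ShuffleVectorsModule

shuffle = ShuffleVectorsModule()
shuffle.inputs.inputVectorFileBaseName = "vectors.txt"  # hypothetical; must exist
shuffle.inputs.outputVectorFileBaseName = True  # -> default "outputVectorFileBaseName"
shuffle.inputs.resampleProportion = 1.0         # 1 keeps the same size as the input
print(shuffle.cmdline)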
It should be match with the version of BCD that will be using the output model file, ", - argstr="--versionID %s") + desc=", Current version ID. It should match the version of BCD that will use the output model file, ", + argstr="--versionID %s", + ) landmarksInformationFile = traits.Either( traits.Bool, File(), hash_files=False, desc=", name of HDF5 file to write matrices into, ", - argstr="--landmarksInformationFile %s") + argstr="--landmarksInformationFile %s", + ) landmarkTypesList = File( desc=", file containing list of landmark types, ", exists=True, - argstr="--landmarkTypesList %s") + argstr="--landmarkTypesList %s", + ) modelFile = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", name of HDF5 file containing BRAINSConstellationDetector Model file (LLSMatrices, LLSMeans and LLSSearchRadii), ", - argstr="--modelFile %s") + desc=", name of HDF5 file containing BRAINSConstellationDetector Model file (LLSMatrices, LLSMeans and LLSSearchRadii), ", + argstr="--modelFile %s", + ) landmarkGlobPattern = traits.Str( - desc="Glob pattern to select fcsv files", - argstr="--landmarkGlobPattern %s") + desc="Glob pattern to select fcsv files", argstr="--landmarkGlobPattern %s" + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class fcsv_to_hdf5OutputSpec(TraitedSpec): landmarksInformationFile = File( - desc=", name of HDF5 file to write matrices into, ", - exists=True) + desc=", name of HDF5 file to write matrices into, ", exists=True + ) modelFile = File( - desc= - ", name of HDF5 file containing BRAINSConstellationDetector Model file (LLSMatrices, LLSMeans and LLSSearchRadii), ", - exists=True) + desc=", name of HDF5 file containing BRAINSConstellationDetector Model file (LLSMatrices, LLSMeans and LLSSearchRadii), ", + exists=True, + ) class fcsv_to_hdf5(SEMLikeCommandLine): """title: fcsv_to_hdf5 (BRAINS) -category: Utilities.BRAINS - -description: Convert a collection of fcsv files to a HDF5 format file + category: Utilities.BRAINS -""" + description: Convert a collection of fcsv files to a HDF5 format file + """ input_spec = fcsv_to_hdf5InputSpec output_spec = fcsv_to_hdf5OutputSpec _cmd = " fcsv_to_hdf5 " _outputs_filenames = { - 'modelFile': 'modelFile', - 'landmarksInformationFile': 'landmarksInformationFile.h5' + "modelFile": "modelFile", + "landmarksInformationFile": "landmarksInformationFile.h5", } _redirect_x = False @@ -1213,38 +1249,38 @@ class FindCenterOfBrainInputSpec(CommandLineInputSpec): inputVolume = File( desc="The image in which to find the center.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) imageMask = File(exists=True, argstr="--imageMask %s") clippedImageMask = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--clippedImageMask %s") + traits.Bool, File(), hash_files=False, argstr="--clippedImageMask %s" + ) maximize = traits.Bool(argstr="--maximize ") axis = traits.Int(argstr="--axis %d") - otsuPercentileThreshold = traits.Float( - argstr="--otsuPercentileThreshold %f") + otsuPercentileThreshold = traits.Float(argstr="--otsuPercentileThreshold %f") closingSize = traits.Int(argstr="--closingSize %d") headSizeLimit = traits.Float(argstr="--headSizeLimit %f") headSizeEstimate = traits.Float(argstr="--headSizeEstimate %f") backgroundValue = traits.Int(argstr="--backgroundValue %d") generateDebugImages = traits.Bool(argstr="--generateDebugImages ") debugDistanceImage = 
traits.Either( - traits.Bool, - File(), - hash_files=False, - argstr="--debugDistanceImage %s") + traits.Bool, File(), hash_files=False, argstr="--debugDistanceImage %s" + ) debugGridImage = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--debugGridImage %s") + traits.Bool, File(), hash_files=False, argstr="--debugGridImage %s" + ) debugAfterGridComputationsForegroundImage = traits.Either( traits.Bool, File(), hash_files=False, - argstr="--debugAfterGridComputationsForegroundImage %s") + argstr="--debugAfterGridComputationsForegroundImage %s", + ) debugClippedImageMask = traits.Either( - traits.Bool, - File(), - hash_files=False, - argstr="--debugClippedImageMask %s") + traits.Bool, File(), hash_files=False, argstr="--debugClippedImageMask %s" + ) debugTrimmedImage = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--debugTrimmedImage %s") + traits.Bool, File(), hash_files=False, argstr="--debugTrimmedImage %s" + ) class FindCenterOfBrainOutputSpec(TraitedSpec): @@ -1259,35 +1295,28 @@ class FindCenterOfBrainOutputSpec(TraitedSpec): class FindCenterOfBrain(SEMLikeCommandLine): """title: Center Of Brain (BRAINS) -category: Utilities.BRAINS - -description: Finds the center point of a brain + category: Utilities.BRAINS -version: 3.0.0 + description: Finds the center point of a brain -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + version: 3.0.0 -contributor: Hans J. Johnson, hans-johnson -at- uiowa.edu, http://wwww.psychiatry.uiowa.edu + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering + contributor: Hans J. 
Johnson, hans-johnson -at- uiowa.edu, http://wwww.psychiatry.uiowa.edu -""" + acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering + """ input_spec = FindCenterOfBrainInputSpec output_spec = FindCenterOfBrainOutputSpec _cmd = " FindCenterOfBrain " _outputs_filenames = { - 'debugClippedImageMask': - 'debugClippedImageMask.nii', - 'debugTrimmedImage': - 'debugTrimmedImage.nii', - 'debugDistanceImage': - 'debugDistanceImage.nii', - 'debugGridImage': - 'debugGridImage.nii', - 'clippedImageMask': - 'clippedImageMask.nii', - 'debugAfterGridComputationsForegroundImage': - 'debugAfterGridComputationsForegroundImage.nii' + "debugClippedImageMask": "debugClippedImageMask.nii", + "debugTrimmedImage": "debugTrimmedImage.nii", + "debugDistanceImage": "debugDistanceImage.nii", + "debugGridImage": "debugGridImage.nii", + "clippedImageMask": "clippedImageMask.nii", + "debugAfterGridComputationsForegroundImage": "debugAfterGridComputationsForegroundImage.nii", } _redirect_x = False diff --git a/nipype/interfaces/semtools/utilities/tests/__init__.py b/nipype/interfaces/semtools/utilities/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/utilities/tests/__init__.py +++ b/nipype/interfaces/semtools/utilities/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSAlignMSP.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSAlignMSP.py index 27b65a4eba..81fee98e93 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSAlignMSP.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSAlignMSP.py @@ -1,46 +1,69 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import BRAINSAlignMSP def test_BRAINSAlignMSP_inputs(): input_map = dict( - BackgroundFillValue=dict(argstr='--BackgroundFillValue %s', ), + BackgroundFillValue=dict( + argstr="--BackgroundFillValue %s", + ), OutputresampleMSP=dict( - argstr='--OutputresampleMSP %s', + argstr="--OutputresampleMSP %s", hash_files=False, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - mspQualityLevel=dict(argstr='--mspQualityLevel %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - rescaleIntensities=dict(argstr='--rescaleIntensities ', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + interpolationMode=dict( + argstr="--interpolationMode %s", + ), + mspQualityLevel=dict( + argstr="--mspQualityLevel %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), + rescaleIntensities=dict( + argstr="--rescaleIntensities ", + ), rescaleIntensitiesOutputRange=dict( - argstr='--rescaleIntensitiesOutputRange %s', - sep=',', + argstr="--rescaleIntensitiesOutputRange %s", + sep=",", ), resultsDir=dict( - argstr='--resultsDir %s', + argstr="--resultsDir %s", hash_files=False, ), - trimRescaledIntensities=dict(argstr='--trimRescaledIntensities %f', ), - verbose=dict(argstr='--verbose ', ), + trimRescaledIntensities=dict( + argstr="--trimRescaledIntensities %f", + ), + verbose=dict( + argstr="--verbose ", + ), writedebuggingImagesLevel=dict( - 
argstr='--writedebuggingImagesLevel %d', ), + argstr="--writedebuggingImagesLevel %d", + ), ) inputs = BRAINSAlignMSP.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSAlignMSP_outputs(): output_map = dict( - OutputresampleMSP=dict(), + OutputresampleMSP=dict( + extensions=None, + ), resultsDir=dict(), ) outputs = BRAINSAlignMSP.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSClipInferior.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSClipInferior.py index 7a0528f201..92e45758b1 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSClipInferior.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSClipInferior.py @@ -1,21 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import BRAINSClipInferior def test_BRAINSClipInferior_inputs(): input_map = dict( - BackgroundFillValue=dict(argstr='--BackgroundFillValue %s', ), - acLowerBound=dict(argstr='--acLowerBound %f', ), - args=dict(argstr='%s', ), + BackgroundFillValue=dict( + argstr="--BackgroundFillValue %s", + ), + acLowerBound=dict( + argstr="--acLowerBound %f", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -24,8 +34,14 @@ def test_BRAINSClipInferior_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSClipInferior_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = BRAINSClipInferior.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSConstellationModeler.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSConstellationModeler.py index a124ad60d9..2e26a91e05 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSConstellationModeler.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSConstellationModeler.py @@ -1,48 +1,72 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import BRAINSConstellationModeler def test_BRAINSConstellationModeler_inputs(): input_map = dict( - BackgroundFillValue=dict(argstr='--BackgroundFillValue %s', ), - args=dict(argstr='%s', ), + BackgroundFillValue=dict( + argstr="--BackgroundFillValue %s", + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputTrainingList=dict(argstr='--inputTrainingList %s', ), - mspQualityLevel=dict(argstr='--mspQualityLevel %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputTrainingList=dict( + argstr="--inputTrainingList %s", + extensions=None, + ), + mspQualityLevel=dict( + argstr="--mspQualityLevel %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), optimizedLandmarksFilenameExtender=dict( - argstr='--optimizedLandmarksFilenameExtender %s', ), + 
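Each regenerated test_auto_*.py here follows one template: build a literal map of the metadata every trait should carry, instantiate the spec, and compare attribute by attribute. Condensed to its essentials (shown for BRAINSClipInferior with only two traits kept; this is an illustration, not the generated file itself):

from nipype.interfaces.semtools.utilities.brains import BRAINSClipInferior

def test_BRAINSClipInferior_argstrs():
    expected = {
        "inputVolume": dict(argstr="--inputVolume %s", extensions=None),
        "outputVolume": dict(argstr="--outputVolume %s", hash_files=False),
    }
    inputs = BRAINSClipInferior.input_spec()
    for name, metadata in expected.items():
        for metakey, value in metadata.items():
            # Trait metadata is readable as plain attributes on the trait object.
            assert getattr(inputs.traits()[name], metakey) == value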
argstr="--optimizedLandmarksFilenameExtender %s", + ), outputModel=dict( - argstr='--outputModel %s', + argstr="--outputModel %s", hash_files=False, ), - rescaleIntensities=dict(argstr='--rescaleIntensities ', ), + rescaleIntensities=dict( + argstr="--rescaleIntensities ", + ), rescaleIntensitiesOutputRange=dict( - argstr='--rescaleIntensitiesOutputRange %s', - sep=',', + argstr="--rescaleIntensitiesOutputRange %s", + sep=",", ), resultsDir=dict( - argstr='--resultsDir %s', + argstr="--resultsDir %s", hash_files=False, ), - saveOptimizedLandmarks=dict(argstr='--saveOptimizedLandmarks ', ), - trimRescaledIntensities=dict(argstr='--trimRescaledIntensities %f', ), - verbose=dict(argstr='--verbose ', ), + saveOptimizedLandmarks=dict( + argstr="--saveOptimizedLandmarks ", + ), + trimRescaledIntensities=dict( + argstr="--trimRescaledIntensities %f", + ), + verbose=dict( + argstr="--verbose ", + ), writedebuggingImagesLevel=dict( - argstr='--writedebuggingImagesLevel %d', ), + argstr="--writedebuggingImagesLevel %d", + ), ) inputs = BRAINSConstellationModeler.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSConstellationModeler_outputs(): output_map = dict( - outputModel=dict(), + outputModel=dict( + extensions=None, + ), resultsDir=dict(), ) outputs = BRAINSConstellationModeler.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSEyeDetector.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSEyeDetector.py index 9df3c8b8d6..c511be64c5 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSEyeDetector.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSEyeDetector.py @@ -1,20 +1,28 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import BRAINSEyeDetector def test_BRAINSEyeDetector_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debugDir=dict(argstr='--debugDir %s', ), + args=dict( + argstr="%s", + ), + debugDir=dict( + argstr="--debugDir %s", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -23,8 +31,14 @@ def test_BRAINSEyeDetector_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSEyeDetector_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = BRAINSEyeDetector.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSInitializedControlPoints.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSInitializedControlPoints.py index bc3807f9f8..05ba1ae7d6 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSInitializedControlPoints.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSInitializedControlPoints.py @@ -1,29 +1,37 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import 
BRAINSInitializedControlPoints def test_BRAINSInitializedControlPoints_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputLandmarksFile=dict(argstr='--outputLandmarksFile %s', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), + outputLandmarksFile=dict( + argstr="--outputLandmarksFile %s", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), permuteOrder=dict( - argstr='--permuteOrder %s', - sep=',', + argstr="--permuteOrder %s", + sep=",", ), splineGridSize=dict( - argstr='--splineGridSize %s', - sep=',', + argstr="--splineGridSize %s", + sep=",", ), ) inputs = BRAINSInitializedControlPoints.input_spec() @@ -31,8 +39,14 @@ def test_BRAINSInitializedControlPoints_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSInitializedControlPoints_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = BRAINSInitializedControlPoints.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLandmarkInitializer.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLandmarkInitializer.py index 2456b8fb4d..efaa484008 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLandmarkInitializer.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLandmarkInitializer.py @@ -1,22 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import BRAINSLandmarkInitializer def test_BRAINSLandmarkInitializer_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputFixedLandmarkFilename=dict( - argstr='--inputFixedLandmarkFilename %s', ), + argstr="--inputFixedLandmarkFilename %s", + extensions=None, + ), inputMovingLandmarkFilename=dict( - argstr='--inputMovingLandmarkFilename %s', ), - inputWeightFilename=dict(argstr='--inputWeightFilename %s', ), + argstr="--inputMovingLandmarkFilename %s", + extensions=None, + ), + inputWeightFilename=dict( + argstr="--inputWeightFilename %s", + extensions=None, + ), outputTransformFilename=dict( - argstr='--outputTransformFilename %s', + argstr="--outputTransformFilename %s", hash_files=False, ), ) @@ -25,8 +33,14 @@ def test_BRAINSLandmarkInitializer_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSLandmarkInitializer_outputs(): - output_map = dict(outputTransformFilename=dict(), ) + output_map = dict( + outputTransformFilename=dict( + extensions=None, + ), + ) outputs = BRAINSLandmarkInitializer.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLinearModelerEPCA.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLinearModelerEPCA.py index b6b83baa12..beb7a5f664 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLinearModelerEPCA.py +++ 
b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLinearModelerEPCA.py @@ -1,23 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import BRAINSLinearModelerEPCA def test_BRAINSLinearModelerEPCA_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputTrainingList=dict(argstr='--inputTrainingList %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputTrainingList=dict( + argstr="--inputTrainingList %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), ) inputs = BRAINSLinearModelerEPCA.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSLinearModelerEPCA_outputs(): output_map = dict() outputs = BRAINSLinearModelerEPCA.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLmkTransform.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLmkTransform.py index 262d948029..d0cdc8cc7f 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLmkTransform.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLmkTransform.py @@ -1,26 +1,41 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import BRAINSLmkTransform def test_BRAINSLmkTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputFixedLandmarks=dict(argstr='--inputFixedLandmarks %s', ), - inputMovingLandmarks=dict(argstr='--inputMovingLandmarks %s', ), - inputMovingVolume=dict(argstr='--inputMovingVolume %s', ), - inputReferenceVolume=dict(argstr='--inputReferenceVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputFixedLandmarks=dict( + argstr="--inputFixedLandmarks %s", + extensions=None, + ), + inputMovingLandmarks=dict( + argstr="--inputMovingLandmarks %s", + extensions=None, + ), + inputMovingVolume=dict( + argstr="--inputMovingVolume %s", + extensions=None, + ), + inputReferenceVolume=dict( + argstr="--inputReferenceVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputAffineTransform=dict( - argstr='--outputAffineTransform %s', + argstr="--outputAffineTransform %s", hash_files=False, ), outputResampledVolume=dict( - argstr='--outputResampledVolume %s', + argstr="--outputResampledVolume %s", hash_files=False, ), ) @@ -29,10 +44,16 @@ def test_BRAINSLmkTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSLmkTransform_outputs(): output_map = dict( - outputAffineTransform=dict(), - outputResampledVolume=dict(), + outputAffineTransform=dict( + extensions=None, + ), + outputResampledVolume=dict( + extensions=None, + ), ) outputs = BRAINSLmkTransform.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSMush.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSMush.py index 9627258bcf..e6eac9cf2d 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSMush.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSMush.py @@ -1,60 +1,92 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from 
__future__ import unicode_literals from ..brains import BRAINSMush def test_BRAINSMush_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), boundingBoxSize=dict( - argstr='--boundingBoxSize %s', - sep=',', + argstr="--boundingBoxSize %s", + sep=",", ), boundingBoxStart=dict( - argstr='--boundingBoxStart %s', - sep=',', + argstr="--boundingBoxStart %s", + sep=",", + ), + desiredMean=dict( + argstr="--desiredMean %f", + ), + desiredVariance=dict( + argstr="--desiredVariance %f", ), - desiredMean=dict(argstr='--desiredMean %f', ), - desiredVariance=dict(argstr='--desiredVariance %f', ), environ=dict( nohash=True, usedefault=True, ), - inputFirstVolume=dict(argstr='--inputFirstVolume %s', ), - inputMaskVolume=dict(argstr='--inputMaskVolume %s', ), - inputSecondVolume=dict(argstr='--inputSecondVolume %s', ), - lowerThresholdFactor=dict(argstr='--lowerThresholdFactor %f', ), - lowerThresholdFactorPre=dict(argstr='--lowerThresholdFactorPre %f', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputFirstVolume=dict( + argstr="--inputFirstVolume %s", + extensions=None, + ), + inputMaskVolume=dict( + argstr="--inputMaskVolume %s", + extensions=None, + ), + inputSecondVolume=dict( + argstr="--inputSecondVolume %s", + extensions=None, + ), + lowerThresholdFactor=dict( + argstr="--lowerThresholdFactor %f", + ), + lowerThresholdFactorPre=dict( + argstr="--lowerThresholdFactorPre %f", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputMask=dict( - argstr='--outputMask %s', + argstr="--outputMask %s", hash_files=False, ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), outputWeightsFile=dict( - argstr='--outputWeightsFile %s', + argstr="--outputWeightsFile %s", hash_files=False, ), seed=dict( - argstr='--seed %s', - sep=',', + argstr="--seed %s", + sep=",", + ), + upperThresholdFactor=dict( + argstr="--upperThresholdFactor %f", + ), + upperThresholdFactorPre=dict( + argstr="--upperThresholdFactorPre %f", ), - upperThresholdFactor=dict(argstr='--upperThresholdFactor %f', ), - upperThresholdFactorPre=dict(argstr='--upperThresholdFactorPre %f', ), ) inputs = BRAINSMush.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSMush_outputs(): output_map = dict( - outputMask=dict(), - outputVolume=dict(), - outputWeightsFile=dict(), + outputMask=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), + outputWeightsFile=dict( + extensions=None, + ), ) outputs = BRAINSMush.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSSnapShotWriter.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSSnapShotWriter.py index 1b803956e7..a120b062a4 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSSnapShotWriter.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSSnapShotWriter.py @@ -1,35 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import BRAINSSnapShotWriter def test_BRAINSSnapShotWriter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputBinaryVolumes=dict(argstr='--inputBinaryVolumes %s...', ), + inputBinaryVolumes=dict( + argstr="--inputBinaryVolumes %s...", + ), inputPlaneDirection=dict( - 
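Besides the quote-style churn, the substantive change in these maps is the new extensions=None entry on every File trait: the regenerated checks now assert that these specs impose no file-suffix restriction. Roughly, under current nipype File semantics:

from nipype.interfaces.base import File, TraitedSpec

class ExampleSpec(TraitedSpec):
    anyfile = File()  # no suffix restriction -> trait metadata extensions is None
    nifti = File(extensions=[".nii", ".nii.gz"])  # a restricted counterpart

spec = ExampleSpec()
print(spec.traits()["anyfile"].extensions)  # None, as the maps above assert
print(spec.traits()["nifti"].extensions)    # ['.nii', '.nii.gz']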
argstr='--inputPlaneDirection %s', - sep=',', + argstr="--inputPlaneDirection %s", + sep=",", ), inputSliceToExtractInIndex=dict( - argstr='--inputSliceToExtractInIndex %s', - sep=',', + argstr="--inputSliceToExtractInIndex %s", + sep=",", ), inputSliceToExtractInPercent=dict( - argstr='--inputSliceToExtractInPercent %s', - sep=',', + argstr="--inputSliceToExtractInPercent %s", + sep=",", ), inputSliceToExtractInPhysicalPoint=dict( - argstr='--inputSliceToExtractInPhysicalPoint %s', - sep=',', + argstr="--inputSliceToExtractInPhysicalPoint %s", + sep=",", + ), + inputVolumes=dict( + argstr="--inputVolumes %s...", ), - inputVolumes=dict(argstr='--inputVolumes %s...', ), outputFilename=dict( - argstr='--outputFilename %s', + argstr="--outputFilename %s", hash_files=False, ), ) @@ -38,8 +43,14 @@ def test_BRAINSSnapShotWriter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSSnapShotWriter_outputs(): - output_map = dict(outputFilename=dict(), ) + output_map = dict( + outputFilename=dict( + extensions=None, + ), + ) outputs = BRAINSSnapShotWriter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTransformConvert.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTransformConvert.py index c6d10736d7..1eebbb0cec 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTransformConvert.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTransformConvert.py @@ -1,37 +1,54 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import BRAINSTransformConvert def test_BRAINSTransformConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), displacementVolume=dict( - argstr='--displacementVolume %s', + argstr="--displacementVolume %s", hash_files=False, ), environ=dict( nohash=True, usedefault=True, ), - inputTransform=dict(argstr='--inputTransform %s', ), - outputPrecisionType=dict(argstr='--outputPrecisionType %s', ), + inputTransform=dict( + argstr="--inputTransform %s", + extensions=None, + ), + outputPrecisionType=dict( + argstr="--outputPrecisionType %s", + ), outputTransform=dict( - argstr='--outputTransform %s', + argstr="--outputTransform %s", hash_files=False, ), - outputTransformType=dict(argstr='--outputTransformType %s', ), - referenceVolume=dict(argstr='--referenceVolume %s', ), + outputTransformType=dict( + argstr="--outputTransformType %s", + ), + referenceVolume=dict( + argstr="--referenceVolume %s", + extensions=None, + ), ) inputs = BRAINSTransformConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSTransformConvert_outputs(): output_map = dict( - displacementVolume=dict(), - outputTransform=dict(), + displacementVolume=dict( + extensions=None, + ), + outputTransform=dict( + extensions=None, + ), ) outputs = BRAINSTransformConvert.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTrimForegroundInDirection.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTrimForegroundInDirection.py index d2f3b74140..fa68f51e21 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTrimForegroundInDirection.py +++ 
b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTrimForegroundInDirection.py @@ -1,24 +1,40 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import BRAINSTrimForegroundInDirection def test_BRAINSTrimForegroundInDirection_inputs(): input_map = dict( - BackgroundFillValue=dict(argstr='--BackgroundFillValue %s', ), - args=dict(argstr='%s', ), - closingSize=dict(argstr='--closingSize %d', ), - directionCode=dict(argstr='--directionCode %d', ), + BackgroundFillValue=dict( + argstr="--BackgroundFillValue %s", + ), + args=dict( + argstr="%s", + ), + closingSize=dict( + argstr="--closingSize %d", + ), + directionCode=dict( + argstr="--directionCode %d", + ), environ=dict( nohash=True, usedefault=True, ), - headSizeLimit=dict(argstr='--headSizeLimit %f', ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - otsuPercentileThreshold=dict(argstr='--otsuPercentileThreshold %f', ), + headSizeLimit=dict( + argstr="--headSizeLimit %f", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), + otsuPercentileThreshold=dict( + argstr="--otsuPercentileThreshold %f", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), ) @@ -27,8 +43,14 @@ def test_BRAINSTrimForegroundInDirection_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSTrimForegroundInDirection_outputs(): - output_map = dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = BRAINSTrimForegroundInDirection.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_CleanUpOverlapLabels.py b/nipype/interfaces/semtools/utilities/tests/test_auto_CleanUpOverlapLabels.py index 936437886a..14ec09298c 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_CleanUpOverlapLabels.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_CleanUpOverlapLabels.py @@ -1,18 +1,21 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import CleanUpOverlapLabels def test_CleanUpOverlapLabels_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputBinaryVolumes=dict(argstr='--inputBinaryVolumes %s...', ), + inputBinaryVolumes=dict( + argstr="--inputBinaryVolumes %s...", + ), outputBinaryVolumes=dict( - argstr='--outputBinaryVolumes %s...', + argstr="--outputBinaryVolumes %s...", hash_files=False, ), ) @@ -21,8 +24,12 @@ def test_CleanUpOverlapLabels_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CleanUpOverlapLabels_outputs(): - output_map = dict(outputBinaryVolumes=dict(), ) + output_map = dict( + outputBinaryVolumes=dict(), + ) outputs = CleanUpOverlapLabels.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_FindCenterOfBrain.py b/nipype/interfaces/semtools/utilities/tests/test_auto_FindCenterOfBrain.py index 46924bf180..4a0d7c89c2 100644 --- 
a/nipype/interfaces/semtools/utilities/tests/test_auto_FindCenterOfBrain.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_FindCenterOfBrain.py @@ -1,63 +1,100 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import FindCenterOfBrain def test_FindCenterOfBrain_inputs(): input_map = dict( - args=dict(argstr='%s', ), - axis=dict(argstr='--axis %d', ), - backgroundValue=dict(argstr='--backgroundValue %d', ), + args=dict( + argstr="%s", + ), + axis=dict( + argstr="--axis %d", + ), + backgroundValue=dict( + argstr="--backgroundValue %d", + ), clippedImageMask=dict( - argstr='--clippedImageMask %s', + argstr="--clippedImageMask %s", hash_files=False, ), - closingSize=dict(argstr='--closingSize %d', ), + closingSize=dict( + argstr="--closingSize %d", + ), debugAfterGridComputationsForegroundImage=dict( - argstr='--debugAfterGridComputationsForegroundImage %s', + argstr="--debugAfterGridComputationsForegroundImage %s", hash_files=False, ), debugClippedImageMask=dict( - argstr='--debugClippedImageMask %s', + argstr="--debugClippedImageMask %s", hash_files=False, ), debugDistanceImage=dict( - argstr='--debugDistanceImage %s', + argstr="--debugDistanceImage %s", hash_files=False, ), debugGridImage=dict( - argstr='--debugGridImage %s', + argstr="--debugGridImage %s", hash_files=False, ), debugTrimmedImage=dict( - argstr='--debugTrimmedImage %s', + argstr="--debugTrimmedImage %s", hash_files=False, ), environ=dict( nohash=True, usedefault=True, ), - generateDebugImages=dict(argstr='--generateDebugImages ', ), - headSizeEstimate=dict(argstr='--headSizeEstimate %f', ), - headSizeLimit=dict(argstr='--headSizeLimit %f', ), - imageMask=dict(argstr='--imageMask %s', ), - inputVolume=dict(argstr='--inputVolume %s', ), - maximize=dict(argstr='--maximize ', ), - otsuPercentileThreshold=dict(argstr='--otsuPercentileThreshold %f', ), + generateDebugImages=dict( + argstr="--generateDebugImages ", + ), + headSizeEstimate=dict( + argstr="--headSizeEstimate %f", + ), + headSizeLimit=dict( + argstr="--headSizeLimit %f", + ), + imageMask=dict( + argstr="--imageMask %s", + extensions=None, + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + maximize=dict( + argstr="--maximize ", + ), + otsuPercentileThreshold=dict( + argstr="--otsuPercentileThreshold %f", + ), ) inputs = FindCenterOfBrain.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FindCenterOfBrain_outputs(): output_map = dict( - clippedImageMask=dict(), - debugAfterGridComputationsForegroundImage=dict(), - debugClippedImageMask=dict(), - debugDistanceImage=dict(), - debugGridImage=dict(), - debugTrimmedImage=dict(), + clippedImageMask=dict( + extensions=None, + ), + debugAfterGridComputationsForegroundImage=dict( + extensions=None, + ), + debugClippedImageMask=dict( + extensions=None, + ), + debugDistanceImage=dict( + extensions=None, + ), + debugGridImage=dict( + extensions=None, + ), + debugTrimmedImage=dict( + extensions=None, + ), ) outputs = FindCenterOfBrain.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_GenerateLabelMapFromProbabilityMap.py b/nipype/interfaces/semtools/utilities/tests/test_auto_GenerateLabelMapFromProbabilityMap.py index 9087d6667b..d3840ec7bd 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_GenerateLabelMapFromProbabilityMap.py +++ 
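FindCenterOfBrain's boolean debug outputs resolve through the _outputs_filenames map shown earlier, each defaulting to a .nii name in the working directory. A sketch with a placeholder input volume, under the same assumptions as the earlier examples:

from nipype.interfaces.semtools.utilities.brains import FindCenterOfBrain

fcob = FindCenterOfBrain()
fcob.inputs.inputVolume = "t1.nii.gz"  # hypothetical; must exist on disk
fcob.inputs.generateDebugImages = True
fcob.inputs.debugGridImage = True    # True -> default "debugGridImage.nii"
fcob.inputs.clippedImageMask = True  # True -> "clippedImageMask.nii"
print(fcob.cmdline)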
b/nipype/interfaces/semtools/utilities/tests/test_auto_GenerateLabelMapFromProbabilityMap.py @@ -1,19 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import GenerateLabelMapFromProbabilityMap def test_GenerateLabelMapFromProbabilityMap_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolumes=dict(argstr='--inputVolumes %s...', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + inputVolumes=dict( + argstr="--inputVolumes %s...", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputLabelVolume=dict( - argstr='--outputLabelVolume %s', + argstr="--outputLabelVolume %s", hash_files=False, ), ) @@ -22,8 +27,14 @@ def test_GenerateLabelMapFromProbabilityMap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateLabelMapFromProbabilityMap_outputs(): - output_map = dict(outputLabelVolume=dict(), ) + output_map = dict( + outputLabelVolume=dict( + extensions=None, + ), + ) outputs = GenerateLabelMapFromProbabilityMap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_ImageRegionPlotter.py b/nipype/interfaces/semtools/utilities/tests/test_auto_ImageRegionPlotter.py index f5372a240e..5b71204a67 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_ImageRegionPlotter.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_ImageRegionPlotter.py @@ -1,31 +1,55 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import ImageRegionPlotter def test_ImageRegionPlotter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputBinaryROIVolume=dict(argstr='--inputBinaryROIVolume %s', ), - inputLabelVolume=dict(argstr='--inputLabelVolume %s', ), - inputVolume1=dict(argstr='--inputVolume1 %s', ), - inputVolume2=dict(argstr='--inputVolume2 %s', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), + inputBinaryROIVolume=dict( + argstr="--inputBinaryROIVolume %s", + extensions=None, + ), + inputLabelVolume=dict( + argstr="--inputLabelVolume %s", + extensions=None, + ), + inputVolume1=dict( + argstr="--inputVolume1 %s", + extensions=None, + ), + inputVolume2=dict( + argstr="--inputVolume2 %s", + extensions=None, + ), + numberOfHistogramBins=dict( + argstr="--numberOfHistogramBins %d", + ), outputJointHistogramData=dict( - argstr='--outputJointHistogramData %s', ), - useIntensityForHistogram=dict(argstr='--useIntensityForHistogram ', ), - useROIAUTO=dict(argstr='--useROIAUTO ', ), - verbose=dict(argstr='--verbose ', ), + argstr="--outputJointHistogramData %s", + ), + useIntensityForHistogram=dict( + argstr="--useIntensityForHistogram ", + ), + useROIAUTO=dict( + argstr="--useROIAUTO ", + ), + verbose=dict( + argstr="--verbose ", + ), ) inputs = ImageRegionPlotter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageRegionPlotter_outputs(): output_map = dict() outputs = ImageRegionPlotter.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_JointHistogram.py 
b/nipype/interfaces/semtools/utilities/tests/test_auto_JointHistogram.py index daf6d5634f..d5e61c867a 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_JointHistogram.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_JointHistogram.py @@ -1,28 +1,46 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import JointHistogram def test_JointHistogram_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputMaskVolumeInXAxis=dict(argstr='--inputMaskVolumeInXAxis %s', ), - inputMaskVolumeInYAxis=dict(argstr='--inputMaskVolumeInYAxis %s', ), - inputVolumeInXAxis=dict(argstr='--inputVolumeInXAxis %s', ), - inputVolumeInYAxis=dict(argstr='--inputVolumeInYAxis %s', ), + inputMaskVolumeInXAxis=dict( + argstr="--inputMaskVolumeInXAxis %s", + extensions=None, + ), + inputMaskVolumeInYAxis=dict( + argstr="--inputMaskVolumeInYAxis %s", + extensions=None, + ), + inputVolumeInXAxis=dict( + argstr="--inputVolumeInXAxis %s", + extensions=None, + ), + inputVolumeInYAxis=dict( + argstr="--inputVolumeInYAxis %s", + extensions=None, + ), outputJointHistogramImage=dict( - argstr='--outputJointHistogramImage %s', ), - verbose=dict(argstr='--verbose ', ), + argstr="--outputJointHistogramImage %s", + ), + verbose=dict( + argstr="--verbose ", + ), ) inputs = JointHistogram.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JointHistogram_outputs(): output_map = dict() outputs = JointHistogram.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_ShuffleVectorsModule.py b/nipype/interfaces/semtools/utilities/tests/test_auto_ShuffleVectorsModule.py index 7bea38e2f1..4b7d3431bd 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_ShuffleVectorsModule.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_ShuffleVectorsModule.py @@ -1,29 +1,41 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import ShuffleVectorsModule def test_ShuffleVectorsModule_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputVectorFileBaseName=dict(argstr='--inputVectorFileBaseName %s', ), + inputVectorFileBaseName=dict( + argstr="--inputVectorFileBaseName %s", + extensions=None, + ), outputVectorFileBaseName=dict( - argstr='--outputVectorFileBaseName %s', + argstr="--outputVectorFileBaseName %s", hash_files=False, ), - resampleProportion=dict(argstr='--resampleProportion %f', ), + resampleProportion=dict( + argstr="--resampleProportion %f", + ), ) inputs = ShuffleVectorsModule.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ShuffleVectorsModule_outputs(): - output_map = dict(outputVectorFileBaseName=dict(), ) + output_map = dict( + outputVectorFileBaseName=dict( + extensions=None, + ), + ) outputs = ShuffleVectorsModule.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_fcsv_to_hdf5.py b/nipype/interfaces/semtools/utilities/tests/test_auto_fcsv_to_hdf5.py index 32e6ce65cd..896f630839 100644 --- 
a/nipype/interfaces/semtools/utilities/tests/test_auto_fcsv_to_hdf5.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_fcsv_to_hdf5.py @@ -1,37 +1,53 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import fcsv_to_hdf5 def test_fcsv_to_hdf5_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - landmarkGlobPattern=dict(argstr='--landmarkGlobPattern %s', ), - landmarkTypesList=dict(argstr='--landmarkTypesList %s', ), + landmarkGlobPattern=dict( + argstr="--landmarkGlobPattern %s", + ), + landmarkTypesList=dict( + argstr="--landmarkTypesList %s", + extensions=None, + ), landmarksInformationFile=dict( - argstr='--landmarksInformationFile %s', + argstr="--landmarksInformationFile %s", hash_files=False, ), modelFile=dict( - argstr='--modelFile %s', + argstr="--modelFile %s", hash_files=False, ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - versionID=dict(argstr='--versionID %s', ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), + versionID=dict( + argstr="--versionID %s", + ), ) inputs = fcsv_to_hdf5.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_fcsv_to_hdf5_outputs(): output_map = dict( - landmarksInformationFile=dict(), - modelFile=dict(), + landmarksInformationFile=dict( + extensions=None, + ), + modelFile=dict( + extensions=None, + ), ) outputs = fcsv_to_hdf5.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_insertMidACPCpoint.py b/nipype/interfaces/semtools/utilities/tests/test_auto_insertMidACPCpoint.py index 196b09b304..05aa2a3910 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_insertMidACPCpoint.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_insertMidACPCpoint.py @@ -1,18 +1,22 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import insertMidACPCpoint def test_insertMidACPCpoint_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputLandmarkFile=dict(argstr='--inputLandmarkFile %s', ), + inputLandmarkFile=dict( + argstr="--inputLandmarkFile %s", + extensions=None, + ), outputLandmarkFile=dict( - argstr='--outputLandmarkFile %s', + argstr="--outputLandmarkFile %s", hash_files=False, ), ) @@ -21,8 +25,14 @@ def test_insertMidACPCpoint_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_insertMidACPCpoint_outputs(): - output_map = dict(outputLandmarkFile=dict(), ) + output_map = dict( + outputLandmarkFile=dict( + extensions=None, + ), + ) outputs = insertMidACPCpoint.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationAligner.py b/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationAligner.py index 151a2c7b3a..472a1326e0 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationAligner.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationAligner.py @@ -1,18 +1,22 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import 
landmarksConstellationAligner def test_landmarksConstellationAligner_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputLandmarksPaired=dict(argstr='--inputLandmarksPaired %s', ), + inputLandmarksPaired=dict( + argstr="--inputLandmarksPaired %s", + extensions=None, + ), outputLandmarksPaired=dict( - argstr='--outputLandmarksPaired %s', + argstr="--outputLandmarksPaired %s", hash_files=False, ), ) @@ -21,8 +25,14 @@ def test_landmarksConstellationAligner_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_landmarksConstellationAligner_outputs(): - output_map = dict(outputLandmarksPaired=dict(), ) + output_map = dict( + outputLandmarksPaired=dict( + extensions=None, + ), + ) outputs = landmarksConstellationAligner.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationWeights.py b/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationWeights.py index 0bcd747d36..c14fdff775 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationWeights.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationWeights.py @@ -1,20 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brains import landmarksConstellationWeights def test_landmarksConstellationWeights_inputs(): input_map = dict( - LLSModel=dict(argstr='--LLSModel %s', ), - args=dict(argstr='%s', ), + LLSModel=dict( + argstr="--LLSModel %s", + extensions=None, + ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputTemplateModel=dict(argstr='--inputTemplateModel %s', ), - inputTrainingList=dict(argstr='--inputTrainingList %s', ), + inputTemplateModel=dict( + argstr="--inputTemplateModel %s", + extensions=None, + ), + inputTrainingList=dict( + argstr="--inputTrainingList %s", + extensions=None, + ), outputWeightsList=dict( - argstr='--outputWeightsList %s', + argstr="--outputWeightsList %s", hash_files=False, ), ) @@ -23,8 +33,14 @@ def test_landmarksConstellationWeights_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_landmarksConstellationWeights_outputs(): - output_map = dict(outputWeightsList=dict(), ) + output_map = dict( + outputWeightsList=dict( + extensions=None, + ), + ) outputs = landmarksConstellationWeights.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/__init__.py b/nipype/interfaces/slicer/__init__.py index c7ffc42259..ca191b99df 100644 --- a/nipype/interfaces/slicer/__init__.py +++ b/nipype/interfaces/slicer/__init__.py @@ -1,11 +1,22 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import +""" +3D Slicer is a platform for medical image informatics processing and visualization. + +For an EXPERIMENTAL implementation of an interface for the ``3dSlicer`` full framework, +please check `"dynamic" Slicer `__. 
+""" + from .diffusion import * from .segmentation import * from .filtering import * from .utilities import EMSegmentTransformToNewFormat -from .surface import (MergeModels, ModelToLabelMap, GrayscaleModelMaker, - ProbeVolumeWithModel, LabelMapSmoothing, ModelMaker) +from .surface import ( + MergeModels, + ModelToLabelMap, + GrayscaleModelMaker, + ProbeVolumeWithModel, + LabelMapSmoothing, + ModelMaker, +) from .quantification import * from .legacy import * from .registration import * diff --git a/nipype/interfaces/slicer/base.py b/nipype/interfaces/slicer/base.py index aae54ec00b..de00883265 100644 --- a/nipype/interfaces/slicer/base.py +++ b/nipype/interfaces/slicer/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from ..base import SEMLikeCommandLine diff --git a/nipype/interfaces/slicer/converters.py b/nipype/interfaces/slicer/converters.py index e93b994110..1be9651f55 100644 --- a/nipype/interfaces/slicer/converters.py +++ b/nipype/interfaces/slicer/converters.py @@ -1,83 +1,90 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, +) class DicomToNrrdConverterInputSpec(CommandLineInputSpec): inputDicomDirectory = Directory( desc="Directory holding Dicom series", exists=True, - argstr="--inputDicomDirectory %s") + argstr="--inputDicomDirectory %s", + ) outputDirectory = traits.Either( traits.Bool, Directory(), hash_files=False, desc="Directory holding the output NRRD format", - argstr="--outputDirectory %s") + argstr="--outputDirectory %s", + ) outputVolume = traits.Str( - desc="Output filename (.nhdr or .nrrd)", argstr="--outputVolume %s") + desc="Output filename (.nhdr or .nrrd)", argstr="--outputVolume %s" + ) smallGradientThreshold = traits.Float( - desc= - "If a gradient magnitude is greater than 0 and less than smallGradientThreshold, then DicomToNrrdConverter will display an error message and quit, unless the useBMatrixGradientDirections option is set.", - argstr="--smallGradientThreshold %f") + desc="If a gradient magnitude is greater than 0 and less than smallGradientThreshold, then DicomToNrrdConverter will display an error message and quit, unless the useBMatrixGradientDirections option is set.", + argstr="--smallGradientThreshold %f", + ) writeProtocolGradientsFile = traits.Bool( - desc= - "Write the protocol gradients to a file suffixed by \'.txt\' as they were specified in the procol by multiplying each diffusion gradient direction by the measurement frame. This file is for debugging purposes only, the format is not fixed, and will likely change as debugging of new dicom formats is necessary.", - argstr="--writeProtocolGradientsFile ") + desc="Write the protocol gradients to a file suffixed by '.txt' as they were specified in the procol by multiplying each diffusion gradient direction by the measurement frame. 
This file is for debugging purposes only, the format is not fixed, and will likely change as debugging of new dicom formats is necessary.", + argstr="--writeProtocolGradientsFile ", + ) useIdentityMeaseurementFrame = traits.Bool( - desc= - "Adjust all the gradients so that the measurement frame is an identity matrix.", - argstr="--useIdentityMeaseurementFrame ") + desc="Adjust all the gradients so that the measurement frame is an identity matrix.", + argstr="--useIdentityMeaseurementFrame ", + ) useBMatrixGradientDirections = traits.Bool( - desc= - "Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. Only changes behavior for Siemens data.", - argstr="--useBMatrixGradientDirections ") + desc="Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. Only changes behavior for Siemens data.", + argstr="--useBMatrixGradientDirections ", + ) class DicomToNrrdConverterOutputSpec(TraitedSpec): outputDirectory = Directory( - desc="Directory holding the output NRRD format", exists=True) + desc="Directory holding the output NRRD format", exists=True + ) class DicomToNrrdConverter(SEMLikeCommandLine): """title: DICOM to NRRD Converter -category: Converters + category: Converters -description: Converts diffusion weighted MR images in dicom series into Nrrd format for analysis in Slicer. This program has been tested on only a limited subset of DTI dicom formats available from Siemens, GE, and Phillips scanners. Work in progress to support dicom multi-frame data. The program parses dicom header to extract necessary information about measurement frame, diffusion weighting directions, b-values, etc, and write out a nrrd image. For non-diffusion weighted dicom images, it loads in an entire dicom series and writes out a single dicom volume in a .nhdr/.raw pair. + description: Converts diffusion weighted MR images in dicom series into Nrrd format for analysis in Slicer. This program has been tested on only a limited subset of DTI dicom formats available from Siemens, GE, and Phillips scanners. Work in progress to support dicom multi-frame data. The program parses dicom header to extract necessary information about measurement frame, diffusion weighting directions, b-values, etc, and write out a nrrd image. For non-diffusion weighted dicom images, it loads in an entire dicom series and writes out a single dicom volume in a .nhdr/.raw pair. -version: 0.2.0.$Revision: 916 $(alpha) + version: 0.2.0.$Revision: 916 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DicomToNrrdConverter + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DicomToNrrdConverter -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -contributor: Xiaodong Tao (GE), Vince Magnotta (UIowa), Hans Johnson (UIowa) + contributor: Xiaodong Tao (GE), Vince Magnotta (UIowa), Hans Johnson (UIowa) -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. 
- -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. + """ input_spec = DicomToNrrdConverterInputSpec output_spec = DicomToNrrdConverterOutputSpec _cmd = "DicomToNrrdConverter " - _outputs_filenames = {'outputDirectory': 'outputDirectory'} + _outputs_filenames = {"outputDirectory": "outputDirectory"} class OrientScalarVolumeInputSpec(CommandLineInputSpec): - inputVolume1 = File( - position=-2, desc="Input volume 1", exists=True, argstr="%s") + inputVolume1 = File(position=-2, desc="Input volume 1", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="The oriented volume", - argstr="%s") + argstr="%s", + ) orientation = traits.Enum( "Axial", "Coronal", @@ -131,7 +138,8 @@ class OrientScalarVolumeInputSpec(CommandLineInputSpec): "AIL", "ASL", desc="Orientation choices", - argstr="--orientation %s") + argstr="--orientation %s", + ) class OrientScalarVolumeOutputSpec(TraitedSpec): @@ -141,21 +149,20 @@ class OrientScalarVolumeOutputSpec(TraitedSpec): class OrientScalarVolume(SEMLikeCommandLine): """title: Orient Scalar Volume -category: Converters - -description: Orients an output volume. Rearranges the slices in a volume according to the selected orientation. The slices are not interpolated. They are just reordered and/or permuted. The resulting volume will cover the original volume. NOTE: since Slicer takes into account the orientation of a volume, the re-oriented volume will not show any difference from the original volume, To see the difference, save the volume and display it with a system that either ignores the orientation of the image (e.g. Paraview) or displays individual images. + category: Converters -version: 0.1.0.$Revision: 19608 $(alpha) + description: Orients an output volume. Rearranges the slices in a volume according to the selected orientation. The slices are not interpolated. They are just reordered and/or permuted. The resulting volume will cover the original volume. NOTE: since Slicer takes into account the orientation of a volume, the re-oriented volume will not show any difference from the original volume, To see the difference, save the volume and display it with a system that either ignores the orientation of the image (e.g. Paraview) or displays individual images. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/OrientImage + version: 0.1.0.$Revision: 19608 $(alpha) -contributor: Bill Lorensen (GE) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/OrientImage -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Bill Lorensen (GE) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = OrientScalarVolumeInputSpec output_spec = OrientScalarVolumeOutputSpec _cmd = "OrientScalarVolume " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/diffusion/__init__.py b/nipype/interfaces/slicer/diffusion/__init__.py index fd7231cefe..807fecdde2 100644 --- a/nipype/interfaces/slicer/diffusion/__init__.py +++ b/nipype/interfaces/slicer/diffusion/__init__.py @@ -1,6 +1,11 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .diffusion import ( - ResampleDTIVolume, DWIRicianLMMSEFilter, TractographyLabelMapSeeding, - DWIJointRicianLMMSEFilter, DiffusionWeightedVolumeMasking, DTIimport, - DWIToDTIEstimation, DiffusionTensorScalarMeasurements, DTIexport) + ResampleDTIVolume, + DWIRicianLMMSEFilter, + TractographyLabelMapSeeding, + DWIJointRicianLMMSEFilter, + DiffusionWeightedVolumeMasking, + DTIimport, + DWIToDTIEstimation, + DiffusionTensorScalarMeasurements, + DTIexport, +) diff --git a/nipype/interfaces/slicer/diffusion/diffusion.py b/nipype/interfaces/slicer/diffusion/diffusion.py index a088d25f8a..3238981562 100644 --- a/nipype/interfaces/slicer/diffusion/diffusion.py +++ b/nipype/interfaces/slicer/diffusion/diffusion.py @@ -1,140 +1,152 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + InputMultiPath, +) class ResampleDTIVolumeInputSpec(CommandLineInputSpec): inputVolume = File( - position=-2, - desc="Input volume to be resampled", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be resampled", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resampled Volume", - argstr="%s") + argstr="%s", + ) Reference = File( desc="Reference Volume (spacing,size,orientation,origin)", exists=True, - argstr="--Reference %s") + argstr="--Reference %s", + ) transformationFile = File(exists=True, argstr="--transformationFile %s") defField = File( - desc= - "File containing the deformation field (3D vector image containing vectors with 3 components)", + desc="File containing the deformation field (3D vector image containing vectors with 3 components)", exists=True, - argstr="--defField %s") + argstr="--defField %s", + ) hfieldtype = traits.Enum( "displacement", "h-Field", desc="Set if the deformation field is an -Field", - argstr="--hfieldtype %s") + argstr="--hfieldtype %s", + ) interpolation = traits.Enum( "linear", "nn", "ws", "bs", - desc= - "Sampling algorithm (linear , nn (nearest neighborhoor), ws (WindowedSinc), bs (BSpline) )", - argstr="--interpolation %s") + desc="Sampling algorithm (linear , nn (nearest neighbor), ws (WindowedSinc), bs (BSpline) )", + argstr="--interpolation %s", + ) correction = traits.Enum( "zero", "none", "abs", "nearest", - desc= - "Correct the tensors if computed tensor is not semi-definite positive", - argstr="--correction %s") + desc="Correct the tensors if computed tensor is not semi-definite positive", + argstr="--correction %s", + ) transform_tensor_method = traits.Enum( "PPD", "FS", - desc= - "Chooses 
between 2 methods to transform the tensors: Finite Strain (FS), faster but less accurate, or Preservation of the Principal Direction (PPD)", - argstr="--transform_tensor_method %s") + desc="Chooses between 2 methods to transform the tensors: Finite Strain (FS), faster but less accurate, or Preservation of the Principal Direction (PPD)", + argstr="--transform_tensor_method %s", + ) transform_order = traits.Enum( "input-to-output", "output-to-input", desc="Select in what order the transforms are read", - argstr="--transform_order %s") + argstr="--transform_order %s", + ) notbulk = traits.Bool( - desc= - "The transform following the BSpline transform is not set as a bulk transform for the BSpline transform", - argstr="--notbulk ") + desc="The transform following the BSpline transform is not set as a bulk transform for the BSpline transform", + argstr="--notbulk ", + ) spaceChange = traits.Bool( - desc= - "Space Orientation between transform and image is different (RAS/LPS) (warning: if the transform is a Transform Node in Slicer3, do not select)", - argstr="--spaceChange ") + desc="Space Orientation between transform and image is different (RAS/LPS) (warning: if the transform is a Transform Node in Slicer3, do not select)", + argstr="--spaceChange ", + ) rotation_point = traits.List( desc="Center of rotation (only for rigid and affine transforms)", - argstr="--rotation_point %s") + argstr="--rotation_point %s", + ) centered_transform = traits.Bool( - desc= - "Set the center of the transformation to the center of the input image (only for rigid and affine transforms)", - argstr="--centered_transform ") + desc="Set the center of the transformation to the center of the input image (only for rigid and affine transforms)", + argstr="--centered_transform ", + ) image_center = traits.Enum( "input", "output", - desc= - "Image to use to center the transform (used only if \'Centered Transform\' is selected)", - argstr="--image_center %s") + desc="Image to use to center the transform (used only if 'Centered Transform' is selected)", + argstr="--image_center %s", + ) Inverse_ITK_Transformation = traits.Bool( - desc= - "Inverse the transformation before applying it from output image to input image (only for rigid and affine transforms)", - argstr="--Inverse_ITK_Transformation ") + desc="Inverse the transformation before applying it from output image to input image (only for rigid and affine transforms)", + argstr="--Inverse_ITK_Transformation ", + ) spacing = InputMultiPath( traits.Float, desc="Spacing along each dimension (0 means use input spacing)", sep=",", - argstr="--spacing %s") + argstr="--spacing %s", + ) size = InputMultiPath( traits.Float, desc="Size along each dimension (0 means use input size)", sep=",", - argstr="--size %s") - origin = traits.List( - desc="Origin of the output Image", argstr="--origin %s") + argstr="--size %s", + ) + origin = traits.List(desc="Origin of the output Image", argstr="--origin %s") direction_matrix = InputMultiPath( traits.Float, - desc= - "9 parameters of the direction matrix by rows (ijk to LPS if LPS transform, ijk to RAS if RAS transform)", + desc="9 parameters of the direction matrix by rows (ijk to LPS if LPS transform, ijk to RAS if RAS transform)", sep=",", - argstr="--direction_matrix %s") + argstr="--direction_matrix %s", + ) number_of_thread = traits.Int( desc="Number of thread used to compute the output image", - argstr="--number_of_thread %d") + argstr="--number_of_thread %d", + ) default_pixel_value = traits.Float( - desc= - "Default pixel value for 
samples falling outside of the input region", - argstr="--default_pixel_value %f") + desc="Default pixel value for samples falling outside of the input region", + argstr="--default_pixel_value %f", + ) window_function = traits.Enum( "h", "c", "w", "l", "b", - desc= - "Window Function , h = Hamming , c = Cosine , w = Welch , l = Lanczos , b = Blackman", - argstr="--window_function %s") + desc="Window Function , h = Hamming , c = Cosine , w = Welch , l = Lanczos , b = Blackman", + argstr="--window_function %s", + ) spline_order = traits.Int( desc="Spline Order (Spline order may be from 0 to 5)", - argstr="--spline_order %d") + argstr="--spline_order %d", + ) transform_matrix = InputMultiPath( traits.Float, - desc= - "12 parameters of the transform matrix by rows ( --last 3 being translation-- )", + desc="12 parameters of the transform matrix by rows ( --last 3 being translation-- )", sep=",", - argstr="--transform_matrix %s") + argstr="--transform_matrix %s", + ) transform = traits.Enum( "rt", "a", desc="Transform algorithm, rt = Rigid Transform, a = Affine Transform", - argstr="--transform %s") + argstr="--transform %s", + ) class ResampleDTIVolumeOutputSpec(TraitedSpec): @@ -144,62 +156,66 @@ class ResampleDTIVolumeOutputSpec(TraitedSpec): class ResampleDTIVolume(SEMLikeCommandLine): """title: Resample DTI Volume -category: Diffusion.Diffusion Tensor Images - -description: Resampling an image is a very important task in image analysis. It is especially important in the frame of image registration. This module implements DT image resampling through the use of itk Transforms. The resampling is controlled by the Output Spacing. "Resampling" is performed in space coordinates, not pixel/grid coordinates. It is quite important to ensure that image spacing is properly set on the images involved. The interpolator is required since the mapping from one space to the other will often require evaluation of the intensity of the image at non-grid positions. + category: Diffusion.Diffusion Tensor Images -version: 0.1 + description: Resampling an image is a very important task in image analysis. It is especially important in the frame of image registration. This module implements DT image resampling through the use of itk Transforms. The resampling is controlled by the Output Spacing. "Resampling" is performed in space coordinates, not pixel/grid coordinates. It is quite important to ensure that image spacing is properly set on the images involved. The interpolator is required since the mapping from one space to the other will often require evaluation of the intensity of the image at non-grid positions. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ResampleDTI + version: 0.1 -contributor: Francois Budin (UNC) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ResampleDTI -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Information on the National Centers for Biomedical Computing can be obtained from http://nihroadmap.nih.gov/bioinformatics + contributor: Francois Budin (UNC) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
Information on the National Centers for Biomedical Computing can be obtained from http://nihroadmap.nih.gov/bioinformatics + """ input_spec = ResampleDTIVolumeInputSpec output_spec = ResampleDTIVolumeOutputSpec _cmd = "ResampleDTIVolume " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class DWIRicianLMMSEFilterInputSpec(CommandLineInputSpec): iter = traits.Int( - desc="Number of iterations for the noise removal filter.", - argstr="--iter %d") + desc="Number of iterations for the noise removal filter.", argstr="--iter %d" + ) re = InputMultiPath( - traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s") - rf = InputMultiPath( - traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s") + traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s" + ) + rf = InputMultiPath(traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s") mnvf = traits.Int( desc="Minimum number of voxels in kernel used for filtering.", - argstr="--mnvf %d") + argstr="--mnvf %d", + ) mnve = traits.Int( desc="Minimum number of voxels in kernel used for estimation.", - argstr="--mnve %d") + argstr="--mnve %d", + ) minnstd = traits.Int( - desc="Minimum allowed noise standard deviation.", - argstr="--minnstd %d") + desc="Minimum allowed noise standard deviation.", argstr="--minnstd %d" + ) maxnstd = traits.Int( - desc="Maximum allowed noise standard deviation.", - argstr="--maxnstd %d") + desc="Maximum allowed noise standard deviation.", argstr="--maxnstd %d" + ) hrf = traits.Float( - desc="How many histogram bins per unit interval.", argstr="--hrf %f") + desc="How many histogram bins per unit interval.", argstr="--hrf %f" + ) uav = traits.Bool( - desc="Use absolute value in case of negative square.", argstr="--uav ") - inputVolume = File( - position=-2, desc="Input DWI volume.", exists=True, argstr="%s") + desc="Use absolute value in case of negative square.", argstr="--uav " + ) + inputVolume = File(position=-2, desc="Input DWI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DWI volume.", - argstr="%s") + argstr="%s", + ) compressOutput = traits.Bool( desc="Compress the data of the compressed file using gzip", - argstr="--compressOutput ") + argstr="--compressOutput ", + ) class DWIRicianLMMSEFilterOutputSpec(TraitedSpec): @@ -209,144 +225,149 @@ class DWIRicianLMMSEFilterOutputSpec(TraitedSpec): class DWIRicianLMMSEFilter(SEMLikeCommandLine): """title: DWI Rician LMMSE Filter -category: Diffusion.Diffusion Weighted Images - -description: This module reduces noise (or unwanted detail) on a set of diffusion weighted images. For this, it filters the image in the mean squared error sense using a Rician noise model. Images corresponding to each gradient direction, including baseline, are processed individually. The noise parameter is automatically estimated (noise estimation improved but slower). -Note that this is a general purpose filter for MRi images. The module jointLMMSE has been specifically designed for DWI volumes and shows a better performance, so its use is recommended instead. -A complete description of the algorithm in this module can be found in: -S. Aja-Fernandez, M. Niethammer, M. Kubicki, M. Shenton, and C.-F. Westin. Restoration of DWI data using a Rician LMMSE estimator. IEEE Transactions on Medical Imaging, 27(10): pp. 1389-1403, Oct. 2008. 
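Reviewer note: ResampleDTIVolume's list-valued inputs (spacing, size, direction_matrix, transform_matrix) are InputMultiPath traits with sep=",", so a Python list is rendered as a single comma-joined command-line value. A hedged sketch with hypothetical paths:

# Hedged sketch: list inputs with sep="," collapse to one comma-separated
# argument. inputVolume has exists=True, so the file must be present when
# it is assigned; the path below is hypothetical.
from nipype.interfaces.slicer.diffusion import ResampleDTIVolume

res = ResampleDTIVolume()
res.inputs.inputVolume = "/data/dti.nhdr"  # hypothetical DTI volume
res.inputs.outputVolume = True             # -> default outputVolume.nii
res.inputs.spacing = [2.0, 2.0, 2.0]       # renders as --spacing 2.0,2.0,2.0
res.inputs.interpolation = "ws"            # WindowedSinc, per the Enum above
print(res.cmdline)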
+ category: Diffusion.Diffusion Weighted Images -version: 0.1.1.$Revision: 1 $(alpha) + description: This module reduces noise (or unwanted detail) on a set of diffusion weighted images. For this, it filters the image in the mean squared error sense using a Rician noise model. Images corresponding to each gradient direction, including baseline, are processed individually. The noise parameter is automatically estimated (noise estimation improved but slower). + Note that this is a general purpose filter for MRi images. The module jointLMMSE has been specifically designed for DWI volumes and shows a better performance, so its use is recommended instead. + A complete description of the algorithm in this module can be found in: + S. Aja-Fernandez, M. Niethammer, M. Kubicki, M. Shenton, and C.-F. Westin. Restoration of DWI data using a Rician LMMSE estimator. IEEE Transactions on Medical Imaging, 27(10): pp. 1389-1403, Oct. 2008. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/RicianLMMSEImageFilter + version: 0.1.1.$Revision: 1 $(alpha) -contributor: Antonio Tristan Vega (UVa), Santiago Aja Fernandez (UVa), Marc Niethammer (UNC) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/RicianLMMSEImageFilter -acknowledgements: Partially founded by grant number TEC2007-67073/TCM from the Comision Interministerial de Ciencia y Tecnologia (Spain). + contributor: Antonio Tristan Vega (UVa), Santiago Aja Fernandez (UVa), Marc Niethammer (UNC) -""" + acknowledgements: Partially founded by grant number TEC2007-67073/TCM from the Comision Interministerial de Ciencia y Tecnologia (Spain). + """ input_spec = DWIRicianLMMSEFilterInputSpec output_spec = DWIRicianLMMSEFilterOutputSpec _cmd = "DWIRicianLMMSEFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class TractographyLabelMapSeedingInputSpec(CommandLineInputSpec): - InputVolume = File( - position=-2, desc="Input DTI volume", exists=True, argstr="%s") + InputVolume = File(position=-2, desc="Input DTI volume", exists=True, argstr="%s") inputroi = File( - desc="Label map with seeding ROIs", - exists=True, - argstr="--inputroi %s") + desc="Label map with seeding ROIs", exists=True, argstr="--inputroi %s" + ) OutputFibers = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Tractography result", - argstr="%s") + argstr="%s", + ) useindexspace = traits.Bool( - desc="Seed at IJK voxel grid", argstr="--useindexspace ") + desc="Seed at IJK voxel grid", argstr="--useindexspace " + ) seedspacing = traits.Float( - desc= - "Spacing (in mm) between seed points, only matters if use Use Index Space is off", - argstr="--seedspacing %f") + desc="Spacing (in mm) between seed points, only matters if use Use Index Space is off", + argstr="--seedspacing %f", + ) randomgrid = traits.Bool( - desc="Enable random placing of seeds", argstr="--randomgrid ") + desc="Enable random placing of seeds", argstr="--randomgrid " + ) clthreshold = traits.Float( desc="Minimum Linear Measure for the seeding to start.", - argstr="--clthreshold %f") + argstr="--clthreshold %f", + ) minimumlength = traits.Float( - desc="Minimum length of the fibers (in mm)", - argstr="--minimumlength %f") + desc="Minimum length of the fibers (in mm)", argstr="--minimumlength %f" + ) maximumlength = traits.Float( - desc="Maximum length of fibers (in mm)", argstr="--maximumlength %f") + desc="Maximum length of fibers (in mm)", 
argstr="--maximumlength %f" + ) stoppingmode = traits.Enum( "LinearMeasure", "FractionalAnisotropy", desc="Tensor measurement used to stop the tractography", - argstr="--stoppingmode %s") + argstr="--stoppingmode %s", + ) stoppingvalue = traits.Float( - desc= - "Tractography will stop when the stopping measurement drops below this value", - argstr="--stoppingvalue %f") + desc="Tractography will stop when the stopping measurement drops below this value", + argstr="--stoppingvalue %f", + ) stoppingcurvature = traits.Float( - desc= - "Tractography will stop if radius of curvature becomes smaller than this number units are degrees per mm", - argstr="--stoppingcurvature %f") + desc="Tractography will stop if radius of curvature becomes smaller than this number units are degrees per mm", + argstr="--stoppingcurvature %f", + ) integrationsteplength = traits.Float( desc="Distance between points on the same fiber in mm", - argstr="--integrationsteplength %f") + argstr="--integrationsteplength %f", + ) label = traits.Int( - desc="Label value that defines seeding region.", argstr="--label %d") + desc="Label value that defines seeding region.", argstr="--label %d" + ) writetofile = traits.Bool( - desc="Write fibers to disk or create in the scene?", - argstr="--writetofile ") + desc="Write fibers to disk or create in the scene?", argstr="--writetofile " + ) outputdirectory = traits.Either( traits.Bool, Directory(), hash_files=False, desc="Directory in which to save fiber(s)", - argstr="--outputdirectory %s") + argstr="--outputdirectory %s", + ) name = traits.Str(desc="Name to use for fiber files", argstr="--name %s") class TractographyLabelMapSeedingOutputSpec(TraitedSpec): OutputFibers = File(position=-1, desc="Tractography result", exists=True) - outputdirectory = Directory( - desc="Directory in which to save fiber(s)", exists=True) + outputdirectory = Directory(desc="Directory in which to save fiber(s)", exists=True) class TractographyLabelMapSeeding(SEMLikeCommandLine): """title: Tractography Label Map Seeding -category: Diffusion.Diffusion Tensor Images - -description: Seed tracts on a Diffusion Tensor Image (DT) from a label map + category: Diffusion.Diffusion Tensor Images -version: 0.1.0.$Revision: 1892 $(alpha) + description: Seed tracts on a Diffusion Tensor Image (DT) from a label map -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Seeding + version: 0.1.0.$Revision: 1892 $(alpha) -license: slicer3 + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Seeding -contributor: Raul San Jose (SPL, BWH), Demian Wassermann (SPL, BWH) + license: slicer3 -acknowledgements: Laboratory of Mathematics in Imaging. This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Raul San Jose (SPL, BWH), Demian Wassermann (SPL, BWH) -""" + acknowledgements: Laboratory of Mathematics in Imaging. This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = TractographyLabelMapSeedingInputSpec output_spec = TractographyLabelMapSeedingOutputSpec _cmd = "TractographyLabelMapSeeding " _outputs_filenames = { - 'OutputFibers': 'OutputFibers.vtk', - 'outputdirectory': 'outputdirectory' + "OutputFibers": "OutputFibers.vtk", + "outputdirectory": "outputdirectory", } class DWIJointRicianLMMSEFilterInputSpec(CommandLineInputSpec): re = InputMultiPath( - traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s") - rf = InputMultiPath( - traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s") + traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s" + ) + rf = InputMultiPath(traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s") ng = traits.Int( - desc= - "The number of the closest gradients that are used to jointly filter a given gradient direction (0 to use all).", - argstr="--ng %d") - inputVolume = File( - position=-2, desc="Input DWI volume.", exists=True, argstr="%s") + desc="The number of the closest gradients that are used to jointly filter a given gradient direction (0 to use all).", + argstr="--ng %d", + ) + inputVolume = File(position=-2, desc="Input DWI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DWI volume.", - argstr="%s") + argstr="%s", + ) compressOutput = traits.Bool( desc="Compress the data of the compressed file using gzip", - argstr="--compressOutput ") + argstr="--compressOutput ", + ) class DWIJointRicianLMMSEFilterOutputSpec(TraitedSpec): @@ -356,100 +377,99 @@ class DWIJointRicianLMMSEFilterOutputSpec(TraitedSpec): class DWIJointRicianLMMSEFilter(SEMLikeCommandLine): """title: DWI Joint Rician LMMSE Filter -category: Diffusion.Diffusion Weighted Images - -description: This module reduces Rician noise (or unwanted detail) on a set of diffusion weighted images. For this, it filters the image in the mean squared error sense using a Rician noise model. The N closest gradient directions to the direction being processed are filtered together to improve the results: the noise-free signal is seen as an n-diemensional vector which has to be estimated with the LMMSE method from a set of corrupted measurements. To that end, the covariance matrix of the noise-free vector and the cross covariance between this signal and the noise have to be estimated, which is done taking into account the image formation process. -The noise parameter is automatically estimated from a rough segmentation of the background of the image. In this area the signal is simply 0, so that Rician statistics reduce to Rayleigh and the noise power can be easily estimated from the mode of the histogram. -A complete description of the algorithm may be found in: -Antonio Tristan-Vega and Santiago Aja-Fernandez, DWI filtering using joint information for DTI and HARDI, Medical Image Analysis, Volume 14, Issue 2, Pages 205-218. 2010. + category: Diffusion.Diffusion Weighted Images -version: 0.1.1.$Revision: 1 $(alpha) + description: This module reduces Rician noise (or unwanted detail) on a set of diffusion weighted images. For this, it filters the image in the mean squared error sense using a Rician noise model. The N closest gradient directions to the direction being processed are filtered together to improve the results: the noise-free signal is seen as an n-diemensional vector which has to be estimated with the LMMSE method from a set of corrupted measurements. 
To that end, the covariance matrix of the noise-free vector and the cross covariance between this signal and the noise have to be estimated, which is done taking into account the image formation process. + The noise parameter is automatically estimated from a rough segmentation of the background of the image. In this area the signal is simply 0, so that Rician statistics reduce to Rayleigh and the noise power can be easily estimated from the mode of the histogram. + A complete description of the algorithm may be found in: + Antonio Tristan-Vega and Santiago Aja-Fernandez, DWI filtering using joint information for DTI and HARDI, Medical Image Analysis, Volume 14, Issue 2, Pages 205-218. 2010. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/JointRicianLMMSEImageFilter + version: 0.1.1.$Revision: 1 $(alpha) -contributor: Antonio Tristan Vega (UVa), Santiago Aja Fernandez (UVa) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/JointRicianLMMSEImageFilter -acknowledgements: Partially founded by grant number TEC2007-67073/TCM from the Comision Interministerial de Ciencia y Tecnologia (Spain). + contributor: Antonio Tristan Vega (UVa), Santiago Aja Fernandez (UVa) -""" + acknowledgements: Partially founded by grant number TEC2007-67073/TCM from the Comision Interministerial de Ciencia y Tecnologia (Spain). + """ input_spec = DWIJointRicianLMMSEFilterInputSpec output_spec = DWIJointRicianLMMSEFilterOutputSpec _cmd = "DWIJointRicianLMMSEFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class DiffusionWeightedVolumeMaskingInputSpec(CommandLineInputSpec): - inputVolume = File( - position=-4, desc="Input DWI volume", exists=True, argstr="%s") + inputVolume = File(position=-4, desc="Input DWI volume", exists=True, argstr="%s") outputBaseline = traits.Either( traits.Bool, File(), position=-2, hash_files=False, desc="Estimated baseline volume", - argstr="%s") + argstr="%s", + ) thresholdMask = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Otsu Threshold Mask", - argstr="%s") + argstr="%s", + ) otsuomegathreshold = traits.Float( - desc= - "Control the sharpness of the threshold in the Otsu computation. 0: lower threshold, 1: higher threhold", - argstr="--otsuomegathreshold %f") + desc="Control the sharpness of the threshold in the Otsu computation. 0: lower threshold, 1: higher threshold", + argstr="--otsuomegathreshold %f", + ) removeislands = traits.Bool( - desc="Remove Islands in Threshold Mask?", argstr="--removeislands ") + desc="Remove Islands in Threshold Mask?", argstr="--removeislands " + ) class DiffusionWeightedVolumeMaskingOutputSpec(TraitedSpec): - outputBaseline = File( - position=-2, desc="Estimated baseline volume", exists=True) + outputBaseline = File(position=-2, desc="Estimated baseline volume", exists=True) thresholdMask = File(position=-1, desc="Otsu Threshold Mask", exists=True) class DiffusionWeightedVolumeMasking(SEMLikeCommandLine): """title: Diffusion Weighted Volume Masking -category: Diffusion.Diffusion Weighted Images - -description:

Performs a mask calculation from a diffusion weighted (DW) image.

Starting from a dw image, this module computes the baseline image averaging all the images without diffusion weighting and then applies the otsu segmentation algorithm in order to produce a mask. this mask can then be used when estimating the diffusion tensor (dt) image, not to estimate tensors all over the volume.

+ category: Diffusion.Diffusion Weighted Images -version: 0.1.0.$Revision: 1892 $(alpha) + description:

Performs a mask calculation from a diffusion weighted (DW) image.

Starting from a DW image, this module computes the baseline image by averaging all the images without diffusion weighting and then applies the Otsu segmentation algorithm to produce a mask. This mask can then be used when estimating the diffusion tensor (DT) image, so that tensors are not estimated across the whole volume.
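Reviewer note: a hedged sketch of the masking module just described; the DWI path is hypothetical, and both outputs take their default names from the _outputs_filenames mapping shown further down:

# Hedged sketch: baseline estimation plus Otsu mask in one call; requires
# the DiffusionWeightedVolumeMasking CLI to be installed for run().
from nipype.interfaces.slicer.diffusion import DiffusionWeightedVolumeMasking

mask = DiffusionWeightedVolumeMasking()
mask.inputs.inputVolume = "/data/dwi.nhdr"  # hypothetical DWI volume
mask.inputs.outputBaseline = True           # -> default outputBaseline.nii
mask.inputs.thresholdMask = True            # -> default thresholdMask.nii
mask.inputs.removeislands = True            # drop small islands from the mask
print(mask.cmdline)
# result = mask.run()                 # would execute the Slicer CLI
# print(result.outputs.thresholdMask) # absolute path to the mask volume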

-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionWeightedMasking + version: 0.1.0.$Revision: 1892 $(alpha) -license: slicer3 + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionWeightedMasking -contributor: Demian Wassermann (SPL, BWH) + license: slicer3 -""" + contributor: Demian Wassermann (SPL, BWH) + """ input_spec = DiffusionWeightedVolumeMaskingInputSpec output_spec = DiffusionWeightedVolumeMaskingOutputSpec _cmd = "DiffusionWeightedVolumeMasking " _outputs_filenames = { - 'outputBaseline': 'outputBaseline.nii', - 'thresholdMask': 'thresholdMask.nii' + "outputBaseline": "outputBaseline.nii", + "thresholdMask": "thresholdMask.nii", } class DTIimportInputSpec(CommandLineInputSpec): - inputFile = File( - position=-2, desc="Input DTI file", exists=True, argstr="%s") + inputFile = File(position=-2, desc="Input DTI file", exists=True, argstr="%s") outputTensor = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DTI volume", - argstr="%s") + argstr="%s", + ) testingmode = traits.Bool( - desc= - "Enable testing mode. Sample helix file (helix-DTI.nhdr) will be loaded into Slicer and converted in Nifti.", - argstr="--testingmode ") + desc="Enable testing mode. Sample helix file (helix-DTI.nhdr) will be loaded into Slicer and converted in Nifti.", + argstr="--testingmode ", + ) class DTIimportOutputSpec(TraitedSpec): @@ -459,104 +479,102 @@ class DTIimportOutputSpec(TraitedSpec): class DTIimport(SEMLikeCommandLine): """title: DTIimport -category: Diffusion.Diffusion Data Conversion - -description: Import tensor datasets from various formats, including the NifTi file format + category: Diffusion.Diffusion Data Conversion -version: 1.0 + description: Import tensor datasets from various formats, including the NifTi file format -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DTIImport + version: 1.0 -contributor: Sonia Pujol (SPL, BWH) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DTIImport -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NA-MIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Sonia Pujol (SPL, BWH) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NA-MIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = DTIimportInputSpec output_spec = DTIimportOutputSpec _cmd = "DTIimport " - _outputs_filenames = {'outputTensor': 'outputTensor.nii'} + _outputs_filenames = {"outputTensor": "outputTensor.nii"} class DWIToDTIEstimationInputSpec(CommandLineInputSpec): - inputVolume = File( - position=-3, desc="Input DWI volume", exists=True, argstr="%s") + inputVolume = File(position=-3, desc="Input DWI volume", exists=True, argstr="%s") mask = File( - desc="Mask where the tensors will be computed", - exists=True, - argstr="--mask %s") + desc="Mask where the tensors will be computed", exists=True, argstr="--mask %s" + ) outputTensor = traits.Either( traits.Bool, File(), position=-2, hash_files=False, desc="Estimated DTI volume", - argstr="%s") + argstr="%s", + ) outputBaseline = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Estimated baseline volume", - argstr="%s") + argstr="%s", + ) enumeration = traits.Enum( "LS", "WLS", desc="LS: Least Squares, WLS: Weighted Least Squares", - argstr="--enumeration %s") + argstr="--enumeration %s", + ) shiftNeg = traits.Bool( - desc= - "Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error)", - argstr="--shiftNeg ") + desc="Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error)", + argstr="--shiftNeg ", + ) class DWIToDTIEstimationOutputSpec(TraitedSpec): outputTensor = File(position=-2, desc="Estimated DTI volume", exists=True) - outputBaseline = File( - position=-1, desc="Estimated baseline volume", exists=True) + outputBaseline = File(position=-1, desc="Estimated baseline volume", exists=True) class DWIToDTIEstimation(SEMLikeCommandLine): """title: DWI to DTI Estimation -category: Diffusion.Diffusion Weighted Images + category: Diffusion.Diffusion Weighted Images -description: Performs a tensor model estimation from diffusion weighted images. + description: Performs a tensor model estimation from diffusion weighted images. -There are three estimation methods available: least squares, weigthed least squares and non-linear estimation. The first method is the traditional method for tensor estimation and the fastest one. Weighted least squares takes into account the noise characteristics of the MRI images to weight the DWI samples used in the estimation based on its intensity magnitude. The last method is the more complex. + There are three estimation methods available: least squares, weighted least squares and non-linear estimation. The first method is the traditional method for tensor estimation and the fastest one. Weighted least squares takes into account the noise characteristics of the MRI images to weight the DWI samples used in the estimation based on its intensity magnitude. The last method is the more complex. -version: 0.1.0.$Revision: 1892 $(alpha) + version: 0.1.0.$Revision: 1892 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionTensorEstimation + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionTensorEstimation -license: slicer3 + license: slicer3 -contributor: Raul San Jose (SPL, BWH) + contributor: Raul San Jose (SPL, BWH) -acknowledgements: This command module is based on the estimation functionality provided by the Teem library. 
This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. - -""" + acknowledgements: This command module is based on the estimation functionality provided by the Teem library. This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + """ input_spec = DWIToDTIEstimationInputSpec output_spec = DWIToDTIEstimationOutputSpec _cmd = "DWIToDTIEstimation " _outputs_filenames = { - 'outputTensor': 'outputTensor.nii', - 'outputBaseline': 'outputBaseline.nii' + "outputTensor": "outputTensor.nii", + "outputBaseline": "outputBaseline.nii", } class DiffusionTensorScalarMeasurementsInputSpec(CommandLineInputSpec): - inputVolume = File( - position=-3, desc="Input DTI volume", exists=True, argstr="%s") + inputVolume = File(position=-3, desc="Input DTI volume", exists=True, argstr="%s") outputScalar = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Scalar volume derived from tensor", - argstr="%s") + argstr="%s", + ) enumeration = traits.Enum( "Trace", "Determinant", @@ -584,47 +602,48 @@ class DiffusionTensorScalarMeasurementsInputSpec(CommandLineInputSpec): "ParallelDiffusivity", "PerpendicularDffusivity", desc="An enumeration of strings", - argstr="--enumeration %s") + argstr="--enumeration %s", + ) class DiffusionTensorScalarMeasurementsOutputSpec(TraitedSpec): outputScalar = File( - position=-1, desc="Scalar volume derived from tensor", exists=True) + position=-1, desc="Scalar volume derived from tensor", exists=True + ) class DiffusionTensorScalarMeasurements(SEMLikeCommandLine): """title: Diffusion Tensor Scalar Measurements -category: Diffusion.Diffusion Tensor Images + category: Diffusion.Diffusion Tensor Images -description: Compute a set of different scalar measurements from a tensor field, specially oriented for Diffusion Tensors where some rotationally invariant measurements, like Fractional Anisotropy, are highly used to describe the anistropic behaviour of the tensor. + description: Compute a set of different scalar measurements from a tensor field, specially oriented for Diffusion Tensors where some rotationally invariant measurements, like Fractional Anisotropy, are highly used to describe the anistropic behaviour of the tensor. 
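Reviewer note: the trait names exercised by these specs (outputTensor, inputVolume, outputScalar, ...) are the connection points when the interfaces are composed in a nipype pipeline. A hedged sketch under stated assumptions:

# Hedged sketch: chain tensor estimation into a scalar-measurement node in
# a nipype workflow. The DWI path and base_dir are hypothetical; both
# Slicer CLIs must be installed, and "FractionalAnisotropy" is assumed to
# be among the (elided) enumeration values above.
import nipype.pipeline.engine as pe
from nipype.interfaces.slicer.diffusion import (
    DWIToDTIEstimation,
    DiffusionTensorScalarMeasurements,
)

estimate = pe.Node(DWIToDTIEstimation(), name="estimate_dti")
estimate.inputs.inputVolume = "/data/dwi.nhdr"  # hypothetical DWI volume
estimate.inputs.outputTensor = True             # -> default outputTensor.nii
estimate.inputs.outputBaseline = True           # -> default outputBaseline.nii
estimate.inputs.enumeration = "WLS"             # weighted least squares

scalars = pe.Node(DiffusionTensorScalarMeasurements(), name="compute_fa")
scalars.inputs.outputScalar = True              # -> default outputScalar.nii
scalars.inputs.enumeration = "FractionalAnisotropy"  # assumed enum value

wf = pe.Workflow(name="dti_fa", base_dir="/tmp/dti_fa")
wf.connect(estimate, "outputTensor", scalars, "inputVolume")
# wf.run()  # executes both nodes if the Slicer tools are available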
-version: 0.1.0.$Revision: 1892 $(alpha) + version: 0.1.0.$Revision: 1892 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionTensorMathematics + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionTensorMathematics -contributor: Raul San Jose (SPL, BWH) + contributor: Raul San Jose (SPL, BWH) -acknowledgements: LMI - -""" + acknowledgements: LMI + """ input_spec = DiffusionTensorScalarMeasurementsInputSpec output_spec = DiffusionTensorScalarMeasurementsOutputSpec _cmd = "DiffusionTensorScalarMeasurements " - _outputs_filenames = {'outputScalar': 'outputScalar.nii'} + _outputs_filenames = {"outputScalar": "outputScalar.nii"} class DTIexportInputSpec(CommandLineInputSpec): - inputTensor = File( - position=-2, desc="Input DTI volume", exists=True, argstr="%s") + inputTensor = File(position=-2, desc="Input DTI volume", exists=True, argstr="%s") outputFile = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DTI file", - argstr="%s") + argstr="%s", + ) class DTIexportOutputSpec(TraitedSpec): @@ -634,21 +653,20 @@ class DTIexportOutputSpec(TraitedSpec): class DTIexport(SEMLikeCommandLine): """title: DTIexport -category: Diffusion.Diffusion Data Conversion - -description: Export DTI data to various file formats + category: Diffusion.Diffusion Data Conversion -version: 1.0 + description: Export DTI data to various file formats -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DTIExport + version: 1.0 -contributor: Sonia Pujol (SPL, BWH) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DTIExport -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NA-MIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Sonia Pujol (SPL, BWH) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NA-MIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = DTIexportInputSpec output_spec = DTIexportOutputSpec _cmd = "DTIexport " - _outputs_filenames = {'outputFile': 'outputFile'} + _outputs_filenames = {"outputFile": "outputFile"} diff --git a/nipype/interfaces/slicer/diffusion/tests/__init__.py b/nipype/interfaces/slicer/diffusion/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/diffusion/tests/__init__.py +++ b/nipype/interfaces/slicer/diffusion/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIexport.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIexport.py index c8d5df2ba2..9017db6760 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIexport.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIexport.py @@ -1,21 +1,23 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..diffusion import DTIexport def test_DTIexport_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputTensor=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), outputFile=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -25,8 +27,15 @@ def test_DTIexport_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTIexport_outputs(): - output_map = dict(outputFile=dict(position=-1, ), ) + output_map = dict( + outputFile=dict( + extensions=None, + position=-1, + ), + ) outputs = DTIexport.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIimport.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIimport.py index ec1d66bc0b..802d4ce9bc 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIimport.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIimport.py @@ -1,33 +1,44 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..diffusion import DTIimport def test_DTIimport_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputFile=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), outputTensor=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - testingmode=dict(argstr='--testingmode ', ), + testingmode=dict( + argstr="--testingmode ", + ), ) inputs = DTIimport.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTIimport_outputs(): - output_map = dict(outputTensor=dict(position=-1, ), ) + output_map = dict( + outputTensor=dict( + extensions=None, + position=-1, + ), + ) outputs = DTIimport.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIJointRicianLMMSEFilter.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIJointRicianLMMSEFilter.py index a2caa2f633..e004599c12 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIJointRicianLMMSEFilter.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIJointRicianLMMSEFilter.py @@ -1,33 +1,39 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import 
unicode_literals from ..diffusion import DWIJointRicianLMMSEFilter def test_DWIJointRicianLMMSEFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - compressOutput=dict(argstr='--compressOutput ', ), + args=dict( + argstr="%s", + ), + compressOutput=dict( + argstr="--compressOutput ", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - ng=dict(argstr='--ng %d', ), + ng=dict( + argstr="--ng %d", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), re=dict( - argstr='--re %s', - sep=',', + argstr="--re %s", + sep=",", ), rf=dict( - argstr='--rf %s', - sep=',', + argstr="--rf %s", + sep=",", ), ) inputs = DWIJointRicianLMMSEFilter.input_spec() @@ -35,8 +41,15 @@ def test_DWIJointRicianLMMSEFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIJointRicianLMMSEFilter_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = DWIJointRicianLMMSEFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py index 7eff851562..3c38117737 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py @@ -1,48 +1,73 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..diffusion import DWIRicianLMMSEFilter def test_DWIRicianLMMSEFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - compressOutput=dict(argstr='--compressOutput ', ), + args=dict( + argstr="%s", + ), + compressOutput=dict( + argstr="--compressOutput ", + ), environ=dict( nohash=True, usedefault=True, ), - hrf=dict(argstr='--hrf %f', ), + hrf=dict( + argstr="--hrf %f", + ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - iter=dict(argstr='--iter %d', ), - maxnstd=dict(argstr='--maxnstd %d', ), - minnstd=dict(argstr='--minnstd %d', ), - mnve=dict(argstr='--mnve %d', ), - mnvf=dict(argstr='--mnvf %d', ), + iter=dict( + argstr="--iter %d", + ), + maxnstd=dict( + argstr="--maxnstd %d", + ), + minnstd=dict( + argstr="--minnstd %d", + ), + mnve=dict( + argstr="--mnve %d", + ), + mnvf=dict( + argstr="--mnvf %d", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), re=dict( - argstr='--re %s', - sep=',', + argstr="--re %s", + sep=",", ), rf=dict( - argstr='--rf %s', - sep=',', + argstr="--rf %s", + sep=",", + ), + uav=dict( + argstr="--uav ", ), - uav=dict(argstr='--uav ', ), ) inputs = DWIRicianLMMSEFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIRicianLMMSEFilter_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = DWIRicianLMMSEFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIToDTIEstimation.py 
b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIToDTIEstimation.py index 217f91edf0..76b305283c 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIToDTIEstimation.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIToDTIEstimation.py @@ -1,42 +1,59 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..diffusion import DWIToDTIEstimation def test_DWIToDTIEstimation_inputs(): input_map = dict( - args=dict(argstr='%s', ), - enumeration=dict(argstr='--enumeration %s', ), + args=dict( + argstr="%s", + ), + enumeration=dict( + argstr="--enumeration %s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), - mask=dict(argstr='--mask %s', ), + mask=dict( + argstr="--mask %s", + extensions=None, + ), outputBaseline=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), outputTensor=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-2, ), - shiftNeg=dict(argstr='--shiftNeg ', ), + shiftNeg=dict( + argstr="--shiftNeg ", + ), ) inputs = DWIToDTIEstimation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIToDTIEstimation_outputs(): output_map = dict( - outputBaseline=dict(position=-1, ), - outputTensor=dict(position=-2, ), + outputBaseline=dict( + extensions=None, + position=-1, + ), + outputTensor=dict( + extensions=None, + position=-2, + ), ) outputs = DWIToDTIEstimation.output_spec() diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionTensorScalarMeasurements.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionTensorScalarMeasurements.py index b75989c349..143194f493 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionTensorScalarMeasurements.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionTensorScalarMeasurements.py @@ -1,22 +1,26 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..diffusion import DiffusionTensorScalarMeasurements def test_DiffusionTensorScalarMeasurements_inputs(): input_map = dict( - args=dict(argstr='%s', ), - enumeration=dict(argstr='--enumeration %s', ), + args=dict( + argstr="%s", + ), + enumeration=dict( + argstr="--enumeration %s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), outputScalar=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -26,8 +30,15 @@ def test_DiffusionTensorScalarMeasurements_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DiffusionTensorScalarMeasurements_outputs(): - output_map = dict(outputScalar=dict(position=-1, ), ) + output_map = dict( + outputScalar=dict( + extensions=None, + position=-1, + ), + ) outputs = DiffusionTensorScalarMeasurements.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionWeightedVolumeMasking.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionWeightedVolumeMasking.py index 5e8c39a4ac..5b11d2f578 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionWeightedVolumeMasking.py +++ 
b/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionWeightedVolumeMasking.py @@ -1,28 +1,34 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..diffusion import DiffusionWeightedVolumeMasking def test_DiffusionWeightedVolumeMasking_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-4, ), - otsuomegathreshold=dict(argstr='--otsuomegathreshold %f', ), + otsuomegathreshold=dict( + argstr="--otsuomegathreshold %f", + ), outputBaseline=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-2, ), - removeislands=dict(argstr='--removeislands ', ), + removeislands=dict( + argstr="--removeislands ", + ), thresholdMask=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -32,10 +38,18 @@ def test_DiffusionWeightedVolumeMasking_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DiffusionWeightedVolumeMasking_outputs(): output_map = dict( - outputBaseline=dict(position=-2, ), - thresholdMask=dict(position=-1, ), + outputBaseline=dict( + extensions=None, + position=-2, + ), + thresholdMask=dict( + extensions=None, + position=-1, + ), ) outputs = DiffusionWeightedVolumeMasking.output_spec() diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_ResampleDTIVolume.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_ResampleDTIVolume.py index 30860d9da0..c52bb5357d 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_ResampleDTIVolume.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_ResampleDTIVolume.py @@ -1,69 +1,120 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..diffusion import ResampleDTIVolume def test_ResampleDTIVolume_inputs(): input_map = dict( Inverse_ITK_Transformation=dict( - argstr='--Inverse_ITK_Transformation ', ), - Reference=dict(argstr='--Reference %s', ), - args=dict(argstr='%s', ), - centered_transform=dict(argstr='--centered_transform ', ), - correction=dict(argstr='--correction %s', ), - defField=dict(argstr='--defField %s', ), - default_pixel_value=dict(argstr='--default_pixel_value %f', ), + argstr="--Inverse_ITK_Transformation ", + ), + Reference=dict( + argstr="--Reference %s", + extensions=None, + ), + args=dict( + argstr="%s", + ), + centered_transform=dict( + argstr="--centered_transform ", + ), + correction=dict( + argstr="--correction %s", + ), + defField=dict( + argstr="--defField %s", + extensions=None, + ), + default_pixel_value=dict( + argstr="--default_pixel_value %f", + ), direction_matrix=dict( - argstr='--direction_matrix %s', - sep=',', + argstr="--direction_matrix %s", + sep=",", ), environ=dict( nohash=True, usedefault=True, ), - hfieldtype=dict(argstr='--hfieldtype %s', ), - image_center=dict(argstr='--image_center %s', ), + hfieldtype=dict( + argstr="--hfieldtype %s", + ), + image_center=dict( + argstr="--image_center %s", + ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - interpolation=dict(argstr='--interpolation %s', ), - notbulk=dict(argstr='--notbulk ', ), - number_of_thread=dict(argstr='--number_of_thread %d', ), - origin=dict(argstr='--origin %s', ), + interpolation=dict( + argstr="--interpolation %s", + ), + notbulk=dict( + argstr="--notbulk ", + ), + 
number_of_thread=dict( + argstr="--number_of_thread %d", + ), + origin=dict( + argstr="--origin %s", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - rotation_point=dict(argstr='--rotation_point %s', ), + rotation_point=dict( + argstr="--rotation_point %s", + ), size=dict( - argstr='--size %s', - sep=',', + argstr="--size %s", + sep=",", + ), + spaceChange=dict( + argstr="--spaceChange ", ), - spaceChange=dict(argstr='--spaceChange ', ), spacing=dict( - argstr='--spacing %s', - sep=',', + argstr="--spacing %s", + sep=",", + ), + spline_order=dict( + argstr="--spline_order %d", + ), + transform=dict( + argstr="--transform %s", ), - spline_order=dict(argstr='--spline_order %d', ), - transform=dict(argstr='--transform %s', ), transform_matrix=dict( - argstr='--transform_matrix %s', - sep=',', + argstr="--transform_matrix %s", + sep=",", + ), + transform_order=dict( + argstr="--transform_order %s", + ), + transform_tensor_method=dict( + argstr="--transform_tensor_method %s", + ), + transformationFile=dict( + argstr="--transformationFile %s", + extensions=None, + ), + window_function=dict( + argstr="--window_function %s", ), - transform_order=dict(argstr='--transform_order %s', ), - transform_tensor_method=dict(argstr='--transform_tensor_method %s', ), - transformationFile=dict(argstr='--transformationFile %s', ), - window_function=dict(argstr='--window_function %s', ), ) inputs = ResampleDTIVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ResampleDTIVolume_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = ResampleDTIVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_TractographyLabelMapSeeding.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_TractographyLabelMapSeeding.py index 31fb7d5c0d..f8b1a3ddff 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_TractographyLabelMapSeeding.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_TractographyLabelMapSeeding.py @@ -1,51 +1,87 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..diffusion import TractographyLabelMapSeeding def test_TractographyLabelMapSeeding_inputs(): input_map = dict( InputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), OutputFibers=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - args=dict(argstr='%s', ), - clthreshold=dict(argstr='--clthreshold %f', ), + args=dict( + argstr="%s", + ), + clthreshold=dict( + argstr="--clthreshold %f", + ), environ=dict( nohash=True, usedefault=True, ), - inputroi=dict(argstr='--inputroi %s', ), - integrationsteplength=dict(argstr='--integrationsteplength %f', ), - label=dict(argstr='--label %d', ), - maximumlength=dict(argstr='--maximumlength %f', ), - minimumlength=dict(argstr='--minimumlength %f', ), - name=dict(argstr='--name %s', ), + inputroi=dict( + argstr="--inputroi %s", + extensions=None, + ), + integrationsteplength=dict( + argstr="--integrationsteplength %f", + ), + label=dict( + argstr="--label %d", + ), + maximumlength=dict( + argstr="--maximumlength %f", + ), + minimumlength=dict( + argstr="--minimumlength %f", + ), + name=dict( + argstr="--name %s", + ), outputdirectory=dict( - 
argstr='--outputdirectory %s', + argstr="--outputdirectory %s", hash_files=False, ), - randomgrid=dict(argstr='--randomgrid ', ), - seedspacing=dict(argstr='--seedspacing %f', ), - stoppingcurvature=dict(argstr='--stoppingcurvature %f', ), - stoppingmode=dict(argstr='--stoppingmode %s', ), - stoppingvalue=dict(argstr='--stoppingvalue %f', ), - useindexspace=dict(argstr='--useindexspace ', ), - writetofile=dict(argstr='--writetofile ', ), + randomgrid=dict( + argstr="--randomgrid ", + ), + seedspacing=dict( + argstr="--seedspacing %f", + ), + stoppingcurvature=dict( + argstr="--stoppingcurvature %f", + ), + stoppingmode=dict( + argstr="--stoppingmode %s", + ), + stoppingvalue=dict( + argstr="--stoppingvalue %f", + ), + useindexspace=dict( + argstr="--useindexspace ", + ), + writetofile=dict( + argstr="--writetofile ", + ), ) inputs = TractographyLabelMapSeeding.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TractographyLabelMapSeeding_outputs(): output_map = dict( - OutputFibers=dict(position=-1, ), + OutputFibers=dict( + extensions=None, + position=-1, + ), outputdirectory=dict(), ) outputs = TractographyLabelMapSeeding.output_spec() diff --git a/nipype/interfaces/slicer/filtering/__init__.py b/nipype/interfaces/slicer/filtering/__init__.py index 30ace1d2f2..8d7a6c0da4 100644 --- a/nipype/interfaces/slicer/filtering/__init__.py +++ b/nipype/interfaces/slicer/filtering/__init__.py @@ -1,13 +1,17 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -from .morphology import (GrayscaleGrindPeakImageFilter, - GrayscaleFillHoleImageFilter) -from .denoising import (GradientAnisotropicDiffusion, - CurvatureAnisotropicDiffusion, GaussianBlurImageFilter, - MedianImageFilter) -from .arithmetic import (MultiplyScalarVolumes, MaskScalarVolume, - SubtractScalarVolumes, AddScalarVolumes, - CastScalarVolume) +from .morphology import GrayscaleGrindPeakImageFilter, GrayscaleFillHoleImageFilter +from .denoising import ( + GradientAnisotropicDiffusion, + CurvatureAnisotropicDiffusion, + GaussianBlurImageFilter, + MedianImageFilter, +) +from .arithmetic import ( + MultiplyScalarVolumes, + MaskScalarVolume, + SubtractScalarVolumes, + AddScalarVolumes, + CastScalarVolume, +) from .extractskeleton import ExtractSkeleton from .histogrammatching import HistogramMatching from .thresholdscalarvolume import ThresholdScalarVolume diff --git a/nipype/interfaces/slicer/filtering/arithmetic.py b/nipype/interfaces/slicer/filtering/arithmetic.py index 22785e32e1..8b729d8c6a 100644 --- a/nipype/interfaces/slicer/filtering/arithmetic.py +++ b/nipype/interfaces/slicer/filtering/arithmetic.py @@ -1,32 +1,34 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class MultiplyScalarVolumesInputSpec(CommandLineInputSpec): - inputVolume1 = File( - position=-3, desc="Input volume 1", exists=True, argstr="%s") - inputVolume2 = File( - position=-2, desc="Input volume 2", exists=True, argstr="%s") + inputVolume1 = File(position=-3, desc="Input volume 1", exists=True, argstr="%s") 
+ inputVolume2 = File(position=-2, desc="Input volume 2", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Volume1 * Volume2", - argstr="%s") + argstr="%s", + ) order = traits.Enum( "0", "1", "2", "3", - desc= - "Interpolation order if two images are in different coordinate frames or have different sampling.", - argstr="--order %s") + desc="Interpolation order if two images are in different coordinate frames or have different sampling.", + argstr="--order %s", + ) class MultiplyScalarVolumesOutputSpec(TraitedSpec): @@ -36,104 +38,98 @@ class MultiplyScalarVolumesOutputSpec(TraitedSpec): class MultiplyScalarVolumes(SEMLikeCommandLine): """title: Multiply Scalar Volumes -category: Filtering.Arithmetic + category: Filtering.Arithmetic -description: Multiplies two images. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. + description: Multiplies two images. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. -version: 0.1.0.$Revision: 8595 $(alpha) + version: 0.1.0.$Revision: 8595 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Multiply + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Multiply -contributor: Bill Lorensen (GE) + contributor: Bill Lorensen (GE) -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. - -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
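For orientation, a minimal usage sketch of one of these SEM-style wrappers (the file names are hypothetical; nipype validates exists=True inputs at assignment time, so they must be on disk, and actually executing the node requires the Slicer CLI tools on PATH):

    from nipype.interfaces.slicer.filtering.arithmetic import MultiplyScalarVolumes

    mult = MultiplyScalarVolumes()
    mult.inputs.inputVolume1 = "vol1.nii"  # hypothetical existing volume
    mult.inputs.inputVolume2 = "vol2.nii"  # hypothetical existing volume
    mult.inputs.outputVolume = True  # True lets _outputs_filenames pick outputVolume.nii
    print(mult.cmdline)  # e.g. MultiplyScalarVolumes vol1.nii vol2.nii <cwd>/outputVolume.nii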
+ """ input_spec = MultiplyScalarVolumesInputSpec output_spec = MultiplyScalarVolumesOutputSpec _cmd = "MultiplyScalarVolumes " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class MaskScalarVolumeInputSpec(CommandLineInputSpec): InputVolume = File( - position=-3, - desc="Input volume to be masked", - exists=True, - argstr="%s") + position=-3, desc="Input volume to be masked", exists=True, argstr="%s" + ) MaskVolume = File( - position=-2, - desc="Label volume containing the mask", - exists=True, - argstr="%s") + position=-2, desc="Label volume containing the mask", exists=True, argstr="%s" + ) OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, - desc= - "Output volume: Input Volume masked by label value from Mask Volume", - argstr="%s") + desc="Output volume: Input Volume masked by label value from Mask Volume", + argstr="%s", + ) label = traits.Int( - desc="Label value in the Mask Volume to use as the mask", - argstr="--label %d") + desc="Label value in the Mask Volume to use as the mask", argstr="--label %d" + ) replace = traits.Int( desc="Value to use for the output volume outside of the mask", - argstr="--replace %d") + argstr="--replace %d", + ) class MaskScalarVolumeOutputSpec(TraitedSpec): OutputVolume = File( position=-1, - desc= - "Output volume: Input Volume masked by label value from Mask Volume", - exists=True) + desc="Output volume: Input Volume masked by label value from Mask Volume", + exists=True, + ) class MaskScalarVolume(SEMLikeCommandLine): """title: Mask Scalar Volume -category: Filtering.Arithmetic - -description: Masks two images. The output image is set to 0 everywhere except where the chosen label from the mask volume is present, at which point it will retain it's original values. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. + category: Filtering.Arithmetic -version: 0.1.0.$Revision: 8595 $(alpha) + description: Masks two images. The output image is set to 0 everywhere except where the chosen label from the mask volume is present, at which point it will retain it's original values. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Mask + version: 0.1.0.$Revision: 8595 $(alpha) -contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Mask -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = MaskScalarVolumeInputSpec output_spec = MaskScalarVolumeOutputSpec _cmd = "MaskScalarVolume " - _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} + _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} class SubtractScalarVolumesInputSpec(CommandLineInputSpec): - inputVolume1 = File( - position=-3, desc="Input volume 1", exists=True, argstr="%s") - inputVolume2 = File( - position=-2, desc="Input volume 2", exists=True, argstr="%s") + inputVolume1 = File(position=-3, desc="Input volume 1", exists=True, argstr="%s") + inputVolume2 = File(position=-2, desc="Input volume 2", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Volume1 - Volume2", - argstr="%s") + argstr="%s", + ) order = traits.Enum( "0", "1", "2", "3", - desc= - "Interpolation order if two images are in different coordinate frames or have different sampling.", - argstr="--order %s") + desc="Interpolation order if two images are in different coordinate frames or have different sampling.", + argstr="--order %s", + ) class SubtractScalarVolumesOutputSpec(TraitedSpec): @@ -143,46 +139,44 @@ class SubtractScalarVolumesOutputSpec(TraitedSpec): class SubtractScalarVolumes(SEMLikeCommandLine): """title: Subtract Scalar Volumes -category: Filtering.Arithmetic + category: Filtering.Arithmetic -description: Subtracts two images. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. + description: Subtracts two images. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. -version: 0.1.0.$Revision: 19608 $(alpha) + version: 0.1.0.$Revision: 19608 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Subtract + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Subtract -contributor: Bill Lorensen (GE) + contributor: Bill Lorensen (GE) -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. - -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = SubtractScalarVolumesInputSpec output_spec = SubtractScalarVolumesOutputSpec _cmd = "SubtractScalarVolumes " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class AddScalarVolumesInputSpec(CommandLineInputSpec): - inputVolume1 = File( - position=-3, desc="Input volume 1", exists=True, argstr="%s") - inputVolume2 = File( - position=-2, desc="Input volume 2", exists=True, argstr="%s") + inputVolume1 = File(position=-3, desc="Input volume 1", exists=True, argstr="%s") + inputVolume2 = File(position=-2, desc="Input volume 2", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Volume1 + Volume2", - argstr="%s") + argstr="%s", + ) order = traits.Enum( "0", "1", "2", "3", - desc= - "Interpolation order if two images are in different coordinate frames or have different sampling.", - argstr="--order %s") + desc="Interpolation order if two images are in different coordinate frames or have different sampling.", + argstr="--order %s", + ) class AddScalarVolumesOutputSpec(TraitedSpec): @@ -192,39 +186,37 @@ class AddScalarVolumesOutputSpec(TraitedSpec): class AddScalarVolumes(SEMLikeCommandLine): """title: Add Scalar Volumes -category: Filtering.Arithmetic - -description: Adds two images. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. + category: Filtering.Arithmetic -version: 0.1.0.$Revision: 19608 $(alpha) + description: Adds two images. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. -documentation-url: http://slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Add + version: 0.1.0.$Revision: 19608 $(alpha) -contributor: Bill Lorensen (GE) + documentation-url: http://slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Add -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Bill Lorensen (GE) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = AddScalarVolumesInputSpec output_spec = AddScalarVolumesOutputSpec _cmd = "AddScalarVolumes " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class CastScalarVolumeInputSpec(CommandLineInputSpec): InputVolume = File( - position=-2, - desc="Input volume, the volume to cast.", - exists=True, - argstr="%s") + position=-2, desc="Input volume, the volume to cast.", exists=True, argstr="%s" + ) OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output volume, cast to the new type.", - argstr="%s") + argstr="%s", + ) type = traits.Enum( "Char", "UnsignedChar", @@ -235,34 +227,35 @@ class CastScalarVolumeInputSpec(CommandLineInputSpec): "Float", "Double", desc="Type for the new output volume.", - argstr="--type %s") + argstr="--type %s", + ) class CastScalarVolumeOutputSpec(TraitedSpec): OutputVolume = File( - position=-1, desc="Output volume, cast to the new type.", exists=True) + position=-1, desc="Output volume, cast to the new type.", exists=True + ) class CastScalarVolume(SEMLikeCommandLine): """title: Cast Scalar Volume -category: Filtering.Arithmetic - -description: Cast a volume to a given data type. -Use at your own risk when casting an input volume into a lower precision type! -Allows casting to the same type as the input volume. + category: Filtering.Arithmetic -version: 0.1.0.$Revision: 2104 $(alpha) + description: Cast a volume to a given data type. + Use at your own risk when casting an input volume into a lower precision type! + Allows casting to the same type as the input volume. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Cast + version: 0.1.0.$Revision: 2104 $(alpha) -contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Cast -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = CastScalarVolumeInputSpec output_spec = CastScalarVolumeOutputSpec _cmd = "CastScalarVolume " - _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} + _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/checkerboardfilter.py b/nipype/interfaces/slicer/filtering/checkerboardfilter.py index e4ad85dc5e..336d0b13d7 100644 --- a/nipype/interfaces/slicer/filtering/checkerboardfilter.py +++ b/nipype/interfaces/slicer/filtering/checkerboardfilter.py @@ -1,30 +1,37 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class CheckerBoardFilterInputSpec(CommandLineInputSpec): checkerPattern = InputMultiPath( traits.Int, - desc= - "The pattern of input 1 and input 2 in the output image. The user can specify the number of checkers in each dimension. A checkerPattern of 2,2,1 means that images will alternate in every other checker in the first two dimensions. The same pattern will be used in the 3rd dimension.", + desc="The pattern of input 1 and input 2 in the output image. The user can specify the number of checkers in each dimension. A checkerPattern of 2,2,1 means that images will alternate in every other checker in the first two dimensions. The same pattern will be used in the 3rd dimension.", sep=",", - argstr="--checkerPattern %s") + argstr="--checkerPattern %s", + ) inputVolume1 = File( - position=-3, desc="First Input volume", exists=True, argstr="%s") + position=-3, desc="First Input volume", exists=True, argstr="%s" + ) inputVolume2 = File( - position=-2, desc="Second Input volume", exists=True, argstr="%s") + position=-2, desc="Second Input volume", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class CheckerBoardFilterOutputSpec(TraitedSpec): @@ -34,21 +41,20 @@ class CheckerBoardFilterOutputSpec(TraitedSpec): class CheckerBoardFilter(SEMLikeCommandLine): """title: CheckerBoard Filter -category: Filtering + category: Filtering -description: Create a checkerboard volume of two volumes. The output volume will show the two inputs alternating according to the user supplied checkerPattern. This filter is often used to compare the results of image registration. Note that the second input is resampled to the same origin, spacing and direction before it is composed with the first input. The scalar type of the output volume will be the same as the input image scalar type. + description: Create a checkerboard volume of two volumes. The output volume will show the two inputs alternating according to the user supplied checkerPattern. This filter is often used to compare the results of image registration. Note that the second input is resampled to the same origin, spacing and direction before it is composed with the first input. The scalar type of the output volume will be the same as the input image scalar type. 
-version: 0.1.0.$Revision: 19608 $(alpha) + version: 0.1.0.$Revision: 19608 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/CheckerBoard + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/CheckerBoard -contributor: Bill Lorensen (GE) + contributor: Bill Lorensen (GE) -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. - -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + """ input_spec = CheckerBoardFilterInputSpec output_spec = CheckerBoardFilterOutputSpec _cmd = "CheckerBoardFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/denoising.py b/nipype/interfaces/slicer/filtering/denoising.py index 0dbaaebf74..e5d1135038 100644 --- a/nipype/interfaces/slicer/filtering/denoising.py +++ b/nipype/interfaces/slicer/filtering/denoising.py @@ -1,37 +1,40 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class GradientAnisotropicDiffusionInputSpec(CommandLineInputSpec): conductance = traits.Float( - desc= - "Conductance controls the sensitivity of the conductance term. As a general rule, the lower the value, the more strongly the filter preserves edges. A high value will cause diffusion (smoothing) across edges. Note that the number of iterations controls how much smoothing is done within regions bounded by edges.", - argstr="--conductance %f") + desc="Conductance controls the sensitivity of the conductance term. As a general rule, the lower the value, the more strongly the filter preserves edges. A high value will cause diffusion (smoothing) across edges. Note that the number of iterations controls how much smoothing is done within regions bounded by edges.", + argstr="--conductance %f", + ) iterations = traits.Int( - desc= - "The more iterations, the more smoothing. Each iteration takes the same amount of time. If it takes 10 seconds for one iteration, then it will take 100 seconds for 10 iterations. Note that the conductance controls how much each iteration smooths across edges.", - argstr="--iterations %d") + desc="The more iterations, the more smoothing. Each iteration takes the same amount of time. If it takes 10 seconds for one iteration, then it will take 100 seconds for 10 iterations. Note that the conductance controls how much each iteration smooths across edges.", + argstr="--iterations %d", + ) timeStep = traits.Float( - desc= - "The time step depends on the dimensionality of the image. In Slicer the images are 3D and the default (.0625) time step will provide a stable solution.", - argstr="--timeStep %f") + desc="The time step depends on the dimensionality of the image. 
In Slicer the images are 3D and the default (.0625) time step will provide a stable solution.", + argstr="--timeStep %f", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class GradientAnisotropicDiffusionOutputSpec(TraitedSpec): @@ -41,53 +44,51 @@ class GradientAnisotropicDiffusionOutputSpec(TraitedSpec): class GradientAnisotropicDiffusion(SEMLikeCommandLine): """title: Gradient Anisotropic Diffusion -category: Filtering.Denoising + category: Filtering.Denoising -description: Runs gradient anisotropic diffusion on a volume. + description: Runs gradient anisotropic diffusion on a volume. -Anisotropic diffusion methods reduce noise (or unwanted detail) in images while preserving specific image features, like edges. For many applications, there is an assumption that light-dark transitions (edges) are interesting. Standard isotropic diffusion methods move and blur light-dark boundaries. Anisotropic diffusion methods are formulated to specifically preserve edges. The conductance term for this implementation is a function of the gradient magnitude of the image at each point, reducing the strength of diffusion at edges. The numerical implementation of this equation is similar to that described in the Perona-Malik paper, but uses a more robust technique for gradient magnitude estimation and has been generalized to N-dimensions. + Anisotropic diffusion methods reduce noise (or unwanted detail) in images while preserving specific image features, like edges. For many applications, there is an assumption that light-dark transitions (edges) are interesting. Standard isotropic diffusion methods move and blur light-dark boundaries. Anisotropic diffusion methods are formulated to specifically preserve edges. The conductance term for this implementation is a function of the gradient magnitude of the image at each point, reducing the strength of diffusion at edges. The numerical implementation of this equation is similar to that described in the Perona-Malik paper, but uses a more robust technique for gradient magnitude estimation and has been generalized to N-dimensions. -version: 0.1.0.$Revision: 19608 $(alpha) + version: 0.1.0.$Revision: 19608 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GradientAnisotropicDiffusion + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GradientAnisotropicDiffusion -contributor: Bill Lorensen (GE) + contributor: Bill Lorensen (GE) -acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium - -""" + acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium + """ input_spec = GradientAnisotropicDiffusionInputSpec output_spec = GradientAnisotropicDiffusionOutputSpec _cmd = "GradientAnisotropicDiffusion " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class CurvatureAnisotropicDiffusionInputSpec(CommandLineInputSpec): conductance = traits.Float( - desc= - "Conductance controls the sensitivity of the conductance term. As a general rule, the lower the value, the more strongly the filter preserves edges. 
A high value will cause diffusion (smoothing) across edges. Note that the number of iterations controls how much smoothing is done within regions bounded by edges.", - argstr="--conductance %f") + desc="Conductance controls the sensitivity of the conductance term. As a general rule, the lower the value, the more strongly the filter preserves edges. A high value will cause diffusion (smoothing) across edges. Note that the number of iterations controls how much smoothing is done within regions bounded by edges.", + argstr="--conductance %f", + ) iterations = traits.Int( - desc= - "The more iterations, the more smoothing. Each iteration takes the same amount of time. If it takes 10 seconds for one iteration, then it will take 100 seconds for 10 iterations. Note that the conductance controls how much each iteration smooths across edges.", - argstr="--iterations %d") + desc="The more iterations, the more smoothing. Each iteration takes the same amount of time. If it takes 10 seconds for one iteration, then it will take 100 seconds for 10 iterations. Note that the conductance controls how much each iteration smooths across edges.", + argstr="--iterations %d", + ) timeStep = traits.Float( - desc= - "The time step depends on the dimensionality of the image. In Slicer the images are 3D and the default (.0625) time step will provide a stable solution.", - argstr="--timeStep %f") + desc="The time step depends on the dimensionality of the image. In Slicer the images are 3D and the default (.0625) time step will provide a stable solution.", + argstr="--timeStep %f", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class CurvatureAnisotropicDiffusionOutputSpec(TraitedSpec): @@ -97,43 +98,43 @@ class CurvatureAnisotropicDiffusionOutputSpec(TraitedSpec): class CurvatureAnisotropicDiffusion(SEMLikeCommandLine): """title: Curvature Anisotropic Diffusion -category: Filtering.Denoising - -description: Performs anisotropic diffusion on an image using a modified curvature diffusion equation (MCDE). + category: Filtering.Denoising -MCDE does not exhibit the edge enhancing properties of classic anisotropic diffusion, which can under certain conditions undergo a 'negative' diffusion, which enhances the contrast of edges. Equations of the form of MCDE always undergo positive diffusion, with the conductance term only varying the strength of that diffusion. + description: Performs anisotropic diffusion on an image using a modified curvature diffusion equation (MCDE). - Qualitatively, MCDE compares well with other non-linear diffusion techniques. It is less sensitive to contrast than classic Perona-Malik style diffusion, and preserves finer detailed structures in images. There is a potential speed trade-off for using this function in place of Gradient Anisotropic Diffusion. Each iteration of the solution takes roughly twice as long. Fewer iterations, however, may be required to reach an acceptable solution. + MCDE does not exhibit the edge enhancing properties of classic anisotropic diffusion, which can under certain conditions undergo a 'negative' diffusion, which enhances the contrast of edges. Equations of the form of MCDE always undergo positive diffusion, with the conductance term only varying the strength of that diffusion. 
-version: 0.1.0.$Revision: 19608 $(alpha) + Qualitatively, MCDE compares well with other non-linear diffusion techniques. It is less sensitive to contrast than classic Perona-Malik style diffusion, and preserves finer detailed structures in images. There is a potential speed trade-off for using this function in place of Gradient Anisotropic Diffusion. Each iteration of the solution takes roughly twice as long. Fewer iterations, however, may be required to reach an acceptable solution. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/CurvatureAnisotropicDiffusion + version: 0.1.0.$Revision: 19608 $(alpha) -contributor: Bill Lorensen (GE) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/CurvatureAnisotropicDiffusion -acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium + contributor: Bill Lorensen (GE) -""" + acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium + """ input_spec = CurvatureAnisotropicDiffusionInputSpec output_spec = CurvatureAnisotropicDiffusionOutputSpec _cmd = "CurvatureAnisotropicDiffusion " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class GaussianBlurImageFilterInputSpec(CommandLineInputSpec): sigma = traits.Float( desc="Sigma value in physical units (e.g., mm) of the Gaussian kernel", - argstr="--sigma %f") - inputVolume = File( - position=-2, desc="Input volume", exists=True, argstr="%s") + argstr="--sigma %f", + ) + inputVolume = File(position=-2, desc="Input volume", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Blurred Volume", - argstr="%s") + argstr="%s", + ) class GaussianBlurImageFilterOutputSpec(TraitedSpec): @@ -143,24 +144,23 @@ class GaussianBlurImageFilterOutputSpec(TraitedSpec): class GaussianBlurImageFilter(SEMLikeCommandLine): """title: Gaussian Blur Image Filter -category: Filtering.Denoising + category: Filtering.Denoising -description: Apply a gaussian blurr to an image + description: Apply a gaussian blur to an image -version: 0.1.0.$Revision: 1.1 $(alpha) + version: 0.1.0.$Revision: 1.1 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GaussianBlurImageFilter + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GaussianBlurImageFilter -contributor: Julien Jomier (Kitware), Stephen Aylward (Kitware) + contributor: Julien Jomier (Kitware), Stephen Aylward (Kitware) -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. - -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
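A sketch tying together the three tuning knobs documented above for GradientAnisotropicDiffusion (the values shown are the defaults cited in the docstrings, used here illustratively; the input path is hypothetical):

    from nipype.interfaces.slicer.filtering.denoising import GradientAnisotropicDiffusion

    smooth = GradientAnisotropicDiffusion()
    smooth.inputs.inputVolume = "noisy.nii"  # hypothetical, must exist on disk
    smooth.inputs.conductance = 1.0   # lower values preserve edges more strongly
    smooth.inputs.iterations = 5      # more iterations, more smoothing within regions
    smooth.inputs.timeStep = 0.0625   # stable default for 3D per the docstring
    smooth.inputs.outputVolume = True
    print(smooth.cmdline)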
+ """ input_spec = GaussianBlurImageFilterInputSpec output_spec = GaussianBlurImageFilterOutputSpec _cmd = "GaussianBlurImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class MedianImageFilterInputSpec(CommandLineInputSpec): @@ -168,19 +168,19 @@ class MedianImageFilterInputSpec(CommandLineInputSpec): traits.Int, desc="The size of the neighborhood in each dimension", sep=",", - argstr="--neighborhood %s") + argstr="--neighborhood %s", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class MedianImageFilterOutputSpec(TraitedSpec): @@ -190,21 +190,20 @@ class MedianImageFilterOutputSpec(TraitedSpec): class MedianImageFilter(SEMLikeCommandLine): """title: Median Image Filter -category: Filtering.Denoising - -description: The MedianImageFilter is commonly used as a robust approach for noise reduction. This filter is particularly efficient against "salt-and-pepper" noise. In other words, it is robust to the presence of gray-level outliers. MedianImageFilter computes the value of each output pixel as the statistical median of the neighborhood of values around the corresponding input pixel. + category: Filtering.Denoising -version: 0.1.0.$Revision: 19608 $(alpha) + description: The MedianImageFilter is commonly used as a robust approach for noise reduction. This filter is particularly efficient against "salt-and-pepper" noise. In other words, it is robust to the presence of gray-level outliers. MedianImageFilter computes the value of each output pixel as the statistical median of the neighborhood of values around the corresponding input pixel. 
-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/MedianImageFilter + version: 0.1.0.$Revision: 19608 $(alpha) -contributor: Bill Lorensen (GE) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/MedianImageFilter -acknowledgements: This command module was derived from Insight/Examples/Filtering/MedianImageFilter (copyright) Insight Software Consortium + contributor: Bill Lorensen (GE) -""" + acknowledgements: This command module was derived from Insight/Examples/Filtering/MedianImageFilter (copyright) Insight Software Consortium + """ input_spec = MedianImageFilterInputSpec output_spec = MedianImageFilterOutputSpec _cmd = "MedianImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/extractskeleton.py b/nipype/interfaces/slicer/filtering/extractskeleton.py index d7770c8f2e..8b873bad98 100644 --- a/nipype/interfaces/slicer/filtering/extractskeleton.py +++ b/nipype/interfaces/slicer/filtering/extractskeleton.py @@ -1,59 +1,64 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class ExtractSkeletonInputSpec(CommandLineInputSpec): - InputImageFileName = File( - position=-2, desc="Input image", exists=True, argstr="%s") + InputImageFileName = File(position=-2, desc="Input image", exists=True, argstr="%s") OutputImageFileName = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Skeleton of the input image", - argstr="%s") + argstr="%s", + ) type = traits.Enum( - "1D", "2D", desc="Type of skeleton to create", argstr="--type %s") + "1D", "2D", desc="Type of skeleton to create", argstr="--type %s" + ) dontPrune = traits.Bool( desc="Return the full skeleton, not just the maximal skeleton", - argstr="--dontPrune ") + argstr="--dontPrune ", + ) numPoints = traits.Int( - desc="Number of points used to represent the skeleton", - argstr="--numPoints %d") + desc="Number of points used to represent the skeleton", argstr="--numPoints %d" + ) pointsFile = traits.Str( - desc= - "Name of the file to store the coordinates of the central (1D) skeleton points", - argstr="--pointsFile %s") + desc="Name of the file to store the coordinates of the central (1D) skeleton points", + argstr="--pointsFile %s", + ) class ExtractSkeletonOutputSpec(TraitedSpec): OutputImageFileName = File( - position=-1, desc="Skeleton of the input image", exists=True) + position=-1, desc="Skeleton of the input image", exists=True + ) class ExtractSkeleton(SEMLikeCommandLine): """title: Extract Skeleton -category: Filtering + category: Filtering -description: Extract the skeleton of a binary object. The skeleton can be limited to being a 1D curve or allowed to be a full 2D manifold. The branches of the skeleton can be pruned so that only the maximal center skeleton is returned. + description: Extract the skeleton of a binary object. The skeleton can be limited to being a 1D curve or allowed to be a full 2D manifold. 
The branches of the skeleton can be pruned so that only the maximal center skeleton is returned. -version: 0.1.0.$Revision: 2104 $(alpha) + version: 0.1.0.$Revision: 2104 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ExtractSkeleton + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ExtractSkeleton -contributor: Pierre Seroul (UNC), Martin Styner (UNC), Guido Gerig (UNC), Stephen Aylward (Kitware) + contributor: Pierre Seroul (UNC), Martin Styner (UNC), Guido Gerig (UNC), Stephen Aylward (Kitware) -acknowledgements: The original implementation of this method was provided by ETH Zurich, Image Analysis Laboratory of Profs Olaf Kuebler, Gabor Szekely and Guido Gerig. Martin Styner at UNC, Chapel Hill made enhancements. Wrapping for Slicer was provided by Pierre Seroul and Stephen Aylward at Kitware, Inc. - -""" + acknowledgements: The original implementation of this method was provided by ETH Zurich, Image Analysis Laboratory of Profs Olaf Kuebler, Gabor Szekely and Guido Gerig. Martin Styner at UNC, Chapel Hill made enhancements. Wrapping for Slicer was provided by Pierre Seroul and Stephen Aylward at Kitware, Inc. + """ input_spec = ExtractSkeletonInputSpec output_spec = ExtractSkeletonOutputSpec _cmd = "ExtractSkeleton " - _outputs_filenames = {'OutputImageFileName': 'OutputImageFileName.nii'} + _outputs_filenames = {"OutputImageFileName": "OutputImageFileName.nii"} diff --git a/nipype/interfaces/slicer/filtering/histogrammatching.py b/nipype/interfaces/slicer/filtering/histogrammatching.py index 1b3b26b061..df1d87bd2f 100644 --- a/nipype/interfaces/slicer/filtering/histogrammatching.py +++ b/nipype/interfaces/slicer/filtering/histogrammatching.py @@ -1,75 +1,77 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class HistogramMatchingInputSpec(CommandLineInputSpec): numberOfHistogramLevels = traits.Int( desc="The number of hisogram levels to use", - argstr="--numberOfHistogramLevels %d") + argstr="--numberOfHistogramLevels %d", + ) numberOfMatchPoints = traits.Int( - desc="The number of match points to use", - argstr="--numberOfMatchPoints %d") + desc="The number of match points to use", argstr="--numberOfMatchPoints %d" + ) threshold = traits.Bool( - desc= - "If on, only pixels above the mean in each volume are thresholded.", - argstr="--threshold ") + desc="If on, only pixels above the mean in each volume are thresholded.", + argstr="--threshold ", + ) inputVolume = File( - position=-3, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-3, desc="Input volume to be filtered", exists=True, argstr="%s" + ) referenceVolume = File( position=-2, desc="Input volume whose histogram will be matched", exists=True, - argstr="%s") + argstr="%s", + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, - desc= - "Output volume. This is the input volume with intensities matched to the reference volume.", - argstr="%s") + desc="Output volume. 
This is the input volume with intensities matched to the reference volume.", + argstr="%s", + ) class HistogramMatchingOutputSpec(TraitedSpec): outputVolume = File( position=-1, - desc= - "Output volume. This is the input volume with intensities matched to the reference volume.", - exists=True) + desc="Output volume. This is the input volume with intensities matched to the reference volume.", + exists=True, + ) class HistogramMatching(SEMLikeCommandLine): """title: Histogram Matching -category: Filtering - -description: Normalizes the grayscale values of a source image based on the grayscale values of a reference image. This filter uses a histogram matching technique where the histograms of the two images are matched only at a specified number of quantile values. + category: Filtering -The filter was orginally designed to normalize MR images of the sameMR protocol and same body part. The algorithm works best if background pixels are excluded from both the source and reference histograms. A simple background exclusion method is to exclude all pixels whose grayscale values are smaller than the mean grayscale value. ThresholdAtMeanIntensity switches on this simple background exclusion method. + description: Normalizes the grayscale values of a source image based on the grayscale values of a reference image. This filter uses a histogram matching technique where the histograms of the two images are matched only at a specified number of quantile values. -Number of match points governs the number of quantile values to be matched. + The filter was originally designed to normalize MR images of the same MR protocol and same body part. The algorithm works best if background pixels are excluded from both the source and reference histograms. A simple background exclusion method is to exclude all pixels whose grayscale values are smaller than the mean grayscale value. ThresholdAtMeanIntensity switches on this simple background exclusion method. -The filter assumes that both the source and reference are of the same type and that the input and output image type have the same number of dimension and have scalar pixel types. + Number of match points governs the number of quantile values to be matched. -version: 0.1.0.$Revision: 19608 $(alpha) + The filter assumes that both the source and reference are of the same type and that the input and output image type have the same number of dimensions and have scalar pixel types. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/HistogramMatching + version: 0.1.0.$Revision: 19608 $(alpha) -contributor: Bill Lorensen (GE) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/HistogramMatching -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Bill Lorensen (GE) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
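A sketch of the histogram-matching wrapper described above (file names are illustrative; threshold enables the mean-based background exclusion mentioned in the description):

    from nipype.interfaces.slicer.filtering.histogrammatching import HistogramMatching

    hm = HistogramMatching()
    hm.inputs.inputVolume = "subject_T1.nii"       # hypothetical source volume
    hm.inputs.referenceVolume = "template_T1.nii"  # hypothetical reference volume
    hm.inputs.numberOfHistogramLevels = 128
    hm.inputs.numberOfMatchPoints = 10
    hm.inputs.threshold = True  # exclude sub-mean (background) voxels
    hm.inputs.outputVolume = True
    print(hm.cmdline)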
+ """ input_spec = HistogramMatchingInputSpec output_spec = HistogramMatchingOutputSpec _cmd = "HistogramMatching " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/imagelabelcombine.py b/nipype/interfaces/slicer/filtering/imagelabelcombine.py index 067a575045..6941ff05fe 100644 --- a/nipype/interfaces/slicer/filtering/imagelabelcombine.py +++ b/nipype/interfaces/slicer/filtering/imagelabelcombine.py @@ -1,50 +1,55 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class ImageLabelCombineInputSpec(CommandLineInputSpec): InputLabelMap_A = File( - position=-3, desc="Label map image", exists=True, argstr="%s") + position=-3, desc="Label map image", exists=True, argstr="%s" + ) InputLabelMap_B = File( - position=-2, desc="Label map image", exists=True, argstr="%s") + position=-2, desc="Label map image", exists=True, argstr="%s" + ) OutputLabelMap = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resulting Label map image", - argstr="%s") + argstr="%s", + ) first_overwrites = traits.Bool( desc="Use first or second label when both are present", - argstr="--first_overwrites ") + argstr="--first_overwrites ", + ) class ImageLabelCombineOutputSpec(TraitedSpec): - OutputLabelMap = File( - position=-1, desc="Resulting Label map image", exists=True) + OutputLabelMap = File(position=-1, desc="Resulting Label map image", exists=True) class ImageLabelCombine(SEMLikeCommandLine): """title: Image Label Combine -category: Filtering + category: Filtering -description: Combine two label maps into one + description: Combine two label maps into one -version: 0.1.0 + version: 0.1.0 -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ImageLabelCombine + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ImageLabelCombine -contributor: Alex Yarmarkovich (SPL, BWH) - -""" + contributor: Alex Yarmarkovich (SPL, BWH) + """ input_spec = ImageLabelCombineInputSpec output_spec = ImageLabelCombineOutputSpec _cmd = "ImageLabelCombine " - _outputs_filenames = {'OutputLabelMap': 'OutputLabelMap.nii'} + _outputs_filenames = {"OutputLabelMap": "OutputLabelMap.nii"} diff --git a/nipype/interfaces/slicer/filtering/morphology.py b/nipype/interfaces/slicer/filtering/morphology.py index 913c63d5ab..bd1232dece 100644 --- a/nipype/interfaces/slicer/filtering/morphology.py +++ b/nipype/interfaces/slicer/filtering/morphology.py @@ -1,25 +1,27 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class GrayscaleGrindPeakImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( 
- position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class GrayscaleGrindPeakImageFilterOutputSpec(TraitedSpec): @@ -29,49 +31,47 @@ class GrayscaleGrindPeakImageFilterOutputSpec(TraitedSpec): class GrayscaleGrindPeakImageFilter(SEMLikeCommandLine): """title: Grayscale Grind Peak Image Filter -category: Filtering.Morphology + category: Filtering.Morphology -description: GrayscaleGrindPeakImageFilter removes peaks in a grayscale image. Peaks are local maxima in the grayscale topography that are not connected to boundaries of the image. Gray level values adjacent to a peak are extrapolated through the peak. + description: GrayscaleGrindPeakImageFilter removes peaks in a grayscale image. Peaks are local maxima in the grayscale topography that are not connected to boundaries of the image. Gray level values adjacent to a peak are extrapolated through the peak. -This filter is used to smooth over local maxima without affecting the values of local minima. If you take the difference between the output of this filter and the original image (and perhaps threshold the difference above a small value), you'll obtain a map of the local maxima. + This filter is used to smooth over local maxima without affecting the values of local minima. If you take the difference between the output of this filter and the original image (and perhaps threshold the difference above a small value), you'll obtain a map of the local maxima. -This filter uses the GrayscaleGeodesicDilateImageFilter. It provides its own input as the "mask" input to the geodesic erosion. The "marker" image for the geodesic erosion is constructed such that boundary pixels match the boundary pixels of the input image and the interior pixels are set to the minimum pixel value in the input image. + This filter uses the GrayscaleGeodesicDilateImageFilter. It provides its own input as the "mask" input to the geodesic erosion. The "marker" image for the geodesic erosion is constructed such that boundary pixels match the boundary pixels of the input image and the interior pixels are set to the minimum pixel value in the input image. -This filter is the dual to the GrayscaleFillholeImageFilter which implements the Fillhole algorithm. Since it is a dual, it is somewhat superfluous but is provided as a convenience. + This filter is the dual to the GrayscaleFillholeImageFilter which implements the Fillhole algorithm. Since it is a dual, it is somewhat superfluous but is provided as a convenience. -Geodesic morphology and the Fillhole algorithm is described in Chapter 6 of Pierre Soille's book "Morphological Image Analysis: Principles and Applications", Second Edition, Springer, 2003. + Geodesic morphology and the Fillhole algorithm is described in Chapter 6 of Pierre Soille's book "Morphological Image Analysis: Principles and Applications", Second Edition, Springer, 2003. -A companion filter, Grayscale Fill Hole, fills holes in grayscale images. + A companion filter, Grayscale Fill Hole, fills holes in grayscale images. 
-version: 0.1.0.$Revision: 19608 $(alpha) + version: 0.1.0.$Revision: 19608 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GrayscaleGrindPeakImageFilter + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GrayscaleGrindPeakImageFilter -contributor: Bill Lorensen (GE) + contributor: Bill Lorensen (GE) -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. - -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + """ input_spec = GrayscaleGrindPeakImageFilterInputSpec output_spec = GrayscaleGrindPeakImageFilterOutputSpec _cmd = "GrayscaleGrindPeakImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class GrayscaleFillHoleImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class GrayscaleFillHoleImageFilterOutputSpec(TraitedSpec): @@ -81,29 +81,28 @@ class GrayscaleFillHoleImageFilterOutputSpec(TraitedSpec): class GrayscaleFillHoleImageFilter(SEMLikeCommandLine): """title: Grayscale Fill Hole Image Filter -category: Filtering.Morphology - -description: GrayscaleFillholeImageFilter fills holes in a grayscale image. Holes are local minima in the grayscale topography that are not connected to boundaries of the image. Gray level values adjacent to a hole are extrapolated across the hole. + category: Filtering.Morphology -This filter is used to smooth over local minima without affecting the values of local maxima. If you take the difference between the output of this filter and the original image (and perhaps threshold the difference above a small value), you'll obtain a map of the local minima. + description: GrayscaleFillholeImageFilter fills holes in a grayscale image. Holes are local minima in the grayscale topography that are not connected to boundaries of the image. Gray level values adjacent to a hole are extrapolated across the hole. -This filter uses the itkGrayscaleGeodesicErodeImageFilter. It provides its own input as the "mask" input to the geodesic erosion. The "marker" image for the geodesic erosion is constructed such that boundary pixels match the boundary pixels of the input image and the interior pixels are set to the maximum pixel value in the input image. + This filter is used to smooth over local minima without affecting the values of local maxima. If you take the difference between the output of this filter and the original image (and perhaps threshold the difference above a small value), you'll obtain a map of the local minima. - Geodesic morphology and the Fillhole algorithm is described in Chapter 6 of Pierre Soille's book "Morphological Image Analysis: Principles and Applications", Second Edition, Springer, 2003. + This filter uses the itkGrayscaleGeodesicErodeImageFilter. It provides its own input as the "mask" input to the geodesic erosion. 
The "marker" image for the geodesic erosion is constructed such that boundary pixels match the boundary pixels of the input image and the interior pixels are set to the maximum pixel value in the input image. - A companion filter, Grayscale Grind Peak, removes peaks in grayscale images. + Geodesic morphology and the Fillhole algorithm is described in Chapter 6 of Pierre Soille's book "Morphological Image Analysis: Principles and Applications", Second Edition, Springer, 2003. -version: 0.1.0.$Revision: 19608 $(alpha) + A companion filter, Grayscale Grind Peak, removes peaks in grayscale images. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GrayscaleFillHoleImageFilter + version: 0.1.0.$Revision: 19608 $(alpha) -contributor: Bill Lorensen (GE) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GrayscaleFillHoleImageFilter -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Bill Lorensen (GE) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + """ input_spec = GrayscaleFillHoleImageFilterInputSpec output_spec = GrayscaleFillHoleImageFilterOutputSpec _cmd = "GrayscaleFillHoleImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py b/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py index 28f694f77e..69768e3b7e 100644 --- a/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py +++ b/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py @@ -1,70 +1,76 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class N4ITKBiasFieldCorrectionInputSpec(CommandLineInputSpec): inputimage = File( desc="Input image where you observe signal inhomegeneity", exists=True, - argstr="--inputimage %s") + argstr="--inputimage %s", + ) maskimage = File( - desc= - "Binary mask that defines the structure of your interest. NOTE: This parameter is OPTIONAL. If the mask is not specified, the module will use internally Otsu thresholding to define this mask. Better processing results can often be obtained when a meaningful mask is defined.", + desc="Binary mask that defines the structure of your interest. NOTE: This parameter is OPTIONAL. If the mask is not specified, the module will use internally Otsu thresholding to define this mask. 
Better processing results can often be obtained when a meaningful mask is defined.", exists=True, - argstr="--maskimage %s") + argstr="--maskimage %s", + ) outputimage = traits.Either( traits.Bool, File(), hash_files=False, desc="Result of processing", - argstr="--outputimage %s") + argstr="--outputimage %s", + ) outputbiasfield = traits.Either( traits.Bool, File(), hash_files=False, desc="Recovered bias field (OPTIONAL)", - argstr="--outputbiasfield %s") + argstr="--outputbiasfield %s", + ) iterations = InputMultiPath( traits.Int, - desc= - "Maximum number of iterations at each level of resolution. Larger values will increase execution time, but may lead to better results.", + desc="Maximum number of iterations at each level of resolution. Larger values will increase execution time, but may lead to better results.", sep=",", - argstr="--iterations %s") + argstr="--iterations %s", + ) convergencethreshold = traits.Float( - desc= - "Stopping criterion for the iterative bias estimation. Larger values will lead to smaller execution time.", - argstr="--convergencethreshold %f") + desc="Stopping criterion for the iterative bias estimation. Larger values will lead to smaller execution time.", + argstr="--convergencethreshold %f", + ) meshresolution = InputMultiPath( traits.Float, - desc= - "Resolution of the initial bspline grid defined as a sequence of three numbers. The actual resolution will be defined by adding the bspline order (default is 3) to the resolution in each dimension specified here. For example, 1,1,1 will result in a 4x4x4 grid of control points. This parameter may need to be adjusted based on your input image. In the multi-resolution N4 framework, the resolution of the bspline grid at subsequent iterations will be doubled. The number of resolutions is implicitly defined by Number of iterations parameter (the size of this list is the number of resolutions)", + desc="Resolution of the initial bspline grid defined as a sequence of three numbers. The actual resolution will be defined by adding the bspline order (default is 3) to the resolution in each dimension specified here. For example, 1,1,1 will result in a 4x4x4 grid of control points. This parameter may need to be adjusted based on your input image. In the multi-resolution N4 framework, the resolution of the bspline grid at subsequent iterations will be doubled. The number of resolutions is implicitly defined by Number of iterations parameter (the size of this list is the number of resolutions)", sep=",", - argstr="--meshresolution %s") + argstr="--meshresolution %s", + ) splinedistance = traits.Float( - desc= - "An alternative means to define the spline grid, by setting the distance between the control points. This parameter is used only if the grid resolution is not specified.", - argstr="--splinedistance %f") + desc="An alternative means to define the spline grid, by setting the distance between the control points. This parameter is used only if the grid resolution is not specified.", + argstr="--splinedistance %f", + ) shrinkfactor = traits.Int( - desc= - "Defines how much the image should be upsampled before estimating the inhomogeneity field. Increase if you want to reduce the execution time. 1 corresponds to the original resolution. Larger values will significantly reduce the computation time.", - argstr="--shrinkfactor %d") + desc="Defines how much the image should be upsampled before estimating the inhomogeneity field. Increase if you want to reduce the execution time. 1 corresponds to the original resolution. 
Larger values will significantly reduce the computation time.", + argstr="--shrinkfactor %d", + ) bsplineorder = traits.Int( - desc= - "Order of B-spline used in the approximation. Larger values will lead to longer execution times, may result in overfitting and poor result.", - argstr="--bsplineorder %d") - weightimage = File( - desc="Weight Image", exists=True, argstr="--weightimage %s") + desc="Order of B-spline used in the approximation. Larger values will lead to longer execution times, may result in overfitting and poor result.", + argstr="--bsplineorder %d", + ) + weightimage = File(desc="Weight Image", exists=True, argstr="--weightimage %s") histogramsharpening = InputMultiPath( traits.Float, - desc= - "A vector of up to three values. Non-zero values correspond to Bias Field Full Width at Half Maximum, Wiener filter noise, and Number of histogram bins.", + desc="A vector of up to three values. Non-zero values correspond to Bias Field Full Width at Half Maximum, Wiener filter noise, and Number of histogram bins.", sep=",", - argstr="--histogramsharpening %s") + argstr="--histogramsharpening %s", + ) class N4ITKBiasFieldCorrectionOutputSpec(TraitedSpec): @@ -75,24 +81,23 @@ class N4ITKBiasFieldCorrectionOutputSpec(TraitedSpec): class N4ITKBiasFieldCorrection(SEMLikeCommandLine): """title: N4ITK MRI Bias correction -category: Filtering + category: Filtering -description: Performs image bias correction using N4 algorithm. This module is based on the ITK filters contributed in the following publication: Tustison N, Gee J "N4ITK: Nick's N3 ITK Implementation For MRI Bias Field Correction", The Insight Journal 2009 January-June, http://hdl.handle.net/10380/3053 + description: Performs image bias correction using N4 algorithm. This module is based on the ITK filters contributed in the following publication: Tustison N, Gee J "N4ITK: Nick's N3 ITK Implementation For MRI Bias Field Correction", The Insight Journal 2009 January-June, http://hdl.handle.net/10380/3053 -version: 9 + version: 9 -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/N4ITKBiasFieldCorrection + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/N4ITKBiasFieldCorrection -contributor: Nick Tustison (UPenn), Andrey Fedorov (SPL, BWH), Ron Kikinis (SPL, BWH) + contributor: Nick Tustison (UPenn), Andrey Fedorov (SPL, BWH), Ron Kikinis (SPL, BWH) -acknowledgements: The development of this module was partially supported by NIH grants R01 AA016748-01, R01 CA111288 and U01 CA151261 as well as by NA-MIC, NAC, NCIGT and the Slicer community. - -""" + acknowledgements: The development of this module was partially supported by NIH grants R01 AA016748-01, R01 CA111288 and U01 CA151261 as well as by NA-MIC, NAC, NCIGT and the Slicer community. 
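As a usage sketch for the interface above: the file names and the iteration schedule below are illustrative, not taken from this diff, and running the command requires the Slicer N4ITKBiasFieldCorrection CLI module.

# Sketch (hypothetical file names): N4 bias-field correction with an
# explicit multi-resolution iteration schedule.
from nipype.interfaces.slicer.filtering.n4itkbiasfieldcorrection import (
    N4ITKBiasFieldCorrection,
)

n4 = N4ITKBiasFieldCorrection()
n4.inputs.inputimage = "t1.nii"                # hypothetical input
n4.inputs.outputimage = "t1_corrected.nii"
n4.inputs.outputbiasfield = "bias_field.nii"   # optional recovered field
n4.inputs.iterations = [50, 40, 30]            # one entry per resolution level
print(n4.cmdline)  # renders --iterations 50,40,30 because of sep=","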
+ """ input_spec = N4ITKBiasFieldCorrectionInputSpec output_spec = N4ITKBiasFieldCorrectionOutputSpec _cmd = "N4ITKBiasFieldCorrection " _outputs_filenames = { - 'outputimage': 'outputimage.nii', - 'outputbiasfield': 'outputbiasfield.nii' + "outputimage": "outputimage.nii", + "outputbiasfield": "outputbiasfield.nii", } diff --git a/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py b/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py index 6205b76b54..e9d6a8271f 100644 --- a/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py +++ b/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py @@ -1,125 +1,134 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class ResampleScalarVectorDWIVolumeInputSpec(CommandLineInputSpec): inputVolume = File( - position=-2, - desc="Input Volume to be resampled", - exists=True, - argstr="%s") + position=-2, desc="Input Volume to be resampled", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resampled Volume", - argstr="%s") + argstr="%s", + ) Reference = File( desc="Reference Volume (spacing,size,orientation,origin)", exists=True, - argstr="--Reference %s") + argstr="--Reference %s", + ) transformationFile = File(exists=True, argstr="--transformationFile %s") defField = File( - desc= - "File containing the deformation field (3D vector image containing vectors with 3 components)", + desc="File containing the deformation field (3D vector image containing vectors with 3 components)", exists=True, - argstr="--defField %s") + argstr="--defField %s", + ) hfieldtype = traits.Enum( "displacement", "h-Field", desc="Set if the deformation field is an h-Field", - argstr="--hfieldtype %s") + argstr="--hfieldtype %s", + ) interpolation = traits.Enum( "linear", "nn", "ws", "bs", - desc= - "Sampling algorithm (linear or nn (nearest neighborhoor), ws (WindowedSinc), bs (BSpline) )", - argstr="--interpolation %s") + desc="Sampling algorithm (linear or nn (nearest neighbor), ws (WindowedSinc), bs (BSpline) )", + argstr="--interpolation %s", + ) transform_order = traits.Enum( "input-to-output", "output-to-input", desc="Select in what order the transforms are read", - argstr="--transform_order %s") + argstr="--transform_order %s", + ) notbulk = traits.Bool( - desc= - "The transform following the BSpline transform is not set as a bulk transform for the BSpline transform", - argstr="--notbulk ") + desc="The transform following the BSpline transform is not set as a bulk transform for the BSpline transform", + argstr="--notbulk ", + ) spaceChange = traits.Bool( - desc= - "Space Orientation between transform and image is different (RAS/LPS) (warning: if the transform is a Transform Node in Slicer3, do not select)", - argstr="--spaceChange ") + desc="Space Orientation between transform and image is different (RAS/LPS) (warning: if the transform is a Transform Node in Slicer3, do not select)", + argstr="--spaceChange ", + ) rotation_point = traits.List( - desc= - "Rotation Point in case of rotation around a point 
(otherwise useless)", - argstr="--rotation_point %s") + desc="Rotation Point in case of rotation around a point (otherwise useless)", + argstr="--rotation_point %s", + ) centered_transform = traits.Bool( - desc= - "Set the center of the transformation to the center of the input image", - argstr="--centered_transform ") + desc="Set the center of the transformation to the center of the input image", + argstr="--centered_transform ", + ) image_center = traits.Enum( "input", "output", - desc= - "Image to use to center the transform (used only if \'Centered Transform\' is selected)", - argstr="--image_center %s") + desc="Image to use to center the transform (used only if 'Centered Transform' is selected)", + argstr="--image_center %s", + ) Inverse_ITK_Transformation = traits.Bool( - desc= - "Inverse the transformation before applying it from output image to input image", - argstr="--Inverse_ITK_Transformation ") + desc="Inverse the transformation before applying it from output image to input image", + argstr="--Inverse_ITK_Transformation ", + ) spacing = InputMultiPath( traits.Float, desc="Spacing along each dimension (0 means use input spacing)", sep=",", - argstr="--spacing %s") + argstr="--spacing %s", + ) size = InputMultiPath( traits.Float, desc="Size along each dimension (0 means use input size)", sep=",", - argstr="--size %s") - origin = traits.List( - desc="Origin of the output Image", argstr="--origin %s") + argstr="--size %s", + ) + origin = traits.List(desc="Origin of the output Image", argstr="--origin %s") direction_matrix = InputMultiPath( traits.Float, - desc= - "9 parameters of the direction matrix by rows (ijk to LPS if LPS transform, ijk to RAS if RAS transform)", + desc="9 parameters of the direction matrix by rows (ijk to LPS if LPS transform, ijk to RAS if RAS transform)", sep=",", - argstr="--direction_matrix %s") + argstr="--direction_matrix %s", + ) number_of_thread = traits.Int( desc="Number of thread used to compute the output image", - argstr="--number_of_thread %d") + argstr="--number_of_thread %d", + ) default_pixel_value = traits.Float( - desc= - "Default pixel value for samples falling outside of the input region", - argstr="--default_pixel_value %f") + desc="Default pixel value for samples falling outside of the input region", + argstr="--default_pixel_value %f", + ) window_function = traits.Enum( "h", "c", "w", "l", "b", - desc= - "Window Function , h = Hamming , c = Cosine , w = Welch , l = Lanczos , b = Blackman", - argstr="--window_function %s") + desc="Window Function , h = Hamming , c = Cosine , w = Welch , l = Lanczos , b = Blackman", + argstr="--window_function %s", + ) spline_order = traits.Int(desc="Spline Order", argstr="--spline_order %d") transform_matrix = InputMultiPath( traits.Float, - desc= - "12 parameters of the transform matrix by rows ( --last 3 being translation-- )", + desc="12 parameters of the transform matrix by rows ( --last 3 being translation-- )", sep=",", - argstr="--transform_matrix %s") + argstr="--transform_matrix %s", + ) transform = traits.Enum( "rt", "a", desc="Transform algorithm, rt = Rigid Transform, a = Affine Transform", - argstr="--transform %s") + argstr="--transform %s", + ) class ResampleScalarVectorDWIVolumeOutputSpec(TraitedSpec): @@ -129,25 +138,24 @@ class ResampleScalarVectorDWIVolumeOutputSpec(TraitedSpec): class ResampleScalarVectorDWIVolume(SEMLikeCommandLine): """title: Resample Scalar/Vector/DWI Volume -category: Filtering - -description: This module implements image and vector-image resampling through the 
use of itk Transforms.It can also handle diffusion weighted MRI image resampling. "Resampling" is performed in space coordinates, not pixel/grid coordinates. It is quite important to ensure that image spacing is properly set on the images involved. The interpolator is required since the mapping from one space to the other will often require evaluation of the intensity of the image at non-grid positions. + category: Filtering -Warning: To resample DWMR Images, use nrrd input and output files. + description: This module implements image and vector-image resampling through the use of itk Transforms. It can also handle diffusion weighted MRI image resampling. "Resampling" is performed in space coordinates, not pixel/grid coordinates. It is quite important to ensure that image spacing is properly set on the images involved. The interpolator is required since the mapping from one space to the other will often require evaluation of the intensity of the image at non-grid positions. -Warning: Do not use to resample Diffusion Tensor Images, tensors would not be reoriented + Warning: To resample DWMR Images, use nrrd input and output files. -version: 0.1 + Warning: Do not use to resample Diffusion Tensor Images, tensors would not be reoriented -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ResampleScalarVectorDWIVolume + version: 0.1 -contributor: Francois Budin (UNC) + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ResampleScalarVectorDWIVolume -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Information on the National Centers for Biomedical Computing can be obtained from http://nihroadmap.nih.gov/bioinformatics + contributor: Francois Budin (UNC) -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
Information on the National Centers for Biomedical Computing can be obtained from http://nihroadmap.nih.gov/bioinformatics + """ input_spec = ResampleScalarVectorDWIVolumeInputSpec output_spec = ResampleScalarVectorDWIVolumeOutputSpec _cmd = "ResampleScalarVectorDWIVolume " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/tests/__init__.py b/nipype/interfaces/slicer/filtering/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/filtering/tests/__init__.py +++ b/nipype/interfaces/slicer/filtering/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_AddScalarVolumes.py b/nipype/interfaces/slicer/filtering/tests/test_auto_AddScalarVolumes.py index 9f2209c1eb..2cd0ac229d 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_AddScalarVolumes.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_AddScalarVolumes.py @@ -1,26 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..arithmetic import AddScalarVolumes def test_AddScalarVolumes_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), inputVolume2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - order=dict(argstr='--order %s', ), + order=dict( + argstr="--order %s", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -30,8 +35,15 @@ def test_AddScalarVolumes_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AddScalarVolumes_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = AddScalarVolumes.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_CastScalarVolume.py b/nipype/interfaces/slicer/filtering/tests/test_auto_CastScalarVolume.py index a659aa47f7..8417ab1a90 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_CastScalarVolume.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_CastScalarVolume.py @@ -1,33 +1,44 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..arithmetic import CastScalarVolume def test_CastScalarVolume_inputs(): input_map = dict( InputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), OutputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - type=dict(argstr='--type %s', ), + type=dict( + argstr="--type %s", + ), ) inputs = CastScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CastScalarVolume_outputs(): - output_map = dict(OutputVolume=dict(position=-1, ), ) + output_map = dict( + OutputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = CastScalarVolume.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/slicer/filtering/tests/test_auto_CheckerBoardFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_CheckerBoardFilter.py index ae662cf2ba..49b5133faa 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_CheckerBoardFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_CheckerBoardFilter.py @@ -1,29 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..checkerboardfilter import CheckerBoardFilter def test_CheckerBoardFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), checkerPattern=dict( - argstr='--checkerPattern %s', - sep=',', + argstr="--checkerPattern %s", + sep=",", ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), inputVolume2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -33,8 +36,15 @@ def test_CheckerBoardFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CheckerBoardFilter_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = CheckerBoardFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_CurvatureAnisotropicDiffusion.py b/nipype/interfaces/slicer/filtering/tests/test_auto_CurvatureAnisotropicDiffusion.py index 5613eb0c4c..48421b7c21 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_CurvatureAnisotropicDiffusion.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_CurvatureAnisotropicDiffusion.py @@ -1,35 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..denoising import CurvatureAnisotropicDiffusion def test_CurvatureAnisotropicDiffusion_inputs(): input_map = dict( - args=dict(argstr='%s', ), - conductance=dict(argstr='--conductance %f', ), + args=dict( + argstr="%s", + ), + conductance=dict( + argstr="--conductance %f", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - iterations=dict(argstr='--iterations %d', ), + iterations=dict( + argstr="--iterations %d", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - timeStep=dict(argstr='--timeStep %f', ), + timeStep=dict( + argstr="--timeStep %f", + ), ) inputs = CurvatureAnisotropicDiffusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CurvatureAnisotropicDiffusion_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = CurvatureAnisotropicDiffusion.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_ExtractSkeleton.py b/nipype/interfaces/slicer/filtering/tests/test_auto_ExtractSkeleton.py index 5db6c65c7e..430e299787 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_ExtractSkeleton.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_ExtractSkeleton.py @@ 
-1,36 +1,53 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..extractskeleton import ExtractSkeleton def test_ExtractSkeleton_inputs(): input_map = dict( InputImageFileName=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), OutputImageFileName=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - args=dict(argstr='%s', ), - dontPrune=dict(argstr='--dontPrune ', ), + args=dict( + argstr="%s", + ), + dontPrune=dict( + argstr="--dontPrune ", + ), environ=dict( nohash=True, usedefault=True, ), - numPoints=dict(argstr='--numPoints %d', ), - pointsFile=dict(argstr='--pointsFile %s', ), - type=dict(argstr='--type %s', ), + numPoints=dict( + argstr="--numPoints %d", + ), + pointsFile=dict( + argstr="--pointsFile %s", + ), + type=dict( + argstr="--type %s", + ), ) inputs = ExtractSkeleton.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ExtractSkeleton_outputs(): - output_map = dict(OutputImageFileName=dict(position=-1, ), ) + output_map = dict( + OutputImageFileName=dict( + extensions=None, + position=-1, + ), + ) outputs = ExtractSkeleton.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_GaussianBlurImageFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_GaussianBlurImageFilter.py index ff46d6d308..113490472d 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_GaussianBlurImageFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_GaussianBlurImageFilter.py @@ -1,33 +1,44 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..denoising import GaussianBlurImageFilter def test_GaussianBlurImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - sigma=dict(argstr='--sigma %f', ), + sigma=dict( + argstr="--sigma %f", + ), ) inputs = GaussianBlurImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GaussianBlurImageFilter_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = GaussianBlurImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_GradientAnisotropicDiffusion.py b/nipype/interfaces/slicer/filtering/tests/test_auto_GradientAnisotropicDiffusion.py index 8ec7dbb156..95810788c7 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_GradientAnisotropicDiffusion.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_GradientAnisotropicDiffusion.py @@ -1,35 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..denoising import GradientAnisotropicDiffusion def test_GradientAnisotropicDiffusion_inputs(): input_map = dict( - args=dict(argstr='%s', ), - conductance=dict(argstr='--conductance %f', ), + args=dict( + argstr="%s", + ), + conductance=dict( + argstr="--conductance %f", + ), 
environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - iterations=dict(argstr='--iterations %d', ), + iterations=dict( + argstr="--iterations %d", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - timeStep=dict(argstr='--timeStep %f', ), + timeStep=dict( + argstr="--timeStep %f", + ), ) inputs = GradientAnisotropicDiffusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GradientAnisotropicDiffusion_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = GradientAnisotropicDiffusion.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleFillHoleImageFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleFillHoleImageFilter.py index 062a23c293..8891232347 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleFillHoleImageFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleFillHoleImageFilter.py @@ -1,21 +1,23 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..morphology import GrayscaleFillHoleImageFilter def test_GrayscaleFillHoleImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -25,8 +27,15 @@ def test_GrayscaleFillHoleImageFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GrayscaleFillHoleImageFilter_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = GrayscaleFillHoleImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleGrindPeakImageFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleGrindPeakImageFilter.py index edb6081ed3..d48d9ded63 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleGrindPeakImageFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleGrindPeakImageFilter.py @@ -1,21 +1,23 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..morphology import GrayscaleGrindPeakImageFilter def test_GrayscaleGrindPeakImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -25,8 +27,15 @@ def test_GrayscaleGrindPeakImageFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GrayscaleGrindPeakImageFilter_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = 
dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = GrayscaleGrindPeakImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_HistogramMatching.py b/nipype/interfaces/slicer/filtering/tests/test_auto_HistogramMatching.py index f73690586d..cf2a959ff9 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_HistogramMatching.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_HistogramMatching.py @@ -1,39 +1,55 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..histogrammatching import HistogramMatching def test_HistogramMatching_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), - numberOfHistogramLevels=dict(argstr='--numberOfHistogramLevels %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), + numberOfHistogramLevels=dict( + argstr="--numberOfHistogramLevels %d", + ), + numberOfMatchPoints=dict( + argstr="--numberOfMatchPoints %d", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), referenceVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - threshold=dict(argstr='--threshold ', ), + threshold=dict( + argstr="--threshold ", + ), ) inputs = HistogramMatching.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_HistogramMatching_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = HistogramMatching.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_ImageLabelCombine.py b/nipype/interfaces/slicer/filtering/tests/test_auto_ImageLabelCombine.py index 0bae22c342..802baf5f38 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_ImageLabelCombine.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_ImageLabelCombine.py @@ -1,37 +1,49 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..imagelabelcombine import ImageLabelCombine def test_ImageLabelCombine_inputs(): input_map = dict( InputLabelMap_A=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), InputLabelMap_B=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), OutputLabelMap=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - first_overwrites=dict(argstr='--first_overwrites ', ), + first_overwrites=dict( + argstr="--first_overwrites ", + ), ) inputs = ImageLabelCombine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageLabelCombine_outputs(): - output_map = dict(OutputLabelMap=dict(position=-1, ), ) + output_map = dict( + OutputLabelMap=dict( + extensions=None, + position=-1, + ), + ) outputs = ImageLabelCombine.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_MaskScalarVolume.py 
b/nipype/interfaces/slicer/filtering/tests/test_auto_MaskScalarVolume.py index cd04072890..5070718d66 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_MaskScalarVolume.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_MaskScalarVolume.py @@ -1,38 +1,52 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..arithmetic import MaskScalarVolume def test_MaskScalarVolume_inputs(): input_map = dict( InputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), MaskVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), OutputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - label=dict(argstr='--label %d', ), - replace=dict(argstr='--replace %d', ), + label=dict( + argstr="--label %d", + ), + replace=dict( + argstr="--replace %d", + ), ) inputs = MaskScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MaskScalarVolume_outputs(): - output_map = dict(OutputVolume=dict(position=-1, ), ) + output_map = dict( + OutputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = MaskScalarVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_MedianImageFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_MedianImageFilter.py index 35bb9496c9..8b86a90c3b 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_MedianImageFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_MedianImageFilter.py @@ -1,25 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..denoising import MedianImageFilter def test_MedianImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), neighborhood=dict( - argstr='--neighborhood %s', - sep=',', + argstr="--neighborhood %s", + sep=",", ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -29,8 +31,15 @@ def test_MedianImageFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedianImageFilter_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = MedianImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_MultiplyScalarVolumes.py b/nipype/interfaces/slicer/filtering/tests/test_auto_MultiplyScalarVolumes.py index 6590c4b133..817fdbbe95 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_MultiplyScalarVolumes.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_MultiplyScalarVolumes.py @@ -1,26 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..arithmetic import MultiplyScalarVolumes def test_MultiplyScalarVolumes_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, 
usedefault=True, ), inputVolume1=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), inputVolume2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - order=dict(argstr='--order %s', ), + order=dict( + argstr="--order %s", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -30,8 +35,15 @@ def test_MultiplyScalarVolumes_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultiplyScalarVolumes_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = MultiplyScalarVolumes.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_N4ITKBiasFieldCorrection.py b/nipype/interfaces/slicer/filtering/tests/test_auto_N4ITKBiasFieldCorrection.py index c6ad0bf24d..43038036d5 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_N4ITKBiasFieldCorrection.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_N4ITKBiasFieldCorrection.py @@ -1,52 +1,76 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..n4itkbiasfieldcorrection import N4ITKBiasFieldCorrection def test_N4ITKBiasFieldCorrection_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bsplineorder=dict(argstr='--bsplineorder %d', ), - convergencethreshold=dict(argstr='--convergencethreshold %f', ), + args=dict( + argstr="%s", + ), + bsplineorder=dict( + argstr="--bsplineorder %d", + ), + convergencethreshold=dict( + argstr="--convergencethreshold %f", + ), environ=dict( nohash=True, usedefault=True, ), histogramsharpening=dict( - argstr='--histogramsharpening %s', - sep=',', + argstr="--histogramsharpening %s", + sep=",", + ), + inputimage=dict( + argstr="--inputimage %s", + extensions=None, ), - inputimage=dict(argstr='--inputimage %s', ), iterations=dict( - argstr='--iterations %s', - sep=',', + argstr="--iterations %s", + sep=",", + ), + maskimage=dict( + argstr="--maskimage %s", + extensions=None, ), - maskimage=dict(argstr='--maskimage %s', ), meshresolution=dict( - argstr='--meshresolution %s', - sep=',', + argstr="--meshresolution %s", + sep=",", ), outputbiasfield=dict( - argstr='--outputbiasfield %s', + argstr="--outputbiasfield %s", hash_files=False, ), outputimage=dict( - argstr='--outputimage %s', + argstr="--outputimage %s", hash_files=False, ), - shrinkfactor=dict(argstr='--shrinkfactor %d', ), - splinedistance=dict(argstr='--splinedistance %f', ), - weightimage=dict(argstr='--weightimage %s', ), + shrinkfactor=dict( + argstr="--shrinkfactor %d", + ), + splinedistance=dict( + argstr="--splinedistance %f", + ), + weightimage=dict( + argstr="--weightimage %s", + extensions=None, + ), ) inputs = N4ITKBiasFieldCorrection.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_N4ITKBiasFieldCorrection_outputs(): output_map = dict( - outputbiasfield=dict(), - outputimage=dict(), + outputbiasfield=dict( + extensions=None, + ), + outputimage=dict( + extensions=None, + ), ) outputs = N4ITKBiasFieldCorrection.output_spec() diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_ResampleScalarVectorDWIVolume.py 
b/nipype/interfaces/slicer/filtering/tests/test_auto_ResampleScalarVectorDWIVolume.py index ebe34fd9b3..37dca6437c 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_ResampleScalarVectorDWIVolume.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_ResampleScalarVectorDWIVolume.py @@ -1,67 +1,114 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..resamplescalarvectordwivolume import ResampleScalarVectorDWIVolume def test_ResampleScalarVectorDWIVolume_inputs(): input_map = dict( Inverse_ITK_Transformation=dict( - argstr='--Inverse_ITK_Transformation ', ), - Reference=dict(argstr='--Reference %s', ), - args=dict(argstr='%s', ), - centered_transform=dict(argstr='--centered_transform ', ), - defField=dict(argstr='--defField %s', ), - default_pixel_value=dict(argstr='--default_pixel_value %f', ), + argstr="--Inverse_ITK_Transformation ", + ), + Reference=dict( + argstr="--Reference %s", + extensions=None, + ), + args=dict( + argstr="%s", + ), + centered_transform=dict( + argstr="--centered_transform ", + ), + defField=dict( + argstr="--defField %s", + extensions=None, + ), + default_pixel_value=dict( + argstr="--default_pixel_value %f", + ), direction_matrix=dict( - argstr='--direction_matrix %s', - sep=',', + argstr="--direction_matrix %s", + sep=",", ), environ=dict( nohash=True, usedefault=True, ), - hfieldtype=dict(argstr='--hfieldtype %s', ), - image_center=dict(argstr='--image_center %s', ), + hfieldtype=dict( + argstr="--hfieldtype %s", + ), + image_center=dict( + argstr="--image_center %s", + ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - interpolation=dict(argstr='--interpolation %s', ), - notbulk=dict(argstr='--notbulk ', ), - number_of_thread=dict(argstr='--number_of_thread %d', ), - origin=dict(argstr='--origin %s', ), + interpolation=dict( + argstr="--interpolation %s", + ), + notbulk=dict( + argstr="--notbulk ", + ), + number_of_thread=dict( + argstr="--number_of_thread %d", + ), + origin=dict( + argstr="--origin %s", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - rotation_point=dict(argstr='--rotation_point %s', ), + rotation_point=dict( + argstr="--rotation_point %s", + ), size=dict( - argstr='--size %s', - sep=',', + argstr="--size %s", + sep=",", + ), + spaceChange=dict( + argstr="--spaceChange ", ), - spaceChange=dict(argstr='--spaceChange ', ), spacing=dict( - argstr='--spacing %s', - sep=',', + argstr="--spacing %s", + sep=",", + ), + spline_order=dict( + argstr="--spline_order %d", + ), + transform=dict( + argstr="--transform %s", ), - spline_order=dict(argstr='--spline_order %d', ), - transform=dict(argstr='--transform %s', ), transform_matrix=dict( - argstr='--transform_matrix %s', - sep=',', + argstr="--transform_matrix %s", + sep=",", + ), + transform_order=dict( + argstr="--transform_order %s", + ), + transformationFile=dict( + argstr="--transformationFile %s", + extensions=None, + ), + window_function=dict( + argstr="--window_function %s", ), - transform_order=dict(argstr='--transform_order %s', ), - transformationFile=dict(argstr='--transformationFile %s', ), - window_function=dict(argstr='--window_function %s', ), ) inputs = ResampleScalarVectorDWIVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ResampleScalarVectorDWIVolume_outputs(): - output_map = 
dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = ResampleScalarVectorDWIVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_SubtractScalarVolumes.py b/nipype/interfaces/slicer/filtering/tests/test_auto_SubtractScalarVolumes.py index d62589881f..abe3d9ad00 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_SubtractScalarVolumes.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_SubtractScalarVolumes.py @@ -1,26 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..arithmetic import SubtractScalarVolumes def test_SubtractScalarVolumes_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), inputVolume2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - order=dict(argstr='--order %s', ), + order=dict( + argstr="--order %s", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -30,8 +35,15 @@ def test_SubtractScalarVolumes_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SubtractScalarVolumes_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = SubtractScalarVolumes.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_ThresholdScalarVolume.py b/nipype/interfaces/slicer/filtering/tests/test_auto_ThresholdScalarVolume.py index 34de317104..0aaab0ff7a 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_ThresholdScalarVolume.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_ThresholdScalarVolume.py @@ -1,37 +1,56 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..thresholdscalarvolume import ThresholdScalarVolume def test_ThresholdScalarVolume_inputs(): input_map = dict( InputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), OutputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - lower=dict(argstr='--lower %d', ), - outsidevalue=dict(argstr='--outsidevalue %d', ), - threshold=dict(argstr='--threshold %d', ), - thresholdtype=dict(argstr='--thresholdtype %s', ), - upper=dict(argstr='--upper %d', ), + lower=dict( + argstr="--lower %d", + ), + outsidevalue=dict( + argstr="--outsidevalue %d", + ), + threshold=dict( + argstr="--threshold %d", + ), + thresholdtype=dict( + argstr="--thresholdtype %s", + ), + upper=dict( + argstr="--upper %d", + ), ) inputs = ThresholdScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ThresholdScalarVolume_outputs(): - output_map = dict(OutputVolume=dict(position=-1, ), ) + output_map = dict( + OutputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = ThresholdScalarVolume.output_spec() for key, metadata in list(output_map.items()): 
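Every auto-generated test module above follows the same pattern: declare the expected trait metadata, instantiate the spec, and assert each (trait, metakey) pair. A condensed, self-contained sketch of that pattern, using one of the interfaces from this diff and a deliberately trimmed metadata map:

# Sketch of the checkspecs assertion pattern shared by all tests above.
# The map is trimmed to a single trait; the real tests enumerate every input.
from nipype.interfaces.slicer.filtering.morphology import GrayscaleFillHoleImageFilter

expected = {"inputVolume": {"argstr": "%s", "extensions": None, "position": -2}}
inputs = GrayscaleFillHoleImageFilter.input_spec()
for name, metadata in expected.items():
    for metakey, value in metadata.items():
        # inputs.traits() maps trait names to CTrait objects carrying metadata
        assert getattr(inputs.traits()[name], metakey) == value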
diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_VotingBinaryHoleFillingImageFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_VotingBinaryHoleFillingImageFilter.py index 8e7890de85..bbaa19f848 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_VotingBinaryHoleFillingImageFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_VotingBinaryHoleFillingImageFilter.py @@ -1,30 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..votingbinaryholefillingimagefilter import VotingBinaryHoleFillingImageFilter def test_VotingBinaryHoleFillingImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - background=dict(argstr='--background %d', ), + args=dict( + argstr="%s", + ), + background=dict( + argstr="--background %d", + ), environ=dict( nohash=True, usedefault=True, ), - foreground=dict(argstr='--foreground %d', ), + foreground=dict( + argstr="--foreground %d", + ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - majorityThreshold=dict(argstr='--majorityThreshold %d', ), + majorityThreshold=dict( + argstr="--majorityThreshold %d", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), radius=dict( - argstr='--radius %s', - sep=',', + argstr="--radius %s", + sep=",", ), ) inputs = VotingBinaryHoleFillingImageFilter.input_spec() @@ -32,8 +40,15 @@ def test_VotingBinaryHoleFillingImageFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VotingBinaryHoleFillingImageFilter_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = VotingBinaryHoleFillingImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py b/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py index 041ce10990..d5f0cef21f 100644 --- a/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py +++ b/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py @@ -1,61 +1,62 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class ThresholdScalarVolumeInputSpec(CommandLineInputSpec): - InputVolume = File( - position=-2, desc="Input volume", exists=True, argstr="%s") + InputVolume = File(position=-2, desc="Input volume", exists=True, argstr="%s") OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Thresholded input volume", - argstr="%s") + argstr="%s", + ) threshold = traits.Int(desc="Threshold value", argstr="--threshold %d") lower = traits.Int(desc="Lower threshold value", argstr="--lower %d") upper = traits.Int(desc="Upper threshold value", argstr="--upper %d") outsidevalue = traits.Int( - desc= - "Set the voxels to this value if they fall outside the threshold range", - argstr="--outsidevalue %d") + desc="Set the voxels to this value if they fall outside the threshold 
range", + argstr="--outsidevalue %d", + ) thresholdtype = traits.Enum( "Below", "Above", "Outside", - desc= - "What kind of threshold to perform. If Outside is selected, uses Upper and Lower values. If Below is selected, uses the ThresholdValue, if Above is selected, uses the ThresholdValue.", - argstr="--thresholdtype %s") + desc="What kind of threshold to perform. If Outside is selected, uses Upper and Lower values. If Below is selected, uses the ThresholdValue, if Above is selected, uses the ThresholdValue.", + argstr="--thresholdtype %s", + ) class ThresholdScalarVolumeOutputSpec(TraitedSpec): - OutputVolume = File( - position=-1, desc="Thresholded input volume", exists=True) + OutputVolume = File(position=-1, desc="Thresholded input volume", exists=True) class ThresholdScalarVolume(SEMLikeCommandLine): """title: Threshold Scalar Volume -category: Filtering + category: Filtering -description:

-Threshold an image.

-Set image values to a user-specified outside value if they are below, above, or between simple threshold values.

-ThresholdAbove: The values greater than or equal to the threshold value are set to OutsideValue.

-ThresholdBelow: The values less than or equal to the threshold value are set to OutsideValue.

-ThresholdOutside: The values outside the range Lower-Upper are set to OutsideValue.

-Although all image types are supported on input, only signed types are produced.

+ description:

+ Threshold an image.

+ Set image values to a user-specified outside value if they are below, above, or between simple threshold values.

+ ThresholdAbove: The values greater than or equal to the threshold value are set to OutsideValue.

+ ThresholdBelow: The values less than or equal to the threshold value are set to OutsideValue.

+ ThresholdOutside: The values outside the range Lower-Upper are set to OutsideValue.

+ Although all image types are supported on input, only signed types are produced.

-version: 0.1.0.$Revision: 2104 $(alpha) + version: 0.1.0.$Revision: 2104 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Threshold + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Threshold -contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) + contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. - -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + """ input_spec = ThresholdScalarVolumeInputSpec output_spec = ThresholdScalarVolumeOutputSpec _cmd = "ThresholdScalarVolume " - _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} + _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py b/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py index 9c19799d04..9903b07793 100644 --- a/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py +++ b/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py @@ -1,10 +1,14 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class VotingBinaryHoleFillingImageFilterInputSpec(CommandLineInputSpec): @@ -12,29 +16,31 @@ class VotingBinaryHoleFillingImageFilterInputSpec(CommandLineInputSpec): traits.Int, desc="The radius of a hole to be filled", sep=",", - argstr="--radius %s") + argstr="--radius %s", + ) majorityThreshold = traits.Int( - desc= - "The number of pixels over 50% that will decide whether an OFF pixel will become ON or not. For example, if the neighborhood of a pixel has 124 pixels (excluding itself), the 50% will be 62, and if you set a Majority threshold of 5, that means that the filter will require 67 or more neighbor pixels to be ON in order to switch the current OFF pixel to ON.", - argstr="--majorityThreshold %d") + desc="The number of pixels over 50% that will decide whether an OFF pixel will become ON or not. 
For example, if the neighborhood of a pixel has 124 pixels (excluding itself), the 50% will be 62, and if you set a Majority threshold of 5, that means that the filter will require 67 or more neighbor pixels to be ON in order to switch the current OFF pixel to ON.", + argstr="--majorityThreshold %d", + ) background = traits.Int( desc="The value associated with the background (not object)", - argstr="--background %d") + argstr="--background %d", + ) foreground = traits.Int( desc="The value associated with the foreground (object)", - argstr="--foreground %d") + argstr="--foreground %d", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class VotingBinaryHoleFillingImageFilterOutputSpec(TraitedSpec): @@ -44,21 +50,20 @@ class VotingBinaryHoleFillingImageFilterOutputSpec(TraitedSpec): class VotingBinaryHoleFillingImageFilter(SEMLikeCommandLine): """title: Voting Binary Hole Filling Image Filter -category: Filtering + category: Filtering -description: Applies a voting operation in order to fill-in cavities. This can be used for smoothing contours and for filling holes in binary images. This technique is used frequently when segmenting complete organs that may have ducts or vasculature that may not have been included in the initial segmentation, e.g. lungs, kidneys, liver. + description: Applies a voting operation in order to fill-in cavities. This can be used for smoothing contours and for filling holes in binary images. This technique is used frequently when segmenting complete organs that may have ducts or vasculature that may not have been included in the initial segmentation, e.g. lungs, kidneys, liver. -version: 0.1.0.$Revision: 19608 $(alpha) + version: 0.1.0.$Revision: 19608 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/VotingBinaryHoleFillingImageFilter + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/VotingBinaryHoleFillingImageFilter -contributor: Bill Lorensen (GE) + contributor: Bill Lorensen (GE) -acknowledgements: This command module was derived from Insight/Examples/Filtering/VotingBinaryHoleFillingImageFilter (copyright) Insight Software Consortium - -""" + acknowledgements: This command module was derived from Insight/Examples/Filtering/VotingBinaryHoleFillingImageFilter (copyright) Insight Software Consortium + """ input_spec = VotingBinaryHoleFillingImageFilterInputSpec output_spec = VotingBinaryHoleFillingImageFilterOutputSpec _cmd = "VotingBinaryHoleFillingImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/generate_classes.py b/nipype/interfaces/slicer/generate_classes.py index 6fe3ae927f..a36dd6b55a 100644 --- a/nipype/interfaces/slicer/generate_classes.py +++ b/nipype/interfaces/slicer/generate_classes.py @@ -1,22 +1,22 @@ -# -*- coding: utf-8 -*- """This script generates Slicer Interfaces based on the CLI modules XML. CLI modules are selected from the hardcoded list below and generated code is placed in the cli_modules.py file (and imported in __init__.py). 
For this to work -correctly you must have your CLI executabes in $PATH""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, bytes, open +correctly you must have your CLI executables in $PATH""" + import xml.dom.minidom import subprocess import os from shutil import rmtree import keyword -python_keywords = keyword.kwlist # If c++ SEM module uses one of these key words as a command line parameter, we need to modify variable + +python_keywords = ( + keyword.kwlist +) # If c++ SEM module uses one of these key words as a command line parameter, we need to modify variable def force_to_valid_python_variable_name(old_name): - """ Valid c++ names are not always valid in python, so + """Valid c++ names are not always valid in python, so provide alternate naming >>> force_to_valid_python_variable_name('lambda') @@ -27,54 +27,49 @@ def force_to_valid_python_variable_name(old_name): new_name = old_name new_name = new_name.lstrip().rstrip() if old_name in python_keywords: - new_name = 'opt_' + old_name + new_name = "opt_" + old_name return new_name def add_class_to_package(class_codes, class_names, module_name, package_dir): module_python_filename = os.path.join(package_dir, "%s.py" % module_name) - f_m = open(module_python_filename, 'w') - f_i = open(os.path.join(package_dir, "__init__.py"), 'a+') - f_m.write("""# -*- coding: utf-8 -*- + with open(module_python_filename, "w") as f_m: + f_m.write( + """# -*- coding: utf-8 -*- \"\"\"Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.\"\"\"\n\n""" - ) - imports = """from __future__ import (print_function, division, unicode_literals, - absolute_import) + ) + imports = """\ from ..base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath) import os\n\n\n""" - f_m.write(imports) - f_m.write("\n\n".join(class_codes)) - f_i.write("from %s import %s\n" % (module_name, ", ".join(class_names))) - f_m.close() - f_i.close() + f_m.write(imports) + f_m.write("\n\n".join(class_codes)) + with open(os.path.join(package_dir, "__init__.py"), "a+") as f_i: + f_i.write("from {} import {}\n".format(module_name, ", ".join(class_names))) def crawl_code_struct(code_struct, package_dir): subpackages = [] for k, v in code_struct.items(): - if isinstance(v, str) or isinstance(v, (str, bytes)): + if isinstance(v, (str, bytes)): module_name = k.lower() class_name = k class_code = v - add_class_to_package([class_code], [class_name], module_name, - package_dir) + add_class_to_package([class_code], [class_name], module_name, package_dir) else: l1 = {} l2 = {} for key in list(v.keys()): - if (isinstance(v[key], str) - or isinstance(v[key], (str, bytes))): + if isinstance(v[key], (str, bytes)): l1[key] = v[key] else: l2[key] = v[key] if l2: v = l2 subpackages.append(k.lower()) - f_i = open(os.path.join(package_dir, "__init__.py"), 'a+') - f_i.write("from %s import *\n" % k.lower()) - f_i.close() + with open(os.path.join(package_dir, "__init__.py"), "a+") as f_i: + f_i.write("from %s import *\n" % k.lower()) new_pkg_dir = os.path.join(package_dir, k.lower()) if os.path.exists(new_pkg_dir): rmtree(new_pkg_dir) @@ -87,11 +82,12 @@ def crawl_code_struct(code_struct, package_dir): v = l1 module_name = k.lower() add_class_to_package( - list(v.values()), list(v.keys()), module_name, package_dir) + list(v.values()), list(v.keys()), module_name, package_dir + ) if subpackages: - 
f = open(os.path.join(package_dir, "setup.py"), 'w') - f.write( - """# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- + with open(os.path.join(package_dir, "setup.py"), "w") as f: + f.write( + """# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration @@ -105,28 +101,33 @@ def configuration(parent_package='',top_path=None): if __name__ == '__main__': from numpy.distutils.core import setup setup(**configuration(top_path='').todict()) -""".format(pkg_name=package_dir.split("/")[-1], - sub_pks="\n ".join([ - "config.add_data_dir('%s')" % sub_pkg for sub_pkg in subpackages - ]))) - f.close() - - -def generate_all_classes(modules_list=[], - launcher=[], - redirect_x=False, - mipav_hacks=False): - """ modules_list contains all the SEM compliant tools that should have wrappers created for them. - launcher containtains the command line prefix wrapper arugments needed to prepare - a proper environment for each of the modules. +""".format( + pkg_name=package_dir.split("/")[-1], + sub_pks="\n ".join( + [ + "config.add_data_dir('%s')" % sub_pkg + for sub_pkg in subpackages + ] + ), + ) + ) + + +def generate_all_classes( + modules_list=[], launcher=[], redirect_x=False, mipav_hacks=False +): + """modules_list contains all the SEM compliant tools that should have wrappers created for them. + launcher containtains the command line prefix wrapper arguments needed to prepare + a proper environment for each of the modules. """ all_code = {} for module in modules_list: print("=" * 80) - print("Generating Definition for module {0}".format(module)) + print(f"Generating Definition for module {module}") print("^" * 80) package, code, module = generate_class( - module, launcher, redirect_x=redirect_x, mipav_hacks=mipav_hacks) + module, launcher, redirect_x=redirect_x, mipav_hacks=mipav_hacks + ) cur_package = all_code module_name = package.strip().split(" ")[0].split(".")[-1] for package in package.strip().split(" ")[0].split(".")[:-1]: @@ -141,11 +142,9 @@ def generate_all_classes(modules_list=[], crawl_code_struct(all_code, os.getcwd()) -def generate_class(module, - launcher, - strip_module_name_prefix=True, - redirect_x=False, - mipav_hacks=False): +def generate_class( + module, launcher, strip_module_name_prefix=True, redirect_x=False, mipav_hacks=False +): dom = grab_xml(module, launcher, mipav_hacks=mipav_hacks) if strip_module_name_prefix: module_name = module.split(".")[-1] @@ -157,36 +156,43 @@ def generate_class(module, # self._outputs_nodes = [] - class_string = "\"\"\"" + class_string = '"""' for desc_str in [ - 'title', 'category', 'description', 'version', 'documentation-url', - 'license', 'contributor', 'acknowledgements' + "title", + "category", + "description", + "version", + "documentation-url", + "license", + "contributor", + "acknowledgements", ]: el = dom.getElementsByTagName(desc_str) if el and el[0].firstChild and el[0].firstChild.nodeValue.strip(): - class_string += desc_str + ": " + el[0].firstChild.nodeValue.strip( - ) + "\n\n" - if desc_str == 'category': + class_string += ( + desc_str + ": " + el[0].firstChild.nodeValue.strip() + "\n\n" + ) + if desc_str == "category": category = el[0].firstChild.nodeValue.strip() - class_string += "\"\"\"" + class_string += '"""' for paramGroup in dom.getElementsByTagName("parameters"): - indices = paramGroup.getElementsByTagName('index') + indices = 
paramGroup.getElementsByTagName("index") max_index = 0 for index in indices: if int(index.firstChild.nodeValue) > max_index: max_index = int(index.firstChild.nodeValue) for param in paramGroup.childNodes: - if param.nodeName in ['label', 'description', '#text', '#comment']: + if param.nodeName in ["label", "description", "#text", "#comment"]: continue traitsParams = {} - longFlagNode = param.getElementsByTagName('longflag') + longFlagNode = param.getElementsByTagName("longflag") if longFlagNode: # Prefer to use longFlag as name if it is given, rather than the parameter name longFlagName = longFlagNode[0].firstChild.nodeValue - # SEM automatically strips prefixed "--" or "-" from from xml before processing + # SEM automatically strips prefixed "--" or "-" from xml before processing # we need to replicate that behavior here The following # two nodes in xml have the same behavior in the program # --test @@ -196,102 +202,113 @@ def generate_class(module, name = force_to_valid_python_variable_name(name) traitsParams["argstr"] = "--" + longFlagName + " " else: - name = param.getElementsByTagName('name')[ - 0].firstChild.nodeValue + name = param.getElementsByTagName("name")[0].firstChild.nodeValue name = force_to_valid_python_variable_name(name) - if param.getElementsByTagName('index'): + if param.getElementsByTagName("index"): traitsParams["argstr"] = "" else: traitsParams["argstr"] = "--" + name + " " - if param.getElementsByTagName( - 'description') and param.getElementsByTagName( - 'description')[0].firstChild: - traitsParams["desc"] = param.getElementsByTagName( - 'description')[0].firstChild.nodeValue.replace( - '"', "\\\"").replace("\n", ", ") + if ( + param.getElementsByTagName("description") + and param.getElementsByTagName("description")[0].firstChild + ): + traitsParams["desc"] = ( + param.getElementsByTagName("description")[0] + .firstChild.nodeValue.replace('"', '\\"') + .replace("\n", ", ") + ) argsDict = { - 'directory': '%s', - 'file': '%s', - 'integer': "%d", - 'double': "%f", - 'float': "%f", - 'image': "%s", - 'transform': "%s", - 'boolean': '', - 'string-enumeration': '%s', - 'string': "%s", - 'integer-enumeration': '%s', - 'table': '%s', - 'point': '%s', - 'region': '%s', - 'geometry': '%s' + "directory": "%s", + "file": "%s", + "integer": "%d", + "double": "%f", + "float": "%f", + "image": "%s", + "transform": "%s", + "boolean": "", + "string-enumeration": "%s", + "string": "%s", + "integer-enumeration": "%s", + "table": "%s", + "point": "%s", + "region": "%s", + "geometry": "%s", } - if param.nodeName.endswith('-vector'): + if param.nodeName.endswith("-vector"): traitsParams["argstr"] += "%s" else: traitsParams["argstr"] += argsDict[param.nodeName] - index = param.getElementsByTagName('index') + index = param.getElementsByTagName("index") if index: - traitsParams["position"] = int( - index[0].firstChild.nodeValue) - (max_index + 1) + traitsParams["position"] = int(index[0].firstChild.nodeValue) - ( + max_index + 1 + ) - desc = param.getElementsByTagName('description') + desc = param.getElementsByTagName("description") if index: traitsParams["desc"] = desc[0].firstChild.nodeValue typesDict = { - 'integer': "traits.Int", - 'double': "traits.Float", - 'float': "traits.Float", - 'image': "File", - 'transform': "File", - 'boolean': "traits.Bool", - 'string': "traits.Str", - 'file': "File", - 'geometry': "File", - 'directory': "Directory", - 'table': "File", - 'point': "traits.List", - 'region': "traits.List" + "integer": "traits.Int", + "double": "traits.Float", + "float": 
"traits.Float", + "image": "File", + "transform": "File", + "boolean": "traits.Bool", + "string": "traits.Str", + "file": "File", + "geometry": "File", + "directory": "Directory", + "table": "File", + "point": "traits.List", + "region": "traits.List", } - if param.nodeName.endswith('-enumeration'): + if param.nodeName.endswith("-enumeration"): type = "traits.Enum" values = [ - '"%s"' % str(el.firstChild.nodeValue).replace('"', '') - for el in param.getElementsByTagName('element') + '"%s"' % str(el.firstChild.nodeValue).replace('"', "") + for el in param.getElementsByTagName("element") ] - elif param.nodeName.endswith('-vector'): + elif param.nodeName.endswith("-vector"): type = "InputMultiPath" if param.nodeName in [ - 'file', 'directory', 'image', 'geometry', 'transform', - 'table' + "file", + "directory", + "image", + "geometry", + "transform", + "table", ]: values = [ - "%s(exists=True)" % typesDict[param.nodeName.replace( - '-vector', '')] + "%s(exists=True)" + % typesDict[param.nodeName.replace("-vector", "")] ] else: - values = [typesDict[param.nodeName.replace('-vector', '')]] + values = [typesDict[param.nodeName.replace("-vector", "")]] if mipav_hacks is True: traitsParams["sep"] = ";" else: - traitsParams["sep"] = ',' - elif param.getAttribute('multiple') == "true": + traitsParams["sep"] = "," + elif param.getAttribute("multiple") == "true": type = "InputMultiPath" if param.nodeName in [ - 'file', 'directory', 'image', 'geometry', 'transform', - 'table' + "file", + "directory", + "image", + "geometry", + "transform", + "table", ]: values = ["%s(exists=True)" % typesDict[param.nodeName]] - elif param.nodeName in ['point', 'region']: + elif param.nodeName in ["point", "region"]: values = [ - "%s(traits.Float(), minlen=3, maxlen=3)" % - typesDict[param.nodeName] + "%s(traits.Float(), minlen=3, maxlen=3)" + % typesDict[param.nodeName] ] else: values = [typesDict[param.nodeName]] @@ -301,60 +318,85 @@ def generate_class(module, type = typesDict[param.nodeName] if param.nodeName in [ - 'file', 'directory', 'image', 'geometry', 'transform', - 'table' + "file", + "directory", + "image", + "geometry", + "transform", + "table", ]: - if not param.getElementsByTagName('channel'): + if not param.getElementsByTagName("channel"): raise RuntimeError( - "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field.\n{0}". 
- format(traitsParams)) - elif param.getElementsByTagName('channel')[ - 0].firstChild.nodeValue == 'output': + "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field.\n{}".format( + traitsParams + ) + ) + elif ( + param.getElementsByTagName("channel")[0].firstChild.nodeValue + == "output" + ): traitsParams["hash_files"] = False inputTraits.append( - "%s = traits.Either(traits.Bool, %s(%s), %s)" % - (name, type, - parse_values(values).replace("exists=True", ""), - parse_params(traitsParams))) + "%s = traits.Either(traits.Bool, %s(%s), %s)" + % ( + name, + type, + parse_values(values).replace("exists=True", ""), + parse_params(traitsParams), + ) + ) traitsParams["exists"] = True traitsParams.pop("argstr") traitsParams.pop("hash_files") - outputTraits.append("%s = %s(%s%s)" % - (name, type.replace("Input", "Output"), - parse_values(values), - parse_params(traitsParams))) - - outputs_filenames[name] = gen_filename_from_param( - param, name) - elif param.getElementsByTagName('channel')[ - 0].firstChild.nodeValue == 'input': + outputTraits.append( + "%s = %s(%s%s)" + % ( + name, + type.replace("Input", "Output"), + parse_values(values), + parse_params(traitsParams), + ) + ) + + outputs_filenames[name] = gen_filename_from_param(param, name) + elif ( + param.getElementsByTagName("channel")[0].firstChild.nodeValue + == "input" + ): if param.nodeName in [ - 'file', 'directory', 'image', 'geometry', - 'transform', 'table' + "file", + "directory", + "image", + "geometry", + "transform", + "table", ] and type not in ["InputMultiPath", "traits.List"]: traitsParams["exists"] = True - inputTraits.append("%s = %s(%s%s)" % - (name, type, parse_values(values), - parse_params(traitsParams))) + inputTraits.append( + "%s = %s(%s%s)" + % (name, type, parse_values(values), parse_params(traitsParams)) + ) else: raise RuntimeError( - "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field to be in ['input','output'].\n{0}". 
- format(traitsParams)) + "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field to be in ['input','output'].\n{}".format( + traitsParams + ) + ) else: # For all other parameter types, they are implicitly only input types - inputTraits.append("%s = %s(%s%s)" % - (name, type, parse_values(values), - parse_params(traitsParams))) + inputTraits.append( + "%s = %s(%s%s)" + % (name, type, parse_values(values), parse_params(traitsParams)) + ) if mipav_hacks: blacklisted_inputs = ["maxMemoryUsage"] inputTraits = [ - trait for trait in inputTraits - if trait.split()[0] not in blacklisted_inputs + trait for trait in inputTraits if trait.split()[0] not in blacklisted_inputs ] compulsory_inputs = [ 'xDefaultMem = traits.Int(desc="Set default maximum heap size", argstr="-xDefaultMem %d")', - 'xMaxProcess = traits.Int(1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", usedefault=True)' + 'xMaxProcess = traits.Int(1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", usedefault=True)', ] inputTraits += compulsory_inputs @@ -370,9 +412,9 @@ def generate_class(module, output_spec_code += " " + trait + "\n" output_filenames_code = "_outputs_filenames = {" - output_filenames_code += ",".join([ - "'%s':'%s'" % (key, value) for key, value in outputs_filenames.items() - ]) + output_filenames_code += ",".join( + [f"'{key}':'{value}'" for key, value in outputs_filenames.items()] + ) output_filenames_code += "}" input_spec_code += "\n\n" @@ -385,12 +427,15 @@ def generate_class(module, output_spec = %module_name%OutputSpec _cmd = "%launcher% %name% " %output_filenames_code%\n""" - template += " _redirect_x = {0}\n".format(str(redirect_x)) + template += f" _redirect_x = {redirect_x}\n" - main_class = template.replace('%class_str%', class_string).replace( - "%module_name%", module_name).replace("%name%", module).replace( - "%output_filenames_code%", output_filenames_code).replace( - "%launcher%", " ".join(launcher)) + main_class = ( + template.replace("%class_str%", class_string) + .replace("%module_name%", module_name) + .replace("%name%", module) + .replace("%output_filenames_code%", output_filenames_code) + .replace("%launcher%", " ".join(launcher)) + ) return category, input_spec_code + output_spec_code + main_class, module_name @@ -402,7 +447,8 @@ def grab_xml(module, launcher, mipav_hacks=False): command_list.extend([module, "--xml"]) final_command = " ".join(command_list) xmlReturnValue = subprocess.Popen( - final_command, stdout=subprocess.PIPE, shell=True).communicate()[0] + final_command, stdout=subprocess.PIPE, shell=True + ).communicate()[0] if mipav_hacks: # workaround for a jist bug https://www.nitrc.org/tracker/index.php?func=detail&aid=7234&group_id=228&atid=942 new_xml = "" @@ -422,10 +468,10 @@ def grab_xml(module, launcher, mipav_hacks=False): # workaround for a JIST bug https://www.nitrc.org/tracker/index.php?func=detail&aid=7233&group_id=228&atid=942 if xmlReturnValue.strip().endswith("XML"): xmlReturnValue = xmlReturnValue.strip()[:-3] - if xmlReturnValue.strip().startswith( - "Error: Unable to set default atlas"): - xmlReturnValue = xmlReturnValue.strip()[len( - "Error: Unable to set default atlas"):] + if xmlReturnValue.strip().startswith("Error: Unable to set default atlas"): + xmlReturnValue = xmlReturnValue.strip()[ + len("Error: Unable to set default atlas") : + ] try: dom = xml.dom.minidom.parseString(xmlReturnValue.strip()) except Exception as e: 
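One step in generate_class above deserves a worked example: SEM XML gives positional parameters a 0-based index counted from the front of the command line, while the generated specs store a negative position counted from the end, computed as index - (max_index + 1). A small sketch of that conversion (sem_index_to_position is a hypothetical name, not a function in this module):

def sem_index_to_position(index, max_index):
    # A 0-based SEM index from the front of the command line maps to a
    # negative nipype position counted from the end.
    return index - (max_index + 1)


# With two positional parameters (max_index == 1), as in the
# ThresholdScalarVolume spec earlier in this patch:
assert sem_index_to_position(0, 1) == -2  # InputVolume, position=-2
assert sem_index_to_position(1, 1) == -1  # OutputVolume, position=-1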
@@ -444,15 +490,15 @@ def parse_params(params): list = [] for key, value in params.items(): if isinstance(value, (str, bytes)): - list.append('%s="%s"' % (key, value.replace('"', "'"))) + list.append('{}="{}"'.format(key, value.replace('"', "'"))) else: - list.append('%s=%s' % (key, value)) + list.append(f"{key}={value}") return ", ".join(list) def parse_values(values): - values = ['%s' % value for value in values] + values = ["%s" % value for value in values] if len(values) > 0: retstr = ", ".join(values) + ", " else: @@ -465,15 +511,15 @@ def gen_filename_from_param(param, base): if fileExtensions: # It is possible that multiple file extensions can be specified in a # comma separated list, This will extract just the first extension - firstFileExtension = fileExtensions.split(',')[0] + firstFileExtension = fileExtensions.split(",")[0] ext = firstFileExtension else: ext = { - 'image': '.nii', - 'transform': '.mat', - 'file': '', - 'directory': '', - 'geometry': '.vtk' + "image": ".nii", + "transform": ".mat", + "file": "", + "directory": "", + "geometry": ".vtk", }[param.nodeName] return base + ext @@ -483,68 +529,68 @@ def gen_filename_from_param(param, base): # every tool in the modules list must be found on the default path # AND calling the module with --xml must be supported and compliant. modules_list = [ - 'MedianImageFilter', - 'CheckerBoardFilter', - 'EMSegmentCommandLine', - 'GrayscaleFillHoleImageFilter', + "MedianImageFilter", + "CheckerBoardFilter", + "EMSegmentCommandLine", + "GrayscaleFillHoleImageFilter", # 'CreateDICOMSeries', #missing channel - 'TractographyLabelMapSeeding', - 'IntensityDifferenceMetric', - 'DWIToDTIEstimation', - 'MaskScalarVolume', - 'ImageLabelCombine', - 'DTIimport', - 'OtsuThresholdImageFilter', - 'ExpertAutomatedRegistration', - 'ThresholdScalarVolume', - 'DWIUnbiasedNonLocalMeansFilter', - 'BRAINSFit', - 'MergeModels', - 'ResampleDTIVolume', - 'MultiplyScalarVolumes', - 'LabelMapSmoothing', - 'RigidRegistration', - 'VotingBinaryHoleFillingImageFilter', - 'BRAINSROIAuto', - 'RobustStatisticsSegmenter', - 'GradientAnisotropicDiffusion', - 'ProbeVolumeWithModel', - 'ModelMaker', - 'ExtractSkeleton', - 'GrayscaleGrindPeakImageFilter', - 'N4ITKBiasFieldCorrection', - 'BRAINSResample', - 'DTIexport', - 'VBRAINSDemonWarp', - 'ResampleScalarVectorDWIVolume', - 'ResampleScalarVolume', - 'OtsuThresholdSegmentation', + "TractographyLabelMapSeeding", + "IntensityDifferenceMetric", + "DWIToDTIEstimation", + "MaskScalarVolume", + "ImageLabelCombine", + "DTIimport", + "OtsuThresholdImageFilter", + "ExpertAutomatedRegistration", + "ThresholdScalarVolume", + "DWIUnbiasedNonLocalMeansFilter", + "BRAINSFit", + "MergeModels", + "ResampleDTIVolume", + "MultiplyScalarVolumes", + "LabelMapSmoothing", + "RigidRegistration", + "VotingBinaryHoleFillingImageFilter", + "BRAINSROIAuto", + "RobustStatisticsSegmenter", + "GradientAnisotropicDiffusion", + "ProbeVolumeWithModel", + "ModelMaker", + "ExtractSkeleton", + "GrayscaleGrindPeakImageFilter", + "N4ITKBiasFieldCorrection", + "BRAINSResample", + "DTIexport", + "VBRAINSDemonWarp", + "ResampleScalarVectorDWIVolume", + "ResampleScalarVolume", + "OtsuThresholdSegmentation", # 'ExecutionModelTour', - 'HistogramMatching', - 'BRAINSDemonWarp', - 'ModelToLabelMap', - 'GaussianBlurImageFilter', - 'DiffusionWeightedVolumeMasking', - 'GrayscaleModelMaker', - 'CastScalarVolume', - 'DicomToNrrdConverter', - 'AffineRegistration', - 'AddScalarVolumes', - 'LinearRegistration', - 'SimpleRegionGrowingSegmentation', - 
'DWIJointRicianLMMSEFilter', - 'MultiResolutionAffineRegistration', - 'SubtractScalarVolumes', - 'DWIRicianLMMSEFilter', - 'OrientScalarVolume', - 'FiducialRegistration', - 'BSplineDeformableRegistration', - 'CurvatureAnisotropicDiffusion', - 'PETStandardUptakeValueComputation', - 'DiffusionTensorScalarMeasurements', - 'ACPCTransform', - 'EMSegmentTransformToNewFormat', - 'BSplineToDeformationField' + "HistogramMatching", + "BRAINSDemonWarp", + "ModelToLabelMap", + "GaussianBlurImageFilter", + "DiffusionWeightedVolumeMasking", + "GrayscaleModelMaker", + "CastScalarVolume", + "DicomToNrrdConverter", + "AffineRegistration", + "AddScalarVolumes", + "LinearRegistration", + "SimpleRegionGrowingSegmentation", + "DWIJointRicianLMMSEFilter", + "MultiResolutionAffineRegistration", + "SubtractScalarVolumes", + "DWIRicianLMMSEFilter", + "OrientScalarVolume", + "FiducialRegistration", + "BSplineDeformableRegistration", + "CurvatureAnisotropicDiffusion", + "PETStandardUptakeValueComputation", + "DiffusionTensorScalarMeasurements", + "ACPCTransform", + "EMSegmentTransformToNewFormat", + "BSplineToDeformationField", ] # SlicerExecutionModel compliant tools that are usually statically built, and don't need the Slicer3 --launcher diff --git a/nipype/interfaces/slicer/legacy/__init__.py b/nipype/interfaces/slicer/legacy/__init__.py index 75c6b9d327..4c0da2e0a0 100644 --- a/nipype/interfaces/slicer/legacy/__init__.py +++ b/nipype/interfaces/slicer/legacy/__init__.py @@ -1,10 +1,12 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .diffusion import * from .segmentation import OtsuThresholdSegmentation from .filtering import OtsuThresholdImageFilter, ResampleScalarVolume from .converters import BSplineToDeformationField -from .registration import (BSplineDeformableRegistration, AffineRegistration, - MultiResolutionAffineRegistration, - RigidRegistration, LinearRegistration, - ExpertAutomatedRegistration) +from .registration import ( + BSplineDeformableRegistration, + AffineRegistration, + MultiResolutionAffineRegistration, + RigidRegistration, + LinearRegistration, + ExpertAutomatedRegistration, +) diff --git a/nipype/interfaces/slicer/legacy/converters.py b/nipype/interfaces/slicer/legacy/converters.py index f5af1ad29b..c5f0ecd21a 100644 --- a/nipype/interfaces/slicer/legacy/converters.py +++ b/nipype/interfaces/slicer/legacy/converters.py @@ -1,17 +1,21 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class BSplineToDeformationFieldInputSpec(CommandLineInputSpec): tfm = File(exists=True, argstr="--tfm %s") refImage = File(exists=True, argstr="--refImage %s") defImage = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--defImage %s") + traits.Bool, File(), hash_files=False, argstr="--defImage %s" + ) class BSplineToDeformationFieldOutputSpec(TraitedSpec): @@ -21,21 +25,20 @@ class BSplineToDeformationFieldOutputSpec(TraitedSpec): class BSplineToDeformationField(SEMLikeCommandLine): """title: BSpline to deformation field -category: Legacy.Converters + category: Legacy.Converters -description: Create a dense deformation field from a bspline+bulk 
transform. + description: Create a dense deformation field from a bspline+bulk transform. -version: 0.1.0.$Revision: 2104 $(alpha) + version: 0.1.0.$Revision: 2104 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BSplineToDeformationField + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BSplineToDeformationField -contributor: Andrey Fedorov (SPL, BWH) + contributor: Andrey Fedorov (SPL, BWH) -acknowledgements: This work is funded by NIH grants R01 CA111288 and U01 CA151261. - -""" + acknowledgements: This work is funded by NIH grants R01 CA111288 and U01 CA151261. + """ input_spec = BSplineToDeformationFieldInputSpec output_spec = BSplineToDeformationFieldOutputSpec _cmd = "BSplineToDeformationField " - _outputs_filenames = {'defImage': 'defImage.nii'} + _outputs_filenames = {"defImage": "defImage.nii"} diff --git a/nipype/interfaces/slicer/legacy/diffusion/__init__.py b/nipype/interfaces/slicer/legacy/diffusion/__init__.py index f66daabb5b..b40040d9e7 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/__init__.py +++ b/nipype/interfaces/slicer/legacy/diffusion/__init__.py @@ -1,3 +1 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .denoising import DWIUnbiasedNonLocalMeansFilter diff --git a/nipype/interfaces/slicer/legacy/diffusion/denoising.py b/nipype/interfaces/slicer/legacy/diffusion/denoising.py index 0cc8cce0f6..8cccf949bb 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/denoising.py +++ b/nipype/interfaces/slicer/legacy/diffusion/denoising.py @@ -1,48 +1,52 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class DWIUnbiasedNonLocalMeansFilterInputSpec(CommandLineInputSpec): rs = InputMultiPath( traits.Int, - desc= - "The algorithm search for similar voxels in a neighborhood of this size (larger sizes than the default one are extremely slow).", + desc="The algorithm search for similar voxels in a neighborhood of this size (larger sizes than the default one are extremely slow).", sep=",", - argstr="--rs %s") + argstr="--rs %s", + ) rc = InputMultiPath( traits.Int, - desc= - "Similarity between blocks is measured using windows of this size.", + desc="Similarity between blocks is measured using windows of this size.", sep=",", - argstr="--rc %s") + argstr="--rc %s", + ) hp = traits.Float( - desc= - "This parameter is related to noise; the larger the parameter, the more agressive the filtering. Should be near 1, and only values between 0.8 and 1.2 are allowed", - argstr="--hp %f") + desc="This parameter is related to noise; the larger the parameter, the more aggressive the filtering. 
Should be near 1, and only values between 0.8 and 1.2 are allowed", + argstr="--hp %f", + ) ng = traits.Int( - desc= - "The number of the closest gradients that are used to jointly filter a given gradient direction (a maximum of 5 is allowed).", - argstr="--ng %d") + desc="The number of the closest gradients that are used to jointly filter a given gradient direction (a maximum of 5 is allowed).", + argstr="--ng %d", + ) re = InputMultiPath( traits.Int, - desc= - "A neighborhood of this size is used to compute the statistics for noise estimation.", + desc="A neighborhood of this size is used to compute the statistics for noise estimation.", sep=",", - argstr="--re %s") - inputVolume = File( - position=-2, desc="Input DWI volume.", exists=True, argstr="%s") + argstr="--re %s", + ) + inputVolume = File(position=-2, desc="Input DWI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DWI volume.", - argstr="%s") + argstr="%s", + ) class DWIUnbiasedNonLocalMeansFilterOutputSpec(TraitedSpec): @@ -52,25 +56,24 @@ class DWIUnbiasedNonLocalMeansFilterOutputSpec(TraitedSpec): class DWIUnbiasedNonLocalMeansFilter(SEMLikeCommandLine): """title: DWI Unbiased Non Local Means Filter -category: Legacy.Diffusion.Denoising + category: Legacy.Diffusion.Denoising -description: This module reduces noise (or unwanted detail) on a set of diffusion weighted images. For this, it filters the images using a Unbiased Non Local Means for Rician noise algorithm. It exploits not only the spatial redundancy, but the redundancy in similar gradient directions as well; it takes into account the N closest gradient directions to the direction being processed (a maximum of 5 gradient directions is allowed to keep a reasonable computational load, since we do not use neither similarity maps nor block-wise implementation). -The noise parameter is automatically estimated in the same way as in the jointLMMSE module. -A complete description of the algorithm may be found in: -Antonio Tristan-Vega and Santiago Aja-Fernandez, DWI filtering using joint information for DTI and HARDI, Medical Image Analysis, Volume 14, Issue 2, Pages 205-218. 2010. -Please, note that the execution of this filter is extremely slow, son only very conservative parameters (block size and search size as small as possible) should be used. Even so, its execution may take several hours. The advantage of this filter over joint LMMSE is its better preservation of edges and fine structures. + description: This module reduces noise (or unwanted detail) on a set of diffusion weighted images. For this, it filters the images using a Unbiased Non Local Means for Rician noise algorithm. It exploits not only the spatial redundancy, but the redundancy in similar gradient directions as well; it takes into account the N closest gradient directions to the direction being processed (a maximum of 5 gradient directions is allowed to keep a reasonable computational load, since we do not use neither similarity maps nor block-wise implementation). + The noise parameter is automatically estimated in the same way as in the jointLMMSE module. + A complete description of the algorithm may be found in: + Antonio Tristan-Vega and Santiago Aja-Fernandez, DWI filtering using joint information for DTI and HARDI, Medical Image Analysis, Volume 14, Issue 2, Pages 205-218. 2010. 
+ Please, note that the execution of this filter is extremely slow, son only very conservative parameters (block size and search size as small as possible) should be used. Even so, its execution may take several hours. The advantage of this filter over joint LMMSE is its better preservation of edges and fine structures. -version: 0.0.1.$Revision: 1 $(alpha) + version: 0.0.1.$Revision: 1 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/UnbiasedNonLocalMeansFilterForDWI + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/UnbiasedNonLocalMeansFilterForDWI -contributor: Antonio Tristan Vega (UVa), Santiago Aja Fernandez (UVa) + contributor: Antonio Tristan Vega (UVa), Santiago Aja Fernandez (UVa) -acknowledgements: Partially founded by grant number TEC2007-67073/TCM from the Comision Interministerial de Ciencia y Tecnologia (Spain). - -""" + acknowledgements: Partially founded by grant number TEC2007-67073/TCM from the Comision Interministerial de Ciencia y Tecnologia (Spain). + """ input_spec = DWIUnbiasedNonLocalMeansFilterInputSpec output_spec = DWIUnbiasedNonLocalMeansFilterOutputSpec _cmd = "DWIUnbiasedNonLocalMeansFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py b/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py +++ b/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/legacy/diffusion/tests/test_auto_DWIUnbiasedNonLocalMeansFilter.py b/nipype/interfaces/slicer/legacy/diffusion/tests/test_auto_DWIUnbiasedNonLocalMeansFilter.py index 0e25ffad9a..8aa18dc6a3 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/tests/test_auto_DWIUnbiasedNonLocalMeansFilter.py +++ b/nipype/interfaces/slicer/legacy/diffusion/tests/test_auto_DWIUnbiasedNonLocalMeansFilter.py @@ -1,37 +1,43 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..denoising import DWIUnbiasedNonLocalMeansFilter def test_DWIUnbiasedNonLocalMeansFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - hp=dict(argstr='--hp %f', ), + hp=dict( + argstr="--hp %f", + ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - ng=dict(argstr='--ng %d', ), + ng=dict( + argstr="--ng %d", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), rc=dict( - argstr='--rc %s', - sep=',', + argstr="--rc %s", + sep=",", ), re=dict( - argstr='--re %s', - sep=',', + argstr="--re %s", + sep=",", ), rs=dict( - argstr='--rs %s', - sep=',', + argstr="--rs %s", + sep=",", ), ) inputs = DWIUnbiasedNonLocalMeansFilter.input_spec() @@ -39,8 +45,15 @@ def test_DWIUnbiasedNonLocalMeansFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIUnbiasedNonLocalMeansFilter_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = DWIUnbiasedNonLocalMeansFilter.output_spec() for key, metadata in 
list(output_map.items()): diff --git a/nipype/interfaces/slicer/legacy/filtering.py b/nipype/interfaces/slicer/legacy/filtering.py index aaed2350e0..978752f25b 100644 --- a/nipype/interfaces/slicer/legacy/filtering.py +++ b/nipype/interfaces/slicer/legacy/filtering.py @@ -1,36 +1,40 @@ -# -*- coding: utf-8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class OtsuThresholdImageFilterInputSpec(CommandLineInputSpec): insideValue = traits.Int( - desc= - "The value assigned to pixels that are inside the computed threshold", - argstr="--insideValue %d") + desc="The value assigned to pixels that are inside the computed threshold", + argstr="--insideValue %d", + ) outsideValue = traits.Int( - desc= - "The value assigned to pixels that are outside the computed threshold", - argstr="--outsideValue %d") + desc="The value assigned to pixels that are outside the computed threshold", + argstr="--outsideValue %d", + ) numberOfBins = traits.Int( - desc= - "This is an advanced parameter. The number of bins in the histogram used to model the probability mass function of the two intensity distributions. Small numbers of bins may result in a more conservative threshold. The default should suffice for most applications. Experimentation is the only way to see the effect of varying this parameter.", - argstr="--numberOfBins %d") + desc="This is an advanced parameter. The number of bins in the histogram used to model the probability mass function of the two intensity distributions. Small numbers of bins may result in a more conservative threshold. The default should suffice for most applications. Experimentation is the only way to see the effect of varying this parameter.", + argstr="--numberOfBins %d", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class OtsuThresholdImageFilterOutputSpec(TraitedSpec): @@ -40,28 +44,27 @@ class OtsuThresholdImageFilterOutputSpec(TraitedSpec): class OtsuThresholdImageFilter(SEMLikeCommandLine): """title: Otsu Threshold Image Filter -category: Legacy.Filtering + category: Legacy.Filtering -description: This filter creates a binary thresholded image that separates an image into foreground and background components. The filter calculates the optimum threshold separating those two classes so that their combined spread (intra-class variance) is minimal (see http://en.wikipedia.org/wiki/Otsu%27s_method). Then the filter applies that threshold to the input image using the itkBinaryThresholdImageFilter. The numberOfHistogram bins can be set for the Otsu Calculator. The insideValue and outsideValue can be set for the BinaryThresholdImageFilter. The filter produces a labeled volume. + description: This filter creates a binary thresholded image that separates an image into foreground and background components. 
The filter calculates the optimum threshold separating those two classes so that their combined spread (intra-class variance) is minimal (see http://en.wikipedia.org/wiki/Otsu%27s_method). Then the filter applies that threshold to the input image using the itkBinaryThresholdImageFilter. The numberOfHistogram bins can be set for the Otsu Calculator. The insideValue and outsideValue can be set for the BinaryThresholdImageFilter. The filter produces a labeled volume. -The original reference is: + The original reference is: -N.Otsu, A threshold selection method from gray level histograms, IEEE Trans.Syst.ManCybern.SMC-9,62–66 1979. + N.Otsu, A threshold selection method from gray level histograms, IEEE Trans.Syst.ManCybern.SMC-9,62–66 1979. -version: 0.1.0.$Revision: 19608 $(alpha) + version: 0.1.0.$Revision: 19608 $(alpha) -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/OtsuThresholdImageFilter + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/OtsuThresholdImageFilter -contributor: Bill Lorensen (GE) + contributor: Bill Lorensen (GE) -acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium - -""" + acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium + """ input_spec = OtsuThresholdImageFilterInputSpec output_spec = OtsuThresholdImageFilterOutputSpec _cmd = "OtsuThresholdImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class ResampleScalarVolumeInputSpec(CommandLineInputSpec): @@ -69,7 +72,8 @@ class ResampleScalarVolumeInputSpec(CommandLineInputSpec): traits.Float, desc="Spacing along each dimension (0 means use input spacing)", sep=",", - argstr="--spacing %s") + argstr="--spacing %s", + ) interpolation = traits.Enum( "linear", "nearestNeighbor", @@ -79,21 +83,20 @@ class ResampleScalarVolumeInputSpec(CommandLineInputSpec): "welch", "lanczos", "blackman", - desc= - "Sampling algorithm (linear, nearest neighbor, bspline(cubic) or windowed sinc). There are several sinc algorithms available as described in the following publication: Erik H. W. Meijering, Wiro J. Niessen, Josien P. W. Pluim, Max A. Viergever: Quantitative Comparison of Sinc-Approximating Kernels for Medical Image Interpolation. MICCAI 1999, pp. 210-217. Each window has a radius of 3;", - argstr="--interpolation %s") + desc="Sampling algorithm (linear, nearest neighbor, bspline(cubic) or windowed sinc). There are several sinc algorithms available as described in the following publication: Erik H. W. Meijering, Wiro J. Niessen, Josien P. W. Pluim, Max A. Viergever: Quantitative Comparison of Sinc-Approximating Kernels for Medical Image Interpolation. MICCAI 1999, pp. 210-217. 
Each window has a radius of 3;", + argstr="--interpolation %s", + ) InputVolume = File( - position=-2, - desc="Input volume to be resampled", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be resampled", exists=True, argstr="%s" + ) OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resampled Volume", - argstr="%s") + argstr="%s", + ) class ResampleScalarVolumeOutputSpec(TraitedSpec): @@ -103,21 +106,20 @@ class ResampleScalarVolumeOutputSpec(TraitedSpec): class ResampleScalarVolume(SEMLikeCommandLine): """title: Resample Scalar Volume -category: Legacy.Filtering - -description: Resampling an image is an important task in image analysis. It is especially important in the frame of image registration. This module implements image resampling through the use of itk Transforms. This module uses an Identity Transform. The resampling is controlled by the Output Spacing. "Resampling" is performed in space coordinates, not pixel/grid coordinates. It is quite important to ensure that image spacing is properly set on the images involved. The interpolator is required since the mapping from one space to the other will often require evaluation of the intensity of the image at non-grid positions. Several interpolators are available: linear, nearest neighbor, bspline and five flavors of sinc. The sinc interpolators, although more precise, are much slower than the linear and nearest neighbor interpolator. To resample label volumnes, nearest neighbor interpolation should be used exclusively. + category: Legacy.Filtering -version: 0.1.0.$Revision: 20594 $(alpha) + description: Resampling an image is an important task in image analysis. It is especially important in the frame of image registration. This module implements image resampling through the use of itk Transforms. This module uses an Identity Transform. The resampling is controlled by the Output Spacing. "Resampling" is performed in space coordinates, not pixel/grid coordinates. It is quite important to ensure that image spacing is properly set on the images involved. The interpolator is required since the mapping from one space to the other will often require evaluation of the intensity of the image at non-grid positions. Several interpolators are available: linear, nearest neighbor, bspline and five flavors of sinc. The sinc interpolators, although more precise, are much slower than the linear and nearest neighbor interpolator. To resample label volumnes, nearest neighbor interpolation should be used exclusively. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ResampleVolume + version: 0.1.0.$Revision: 20594 $(alpha) -contributor: Bill Lorensen (GE) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ResampleVolume -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Bill Lorensen (GE) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = ResampleScalarVolumeInputSpec output_spec = ResampleScalarVolumeOutputSpec _cmd = "ResampleScalarVolume " - _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} + _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} diff --git a/nipype/interfaces/slicer/legacy/registration.py b/nipype/interfaces/slicer/legacy/registration.py index 7f73d85d82..01c9d17cb8 100644 --- a/nipype/interfaces/slicer/legacy/registration.py +++ b/nipype/interfaces/slicer/legacy/registration.py @@ -1,528 +1,532 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class BSplineDeformableRegistrationInputSpec(CommandLineInputSpec): - iterations = traits.Int( - desc="Number of iterations", argstr="--iterations %d") + iterations = traits.Int(desc="Number of iterations", argstr="--iterations %d") gridSize = traits.Int( - desc= - "Number of grid points on interior of the fixed image. Larger grid sizes allow for finer registrations.", - argstr="--gridSize %d") + desc="Number of grid points on interior of the fixed image. Larger grid sizes allow for finer registrations.", + argstr="--gridSize %d", + ) histogrambins = traits.Int( - desc= - "Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a deformable registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", - argstr="--histogrambins %d") + desc="Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a deformable registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", + argstr="--histogrambins %d", + ) spatialsamples = traits.Int( - desc= - "Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", - argstr="--spatialsamples %d") + desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", + argstr="--spatialsamples %d", + ) constrain = traits.Bool( - desc= - "Constrain the deformation to the amount specified in Maximum Deformation", - argstr="--constrain ") + desc="Constrain the deformation to the amount specified in Maximum Deformation", + argstr="--constrain ", + ) maximumDeformation = traits.Float( - desc= - "If Constrain Deformation is checked, limit the deformation to this amount.", - argstr="--maximumDeformation %f") + desc="If Constrain Deformation is checked, limit the deformation to this amount.", + argstr="--maximumDeformation %f", + ) default = traits.Int( - desc= - "Default pixel value used if resampling a pixel outside of the volume.", - argstr="--default %d") + desc="Default pixel value used if resampling a pixel outside of the volume.", + argstr="--default %d", + ) initialtransform = File( - desc= - "Initial transform for aligning the fixed and moving image. 
diff --git a/nipype/interfaces/slicer/legacy/registration.py b/nipype/interfaces/slicer/legacy/registration.py
index 7f73d85d82..01c9d17cb8 100644
--- a/nipype/interfaces/slicer/legacy/registration.py
+++ b/nipype/interfaces/slicer/legacy/registration.py
@@ -1,528 +1,532 @@
-# -*- coding: utf-8 -*-
-# -*- coding: utf8 -*-
 """Autogenerated file - DO NOT EDIT
 If you spot a bug, please report it on the mailing list and/or change the generator."""
-from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath
-import os
+from nipype.interfaces.base import (
+    CommandLineInputSpec,
+    SEMLikeCommandLine,
+    TraitedSpec,
+    File,
+    traits,
+    InputMultiPath,
+)
 
 
 class BSplineDeformableRegistrationInputSpec(CommandLineInputSpec):
-    iterations = traits.Int(
-        desc="Number of iterations", argstr="--iterations %d")
+    iterations = traits.Int(desc="Number of iterations", argstr="--iterations %d")
     gridSize = traits.Int(
-        desc=
-        "Number of grid points on interior of the fixed image. Larger grid sizes allow for finer registrations.",
-        argstr="--gridSize %d")
+        desc="Number of grid points on interior of the fixed image. Larger grid sizes allow for finer registrations.",
+        argstr="--gridSize %d",
+    )
     histogrambins = traits.Int(
-        desc=
-        "Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a deformable registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.",
-        argstr="--histogrambins %d")
+        desc="Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a deformable registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.",
+        argstr="--histogrambins %d",
+    )
     spatialsamples = traits.Int(
-        desc=
-        "Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.",
-        argstr="--spatialsamples %d")
+        desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.",
+        argstr="--spatialsamples %d",
+    )
     constrain = traits.Bool(
-        desc=
-        "Constrain the deformation to the amount specified in Maximum Deformation",
-        argstr="--constrain ")
+        desc="Constrain the deformation to the amount specified in Maximum Deformation",
+        argstr="--constrain ",
+    )
     maximumDeformation = traits.Float(
-        desc=
-        "If Constrain Deformation is checked, limit the deformation to this amount.",
-        argstr="--maximumDeformation %f")
+        desc="If Constrain Deformation is checked, limit the deformation to this amount.",
+        argstr="--maximumDeformation %f",
+    )
     default = traits.Int(
-        desc=
-        "Default pixel value used if resampling a pixel outside of the volume.",
-        argstr="--default %d")
+        desc="Default pixel value used if resampling a pixel outside of the volume.",
+        argstr="--default %d",
+    )
     initialtransform = File(
-        desc=
-        "Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. This transform should be an affine or rigid transform. It is used an a bulk transform for the BSpline. Optional.",
+        desc="Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. This transform should be an affine or rigid transform. It is used an a bulk transform for the BSpline. Optional.",
         exists=True,
-        argstr="--initialtransform %s")
+        argstr="--initialtransform %s",
+    )
     FixedImageFileName = File(
-        position=-2,
-        desc="Fixed image to which to register",
-        exists=True,
-        argstr="%s")
+        position=-2, desc="Fixed image to which to register", exists=True, argstr="%s"
+    )
     MovingImageFileName = File(
-        position=-1, desc="Moving image", exists=True, argstr="%s")
+        position=-1, desc="Moving image", exists=True, argstr="%s"
+    )
     outputtransform = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Transform calculated that aligns the fixed and moving image. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
-        argstr="--outputtransform %s")
+        desc="Transform calculated that aligns the fixed and moving image. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
+        argstr="--outputtransform %s",
+    )
     outputwarp = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Vector field that applies an equivalent warp as the BSpline. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional.",
-        argstr="--outputwarp %s")
+        desc="Vector field that applies an equivalent warp as the BSpline. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional.",
+        argstr="--outputwarp %s",
+    )
     resampledmovingfilename = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Resampled moving image to fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
-        argstr="--resampledmovingfilename %s")
+        desc="Resampled moving image to fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
+        argstr="--resampledmovingfilename %s",
+    )
 
 
 class BSplineDeformableRegistrationOutputSpec(TraitedSpec):
     outputtransform = File(
-        desc=
-        "Transform calculated that aligns the fixed and moving image. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
-        exists=True)
+        desc="Transform calculated that aligns the fixed and moving image. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
+        exists=True,
+    )
     outputwarp = File(
-        desc=
-        "Vector field that applies an equivalent warp as the BSpline. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional.",
-        exists=True)
+        desc="Vector field that applies an equivalent warp as the BSpline. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional.",
+        exists=True,
+    )
     resampledmovingfilename = File(
-        desc=
-        "Resampled moving image to fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
-        exists=True)
+        desc="Resampled moving image to fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
+        exists=True,
+    )
 
 
 class BSplineDeformableRegistration(SEMLikeCommandLine):
     """title: BSpline Deformable Registration
 
-category: Legacy.Registration
-
-description: Registers two images together using BSpline transform and mutual information.
+    category: Legacy.Registration
 
-version: 0.1.0.$Revision: 19608 $(alpha)
+    description: Registers two images together using BSpline transform and mutual information.
 
-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BSplineDeformableRegistration
+    version: 0.1.0.$Revision: 19608 $(alpha)
 
-contributor: Bill Lorensen (GE)
+    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BSplineDeformableRegistration
 
-acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    contributor: Bill Lorensen (GE)
 
-"""
+    acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    """
 
     input_spec = BSplineDeformableRegistrationInputSpec
     output_spec = BSplineDeformableRegistrationOutputSpec
     _cmd = "BSplineDeformableRegistration "
     _outputs_filenames = {
-        'resampledmovingfilename': 'resampledmovingfilename.nii',
-        'outputtransform': 'outputtransform.txt',
-        'outputwarp': 'outputwarp.nrrd'
+        "resampledmovingfilename": "resampledmovingfilename.nii",
+        "outputtransform": "outputtransform.txt",
+        "outputwarp": "outputwarp.nrrd",
     }
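
A similar hedged sketch for the BSplineDeformableRegistration interface above; file names are placeholders, and the optional outputs are requested by setting their Either(Bool, File) traits to True so nipype names them from _outputs_filenames:

    from nipype.interfaces.slicer.legacy.registration import BSplineDeformableRegistration

    bspline = BSplineDeformableRegistration()
    bspline.inputs.FixedImageFileName = "fixed.nii"    # placeholder
    bspline.inputs.MovingImageFileName = "moving.nii"  # placeholder
    bspline.inputs.gridSize = 5                        # illustrative coarse control-point grid
    bspline.inputs.iterations = 20                     # illustrative value
    bspline.inputs.outputtransform = True              # -> outputtransform.txt
    bspline.inputs.resampledmovingfilename = True      # -> resampledmovingfilename.nii
    print(bspline.cmdline)                             # requires the Slicer CLI to actually run
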
 
 
 class AffineRegistrationInputSpec(CommandLineInputSpec):
     fixedsmoothingfactor = traits.Int(
-        desc=
-        "Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
-        argstr="--fixedsmoothingfactor %d")
+        desc="Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
+        argstr="--fixedsmoothingfactor %d",
+    )
     movingsmoothingfactor = traits.Int(
-        desc=
-        "Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
-        argstr="--movingsmoothingfactor %d")
+        desc="Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
+        argstr="--movingsmoothingfactor %d",
+    )
     histogrambins = traits.Int(
-        desc=
-        "Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.",
-        argstr="--histogrambins %d")
+        desc="Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.",
+        argstr="--histogrambins %d",
+    )
     spatialsamples = traits.Int(
-        desc=
-        "Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.",
-        argstr="--spatialsamples %d")
-    iterations = traits.Int(
-        desc="Number of iterations", argstr="--iterations %d")
+        desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.",
+        argstr="--spatialsamples %d",
+    )
+    iterations = traits.Int(desc="Number of iterations", argstr="--iterations %d")
     translationscale = traits.Float(
-        desc=
-        "Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used is 1/(TranslationScale^2)). This parameter is used to \'weight\' or \'standardized\' the transform parameters and their effect on the registration objective function.",
-        argstr="--translationscale %f")
+        desc="Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used is 1/(TranslationScale^2)). This parameter is used to 'weight' or 'standardized' the transform parameters and their effect on the registration objective function.",
+        argstr="--translationscale %f",
+    )
     initialtransform = File(
-        desc=
-        "Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.",
+        desc="Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.",
         exists=True,
-        argstr="--initialtransform %s")
+        argstr="--initialtransform %s",
+    )
     FixedImageFileName = File(
-        position=-2,
-        desc="Fixed image to which to register",
-        exists=True,
-        argstr="%s")
+        position=-2, desc="Fixed image to which to register", exists=True, argstr="%s"
+    )
     MovingImageFileName = File(
-        position=-1, desc="Moving image", exists=True, argstr="%s")
+        position=-1, desc="Moving image", exists=True, argstr="%s"
+    )
     outputtransform = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
-        argstr="--outputtransform %s")
+        desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
+        argstr="--outputtransform %s",
+    )
     resampledmovingfilename = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
-        argstr="--resampledmovingfilename %s")
+        desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
+        argstr="--resampledmovingfilename %s",
+    )
 
 
 class AffineRegistrationOutputSpec(TraitedSpec):
     outputtransform = File(
-        desc=
-        "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
-        exists=True)
+        desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
+        exists=True,
+    )
     resampledmovingfilename = File(
-        desc=
-        "Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
-        exists=True)
+        desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
+        exists=True,
+    )
 
 
 class AffineRegistration(SEMLikeCommandLine):
     """title: Affine Registration
 
-category: Legacy.Registration
-
-description: Registers two images together using an affine transform and mutual information. This module is often used to align images of different subjects or images of the same subject from different modalities.
+    category: Legacy.Registration
 
-This module can smooth images prior to registration to mitigate noise and improve convergence. Many of the registration parameters require a working knowledge of the algorithm although the default parameters are sufficient for many registration tasks.
+    description: Registers two images together using an affine transform and mutual information. This module is often used to align images of different subjects or images of the same subject from different modalities.
+    This module can smooth images prior to registration to mitigate noise and improve convergence. Many of the registration parameters require a working knowledge of the algorithm although the default parameters are sufficient for many registration tasks.
 
-version: 0.1.0.$Revision: 19608 $(alpha)
-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/AffineRegistration
+    version: 0.1.0.$Revision: 19608 $(alpha)
 
-contributor: Daniel Blezek (GE)
+    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/AffineRegistration
 
-acknowledgements: This module was developed by Daniel Blezek while at GE Research with contributions from Jim Miller.
+    contributor: Daniel Blezek (GE)
 
-This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    acknowledgements: This module was developed by Daniel Blezek while at GE Research with contributions from Jim Miller.
 
-"""
+    This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    """
 
     input_spec = AffineRegistrationInputSpec
     output_spec = AffineRegistrationOutputSpec
     _cmd = "AffineRegistration "
     _outputs_filenames = {
-        'resampledmovingfilename': 'resampledmovingfilename.nii',
-        'outputtransform': 'outputtransform.txt'
+        "resampledmovingfilename": "resampledmovingfilename.nii",
+        "outputtransform": "outputtransform.txt",
     }
 
 
 class MultiResolutionAffineRegistrationInputSpec(CommandLineInputSpec):
     fixedImage = File(
         position=-2,
-        desc=
-        "Image which defines the space into which the moving image is registered",
+        desc="Image which defines the space into which the moving image is registered",
         exists=True,
-        argstr="%s")
+        argstr="%s",
+    )
     movingImage = File(
         position=-1,
-        desc=
-        "The transform goes from the fixed image's space into the moving image's space",
+        desc="The transform goes from the fixed image's space into the moving image's space",
         exists=True,
-        argstr="%s")
+        argstr="%s",
+    )
     resampledImage = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc="Registration results",
-        argstr="--resampledImage %s")
+        argstr="--resampledImage %s",
+    )
     saveTransform = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc="Save the output transform from the registration",
-        argstr="--saveTransform %s")
+        argstr="--saveTransform %s",
+    )
     fixedImageMask = File(
         desc="Label image which defines a mask of interest for the fixed image",
         exists=True,
-        argstr="--fixedImageMask %s")
+        argstr="--fixedImageMask %s",
+    )
     fixedImageROI = traits.List(
         desc="Label image which defines a ROI of interest for the fixed image",
-        argstr="--fixedImageROI %s")
+        argstr="--fixedImageROI %s",
+    )
     numIterations = traits.Int(
         desc="Number of iterations to run at each resolution level.",
-        argstr="--numIterations %d")
+        argstr="--numIterations %d",
+    )
     numLineIterations = traits.Int(
         desc="Number of iterations to run at each resolution level.",
-        argstr="--numLineIterations %d")
+        argstr="--numLineIterations %d",
+    )
     stepSize = traits.Float(
-        desc="The maximum step size of the optimizer in voxels",
-        argstr="--stepSize %f")
+        desc="The maximum step size of the optimizer in voxels", argstr="--stepSize %f"
+    )
     stepTolerance = traits.Float(
         desc="The maximum step size of the optimizer in voxels",
-        argstr="--stepTolerance %f")
+        argstr="--stepTolerance %f",
+    )
     metricTolerance = traits.Float(argstr="--metricTolerance %f")
 
 
 class MultiResolutionAffineRegistrationOutputSpec(TraitedSpec):
     resampledImage = File(desc="Registration results", exists=True)
     saveTransform = File(
-        desc="Save the output transform from the registration", exists=True)
+        desc="Save the output transform from the registration", exists=True
+    )
 
 
 class MultiResolutionAffineRegistration(SEMLikeCommandLine):
     """title: Robust Multiresolution Affine Registration
 
-category: Legacy.Registration
-
-description: Provides affine registration using multiple resolution levels and decomposed affine transforms.
+    category: Legacy.Registration
 
-version: 0.1.0.$Revision: 2104 $(alpha)
+    description: Provides affine registration using multiple resolution levels and decomposed affine transforms.
 
-documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/MultiResolutionAffineRegistration
+    version: 0.1.0.$Revision: 2104 $(alpha)
 
-contributor: Casey B Goodlett (Utah)
+    documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/MultiResolutionAffineRegistration
 
-acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    contributor: Casey B Goodlett (Utah)
 
-"""
+    acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    """
 
     input_spec = MultiResolutionAffineRegistrationInputSpec
     output_spec = MultiResolutionAffineRegistrationOutputSpec
     _cmd = "MultiResolutionAffineRegistration "
     _outputs_filenames = {
-        'resampledImage': 'resampledImage.nii',
-        'saveTransform': 'saveTransform.txt'
+        "resampledImage": "resampledImage.nii",
+        "saveTransform": "saveTransform.txt",
     }
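
MultiResolutionAffineRegistration exposes its optimizer controls directly; a minimal sketch with placeholder paths and illustrative values:

    from nipype.interfaces.slicer.legacy.registration import MultiResolutionAffineRegistration

    multires = MultiResolutionAffineRegistration()
    multires.inputs.fixedImage = "fixed.nii"   # placeholder
    multires.inputs.movingImage = "moving.nii" # placeholder
    multires.inputs.numIterations = 100        # iterations per resolution level
    multires.inputs.resampledImage = True      # -> resampledImage.nii
    multires.inputs.saveTransform = True       # -> saveTransform.txt
    print(multires.cmdline)
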
 
 
 class RigidRegistrationInputSpec(CommandLineInputSpec):
     fixedsmoothingfactor = traits.Int(
-        desc=
-        "Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
-        argstr="--fixedsmoothingfactor %d")
+        desc="Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
+        argstr="--fixedsmoothingfactor %d",
+    )
     movingsmoothingfactor = traits.Int(
-        desc=
-        "Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
-        argstr="--movingsmoothingfactor %d")
+        desc="Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
+        argstr="--movingsmoothingfactor %d",
+    )
     testingmode = traits.Bool(
-        desc=
-        "Enable testing mode. Input transform will be used to construct floating image. The floating image will be ignored if passed.",
-        argstr="--testingmode ")
+        desc="Enable testing mode. Input transform will be used to construct floating image. The floating image will be ignored if passed.",
+        argstr="--testingmode ",
+    )
     histogrambins = traits.Int(
-        desc=
-        "Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.",
-        argstr="--histogrambins %d")
+        desc="Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.",
+        argstr="--histogrambins %d",
+    )
     spatialsamples = traits.Int(
-        desc=
-        "Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.",
-        argstr="--spatialsamples %d")
+        desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.",
+        argstr="--spatialsamples %d",
+    )
     iterations = InputMultiPath(
         traits.Int,
-        desc=
-        "Comma separated list of iterations. Must have the same number of elements as the learning rate.",
+        desc="Comma separated list of iterations. Must have the same number of elements as the learning rate.",
         sep=",",
-        argstr="--iterations %s")
+        argstr="--iterations %s",
+    )
     learningrate = InputMultiPath(
         traits.Float,
-        desc=
-        "Comma separated list of learning rates. Learning rate is a scale factor on the gradient of the registration objective function (gradient with respect to the parameters of the transformation) used to update the parameters of the transformation during optimization. Smaller values cause the optimizer to take smaller steps through the parameter space. Larger values are typically used early in the registration process to take large jumps in parameter space followed by smaller values to home in on the optimum value of the registration objective function. Default is: 0.01, 0.005, 0.0005, 0.0002. Must have the same number of elements as iterations.",
+        desc="Comma separated list of learning rates. Learning rate is a scale factor on the gradient of the registration objective function (gradient with respect to the parameters of the transformation) used to update the parameters of the transformation during optimization. Smaller values cause the optimizer to take smaller steps through the parameter space. Larger values are typically used early in the registration process to take large jumps in parameter space followed by smaller values to home in on the optimum value of the registration objective function. Default is: 0.01, 0.005, 0.0005, 0.0002. Must have the same number of elements as iterations.",
         sep=",",
-        argstr="--learningrate %s")
+        argstr="--learningrate %s",
+    )
     translationscale = traits.Float(
-        desc=
-        "Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used 1/(TranslationScale^2)). This parameter is used to \'weight\' or \'standardized\' the transform parameters and their effect on the registration objective function.",
-        argstr="--translationscale %f")
+        desc="Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used 1/(TranslationScale^2)). This parameter is used to 'weight' or 'standardized' the transform parameters and their effect on the registration objective function.",
+        argstr="--translationscale %f",
+    )
     initialtransform = File(
-        desc=
-        "Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.",
+        desc="Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.",
         exists=True,
-        argstr="--initialtransform %s")
+        argstr="--initialtransform %s",
+    )
     FixedImageFileName = File(
-        position=-2,
-        desc="Fixed image to which to register",
-        exists=True,
-        argstr="%s")
+        position=-2, desc="Fixed image to which to register", exists=True, argstr="%s"
+    )
     MovingImageFileName = File(
-        position=-1, desc="Moving image", exists=True, argstr="%s")
+        position=-1, desc="Moving image", exists=True, argstr="%s"
+    )
     outputtransform = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
-        argstr="--outputtransform %s")
+        desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
+        argstr="--outputtransform %s",
+    )
     resampledmovingfilename = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
-        argstr="--resampledmovingfilename %s")
+        desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
+        argstr="--resampledmovingfilename %s",
+    )
 
 
 class RigidRegistrationOutputSpec(TraitedSpec):
     outputtransform = File(
-        desc=
-        "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
-        exists=True)
+        desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
+        exists=True,
+    )
     resampledmovingfilename = File(
-        desc=
-        "Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
-        exists=True)
+        desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
+        exists=True,
+    )
 
 
 class RigidRegistration(SEMLikeCommandLine):
     """title: Rigid Registration
 
-category: Legacy.Registration
-
-description: Registers two images together using a rigid transform and mutual information.
+    category: Legacy.Registration
 
-This module was originally distributed as "Linear registration" but has been renamed to eliminate confusion with the "Affine registration" module.
+    description: Registers two images together using a rigid transform and mutual information.
 
-This module is often used to align images of different subjects or images of the same subject from different modalities.
+    This module was originally distributed as "Linear registration" but has been renamed to eliminate confusion with the "Affine registration" module.
 
-This module can smooth images prior to registration to mitigate noise and improve convergence. Many of the registration parameters require a working knowledge of the algorithm although the default parameters are sufficient for many registration tasks.
+    This module is often used to align images of different subjects or images of the same subject from different modalities.
 
+    This module can smooth images prior to registration to mitigate noise and improve convergence. Many of the registration parameters require a working knowledge of the algorithm although the default parameters are sufficient for many registration tasks.
 
-version: 0.1.0.$Revision: 19608 $(alpha)
-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/RigidRegistration
+    version: 0.1.0.$Revision: 19608 $(alpha)
 
-contributor: Daniel Blezek (GE)
+    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/RigidRegistration
 
-acknowledgements: This module was developed by Daniel Blezek while at GE Research with contributions from Jim Miller.
+    contributor: Daniel Blezek (GE)
 
-This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    acknowledgements: This module was developed by Daniel Blezek while at GE Research with contributions from Jim Miller.
 
-"""
+    This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    """
 
     input_spec = RigidRegistrationInputSpec
     output_spec = RigidRegistrationOutputSpec
     _cmd = "RigidRegistration "
     _outputs_filenames = {
-        'resampledmovingfilename': 'resampledmovingfilename.nii',
-        'outputtransform': 'outputtransform.txt'
+        "resampledmovingfilename": "resampledmovingfilename.nii",
+        "outputtransform": "outputtransform.txt",
     }
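
Because these classes are ordinary nipype interfaces, they can also be wrapped in pipeline nodes; a sketch using RigidRegistration, with placeholder paths and the default learning-rate schedule quoted in the description above:

    import nipype.pipeline.engine as pe
    from nipype.interfaces.slicer.legacy.registration import RigidRegistration

    rigid = pe.Node(RigidRegistration(), name="rigid")
    rigid.inputs.FixedImageFileName = "fixed.nii"    # placeholder
    rigid.inputs.MovingImageFileName = "moving.nii"  # placeholder
    rigid.inputs.iterations = [100, 100, 50, 20]     # InputMultiPath, rendered comma-separated
    rigid.inputs.learningrate = [0.01, 0.005, 0.0005, 0.0002]
    rigid.inputs.outputtransform = True

    wf = pe.Workflow(name="slicer_rigid")
    wf.add_nodes([rigid])
    # wf.run()  # requires a base_dir and the Slicer CLI tools
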
 
 
 class LinearRegistrationInputSpec(CommandLineInputSpec):
     fixedsmoothingfactor = traits.Int(
-        desc=
-        "Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
-        argstr="--fixedsmoothingfactor %d")
+        desc="Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
+        argstr="--fixedsmoothingfactor %d",
+    )
     movingsmoothingfactor = traits.Int(
-        desc=
-        "Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
-        argstr="--movingsmoothingfactor %d")
+        desc="Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.",
+        argstr="--movingsmoothingfactor %d",
+    )
     histogrambins = traits.Int(
-        desc=
-        "Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.",
-        argstr="--histogrambins %d")
+        desc="Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.",
+        argstr="--histogrambins %d",
+    )
     spatialsamples = traits.Int(
-        desc=
-        "Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.",
-        argstr="--spatialsamples %d")
+        desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.",
+        argstr="--spatialsamples %d",
+    )
     iterations = InputMultiPath(
         traits.Int,
-        desc=
-        "Comma separated list of iterations. Must have the same number of elements as the learning rate.",
+        desc="Comma separated list of iterations. Must have the same number of elements as the learning rate.",
         sep=",",
-        argstr="--iterations %s")
+        argstr="--iterations %s",
+    )
     learningrate = InputMultiPath(
         traits.Float,
-        desc=
-        "Comma separated list of learning rates. Learning rate is a scale factor on the gradient of the registration objective function (gradient with respect to the parameters of the transformation) used to update the parameters of the transformation during optimization. Smaller values cause the optimizer to take smaller steps through the parameter space. Larger values are typically used early in the registration process to take large jumps in parameter space followed by smaller values to home in on the optimum value of the registration objective function. Default is: 0.01, 0.005, 0.0005, 0.0002. Must have the same number of elements as iterations.",
+        desc="Comma separated list of learning rates. Learning rate is a scale factor on the gradient of the registration objective function (gradient with respect to the parameters of the transformation) used to update the parameters of the transformation during optimization. Smaller values cause the optimizer to take smaller steps through the parameter space. Larger values are typically used early in the registration process to take large jumps in parameter space followed by smaller values to home in on the optimum value of the registration objective function. Default is: 0.01, 0.005, 0.0005, 0.0002. Must have the same number of elements as iterations.",
         sep=",",
-        argstr="--learningrate %s")
+        argstr="--learningrate %s",
+    )
     translationscale = traits.Float(
-        desc=
-        "Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used 1/(TranslationScale^2)). This parameter is used to \'weight\' or \'standardized\' the transform parameters and their effect on the registration objective function.",
-        argstr="--translationscale %f")
+        desc="Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used 1/(TranslationScale^2)). This parameter is used to 'weight' or 'standardized' the transform parameters and their effect on the registration objective function.",
+        argstr="--translationscale %f",
+    )
     initialtransform = File(
-        desc=
-        "Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.",
+        desc="Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.",
         exists=True,
-        argstr="--initialtransform %s")
+        argstr="--initialtransform %s",
+    )
     FixedImageFileName = File(
-        position=-2,
-        desc="Fixed image to which to register",
-        exists=True,
-        argstr="%s")
+        position=-2, desc="Fixed image to which to register", exists=True, argstr="%s"
+    )
     MovingImageFileName = File(
-        position=-1, desc="Moving image", exists=True, argstr="%s")
+        position=-1, desc="Moving image", exists=True, argstr="%s"
+    )
     outputtransform = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
-        argstr="--outputtransform %s")
+        desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
+        argstr="--outputtransform %s",
+    )
     resampledmovingfilename = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
-        argstr="--resampledmovingfilename %s")
+        desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
+        argstr="--resampledmovingfilename %s",
+    )
 
 
 class LinearRegistrationOutputSpec(TraitedSpec):
     outputtransform = File(
-        desc=
-        "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
-        exists=True)
+        desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).",
+        exists=True,
+    )
     resampledmovingfilename = File(
-        desc=
-        "Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
-        exists=True)
+        desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).",
+        exists=True,
+    )
 
 
 class LinearRegistration(SEMLikeCommandLine):
     """title: Linear Registration
 
-category: Legacy.Registration
-
-description: Registers two images together using a rigid transform and mutual information.
+    category: Legacy.Registration
 
-version: 0.1.0.$Revision: 19608 $(alpha)
+    description: Registers two images together using a rigid transform and mutual information.
 
-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/LinearRegistration
+    version: 0.1.0.$Revision: 19608 $(alpha)
 
-contributor: Daniel Blezek (GE)
+    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/LinearRegistration
 
-acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    contributor: Daniel Blezek (GE)
 
-"""
+    acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    """
 
     input_spec = LinearRegistrationInputSpec
     output_spec = LinearRegistrationOutputSpec
     _cmd = "LinearRegistration "
     _outputs_filenames = {
-        'resampledmovingfilename': 'resampledmovingfilename.nii',
-        'outputtransform': 'outputtransform.txt'
+        "resampledmovingfilename": "resampledmovingfilename.nii",
+        "outputtransform": "outputtransform.txt",
     }
 
 
 class ExpertAutomatedRegistrationInputSpec(CommandLineInputSpec):
     fixedImage = File(
         position=-2,
-        desc=
-        "Image which defines the space into which the moving image is registered",
+        desc="Image which defines the space into which the moving image is registered",
         exists=True,
-        argstr="%s")
+        argstr="%s",
+    )
     movingImage = File(
         position=-1,
-        desc=
-        "The transform goes from the fixed image's space into the moving image's space",
+        desc="The transform goes from the fixed image's space into the moving image's space",
         exists=True,
-        argstr="%s")
+        argstr="%s",
+    )
     resampledImage = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc="Registration results",
-        argstr="--resampledImage %s")
+        argstr="--resampledImage %s",
+    )
     loadTransform = File(
         desc="Load a transform that is immediately applied to the moving image",
         exists=True,
-        argstr="--loadTransform %s")
+        argstr="--loadTransform %s",
+    )
     saveTransform = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc="Save the transform that results from registration",
-        argstr="--saveTransform %s")
+        argstr="--saveTransform %s",
+    )
     initialization = traits.Enum(
         "None",
         "Landmarks",
@@ -530,7 +534,8 @@ class ExpertAutomatedRegistrationInputSpec(CommandLineInputSpec):
         "CentersOfMass",
         "SecondMoments",
         desc="Method to prime the registration process",
-        argstr="--initialization %s")
+        argstr="--initialization %s",
+    )
     registration = traits.Enum(
         "None",
         "Initial",
@@ -541,115 +546,129 @@ class ExpertAutomatedRegistrationInputSpec(CommandLineInputSpec):
         "PipelineAffine",
         "PipelineBSpline",
         desc="Method for the registration process",
-        argstr="--registration %s")
+        argstr="--registration %s",
+    )
     metric = traits.Enum(
         "MattesMI",
         "NormCorr",
         "MeanSqrd",
         desc="Method to quantify image match",
-        argstr="--metric %s")
+        argstr="--metric %s",
+    )
     expectedOffset = traits.Float(
-        desc="Expected misalignment after initialization",
-        argstr="--expectedOffset %f")
+        desc="Expected misalignment after initialization", argstr="--expectedOffset %f"
+    )
     expectedRotation = traits.Float(
         desc="Expected misalignment after initialization",
-        argstr="--expectedRotation %f")
+        argstr="--expectedRotation %f",
+    )
     expectedScale = traits.Float(
-        desc="Expected misalignment after initialization",
-        argstr="--expectedScale %f")
+        desc="Expected misalignment after initialization", argstr="--expectedScale %f"
+    )
     expectedSkew = traits.Float(
-        desc="Expected misalignment after initialization",
-        argstr="--expectedSkew %f")
+        desc="Expected misalignment after initialization", argstr="--expectedSkew %f"
+    )
     verbosityLevel = traits.Enum(
         "Silent",
         "Standard",
         "Verbose",
         desc="Level of detail of reporting progress",
-        argstr="--verbosityLevel %s")
+        argstr="--verbosityLevel %s",
+    )
     sampleFromOverlap = traits.Bool(
-        desc=
-        "Limit metric evaluation to the fixed image region overlapped by the moving image",
-        argstr="--sampleFromOverlap ")
+        desc="Limit metric evaluation to the fixed image region overlapped by the moving image",
+        argstr="--sampleFromOverlap ",
+    )
     fixedImageMask = File(
         desc="Image which defines a mask for the fixed image",
         exists=True,
-        argstr="--fixedImageMask %s")
+        argstr="--fixedImageMask %s",
+    )
     randomNumberSeed = traits.Int(
         desc="Seed to generate a consistent random number sequence",
-        argstr="--randomNumberSeed %d")
+        argstr="--randomNumberSeed %d",
+    )
     numberOfThreads = traits.Int(
-        desc="Number of CPU threads to use", argstr="--numberOfThreads %d")
+        desc="Number of CPU threads to use", argstr="--numberOfThreads %d"
+    )
     minimizeMemory = traits.Bool(
-        desc=
-        "Reduce the amount of memory required at the cost of increased computation time",
-        argstr="--minimizeMemory ")
+        desc="Reduce the amount of memory required at the cost of increased computation time",
+        argstr="--minimizeMemory ",
+    )
     interpolation = traits.Enum(
         "NearestNeighbor",
         "Linear",
         "BSpline",
         desc="Method for interpolation within the optimization process",
-        argstr="--interpolation %s")
+        argstr="--interpolation %s",
+    )
     fixedLandmarks = InputMultiPath(
         traits.List(traits.Float(), minlen=3, maxlen=3),
         desc="Ordered list of landmarks in the fixed image",
-        argstr="--fixedLandmarks %s...")
+        argstr="--fixedLandmarks %s...",
+    )
     movingLandmarks = InputMultiPath(
         traits.List(traits.Float(), minlen=3, maxlen=3),
         desc="Ordered list of landmarks in the moving image",
-        argstr="--movingLandmarks %s...")
+        argstr="--movingLandmarks %s...",
+    )
     rigidMaxIterations = traits.Int(
         desc="Maximum number of rigid optimization iterations",
-        argstr="--rigidMaxIterations %d")
+        argstr="--rigidMaxIterations %d",
+    )
     rigidSamplingRatio = traits.Float(
-        desc=
-        "Portion of the image to use in computing the metric during rigid registration",
-        argstr="--rigidSamplingRatio %f")
+        desc="Portion of the image to use in computing the metric during rigid registration",
+        argstr="--rigidSamplingRatio %f",
+    )
     affineMaxIterations = traits.Int(
         desc="Maximum number of affine optimization iterations",
-        argstr="--affineMaxIterations %d")
+        argstr="--affineMaxIterations %d",
+    )
     affineSamplingRatio = traits.Float(
-        desc=
-        "Portion of the image to use in computing the metric during affine registration",
-        argstr="--affineSamplingRatio %f")
+        desc="Portion of the image to use in computing the metric during affine registration",
+        argstr="--affineSamplingRatio %f",
+    )
     bsplineMaxIterations = traits.Int(
         desc="Maximum number of bspline optimization iterations",
-        argstr="--bsplineMaxIterations %d")
+        argstr="--bsplineMaxIterations %d",
+    )
     bsplineSamplingRatio = traits.Float(
-        desc=
-        "Portion of the image to use in computing the metric during BSpline registration",
-        argstr="--bsplineSamplingRatio %f")
+        desc="Portion of the image to use in computing the metric during BSpline registration",
+        argstr="--bsplineSamplingRatio %f",
+    )
     controlPointSpacing = traits.Int(
         desc="Number of pixels between control points",
-        argstr="--controlPointSpacing %d")
+        argstr="--controlPointSpacing %d",
+    )
 
 
 class ExpertAutomatedRegistrationOutputSpec(TraitedSpec):
     resampledImage = File(desc="Registration results", exists=True)
     saveTransform = File(
-        desc="Save the transform that results from registration", exists=True)
+        desc="Save the transform that results from registration", exists=True
+    )
 
 
 class ExpertAutomatedRegistration(SEMLikeCommandLine):
     """title: Expert Automated Registration
 
-category: Legacy.Registration
-
-description: Provides rigid, affine, and BSpline registration methods via a simple GUI
+    category: Legacy.Registration
 
-version: 0.1.0.$Revision: 2104 $(alpha)
+    description: Provides rigid, affine, and BSpline registration methods via a simple GUI
 
-documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ExpertAutomatedRegistration
+    version: 0.1.0.$Revision: 2104 $(alpha)
 
-contributor: Stephen R Aylward (Kitware), Casey B Goodlett (Kitware)
+    documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ExpertAutomatedRegistration
 
-acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    contributor: Stephen R Aylward (Kitware), Casey B Goodlett (Kitware)
 
-"""
+    acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    """
 
     input_spec = ExpertAutomatedRegistrationInputSpec
     output_spec = ExpertAutomatedRegistrationOutputSpec
     _cmd = "ExpertAutomatedRegistration "
     _outputs_filenames = {
-        'resampledImage': 'resampledImage.nii',
-        'saveTransform': 'saveTransform.txt'
+        "resampledImage": "resampledImage.nii",
+        "saveTransform": "saveTransform.txt",
     }
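
ExpertAutomatedRegistration bundles the rigid, affine, and BSpline stages behind one command; a sketch using enum values taken from the spec above (paths are placeholders):

    from nipype.interfaces.slicer.legacy.registration import ExpertAutomatedRegistration

    expert = ExpertAutomatedRegistration()
    expert.inputs.fixedImage = "fixed.nii"          # placeholder
    expert.inputs.movingImage = "moving.nii"        # placeholder
    expert.inputs.initialization = "CentersOfMass"  # one of the initialization enum values
    expert.inputs.registration = "PipelineAffine"   # pipeline up to the affine stage
    expert.inputs.saveTransform = True              # -> saveTransform.txt
    print(expert.cmdline)
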
diff --git a/nipype/interfaces/slicer/legacy/segmentation.py b/nipype/interfaces/slicer/legacy/segmentation.py
index 3500d50d50..664d9913ae 100644
--- a/nipype/interfaces/slicer/legacy/segmentation.py
+++ b/nipype/interfaces/slicer/legacy/segmentation.py
@@ -1,41 +1,43 @@
-# -*- coding: utf-8 -*-
-# -*- coding: utf8 -*-
 """Autogenerated file - DO NOT EDIT
 If you spot a bug, please report it on the mailing list and/or change the generator."""
-from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath
-import os
+from nipype.interfaces.base import (
+    CommandLineInputSpec,
+    SEMLikeCommandLine,
+    TraitedSpec,
+    File,
+    traits,
+)
 
 
 class OtsuThresholdSegmentationInputSpec(CommandLineInputSpec):
     brightObjects = traits.Bool(
-        desc=
-        "Segmenting bright objects on a dark background or dark objects on a bright background.",
-        argstr="--brightObjects ")
+        desc="Segmenting bright objects on a dark background or dark objects on a bright background.",
+        argstr="--brightObjects ",
+    )
     numberOfBins = traits.Int(
-        desc=
-        "This is an advanced parameter. The number of bins in the histogram used to model the probability mass function of the two intensity distributions. Small numbers of bins may result in a more conservative threshold. The default should suffice for most applications. Experimentation is the only way to see the effect of varying this parameter.",
-        argstr="--numberOfBins %d")
+        desc="This is an advanced parameter. The number of bins in the histogram used to model the probability mass function of the two intensity distributions. Small numbers of bins may result in a more conservative threshold. The default should suffice for most applications. Experimentation is the only way to see the effect of varying this parameter.",
+        argstr="--numberOfBins %d",
+    )
     faceConnected = traits.Bool(
-        desc=
-        "This is an advanced parameter. Adjacent voxels are face connected. This affects the connected component algorithm. If this parameter is false, more regions are likely to be identified.",
-        argstr="--faceConnected ")
+        desc="This is an advanced parameter. Adjacent voxels are face connected. This affects the connected component algorithm. If this parameter is false, more regions are likely to be identified.",
+        argstr="--faceConnected ",
+    )
     minimumObjectSize = traits.Int(
-        desc=
-        "Minimum size of object to retain. This parameter can be used to get rid of small regions in noisy images.",
-        argstr="--minimumObjectSize %d")
+        desc="Minimum size of object to retain. This parameter can be used to get rid of small regions in noisy images.",
+        argstr="--minimumObjectSize %d",
+    )
     inputVolume = File(
-        position=-2,
-        desc="Input volume to be segmented",
-        exists=True,
-        argstr="%s")
+        position=-2, desc="Input volume to be segmented", exists=True, argstr="%s"
+    )
     outputVolume = traits.Either(
         traits.Bool,
         File(),
         position=-1,
         hash_files=False,
         desc="Output filtered",
-        argstr="%s")
+        argstr="%s",
+    )
 
 
 class OtsuThresholdSegmentationOutputSpec(TraitedSpec):
@@ -45,21 +47,20 @@ class OtsuThresholdSegmentationOutputSpec(TraitedSpec):
 
 class OtsuThresholdSegmentation(SEMLikeCommandLine):
     """title: Otsu Threshold Segmentation
 
-category: Legacy.Segmentation
+    category: Legacy.Segmentation
 
-description: This filter creates a labeled image from a grayscale image. First, it calculates an optimal threshold that separates the image into foreground and background. This threshold separates those two classes so that their intra-class variance is minimal (see http://en.wikipedia.org/wiki/Otsu%27s_method). Then the filter runs a connected component algorithm to generate unique labels for each connected region of the foreground. Finally, the resulting image is relabeled to provide consecutive numbering.
+    description: This filter creates a labeled image from a grayscale image. First, it calculates an optimal threshold that separates the image into foreground and background. This threshold separates those two classes so that their intra-class variance is minimal (see http://en.wikipedia.org/wiki/Otsu%27s_method). Then the filter runs a connected component algorithm to generate unique labels for each connected region of the foreground. Finally, the resulting image is relabeled to provide consecutive numbering.
 
-version: 1.0
+    version: 1.0
 
-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/OtsuThresholdSegmentation
+    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/OtsuThresholdSegmentation
 
-contributor: Bill Lorensen (GE)
+    contributor: Bill Lorensen (GE)
 
-acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
-
-"""
+    acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    """
 
     input_spec = OtsuThresholdSegmentationInputSpec
     output_spec = OtsuThresholdSegmentationOutputSpec
     _cmd = "OtsuThresholdSegmentation "
-    _outputs_filenames = {'outputVolume': 'outputVolume.nii'}
+    _outputs_filenames = {"outputVolume": "outputVolume.nii"}
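
A matching sketch for the OtsuThresholdSegmentation interface above; the input is a placeholder and the output label volume is auto-named via _outputs_filenames:

    from nipype.interfaces.slicer.legacy.segmentation import OtsuThresholdSegmentation

    otsu = OtsuThresholdSegmentation()
    otsu.inputs.inputVolume = "t1.nii"   # placeholder grayscale image
    otsu.inputs.brightObjects = True     # segment bright objects on a dark background
    otsu.inputs.minimumObjectSize = 100  # illustrative: drop tiny connected components
    otsu.inputs.outputVolume = True      # -> outputVolume.nii
    print(otsu.cmdline)
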
+ """ input_spec = OtsuThresholdSegmentationInputSpec output_spec = OtsuThresholdSegmentationOutputSpec _cmd = "OtsuThresholdSegmentation " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/legacy/tests/__init__.py b/nipype/interfaces/slicer/legacy/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/legacy/tests/__init__.py +++ b/nipype/interfaces/slicer/legacy/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_AffineRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_AffineRegistration.py index 949cdc7f61..bb2de08cfb 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_AffineRegistration.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_AffineRegistration.py @@ -1,48 +1,72 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import AffineRegistration def test_AffineRegistration_inputs(): input_map = dict( FixedImageFileName=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), MovingImageFileName=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - fixedsmoothingfactor=dict(argstr='--fixedsmoothingfactor %d', ), - histogrambins=dict(argstr='--histogrambins %d', ), - initialtransform=dict(argstr='--initialtransform %s', ), - iterations=dict(argstr='--iterations %d', ), - movingsmoothingfactor=dict(argstr='--movingsmoothingfactor %d', ), + fixedsmoothingfactor=dict( + argstr="--fixedsmoothingfactor %d", + ), + histogrambins=dict( + argstr="--histogrambins %d", + ), + initialtransform=dict( + argstr="--initialtransform %s", + extensions=None, + ), + iterations=dict( + argstr="--iterations %d", + ), + movingsmoothingfactor=dict( + argstr="--movingsmoothingfactor %d", + ), outputtransform=dict( - argstr='--outputtransform %s', + argstr="--outputtransform %s", hash_files=False, ), resampledmovingfilename=dict( - argstr='--resampledmovingfilename %s', + argstr="--resampledmovingfilename %s", hash_files=False, ), - spatialsamples=dict(argstr='--spatialsamples %d', ), - translationscale=dict(argstr='--translationscale %f', ), + spatialsamples=dict( + argstr="--spatialsamples %d", + ), + translationscale=dict( + argstr="--translationscale %f", + ), ) inputs = AffineRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AffineRegistration_outputs(): output_map = dict( - outputtransform=dict(), - resampledmovingfilename=dict(), + outputtransform=dict( + extensions=None, + ), + resampledmovingfilename=dict( + extensions=None, + ), ) outputs = AffineRegistration.output_spec() diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py index 17339f8859..41b316e7dc 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py @@ -1,54 +1,82 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import BSplineDeformableRegistration def 
diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py
index 17339f8859..41b316e7dc 100644
--- a/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py
+++ b/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py
@@ -1,54 +1,82 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..registration import BSplineDeformableRegistration
 
 
 def test_BSplineDeformableRegistration_inputs():
     input_map = dict(
         FixedImageFileName=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             position=-2,
         ),
         MovingImageFileName=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             position=-1,
         ),
-        args=dict(argstr='%s', ),
-        constrain=dict(argstr='--constrain ', ),
-        default=dict(argstr='--default %d', ),
+        args=dict(
+            argstr="%s",
+        ),
+        constrain=dict(
+            argstr="--constrain ",
+        ),
+        default=dict(
+            argstr="--default %d",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        gridSize=dict(argstr='--gridSize %d', ),
-        histogrambins=dict(argstr='--histogrambins %d', ),
-        initialtransform=dict(argstr='--initialtransform %s', ),
-        iterations=dict(argstr='--iterations %d', ),
-        maximumDeformation=dict(argstr='--maximumDeformation %f', ),
+        gridSize=dict(
+            argstr="--gridSize %d",
+        ),
+        histogrambins=dict(
+            argstr="--histogrambins %d",
+        ),
+        initialtransform=dict(
+            argstr="--initialtransform %s",
+            extensions=None,
+        ),
+        iterations=dict(
+            argstr="--iterations %d",
+        ),
+        maximumDeformation=dict(
+            argstr="--maximumDeformation %f",
+        ),
         outputtransform=dict(
-            argstr='--outputtransform %s',
+            argstr="--outputtransform %s",
             hash_files=False,
         ),
         outputwarp=dict(
-            argstr='--outputwarp %s',
+            argstr="--outputwarp %s",
             hash_files=False,
         ),
         resampledmovingfilename=dict(
-            argstr='--resampledmovingfilename %s',
+            argstr="--resampledmovingfilename %s",
             hash_files=False,
         ),
-        spatialsamples=dict(argstr='--spatialsamples %d', ),
+        spatialsamples=dict(
+            argstr="--spatialsamples %d",
+        ),
     )
     inputs = BSplineDeformableRegistration.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_BSplineDeformableRegistration_outputs():
     output_map = dict(
-        outputtransform=dict(),
-        outputwarp=dict(),
-        resampledmovingfilename=dict(),
+        outputtransform=dict(
+            extensions=None,
+        ),
+        outputwarp=dict(
+            extensions=None,
+        ),
+        resampledmovingfilename=dict(
+            extensions=None,
+        ),
     )
     outputs = BSplineDeformableRegistration.output_spec()
diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineToDeformationField.py b/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineToDeformationField.py
index 8335515c13..fbd37eeb8e 100644
--- a/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineToDeformationField.py
+++ b/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineToDeformationField.py
@@ -1,29 +1,42 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..converters import BSplineToDeformationField
 
 
 def test_BSplineToDeformationField_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(
+            argstr="%s",
+        ),
         defImage=dict(
-            argstr='--defImage %s',
+            argstr="--defImage %s",
             hash_files=False,
         ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        refImage=dict(argstr='--refImage %s', ),
-        tfm=dict(argstr='--tfm %s', ),
+        refImage=dict(
+            argstr="--refImage %s",
+            extensions=None,
+        ),
+        tfm=dict(
+            argstr="--tfm %s",
+            extensions=None,
+        ),
     )
     inputs = BSplineToDeformationField.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_BSplineToDeformationField_outputs():
-    output_map = dict(defImage=dict(), )
+    output_map = dict(
+        defImage=dict(
+            extensions=None,
+        ),
+    )
     outputs = BSplineToDeformationField.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_ExpertAutomatedRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_ExpertAutomatedRegistration.py
index 7fa8b77d63..28f4e19d7b 100644
--- a/nipype/interfaces/slicer/legacy/tests/test_auto_ExpertAutomatedRegistration.py
+++ b/nipype/interfaces/slicer/legacy/tests/test_auto_ExpertAutomatedRegistration.py
@@ -1,65 +1,124 @@
 # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
-from __future__ import unicode_literals
 from ..registration import ExpertAutomatedRegistration
 
 
 def test_ExpertAutomatedRegistration_inputs():
     input_map = dict(
-        affineMaxIterations=dict(argstr='--affineMaxIterations %d', ),
-        affineSamplingRatio=dict(argstr='--affineSamplingRatio %f', ),
-        args=dict(argstr='%s', ),
-        bsplineMaxIterations=dict(argstr='--bsplineMaxIterations %d', ),
-        bsplineSamplingRatio=dict(argstr='--bsplineSamplingRatio %f', ),
-        controlPointSpacing=dict(argstr='--controlPointSpacing %d', ),
+        affineMaxIterations=dict(
+            argstr="--affineMaxIterations %d",
+        ),
+        affineSamplingRatio=dict(
+            argstr="--affineSamplingRatio %f",
+        ),
+        args=dict(
+            argstr="%s",
+        ),
+        bsplineMaxIterations=dict(
+            argstr="--bsplineMaxIterations %d",
+        ),
+        bsplineSamplingRatio=dict(
+            argstr="--bsplineSamplingRatio %f",
+        ),
+        controlPointSpacing=dict(
+            argstr="--controlPointSpacing %d",
+        ),
         environ=dict(
             nohash=True,
             usedefault=True,
         ),
-        expectedOffset=dict(argstr='--expectedOffset %f', ),
-        expectedRotation=dict(argstr='--expectedRotation %f', ),
-        expectedScale=dict(argstr='--expectedScale %f', ),
-        expectedSkew=dict(argstr='--expectedSkew %f', ),
+        expectedOffset=dict(
+            argstr="--expectedOffset %f",
+        ),
+        expectedRotation=dict(
+            argstr="--expectedRotation %f",
+        ),
+        expectedScale=dict(
+            argstr="--expectedScale %f",
+        ),
+        expectedSkew=dict(
+            argstr="--expectedSkew %f",
+        ),
         fixedImage=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             position=-2,
         ),
-        fixedImageMask=dict(argstr='--fixedImageMask %s', ),
-        fixedLandmarks=dict(argstr='--fixedLandmarks %s...', ),
-        initialization=dict(argstr='--initialization %s', ),
-        interpolation=dict(argstr='--interpolation %s', ),
-        loadTransform=dict(argstr='--loadTransform %s', ),
-        metric=dict(argstr='--metric %s', ),
-        minimizeMemory=dict(argstr='--minimizeMemory ', ),
+        fixedImageMask=dict(
+            argstr="--fixedImageMask %s",
+            extensions=None,
+        ),
+        fixedLandmarks=dict(
+            argstr="--fixedLandmarks %s...",
+        ),
+        initialization=dict(
+            argstr="--initialization %s",
+        ),
+        interpolation=dict(
+            argstr="--interpolation %s",
+        ),
+        loadTransform=dict(
+            argstr="--loadTransform %s",
+            extensions=None,
+        ),
+        metric=dict(
+            argstr="--metric %s",
+        ),
+        minimizeMemory=dict(
+            argstr="--minimizeMemory ",
+        ),
         movingImage=dict(
-            argstr='%s',
+            argstr="%s",
+            extensions=None,
             position=-1,
         ),
-        movingLandmarks=dict(argstr='--movingLandmarks %s...', ),
-        numberOfThreads=dict(argstr='--numberOfThreads %d', ),
-        randomNumberSeed=dict(argstr='--randomNumberSeed %d', ),
-        registration=dict(argstr='--registration %s', ),
+        movingLandmarks=dict(
+            argstr="--movingLandmarks %s...",
+        ),
+        numberOfThreads=dict(
+            argstr="--numberOfThreads %d",
+        ),
+        randomNumberSeed=dict(
+            argstr="--randomNumberSeed %d",
+        ),
+        registration=dict(
+            argstr="--registration %s",
+        ),
         resampledImage=dict(
-            argstr='--resampledImage %s',
+            argstr="--resampledImage %s",
             hash_files=False,
         ),
-        rigidMaxIterations=dict(argstr='--rigidMaxIterations %d', ),
-        rigidSamplingRatio=dict(argstr='--rigidSamplingRatio 
%f', ), - sampleFromOverlap=dict(argstr='--sampleFromOverlap ', ), + rigidMaxIterations=dict( + argstr="--rigidMaxIterations %d", + ), + rigidSamplingRatio=dict( + argstr="--rigidSamplingRatio %f", + ), + sampleFromOverlap=dict( + argstr="--sampleFromOverlap ", + ), saveTransform=dict( - argstr='--saveTransform %s', + argstr="--saveTransform %s", hash_files=False, ), - verbosityLevel=dict(argstr='--verbosityLevel %s', ), + verbosityLevel=dict( + argstr="--verbosityLevel %s", + ), ) inputs = ExpertAutomatedRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ExpertAutomatedRegistration_outputs(): output_map = dict( - resampledImage=dict(), - saveTransform=dict(), + resampledImage=dict( + extensions=None, + ), + saveTransform=dict( + extensions=None, + ), ) outputs = ExpertAutomatedRegistration.output_spec() diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_LinearRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_LinearRegistration.py index a55432f9b0..77fb5d69f6 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_LinearRegistration.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_LinearRegistration.py @@ -1,55 +1,77 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import LinearRegistration def test_LinearRegistration_inputs(): input_map = dict( FixedImageFileName=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), MovingImageFileName=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - fixedsmoothingfactor=dict(argstr='--fixedsmoothingfactor %d', ), - histogrambins=dict(argstr='--histogrambins %d', ), - initialtransform=dict(argstr='--initialtransform %s', ), + fixedsmoothingfactor=dict( + argstr="--fixedsmoothingfactor %d", + ), + histogrambins=dict( + argstr="--histogrambins %d", + ), + initialtransform=dict( + argstr="--initialtransform %s", + extensions=None, + ), iterations=dict( - argstr='--iterations %s', - sep=',', + argstr="--iterations %s", + sep=",", ), learningrate=dict( - argstr='--learningrate %s', - sep=',', + argstr="--learningrate %s", + sep=",", + ), + movingsmoothingfactor=dict( + argstr="--movingsmoothingfactor %d", ), - movingsmoothingfactor=dict(argstr='--movingsmoothingfactor %d', ), outputtransform=dict( - argstr='--outputtransform %s', + argstr="--outputtransform %s", hash_files=False, ), resampledmovingfilename=dict( - argstr='--resampledmovingfilename %s', + argstr="--resampledmovingfilename %s", hash_files=False, ), - spatialsamples=dict(argstr='--spatialsamples %d', ), - translationscale=dict(argstr='--translationscale %f', ), + spatialsamples=dict( + argstr="--spatialsamples %d", + ), + translationscale=dict( + argstr="--translationscale %f", + ), ) inputs = LinearRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LinearRegistration_outputs(): output_map = dict( - outputtransform=dict(), - resampledmovingfilename=dict(), + outputtransform=dict( + extensions=None, + ), + resampledmovingfilename=dict( + extensions=None, + ), ) outputs = LinearRegistration.output_spec() diff --git 
a/nipype/interfaces/slicer/legacy/tests/test_auto_MultiResolutionAffineRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_MultiResolutionAffineRegistration.py index 7903fd1d5c..0f1e19d4ba 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_MultiResolutionAffineRegistration.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_MultiResolutionAffineRegistration.py @@ -1,48 +1,72 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import MultiResolutionAffineRegistration def test_MultiResolutionAffineRegistration_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fixedImage=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - fixedImageMask=dict(argstr='--fixedImageMask %s', ), - fixedImageROI=dict(argstr='--fixedImageROI %s', ), - metricTolerance=dict(argstr='--metricTolerance %f', ), + fixedImageMask=dict( + argstr="--fixedImageMask %s", + extensions=None, + ), + fixedImageROI=dict( + argstr="--fixedImageROI %s", + ), + metricTolerance=dict( + argstr="--metricTolerance %f", + ), movingImage=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, ), - numIterations=dict(argstr='--numIterations %d', ), - numLineIterations=dict(argstr='--numLineIterations %d', ), + numIterations=dict( + argstr="--numIterations %d", + ), + numLineIterations=dict( + argstr="--numLineIterations %d", + ), resampledImage=dict( - argstr='--resampledImage %s', + argstr="--resampledImage %s", hash_files=False, ), saveTransform=dict( - argstr='--saveTransform %s', + argstr="--saveTransform %s", hash_files=False, ), - stepSize=dict(argstr='--stepSize %f', ), - stepTolerance=dict(argstr='--stepTolerance %f', ), + stepSize=dict( + argstr="--stepSize %f", + ), + stepTolerance=dict( + argstr="--stepTolerance %f", + ), ) inputs = MultiResolutionAffineRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultiResolutionAffineRegistration_outputs(): output_map = dict( - resampledImage=dict(), - saveTransform=dict(), + resampledImage=dict( + extensions=None, + ), + saveTransform=dict( + extensions=None, + ), ) outputs = MultiResolutionAffineRegistration.output_spec() diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdImageFilter.py b/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdImageFilter.py index a1af6c71b3..672d971471 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdImageFilter.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdImageFilter.py @@ -1,35 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..filtering import OtsuThresholdImageFilter def test_OtsuThresholdImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - insideValue=dict(argstr='--insideValue %d', ), - numberOfBins=dict(argstr='--numberOfBins %d', ), + insideValue=dict( + argstr="--insideValue %d", + ), + numberOfBins=dict( + argstr="--numberOfBins %d", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - outsideValue=dict(argstr='--outsideValue %d', ), + outsideValue=dict( 
+ argstr="--outsideValue %d", + ), ) inputs = OtsuThresholdImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_OtsuThresholdImageFilter_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = OtsuThresholdImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdSegmentation.py b/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdSegmentation.py index 02beeee464..a28c8231c8 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdSegmentation.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdSegmentation.py @@ -1,25 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..segmentation import OtsuThresholdSegmentation def test_OtsuThresholdSegmentation_inputs(): input_map = dict( - args=dict(argstr='%s', ), - brightObjects=dict(argstr='--brightObjects ', ), + args=dict( + argstr="%s", + ), + brightObjects=dict( + argstr="--brightObjects ", + ), environ=dict( nohash=True, usedefault=True, ), - faceConnected=dict(argstr='--faceConnected ', ), + faceConnected=dict( + argstr="--faceConnected ", + ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - minimumObjectSize=dict(argstr='--minimumObjectSize %d', ), - numberOfBins=dict(argstr='--numberOfBins %d', ), + minimumObjectSize=dict( + argstr="--minimumObjectSize %d", + ), + numberOfBins=dict( + argstr="--numberOfBins %d", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -29,8 +39,15 @@ def test_OtsuThresholdSegmentation_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_OtsuThresholdSegmentation_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = OtsuThresholdSegmentation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_ResampleScalarVolume.py b/nipype/interfaces/slicer/legacy/tests/test_auto_ResampleScalarVolume.py index baa698246c..2f47b3bd16 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_ResampleScalarVolume.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_ResampleScalarVolume.py @@ -1,28 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..filtering import ResampleScalarVolume def test_ResampleScalarVolume_inputs(): input_map = dict( InputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), OutputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - interpolation=dict(argstr='--interpolation %s', ), + interpolation=dict( + argstr="--interpolation %s", + ), spacing=dict( - argstr='--spacing %s', - sep=',', + argstr="--spacing %s", + sep=",", ), ) inputs = ResampleScalarVolume.input_spec() @@ -30,8 +34,15 @@ def test_ResampleScalarVolume_inputs(): for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ResampleScalarVolume_outputs(): - output_map = dict(OutputVolume=dict(position=-1, ), ) + output_map = dict( + OutputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = ResampleScalarVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_RigidRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_RigidRegistration.py index 6c2a0eb072..3e8aba0e4d 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_RigidRegistration.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_RigidRegistration.py @@ -1,56 +1,80 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..registration import RigidRegistration def test_RigidRegistration_inputs(): input_map = dict( FixedImageFileName=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), MovingImageFileName=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - fixedsmoothingfactor=dict(argstr='--fixedsmoothingfactor %d', ), - histogrambins=dict(argstr='--histogrambins %d', ), - initialtransform=dict(argstr='--initialtransform %s', ), + fixedsmoothingfactor=dict( + argstr="--fixedsmoothingfactor %d", + ), + histogrambins=dict( + argstr="--histogrambins %d", + ), + initialtransform=dict( + argstr="--initialtransform %s", + extensions=None, + ), iterations=dict( - argstr='--iterations %s', - sep=',', + argstr="--iterations %s", + sep=",", ), learningrate=dict( - argstr='--learningrate %s', - sep=',', + argstr="--learningrate %s", + sep=",", + ), + movingsmoothingfactor=dict( + argstr="--movingsmoothingfactor %d", ), - movingsmoothingfactor=dict(argstr='--movingsmoothingfactor %d', ), outputtransform=dict( - argstr='--outputtransform %s', + argstr="--outputtransform %s", hash_files=False, ), resampledmovingfilename=dict( - argstr='--resampledmovingfilename %s', + argstr="--resampledmovingfilename %s", hash_files=False, ), - spatialsamples=dict(argstr='--spatialsamples %d', ), - testingmode=dict(argstr='--testingmode ', ), - translationscale=dict(argstr='--translationscale %f', ), + spatialsamples=dict( + argstr="--spatialsamples %d", + ), + testingmode=dict( + argstr="--testingmode ", + ), + translationscale=dict( + argstr="--translationscale %f", + ), ) inputs = RigidRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RigidRegistration_outputs(): output_map = dict( - outputtransform=dict(), - resampledmovingfilename=dict(), + outputtransform=dict( + extensions=None, + ), + resampledmovingfilename=dict( + extensions=None, + ), ) outputs = RigidRegistration.output_spec() diff --git a/nipype/interfaces/slicer/quantification/__init__.py b/nipype/interfaces/slicer/quantification/__init__.py index 6054dddd59..c34d46d1b5 100644 --- a/nipype/interfaces/slicer/quantification/__init__.py +++ b/nipype/interfaces/slicer/quantification/__init__.py @@ -1,4 +1,2 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import from .changequantification import IntensityDifferenceMetric from .petstandarduptakevaluecomputation import PETStandardUptakeValueComputation diff --git a/nipype/interfaces/slicer/quantification/changequantification.py 
b/nipype/interfaces/slicer/quantification/changequantification.py index 5abf1b1287..952a8d6e1d 100644 --- a/nipype/interfaces/slicer/quantification/changequantification.py +++ b/nipype/interfaces/slicer/quantification/changequantification.py @@ -1,86 +1,90 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class IntensityDifferenceMetricInputSpec(CommandLineInputSpec): sensitivityThreshold = traits.Float( - desc= - "This parameter should be between 0 and 1, and defines how sensitive the metric should be to the intensity changes.", - argstr="--sensitivityThreshold %f") + desc="This parameter should be between 0 and 1, and defines how sensitive the metric should be to the intensity changes.", + argstr="--sensitivityThreshold %f", + ) changingBandSize = traits.Int( - desc= - "How far (in mm) from the boundary of the segmentation should the intensity changes be considered.", - argstr="--changingBandSize %d") + desc="How far (in mm) from the boundary of the segmentation should the intensity changes be considered.", + argstr="--changingBandSize %d", + ) baselineVolume = File( - position=-4, - desc="Baseline volume to be compared to", - exists=True, - argstr="%s") + position=-4, desc="Baseline volume to be compared to", exists=True, argstr="%s" + ) baselineSegmentationVolume = File( position=-3, - desc= - "Label volume that contains segmentation of the structure of interest in the baseline volume.", + desc="Label volume that contains segmentation of the structure of interest in the baseline volume.", exists=True, - argstr="%s") + argstr="%s", + ) followupVolume = File( position=-2, desc="Followup volume to be compare to the baseline", exists=True, - argstr="%s") + argstr="%s", + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output volume to keep the results of change quantification.", - argstr="%s") + argstr="%s", + ) reportFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="Report file name", - argstr="--reportFileName %s") + argstr="--reportFileName %s", + ) class IntensityDifferenceMetricOutputSpec(TraitedSpec): outputVolume = File( position=-1, desc="Output volume to keep the results of change quantification.", - exists=True) + exists=True, + ) reportFileName = File(desc="Report file name", exists=True) class IntensityDifferenceMetric(SEMLikeCommandLine): """title: - Intensity Difference Change Detection (FAST) - + Intensity Difference Change Detection (FAST) -category: - Quantification.ChangeQuantification + category: + Quantification.ChangeQuantification -description: - Quantifies the changes between two spatially aligned images based on the pixel-wise difference of image intensities. + description: + Quantifies the changes between two spatially aligned images based on the pixel-wise difference of image intensities. 
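# A minimal usage sketch for the IntensityDifferenceMetric interface above
# (not part of the upstream diff; nipype must be installed, the file names are
# hypothetical, and they must point to existing files before assignment, since
# these input traits are declared with exists=True):
from nipype.interfaces.slicer.quantification.changequantification import (
    IntensityDifferenceMetric,
)

idm = IntensityDifferenceMetric()
idm.inputs.baselineVolume = "baseline.nii"  # position=-4
idm.inputs.baselineSegmentationVolume = "baseline_seg.nii"  # position=-3
idm.inputs.followupVolume = "followup.nii"  # position=-2
idm.inputs.outputVolume = True  # default name taken from _outputs_filenames
# Roughly: "IntensityDifferenceMetric baseline.nii baseline_seg.nii
#           followup.nii .../outputVolume.nii"
print(idm.cmdline)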
-version: 0.1 -contributor: Andrey Fedorov + version: 0.1 -acknowledgements: + contributor: Andrey Fedorov + acknowledgements: -""" + """ input_spec = IntensityDifferenceMetricInputSpec output_spec = IntensityDifferenceMetricOutputSpec _cmd = "IntensityDifferenceMetric " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'reportFileName': 'reportFileName' + "outputVolume": "outputVolume.nii", + "reportFileName": "reportFileName", } diff --git a/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py b/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py index 0edfca3fbb..4861eca651 100644 --- a/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py +++ b/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py @@ -1,75 +1,81 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, +) class PETStandardUptakeValueComputationInputSpec(CommandLineInputSpec): petDICOMPath = Directory( - desc= - "Input path to a directory containing a PET volume containing DICOM header information for SUV computation", + desc="Input path to a directory containing a PET volume containing DICOM header information for SUV computation", exists=True, - argstr="--petDICOMPath %s") + argstr="--petDICOMPath %s", + ) petVolume = File( - desc= - "Input PET volume for SUVbw computation (must be the same volume as pointed to by the DICOM path!).", + desc="Input PET volume for SUVbw computation (must be the same volume as pointed to by the DICOM path!).", exists=True, - argstr="--petVolume %s") + argstr="--petVolume %s", + ) labelMap = File( desc="Input label volume containing the volumes of interest", exists=True, - argstr="--labelMap %s") + argstr="--labelMap %s", + ) color = File( desc="Color table to to map labels to colors and names", exists=True, - argstr="--color %s") + argstr="--color %s", + ) csvFile = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "A file holding the output SUV values in comma separated lines, one per label. Optional.", - argstr="--csvFile %s") + desc="A file holding the output SUV values in comma separated lines, one per label. Optional.", + argstr="--csvFile %s", + ) OutputLabel = traits.Str( desc="List of labels for which SUV values were computed", - argstr="--OutputLabel %s") + argstr="--OutputLabel %s", + ) OutputLabelValue = traits.Str( desc="List of label values for which SUV values were computed", - argstr="--OutputLabelValue %s") + argstr="--OutputLabelValue %s", + ) SUVMax = traits.Str(desc="SUV max for each label", argstr="--SUVMax %s") SUVMean = traits.Str(desc="SUV mean for each label", argstr="--SUVMean %s") - SUVMin = traits.Str( - desc="SUV minimum for each label", argstr="--SUVMin %s") + SUVMin = traits.Str(desc="SUV minimum for each label", argstr="--SUVMin %s") class PETStandardUptakeValueComputationOutputSpec(TraitedSpec): csvFile = File( - desc= - "A file holding the output SUV values in comma separated lines, one per label. Optional.", - exists=True) + desc="A file holding the output SUV values in comma separated lines, one per label. 
Optional.", + exists=True, + ) class PETStandardUptakeValueComputation(SEMLikeCommandLine): """title: PET Standard Uptake Value Computation -category: Quantification - -description: Computes the standardized uptake value based on body weight. Takes an input PET image in DICOM and NRRD format (DICOM header must contain Radiopharmaceutical parameters). Produces a CSV file that contains patientID, studyDate, dose, labelID, suvmin, suvmax, suvmean, labelName for each volume of interest. It also displays some of the information as output strings in the GUI, the CSV file is optional in that case. The CSV file is appended to on each execution of the CLI. + category: Quantification -version: 0.1.0.$Revision: 8595 $(alpha) + description: Computes the standardized uptake value based on body weight. Takes an input PET image in DICOM and NRRD format (DICOM header must contain Radiopharmaceutical parameters). Produces a CSV file that contains patientID, studyDate, dose, labelID, suvmin, suvmax, suvmean, labelName for each volume of interest. It also displays some of the information as output strings in the GUI, the CSV file is optional in that case. The CSV file is appended to on each execution of the CLI. -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ComputeSUVBodyWeight + version: 0.1.0.$Revision: 8595 $(alpha) -contributor: Wendy Plesniak (SPL, BWH), Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ComputeSUVBodyWeight -acknowledgements: This work is funded by the Harvard Catalyst, and the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Wendy Plesniak (SPL, BWH), Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) -""" + acknowledgements: This work is funded by the Harvard Catalyst, and the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = PETStandardUptakeValueComputationInputSpec output_spec = PETStandardUptakeValueComputationOutputSpec _cmd = "PETStandardUptakeValueComputation " - _outputs_filenames = {'csvFile': 'csvFile.csv'} + _outputs_filenames = {"csvFile": "csvFile.csv"} diff --git a/nipype/interfaces/slicer/quantification/tests/__init__.py b/nipype/interfaces/slicer/quantification/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/quantification/tests/__init__.py +++ b/nipype/interfaces/slicer/quantification/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py b/nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py index e46f046add..6bc91e4d5e 100644 --- a/nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py +++ b/nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py @@ -1,48 +1,63 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..changequantification import IntensityDifferenceMetric def test_IntensityDifferenceMetric_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), baselineSegmentationVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), baselineVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-4, ), - changingBandSize=dict(argstr='--changingBandSize %d', ), + changingBandSize=dict( + argstr="--changingBandSize %d", + ), environ=dict( nohash=True, usedefault=True, ), followupVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), reportFileName=dict( - argstr='--reportFileName %s', + argstr="--reportFileName %s", hash_files=False, ), - sensitivityThreshold=dict(argstr='--sensitivityThreshold %f', ), + sensitivityThreshold=dict( + argstr="--sensitivityThreshold %f", + ), ) inputs = IntensityDifferenceMetric.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_IntensityDifferenceMetric_outputs(): output_map = dict( - outputVolume=dict(position=-1, ), - reportFileName=dict(), + outputVolume=dict( + extensions=None, + position=-1, + ), + reportFileName=dict( + extensions=None, + ), ) outputs = IntensityDifferenceMetric.output_spec() diff --git a/nipype/interfaces/slicer/quantification/tests/test_auto_PETStandardUptakeValueComputation.py b/nipype/interfaces/slicer/quantification/tests/test_auto_PETStandardUptakeValueComputation.py index 61141f65db..aec22b541f 100644 --- a/nipype/interfaces/slicer/quantification/tests/test_auto_PETStandardUptakeValueComputation.py +++ b/nipype/interfaces/slicer/quantification/tests/test_auto_PETStandardUptakeValueComputation.py @@ -1,36 +1,64 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..petstandarduptakevaluecomputation import PETStandardUptakeValueComputation def test_PETStandardUptakeValueComputation_inputs(): input_map = dict( - OutputLabel=dict(argstr='--OutputLabel %s', ), - OutputLabelValue=dict(argstr='--OutputLabelValue %s', ), - SUVMax=dict(argstr='--SUVMax %s', ), - SUVMean=dict(argstr='--SUVMean %s', ), - SUVMin=dict(argstr='--SUVMin %s', ), - args=dict(argstr='%s', ), - color=dict(argstr='--color %s', ), + 
OutputLabel=dict( + argstr="--OutputLabel %s", + ), + OutputLabelValue=dict( + argstr="--OutputLabelValue %s", + ), + SUVMax=dict( + argstr="--SUVMax %s", + ), + SUVMean=dict( + argstr="--SUVMean %s", + ), + SUVMin=dict( + argstr="--SUVMin %s", + ), + args=dict( + argstr="%s", + ), + color=dict( + argstr="--color %s", + extensions=None, + ), csvFile=dict( - argstr='--csvFile %s', + argstr="--csvFile %s", hash_files=False, ), environ=dict( nohash=True, usedefault=True, ), - labelMap=dict(argstr='--labelMap %s', ), - petDICOMPath=dict(argstr='--petDICOMPath %s', ), - petVolume=dict(argstr='--petVolume %s', ), + labelMap=dict( + argstr="--labelMap %s", + extensions=None, + ), + petDICOMPath=dict( + argstr="--petDICOMPath %s", + ), + petVolume=dict( + argstr="--petVolume %s", + extensions=None, + ), ) inputs = PETStandardUptakeValueComputation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PETStandardUptakeValueComputation_outputs(): - output_map = dict(csvFile=dict(), ) + output_map = dict( + csvFile=dict( + extensions=None, + ), + ) outputs = PETStandardUptakeValueComputation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/registration/__init__.py b/nipype/interfaces/slicer/registration/__init__.py index 375b9b5416..ddb3988eae 100644 --- a/nipype/interfaces/slicer/registration/__init__.py +++ b/nipype/interfaces/slicer/registration/__init__.py @@ -1,6 +1,8 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -from .specialized import (ACPCTransform, FiducialRegistration, - VBRAINSDemonWarp, BRAINSDemonWarp) +from .specialized import ( + ACPCTransform, + FiducialRegistration, + VBRAINSDemonWarp, + BRAINSDemonWarp, +) from .brainsresample import BRAINSResample from .brainsfit import BRAINSFit diff --git a/nipype/interfaces/slicer/registration/brainsfit.py b/nipype/interfaces/slicer/registration/brainsfit.py index adbd733976..079d538499 100644 --- a/nipype/interfaces/slicer/registration/brainsfit.py +++ b/nipype/interfaces/slicer/registration/brainsfit.py @@ -1,123 +1,129 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class BRAINSFitInputSpec(CommandLineInputSpec): fixedVolume = File( - desc= - "The fixed image for registration by mutual information optimization.", + desc="The fixed image for registration by mutual information optimization.", exists=True, - argstr="--fixedVolume %s") + argstr="--fixedVolume %s", + ) movingVolume = File( - desc= - "The moving image for registration by mutual information optimization.", + desc="The moving image for registration by mutual information optimization.", exists=True, - argstr="--movingVolume %s") + argstr="--movingVolume %s", + ) bsplineTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. 
NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS BSpline", - argstr="--bsplineTransform %s") + desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS BSpline", + argstr="--bsplineTransform %s", + ) linearTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS ---NOT--- BSpline", - argstr="--linearTransform %s") + desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS ---NOT--- BSpline", + argstr="--linearTransform %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Output image for registration. NOTE: You must select either the outputTransform or the outputVolume option.", - argstr="--outputVolume %s") + desc="(optional) Output image for registration. NOTE: You must select either the outputTransform or the outputVolume option.", + argstr="--outputVolume %s", + ) initialTransform = File( - desc= - "Filename of transform used to initialize the registration. This CAN NOT be used with either CenterOfHeadLAlign, MomentsAlign, GeometryAlign, or initialTransform file.", + desc="Filename of transform used to initialize the registration. This CAN NOT be used with either CenterOfHeadLAlign, MomentsAlign, GeometryAlign, or initialTransform file.", exists=True, - argstr="--initialTransform %s") + argstr="--initialTransform %s", + ) initializeTransformMode = traits.Enum( "Off", "useMomentsAlign", "useCenterOfHeadAlign", "useGeometryAlign", "useCenterOfROIAlign", - desc= - "Determine how to initialize the transform center. GeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. MomentsAlign assumes that the center of mass of the images represent similar structures. useCenterOfHeadAlign attempts to use the top of head and shape of neck to drive a center of mass estimate. Off assumes that the physical space of the images are close, and that centering in terms of the image Origins is a good starting point. This flag is mutually exclusive with the initialTransform flag.", - argstr="--initializeTransformMode %s") + desc="Determine how to initialize the transform center. GeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. MomentsAlign assumes that the center of mass of the images represent similar structures. useCenterOfHeadAlign attempts to use the top of head and shape of neck to drive a center of mass estimate. Off assumes that the physical space of the images are close, and that centering in terms of the image Origins is a good starting point. This flag is mutually exclusive with the initialTransform flag.", + argstr="--initializeTransformMode %s", + ) useRigid = traits.Bool( - desc= - "Perform a rigid registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useRigid ") + desc="Perform a rigid registration as part of the sequential registration steps. 
This family of options supersedes the use of transformType if any of them are set.", + argstr="--useRigid ", + ) useScaleVersor3D = traits.Bool( - desc= - "Perform a ScaleVersor3D registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useScaleVersor3D ") + desc="Perform a ScaleVersor3D registration as part of the sequential registration steps. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useScaleVersor3D ", + ) useScaleSkewVersor3D = traits.Bool( - desc= - "Perform a ScaleSkewVersor3D registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useScaleSkewVersor3D ") + desc="Perform a ScaleSkewVersor3D registration as part of the sequential registration steps. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useScaleSkewVersor3D ", + ) useAffine = traits.Bool( - desc= - "Perform an Affine registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useAffine ") + desc="Perform an Affine registration as part of the sequential registration steps. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useAffine ", + ) useBSpline = traits.Bool( - desc= - "Perform a BSpline registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useBSpline ") + desc="Perform a BSpline registration as part of the sequential registration steps. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useBSpline ", + ) numberOfSamples = traits.Int( - desc= - "The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation.", - argstr="--numberOfSamples %d") + desc="The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation.", + argstr="--numberOfSamples %d", + ) splineGridSize = InputMultiPath( traits.Int, - desc= - "The number of subdivisions of the BSpline Grid to be centered on the image space. Each dimension must have at least 3 subdivisions for the BSpline to be correctly computed. ", + desc="The number of subdivisions of the BSpline Grid to be centered on the image space. Each dimension must have at least 3 subdivisions for the BSpline to be correctly computed. ", sep=",", - argstr="--splineGridSize %s") + argstr="--splineGridSize %s", + ) numberOfIterations = InputMultiPath( traits.Int, - desc= - "The maximum number of iterations to try before failing to converge. Use an explicit limit like 500 or 1000 to manage risk of divergence", + desc="The maximum number of iterations to try before failing to converge. Use an explicit limit like 500 or 1000 to manage risk of divergence", sep=",", - argstr="--numberOfIterations %s") + argstr="--numberOfIterations %s", + ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", - desc= - "What mode to use for using the masks. If ROIAUTO is choosen, then the mask is implicitly defined using a otsu forground and hole filling algorithm. 
The Region Of Interest mode (choose ROI) uses the masks to define what parts of the image should be used for computing the transform.", - argstr="--maskProcessingMode %s") + desc="What mode to use for using the masks. If ROIAUTO is chosen, then the mask is implicitly defined using an Otsu foreground and hole filling algorithm. The Region Of Interest mode (choose ROI) uses the masks to define what parts of the image should be used for computing the transform.", + argstr="--maskProcessingMode %s", + ) fixedBinaryVolume = File( desc="Fixed Image binary mask volume, ONLY FOR MANUAL ROI mode.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( desc="Moving Image binary mask volume, ONLY FOR MANUAL ROI mode.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) outputFixedVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The ROI automatically found in fixed image, ONLY FOR ROIAUTO mode.", - argstr="--outputFixedVolumeROI %s") + desc="The ROI automatically found in fixed image, ONLY FOR ROIAUTO mode.", + argstr="--outputFixedVolumeROI %s", + ) outputMovingVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The ROI automatically found in moving image, ONLY FOR ROIAUTO mode.", - argstr="--outputMovingVolumeROI %s") + desc="The ROI automatically found in moving image, ONLY FOR ROIAUTO mode.", + argstr="--outputMovingVolumeROI %s", + ) outputVolumePixelType = traits.Enum( "float", "short", @@ -125,20 +131,21 @@ class BRAINSFitInputSpec(CommandLineInputSpec): "int", "uint", "uchar", - desc= - "The output image Pixel Type is the scalar datatype for representation of the Output Volume.", - argstr="--outputVolumePixelType %s") + desc="The output image Pixel Type is the scalar datatype for representation of the Output Volume.", + argstr="--outputVolumePixelType %s", + ) backgroundFillValue = traits.Float( desc="Background fill value for output image.", - argstr="--backgroundFillValue %f") + argstr="--backgroundFillValue %f", + ) maskInferiorCutOffFromCenter = traits.Float( - desc= - "For use with --useCenterOfHeadAlign (and --maskProcessingMode ROIAUTO): the cut-off below the image centers, in millimeters, ", - argstr="--maskInferiorCutOffFromCenter %f") + desc="For use with --useCenterOfHeadAlign (and --maskProcessingMode ROIAUTO): the cut-off below the image centers, in millimeters, ", + argstr="--maskInferiorCutOffFromCenter %f", + ) scaleOutputValues = traits.Bool( - desc= - "If true, and the voxel values do not fit within the minimum and maximum values of the desired outputVolumePixelType, then linearly scale the min/max output image voxel values to fit within the min/max range of the outputVolumePixelType.", - argstr="--scaleOutputValues ") + desc="If true, and the voxel values do not fit within the minimum and maximum values of the desired outputVolumePixelType, then linearly scale the min/max output image voxel values to fit within the min/max range of the outputVolumePixelType.", + argstr="--scaleOutputValues ", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -150,231 +157,231 @@ class BRAINSFitInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, or ResampleInPlace.
The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", + argstr="--interpolationMode %s", + ) minimumStepLength = InputMultiPath( traits.Float, - desc= - "Each step in the optimization takes steps at least this big. When none are possible, registration is complete.", + desc="Each step in the optimization takes steps at least this big. When none are possible, registration is complete.", sep=",", - argstr="--minimumStepLength %s") + argstr="--minimumStepLength %s", + ) translationScale = traits.Float( - desc= - "How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more rotation in the search pattern.", - argstr="--translationScale %f") + desc="How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more rotation in the search pattern.", + argstr="--translationScale %f", + ) reproportionScale = traits.Float( - desc= - "ScaleVersor3D 'Scale' compensation factor. Increase this to put more rescaling in a ScaleVersor3D or ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", - argstr="--reproportionScale %f") + desc="ScaleVersor3D 'Scale' compensation factor. Increase this to put more rescaling in a ScaleVersor3D or ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", + argstr="--reproportionScale %f", + ) skewScale = traits.Float( - desc= - "ScaleSkewVersor3D Skew compensation factor. Increase this to put more skew in a ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", - argstr="--skewScale %f") + desc="ScaleSkewVersor3D Skew compensation factor. Increase this to put more skew in a ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", + argstr="--skewScale %f", + ) maxBSplineDisplacement = traits.Float( - desc= - " Sets the maximum allowed displacements in image physical coordinates for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", - argstr="--maxBSplineDisplacement %f") + desc=" Sets the maximum allowed displacements in image physical coordinates for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", + argstr="--maxBSplineDisplacement %f", + ) histogramMatch = traits.Bool( - desc= - "Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile. 
Do NOT use if registering images from different modailties.", - argstr="--histogramMatch ") + desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile. Do NOT use if registering images from different modalities.", + argstr="--histogramMatch ", + ) numberOfHistogramBins = traits.Int( - desc="The number of histogram levels", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" + ) numberOfMatchPoints = traits.Int( - desc="the number of match points", argstr="--numberOfMatchPoints %d") + desc="the number of match points", argstr="--numberOfMatchPoints %d" + ) strippedOutputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "File name for the rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overwritten if either bsplineTransform or linearTransform is set.", - argstr="--strippedOutputTransform %s") + desc="File name for the rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overwritten if either bsplineTransform or linearTransform is set.", + argstr="--strippedOutputTransform %s", + ) transformType = InputMultiPath( traits.Str, - desc= - "Specifies a list of registration types to be used. The valid types are, Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, and BSpline. Specifiying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.", + desc="Specifies a list of registration types to be used. The valid types are, Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, and BSpline. Specifying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.", sep=",", - argstr="--transformType %s") + argstr="--transformType %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Filename to which save the (optional) estimated transform.
NOTE: You must select either the outputTransform or the outputVolume option.", + argstr="--outputTransform %s", + ) fixedVolumeTimeIndex = traits.Int( - desc= - "The index in the time series for the 3D fixed image to fit, if 4-dimensional.", - argstr="--fixedVolumeTimeIndex %d") + desc="The index in the time series for the 3D fixed image to fit, if 4-dimensional.", + argstr="--fixedVolumeTimeIndex %d", + ) movingVolumeTimeIndex = traits.Int( - desc= - "The index in the time series for the 3D moving image to fit, if 4-dimensional.", - argstr="--movingVolumeTimeIndex %d") + desc="The index in the time series for the 3D moving image to fit, if 4-dimensional.", + argstr="--movingVolumeTimeIndex %d", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "The radius for the optional MedianImageFilter preprocessing in all 3 directions.", + desc="The radius for the optional MedianImageFilter preprocessing in all 3 directions.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) removeIntensityOutliers = traits.Float( - desc= - "The half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. If the value of 0.005 is given, the moduel will throw away 0.005 % of both tails, so 0.01% of intensities in total would be ignored in its statistic calculation. ", - argstr="--removeIntensityOutliers %f") + desc="The half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. If the value of 0.005 is given, the module will throw away 0.005 % of both tails, so 0.01% of intensities in total would be ignored in its statistic calculation. ", + argstr="--removeIntensityOutliers %f", + ) useCachingOfBSplineWeightsMode = traits.Enum( "ON", "OFF", - desc= - "This is a 5x speed advantage at the expense of requiring much more memory. Only relevant when transformType is BSpline.", - argstr="--useCachingOfBSplineWeightsMode %s") + desc="This is a 5x speed advantage at the expense of requiring much more memory. Only relevant when transformType is BSpline.", + argstr="--useCachingOfBSplineWeightsMode %s", + ) useExplicitPDFDerivativesMode = traits.Enum( "AUTO", "ON", "OFF", - desc= - "Using mode AUTO means OFF for BSplineDeformableTransforms and ON for the linear transforms. The ON alternative uses more memory to sometimes do a better job.", - argstr="--useExplicitPDFDerivativesMode %s") + desc="Using mode AUTO means OFF for BSplineDeformableTransforms and ON for the linear transforms. The ON alternative uses more memory to sometimes do a better job.", + argstr="--useExplicitPDFDerivativesMode %s", + ) ROIAutoDilateSize = traits.Float( - desc= - "This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", - argstr="--ROIAutoDilateSize %f") + desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. 
A setting of 10mm has been shown to help regularize a BSpline registration type so that there are some background constraints to match the edges of the head better.", + argstr="--ROIAutoDilateSize %f", + ) ROIAutoClosingSize = traits.Float( - desc= - "This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the hole closing size in mm. It is rounded up to the nearest whole pixel size in each direction. The default is to use a closing size of 9mm. For mouse data this value may need to be reset to 0.9 or smaller.", - argstr="--ROIAutoClosingSize %f") + desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the hole closing size in mm. It is rounded up to the nearest whole pixel size in each direction. The default is to use a closing size of 9mm. For mouse data this value may need to be reset to 0.9 or smaller.", + argstr="--ROIAutoClosingSize %f", + ) relaxationFactor = traits.Float( - desc= - "Internal debugging parameter, and should probably never be used from the command line. This will be removed in the future.", - argstr="--relaxationFactor %f") + desc="Internal debugging parameter, and should probably never be used from the command line. This will be removed in the future.", + argstr="--relaxationFactor %f", + ) maximumStepLength = traits.Float( - desc= - "Internal debugging parameter, and should probably never be used from the command line. This will be removed in the future.", - argstr="--maximumStepLength %f") + desc="Internal debugging parameter, and should probably never be used from the command line. This will be removed in the future.", + argstr="--maximumStepLength %f", + ) failureExitCode = traits.Int( - desc= - "If the fit fails, exit with this status code. (It can be used to force a successfult exit status of (0) if the registration fails due to reaching the maximum number of iterations.", - argstr="--failureExitCode %d") + desc="If the fit fails, exit with this status code. (It can be used to force a successful exit status of (0) if the registration fails due to reaching the maximum number of iterations.", + argstr="--failureExitCode %d", + ) writeTransformOnFailure = traits.Bool( - desc= - "Flag to save the final transform even if the numberOfIterations are reached without convergence. (Intended for use when --failureExitCode 0 )", - argstr="--writeTransformOnFailure ") + desc="Flag to save the final transform even if the numberOfIterations are reached without convergence. (Intended for use when --failureExitCode 0 )", + argstr="--writeTransformOnFailure ", + ) numberOfThreads = traits.Int( - desc= - "Explicitly specify the maximum number of threads to use. (default is auto-detected)", - argstr="--numberOfThreads %d") + desc="Explicitly specify the maximum number of threads to use. (default is auto-detected)", + argstr="--numberOfThreads %d", + ) forceMINumberOfThreads = traits.Int( - desc= - "Force the the maximum number of threads to use for non thread safe MI metric. CAUTION: Inconsistent results my arise!", - argstr="--forceMINumberOfThreads %d") + desc="Force the maximum number of threads to use for non thread safe MI metric. CAUTION: Inconsistent results may arise!", + argstr="--forceMINumberOfThreads %d", + )
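# A minimal sketch (not part of the upstream diff) of the check performed by
# the auto-generated test modules shown earlier in this changeset: for every
# trait, each metadata key recorded in the test's input_map must match the
# live spec. For example, for two of the BRAINSFit traits defined in this file:
from nipype.interfaces.slicer.registration.brainsfit import BRAINSFit

inputs = BRAINSFit.input_spec()
assert inputs.traits()["numberOfThreads"].argstr == "--numberOfThreads %d"
assert inputs.traits()["transformType"].sep == ","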
debugLevel = traits.Int( - desc= - "Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", - argstr="--debugLevel %d") + desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", + argstr="--debugLevel %d", + ) costFunctionConvergenceFactor = traits.Float( - desc= - " From itkLBFGSBOptimizer.h: Set/Get the CostFunctionConvergenceFactor. Algorithm terminates when the reduction in cost function is less than (factor * epsmcj) where epsmch is the machine precision. Typical values for factor: 1e+12 for low accuracy; 1e+7 for moderate accuracy and 1e+1 for extremely high accuracy. 1e+9 seems to work well., ", - argstr="--costFunctionConvergenceFactor %f") + desc=" From itkLBFGSBOptimizer.h: Set/Get the CostFunctionConvergenceFactor. Algorithm terminates when the reduction in cost function is less than (factor * epsmch) where epsmch is the machine precision. Typical values for factor: 1e+12 for low accuracy; 1e+7 for moderate accuracy and 1e+1 for extremely high accuracy. 1e+9 seems to work well., ", + argstr="--costFunctionConvergenceFactor %f", + ) projectedGradientTolerance = traits.Float( - desc= - " From itkLBFGSBOptimizer.h: Set/Get the ProjectedGradientTolerance. Algorithm terminates when the project gradient is below the tolerance. Default lbfgsb value is 1e-5, but 1e-4 seems to work well., ", - argstr="--projectedGradientTolerance %f") + desc=" From itkLBFGSBOptimizer.h: Set/Get the ProjectedGradientTolerance. Algorithm terminates when the project gradient is below the tolerance. Default lbfgsb value is 1e-5, but 1e-4 seems to work well., ", + argstr="--projectedGradientTolerance %f", + ) gui = traits.Bool( - desc= - "Display intermediate image volumes for debugging. NOTE: This is not part of the standard build sytem, and probably does nothing on your installation.", - argstr="--gui ") + desc="Display intermediate image volumes for debugging. NOTE: This is not part of the standard build system, and probably does nothing on your installation.", + argstr="--gui ", + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 = traits.Bool( - desc="DO NOT USE THIS FLAG", - argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 ") + desc="DO NOT USE THIS FLAG", argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 " + ) NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 = traits.Bool( - desc="DO NOT USE THIS FLAG", - argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 ") + desc="DO NOT USE THIS FLAG", argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 " + ) NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 = traits.Bool( - desc="DO NOT USE THIS FLAG", - argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 ") + desc="DO NOT USE THIS FLAG", argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 " + ) permitParameterVariation = InputMultiPath( traits.Int, - desc= - "A bit vector to permit linear transform parameters to vary under optimization.
The vector order corresponds with transform parameters, and beyond the end ones fill in as a default. For instance, you can choose to rotate only in x (pitch) with 1,0,0; this is mostly for expert use in turning on and off individual degrees of freedom in rotation, translation or scaling without multiplying the number of transform representations; this trick is probably meaningless when tried with the general affine transform.", sep=",", - argstr="--permitParameterVariation %s") + argstr="--permitParameterVariation %s", + ) costMetric = traits.Enum( "MMI", "MSE", "NC", "MC", - desc= - "The cost metric to be used during fitting. Defaults to MMI. Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC (Normalized Correlation), MC (Match Cardinality for binary images)", - argstr="--costMetric %s") + desc="The cost metric to be used during fitting. Defaults to MMI. Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC (Normalized Correlation), MC (Match Cardinality for binary images)", + argstr="--costMetric %s", + ) writeOutputTransformInFloat = traits.Bool( - desc= - "By default, the output registration transforms (either the output composite transform or each transform component) are written to the disk in double precision. If this flag is ON, the output transforms will be written in single (float) precision. It is especially important if the output transform is a displacement field transform, or it is a composite transform that includes several displacement fields.", - argstr="--writeOutputTransformInFloat ") + desc="By default, the output registration transforms (either the output composite transform or each transform component) are written to the disk in double precision. If this flag is ON, the output transforms will be written in single (float) precision. It is especially important if the output transform is a displacement field transform, or it is a composite transform that includes several displacement fields.", + argstr="--writeOutputTransformInFloat ", + ) class BRAINSFitOutputSpec(TraitedSpec): bsplineTransform = File( - desc= - "(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS BSpline", - exists=True) + desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS BSpline", + exists=True, + ) linearTransform = File( - desc= - "(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS ---NOT--- BSpline", - exists=True) + desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS ---NOT--- BSpline", + exists=True, + ) outputVolume = File( - desc= - "(optional) Output image for registration. NOTE: You must select either the outputTransform or the outputVolume option.", - exists=True) + desc="(optional) Output image for registration. 
NOTE: You must select either the outputTransform or the outputVolume option.",
-        exists=True)
+        desc="(optional) Output image for registration. NOTE: You must select either the outputTransform or the outputVolume option.",
+        exists=True,
+    )
     outputFixedVolumeROI = File(
-        desc=
-        "The ROI automatically found in fixed image, ONLY FOR ROIAUTO mode.",
-        exists=True)
+        desc="The ROI automatically found in fixed image, ONLY FOR ROIAUTO mode.",
+        exists=True,
+    )
     outputMovingVolumeROI = File(
-        desc=
-        "The ROI automatically found in moving image, ONLY FOR ROIAUTO mode.",
-        exists=True)
+        desc="The ROI automatically found in moving image, ONLY FOR ROIAUTO mode.",
+        exists=True,
+    )
     strippedOutputTransform = File(
-        desc=
-        "File name for the rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overwritten if either bsplineTransform or linearTransform is set.",
-        exists=True)
+        desc="File name for the rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overwritten if either bsplineTransform or linearTransform is set.",
+        exists=True,
+    )
     outputTransform = File(
-        desc=
-        "(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.",
-        exists=True)
+        desc="(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.",
+        exists=True,
+    )


 class BRAINSFit(SEMLikeCommandLine):
     """title: General Registration (BRAINS)

-category: Registration
-
-description: Register a three-dimensional volume to a reference volume (Mattes Mutual Information by default). Described in BRAINSFit: Mutual Information Registrations of Whole-Brain 3D Images, Using the Insight Toolkit, Johnson H.J., Harris G., Williams K., The Insight Journal, 2007. http://hdl.handle.net/1926/1291
+    category: Registration

-version: 3.0.0
+    description: Register a three-dimensional volume to a reference volume (Mattes Mutual Information by default). Described in BRAINSFit: Mutual Information Registrations of Whole-Brain 3D Images, Using the Insight Toolkit, Johnson H.J., Harris G., Williams K., The Insight Journal, 2007. http://hdl.handle.net/1926/1291

-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:BRAINSFit
+    version: 3.0.0

-license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt
+    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:BRAINSFit

-contributor: Hans J. Johnson, hans-johnson -at- uiowa.edu, http://wwww.psychiatry.uiowa.edu
+    license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt

-acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5) 1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard
+    contributor: Hans J. Johnson, hans-johnson -at- uiowa.edu, http://www.psychiatry.uiowa.edu

-"""
+    acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5) 1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard
+    """

     input_spec = BRAINSFitInputSpec
     output_spec = BRAINSFitOutputSpec
     _cmd = "BRAINSFit "
     _outputs_filenames = {
-        'outputVolume': 'outputVolume.nii',
-        'bsplineTransform': 'bsplineTransform.mat',
-        'outputTransform': 'outputTransform.mat',
-        'outputFixedVolumeROI': 'outputFixedVolumeROI.nii',
-        'strippedOutputTransform': 'strippedOutputTransform.mat',
-        'outputMovingVolumeROI': 'outputMovingVolumeROI.nii',
-        'linearTransform': 'linearTransform.mat'
+        "outputVolume": "outputVolume.nii",
+        "bsplineTransform": "bsplineTransform.mat",
+        "outputTransform": "outputTransform.mat",
+        "outputFixedVolumeROI": "outputFixedVolumeROI.nii",
+        "strippedOutputTransform": "strippedOutputTransform.mat",
+        "outputMovingVolumeROI": "outputMovingVolumeROI.nii",
+        "linearTransform": "linearTransform.mat",
     }
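None of the reformatting in this hunk changes how the interface is driven. As a quick orientation, a minimal usage sketch — the filenames are hypothetical, the BRAINSFit executable must be on the PATH, and only traits visible in this diff are set:

    from nipype.interfaces.slicer.registration.brainsfit import BRAINSFit

    fit = BRAINSFit()
    fit.inputs.fixedVolume = "fixed_T1.nii"    # hypothetical fixed image
    fit.inputs.movingVolume = "moving_T1.nii"  # hypothetical moving image
    fit.inputs.costMetric = "MMI"              # Mattes Mutual Information (the default)
    fit.inputs.outputTransform = True          # True -> use the auto-generated name
    result = fit.run()
    print(result.outputs.outputTransform)      # .../outputTransform.mat

Setting one of the traits.Either(Bool, File) outputs to True is what makes SEMLikeCommandLine fall back to the _outputs_filenames mapping above, which is why that dict is part of the spec.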
diff --git a/nipype/interfaces/slicer/registration/brainsresample.py b/nipype/interfaces/slicer/registration/brainsresample.py
index a3b79681fd..82a69f6d0a 100644
--- a/nipype/interfaces/slicer/registration/brainsresample.py
+++ b/nipype/interfaces/slicer/registration/brainsresample.py
@@ -1,26 +1,30 @@
-# -*- coding: utf-8 -*-
-# -*- coding: utf8 -*-
 """Autogenerated file - DO NOT EDIT

 If you spot a bug, please report it on the mailing list and/or change the generator."""

-from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath
-import os
+from nipype.interfaces.base import (
+    CommandLineInputSpec,
+    SEMLikeCommandLine,
+    TraitedSpec,
+    File,
+    traits,
+    InputMultiPath,
+)


 class BRAINSResampleInputSpec(CommandLineInputSpec):
-    inputVolume = File(
-        desc="Image To Warp", exists=True, argstr="--inputVolume %s")
+    inputVolume = File(desc="Image To Warp", exists=True, argstr="--inputVolume %s")
     referenceVolume = File(
-        desc=
-        "Reference image used only to define the output space. If not specified, the warping is done in the same space as the image to warp.",
+        desc="Reference image used only to define the output space. If not specified, the warping is done in the same space as the image to warp.",
         exists=True,
-        argstr="--referenceVolume %s")
+        argstr="--referenceVolume %s",
+    )
     outputVolume = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc="Resulting deformed image",
-        argstr="--outputVolume %s")
+        argstr="--outputVolume %s",
+    )
     pixelType = traits.Enum(
         "float",
         "short",
@@ -29,18 +33,19 @@ class BRAINSResampleInputSpec(CommandLineInputSpec):
         "uint",
         "uchar",
         "binary",
-        desc=
-        "Specifies the pixel type for the input/output images. The \'binary\' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.",
-        argstr="--pixelType %s")
+        desc="Specifies the pixel type for the input/output images. 
The 'binary' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", + argstr="--pixelType %s", + ) deformationVolume = File( desc="Displacement Field to be used to warp the image", exists=True, - argstr="--deformationVolume %s") + argstr="--deformationVolume %s", + ) warpTransform = File( - desc= - "Filename for the BRAINSFit transform used in place of the deformation field", + desc="Filename for the BRAINSFit transform used in place of the deformation field", exists=True, - argstr="--warpTransform %s") + argstr="--warpTransform %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -52,24 +57,24 @@ class BRAINSResampleInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) inverseTransform = traits.Bool( - desc= - "True/False is to compute inverse of given transformation. Default is false", - argstr="--inverseTransform ") - defaultValue = traits.Float( - desc="Default voxel value", argstr="--defaultValue %f") + desc="True/False is to compute inverse of given transformation. Default is false", + argstr="--inverseTransform ", + ) + defaultValue = traits.Float(desc="Default voxel value", argstr="--defaultValue %f") gridSpacing = InputMultiPath( traits.Int, - desc= - "Add warped grid to output image to help show the deformation that occured with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). This is useful for makeing a 2D image of grid lines from the 3D space ", + desc="Add warped grid to output image to help show the deformation that occurred with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). This is useful for making a 2D image of grid lines from the 3D space ", sep=",", - argstr="--gridSpacing %s") + argstr="--gridSpacing %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSResampleOutputSpec(TraitedSpec): @@ -79,25 +84,24 @@ class BRAINSResampleOutputSpec(TraitedSpec): class BRAINSResample(SEMLikeCommandLine): """title: Resample Image (BRAINS) -category: Registration + category: Registration -description: - This program resamples an image image using a deformation field or a transform (BSpline, Affine, Rigid, etc.). + description: + This program resamples an image using a deformation field or a transform (BSpline, Affine, Rigid, etc.). 
-version: 3.0.0
+    version: 3.0.0

-documentation-url: http://www.slicer.org/slicerWiki/index.php/Modules:BRAINSResample
+    documentation-url: http://www.slicer.org/slicerWiki/index.php/Modules:BRAINSResample

-license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt
+    license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt

-contributor: This tool was developed by Vincent Magnotta, Greg Harris, and Hans Johnson.
+    contributor: This tool was developed by Vincent Magnotta, Greg Harris, and Hans Johnson.

-acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health.
-
-"""
+    acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health.
+    """

     input_spec = BRAINSResampleInputSpec
     output_spec = BRAINSResampleOutputSpec
     _cmd = "BRAINSResample "
-    _outputs_filenames = {'outputVolume': 'outputVolume.nii'}
+    _outputs_filenames = {"outputVolume": "outputVolume.nii"}
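BRAINSResample is typically chained after a fit. A minimal sketch in the same style, with hypothetical filenames, applying a transform produced by BRAINSFit:

    from nipype.interfaces.slicer.registration.brainsresample import BRAINSResample

    resample = BRAINSResample()
    resample.inputs.inputVolume = "moving_T1.nii"          # image to warp (hypothetical)
    resample.inputs.referenceVolume = "fixed_T1.nii"       # defines the output space
    resample.inputs.warpTransform = "outputTransform.mat"  # e.g. a BRAINSFit result
    resample.inputs.interpolationMode = "Linear"
    resample.inputs.outputVolume = True                    # auto-named outputVolume.nii
    result = resample.run()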
diff --git a/nipype/interfaces/slicer/registration/specialized.py b/nipype/interfaces/slicer/registration/specialized.py
index 9c6c3f5f20..30688928c2 100644
--- a/nipype/interfaces/slicer/registration/specialized.py
+++ b/nipype/interfaces/slicer/registration/specialized.py
@@ -1,162 +1,172 @@
-# -*- coding: utf-8 -*-
-# -*- coding: utf8 -*-
 """Autogenerated file - DO NOT EDIT

 If you spot a bug, please report it on the mailing list and/or change the generator."""

-from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath
-import os
+from nipype.interfaces.base import (
+    CommandLineInputSpec,
+    SEMLikeCommandLine,
+    TraitedSpec,
+    File,
+    traits,
+    InputMultiPath,
+)


 class ACPCTransformInputSpec(CommandLineInputSpec):
     acpc = InputMultiPath(
         traits.List(traits.Float(), minlen=3, maxlen=3),
-        desc=
-        "ACPC line, two fiducial points, one at the anterior commissure and one at the posterior commissure.",
-        argstr="--acpc %s...")
+        desc="ACPC line, two fiducial points, one at the anterior commissure and one at the posterior commissure.",
+        argstr="--acpc %s...",
+    )
     midline = InputMultiPath(
         traits.List(traits.Float(), minlen=3, maxlen=3),
-        desc=
-        "The midline is a series of points defining the division between the hemispheres of the brain (the mid sagittal plane).",
-        argstr="--midline %s...")
+        desc="The midline is a series of points defining the division between the hemispheres of the brain (the mid sagittal plane).",
+        argstr="--midline %s...",
+    )
     outputTransform = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "A transform filled in from the ACPC and Midline registration calculation",
-        argstr="--outputTransform %s")
+        desc="A transform filled in from the ACPC and Midline registration calculation",
+        argstr="--outputTransform %s",
+    )
     debugSwitch = traits.Bool(
-        desc="Click if wish to see debugging output", argstr="--debugSwitch ")
+        desc="Click if wish to see debugging output", argstr="--debugSwitch "
+    )


 class ACPCTransformOutputSpec(TraitedSpec):
     outputTransform = File(
-        desc=
-        "A transform filled in from the ACPC and Midline registration calculation",
-        exists=True)
+        desc="A transform filled in from the ACPC and Midline registration calculation",
+        exists=True,
+    )


 class ACPCTransform(SEMLikeCommandLine):
     """title: ACPC Transform

-category: Registration.Specialized
-
-description: 

-Calculate a transformation from two lists of fiducial points.
-
-ACPC line is two fiducial points, one at the anterior commissure and one at the posterior commissure. The resulting transform will bring the line connecting them to horizontal to the AP axis.
-
-The midline is a series of points defining the division between the hemispheres of the brain (the mid sagittal plane). The resulting transform will put the output volume with the mid sagittal plane lined up with the AS plane.
-
-Use the Filtering module Resample Scalar/Vector/DWI Volume to apply the transformation to a volume.
-
+    category: Registration.Specialized
-version: 1.0
+    description: 

+    Calculate a transformation from two lists of fiducial points.

+    ACPC line is two fiducial points, one at the anterior commissure and one at the posterior commissure. The resulting transform will bring the line connecting them to horizontal to the AP axis.

+    The midline is a series of points defining the division between the hemispheres of the brain (the mid sagittal plane). The resulting transform will put the output volume with the mid sagittal plane lined up with the AS plane.

+    Use the Filtering module Resample Scalar/Vector/DWI Volume to apply the transformation to a volume.

-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ACPCTransform
+    version: 1.0

-license: slicer3
+    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ACPCTransform

-contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH)
+    license: slicer3

-acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH)

-"""
+    acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    """

     input_spec = ACPCTransformInputSpec
     output_spec = ACPCTransformOutputSpec
     _cmd = "ACPCTransform "
-    _outputs_filenames = {'outputTransform': 'outputTransform.mat'}
+    _outputs_filenames = {"outputTransform": "outputTransform.mat"}
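The specialized interfaces follow the same calling convention; the only difference is that the fiducial inputs are lists of [x, y, z] triples. A minimal ACPCTransform sketch with made-up coordinates (two points for the ACPC line, several for the midline):

    from nipype.interfaces.slicer.registration.specialized import ACPCTransform

    acpc = ACPCTransform()
    acpc.inputs.acpc = [[0.0, 9.0, -5.0], [0.0, -24.0, -2.0]]  # AC then PC (made up)
    acpc.inputs.midline = [[0.0, 0.0, 12.0], [0.0, -12.0, 24.0], [0.0, -24.0, 36.0]]
    acpc.inputs.outputTransform = True   # auto-named outputTransform.mat
    result = acpc.run()                  # needs the Slicer ACPCTransform CLI module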

 class FiducialRegistrationInputSpec(CommandLineInputSpec):
     fixedLandmarks = InputMultiPath(
         traits.List(traits.Float(), minlen=3, maxlen=3),
         desc="Ordered list of landmarks in the fixed image",
-        argstr="--fixedLandmarks %s...")
+        argstr="--fixedLandmarks %s...",
+    )
     movingLandmarks = InputMultiPath(
         traits.List(traits.Float(), minlen=3, maxlen=3),
         desc="Ordered list of landmarks in the moving image",
-        argstr="--movingLandmarks %s...")
+        argstr="--movingLandmarks %s...",
+    )
     saveTransform = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc="Save the transform that results from registration",
-        argstr="--saveTransform %s")
+        argstr="--saveTransform %s",
+    )
     transformType = traits.Enum(
         "Translation",
         "Rigid",
         "Similarity",
         desc="Type of transform to produce",
-        argstr="--transformType %s")
+        argstr="--transformType %s",
+    )
     rms = traits.Float(desc="Display RMS Error.", argstr="--rms %f")
     outputMessage = traits.Str(
-        desc="Provides more information on the output",
-        argstr="--outputMessage %s")
+        desc="Provides more information on the output", argstr="--outputMessage %s"
+    )


 class FiducialRegistrationOutputSpec(TraitedSpec):
     saveTransform = File(
-        desc="Save the transform that results from registration", exists=True)
+        desc="Save the transform that results from registration", exists=True
+    )


 class FiducialRegistration(SEMLikeCommandLine):
     """title: Fiducial Registration

-category: Registration.Specialized
+    category: Registration.Specialized

-description: Computes a rigid, similarity or affine transform from a matched list of fiducials
+    description: Computes a rigid, similarity or affine transform from a matched list of fiducials

-version: 0.1.0.$Revision$
+    version: 0.1.0.$Revision$

-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/TransformFromFiducials
+    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/TransformFromFiducials

-contributor: Casey B Goodlett (Kitware), Dominik Meier (SPL, BWH)
+    contributor: Casey B Goodlett (Kitware), Dominik Meier (SPL, BWH)

-acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
-
-"""
+    acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
+    """

     input_spec = FiducialRegistrationInputSpec
     output_spec = FiducialRegistrationOutputSpec
     _cmd = "FiducialRegistration "
-    _outputs_filenames = {'saveTransform': 'saveTransform.txt'}
+    _outputs_filenames = {"saveTransform": "saveTransform.txt"}
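FiducialRegistration takes matched landmark lists instead of an ACPC/midline definition; points are paired by their position in the two lists. A minimal sketch with made-up landmarks:

    from nipype.interfaces.slicer.registration.specialized import FiducialRegistration

    fid = FiducialRegistration()
    fid.inputs.fixedLandmarks = [[0.0, 0.0, 0.0], [10.0, 0.0, 0.0], [0.0, 10.0, 0.0]]
    fid.inputs.movingLandmarks = [[1.0, 0.4, 0.0], [11.0, 0.4, 0.0], [1.0, 10.4, 0.0]]
    fid.inputs.transformType = "Rigid"   # Translation | Rigid | Similarity
    fid.inputs.saveTransform = True      # auto-named saveTransform.txt
    result = fid.run()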

 class VBRAINSDemonWarpInputSpec(CommandLineInputSpec):
     movingVolume = InputMultiPath(
         File(exists=True),
         desc="Required: input moving image",
-        argstr="--movingVolume %s...")
+        argstr="--movingVolume %s...",
+    )
     fixedVolume = InputMultiPath(
         File(exists=True),
         desc="Required: input fixed (target) image",
-        argstr="--fixedVolume %s...")
+        argstr="--fixedVolume %s...",
+    )
     inputPixelType = traits.Enum(
         "float",
         "short",
         "ushort",
         "int",
         "uchar",
-        desc=
-        "Input volumes will be typecast to this format: float|short|ushort|int|uchar",
-        argstr="--inputPixelType %s")
+        desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar",
+        argstr="--inputPixelType %s",
+    )
     outputVolume = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Required: output resampled moving image (will have the same physical space as the fixedVolume).",
-        argstr="--outputVolume %s")
+        desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).",
+        argstr="--outputVolume %s",
+    )
     outputDisplacementFieldVolume = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Output deformation field vector image (will have the same physical space as the fixedVolume).",
-        argstr="--outputDisplacementFieldVolume %s")
+        desc="Output deformation field vector image (will have the same physical space as the fixedVolume).",
+        argstr="--outputDisplacementFieldVolume %s",
+    )
     outputPixelType = traits.Enum(
         "float",
         "short",
         "ushort",
         "int",
         "uchar",
-        desc=
-        "outputVolume will be typecast to this format: float|short|ushort|int|uchar",
-        argstr="--outputPixelType %s")
+        desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar",
+        argstr="--outputPixelType %s",
+    )
     interpolationMode = traits.Enum(
         "NearestNeighbor",
         "Linear",
@@ -168,246 +178,259 @@ class VBRAINSDemonWarpInputSpec(CommandLineInputSpec):
         "Welch",
         "Lanczos",
         "Blackman",
-        desc=
-        "Type of interpolation to be used when applying transform to moving volume. 
Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", "LogDemons", "SymmetricLogDemons", - desc= - "Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic|LogDemons|SymmetricLogDemons", - argstr="--registrationFilterType %s") + desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic|LogDemons|SymmetricLogDemons", + argstr="--registrationFilterType %s", + ) smoothDisplacementFieldSigma = traits.Float( - desc= - "A gaussian smoothing value to be applied to the deformation feild at each iteration.", - argstr="--smoothDisplacementFieldSigma %f") + desc="A gaussian smoothing value to be applied to the deformation field at each iteration.", + argstr="--smoothDisplacementFieldSigma %f", + ) numberOfPyramidLevels = traits.Int( - desc= - "Number of image pyramid levels to use in the multi-resolution registration.", - argstr="--numberOfPyramidLevels %d") + desc="Number of image pyramid levels to use in the multi-resolution registration.", + argstr="--numberOfPyramidLevels %d", + ) minimumFixedPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumFixedPyramid %s") + argstr="--minimumFixedPyramid %s", + ) minimumMovingPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumMovingPyramid %s") + argstr="--minimumMovingPyramid %s", + ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", - argstr="--arrayOfPyramidLevelIterations %s") + argstr="--arrayOfPyramidLevelIterations %s", + ) histogramMatch = traits.Bool( - desc= - "Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", - argstr="--histogramMatch ") + desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", + argstr="--histogramMatch ", + ) numberOfHistogramBins = traits.Int( - desc="The number of histogram levels", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" + ) numberOfMatchPoints = traits.Int( desc="The number of match points for histrogramMatch", - argstr="--numberOfMatchPoints %d") + argstr="--numberOfMatchPoints %d", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "Median filter radius in all 3 directions. When images have a lot of salt and pepper noise, this step can improve the registration.", + desc="Median filter radius in all 3 directions. 
When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, - argstr="--initializeWithDisplacementField %s") + argstr="--initializeWithDisplacementField %s", + ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, - argstr="--initializeWithTransform %s") + argstr="--initializeWithTransform %s", + ) makeBOBF = traits.Bool( - desc= - "Flag to make Brain-Only Background-Filled versions of the input and target volumes.", - argstr="--makeBOBF ") + desc="Flag to make Brain-Only Background-Filled versions of the input and target volumes.", + argstr="--makeBOBF ", + ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( - desc= - "Mask filename for desired region of interest in the Moving image.", + desc="Mask filename for desired region of interest in the Moving image.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) lowerThresholdForBOBF = traits.Int( - desc="Lower threshold for performing BOBF", - argstr="--lowerThresholdForBOBF %d") + desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" + ) upperThresholdForBOBF = traits.Int( - desc="Upper threshold for performing BOBF", - argstr="--upperThresholdForBOBF %d") + desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" + ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", - argstr="--backgroundFillValue %d") + argstr="--backgroundFillValue %d", + ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", - argstr="--seedForBOBF %s") + argstr="--seedForBOBF %s", + ) neighborhoodForBOBF = InputMultiPath( traits.Int, - desc= - "neighborhood in all 3 directions to be included when performing BOBF", + desc="neighborhood in all 3 directions to be included when performing BOBF", sep=",", - argstr="--neighborhoodForBOBF %s") + argstr="--neighborhoodForBOBF %s", + ) outputDisplacementFieldPrefix = traits.Str( - desc= - "Displacement field filename prefix for writing separate x, y, and z component images", - argstr="--outputDisplacementFieldPrefix %s") + desc="Displacement field filename prefix for writing separate x, y, and z component images", + argstr="--outputDisplacementFieldPrefix %s", + ) outputCheckerboardVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - argstr="--outputCheckerboardVolume %s") + desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + argstr="--outputCheckerboardVolume %s", + ) checkerboardPatternSubdivisions = InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", - argstr="--checkerboardPatternSubdivisions %s") + argstr="--checkerboardPatternSubdivisions %s", + ) outputNormalized = traits.Bool( - desc= - "Flag to warp and write the normalized images to output. 
In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", - argstr="--outputNormalized ") + desc="Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", + argstr="--outputNormalized ", + ) outputDebug = traits.Bool( - desc="Flag to write debugging images after each step.", - argstr="--outputDebug ") + desc="Flag to write debugging images after each step.", argstr="--outputDebug " + ) weightFactors = InputMultiPath( traits.Float, desc="Weight fatctors for each input images", sep=",", - argstr="--weightFactors %s") + argstr="--weightFactors %s", + ) gradient_type = traits.Enum( "0", "1", "2", - desc= - "Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", - argstr="--gradient_type %s") + desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", + argstr="--gradient_type %s", + ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", - argstr="--upFieldSmoothing %f") + argstr="--upFieldSmoothing %f", + ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", - argstr="--max_step_length %f") + argstr="--max_step_length %f", + ) use_vanilla_dem = traits.Bool( - desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem ") + desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " + ) gui = traits.Bool( - desc="Display intermediate image volumes for debugging", - argstr="--gui ") + desc="Display intermediate image volumes for debugging", argstr="--gui " + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", - argstr="--numberOfBCHApproximationTerms %d") + argstr="--numberOfBCHApproximationTerms %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class VBRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputDisplacementFieldVolume = File( - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputCheckerboardVolume = File( - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - exists=True) + desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + exists=True, + ) class VBRAINSDemonWarp(SEMLikeCommandLine): """title: Vector Demon Registration (BRAINS) -category: Registration.Specialized - -description: - This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. 
This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://www.nitrc.org/projects/brainsdemonwarp. + category: Registration.Specialized + description: + This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://www.nitrc.org/projects/brainsdemonwarp. -version: 3.0.0 -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:BRAINSDemonWarp + version: 3.0.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:BRAINSDemonWarp -contributor: This tool was developed by Hans J. Johnson and Greg Harris. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. + contributor: This tool was developed by Hans J. Johnson and Greg Harris. -""" + acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. + """ input_spec = VBRAINSDemonWarpInputSpec output_spec = VBRAINSDemonWarpOutputSpec _cmd = "VBRAINSDemonWarp " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputCheckerboardVolume': 'outputCheckerboardVolume.nii', - 'outputDisplacementFieldVolume': 'outputDisplacementFieldVolume.nrrd' + "outputVolume": "outputVolume.nii", + "outputCheckerboardVolume": "outputCheckerboardVolume.nii", + "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } class BRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = File( - desc="Required: input moving image", - exists=True, - argstr="--movingVolume %s") + desc="Required: input moving image", exists=True, argstr="--movingVolume %s" + ) fixedVolume = File( desc="Required: input fixed (target) image", exists=True, - argstr="--fixedVolume %s") + argstr="--fixedVolume %s", + ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "Input volumes will be typecast to this format: float|short|ushort|int|uchar", - argstr="--inputPixelType %s") + desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", + argstr="--inputPixelType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - argstr="--outputVolume %s") + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + argstr="--outputVolume %s", + ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - argstr="--outputDisplacementFieldVolume %s") + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + argstr="--outputDisplacementFieldVolume %s", + ) outputPixelType = traits.Enum( "float", "short", "ushort", 
"int", "uchar", - desc= - "outputVolume will be typecast to this format: float|short|ushort|int|uchar", - argstr="--outputPixelType %s") + desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", + argstr="--outputPixelType %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -419,197 +442,209 @@ class BRAINSDemonWarpInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", - desc= - "Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic", - argstr="--registrationFilterType %s") + desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic", + argstr="--registrationFilterType %s", + ) smoothDisplacementFieldSigma = traits.Float( - desc= - "A gaussian smoothing value to be applied to the deformation feild at each iteration.", - argstr="--smoothDisplacementFieldSigma %f") + desc="A gaussian smoothing value to be applied to the deformation field at each iteration.", + argstr="--smoothDisplacementFieldSigma %f", + ) numberOfPyramidLevels = traits.Int( - desc= - "Number of image pyramid levels to use in the multi-resolution registration.", - argstr="--numberOfPyramidLevels %d") + desc="Number of image pyramid levels to use in the multi-resolution registration.", + argstr="--numberOfPyramidLevels %d", + ) minimumFixedPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumFixedPyramid %s") + argstr="--minimumFixedPyramid %s", + ) minimumMovingPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumMovingPyramid %s") + argstr="--minimumMovingPyramid %s", + ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", - argstr="--arrayOfPyramidLevelIterations %s") + argstr="--arrayOfPyramidLevelIterations %s", + ) histogramMatch = traits.Bool( - desc= - "Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", - argstr="--histogramMatch ") + desc="Histogram Match the input images. 
This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", + argstr="--histogramMatch ", + ) numberOfHistogramBins = traits.Int( - desc="The number of histogram levels", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" + ) numberOfMatchPoints = traits.Int( desc="The number of match points for histrogramMatch", - argstr="--numberOfMatchPoints %d") + argstr="--numberOfMatchPoints %d", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "Median filter radius in all 3 directions. When images have a lot of salt and pepper noise, this step can improve the registration.", + desc="Median filter radius in all 3 directions. When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, - argstr="--initializeWithDisplacementField %s") + argstr="--initializeWithDisplacementField %s", + ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, - argstr="--initializeWithTransform %s") + argstr="--initializeWithTransform %s", + ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", "BOBF", - desc= - "What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is choosen, then the mask is implicitly defined using a otsu forground and hole filling algorithm. Where the Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.", - argstr="--maskProcessingMode %s") + desc="What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is chosen, then the mask is implicitly defined using a otsu foreground and hole filling algorithm. Where the Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. 
Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.", + argstr="--maskProcessingMode %s", + ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( - desc= - "Mask filename for desired region of interest in the Moving image.", + desc="Mask filename for desired region of interest in the Moving image.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) lowerThresholdForBOBF = traits.Int( - desc="Lower threshold for performing BOBF", - argstr="--lowerThresholdForBOBF %d") + desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" + ) upperThresholdForBOBF = traits.Int( - desc="Upper threshold for performing BOBF", - argstr="--upperThresholdForBOBF %d") + desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" + ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", - argstr="--backgroundFillValue %d") + argstr="--backgroundFillValue %d", + ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", - argstr="--seedForBOBF %s") + argstr="--seedForBOBF %s", + ) neighborhoodForBOBF = InputMultiPath( traits.Int, - desc= - "neighborhood in all 3 directions to be included when performing BOBF", + desc="neighborhood in all 3 directions to be included when performing BOBF", sep=",", - argstr="--neighborhoodForBOBF %s") + argstr="--neighborhoodForBOBF %s", + ) outputDisplacementFieldPrefix = traits.Str( - desc= - "Displacement field filename prefix for writing separate x, y, and z component images", - argstr="--outputDisplacementFieldPrefix %s") + desc="Displacement field filename prefix for writing separate x, y, and z component images", + argstr="--outputDisplacementFieldPrefix %s", + ) outputCheckerboardVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - argstr="--outputCheckerboardVolume %s") + desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + argstr="--outputCheckerboardVolume %s", + ) checkerboardPatternSubdivisions = InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", - argstr="--checkerboardPatternSubdivisions %s") + argstr="--checkerboardPatternSubdivisions %s", + ) outputNormalized = traits.Bool( - desc= - "Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", - argstr="--outputNormalized ") + desc="Flag to warp and write the normalized images to output. 
In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", + argstr="--outputNormalized ", + ) outputDebug = traits.Bool( - desc="Flag to write debugging images after each step.", - argstr="--outputDebug ") + desc="Flag to write debugging images after each step.", argstr="--outputDebug " + ) gradient_type = traits.Enum( "0", "1", "2", - desc= - "Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", - argstr="--gradient_type %s") + desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", + argstr="--gradient_type %s", + ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", - argstr="--upFieldSmoothing %f") + argstr="--upFieldSmoothing %f", + ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", - argstr="--max_step_length %f") + argstr="--max_step_length %f", + ) use_vanilla_dem = traits.Bool( - desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem ") + desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " + ) gui = traits.Bool( - desc="Display intermediate image volumes for debugging", - argstr="--gui ") + desc="Display intermediate image volumes for debugging", argstr="--gui " + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", - argstr="--numberOfBCHApproximationTerms %d") + argstr="--numberOfBCHApproximationTerms %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputDisplacementFieldVolume = File( - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputCheckerboardVolume = File( - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - exists=True) + desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + exists=True, + ) class BRAINSDemonWarp(SEMLikeCommandLine): """title: Demon Registration (BRAINS) -category: Registration.Specialized - -description: - This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://www.nitrc.org/projects/brainsdemonwarp. + category: Registration.Specialized + description: + This program finds a deformation field to warp a moving image onto a fixed image. 
The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://www.nitrc.org/projects/brainsdemonwarp. -version: 3.0.0 -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:BRAINSDemonWarp + version: 3.0.0 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:BRAINSDemonWarp -contributor: This tool was developed by Hans J. Johnson and Greg Harris. + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. + contributor: This tool was developed by Hans J. Johnson and Greg Harris. -""" + acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. + """ input_spec = BRAINSDemonWarpInputSpec output_spec = BRAINSDemonWarpOutputSpec _cmd = "BRAINSDemonWarp " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputCheckerboardVolume': 'outputCheckerboardVolume.nii', - 'outputDisplacementFieldVolume': 'outputDisplacementFieldVolume.nrrd' + "outputVolume": "outputVolume.nii", + "outputCheckerboardVolume": "outputCheckerboardVolume.nii", + "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } diff --git a/nipype/interfaces/slicer/registration/tests/__init__.py b/nipype/interfaces/slicer/registration/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/registration/tests/__init__.py +++ b/nipype/interfaces/slicer/registration/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_ACPCTransform.py b/nipype/interfaces/slicer/registration/tests/test_auto_ACPCTransform.py index 454e290102..58c7c49f32 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_ACPCTransform.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_ACPCTransform.py @@ -1,20 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import ACPCTransform def test_ACPCTransform_inputs(): input_map = dict( - acpc=dict(argstr='--acpc %s...', ), - args=dict(argstr='%s', ), - debugSwitch=dict(argstr='--debugSwitch ', ), + acpc=dict( + argstr="--acpc %s...", + ), + args=dict( + argstr="%s", + ), + debugSwitch=dict( + argstr="--debugSwitch ", + ), environ=dict( nohash=True, usedefault=True, ), - midline=dict(argstr='--midline %s...', ), + midline=dict( + argstr="--midline %s...", + ), outputTransform=dict( - argstr='--outputTransform %s', + argstr="--outputTransform %s", hash_files=False, ), ) @@ -23,8 +30,14 @@ def test_ACPCTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ACPCTransform_outputs(): - output_map = dict(outputTransform=dict(), ) + output_map = dict( + outputTransform=dict( + extensions=None, + ), + ) outputs = ACPCTransform.output_spec() for key, metadata in 
list(output_map.items()): diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSDemonWarp.py b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSDemonWarp.py index c631f9b96e..d1c8055df3 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSDemonWarp.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSDemonWarp.py @@ -1,100 +1,171 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import BRAINSDemonWarp def test_BRAINSDemonWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), arrayOfPyramidLevelIterations=dict( - argstr='--arrayOfPyramidLevelIterations %s', - sep=',', + argstr="--arrayOfPyramidLevelIterations %s", + sep=",", + ), + backgroundFillValue=dict( + argstr="--backgroundFillValue %d", ), - backgroundFillValue=dict(argstr='--backgroundFillValue %d', ), checkerboardPatternSubdivisions=dict( - argstr='--checkerboardPatternSubdivisions %s', - sep=',', + argstr="--checkerboardPatternSubdivisions %s", + sep=",", ), environ=dict( nohash=True, usedefault=True, ), - fixedBinaryVolume=dict(argstr='--fixedBinaryVolume %s', ), - fixedVolume=dict(argstr='--fixedVolume %s', ), - gradient_type=dict(argstr='--gradient_type %s', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), + fixedBinaryVolume=dict( + argstr="--fixedBinaryVolume %s", + extensions=None, + ), + fixedVolume=dict( + argstr="--fixedVolume %s", + extensions=None, + ), + gradient_type=dict( + argstr="--gradient_type %s", + ), + gui=dict( + argstr="--gui ", + ), + histogramMatch=dict( + argstr="--histogramMatch ", + ), initializeWithDisplacementField=dict( - argstr='--initializeWithDisplacementField %s', ), - initializeWithTransform=dict(argstr='--initializeWithTransform %s', ), - inputPixelType=dict(argstr='--inputPixelType %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - lowerThresholdForBOBF=dict(argstr='--lowerThresholdForBOBF %d', ), - maskProcessingMode=dict(argstr='--maskProcessingMode %s', ), - max_step_length=dict(argstr='--max_step_length %f', ), + argstr="--initializeWithDisplacementField %s", + extensions=None, + ), + initializeWithTransform=dict( + argstr="--initializeWithTransform %s", + extensions=None, + ), + inputPixelType=dict( + argstr="--inputPixelType %s", + ), + interpolationMode=dict( + argstr="--interpolationMode %s", + ), + lowerThresholdForBOBF=dict( + argstr="--lowerThresholdForBOBF %d", + ), + maskProcessingMode=dict( + argstr="--maskProcessingMode %s", + ), + max_step_length=dict( + argstr="--max_step_length %f", + ), medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', + argstr="--medianFilterSize %s", + sep=",", ), minimumFixedPyramid=dict( - argstr='--minimumFixedPyramid %s', - sep=',', + argstr="--minimumFixedPyramid %s", + sep=",", ), minimumMovingPyramid=dict( - argstr='--minimumMovingPyramid %s', - sep=',', + argstr="--minimumMovingPyramid %s", + sep=",", + ), + movingBinaryVolume=dict( + argstr="--movingBinaryVolume %s", + extensions=None, + ), + movingVolume=dict( + argstr="--movingVolume %s", + extensions=None, ), - movingBinaryVolume=dict(argstr='--movingBinaryVolume %s', ), - movingVolume=dict(argstr='--movingVolume %s', ), neighborhoodForBOBF=dict( - argstr='--neighborhoodForBOBF %s', - sep=',', + argstr="--neighborhoodForBOBF %s", + sep=",", ), numberOfBCHApproximationTerms=dict( - argstr='--numberOfBCHApproximationTerms %d', ), - 
numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfPyramidLevels=dict(argstr='--numberOfPyramidLevels %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--numberOfBCHApproximationTerms %d", + ), + numberOfHistogramBins=dict( + argstr="--numberOfHistogramBins %d", + ), + numberOfMatchPoints=dict( + argstr="--numberOfMatchPoints %d", + ), + numberOfPyramidLevels=dict( + argstr="--numberOfPyramidLevels %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputCheckerboardVolume=dict( - argstr='--outputCheckerboardVolume %s', + argstr="--outputCheckerboardVolume %s", hash_files=False, ), - outputDebug=dict(argstr='--outputDebug ', ), + outputDebug=dict( + argstr="--outputDebug ", + ), outputDisplacementFieldPrefix=dict( - argstr='--outputDisplacementFieldPrefix %s', ), + argstr="--outputDisplacementFieldPrefix %s", + ), outputDisplacementFieldVolume=dict( - argstr='--outputDisplacementFieldVolume %s', + argstr="--outputDisplacementFieldVolume %s", hash_files=False, ), - outputNormalized=dict(argstr='--outputNormalized ', ), - outputPixelType=dict(argstr='--outputPixelType %s', ), + outputNormalized=dict( + argstr="--outputNormalized ", + ), + outputPixelType=dict( + argstr="--outputPixelType %s", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - promptUser=dict(argstr='--promptUser ', ), - registrationFilterType=dict(argstr='--registrationFilterType %s', ), + promptUser=dict( + argstr="--promptUser ", + ), + registrationFilterType=dict( + argstr="--registrationFilterType %s", + ), seedForBOBF=dict( - argstr='--seedForBOBF %s', - sep=',', + argstr="--seedForBOBF %s", + sep=",", ), smoothDisplacementFieldSigma=dict( - argstr='--smoothDisplacementFieldSigma %f', ), - upFieldSmoothing=dict(argstr='--upFieldSmoothing %f', ), - upperThresholdForBOBF=dict(argstr='--upperThresholdForBOBF %d', ), - use_vanilla_dem=dict(argstr='--use_vanilla_dem ', ), + argstr="--smoothDisplacementFieldSigma %f", + ), + upFieldSmoothing=dict( + argstr="--upFieldSmoothing %f", + ), + upperThresholdForBOBF=dict( + argstr="--upperThresholdForBOBF %d", + ), + use_vanilla_dem=dict( + argstr="--use_vanilla_dem ", + ), ) inputs = BRAINSDemonWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSDemonWarp_outputs(): output_map = dict( - outputCheckerboardVolume=dict(), - outputDisplacementFieldVolume=dict(), - outputVolume=dict(), + outputCheckerboardVolume=dict( + extensions=None, + ), + outputDisplacementFieldVolume=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), ) outputs = BRAINSDemonWarp.output_spec() diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSFit.py b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSFit.py index bb62633d94..0d7b124635 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSFit.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSFit.py @@ -1,139 +1,248 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsfit import BRAINSFit def test_BRAINSFit_inputs(): input_map = dict( NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00=dict( - argstr='--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 ', ), + argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 ", + ), 
NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01=dict( - argstr='--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 ', ), + argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 ", + ), NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02=dict( - argstr='--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 ', ), - ROIAutoClosingSize=dict(argstr='--ROIAutoClosingSize %f', ), - ROIAutoDilateSize=dict(argstr='--ROIAutoDilateSize %f', ), - args=dict(argstr='%s', ), - backgroundFillValue=dict(argstr='--backgroundFillValue %f', ), + argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 ", + ), + ROIAutoClosingSize=dict( + argstr="--ROIAutoClosingSize %f", + ), + ROIAutoDilateSize=dict( + argstr="--ROIAutoDilateSize %f", + ), + args=dict( + argstr="%s", + ), + backgroundFillValue=dict( + argstr="--backgroundFillValue %f", + ), bsplineTransform=dict( - argstr='--bsplineTransform %s', + argstr="--bsplineTransform %s", hash_files=False, ), costFunctionConvergenceFactor=dict( - argstr='--costFunctionConvergenceFactor %f', ), - costMetric=dict(argstr='--costMetric %s', ), - debugLevel=dict(argstr='--debugLevel %d', ), + argstr="--costFunctionConvergenceFactor %f", + ), + costMetric=dict( + argstr="--costMetric %s", + ), + debugLevel=dict( + argstr="--debugLevel %d", + ), environ=dict( nohash=True, usedefault=True, ), - failureExitCode=dict(argstr='--failureExitCode %d', ), - fixedBinaryVolume=dict(argstr='--fixedBinaryVolume %s', ), - fixedVolume=dict(argstr='--fixedVolume %s', ), - fixedVolumeTimeIndex=dict(argstr='--fixedVolumeTimeIndex %d', ), - forceMINumberOfThreads=dict(argstr='--forceMINumberOfThreads %d', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), - initialTransform=dict(argstr='--initialTransform %s', ), - initializeTransformMode=dict(argstr='--initializeTransformMode %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), + failureExitCode=dict( + argstr="--failureExitCode %d", + ), + fixedBinaryVolume=dict( + argstr="--fixedBinaryVolume %s", + extensions=None, + ), + fixedVolume=dict( + argstr="--fixedVolume %s", + extensions=None, + ), + fixedVolumeTimeIndex=dict( + argstr="--fixedVolumeTimeIndex %d", + ), + forceMINumberOfThreads=dict( + argstr="--forceMINumberOfThreads %d", + ), + gui=dict( + argstr="--gui ", + ), + histogramMatch=dict( + argstr="--histogramMatch ", + ), + initialTransform=dict( + argstr="--initialTransform %s", + extensions=None, + ), + initializeTransformMode=dict( + argstr="--initializeTransformMode %s", + ), + interpolationMode=dict( + argstr="--interpolationMode %s", + ), linearTransform=dict( - argstr='--linearTransform %s', + argstr="--linearTransform %s", hash_files=False, ), maskInferiorCutOffFromCenter=dict( - argstr='--maskInferiorCutOffFromCenter %f', ), - maskProcessingMode=dict(argstr='--maskProcessingMode %s', ), - maxBSplineDisplacement=dict(argstr='--maxBSplineDisplacement %f', ), - maximumStepLength=dict(argstr='--maximumStepLength %f', ), + argstr="--maskInferiorCutOffFromCenter %f", + ), + maskProcessingMode=dict( + argstr="--maskProcessingMode %s", + ), + maxBSplineDisplacement=dict( + argstr="--maxBSplineDisplacement %f", + ), + maximumStepLength=dict( + argstr="--maximumStepLength %f", + ), medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', + argstr="--medianFilterSize %s", + sep=",", ), minimumStepLength=dict( - argstr='--minimumStepLength %s', - sep=',', + argstr="--minimumStepLength %s", + sep=",", + ), + movingBinaryVolume=dict( + argstr="--movingBinaryVolume %s", + extensions=None, + ), + movingVolume=dict( + argstr="--movingVolume 
%s", + extensions=None, + ), + movingVolumeTimeIndex=dict( + argstr="--movingVolumeTimeIndex %d", + ), + numberOfHistogramBins=dict( + argstr="--numberOfHistogramBins %d", ), - movingBinaryVolume=dict(argstr='--movingBinaryVolume %s', ), - movingVolume=dict(argstr='--movingVolume %s', ), - movingVolumeTimeIndex=dict(argstr='--movingVolumeTimeIndex %d', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), numberOfIterations=dict( - argstr='--numberOfIterations %s', - sep=',', + argstr="--numberOfIterations %s", + sep=",", + ), + numberOfMatchPoints=dict( + argstr="--numberOfMatchPoints %d", + ), + numberOfSamples=dict( + argstr="--numberOfSamples %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfSamples=dict(argstr='--numberOfSamples %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), outputFixedVolumeROI=dict( - argstr='--outputFixedVolumeROI %s', + argstr="--outputFixedVolumeROI %s", hash_files=False, ), outputMovingVolumeROI=dict( - argstr='--outputMovingVolumeROI %s', + argstr="--outputMovingVolumeROI %s", hash_files=False, ), outputTransform=dict( - argstr='--outputTransform %s', + argstr="--outputTransform %s", hash_files=False, ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - outputVolumePixelType=dict(argstr='--outputVolumePixelType %s', ), + outputVolumePixelType=dict( + argstr="--outputVolumePixelType %s", + ), permitParameterVariation=dict( - argstr='--permitParameterVariation %s', - sep=',', + argstr="--permitParameterVariation %s", + sep=",", ), projectedGradientTolerance=dict( - argstr='--projectedGradientTolerance %f', ), - promptUser=dict(argstr='--promptUser ', ), - relaxationFactor=dict(argstr='--relaxationFactor %f', ), - removeIntensityOutliers=dict(argstr='--removeIntensityOutliers %f', ), - reproportionScale=dict(argstr='--reproportionScale %f', ), - scaleOutputValues=dict(argstr='--scaleOutputValues ', ), - skewScale=dict(argstr='--skewScale %f', ), + argstr="--projectedGradientTolerance %f", + ), + promptUser=dict( + argstr="--promptUser ", + ), + relaxationFactor=dict( + argstr="--relaxationFactor %f", + ), + removeIntensityOutliers=dict( + argstr="--removeIntensityOutliers %f", + ), + reproportionScale=dict( + argstr="--reproportionScale %f", + ), + scaleOutputValues=dict( + argstr="--scaleOutputValues ", + ), + skewScale=dict( + argstr="--skewScale %f", + ), splineGridSize=dict( - argstr='--splineGridSize %s', - sep=',', + argstr="--splineGridSize %s", + sep=",", ), strippedOutputTransform=dict( - argstr='--strippedOutputTransform %s', + argstr="--strippedOutputTransform %s", hash_files=False, ), transformType=dict( - argstr='--transformType %s', - sep=',', + argstr="--transformType %s", + sep=",", + ), + translationScale=dict( + argstr="--translationScale %f", + ), + useAffine=dict( + argstr="--useAffine ", + ), + useBSpline=dict( + argstr="--useBSpline ", ), - translationScale=dict(argstr='--translationScale %f', ), - useAffine=dict(argstr='--useAffine ', ), - useBSpline=dict(argstr='--useBSpline ', ), useCachingOfBSplineWeightsMode=dict( - argstr='--useCachingOfBSplineWeightsMode %s', ), + argstr="--useCachingOfBSplineWeightsMode %s", + ), useExplicitPDFDerivativesMode=dict( - argstr='--useExplicitPDFDerivativesMode %s', ), - useRigid=dict(argstr='--useRigid ', ), - useScaleSkewVersor3D=dict(argstr='--useScaleSkewVersor3D ', ), - useScaleVersor3D=dict(argstr='--useScaleVersor3D ', 
), + argstr="--useExplicitPDFDerivativesMode %s", + ), + useRigid=dict( + argstr="--useRigid ", + ), + useScaleSkewVersor3D=dict( + argstr="--useScaleSkewVersor3D ", + ), + useScaleVersor3D=dict( + argstr="--useScaleVersor3D ", + ), writeOutputTransformInFloat=dict( - argstr='--writeOutputTransformInFloat ', ), - writeTransformOnFailure=dict(argstr='--writeTransformOnFailure ', ), + argstr="--writeOutputTransformInFloat ", + ), + writeTransformOnFailure=dict( + argstr="--writeTransformOnFailure ", + ), ) inputs = BRAINSFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSFit_outputs(): output_map = dict( - bsplineTransform=dict(), - linearTransform=dict(), - outputFixedVolumeROI=dict(), - outputMovingVolumeROI=dict(), - outputTransform=dict(), - outputVolume=dict(), - strippedOutputTransform=dict(), + bsplineTransform=dict( + extensions=None, + ), + linearTransform=dict( + extensions=None, + ), + outputFixedVolumeROI=dict( + extensions=None, + ), + outputMovingVolumeROI=dict( + extensions=None, + ), + outputTransform=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), + strippedOutputTransform=dict( + extensions=None, + ), ) outputs = BRAINSFit.output_spec() diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSResample.py b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSResample.py index 98ec5f4ff3..46d175da07 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSResample.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSResample.py @@ -1,40 +1,69 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..brainsresample import BRAINSResample def test_BRAINSResample_inputs(): input_map = dict( - args=dict(argstr='%s', ), - defaultValue=dict(argstr='--defaultValue %f', ), - deformationVolume=dict(argstr='--deformationVolume %s', ), + args=dict( + argstr="%s", + ), + defaultValue=dict( + argstr="--defaultValue %f", + ), + deformationVolume=dict( + argstr="--deformationVolume %s", + extensions=None, + ), environ=dict( nohash=True, usedefault=True, ), gridSpacing=dict( - argstr='--gridSpacing %s', - sep=',', + argstr="--gridSpacing %s", + sep=",", + ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + interpolationMode=dict( + argstr="--interpolationMode %s", + ), + inverseTransform=dict( + argstr="--inverseTransform ", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", ), - inputVolume=dict(argstr='--inputVolume %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - inverseTransform=dict(argstr='--inverseTransform ', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - pixelType=dict(argstr='--pixelType %s', ), - referenceVolume=dict(argstr='--referenceVolume %s', ), - warpTransform=dict(argstr='--warpTransform %s', ), + pixelType=dict( + argstr="--pixelType %s", + ), + referenceVolume=dict( + argstr="--referenceVolume %s", + extensions=None, + ), + warpTransform=dict( + argstr="--warpTransform %s", + extensions=None, + ), ) inputs = BRAINSResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSResample_outputs(): - output_map = 
dict(outputVolume=dict(), ) + output_map = dict( + outputVolume=dict( + extensions=None, + ), + ) outputs = BRAINSResample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_FiducialRegistration.py b/nipype/interfaces/slicer/registration/tests/test_auto_FiducialRegistration.py index 36d42fe8df..6b511790c7 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_FiducialRegistration.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_FiducialRegistration.py @@ -1,32 +1,49 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import FiducialRegistration def test_FiducialRegistration_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - fixedLandmarks=dict(argstr='--fixedLandmarks %s...', ), - movingLandmarks=dict(argstr='--movingLandmarks %s...', ), - outputMessage=dict(argstr='--outputMessage %s', ), - rms=dict(argstr='--rms %f', ), + fixedLandmarks=dict( + argstr="--fixedLandmarks %s...", + ), + movingLandmarks=dict( + argstr="--movingLandmarks %s...", + ), + outputMessage=dict( + argstr="--outputMessage %s", + ), + rms=dict( + argstr="--rms %f", + ), saveTransform=dict( - argstr='--saveTransform %s', + argstr="--saveTransform %s", hash_files=False, ), - transformType=dict(argstr='--transformType %s', ), + transformType=dict( + argstr="--transformType %s", + ), ) inputs = FiducialRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FiducialRegistration_outputs(): - output_map = dict(saveTransform=dict(), ) + output_map = dict( + saveTransform=dict( + extensions=None, + ), + ) outputs = FiducialRegistration.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_VBRAINSDemonWarp.py b/nipype/interfaces/slicer/registration/tests/test_auto_VBRAINSDemonWarp.py index b3255da1d3..af8bac8680 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_VBRAINSDemonWarp.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_VBRAINSDemonWarp.py @@ -1,92 +1,153 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import VBRAINSDemonWarp def test_VBRAINSDemonWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), arrayOfPyramidLevelIterations=dict( - argstr='--arrayOfPyramidLevelIterations %s', - sep=',', + argstr="--arrayOfPyramidLevelIterations %s", + sep=",", + ), + backgroundFillValue=dict( + argstr="--backgroundFillValue %d", ), - backgroundFillValue=dict(argstr='--backgroundFillValue %d', ), checkerboardPatternSubdivisions=dict( - argstr='--checkerboardPatternSubdivisions %s', - sep=',', + argstr="--checkerboardPatternSubdivisions %s", + sep=",", ), environ=dict( nohash=True, usedefault=True, ), - fixedBinaryVolume=dict(argstr='--fixedBinaryVolume %s', ), - fixedVolume=dict(argstr='--fixedVolume %s...', ), - gradient_type=dict(argstr='--gradient_type %s', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), + fixedBinaryVolume=dict( + argstr="--fixedBinaryVolume %s", + extensions=None, + ), + fixedVolume=dict( + argstr="--fixedVolume %s...", + ), + gradient_type=dict( + argstr="--gradient_type %s", + ), + 
gui=dict( + argstr="--gui ", + ), + histogramMatch=dict( + argstr="--histogramMatch ", + ), initializeWithDisplacementField=dict( - argstr='--initializeWithDisplacementField %s', ), - initializeWithTransform=dict(argstr='--initializeWithTransform %s', ), - inputPixelType=dict(argstr='--inputPixelType %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - lowerThresholdForBOBF=dict(argstr='--lowerThresholdForBOBF %d', ), - makeBOBF=dict(argstr='--makeBOBF ', ), - max_step_length=dict(argstr='--max_step_length %f', ), + argstr="--initializeWithDisplacementField %s", + extensions=None, + ), + initializeWithTransform=dict( + argstr="--initializeWithTransform %s", + extensions=None, + ), + inputPixelType=dict( + argstr="--inputPixelType %s", + ), + interpolationMode=dict( + argstr="--interpolationMode %s", + ), + lowerThresholdForBOBF=dict( + argstr="--lowerThresholdForBOBF %d", + ), + makeBOBF=dict( + argstr="--makeBOBF ", + ), + max_step_length=dict( + argstr="--max_step_length %f", + ), medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', + argstr="--medianFilterSize %s", + sep=",", ), minimumFixedPyramid=dict( - argstr='--minimumFixedPyramid %s', - sep=',', + argstr="--minimumFixedPyramid %s", + sep=",", ), minimumMovingPyramid=dict( - argstr='--minimumMovingPyramid %s', - sep=',', + argstr="--minimumMovingPyramid %s", + sep=",", + ), + movingBinaryVolume=dict( + argstr="--movingBinaryVolume %s", + extensions=None, + ), + movingVolume=dict( + argstr="--movingVolume %s...", ), - movingBinaryVolume=dict(argstr='--movingBinaryVolume %s', ), - movingVolume=dict(argstr='--movingVolume %s...', ), neighborhoodForBOBF=dict( - argstr='--neighborhoodForBOBF %s', - sep=',', + argstr="--neighborhoodForBOBF %s", + sep=",", ), numberOfBCHApproximationTerms=dict( - argstr='--numberOfBCHApproximationTerms %d', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfPyramidLevels=dict(argstr='--numberOfPyramidLevels %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--numberOfBCHApproximationTerms %d", + ), + numberOfHistogramBins=dict( + argstr="--numberOfHistogramBins %d", + ), + numberOfMatchPoints=dict( + argstr="--numberOfMatchPoints %d", + ), + numberOfPyramidLevels=dict( + argstr="--numberOfPyramidLevels %d", + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), outputCheckerboardVolume=dict( - argstr='--outputCheckerboardVolume %s', + argstr="--outputCheckerboardVolume %s", hash_files=False, ), - outputDebug=dict(argstr='--outputDebug ', ), + outputDebug=dict( + argstr="--outputDebug ", + ), outputDisplacementFieldPrefix=dict( - argstr='--outputDisplacementFieldPrefix %s', ), + argstr="--outputDisplacementFieldPrefix %s", + ), outputDisplacementFieldVolume=dict( - argstr='--outputDisplacementFieldVolume %s', + argstr="--outputDisplacementFieldVolume %s", hash_files=False, ), - outputNormalized=dict(argstr='--outputNormalized ', ), - outputPixelType=dict(argstr='--outputPixelType %s', ), + outputNormalized=dict( + argstr="--outputNormalized ", + ), + outputPixelType=dict( + argstr="--outputPixelType %s", + ), outputVolume=dict( - argstr='--outputVolume %s', + argstr="--outputVolume %s", hash_files=False, ), - promptUser=dict(argstr='--promptUser ', ), - registrationFilterType=dict(argstr='--registrationFilterType %s', ), + promptUser=dict( + argstr="--promptUser ", + ), + registrationFilterType=dict( + argstr="--registrationFilterType %s", + ), 
seedForBOBF=dict( - argstr='--seedForBOBF %s', - sep=',', + argstr="--seedForBOBF %s", + sep=",", ), smoothDisplacementFieldSigma=dict( - argstr='--smoothDisplacementFieldSigma %f', ), - upFieldSmoothing=dict(argstr='--upFieldSmoothing %f', ), - upperThresholdForBOBF=dict(argstr='--upperThresholdForBOBF %d', ), - use_vanilla_dem=dict(argstr='--use_vanilla_dem ', ), + argstr="--smoothDisplacementFieldSigma %f", + ), + upFieldSmoothing=dict( + argstr="--upFieldSmoothing %f", + ), + upperThresholdForBOBF=dict( + argstr="--upperThresholdForBOBF %d", + ), + use_vanilla_dem=dict( + argstr="--use_vanilla_dem ", + ), weightFactors=dict( - argstr='--weightFactors %s', - sep=',', + argstr="--weightFactors %s", + sep=",", ), ) inputs = VBRAINSDemonWarp.input_spec() @@ -94,11 +155,19 @@ def test_VBRAINSDemonWarp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VBRAINSDemonWarp_outputs(): output_map = dict( - outputCheckerboardVolume=dict(), - outputDisplacementFieldVolume=dict(), - outputVolume=dict(), + outputCheckerboardVolume=dict( + extensions=None, + ), + outputDisplacementFieldVolume=dict( + extensions=None, + ), + outputVolume=dict( + extensions=None, + ), ) outputs = VBRAINSDemonWarp.output_spec() diff --git a/nipype/interfaces/slicer/segmentation/__init__.py b/nipype/interfaces/slicer/segmentation/__init__.py index d966f07e27..4c7e36f0c1 100644 --- a/nipype/interfaces/slicer/segmentation/__init__.py +++ b/nipype/interfaces/slicer/segmentation/__init__.py @@ -1,5 +1,2 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -from .specialized import (RobustStatisticsSegmenter, EMSegmentCommandLine, - BRAINSROIAuto) +from .specialized import RobustStatisticsSegmenter, EMSegmentCommandLine, BRAINSROIAuto from .simpleregiongrowingsegmentation import SimpleRegionGrowingSegmentation diff --git a/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py b/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py index d466ccc1ac..4d652ffb0e 100644 --- a/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py +++ b/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py @@ -1,48 +1,52 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, +) class SimpleRegionGrowingSegmentationInputSpec(CommandLineInputSpec): smoothingIterations = traits.Int( - desc="Number of smoothing iterations", - argstr="--smoothingIterations %d") - timestep = traits.Float( - desc="Timestep for curvature flow", argstr="--timestep %f") + desc="Number of smoothing iterations", argstr="--smoothingIterations %d" + ) + timestep = traits.Float(desc="Timestep for curvature flow", argstr="--timestep %f") iterations = traits.Int( - desc="Number of iterations of region growing", - argstr="--iterations %d") + desc="Number of iterations of region growing", argstr="--iterations %d" + ) multiplier = traits.Float( desc="Number of standard deviations to include in intensity model", - argstr="--multiplier %f") + 
argstr="--multiplier %f", + ) neighborhood = traits.Int( - desc= - "The radius of the neighborhood over which to calculate intensity model", - argstr="--neighborhood %d") + desc="The radius of the neighborhood over which to calculate intensity model", + argstr="--neighborhood %d", + ) labelvalue = traits.Int( - desc= - "The integer value (0-255) to use for the segmentation results. This will determine the color of the segmentation that will be generated by the Region growing algorithm", - argstr="--labelvalue %d") + desc="The integer value (0-255) to use for the segmentation results. This will determine the color of the segmentation that will be generated by the Region growing algorithm", + argstr="--labelvalue %d", + ) seed = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Seed point(s) for region growing", - argstr="--seed %s...") + argstr="--seed %s...", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class SimpleRegionGrowingSegmentationOutputSpec(TraitedSpec): @@ -52,21 +56,20 @@ class SimpleRegionGrowingSegmentationOutputSpec(TraitedSpec): class SimpleRegionGrowingSegmentation(SEMLikeCommandLine): """title: Simple Region Growing Segmentation -category: Segmentation + category: Segmentation -description: A simple region growing segmentation algorithm based on intensity statistics. To create a list of fiducials (Seeds) for this algorithm, click on the tool bar icon of an arrow pointing to a starburst fiducial to enter the 'place a new object mode' and then use the fiducials module. This module uses the Slicer Command Line Interface (CLI) and the ITK filters CurvatureFlowImageFilter and ConfidenceConnectedImageFilter. + description: A simple region growing segmentation algorithm based on intensity statistics. To create a list of fiducials (Seeds) for this algorithm, click on the tool bar icon of an arrow pointing to a starburst fiducial to enter the 'place a new object mode' and then use the fiducials module. This module uses the Slicer Command Line Interface (CLI) and the ITK filters CurvatureFlowImageFilter and ConfidenceConnectedImageFilter. 
-version: 0.1.0.$Revision: 19904 $(alpha) + version: 0.1.0.$Revision: 19904 $(alpha) -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/SimpleRegionGrowingSegmentation + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/SimpleRegionGrowingSegmentation -contributor: Jim Miller (GE) + contributor: Jim Miller (GE) -acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium - -""" + acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium + """ input_spec = SimpleRegionGrowingSegmentationInputSpec output_spec = SimpleRegionGrowingSegmentationOutputSpec _cmd = "SimpleRegionGrowingSegmentation " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/segmentation/specialized.py b/nipype/interfaces/slicer/segmentation/specialized.py index fdfeb74e37..fc278b1da5 100644 --- a/nipype/interfaces/slicer/segmentation/specialized.py +++ b/nipype/interfaces/slicer/segmentation/specialized.py @@ -1,214 +1,218 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + InputMultiPath, +) class RobustStatisticsSegmenterInputSpec(CommandLineInputSpec): expectedVolume = traits.Float( desc="The approximate volume of the object, in mL.", - argstr="--expectedVolume %f") + argstr="--expectedVolume %f", + ) intensityHomogeneity = traits.Float( - desc= - "What is the homogeneity of intensity within the object? Given constant intensity at 1.0 score and extreme fluctuating intensity at 0.", - argstr="--intensityHomogeneity %f") + desc="What is the homogeneity of intensity within the object? 
Given constant intensity at 1.0 score and extreme fluctuating intensity at 0.", + argstr="--intensityHomogeneity %f", + ) curvatureWeight = traits.Float( - desc= - "Given sphere 1.0 score and extreme rough bounday/surface 0 score, what is the expected smoothness of the object?", - argstr="--curvatureWeight %f") + desc="Given sphere 1.0 score and extreme rough boundary/surface 0 score, what is the expected smoothness of the object?", + argstr="--curvatureWeight %f", + ) labelValue = traits.Int( - desc="Label value of the output image", argstr="--labelValue %d") + desc="Label value of the output image", argstr="--labelValue %d" + ) maxRunningTime = traits.Float( desc="The program will stop if this time is reached.", - argstr="--maxRunningTime %f") + argstr="--maxRunningTime %f", + ) originalImageFileName = File( - position=-3, - desc="Original image to be segmented", - exists=True, - argstr="%s") + position=-3, desc="Original image to be segmented", exists=True, argstr="%s" + ) labelImageFileName = File( - position=-2, - desc="Label image for initialization", - exists=True, - argstr="%s") + position=-2, desc="Label image for initialization", exists=True, argstr="%s" + ) segmentedImageFileName = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Segmented image", - argstr="%s") + argstr="%s", + ) class RobustStatisticsSegmenterOutputSpec(TraitedSpec): - segmentedImageFileName = File( - position=-1, desc="Segmented image", exists=True) + segmentedImageFileName = File(position=-1, desc="Segmented image", exists=True) class RobustStatisticsSegmenter(SEMLikeCommandLine): """title: Robust Statistics Segmenter -category: Segmentation.Specialized - -description: Active contour segmentation using robust statistic. + category: Segmentation.Specialized -version: 1.0 + description: Active contour segmentation using robust statistics.
-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/RobustStatisticsSegmenter + version: 1.0 -contributor: Yi Gao (gatech), Allen Tannenbaum (gatech), Ron Kikinis (SPL, BWH) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/RobustStatisticsSegmenter -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health + contributor: Yi Gao (gatech), Allen Tannenbaum (gatech), Ron Kikinis (SPL, BWH) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health + """ input_spec = RobustStatisticsSegmenterInputSpec output_spec = RobustStatisticsSegmenterOutputSpec _cmd = "RobustStatisticsSegmenter " - _outputs_filenames = { - 'segmentedImageFileName': 'segmentedImageFileName.nii' - } + _outputs_filenames = {"segmentedImageFileName": "segmentedImageFileName.nii"} class EMSegmentCommandLineInputSpec(CommandLineInputSpec): mrmlSceneFileName = File( desc="Active MRML scene that contains EMSegment algorithm parameters.", exists=True, - argstr="--mrmlSceneFileName %s") + argstr="--mrmlSceneFileName %s", + ) resultVolumeFileName = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The file name that the segmentation result volume will be written to.", - argstr="--resultVolumeFileName %s") + desc="The file name that the segmentation result volume will be written to.", + argstr="--resultVolumeFileName %s", + ) targetVolumeFileNames = InputMultiPath( File(exists=True), - desc= - "File names of target volumes (to be segmented). The number of target images must be equal to the number of target images specified in the parameter set, and these images must be spatially aligned.", - argstr="--targetVolumeFileNames %s...") + desc="File names of target volumes (to be segmented). The number of target images must be equal to the number of target images specified in the parameter set, and these images must be spatially aligned.", + argstr="--targetVolumeFileNames %s...", + ) intermediateResultsDirectory = Directory( - desc= - "Directory where EMSegmenter will write intermediate data (e.g., aligned atlas data).", + desc="Directory where EMSegmenter will write intermediate data (e.g., aligned atlas data).", exists=True, - argstr="--intermediateResultsDirectory %s") + argstr="--intermediateResultsDirectory %s", + ) parametersMRMLNodeName = traits.Str( - desc= - "The name of the EMSegment parameters node within the active MRML scene. Leave blank for default.", - argstr="--parametersMRMLNodeName %s") + desc="The name of the EMSegment parameters node within the active MRML scene. Leave blank for default.", + argstr="--parametersMRMLNodeName %s", + ) disableMultithreading = traits.Int( - desc= - "Disable multithreading for the EMSegmenter algorithm only! Preprocessing might still run in multi-threaded mode. -1: Do not overwrite default value. 0: Disable. 1: Enable.", - argstr="--disableMultithreading %d") + desc="Disable multithreading for the EMSegmenter algorithm only! Preprocessing might still run in multi-threaded mode. -1: Do not overwrite default value. 0: Disable. 1: Enable.", + argstr="--disableMultithreading %d", + ) dontUpdateIntermediateData = traits.Int( - desc= - "Disable update of intermediate results. -1: Do not overwrite default value. 0: Disable. 1: Enable.", - argstr="--dontUpdateIntermediateData %d") + desc="Disable update of intermediate results. 
-1: Do not overwrite default value. 0: Disable. 1: Enable.", + argstr="--dontUpdateIntermediateData %d", + ) verbose = traits.Bool(desc="Enable verbose output.", argstr="--verbose ") loadTargetCentered = traits.Bool( - desc="Read target files centered.", argstr="--loadTargetCentered ") + desc="Read target files centered.", argstr="--loadTargetCentered " + ) loadAtlasNonCentered = traits.Bool( - desc="Read atlas files non-centered.", - argstr="--loadAtlasNonCentered ") + desc="Read atlas files non-centered.", argstr="--loadAtlasNonCentered " + ) taskPreProcessingSetting = traits.Str( desc="Specifies the different task parameter. Leave blank for default.", - argstr="--taskPreProcessingSetting %s") + argstr="--taskPreProcessingSetting %s", + ) keepTempFiles = traits.Bool( - desc= - "If flag is set then at the end of command the temporary files are not removed", - argstr="--keepTempFiles ") + desc="If flag is set then at the end of command the temporary files are not removed", + argstr="--keepTempFiles ", + ) resultStandardVolumeFileName = File( - desc= - "Used for testing. Compare segmentation results to this image and return EXIT_FAILURE if they do not match.", + desc="Used for testing. Compare segmentation results to this image and return EXIT_FAILURE if they do not match.", exists=True, - argstr="--resultStandardVolumeFileName %s") + argstr="--resultStandardVolumeFileName %s", + ) dontWriteResults = traits.Bool( - desc= - "Used for testing. Don't actually write the resulting labelmap to disk.", - argstr="--dontWriteResults ") + desc="Used for testing. Don't actually write the resulting labelmap to disk.", + argstr="--dontWriteResults ", + ) generateEmptyMRMLSceneAndQuit = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Used for testing. Only write a scene with default mrml parameters.", - argstr="--generateEmptyMRMLSceneAndQuit %s") + desc="Used for testing. Only write a scene with default mrml parameters.", + argstr="--generateEmptyMRMLSceneAndQuit %s", + ) resultMRMLSceneFileName = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Write out the MRML scene after command line substitutions have been made.", - argstr="--resultMRMLSceneFileName %s") + desc="Write out the MRML scene after command line substitutions have been made.", + argstr="--resultMRMLSceneFileName %s", + ) disableCompression = traits.Bool( desc="Don't use compression when writing result image to disk.", - argstr="--disableCompression ") + argstr="--disableCompression ", + ) atlasVolumeFileNames = InputMultiPath( File(exists=True), - desc= - "Use an alternative atlas to the one that is specified by the mrml file - note the order matters ! ", - argstr="--atlasVolumeFileNames %s...") + desc="Use an alternative atlas to the one that is specified by the mrml file - note the order matters ! ", + argstr="--atlasVolumeFileNames %s...", + ) registrationPackage = traits.Str( - desc= - "specify the registration package for preprocessing (CMTK or BRAINS or PLASTIMATCH or DEMONS)", - argstr="--registrationPackage %s") + desc="specify the registration package for preprocessing (CMTK or BRAINS or PLASTIMATCH or DEMONS)", + argstr="--registrationPackage %s", + ) registrationAffineType = traits.Int( - desc= - "specify the accuracy of the affine registration. -2: Do not overwrite default, -1: Test, 0: Disable, 1: Fast, 2: Accurate", - argstr="--registrationAffineType %d") + desc="specify the accuracy of the affine registration. 
-2: Do not overwrite default, -1: Test, 0: Disable, 1: Fast, 2: Accurate", + argstr="--registrationAffineType %d", + ) registrationDeformableType = traits.Int( - desc= - "specify the accuracy of the deformable registration. -2: Do not overwrite default, -1: Test, 0: Disable, 1: Fast, 2: Accurate", - argstr="--registrationDeformableType %d") + desc="specify the accuracy of the deformable registration. -2: Do not overwrite default, -1: Test, 0: Disable, 1: Fast, 2: Accurate", + argstr="--registrationDeformableType %d", + ) class EMSegmentCommandLineOutputSpec(TraitedSpec): resultVolumeFileName = File( - desc= - "The file name that the segmentation result volume will be written to.", - exists=True) + desc="The file name that the segmentation result volume will be written to.", + exists=True, + ) generateEmptyMRMLSceneAndQuit = File( - desc= - "Used for testing. Only write a scene with default mrml parameters.", - exists=True) + desc="Used for testing. Only write a scene with default mrml parameters.", + exists=True, + ) resultMRMLSceneFileName = File( - desc= - "Write out the MRML scene after command line substitutions have been made.", - exists=True) + desc="Write out the MRML scene after command line substitutions have been made.", + exists=True, + ) class EMSegmentCommandLine(SEMLikeCommandLine): """title: - EMSegment Command-line - + EMSegment Command-line -category: - Segmentation.Specialized + category: + Segmentation.Specialized -description: - This module is used to simplify the process of segmenting large collections of images by providing a command line interface to the EMSegment algorithm for script and batch processing. + description: + This module is used to simplify the process of segmenting large collections of images by providing a command line interface to the EMSegment algorithm for script and batch processing. -documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.0/EMSegment_Command-line -contributor: Sebastien Barre, Brad Davis, Kilian Pohl, Polina Golland, Yumin Yuan, Daniel Haehn + documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.0/EMSegment_Command-line -acknowledgements: Many people and organizations have contributed to the funding, design, and development of the EMSegment algorithm and its various implementations. + contributor: Sebastien Barre, Brad Davis, Kilian Pohl, Polina Golland, Yumin Yuan, Daniel Haehn + acknowledgements: Many people and organizations have contributed to the funding, design, and development of the EMSegment algorithm and its various implementations. 
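The _outputs_filenames mapping that follows is what backs the traits.Either(traits.Bool, File(), hash_files=False, ...) outputs: assigning True asks SEMLikeCommandLine to substitute the canned default filename, while assigning a string uses that path verbatim. A minimal sketch of the behaviour as we read it, with hypothetical paths:

from nipype.interfaces.slicer.segmentation import EMSegmentCommandLine

em = EMSegmentCommandLine()
em.inputs.mrmlSceneFileName = "params.mrml"  # existing MRML scene holding the EMSegment parameters
em.inputs.resultVolumeFileName = True        # True -> default name from _outputs_filenames
print(em.cmdline)                            # --resultVolumeFileName resolves to resultVolumeFileName.mhd

The default name is resolved against the working directory at run time, which is one reason the generated output tests assert only metadata (such as extensions=None) rather than concrete paths.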
-""" + """ input_spec = EMSegmentCommandLineInputSpec output_spec = EMSegmentCommandLineOutputSpec _cmd = "EMSegmentCommandLine " _outputs_filenames = { - 'generateEmptyMRMLSceneAndQuit': 'generateEmptyMRMLSceneAndQuit', - 'resultMRMLSceneFileName': 'resultMRMLSceneFileName', - 'resultVolumeFileName': 'resultVolumeFileName.mhd' + "generateEmptyMRMLSceneAndQuit": "generateEmptyMRMLSceneAndQuit", + "resultMRMLSceneFileName": "resultMRMLSceneFileName", + "resultVolumeFileName": "resultVolumeFileName.mhd", } @@ -216,34 +220,38 @@ class BRAINSROIAutoInputSpec(CommandLineInputSpec): inputVolume = File( desc="The input image for finding the largest region filled mask.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputROIMaskVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The ROI automatically found from the input image.", - argstr="--outputROIMaskVolume %s") + argstr="--outputROIMaskVolume %s", + ) outputClippedVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, desc="The inputVolume clipped to the region of the brain mask.", - argstr="--outputClippedVolumeROI %s") + argstr="--outputClippedVolumeROI %s", + ) otsuPercentileThreshold = traits.Float( desc="Parameter to the Otsu threshold algorithm.", - argstr="--otsuPercentileThreshold %f") + argstr="--otsuPercentileThreshold %f", + ) thresholdCorrectionFactor = traits.Float( - desc= - "A factor to scale the Otsu algorithm's result threshold, in case clipping mangles the image.", - argstr="--thresholdCorrectionFactor %f") + desc="A factor to scale the Otsu algorithm's result threshold, in case clipping mangles the image.", + argstr="--thresholdCorrectionFactor %f", + ) closingSize = traits.Float( - desc= - "The Closing Size (in millimeters) for largest connected filled mask. This value is divided by image spacing and rounded to the next largest voxel number.", - argstr="--closingSize %f") + desc="The Closing Size (in millimeters) for largest connected filled mask. This value is divided by image spacing and rounded to the next largest voxel number.", + argstr="--closingSize %f", + ) ROIAutoDilateSize = traits.Float( - desc= - "This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", - argstr="--ROIAutoDilateSize %f") + desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. 
A setting of 10mm has been shown to help regularize a BSpline registration type so that there are some background constraints to match the edges of the head better.", + argstr="--ROIAutoDilateSize %f", + ) outputVolumePixelType = traits.Enum( "float", "short", @@ -251,44 +259,45 @@ class BRAINSROIAutoInputSpec(CommandLineInputSpec): "int", "uint", "uchar", - desc= - "The output image Pixel Type is the scalar datatype for representation of the Output Volume.", - argstr="--outputVolumePixelType %s") + desc="The output image Pixel Type is the scalar datatype for representation of the Output Volume.", + argstr="--outputVolumePixelType %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSROIAutoOutputSpec(TraitedSpec): outputROIMaskVolume = File( - desc="The ROI automatically found from the input image.", exists=True) + desc="The ROI automatically found from the input image.", exists=True + ) outputClippedVolumeROI = File( - desc="The inputVolume clipped to the region of the brain mask.", - exists=True) + desc="The inputVolume clipped to the region of the brain mask.", exists=True + ) class BRAINSROIAuto(SEMLikeCommandLine): """title: Foreground masking (BRAINS) -category: Segmentation.Specialized - -description: This tool uses a combination of otsu thresholding and a closing operations to identify the most prominant foreground region in an image. + category: Segmentation.Specialized + description: This tool uses a combination of otsu thresholding and closing operations to identify the most prominent foreground region in an image. -version: 2.4.1 -license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt + version: 2.4.1 -contributor: Hans J. Johnson, hans-johnson -at- uiowa.edu, http://wwww.psychiatry.uiowa.edu + license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt -acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5), fedorov -at- bwh.harvard.edu (Slicer integration); (1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard) + contributor: Hans J.
Johnson, hans-johnson -at- uiowa.edu, http://wwww.psychiatry.uiowa.edu -""" + acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5), fedorov -at- bwh.harvard.edu (Slicer integration); (1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard) + """ input_spec = BRAINSROIAutoInputSpec output_spec = BRAINSROIAutoOutputSpec _cmd = "BRAINSROIAuto " _outputs_filenames = { - 'outputROIMaskVolume': 'outputROIMaskVolume.nii', - 'outputClippedVolumeROI': 'outputClippedVolumeROI.nii' + "outputROIMaskVolume": "outputROIMaskVolume.nii", + "outputClippedVolumeROI": "outputClippedVolumeROI.nii", } diff --git a/nipype/interfaces/slicer/segmentation/tests/__init__.py b/nipype/interfaces/slicer/segmentation/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/segmentation/tests/__init__.py +++ b/nipype/interfaces/slicer/segmentation/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/segmentation/tests/test_auto_BRAINSROIAuto.py b/nipype/interfaces/slicer/segmentation/tests/test_auto_BRAINSROIAuto.py index 89863fb730..8990caaf1a 100644 --- a/nipype/interfaces/slicer/segmentation/tests/test_auto_BRAINSROIAuto.py +++ b/nipype/interfaces/slicer/segmentation/tests/test_auto_BRAINSROIAuto.py @@ -1,41 +1,62 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import BRAINSROIAuto def test_BRAINSROIAuto_inputs(): input_map = dict( - ROIAutoDilateSize=dict(argstr='--ROIAutoDilateSize %f', ), - args=dict(argstr='%s', ), - closingSize=dict(argstr='--closingSize %f', ), + ROIAutoDilateSize=dict( + argstr="--ROIAutoDilateSize %f", + ), + args=dict( + argstr="%s", + ), + closingSize=dict( + argstr="--closingSize %f", + ), environ=dict( nohash=True, usedefault=True, ), - inputVolume=dict(argstr='--inputVolume %s', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - otsuPercentileThreshold=dict(argstr='--otsuPercentileThreshold %f', ), + inputVolume=dict( + argstr="--inputVolume %s", + extensions=None, + ), + numberOfThreads=dict( + argstr="--numberOfThreads %d", + ), + otsuPercentileThreshold=dict( + argstr="--otsuPercentileThreshold %f", + ), outputClippedVolumeROI=dict( - argstr='--outputClippedVolumeROI %s', + argstr="--outputClippedVolumeROI %s", hash_files=False, ), outputROIMaskVolume=dict( - argstr='--outputROIMaskVolume %s', + argstr="--outputROIMaskVolume %s", hash_files=False, ), - outputVolumePixelType=dict(argstr='--outputVolumePixelType %s', ), + outputVolumePixelType=dict( + argstr="--outputVolumePixelType %s", + ), thresholdCorrectionFactor=dict( - argstr='--thresholdCorrectionFactor %f', ), + argstr="--thresholdCorrectionFactor %f", + ), ) inputs = BRAINSROIAuto.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSROIAuto_outputs(): output_map = dict( - outputClippedVolumeROI=dict(), - outputROIMaskVolume=dict(), + outputClippedVolumeROI=dict( + extensions=None, + ), + outputROIMaskVolume=dict( + extensions=None, + ), ) outputs = BRAINSROIAuto.output_spec() diff --git a/nipype/interfaces/slicer/segmentation/tests/test_auto_EMSegmentCommandLine.py 
b/nipype/interfaces/slicer/segmentation/tests/test_auto_EMSegmentCommandLine.py index 09b0b1300f..2ed2595d4e 100644 --- a/nipype/interfaces/slicer/segmentation/tests/test_auto_EMSegmentCommandLine.py +++ b/nipype/interfaces/slicer/segmentation/tests/test_auto_EMSegmentCommandLine.py @@ -1,61 +1,103 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import EMSegmentCommandLine def test_EMSegmentCommandLine_inputs(): input_map = dict( - args=dict(argstr='%s', ), - atlasVolumeFileNames=dict(argstr='--atlasVolumeFileNames %s...', ), - disableCompression=dict(argstr='--disableCompression ', ), - disableMultithreading=dict(argstr='--disableMultithreading %d', ), + args=dict( + argstr="%s", + ), + atlasVolumeFileNames=dict( + argstr="--atlasVolumeFileNames %s...", + ), + disableCompression=dict( + argstr="--disableCompression ", + ), + disableMultithreading=dict( + argstr="--disableMultithreading %d", + ), dontUpdateIntermediateData=dict( - argstr='--dontUpdateIntermediateData %d', ), - dontWriteResults=dict(argstr='--dontWriteResults ', ), + argstr="--dontUpdateIntermediateData %d", + ), + dontWriteResults=dict( + argstr="--dontWriteResults ", + ), environ=dict( nohash=True, usedefault=True, ), generateEmptyMRMLSceneAndQuit=dict( - argstr='--generateEmptyMRMLSceneAndQuit %s', + argstr="--generateEmptyMRMLSceneAndQuit %s", hash_files=False, ), intermediateResultsDirectory=dict( - argstr='--intermediateResultsDirectory %s', ), - keepTempFiles=dict(argstr='--keepTempFiles ', ), - loadAtlasNonCentered=dict(argstr='--loadAtlasNonCentered ', ), - loadTargetCentered=dict(argstr='--loadTargetCentered ', ), - mrmlSceneFileName=dict(argstr='--mrmlSceneFileName %s', ), - parametersMRMLNodeName=dict(argstr='--parametersMRMLNodeName %s', ), - registrationAffineType=dict(argstr='--registrationAffineType %d', ), + argstr="--intermediateResultsDirectory %s", + ), + keepTempFiles=dict( + argstr="--keepTempFiles ", + ), + loadAtlasNonCentered=dict( + argstr="--loadAtlasNonCentered ", + ), + loadTargetCentered=dict( + argstr="--loadTargetCentered ", + ), + mrmlSceneFileName=dict( + argstr="--mrmlSceneFileName %s", + extensions=None, + ), + parametersMRMLNodeName=dict( + argstr="--parametersMRMLNodeName %s", + ), + registrationAffineType=dict( + argstr="--registrationAffineType %d", + ), registrationDeformableType=dict( - argstr='--registrationDeformableType %d', ), - registrationPackage=dict(argstr='--registrationPackage %s', ), + argstr="--registrationDeformableType %d", + ), + registrationPackage=dict( + argstr="--registrationPackage %s", + ), resultMRMLSceneFileName=dict( - argstr='--resultMRMLSceneFileName %s', + argstr="--resultMRMLSceneFileName %s", hash_files=False, ), resultStandardVolumeFileName=dict( - argstr='--resultStandardVolumeFileName %s', ), + argstr="--resultStandardVolumeFileName %s", + extensions=None, + ), resultVolumeFileName=dict( - argstr='--resultVolumeFileName %s', + argstr="--resultVolumeFileName %s", hash_files=False, ), - targetVolumeFileNames=dict(argstr='--targetVolumeFileNames %s...', ), + targetVolumeFileNames=dict( + argstr="--targetVolumeFileNames %s...", + ), taskPreProcessingSetting=dict( - argstr='--taskPreProcessingSetting %s', ), - verbose=dict(argstr='--verbose ', ), + argstr="--taskPreProcessingSetting %s", + ), + verbose=dict( + argstr="--verbose ", + ), ) inputs = EMSegmentCommandLine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_EMSegmentCommandLine_outputs(): output_map = dict( - generateEmptyMRMLSceneAndQuit=dict(), - resultMRMLSceneFileName=dict(), - resultVolumeFileName=dict(), + generateEmptyMRMLSceneAndQuit=dict( + extensions=None, + ), + resultMRMLSceneFileName=dict( + extensions=None, + ), + resultVolumeFileName=dict( + extensions=None, + ), ) outputs = EMSegmentCommandLine.output_spec() diff --git a/nipype/interfaces/slicer/segmentation/tests/test_auto_RobustStatisticsSegmenter.py b/nipype/interfaces/slicer/segmentation/tests/test_auto_RobustStatisticsSegmenter.py index ed46177df0..4bd05c6fc2 100644 --- a/nipype/interfaces/slicer/segmentation/tests/test_auto_RobustStatisticsSegmenter.py +++ b/nipype/interfaces/slicer/segmentation/tests/test_auto_RobustStatisticsSegmenter.py @@ -1,30 +1,43 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..specialized import RobustStatisticsSegmenter def test_RobustStatisticsSegmenter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - curvatureWeight=dict(argstr='--curvatureWeight %f', ), + args=dict( + argstr="%s", + ), + curvatureWeight=dict( + argstr="--curvatureWeight %f", + ), environ=dict( nohash=True, usedefault=True, ), - expectedVolume=dict(argstr='--expectedVolume %f', ), - intensityHomogeneity=dict(argstr='--intensityHomogeneity %f', ), + expectedVolume=dict( + argstr="--expectedVolume %f", + ), + intensityHomogeneity=dict( + argstr="--intensityHomogeneity %f", + ), labelImageFileName=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - labelValue=dict(argstr='--labelValue %d', ), - maxRunningTime=dict(argstr='--maxRunningTime %f', ), + labelValue=dict( + argstr="--labelValue %d", + ), + maxRunningTime=dict( + argstr="--maxRunningTime %f", + ), originalImageFileName=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), segmentedImageFileName=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -34,8 +47,15 @@ def test_RobustStatisticsSegmenter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RobustStatisticsSegmenter_outputs(): - output_map = dict(segmentedImageFileName=dict(position=-1, ), ) + output_map = dict( + segmentedImageFileName=dict( + extensions=None, + position=-1, + ), + ) outputs = RobustStatisticsSegmenter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/segmentation/tests/test_auto_SimpleRegionGrowingSegmentation.py b/nipype/interfaces/slicer/segmentation/tests/test_auto_SimpleRegionGrowingSegmentation.py index 3c5e2124d0..9a308ec959 100644 --- a/nipype/interfaces/slicer/segmentation/tests/test_auto_SimpleRegionGrowingSegmentation.py +++ b/nipype/interfaces/slicer/segmentation/tests/test_auto_SimpleRegionGrowingSegmentation.py @@ -1,39 +1,62 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..simpleregiongrowingsegmentation import SimpleRegionGrowingSegmentation def test_SimpleRegionGrowingSegmentation_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - iterations=dict(argstr='--iterations %d', ), - labelvalue=dict(argstr='--labelvalue %d', ), - multiplier=dict(argstr='--multiplier 
%f', ), - neighborhood=dict(argstr='--neighborhood %d', ), + iterations=dict( + argstr="--iterations %d", + ), + labelvalue=dict( + argstr="--labelvalue %d", + ), + multiplier=dict( + argstr="--multiplier %f", + ), + neighborhood=dict( + argstr="--neighborhood %d", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - seed=dict(argstr='--seed %s...', ), - smoothingIterations=dict(argstr='--smoothingIterations %d', ), - timestep=dict(argstr='--timestep %f', ), + seed=dict( + argstr="--seed %s...", + ), + smoothingIterations=dict( + argstr="--smoothingIterations %d", + ), + timestep=dict( + argstr="--timestep %f", + ), ) inputs = SimpleRegionGrowingSegmentation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SimpleRegionGrowingSegmentation_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = SimpleRegionGrowingSegmentation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/surface.py b/nipype/interfaces/slicer/surface.py index 6a1dfe2cc0..3993e5028d 100644 --- a/nipype/interfaces/slicer/surface.py +++ b/nipype/interfaces/slicer/surface.py @@ -1,22 +1,23 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath -import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, + InputMultiPath, + OutputMultiPath, +) class MergeModelsInputSpec(CommandLineInputSpec): Model1 = File(position=-3, desc="Model", exists=True, argstr="%s") Model2 = File(position=-2, desc="Model", exists=True, argstr="%s") ModelOutput = traits.Either( - traits.Bool, - File(), - position=-1, - hash_files=False, - desc="Model", - argstr="%s") + traits.Bool, File(), position=-1, hash_files=False, desc="Model", argstr="%s" + ) class MergeModelsOutputSpec(TraitedSpec): @@ -26,30 +27,28 @@ class MergeModelsOutputSpec(TraitedSpec): class MergeModels(SEMLikeCommandLine): """title: Merge Models -category: Surface Models - -description: Merge the polydata from two input models and output a new model with the added polydata. Uses the vtkAppendPolyData filter. Works on .vtp and .vtk surface files. + category: Surface Models -version: $Revision$ + description: Merge the polydata from two input models and output a new model with the added polydata. Uses the vtkAppendPolyData filter. Works on .vtp and .vtk surface files. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/MergeModels + version: $Revision$ -contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH), Daniel Haehn (SPL, BWH) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/MergeModels -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH), Daniel Haehn (SPL, BWH) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + """ input_spec = MergeModelsInputSpec output_spec = MergeModelsOutputSpec _cmd = "MergeModels " - _outputs_filenames = {'ModelOutput': 'ModelOutput.vtk'} + _outputs_filenames = {"ModelOutput": "ModelOutput.vtk"} class ModelToLabelMapInputSpec(CommandLineInputSpec): distance = traits.Float(desc="Sample distance", argstr="--distance %f") - InputVolume = File( - position=-3, desc="Input volume", exists=True, argstr="%s") + InputVolume = File(position=-3, desc="Input volume", exists=True, argstr="%s") surface = File(position=-2, desc="Model", exists=True, argstr="%s") OutputVolume = traits.Either( traits.Bool, @@ -57,7 +56,8 @@ class ModelToLabelMapInputSpec(CommandLineInputSpec): position=-1, hash_files=False, desc="The label volume", - argstr="%s") + argstr="%s", + ) class ModelToLabelMapOutputSpec(TraitedSpec): @@ -67,24 +67,23 @@ class ModelToLabelMap(SEMLikeCommandLine): """title: Model To Label Map -category: Surface Models - -description: Intersects an input model with an reference volume and produces an output label map. + category: Surface Models -version: 0.1.0.$Revision: 8643 $(alpha) + description: Intersects an input model with a reference volume and produces an output label map. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/PolyDataToLabelMap + version: 0.1.0.$Revision: 8643 $(alpha) -contributor: Nicole Aucoin (SPL, BWH), Xiaodong Tao (GE) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/PolyDataToLabelMap -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Nicole Aucoin (SPL, BWH), Xiaodong Tao (GE) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + """ input_spec = ModelToLabelMapInputSpec output_spec = ModelToLabelMapOutputSpec _cmd = "ModelToLabelMap " - _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} + _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} class GrayscaleModelMakerInputSpec(CommandLineInputSpec): @@ -92,81 +91,82 @@ class GrayscaleModelMakerInputSpec(CommandLineInputSpec): position=-2, desc="Volume containing the input grayscale data.", exists=True, - argstr="%s") + argstr="%s", + ) OutputGeometry = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output that contains geometry model.", - argstr="%s") + argstr="%s", + ) threshold = traits.Float( - desc= - "Grayscale threshold of isosurface.
The resulting surface of triangles separates the volume into voxels that lie above (inside) and below (outside) the threshold.", + argstr="--threshold %f", + ) name = traits.Str(desc="Name to use for this model.", argstr="--name %s") smooth = traits.Int( desc="Number of smoothing iterations. If 0, no smoothing will be done.", - argstr="--smooth %d") + argstr="--smooth %d", + ) decimate = traits.Float( - desc= - "Target reduction during decimation, as a decimal percentage reduction in the number of polygons. If 0, no decimation will be done.", - argstr="--decimate %f") + desc="Target reduction during decimation, as a decimal percentage reduction in the number of polygons. If 0, no decimation will be done.", + argstr="--decimate %f", + ) splitnormals = traits.Bool( - desc= - "Splitting normals is useful for visualizing sharp features. However it creates holes in surfaces which affect measurements", - argstr="--splitnormals ") + desc="Splitting normals is useful for visualizing sharp features. However it creates holes in surfaces which affect measurements", + argstr="--splitnormals ", + ) pointnormals = traits.Bool( - desc= - "Calculate the point normals? Calculated point normals make the surface appear smooth. Without point normals, the surface will appear faceted.", - argstr="--pointnormals ") + desc="Calculate the point normals? Calculated point normals make the surface appear smooth. Without point normals, the surface will appear faceted.", + argstr="--pointnormals ", + ) class GrayscaleModelMakerOutputSpec(TraitedSpec): OutputGeometry = File( - position=-1, desc="Output that contains geometry model.", exists=True) + position=-1, desc="Output that contains geometry model.", exists=True + ) class GrayscaleModelMaker(SEMLikeCommandLine): """title: Grayscale Model Maker -category: Surface Models - -description: Create 3D surface models from grayscale data. This module uses Marching Cubes to create an isosurface at a given threshold. The resulting surface consists of triangles that separate a volume into regions below and above the threshold. The resulting surface can be smoothed and decimated. This model works on continuous data while the module Model Maker works on labeled (or discrete) data. + category: Surface Models -version: 3.0 + description: Create 3D surface models from grayscale data. This module uses Marching Cubes to create an isosurface at a given threshold. The resulting surface consists of triangles that separate a volume into regions below and above the threshold. The resulting surface can be smoothed and decimated. This model works on continuous data while the module Model Maker works on labeled (or discrete) data. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GrayscaleModelMaker + version: 3.0 -license: slicer3 + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GrayscaleModelMaker -contributor: Nicole Aucoin (SPL, BWH), Bill Lorensen (GE) + license: slicer3 -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Nicole Aucoin (SPL, BWH), Bill Lorensen (GE) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
+ """ input_spec = GrayscaleModelMakerInputSpec output_spec = GrayscaleModelMakerOutputSpec _cmd = "GrayscaleModelMaker " - _outputs_filenames = {'OutputGeometry': 'OutputGeometry.vtk'} + _outputs_filenames = {"OutputGeometry": "OutputGeometry.vtk"} class ProbeVolumeWithModelInputSpec(CommandLineInputSpec): InputVolume = File( - position=-3, - desc="Volume to use to 'paint' the model", - exists=True, - argstr="%s") - InputModel = File( - position=-2, desc="Input model", exists=True, argstr="%s") + position=-3, desc="Volume to use to 'paint' the model", exists=True, argstr="%s" + ) + InputModel = File(position=-2, desc="Input model", exists=True, argstr="%s") OutputModel = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output 'painted' model", - argstr="%s") + argstr="%s", + ) class ProbeVolumeWithModelOutputSpec(TraitedSpec): @@ -176,51 +176,50 @@ class ProbeVolumeWithModelOutputSpec(TraitedSpec): class ProbeVolumeWithModel(SEMLikeCommandLine): """title: Probe Volume With Model -category: Surface Models - -description: Paint a model by a volume (using vtkProbeFilter). + category: Surface Models -version: 0.1.0.$Revision: 1892 $(alpha) + description: Paint a model by a volume (using vtkProbeFilter). -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ProbeVolumeWithModel + version: 0.1.0.$Revision: 1892 $(alpha) -contributor: Lauren O'Donnell (SPL, BWH) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ProbeVolumeWithModel -acknowledgements: BWH, NCIGT/LMI + contributor: Lauren O'Donnell (SPL, BWH) -""" + acknowledgements: BWH, NCIGT/LMI + """ input_spec = ProbeVolumeWithModelInputSpec output_spec = ProbeVolumeWithModelOutputSpec _cmd = "ProbeVolumeWithModel " - _outputs_filenames = {'OutputModel': 'OutputModel.vtk'} + _outputs_filenames = {"OutputModel": "OutputModel.vtk"} class LabelMapSmoothingInputSpec(CommandLineInputSpec): labelToSmooth = traits.Int( - desc= - "The label to smooth. All others will be ignored. If no label is selected by the user, the maximum label in the image is chosen by default.", - argstr="--labelToSmooth %d") + desc="The label to smooth. All others will be ignored. If no label is selected by the user, the maximum label in the image is chosen by default.", + argstr="--labelToSmooth %d", + ) numberOfIterations = traits.Int( desc="The number of iterations of the level set AntiAliasing algorithm", - argstr="--numberOfIterations %d") - maxRMSError = traits.Float( - desc="The maximum RMS error.", argstr="--maxRMSError %f") + argstr="--numberOfIterations %d", + ) + maxRMSError = traits.Float(desc="The maximum RMS error.", argstr="--maxRMSError %f") gaussianSigma = traits.Float( desc="The standard deviation of the Gaussian kernel", - argstr="--gaussianSigma %f") + argstr="--gaussianSigma %f", + ) inputVolume = File( - position=-2, - desc="Input label map to smooth", - exists=True, - argstr="%s") + position=-2, desc="Input label map to smooth", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Smoothed label map", - argstr="%s") + argstr="%s", + ) class LabelMapSmoothingOutputSpec(TraitedSpec): @@ -230,138 +229,136 @@ class LabelMapSmoothingOutputSpec(TraitedSpec): class LabelMapSmoothing(SEMLikeCommandLine): """title: Label Map Smoothing -category: Surface Models - -description: This filter smoothes a binary label map. 
With a label map as input, this filter runs an anti-alising algorithm followed by a Gaussian smoothing algorithm. The output is a smoothed label map. + category: Surface Models -version: 1.0 + description: This filter smoothes a binary label map. With a label map as input, this filter runs an anti-alising algorithm followed by a Gaussian smoothing algorithm. The output is a smoothed label map. -documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/LabelMapSmoothing + version: 1.0 -contributor: Dirk Padfield (GE), Josh Cates (Utah), Ross Whitaker (Utah) + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/LabelMapSmoothing -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. This filter is based on work developed at the University of Utah, and implemented at GE Research. + contributor: Dirk Padfield (GE), Josh Cates (Utah), Ross Whitaker (Utah) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. This filter is based on work developed at the University of Utah, and implemented at GE Research. + """ input_spec = LabelMapSmoothingInputSpec output_spec = LabelMapSmoothingOutputSpec _cmd = "LabelMapSmoothing " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class ModelMakerInputSpec(CommandLineInputSpec): InputVolume = File( position=-1, - desc= - "Input label map. The Input Volume drop down menu is populated with the label map volumes that are present in the scene, select one from which to generate models.", + desc="Input label map. The Input Volume drop down menu is populated with the label map volumes that are present in the scene, select one from which to generate models.", exists=True, - argstr="%s") + argstr="%s", + ) color = File( desc="Color table to make labels to colors and objects", exists=True, - argstr="--color %s") + argstr="--color %s", + ) modelSceneFile = traits.Either( traits.Bool, - InputMultiPath(File(), ), + InputMultiPath(File()), hash_files=False, - desc= - "Generated models, under a model hierarchy node. Models are imported into Slicer under a model hierarchy node, and their colors are set by the color table associated with the input label map volume. The model hierarchy node must be created before running the model maker, by selecting Create New ModelHierarchy from the Models drop down menu. If you're running from the command line, a model hierarchy node in a new mrml scene will be created for you.", - argstr="--modelSceneFile %s...") + desc="Generated models, under a model hierarchy node. Models are imported into Slicer under a model hierarchy node, and their colors are set by the color table associated with the input label map volume. The model hierarchy node must be created before running the model maker, by selecting Create New ModelHierarchy from the Models drop down menu. If you're running from the command line, a model hierarchy node in a new mrml scene will be created for you.", + argstr="--modelSceneFile %s...", + ) name = traits.Str( - desc= - "Name to use for this model. Any text entered in the entry box will be the starting string for the created model file names. 
The label number and the color name will also be part of the file name. If making multiple models, use this as a prefix to the label and color name.", - argstr="--name %s") + desc="Name to use for this model. Any text entered in the entry box will be the starting string for the created model file names. The label number and the color name will also be part of the file name. If making multiple models, use this as a prefix to the label and color name.", + argstr="--name %s", + ) generateAll = traits.Bool( - desc= - "Generate models for all labels in the input volume. select this option if you want to create all models that correspond to all values in a labelmap volume (using the Joint Smoothing option below is useful with this option). Ignores Labels, Start Label, End Label settings. Skips label 0.", - argstr="--generateAll ") + desc="Generate models for all labels in the input volume. select this option if you want to create all models that correspond to all values in a labelmap volume (using the Joint Smoothing option below is useful with this option). Ignores Labels, Start Label, End Label settings. Skips label 0.", + argstr="--generateAll ", + ) labels = InputMultiPath( traits.Int, - desc= - "A comma separated list of label values from which to make models. f you specify a list of Labels, it will override any start/end label settings. If you click Generate All Models it will override the list of labels and any start/end label settings.", + desc="A comma separated list of label values from which to make models. f you specify a list of Labels, it will override any start/end label settings. If you click Generate All Models it will override the list of labels and any start/end label settings.", sep=",", - argstr="--labels %s") + argstr="--labels %s", + ) start = traits.Int( - desc= - "If you want to specify a continuous range of labels from which to generate models, enter the lower label here. Voxel value from which to start making models. Used instead of the label list to specify a range (make sure the label list is empty or it will over ride this).", - argstr="--start %d") + desc="If you want to specify a continuous range of labels from which to generate models, enter the lower label here. Voxel value from which to start making models. Used instead of the label list to specify a range (make sure the label list is empty or it will over ride this).", + argstr="--start %d", + ) end = traits.Int( - desc= - "If you want to specify a continuous range of labels from which to generate models, enter the higher label here. Voxel value up to which to continue making models. Skip any values with zero voxels.", - argstr="--end %d") + desc="If you want to specify a continuous range of labels from which to generate models, enter the higher label here. Voxel value up to which to continue making models. Skip any values with zero voxels.", + argstr="--end %d", + ) skipUnNamed = traits.Bool( - desc= - "Select this to not generate models from labels that do not have names defined in the color look up table associated with the input label map. If true, only models which have an entry in the color table will be generated. If false, generate all models that exist within the label range.", - argstr="--skipUnNamed ") + desc="Select this to not generate models from labels that do not have names defined in the color look up table associated with the input label map. If true, only models which have an entry in the color table will be generated. 
If false, generate all models that exist within the label range.", + argstr="--skipUnNamed ", + ) jointsmooth = traits.Bool( - desc= - "This will ensure that all resulting models fit together smoothly, like jigsaw puzzle pieces. Otherwise the models will be smoothed independently and may overlap.", - argstr="--jointsmooth ") + desc="This will ensure that all resulting models fit together smoothly, like jigsaw puzzle pieces. Otherwise the models will be smoothed independently and may overlap.", + argstr="--jointsmooth ", + ) smooth = traits.Int( - desc= - "Here you can set the number of smoothing iterations for Laplacian smoothing, or the degree of the polynomial approximating the windowed Sinc function. Use 0 if you wish no smoothing. ", - argstr="--smooth %d") + desc="Here you can set the number of smoothing iterations for Laplacian smoothing, or the degree of the polynomial approximating the windowed Sinc function. Use 0 if you wish no smoothing. ", + argstr="--smooth %d", + ) filtertype = traits.Enum( "Sinc", "Laplacian", - desc= - "You can control the type of smoothing done on the models by selecting a filter type of either Sinc or Laplacian.", - argstr="--filtertype %s") + desc="You can control the type of smoothing done on the models by selecting a filter type of either Sinc or Laplacian.", + argstr="--filtertype %s", + ) decimate = traits.Float( - desc= - "Chose the target reduction in number of polygons as a decimal percentage (between 0 and 1) of the number of polygons. Specifies the percentage of triangles to be removed. For example, 0.1 means 10% reduction and 0.9 means 90% reduction.", - argstr="--decimate %f") + desc="Chose the target reduction in number of polygons as a decimal percentage (between 0 and 1) of the number of polygons. Specifies the percentage of triangles to be removed. For example, 0.1 means 10% reduction and 0.9 means 90% reduction.", + argstr="--decimate %f", + ) splitnormals = traits.Bool( - desc= - "Splitting normals is useful for visualizing sharp features. However it creates holes in surfaces which affects measurements.", - argstr="--splitnormals ") + desc="Splitting normals is useful for visualizing sharp features. However it creates holes in surfaces which affects measurements.", + argstr="--splitnormals ", + ) pointnormals = traits.Bool( - desc= - "Turn this flag on if you wish to calculate the normal vectors for the points.", - argstr="--pointnormals ") + desc="Turn this flag on if you wish to calculate the normal vectors for the points.", + argstr="--pointnormals ", + ) pad = traits.Bool( - desc= - "Pad the input volume with zero value voxels on all 6 faces in order to ensure the production of closed surfaces. Sets the origin translation and extent translation so that the models still line up with the unpadded input volume.", - argstr="--pad ") + desc="Pad the input volume with zero value voxels on all 6 faces in order to ensure the production of closed surfaces. Sets the origin translation and extent translation so that the models still line up with the unpadded input volume.", + argstr="--pad ", + ) saveIntermediateModels = traits.Bool( - desc= - "You can save a copy of the models after each of the intermediate steps (marching cubes, smoothing, and decimation if not joint smoothing, otherwise just after decimation). 
These intermediate models are not saved in the mrml file, you have to load them manually after turning off deleting temporary files in they python console (View ->Python Interactor) using the following command slicer.modules.modelmaker.cliModuleLogic().DeleteTemporaryFilesOff().", - argstr="--saveIntermediateModels ") + desc="You can save a copy of the models after each of the intermediate steps (marching cubes, smoothing, and decimation if not joint smoothing, otherwise just after decimation). These intermediate models are not saved in the mrml file, you have to load them manually after turning off deleting temporary files in they python console (View ->Python Interactor) using the following command slicer.modules.modelmaker.cliModuleLogic().DeleteTemporaryFilesOff().", + argstr="--saveIntermediateModels ", + ) debug = traits.Bool( - desc= - "turn this flag on in order to see debugging output (look in the Error Log window that is accessed via the View menu)", - argstr="--debug ") + desc="turn this flag on in order to see debugging output (look in the Error Log window that is accessed via the View menu)", + argstr="--debug ", + ) class ModelMakerOutputSpec(TraitedSpec): modelSceneFile = OutputMultiPath( File(exists=True), - desc= - "Generated models, under a model hierarchy node. Models are imported into Slicer under a model hierarchy node, and their colors are set by the color table associated with the input label map volume. The model hierarchy node must be created before running the model maker, by selecting Create New ModelHierarchy from the Models drop down menu. If you're running from the command line, a model hierarchy node in a new mrml scene will be created for you." + desc="Generated models, under a model hierarchy node. Models are imported into Slicer under a model hierarchy node, and their colors are set by the color table associated with the input label map volume. The model hierarchy node must be created before running the model maker, by selecting Create New ModelHierarchy from the Models drop down menu. If you're running from the command line, a model hierarchy node in a new mrml scene will be created for you.", ) class ModelMaker(SEMLikeCommandLine): """title: Model Maker -category: Surface Models - -description: Create 3D surface models from segmented data.

Models are imported into Slicer under a model hierarchy node in a MRML scene. The model colors are set by the color table associated with the input volume (these colours will only be visible if you load the model scene file).

Create Multiple:

If you specify a list of Labels, it will over ride any start/end label settings.

If you clickGenerate Allit will over ride the list of lables and any start/end label settings.

Model Maker Settings:

You can set the number of smoothing iterations, target reduction in number of polygons (decimal percentage). Use 0 and 1 if you wish no smoothing nor decimation.
You can set the flags to split normals or generate point normals in this pane as well.
You can save a copy of the models after intermediate steps (marching cubes, smoothing, and decimation if not joint smoothing, otherwise just after decimation); these models are not saved in the mrml file, turn off deleting temporary files first in the python window:
slicer.modules.modelmaker.cliModuleLogic().DeleteTemporaryFilesOff()

+ category: Surface Models -version: 4.1 + description: Create 3D surface models from segmented data.

Models are imported into Slicer under a model hierarchy node in a MRML scene. The model colors are set by the color table associated with the input volume (these colors will only be visible if you load the model scene file).

Create Multiple:

If you specify a list of Labels, it will override any start/end label settings.

If you click Generate All, it will override the list of labels and any start/end label settings.

Model Maker Settings:

You can set the number of smoothing iterations and the target reduction in the number of polygons (as a decimal percentage). Use 0 and 1 if you wish no smoothing or decimation.
You can set the flags to split normals or generate point normals in this pane as well.
You can save a copy of the models after intermediate steps (marching cubes, smoothing, and decimation if not joint smoothing, otherwise just after decimation); these models are not saved in the mrml file. To keep them, first turn off deleting temporary files in the Python window:
slicer.modules.modelmaker.cliModuleLogic().DeleteTemporaryFilesOff()
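
Since this hunk only reindents and reflows the ModelMaker docstring, the interface behaves as before. For orientation, a minimal usage sketch of the wrapped CLI follows; the input file name is hypothetical, and actually running it requires a local 3D Slicer installation that provides the ModelMaker executable on the PATH:

    # Hedged sketch: "aseg_labels.nii.gz" is a hypothetical label-map file.
    from nipype.interfaces.slicer.surface import ModelMaker

    mm = ModelMaker()
    mm.inputs.InputVolume = "aseg_labels.nii.gz"  # label map to mesh
    mm.inputs.generateAll = True     # one model per label value (skips label 0)
    mm.inputs.filtertype = "Sinc"    # smoothing filter; "Laplacian" also allowed
    mm.inputs.smooth = 15            # smoothing iterations
    mm.inputs.decimate = 0.25        # remove ~25% of the triangles
    mm.inputs.modelSceneFile = True  # let nipype default to modelSceneFile.mrml
    print(mm.cmdline)                # inspect the generated command line
    # mm.run()                       # uncomment once Slicer's CLI modules are installed

Setting the `traits.Either(Bool, ...)` output traits to `True` asks `SEMLikeCommandLine` to substitute the default file name from `_outputs_filenames`, which is why `modelSceneFile = True` yields a `modelSceneFile.mrml` path on the command line.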

-documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ModelMaker + version: 4.1 -license: slicer4 + documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ModelMaker -contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH), Bill Lorensen (GE) + license: slicer4 -acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH), Bill Lorensen (GE) -""" + acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. + """ input_spec = ModelMakerInputSpec output_spec = ModelMakerOutputSpec _cmd = "ModelMaker " - _outputs_filenames = {'modelSceneFile': 'modelSceneFile.mrml'} + _outputs_filenames = {"modelSceneFile": "modelSceneFile.mrml"} diff --git a/nipype/interfaces/slicer/tests/__init__.py b/nipype/interfaces/slicer/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/tests/__init__.py +++ b/nipype/interfaces/slicer/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/tests/test_auto_DicomToNrrdConverter.py b/nipype/interfaces/slicer/tests/test_auto_DicomToNrrdConverter.py index 2997e805f9..b02dfd595d 100644 --- a/nipype/interfaces/slicer/tests/test_auto_DicomToNrrdConverter.py +++ b/nipype/interfaces/slicer/tests/test_auto_DicomToNrrdConverter.py @@ -1,36 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..converters import DicomToNrrdConverter def test_DicomToNrrdConverter_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputDicomDirectory=dict(argstr='--inputDicomDirectory %s', ), + inputDicomDirectory=dict( + argstr="--inputDicomDirectory %s", + ), outputDirectory=dict( - argstr='--outputDirectory %s', + argstr="--outputDirectory %s", hash_files=False, ), - outputVolume=dict(argstr='--outputVolume %s', ), - smallGradientThreshold=dict(argstr='--smallGradientThreshold %f', ), + outputVolume=dict( + argstr="--outputVolume %s", + ), + smallGradientThreshold=dict( + argstr="--smallGradientThreshold %f", + ), useBMatrixGradientDirections=dict( - argstr='--useBMatrixGradientDirections ', ), + argstr="--useBMatrixGradientDirections ", + ), useIdentityMeaseurementFrame=dict( - argstr='--useIdentityMeaseurementFrame ', ), + argstr="--useIdentityMeaseurementFrame ", + ), writeProtocolGradientsFile=dict( - argstr='--writeProtocolGradientsFile ', ), + argstr="--writeProtocolGradientsFile ", + ), ) inputs = DicomToNrrdConverter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DicomToNrrdConverter_outputs(): - output_map = dict(outputDirectory=dict(), ) + output_map = dict( + outputDirectory=dict(), + ) outputs = DicomToNrrdConverter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_EMSegmentTransformToNewFormat.py b/nipype/interfaces/slicer/tests/test_auto_EMSegmentTransformToNewFormat.py index 279d68e0ab..338fa49cae 100644 --- 
a/nipype/interfaces/slicer/tests/test_auto_EMSegmentTransformToNewFormat.py +++ b/nipype/interfaces/slicer/tests/test_auto_EMSegmentTransformToNewFormat.py @@ -1,29 +1,41 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utilities import EMSegmentTransformToNewFormat def test_EMSegmentTransformToNewFormat_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - inputMRMLFileName=dict(argstr='--inputMRMLFileName %s', ), + inputMRMLFileName=dict( + argstr="--inputMRMLFileName %s", + extensions=None, + ), outputMRMLFileName=dict( - argstr='--outputMRMLFileName %s', + argstr="--outputMRMLFileName %s", hash_files=False, ), - templateFlag=dict(argstr='--templateFlag ', ), + templateFlag=dict( + argstr="--templateFlag ", + ), ) inputs = EMSegmentTransformToNewFormat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EMSegmentTransformToNewFormat_outputs(): - output_map = dict(outputMRMLFileName=dict(), ) + output_map = dict( + outputMRMLFileName=dict( + extensions=None, + ), + ) outputs = EMSegmentTransformToNewFormat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_GrayscaleModelMaker.py b/nipype/interfaces/slicer/tests/test_auto_GrayscaleModelMaker.py index 7ad8cac8e9..8bab4bd963 100644 --- a/nipype/interfaces/slicer/tests/test_auto_GrayscaleModelMaker.py +++ b/nipype/interfaces/slicer/tests/test_auto_GrayscaleModelMaker.py @@ -1,38 +1,59 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..surface import GrayscaleModelMaker def test_GrayscaleModelMaker_inputs(): input_map = dict( InputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), OutputGeometry=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - args=dict(argstr='%s', ), - decimate=dict(argstr='--decimate %f', ), + args=dict( + argstr="%s", + ), + decimate=dict( + argstr="--decimate %f", + ), environ=dict( nohash=True, usedefault=True, ), - name=dict(argstr='--name %s', ), - pointnormals=dict(argstr='--pointnormals ', ), - smooth=dict(argstr='--smooth %d', ), - splitnormals=dict(argstr='--splitnormals ', ), - threshold=dict(argstr='--threshold %f', ), + name=dict( + argstr="--name %s", + ), + pointnormals=dict( + argstr="--pointnormals ", + ), + smooth=dict( + argstr="--smooth %d", + ), + splitnormals=dict( + argstr="--splitnormals ", + ), + threshold=dict( + argstr="--threshold %f", + ), ) inputs = GrayscaleModelMaker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GrayscaleModelMaker_outputs(): - output_map = dict(OutputGeometry=dict(position=-1, ), ) + output_map = dict( + OutputGeometry=dict( + extensions=None, + position=-1, + ), + ) outputs = GrayscaleModelMaker.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_LabelMapSmoothing.py b/nipype/interfaces/slicer/tests/test_auto_LabelMapSmoothing.py index bb3780495b..3dab7b1498 100644 --- a/nipype/interfaces/slicer/tests/test_auto_LabelMapSmoothing.py +++ b/nipype/interfaces/slicer/tests/test_auto_LabelMapSmoothing.py @@ -1,25 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT 
EDIT -from __future__ import unicode_literals from ..surface import LabelMapSmoothing def test_LabelMapSmoothing_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - gaussianSigma=dict(argstr='--gaussianSigma %f', ), + gaussianSigma=dict( + argstr="--gaussianSigma %f", + ), inputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - labelToSmooth=dict(argstr='--labelToSmooth %d', ), - maxRMSError=dict(argstr='--maxRMSError %f', ), - numberOfIterations=dict(argstr='--numberOfIterations %d', ), + labelToSmooth=dict( + argstr="--labelToSmooth %d", + ), + maxRMSError=dict( + argstr="--maxRMSError %f", + ), + numberOfIterations=dict( + argstr="--numberOfIterations %d", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -29,8 +39,15 @@ def test_LabelMapSmoothing_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LabelMapSmoothing_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = LabelMapSmoothing.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_MergeModels.py b/nipype/interfaces/slicer/tests/test_auto_MergeModels.py index 6453957a79..dc93147248 100644 --- a/nipype/interfaces/slicer/tests/test_auto_MergeModels.py +++ b/nipype/interfaces/slicer/tests/test_auto_MergeModels.py @@ -1,24 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..surface import MergeModels def test_MergeModels_inputs(): input_map = dict( Model1=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), Model2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), ModelOutput=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, @@ -29,8 +32,15 @@ def test_MergeModels_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MergeModels_outputs(): - output_map = dict(ModelOutput=dict(position=-1, ), ) + output_map = dict( + ModelOutput=dict( + extensions=None, + position=-1, + ), + ) outputs = MergeModels.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_ModelMaker.py b/nipype/interfaces/slicer/tests/test_auto_ModelMaker.py index ed182137cf..905b1417e9 100644 --- a/nipype/interfaces/slicer/tests/test_auto_ModelMaker.py +++ b/nipype/interfaces/slicer/tests/test_auto_ModelMaker.py @@ -1,50 +1,87 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..surface import ModelMaker def test_ModelMaker_inputs(): input_map = dict( InputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-1, ), - args=dict(argstr='%s', ), - color=dict(argstr='--color %s', ), - debug=dict(argstr='--debug ', ), - decimate=dict(argstr='--decimate %f', ), - end=dict(argstr='--end %d', ), + args=dict( + argstr="%s", + ), + color=dict( + argstr="--color %s", + extensions=None, + ), + debug=dict( + argstr="--debug ", + ), + decimate=dict( + argstr="--decimate %f", + ), + 
end=dict( + argstr="--end %d", + ), environ=dict( nohash=True, usedefault=True, ), - filtertype=dict(argstr='--filtertype %s', ), - generateAll=dict(argstr='--generateAll ', ), - jointsmooth=dict(argstr='--jointsmooth ', ), + filtertype=dict( + argstr="--filtertype %s", + ), + generateAll=dict( + argstr="--generateAll ", + ), + jointsmooth=dict( + argstr="--jointsmooth ", + ), labels=dict( - argstr='--labels %s', - sep=',', + argstr="--labels %s", + sep=",", ), modelSceneFile=dict( - argstr='--modelSceneFile %s...', + argstr="--modelSceneFile %s...", hash_files=False, ), - name=dict(argstr='--name %s', ), - pad=dict(argstr='--pad ', ), - pointnormals=dict(argstr='--pointnormals ', ), - saveIntermediateModels=dict(argstr='--saveIntermediateModels ', ), - skipUnNamed=dict(argstr='--skipUnNamed ', ), - smooth=dict(argstr='--smooth %d', ), - splitnormals=dict(argstr='--splitnormals ', ), - start=dict(argstr='--start %d', ), + name=dict( + argstr="--name %s", + ), + pad=dict( + argstr="--pad ", + ), + pointnormals=dict( + argstr="--pointnormals ", + ), + saveIntermediateModels=dict( + argstr="--saveIntermediateModels ", + ), + skipUnNamed=dict( + argstr="--skipUnNamed ", + ), + smooth=dict( + argstr="--smooth %d", + ), + splitnormals=dict( + argstr="--splitnormals ", + ), + start=dict( + argstr="--start %d", + ), ) inputs = ModelMaker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ModelMaker_outputs(): - output_map = dict(modelSceneFile=dict(), ) + output_map = dict( + modelSceneFile=dict(), + ) outputs = ModelMaker.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_ModelToLabelMap.py b/nipype/interfaces/slicer/tests/test_auto_ModelToLabelMap.py index efd11f1040..8449c15fce 100644 --- a/nipype/interfaces/slicer/tests/test_auto_ModelToLabelMap.py +++ b/nipype/interfaces/slicer/tests/test_auto_ModelToLabelMap.py @@ -1,27 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..surface import ModelToLabelMap def test_ModelToLabelMap_inputs(): input_map = dict( InputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), OutputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - args=dict(argstr='%s', ), - distance=dict(argstr='--distance %f', ), + args=dict( + argstr="%s", + ), + distance=dict( + argstr="--distance %f", + ), environ=dict( nohash=True, usedefault=True, ), surface=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), ) @@ -30,8 +35,15 @@ def test_ModelToLabelMap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ModelToLabelMap_outputs(): - output_map = dict(OutputVolume=dict(position=-1, ), ) + output_map = dict( + OutputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = ModelToLabelMap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_OrientScalarVolume.py b/nipype/interfaces/slicer/tests/test_auto_OrientScalarVolume.py index f3d1908cd0..e2b4a1a2f7 100644 --- a/nipype/interfaces/slicer/tests/test_auto_OrientScalarVolume.py +++ b/nipype/interfaces/slicer/tests/test_auto_OrientScalarVolume.py @@ -1,22 +1,26 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ 
import unicode_literals from ..converters import OrientScalarVolume def test_OrientScalarVolume_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), - orientation=dict(argstr='--orientation %s', ), + orientation=dict( + argstr="--orientation %s", + ), outputVolume=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), @@ -26,8 +30,15 @@ def test_OrientScalarVolume_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_OrientScalarVolume_outputs(): - output_map = dict(outputVolume=dict(position=-1, ), ) + output_map = dict( + outputVolume=dict( + extensions=None, + position=-1, + ), + ) outputs = OrientScalarVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_ProbeVolumeWithModel.py b/nipype/interfaces/slicer/tests/test_auto_ProbeVolumeWithModel.py index 32a2fc2139..77498c0b08 100644 --- a/nipype/interfaces/slicer/tests/test_auto_ProbeVolumeWithModel.py +++ b/nipype/interfaces/slicer/tests/test_auto_ProbeVolumeWithModel.py @@ -1,24 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..surface import ProbeVolumeWithModel def test_ProbeVolumeWithModel_inputs(): input_map = dict( InputModel=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-2, ), InputVolume=dict( - argstr='%s', + argstr="%s", + extensions=None, position=-3, ), OutputModel=dict( - argstr='%s', + argstr="%s", hash_files=False, position=-1, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, @@ -29,8 +32,15 @@ def test_ProbeVolumeWithModel_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProbeVolumeWithModel_outputs(): - output_map = dict(OutputModel=dict(position=-1, ), ) + output_map = dict( + OutputModel=dict( + extensions=None, + position=-1, + ), + ) outputs = ProbeVolumeWithModel.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_SlicerCommandLine.py b/nipype/interfaces/slicer/tests/test_auto_SlicerCommandLine.py index b0e1e2c3b0..7a16ed38bb 100644 --- a/nipype/interfaces/slicer/tests/test_auto_SlicerCommandLine.py +++ b/nipype/interfaces/slicer/tests/test_auto_SlicerCommandLine.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import SlicerCommandLine def test_SlicerCommandLine_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/interfaces/slicer/utilities.py b/nipype/interfaces/slicer/utilities.py index 5faf640570..eb079766c9 100644 --- a/nipype/interfaces/slicer/utilities.py +++ b/nipype/interfaces/slicer/utilities.py @@ -1,54 +1,56 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath 
-import os +from nipype.interfaces.base import ( + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + traits, +) class EMSegmentTransformToNewFormatInputSpec(CommandLineInputSpec): inputMRMLFileName = File( - desc= - "Active MRML scene that contains EMSegment algorithm parameters in the format before 3.6.3 - please include absolute file name in path.", + desc="Active MRML scene that contains EMSegment algorithm parameters in the format before 3.6.3 - please include absolute file name in path.", exists=True, - argstr="--inputMRMLFileName %s") + argstr="--inputMRMLFileName %s", + ) outputMRMLFileName = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Write out the MRML scene after transformation to format 3.6.3 has been made. - has to be in the same directory as the input MRML file due to Slicer Core bug - please include absolute file name in path ", - argstr="--outputMRMLFileName %s") + desc="Write out the MRML scene after transformation to format 3.6.3 has been made. - has to be in the same directory as the input MRML file due to Slicer Core bug - please include absolute file name in path ", + argstr="--outputMRMLFileName %s", + ) templateFlag = traits.Bool( - desc= - "Set to true if the transformed mrml file should be used as template file ", - argstr="--templateFlag ") + desc="Set to true if the transformed mrml file should be used as template file ", + argstr="--templateFlag ", + ) class EMSegmentTransformToNewFormatOutputSpec(TraitedSpec): outputMRMLFileName = File( - desc= - "Write out the MRML scene after transformation to format 3.6.3 has been made. - has to be in the same directory as the input MRML file due to Slicer Core bug - please include absolute file name in path ", - exists=True) + desc="Write out the MRML scene after transformation to format 3.6.3 has been made. 
- has to be in the same directory as the input MRML file due to Slicer Core bug - please include absolute file name in path ", + exists=True, + ) class EMSegmentTransformToNewFormat(SEMLikeCommandLine): """title: - Transform MRML Files to New EMSegmenter Standard - + Transform MRML Files to New EMSegmenter Standard -category: - Utilities + category: + Utilities -description: - Transform MRML Files to New EMSegmenter Standard + description: + Transform MRML Files to New EMSegmenter Standard -""" + """ input_spec = EMSegmentTransformToNewFormatInputSpec output_spec = EMSegmentTransformToNewFormatOutputSpec _cmd = "EMSegmentTransformToNewFormat " - _outputs_filenames = {'outputMRMLFileName': 'outputMRMLFileName.mrml'} + _outputs_filenames = {"outputMRMLFileName": "outputMRMLFileName.mrml"} diff --git a/nipype/interfaces/spm/__init__.py b/nipype/interfaces/spm/__init__.py index 09a680f692..160bbae150 100644 --- a/nipype/interfaces/spm/__init__.py +++ b/nipype/interfaces/spm/__init__.py @@ -1,16 +1,41 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Top-level namespace for spm.""" - -from .base import (Info, SPMCommand, logger, no_spm, scans_for_fname, - scans_for_fnames) -from .preprocess import (FieldMap, SliceTiming, Realign, RealignUnwarp, - Coregister, Normalize, Normalize12, Segment, - Smooth, NewSegment, DARTEL, DARTELNorm2MNI, - CreateWarped, VBMSegment) -from .model import (Level1Design, EstimateModel, EstimateContrast, Threshold, - OneSampleTTestDesign, TwoSampleTTestDesign, - PairedTTestDesign, MultipleRegressionDesign) -from .utils import (Analyze2nii, CalcCoregAffine, ApplyTransform, Reslice, - ApplyInverseDeformation, ResliceToReference, DicomImport) +"""SPM is a software package for the analysis of brain imaging data sequences.""" +from .base import Info, SPMCommand, logger, no_spm, scans_for_fname, scans_for_fnames +from .preprocess import ( + ApplyVDM, + FieldMap, + SliceTiming, + Realign, + RealignUnwarp, + Coregister, + Normalize, + Normalize12, + Segment, + Smooth, + NewSegment, + MultiChannelNewSegment, + DARTEL, + DARTELNorm2MNI, + CreateWarped, + VBMSegment, +) +from .model import ( + Level1Design, + EstimateModel, + EstimateContrast, + Threshold, + OneSampleTTestDesign, + TwoSampleTTestDesign, + PairedTTestDesign, + MultipleRegressionDesign, +) +from .utils import ( + Analyze2nii, + CalcCoregAffine, + ApplyTransform, + Reslice, + ApplyInverseDeformation, + ResliceToReference, + DicomImport, +) diff --git a/nipype/interfaces/spm/base.py b/nipype/interfaces/spm/base.py index fd93dfc522..4998f0af34 100644 --- a/nipype/interfaces/spm/base.py +++ b/nipype/interfaces/spm/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The spm module provides basic functions for interfacing with SPM tools. @@ -14,10 +13,6 @@ spm.SPMCommand().version """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, object, str, bytes - # Standard library imports import os from copy import deepcopy @@ -28,15 +23,25 @@ # Local imports from ... 
import logging -from ...utils import spm_docs as sd, NUMPY_MMAP -from ..base import (BaseInterface, traits, isdefined, InputMultiPath, - BaseInterfaceInputSpec, Directory, Undefined, ImageFile, - PackageInfo) +from ...utils import spm_docs as sd +from ..base import ( + BaseInterface, + traits, + Tuple, + isdefined, + InputMultiPath, + BaseInterfaceInputSpec, + Directory, + Undefined, + ImageFile, + PackageInfo, +) +from ..base.traits_extension import NoDefaultSpecified from ..matlab import MatlabCommand -from ...external.due import due, Doi, BibTeX +from ...external.due import BibTeX -__docformat__ = 'restructuredtext' -logger = logging.getLogger('nipype.interface') +__docformat__ = "restructuredtext" +logger = logging.getLogger("nipype.interface") def func_is_3d(in_file): @@ -45,12 +50,9 @@ def func_is_3d(in_file): if isinstance(in_file, list): return func_is_3d(in_file[0]) else: - img = load(in_file, mmap=NUMPY_MMAP) + img = load(in_file) shape = img.shape - if len(shape) == 3 or (len(shape) == 4 and shape[3] == 1): - return True - else: - return False + return len(shape) == 3 or (len(shape) == 4 and shape[3] == 1) def get_first_3dfile(in_files): @@ -69,18 +71,18 @@ def scans_for_fname(fname): """ if isinstance(fname, list): - scans = np.zeros((len(fname), ), dtype=object) + scans = np.zeros((len(fname),), dtype=object) for sno, f in enumerate(fname): - scans[sno] = '%s,1' % f + scans[sno] = "%s,1" % f return scans - img = load(fname, mmap=NUMPY_MMAP) + img = load(fname) if len(img.shape) == 3: - return np.array(('%s,1' % fname, ), dtype=object) + return np.array(("%s,1" % fname,), dtype=object) else: n_scans = img.shape[3] - scans = np.zeros((n_scans, ), dtype=object) + scans = np.zeros((n_scans,), dtype=object) for sno in range(n_scans): - scans[sno] = '%s,%d' % (fname, sno + 1) + scans[sno] = "%s,%d" % (fname, sno + 1) return scans @@ -101,7 +103,7 @@ def scans_for_fnames(fnames, keep4d=False, separate_sessions=False): if func_is_3d(fnames[0]): fnames = [fnames] if separate_sessions or keep4d: - flist = np.zeros((len(fnames), ), dtype=object) + flist = np.zeros((len(fnames),), dtype=object) for i, f in enumerate(fnames): if separate_sessions: if keep4d: @@ -131,6 +133,7 @@ class Info(PackageInfo): to any call in the Info class to maintain memoization. Otherwise, it will default to the parameters in the `getinfo` function below. """ + _path = None _name = None _command = None @@ -182,19 +185,21 @@ def getinfo(klass, matlab_cmd=None, paths=None, use_mcr=None): returns None of path not found """ - use_mcr = use_mcr or 'FORCE_SPMMCR' in os.environ - matlab_cmd = matlab_cmd or ((use_mcr and os.getenv('SPMMCRCMD')) - or os.getenv('MATLABCMD', 'matlab -nodesktop -nosplash')) - - if klass._name and klass._path and klass._version and \ - klass._command == matlab_cmd and klass._paths == paths: - - return { - 'name': klass._name, - 'path': klass._path, - 'release': klass._version - } - logger.debug('matlab command or path has changed. recomputing version.') + use_mcr = use_mcr or "FORCE_SPMMCR" in os.environ + matlab_cmd = matlab_cmd or ( + (use_mcr and os.getenv("SPMMCRCMD")) + or os.getenv("MATLABCMD", "matlab -nodesktop -nosplash") + ) + + if ( + klass._name + and klass._path + and klass._version + and klass._command == matlab_cmd + and klass._paths == paths + ): + return {"name": klass._name, "path": klass._path, "release": klass._version} + logger.debug("matlab command or path has changed. 
recomputing version.") mlab = MatlabCommand(matlab_cmd=matlab_cmd, resource_monitor=False) mlab.inputs.mfile = False if paths: @@ -216,10 +221,10 @@ def getinfo(klass, matlab_cmd=None, paths=None, use_mcr=None): """ try: out = mlab.run() - except (IOError, RuntimeError) as e: + except (OSError, RuntimeError) as e: # if no Matlab at all -- exception could be raised # No Matlab -- no spm - logger.debug('%s', e) + logger.debug("%s", e) klass._version = None klass._path = None klass._name = None @@ -229,40 +234,37 @@ def getinfo(klass, matlab_cmd=None, paths=None, use_mcr=None): out = sd._strip_header(out.runtime.stdout) out_dict = {} - for part in out.split('|'): - key, val = part.split(':') + for part in out.split("|"): + key, val = part.split(":") out_dict[key] = val - klass._version = out_dict['release'] - klass._path = out_dict['path'] - klass._name = out_dict['name'] + klass._version = out_dict["release"] + klass._path = out_dict["path"] + klass._name = out_dict["name"] klass._command = matlab_cmd klass._paths = paths return out_dict def no_spm(): - """ Checks if SPM is NOT installed + """Checks if SPM is NOT installed used with pytest.mark.skipif decorator to skip tests that will fail if spm is not installed""" - if 'NIPYPE_NO_MATLAB' in os.environ or Info.version() is None: - return True - else: - return False + return "NIPYPE_NO_MATLAB" in os.environ or Info.version() is None class SPMCommandInputSpec(BaseInterfaceInputSpec): - matlab_cmd = traits.Str(desc='matlab command to use') - paths = InputMultiPath(Directory(), desc='Paths to add to matlabpath') - mfile = traits.Bool(True, desc='Run m-code using m-file', usedefault=True) - use_mcr = traits.Bool(desc='Run m-code using SPM MCR') + matlab_cmd = traits.Str(desc="matlab command to use") + paths = InputMultiPath(Directory(), desc="Paths to add to matlabpath") + mfile = traits.Bool(True, desc="Run m-code using m-file", usedefault=True) + use_mcr = traits.Bool(desc="Run m-code using SPM MCR") use_v8struct = traits.Bool( True, - min_ver='8', + min_ver="8", usedefault=True, - desc=('Generate SPM8 and higher ' - 'compatible jobs')) + desc=("Generate SPM8 and higher compatible jobs"), + ) class SPMCommand(BaseInterface): @@ -270,36 +272,38 @@ class SPMCommand(BaseInterface): WARNING: Pseudo prototype class, meant to be subclassed """ + input_spec = SPMCommandInputSpec - _additional_metadata = ['field'] + _additional_metadata = ["field"] - _jobtype = 'basetype' - _jobname = 'basename' + _jobtype = "basetype" + _jobname = "basename" _matlab_cmd = None _paths = None _use_mcr = None - references_ = [{ - 'entry': - BibTeX( - "@book{FrackowiakFristonFrithDolanMazziotta1997," - "author={R.S.J. Frackowiak, K.J. Friston, C.D. Frith, R.J. Dolan, and J.C. Mazziotta}," - "title={Human Brain Function}," - "publisher={Academic Press USA}," - "year={1997}," - "}"), - 'description': - 'The fundamental text on Statistical Parametric Mapping (SPM)', - # 'path': "nipype.interfaces.spm", - 'tags': ['implementation'], - }] + _references = [ + { + "entry": BibTeX( + "@book{FrackowiakFristonFrithDolanMazziotta1997," + "author={R.S.J. Frackowiak, K.J. Friston, C.D. Frith, R.J. Dolan, and J.C. 
Mazziotta}," + "title={Human Brain Function}," + "publisher={Academic Press USA}," + "year={1997}," + "}" + ), + "description": "The fundamental text on Statistical Parametric Mapping (SPM)", + # 'path': "nipype.interfaces.spm", + "tags": ["implementation"], + } + ] def __init__(self, **inputs): - super(SPMCommand, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change( - self._matlab_cmd_update, - ['matlab_cmd', 'mfile', 'paths', 'use_mcr']) + self._matlab_cmd_update, ["matlab_cmd", "mfile", "paths", "use_mcr"] + ) self._find_mlab_cmd_defaults() self._check_mlab_inputs() self._matlab_cmd_update() @@ -309,19 +313,16 @@ def set_mlab_paths(cls, matlab_cmd=None, paths=None, use_mcr=None): cls._matlab_cmd = matlab_cmd cls._paths = paths cls._use_mcr = use_mcr - info_dict = Info.getinfo( - matlab_cmd=matlab_cmd, - paths=paths, - use_mcr=use_mcr) + info_dict = Info.getinfo(matlab_cmd=matlab_cmd, paths=paths, use_mcr=use_mcr) def _find_mlab_cmd_defaults(self): # check if the user has set environment variables to enforce # the standalone (MCR) version of SPM - if self._use_mcr or 'FORCE_SPMMCR' in os.environ: + if self._use_mcr or "FORCE_SPMMCR" in os.environ: self._use_mcr = True if self._matlab_cmd is None: try: - self._matlab_cmd = os.environ['SPMMCRCMD'] + self._matlab_cmd = os.environ["SPMMCRCMD"] except KeyError: pass @@ -333,9 +334,11 @@ def _matlab_cmd_update(self): matlab_cmd=self.inputs.matlab_cmd, mfile=self.inputs.mfile, paths=self.inputs.paths, - resource_monitor=False) - self.mlab.inputs.script_file = 'pyscript_%s.m' % \ - self.__class__.__name__.split('.')[-1].lower() + resource_monitor=False, + ) + self.mlab.inputs.script_file = ( + "pyscript_%s.m" % self.__class__.__name__.split(".")[-1].lower() + ) if isdefined(self.inputs.use_mcr) and self.inputs.use_mcr: self.mlab.inputs.nodesktop = Undefined self.mlab.inputs.nosplash = Undefined @@ -348,10 +351,12 @@ def version(self): info_dict = Info.getinfo( matlab_cmd=self.inputs.matlab_cmd, paths=self.inputs.paths, - use_mcr=self.inputs.use_mcr) + use_mcr=self.inputs.use_mcr, + ) if info_dict: - return '%s.%s' % (info_dict['name'].split('SPM')[-1], - info_dict['release']) + return "{}.{}".format( + info_dict["name"].split("SPM")[-1], info_dict["release"] + ) @property def jobtype(self): @@ -372,11 +377,12 @@ def _check_mlab_inputs(self): def _run_interface(self, runtime): """Executes the SPM function using MATLAB.""" self.mlab.inputs.script = self._make_matlab_command( - deepcopy(self._parse_inputs())) + deepcopy(self._parse_inputs()) + ) results = self.mlab.run() runtime.returncode = results.runtime.returncode if self.mlab.inputs.uses_mcr: - if 'Skipped' in results.runtime.stdout: + if "Skipped" in results.runtime.stdout: self.raise_exception(runtime) runtime.stdout = results.runtime.stdout runtime.stderr = results.runtime.stderr @@ -392,7 +398,7 @@ def _format_arg(self, opt, spec, val): """Convert input to appropriate format for SPM.""" if spec.is_trait_type(traits.Bool): return int(val) - elif spec.is_trait_type(traits.Tuple): + elif spec.is_trait_type(traits.BaseTuple): return list(val) else: return val @@ -407,8 +413,8 @@ def _parse_inputs(self, skip=()): if not isdefined(value): continue field = spec.field - if '.' in field: - fields = field.split('.') + if "." 
in field: + fields = field.split(".") dictref = spmdict for f in fields[:-1]: if f not in list(dictref.keys()): @@ -446,9 +452,9 @@ def _reformat_dict_for_savemat(self, contents): return [newdict] except TypeError: - print('Requires dict input') + print("Requires dict input") - def _generate_job(self, prefix='', contents=None): + def _generate_job(self, prefix="", contents=None): """Recursive function to generate spm job specification as a string Parameters @@ -461,7 +467,7 @@ def _generate_job(self, prefix='', contents=None): matlab commands. """ - jobstring = '' + jobstring = "" if contents is None: return jobstring if isinstance(contents, list): @@ -474,7 +480,7 @@ def _generate_job(self, prefix='', contents=None): return jobstring if isinstance(contents, dict): for key, value in list(contents.items()): - newprefix = "%s.%s" % (prefix, key) + newprefix = f"{prefix}.{key}" jobstring += self._generate_job(newprefix, value) return jobstring if isinstance(contents, np.ndarray): @@ -485,22 +491,20 @@ def _generate_job(self, prefix='', contents=None): jobstring += "{...\n" for i, val in enumerate(contents): if isinstance(val, np.ndarray): - jobstring += self._generate_job( - prefix=None, contents=val) + jobstring += self._generate_job(prefix=None, contents=val) elif isinstance(val, list): items_format = [] for el in val: items_format += [ - '{}' if not isinstance(el, (str, bytes)) else - '\'{}\'' + "{}" if not isinstance(el, (str, bytes)) else "'{}'" ] - val_format = ', '.join(items_format).format - jobstring += '[{}];...\n'.format(val_format(*val)) + val_format = ", ".join(items_format).format + jobstring += f"[{val_format(*val)}];...\n" elif isinstance(val, (str, bytes)): - jobstring += '\'{}\';...\n'.format(val) + jobstring += f"'{val}';...\n" else: - jobstring += '%s;...\n' % str(val) - jobstring += '};\n' + jobstring += "%s;...\n" % str(val) + jobstring += "};\n" else: for i, val in enumerate(contents): for field in val.dtype.fields: @@ -511,9 +515,9 @@ def _generate_job(self, prefix='', contents=None): jobstring += self._generate_job(newprefix, val[field]) return jobstring if isinstance(contents, (str, bytes)): - jobstring += "%s = '%s';\n" % (prefix, contents) + jobstring += f"{prefix} = '{contents}';\n" return jobstring - jobstring += "%s = %s;\n" % (prefix, str(contents)) + jobstring += f"{prefix} = {contents};\n" return jobstring def _make_matlab_command(self, contents, postscript=None): @@ -551,36 +555,45 @@ def _make_matlab_command(self, contents, postscript=None): end\n """ if self.mlab.inputs.mfile: - if (isdefined(self.inputs.use_v8struct) - and self.inputs.use_v8struct): - mscript += self._generate_job('jobs{1}.spm.%s.%s' % - (self.jobtype, - self.jobname), contents[0]) + if isdefined(self.inputs.use_v8struct) and self.inputs.use_v8struct: + mscript += self._generate_job( + f"jobs{{1}}.spm.{self.jobtype}.{self.jobname}", contents[0] + ) else: if self.jobname in [ - 'st', 'smooth', 'preproc', 'preproc8', 'fmri_spec', - 'fmri_est', 'factorial_design', 'defs' + "st", + "smooth", + "preproc", + "preproc8", + "fmri_spec", + "fmri_est", + "factorial_design", + "defs", ]: # parentheses - mscript += self._generate_job('jobs{1}.%s{1}.%s(1)' % - (self.jobtype, - self.jobname), contents[0]) + mscript += self._generate_job( + f"jobs{{1}}.{self.jobtype}{{1}}.{self.jobname}(1)", + contents[0], + ) else: # curly brackets - mscript += self._generate_job('jobs{1}.%s{1}.%s{1}' % - (self.jobtype, - self.jobname), contents[0]) + mscript += self._generate_job( + 
f"jobs{{1}}.{self.jobtype}{{1}}.{self.jobname}{{1}}", + contents[0], + ) else: from scipy.io import savemat + jobdef = { - 'jobs': [{ - self.jobtype: [{ - self.jobname: - self.reformat_dict_for_savemat(contents[0]) - }] - }] + "jobs": [ + { + self.jobtype: [ + {self.jobname: self.reformat_dict_for_savemat(contents[0])} + ] + } + ] } - savemat(os.path.join(cwd, 'pyjobs_%s.mat' % self.jobname), jobdef) + savemat(os.path.join(cwd, "pyjobs_%s.mat" % self.jobname), jobdef) mscript += "load pyjobs_%s;\n\n" % self.jobname mscript += """ spm_jobman(\'run\', jobs);\n @@ -597,30 +610,17 @@ def _make_matlab_command(self, contents, postscript=None): class ImageFileSPM(ImageFile): - """ - Defines an ImageFile trait specific to SPM interfaces. - """ - - def __init__(self, - value='', - filter=None, - auto_set=False, - entries=0, - exists=False, - types=['nifti1', 'nifti2'], - allow_compressed=False, - **metadata): - """ Trait handles neuroimaging files. - - Parameters - ---------- - types : list - Strings of file format types accepted - compressed : boolean - Indicates whether the file format can compressed - """ - self.types = types - self.allow_compressed = allow_compressed - super(ImageFileSPM, - self).__init__(value, filter, auto_set, entries, exists, types, - allow_compressed, **metadata) + """Defines a trait whose value must be a NIfTI file.""" + + def __init__( + self, value=NoDefaultSpecified, exists=False, resolve=False, **metadata + ): + """Create an ImageFileSPM trait.""" + super().__init__( + value=value, + exists=exists, + types=["nifti1", "nifti2"], + allow_compressed=False, + resolve=resolve, + **metadata, + ) diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index 5293346dbb..de5447b4b7 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -1,13 +1,8 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The spm module provides basic functions for interfacing with matlab and spm to access spm tools. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, bytes - # Standard library imports import os from glob import glob @@ -17,99 +12,112 @@ # Local imports from ... 
import logging -from ...utils.filemanip import (ensure_list, simplify_list, - split_filename) -from ..base import (Bunch, traits, TraitedSpec, File, Directory, - OutputMultiPath, InputMultiPath, isdefined) -from .base import (SPMCommand, SPMCommandInputSpec, scans_for_fnames, - ImageFileSPM) - -__docformat__ = 'restructuredtext' -iflogger = logging.getLogger('nipype.interface') +from ...utils.filemanip import ensure_list, simplify_list, split_filename, load_spm_mat +from ..base import ( + Bunch, + traits, + Tuple, + TraitedSpec, + File, + Directory, + OutputMultiPath, + InputMultiPath, + isdefined, +) +from .base import SPMCommand, SPMCommandInputSpec, scans_for_fnames, ImageFileSPM + +__docformat__ = "restructuredtext" +iflogger = logging.getLogger("nipype.interface") class Level1DesignInputSpec(SPMCommandInputSpec): spm_mat_dir = Directory( - exists=True, field='dir', desc='directory to store SPM.mat file (opt)') + exists=True, field="dir", desc="directory to store SPM.mat file (opt)" + ) timing_units = traits.Enum( - 'secs', - 'scans', - field='timing.units', - desc='units for specification of onsets', - mandatory=True) + "secs", + "scans", + field="timing.units", + desc="units for specification of onsets", + mandatory=True, + ) interscan_interval = traits.Float( - field='timing.RT', desc='Interscan interval in secs', mandatory=True) + field="timing.RT", desc="Interscan interval in secs", mandatory=True + ) microtime_resolution = traits.Int( - field='timing.fmri_t', - desc=('Number of time-bins per scan ' - 'in secs (opt)')) + field="timing.fmri_t", desc=("Number of time-bins per scan in secs (opt)") + ) microtime_onset = traits.Float( - field='timing.fmri_t0', - desc=('The onset/time-bin in seconds for ' - 'alignment (opt)')) + field="timing.fmri_t0", + desc=("The onset/time-bin in seconds for alignment (opt)"), + ) session_info = traits.Any( - field='sess', - desc=('Session specific information generated ' - 'by ``modelgen.SpecifyModel``'), - mandatory=True) + field="sess", + desc=("Session specific information generated by ``modelgen.SpecifyModel``"), + mandatory=True, + ) factor_info = traits.List( - traits.Dict(traits.Enum('name', 'levels')), - field='fact', - desc=('Factor specific information ' - 'file (opt)')) + traits.Dict(traits.Enum("name", "levels")), + field="fact", + desc=("Factor specific information file (opt)"), + ) bases = traits.Dict( - traits.Enum('hrf', 'fourier', 'fourier_han', 'gamma', 'fir'), - field='bases', - desc=""" - dict {'name':{'basesparam1':val,...}} - name : string - Name of basis function (hrf, fourier, fourier_han, - gamma, fir) - - hrf : - derivs : 2-element list - Model HRF Derivatives. No derivatives: [0,0], - Time derivatives : [1,0], Time and Dispersion - derivatives: [1,1] - fourier, fourier_han, gamma, fir: - length : int - Post-stimulus window length (in seconds) - order : int - Number of basis functions + traits.Enum("hrf", "fourier", "fourier_han", "gamma", "fir"), + field="bases", + desc="""\ +Dictionary names of the basis function to parameters: + + * hrf + + * derivs -- (2-element list) Model HRF Derivatives. 
No derivatives: [0,0], + Time derivatives : [1,0], Time and Dispersion derivatives: [1,1] + + * fourier, fourier_han, gamma, or fir: + + * length -- (int) Post-stimulus window length (in seconds) + * order -- (int) Number of basis functions + """, - mandatory=True) + mandatory=True, + ) volterra_expansion_order = traits.Enum( - 1, 2, field='volt', desc=('Model interactions - ' - 'yes:1, no:2')) + 1, 2, field="volt", desc=("Model interactions - no:1, yes:2") + ) global_intensity_normalization = traits.Enum( - 'none', - 'scaling', - field='global', - desc=('Global intensity ' - 'normalization - ' - 'scaling or none')) + "none", + "scaling", + field="global", + desc=("Global intensity normalization - scaling or none"), + ) mask_image = File( - exists=True, - field='mask', - desc='Image for explicitly masking the analysis') + exists=True, field="mask", desc="Image for explicitly masking the analysis" + ) mask_threshold = traits.Either( - traits.Enum('-Inf'), + traits.Enum("-Inf"), traits.Float(), desc="Thresholding for the mask", - default='-Inf', - usedefault=True) + default="-Inf", + usedefault=True, + ) model_serial_correlations = traits.Enum( - 'AR(1)', - 'FAST', - 'none', - field='cvi', - desc=('Model serial correlations ' - 'AR(1), FAST or none. FAST ' - 'is available in SPM12')) + "AR(1)", + "FAST", + "none", + field="cvi", + desc=( + "Model serial correlations " + "AR(1), FAST or none. FAST " + "is available in SPM12" + ), + ) + flags = traits.Dict( + desc="Additional arguments to the job, e.g., a common SPM operation is to " + "modify the default masking threshold (mthresh)" + ) class Level1DesignOutputSpec(TraitedSpec): - spm_mat_file = File(exists=True, desc='SPM mat file') + spm_mat_file = File(exists=True, desc="SPM mat file") class Level1Design(SPMCommand): @@ -125,6 +133,7 @@ class Level1Design(SPMCommand): >>> level1design.inputs.interscan_interval = 2.5 >>> level1design.inputs.bases = {'hrf':{'derivs': [0,0]}} >>> level1design.inputs.session_info = 'session_info.npz' + >>> level1design.inputs.flags = {'mthresh': 0.4} >>> level1design.run() # doctest: +SKIP """ @@ -132,31 +141,31 @@ class Level1Design(SPMCommand): input_spec = Level1DesignInputSpec output_spec = Level1DesignOutputSpec - _jobtype = 'stats' - _jobname = 'fmri_spec' + _jobtype = "stats" + _jobname = "fmri_spec" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt in ['spm_mat_dir', 'mask_image']: + """Convert input to appropriate format for spm""" + if opt in ["spm_mat_dir", "mask_image"]: return np.array([str(val)], dtype=object) - if opt in ['session_info']: # , 'factor_info']: + if opt in ["session_info"]: # , 'factor_info']: if isinstance(val, dict): return [val] else: return val - return super(Level1Design, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): - """validate spm realign options if set to None ignore - """ - einputs = super(Level1Design, - self)._parse_inputs(skip=('mask_threshold')) - for sessinfo in einputs[0]['sess']: - sessinfo['scans'] = scans_for_fnames( - ensure_list(sessinfo['scans']), keep4d=False) + """validate spm realign options if set to None ignore""" + einputs = super()._parse_inputs(skip=("mask_threshold", "flags")) + if isdefined(self.inputs.flags): + einputs[0].update(self.inputs.flags) + for sessinfo in einputs[0]["sess"]: + sessinfo["scans"] = scans_for_fnames( + ensure_list(sessinfo["scans"]), keep4d=False + ) if not isdefined(self.inputs.spm_mat_dir): - einputs[0]['dir'] = 
np.array([str(os.getcwd())], dtype=object)
+            einputs[0]["dir"] = np.array([str(os.getcwd())], dtype=object)
         return einputs
 
     def _make_matlab_command(self, content):
@@ -168,68 +177,103 @@ def _make_matlab_command(self, content):
             # SPM doesn't handle explicit masking properly, especially
             # when you want to use the entire mask image
             postscript = "load SPM;\n"
-            postscript += ("SPM.xM.VM = spm_vol('%s');\n" % simplify_list(
-                self.inputs.mask_image))
+            postscript += "SPM.xM.VM = spm_vol('%s');\n" % simplify_list(
+                self.inputs.mask_image
+            )
             postscript += "SPM.xM.I = 0;\n"
             postscript += "SPM.xM.T = [];\n"
-            postscript += ("SPM.xM.TH = ones(size(SPM.xM.TH))*(%s);\n" %
-                           self.inputs.mask_threshold)
-            postscript += ("SPM.xM.xs = struct('Masking', "
-                           "'explicit masking only');\n")
+            postscript += (
+                "SPM.xM.TH = ones(size(SPM.xM.TH))*(%s);\n" % self.inputs.mask_threshold
+            )
+            postscript += "SPM.xM.xs = struct('Masking', 'explicit masking only');\n"
             postscript += "save SPM SPM;\n"
         else:
             postscript = None
-        return super(Level1Design, self)._make_matlab_command(
-            content, postscript=postscript)
+        return super()._make_matlab_command(content, postscript=postscript)
 
     def _list_outputs(self):
         outputs = self._outputs().get()
-        spm = os.path.join(os.getcwd(), 'SPM.mat')
-        outputs['spm_mat_file'] = spm
+        spm = os.path.join(os.getcwd(), "SPM.mat")
+        outputs["spm_mat_file"] = spm
         return outputs
 
 
 class EstimateModelInputSpec(SPMCommandInputSpec):
     spm_mat_file = File(
         exists=True,
-        field='spmmat',
+        field="spmmat",
         copyfile=True,
         mandatory=True,
-        desc='Absolute path to SPM.mat')
+        desc="Absolute path to SPM.mat",
+    )
     estimation_method = traits.Dict(
-        traits.Enum('Classical', 'Bayesian2', 'Bayesian'),
-        field='method',
+        traits.Enum("Classical", "Bayesian2", "Bayesian"),
+        field="method",
         mandatory=True,
-        desc=('Dictionary of either Classical: 1, Bayesian: 1, '
-              'or Bayesian2: 1 (dict)'))
+        desc=("Dictionary of either Classical: 1, Bayesian: 1, or Bayesian2: 1 (dict)"),
+    )
     write_residuals = traits.Bool(
-        field='write_residuals', desc="Write individual residual images")
-    flags = traits.Dict(desc='Additional arguments')
+        field="write_residuals", desc="Write individual residual images"
+    )
+    flags = traits.Dict(desc="Additional arguments")
 
 
 class EstimateModelOutputSpec(TraitedSpec):
-    mask_image = ImageFileSPM(
-        exists=True, desc='binary mask to constrain estimation')
+    mask_image = ImageFileSPM(exists=True, desc="binary mask to constrain estimation")
     beta_images = OutputMultiPath(
-        ImageFileSPM(exists=True), desc='design parameter estimates')
+        ImageFileSPM(exists=True), desc="design parameter estimates"
+    )
     residual_image = ImageFileSPM(
-        exists=True, desc='Mean-squared image of the residuals')
+        exists=True, desc="Mean-squared image of the residuals"
+    )
     residual_images = OutputMultiPath(
         ImageFileSPM(exists=True),
-        desc="individual residual images (requires `write_residuals`")
-    RPVimage = ImageFileSPM(exists=True, desc='Resels per voxel image')
-    spm_mat_file = File(exists=True, desc='Updated SPM mat file')
+        desc="individual residual images (requires `write_residuals`)",
+    )
+    RPVimage = ImageFileSPM(exists=True, desc="Resels per voxel image")
+    spm_mat_file = File(exists=True, desc="Updated SPM mat file")
     labels = ImageFileSPM(exists=True, desc="label file")
     SDerror = OutputMultiPath(
-        ImageFileSPM(exists=True),
-        desc="Images of the standard deviation of the error")
+        ImageFileSPM(exists=True), desc="Images of the standard deviation of the error"
+    )
     ARcoef = OutputMultiPath(
ImageFileSPM(exists=True), desc="Images of the AR coefficient") + ImageFileSPM(exists=True), desc="Images of the AR coefficient" + ) Cbetas = OutputMultiPath( - ImageFileSPM(exists=True), desc="Images of the parameter posteriors") + ImageFileSPM(exists=True), desc="Images of the parameter posteriors" + ) SDbetas = OutputMultiPath( ImageFileSPM(exists=True), - desc="Images of the standard deviation of parameter posteriors") + desc="Images of the standard deviation of parameter posteriors", + ) + con_images = OutputMultiPath( + File(exists=True), + desc=( + "contrast images from a t-contrast " + "(created if factor_info used in Level1Design)" + ), + ) + spmT_images = OutputMultiPath( + File(exists=True), + desc=( + "stat images from a t-contrast" + "(created if factor_info used in Level1Design)" + ), + ) + ess_images = OutputMultiPath( + File(exists=True), + desc=( + "contrast images from an F-contrast" + "(created if factor_info used in Level1Design)" + ), + ) + spmF_images = OutputMultiPath( + File(exists=True), + desc=( + "stat images from an F-contrast" + "(created if factor_info used in Level1Design)" + ), + ) class EstimateModel(SPMCommand): @@ -244,126 +288,168 @@ class EstimateModel(SPMCommand): >>> est.inputs.estimation_method = {'Classical': 1} >>> est.run() # doctest: +SKIP """ + input_spec = EstimateModelInputSpec output_spec = EstimateModelOutputSpec - _jobtype = 'stats' - _jobname = 'fmri_est' + _jobtype = "stats" + _jobname = "fmri_est" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt == 'spm_mat_file': + """Convert input to appropriate format for spm""" + if opt == "spm_mat_file": return np.array([str(val)], dtype=object) - if opt == 'estimation_method': + if opt == "estimation_method": if isinstance(val, (str, bytes)): - return {'{}'.format(val): 1} + return {f"{val}": 1} else: return val - return super(EstimateModel, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): - """validate spm realign options if set to None ignore - """ - einputs = super(EstimateModel, self)._parse_inputs(skip=('flags')) + """validate spm realign options if set to None ignore""" + einputs = super()._parse_inputs(skip=("flags")) if isdefined(self.inputs.flags): - einputs[0].update( - {flag: val - for (flag, val) in self.inputs.flags.items()}) + einputs[0].update(self.inputs.flags) return einputs def _list_outputs(self): - import scipy.io as sio outputs = self._outputs().get() pth = os.path.dirname(self.inputs.spm_mat_file) - outtype = 'nii' if '12' in self.version.split('.')[0] else 'img' - spm = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False) - - betas = [vbeta.fname[0] for vbeta in spm['SPM'][0, 0].Vbeta[0]] - if ('Bayesian' in self.inputs.estimation_method.keys() - or 'Bayesian2' in self.inputs.estimation_method.keys()): - outputs['labels'] = os.path.join(pth, 'labels.{}'.format(outtype)) - outputs['SDerror'] = glob(os.path.join(pth, 'Sess*_SDerror*')) - outputs['ARcoef'] = glob(os.path.join(pth, 'Sess*_AR_*')) + outtype = "nii" if "12" in self.version.split(".")[0] else "img" + spm = load_spm_mat(self.inputs.spm_mat_file, struct_as_record=False) + + betas = [vbeta.fname[0] for vbeta in spm["SPM"][0, 0].Vbeta[0]] + if ( + "Bayesian" in self.inputs.estimation_method + or "Bayesian2" in self.inputs.estimation_method + ): + outputs["labels"] = os.path.join(pth, f"labels.{outtype}") + outputs["SDerror"] = glob(os.path.join(pth, "Sess*_SDerror*")) + outputs["ARcoef"] = 
glob(os.path.join(pth, "Sess*_AR_*")) if betas: - outputs['Cbetas'] = [ - os.path.join(pth, 'C{}'.format(beta)) for beta in betas - ] - outputs['SDbetas'] = [ - os.path.join(pth, 'SD{}'.format(beta)) for beta in betas - ] + outputs["Cbetas"] = [os.path.join(pth, f"C{beta}") for beta in betas] + outputs["SDbetas"] = [os.path.join(pth, f"SD{beta}") for beta in betas] - if 'Classical' in self.inputs.estimation_method.keys(): - outputs['residual_image'] = os.path.join( - pth, 'ResMS.{}'.format(outtype)) - outputs['RPVimage'] = os.path.join(pth, 'RPV.{}'.format(outtype)) + if "Classical" in self.inputs.estimation_method: + outputs["residual_image"] = os.path.join(pth, f"ResMS.{outtype}") + outputs["RPVimage"] = os.path.join(pth, f"RPV.{outtype}") if self.inputs.write_residuals: - outputs['residual_images'] = glob(os.path.join(pth, 'Res_*')) + outputs["residual_images"] = glob(os.path.join(pth, "Res_*")) if betas: - outputs['beta_images'] = [ - os.path.join(pth, beta) for beta in betas + outputs["beta_images"] = [os.path.join(pth, beta) for beta in betas] + # When 'factor_info' is used in Level1Design + # spm automatically creates contrast + try: + contrast = [c.Vcon[0][0].fname[0] for c in spm["SPM"][0, 0].xCon[0]] + contrast_spm = [c.Vspm[0][0].fname[0] for c in spm["SPM"][0, 0].xCon[0]] + except Exception: + contrast = [] + contrast_spm = [] + + if contrast: + outputs["con_images"] = [ + os.path.join(pth, cont) for cont in contrast if 'con' in cont + ] + outputs["ess_images"] = [ + os.path.join(pth, cont) for cont in contrast if 'ess' in cont + ] + if contrast_spm: + outputs["spmT_images"] = [ + os.path.join(pth, cont) for cont in contrast_spm if 'spmT' in cont + ] + outputs["spmF_images"] = [ + os.path.join(pth, cont) for cont in contrast_spm if 'spmF' in cont ] - outputs['mask_image'] = os.path.join(pth, 'mask.{}'.format(outtype)) - outputs['spm_mat_file'] = os.path.join(pth, 'SPM.mat') + outputs["mask_image"] = os.path.join(pth, f"mask.{outtype}") + outputs["spm_mat_file"] = os.path.join(pth, "SPM.mat") return outputs class EstimateContrastInputSpec(SPMCommandInputSpec): spm_mat_file = File( exists=True, - field='spmmat', - desc='Absolute path to SPM.mat', + field="spmmat", + desc="Absolute path to SPM.mat", copyfile=True, - mandatory=True) + mandatory=True, + ) contrasts = traits.List( traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float), traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('F'), - traits.List( - traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float), - traits.List(traits.Float)))))), + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + Tuple( + traits.Str, + traits.Enum("F"), + traits.List( + traits.Either( + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + ) + ), + ), + ), desc="""List of contrasts with each contrast being a list of the form: [('name', 'stat', [condition 
list], [weight list], [session list])] If session list is None or not provided, all sessions are used. For F contrasts, the condition list should contain previously defined T-contrasts.""", - mandatory=True) + mandatory=True, + ) beta_images = InputMultiPath( File(exists=True), - desc=('Parameter estimates of the ' - 'design matrix'), + desc=("Parameter estimates of the design matrix"), copyfile=False, - mandatory=True) + mandatory=True, + ) residual_image = File( exists=True, - desc='Mean-squared image of the residuals', + desc="Mean-squared image of the residuals", copyfile=False, - mandatory=True) + mandatory=True, + ) use_derivs = traits.Bool( - desc='use derivatives for estimation', xor=['group_contrast']) - group_contrast = traits.Bool( - desc='higher level contrast', xor=['use_derivs']) + desc="use derivatives for estimation", xor=["group_contrast"] + ) + group_contrast = traits.Bool(desc="higher level contrast", xor=["use_derivs"]) class EstimateContrastOutputSpec(TraitedSpec): con_images = OutputMultiPath( - File(exists=True), desc='contrast images from a t-contrast') + File(exists=True), desc="contrast images from a t-contrast" + ) spmT_images = OutputMultiPath( - File(exists=True), desc='stat images from a t-contrast') + File(exists=True), desc="stat images from a t-contrast" + ) ess_images = OutputMultiPath( - File(exists=True), desc='contrast images from an F-contrast') + File(exists=True), desc="contrast images from an F-contrast" + ) spmF_images = OutputMultiPath( - File(exists=True), desc='stat images from an F-contrast') - spm_mat_file = File(exists=True, desc='Updated SPM mat file') + File(exists=True), desc="stat images from an F-contrast" + ) + spm_mat_file = File(exists=True, desc="Updated SPM mat file") class EstimateContrast(SPMCommand): @@ -384,174 +470,209 @@ class EstimateContrast(SPMCommand): input_spec = EstimateContrastInputSpec output_spec = EstimateContrastOutputSpec - _jobtype = 'stats' - _jobname = 'con' + _jobtype = "stats" + _jobname = "con" def _make_matlab_command(self, _): - """validates spm options and generates job structure - """ + """Validate spm options and generate job structure.""" contrasts = [] cname = [] for i, cont in enumerate(self.inputs.contrasts): cname.insert(i, cont[0]) - contrasts.insert(i, - Bunch( - name=cont[0], - stat=cont[1], - conditions=cont[2], - weights=None, - sessions=None)) + contrasts.insert( + i, + Bunch( + name=cont[0], + stat=cont[1], + conditions=cont[2], + weights=None, + sessions=None, + ), + ) if len(cont) >= 4: contrasts[i].weights = cont[3] if len(cont) >= 5: contrasts[i].sessions = cont[4] - script = "% generated by nipype.interfaces.spm\n" - script += "spm_defaults;\n" - script += ("jobs{1}.stats{1}.con.spmmat = {'%s'};\n" % - self.inputs.spm_mat_file) - script += "load(jobs{1}.stats{1}.con.spmmat{:});\n" - script += "SPM.swd = '%s';\n" % os.getcwd() - script += "save(jobs{1}.stats{1}.con.spmmat{:},'SPM');\n" - script += "names = SPM.xX.name;\n" + script = [ + """\ +%% generated by nipype.interfaces.spm +spm_defaults; +jobs{1}.stats{1}.con.spmmat = {'%s'}; +load(jobs{1}.stats{1}.con.spmmat{:}); +SPM.swd = '%s'; +save(jobs{1}.stats{1}.con.spmmat{:},'SPM'); +[msg,id] = lastwarn(''); +if strcmp(id,'MATLAB:save:sizeTooBigForMATFile') + save(jobs{1}.stats{1}.con.spmmat{:},'SPM','-v7.3'); +end +names = SPM.xX.name;""" + % (self.inputs.spm_mat_file, os.getcwd()) + ] # get names for columns - if (isdefined(self.inputs.group_contrast) - and self.inputs.group_contrast): - script += "condnames=names;\n" + if 
isdefined(self.inputs.group_contrast) and self.inputs.group_contrast: + script += ["condnames=names;"] else: if self.inputs.use_derivs: - script += "pat = 'Sn\([0-9]*\) (.*)';\n" + script += [r"pat = 'Sn\([0-9]*\) (.*)';"] else: - script += ("pat = 'Sn\([0-9]*\) (.*)\*bf\(1\)|Sn\([0-9]*\) " - ".*\*bf\([2-9]\)|Sn\([0-9]*\) (.*)';\n") - script += "t = regexp(names,pat,'tokens');\n" + script += [ + r"pat = 'Sn\([0-9]*\) (.*)\*bf\(1\)|Sn\([0-9]*\) " + r".*\*bf\([2-9]\)|Sn\([0-9]*\) (.*)';" + ] + + script += ["t = regexp(names,pat,'tokens');"] # get sessidx for columns - script += "pat1 = 'Sn\(([0-9].*)\)\s.*';\n" - script += "t1 = regexp(names,pat1,'tokens');\n" - script += ("for i0=1:numel(t),condnames{i0}='';condsess(i0)=0;if " - "~isempty(t{i0}{1}),condnames{i0} = t{i0}{1}{1};" - "condsess(i0)=str2num(t1{i0}{1}{1});end;end;\n") + script += [r"pat1 = 'Sn\(([0-9].*)\)\s.*';"] + script += ["t1 = regexp(names,pat1,'tokens');"] + script += [ + """\ +for i0=1:numel(t) + condnames{i0}=''; + condsess(i0)=0; + if ~isempty(t{i0}{1}) + condnames{i0} = t{i0}{1}{1}; + condsess(i0)=str2num(t1{i0}{1}{1}); + end; +end;""" + ] + # BUILD CONTRAST SESSION STRUCTURE for i, contrast in enumerate(contrasts): - if contrast.stat == 'T': - script += ("consess{%d}.tcon.name = '%s';\n" % - (i + 1, contrast.name)) - script += ( - "consess{%d}.tcon.convec = zeros(1,numel(names));\n" % - (i + 1)) + if contrast.stat == "T": + script += ["consess{%d}.tcon.name = '%s';" % (i + 1, contrast.name)] + script += ["consess{%d}.tcon.convec = zeros(1,numel(names));" % (i + 1)] for c0, cond in enumerate(contrast.conditions): - script += ("idx = strmatch('%s',condnames,'exact');\n" % - (cond)) - script += (("if isempty(idx), throw(MException(" - "'CondName:Chk', sprintf('Condition %%s not " - "found in design','%s'))); end;\n") % cond) + script += ["idx = strmatch('%s',condnames,'exact');" % cond] + script += [ + """\ +if isempty(idx) + throw(MException('CondName:Chk', sprintf('Condition %%s not found in design','%s'))); +end;""" + % cond + ] if contrast.sessions: for sno, sw in enumerate(contrast.sessions): - script += ("sidx = find(condsess(idx)==%d);\n" % - (sno + 1)) - script += (("consess{%d}.tcon.convec(idx(sidx)) " - "= %f;\n") % - (i + 1, sw * contrast.weights[c0])) + script += ["sidx = find(condsess(idx)==%d);" % (sno + 1)] + script += [ + "consess{%d}.tcon.convec(idx(sidx)) = %f;" + % (i + 1, sw * contrast.weights[c0]) + ] else: - script += ("consess{%d}.tcon.convec(idx) = %f;\n" % - (i + 1, contrast.weights[c0])) + script += [ + "consess{%d}.tcon.convec(idx) = %f;" + % (i + 1, contrast.weights[c0]) + ] for i, contrast in enumerate(contrasts): - if contrast.stat == 'F': - script += ("consess{%d}.fcon.name = '%s';\n" % - (i + 1, contrast.name)) + if contrast.stat == "F": + script += ["consess{%d}.fcon.name = '%s';" % (i + 1, contrast.name)] for cl0, fcont in enumerate(contrast.conditions): - try: - tidx = cname.index(fcont[0]) - except: - Exception("Contrast Estimate: could not get index of" - " T contrast. 
probably not defined prior " - "to the F contrasts") - script += (("consess{%d}.fcon.convec{%d} = " - "consess{%d}.tcon.convec;\n") % - (i + 1, cl0 + 1, tidx + 1)) - script += "jobs{1}.stats{1}.con.consess = consess;\n" - script += ("if strcmp(spm('ver'),'SPM8'), spm_jobman('initcfg');" - "jobs=spm_jobman('spm5tospm8',{jobs});end\n") - script += "spm_jobman('run',jobs);" - return script + tidx = cname.index(fcont[0]) + script += [ + "consess{%d}.fcon.convec{%d} = consess{%d}.tcon.convec;" + % (i + 1, cl0 + 1, tidx + 1) + ] + script += ["jobs{1}.stats{1}.con.consess = consess;"] + script += [ + """\ +if strcmp(spm('ver'),'SPM8') + spm_jobman('initcfg'); + jobs=spm_jobman('spm5tospm8',{jobs}); +end;""" + ] + script += ["spm_jobman('run',jobs);"] + return "\n".join(script) def _list_outputs(self): - import scipy.io as sio outputs = self._outputs().get() pth, _ = os.path.split(self.inputs.spm_mat_file) - spm = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False) + spm = load_spm_mat(self.inputs.spm_mat_file, struct_as_record=False) con_images = [] spmT_images = [] - for con in spm['SPM'][0, 0].xCon[0]: + for con in spm["SPM"][0, 0].xCon[0]: con_images.append(str(os.path.join(pth, con.Vcon[0, 0].fname[0]))) spmT_images.append(str(os.path.join(pth, con.Vspm[0, 0].fname[0]))) if con_images: - outputs['con_images'] = con_images - outputs['spmT_images'] = spmT_images - spm12 = '12' in self.version.split('.')[0] + outputs["con_images"] = con_images + outputs["spmT_images"] = spmT_images + spm12 = "12" in self.version.split(".")[0] if spm12: - ess = glob(os.path.join(pth, 'ess*.nii')) + ess = glob(os.path.join(pth, "ess*.nii")) else: - ess = glob(os.path.join(pth, 'ess*.img')) + ess = glob(os.path.join(pth, "ess*.img")) if len(ess) > 0: - outputs['ess_images'] = sorted(ess) + outputs["ess_images"] = sorted(ess) if spm12: - spmf = glob(os.path.join(pth, 'spmF*.nii')) + spmf = glob(os.path.join(pth, "spmF*.nii")) else: - spmf = glob(os.path.join(pth, 'spmF*.img')) + spmf = glob(os.path.join(pth, "spmF*.img")) if len(spmf) > 0: - outputs['spmF_images'] = sorted(spmf) - outputs['spm_mat_file'] = self.inputs.spm_mat_file + outputs["spmF_images"] = sorted(spmf) + outputs["spm_mat_file"] = self.inputs.spm_mat_file return outputs class ThresholdInputSpec(SPMCommandInputSpec): spm_mat_file = File( - exists=True, - desc='absolute path to SPM.mat', - copyfile=True, - mandatory=True) - stat_image = File( - exists=True, desc='stat image', copyfile=False, mandatory=True) + exists=True, desc="absolute path to SPM.mat", copyfile=True, mandatory=True + ) + stat_image = File(exists=True, desc="stat image", copyfile=False, mandatory=True) contrast_index = traits.Int( - mandatory=True, desc='which contrast in the SPM.mat to use') + mandatory=True, desc="which contrast in the SPM.mat to use" + ) use_fwe_correction = traits.Bool( True, usedefault=True, - desc=('whether to use FWE (Bonferroni) ' - 'correction for initial threshold ' - '(height_threshold_type has to be ' - 'set to p-value)')) + desc=( + "whether to use FWE (Bonferroni) " + "correction for initial threshold " + "(height_threshold_type has to be " + "set to p-value)" + ), + ) + use_vox_fdr_correction = traits.Bool( + False, + usedefault=True, + desc=( + "whether to use voxel-based FDR " + "correction for initial threshold " + "(height_threshold_type has to be " + "set to q-value)" + ), + ) use_topo_fdr = traits.Bool( True, usedefault=True, - desc=('whether to use FDR over cluster extent ' - 'probabilities')) + desc=("whether to use FDR over cluster 
extent probabilities"), + ) height_threshold = traits.Float( 0.05, usedefault=True, - desc=('value for initial thresholding ' - '(defining clusters)')) + desc=("value for initial thresholding (defining clusters)"), + ) height_threshold_type = traits.Enum( - 'p-value', - 'stat', + "p-value", + "stat", usedefault=True, - desc=('Is the cluster forming ' - 'threshold a stat value or ' - 'p-value?')) + desc=("Is the cluster forming threshold a stat value or p-value?"), + ) extent_fdr_p_threshold = traits.Float( 0.05, usedefault=True, - desc=('p threshold on FDR corrected ' - 'cluster size probabilities')) + desc=("p threshold on FDR corrected cluster size probabilities"), + ) extent_threshold = traits.Int( - 0, usedefault=True, desc='Minimum cluster size in voxels') + 0, usedefault=True, desc="Minimum cluster size in voxels" + ) force_activation = traits.Bool( False, usedefault=True, - desc=('In case no clusters survive the ' - 'topological inference step this ' - 'will pick a culster with the highes ' - 'sum of t-values. Use with care.')) + desc=( + "In case no clusters survive the " + "topological inference step this " + "will pick a culster with the highest " + "sum of t-values. Use with care." + ), + ) class ThresholdOutputSpec(TraitedSpec): @@ -578,6 +699,7 @@ class Threshold(SPMCommand): >>> thresh.inputs.extent_fdr_p_threshold = 0.05 >>> thresh.run() # doctest: +SKIP """ + input_spec = ThresholdInputSpec output_spec = ThresholdOutputSpec @@ -592,8 +714,16 @@ def _gen_pre_topo_map_filename(self): def _make_matlab_command(self, _): script = "con_index = %d;\n" % self.inputs.contrast_index script += "cluster_forming_thr = %f;\n" % self.inputs.height_threshold - if self.inputs.use_fwe_correction: + + if self.inputs.use_fwe_correction and self.inputs.use_vox_fdr_correction: + raise ValueError( + "'use_fwe_correction' and 'use_vox_fdr_correction' can't both be True" + ) + + if self.inputs.use_fwe_correction and not self.inputs.use_vox_fdr_correction: script += "thresDesc = 'FWE';\n" + elif self.inputs.use_vox_fdr_correction and not self.inputs.use_fwe_correction: + script += "thresDesc = 'FDR';\n" else: script += "thresDesc = 'none';\n" @@ -606,11 +736,11 @@ def _make_matlab_command(self, _): script += "force_activation = 1;\n" else: script += "force_activation = 0;\n" - script += ("cluster_extent_p_fdr_thr = %f;\n" % - self.inputs.extent_fdr_p_threshold) + script += ( + "cluster_extent_p_fdr_thr = %f;\n" % self.inputs.extent_fdr_p_threshold + ) script += "stat_filename = '%s';\n" % self.inputs.stat_image - script += ("height_threshold_type = '%s';\n" % - self.inputs.height_threshold_type) + script += "height_threshold_type = '%s';\n" % self.inputs.height_threshold_type script += "extent_threshold = %d;\n" % self.inputs.extent_threshold script += "load %s;\n" % self.inputs.spm_mat_file @@ -618,6 +748,8 @@ def _make_matlab_command(self, _): FWHM = SPM.xVol.FWHM; df = [SPM.xCon(con_index).eidf SPM.xX.erdf]; STAT = SPM.xCon(con_index).STAT; +VspmSv = cat(1,SPM.xCon(con_index).Vspm); + R = SPM.xVol.R; S = SPM.xVol.S; n = 1; @@ -626,6 +758,9 @@ def _make_matlab_command(self, _): case 'FWE' cluster_forming_thr = spm_uc(cluster_forming_thr,df,STAT,R,n,S); + case 'FDR' + cluster_forming_thr = spm_uc_FDR(cluster_forming_thr,df,STAT,n,VspmSv,0); + case 'none' if strcmp(height_threshold_type, 'p-value') cluster_forming_thr = spm_u(cluster_forming_thr^(1/n),df,STAT); @@ -643,9 +778,10 @@ def _make_matlab_command(self, _): Zth = Z(Z >= cluster_forming_thr); """ - script += 
(("spm_write_filtered(Zth,XYZth,stat_map_vol.dim'," - "stat_map_vol.mat,'thresholded map', '%s');\n") % - self._gen_pre_topo_map_filename()) + script += ( + "spm_write_filtered(Zth,XYZth,stat_map_vol.dim'," + "stat_map_vol.mat,'thresholded map', '%s');\n" + ) % self._gen_pre_topo_map_filename() script += """ max_size = 0; max_size_index = 0; @@ -703,55 +839,56 @@ def _make_matlab_command(self, _): fprintf('cluster_forming_thr = %f\\n',cluster_forming_thr); """ - script += (("spm_write_filtered(thresholded_Z,thresholded_XYZ," - "stat_map_vol.dim',stat_map_vol.mat,'thresholded map'," - " '%s');\n") % self._gen_thresholded_map_filename()) + script += ( + "spm_write_filtered(thresholded_Z,thresholded_XYZ," + "stat_map_vol.dim',stat_map_vol.mat,'thresholded map'," + " '%s');\n" + ) % self._gen_thresholded_map_filename() return script def aggregate_outputs(self, runtime=None): outputs = self._outputs() - setattr(outputs, 'thresholded_map', - self._gen_thresholded_map_filename()) - setattr(outputs, 'pre_topo_fdr_map', self._gen_pre_topo_map_filename()) - for line in runtime.stdout.split('\n'): + outputs.thresholded_map = self._gen_thresholded_map_filename() + outputs.pre_topo_fdr_map = self._gen_pre_topo_map_filename() + for line in runtime.stdout.split("\n"): if line.startswith("activation_forced = "): - setattr(outputs, 'activation_forced', - line[len("activation_forced = "):].strip() == "1") + outputs.activation_forced = ( + line[len("activation_forced = ") :].strip() == "1" + ) elif line.startswith("n_clusters = "): - setattr(outputs, 'n_clusters', - int(line[len("n_clusters = "):].strip())) + outputs.n_clusters = int(line[len("n_clusters = ") :].strip()) elif line.startswith("pre_topo_n_clusters = "): - setattr(outputs, 'pre_topo_n_clusters', - int(line[len("pre_topo_n_clusters = "):].strip())) + outputs.pre_topo_n_clusters = int( + line[len("pre_topo_n_clusters = ") :].strip() + ) elif line.startswith("cluster_forming_thr = "): - setattr(outputs, 'cluster_forming_thr', - float(line[len("cluster_forming_thr = "):].strip())) + outputs.cluster_forming_thr = float( + line[len("cluster_forming_thr = ") :].strip() + ) return outputs def _list_outputs(self): outputs = self._outputs().get() - outputs['thresholded_map'] = self._gen_thresholded_map_filename() - outputs['pre_topo_fdr_map'] = self._gen_pre_topo_map_filename() + outputs["thresholded_map"] = self._gen_thresholded_map_filename() + outputs["pre_topo_fdr_map"] = self._gen_pre_topo_map_filename() return outputs class ThresholdStatisticsInputSpec(SPMCommandInputSpec): spm_mat_file = File( - exists=True, - desc='absolute path to SPM.mat', - copyfile=True, - mandatory=True) - stat_image = File( - exists=True, desc='stat image', copyfile=False, mandatory=True) + exists=True, desc="absolute path to SPM.mat", copyfile=True, mandatory=True + ) + stat_image = File(exists=True, desc="stat image", copyfile=False, mandatory=True) contrast_index = traits.Int( - mandatory=True, desc='which contrast in the SPM.mat to use') + mandatory=True, desc="which contrast in the SPM.mat to use" + ) height_threshold = traits.Float( - desc=('stat value for initial ' - 'thresholding (defining clusters)'), - mandatory=True) + desc=("stat value for initial thresholding (defining clusters)"), mandatory=True + ) extent_threshold = traits.Int( - 0, usedefault=True, desc="Minimum cluster size in voxels") + 0, usedefault=True, desc="Minimum cluster size in voxels" + ) class ThresholdStatisticsOutputSpec(TraitedSpec): @@ -777,6 +914,7 @@ class ThresholdStatistics(SPMCommand): 
>>> thresh.inputs.height_threshold = 4.56
     >>> thresh.run() # doctest: +SKIP
     """
+
     input_spec = ThresholdStatisticsInputSpec
     output_spec = ThresholdStatisticsOutputSpec
 
@@ -837,16 +975,19 @@ def _make_matlab_command(self, _):
     def aggregate_outputs(self, runtime=None, needed_outputs=None):
         outputs = self._outputs()
         cur_output = ""
-        for line in runtime.stdout.split('\n'):
+        for line in runtime.stdout.split("\n"):
             if cur_output != "" and len(line.split()) != 0:
                 setattr(outputs, cur_output, float(line))
                 cur_output = ""
                 continue
 
-            if (len(line.split()) != 0 and line.split()[0] in [
-                    "clusterwise_P_FDR", "clusterwise_P_RF",
-                    "voxelwise_P_Bonf", "voxelwise_P_FDR", "voxelwise_P_RF",
-                    "voxelwise_P_uncor"
-            ]):
+            if len(line.split()) != 0 and line.split()[0] in [
+                "clusterwise_P_FDR",
+                "clusterwise_P_RF",
+                "voxelwise_P_Bonf",
+                "voxelwise_P_FDR",
+                "voxelwise_P_RF",
+                "voxelwise_P_uncor",
+            ]:
                 cur_output = line.split()[0]
                 continue
 
@@ -855,64 +996,68 @@ class FactorialDesignInputSpec(SPMCommandInputSpec):
     spm_mat_dir = Directory(
-        exists=True, field='dir', desc='directory to store SPM.mat file (opt)')
+        exists=True, field="dir", desc="directory to store SPM.mat file (opt)"
+    )
     # Need to make an alias of InputMultiPath; the inputs below are not Path
     covariates = InputMultiPath(
         traits.Dict(
-            key_trait=traits.Enum('vector', 'name', 'interaction',
-                                  'centering')),
-        field='cov',
-        desc=('covariate dictionary {vector, name, '
-              'interaction, centering}'))
+            key_trait=traits.Enum("vector", "name", "interaction", "centering")
+        ),
+        field="cov",
+        desc=("covariate dictionary {vector, name, interaction, centering}"),
+    )
     threshold_mask_none = traits.Bool(
-        field='masking.tm.tm_none',
-        xor=['threshold_mask_absolute', 'threshold_mask_relative'],
-        desc='do not use threshold masking')
+        field="masking.tm.tm_none",
+        xor=["threshold_mask_absolute", "threshold_mask_relative"],
+        desc="do not use threshold masking",
+    )
     threshold_mask_absolute = traits.Float(
-        field='masking.tm.tma.athresh',
-        xor=['threshold_mask_none', 'threshold_mask_relative'],
-        desc='use an absolute threshold')
+        field="masking.tm.tma.athresh",
+        xor=["threshold_mask_none", "threshold_mask_relative"],
+        desc="use an absolute threshold",
+    )
     threshold_mask_relative = traits.Float(
-        field='masking.tm.tmr.rthresh',
-        xor=['threshold_mask_absolute', 'threshold_mask_none'],
-        desc=('threshold using a '
-              'proportion of the global '
-              'value'))
+        field="masking.tm.tmr.rthresh",
+        xor=["threshold_mask_absolute", "threshold_mask_none"],
+        desc=("threshold using a proportion of the global value"),
+    )
     use_implicit_threshold = traits.Bool(
-        field='masking.im',
-        desc=('use implicit mask NaNs or '
-              'zeros to threshold'))
+        field="masking.im", desc=("use implicit mask NaNs or zeros to threshold")
+    )
     explicit_mask_file = File(
-        field='masking.em',  # requires cell
-        desc='use an implicit mask file to threshold')
+        field="masking.em",  # requires cell
+        desc="use an explicit mask file to threshold",
+    )
     global_calc_omit = traits.Bool(
-        field='globalc.g_omit',
-        xor=['global_calc_mean', 'global_calc_values'],
-        desc='omit global calculation')
+        field="globalc.g_omit",
+        xor=["global_calc_mean", "global_calc_values"],
+        desc="omit global calculation",
+    )
     global_calc_mean = traits.Bool(
-        field='globalc.g_mean',
-        xor=['global_calc_omit', 'global_calc_values'],
-        desc='use mean for global calculation')
+        field="globalc.g_mean",
+        xor=["global_calc_omit", "global_calc_values"],
+        
desc="use mean for global calculation", + ) global_calc_values = traits.List( traits.Float, - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], - desc='omit global calculation') + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], + desc="omit global calculation", + ) no_grand_mean_scaling = traits.Bool( - field='globalm.gmsca.gmsca_no', - desc=('do not perform grand mean ' - 'scaling')) + field="globalm.gmsca.gmsca_no", desc=("do not perform grand mean scaling") + ) global_normalization = traits.Enum( 1, 2, 3, - field='globalm.glonorm', - desc=('global normalization None-1, ' - 'Proportional-2, ANCOVA-3')) + field="globalm.glonorm", + desc=("global normalization None-1, Proportional-2, ANCOVA-3"), + ) class FactorialDesignOutputSpec(TraitedSpec): - spm_mat_file = File(exists=True, desc='SPM mat file') + spm_mat_file = File(exists=True, desc="SPM mat file") class FactorialDesign(SPMCommand): @@ -924,21 +1069,20 @@ class FactorialDesign(SPMCommand): input_spec = FactorialDesignInputSpec output_spec = FactorialDesignOutputSpec - _jobtype = 'stats' - _jobname = 'factorial_design' + _jobtype = "stats" + _jobname = "factorial_design" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt in ['spm_mat_dir', 'explicit_mask_file']: + """Convert input to appropriate format for spm""" + if opt in ["spm_mat_dir", "explicit_mask_file"]: return np.array([str(val)], dtype=object) - if opt in ['covariates']: + if opt in ["covariates"]: outlist = [] mapping = { - 'name': 'cname', - 'vector': 'c', - 'interaction': 'iCFI', - 'centering': 'iCC' + "name": "cname", + "vector": "c", + "interaction": "iCFI", + "centering": "iCC", } for dictitem in val: outdict = {} @@ -946,30 +1090,30 @@ def _format_arg(self, opt, spec, val): outdict[mapping[key]] = keyval outlist.append(outdict) return outlist - return super(FactorialDesign, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): - """validate spm realign options if set to None ignore - """ - einputs = super(FactorialDesign, self)._parse_inputs() + """validate spm realign options if set to None ignore""" + einputs = super()._parse_inputs() if not isdefined(self.inputs.spm_mat_dir): - einputs[0]['dir'] = np.array([str(os.getcwd())], dtype=object) + einputs[0]["dir"] = np.array([str(os.getcwd())], dtype=object) return einputs def _list_outputs(self): outputs = self._outputs().get() - spm = os.path.join(os.getcwd(), 'SPM.mat') - outputs['spm_mat_file'] = spm + spm = os.path.join(os.getcwd(), "SPM.mat") + outputs["spm_mat_file"] = spm return outputs class OneSampleTTestDesignInputSpec(FactorialDesignInputSpec): in_files = traits.List( File(exists=True), - field='des.t1.scans', + field="des.t1.scans", mandatory=True, minlen=2, - desc='input files') + desc="input files", + ) class OneSampleTTestDesign(FactorialDesign): @@ -986,11 +1130,10 @@ class OneSampleTTestDesign(FactorialDesign): input_spec = OneSampleTTestDesignInputSpec def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt in ['in_files']: + """Convert input to appropriate format for spm""" + if opt in ["in_files"]: return np.array(val, dtype=object) - return super(OneSampleTTestDesign, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) class TwoSampleTTestDesignInputSpec(FactorialDesignInputSpec): @@ -998,24 +1141,25 @@ class 
TwoSampleTTestDesignInputSpec(FactorialDesignInputSpec): # parameters to require at least two files in each group [SG] group1_files = traits.List( File(exists=True), - field='des.t2.scans1', + field="des.t2.scans1", mandatory=True, minlen=2, - desc='Group 1 input files') + desc="Group 1 input files", + ) group2_files = traits.List( File(exists=True), - field='des.t2.scans2', + field="des.t2.scans2", mandatory=True, minlen=2, - desc='Group 2 input files') + desc="Group 2 input files", + ) dependent = traits.Bool( - field='des.t2.dept', - desc=('Are the measurements dependent between ' - 'levels')) + field="des.t2.dept", desc=("Are the measurements dependent between levels") + ) unequal_variance = traits.Bool( - field='des.t2.variance', - desc=('Are the variances equal or unequal ' - 'between groups')) + field="des.t2.variance", + desc=("Are the variances equal or unequal between groups"), + ) class TwoSampleTTestDesign(FactorialDesign): @@ -1033,24 +1177,26 @@ class TwoSampleTTestDesign(FactorialDesign): input_spec = TwoSampleTTestDesignInputSpec def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt in ['group1_files', 'group2_files']: + """Convert input to appropriate format for spm""" + if opt in ["group1_files", "group2_files"]: return np.array(val, dtype=object) - return super(TwoSampleTTestDesign, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) class PairedTTestDesignInputSpec(FactorialDesignInputSpec): paired_files = traits.List( traits.List(File(exists=True), minlen=2, maxlen=2), - field='des.pt.pair', + field="des.pt.pair", mandatory=True, minlen=2, - desc='List of paired files') + desc="List of paired files", + ) grand_mean_scaling = traits.Bool( - field='des.pt.gmsca', desc='Perform grand mean scaling') + field="des.pt.gmsca", desc="Perform grand mean scaling" + ) ancova = traits.Bool( - field='des.pt.ancova', desc='Specify ancova-by-factor regressors') + field="des.pt.ancova", desc="Specify ancova-by-factor regressors" + ) class PairedTTestDesign(FactorialDesign): @@ -1067,30 +1213,31 @@ class PairedTTestDesign(FactorialDesign): input_spec = PairedTTestDesignInputSpec def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt in ['paired_files']: + """Convert input to appropriate format for spm""" + if opt in ["paired_files"]: return [dict(scans=np.array(files, dtype=object)) for files in val] - return super(PairedTTestDesign, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) class MultipleRegressionDesignInputSpec(FactorialDesignInputSpec): in_files = traits.List( File(exists=True), - field='des.mreg.scans', + field="des.mreg.scans", mandatory=True, minlen=2, - desc='List of files') + desc="List of files", + ) include_intercept = traits.Bool( True, - field='des.mreg.incint', + field="des.mreg.incint", usedefault=True, - desc='Include intercept in design') + desc="Include intercept in design", + ) user_covariates = InputMultiPath( - traits.Dict(key_trait=traits.Enum('vector', 'name', 'centering')), - field='des.mreg.mcov', - desc=('covariate dictionary {vector, ' - 'name, centering}')) + traits.Dict(key_trait=traits.Enum("vector", "name", "centering")), + field="des.mreg.mcov", + desc=("covariate dictionary {vector, name, centering}"), + ) class MultipleRegressionDesign(FactorialDesign): @@ -1107,18 +1254,16 @@ class MultipleRegressionDesign(FactorialDesign): input_spec = MultipleRegressionDesignInputSpec def _format_arg(self, 
opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt in ['in_files']: + """Convert input to appropriate format for spm""" + if opt in ["in_files"]: return np.array(val, dtype=object) - if opt in ['user_covariates']: + if opt in ["user_covariates"]: outlist = [] - mapping = {'name': 'cname', 'vector': 'c', 'centering': 'iCC'} + mapping = {"name": "cname", "vector": "c", "centering": "iCC"} for dictitem in val: outdict = {} for key, keyval in list(dictitem.items()): outdict[mapping[key]] = keyval outlist.append(outdict) return outlist - return (super(MultipleRegressionDesign, self)._format_arg( - opt, spec, val)) + return super()._format_arg(opt, spec, val) diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index 2188824022..8a3a479705 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -1,12 +1,7 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """SPM wrappers for preprocessing data """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range - import os from copy import deepcopy @@ -14,98 +9,196 @@ import numpy as np # Local imports -from ...utils.filemanip import (fname_presuffix, ensure_list, - simplify_list, split_filename) -from ..base import (OutputMultiPath, TraitedSpec, isdefined, - traits, InputMultiPath, InputMultiObject, File, Str) -from .base import (SPMCommand, scans_for_fname, func_is_3d, - scans_for_fnames, SPMCommandInputSpec, ImageFileSPM) - -__docformat__ = 'restructuredtext' +from ...utils.filemanip import ( + fname_presuffix, + ensure_list, + simplify_list, + split_filename, +) +from ..base import ( + OutputMultiPath, + TraitedSpec, + isdefined, + traits, + Tuple, + InputMultiPath, + InputMultiObject, + File, + Str, +) +from .base import ( + SPMCommand, + scans_for_fname, + func_is_3d, + scans_for_fnames, + SPMCommandInputSpec, + ImageFileSPM, +) + +__docformat__ = "restructuredtext" class FieldMapInputSpec(SPMCommandInputSpec): - jobtype = traits.Enum('calculatevdm', 'applyvdm', usedefault=True, - desc='one of: calculatevdm, applyvdm') - phase_file = File(mandatory=True, exists=True, copyfile=False, - field='subj.data.presubphasemag.phase', - desc='presubstracted phase file') - magnitude_file = File(mandatory=True, exists=True, copyfile=False, - field='subj.data.presubphasemag.magnitude', - desc='presubstracted magnitude file') - echo_times = traits.Tuple(traits.Float, traits.Float, mandatory=True, - field='subj.defaults.defaultsval.et', - desc='short and long echo times') - maskbrain = traits.Bool(True, usedefault=True, - field='subj.defaults.defaultsval.maskbrain', - desc='masking or no masking of the brain') - blip_direction = traits.Enum(1, -1, mandatory=True, - field='subj.defaults.defaultsval.blipdir', - desc='polarity of the phase-encode blips') - total_readout_time = traits.Float(mandatory=True, - field='subj.defaults.defaultsval.tert', - desc='total EPI readout time') - epifm = traits.Bool(False, usedefault=True, - field='subj.defaults.defaultsval.epifm', - desc='epi-based field map'); - jacobian_modulation = traits.Bool(False, usedefault=True, - field='subj.defaults.defaultsval.ajm', - desc='jacobian modulation'); + jobtype = traits.Enum( + "calculatevdm", + usedefault=True, + deprecated="1.9.0", # Two minor releases in the future + desc="Must be 'calculatevdm'; to apply VDM, use the ApplyVDM interface.", + 
) + + phase_file = File( + mandatory=True, + exists=True, + copyfile=False, + field="subj.data.presubphasemag.phase", + desc="presubstracted phase file", + ) + magnitude_file = File( + mandatory=True, + exists=True, + copyfile=False, + field="subj.data.presubphasemag.magnitude", + desc="presubstracted magnitude file", + ) + echo_times = Tuple( + traits.Float, + traits.Float, + mandatory=True, + field="subj.defaults.defaultsval.et", + desc="short and long echo times", + ) + maskbrain = traits.Bool( + True, + usedefault=True, + field="subj.defaults.defaultsval.maskbrain", + desc="masking or no masking of the brain", + ) + blip_direction = traits.Enum( + 1, + -1, + mandatory=True, + field="subj.defaults.defaultsval.blipdir", + desc="polarity of the phase-encode blips", + ) + total_readout_time = traits.Float( + mandatory=True, + field="subj.defaults.defaultsval.tert", + desc="total EPI readout time", + ) + epifm = traits.Bool( + False, + usedefault=True, + field="subj.defaults.defaultsval.epifm", + desc="epi-based field map", + ) + jacobian_modulation = traits.Bool( + False, + usedefault=True, + field="subj.defaults.defaultsval.ajm", + desc="jacobian modulation", + ) # Unwarping defaults parameters - method = traits.Enum('Mark3D', 'Mark2D', 'Huttonish', usedefault=True, - desc='One of: Mark3D, Mark2D, Huttonish', - field='subj.defaults.defaultsval.uflags.method'); - unwarp_fwhm = traits.Range(low=0, value=10, usedefault=True, - field='subj.defaults.defaultsval.uflags.fwhm', - desc='gaussian smoothing kernel width'); - pad = traits.Range(low=0, value=0, usedefault=True, - field='subj.defaults.defaultsval.uflags.pad', - desc='padding kernel width'); - ws = traits.Bool(True, usedefault=True, - field='subj.defaults.defaultsval.uflags.ws', - desc='weighted smoothing'); + method = traits.Enum( + "Mark3D", + "Mark2D", + "Huttonish", + usedefault=True, + desc="One of: Mark3D, Mark2D, Huttonish", + field="subj.defaults.defaultsval.uflags.method", + ) + unwarp_fwhm = traits.Range( + low=0, + value=10, + usedefault=True, + field="subj.defaults.defaultsval.uflags.fwhm", + desc="gaussian smoothing kernel width", + ) + pad = traits.Range( + low=0, + value=0, + usedefault=True, + field="subj.defaults.defaultsval.uflags.pad", + desc="padding kernel width", + ) + ws = traits.Bool( + True, + usedefault=True, + field="subj.defaults.defaultsval.uflags.ws", + desc="weighted smoothing", + ) # Brain mask defaults parameters - template = File(copyfile=False, exists=True, - field='subj.defaults.defaultsval.mflags.template', - desc='template image for brain masking'); - mask_fwhm = traits.Range(low=0, value=5, usedefault=True, - field='subj.defaults.defaultsval.mflags.fwhm', - desc='gaussian smoothing kernel width'); - nerode = traits.Range(low=0, value=2, usedefault=True, - field='subj.defaults.defaultsval.mflags.nerode', - desc='number of erosions'); - ndilate = traits.Range(low=0, value=4, usedefault=True, - field='subj.defaults.defaultsval.mflags.ndilate', - desc='number of erosions'); - thresh = traits.Float(0.5, usedefault=True, - field='subj.defaults.defaultsval.mflags.thresh', - desc='threshold used to create brain mask from segmented data'); - reg = traits.Float(0.02, usedefault=True, - field='subj.defaults.defaultsval.mflags.reg', - desc='regularization value used in the segmentation'); + template = File( + copyfile=False, + exists=True, + field="subj.defaults.defaultsval.mflags.template", + desc="template image for brain masking", + ) + mask_fwhm = traits.Range( + low=0, + value=5, + usedefault=True, + 
field="subj.defaults.defaultsval.mflags.fwhm", + desc="gaussian smoothing kernel width", + ) + nerode = traits.Range( + low=0, + value=2, + usedefault=True, + field="subj.defaults.defaultsval.mflags.nerode", + desc="number of erosions", + ) + ndilate = traits.Range( + low=0, + value=4, + usedefault=True, + field="subj.defaults.defaultsval.mflags.ndilate", + desc="number of erosions", + ) + thresh = traits.Float( + 0.5, + usedefault=True, + field="subj.defaults.defaultsval.mflags.thresh", + desc="threshold used to create brain mask from segmented data", + ) + reg = traits.Float( + 0.02, + usedefault=True, + field="subj.defaults.defaultsval.mflags.reg", + desc="regularization value used in the segmentation", + ) # EPI unwarping for quality check - epi_file = File(copyfile=False, exists=True, mandatory=True, - field='subj.session.epi', - desc='EPI to unwarp'); - matchvdm = traits.Bool(True, usedefault=True, - field='subj.matchvdm', - desc='match VDM to EPI'); - sessname = Str('_run-', usedefault=True, - field='subj.sessname', - desc='VDM filename extension'); - writeunwarped = traits.Bool(False, usedefault=True, - field='subj.writeunwarped', - desc='write unwarped EPI'); - anat_file = File(copyfile=False, exists=True, - field='subj.anat', - desc='anatomical image for comparison'); - matchanat = traits.Bool(True, usedefault=True, - field='subj.matchanat', - desc='match anatomical image to EPI'); + epi_file = File( + copyfile=False, + exists=True, + mandatory=True, + field="subj.session.epi", + desc="EPI to unwarp", + ) + matchvdm = traits.Bool( + True, usedefault=True, field="subj.matchvdm", desc="match VDM to EPI" + ) + sessname = Str( + "_run-", usedefault=True, field="subj.sessname", desc="VDM filename extension" + ) + writeunwarped = traits.Bool( + False, usedefault=True, field="subj.writeunwarped", desc="write unwarped EPI" + ) + anat_file = File( + copyfile=False, + exists=True, + field="subj.anat", + desc="anatomical image for comparison", + ) + matchanat = traits.Bool( + True, + usedefault=True, + field="subj.matchanat", + desc="match anatomical image to EPI", + ) class FieldMapOutputSpec(TraitedSpec): - vdm = File(exists=True, desc='voxel difference map') + vdm = File(exists=True, desc="voxel difference map") class FieldMap(SPMCommand): @@ -113,9 +206,10 @@ class FieldMap(SPMCommand): http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=173 - To do - ----- - Deal with real/imag magnitude images and with the two phase files case. + .. important:: + + This interface does not deal with real/imag magnitude images nor + with the two phase files case. 
Examples -------- @@ -133,72 +227,205 @@ class FieldMap(SPMCommand): input_spec = FieldMapInputSpec output_spec = FieldMapOutputSpec - _jobtype = 'tools' - _jobname = 'fieldmap' + _jobtype = "tools" + _jobname = "fieldmap" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt in ['phase_file', 'magnitude_file', 'anat_file', 'epi_file']: + """Convert input to appropriate format for spm""" + + if opt in ["phase_file", "magnitude_file", "anat_file", "epi_file"]: return scans_for_fname(ensure_list(val)) - return super(FieldMap, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): - """validate spm fieldmap options if set to None ignore - """ - einputs = super(FieldMap, self)._parse_inputs() - return [{self.inputs.jobtype: einputs[0]}] + """validate spm fieldmap options if set to None ignore""" + + einputs = super()._parse_inputs() + return [{"calculatevdm": einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() jobtype = self.inputs.jobtype - if jobtype == "calculatevdm": - outputs['vdm'] = fname_presuffix(self.inputs.phase_file, prefix='vdm5_sc') + + outputs["vdm"] = fname_presuffix(self.inputs.phase_file, prefix="vdm5_sc") return outputs +class ApplyVDMInputSpec(SPMCommandInputSpec): + in_files = InputMultiObject( + ImageFileSPM(exists=True), + field="data.scans", + mandatory=True, + copyfile=True, + desc="list of filenames to apply the vdm to", + ) + vdmfile = File( + field="data.vdmfile", + desc="Voxel displacement map to use", + mandatory=True, + copyfile=True, + ) + distortion_direction = traits.Int( + 2, + field="roptions.pedir", + desc="phase-encode direction the input data were acquired with", + usedefault=True, + ) + write_which = traits.ListInt( + [2, 1], + field="roptions.which", + minlen=2, + maxlen=2, + usedefault=True, + desc="If the first value is non-zero, reslice all images. If the second value is non-zero, reslice a mean image.", + ) + interpolation = traits.Range( + value=4, + low=0, + high=7, + field="roptions.rinterp", + desc="degree of b-spline used for interpolation", + ) + write_wrap = traits.List( + traits.Int(), + minlen=3, + maxlen=3, + field="roptions.wrap", + desc=("Check if interpolation should wrap in [x,y,z]"), + ) + write_mask = traits.Bool( + field="roptions.mask", desc="True/False mask time series images" + ) + out_prefix = traits.String( + "u", + field="roptions.prefix", + usedefault=True, + desc="fieldmap corrected output prefix", + ) + + +class ApplyVDMOutputSpec(TraitedSpec): + out_files = OutputMultiPath( + traits.Either(traits.List(File(exists=True)), File(exists=True)), + desc=("These will be the fieldmap corrected files."), + ) + mean_image = File(exists=True, desc="Mean image") + + +class ApplyVDM(SPMCommand): + """Use the fieldmap toolbox from spm to apply the voxel displacement map (VDM) to some epi files. + + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=173 + + .. important:: + + This interface does not deal with real/imag magnitude images nor + with the two phase files case.
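+
+    Examples
+    --------
+    A minimal usage sketch, following this module's docstring conventions
+    (the filenames below are illustrative placeholders, not shipped test
+    data):
+
+    >>> from nipype.interfaces.spm import ApplyVDM
+    >>> applyvdm = ApplyVDM()
+    >>> applyvdm.inputs.in_files = 'functional.nii'
+    >>> applyvdm.inputs.vdmfile = 'vdm5_scphase.nii'
+    >>> applyvdm.run() # doctest: +SKIP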
+ + """ + + input_spec = ApplyVDMInputSpec + output_spec = ApplyVDMOutputSpec + _jobtype = "tools" + _jobname = "fieldmap" + + def _format_arg(self, opt, spec, val): + """Convert input to appropriate format for spm""" + + if opt == 'in_files': + return scans_for_fnames( + ensure_list(val), keep4d=False, separate_sessions=False + ) + if opt == 'vdmfile': + return scans_for_fname(ensure_list(val)) + return super()._format_arg(opt, spec, val) + + def _parse_inputs(self): + """validate spm fieldmap options if set to None ignore""" + + einputs = super()._parse_inputs() + + return [{"applyvdm": einputs[0]}] + + def _list_outputs(self): + outputs = self._outputs().get() + resliced_all = self.inputs.write_which[0] > 0 + resliced_mean = self.inputs.write_which[1] > 0 + if resliced_mean: + if isinstance(self.inputs.in_files[0], list): + first_image = self.inputs.in_files[0][0] + else: + first_image = self.inputs.in_files[0] + outputs["mean_image"] = fname_presuffix(first_image, prefix="meanu") + + if resliced_all: + outputs["out_files"] = [] + for idx, imgf in enumerate(ensure_list(self.inputs.in_files)): + appliedvdm_run = [] + if isinstance(imgf, list): + for i, inner_imgf in enumerate(ensure_list(imgf)): + newfile = fname_presuffix( + inner_imgf, prefix=self.inputs.out_prefix + ) + appliedvdm_run.append(newfile) + else: + appliedvdm_run = fname_presuffix( + imgf, prefix=self.inputs.out_prefix + ) + outputs["out_files"].append(appliedvdm_run) + return outputs + + class SliceTimingInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( traits.Either( - traits.List(ImageFileSPM(exists=True)), ImageFileSPM(exists=True)), - field='scans', - desc='list of filenames to apply slice timing', + traits.List(ImageFileSPM(exists=True)), ImageFileSPM(exists=True) + ), + field="scans", + desc="list of filenames to apply slice timing", mandatory=True, - copyfile=False) + copyfile=False, + ) num_slices = traits.Int( - field='nslices', desc='number of slices in a volume', mandatory=True) + field="nslices", desc="number of slices in a volume", mandatory=True + ) time_repetition = traits.Float( - field='tr', - desc=('time between volume acquisitions' - '(start to start time)'), - mandatory=True) + field="tr", + desc=("time between volume acquisitions (start to start time)"), + mandatory=True, + ) time_acquisition = traits.Float( - field='ta', - desc=('time of volume acquisition. usually' - 'calculated as TR-(TR/num_slices)'), - mandatory=True) + field="ta", + desc=("time of volume acquisition. 
usually calculated as TR-(TR/num_slices)"), + mandatory=True, + ) slice_order = traits.List( + traits.Either(traits.Int(), traits.Float()), + field="so", + desc=("1-based order or onset (in ms) in which slices are acquired"), + mandatory=True, + ) + ref_slice = traits.Either( + traits.Int(), traits.Float(), - field='so', - desc=('1-based order or onset (in ms) in which ' - 'slices are acquired'), - mandatory=True) - ref_slice = traits.Int( - field='refslice', - desc='1-based Number of the reference slice or ' - 'reference time point if slice_order is in ' - 'onsets (ms)', - mandatory=True) + field="refslice", + desc="1-based Number of the reference slice or " + "reference time point if slice_order is in " + "onsets (ms)", + mandatory=True, + ) out_prefix = traits.String( - 'a', field='prefix', usedefault=True, desc='slicetimed output prefix') + "a", field="prefix", usedefault=True, desc="slicetimed output prefix" + ) class SliceTimingOutputSpec(TraitedSpec): timecorrected_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), - desc='slice time corrected files') + desc="slice time corrected files", + ) class SliceTiming(SPMCommand): @@ -224,127 +451,133 @@ class SliceTiming(SPMCommand): input_spec = SliceTimingInputSpec output_spec = SliceTimingOutputSpec - _jobtype = 'temporal' - _jobname = 'st' + _jobtype = "temporal" + _jobname = "st" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt == 'in_files': + """Convert input to appropriate format for spm""" + if opt == "in_files": return scans_for_fnames( - ensure_list(val), keep4d=False, separate_sessions=True) - return super(SliceTiming, self)._format_arg(opt, spec, val) + ensure_list(val), keep4d=False, separate_sessions=True + ) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['timecorrected_files'] = [] + outputs["timecorrected_files"] = [] filelist = ensure_list(self.inputs.in_files) for f in filelist: if isinstance(f, list): run = [ - fname_presuffix(in_f, prefix=self.inputs.out_prefix) - for in_f in f + fname_presuffix(in_f, prefix=self.inputs.out_prefix) for in_f in f ] else: run = fname_presuffix(f, prefix=self.inputs.out_prefix) - outputs['timecorrected_files'].append(run) + outputs["timecorrected_files"].append(run) return outputs class RealignInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( - traits.Either(ImageFileSPM(exists=True), - traits.List(ImageFileSPM(exists=True))), - field='data', + traits.Either( + ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True)) + ), + field="data", mandatory=True, copyfile=True, - desc='list of filenames to realign') + desc="list of filenames to realign", + ) jobtype = traits.Enum( - 'estwrite', - 'estimate', - 'write', - desc='one of: estimate, write, estwrite', - usedefault=True) + "estwrite", + "estimate", + "write", + desc="one of: estimate, write, estwrite", + usedefault=True, + ) quality = traits.Range( - low=0.0, - high=1.0, - field='eoptions.quality', - desc='0.1 = fast, 1.0 = precise') + low=0.0, high=1.0, field="eoptions.quality", desc="0.1 = fast, 1.0 = precise" + ) fwhm = traits.Range( - low=0.0, field='eoptions.fwhm', desc='gaussian smoothing kernel width') + low=0.0, field="eoptions.fwhm", desc="gaussian smoothing kernel width" + ) separation = traits.Range( - low=0.0, field='eoptions.sep', desc='sampling separation in mm') + low=0.0, field="eoptions.sep", desc="sampling separation in mm" + ) register_to_mean = 
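# A minimal SliceTiming sketch, patterned on the docstring conventions used
# throughout this module (the filename and timing values are illustrative
# placeholders). Note that time_acquisition follows the TR - (TR / num_slices)
# rule stated in the trait description:
import nipype.interfaces.spm as spm

st = spm.SliceTiming()
st.inputs.in_files = "functional.nii"
st.inputs.num_slices = 32
st.inputs.time_repetition = 6.0
st.inputs.time_acquisition = 6.0 - 6.0 / 32.0
st.inputs.slice_order = list(range(32, 0, -1))  # descending acquisition order
st.inputs.ref_slice = 1
st.run()  # requires a working MATLAB/SPM installation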
traits.Bool( - field='eoptions.rtm', - desc=('Indicate whether realignment is ' - 'done to the mean image')) + field="eoptions.rtm", + desc=("Indicate whether realignment is done to the mean image"), + ) weight_img = File( - exists=True, - field='eoptions.weight', - desc='filename of weighting image') + exists=True, field="eoptions.weight", desc="filename of weighting image" + ) interp = traits.Range( low=0, high=7, - field='eoptions.interp', - desc='degree of b-spline used for interpolation') + field="eoptions.interp", + desc="degree of b-spline used for interpolation", + ) wrap = traits.List( traits.Int(), minlen=3, maxlen=3, - field='eoptions.wrap', - desc='Check if interpolation should wrap in [x,y,z]') + field="eoptions.wrap", + desc="Check if interpolation should wrap in [x,y,z]", + ) write_which = traits.ListInt( [2, 1], - field='roptions.which', + field="roptions.which", minlen=2, maxlen=2, usedefault=True, - desc='determines which images to reslice') + desc="determines which images to reslice", + ) write_interp = traits.Range( low=0, high=7, - field='roptions.interp', - desc=('degree of b-spline used for ' - 'interpolation')) + field="roptions.interp", + desc=("degree of b-spline used for interpolation"), + ) write_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, - field='roptions.wrap', - desc=('Check if interpolation should wrap in ' - '[x,y,z]')) - write_mask = traits.Bool( - field='roptions.mask', desc='True/False mask output image') + field="roptions.wrap", + desc=("Check if interpolation should wrap in [x,y,z]"), + ) + write_mask = traits.Bool(field="roptions.mask", desc="True/False mask output image") out_prefix = traits.String( - 'r', - field='roptions.prefix', - usedefault=True, - desc='realigned output prefix') + "r", field="roptions.prefix", usedefault=True, desc="realigned output prefix" + ) class RealignOutputSpec(TraitedSpec): - mean_image = File(exists=True, desc='Mean image file from the realignment') + mean_image = File(exists=True, desc="Mean image file from the realignment") modified_in_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), - desc=('Copies of all files passed to ' - 'in_files. Headers will have ' - 'been modified to align all ' - 'images with the first, or ' - 'optionally to first do that, ' - 'extract a mean image, and ' - 're-align to that mean image.')) + desc=( + "Copies of all files passed to " + "in_files. Headers will have " + "been modified to align all " + "images with the first, or " + "optionally to first do that, " + "extract a mean image, and " + "re-align to that mean image." + ), + ) realigned_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), - desc=('If jobtype is write or estwrite, ' - 'these will be the resliced files.' - ' Otherwise, they will be copies ' - 'of in_files that have had their ' - 'headers rewritten.')) + desc=( + "If jobtype is write or estwrite, " + "these will be the resliced files." + " Otherwise, they will be copies " + "of in_files that have had their " + "headers rewritten." 
+ ), + ) realignment_parameters = OutputMultiPath( - File(exists=True), - desc=('Estimated translation and ' - 'rotation parameters')) + File(exists=True), desc=("Estimated translation and rotation parameters") + ) class Realign(SPMCommand): @@ -366,26 +599,25 @@ class Realign(SPMCommand): input_spec = RealignInputSpec output_spec = RealignOutputSpec - _jobtype = 'spatial' - _jobname = 'realign' + _jobtype = "spatial" + _jobname = "realign" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt == 'in_files': + """Convert input to appropriate format for spm""" + if opt == "in_files": if self.inputs.jobtype == "write": separate_sessions = False else: separate_sessions = True return scans_for_fnames( - val, keep4d=False, separate_sessions=separate_sessions) - return super(Realign, self)._format_arg(opt, spec, val) + val, keep4d=False, separate_sessions=separate_sessions + ) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): - """validate spm realign options if set to None ignore - """ - einputs = super(Realign, self)._parse_inputs() - return [{'%s' % (self.inputs.jobtype): einputs[0]}] + """validate spm realign options if set to None ignore""" + einputs = super()._parse_inputs() + return [{"%s" % (self.inputs.jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() @@ -394,22 +626,23 @@ def _list_outputs(self): if self.inputs.jobtype != "write": if isdefined(self.inputs.in_files): - outputs['realignment_parameters'] = [] + outputs["realignment_parameters"] = [] for imgf in self.inputs.in_files: if isinstance(imgf, list): tmp_imgf = imgf[0] else: tmp_imgf = imgf - outputs['realignment_parameters'].append( + outputs["realignment_parameters"].append( fname_presuffix( - tmp_imgf, prefix='rp_', suffix='.txt', use_ext=False)) + tmp_imgf, prefix="rp_", suffix=".txt", use_ext=False + ) + ) if not isinstance(imgf, list) and func_is_3d(imgf): break if self.inputs.jobtype == "estimate": - outputs['realigned_files'] = self.inputs.in_files - if (self.inputs.jobtype == "estimate" - or self.inputs.jobtype == "estwrite"): - outputs['modified_in_files'] = self.inputs.in_files + outputs["realigned_files"] = self.inputs.in_files + if self.inputs.jobtype == "estimate" or self.inputs.jobtype == "estwrite": + outputs["modified_in_files"] = self.inputs.in_files if self.inputs.jobtype == "write" or self.inputs.jobtype == "estwrite": if isinstance(self.inputs.in_files[0], list): first_image = self.inputs.in_files[0][0] @@ -417,175 +650,199 @@ def _list_outputs(self): first_image = self.inputs.in_files[0] if resliced_mean: - outputs['mean_image'] = fname_presuffix( - first_image, prefix='mean') + outputs["mean_image"] = fname_presuffix(first_image, prefix="mean") if resliced_all: - outputs['realigned_files'] = [] - for idx, imgf in enumerate( - ensure_list(self.inputs.in_files)): + outputs["realigned_files"] = [] + for idx, imgf in enumerate(ensure_list(self.inputs.in_files)): realigned_run = [] if isinstance(imgf, list): for i, inner_imgf in enumerate(ensure_list(imgf)): newfile = fname_presuffix( - inner_imgf, prefix=self.inputs.out_prefix) + inner_imgf, prefix=self.inputs.out_prefix + ) realigned_run.append(newfile) else: realigned_run = fname_presuffix( - imgf, prefix=self.inputs.out_prefix) - outputs['realigned_files'].append(realigned_run) + imgf, prefix=self.inputs.out_prefix + ) + outputs["realigned_files"].append(realigned_run) return outputs class RealignUnwarpInputSpec(SPMCommandInputSpec): - in_files = 
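# A minimal Realign sketch (placeholder filename). With the default jobtype
# of "estwrite", motion parameters are estimated and the images are resliced
# in one pass:
import nipype.interfaces.spm as spm

realign = spm.Realign()
realign.inputs.in_files = "functional.nii"
realign.inputs.register_to_mean = True
realign.run()  # requires a working MATLAB/SPM installation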
InputMultiObject( - traits.Either(ImageFileSPM(exists=True), - traits.List(ImageFileSPM(exists=True))), - field='data.scans', - mandatory=True, + traits.Either( + ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True)) + ), + field="data.scans", + mandatory=True, copyfile=True, - desc='list of filenames to realign and unwarp') + desc="list of filenames to realign and unwarp", + ) phase_map = File( - field='data.pmscan', - desc='Voxel displacement map to use in unwarping. Unlike SPM standard ' - 'behaviour, the same map will be used for all sessions', - copyfile=False) + field="data.pmscan", + desc="Voxel displacement map to use in unwarping. Unlike SPM standard " + "behaviour, the same map will be used for all sessions", + copyfile=False, + ) quality = traits.Range( - low=0.0, - high=1.0, - field='eoptions.quality', - desc='0.1 = fast, 1.0 = precise') + low=0.0, high=1.0, field="eoptions.quality", desc="0.1 = fast, 1.0 = precise" + ) fwhm = traits.Range( - low=0.0, - field='eoptions.fwhm', - desc='gaussian smoothing kernel width') + low=0.0, field="eoptions.fwhm", desc="gaussian smoothing kernel width" + ) separation = traits.Range( - low=0.0, - field='eoptions.sep', - desc='sampling separation in mm') + low=0.0, field="eoptions.sep", desc="sampling separation in mm" + ) register_to_mean = traits.Bool( - field='eoptions.rtm', - desc='Indicate whether realignment is done to the mean image') + field="eoptions.rtm", + desc="Indicate whether realignment is done to the mean image", + ) weight_img = File( - exists=True, - field='eoptions.weight', - desc='filename of weighting image') + exists=True, field="eoptions.weight", desc="filename of weighting image" + ) interp = traits.Range( - low=0, - high=7, - field='eoptions.einterp', - desc='degree of b-spline used for interpolation') + low=0, + high=7, + field="eoptions.einterp", + desc="degree of b-spline used for interpolation", + ) wrap = traits.List( - traits.Int(), - minlen=3, + traits.Int(), + minlen=3, maxlen=3, - field='eoptions.ewrap', - desc='Check if interpolation should wrap in [x,y,z]') + field="eoptions.ewrap", + desc="Check if interpolation should wrap in [x,y,z]", + ) est_basis_func = traits.List( - traits.Int(), - minlen=2, + traits.Int(), + minlen=2, maxlen=2, - field='uweoptions.basfcn', - desc='Number of basis functions to use for each dimension') + field="uweoptions.basfcn", + desc="Number of basis functions to use for each dimension", + ) est_reg_order = traits.Range( - low=0, - high=3, - field='uweoptions.regorder', - desc=('This parameter determines how to balance the compromise between likelihood ' - 'maximization and smoothness maximization of the estimated field.')) + low=0, + high=3, + field="uweoptions.regorder", + desc=( + "This parameter determines how to balance the compromise between likelihood " + "maximization and smoothness maximization of the estimated field." + ), + ) est_reg_factor = traits.ListInt( - [100000], - field='uweoptions.lambda', - minlen=1, + [100000], + field="uweoptions.lambda", + minlen=1, maxlen=1, usedefault=True, - desc='Regularisation factor. Default: 100000 (medium).') + desc="Regularisation factor. Default: 100000 (medium).", + ) est_jacobian_deformations = traits.Bool( - field='uweoptions.jm', - desc=('Jacobian deformations. In theory a good idea to include them, ' - ' in practice a bad idea. Default: No.')) + field="uweoptions.jm", + desc=( + "Jacobian deformations. In theory a good idea to include them, " + " in practice a bad idea. Default: No." 
+ ), + ) est_first_order_effects = traits.List( - traits.Int(), - minlen=1, + traits.Int(), + minlen=1, maxlen=6, - field='uweoptions.fot', - desc='First order effects should only depend on pitch and roll, i.e. [4 5]') + field="uweoptions.fot", + desc="First order effects should only depend on pitch and roll, i.e. [4 5]", + ) est_second_order_effects = traits.List( - traits.Int(), - minlen=1, + traits.Int(), + minlen=1, maxlen=6, - field='uweoptions.sot', - desc='List of second order terms to model second derivatives of.') + field="uweoptions.sot", + desc="List of second order terms to model second derivatives of.", + ) est_unwarp_fwhm = traits.Range( - low=0.0, - field='uweoptions.uwfwhm', - desc='gaussian smoothing kernel width for unwarp') + low=0.0, + field="uweoptions.uwfwhm", + desc="gaussian smoothing kernel width for unwarp", + ) est_re_est_mov_par = traits.Bool( - field='uweoptions.rem', - desc='Re-estimate movement parameters at each unwarping iteration.') + field="uweoptions.rem", + desc="Re-estimate movement parameters at each unwarping iteration.", + ) est_num_of_iterations = traits.ListInt( - [5], - field='uweoptions.noi', - minlen=1, - maxlen=1, + [5], + field="uweoptions.noi", + minlen=1, + maxlen=1, usedefault=True, - desc='Number of iterations.') + desc="Number of iterations.", + ) est_taylor_expansion_point = traits.String( - 'Average', - field='uweoptions.expround', + "Average", + field="uweoptions.expround", usedefault=True, - desc='Point in position space to perform Taylor-expansion around.') + desc="Point in position space to perform Taylor-expansion around.", + ) reslice_which = traits.ListInt( - [2, 1], - field='uwroptions.uwwhich', - minlen=2, - maxlen=2, + [2, 1], + field="uwroptions.uwwhich", + minlen=2, + maxlen=2, usedefault=True, - desc='determines which images to reslice') + desc="determines which images to reslice", + ) reslice_interp = traits.Range( - low=0, - high=7, - field='uwroptions.rinterp', - desc='degree of b-spline used for interpolation') + low=0, + high=7, + field="uwroptions.rinterp", + desc="degree of b-spline used for interpolation", + ) reslice_wrap = traits.List( - traits.Int(), - minlen=3, + traits.Int(), + minlen=3, maxlen=3, - field='uwroptions.wrap', - desc='Check if interpolation should wrap in [x,y,z]') + field="uwroptions.wrap", + desc="Check if interpolation should wrap in [x,y,z]", + ) reslice_mask = traits.Bool( - field='uwroptions.mask', - desc='True/False mask output image') + field="uwroptions.mask", desc="True/False mask output image" + ) out_prefix = traits.String( - 'u', - field='uwroptions.prefix', + "u", + field="uwroptions.prefix", usedefault=True, - desc='realigned and unwarped output prefix') + desc="realigned and unwarped output prefix", + ) class RealignUnwarpOutputSpec(TraitedSpec): - mean_image = File(exists=True, desc='Mean image file from the realignment & unwarping') + mean_image = File( + exists=True, desc="Mean image file from the realignment & unwarping" + ) modified_in_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), - desc=('Copies of all files passed to ' - 'in_files. Headers will have ' - 'been modified to align all ' - 'images with the first, or ' - 'optionally to first do that, ' - 'extract a mean image, and ' - 're-align to that mean image.')) + desc=( + "Copies of all files passed to " + "in_files. 
Headers will have " + "been modified to align all " + "images with the first, or " + "optionally to first do that, " + "extract a mean image, and " + "re-align to that mean image." + ), + ) realigned_unwarped_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), - desc='Realigned and unwarped files written to disc.') + desc="Realigned and unwarped files written to disc.", + ) realignment_parameters = OutputMultiPath( - File(exists=True), - desc='Estimated translation and rotation parameters') + File(exists=True), desc="Estimated translation and rotation parameters" + ) class RealignUnwarp(SPMCommand): """Use spm_uw_estimate for estimating within subject registration and unwarping - of time series. Function accepts only one single field map. If in_files is a + of time series. Function accepts only one single field map. If in_files is a list of files they will be treated as separate sessions but associated to the same fieldmap. @@ -606,56 +863,52 @@ class RealignUnwarp(SPMCommand): input_spec = RealignUnwarpInputSpec output_spec = RealignUnwarpOutputSpec - _jobtype = 'spatial' - _jobname = 'realignunwarp' + _jobtype = "spatial" + _jobname = "realignunwarp" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt == 'in_files': - return scans_for_fnames(ensure_list(val), - keep4d=False, - separate_sessions=True) - return super(RealignUnwarp, self)._format_arg(opt, spec, val) - + """Convert input to appropriate format for spm""" + if opt == "in_files": + return scans_for_fnames( + ensure_list(val), keep4d=False, separate_sessions=True + ) + return super()._format_arg(opt, spec, val) def _parse_inputs(self, skip=()): - - spmdict = super(RealignUnwarp, self)._parse_inputs(skip=())[0] + spmdict = super()._parse_inputs(skip=())[0] if isdefined(self.inputs.phase_map): - pmscan = spmdict['data']['pmscan'] + pmscan = spmdict["data"]["pmscan"] else: - pmscan = '' + pmscan = "" if isdefined(self.inputs.in_files): if isinstance(self.inputs.in_files, list): - data = [dict(scans = sess, pmscan = pmscan) - for sess in spmdict['data']['scans']] + data = [ + dict(scans=sess, pmscan=pmscan) for sess in spmdict["data"]["scans"] + ] else: - data = [dict(scans = spmdict['data']['scans'], pmscan = pmscan)] + data = [dict(scans=spmdict["data"]["scans"], pmscan=pmscan)] - spmdict['data'] = data + spmdict["data"] = data return [spmdict] - def _list_outputs(self): outputs = self._outputs().get() resliced_all = self.inputs.reslice_which[0] > 0 resliced_mean = self.inputs.reslice_which[1] > 0 if isdefined(self.inputs.in_files): - outputs['realignment_parameters'] = [] + outputs["realignment_parameters"] = [] for imgf in self.inputs.in_files: if isinstance(imgf, list): tmp_imgf = imgf[0] else: tmp_imgf = imgf - outputs['realignment_parameters'].append(fname_presuffix(tmp_imgf, - prefix='rp_', - suffix='.txt', - use_ext=False)) + outputs["realignment_parameters"].append( + fname_presuffix(tmp_imgf, prefix="rp_", suffix=".txt", use_ext=False) + ) if not isinstance(imgf, list) and func_is_3d(imgf): break @@ -665,21 +918,21 @@ def _list_outputs(self): first_image = self.inputs.in_files[0] if resliced_mean: - outputs['mean_image'] = fname_presuffix(first_image, prefix='meanu') + outputs["mean_image"] = fname_presuffix(first_image, prefix="meanu") if resliced_all: - outputs['realigned_unwarped_files'] = [] + outputs["realigned_unwarped_files"] = [] for idx, imgf in enumerate(ensure_list(self.inputs.in_files)): realigned_run = [] if 
isinstance(imgf, list): for i, inner_imgf in enumerate(ensure_list(imgf)): - newfile = fname_presuffix(inner_imgf, - prefix=self.inputs.out_prefix) + newfile = fname_presuffix( + inner_imgf, prefix=self.inputs.out_prefix + ) realigned_run.append(newfile) else: - realigned_run = fname_presuffix(imgf, - prefix=self.inputs.out_prefix) - outputs['realigned_unwarped_files'].append(realigned_run) + realigned_run = fname_presuffix(imgf, prefix=self.inputs.out_prefix) + outputs["realigned_unwarped_files"].append(realigned_run) return outputs @@ -687,76 +940,83 @@ class CoregisterInputSpec(SPMCommandInputSpec): target = ImageFileSPM( exists=True, mandatory=True, - field='ref', - desc='reference file to register to', - copyfile=False) + field="ref", + desc="reference file to register to", + copyfile=False, + ) source = InputMultiPath( ImageFileSPM(exists=True), - field='source', - desc='file to register to target', + field="source", + desc="file to register to target", copyfile=True, - mandatory=True) + mandatory=True, + ) jobtype = traits.Enum( - 'estwrite', - 'estimate', - 'write', - desc='one of: estimate, write, estwrite', - usedefault=True) + "estwrite", + "estimate", + "write", + desc="one of: estimate, write, estwrite", + usedefault=True, + ) apply_to_files = InputMultiPath( File(exists=True), - field='other', - desc='files to apply transformation to', - copyfile=True) + field="other", + desc="files to apply transformation to", + copyfile=True, + ) cost_function = traits.Enum( - 'mi', - 'nmi', - 'ecc', - 'ncc', - field='eoptions.cost_fun', + "mi", + "nmi", + "ecc", + "ncc", + field="eoptions.cost_fun", desc="""cost function, one of: 'mi' - Mutual Information, 'nmi' - Normalised Mutual Information, 'ecc' - Entropy Correlation Coefficient, - 'ncc' - Normalised Cross Correlation""") + 'ncc' - Normalised Cross Correlation""", + ) fwhm = traits.List( traits.Float(), minlen=2, maxlen=2, - field='eoptions.fwhm', - desc='gaussian smoothing kernel width (mm)') + field="eoptions.fwhm", + desc="gaussian smoothing kernel width (mm)", + ) separation = traits.List( - traits.Float(), field='eoptions.sep', desc='sampling separation in mm') + traits.Float(), field="eoptions.sep", desc="sampling separation in mm" + ) tolerance = traits.List( traits.Float(), - field='eoptions.tol', - desc='acceptable tolerance for each of 12 params') + field="eoptions.tol", + desc="acceptable tolerance for each of 12 params", + ) write_interp = traits.Range( low=0, high=7, - field='roptions.interp', - desc=('degree of b-spline used for ' - 'interpolation')) + field="roptions.interp", + desc=("degree of b-spline used for interpolation"), + ) write_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, - field='roptions.wrap', - desc=('Check if interpolation should wrap in ' - '[x,y,z]')) - write_mask = traits.Bool( - field='roptions.mask', desc='True/False mask output image') + field="roptions.wrap", + desc=("Check if interpolation should wrap in [x,y,z]"), + ) + write_mask = traits.Bool(field="roptions.mask", desc="True/False mask output image") out_prefix = traits.String( - 'r', - field='roptions.prefix', - usedefault=True, - desc='coregistered output prefix') + "r", field="roptions.prefix", usedefault=True, desc="coregistered output prefix" + ) class CoregisterOutputSpec(TraitedSpec): coregistered_source = OutputMultiPath( - File(exists=True), desc='Coregistered source files') + File(exists=True), desc="Coregistered source files" + ) coregistered_files = OutputMultiPath( - File(exists=True), desc='Coregistered other files') + 
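# A minimal RealignUnwarp sketch (placeholder filenames). A list of inputs is
# treated as separate sessions that all share the single phase map, as the
# class docstring above notes:
import nipype.interfaces.spm as spm

realignunwarp = spm.RealignUnwarp()
realignunwarp.inputs.in_files = ["functional.nii", "functional2.nii"]
realignunwarp.inputs.phase_map = "voxeldisplacemap.vdm"
realignunwarp.inputs.register_to_mean = True
realignunwarp.run()  # requires a working MATLAB/SPM installation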
File(exists=True), desc="Coregistered other files" + ) class Coregister(SPMCommand): @@ -777,54 +1037,55 @@ class Coregister(SPMCommand): input_spec = CoregisterInputSpec output_spec = CoregisterOutputSpec - _jobtype = 'spatial' - _jobname = 'coreg' + _jobtype = "spatial" + _jobname = "coreg" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if (opt == 'target' - or (opt == 'source' and self.inputs.jobtype != "write")): + """Convert input to appropriate format for spm""" + if opt == "target" or (opt == "source" and self.inputs.jobtype != "write"): return scans_for_fnames(ensure_list(val), keep4d=True) - if opt == 'apply_to_files': + if opt == "apply_to_files": return np.array(ensure_list(val), dtype=object) - if opt == 'source' and self.inputs.jobtype == "write": + if opt == "source" and self.inputs.jobtype == "write": if isdefined(self.inputs.apply_to_files): return scans_for_fnames(val + self.inputs.apply_to_files) else: return scans_for_fnames(val) - return super(Coregister, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): - """validate spm coregister options if set to None ignore - """ + """validate spm coregister options if set to None ignore""" if self.inputs.jobtype == "write": - einputs = (super(Coregister, self) - ._parse_inputs(skip=('jobtype', 'apply_to_files'))) + einputs = super()._parse_inputs(skip=("jobtype", "apply_to_files")) else: - einputs = super(Coregister, self)._parse_inputs(skip=('jobtype')) + einputs = super()._parse_inputs(skip=("jobtype")) jobtype = self.inputs.jobtype - return [{'%s' % (jobtype): einputs[0]}] + return [{"%s" % (jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() if self.inputs.jobtype == "estimate": if isdefined(self.inputs.apply_to_files): - outputs['coregistered_files'] = self.inputs.apply_to_files - outputs['coregistered_source'] = self.inputs.source - elif (self.inputs.jobtype == "write" - or self.inputs.jobtype == "estwrite"): + outputs["coregistered_files"] = self.inputs.apply_to_files + outputs["coregistered_source"] = self.inputs.source + elif self.inputs.jobtype == "write" or self.inputs.jobtype == "estwrite": if isdefined(self.inputs.apply_to_files): - outputs['coregistered_files'] = [] + outputs["coregistered_files"] = [] for imgf in ensure_list(self.inputs.apply_to_files): - (outputs['coregistered_files'].append( - fname_presuffix(imgf, prefix=self.inputs.out_prefix))) + ( + outputs["coregistered_files"].append( + fname_presuffix(imgf, prefix=self.inputs.out_prefix) + ) + ) - outputs['coregistered_source'] = [] + outputs["coregistered_source"] = [] for imgf in ensure_list(self.inputs.source): - (outputs['coregistered_source'].append( - fname_presuffix(imgf, prefix=self.inputs.out_prefix))) + ( + outputs["coregistered_source"].append( + fname_presuffix(imgf, prefix=self.inputs.out_prefix) + ) + ) return outputs @@ -832,108 +1093,105 @@ def _list_outputs(self): class NormalizeInputSpec(SPMCommandInputSpec): template = File( exists=True, - field='eoptions.template', - desc='template file to normalize to', + field="eoptions.template", + desc="template file to normalize to", mandatory=True, - xor=['parameter_file'], - copyfile=False) + xor=["parameter_file"], + copyfile=False, + ) source = InputMultiPath( ImageFileSPM(exists=True), - field='subj.source', - xor=['parameter_file'], - desc='file to normalize to template', + field="subj.source", + xor=["parameter_file"], + desc="file to normalize to template", 
mandatory=True, - copyfile=True) + copyfile=True, + ) jobtype = traits.Enum( - 'estwrite', - 'est', - 'write', - usedefault=True, - desc='Estimate, Write or do both') + "estwrite", "est", "write", usedefault=True, desc="Estimate, Write or do both" + ) apply_to_files = InputMultiPath( traits.Either(File(exists=True), traits.List(File(exists=True))), - field='subj.resample', - desc='files to apply transformation to', - copyfile=True) + field="subj.resample", + desc="files to apply transformation to", + copyfile=True, + ) parameter_file = File( - field='subj.matname', + field="subj.matname", mandatory=True, - xor=['source', 'template'], - desc='normalization parameter file*_sn.mat', - copyfile=False) + xor=["source", "template"], + desc="normalization parameter file*_sn.mat", + copyfile=False, + ) source_weight = File( - field='subj.wtsrc', - desc='name of weighting image for source', - copyfile=False) + field="subj.wtsrc", desc="name of weighting image for source", copyfile=False + ) template_weight = File( - field='eoptions.weight', - desc='name of weighting image for template', - copyfile=False) + field="eoptions.weight", + desc="name of weighting image for template", + copyfile=False, + ) source_image_smoothing = traits.Float( - field='eoptions.smosrc', desc='source smoothing') + field="eoptions.smosrc", desc="source smoothing" + ) template_image_smoothing = traits.Float( - field='eoptions.smoref', desc='template smoothing') + field="eoptions.smoref", desc="template smoothing" + ) affine_regularization_type = traits.Enum( - 'mni', - 'size', - 'none', - field='eoptions.regtype', - desc='mni, size, none') + "mni", "size", "none", field="eoptions.regtype", desc="mni, size, none" + ) DCT_period_cutoff = traits.Float( - field='eoptions.cutoff', desc='Cutoff of for DCT bases') + field="eoptions.cutoff", desc="Cutoff of for DCT bases" + ) nonlinear_iterations = traits.Int( - field='eoptions.nits', - desc=('Number of iterations of ' - 'nonlinear warping')) + field="eoptions.nits", desc=("Number of iterations of nonlinear warping") + ) nonlinear_regularization = traits.Float( - field='eoptions.reg', - desc=('the amount of the ' - 'regularization for the ' - 'nonlinear part of the ' - 'normalization')) + field="eoptions.reg", + desc=( + "the amount of the " + "regularization for the " + "nonlinear part of the " + "normalization" + ), + ) write_preserve = traits.Bool( - field='roptions.preserve', - desc='True/False warped images are modulated') + field="roptions.preserve", desc="True/False warped images are modulated" + ) write_bounding_box = traits.List( traits.List(traits.Float(), minlen=3, maxlen=3), - field='roptions.bb', + field="roptions.bb", minlen=2, maxlen=2, - desc='3x2-element list of lists') + desc="3x2-element list of lists", + ) write_voxel_sizes = traits.List( - traits.Float(), - field='roptions.vox', - minlen=3, - maxlen=3, - desc='3-element list') + traits.Float(), field="roptions.vox", minlen=3, maxlen=3, desc="3-element list" + ) write_interp = traits.Range( low=0, high=7, - field='roptions.interp', - desc=('degree of b-spline used for ' - 'interpolation')) + field="roptions.interp", + desc=("degree of b-spline used for interpolation"), + ) write_wrap = traits.List( traits.Int(), - field='roptions.wrap', - desc=('Check if interpolation should wrap in ' - '[x,y,z] - list of bools')) + field="roptions.wrap", + desc=("Check if interpolation should wrap in [x,y,z] - list of bools"), + ) out_prefix = traits.String( - 'w', - field='roptions.prefix', - usedefault=True, - desc='normalized 
output prefix') + "w", field="roptions.prefix", usedefault=True, desc="normalized output prefix" + ) class NormalizeOutputSpec(TraitedSpec): normalization_parameters = OutputMultiPath( - File(exists=True), - desc=('MAT files containing ' - 'the normalization ' - 'parameters')) + File(exists=True), desc=("MAT files containing the normalization parameters") + ) normalized_source = OutputMultiPath( - File(exists=True), desc='Normalized source files') - normalized_files = OutputMultiPath( - File(exists=True), desc='Normalized other files') + File(exists=True), desc="Normalized source files" + ) + normalized_files = OutputMultiPath(File(exists=True), desc="Normalized other files") class Normalize(SPMCommand): @@ -952,82 +1210,77 @@ class Normalize(SPMCommand): input_spec = NormalizeInputSpec output_spec = NormalizeOutputSpec - _jobtype = 'spatial' - _jobname = 'normalise' + _jobtype = "spatial" + _jobname = "normalise" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt == 'template': + """Convert input to appropriate format for spm""" + if opt == "template": return scans_for_fname(ensure_list(val)) - if opt == 'source': + if opt == "source": return scans_for_fname(ensure_list(val)) - if opt == 'apply_to_files': + if opt == "apply_to_files": return scans_for_fnames(ensure_list(val)) - if opt == 'parameter_file': + if opt == "parameter_file": return np.array([simplify_list(val)], dtype=object) - if opt in ['write_wrap']: + if opt in ["write_wrap"]: if len(val) != 3: - raise ValueError('%s must have 3 elements' % opt) - return super(Normalize, self)._format_arg(opt, spec, val) + raise ValueError("%s must have 3 elements" % opt) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): - """Validate spm normalize options if set to None ignore - """ - einputs = super( - Normalize, self)._parse_inputs(skip=('jobtype', 'apply_to_files')) + """Validate spm normalize options if set to None ignore""" + einputs = super()._parse_inputs(skip=("jobtype", "apply_to_files")) if isdefined(self.inputs.apply_to_files): inputfiles = deepcopy(self.inputs.apply_to_files) if isdefined(self.inputs.source): inputfiles.extend(self.inputs.source) - einputs[0]['subj']['resample'] = scans_for_fnames(inputfiles) + einputs[0]["subj"]["resample"] = scans_for_fnames(inputfiles) jobtype = self.inputs.jobtype - if jobtype in ['estwrite', 'write']: + if jobtype in ["estwrite", "write"]: if not isdefined(self.inputs.apply_to_files): if isdefined(self.inputs.source): - einputs[0]['subj']['resample'] = scans_for_fname( - self.inputs.source) - return [{'%s' % (jobtype): einputs[0]}] + einputs[0]["subj"]["resample"] = scans_for_fname(self.inputs.source) + return [{"%s" % (jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() jobtype = self.inputs.jobtype - if jobtype.startswith('est'): - outputs['normalization_parameters'] = [] + if jobtype.startswith("est"): + outputs["normalization_parameters"] = [] for imgf in ensure_list(self.inputs.source): - outputs['normalization_parameters'].append( - fname_presuffix(imgf, suffix='_sn.mat', use_ext=False)) - outputs['normalization_parameters'] = simplify_list( - outputs['normalization_parameters']) + outputs["normalization_parameters"].append( + fname_presuffix(imgf, suffix="_sn.mat", use_ext=False) + ) + outputs["normalization_parameters"] = simplify_list( + outputs["normalization_parameters"] + ) if self.inputs.jobtype == "estimate": if isdefined(self.inputs.apply_to_files): - 
outputs['normalized_files'] = self.inputs.apply_to_files - outputs['normalized_source'] = self.inputs.source - elif 'write' in self.inputs.jobtype: - if (isdefined(self.inputs.write_preserve) - and self.inputs.write_preserve): - prefixNorm = ''.join(['m', self.inputs.out_prefix]) + outputs["normalized_files"] = self.inputs.apply_to_files + outputs["normalized_source"] = self.inputs.source + elif "write" in self.inputs.jobtype: + if isdefined(self.inputs.write_preserve) and self.inputs.write_preserve: + prefixNorm = f"m{self.inputs.out_prefix}" else: prefixNorm = self.inputs.out_prefix - outputs['normalized_files'] = [] + outputs["normalized_files"] = [] if isdefined(self.inputs.apply_to_files): filelist = ensure_list(self.inputs.apply_to_files) for f in filelist: if isinstance(f, list): - run = [ - fname_presuffix(in_f, prefix=prefixNorm) - for in_f in f - ] + run = [fname_presuffix(in_f, prefix=prefixNorm) for in_f in f] else: run = [fname_presuffix(f, prefix=prefixNorm)] - outputs['normalized_files'].extend(run) + outputs["normalized_files"].extend(run) if isdefined(self.inputs.source): - outputs['normalized_source'] = [] + outputs["normalized_source"] = [] for imgf in ensure_list(self.inputs.source): - outputs['normalized_source'].append( - fname_presuffix(imgf, prefix=prefixNorm)) + outputs["normalized_source"].append( + fname_presuffix(imgf, prefix=prefixNorm) + ) return outputs @@ -1035,32 +1288,34 @@ def _list_outputs(self): class Normalize12InputSpec(SPMCommandInputSpec): image_to_align = ImageFileSPM( exists=True, - field='subj.vol', - desc=('file to estimate normalization parameters ' - 'with'), - xor=['deformation_file'], + field="subj.vol", + desc=("file to estimate normalization parameters with"), + xor=["deformation_file"], mandatory=True, - copyfile=True) + copyfile=True, + ) apply_to_files = InputMultiPath( traits.Either( - ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True))), - field='subj.resample', - desc='files to apply transformation to', - copyfile=True) + ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True)) + ), + field="subj.resample", + desc="files to apply transformation to", + copyfile=True, + ) deformation_file = ImageFileSPM( - field='subj.def', + field="subj.def", mandatory=True, - xor=['image_to_align', 'tpm'], + xor=["image_to_align", "tpm"], copyfile=False, - desc=('file y_*.nii containing 3 deformation ' - 'fields for the deformation in x, y and z ' - 'dimension')) + desc=( + "file y_*.nii containing 3 deformation " + "fields for the deformation in x, y and z " + "dimension" + ), + ) jobtype = traits.Enum( - 'estwrite', - 'est', - 'write', - usedefault=True, - desc='Estimate, Write or do Both') + "estwrite", "est", "write", usedefault=True, desc="Estimate, Write or do Both" + ) bias_regularization = traits.Enum( 0, 0.00001, @@ -1070,8 +1325,9 @@ class Normalize12InputSpec(SPMCommandInputSpec): 0.1, 1, 10, - field='eoptions.biasreg', - desc='no(0) - extremely heavy (10)') + field="eoptions.biasreg", + desc="no(0) - extremely heavy (10)", + ) bias_fwhm = traits.Enum( 30, 40, @@ -1086,75 +1342,82 @@ class Normalize12InputSpec(SPMCommandInputSpec): 130, 140, 150, - 'Inf', - field='eoptions.biasfwhm', - desc='FWHM of Gaussian smoothness of bias') + "Inf", + field="eoptions.biasfwhm", + desc="FWHM of Gaussian smoothness of bias", + ) tpm = File( exists=True, - field='eoptions.tpm', - desc=('template in form of tissue probablitiy maps to ' - 'normalize to'), - xor=['deformation_file'], - copyfile=False) + field="eoptions.tpm", + 
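# A minimal Normalize sketch (placeholder filenames). template and
# parameter_file are mutually exclusive (xor) ways of specifying the warp:
import nipype.interfaces.spm as spm

norm = spm.Normalize()
norm.inputs.template = "T1.nii"  # placeholder template image
norm.inputs.source = "functional.nii"
norm.run()  # requires a working MATLAB/SPM installation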
desc=("template in form of tissue probablitiy maps to normalize to"), + xor=["deformation_file"], + copyfile=False, + ) affine_regularization_type = traits.Enum( - 'mni', 'size', 'none', field='eoptions.affreg', desc='mni, size, none') + "mni", "size", "none", field="eoptions.affreg", desc="mni, size, none" + ) warping_regularization = traits.List( traits.Float(), - field='eoptions.reg', + field="eoptions.reg", minlen=5, maxlen=5, - desc=('controls balance between ' - 'parameters and data')) + desc=("controls balance between parameters and data"), + ) smoothness = traits.Float( - field='eoptions.fwhm', - desc=('value (in mm) to smooth the data before ' - 'normalization')) + field="eoptions.fwhm", + desc=("value (in mm) to smooth the data before normalization"), + ) sampling_distance = traits.Float( - field='eoptions.samp', - desc=('Sampling distance on data for ' - 'parameter estimation')) + field="eoptions.samp", + desc=("Sampling distance on data for parameter estimation"), + ) write_bounding_box = traits.List( traits.List(traits.Float(), minlen=3, maxlen=3), - field='woptions.bb', + field="woptions.bb", minlen=2, maxlen=2, - desc=('3x2-element list of lists ' - 'representing the bounding box ' - '(in mm) to be written')) + desc=( + "3x2-element list of lists " + "representing the bounding box " + "(in mm) to be written" + ), + ) write_voxel_sizes = traits.List( traits.Float(), - field='woptions.vox', + field="woptions.vox", minlen=3, maxlen=3, - desc=('3-element list representing the ' - 'voxel sizes (in mm) of the written ' - 'normalised images')) + desc=( + "3-element list representing the " + "voxel sizes (in mm) of the written " + "normalised images" + ), + ) write_interp = traits.Range( low=0, high=7, - field='woptions.interp', - desc=('degree of b-spline used for ' - 'interpolation')) + field="woptions.interp", + desc=("degree of b-spline used for interpolation"), + ) out_prefix = traits.String( - 'w', - field='woptions.prefix', - usedefault=True, - desc='Normalized output prefix') + "w", field="woptions.prefix", usedefault=True, desc="Normalized output prefix" + ) class Normalize12OutputSpec(TraitedSpec): deformation_field = OutputMultiPath( File(exists=True), - desc=('NIfTI file containing 3 ' - 'deformation fields for the ' - 'deformation in x, y and z ' - 'dimension')) + desc=( + "NIfTI file containing 3 " + "deformation fields for the " + "deformation in x, y and z " + "dimension" + ), + ) normalized_image = OutputMultiPath( - File(exists=True), - desc=('Normalized file that needed to ' - 'be aligned')) - normalized_files = OutputMultiPath( - File(exists=True), desc='Normalized other files') + File(exists=True), desc=("Normalized file that needed to be aligned") + ) + normalized_files = OutputMultiPath(File(exists=True), desc="Normalized other files") class Normalize12(SPMCommand): @@ -1178,74 +1441,68 @@ class Normalize12(SPMCommand): input_spec = Normalize12InputSpec output_spec = Normalize12OutputSpec - _jobtype = 'spatial' - _jobname = 'normalise' + _jobtype = "spatial" + _jobname = "normalise" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt == 'tpm': + """Convert input to appropriate format for spm""" + if opt == "tpm": return scans_for_fname(ensure_list(val)) - if opt == 'image_to_align': + if opt == "image_to_align": return scans_for_fname(ensure_list(val)) - if opt == 'apply_to_files': + if opt == "apply_to_files": return scans_for_fnames(ensure_list(val)) - if opt == 'deformation_file': + if opt == 
"deformation_file": return np.array([simplify_list(val)], dtype=object) - if opt in ['nonlinear_regularization']: + if opt in ["nonlinear_regularization"]: if len(val) != 5: - raise ValueError('%s must have 5 elements' % opt) - return super(Normalize12, self)._format_arg(opt, spec, val) + raise ValueError("%s must have 5 elements" % opt) + return super()._format_arg(opt, spec, val) def _parse_inputs(self, skip=()): - """validate spm normalize options if set to None ignore - """ - einputs = super( - Normalize12, - self)._parse_inputs(skip=('jobtype', 'apply_to_files')) + """validate spm normalize options if set to None ignore""" + einputs = super()._parse_inputs(skip=("jobtype", "apply_to_files")) if isdefined(self.inputs.apply_to_files): inputfiles = deepcopy(self.inputs.apply_to_files) if isdefined(self.inputs.image_to_align): inputfiles.extend([self.inputs.image_to_align]) - einputs[0]['subj']['resample'] = scans_for_fnames(inputfiles) + einputs[0]["subj"]["resample"] = scans_for_fnames(inputfiles) jobtype = self.inputs.jobtype - if jobtype in ['estwrite', 'write']: + if jobtype in ["estwrite", "write"]: if not isdefined(self.inputs.apply_to_files): if isdefined(self.inputs.image_to_align): - einputs[0]['subj']['resample'] = scans_for_fname( - self.inputs.image_to_align) - return [{'%s' % (jobtype): einputs[0]}] + einputs[0]["subj"]["resample"] = scans_for_fname( + self.inputs.image_to_align + ) + return [{"%s" % (jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() jobtype = self.inputs.jobtype - if jobtype.startswith('est'): - outputs['deformation_field'] = [] + if jobtype.startswith("est"): + outputs["deformation_field"] = [] for imgf in ensure_list(self.inputs.image_to_align): - outputs['deformation_field'].append( - fname_presuffix(imgf, prefix='y_')) - outputs['deformation_field'] = simplify_list( - outputs['deformation_field']) + outputs["deformation_field"].append(fname_presuffix(imgf, prefix="y_")) + outputs["deformation_field"] = simplify_list(outputs["deformation_field"]) - if self.inputs.jobtype == "estimate": - if isdefined(self.inputs.apply_to_files): - outputs['normalized_files'] = self.inputs.apply_to_files - outputs['normalized_image'] = fname_presuffix( - self.inputs.image_to_align, prefix='w') - elif 'write' in self.inputs.jobtype: - outputs['normalized_files'] = [] + if "write" in self.inputs.jobtype: + outputs["normalized_files"] = [] if isdefined(self.inputs.apply_to_files): filelist = ensure_list(self.inputs.apply_to_files) for f in filelist: if isinstance(f, list): - run = [fname_presuffix(in_f, prefix='w') for in_f in f] + run = [ + fname_presuffix(in_f, prefix=self.inputs.out_prefix) + for in_f in f + ] else: - run = [fname_presuffix(f, prefix='w')] - outputs['normalized_files'].extend(run) + run = [fname_presuffix(f, prefix=self.inputs.out_prefix)] + outputs["normalized_files"].extend(run) if isdefined(self.inputs.image_to_align): - outputs['normalized_image'] = fname_presuffix( - self.inputs.image_to_align, prefix='w') + outputs["normalized_image"] = fname_presuffix( + self.inputs.image_to_align, prefix=self.inputs.out_prefix + ) return outputs @@ -1253,17 +1510,17 @@ def _list_outputs(self): class SegmentInputSpec(SPMCommandInputSpec): data = InputMultiPath( ImageFileSPM(exists=True), - field='data', - desc='one scan per subject', + field="data", + desc="one scan per subject", copyfile=False, - mandatory=True) + mandatory=True, + ) gm_output_type = traits.List( traits.Bool(), minlen=3, maxlen=3, - field='output.GM', - desc= - 
"""Options to produce grey matter images: c1*.img, wc1*.img and mwc1*.img. + field="output.GM", + desc="""Options to produce grey matter images: c1*.img, wc1*.img and mwc1*.img. None: [False,False,False], Native Space: [False,False,True], Unmodulated Normalised: [False,True,False], @@ -1271,12 +1528,13 @@ class SegmentInputSpec(SPMCommandInputSpec): Native + Unmodulated Normalised: [False,True,True], Native + Modulated Normalised: [True,False,True], Native + Modulated + Unmodulated: [True,True,True], - Modulated + Unmodulated Normalised: [True,True,False]""") + Modulated + Unmodulated Normalised: [True,True,False]""", + ) wm_output_type = traits.List( traits.Bool(), minlen=3, maxlen=3, - field='output.WM', + field="output.WM", desc=""" Options to produce white matter images: c2*.img, wc2*.img and mwc2*.img. None: [False,False,False], @@ -1286,12 +1544,13 @@ class SegmentInputSpec(SPMCommandInputSpec): Native + Unmodulated Normalised: [False,True,True], Native + Modulated Normalised: [True,False,True], Native + Modulated + Unmodulated: [True,True,True], - Modulated + Unmodulated Normalised: [True,True,False]""") + Modulated + Unmodulated Normalised: [True,True,False]""", + ) csf_output_type = traits.List( traits.Bool(), minlen=3, maxlen=3, - field='output.CSF', + field="output.CSF", desc=""" Options to produce CSF images: c3*.img, wc3*.img and mwc3*.img. None: [False,False,False], @@ -1301,45 +1560,48 @@ class SegmentInputSpec(SPMCommandInputSpec): Native + Unmodulated Normalised: [False,True,True], Native + Modulated Normalised: [True,False,True], Native + Modulated + Unmodulated: [True,True,True], - Modulated + Unmodulated Normalised: [True,True,False]""") + Modulated + Unmodulated Normalised: [True,True,False]""", + ) save_bias_corrected = traits.Bool( - field='output.biascor', - desc=('True/False produce a bias ' - 'corrected image')) + field="output.biascor", desc=("True/False produce a bias corrected image") + ) clean_masks = traits.Enum( - 'no', - 'light', - 'thorough', - field='output.cleanup', - desc=("clean using estimated brain mask " - "('no','light','thorough')")) + "no", + "light", + "thorough", + field="output.cleanup", + desc=("clean using estimated brain mask ('no','light','thorough')"), + ) tissue_prob_maps = traits.List( File(exists=True), - field='opts.tpm', - desc=('list of gray, white & csf prob. ' - '(opt,)')) + field="opts.tpm", + desc=("list of gray, white & csf prob. 
(opt,)"), + ) gaussians_per_class = traits.List( traits.Int(), - field='opts.ngaus', - desc=('num Gaussians capture intensity ' - 'distribution')) + field="opts.ngaus", + desc=("num Gaussians capture intensity distribution"), + ) affine_regularization = traits.Enum( - 'mni', - 'eastern', - 'subj', - 'none', - '', - field='opts.regtype', - desc=('Possible options: "mni", ' - '"eastern", "subj", "none" ' - '(no reguralisation), "" ' - '(no affine registration)')) + "mni", + "eastern", + "subj", + "none", + "", + field="opts.regtype", + desc=( + 'Possible options: "mni", ' + '"eastern", "subj", "none" ' + '(no reguralisation), "" ' + "(no affine registration)" + ), + ) warping_regularization = traits.Float( - field='opts.warpreg', - desc=('Controls balance between ' - 'parameters and data')) + field="opts.warpreg", desc=("Controls balance between parameters and data") + ) warp_frequency_cutoff = traits.Float( - field='opts.warpco', desc='Cutoff of DCT bases') + field="opts.warpco", desc="Cutoff of DCT bases" + ) bias_regularization = traits.Enum( 0, 0.00001, @@ -1349,8 +1611,9 @@ class SegmentInputSpec(SPMCommandInputSpec): 0.1, 1, 10, - field='opts.biasreg', - desc='no(0) - extremely heavy (10)') + field="opts.biasreg", + desc="no(0) - extremely heavy (10)", + ) bias_fwhm = traits.Enum( 30, 40, @@ -1363,43 +1626,38 @@ class SegmentInputSpec(SPMCommandInputSpec): 110, 120, 130, - 'Inf', - field='opts.biasfwhm', - desc='FWHM of Gaussian smoothness of bias') + "Inf", + field="opts.biasfwhm", + desc="FWHM of Gaussian smoothness of bias", + ) sampling_distance = traits.Float( - field='opts.samp', - desc=('Sampling distance on data for ' - 'parameter estimation')) + field="opts.samp", desc=("Sampling distance on data for parameter estimation") + ) mask_image = File( exists=True, - field='opts.msk', - desc='Binary image to restrict parameter estimation ') + field="opts.msk", + desc="Binary image to restrict parameter estimation ", + ) class SegmentOutputSpec(TraitedSpec): - native_gm_image = File(desc='native space grey probability map') - normalized_gm_image = File(desc='normalized grey probability map', ) - modulated_gm_image = File( - desc=('modulated, normalized grey ' - 'probability map')) - native_wm_image = File(desc='native space white probability map') - normalized_wm_image = File(desc='normalized white probability map') - modulated_wm_image = File( - desc=('modulated, normalized white ' - 'probability map')) - native_csf_image = File(desc='native space csf probability map') - normalized_csf_image = File(desc='normalized csf probability map') - modulated_csf_image = File( - desc=('modulated, normalized csf ' - 'probability map')) + native_gm_image = File(desc="native space grey probability map") + normalized_gm_image = File(desc="normalized grey probability map") + modulated_gm_image = File(desc=("modulated, normalized grey probability map")) + native_wm_image = File(desc="native space white probability map") + normalized_wm_image = File(desc="normalized white probability map") + modulated_wm_image = File(desc=("modulated, normalized white probability map")) + native_csf_image = File(desc="native space csf probability map") + normalized_csf_image = File(desc="normalized csf probability map") + modulated_csf_image = File(desc=("modulated, normalized csf probability map")) modulated_input_image = File( - deprecated='0.10', - new_name='bias_corrected_image', - desc='bias-corrected version of input image') - bias_corrected_image = File(desc='bias-corrected version of input image') - 
transformation_mat = File(exists=True, desc='Normalization transformation') - inverse_transformation_mat = File( - exists=True, desc='Inverse normalization info') + deprecated="0.10", + new_name="bias_corrected_image", + desc="bias-corrected version of input image", + ) + bias_corrected_image = File(desc="bias-corrected version of input image") + transformation_mat = File(exists=True, desc="Normalization transformation") + inverse_transformation_mat = File(exists=True, desc="Inverse normalization info") class Segment(SPMCommand): @@ -1422,54 +1680,56 @@ class Segment(SPMCommand): def __init__(self, **inputs): _local_version = SPMCommand().version - if _local_version and '12.' in _local_version: - self._jobtype = 'tools' - self._jobname = 'oldseg' + if _local_version and "12." in _local_version: + self._jobtype = "tools" + self._jobname = "oldseg" else: - self._jobtype = 'spatial' - self._jobname = 'preproc' + self._jobtype = "spatial" + self._jobname = "preproc" SPMCommand.__init__(self, **inputs) def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - clean_masks_dict = {'no': 0, 'light': 1, 'thorough': 2} + """Convert input to appropriate format for spm""" + clean_masks_dict = {"no": 0, "light": 1, "thorough": 2} - if opt in ['data', 'tissue_prob_maps']: + if opt in ["data", "tissue_prob_maps"]: if isinstance(val, list): return scans_for_fnames(val) else: return scans_for_fname(val) - if 'output_type' in opt: + if "output_type" in opt: return [int(v) for v in val] - if opt == 'mask_image': + if opt == "mask_image": return scans_for_fname(val) - if opt == 'clean_masks': + if opt == "clean_masks": return clean_masks_dict[val] - return super(Segment, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() f = self.inputs.data[0] - for tidx, tissue in enumerate(['gm', 'wm', 'csf']): - outtype = '%s_output_type' % tissue + for tidx, tissue in enumerate(["gm", "wm", "csf"]): + outtype = "%s_output_type" % tissue if isdefined(getattr(self.inputs, outtype)): - for idx, (image, prefix) in enumerate([('modulated', 'mw'), - ('normalized', - 'w'), ('native', '')]): + for idx, (image, prefix) in enumerate( + [("modulated", "mw"), ("normalized", "w"), ("native", "")] + ): if getattr(self.inputs, outtype)[idx]: - outfield = '%s_%s_image' % (image, tissue) + outfield = f"{image}_{tissue}_image" outputs[outfield] = fname_presuffix( - f, prefix='%sc%d' % (prefix, tidx + 1)) - if (isdefined(self.inputs.save_bias_corrected) - and self.inputs.save_bias_corrected): - outputs['bias_corrected_image'] = fname_presuffix(f, prefix='m') - t_mat = fname_presuffix(f, suffix='_seg_sn.mat', use_ext=False) - outputs['transformation_mat'] = t_mat - invt_mat = fname_presuffix(f, suffix='_seg_inv_sn.mat', use_ext=False) - outputs['inverse_transformation_mat'] = invt_mat + f, prefix="%sc%d" % (prefix, tidx + 1) + ) + if ( + isdefined(self.inputs.save_bias_corrected) + and self.inputs.save_bias_corrected + ): + outputs["bias_corrected_image"] = fname_presuffix(f, prefix="m") + t_mat = fname_presuffix(f, suffix="_seg_sn.mat", use_ext=False) + outputs["transformation_mat"] = t_mat + invt_mat = fname_presuffix(f, suffix="_seg_inv_sn.mat", use_ext=False) + outputs["inverse_transformation_mat"] = invt_mat return outputs @@ -1478,74 +1738,85 @@ class NewSegmentInputSpec(SPMCommandInputSpec): ImageFileSPM(exists=True), mandatory=True, desc="A list of files to be segmented", - field='channel', - copyfile=False) - 
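[Editor's note] A quick sketch of the naming rule applied in `Segment._list_outputs` above; the input filename is hypothetical, and the `c`/`wc`/`mwc` prefixes are SPM's tissue-map conventions mirrored by the code:

from nipype.utils.filemanip import fname_presuffix

f = "/data/structural.nii"  # hypothetical input volume
for tidx, tissue in enumerate(["gm", "wm", "csf"]):
    for image, prefix in [("modulated", "mw"), ("normalized", "w"), ("native", "")]:
        # e.g. native gm -> /data/c1structural.nii, modulated gm -> /data/mwc1structural.nii
        print(image, tissue, fname_presuffix(f, prefix="%sc%d" % (prefix, tidx + 1)))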
channel_info = traits.Tuple(
+        field="channel",
+        copyfile=False,
+    )
+    channel_info = Tuple(
         traits.Float(),
         traits.Float(),
-        traits.Tuple(traits.Bool, traits.Bool),
+        Tuple(traits.Bool, traits.Bool),
         desc="""A tuple with the following fields:
             - bias regularisation (0-10)
             - FWHM of Gaussian smoothness of bias
             - which maps to save (Field, Corrected) - a tuple of two boolean values""",
-        field='channel')
+        field="channel",
+    )
     tissues = traits.List(
-        traits.Tuple(
-            traits.Tuple(ImageFileSPM(exists=True), traits.Int()),
-            traits.Int(), traits.Tuple(traits.Bool, traits.Bool),
-            traits.Tuple(traits.Bool, traits.Bool)),
+        Tuple(
+            Tuple(ImageFileSPM(exists=True), traits.Int()),
+            traits.Int(),
+            Tuple(traits.Bool, traits.Bool),
+            Tuple(traits.Bool, traits.Bool),
+        ),
         desc="""A list of tuples (one per tissue) with the following fields:
             - tissue probability map (4D), 1-based index to frame
             - number of gaussians
             - which maps to save [Native, DARTEL] - a tuple of two boolean values
             - which maps to save [Unmodulated, Modulated] - a tuple of two boolean values""",
-        field='tissue')
+        field="tissue",
+    )
     affine_regularization = traits.Enum(
-        'mni',
-        'eastern',
-        'subj',
-        'none',
-        field='warp.affreg',
-        desc='mni, eastern, subj, none ')
+        "mni",
+        "eastern",
+        "subj",
+        "none",
+        field="warp.affreg",
+        desc="mni, eastern, subj, none ",
+    )
     warping_regularization = traits.Either(
         traits.List(traits.Float(), minlen=5, maxlen=5),
         traits.Float(),
-        field='warp.reg',
-        desc=('Warping regularization '
-              'parameter(s). Accepts float '
-              'or list of floats (the '
-              'latter is required by '
-              'SPM12)'))
+        field="warp.reg",
+        desc=(
+            "Warping regularization "
+            "parameter(s). Accepts float "
+            "or list of floats (the "
+            "latter is required by "
+            "SPM12)"
+        ),
+    )
     sampling_distance = traits.Float(
-        field='warp.samp',
-        desc=('Sampling distance on data for '
-              'parameter estimation'))
+        field="warp.samp", desc=("Sampling distance on data for parameter estimation")
+    )
     write_deformation_fields = traits.List(
         traits.Bool(),
         minlen=2,
         maxlen=2,
-        field='warp.write',
-        desc=("Which deformation fields to "
-              "write:[Inverse, Forward]"))
+        field="warp.write",
+        desc=("Which deformation fields to write:[Inverse, Forward]"),
+    )


 class NewSegmentOutputSpec(TraitedSpec):
     native_class_images = traits.List(
-        traits.List(File(exists=True)), desc='native space probability maps')
+        traits.List(File(exists=True)), desc="native space probability maps"
+    )
     dartel_input_images = traits.List(
-        traits.List(File(exists=True)), desc='dartel imported class images')
+        traits.List(File(exists=True)), desc="dartel imported class images"
+    )
     normalized_class_images = traits.List(
-        traits.List(File(exists=True)), desc='normalized class images')
+        traits.List(File(exists=True)), desc="normalized class images"
+    )
     modulated_class_images = traits.List(
-        traits.List(File(exists=True)),
-        desc=('modulated+normalized class '
-              'images'))
+        traits.List(File(exists=True)), desc=("modulated+normalized class images")
+    )
     transformation_mat = OutputMultiPath(
-        File(exists=True), desc='Normalization transformation')
+        File(exists=True), desc="Normalization transformation"
+    )
     bias_corrected_images = OutputMultiPath(
-        File(exists=True), desc='bias corrected images')
-    bias_field_images = OutputMultiPath(
-        File(exists=True), desc='bias field images')
+        File(exists=True), desc="bias corrected images"
+    )
+    bias_field_images = OutputMultiPath(File(exists=True), desc="bias field images")
     forward_deformation_field = 
OutputMultiPath(File(exists=True)) inverse_deformation_field = OutputMultiPath(File(exists=True)) @@ -1586,132 +1857,395 @@ class NewSegment(SPMCommand): def __init__(self, **inputs): _local_version = SPMCommand().version - if _local_version and '12.' in _local_version: - self._jobtype = 'spatial' - self._jobname = 'preproc' + if _local_version and "12." in _local_version: + self._jobtype = "spatial" + self._jobname = "preproc" else: - self._jobtype = 'tools' - self._jobname = 'preproc8' + self._jobtype = "tools" + self._jobname = "preproc8" SPMCommand.__init__(self, **inputs) def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ + """Convert input to appropriate format for spm""" - if opt in ['channel_files', 'channel_info']: + if opt in ["channel_files", "channel_info"]: # structure have to be recreated because of some weird traits error new_channel = {} - new_channel['vols'] = scans_for_fnames(self.inputs.channel_files) + new_channel["vols"] = scans_for_fnames(self.inputs.channel_files) if isdefined(self.inputs.channel_info): info = self.inputs.channel_info - new_channel['biasreg'] = info[0] - new_channel['biasfwhm'] = info[1] - new_channel['write'] = [int(info[2][0]), int(info[2][1])] + new_channel["biasreg"] = info[0] + new_channel["biasfwhm"] = info[1] + new_channel["write"] = [int(info[2][0]), int(info[2][1])] return [new_channel] - elif opt == 'tissues': + elif opt == "tissues": new_tissues = [] for tissue in val: new_tissue = {} - new_tissue['tpm'] = np.array( - [','.join([tissue[0][0], str(tissue[0][1])])], - dtype=object) - new_tissue['ngaus'] = tissue[1] - new_tissue['native'] = [int(tissue[2][0]), int(tissue[2][1])] - new_tissue['warped'] = [int(tissue[3][0]), int(tissue[3][1])] + new_tissue["tpm"] = np.array( + [",".join([tissue[0][0], str(tissue[0][1])])], dtype=object + ) + new_tissue["ngaus"] = tissue[1] + new_tissue["native"] = [int(tissue[2][0]), int(tissue[2][1])] + new_tissue["warped"] = [int(tissue[3][0]), int(tissue[3][1])] new_tissues.append(new_tissue) return new_tissues - elif opt == 'write_deformation_fields': - return super(NewSegment, self)._format_arg( - opt, spec, [int(val[0]), int(val[1])]) + elif opt == "write_deformation_fields": + return super()._format_arg(opt, spec, [int(val[0]), int(val[1])]) else: - return super(NewSegment, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['native_class_images'] = [] - outputs['dartel_input_images'] = [] - outputs['normalized_class_images'] = [] - outputs['modulated_class_images'] = [] - outputs['transformation_mat'] = [] - outputs['bias_corrected_images'] = [] - outputs['bias_field_images'] = [] - outputs['inverse_deformation_field'] = [] - outputs['forward_deformation_field'] = [] + outputs["native_class_images"] = [] + outputs["dartel_input_images"] = [] + outputs["normalized_class_images"] = [] + outputs["modulated_class_images"] = [] + outputs["transformation_mat"] = [] + outputs["bias_corrected_images"] = [] + outputs["bias_field_images"] = [] + outputs["inverse_deformation_field"] = [] + outputs["forward_deformation_field"] = [] n_classes = 5 if isdefined(self.inputs.tissues): n_classes = len(self.inputs.tissues) for i in range(n_classes): - outputs['native_class_images'].append([]) - outputs['dartel_input_images'].append([]) - outputs['normalized_class_images'].append([]) - outputs['modulated_class_images'].append([]) + outputs["native_class_images"].append([]) + 
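[Editor's note] A standalone sketch (hypothetical TPM path and flags) of the tissue-tuple conversion that `NewSegment._format_arg` performs above before handing the job to SPM:

import numpy as np

tissue = (("TPM.nii", 1), 2, (True, False), (False, False))  # hypothetical input tuple
new_tissue = {
    # "<map>,<frame>" string wrapped in an object array, as SPM expects
    "tpm": np.array([",".join([tissue[0][0], str(tissue[0][1])])], dtype=object),
    "ngaus": tissue[1],
    "native": [int(tissue[2][0]), int(tissue[2][1])],
    "warped": [int(tissue[3][0]), int(tissue[3][1])],
}
# -> {'tpm': array(['TPM.nii,1'], dtype=object), 'ngaus': 2, 'native': [1, 0], 'warped': [0, 0]}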
outputs["dartel_input_images"].append([]) + outputs["normalized_class_images"].append([]) + outputs["modulated_class_images"].append([]) for filename in self.inputs.channel_files: pth, base, ext = split_filename(filename) if isdefined(self.inputs.tissues): for i, tissue in enumerate(self.inputs.tissues): if tissue[2][0]: - outputs['native_class_images'][i].append( - os.path.join(pth, "c%d%s.nii" % (i + 1, base))) + outputs["native_class_images"][i].append( + os.path.join(pth, "c%d%s.nii" % (i + 1, base)) + ) if tissue[2][1]: - outputs['dartel_input_images'][i].append( - os.path.join(pth, "rc%d%s.nii" % (i + 1, base))) + outputs["dartel_input_images"][i].append( + os.path.join(pth, "rc%d%s.nii" % (i + 1, base)) + ) if tissue[3][0]: - outputs['normalized_class_images'][i].append( - os.path.join(pth, "wc%d%s.nii" % (i + 1, base))) + outputs["normalized_class_images"][i].append( + os.path.join(pth, "wc%d%s.nii" % (i + 1, base)) + ) if tissue[3][1]: - outputs['modulated_class_images'][i].append( - os.path.join(pth, "mwc%d%s.nii" % (i + 1, base))) + outputs["modulated_class_images"][i].append( + os.path.join(pth, "mwc%d%s.nii" % (i + 1, base)) + ) else: for i in range(n_classes): - outputs['native_class_images'][i].append( - os.path.join(pth, "c%d%s.nii" % (i + 1, base))) - outputs['transformation_mat'].append( - os.path.join(pth, "%s_seg8.mat" % base)) + outputs["native_class_images"][i].append( + os.path.join(pth, "c%d%s.nii" % (i + 1, base)) + ) + outputs["transformation_mat"].append( + os.path.join(pth, "%s_seg8.mat" % base) + ) if isdefined(self.inputs.write_deformation_fields): if self.inputs.write_deformation_fields[0]: - outputs['inverse_deformation_field'].append( - os.path.join(pth, "iy_%s.nii" % base)) + outputs["inverse_deformation_field"].append( + os.path.join(pth, "iy_%s.nii" % base) + ) if self.inputs.write_deformation_fields[1]: - outputs['forward_deformation_field'].append( - os.path.join(pth, "y_%s.nii" % base)) + outputs["forward_deformation_field"].append( + os.path.join(pth, "y_%s.nii" % base) + ) if isdefined(self.inputs.channel_info): if self.inputs.channel_info[2][0]: - outputs['bias_field_images'].append( - os.path.join(pth, "BiasField_%s.nii" % (base))) + outputs["bias_field_images"].append( + os.path.join(pth, "BiasField_%s.nii" % (base)) + ) if self.inputs.channel_info[2][1]: - outputs['bias_corrected_images'].append( - os.path.join(pth, "m%s.nii" % (base))) + outputs["bias_corrected_images"].append( + os.path.join(pth, "m%s.nii" % (base)) + ) + return outputs + + +class MultiChannelNewSegmentInputSpec(SPMCommandInputSpec): + channels = traits.List( + Tuple( + InputMultiPath( + ImageFileSPM(exists=True), + mandatory=True, + desc="A list of files to be segmented", + field="channel", + copyfile=False, + ), + Tuple( + traits.Float(), + traits.Float(), + Tuple(traits.Bool, traits.Bool), + desc="""A tuple with the following fields: + - bias reguralisation (0-10) + - FWHM of Gaussian smoothness of bias + - which maps to save (Field, Corrected) - a tuple of two boolean values""", + field="channel", + ), + ), + desc="""A list of tuples (one per each channel) with the following fields: + - a list of channel files (only 1rst channel files will be segmented) + - a tuple with the following channel-specific info fields: + - bias reguralisation (0-10) + - FWHM of Gaussian smoothness of bias + - which maps to save (Field, Corrected) - a tuple of two boolean values""", + field="channel", + ) + tissues = traits.List( + Tuple( + Tuple(ImageFileSPM(exists=True), traits.Int()), + 
traits.Int(), + Tuple(traits.Bool, traits.Bool), + Tuple(traits.Bool, traits.Bool), + ), + desc="""A list of tuples (one per tissue) with the following fields: + - tissue probability map (4D), 1-based index to frame + - number of gaussians + - which maps to save [Native, DARTEL] - a tuple of two boolean values + - which maps to save [Unmodulated, Modulated] - a tuple of two boolean values""", + field="tissue", + ) + affine_regularization = traits.Enum( + "mni", + "eastern", + "subj", + "none", + field="warp.affreg", + desc="mni, eastern, subj, none ", + ) + warping_regularization = traits.Either( + traits.List(traits.Float(), minlen=5, maxlen=5), + traits.Float(), + field="warp.reg", + desc=( + "Warping regularization " + "parameter(s). Accepts float " + "or list of floats (the " + "latter is required by " + "SPM12)" + ), + ) + sampling_distance = traits.Float( + field="warp.samp", desc=("Sampling distance on data for parameter estimation") + ) + write_deformation_fields = traits.List( + traits.Bool(), + minlen=2, + maxlen=2, + field="warp.write", + desc=("Which deformation fields to write:[Inverse, Forward]"), + ) + + +class MultiChannelNewSegmentOutputSpec(TraitedSpec): + native_class_images = traits.List( + traits.List(File(exists=True)), desc="native space probability maps" + ) + dartel_input_images = traits.List( + traits.List(File(exists=True)), desc="dartel imported class images" + ) + normalized_class_images = traits.List( + traits.List(File(exists=True)), desc="normalized class images" + ) + modulated_class_images = traits.List( + traits.List(File(exists=True)), desc=("modulated+normalized class images") + ) + transformation_mat = OutputMultiPath( + File(exists=True), desc="Normalization transformation" + ) + bias_corrected_images = OutputMultiPath( + File(exists=True), desc="bias corrected images" + ) + bias_field_images = OutputMultiPath(File(exists=True), desc="bias field images") + forward_deformation_field = OutputMultiPath(File(exists=True)) + inverse_deformation_field = OutputMultiPath(File(exists=True)) + + +class MultiChannelNewSegment(SPMCommand): + """Use spm_preproc8 (New Segment) to separate structural images into + different tissue classes. Supports multiple modalities and multichannel inputs. + + http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=45 + + Examples + -------- + >>> import nipype.interfaces.spm as spm + >>> seg = spm.MultiChannelNewSegment() + >>> seg.inputs.channels = [('structural.nii',(0.0001, 60, (True, True)))] + >>> seg.run() # doctest: +SKIP + + For VBM pre-processing [http://www.fil.ion.ucl.ac.uk/~john/misc/VBMclass10.pdf], + TPM.nii should be replaced by /path/to/spm8/toolbox/Seg/TPM.nii + + >>> seg = MultiChannelNewSegment() + >>> channel1= ('T1.nii',(0.0001, 60, (True, True))) + >>> channel2= ('T2.nii',(0.0001, 60, (True, True))) + >>> seg.inputs.channels = [channel1, channel2] + >>> tissue1 = (('TPM.nii', 1), 2, (True,True), (False, False)) + >>> tissue2 = (('TPM.nii', 2), 2, (True,True), (False, False)) + >>> tissue3 = (('TPM.nii', 3), 2, (True,False), (False, False)) + >>> tissue4 = (('TPM.nii', 4), 2, (False,False), (False, False)) + >>> tissue5 = (('TPM.nii', 5), 2, (False,False), (False, False)) + >>> seg.inputs.tissues = [tissue1, tissue2, tissue3, tissue4, tissue5] + >>> seg.run() # doctest: +SKIP + + """ + + input_spec = MultiChannelNewSegmentInputSpec + output_spec = MultiChannelNewSegmentOutputSpec + + def __init__(self, **inputs): + _local_version = SPMCommand().version + if _local_version and "12." 
in _local_version: + self._jobtype = "spatial" + self._jobname = "preproc" + else: + self._jobtype = "tools" + self._jobname = "preproc8" + + SPMCommand.__init__(self, **inputs) + + def _format_arg(self, opt, spec, val): + """Convert input to appropriate format for spm""" + + if opt == "channels": + # structure have to be recreated because of some weird traits error + new_channels = [] + for channel in val: + new_channel = {} + new_channel["vols"] = scans_for_fnames(channel[0]) + if isdefined(channel[1]): + info = channel[1] + new_channel["biasreg"] = info[0] + new_channel["biasfwhm"] = info[1] + new_channel["write"] = [int(info[2][0]), int(info[2][1])] + new_channels.append(new_channel) + return new_channels + elif opt == "tissues": + new_tissues = [] + for tissue in val: + new_tissue = {} + new_tissue["tpm"] = np.array( + [",".join([tissue[0][0], str(tissue[0][1])])], dtype=object + ) + new_tissue["ngaus"] = tissue[1] + new_tissue["native"] = [int(tissue[2][0]), int(tissue[2][1])] + new_tissue["warped"] = [int(tissue[3][0]), int(tissue[3][1])] + new_tissues.append(new_tissue) + return new_tissues + elif opt == "write_deformation_fields": + return super()._format_arg(opt, spec, [int(val[0]), int(val[1])]) + else: + return super()._format_arg(opt, spec, val) + + def _list_outputs(self): + outputs = self._outputs().get() + outputs["native_class_images"] = [] + outputs["dartel_input_images"] = [] + outputs["normalized_class_images"] = [] + outputs["modulated_class_images"] = [] + outputs["transformation_mat"] = [] + outputs["bias_corrected_images"] = [] + outputs["bias_field_images"] = [] + outputs["inverse_deformation_field"] = [] + outputs["forward_deformation_field"] = [] + + n_classes = 5 + if isdefined(self.inputs.tissues): + n_classes = len(self.inputs.tissues) + for i in range(n_classes): + outputs["native_class_images"].append([]) + outputs["dartel_input_images"].append([]) + outputs["normalized_class_images"].append([]) + outputs["modulated_class_images"].append([]) + + # main outputs are generated for the first channel images only + for filename in self.inputs.channels[0][0]: + pth, base, ext = split_filename(filename) + if isdefined(self.inputs.tissues): + for i, tissue in enumerate(self.inputs.tissues): + if tissue[2][0]: + outputs["native_class_images"][i].append( + os.path.join(pth, "c%d%s.nii" % (i + 1, base)) + ) + if tissue[2][1]: + outputs["dartel_input_images"][i].append( + os.path.join(pth, "rc%d%s.nii" % (i + 1, base)) + ) + if tissue[3][0]: + outputs["normalized_class_images"][i].append( + os.path.join(pth, "wc%d%s.nii" % (i + 1, base)) + ) + if tissue[3][1]: + outputs["modulated_class_images"][i].append( + os.path.join(pth, "mwc%d%s.nii" % (i + 1, base)) + ) + else: + for i in range(n_classes): + outputs["native_class_images"][i].append( + os.path.join(pth, "c%d%s.nii" % (i + 1, base)) + ) + outputs["transformation_mat"].append( + os.path.join(pth, "%s_seg8.mat" % base) + ) + + if isdefined(self.inputs.write_deformation_fields): + if self.inputs.write_deformation_fields[0]: + outputs["inverse_deformation_field"].append( + os.path.join(pth, "iy_%s.nii" % base) + ) + if self.inputs.write_deformation_fields[1]: + outputs["forward_deformation_field"].append( + os.path.join(pth, "y_%s.nii" % base) + ) + + # bias field related images are generated for images in all channels + for channel in self.inputs.channels: + for filename in channel[0]: + pth, base, ext = split_filename(filename) + if isdefined(channel[1]): + if channel[1][2][0]: + outputs["bias_field_images"].append( + 
os.path.join(pth, "BiasField_%s.nii" % (base)) + ) + if channel[1][2][1]: + outputs["bias_corrected_images"].append( + os.path.join(pth, "m%s.nii" % (base)) + ) return outputs class SmoothInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( ImageFileSPM(exists=True), - field='data', - desc='list of files to smooth', + field="data", + desc="list of files to smooth", mandatory=True, - copyfile=False) + copyfile=False, + ) fwhm = traits.Either( traits.List(traits.Float(), minlen=3, maxlen=3), traits.Float(), - field='fwhm', - desc='3-list of fwhm for each dimension') - data_type = traits.Int( - field='dtype', desc='Data type of the output images') + field="fwhm", + desc="3-list of fwhm for each dimension", + ) + data_type = traits.Int(field="dtype", desc="Data type of the output images") implicit_masking = traits.Bool( - field='im', desc=('A mask implied by a particular' - 'voxel value')) + field="im", desc=("A mask implied by a particular voxel value") + ) out_prefix = traits.String( - 's', field='prefix', usedefault=True, desc='smoothed output prefix') + "s", field="prefix", usedefault=True, desc="smoothed output prefix" + ) class SmoothOutputSpec(TraitedSpec): - smoothed_files = OutputMultiPath(File(exists=True), desc='smoothed files') + smoothed_files = OutputMultiPath(File(exists=True), desc="smoothed files") class Smooth(SPMCommand): @@ -1730,13 +2264,13 @@ class Smooth(SPMCommand): input_spec = SmoothInputSpec output_spec = SmoothOutputSpec - _jobtype = 'spatial' - _jobname = 'smooth' + _jobtype = "spatial" + _jobname = "smooth" def _format_arg(self, opt, spec, val): - if opt in ['in_files']: + if opt in ["in_files"]: return scans_for_fnames(ensure_list(val)) - if opt == 'fwhm': + if opt == "fwhm": if not isinstance(val, list): return [val, val, val] if isinstance(val, list): @@ -1745,15 +2279,16 @@ def _format_arg(self, opt, spec, val): else: return val - return super(Smooth, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['smoothed_files'] = [] + outputs["smoothed_files"] = [] for imgf in ensure_list(self.inputs.in_files): - outputs['smoothed_files'].append( - fname_presuffix(imgf, prefix=self.inputs.out_prefix)) + outputs["smoothed_files"].append( + fname_presuffix(imgf, prefix=self.inputs.out_prefix) + ) return outputs @@ -1761,57 +2296,65 @@ class DARTELInputSpec(SPMCommandInputSpec): image_files = traits.List( traits.List(ImageFileSPM(exists=True)), desc="A list of files to be segmented", - field='warp.images', + field="warp.images", copyfile=False, - mandatory=True) + mandatory=True, + ) template_prefix = traits.Str( - 'Template', + "Template", usedefault=True, - field='warp.settings.template', - desc='Prefix for template') + field="warp.settings.template", + desc="Prefix for template", + ) regularization_form = traits.Enum( - 'Linear', - 'Membrane', - 'Bending', - field='warp.settings.rform', - desc=('Form of regularization energy ' - 'term')) + "Linear", + "Membrane", + "Bending", + field="warp.settings.rform", + desc=("Form of regularization energy term"), + ) iteration_parameters = traits.List( - traits.Tuple( + Tuple( traits.Range(1, 10), - traits.Tuple(traits.Float, traits.Float, traits.Float), + Tuple(traits.Float, traits.Float, traits.Float), traits.Enum(1, 2, 4, 8, 16, 32, 64, 128, 256, 512), - traits.Enum(0, 0.5, 1, 2, 4, 8, 16, 32)), + traits.Enum(0, 0.5, 1, 2, 4, 8, 16, 32), + ), minlen=3, maxlen=12, - field='warp.settings.param', - desc="""List of tuples for each 
iteration - - Inner iterations - - Regularization parameters - - Time points for deformation model - - smoothing parameter - """) - optimization_parameters = traits.Tuple( + field="warp.settings.param", + desc="""\ +List of tuples for each iteration + + * Inner iterations + * Regularization parameters + * Time points for deformation model + * smoothing parameter + +""", + ) + optimization_parameters = Tuple( traits.Float, traits.Range(1, 8), traits.Range(1, 8), - field='warp.settings.optim', - desc=""" - Optimization settings a tuple - - LM regularization - - cycles of multigrid solver - - relaxation iterations - """) + field="warp.settings.optim", + desc="""\ +Optimization settings a tuple: + + * LM regularization + * cycles of multigrid solver + * relaxation iterations + +""", + ) class DARTELOutputSpec(TraitedSpec): - final_template_file = File(exists=True, desc='final DARTEL template') + final_template_file = File(exists=True, desc="final DARTEL template") template_files = traits.List( - File(exists=True), - desc=('Templates from different stages of ' - 'iteration')) - dartel_flow_fields = traits.List( - File(exists=True), desc='DARTEL flow fields') + File(exists=True), desc=("Templates from different stages of iteration") + ) + dartel_flow_fields = traits.List(File(exists=True), desc="DARTEL flow fields") class DARTEL(SPMCommand): @@ -1830,52 +2373,52 @@ class DARTEL(SPMCommand): input_spec = DARTELInputSpec output_spec = DARTELOutputSpec - _jobtype = 'tools' - _jobname = 'dartel' + _jobtype = "tools" + _jobname = "dartel" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ + """Convert input to appropriate format for spm""" - if opt in ['image_files']: + if opt in ["image_files"]: return scans_for_fnames(val, keep4d=True, separate_sessions=True) - elif opt == 'regularization_form': - mapper = {'Linear': 0, 'Membrane': 1, 'Bending': 2} + elif opt == "regularization_form": + mapper = {"Linear": 0, "Membrane": 1, "Bending": 2} return mapper[val] - elif opt == 'iteration_parameters': + elif opt == "iteration_parameters": params = [] for param in val: new_param = {} - new_param['its'] = param[0] - new_param['rparam'] = list(param[1]) - new_param['K'] = param[2] - new_param['slam'] = param[3] + new_param["its"] = param[0] + new_param["rparam"] = list(param[1]) + new_param["K"] = param[2] + new_param["slam"] = param[3] params.append(new_param) return params - elif opt == 'optimization_parameters': + elif opt == "optimization_parameters": new_param = {} - new_param['lmreg'] = val[0] - new_param['cyc'] = val[1] - new_param['its'] = val[2] + new_param["lmreg"] = val[0] + new_param["cyc"] = val[1] + new_param["its"] = val[2] return [new_param] else: - return super(DARTEL, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['template_files'] = [] + outputs["template_files"] = [] for i in range(6): - outputs['template_files'].append( - os.path.realpath('%s_%d.nii' % (self.inputs.template_prefix, - i + 1))) - outputs['final_template_file'] = os.path.realpath( - '%s_6.nii' % self.inputs.template_prefix) - outputs['dartel_flow_fields'] = [] + outputs["template_files"].append( + os.path.realpath("%s_%d.nii" % (self.inputs.template_prefix, i + 1)) + ) + outputs["final_template_file"] = os.path.realpath( + "%s_6.nii" % self.inputs.template_prefix + ) + outputs["dartel_flow_fields"] = [] for filename in self.inputs.image_files[0]: pth, base, ext = 
split_filename(filename) - outputs['dartel_flow_fields'].append( - os.path.realpath('u_%s_%s%s' % - (base, self.inputs.template_prefix, ext))) + outputs["dartel_flow_fields"].append( + os.path.realpath(f"u_{base}_{self.inputs.template_prefix}{ext}") + ) return outputs @@ -1885,25 +2428,29 @@ class DARTELNorm2MNIInputSpec(SPMCommandInputSpec): copyfile=False, mandatory=True, desc="DARTEL template", - field='mni_norm.template') + field="mni_norm.template", + ) flowfield_files = InputMultiPath( ImageFileSPM(exists=True), mandatory=True, desc="DARTEL flow fields u_rc1*", - field='mni_norm.data.subjs.flowfields') + field="mni_norm.data.subjs.flowfields", + ) apply_to_files = InputMultiPath( ImageFileSPM(exists=True), desc="Files to apply the transform to", - field='mni_norm.data.subjs.images', + field="mni_norm.data.subjs.images", mandatory=True, - copyfile=False) - voxel_size = traits.Tuple( + copyfile=False, + ) + voxel_size = Tuple( traits.Float, traits.Float, traits.Float, desc="Voxel sizes for output file", - field='mni_norm.vox') - bounding_box = traits.Tuple( + field="mni_norm.vox", + ) + bounding_box = Tuple( traits.Float, traits.Float, traits.Float, @@ -1911,24 +2458,27 @@ class DARTELNorm2MNIInputSpec(SPMCommandInputSpec): traits.Float, traits.Float, desc="Voxel sizes for output file", - field='mni_norm.bb') + field="mni_norm.bb", + ) modulate = traits.Bool( - field='mni_norm.preserve', - desc=("Modulate out images - no modulation " - "preserves concentrations")) + field="mni_norm.preserve", + desc=("Modulate out images - no modulation preserves concentrations"), + ) fwhm = traits.Either( traits.List(traits.Float(), minlen=3, maxlen=3), traits.Float(), - field='mni_norm.fwhm', - desc='3-list of fwhm for each dimension') + field="mni_norm.fwhm", + desc="3-list of fwhm for each dimension", + ) class DARTELNorm2MNIOutputSpec(TraitedSpec): normalized_files = OutputMultiPath( - File(exists=True), desc='Normalized files in MNI space') + File(exists=True), desc="Normalized files in MNI space" + ) normalization_parameter_file = File( - exists=True, desc=('Transform parameters to MNI ' - 'space')) + exists=True, desc=("Transform parameters to MNI space") + ) class DARTELNorm2MNI(SPMCommand): @@ -1950,45 +2500,42 @@ class DARTELNorm2MNI(SPMCommand): input_spec = DARTELNorm2MNIInputSpec output_spec = DARTELNorm2MNIOutputSpec - _jobtype = 'tools' - _jobname = 'dartel' + _jobtype = "tools" + _jobname = "dartel" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt in ['template_file']: + """Convert input to appropriate format for spm""" + if opt in ["template_file"]: return np.array([val], dtype=object) - elif opt in ['flowfield_files']: + elif opt in ["flowfield_files"]: return scans_for_fnames(val, keep4d=True) - elif opt in ['apply_to_files']: + elif opt in ["apply_to_files"]: return scans_for_fnames(val, keep4d=True, separate_sessions=True) - elif opt == 'voxel_size': + elif opt == "voxel_size": return list(val) - elif opt == 'bounding_box': + elif opt == "bounding_box": return list(val) - elif opt == 'fwhm': + elif opt == "fwhm": if isinstance(val, list): return val else: return [val, val, val] else: - return super(DARTELNorm2MNI, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() pth, base, ext = split_filename(self.inputs.template_file) - outputs['normalization_parameter_file'] = os.path.realpath( - base + '_2mni.mat') - outputs['normalized_files'] = [] + 
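[Editor's note] The prefix stacking in `DARTELNorm2MNI._list_outputs` (continuing just below) composes "w" (warped to MNI), then "m" (modulated), then "s" (smoothed; added when fwhm is unset or positive). A small sketch with assumed settings:

prefix = "w"                 # always warped to MNI
modulate, fwhm = True, 8.0   # hypothetical inputs
if modulate:
    prefix = "m" + prefix
if fwhm > 0:                 # the interface also smooths when fwhm is undefined
    prefix = "s" + prefix
print(prefix + "structural.nii")  # -> smwstructural.nii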
outputs["normalization_parameter_file"] = os.path.realpath(base + "_2mni.mat") + outputs["normalized_files"] = [] prefix = "w" if isdefined(self.inputs.modulate) and self.inputs.modulate: - prefix = 'm' + prefix + prefix = "m" + prefix if not isdefined(self.inputs.fwhm) or self.inputs.fwhm > 0: - prefix = 's' + prefix + prefix = "s" + prefix for filename in self.inputs.apply_to_files: pth, base, ext = split_filename(filename) - outputs['normalized_files'].append( - os.path.realpath('%s%s%s' % (prefix, base, ext))) + outputs["normalized_files"].append(os.path.realpath(f"{prefix}{base}{ext}")) return outputs @@ -1998,31 +2545,33 @@ class CreateWarpedInputSpec(SPMCommandInputSpec): ImageFileSPM(exists=True), mandatory=True, desc="A list of files to be warped", - field='crt_warped.images', - copyfile=False) + field="crt_warped.images", + copyfile=False, + ) flowfield_files = InputMultiPath( ImageFileSPM(exists=True), copyfile=False, desc="DARTEL flow fields u_rc1*", - field='crt_warped.flowfields', - mandatory=True) + field="crt_warped.flowfields", + mandatory=True, + ) iterations = traits.Range( low=0, high=9, - desc=("The number of iterations: log2(number of " - "time steps)"), - field='crt_warped.K') + desc=("The number of iterations: log2(number of time steps)"), + field="crt_warped.K", + ) interp = traits.Range( low=0, high=7, - field='crt_warped.interp', - desc='degree of b-spline used for interpolation') - modulate = traits.Bool( - field='crt_warped.jactransf', desc="Modulate images") + field="crt_warped.interp", + desc="degree of b-spline used for interpolation", + ) + modulate = traits.Bool(field="crt_warped.jactransf", desc="Modulate images") class CreateWarpedOutputSpec(TraitedSpec): - warped_files = traits.List(File(exists=True, desc='final warped files')) + warped_files = traits.List(File(exists=True, desc="final warped files")) class CreateWarped(SPMCommand): @@ -2042,45 +2591,40 @@ class CreateWarped(SPMCommand): input_spec = CreateWarpedInputSpec output_spec = CreateWarpedOutputSpec - _jobtype = 'tools' - _jobname = 'dartel' + _jobtype = "tools" + _jobname = "dartel" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ + """Convert input to appropriate format for spm""" - if opt in ['image_files']: + if opt in ["image_files"]: return scans_for_fnames(val, keep4d=True, separate_sessions=True) - if opt in ['flowfield_files']: + if opt in ["flowfield_files"]: return scans_for_fnames(val, keep4d=True) else: - return super(CreateWarped, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['warped_files'] = [] + outputs["warped_files"] = [] for filename in self.inputs.image_files: pth, base, ext = split_filename(filename) if isdefined(self.inputs.modulate) and self.inputs.modulate: - outputs['warped_files'].append( - os.path.realpath('mw%s%s' % (base, ext))) + outputs["warped_files"].append(os.path.realpath(f"mw{base}{ext}")) else: - outputs['warped_files'].append( - os.path.realpath('w%s%s' % (base, ext))) + outputs["warped_files"].append(os.path.realpath(f"w{base}{ext}")) return outputs class ApplyDeformationFieldInputSpec(SPMCommandInputSpec): - in_files = InputMultiPath( - ImageFileSPM(exists=True), mandatory=True, field='fnames') - deformation_field = File(exists=True, mandatory=True, field='comp{1}.def') + in_files = InputMultiPath(ImageFileSPM(exists=True), mandatory=True, field="fnames") + deformation_field = File(exists=True, mandatory=True, 
field="comp{1}.def") reference_volume = ImageFileSPM( - exists=True, mandatory=True, field='comp{2}.id.space') + exists=True, mandatory=True, field="comp{2}.id.space" + ) interp = traits.Range( - low=0, - high=7, - field='interp', - desc='degree of b-spline used for interpolation') + low=0, high=7, field="interp", desc="degree of b-spline used for interpolation" + ) class ApplyDeformationFieldOutputSpec(TraitedSpec): @@ -2091,228 +2635,223 @@ class ApplyDeformations(SPMCommand): input_spec = ApplyDeformationFieldInputSpec output_spec = ApplyDeformationFieldOutputSpec - _jobtype = 'util' - _jobname = 'defs' + _jobtype = "util" + _jobname = "defs" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt in ['deformation_field', 'reference_volume']: + """Convert input to appropriate format for spm""" + if opt in ["deformation_field", "reference_volume"]: val = [val] - if opt in ['deformation_field']: + if opt in ["deformation_field"]: return scans_for_fnames(val, keep4d=True, separate_sessions=False) - if opt in ['in_files', 'reference_volume']: + if opt in ["in_files", "reference_volume"]: return scans_for_fnames(val, keep4d=False, separate_sessions=False) else: - return super(ApplyDeformations, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['out_files'] = [] + outputs["out_files"] = [] for filename in self.inputs.in_files: _, fname = os.path.split(filename) - outputs['out_files'].append(os.path.realpath('w%s' % fname)) + outputs["out_files"].append(os.path.realpath("w%s" % fname)) return outputs class VBMSegmentInputSpec(SPMCommandInputSpec): - in_files = InputMultiPath( ImageFileSPM(exists=True), desc="A list of files to be segmented", - field='estwrite.data', + field="estwrite.data", copyfile=False, - mandatory=True) + mandatory=True, + ) tissues = ImageFileSPM( - exists=True, field='estwrite.tpm', desc='tissue probability map') - gaussians_per_class = traits.Tuple( + exists=True, field="estwrite.tpm", desc="tissue probability map" + ) + gaussians_per_class = Tuple( (2, 2, 2, 3, 4, 2), *([traits.Int()] * 6), usedefault=True, - desc='number of gaussians for each tissue class') + desc="number of gaussians for each tissue class", + ) bias_regularization = traits.Enum( - 0.0001, (0, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10), - field='estwrite.opts.biasreg', + 0.0001, + (0, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10), + field="estwrite.opts.biasreg", usedefault=True, - desc='no(0) - extremely heavy (10)') + desc="no(0) - extremely heavy (10)", + ) bias_fwhm = traits.Enum( - 60, (30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 'Inf'), - field='estwrite.opts.biasfwhm', + 60, + (30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, "Inf"), + field="estwrite.opts.biasfwhm", usedefault=True, - desc='FWHM of Gaussian smoothness of bias') + desc="FWHM of Gaussian smoothness of bias", + ) sampling_distance = traits.Float( 3, usedefault=True, - field='estwrite.opts.samp', - desc='Sampling distance on data for parameter estimation') + field="estwrite.opts.samp", + desc="Sampling distance on data for parameter estimation", + ) warping_regularization = traits.Float( 4, usedefault=True, - field='estwrite.opts.warpreg', - desc='Controls balance between parameters and data') - - spatial_normalization = traits.Enum( - 'high', - 'low', - usedefault=True, + field="estwrite.opts.warpreg", + desc="Controls balance between parameters and data", ) + + spatial_normalization = 
traits.Enum("high", "low", usedefault=True) dartel_template = ImageFileSPM( - exists=True, field='estwrite.extopts.dartelwarp.normhigh.darteltpm') + exists=True, field="estwrite.extopts.dartelwarp.normhigh.darteltpm" + ) use_sanlm_denoising_filter = traits.Range( 0, 2, 2, usedefault=True, - field='estwrite.extopts.sanlm', - desc="0=No denoising, 1=denoising,2=denoising multi-threaded") - mrf_weighting = traits.Float( - 0.15, usedefault=True, field='estwrite.extopts.mrf') + field="estwrite.extopts.sanlm", + desc="0=No denoising, 1=denoising,2=denoising multi-threaded", + ) + mrf_weighting = traits.Float(0.15, usedefault=True, field="estwrite.extopts.mrf") cleanup_partitions = traits.Int( 1, usedefault=True, - field='estwrite.extopts.cleanup', - desc="0=None,1=light,2=thorough") - display_results = traits.Bool( - True, usedefault=True, field='estwrite.extopts.print') - - gm_native = traits.Bool( - False, - usedefault=True, - field='estwrite.output.GM.native', + field="estwrite.extopts.cleanup", + desc="0=None,1=light,2=thorough", ) + display_results = traits.Bool(True, usedefault=True, field="estwrite.extopts.print") + + gm_native = traits.Bool(False, usedefault=True, field="estwrite.output.GM.native") gm_normalized = traits.Bool( - False, - usedefault=True, - field='estwrite.output.GM.warped', + False, usedefault=True, field="estwrite.output.GM.warped" ) gm_modulated_normalized = traits.Range( 0, 2, 2, usedefault=True, - field='estwrite.output.GM.modulated', - desc='0=none,1=affine+non-linear(SPM8 default),2=non-linear only') + field="estwrite.output.GM.modulated", + desc="0=none,1=affine+non-linear(SPM8 default),2=non-linear only", + ) gm_dartel = traits.Range( 0, 2, 0, usedefault=True, - field='estwrite.output.GM.dartel', - desc="0=None,1=rigid(SPM8 default),2=affine") - - wm_native = traits.Bool( - False, - usedefault=True, - field='estwrite.output.WM.native', + field="estwrite.output.GM.dartel", + desc="0=None,1=rigid(SPM8 default),2=affine", ) + + wm_native = traits.Bool(False, usedefault=True, field="estwrite.output.WM.native") wm_normalized = traits.Bool( - False, - usedefault=True, - field='estwrite.output.WM.warped', + False, usedefault=True, field="estwrite.output.WM.warped" ) wm_modulated_normalized = traits.Range( 0, 2, 2, usedefault=True, - field='estwrite.output.WM.modulated', - desc='0=none,1=affine+non-linear(SPM8 default),2=non-linear only') + field="estwrite.output.WM.modulated", + desc="0=none,1=affine+non-linear(SPM8 default),2=non-linear only", + ) wm_dartel = traits.Range( 0, 2, 0, usedefault=True, - field='estwrite.output.WM.dartel', - desc="0=None,1=rigid(SPM8 default),2=affine") - - csf_native = traits.Bool( - False, - usedefault=True, - field='estwrite.output.CSF.native', + field="estwrite.output.WM.dartel", + desc="0=None,1=rigid(SPM8 default),2=affine", ) + + csf_native = traits.Bool(False, usedefault=True, field="estwrite.output.CSF.native") csf_normalized = traits.Bool( - False, - usedefault=True, - field='estwrite.output.CSF.warped', + False, usedefault=True, field="estwrite.output.CSF.warped" ) csf_modulated_normalized = traits.Range( 0, 2, 2, usedefault=True, - field='estwrite.output.CSF.modulated', - desc='0=none,1=affine+non-linear(SPM8 default),2=non-linear only') + field="estwrite.output.CSF.modulated", + desc="0=none,1=affine+non-linear(SPM8 default),2=non-linear only", + ) csf_dartel = traits.Range( 0, 2, 0, usedefault=True, - field='estwrite.output.CSF.dartel', - desc="0=None,1=rigid(SPM8 default),2=affine") + field="estwrite.output.CSF.dartel", + 
desc="0=None,1=rigid(SPM8 default),2=affine", + ) bias_corrected_native = traits.Bool( - False, - usedefault=True, - field='estwrite.output.bias.native', + False, usedefault=True, field="estwrite.output.bias.native" ) bias_corrected_normalized = traits.Bool( - True, - usedefault=True, - field='estwrite.output.bias.warped', + True, usedefault=True, field="estwrite.output.bias.warped" ) bias_corrected_affine = traits.Bool( - False, - usedefault=True, - field='estwrite.output.bias.affine', + False, usedefault=True, field="estwrite.output.bias.affine" ) pve_label_native = traits.Bool( - False, usedefault=True, field='estwrite.output.label.native') + False, usedefault=True, field="estwrite.output.label.native" + ) pve_label_normalized = traits.Bool( - False, usedefault=True, field='estwrite.output.label.warped') + False, usedefault=True, field="estwrite.output.label.warped" + ) pve_label_dartel = traits.Range( 0, 2, 0, usedefault=True, - field='estwrite.output.label.dartel', - desc="0=None,1=rigid(SPM8 default),2=affine") + field="estwrite.output.label.dartel", + desc="0=None,1=rigid(SPM8 default),2=affine", + ) jacobian_determinant = traits.Bool( - False, usedefault=True, field='estwrite.jacobian.warped') + False, usedefault=True, field="estwrite.jacobian.warped" + ) - deformation_field = traits.Tuple( + deformation_field = Tuple( (0, 0), traits.Bool, traits.Bool, usedefault=True, - field='estwrite.output.warps', - desc='forward and inverse field') + field="estwrite.output.warps", + desc="forward and inverse field", + ) class VBMSegmentOuputSpec(TraitedSpec): - native_class_images = traits.List( - traits.List(File(exists=True)), desc='native space probability maps') + traits.List(File(exists=True)), desc="native space probability maps" + ) dartel_input_images = traits.List( - traits.List(File(exists=True)), desc='dartel imported class images') + traits.List(File(exists=True)), desc="dartel imported class images" + ) normalized_class_images = traits.List( - traits.List(File(exists=True)), desc='normalized class images') + traits.List(File(exists=True)), desc="normalized class images" + ) modulated_class_images = traits.List( - traits.List(File(exists=True)), - desc=('modulated+normalized class ' - 'images')) + traits.List(File(exists=True)), desc=("modulated+normalized class images") + ) transformation_mat = OutputMultiPath( - File(exists=True), desc='Normalization transformation') + File(exists=True), desc="Normalization transformation" + ) bias_corrected_images = OutputMultiPath( - File(exists=True), desc='bias corrected images') + File(exists=True), desc="bias corrected images" + ) normalized_bias_corrected_images = OutputMultiPath( - File(exists=True), desc='bias corrected images') + File(exists=True), desc="bias corrected images" + ) pve_label_native_images = OutputMultiPath(File(exists=True)) pve_label_normalized_images = OutputMultiPath(File(exists=True)) @@ -2346,124 +2885,134 @@ class VBMSegment(SPMCommand): input_spec = VBMSegmentInputSpec output_spec = VBMSegmentOuputSpec - _jobtype = 'tools' - _jobname = 'vbm8' + _jobtype = "tools" + _jobname = "vbm8" def _list_outputs(self): outputs = self._outputs().get() do_dartel = self.inputs.spatial_normalization - dartel_px = '' + dartel_px = "" if do_dartel: - dartel_px = 'r' + dartel_px = "r" - outputs['native_class_images'] = [[], [], []] - outputs['dartel_input_images'] = [[], [], []] - outputs['normalized_class_images'] = [[], [], []] - outputs['modulated_class_images'] = [[], [], []] + outputs["native_class_images"] = [[], [], []] + 
outputs["dartel_input_images"] = [[], [], []] + outputs["normalized_class_images"] = [[], [], []] + outputs["modulated_class_images"] = [[], [], []] - outputs['transformation_mat'] = [] + outputs["transformation_mat"] = [] - outputs['bias_corrected_images'] = [] - outputs['normalized_bias_corrected_images'] = [] + outputs["bias_corrected_images"] = [] + outputs["normalized_bias_corrected_images"] = [] - outputs['inverse_deformation_field'] = [] - outputs['forward_deformation_field'] = [] - outputs['jacobian_determinant_images'] = [] + outputs["inverse_deformation_field"] = [] + outputs["forward_deformation_field"] = [] + outputs["jacobian_determinant_images"] = [] - outputs['pve_label_native_images'] = [] - outputs['pve_label_normalized_images'] = [] - outputs['pve_label_registered_images'] = [] + outputs["pve_label_native_images"] = [] + outputs["pve_label_normalized_images"] = [] + outputs["pve_label_registered_images"] = [] for filename in self.inputs.in_files: pth, base, ext = split_filename(filename) - outputs['transformation_mat'].append( - os.path.join(pth, "%s_seg8.mat" % base)) + outputs["transformation_mat"].append( + os.path.join(pth, "%s_seg8.mat" % base) + ) - for i, tis in enumerate(['gm', 'wm', 'csf']): + for i, tis in enumerate(["gm", "wm", "csf"]): # native space - if getattr(self.inputs, '%s_native' % tis): - outputs['native_class_images'][i].append( - os.path.join(pth, "p%d%s.nii" % (i + 1, base))) - if getattr(self.inputs, '%s_dartel' % tis) == 1: - outputs['dartel_input_images'][i].append( - os.path.join(pth, "rp%d%s.nii" % (i + 1, base))) - elif getattr(self.inputs, '%s_dartel' % tis) == 2: - outputs['dartel_input_images'][i].append( - os.path.join(pth, "rp%d%s_affine.nii" % (i + 1, base))) - - # normalized space - if getattr(self.inputs, '%s_normalized' % tis): - outputs['normalized_class_images'][i].append( - os.path.join(pth, "w%sp%d%s.nii" % (dartel_px, i + 1, - base))) - - if getattr(self.inputs, '%s_modulated_normalized' % tis) == 1: - outputs['modulated_class_images'][i].append( - os.path.join(pth, "mw%sp%d%s.nii" % (dartel_px, i + 1, - base))) - elif getattr(self.inputs, - '%s_modulated_normalized' % tis) == 2: - outputs['normalized_class_images'][i].append( - os.path.join(pth, "m0w%sp%d%s.nii" % (dartel_px, i + 1, - base))) + if getattr(self.inputs, "%s_native" % tis): + outputs["native_class_images"][i].append( + os.path.join(pth, "p%d%s.nii" % (i + 1, base)) + ) + if getattr(self.inputs, "%s_dartel" % tis) == 1: + outputs["dartel_input_images"][i].append( + os.path.join(pth, "rp%d%s.nii" % (i + 1, base)) + ) + elif getattr(self.inputs, "%s_dartel" % tis) == 2: + outputs["dartel_input_images"][i].append( + os.path.join(pth, "rp%d%s_affine.nii" % (i + 1, base)) + ) + + # normalized space + if getattr(self.inputs, "%s_normalized" % tis): + outputs["normalized_class_images"][i].append( + os.path.join(pth, "w%sp%d%s.nii" % (dartel_px, i + 1, base)) + ) + + if getattr(self.inputs, "%s_modulated_normalized" % tis) == 1: + outputs["modulated_class_images"][i].append( + os.path.join(pth, "mw%sp%d%s.nii" % (dartel_px, i + 1, base)) + ) + elif getattr(self.inputs, "%s_modulated_normalized" % tis) == 2: + outputs["normalized_class_images"][i].append( + os.path.join(pth, "m0w%sp%d%s.nii" % (dartel_px, i + 1, base)) + ) if self.inputs.pve_label_native: - outputs['pve_label_native_images'].append( - os.path.join(pth, "p0%s.nii" % (base))) + outputs["pve_label_native_images"].append( + os.path.join(pth, "p0%s.nii" % (base)) + ) if self.inputs.pve_label_normalized: - 
outputs['pve_label_normalized_images'].append( - os.path.join(pth, "w%sp0%s.nii" % (dartel_px, base))) + outputs["pve_label_normalized_images"].append( + os.path.join(pth, f"w{dartel_px}p0{base}.nii") + ) if self.inputs.pve_label_dartel == 1: - outputs['pve_label_registered_images'].append( - os.path.join(pth, "rp0%s.nii" % (base))) + outputs["pve_label_registered_images"].append( + os.path.join(pth, "rp0%s.nii" % (base)) + ) elif self.inputs.pve_label_dartel == 2: - outputs['pve_label_registered_images'].append( - os.path.join(pth, "rp0%s_affine.nii" % (base))) + outputs["pve_label_registered_images"].append( + os.path.join(pth, "rp0%s_affine.nii" % (base)) + ) if self.inputs.bias_corrected_native: - outputs['bias_corrected_images'].append( - os.path.join(pth, "m%s.nii" % (base))) + outputs["bias_corrected_images"].append( + os.path.join(pth, "m%s.nii" % (base)) + ) if self.inputs.bias_corrected_normalized: - outputs['normalized_bias_corrected_images'].append( - os.path.join(pth, "wm%s%s.nii" % (dartel_px, base))) + outputs["normalized_bias_corrected_images"].append( + os.path.join(pth, f"wm{dartel_px}{base}.nii") + ) if self.inputs.deformation_field[0]: - outputs['forward_deformation_field'].append( - os.path.join(pth, "y_%s%s.nii" % (dartel_px, base))) + outputs["forward_deformation_field"].append( + os.path.join(pth, f"y_{dartel_px}{base}.nii") + ) if self.inputs.deformation_field[1]: - outputs['inverse_deformation_field'].append( - os.path.join(pth, "iy_%s%s.nii" % (dartel_px, base))) + outputs["inverse_deformation_field"].append( + os.path.join(pth, f"iy_{dartel_px}{base}.nii") + ) if self.inputs.jacobian_determinant and do_dartel: - outputs['jacobian_determinant_images'].append( - os.path.join(pth, "jac_wrp1%s.nii" % (base))) + outputs["jacobian_determinant_images"].append( + os.path.join(pth, "jac_wrp1%s.nii" % (base)) + ) return outputs def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt in ['in_files']: + """Convert input to appropriate format for spm""" + if opt in ["in_files"]: return scans_for_fnames(val, keep4d=True) - elif opt in ['spatial_normalization']: - if val == 'low': - return {'normlow': []} - elif opt in ['dartel_template']: + elif opt in ["spatial_normalization"]: + if val == "low": + return {"normlow": []} + elif opt in ["dartel_template"]: return np.array([val], dtype=object) - elif opt in ['deformation_field']: - return super(VBMSegment, self)._format_arg( - opt, spec, [int(val[0]), int(val[1])]) + elif opt in ["deformation_field"]: + return super()._format_arg(opt, spec, [int(val[0]), int(val[1])]) else: - return super(VBMSegment, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): - if self.inputs.spatial_normalization == 'low': - einputs = super(VBMSegment, self)._parse_inputs( - skip=('spatial_normalization', 'dartel_template')) - einputs[0]['estwrite']['extopts']['dartelwarp'] = {'normlow': 1} + if self.inputs.spatial_normalization == "low": + einputs = super()._parse_inputs( + skip=("spatial_normalization", "dartel_template") + ) + einputs[0]["estwrite"]["extopts"]["dartelwarp"] = {"normlow": 1} return einputs else: - return super(VBMSegment, - self)._parse_inputs(skip=('spatial_normalization')) + return super()._parse_inputs(skip=("spatial_normalization")) diff --git a/nipype/interfaces/spm/tests/__init__.py b/nipype/interfaces/spm/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/spm/tests/__init__.py +++ 
b/nipype/interfaces/spm/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/spm/tests/test_auto_Analyze2nii.py b/nipype/interfaces/spm/tests/test_auto_Analyze2nii.py index 1b1aa6edcd..15fe9399ed 100644 --- a/nipype/interfaces/spm/tests/test_auto_Analyze2nii.py +++ b/nipype/interfaces/spm/tests/test_auto_Analyze2nii.py @@ -1,17 +1,21 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Analyze2nii def test_Analyze2nii_inputs(): input_map = dict( - analyze_file=dict(mandatory=True, ), + analyze_file=dict( + extensions=None, + mandatory=True, + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), paths=dict(), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -20,15 +24,21 @@ def test_Analyze2nii_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Analyze2nii_outputs(): output_map = dict( matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - nifti_file=dict(), + mfile=dict( + usedefault=True, + ), + nifti_file=dict( + extensions=None, + ), paths=dict(), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) diff --git a/nipype/interfaces/spm/tests/test_auto_ApplyDeformations.py b/nipype/interfaces/spm/tests/test_auto_ApplyDeformations.py index bbc925ec9e..d41a0fb4b6 100644 --- a/nipype/interfaces/spm/tests/test_auto_ApplyDeformations.py +++ b/nipype/interfaces/spm/tests/test_auto_ApplyDeformations.py @@ -1,29 +1,34 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import ApplyDeformations def test_ApplyDeformations_inputs(): input_map = dict( deformation_field=dict( - field='comp{1}.def', + extensions=None, + field="comp{1}.def", mandatory=True, ), in_files=dict( - field='fnames', + field="fnames", mandatory=True, ), - interp=dict(field='interp', ), + interp=dict( + field="interp", + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), paths=dict(), reference_volume=dict( - field='comp{2}.id.space', + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="comp{2}.id.space", mandatory=True, ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -32,8 +37,12 @@ def test_ApplyDeformations_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyDeformations_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict( + out_files=dict(), + ) outputs = ApplyDeformations.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py b/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py index d1c35dbcd2..c652bd7e12 100644 --- a/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py +++ b/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py @@ -1,42 +1,58 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ApplyInverseDeformation def test_ApplyInverseDeformation_inputs(): input_map = dict( - bounding_box=dict(field='comp{1}.inv.comp{1}.sn2def.bb', ), + bounding_box=dict( + field="comp{1}.inv.comp{1}.sn2def.bb", + ), 
deformation=dict( - field='comp{1}.inv.comp{1}.sn2def.matname', - xor=['deformation_field'], + extensions=None, + field="comp{1}.inv.comp{1}.sn2def.matname", + xor=["deformation_field"], ), deformation_field=dict( - field='comp{1}.inv.comp{1}.def', - xor=['deformation'], + extensions=None, + field="comp{1}.inv.comp{1}.def", + xor=["deformation"], ), in_files=dict( - field='fnames', + field="fnames", mandatory=True, ), - interpolation=dict(field='interp', ), + interpolation=dict( + field="interp", + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), paths=dict(), - target=dict(field='comp{1}.inv.space', ), + target=dict( + extensions=None, + field="comp{1}.inv.space", + ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - voxel_sizes=dict(field='comp{1}.inv.comp{1}.sn2def.vox', ), + voxel_sizes=dict( + field="comp{1}.inv.comp{1}.sn2def.vox", + ), ) inputs = ApplyInverseDeformation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyInverseDeformation_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict( + out_files=dict(), + ) outputs = ApplyInverseDeformation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_ApplyTransform.py b/nipype/interfaces/spm/tests/test_auto_ApplyTransform.py index a686e95485..ae0516370d 100644 --- a/nipype/interfaces/spm/tests/test_auto_ApplyTransform.py +++ b/nipype/interfaces/spm/tests/test_auto_ApplyTransform.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ApplyTransform @@ -7,16 +6,25 @@ def test_ApplyTransform_inputs(): input_map = dict( in_file=dict( copyfile=True, + extensions=None, + mandatory=True, + ), + mat=dict( + extensions=None, mandatory=True, ), - mat=dict(mandatory=True, ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - out_file=dict(genfile=True, ), + mfile=dict( + usedefault=True, + ), + out_file=dict( + extensions=None, + genfile=True, + ), paths=dict(), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -25,8 +33,14 @@ def test_ApplyTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyTransform_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = ApplyTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py b/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py new file mode 100644 index 0000000000..2f56b49ef2 --- /dev/null +++ b/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py @@ -0,0 +1,70 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..preprocess import ApplyVDM + + +def test_ApplyVDM_inputs(): + input_map = dict( + distortion_direction=dict( + field="roptions.pedir", + usedefault=True, + ), + in_files=dict( + copyfile=True, + field="data.scans", + mandatory=True, + ), + interpolation=dict( + field="roptions.rinterp", + ), + matlab_cmd=dict(), + mfile=dict( + usedefault=True, + ), + out_prefix=dict( + field="roptions.prefix", + usedefault=True, + ), + paths=dict(), + use_mcr=dict(), + use_v8struct=dict( + min_ver="8", + usedefault=True, + 
), + vdmfile=dict( + copyfile=True, + extensions=None, + field="data.vdmfile", + mandatory=True, + ), + write_mask=dict( + field="roptions.mask", + ), + write_which=dict( + field="roptions.which", + maxlen=2, + minlen=2, + usedefault=True, + ), + write_wrap=dict( + field="roptions.wrap", + ), + ) + inputs = ApplyVDM.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_ApplyVDM_outputs(): + output_map = dict( + mean_image=dict( + extensions=None, + ), + out_files=dict(), + ) + outputs = ApplyVDM.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/spm/tests/test_auto_CalcCoregAffine.py b/nipype/interfaces/spm/tests/test_auto_CalcCoregAffine.py index 69695ca7b5..156591cbb9 100644 --- a/nipype/interfaces/spm/tests/test_auto_CalcCoregAffine.py +++ b/nipype/interfaces/spm/tests/test_auto_CalcCoregAffine.py @@ -1,23 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import CalcCoregAffine def test_CalcCoregAffine_inputs(): input_map = dict( - invmat=dict(), - mat=dict(), + invmat=dict( + extensions=None, + ), + mat=dict( + extensions=None, + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), moving=dict( copyfile=False, + extensions=None, mandatory=True, ), paths=dict(), - target=dict(mandatory=True, ), + target=dict( + extensions=None, + mandatory=True, + ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -26,10 +35,16 @@ def test_CalcCoregAffine_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CalcCoregAffine_outputs(): output_map = dict( - invmat=dict(), - mat=dict(), + invmat=dict( + extensions=None, + ), + mat=dict( + extensions=None, + ), ) outputs = CalcCoregAffine.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_Coregister.py b/nipype/interfaces/spm/tests/test_auto_Coregister.py index 957a2c84a9..940f69ebbf 100644 --- a/nipype/interfaces/spm/tests/test_auto_Coregister.py +++ b/nipype/interfaces/spm/tests/test_auto_Coregister.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Coregister @@ -7,44 +6,65 @@ def test_Coregister_inputs(): input_map = dict( apply_to_files=dict( copyfile=True, - field='other', + field="other", + ), + cost_function=dict( + field="eoptions.cost_fun", + ), + fwhm=dict( + field="eoptions.fwhm", + ), + jobtype=dict( + usedefault=True, ), - cost_function=dict(field='eoptions.cost_fun', ), - fwhm=dict(field='eoptions.fwhm', ), - jobtype=dict(usedefault=True, ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), out_prefix=dict( - field='roptions.prefix', + field="roptions.prefix", usedefault=True, ), paths=dict(), - separation=dict(field='eoptions.sep', ), + separation=dict( + field="eoptions.sep", + ), source=dict( copyfile=True, - field='source', + field="source", mandatory=True, ), target=dict( copyfile=False, - field='ref', + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="ref", mandatory=True, ), - tolerance=dict(field='eoptions.tol', ), + tolerance=dict( + 
field="eoptions.tol", + ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - write_interp=dict(field='roptions.interp', ), - write_mask=dict(field='roptions.mask', ), - write_wrap=dict(field='roptions.wrap', ), + write_interp=dict( + field="roptions.interp", + ), + write_mask=dict( + field="roptions.mask", + ), + write_wrap=dict( + field="roptions.wrap", + ), ) inputs = Coregister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Coregister_outputs(): output_map = dict( coregistered_files=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_CreateWarped.py b/nipype/interfaces/spm/tests/test_auto_CreateWarped.py index dd98d30e37..f62694641c 100644 --- a/nipype/interfaces/spm/tests/test_auto_CreateWarped.py +++ b/nipype/interfaces/spm/tests/test_auto_CreateWarped.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import CreateWarped @@ -7,23 +6,31 @@ def test_CreateWarped_inputs(): input_map = dict( flowfield_files=dict( copyfile=False, - field='crt_warped.flowfields', + field="crt_warped.flowfields", mandatory=True, ), image_files=dict( copyfile=False, - field='crt_warped.images', + field="crt_warped.images", mandatory=True, ), - interp=dict(field='crt_warped.interp', ), - iterations=dict(field='crt_warped.K', ), + interp=dict( + field="crt_warped.interp", + ), + iterations=dict( + field="crt_warped.K", + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - modulate=dict(field='crt_warped.jactransf', ), + mfile=dict( + usedefault=True, + ), + modulate=dict( + field="crt_warped.jactransf", + ), paths=dict(), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -32,8 +39,12 @@ def test_CreateWarped_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CreateWarped_outputs(): - output_map = dict(warped_files=dict(), ) + output_map = dict( + warped_files=dict(), + ) outputs = CreateWarped.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_DARTEL.py b/nipype/interfaces/spm/tests/test_auto_DARTEL.py index cc1b9eee1b..5fbba0c287 100644 --- a/nipype/interfaces/spm/tests/test_auto_DARTEL.py +++ b/nipype/interfaces/spm/tests/test_auto_DARTEL.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import DARTEL @@ -7,22 +6,30 @@ def test_DARTEL_inputs(): input_map = dict( image_files=dict( copyfile=False, - field='warp.images', + field="warp.images", mandatory=True, ), - iteration_parameters=dict(field='warp.settings.param', ), + iteration_parameters=dict( + field="warp.settings.param", + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - optimization_parameters=dict(field='warp.settings.optim', ), + mfile=dict( + usedefault=True, + ), + optimization_parameters=dict( + field="warp.settings.optim", + ), paths=dict(), - regularization_form=dict(field='warp.settings.rform', ), + regularization_form=dict( + field="warp.settings.rform", + ), template_prefix=dict( - field='warp.settings.template', + field="warp.settings.template", usedefault=True, ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -31,10 +38,14 @@ def 
test_DARTEL_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DARTEL_outputs(): output_map = dict( dartel_flow_fields=dict(), - final_template_file=dict(), + final_template_file=dict( + extensions=None, + ), template_files=dict(), ) outputs = DARTEL.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_DARTELNorm2MNI.py b/nipype/interfaces/spm/tests/test_auto_DARTELNorm2MNI.py index d25bde2e5d..d4b8ad8a78 100644 --- a/nipype/interfaces/spm/tests/test_auto_DARTELNorm2MNI.py +++ b/nipype/interfaces/spm/tests/test_auto_DARTELNorm2MNI.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import DARTELNorm2MNI @@ -7,39 +6,54 @@ def test_DARTELNorm2MNI_inputs(): input_map = dict( apply_to_files=dict( copyfile=False, - field='mni_norm.data.subjs.images', + field="mni_norm.data.subjs.images", mandatory=True, ), - bounding_box=dict(field='mni_norm.bb', ), + bounding_box=dict( + field="mni_norm.bb", + ), flowfield_files=dict( - field='mni_norm.data.subjs.flowfields', + field="mni_norm.data.subjs.flowfields", mandatory=True, ), - fwhm=dict(field='mni_norm.fwhm', ), + fwhm=dict( + field="mni_norm.fwhm", + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - modulate=dict(field='mni_norm.preserve', ), + mfile=dict( + usedefault=True, + ), + modulate=dict( + field="mni_norm.preserve", + ), paths=dict(), template_file=dict( copyfile=False, - field='mni_norm.template', + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="mni_norm.template", mandatory=True, ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - voxel_size=dict(field='mni_norm.vox', ), + voxel_size=dict( + field="mni_norm.vox", + ), ) inputs = DARTELNorm2MNI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DARTELNorm2MNI_outputs(): output_map = dict( - normalization_parameter_file=dict(), + normalization_parameter_file=dict( + extensions=None, + ), normalized_files=dict(), ) outputs = DARTELNorm2MNI.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_DicomImport.py b/nipype/interfaces/spm/tests/test_auto_DicomImport.py index fec62ddeeb..e0459d467f 100644 --- a/nipype/interfaces/spm/tests/test_auto_DicomImport.py +++ b/nipype/interfaces/spm/tests/test_auto_DicomImport.py @@ -1,36 +1,37 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import DicomImport def test_DicomImport_inputs(): input_map = dict( format=dict( - field='convopts.format', + field="convopts.format", usedefault=True, ), icedims=dict( - field='convopts.icedims', + field="convopts.icedims", usedefault=True, ), in_files=dict( - field='data', + field="data", mandatory=True, ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), output_dir=dict( - field='outdir', + field="outdir", usedefault=True, ), output_dir_struct=dict( - field='root', + field="root", usedefault=True, ), paths=dict(), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -39,8 +40,12 @@ def test_DicomImport_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_DicomImport_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict( + out_files=dict(), + ) outputs = DicomImport.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py b/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py index bc9bb9006e..84fb496e9c 100644 --- a/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py +++ b/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import EstimateContrast @@ -9,24 +8,34 @@ def test_EstimateContrast_inputs(): copyfile=False, mandatory=True, ), - contrasts=dict(mandatory=True, ), - group_contrast=dict(xor=['use_derivs'], ), + contrasts=dict( + mandatory=True, + ), + group_contrast=dict( + xor=["use_derivs"], + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), paths=dict(), residual_image=dict( copyfile=False, + extensions=None, mandatory=True, ), spm_mat_file=dict( copyfile=True, - field='spmmat', + extensions=None, + field="spmmat", mandatory=True, ), - use_derivs=dict(xor=['group_contrast'], ), + use_derivs=dict( + xor=["group_contrast"], + ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -35,13 +44,17 @@ def test_EstimateContrast_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateContrast_outputs(): output_map = dict( con_images=dict(), ess_images=dict(), spmF_images=dict(), spmT_images=dict(), - spm_mat_file=dict(), + spm_mat_file=dict( + extensions=None, + ), ) outputs = EstimateContrast.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_EstimateModel.py b/nipype/interfaces/spm/tests/test_auto_EstimateModel.py index 05f511a0bd..04c4679767 100644 --- a/nipype/interfaces/spm/tests/test_auto_EstimateModel.py +++ b/nipype/interfaces/spm/tests/test_auto_EstimateModel.py @@ -1,48 +1,68 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import EstimateModel def test_EstimateModel_inputs(): input_map = dict( estimation_method=dict( - field='method', + field="method", mandatory=True, ), flags=dict(), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), paths=dict(), spm_mat_file=dict( copyfile=True, - field='spmmat', + extensions=None, + field="spmmat", mandatory=True, ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - write_residuals=dict(field='write_residuals', ), + write_residuals=dict( + field="write_residuals", + ), ) inputs = EstimateModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateModel_outputs(): output_map = dict( ARcoef=dict(), Cbetas=dict(), - RPVimage=dict(), + RPVimage=dict( + extensions=[".hdr", ".img", ".img.gz", ".nii"], + ), SDbetas=dict(), SDerror=dict(), beta_images=dict(), - labels=dict(), - mask_image=dict(), - residual_image=dict(), + con_images=dict(), + ess_images=dict(), + labels=dict( + extensions=[".hdr", ".img", ".img.gz", ".nii"], + ), + mask_image=dict( + extensions=[".hdr", ".img", ".img.gz", ".nii"], + ), + residual_image=dict( + extensions=[".hdr", ".img", ".img.gz", ".nii"], + ), 
residual_images=dict(), - spm_mat_file=dict(), + spmF_images=dict(), + spmT_images=dict(), + spm_mat_file=dict( + extensions=None, + ), ) outputs = EstimateModel.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py b/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py index 38d2b1c6fb..de435ea771 100644 --- a/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py @@ -1,46 +1,60 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import FactorialDesign def test_FactorialDesign_inputs(): input_map = dict( - covariates=dict(field='cov', ), - explicit_mask_file=dict(field='masking.em', ), + covariates=dict( + field="cov", + ), + explicit_mask_file=dict( + extensions=None, + field="masking.em", + ), global_calc_mean=dict( - field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + field="globalc.g_mean", + xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( - field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + field="globalc.g_omit", + xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], + ), + global_normalization=dict( + field="globalm.glonorm", ), - global_normalization=dict(field='globalm.glonorm', ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - no_grand_mean_scaling=dict(field='globalm.gmsca.gmsca_no', ), + mfile=dict( + usedefault=True, + ), + no_grand_mean_scaling=dict( + field="globalm.gmsca.gmsca_no", + ), paths=dict(), - spm_mat_dir=dict(field='dir', ), + spm_mat_dir=dict( + field="dir", + ), threshold_mask_absolute=dict( - field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + field="masking.tm.tma.athresh", + xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( - field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + field="masking.tm.tm_none", + xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( - field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + field="masking.tm.tmr.rthresh", + xor=["threshold_mask_absolute", "threshold_mask_none"], + ), + use_implicit_threshold=dict( + field="masking.im", ), - use_implicit_threshold=dict(field='masking.im', ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -49,8 +63,14 @@ def test_FactorialDesign_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FactorialDesign_outputs(): - output_map = dict(spm_mat_file=dict(), ) + output_map = dict( + spm_mat_file=dict( + extensions=None, + ), + ) outputs = FactorialDesign.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_FieldMap.py b/nipype/interfaces/spm/tests/test_auto_FieldMap.py index 43fbbcb8f4..ccd9e70c6e 100644 --- a/nipype/interfaces/spm/tests/test_auto_FieldMap.py +++ b/nipype/interfaces/spm/tests/test_auto_FieldMap.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import FieldMap @@ -7,110 
+6,120 @@ def test_FieldMap_inputs(): input_map = dict( anat_file=dict( copyfile=False, - field='subj.anat', + extensions=None, + field="subj.anat", ), blip_direction=dict( - field='subj.defaults.defaultsval.blipdir', + field="subj.defaults.defaultsval.blipdir", mandatory=True, ), echo_times=dict( - field='subj.defaults.defaultsval.et', + field="subj.defaults.defaultsval.et", mandatory=True, ), epi_file=dict( copyfile=False, - field='subj.session.epi', + extensions=None, + field="subj.session.epi", mandatory=True, ), epifm=dict( - field='subj.defaults.defaultsval.epifm', + field="subj.defaults.defaultsval.epifm", usedefault=True, ), jacobian_modulation=dict( - field='subj.defaults.defaultsval.ajm', + field="subj.defaults.defaultsval.ajm", + usedefault=True, + ), + jobtype=dict( + deprecated="1.9.0", usedefault=True, ), - jobtype=dict(usedefault=True, ), magnitude_file=dict( copyfile=False, - field='subj.data.presubphasemag.magnitude', + extensions=None, + field="subj.data.presubphasemag.magnitude", mandatory=True, ), mask_fwhm=dict( - field='subj.defaults.defaultsval.mflags.fwhm', + field="subj.defaults.defaultsval.mflags.fwhm", usedefault=True, ), maskbrain=dict( - field='subj.defaults.defaultsval.maskbrain', + field="subj.defaults.defaultsval.maskbrain", usedefault=True, ), matchanat=dict( - field='subj.matchanat', + field="subj.matchanat", usedefault=True, ), matchvdm=dict( - field='subj.matchvdm', + field="subj.matchvdm", usedefault=True, ), matlab_cmd=dict(), method=dict( - field='subj.defaults.defaultsval.uflags.method', + field="subj.defaults.defaultsval.uflags.method", + usedefault=True, + ), + mfile=dict( usedefault=True, ), - mfile=dict(usedefault=True, ), ndilate=dict( - field='subj.defaults.defaultsval.mflags.ndilate', + field="subj.defaults.defaultsval.mflags.ndilate", usedefault=True, ), nerode=dict( - field='subj.defaults.defaultsval.mflags.nerode', + field="subj.defaults.defaultsval.mflags.nerode", usedefault=True, ), pad=dict( - field='subj.defaults.defaultsval.uflags.pad', + field="subj.defaults.defaultsval.uflags.pad", usedefault=True, ), paths=dict(), phase_file=dict( copyfile=False, - field='subj.data.presubphasemag.phase', + extensions=None, + field="subj.data.presubphasemag.phase", mandatory=True, ), reg=dict( - field='subj.defaults.defaultsval.mflags.reg', + field="subj.defaults.defaultsval.mflags.reg", usedefault=True, ), sessname=dict( - field='subj.sessname', + field="subj.sessname", usedefault=True, ), template=dict( copyfile=False, - field='subj.defaults.defaultsval.mflags.template', + extensions=None, + field="subj.defaults.defaultsval.mflags.template", ), thresh=dict( - field='subj.defaults.defaultsval.mflags.thresh', + field="subj.defaults.defaultsval.mflags.thresh", usedefault=True, ), total_readout_time=dict( - field='subj.defaults.defaultsval.tert', + field="subj.defaults.defaultsval.tert", mandatory=True, ), unwarp_fwhm=dict( - field='subj.defaults.defaultsval.uflags.fwhm', + field="subj.defaults.defaultsval.uflags.fwhm", usedefault=True, ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), writeunwarped=dict( - field='subj.writeunwarped', + field="subj.writeunwarped", usedefault=True, ), ws=dict( - field='subj.defaults.defaultsval.uflags.ws', + field="subj.defaults.defaultsval.uflags.ws", usedefault=True, ), ) @@ -119,8 +128,14 @@ def test_FieldMap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_FieldMap_outputs(): - output_map = dict(vdm=dict(), ) + output_map = dict( + vdm=dict( + extensions=None, + ), + ) outputs = FieldMap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_Level1Design.py b/nipype/interfaces/spm/tests/test_auto_Level1Design.py index 7ad8ab8195..04c9f315ef 100644 --- a/nipype/interfaces/spm/tests/test_auto_Level1Design.py +++ b/nipype/interfaces/spm/tests/test_auto_Level1Design.py @@ -1,51 +1,78 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Level1Design def test_Level1Design_inputs(): input_map = dict( bases=dict( - field='bases', + field="bases", mandatory=True, ), - factor_info=dict(field='fact', ), - global_intensity_normalization=dict(field='global', ), + factor_info=dict( + field="fact", + ), + flags=dict(), + global_intensity_normalization=dict( + field="global", + ), interscan_interval=dict( - field='timing.RT', + field="timing.RT", mandatory=True, ), - mask_image=dict(field='mask', ), - mask_threshold=dict(usedefault=True, ), + mask_image=dict( + extensions=None, + field="mask", + ), + mask_threshold=dict( + usedefault=True, + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - microtime_onset=dict(field='timing.fmri_t0', ), - microtime_resolution=dict(field='timing.fmri_t', ), - model_serial_correlations=dict(field='cvi', ), + mfile=dict( + usedefault=True, + ), + microtime_onset=dict( + field="timing.fmri_t0", + ), + microtime_resolution=dict( + field="timing.fmri_t", + ), + model_serial_correlations=dict( + field="cvi", + ), paths=dict(), session_info=dict( - field='sess', + field="sess", mandatory=True, ), - spm_mat_dir=dict(field='dir', ), + spm_mat_dir=dict( + field="dir", + ), timing_units=dict( - field='timing.units', + field="timing.units", mandatory=True, ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - volterra_expansion_order=dict(field='volt', ), + volterra_expansion_order=dict( + field="volt", + ), ) inputs = Level1Design.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Level1Design_outputs(): - output_map = dict(spm_mat_file=dict(), ) + output_map = dict( + spm_mat_file=dict( + extensions=None, + ), + ) outputs = Level1Design.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_MultiChannelNewSegment.py b/nipype/interfaces/spm/tests/test_auto_MultiChannelNewSegment.py new file mode 100644 index 0000000000..9c8f743d45 --- /dev/null +++ b/nipype/interfaces/spm/tests/test_auto_MultiChannelNewSegment.py @@ -0,0 +1,59 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..preprocess import MultiChannelNewSegment + + +def test_MultiChannelNewSegment_inputs(): + input_map = dict( + affine_regularization=dict( + field="warp.affreg", + ), + channels=dict( + field="channel", + ), + matlab_cmd=dict(), + mfile=dict( + usedefault=True, + ), + paths=dict(), + sampling_distance=dict( + field="warp.samp", + ), + tissues=dict( + field="tissue", + ), + use_mcr=dict(), + use_v8struct=dict( + min_ver="8", + usedefault=True, + ), + warping_regularization=dict( + field="warp.reg", + ), + write_deformation_fields=dict( + field="warp.write", + ), + ) + inputs = MultiChannelNewSegment.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in 
list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_MultiChannelNewSegment_outputs(): + output_map = dict( + bias_corrected_images=dict(), + bias_field_images=dict(), + dartel_input_images=dict(), + forward_deformation_field=dict(), + inverse_deformation_field=dict(), + modulated_class_images=dict(), + native_class_images=dict(), + normalized_class_images=dict(), + transformation_mat=dict(), + ) + outputs = MultiChannelNewSegment.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py b/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py index 26957f2fbb..61d3b38a74 100644 --- a/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py @@ -1,65 +1,87 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import MultipleRegressionDesign def test_MultipleRegressionDesign_inputs(): input_map = dict( - covariates=dict(field='cov', ), - explicit_mask_file=dict(field='masking.em', ), + covariates=dict( + field="cov", + ), + explicit_mask_file=dict( + extensions=None, + field="masking.em", + ), global_calc_mean=dict( - field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + field="globalc.g_mean", + xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( - field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + field="globalc.g_omit", + xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], + ), + global_normalization=dict( + field="globalm.glonorm", ), - global_normalization=dict(field='globalm.glonorm', ), in_files=dict( - field='des.mreg.scans', + field="des.mreg.scans", mandatory=True, ), include_intercept=dict( - field='des.mreg.incint', + field="des.mreg.incint", usedefault=True, ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - no_grand_mean_scaling=dict(field='globalm.gmsca.gmsca_no', ), + mfile=dict( + usedefault=True, + ), + no_grand_mean_scaling=dict( + field="globalm.gmsca.gmsca_no", + ), paths=dict(), - spm_mat_dir=dict(field='dir', ), + spm_mat_dir=dict( + field="dir", + ), threshold_mask_absolute=dict( - field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + field="masking.tm.tma.athresh", + xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( - field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + field="masking.tm.tm_none", + xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( - field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + field="masking.tm.tmr.rthresh", + xor=["threshold_mask_absolute", "threshold_mask_none"], + ), + use_implicit_threshold=dict( + field="masking.im", ), - use_implicit_threshold=dict(field='masking.im', ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - user_covariates=dict(field='des.mreg.mcov', ), + user_covariates=dict( + field="des.mreg.mcov", + ), ) inputs = MultipleRegressionDesign.input_spec() 
for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultipleRegressionDesign_outputs(): - output_map = dict(spm_mat_file=dict(), ) + output_map = dict( + spm_mat_file=dict( + extensions=None, + ), + ) outputs = MultipleRegressionDesign.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_NewSegment.py b/nipype/interfaces/spm/tests/test_auto_NewSegment.py index 3f03685e11..dae310f6e2 100644 --- a/nipype/interfaces/spm/tests/test_auto_NewSegment.py +++ b/nipype/interfaces/spm/tests/test_auto_NewSegment.py @@ -1,35 +1,50 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import NewSegment def test_NewSegment_inputs(): input_map = dict( - affine_regularization=dict(field='warp.affreg', ), + affine_regularization=dict( + field="warp.affreg", + ), channel_files=dict( copyfile=False, - field='channel', + field="channel", mandatory=True, ), - channel_info=dict(field='channel', ), + channel_info=dict( + field="channel", + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), paths=dict(), - sampling_distance=dict(field='warp.samp', ), - tissues=dict(field='tissue', ), + sampling_distance=dict( + field="warp.samp", + ), + tissues=dict( + field="tissue", + ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - warping_regularization=dict(field='warp.reg', ), - write_deformation_fields=dict(field='warp.write', ), + warping_regularization=dict( + field="warp.reg", + ), + write_deformation_fields=dict( + field="warp.write", + ), ) inputs = NewSegment.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NewSegment_outputs(): output_map = dict( bias_corrected_images=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_Normalize.py b/nipype/interfaces/spm/tests/test_auto_Normalize.py index fde0bf7fff..caa063d923 100644 --- a/nipype/interfaces/spm/tests/test_auto_Normalize.py +++ b/nipype/interfaces/spm/tests/test_auto_Normalize.py @@ -1,70 +1,101 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Normalize def test_Normalize_inputs(): input_map = dict( - DCT_period_cutoff=dict(field='eoptions.cutoff', ), - affine_regularization_type=dict(field='eoptions.regtype', ), + DCT_period_cutoff=dict( + field="eoptions.cutoff", + ), + affine_regularization_type=dict( + field="eoptions.regtype", + ), apply_to_files=dict( copyfile=True, - field='subj.resample', + field="subj.resample", + ), + jobtype=dict( + usedefault=True, ), - jobtype=dict(usedefault=True, ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - nonlinear_iterations=dict(field='eoptions.nits', ), - nonlinear_regularization=dict(field='eoptions.reg', ), + mfile=dict( + usedefault=True, + ), + nonlinear_iterations=dict( + field="eoptions.nits", + ), + nonlinear_regularization=dict( + field="eoptions.reg", + ), out_prefix=dict( - field='roptions.prefix', + field="roptions.prefix", usedefault=True, ), parameter_file=dict( copyfile=False, - field='subj.matname', + extensions=None, + field="subj.matname", mandatory=True, - xor=['source', 'template'], + xor=["source", "template"], ), paths=dict(), source=dict( copyfile=True, - field='subj.source', + 
field="subj.source", mandatory=True, - xor=['parameter_file'], + xor=["parameter_file"], + ), + source_image_smoothing=dict( + field="eoptions.smosrc", ), - source_image_smoothing=dict(field='eoptions.smosrc', ), source_weight=dict( copyfile=False, - field='subj.wtsrc', + extensions=None, + field="subj.wtsrc", ), template=dict( copyfile=False, - field='eoptions.template', + extensions=None, + field="eoptions.template", mandatory=True, - xor=['parameter_file'], + xor=["parameter_file"], + ), + template_image_smoothing=dict( + field="eoptions.smoref", ), - template_image_smoothing=dict(field='eoptions.smoref', ), template_weight=dict( copyfile=False, - field='eoptions.weight', + extensions=None, + field="eoptions.weight", ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - write_bounding_box=dict(field='roptions.bb', ), - write_interp=dict(field='roptions.interp', ), - write_preserve=dict(field='roptions.preserve', ), - write_voxel_sizes=dict(field='roptions.vox', ), - write_wrap=dict(field='roptions.wrap', ), + write_bounding_box=dict( + field="roptions.bb", + ), + write_interp=dict( + field="roptions.interp", + ), + write_preserve=dict( + field="roptions.preserve", + ), + write_voxel_sizes=dict( + field="roptions.vox", + ), + write_wrap=dict( + field="roptions.wrap", + ), ) inputs = Normalize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Normalize_outputs(): output_map = dict( normalization_parameters=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_Normalize12.py b/nipype/interfaces/spm/tests/test_auto_Normalize12.py index bf8da2dba1..965ffafec9 100644 --- a/nipype/interfaces/spm/tests/test_auto_Normalize12.py +++ b/nipype/interfaces/spm/tests/test_auto_Normalize12.py @@ -1,59 +1,85 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Normalize12 def test_Normalize12_inputs(): input_map = dict( - affine_regularization_type=dict(field='eoptions.affreg', ), + affine_regularization_type=dict( + field="eoptions.affreg", + ), apply_to_files=dict( copyfile=True, - field='subj.resample', + field="subj.resample", + ), + bias_fwhm=dict( + field="eoptions.biasfwhm", + ), + bias_regularization=dict( + field="eoptions.biasreg", ), - bias_fwhm=dict(field='eoptions.biasfwhm', ), - bias_regularization=dict(field='eoptions.biasreg', ), deformation_file=dict( copyfile=False, - field='subj.def', + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="subj.def", mandatory=True, - xor=['image_to_align', 'tpm'], + xor=["image_to_align", "tpm"], ), image_to_align=dict( copyfile=True, - field='subj.vol', + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="subj.vol", mandatory=True, - xor=['deformation_file'], + xor=["deformation_file"], + ), + jobtype=dict( + usedefault=True, ), - jobtype=dict(usedefault=True, ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), out_prefix=dict( - field='woptions.prefix', + field="woptions.prefix", usedefault=True, ), paths=dict(), - sampling_distance=dict(field='eoptions.samp', ), - smoothness=dict(field='eoptions.fwhm', ), + sampling_distance=dict( + field="eoptions.samp", + ), + smoothness=dict( + field="eoptions.fwhm", + ), tpm=dict( copyfile=False, - field='eoptions.tpm', - xor=['deformation_file'], + extensions=None, + field="eoptions.tpm", + xor=["deformation_file"], ), 
use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - warping_regularization=dict(field='eoptions.reg', ), - write_bounding_box=dict(field='woptions.bb', ), - write_interp=dict(field='woptions.interp', ), - write_voxel_sizes=dict(field='woptions.vox', ), + warping_regularization=dict( + field="eoptions.reg", + ), + write_bounding_box=dict( + field="woptions.bb", + ), + write_interp=dict( + field="woptions.interp", + ), + write_voxel_sizes=dict( + field="woptions.vox", + ), ) inputs = Normalize12.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Normalize12_outputs(): output_map = dict( deformation_field=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py b/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py index dbb02a6275..9b77ab0af7 100644 --- a/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py @@ -1,50 +1,64 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import OneSampleTTestDesign def test_OneSampleTTestDesign_inputs(): input_map = dict( - covariates=dict(field='cov', ), - explicit_mask_file=dict(field='masking.em', ), + covariates=dict( + field="cov", + ), + explicit_mask_file=dict( + extensions=None, + field="masking.em", + ), global_calc_mean=dict( - field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + field="globalc.g_mean", + xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( - field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + field="globalc.g_omit", + xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], + ), + global_normalization=dict( + field="globalm.glonorm", ), - global_normalization=dict(field='globalm.glonorm', ), in_files=dict( - field='des.t1.scans', + field="des.t1.scans", mandatory=True, ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - no_grand_mean_scaling=dict(field='globalm.gmsca.gmsca_no', ), + mfile=dict( + usedefault=True, + ), + no_grand_mean_scaling=dict( + field="globalm.gmsca.gmsca_no", + ), paths=dict(), - spm_mat_dir=dict(field='dir', ), + spm_mat_dir=dict( + field="dir", + ), threshold_mask_absolute=dict( - field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + field="masking.tm.tma.athresh", + xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( - field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + field="masking.tm.tm_none", + xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( - field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + field="masking.tm.tmr.rthresh", + xor=["threshold_mask_absolute", "threshold_mask_none"], + ), + use_implicit_threshold=dict( + field="masking.im", ), - use_implicit_threshold=dict(field='masking.im', ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -53,8 +67,14 @@ def test_OneSampleTTestDesign_inputs(): for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_OneSampleTTestDesign_outputs(): - output_map = dict(spm_mat_file=dict(), ) + output_map = dict( + spm_mat_file=dict( + extensions=None, + ), + ) outputs = OneSampleTTestDesign.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py b/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py index 3e1662268a..6be1f6ec01 100644 --- a/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py @@ -1,52 +1,70 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import PairedTTestDesign def test_PairedTTestDesign_inputs(): input_map = dict( - ancova=dict(field='des.pt.ancova', ), - covariates=dict(field='cov', ), - explicit_mask_file=dict(field='masking.em', ), + ancova=dict( + field="des.pt.ancova", + ), + covariates=dict( + field="cov", + ), + explicit_mask_file=dict( + extensions=None, + field="masking.em", + ), global_calc_mean=dict( - field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + field="globalc.g_mean", + xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( - field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + field="globalc.g_omit", + xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], + ), + global_normalization=dict( + field="globalm.glonorm", + ), + grand_mean_scaling=dict( + field="des.pt.gmsca", ), - global_normalization=dict(field='globalm.glonorm', ), - grand_mean_scaling=dict(field='des.pt.gmsca', ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - no_grand_mean_scaling=dict(field='globalm.gmsca.gmsca_no', ), + mfile=dict( + usedefault=True, + ), + no_grand_mean_scaling=dict( + field="globalm.gmsca.gmsca_no", + ), paired_files=dict( - field='des.pt.pair', + field="des.pt.pair", mandatory=True, ), paths=dict(), - spm_mat_dir=dict(field='dir', ), + spm_mat_dir=dict( + field="dir", + ), threshold_mask_absolute=dict( - field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + field="masking.tm.tma.athresh", + xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( - field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + field="masking.tm.tm_none", + xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( - field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + field="masking.tm.tmr.rthresh", + xor=["threshold_mask_absolute", "threshold_mask_none"], + ), + use_implicit_threshold=dict( + field="masking.im", ), - use_implicit_threshold=dict(field='masking.im', ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -55,8 +73,14 @@ def test_PairedTTestDesign_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PairedTTestDesign_outputs(): - output_map = dict(spm_mat_file=dict(), ) + output_map = dict( + spm_mat_file=dict( + extensions=None, + ), + ) outputs = PairedTTestDesign.output_spec() for key, metadata 
in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_Realign.py b/nipype/interfaces/spm/tests/test_auto_Realign.py index ac2f5bbd92..5165d6f33e 100644 --- a/nipype/interfaces/spm/tests/test_auto_Realign.py +++ b/nipype/interfaces/spm/tests/test_auto_Realign.py @@ -1,53 +1,81 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Realign def test_Realign_inputs(): input_map = dict( - fwhm=dict(field='eoptions.fwhm', ), + fwhm=dict( + field="eoptions.fwhm", + ), in_files=dict( copyfile=True, - field='data', + field="data", mandatory=True, ), - interp=dict(field='eoptions.interp', ), - jobtype=dict(usedefault=True, ), + interp=dict( + field="eoptions.interp", + ), + jobtype=dict( + usedefault=True, + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), out_prefix=dict( - field='roptions.prefix', + field="roptions.prefix", usedefault=True, ), paths=dict(), - quality=dict(field='eoptions.quality', ), - register_to_mean=dict(field='eoptions.rtm', ), - separation=dict(field='eoptions.sep', ), + quality=dict( + field="eoptions.quality", + ), + register_to_mean=dict( + field="eoptions.rtm", + ), + separation=dict( + field="eoptions.sep", + ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - weight_img=dict(field='eoptions.weight', ), - wrap=dict(field='eoptions.wrap', ), - write_interp=dict(field='roptions.interp', ), - write_mask=dict(field='roptions.mask', ), + weight_img=dict( + extensions=None, + field="eoptions.weight", + ), + wrap=dict( + field="eoptions.wrap", + ), + write_interp=dict( + field="roptions.interp", + ), + write_mask=dict( + field="roptions.mask", + ), write_which=dict( - field='roptions.which', + field="roptions.which", maxlen=2, minlen=2, usedefault=True, ), - write_wrap=dict(field='roptions.wrap', ), + write_wrap=dict( + field="roptions.wrap", + ), ) inputs = Realign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Realign_outputs(): output_map = dict( - mean_image=dict(), + mean_image=dict( + extensions=None, + ), modified_in_files=dict(), realigned_files=dict(), realignment_parameters=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py b/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py index 5c7c36f80b..bb27419547 100644 --- a/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py +++ b/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py @@ -1,79 +1,120 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import RealignUnwarp def test_RealignUnwarp_inputs(): input_map = dict( - est_basis_func=dict(field='uweoptions.basfcn', ), - est_first_order_effects=dict(field='uweoptions.fot', ), - est_jacobian_deformations=dict(field='uweoptions.jm', ), + est_basis_func=dict( + field="uweoptions.basfcn", + ), + est_first_order_effects=dict( + field="uweoptions.fot", + ), + est_jacobian_deformations=dict( + field="uweoptions.jm", + ), est_num_of_iterations=dict( - field='uweoptions.noi', + field="uweoptions.noi", maxlen=1, minlen=1, usedefault=True, ), - est_re_est_mov_par=dict(field='uweoptions.rem', ), + est_re_est_mov_par=dict( + field="uweoptions.rem", + ), est_reg_factor=dict( - field='uweoptions.lambda', + field="uweoptions.lambda", maxlen=1, minlen=1, usedefault=True, ), - 
est_reg_order=dict(field='uweoptions.regorder', ), - est_second_order_effects=dict(field='uweoptions.sot', ), + est_reg_order=dict( + field="uweoptions.regorder", + ), + est_second_order_effects=dict( + field="uweoptions.sot", + ), est_taylor_expansion_point=dict( - field='uweoptions.expround', + field="uweoptions.expround", usedefault=True, ), - est_unwarp_fwhm=dict(field='uweoptions.uwfwhm', ), - fwhm=dict(field='eoptions.fwhm', ), + est_unwarp_fwhm=dict( + field="uweoptions.uwfwhm", + ), + fwhm=dict( + field="eoptions.fwhm", + ), in_files=dict( copyfile=True, - field='data.scans', + field="data.scans", mandatory=True, ), - interp=dict(field='eoptions.einterp', ), + interp=dict( + field="eoptions.einterp", + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), out_prefix=dict( - field='uwroptions.prefix', + field="uwroptions.prefix", usedefault=True, ), paths=dict(), phase_map=dict( copyfile=False, - field='data.pmscan', + extensions=None, + field="data.pmscan", + ), + quality=dict( + field="eoptions.quality", + ), + register_to_mean=dict( + field="eoptions.rtm", + ), + reslice_interp=dict( + field="uwroptions.rinterp", + ), + reslice_mask=dict( + field="uwroptions.mask", ), - quality=dict(field='eoptions.quality', ), - register_to_mean=dict(field='eoptions.rtm', ), - reslice_interp=dict(field='uwroptions.rinterp', ), - reslice_mask=dict(field='uwroptions.mask', ), reslice_which=dict( - field='uwroptions.uwwhich', + field="uwroptions.uwwhich", maxlen=2, minlen=2, usedefault=True, ), - reslice_wrap=dict(field='uwroptions.wrap', ), - separation=dict(field='eoptions.sep', ), + reslice_wrap=dict( + field="uwroptions.wrap", + ), + separation=dict( + field="eoptions.sep", + ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - weight_img=dict(field='eoptions.weight', ), - wrap=dict(field='eoptions.ewrap', ), + weight_img=dict( + extensions=None, + field="eoptions.weight", + ), + wrap=dict( + field="eoptions.ewrap", + ), ) inputs = RealignUnwarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RealignUnwarp_outputs(): output_map = dict( - mean_image=dict(), + mean_image=dict( + extensions=None, + ), modified_in_files=dict(), realigned_unwarped_files=dict(), realignment_parameters=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_Reslice.py b/nipype/interfaces/spm/tests/test_auto_Reslice.py index 81299fc748..c48d1a4b88 100644 --- a/nipype/interfaces/spm/tests/test_auto_Reslice.py +++ b/nipype/interfaces/spm/tests/test_auto_Reslice.py @@ -1,20 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import Reslice def test_Reslice_inputs(): input_map = dict( - in_file=dict(mandatory=True, ), - interp=dict(usedefault=True, ), + in_file=dict( + extensions=None, + mandatory=True, + ), + interp=dict( + usedefault=True, + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - out_file=dict(), + mfile=dict( + usedefault=True, + ), + out_file=dict( + extensions=None, + ), paths=dict(), - space_defining=dict(mandatory=True, ), + space_defining=dict( + extensions=None, + mandatory=True, + ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -23,8 +34,14 @@ def test_Reslice_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_Reslice_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Reslice.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_ResliceToReference.py b/nipype/interfaces/spm/tests/test_auto_ResliceToReference.py index 3e39ade181..8bbb1d1307 100644 --- a/nipype/interfaces/spm/tests/test_auto_ResliceToReference.py +++ b/nipype/interfaces/spm/tests/test_auto_ResliceToReference.py @@ -1,34 +1,48 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..utils import ResliceToReference def test_ResliceToReference_inputs(): input_map = dict( - bounding_box=dict(field='comp{2}.idbbvox.bb', ), + bounding_box=dict( + field="comp{2}.idbbvox.bb", + ), in_files=dict( - field='fnames', + field="fnames", mandatory=True, ), - interpolation=dict(field='interp', ), + interpolation=dict( + field="interp", + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), paths=dict(), - target=dict(field='comp{1}.id.space', ), + target=dict( + extensions=None, + field="comp{1}.id.space", + ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - voxel_sizes=dict(field='comp{2}.idbbvox.vox', ), + voxel_sizes=dict( + field="comp{2}.idbbvox.vox", + ), ) inputs = ResliceToReference.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ResliceToReference_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict( + out_files=dict(), + ) outputs = ResliceToReference.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_SPMCommand.py b/nipype/interfaces/spm/tests/test_auto_SPMCommand.py index 7c1ba5cbc2..0c0a8d7506 100644 --- a/nipype/interfaces/spm/tests/test_auto_SPMCommand.py +++ b/nipype/interfaces/spm/tests/test_auto_SPMCommand.py @@ -1,16 +1,17 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import SPMCommand def test_SPMCommand_inputs(): input_map = dict( matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), paths=dict(), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) diff --git a/nipype/interfaces/spm/tests/test_auto_Segment.py b/nipype/interfaces/spm/tests/test_auto_Segment.py index f6df46e1de..4859c76a00 100644 --- a/nipype/interfaces/spm/tests/test_auto_Segment.py +++ b/nipype/interfaces/spm/tests/test_auto_Segment.py @@ -1,61 +1,118 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Segment def test_Segment_inputs(): input_map = dict( - affine_regularization=dict(field='opts.regtype', ), - bias_fwhm=dict(field='opts.biasfwhm', ), - bias_regularization=dict(field='opts.biasreg', ), - clean_masks=dict(field='output.cleanup', ), - csf_output_type=dict(field='output.CSF', ), + affine_regularization=dict( + field="opts.regtype", + ), + bias_fwhm=dict( + field="opts.biasfwhm", + ), + bias_regularization=dict( + field="opts.biasreg", + ), + clean_masks=dict( + field="output.cleanup", + ), + csf_output_type=dict( + field="output.CSF", + ), data=dict( copyfile=False, - field='data', + field="data", mandatory=True, ), - 
gaussians_per_class=dict(field='opts.ngaus', ), - gm_output_type=dict(field='output.GM', ), - mask_image=dict(field='opts.msk', ), + gaussians_per_class=dict( + field="opts.ngaus", + ), + gm_output_type=dict( + field="output.GM", + ), + mask_image=dict( + extensions=None, + field="opts.msk", + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), paths=dict(), - sampling_distance=dict(field='opts.samp', ), - save_bias_corrected=dict(field='output.biascor', ), - tissue_prob_maps=dict(field='opts.tpm', ), + sampling_distance=dict( + field="opts.samp", + ), + save_bias_corrected=dict( + field="output.biascor", + ), + tissue_prob_maps=dict( + field="opts.tpm", + ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), - warp_frequency_cutoff=dict(field='opts.warpco', ), - warping_regularization=dict(field='opts.warpreg', ), - wm_output_type=dict(field='output.WM', ), + warp_frequency_cutoff=dict( + field="opts.warpco", + ), + warping_regularization=dict( + field="opts.warpreg", + ), + wm_output_type=dict( + field="output.WM", + ), ) inputs = Segment.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Segment_outputs(): output_map = dict( - bias_corrected_image=dict(), - inverse_transformation_mat=dict(), - modulated_csf_image=dict(), - modulated_gm_image=dict(), + bias_corrected_image=dict( + extensions=None, + ), + inverse_transformation_mat=dict( + extensions=None, + ), + modulated_csf_image=dict( + extensions=None, + ), + modulated_gm_image=dict( + extensions=None, + ), modulated_input_image=dict( - deprecated='0.10', - new_name='bias_corrected_image', - ), - modulated_wm_image=dict(), - native_csf_image=dict(), - native_gm_image=dict(), - native_wm_image=dict(), - normalized_csf_image=dict(), - normalized_gm_image=dict(), - normalized_wm_image=dict(), - transformation_mat=dict(), + deprecated="0.10", + extensions=None, + new_name="bias_corrected_image", + ), + modulated_wm_image=dict( + extensions=None, + ), + native_csf_image=dict( + extensions=None, + ), + native_gm_image=dict( + extensions=None, + ), + native_wm_image=dict( + extensions=None, + ), + normalized_csf_image=dict( + extensions=None, + ), + normalized_gm_image=dict( + extensions=None, + ), + normalized_wm_image=dict( + extensions=None, + ), + transformation_mat=dict( + extensions=None, + ), ) outputs = Segment.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_SliceTiming.py b/nipype/interfaces/spm/tests/test_auto_SliceTiming.py index 85fb1bc4e4..85ddf03c52 100644 --- a/nipype/interfaces/spm/tests/test_auto_SliceTiming.py +++ b/nipype/interfaces/spm/tests/test_auto_SliceTiming.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import SliceTiming @@ -7,39 +6,41 @@ def test_SliceTiming_inputs(): input_map = dict( in_files=dict( copyfile=False, - field='scans', + field="scans", mandatory=True, ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), num_slices=dict( - field='nslices', + field="nslices", mandatory=True, ), out_prefix=dict( - field='prefix', + field="prefix", usedefault=True, ), paths=dict(), ref_slice=dict( - field='refslice', + field="refslice", mandatory=True, ), slice_order=dict( - field='so', + field="so", mandatory=True, ), time_acquisition=dict( - field='ta', + field="ta", mandatory=True, ), 
time_repetition=dict( - field='tr', + field="tr", mandatory=True, ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -48,8 +49,12 @@ def test_SliceTiming_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SliceTiming_outputs(): - output_map = dict(timecorrected_files=dict(), ) + output_map = dict( + timecorrected_files=dict(), + ) outputs = SliceTiming.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_Smooth.py b/nipype/interfaces/spm/tests/test_auto_Smooth.py index eea60ed619..5ed7aa57c0 100644 --- a/nipype/interfaces/spm/tests/test_auto_Smooth.py +++ b/nipype/interfaces/spm/tests/test_auto_Smooth.py @@ -1,28 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import Smooth def test_Smooth_inputs(): input_map = dict( - data_type=dict(field='dtype', ), - fwhm=dict(field='fwhm', ), - implicit_masking=dict(field='im', ), + data_type=dict( + field="dtype", + ), + fwhm=dict( + field="fwhm", + ), + implicit_masking=dict( + field="im", + ), in_files=dict( copyfile=False, - field='data', + field="data", mandatory=True, ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), out_prefix=dict( - field='prefix', + field="prefix", usedefault=True, ), paths=dict(), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -31,8 +38,12 @@ def test_Smooth_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Smooth_outputs(): - output_map = dict(smoothed_files=dict(), ) + output_map = dict( + smoothed_files=dict(), + ) outputs = Smooth.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_Threshold.py b/nipype/interfaces/spm/tests/test_auto_Threshold.py index 078adb3a1b..128ab0586c 100644 --- a/nipype/interfaces/spm/tests/test_auto_Threshold.py +++ b/nipype/interfaces/spm/tests/test_auto_Threshold.py @@ -1,32 +1,54 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import Threshold def test_Threshold_inputs(): input_map = dict( - contrast_index=dict(mandatory=True, ), - extent_fdr_p_threshold=dict(usedefault=True, ), - extent_threshold=dict(usedefault=True, ), - force_activation=dict(usedefault=True, ), - height_threshold=dict(usedefault=True, ), - height_threshold_type=dict(usedefault=True, ), + contrast_index=dict( + mandatory=True, + ), + extent_fdr_p_threshold=dict( + usedefault=True, + ), + extent_threshold=dict( + usedefault=True, + ), + force_activation=dict( + usedefault=True, + ), + height_threshold=dict( + usedefault=True, + ), + height_threshold_type=dict( + usedefault=True, + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), paths=dict(), spm_mat_file=dict( copyfile=True, + extensions=None, mandatory=True, ), stat_image=dict( copyfile=False, + extensions=None, mandatory=True, ), - use_fwe_correction=dict(usedefault=True, ), + use_fwe_correction=dict( + usedefault=True, + ), use_mcr=dict(), - use_topo_fdr=dict(usedefault=True, ), + use_topo_fdr=dict( + usedefault=True, + ), use_v8struct=dict( - min_ver='8', + min_ver="8", + usedefault=True, + ), + 
use_vox_fdr_correction=dict( usedefault=True, ), ) @@ -35,14 +57,20 @@ def test_Threshold_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Threshold_outputs(): output_map = dict( activation_forced=dict(), cluster_forming_thr=dict(), n_clusters=dict(), - pre_topo_fdr_map=dict(), + pre_topo_fdr_map=dict( + extensions=None, + ), pre_topo_n_clusters=dict(), - thresholded_map=dict(), + thresholded_map=dict( + extensions=None, + ), ) outputs = Threshold.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_ThresholdStatistics.py b/nipype/interfaces/spm/tests/test_auto_ThresholdStatistics.py index ccdc441e04..89c5a42e57 100644 --- a/nipype/interfaces/spm/tests/test_auto_ThresholdStatistics.py +++ b/nipype/interfaces/spm/tests/test_auto_ThresholdStatistics.py @@ -1,27 +1,36 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import ThresholdStatistics def test_ThresholdStatistics_inputs(): input_map = dict( - contrast_index=dict(mandatory=True, ), - extent_threshold=dict(usedefault=True, ), - height_threshold=dict(mandatory=True, ), + contrast_index=dict( + mandatory=True, + ), + extent_threshold=dict( + usedefault=True, + ), + height_threshold=dict( + mandatory=True, + ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), paths=dict(), spm_mat_file=dict( copyfile=True, + extensions=None, mandatory=True, ), stat_image=dict( copyfile=False, + extensions=None, mandatory=True, ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -30,6 +39,8 @@ def test_ThresholdStatistics_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ThresholdStatistics_outputs(): output_map = dict( clusterwise_P_FDR=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py b/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py index 4dfbc12570..fd03e6c867 100644 --- a/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py @@ -1,56 +1,74 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..model import TwoSampleTTestDesign def test_TwoSampleTTestDesign_inputs(): input_map = dict( - covariates=dict(field='cov', ), - dependent=dict(field='des.t2.dept', ), - explicit_mask_file=dict(field='masking.em', ), + covariates=dict( + field="cov", + ), + dependent=dict( + field="des.t2.dept", + ), + explicit_mask_file=dict( + extensions=None, + field="masking.em", + ), global_calc_mean=dict( - field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + field="globalc.g_mean", + xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( - field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + field="globalc.g_omit", + xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], + ), + global_normalization=dict( + field="globalm.glonorm", ), - global_normalization=dict(field='globalm.glonorm', ), group1_files=dict( - field='des.t2.scans1', + field="des.t2.scans1", 
mandatory=True, ), group2_files=dict( - field='des.t2.scans2', + field="des.t2.scans2", mandatory=True, ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - no_grand_mean_scaling=dict(field='globalm.gmsca.gmsca_no', ), + mfile=dict( + usedefault=True, + ), + no_grand_mean_scaling=dict( + field="globalm.gmsca.gmsca_no", + ), paths=dict(), - spm_mat_dir=dict(field='dir', ), + spm_mat_dir=dict( + field="dir", + ), threshold_mask_absolute=dict( - field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + field="masking.tm.tma.athresh", + xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( - field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + field="masking.tm.tm_none", + xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( - field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + field="masking.tm.tmr.rthresh", + xor=["threshold_mask_absolute", "threshold_mask_none"], + ), + unequal_variance=dict( + field="des.t2.variance", + ), + use_implicit_threshold=dict( + field="masking.im", ), - unequal_variance=dict(field='des.t2.variance', ), - use_implicit_threshold=dict(field='masking.im', ), use_mcr=dict(), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), ) @@ -59,8 +77,14 @@ def test_TwoSampleTTestDesign_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TwoSampleTTestDesign_outputs(): - output_map = dict(spm_mat_file=dict(), ) + output_map = dict( + spm_mat_file=dict( + extensions=None, + ), + ) outputs = TwoSampleTTestDesign.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_VBMSegment.py b/nipype/interfaces/spm/tests/test_auto_VBMSegment.py index 6aaac2b489..4bc4664c27 100644 --- a/nipype/interfaces/spm/tests/test_auto_VBMSegment.py +++ b/nipype/interfaces/spm/tests/test_auto_VBMSegment.py @@ -1,138 +1,148 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..preprocess import VBMSegment def test_VBMSegment_inputs(): input_map = dict( bias_corrected_affine=dict( - field='estwrite.output.bias.affine', + field="estwrite.output.bias.affine", usedefault=True, ), bias_corrected_native=dict( - field='estwrite.output.bias.native', + field="estwrite.output.bias.native", usedefault=True, ), bias_corrected_normalized=dict( - field='estwrite.output.bias.warped', + field="estwrite.output.bias.warped", usedefault=True, ), bias_fwhm=dict( - field='estwrite.opts.biasfwhm', + field="estwrite.opts.biasfwhm", usedefault=True, ), bias_regularization=dict( - field='estwrite.opts.biasreg', + field="estwrite.opts.biasreg", usedefault=True, ), cleanup_partitions=dict( - field='estwrite.extopts.cleanup', + field="estwrite.extopts.cleanup", usedefault=True, ), csf_dartel=dict( - field='estwrite.output.CSF.dartel', + field="estwrite.output.CSF.dartel", usedefault=True, ), csf_modulated_normalized=dict( - field='estwrite.output.CSF.modulated', + field="estwrite.output.CSF.modulated", usedefault=True, ), csf_native=dict( - field='estwrite.output.CSF.native', + field="estwrite.output.CSF.native", usedefault=True, ), csf_normalized=dict( - field='estwrite.output.CSF.warped', + field="estwrite.output.CSF.warped", usedefault=True, ), dartel_template=dict( - 
field='estwrite.extopts.dartelwarp.normhigh.darteltpm', ), + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="estwrite.extopts.dartelwarp.normhigh.darteltpm", + ), deformation_field=dict( - field='estwrite.output.warps', + field="estwrite.output.warps", usedefault=True, ), display_results=dict( - field='estwrite.extopts.print', + field="estwrite.extopts.print", + usedefault=True, + ), + gaussians_per_class=dict( usedefault=True, ), - gaussians_per_class=dict(usedefault=True, ), gm_dartel=dict( - field='estwrite.output.GM.dartel', + field="estwrite.output.GM.dartel", usedefault=True, ), gm_modulated_normalized=dict( - field='estwrite.output.GM.modulated', + field="estwrite.output.GM.modulated", usedefault=True, ), gm_native=dict( - field='estwrite.output.GM.native', + field="estwrite.output.GM.native", usedefault=True, ), gm_normalized=dict( - field='estwrite.output.GM.warped', + field="estwrite.output.GM.warped", usedefault=True, ), in_files=dict( copyfile=False, - field='estwrite.data', + field="estwrite.data", mandatory=True, ), jacobian_determinant=dict( - field='estwrite.jacobian.warped', + field="estwrite.jacobian.warped", usedefault=True, ), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict( + usedefault=True, + ), mrf_weighting=dict( - field='estwrite.extopts.mrf', + field="estwrite.extopts.mrf", usedefault=True, ), paths=dict(), pve_label_dartel=dict( - field='estwrite.output.label.dartel', + field="estwrite.output.label.dartel", usedefault=True, ), pve_label_native=dict( - field='estwrite.output.label.native', + field="estwrite.output.label.native", usedefault=True, ), pve_label_normalized=dict( - field='estwrite.output.label.warped', + field="estwrite.output.label.warped", usedefault=True, ), sampling_distance=dict( - field='estwrite.opts.samp', + field="estwrite.opts.samp", usedefault=True, ), - spatial_normalization=dict(usedefault=True, ), - tissues=dict(field='estwrite.tpm', ), + spatial_normalization=dict( + usedefault=True, + ), + tissues=dict( + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="estwrite.tpm", + ), use_mcr=dict(), use_sanlm_denoising_filter=dict( - field='estwrite.extopts.sanlm', + field="estwrite.extopts.sanlm", usedefault=True, ), use_v8struct=dict( - min_ver='8', + min_ver="8", usedefault=True, ), warping_regularization=dict( - field='estwrite.opts.warpreg', + field="estwrite.opts.warpreg", usedefault=True, ), wm_dartel=dict( - field='estwrite.output.WM.dartel', + field="estwrite.output.WM.dartel", usedefault=True, ), wm_modulated_normalized=dict( - field='estwrite.output.WM.modulated', + field="estwrite.output.WM.modulated", usedefault=True, ), wm_native=dict( - field='estwrite.output.WM.native', + field="estwrite.output.WM.native", usedefault=True, ), wm_normalized=dict( - field='estwrite.output.WM.warped', + field="estwrite.output.WM.warped", usedefault=True, ), ) @@ -141,6 +151,8 @@ def test_VBMSegment_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VBMSegment_outputs(): output_map = dict( bias_corrected_images=dict(), diff --git a/nipype/interfaces/spm/tests/test_base.py b/nipype/interfaces/spm/tests/test_base.py index a8a23e8def..1f653c0a86 100644 --- a/nipype/interfaces/spm/tests/test_base.py +++ b/nipype/interfaces/spm/tests/test_base.py @@ -1,9 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from 
__future__ import unicode_literals -from builtins import str, bytes - import os import numpy as np @@ -16,7 +12,7 @@ from nipype.interfaces.spm.base import SPMCommandInputSpec from nipype.interfaces.base import traits -mlab.MatlabCommand.set_default_matlab_cmd(os.getenv('MATLABCMD', 'matlab')) +mlab.MatlabCommand.set_default_matlab_cmd(os.getenv("MATLABCMD", "matlab")) def test_scan_for_fnames(create_files_in_directory): @@ -34,7 +30,7 @@ def test_spm_path(): spm_path = spm.Info.path() if spm_path is not None: assert isinstance(spm_path, (str, bytes)) - assert 'spm' in spm_path.lower() + assert "spm" in spm_path.lower() def test_use_mfile(): @@ -52,7 +48,7 @@ class TestClass(spm.SPMCommand): pass # test without FORCE_SPMMCR, SPMMCRCMD set - for varname in ['FORCE_SPMMCR', 'SPMMCRCMD']: + for varname in ["FORCE_SPMMCR", "SPMMCRCMD"]: try: del os.environ[varname] except KeyError: @@ -61,15 +57,15 @@ class TestClass(spm.SPMCommand): assert dc._use_mcr is None assert dc._matlab_cmd is None # test with only FORCE_SPMMCR set - os.environ['FORCE_SPMMCR'] = '1' + os.environ["FORCE_SPMMCR"] = "1" dc = TestClass() assert dc._use_mcr assert dc._matlab_cmd is None # test with both, FORCE_SPMMCR and SPMMCRCMD set - os.environ['SPMMCRCMD'] = 'spmcmd' + os.environ["SPMMCRCMD"] = "spmcmd" dc = TestClass() assert dc._use_mcr - assert dc._matlab_cmd == 'spmcmd' + assert dc._matlab_cmd == "spmcmd" # restore environment os.environ.clear() os.environ.update(saved_env) @@ -81,19 +77,19 @@ class TestClass(spm.SPMCommand): input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class - dc.inputs.matlab_cmd = 'foo' - assert dc.mlab._cmd == 'foo' + dc.inputs.matlab_cmd = "foo" + assert dc.mlab._cmd == "foo" def test_cmd_update2(): class TestClass(spm.SPMCommand): - _jobtype = 'jobtype' - _jobname = 'jobname' + _jobtype = "jobtype" + _jobname = "jobname" input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class - assert dc.jobtype == 'jobtype' - assert dc.jobname == 'jobname' + assert dc.jobtype == "jobtype" + assert dc.jobname == "jobname" def test_reformat_dict_for_savemat(): @@ -101,8 +97,8 @@ class TestClass(spm.SPMCommand): input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class - out = dc._reformat_dict_for_savemat({'a': {'b': {'c': []}}}) - assert out == [{'a': [{'b': [{'c': []}]}]}] + out = dc._reformat_dict_for_savemat({"a": {"b": {"c": []}}}) + assert out == [{"a": [{"b": [{"c": []}]}]}] def test_generate_job(create_files_in_directory): @@ -111,58 +107,60 @@ class TestClass(spm.SPMCommand): dc = TestClass() # dc = derived_class out = dc._generate_job() - assert out == '' + assert out == "" # struct array - contents = {'contents': [1, 2, 3, 4]} + contents = {"contents": [1, 2, 3, 4]} out = dc._generate_job(contents=contents) - assert out == ('.contents(1) = 1;\n.contents(2) = 2;' - '\n.contents(3) = 3;\n.contents(4) = 4;\n') + assert out == ( + ".contents(1) = 1;\n.contents(2) = 2;" + "\n.contents(3) = 3;\n.contents(4) = 4;\n" + ) # cell array of strings filelist, outdir = create_files_in_directory names = spm.scans_for_fnames(filelist, keep4d=True) - contents = {'files': names} - out = dc._generate_job(prefix='test', contents=contents) + contents = {"files": names} + out = dc._generate_job(prefix="test", contents=contents) assert out == "test.files = {...\n'a.nii';...\n'b.nii';...\n};\n" # string assignment - contents = 'foo' - out = dc._generate_job(prefix='test', contents=contents) + contents = "foo" + out = dc._generate_job(prefix="test", 
contents=contents) assert out == "test = 'foo';\n" # cell array of vectors - contents = {'onsets': np.array((1, ), dtype=object)} - contents['onsets'][0] = [1, 2, 3, 4] - out = dc._generate_job(prefix='test', contents=contents) - assert out == 'test.onsets = {...\n[1, 2, 3, 4];...\n};\n' + contents = {"onsets": np.array((1,), dtype=object)} + contents["onsets"][0] = [1, 2, 3, 4] + out = dc._generate_job(prefix="test", contents=contents) + assert out == "test.onsets = {...\n[1, 2, 3, 4];...\n};\n" def test_bool(): class TestClassInputSpec(SPMCommandInputSpec): - test_in = include_intercept = traits.Bool(field='testfield') + test_in = include_intercept = traits.Bool(field="testfield") class TestClass(spm.SPMCommand): input_spec = TestClassInputSpec - _jobtype = 'jobtype' - _jobname = 'jobname' + _jobtype = "jobtype" + _jobname = "jobname" dc = TestClass() # dc = derived_class dc.inputs.test_in = True out = dc._make_matlab_command(dc._parse_inputs()) - assert out.find('jobs{1}.spm.jobtype.jobname.testfield = 1;') > 0, 1 + assert out.find("jobs{1}.spm.jobtype.jobname.testfield = 1;") > 0, 1 dc.inputs.use_v8struct = False out = dc._make_matlab_command(dc._parse_inputs()) - assert out.find('jobs{1}.jobtype{1}.jobname{1}.testfield = 1;') > 0, 1 + assert out.find("jobs{1}.jobtype{1}.jobname{1}.testfield = 1;") > 0, 1 def test_make_matlab_command(create_files_in_directory): class TestClass(spm.SPMCommand): - _jobtype = 'jobtype' - _jobname = 'jobname' + _jobtype = "jobtype" + _jobname = "jobname" input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class filelist, outdir = create_files_in_directory - contents = {'contents': [1, 2, 3, 4]} + contents = {"contents": [1, 2, 3, 4]} script = dc._make_matlab_command([contents]) - assert 'jobs{1}.spm.jobtype.jobname.contents(3) = 3;' in script + assert "jobs{1}.spm.jobtype.jobname.contents(3) = 3;" in script dc.inputs.use_v8struct = False script = dc._make_matlab_command([contents]) - assert 'jobs{1}.jobtype{1}.jobname{1}.contents(3) = 3;' in script + assert "jobs{1}.jobtype{1}.jobname{1}.contents(3) = 3;" in script diff --git a/nipype/interfaces/spm/tests/test_model.py b/nipype/interfaces/spm/tests/test_model.py index a9cb957944..fd9a0236d7 100644 --- a/nipype/interfaces/spm/tests/test_model.py +++ b/nipype/interfaces/spm/tests/test_model.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -6,39 +5,39 @@ import nipype.interfaces.spm.model as spm import nipype.interfaces.matlab as mlab -mlab.MatlabCommand.set_default_matlab_cmd(os.getenv('MATLABCMD', 'matlab')) +mlab.MatlabCommand.set_default_matlab_cmd(os.getenv("MATLABCMD", "matlab")) def test_level1design(): - assert spm.Level1Design._jobtype == 'stats' - assert spm.Level1Design._jobname == 'fmri_spec' + assert spm.Level1Design._jobtype == "stats" + assert spm.Level1Design._jobname == "fmri_spec" def test_estimatemodel(): - assert spm.EstimateModel._jobtype == 'stats' - assert spm.EstimateModel._jobname == 'fmri_est' + assert spm.EstimateModel._jobtype == "stats" + assert spm.EstimateModel._jobname == "fmri_est" def test_estimatecontrast(): - assert spm.EstimateContrast._jobtype == 'stats' - assert spm.EstimateContrast._jobname == 'con' + assert spm.EstimateContrast._jobtype == "stats" + assert spm.EstimateContrast._jobname == "con" def test_threshold(): - assert spm.Threshold._jobtype == 'basetype' - assert spm.Threshold._jobname == 'basename' + assert 
spm.Threshold._jobtype == "basetype" + assert spm.Threshold._jobname == "basename" def test_factorialdesign(): - assert spm.FactorialDesign._jobtype == 'stats' - assert spm.FactorialDesign._jobname == 'factorial_design' + assert spm.FactorialDesign._jobtype == "stats" + assert spm.FactorialDesign._jobname == "factorial_design" def test_onesamplettestdesign(): - assert spm.OneSampleTTestDesign._jobtype == 'stats' - assert spm.OneSampleTTestDesign._jobname == 'factorial_design' + assert spm.OneSampleTTestDesign._jobtype == "stats" + assert spm.OneSampleTTestDesign._jobname == "factorial_design" def test_twosamplettestdesign(): - assert spm.TwoSampleTTestDesign._jobtype == 'stats' - assert spm.TwoSampleTTestDesign._jobname == 'factorial_design' + assert spm.TwoSampleTTestDesign._jobtype == "stats" + assert spm.TwoSampleTTestDesign._jobname == "factorial_design" diff --git a/nipype/interfaces/spm/tests/test_preprocess.py b/nipype/interfaces/spm/tests/test_preprocess.py index 2b70b7bb54..74608749ec 100644 --- a/nipype/interfaces/spm/tests/test_preprocess.py +++ b/nipype/interfaces/spm/tests/test_preprocess.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -10,107 +9,106 @@ from nipype.interfaces.spm import no_spm import nipype.interfaces.matlab as mlab -mlab.MatlabCommand.set_default_matlab_cmd(os.getenv('MATLABCMD', 'matlab')) +mlab.MatlabCommand.set_default_matlab_cmd(os.getenv("MATLABCMD", "matlab")) def test_slicetiming(): - assert spm.SliceTiming._jobtype == 'temporal' - assert spm.SliceTiming._jobname == 'st' + assert spm.SliceTiming._jobtype == "temporal" + assert spm.SliceTiming._jobname == "st" def test_slicetiming_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory st = spm.SliceTiming(in_files=filelist[0]) - assert st._list_outputs()['timecorrected_files'][0][0] == 'a' + assert st._list_outputs()["timecorrected_files"][0][0] == "a" def test_realign(): - assert spm.Realign._jobtype == 'spatial' - assert spm.Realign._jobname == 'realign' - assert spm.Realign().inputs.jobtype == 'estwrite' + assert spm.Realign._jobtype == "spatial" + assert spm.Realign._jobname == "realign" + assert spm.Realign().inputs.jobtype == "estwrite" def test_realign_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory rlgn = spm.Realign(in_files=filelist[0]) - assert rlgn._list_outputs()['realignment_parameters'][0].startswith('rp_') - assert rlgn._list_outputs()['realigned_files'][0].startswith('r') - assert rlgn._list_outputs()['mean_image'].startswith('mean') + assert rlgn._list_outputs()["realignment_parameters"][0].startswith("rp_") + assert rlgn._list_outputs()["realigned_files"][0].startswith("r") + assert rlgn._list_outputs()["mean_image"].startswith("mean") def test_coregister(): - assert spm.Coregister._jobtype == 'spatial' - assert spm.Coregister._jobname == 'coreg' - assert spm.Coregister().inputs.jobtype == 'estwrite' + assert spm.Coregister._jobtype == "spatial" + assert spm.Coregister._jobname == "coreg" + assert spm.Coregister().inputs.jobtype == "estwrite" def test_coregister_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory coreg = spm.Coregister(source=filelist[0]) - assert coreg._list_outputs()['coregistered_source'][0].startswith('r') + assert coreg._list_outputs()["coregistered_source"][0].startswith("r") coreg = spm.Coregister(source=filelist[0], 
apply_to_files=filelist[1]) - assert coreg._list_outputs()['coregistered_files'][0].startswith('r') + assert coreg._list_outputs()["coregistered_files"][0].startswith("r") def test_normalize(): - assert spm.Normalize._jobtype == 'spatial' - assert spm.Normalize._jobname == 'normalise' - assert spm.Normalize().inputs.jobtype == 'estwrite' + assert spm.Normalize._jobtype == "spatial" + assert spm.Normalize._jobname == "normalise" + assert spm.Normalize().inputs.jobtype == "estwrite" def test_normalize_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory norm = spm.Normalize(source=filelist[0]) - assert norm._list_outputs()['normalized_source'][0].startswith('w') + assert norm._list_outputs()["normalized_source"][0].startswith("w") norm = spm.Normalize(source=filelist[0], apply_to_files=filelist[1]) - assert norm._list_outputs()['normalized_files'][0].startswith('w') + assert norm._list_outputs()["normalized_files"][0].startswith("w") def test_normalize12(): - assert spm.Normalize12._jobtype == 'spatial' - assert spm.Normalize12._jobname == 'normalise' - assert spm.Normalize12().inputs.jobtype == 'estwrite' + assert spm.Normalize12._jobtype == "spatial" + assert spm.Normalize12._jobname == "normalise" + assert spm.Normalize12().inputs.jobtype == "estwrite" def test_normalize12_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory norm12 = spm.Normalize12(image_to_align=filelist[0]) - assert norm12._list_outputs()['normalized_image'][0].startswith('w') - norm12 = spm.Normalize12( - image_to_align=filelist[0], apply_to_files=filelist[1]) - assert norm12._list_outputs()['normalized_files'][0].startswith('w') + assert norm12._list_outputs()["normalized_image"][0].startswith("w") + norm12 = spm.Normalize12(image_to_align=filelist[0], apply_to_files=filelist[1]) + assert norm12._list_outputs()["normalized_files"][0].startswith("w") @pytest.mark.skipif(no_spm(), reason="spm is not installed") def test_segment(): if spm.Info.name() == "SPM12": - assert spm.Segment()._jobtype == 'tools' - assert spm.Segment()._jobname == 'oldseg' + assert spm.Segment()._jobtype == "tools" + assert spm.Segment()._jobname == "oldseg" else: - assert spm.Segment()._jobtype == 'spatial' - assert spm.Segment()._jobname == 'preproc' + assert spm.Segment()._jobtype == "spatial" + assert spm.Segment()._jobname == "preproc" @pytest.mark.skipif(no_spm(), reason="spm is not installed") def test_newsegment(): if spm.Info.name() == "SPM12": - assert spm.NewSegment()._jobtype == 'spatial' - assert spm.NewSegment()._jobname == 'preproc' + assert spm.NewSegment()._jobtype == "spatial" + assert spm.NewSegment()._jobname == "preproc" else: - assert spm.NewSegment()._jobtype == 'tools' - assert spm.NewSegment()._jobname == 'preproc8' + assert spm.NewSegment()._jobtype == "tools" + assert spm.NewSegment()._jobname == "preproc8" def test_smooth(): - assert spm.Smooth._jobtype == 'spatial' - assert spm.Smooth._jobname == 'smooth' + assert spm.Smooth._jobtype == "spatial" + assert spm.Smooth._jobname == "smooth" def test_dartel(): - assert spm.DARTEL._jobtype == 'tools' - assert spm.DARTEL._jobname == 'dartel' + assert spm.DARTEL._jobtype == "tools" + assert spm.DARTEL._jobname == "dartel" def test_dartelnorm2mni(): - assert spm.DARTELNorm2MNI._jobtype == 'tools' - assert spm.DARTELNorm2MNI._jobname == 'dartel' + assert spm.DARTELNorm2MNI._jobtype == "tools" + assert spm.DARTELNorm2MNI._jobname == "dartel" diff --git a/nipype/interfaces/spm/tests/test_utils.py 
b/nipype/interfaces/spm/tests/test_utils.py index a574fb90a7..83a9b1e43e 100644 --- a/nipype/interfaces/spm/tests/test_utils.py +++ b/nipype/interfaces/spm/tests/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -11,73 +10,73 @@ def test_coreg(): - moving = example_data(infile='functional.nii') - target = example_data(infile='T1.nii') - mat = example_data(infile='trans.mat') - coreg = spmu.CalcCoregAffine(matlab_cmd='mymatlab') + moving = example_data(infile="functional.nii") + target = example_data(infile="T1.nii") + mat = example_data(infile="trans.mat") + coreg = spmu.CalcCoregAffine(matlab_cmd="mymatlab") coreg.inputs.target = target - assert coreg.inputs.matlab_cmd == 'mymatlab' + assert coreg.inputs.matlab_cmd == "mymatlab" coreg.inputs.moving = moving assert not isdefined(coreg.inputs.mat) pth, mov, _ = split_filename(moving) _, tgt, _ = split_filename(target) - mat = os.path.join(pth, '%s_to_%s.mat' % (mov, tgt)) - invmat = fname_presuffix(mat, prefix='inverse_') - scrpt = coreg._make_matlab_command(None) + mat = os.path.join(pth, f"{mov}_to_{tgt}.mat") + invmat = fname_presuffix(mat, prefix="inverse_") + script = coreg._make_matlab_command(None) assert coreg.inputs.mat == mat assert coreg.inputs.invmat == invmat def test_apply_transform(): - moving = example_data(infile='functional.nii') - mat = example_data(infile='trans.mat') - applymat = spmu.ApplyTransform(matlab_cmd='mymatlab') - assert applymat.inputs.matlab_cmd == 'mymatlab' + moving = example_data(infile="functional.nii") + mat = example_data(infile="trans.mat") + applymat = spmu.ApplyTransform(matlab_cmd="mymatlab") + assert applymat.inputs.matlab_cmd == "mymatlab" applymat.inputs.in_file = moving applymat.inputs.mat = mat - scrpt = applymat._make_matlab_command(None) - expected = '[p n e v] = spm_fileparts(V.fname);' - assert expected in scrpt - expected = 'V.mat = transform.M * V.mat;' - assert expected in scrpt + script = applymat._make_matlab_command(None) + expected = "[p n e v] = spm_fileparts(V.fname);" + assert expected in script + expected = "V.mat = transform.M * V.mat;" + assert expected in script def test_reslice(): - moving = example_data(infile='functional.nii') - space_defining = example_data(infile='T1.nii') - reslice = spmu.Reslice(matlab_cmd='mymatlab_version') - assert reslice.inputs.matlab_cmd == 'mymatlab_version' + moving = example_data(infile="functional.nii") + space_defining = example_data(infile="T1.nii") + reslice = spmu.Reslice(matlab_cmd="mymatlab_version") + assert reslice.inputs.matlab_cmd == "mymatlab_version" reslice.inputs.in_file = moving reslice.inputs.space_defining = space_defining assert reslice.inputs.interp == 0 with pytest.raises(TraitError): - reslice.inputs.trait_set(interp='nearest') + reslice.inputs.trait_set(interp="nearest") with pytest.raises(TraitError): reslice.inputs.trait_set(interp=10) reslice.inputs.interp = 1 script = reslice._make_matlab_command(None) - outfile = fname_presuffix(moving, prefix='r') + outfile = fname_presuffix(moving, prefix="r") assert reslice.inputs.out_file == outfile - expected = '\nflags.mean=0;\nflags.which=1;\nflags.mask=0;' - assert expected in script.replace(' ', '') - expected_interp = 'flags.interp = 1;\n' + expected = "\nflags.mean=0;\nflags.which=1;\nflags.mask=0;" + assert expected in script.replace(" ", "") + expected_interp = "flags.interp = 1;\n" assert expected_interp in script - assert 
'spm_reslice(invols, flags);' in script
+    assert "spm_reslice(invols, flags);" in script


 def test_dicom_import():
-    dicom = example_data(infile='dicomdir/123456-1-1.dcm')
-    di = spmu.DicomImport(matlab_cmd='mymatlab')
-    assert di.inputs.matlab_cmd == 'mymatlab'
-    assert di.inputs.output_dir_struct == 'flat'
-    assert di.inputs.output_dir == './converted_dicom'
-    assert di.inputs.format == 'nii'
+    dicom = example_data(infile="dicomdir/123456-1-1.dcm")
+    di = spmu.DicomImport(matlab_cmd="mymatlab")
+    assert di.inputs.matlab_cmd == "mymatlab"
+    assert di.inputs.output_dir_struct == "flat"
+    assert di.inputs.output_dir == "./converted_dicom"
+    assert di.inputs.format == "nii"
     assert not di.inputs.icedims
     with pytest.raises(TraitError):
-        di.inputs.trait_set(output_dir_struct='wrong')
+        di.inputs.trait_set(output_dir_struct="wrong")
     with pytest.raises(TraitError):
-        di.inputs.trait_set(format='FAT')
+        di.inputs.trait_set(format="FAT")
     with pytest.raises(TraitError):
-        di.inputs.trait_set(in_files=['does_sfd_not_32fn_exist.dcm'])
+        di.inputs.trait_set(in_files=["does_sfd_not_32fn_exist.dcm"])
     di.inputs.in_files = [dicom]
     assert di.inputs.in_files == [dicom]
diff --git a/nipype/interfaces/spm/utils.py b/nipype/interfaces/spm/utils.py
index 275f0781a9..76944893e1 100644
--- a/nipype/interfaces/spm/utils.py
+++ b/nipype/interfaces/spm/utils.py
@@ -1,18 +1,17 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)
 import os

 import numpy as np

-from ...utils.filemanip import (split_filename, fname_presuffix,
-                                ensure_list, simplify_list)
-from ..base import (TraitedSpec, isdefined, File, traits, OutputMultiPath,
-                    InputMultiPath)
-from .base import (SPMCommandInputSpec, SPMCommand, scans_for_fnames,
-                   scans_for_fname)
+from ...utils.filemanip import (
+    split_filename,
+    fname_presuffix,
+    ensure_list,
+    simplify_list,
+)
+from ..base import TraitedSpec, isdefined, File, traits, OutputMultiPath, InputMultiPath
+from .base import SPMCommandInputSpec, SPMCommand, scans_for_fnames, scans_for_fname


 class Analyze2niiInputSpec(SPMCommandInputSpec):
@@ -24,7 +23,6 @@ class Analyze2niiOutputSpec(SPMCommandInputSpec):


 class Analyze2nii(SPMCommand):
-
     input_spec = Analyze2niiInputSpec
     output_spec = Analyze2niiOutputSpec

@@ -40,32 +38,31 @@ def _make_matlab_command(self, _):

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['nifti_file'] = self.output_name
+        outputs["nifti_file"] = self.output_name
         return outputs


 class CalcCoregAffineInputSpec(SPMCommandInputSpec):
     target = File(
-        exists=True,
-        mandatory=True,
-        desc='target for generating affine transform')
+        exists=True, mandatory=True, desc="target for generating affine transform"
+    )
     moving = File(
         exists=True,
         mandatory=True,
         copyfile=False,
-        desc=('volume transform can be applied to register with '
-              'target'))
-    mat = File(desc='Filename used to store affine matrix')
-    invmat = File(desc='Filename used to store inverse affine matrix')
+        desc=("volume that the transform can be applied to, to register with target"),
+    )
+    mat = File(desc="Filename used to store affine matrix")
+    invmat = File(desc="Filename used to store inverse affine matrix")


 class CalcCoregAffineOutputSpec(TraitedSpec):
-    mat = File(exists=True, desc='Matlab file holding transform')
-    invmat = File(desc='Matlab file holding inverse transform')
+    mat = File(exists=True, desc="Matlab file holding transform")
+    invmat = File(desc="Matlab file holding inverse transform")
invmat = File(desc="Matlab file holding inverse transform") class CalcCoregAffine(SPMCommand): - """ Uses SPM (spm_coreg) to calculate the transform mapping + """Uses SPM (spm_coreg) to calculate the transform mapping moving to target. Saves Transform in mat (matlab binary file) Also saves inverse transform @@ -91,15 +88,15 @@ class CalcCoregAffine(SPMCommand): output_spec = CalcCoregAffineOutputSpec def _make_inv_file(self): - """ makes filename to hold inverse transform if not specified""" - invmat = fname_presuffix(self.inputs.mat, prefix='inverse_') + """makes filename to hold inverse transform if not specified""" + invmat = fname_presuffix(self.inputs.mat, prefix="inverse_") return invmat def _make_mat_file(self): - """ makes name for matfile if doesn exist""" + """makes name for matfile if doesn exist""" pth, mv, _ = split_filename(self.inputs.moving) _, tgt, _ = split_filename(self.inputs.target) - mat = os.path.join(pth, '%s_to_%s.mat' % (mv, tgt)) + mat = os.path.join(pth, f"{mv}_to_{tgt}.mat") return mat def _make_matlab_command(self, _): @@ -109,23 +106,27 @@ def _make_matlab_command(self, _): if not isdefined(self.inputs.invmat): self.inputs.invmat = self._make_inv_file() script = """ - target = '%s'; - moving = '%s'; + target = '{}'; + moving = '{}'; targetv = spm_vol(target); movingv = spm_vol(moving); x = spm_coreg(targetv, movingv); M = spm_matrix(x); - save('%s' , 'M' ); + save('{}' , 'M' ); M = inv(M); - save('%s','M') - """ % (self.inputs.target, self.inputs.moving, self.inputs.mat, - self.inputs.invmat) + save('{}','M') + """.format( + self.inputs.target, + self.inputs.moving, + self.inputs.mat, + self.inputs.invmat, + ) return script def _list_outputs(self): outputs = self._outputs().get() - outputs['mat'] = os.path.abspath(self.inputs.mat) - outputs['invmat'] = os.path.abspath(self.inputs.invmat) + outputs["mat"] = os.path.abspath(self.inputs.mat) + outputs["invmat"] = os.path.abspath(self.inputs.invmat) return outputs @@ -134,18 +135,18 @@ class ApplyTransformInputSpec(SPMCommandInputSpec): exists=True, mandatory=True, copyfile=True, - desc='file to apply transform to, (only updates header)') - mat = File( - exists=True, mandatory=True, desc='file holding transform to apply') + desc="file to apply transform to, (only updates header)", + ) + mat = File(exists=True, mandatory=True, desc="file holding transform to apply") out_file = File(desc="output file name for transformed data", genfile=True) class ApplyTransformOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Transformed image file') + out_file = File(exists=True, desc="Transformed image file") class ApplyTransform(SPMCommand): - """ Uses SPM to apply transform stored in a .mat file to given file + """Uses SPM to apply transform stored in a .mat file to given file Examples -------- @@ -157,17 +158,18 @@ class ApplyTransform(SPMCommand): >>> applymat.run() # doctest: +SKIP """ + input_spec = ApplyTransformInputSpec output_spec = ApplyTransformOutputSpec def _make_matlab_command(self, _): """checks for SPM, generates script""" outputs = self._list_outputs() - self.inputs.out_file = outputs['out_file'] + self.inputs.out_file = outputs["out_file"] script = """ - infile = '%s'; - outfile = '%s' - transform = load('%s'); + infile = '{}'; + outfile = '{}' + transform = load('{}'); V = spm_vol(infile); X = spm_read_vols(V); @@ -176,7 +178,11 @@ def _make_matlab_command(self, _): V.fname = fullfile(outfile); spm_write_vol(V,X); - """ % (self.inputs.in_file, self.inputs.out_file, self.inputs.mat) + """.format( + 
+            self.inputs.in_file,
+            self.inputs.out_file,
+            self.inputs.mat,
+        )
         # img_space = spm_get_space(infile);
         # spm_get_space(infile, transform.M * img_space);
         return script
@@ -184,51 +190,51 @@ def _make_matlab_command(self, _):
     def _list_outputs(self):
         outputs = self.output_spec().get()
         if not isdefined(self.inputs.out_file):
-            outputs['out_file'] = os.path.abspath(self._gen_outfilename())
+            outputs["out_file"] = os.path.abspath(self._gen_outfilename())
         else:
-            outputs['out_file'] = os.path.abspath(self.inputs.out_file)
+            outputs["out_file"] = os.path.abspath(self.inputs.out_file)
         return outputs

     def _gen_outfilename(self):
         _, name, _ = split_filename(self.inputs.in_file)
-        return name + '_trans.nii'
+        return name + "_trans.nii"


 class ResliceInputSpec(SPMCommandInputSpec):
     in_file = File(
         exists=True,
         mandatory=True,
-        desc='file to apply transform to, (only updates header)')
+        desc="file to apply transform to (only updates header)",
+    )
     space_defining = File(
-        exists=True,
-        mandatory=True,
-        desc='Volume defining space to slice in_file into')
+        exists=True, mandatory=True, desc="Volume defining space to slice in_file into"
+    )
     interp = traits.Range(
         low=0,
         high=7,
         usedefault=True,
-        desc='degree of b-spline used for interpolation'
-        '0 is nearest neighbor (default)')
+        desc="degree of b-spline used for interpolation; "
+        "0 is nearest neighbor (default)",
+    )

-    out_file = File(desc='Optional file to save resliced volume')
+    out_file = File(desc="Optional file to save resliced volume")


 class ResliceOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='resliced volume')
+    out_file = File(exists=True, desc="resliced volume")


 class Reslice(SPMCommand):
-    """ uses spm_reslice to resample in_file into space of space_defining"""
+    """uses spm_reslice to resample in_file into space of space_defining"""

     input_spec = ResliceInputSpec
     output_spec = ResliceOutputSpec

     def _make_matlab_command(self, _):
-        """ generates script"""
+        """generates script"""
         if not isdefined(self.inputs.out_file):
-            self.inputs.out_file = fname_presuffix(
-                self.inputs.in_file, prefix='r')
+            self.inputs.out_file = fname_presuffix(self.inputs.in_file, prefix="r")
         script = """
         flags.mean = 0;
         flags.which = 1;
@@ -237,13 +243,16 @@ def _make_matlab_command(self, _):
         infiles = strvcat(\'%s\', \'%s\');
         invols = spm_vol(infiles);
         spm_reslice(invols, flags);
-        """ % (self.inputs.interp, self.inputs.space_defining,
-               self.inputs.in_file)
+        """ % (
+            self.inputs.interp,
+            self.inputs.space_defining,
+            self.inputs.in_file,
+        )
         return script

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['out_file'] = os.path.abspath(self.inputs.out_file)
+        outputs["out_file"] = os.path.abspath(self.inputs.out_file)
         return outputs

@@ -251,48 +260,50 @@ class ApplyInverseDeformationInput(SPMCommandInputSpec):
     in_files = InputMultiPath(
         File(exists=True),
         mandatory=True,
-        field='fnames',
-        desc='Files on which deformation is applied')
+        field="fnames",
+        desc="Files on which deformation is applied",
+    )
     target = File(
-        exists=True,
-        field='comp{1}.inv.space',
-        desc='File defining target space')
+        exists=True, field="comp{1}.inv.space", desc="File defining target space"
+    )
     deformation = File(
         exists=True,
-        field='comp{1}.inv.comp{1}.sn2def.matname',
-        desc='SN SPM deformation file',
-        xor=['deformation_field'])
+        field="comp{1}.inv.comp{1}.sn2def.matname",
+        desc="SN SPM deformation file",
+        xor=["deformation_field"],
+    )
     deformation_field = File(
         exists=True,
-        field='comp{1}.inv.comp{1}.def',
-        desc='SN SPM deformation
file', - xor=['deformation']) + field="comp{1}.inv.comp{1}.def", + desc="SN SPM deformation file", + xor=["deformation"], + ) interpolation = traits.Range( - low=0, - high=7, - field='interp', - desc='degree of b-spline used for interpolation') + low=0, high=7, field="interp", desc="degree of b-spline used for interpolation" + ) bounding_box = traits.List( traits.Float(), - field='comp{1}.inv.comp{1}.sn2def.bb', + field="comp{1}.inv.comp{1}.sn2def.bb", minlen=6, maxlen=6, - desc='6-element list (opt)') + desc="6-element list (opt)", + ) voxel_sizes = traits.List( traits.Float(), - field='comp{1}.inv.comp{1}.sn2def.vox', + field="comp{1}.inv.comp{1}.sn2def.vox", minlen=3, maxlen=3, - desc='3-element list (opt)') + desc="3-element list (opt)", + ) class ApplyInverseDeformationOutput(TraitedSpec): - out_files = OutputMultiPath(File(exists=True), desc='Transformed files') + out_files = OutputMultiPath(File(exists=True), desc="Transformed files") class ApplyInverseDeformation(SPMCommand): - """ Uses spm to apply inverse deformation stored in a .mat file or a + """Uses spm to apply inverse deformation stored in a .mat file or a deformation field to a given file Examples @@ -309,28 +320,27 @@ class ApplyInverseDeformation(SPMCommand): input_spec = ApplyInverseDeformationInput output_spec = ApplyInverseDeformationOutput - _jobtype = 'util' - _jobname = 'defs' + _jobtype = "util" + _jobname = "defs" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt == 'in_files': + """Convert input to appropriate format for spm""" + if opt == "in_files": return scans_for_fnames(ensure_list(val)) - if opt == 'target': + if opt == "target": return scans_for_fname(ensure_list(val)) - if opt == 'deformation': + if opt == "deformation": return np.array([simplify_list(val)], dtype=object) - if opt == 'deformation_field': + if opt == "deformation_field": return np.array([simplify_list(val)], dtype=object) return val def _list_outputs(self): outputs = self._outputs().get() - outputs['out_files'] = [] + outputs["out_files"] = [] for filename in self.inputs.in_files: _, fname = os.path.split(filename) - outputs['out_files'].append(os.path.realpath('w%s' % fname)) + outputs["out_files"].append(os.path.realpath("w%s" % fname)) return outputs @@ -338,34 +348,34 @@ class ResliceToReferenceInput(SPMCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, - field='fnames', - desc='Files on which deformation is applied') + field="fnames", + desc="Files on which deformation is applied", + ) target = File( - exists=True, - field='comp{1}.id.space', - desc='File defining target space') + exists=True, field="comp{1}.id.space", desc="File defining target space" + ) interpolation = traits.Range( - low=0, - high=7, - field='interp', - desc='degree of b-spline used for interpolation') + low=0, high=7, field="interp", desc="degree of b-spline used for interpolation" + ) bounding_box = traits.List( traits.Float(), - field='comp{2}.idbbvox.bb', + field="comp{2}.idbbvox.bb", minlen=6, maxlen=6, - desc='6-element list (opt)') + desc="6-element list (opt)", + ) voxel_sizes = traits.List( traits.Float(), - field='comp{2}.idbbvox.vox', + field="comp{2}.idbbvox.vox", minlen=3, maxlen=3, - desc='3-element list (opt)') + desc="3-element list (opt)", + ) class ResliceToReferenceOutput(TraitedSpec): - out_files = OutputMultiPath(File(exists=True), desc='Transformed files') + out_files = OutputMultiPath(File(exists=True), desc="Transformed files") class 
ResliceToReference(SPMCommand): @@ -385,28 +395,27 @@ class ResliceToReference(SPMCommand): input_spec = ResliceToReferenceInput output_spec = ResliceToReferenceOutput - _jobtype = 'util' - _jobname = 'defs' + _jobtype = "util" + _jobname = "defs" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt == 'in_files': + """Convert input to appropriate format for spm""" + if opt == "in_files": return scans_for_fnames(ensure_list(val)) - if opt == 'target': + if opt == "target": return scans_for_fname(ensure_list(val)) - if opt == 'deformation': + if opt == "deformation": return np.array([simplify_list(val)], dtype=object) - if opt == 'deformation_field': + if opt == "deformation_field": return np.array([simplify_list(val)], dtype=object) return val def _list_outputs(self): outputs = self._outputs().get() - outputs['out_files'] = [] + outputs["out_files"] = [] for filename in self.inputs.in_files: _, fname = os.path.split(filename) - outputs['out_files'].append(os.path.realpath('w%s' % fname)) + outputs["out_files"].append(os.path.realpath("w%s" % fname)) return outputs @@ -414,46 +423,46 @@ class DicomImportInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, - field='data', - desc='dicom files to be converted') + field="data", + desc="dicom files to be converted", + ) output_dir_struct = traits.Enum( - 'flat', - 'series', - 'patname', - 'patid_date', - 'patid', - 'date_time', - field='root', + "flat", + "series", + "patname", + "patid_date", + "patid", + "date_time", + field="root", usedefault=True, - desc='directory structure for the output.') + desc="directory structure for the output.", + ) output_dir = traits.Str( - './converted_dicom', - field='outdir', - usedefault=True, - desc='output directory.') + "./converted_dicom", field="outdir", usedefault=True, desc="output directory." + ) format = traits.Enum( - 'nii', - 'img', - field='convopts.format', - usedefault=True, - desc='output format.') + "nii", "img", field="convopts.format", usedefault=True, desc="output format." + ) icedims = traits.Bool( False, - field='convopts.icedims', + field="convopts.icedims", usedefault=True, - desc=('If image sorting fails, one can try using ' - 'the additional SIEMENS ICEDims information ' - 'to create unique filenames. Use this only if ' - 'there would be multiple volumes with exactly ' - 'the same file names.')) + desc=( + "If image sorting fails, one can try using " + "the additional SIEMENS ICEDims information " + "to create unique filenames. Use this only if " + "there would be multiple volumes with exactly " + "the same file names." + ), + ) class DicomImportOutputSpec(TraitedSpec): - out_files = OutputMultiPath(File(exists=True), desc='converted files') + out_files = OutputMultiPath(File(exists=True), desc="converted files") class DicomImport(SPMCommand): - """ Uses spm to convert DICOM files to nii or img+hdr. + """Uses spm to convert DICOM files to nii or img+hdr. 
Examples -------- @@ -467,47 +476,48 @@ class DicomImport(SPMCommand): input_spec = DicomImportInputSpec output_spec = DicomImportOutputSpec - _jobtype = 'util' - _jobname = 'dicom' + _jobtype = "util" + _jobname = "dicom" def _format_arg(self, opt, spec, val): - """Convert input to appropriate format for spm - """ - if opt == 'in_files': + """Convert input to appropriate format for spm""" + if opt == "in_files": return np.array(val, dtype=object) - if opt == 'output_dir': + if opt == "output_dir": return np.array([val], dtype=object) - if opt == 'output_dir': + if opt == "output_dir": return os.path.abspath(val) - if opt == 'icedims': + if opt == "icedims": if val: return 1 return 0 - return super(DicomImport, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _run_interface(self, runtime): od = os.path.abspath(self.inputs.output_dir) if not os.path.isdir(od): os.mkdir(od) - return super(DicomImport, self)._run_interface(runtime) + return super()._run_interface(runtime) def _list_outputs(self): from glob import glob + outputs = self._outputs().get() od = os.path.abspath(self.inputs.output_dir) ext = self.inputs.format if self.inputs.output_dir_struct == "flat": - outputs['out_files'] = glob(os.path.join(od, '*.%s' % ext)) - elif self.inputs.output_dir_struct == 'series': - outputs['out_files'] = glob( - os.path.join(od, os.path.join('*', '*.%s' % ext))) - elif (self.inputs.output_dir_struct in [ - 'patid', 'date_time', 'patname' - ]): - outputs['out_files'] = glob( - os.path.join(od, os.path.join('*', '*', '*.%s' % ext))) - elif self.inputs.output_dir_struct == 'patid_date': - outputs['out_files'] = glob( - os.path.join(od, os.path.join('*', '*', '*', '*.%s' % ext))) + outputs["out_files"] = glob(os.path.join(od, "*.%s" % ext)) + elif self.inputs.output_dir_struct == "series": + outputs["out_files"] = glob( + os.path.join(od, os.path.join("*", "*.%s" % ext)) + ) + elif self.inputs.output_dir_struct in ["patid", "date_time", "patname"]: + outputs["out_files"] = glob( + os.path.join(od, os.path.join("*", "*", "*.%s" % ext)) + ) + elif self.inputs.output_dir_struct == "patid_date": + outputs["out_files"] = glob( + os.path.join(od, os.path.join("*", "*", "*", "*.%s" % ext)) + ) return outputs diff --git a/nipype/interfaces/tests/__init__.py b/nipype/interfaces/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/tests/__init__.py +++ b/nipype/interfaces/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/tests/test_auto_BIDSDataGrabber.py b/nipype/interfaces/tests/test_auto_BIDSDataGrabber.py index 8acdcc9787..c2a899958c 100644 --- a/nipype/interfaces/tests/test_auto_BIDSDataGrabber.py +++ b/nipype/interfaces/tests/test_auto_BIDSDataGrabber.py @@ -1,21 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import BIDSDataGrabber def test_BIDSDataGrabber_inputs(): input_map = dict( - base_dir=dict(mandatory=True, ), + base_dir=dict( + mandatory=True, + ), + extra_derivatives=dict(), + index_derivatives=dict( + mandatory=True, + usedefault=True, + ), + load_layout=dict( + mandatory=False, + ), output_query=dict(), - raise_on_empty=dict(usedefault=True, ), - return_type=dict(usedefault=True, ), - strict=dict(), + raise_on_empty=dict( + usedefault=True, + ), ) inputs = BIDSDataGrabber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + 
def test_BIDSDataGrabber_outputs(): output_map = dict() outputs = BIDSDataGrabber.output_spec() diff --git a/nipype/interfaces/tests/test_auto_Bru2.py b/nipype/interfaces/tests/test_auto_Bru2.py index ce436ac03e..7935d2fc97 100644 --- a/nipype/interfaces/tests/test_auto_Bru2.py +++ b/nipype/interfaces/tests/test_auto_Bru2.py @@ -1,26 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..bru2nii import Bru2 def test_Bru2_inputs(): input_map = dict( - actual_size=dict(argstr='-a', ), - append_protocol_name=dict(argstr='-p', ), - args=dict(argstr='%s', ), - compress=dict(argstr='-z', ), + actual_size=dict( + argstr="-a", + ), + append_protocol_name=dict( + argstr="-p", + ), + args=dict( + argstr="%s", + ), + compress=dict( + argstr="-z", + ), environ=dict( nohash=True, usedefault=True, ), - force_conversion=dict(argstr='-f', ), + force_conversion=dict( + argstr="-f", + ), input_dir=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-1, ), output_filename=dict( - argstr='-o %s', + argstr="-o %s", genfile=True, ), ) @@ -29,8 +38,14 @@ def test_Bru2_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Bru2_outputs(): - output_map = dict(nii_file=dict(), ) + output_map = dict( + nii_file=dict( + extensions=None, + ), + ) outputs = Bru2.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_C3d.py b/nipype/interfaces/tests/test_auto_C3d.py index 9e74e82e2a..d4d26e8264 100644 --- a/nipype/interfaces/tests/test_auto_C3d.py +++ b/nipype/interfaces/tests/test_auto_C3d.py @@ -1,50 +1,70 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..c3 import C3d def test_C3d_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", mandatory=True, position=1, ), - interp=dict(argstr='-interpolation %s', ), - is_4d=dict(usedefault=True, ), + interp=dict( + argstr="-interpolation %s", + ), + is_4d=dict( + usedefault=True, + ), multicomp_split=dict( - argstr='-mcr', + argstr="-mcr", position=0, usedefault=True, ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, position=-1, - xor=['out_files'], + xor=["out_files"], ), out_files=dict( - argstr='-oo %s', + argstr="-oo %s", position=-1, - xor=['out_file'], + xor=["out_file"], + ), + pix_type=dict( + argstr="-type %s", + ), + resample=dict( + argstr="-resample %s", + ), + scale=dict( + argstr="-scale %s", + ), + shift=dict( + argstr="-shift %s", + ), + smooth=dict( + argstr="-smooth %s", ), - pix_type=dict(argstr='-type %s', ), - resample=dict(argstr='-resample %s', ), - scale=dict(argstr='-scale %s', ), - shift=dict(argstr='-shift %s', ), - smooth=dict(argstr='-smooth %s', ), ) inputs = C3d.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_C3d_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict( + out_files=dict(), + ) outputs = C3d.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_C3dAffineTool.py b/nipype/interfaces/tests/test_auto_C3dAffineTool.py index 510ea2f02a..153f6090a7 100644 --- a/nipype/interfaces/tests/test_auto_C3dAffineTool.py +++ 
b/nipype/interfaces/tests/test_auto_C3dAffineTool.py @@ -1,34 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..c3 import C3dAffineTool def test_C3dAffineTool_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), fsl2ras=dict( - argstr='-fsl2ras', + argstr="-fsl2ras", position=4, ), itk_transform=dict( - argstr='-oitk %s', + argstr="-oitk %s", hash_files=False, position=5, ), reference_file=dict( - argstr='-ref %s', + argstr="-ref %s", + extensions=None, position=1, ), source_file=dict( - argstr='-src %s', + argstr="-src %s", + extensions=None, position=2, ), transform_file=dict( - argstr='%s', + argstr="%s", + extensions=None, position=3, ), ) @@ -37,8 +41,14 @@ def test_C3dAffineTool_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_C3dAffineTool_outputs(): - output_map = dict(itk_transform=dict(), ) + output_map = dict( + itk_transform=dict( + extensions=None, + ), + ) outputs = C3dAffineTool.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_CopyMeta.py b/nipype/interfaces/tests/test_auto_CopyMeta.py index 012edfa886..f7a554226e 100644 --- a/nipype/interfaces/tests/test_auto_CopyMeta.py +++ b/nipype/interfaces/tests/test_auto_CopyMeta.py @@ -1,22 +1,33 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dcmstack import CopyMeta def test_CopyMeta_inputs(): input_map = dict( - dest_file=dict(mandatory=True, ), + dest_file=dict( + extensions=None, + mandatory=True, + ), exclude_classes=dict(), include_classes=dict(), - src_file=dict(mandatory=True, ), + src_file=dict( + extensions=None, + mandatory=True, + ), ) inputs = CopyMeta.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CopyMeta_outputs(): - output_map = dict(dest_file=dict(), ) + output_map = dict( + dest_file=dict( + extensions=None, + ), + ) outputs = CopyMeta.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_DataFinder.py b/nipype/interfaces/tests/test_auto_DataFinder.py index 9a1b7418df..e1f24b3517 100644 --- a/nipype/interfaces/tests/test_auto_DataFinder.py +++ b/nipype/interfaces/tests/test_auto_DataFinder.py @@ -1,22 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import DataFinder def test_DataFinder_inputs(): input_map = dict( ignore_regexes=dict(), - match_regex=dict(usedefault=True, ), + match_regex=dict( + usedefault=True, + ), max_depth=dict(), min_depth=dict(), - root_paths=dict(mandatory=True, ), - unpack_single=dict(usedefault=True, ), + root_paths=dict( + mandatory=True, + ), + unpack_single=dict( + usedefault=True, + ), ) inputs = DataFinder.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DataFinder_outputs(): output_map = dict() outputs = DataFinder.output_spec() diff --git a/nipype/interfaces/tests/test_auto_DataGrabber.py b/nipype/interfaces/tests/test_auto_DataGrabber.py index d39db0b527..a58e4dd434 100644 --- a/nipype/interfaces/tests/test_auto_DataGrabber.py +++ 
b/nipype/interfaces/tests/test_auto_DataGrabber.py @@ -1,15 +1,22 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import DataGrabber def test_DataGrabber_inputs(): input_map = dict( base_directory=dict(), - drop_blank_outputs=dict(usedefault=True, ), - raise_on_empty=dict(usedefault=True, ), - sort_filelist=dict(mandatory=True, ), - template=dict(mandatory=True, ), + drop_blank_outputs=dict( + usedefault=True, + ), + raise_on_empty=dict( + usedefault=True, + ), + sort_filelist=dict( + mandatory=True, + ), + template=dict( + mandatory=True, + ), template_args=dict(), ) inputs = DataGrabber.input_spec() @@ -17,6 +24,8 @@ def test_DataGrabber_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DataGrabber_outputs(): output_map = dict() outputs = DataGrabber.output_spec() diff --git a/nipype/interfaces/tests/test_auto_DataSink.py b/nipype/interfaces/tests/test_auto_DataSink.py index da26854451..1ce4183b70 100644 --- a/nipype/interfaces/tests/test_auto_DataSink.py +++ b/nipype/interfaces/tests/test_auto_DataSink.py @@ -1,20 +1,25 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import DataSink def test_DataSink_inputs(): input_map = dict( - _outputs=dict(usedefault=True, ), + _outputs=dict( + usedefault=True, + ), base_directory=dict(), bucket=dict(), container=dict(), creds_path=dict(), encrypt_bucket_keys=dict(), local_copy=dict(), - parameterization=dict(usedefault=True, ), + parameterization=dict( + usedefault=True, + ), regexp_substitutions=dict(), - remove_dest_dir=dict(usedefault=True, ), + remove_dest_dir=dict( + usedefault=True, + ), strip_dir=dict(), substitutions=dict(), ) @@ -23,8 +28,12 @@ def test_DataSink_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DataSink_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict(), + ) outputs = DataSink.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_Dcm2nii.py b/nipype/interfaces/tests/test_auto_Dcm2nii.py index 0caa45a1f1..948aafa083 100644 --- a/nipype/interfaces/tests/test_auto_Dcm2nii.py +++ b/nipype/interfaces/tests/test_auto_Dcm2nii.py @@ -1,29 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dcm2nii import Dcm2nii def test_Dcm2nii_inputs(): input_map = dict( anonymize=dict( - argstr='-a', + argstr="-a", usedefault=True, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), collapse_folders=dict( - argstr='-c', + argstr="-c", usedefault=True, ), config_file=dict( - argstr='-b %s', + argstr="-b %s", + extensions=None, genfile=True, ), convert_all_pars=dict( - argstr='-v', + argstr="-v", usedefault=True, ), date_in_filename=dict( - argstr='-d', + argstr="-d", usedefault=True, ), environ=dict( @@ -31,54 +33,56 @@ def test_Dcm2nii_inputs(): usedefault=True, ), events_in_filename=dict( - argstr='-e', + argstr="-e", usedefault=True, ), gzip_output=dict( - argstr='-g', + argstr="-g", usedefault=True, ), id_in_filename=dict( - argstr='-i', + argstr="-i", usedefault=True, ), nii_output=dict( - argstr='-n', + argstr="-n", usedefault=True, ), output_dir=dict( - argstr='-o %s', + argstr="-o %s", genfile=True, ), 
protocol_in_filename=dict( - argstr='-p', + argstr="-p", usedefault=True, ), - reorient=dict(argstr='-r', ), + reorient=dict( + argstr="-r", + ), reorient_and_crop=dict( - argstr='-x', + argstr="-x", usedefault=True, ), source_dir=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-1, - xor=['source_names'], + xor=["source_names"], ), source_in_filename=dict( - argstr='-f', + argstr="-f", usedefault=True, ), source_names=dict( - argstr='%s', + argstr="%s", copyfile=False, mandatory=True, position=-1, - xor=['source_dir'], + xor=["source_dir"], ), spm_analyze=dict( - argstr='-s', - xor=['nii_output'], + argstr="-s", + xor=["nii_output"], ), ) inputs = Dcm2nii.input_spec() @@ -86,6 +90,8 @@ def test_Dcm2nii_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dcm2nii_outputs(): output_map = dict( bvals=dict(), diff --git a/nipype/interfaces/tests/test_auto_Dcm2niix.py b/nipype/interfaces/tests/test_auto_Dcm2niix.py index 5917f48583..3dc69d325f 100644 --- a/nipype/interfaces/tests/test_auto_Dcm2niix.py +++ b/nipype/interfaces/tests/test_auto_Dcm2niix.py @@ -1,27 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dcm2nii import Dcm2niix def test_Dcm2niix_inputs(): input_map = dict( anon_bids=dict( - argstr='-ba', - requires=['bids_format'], + argstr="-ba", + requires=["bids_format"], + ), + args=dict( + argstr="%s", ), - args=dict(argstr='%s', ), bids_format=dict( - argstr='-b', + argstr="-b", usedefault=True, ), - comment=dict(argstr='-c %s', ), + comment=dict( + argstr="-c %s", + ), compress=dict( - argstr='-z %s', + argstr="-z %s", usedefault=True, ), - compression=dict(argstr='-%d', ), + compression=dict( + argstr="-%d", + ), crop=dict( - argstr='-x', + argstr="-x", usedefault=True, ), environ=dict( @@ -29,40 +34,51 @@ def test_Dcm2niix_inputs(): usedefault=True, ), has_private=dict( - argstr='-t', + argstr="-t", usedefault=True, ), - ignore_deriv=dict(argstr='-i', ), + ignore_deriv=dict( + argstr="-i", + ), merge_imgs=dict( - argstr='-m', + argstr="-m %d", usedefault=True, ), - out_filename=dict(argstr='-f %s', ), + out_filename=dict( + argstr="-f %s", + ), output_dir=dict( - argstr='-o %s', + argstr="-o %s", usedefault=True, ), - philips_float=dict(argstr='-p', ), - series_numbers=dict(argstr='-n %s...', ), + philips_float=dict( + argstr="-p", + ), + series_numbers=dict( + argstr="-n %s...", + ), single_file=dict( - argstr='-s', + argstr="-s", usedefault=True, ), source_dir=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-1, - xor=['source_names'], + xor=["source_names"], ), source_names=dict( - argstr='%s', + argstr="%s", copyfile=False, mandatory=True, position=-1, - xor=['source_dir'], + xor=["source_dir"], + ), + to_nrrd=dict( + argstr="-e", ), verbose=dict( - argstr='-v', + argstr="-v", usedefault=True, ), ) @@ -71,12 +87,15 @@ def test_Dcm2niix_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dcm2niix_outputs(): output_map = dict( bids=dict(), bvals=dict(), bvecs=dict(), converted_files=dict(), + mvecs=dict(), ) outputs = Dcm2niix.output_spec() diff --git a/nipype/interfaces/tests/test_auto_DcmStack.py b/nipype/interfaces/tests/test_auto_DcmStack.py index 53a5259af5..02bf268904 100644 --- a/nipype/interfaces/tests/test_auto_DcmStack.py +++ 
b/nipype/interfaces/tests/test_auto_DcmStack.py @@ -1,16 +1,21 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dcmstack import DcmStack def test_DcmStack_inputs(): input_map = dict( - dicom_files=dict(mandatory=True, ), + dicom_files=dict( + mandatory=True, + ), embed_meta=dict(), exclude_regexes=dict(), - force_read=dict(usedefault=True, ), + force_read=dict( + usedefault=True, + ), include_regexes=dict(), - out_ext=dict(usedefault=True, ), + out_ext=dict( + usedefault=True, + ), out_format=dict(), out_path=dict(), ) @@ -19,8 +24,14 @@ def test_DcmStack_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DcmStack_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = DcmStack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_ExportFile.py b/nipype/interfaces/tests/test_auto_ExportFile.py new file mode 100644 index 0000000000..adac52e161 --- /dev/null +++ b/nipype/interfaces/tests/test_auto_ExportFile.py @@ -0,0 +1,37 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..io import ExportFile + + +def test_ExportFile_inputs(): + input_map = dict( + check_extension=dict( + usedefault=True, + ), + clobber=dict(), + in_file=dict( + extensions=None, + mandatory=True, + ), + out_file=dict( + extensions=None, + mandatory=True, + ), + ) + inputs = ExportFile.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_ExportFile_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = ExportFile.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/tests/test_auto_FreeSurferSource.py b/nipype/interfaces/tests/test_auto_FreeSurferSource.py index 15ea9c66cd..875380f75f 100644 --- a/nipype/interfaces/tests/test_auto_FreeSurferSource.py +++ b/nipype/interfaces/tests/test_auto_FreeSurferSource.py @@ -1,102 +1,164 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import FreeSurferSource def test_FreeSurferSource_inputs(): input_map = dict( - hemi=dict(usedefault=True, ), - subject_id=dict(mandatory=True, ), - subjects_dir=dict(mandatory=True, ), + hemi=dict( + usedefault=True, + ), + subject_id=dict( + mandatory=True, + ), + subjects_dir=dict( + mandatory=True, + ), ) inputs = FreeSurferSource.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FreeSurferSource_outputs(): output_map = dict( BA_stats=dict( - altkey='BA', - loc='stats', + altkey="BA", + loc="stats", + ), + T1=dict( + extensions=None, + loc="mri", ), - T1=dict(loc='mri', ), annot=dict( - altkey='*annot', - loc='label', + altkey="*annot", + loc="label", ), aparc_a2009s_stats=dict( - altkey='aparc.a2009s', - loc='stats', + altkey="aparc.a2009s", + loc="stats", ), aparc_aseg=dict( - altkey='aparc*aseg', - loc='mri', + altkey="aparc*aseg", + loc="mri", ), aparc_stats=dict( - altkey='aparc', - loc='stats', + altkey="aparc", + loc="stats", ), area_pial=dict( - 
altkey='area.pial', - loc='surf', + altkey="area.pial", + loc="surf", + ), + aseg=dict( + extensions=None, + loc="mri", ), - aseg=dict(loc='mri', ), aseg_stats=dict( - altkey='aseg', - loc='stats', + altkey="aseg", + loc="stats", + ), + avg_curv=dict( + loc="surf", + ), + brain=dict( + extensions=None, + loc="mri", + ), + brainmask=dict( + extensions=None, + loc="mri", + ), + curv=dict( + loc="surf", ), - avg_curv=dict(loc='surf', ), - brain=dict(loc='mri', ), - brainmask=dict(loc='mri', ), - curv=dict(loc='surf', ), curv_pial=dict( - altkey='curv.pial', - loc='surf', + altkey="curv.pial", + loc="surf", ), curv_stats=dict( - altkey='curv', - loc='stats', + altkey="curv", + loc="stats", ), entorhinal_exvivo_stats=dict( - altkey='entorhinal_exvivo', - loc='stats', + altkey="entorhinal_exvivo", + loc="stats", + ), + filled=dict( + extensions=None, + loc="mri", ), - filled=dict(loc='mri', ), graymid=dict( - altkey=['graymid', 'midthickness'], - loc='surf', + altkey=["graymid", "midthickness"], + loc="surf", + ), + inflated=dict( + loc="surf", + ), + jacobian_white=dict( + loc="surf", ), - inflated=dict(loc='surf', ), - jacobian_white=dict(loc='surf', ), label=dict( - altkey='*label', - loc='label', - ), - norm=dict(loc='mri', ), - nu=dict(loc='mri', ), - orig=dict(loc='mri', ), - pial=dict(loc='surf', ), - rawavg=dict(loc='mri', ), + altkey="*label", + loc="label", + ), + norm=dict( + extensions=None, + loc="mri", + ), + nu=dict( + extensions=None, + loc="mri", + ), + orig=dict( + extensions=None, + loc="mri", + ), + pial=dict( + loc="surf", + ), + rawavg=dict( + extensions=None, + loc="mri", + ), ribbon=dict( - altkey='*ribbon', - loc='mri', + altkey="*ribbon", + loc="mri", + ), + smoothwm=dict( + loc="surf", + ), + sphere=dict( + loc="surf", ), - smoothwm=dict(loc='surf', ), - sphere=dict(loc='surf', ), sphere_reg=dict( - altkey='sphere.reg', - loc='surf', - ), - sulc=dict(loc='surf', ), - thickness=dict(loc='surf', ), - volume=dict(loc='surf', ), - white=dict(loc='surf', ), - wm=dict(loc='mri', ), - wmparc=dict(loc='mri', ), + altkey="sphere.reg", + loc="surf", + ), + sulc=dict( + loc="surf", + ), + thickness=dict( + loc="surf", + ), + volume=dict( + loc="surf", + ), + white=dict( + loc="surf", + ), + wm=dict( + extensions=None, + loc="mri", + ), + wmparc=dict( + extensions=None, + loc="mri", + ), wmparc_stats=dict( - altkey='wmparc', - loc='stats', + altkey="wmparc", + loc="stats", ), ) outputs = FreeSurferSource.output_spec() diff --git a/nipype/interfaces/tests/test_auto_GroupAndStack.py b/nipype/interfaces/tests/test_auto_GroupAndStack.py index a8f30e32f9..7282e23a8b 100644 --- a/nipype/interfaces/tests/test_auto_GroupAndStack.py +++ b/nipype/interfaces/tests/test_auto_GroupAndStack.py @@ -1,16 +1,21 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dcmstack import GroupAndStack def test_GroupAndStack_inputs(): input_map = dict( - dicom_files=dict(mandatory=True, ), + dicom_files=dict( + mandatory=True, + ), embed_meta=dict(), exclude_regexes=dict(), - force_read=dict(usedefault=True, ), + force_read=dict( + usedefault=True, + ), include_regexes=dict(), - out_ext=dict(usedefault=True, ), + out_ext=dict( + usedefault=True, + ), out_format=dict(), out_path=dict(), ) @@ -19,8 +24,12 @@ def test_GroupAndStack_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GroupAndStack_outputs(): - output_map = dict(out_list=dict(), 
) + output_map = dict( + out_list=dict(), + ) outputs = GroupAndStack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_IOBase.py b/nipype/interfaces/tests/test_auto_IOBase.py index c2c2f96431..ddac7b4449 100644 --- a/nipype/interfaces/tests/test_auto_IOBase.py +++ b/nipype/interfaces/tests/test_auto_IOBase.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import IOBase diff --git a/nipype/interfaces/tests/test_auto_JSONFileGrabber.py b/nipype/interfaces/tests/test_auto_JSONFileGrabber.py index 03a65cf6c2..6e548f17c1 100644 --- a/nipype/interfaces/tests/test_auto_JSONFileGrabber.py +++ b/nipype/interfaces/tests/test_auto_JSONFileGrabber.py @@ -1,18 +1,21 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import JSONFileGrabber def test_JSONFileGrabber_inputs(): input_map = dict( defaults=dict(), - in_file=dict(), + in_file=dict( + extensions=None, + ), ) inputs = JSONFileGrabber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JSONFileGrabber_outputs(): output_map = dict() outputs = JSONFileGrabber.output_spec() diff --git a/nipype/interfaces/tests/test_auto_JSONFileSink.py b/nipype/interfaces/tests/test_auto_JSONFileSink.py index 002997912b..cdfa32195b 100644 --- a/nipype/interfaces/tests/test_auto_JSONFileSink.py +++ b/nipype/interfaces/tests/test_auto_JSONFileSink.py @@ -1,21 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import JSONFileSink def test_JSONFileSink_inputs(): input_map = dict( - _outputs=dict(usedefault=True, ), - in_dict=dict(usedefault=True, ), - out_file=dict(), + _outputs=dict( + usedefault=True, + ), + in_dict=dict( + usedefault=True, + ), + out_file=dict( + extensions=None, + ), ) inputs = JSONFileSink.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JSONFileSink_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = JSONFileSink.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_LookupMeta.py b/nipype/interfaces/tests/test_auto_LookupMeta.py index 29100aaef7..22f6678734 100644 --- a/nipype/interfaces/tests/test_auto_LookupMeta.py +++ b/nipype/interfaces/tests/test_auto_LookupMeta.py @@ -1,18 +1,24 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dcmstack import LookupMeta def test_LookupMeta_inputs(): input_map = dict( - in_file=dict(mandatory=True, ), - meta_keys=dict(mandatory=True, ), + in_file=dict( + extensions=None, + mandatory=True, + ), + meta_keys=dict( + mandatory=True, + ), ) inputs = LookupMeta.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LookupMeta_outputs(): output_map = dict() outputs = LookupMeta.output_spec() diff --git a/nipype/interfaces/tests/test_auto_MatlabCommand.py b/nipype/interfaces/tests/test_auto_MatlabCommand.py index c1b971d25d..1dfd9c1dde 100644 --- a/nipype/interfaces/tests/test_auto_MatlabCommand.py +++ 
b/nipype/interfaces/tests/test_auto_MatlabCommand.py @@ -1,43 +1,56 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..matlab import MatlabCommand def test_MatlabCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, ), - logfile=dict(argstr='-logfile %s', ), - mfile=dict(usedefault=True, ), + logfile=dict( + argstr="-logfile %s", + extensions=None, + ), + mfile=dict( + usedefault=True, + ), nodesktop=dict( - argstr='-nodesktop', + argstr="-nodesktop", nohash=True, usedefault=True, ), nosplash=dict( - argstr='-nosplash', + argstr="-nosplash", nohash=True, usedefault=True, ), paths=dict(), - postscript=dict(usedefault=True, ), - prescript=dict(usedefault=True, ), + postscript=dict( + usedefault=True, + ), + prescript=dict( + usedefault=True, + ), script=dict( argstr='-r "%s;exit"', mandatory=True, position=-1, ), - script_file=dict(usedefault=True, ), + script_file=dict( + extensions=None, + usedefault=True, + ), single_comp_thread=dict( - argstr='-singleCompThread', + argstr="-singleCompThread", nohash=True, ), uses_mcr=dict( nohash=True, - xor=['nodesktop', 'nosplash', 'single_comp_thread'], + xor=["nodesktop", "nosplash", "single_comp_thread"], ), ) inputs = MatlabCommand.input_spec() diff --git a/nipype/interfaces/tests/test_auto_MergeNifti.py b/nipype/interfaces/tests/test_auto_MergeNifti.py index 9e0a017c60..f199fc5da6 100644 --- a/nipype/interfaces/tests/test_auto_MergeNifti.py +++ b/nipype/interfaces/tests/test_auto_MergeNifti.py @@ -1,13 +1,16 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dcmstack import MergeNifti def test_MergeNifti_inputs(): input_map = dict( - in_files=dict(mandatory=True, ), + in_files=dict( + mandatory=True, + ), merge_dim=dict(), - out_ext=dict(usedefault=True, ), + out_ext=dict( + usedefault=True, + ), out_format=dict(), out_path=dict(), sort_order=dict(), @@ -17,8 +20,14 @@ def test_MergeNifti_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MergeNifti_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = MergeNifti.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_MeshFix.py b/nipype/interfaces/tests/test_auto_MeshFix.py index f306f4abed..1ae3b2b67c 100644 --- a/nipype/interfaces/tests/test_auto_MeshFix.py +++ b/nipype/interfaces/tests/test_auto_MeshFix.py @@ -1,100 +1,140 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..meshfix import MeshFix def test_MeshFix_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cut_inner=dict(argstr='--cut-inner %d', ), - cut_outer=dict(argstr='--cut-outer %d', ), - decouple_inin=dict(argstr='--decouple-inin %d', ), - decouple_outin=dict(argstr='--decouple-outin %d', ), - decouple_outout=dict(argstr='--decouple-outout %d', ), - dilation=dict(argstr='--dilate %d', ), - dont_clean=dict(argstr='--no-clean', ), + args=dict( + argstr="%s", + ), + cut_inner=dict( + argstr="--cut-inner %d", + ), + cut_outer=dict( + argstr="--cut-outer %d", + ), + decouple_inin=dict( + argstr="--decouple-inin %d", + ), + decouple_outin=dict( + argstr="--decouple-outin %d", + ), + decouple_outout=dict( + argstr="--decouple-outout %d", + ), + 
dilation=dict( + argstr="--dilate %d", + ), + dont_clean=dict( + argstr="--no-clean", + ), environ=dict( nohash=True, usedefault=True, ), - epsilon_angle=dict(argstr='-a %f', ), + epsilon_angle=dict( + argstr="-a %f", + ), finetuning_distance=dict( - argstr='%f', + argstr="%f", position=-2, - requires=['finetuning_substeps'], + requires=["finetuning_substeps"], ), finetuning_inwards=dict( - argstr='--fineTuneIn ', + argstr="--fineTuneIn ", position=-3, - requires=['finetuning_distance', 'finetuning_substeps'], + requires=["finetuning_distance", "finetuning_substeps"], ), finetuning_outwards=dict( - argstr='--fineTuneOut ', + argstr="--fineTuneOut ", position=-3, - requires=['finetuning_distance', 'finetuning_substeps'], - xor=['finetuning_inwards'], + requires=["finetuning_distance", "finetuning_substeps"], + xor=["finetuning_inwards"], ), finetuning_substeps=dict( - argstr='%d', + argstr="%d", position=-1, - requires=['finetuning_distance'], + requires=["finetuning_distance"], ), in_file1=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), in_file2=dict( - argstr='%s', + argstr="%s", + extensions=None, position=2, ), join_closest_components=dict( - argstr='-jc', - xor=['join_closest_components'], + argstr="-jc", + xor=["join_closest_components"], ), join_overlapping_largest_components=dict( - argstr='-j', - xor=['join_closest_components'], + argstr="-j", + xor=["join_closest_components"], + ), + laplacian_smoothing_steps=dict( + argstr="--smooth %d", + ), + number_of_biggest_shells=dict( + argstr="--shells %d", ), - laplacian_smoothing_steps=dict(argstr='--smooth %d', ), - number_of_biggest_shells=dict(argstr='--shells %d', ), out_filename=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, ), - output_type=dict(usedefault=True, ), - quiet_mode=dict(argstr='-q', ), - remove_handles=dict(argstr='--remove-handles', ), + output_type=dict( + usedefault=True, + ), + quiet_mode=dict( + argstr="-q", + ), + remove_handles=dict( + argstr="--remove-handles", + ), save_as_freesurfer_mesh=dict( - argstr='--fsmesh', - xor=['save_as_vrml', 'save_as_stl'], + argstr="--fsmesh", + xor=["save_as_vrml", "save_as_stl"], ), save_as_stl=dict( - argstr='--stl', - xor=['save_as_vrml', 'save_as_freesurfer_mesh'], + argstr="--stl", + xor=["save_as_vrml", "save_as_freesurfer_mesh"], ), save_as_vrml=dict( - argstr='--wrl', - xor=['save_as_stl', 'save_as_freesurfer_mesh'], + argstr="--wrl", + xor=["save_as_stl", "save_as_freesurfer_mesh"], + ), + set_intersections_to_one=dict( + argstr="--intersect", ), - set_intersections_to_one=dict(argstr='--intersect', ), uniform_remeshing_steps=dict( - argstr='-u %d', - requires=['uniform_remeshing_vertices'], + argstr="-u %d", + requires=["uniform_remeshing_vertices"], ), uniform_remeshing_vertices=dict( - argstr='--vertices %d', - requires=['uniform_remeshing_steps'], + argstr="--vertices %d", + requires=["uniform_remeshing_steps"], + ), + x_shift=dict( + argstr="--smooth %d", ), - x_shift=dict(argstr='--smooth %d', ), ) inputs = MeshFix.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MeshFix_outputs(): - output_map = dict(mesh_file=dict(), ) + output_map = dict( + mesh_file=dict( + extensions=None, + ), + ) outputs = MeshFix.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_MySQLSink.py b/nipype/interfaces/tests/test_auto_MySQLSink.py index 
048699659a..702a21e9ce 100644 --- a/nipype/interfaces/tests/test_auto_MySQLSink.py +++ b/nipype/interfaces/tests/test_auto_MySQLSink.py @@ -1,23 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import MySQLSink def test_MySQLSink_inputs(): input_map = dict( config=dict( + extensions=None, + mandatory=True, + xor=["host"], + ), + database_name=dict( mandatory=True, - xor=['host'], ), - database_name=dict(mandatory=True, ), host=dict( mandatory=True, - requires=['username', 'password'], + requires=["username", "password"], usedefault=True, - xor=['config'], + xor=["config"], ), password=dict(), - table_name=dict(mandatory=True, ), + table_name=dict( + mandatory=True, + ), username=dict(), ) inputs = MySQLSink.input_spec() diff --git a/nipype/interfaces/tests/test_auto_NiftiGeneratorBase.py b/nipype/interfaces/tests/test_auto_NiftiGeneratorBase.py index 88bc12dfa2..9bdd24ac4a 100644 --- a/nipype/interfaces/tests/test_auto_NiftiGeneratorBase.py +++ b/nipype/interfaces/tests/test_auto_NiftiGeneratorBase.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dcmstack import NiftiGeneratorBase diff --git a/nipype/interfaces/tests/test_auto_NilearnBaseInterface.py b/nipype/interfaces/tests/test_auto_NilearnBaseInterface.py index 38e4cfd698..0c9f8e2fc9 100644 --- a/nipype/interfaces/tests/test_auto_NilearnBaseInterface.py +++ b/nipype/interfaces/tests/test_auto_NilearnBaseInterface.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..nilearn import NilearnBaseInterface diff --git a/nipype/interfaces/tests/test_auto_PETPVC.py b/nipype/interfaces/tests/test_auto_PETPVC.py index c5283435d5..f7da7f45dd 100644 --- a/nipype/interfaces/tests/test_auto_PETPVC.py +++ b/nipype/interfaces/tests/test_auto_PETPVC.py @@ -1,17 +1,18 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..petpvc import PETPVC def test_PETPVC_inputs(): input_map = dict( alpha=dict( - argstr='-a %.4f', + argstr="-a %.4f", usedefault=True, ), - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), debug=dict( - argstr='-d', + argstr="-d", usedefault=True, ), environ=dict( @@ -19,44 +20,47 @@ def test_PETPVC_inputs(): usedefault=True, ), fwhm_x=dict( - argstr='-x %.4f', + argstr="-x %.4f", mandatory=True, ), fwhm_y=dict( - argstr='-y %.4f', + argstr="-y %.4f", mandatory=True, ), fwhm_z=dict( - argstr='-z %.4f', + argstr="-z %.4f", mandatory=True, ), in_file=dict( - argstr='-i %s', + argstr="-i %s", + extensions=None, mandatory=True, ), mask_file=dict( - argstr='-m %s', + argstr="-m %s", + extensions=None, mandatory=True, ), n_deconv=dict( - argstr='-k %d', + argstr="-k %d", usedefault=True, ), n_iter=dict( - argstr='-n %d', + argstr="-n %d", usedefault=True, ), out_file=dict( - argstr='-o %s', + argstr="-o %s", + extensions=None, genfile=True, hash_files=False, ), pvc=dict( - argstr='-p %s', + argstr="-p %s", mandatory=True, ), stop_crit=dict( - argstr='-a %.4f', + argstr="-s %.4f", usedefault=True, ), ) @@ -65,8 +69,14 @@ def test_PETPVC_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PETPVC_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = PETPVC.output_spec() for key, metadata in 
list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_Quickshear.py b/nipype/interfaces/tests/test_auto_Quickshear.py index 7f39a6bc96..92212335dc 100644 --- a/nipype/interfaces/tests/test_auto_Quickshear.py +++ b/nipype/interfaces/tests/test_auto_Quickshear.py @@ -1,13 +1,14 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..quickshear import Quickshear def test_Quickshear_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), buff=dict( - argstr='%d', + argstr="%d", position=4, ), environ=dict( @@ -15,20 +16,23 @@ def test_Quickshear_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=1, ), mask_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_defaced', + name_source="in_file", + name_template="%s_defaced", position=3, ), ) @@ -37,8 +41,14 @@ def test_Quickshear_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Quickshear_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Quickshear.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_RCommand.py b/nipype/interfaces/tests/test_auto_RCommand.py new file mode 100644 index 0000000000..adfcf36cf0 --- /dev/null +++ b/nipype/interfaces/tests/test_auto_RCommand.py @@ -0,0 +1,31 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..r import RCommand + + +def test_RCommand_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + environ=dict( + nohash=True, + usedefault=True, + ), + rfile=dict( + usedefault=True, + ), + script=dict( + argstr='-e "%s"', + mandatory=True, + position=-1, + ), + script_file=dict( + extensions=None, + usedefault=True, + ), + ) + inputs = RCommand.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/tests/test_auto_Reorient.py b/nipype/interfaces/tests/test_auto_Reorient.py index 2e45a1ca7f..eb7a0ce5ff 100644 --- a/nipype/interfaces/tests/test_auto_Reorient.py +++ b/nipype/interfaces/tests/test_auto_Reorient.py @@ -1,22 +1,32 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..image import Reorient def test_Reorient_inputs(): input_map = dict( - in_file=dict(mandatory=True, ), - orientation=dict(usedefault=True, ), + in_file=dict( + extensions=None, + mandatory=True, + ), + orientation=dict( + usedefault=True, + ), ) inputs = Reorient.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Reorient_outputs(): output_map = dict( - out_file=dict(), - transform=dict(), + out_file=dict( + extensions=None, + ), + transform=dict( + extensions=None, + ), ) outputs = Reorient.output_spec() diff --git a/nipype/interfaces/tests/test_auto_Rescale.py b/nipype/interfaces/tests/test_auto_Rescale.py index e180c82988..5b14cfc8c0 100644 --- a/nipype/interfaces/tests/test_auto_Rescale.py +++ 
b/nipype/interfaces/tests/test_auto_Rescale.py @@ -1,22 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..image import Rescale def test_Rescale_inputs(): input_map = dict( - in_file=dict(mandatory=True, ), + in_file=dict( + extensions=None, + mandatory=True, + ), invert=dict(), - percentile=dict(usedefault=True, ), - ref_file=dict(mandatory=True, ), + percentile=dict( + usedefault=True, + ), + ref_file=dict( + extensions=None, + mandatory=True, + ), ) inputs = Rescale.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Rescale_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Rescale.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_S3DataGrabber.py b/nipype/interfaces/tests/test_auto_S3DataGrabber.py index 7c69413eb0..4b71fe49c3 100644 --- a/nipype/interfaces/tests/test_auto_S3DataGrabber.py +++ b/nipype/interfaces/tests/test_auto_S3DataGrabber.py @@ -1,18 +1,31 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import S3DataGrabber def test_S3DataGrabber_inputs(): input_map = dict( - anon=dict(usedefault=True, ), - bucket=dict(mandatory=True, ), - bucket_path=dict(usedefault=True, ), + anon=dict( + usedefault=True, + ), + bucket=dict( + mandatory=True, + ), + bucket_path=dict( + usedefault=True, + ), local_directory=dict(), - raise_on_empty=dict(usedefault=True, ), - region=dict(usedefault=True, ), - sort_filelist=dict(mandatory=True, ), - template=dict(mandatory=True, ), + raise_on_empty=dict( + usedefault=True, + ), + region=dict( + usedefault=True, + ), + sort_filelist=dict( + mandatory=True, + ), + template=dict( + mandatory=True, + ), template_args=dict(), ) inputs = S3DataGrabber.input_spec() @@ -20,6 +33,8 @@ def test_S3DataGrabber_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_S3DataGrabber_outputs(): output_map = dict() outputs = S3DataGrabber.output_spec() diff --git a/nipype/interfaces/tests/test_auto_SEMLikeCommandLine.py b/nipype/interfaces/tests/test_auto_SEMLikeCommandLine.py deleted file mode 100644 index 7777a8443e..0000000000 --- a/nipype/interfaces/tests/test_auto_SEMLikeCommandLine.py +++ /dev/null @@ -1,18 +0,0 @@ -# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals -from ..base import SEMLikeCommandLine - - -def test_SEMLikeCommandLine_inputs(): - input_map = dict(args=dict(argstr='%s', - ), - environ=dict(nohash=True, - usedefault=True, - ), - ) - inputs = SEMLikeCommandLine.input_spec() - - for key, metadata in list(input_map.items()): - for metakey, value in list(metadata.items()): - assert getattr(inputs.traits()[key], metakey) == value - diff --git a/nipype/interfaces/tests/test_auto_SQLiteSink.py b/nipype/interfaces/tests/test_auto_SQLiteSink.py index ea03663c4c..fe33b65675 100644 --- a/nipype/interfaces/tests/test_auto_SQLiteSink.py +++ b/nipype/interfaces/tests/test_auto_SQLiteSink.py @@ -1,12 +1,16 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import SQLiteSink def test_SQLiteSink_inputs(): input_map = dict( - database_file=dict(mandatory=True, ), - 
table_name=dict(mandatory=True, ), + database_file=dict( + extensions=None, + mandatory=True, + ), + table_name=dict( + mandatory=True, + ), ) inputs = SQLiteSink.input_spec() diff --git a/nipype/interfaces/tests/test_auto_SSHDataGrabber.py b/nipype/interfaces/tests/test_auto_SSHDataGrabber.py index cc7aa22e38..5a58cad276 100644 --- a/nipype/interfaces/tests/test_auto_SSHDataGrabber.py +++ b/nipype/interfaces/tests/test_auto_SSHDataGrabber.py @@ -1,21 +1,38 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import SSHDataGrabber def test_SSHDataGrabber_inputs(): input_map = dict( - base_directory=dict(mandatory=True, ), - download_files=dict(usedefault=True, ), - drop_blank_outputs=dict(usedefault=True, ), - hostname=dict(mandatory=True, ), + base_directory=dict( + mandatory=True, + ), + download_files=dict( + usedefault=True, + ), + drop_blank_outputs=dict( + usedefault=True, + ), + hostname=dict( + mandatory=True, + ), password=dict(), - raise_on_empty=dict(usedefault=True, ), - sort_filelist=dict(mandatory=True, ), - ssh_log_to_file=dict(usedefault=True, ), - template=dict(mandatory=True, ), + raise_on_empty=dict( + usedefault=True, + ), + sort_filelist=dict( + mandatory=True, + ), + ssh_log_to_file=dict( + usedefault=True, + ), + template=dict( + mandatory=True, + ), template_args=dict(), - template_expression=dict(usedefault=True, ), + template_expression=dict( + usedefault=True, + ), username=dict(), ) inputs = SSHDataGrabber.input_spec() @@ -23,6 +40,8 @@ def test_SSHDataGrabber_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SSHDataGrabber_outputs(): output_map = dict() outputs = SSHDataGrabber.output_spec() diff --git a/nipype/interfaces/tests/test_auto_SelectFiles.py b/nipype/interfaces/tests/test_auto_SelectFiles.py index bf438fb826..06f86c36f0 100644 --- a/nipype/interfaces/tests/test_auto_SelectFiles.py +++ b/nipype/interfaces/tests/test_auto_SelectFiles.py @@ -1,20 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import SelectFiles def test_SelectFiles_inputs(): input_map = dict( base_directory=dict(), - force_lists=dict(usedefault=True, ), - raise_on_empty=dict(usedefault=True, ), - sort_filelist=dict(usedefault=True, ), + force_lists=dict( + usedefault=True, + ), + raise_on_empty=dict( + usedefault=True, + ), + sort_filelist=dict( + usedefault=True, + ), ) inputs = SelectFiles.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SelectFiles_outputs(): output_map = dict() outputs = SelectFiles.output_spec() diff --git a/nipype/interfaces/tests/test_auto_SignalExtraction.py b/nipype/interfaces/tests/test_auto_SignalExtraction.py index bc76f5261a..272d94f54d 100644 --- a/nipype/interfaces/tests/test_auto_SignalExtraction.py +++ b/nipype/interfaces/tests/test_auto_SignalExtraction.py @@ -1,25 +1,46 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..nilearn import SignalExtraction def test_SignalExtraction_inputs(): input_map = dict( - class_labels=dict(mandatory=True, ), - detrend=dict(usedefault=True, ), - in_file=dict(mandatory=True, ), - incl_shared_variance=dict(usedefault=True, ), - include_global=dict(usedefault=True, ), - label_files=dict(mandatory=True, ), - 
out_file=dict(usedefault=True, ), + class_labels=dict( + mandatory=True, + ), + detrend=dict( + usedefault=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), + incl_shared_variance=dict( + usedefault=True, + ), + include_global=dict( + usedefault=True, + ), + label_files=dict( + mandatory=True, + ), + out_file=dict( + extensions=None, + usedefault=True, + ), ) inputs = SignalExtraction.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SignalExtraction_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = SignalExtraction.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_SlicerCommandLine.py b/nipype/interfaces/tests/test_auto_SlicerCommandLine.py index 057628e879..c13466483b 100644 --- a/nipype/interfaces/tests/test_auto_SlicerCommandLine.py +++ b/nipype/interfaces/tests/test_auto_SlicerCommandLine.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dynamic_slicer import SlicerCommandLine def test_SlicerCommandLine_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, @@ -17,6 +18,8 @@ def test_SlicerCommandLine_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SlicerCommandLine_outputs(): output_map = dict() outputs = SlicerCommandLine.output_spec() diff --git a/nipype/interfaces/tests/test_auto_SplitNifti.py b/nipype/interfaces/tests/test_auto_SplitNifti.py index e1f6539fab..14ccc6bdb0 100644 --- a/nipype/interfaces/tests/test_auto_SplitNifti.py +++ b/nipype/interfaces/tests/test_auto_SplitNifti.py @@ -1,12 +1,16 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..dcmstack import SplitNifti def test_SplitNifti_inputs(): input_map = dict( - in_file=dict(mandatory=True, ), - out_ext=dict(usedefault=True, ), + in_file=dict( + extensions=None, + mandatory=True, + ), + out_ext=dict( + usedefault=True, + ), out_format=dict(), out_path=dict(), split_dim=dict(), @@ -16,8 +20,12 @@ def test_SplitNifti_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SplitNifti_outputs(): - output_map = dict(out_list=dict(), ) + output_map = dict( + out_list=dict(), + ) outputs = SplitNifti.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_XNATSink.py b/nipype/interfaces/tests/test_auto_XNATSink.py index b4db5ec8d3..ec6f920f57 100644 --- a/nipype/interfaces/tests/test_auto_XNATSink.py +++ b/nipype/interfaces/tests/test_auto_XNATSink.py @@ -1,28 +1,42 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import XNATSink def test_XNATSink_inputs(): input_map = dict( - _outputs=dict(usedefault=True, ), - assessor_id=dict(xor=['reconstruction_id'], ), + _outputs=dict( + usedefault=True, + ), + assessor_id=dict( + xor=["reconstruction_id"], + ), cache_dir=dict(), config=dict( + extensions=None, + mandatory=True, + xor=["server"], + ), + experiment_id=dict( + mandatory=True, + ), + project_id=dict( 
mandatory=True, - xor=['server'], ), - experiment_id=dict(mandatory=True, ), - project_id=dict(mandatory=True, ), pwd=dict(), - reconstruction_id=dict(xor=['assessor_id'], ), + reconstruction_id=dict( + xor=["assessor_id"], + ), server=dict( mandatory=True, - requires=['user', 'pwd'], - xor=['config'], + requires=["user", "pwd"], + xor=["config"], + ), + share=dict( + usedefault=True, + ), + subject_id=dict( + mandatory=True, ), - share=dict(usedefault=True, ), - subject_id=dict(mandatory=True, ), user=dict(), ) inputs = XNATSink.input_spec() diff --git a/nipype/interfaces/tests/test_auto_XNATSource.py b/nipype/interfaces/tests/test_auto_XNATSource.py index 8faa79af81..f115657c9f 100644 --- a/nipype/interfaces/tests/test_auto_XNATSource.py +++ b/nipype/interfaces/tests/test_auto_XNATSource.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..io import XNATSource @@ -7,16 +6,21 @@ def test_XNATSource_inputs(): input_map = dict( cache_dir=dict(), config=dict( + extensions=None, mandatory=True, - xor=['server'], + xor=["server"], ), pwd=dict(), - query_template=dict(mandatory=True, ), - query_template_args=dict(usedefault=True, ), + query_template=dict( + mandatory=True, + ), + query_template_args=dict( + usedefault=True, + ), server=dict( mandatory=True, - requires=['user', 'pwd'], - xor=['config'], + requires=["user", "pwd"], + xor=["config"], ), user=dict(), ) @@ -25,6 +29,8 @@ def test_XNATSource_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_XNATSource_outputs(): output_map = dict() outputs = XNATSource.output_spec() diff --git a/nipype/interfaces/tests/test_dcm2nii.py b/nipype/interfaces/tests/test_dcm2nii.py new file mode 100644 index 0000000000..5154534c5a --- /dev/null +++ b/nipype/interfaces/tests/test_dcm2nii.py @@ -0,0 +1,32 @@ +import pytest + + +from nipype.interfaces import dcm2nii + + +@pytest.mark.parametrize( + "fname, extension, search_crop", + [ + ("output_1", ".txt", False), + ("output_w_[]_meta_1", ".json", False), + ("output_w_**^$?_meta_2", ".txt", False), + ("output_cropped", ".txt", True), + ], +) +def test_search_files(tmp_path, fname, extension, search_crop): + tmp_fname = fname + extension + test_file = tmp_path / tmp_fname + test_file.touch() + if search_crop: + tmp_cropped_fname = fname + "_Crop_1" + extension + test_cropped_file = tmp_path / tmp_cropped_fname + test_cropped_file.touch() + + actual_files_list = dcm2nii.search_files( + str(tmp_path / fname), [extension], search_crop + ) + for f in actual_files_list: + if search_crop: + assert f in (str(test_cropped_file), str(test_file)) + else: + assert str(test_file) == f diff --git a/nipype/interfaces/tests/test_extra_dcm2nii.py b/nipype/interfaces/tests/test_extra_dcm2nii.py index dd68454ad0..b093d26e6d 100644 --- a/nipype/interfaces/tests/test_extra_dcm2nii.py +++ b/nipype/interfaces/tests/test_extra_dcm2nii.py @@ -1,57 +1,60 @@ import os import pytest -import shutil from nipype.interfaces.dcm2nii import Dcm2niix + no_dcm2niix = not bool(Dcm2niix().version) no_datalad = False try: - from datalad import api # to pull and grab data + from datalad import api # to pull and grab data from datalad.support.exceptions import IncompleteResultsError except ImportError: no_datalad = True -DICOM_DIR = 'http://datasets-tests.datalad.org/dicoms/dcm2niix-tests' +DICOM_DIR = "http://datasets-tests.datalad.org/dicoms/dcm2niix-tests" + + 
+@pytest.fixture +def fetch_data(): + def _fetch_data(datadir, dicoms): + """Fetches some test DICOMs using datalad""" + try: + api.install(path=datadir, source=DICOM_DIR) + data = os.path.join(datadir, dicoms) + api.get(path=data, dataset=datadir) + except IncompleteResultsError as exc: + pytest.skip("Failed to fetch test data: %s" % str(exc)) + return data + return _fetch_data -def fetch_data(tmpdir, dicoms): - """Fetches some test DICOMs using datalad""" - data = os.path.join(tmpdir, 'data') - api.install(path=data, source=DICOM_DIR) - data = os.path.join(data, dicoms) - api.get(path=data) - return data @pytest.mark.skipif(no_datalad, reason="Datalad required") @pytest.mark.skipif(no_dcm2niix, reason="Dcm2niix required") -def test_dcm2niix_dwi(tmpdir): +def test_dcm2niix_dti(fetch_data, tmpdir): tmpdir.chdir() - try: - datadir = fetch_data(tmpdir.strpath, 'Siemens_Sag_DTI_20160825_145811') - except IncompleteResultsError as exc: - pytest.skip("Failed to fetch test data: %s" % str(exc)) + datadir = tmpdir.mkdir("data").strpath + dicoms = fetch_data(datadir, "Siemens_Sag_DTI_20160825_145811") - def assert_dwi(eg, bids): + def assert_dti(res): """Some assertions we will make""" - assert eg.outputs.converted_files - assert eg.outputs.bvals - assert eg.outputs.bvecs - outputs = [y for x,y in eg.outputs.get().items()] - if bids: + assert res.outputs.converted_files + assert res.outputs.bvals + assert res.outputs.bvecs + outputs = [y for x, y in res.outputs.get().items()] + if res.inputs.get("bids_format"): # ensure all outputs are of equal lengths assert len(set(map(len, outputs))) == 1 else: - assert not eg2.outputs.bids + assert not res.outputs.bids dcm = Dcm2niix() - dcm.inputs.source_dir = datadir - dcm.inputs.out_filename = '%u%z' - eg1 = dcm.run() - assert_dwi(eg1, True) + dcm.inputs.source_dir = dicoms + dcm.inputs.out_filename = "%u%z" + assert_dti(dcm.run()) # now run specifying output directory and removing BIDS option - outdir = tmpdir.mkdir('conversion').strpath + outdir = tmpdir.mkdir("conversion").strpath dcm.inputs.output_dir = outdir dcm.inputs.bids_format = False - eg2 = dcm.run() - assert_dwi(eg2, False) + assert_dti(dcm.run()) diff --git a/nipype/interfaces/tests/test_image.py b/nipype/interfaces/tests/test_image.py index bb4adf1d01..43e0cd0648 100644 --- a/nipype/interfaces/tests/test_image.py +++ b/nipype/interfaces/tests/test_image.py @@ -4,16 +4,15 @@ import nibabel as nb import pytest +from looseversion import LooseVersion from nibabel.orientations import axcodes2ornt, ornt_transform from ..image import _as_reoriented_backport, _orientations -from ... 
import LooseVersion -nibabel22 = LooseVersion(nb.__version__) >= LooseVersion('2.2.0') +nibabel24 = LooseVersion(nb.__version__) >= LooseVersion("2.4.0") -@pytest.mark.skipif(not nibabel22, - reason="Old nibabel - can't directly compare") +@pytest.mark.skipif(not nibabel24, reason="Old nibabel - can't directly compare") def test_reorientation_backport(): pixdims = ((1, 1, 1), (2, 2, 3)) data = np.random.normal(size=(17, 18, 19, 2)) @@ -28,7 +27,7 @@ def test_reorientation_backport(): # Create image img = nb.Nifti1Image(data, affine) - dim_info = {'freq': 0, 'phase': 1, 'slice': 2} + dim_info = {"freq": 0, "phase": 1, "slice": 2} img.header.set_dim_info(**dim_info) # Find a random, non-identity transform @@ -51,14 +50,17 @@ def test_reorientation_backport(): # Reorientation changes affine and data array assert not np.allclose(img.affine, reoriented_a.affine) - assert not (flips_only and - np.allclose(img.get_data(), reoriented_a.get_data())) + assert not ( + flips_only and np.allclose(img.get_fdata(), reoriented_a.get_fdata()) + ) # Dimension info changes iff axes are reordered - assert flips_only == np.array_equal(img.header.get_dim_info(), - reoriented_a.header.get_dim_info()) + assert flips_only == np.array_equal( + img.header.get_dim_info(), reoriented_a.header.get_dim_info() + ) # Both approaches produce equivalent images assert np.allclose(reoriented_a.affine, reoriented_b.affine) - assert np.array_equal(reoriented_a.get_data(), reoriented_b.get_data()) - assert np.array_equal(reoriented_a.header.get_dim_info(), - reoriented_b.header.get_dim_info()) + assert np.array_equal(reoriented_a.get_fdata(), reoriented_b.get_fdata()) + assert np.array_equal( + reoriented_a.header.get_dim_info(), reoriented_b.header.get_dim_info() + ) diff --git a/nipype/interfaces/tests/test_io.py b/nipype/interfaces/tests/test_io.py index c81e6f8e06..fc7f03db9f 100644 --- a/nipype/interfaces/tests/test_io.py +++ b/nipype/interfaces/tests/test_io.py @@ -1,16 +1,10 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, unicode_literals -from builtins import str, zip, range, open -from future import standard_library import os import copy import simplejson import glob -import shutil import os.path as op -import sys from subprocess import Popen import hashlib from collections import namedtuple @@ -41,6 +35,7 @@ # Check for paramiko try: import paramiko + no_paramiko = False # Check for localhost SSH Server @@ -50,14 +45,15 @@ client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - client.connect('127.0.0.1', username=os.getenv('USER'), sock=proxy, - timeout=10) + client.connect("127.0.0.1", username=os.getenv("USER"), sock=proxy, timeout=10) no_local_ssh = False - except (paramiko.SSHException, - paramiko.ssh_exception.NoValidConnectionsError, - OSError): + except ( + paramiko.SSHException, + paramiko.ssh_exception.NoValidConnectionsError, + OSError, + ): no_local_ssh = True except ImportError: @@ -65,11 +61,11 @@ no_local_ssh = True # Check for fakes3 -standard_library.install_aliases() from subprocess import check_call, CalledProcessError + try: - ret_code = check_call(['which', 'fakes3'], stdout=open(os.devnull, 'wb')) - fakes3 = (ret_code == 0) + ret_code = check_call(["which", "fakes3"], stdout=open(os.devnull, "wb")) + fakes3 = ret_code == 0 except CalledProcessError: fakes3 = False @@ -77,8 +73,9 @@ have_pybids = 
True try: import bids + filepath = os.path.realpath(os.path.dirname(bids.__file__)) - datadir = os.path.realpath(os.path.join(filepath, 'tests/data/')) + datadir = os.path.realpath(os.path.join(filepath, "tests/data/")) except ImportError: have_pybids = False @@ -87,7 +84,7 @@ def test_datagrabber(): dg = nio.DataGrabber() assert dg.inputs.template == Undefined assert dg.inputs.base_directory == Undefined - assert dg.inputs.template_args == {'outfiles': []} + assert dg.inputs.template_args == {"outfiles": []} @pytest.mark.skipif(noboto, reason="boto library is not available") @@ -95,97 +92,103 @@ def test_s3datagrabber(): dg = nio.S3DataGrabber() assert dg.inputs.template == Undefined assert dg.inputs.local_directory == Undefined - assert dg.inputs.template_args == {'outfiles': []} + assert dg.inputs.template_args == {"outfiles": []} templates1 = { "model": "interfaces/{package}/model.py", - "preprocess": "interfaces/{package}/pre*.py" + "preprocess": "interfaces/{package}/pre*.py", } templates2 = {"converter": "interfaces/dcm{to!s}nii.py"} templates3 = {"model": "interfaces/{package.name}/model.py"} -@pytest.mark.parametrize("SF_args, inputs_att, expected", [ - ({ - "templates": templates1 - }, { - "package": "fsl" - }, { - "infields": ["package"], - "outfields": ["model", "preprocess"], - "run_output": { - "model": - op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py"), - "preprocess": - op.join( - op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py") - }, - "node_output": ["model", "preprocess"] - }), - ({ - "templates": templates1, - "force_lists": True - }, { - "package": "spm" - }, { - "infields": ["package"], - "outfields": ["model", "preprocess"], - "run_output": { - "model": - [op.join(op.dirname(nipype.__file__), "interfaces/spm/model.py")], - "preprocess": [ - op.join( - op.dirname(nipype.__file__), - "interfaces/spm/preprocess.py") - ] - }, - "node_output": ["model", "preprocess"] - }), - ({ - "templates": templates1 - }, { - "package": "fsl", - "force_lists": ["model"] - }, { - "infields": ["package"], - "outfields": ["model", "preprocess"], - "run_output": { - "model": - [op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py")], - "preprocess": - op.join( - op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py") - }, - "node_output": ["model", "preprocess"] - }), - ({ - "templates": templates2 - }, { - "to": 2 - }, { - "infields": ["to"], - "outfields": ["converter"], - "run_output": { - "converter": - op.join(op.dirname(nipype.__file__), "interfaces/dcm2nii.py") - }, - "node_output": ["converter"] - }), - ({ - "templates": templates3 - }, { - "package": namedtuple("package", ["name"])("fsl") - }, { - "infields": ["package"], - "outfields": ["model"], - "run_output": { - "model": - op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py") - }, - "node_output": ["model"] - }), -]) +@pytest.mark.parametrize( + "SF_args, inputs_att, expected", + [ + ( + {"templates": templates1}, + {"package": "fsl"}, + { + "infields": ["package"], + "outfields": ["model", "preprocess"], + "run_output": { + "model": op.join( + op.dirname(nipype.__file__), "interfaces/fsl/model.py" + ), + "preprocess": op.join( + op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py" + ), + }, + "node_output": ["model", "preprocess"], + }, + ), + ( + {"templates": templates1, "force_lists": True}, + {"package": "spm"}, + { + "infields": ["package"], + "outfields": ["model", "preprocess"], + "run_output": { + "model": [ + op.join(op.dirname(nipype.__file__), 
"interfaces/spm/model.py") + ], + "preprocess": [ + op.join( + op.dirname(nipype.__file__), "interfaces/spm/preprocess.py" + ) + ], + }, + "node_output": ["model", "preprocess"], + }, + ), + ( + {"templates": templates1}, + {"package": "fsl", "force_lists": ["model"]}, + { + "infields": ["package"], + "outfields": ["model", "preprocess"], + "run_output": { + "model": [ + op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py") + ], + "preprocess": op.join( + op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py" + ), + }, + "node_output": ["model", "preprocess"], + }, + ), + ( + {"templates": templates2}, + {"to": 2}, + { + "infields": ["to"], + "outfields": ["converter"], + "run_output": { + "converter": op.join( + op.dirname(nipype.__file__), "interfaces/dcm2nii.py" + ) + }, + "node_output": ["converter"], + }, + ), + ( + {"templates": templates3}, + {"package": namedtuple("package", ["name"])("fsl")}, + { + "infields": ["package"], + "outfields": ["model"], + "run_output": { + "model": op.join( + op.dirname(nipype.__file__), "interfaces/fsl/model.py" + ) + }, + "node_output": ["model"], + }, + ), + ], +) def test_selectfiles(tmpdir, SF_args, inputs_att, expected): tmpdir.chdir() base_dir = op.dirname(nipype.__file__) @@ -207,11 +210,10 @@ def test_selectfiles_valueerror(): base_dir = op.dirname(nipype.__file__) templates = { "model": "interfaces/{package}/model.py", - "preprocess": "interfaces/{package}/pre*.py" + "preprocess": "interfaces/{package}/pre*.py", } force_lists = ["model", "preprocess", "registration"] - sf = nio.SelectFiles( - templates, base_directory=base_dir, force_lists=force_lists) + sf = nio.SelectFiles(templates, base_directory=base_dir, force_lists=force_lists) with pytest.raises(ValueError): sf.run() @@ -219,67 +221,82 @@ def test_selectfiles_valueerror(): @pytest.mark.skipif(noboto, reason="boto library is not available") def test_s3datagrabber_communication(tmpdir): dg = nio.S3DataGrabber( - infields=['subj_id', 'run_num'], outfields=['func', 'struct']) + infields=["subj_id", "run_num"], outfields=["func", "struct"] + ) dg.inputs.anon = True - dg.inputs.bucket = 'openfmri' - dg.inputs.bucket_path = 'ds001/' + dg.inputs.bucket = "openfmri" + dg.inputs.bucket_path = "ds001/" dg.inputs.local_directory = tmpdir.strpath dg.inputs.sort_filelist = True - dg.inputs.template = '*' + dg.inputs.template = "*" dg.inputs.field_template = dict( - func='%s/BOLD/task001_%s/bold.nii.gz', - struct='%s/anatomy/highres001_brain.nii.gz') - dg.inputs.subj_id = ['sub001', 'sub002'] - dg.inputs.run_num = ['run001', 'run003'] - dg.inputs.template_args = dict( - func=[['subj_id', 'run_num']], struct=[['subj_id']]) + func="%s/BOLD/task001_%s/bold.nii.gz", + struct="%s/anatomy/highres001_brain.nii.gz", + ) + dg.inputs.subj_id = ["sub001", "sub002"] + dg.inputs.run_num = ["run001", "run003"] + dg.inputs.template_args = dict(func=[["subj_id", "run_num"]], struct=[["subj_id"]]) res = dg.run() func_outfiles = res.outputs.func struct_outfiles = res.outputs.struct # check for all files - assert os.path.join( - dg.inputs.local_directory, - '/sub001/BOLD/task001_run001/bold.nii.gz') in func_outfiles[0] + assert ( + os.path.join( + dg.inputs.local_directory, "/sub001/BOLD/task001_run001/bold.nii.gz" + ) + in func_outfiles[0] + ) assert os.path.exists(func_outfiles[0]) - assert os.path.join( - dg.inputs.local_directory, - '/sub001/anatomy/highres001_brain.nii.gz') in struct_outfiles[0] + assert ( + os.path.join( + dg.inputs.local_directory, "/sub001/anatomy/highres001_brain.nii.gz" + ) + 
in struct_outfiles[0] + ) assert os.path.exists(struct_outfiles[0]) - assert os.path.join( - dg.inputs.local_directory, - '/sub002/BOLD/task001_run003/bold.nii.gz') in func_outfiles[1] + assert ( + os.path.join( + dg.inputs.local_directory, "/sub002/BOLD/task001_run003/bold.nii.gz" + ) + in func_outfiles[1] + ) assert os.path.exists(func_outfiles[1]) - assert os.path.join( - dg.inputs.local_directory, - '/sub002/anatomy/highres001_brain.nii.gz') in struct_outfiles[1] + assert ( + os.path.join( + dg.inputs.local_directory, "/sub002/anatomy/highres001_brain.nii.gz" + ) + in struct_outfiles[1] + ) assert os.path.exists(struct_outfiles[1]) def test_datagrabber_order(tmpdir): for file_name in [ - 'sub002_L1_R1.q', 'sub002_L1_R2.q', 'sub002_L2_R1.q', - 'sub002_L2_R2.qd', 'sub002_L3_R10.q', 'sub002_L3_R2.q' + "sub002_L1_R1.q", + "sub002_L1_R2.q", + "sub002_L2_R1.q", + "sub002_L2_R2.qd", + "sub002_L3_R10.q", + "sub002_L3_R2.q", ]: - tmpdir.join(file_name).open('a').close() + tmpdir.join(file_name).open("a").close() - dg = nio.DataGrabber(infields=['sid']) + dg = nio.DataGrabber(infields=["sid"]) dg.inputs.base_directory = tmpdir.strpath - dg.inputs.template = '%s_L%d_R*.q*' - dg.inputs.template_args = { - 'outfiles': [['sid', 1], ['sid', 2], ['sid', 3]] - } - dg.inputs.sid = 'sub002' + dg.inputs.template = "%s_L%d_R*.q*" + dg.inputs.template_args = {"outfiles": [["sid", 1], ["sid", 2], ["sid", 3]]} + dg.inputs.sid = "sub002" dg.inputs.sort_filelist = True res = dg.run() outfiles = res.outputs.outfiles - assert 'sub002_L1_R1' in outfiles[0][0] - assert 'sub002_L1_R2' in outfiles[0][1] - assert 'sub002_L2_R1' in outfiles[1][0] - assert 'sub002_L2_R2' in outfiles[1][1] - assert 'sub002_L3_R2' in outfiles[2][0] - assert 'sub002_L3_R10' in outfiles[2][1] + assert "sub002_L1_R1" in outfiles[0][0] + assert "sub002_L1_R2" in outfiles[0][1] + assert "sub002_L2_R1" in outfiles[1][0] + assert "sub002_L2_R2" in outfiles[1][1] + assert "sub002_L3_R2" in outfiles[2][0] + assert "sub002_L3_R10" in outfiles[2][1] def test_datasink(): @@ -289,26 +306,25 @@ def test_datasink(): assert ds.inputs.strip_dir == Undefined assert ds.inputs._outputs == {} - ds = nio.DataSink(base_directory='foo') - assert ds.inputs.base_directory == 'foo' + ds = nio.DataSink(base_directory="foo") + assert ds.inputs.base_directory == "foo" - ds = nio.DataSink(infields=['test']) - assert 'test' in ds.inputs.copyable_trait_names() + ds = nio.DataSink(infields=["test"]) + assert "test" in ds.inputs.copyable_trait_names() # Make dummy input file @pytest.fixture(scope="module") def dummy_input(request, tmpdir_factory): - ''' + """ Function to create a dummy file - ''' + """ # Init variables - input_path = tmpdir_factory.mktemp('input_data').join( - 'datasink_test_s3.txt') + input_path = tmpdir_factory.mktemp("input_data").join("datasink_test_s3.txt") # Create input file - input_path.write_binary(b'ABCD1234') + input_path.write_binary(b"ABCD1234") # Return path return str(input_path) @@ -316,35 +332,37 @@ def dummy_input(request, tmpdir_factory): # Test datasink writes to s3 properly @pytest.mark.skipif( - noboto3 or not fakes3, reason="boto3 or fakes3 library is not available") + noboto3 or not fakes3, reason="boto3 or fakes3 library is not available" +) def test_datasink_to_s3(dummy_input, tmpdir): - ''' + """ This function tests to see if the S3 functionality of a DataSink works properly - ''' + """ # Init variables ds = nio.DataSink() - bucket_name = 'test' - container = 'outputs' - attr_folder = 'text_file' - output_dir = 's3://' + 
bucket_name + bucket_name = "test" + container = "outputs" + attr_folder = "text_file" + output_dir = "s3://" + bucket_name # Local temporary filepaths for testing fakes3_dir = tmpdir.strpath input_path = dummy_input # Start up fake-S3 server proc = Popen( - ['fakes3', '-r', fakes3_dir, '-p', '4567'], - stdout=open(os.devnull, 'wb')) + ["fakes3", "-r", fakes3_dir, "-p", "4567"], stdout=open(os.devnull, "wb") + ) # Init boto3 s3 resource to talk with fakes3 resource = boto3.resource( - aws_access_key_id='mykey', - aws_secret_access_key='mysecret', - service_name='s3', - endpoint_url='http://127.0.0.1:4567', - use_ssl=False) - resource.meta.client.meta.events.unregister('before-sign.s3', fix_s3_host) + aws_access_key_id="mykey", + aws_secret_access_key="mysecret", + service_name="s3", + endpoint_url="http://127.0.0.1:4567", + use_ssl=False, + ) + resource.meta.client.meta.events.unregister("before-sign.s3", fix_s3_host) # Create bucket bucket = resource.create_bucket(Bucket=bucket_name) @@ -359,10 +377,10 @@ def test_datasink_to_s3(dummy_input, tmpdir): ds.run() # Get MD5sums and compare - key = '/'.join([container, attr_folder, os.path.basename(input_path)]) + key = "/".join([container, attr_folder, os.path.basename(input_path)]) obj = bucket.Object(key=key) - dst_md5 = obj.e_tag.replace('"', '') - src_md5 = hashlib.md5(open(input_path, 'rb').read()).hexdigest() + dst_md5 = obj.e_tag.replace('"', "") + src_md5 = hashlib.md5(open(input_path, "rb").read()).hexdigest() # Kill fakes3 proc.kill() @@ -373,21 +391,22 @@ # Test AWS creds read from env vars @pytest.mark.skipif( - noboto3 or not fakes3, reason="boto3 or fakes3 library is not available") + noboto3 or not fakes3, reason="boto3 or fakes3 library is not available" +) def test_aws_keys_from_env(): - ''' + """ Function to ensure the DataSink can successfully read in AWS credentials from the environment variables - ''' + """ # Init variables ds = nio.DataSink() - aws_access_key_id = 'ABCDACCESS' - aws_secret_access_key = 'DEFGSECRET' + aws_access_key_id = "ABCDACCESS" + aws_secret_access_key = "DEFGSECRET" # Set env vars - os.environ['AWS_ACCESS_KEY_ID'] = aws_access_key_id - os.environ['AWS_SECRET_ACCESS_KEY'] = aws_secret_access_key + os.environ["AWS_ACCESS_KEY_ID"] = aws_access_key_id + os.environ["AWS_SECRET_ACCESS_KEY"] = aws_secret_access_key # Call function to return creds access_key_test, secret_key_test = ds._return_aws_keys() @@ -399,15 +418,15 @@ # Test the local copy attribute def test_datasink_localcopy(dummy_input, tmpdir): - ''' + """ Function to validate DataSink will make local copy via local_copy attribute - ''' + """ # Init variables local_dir = tmpdir.strpath - container = 'outputs' - attr_folder = 'text_file' + container = "outputs" + attr_folder = "text_file" # Make dummy input file and datasink input_path = dummy_input @@ -421,44 +440,50 @@ setattr(ds.inputs, attr_folder, input_path) # Expected local copy path - local_copy = os.path.join(local_dir, container, attr_folder, - os.path.basename(input_path)) + local_copy = os.path.join( + local_dir, container, attr_folder, os.path.basename(input_path) + ) # Run the datasink ds.run() # Check md5sums of both - src_md5 = hashlib.md5(open(input_path, 'rb').read()).hexdigest() - dst_md5 = hashlib.md5(open(local_copy, 'rb').read()).hexdigest() + src_md5 = hashlib.md5(open(input_path, "rb").read()).hexdigest() + dst_md5 = hashlib.md5(open(local_copy, 
"rb").read()).hexdigest() # Perform test assert src_md5 == dst_md5 def test_datasink_substitutions(tmpdir): - indir = tmpdir.mkdir('-Tmp-nipype_ds_subs_in') - outdir = tmpdir.mkdir('-Tmp-nipype_ds_subs_out') + indir = tmpdir.mkdir("-Tmp-nipype_ds_subs_in") + outdir = tmpdir.mkdir("-Tmp-nipype_ds_subs_out") files = [] - for n in ['ababab.n', 'xabababyz.n']: + for n in ["ababab.n", "xabababyz.n"]: f = str(indir.join(n)) files.append(f) - open(f, 'w') + open(f, "w") ds = nio.DataSink( - parametrization=False, + parameterization=False, base_directory=str(outdir), - substitutions=[('ababab', 'ABABAB')], + substitutions=[("ababab", "ABABAB")], # end anchoring ($) is used to ensure the operation applies to the filename # rather than matching any temporary directory names # Patterns should be more comprehensible in real-world usage # since actual paths would be more sensible - regexp_substitutions=[(r'xABABAB(\w*)\.n$', r'a-\1-b.n'), - ('(.*%s)[-a]([^%s]*)$' % ((os.path.sep, ) * 2), - r'\1!\2')]) - setattr(ds.inputs, '@outdir', files) + regexp_substitutions=[ + (r"xABABAB(\w*)\.n$", r"a-\1-b.n"), + ("(.*%s)[-a]([^%s]*)$" % ((os.path.sep,) * 2), r"\1!\2"), + ], + ) + setattr(ds.inputs, "@outdir", files) ds.run() - assert sorted([os.path.basename(x) for - x in glob.glob(os.path.join(str(outdir), '*'))]) \ - == ['!-yz-b.n', 'ABABAB.n'] # so the 2nd file hit both regexp patterns + assert sorted( + [os.path.basename(x) for x in glob.glob(os.path.join(str(outdir), "*"))] + ) == [ + "!-yz-b.n", + "ABABAB.n", + ] # so the 2nd file hit both regexp patterns @pytest.fixture() @@ -467,8 +492,8 @@ def _temp_analyze_files(tmpdir): img_dir = tmpdir.mkdir("img") orig_img = img_dir.join("orig.img") orig_hdr = img_dir.join("orig.hdr") - orig_img.open('w') - orig_hdr.open('w') + orig_img.open("w") + orig_hdr.open("w") return orig_img.strpath, orig_hdr.strpath @@ -477,40 +502,40 @@ def test_datasink_copydir_1(_temp_analyze_files, tmpdir): outdir = tmpdir pth, fname = os.path.split(orig_img) ds = nio.DataSink( - base_directory=outdir.mkdir("basedir").strpath, parameterization=False) - setattr(ds.inputs, '@outdir', pth) + base_directory=outdir.mkdir("basedir").strpath, parameterization=False + ) + setattr(ds.inputs, "@outdir", pth) ds.run() sep = os.path.sep - assert tmpdir.join('basedir', pth.split(sep)[-1], fname).check() + assert tmpdir.join("basedir", pth.split(sep)[-1], fname).check() def test_datasink_copydir_2(_temp_analyze_files, tmpdir): orig_img, orig_hdr = _temp_analyze_files pth, fname = os.path.split(orig_img) ds = nio.DataSink( - base_directory=tmpdir.mkdir("basedir").strpath, parameterization=False) + base_directory=tmpdir.mkdir("basedir").strpath, parameterization=False + ) ds.inputs.remove_dest_dir = True - setattr(ds.inputs, 'outdir', pth) + ds.inputs.outdir = pth ds.run() sep = os.path.sep - assert not tmpdir.join('basedir', pth.split(sep)[-1], fname).check() - assert tmpdir.join('basedir', 'outdir', pth.split(sep)[-1], fname).check() + assert not tmpdir.join("basedir", pth.split(sep)[-1], fname).check() + assert tmpdir.join("basedir", "outdir", pth.split(sep)[-1], fname).check() def test_datafinder_depth(tmpdir): outdir = tmpdir.strpath - os.makedirs(os.path.join(outdir, '0', '1', '2', '3')) + os.makedirs(os.path.join(outdir, "0", "1", "2", "3")) df = nio.DataFinder() - df.inputs.root_paths = os.path.join(outdir, '0') + df.inputs.root_paths = os.path.join(outdir, "0") for min_depth in range(4): for max_depth in range(min_depth, 4): df.inputs.min_depth = min_depth df.inputs.max_depth = max_depth 
result = df.run() - expected = [ - '{}'.format(x) for x in range(min_depth, max_depth + 1) - ] + expected = [f"{x}" for x in range(min_depth, max_depth + 1)] for path, exp_fname in zip(result.outputs.out_paths, expected): _, fname = os.path.split(path) assert fname == exp_fname @@ -519,12 +544,12 @@ def test_datafinder_depth(tmpdir): def test_datafinder_unpack(tmpdir): outdir = tmpdir.strpath single_res = os.path.join(outdir, "findme.txt") - open(single_res, 'a').close() - open(os.path.join(outdir, "dontfindme"), 'a').close() + open(single_res, "a").close() + open(os.path.join(outdir, "dontfindme"), "a").close() df = nio.DataFinder() df.inputs.root_paths = outdir - df.inputs.match_regex = '.+/(?P<basename>.+)\.txt' + df.inputs.match_regex = r".+/(?P<basename>.+)\.txt" df.inputs.unpack_single = True result = df.run() print(result.outputs.out_paths) @@ -533,124 +558,115 @@ def test_datafinder_unpack(tmpdir): def test_freesurfersource(): fss = nio.FreeSurferSource() - assert fss.inputs.hemi == 'both' + assert fss.inputs.hemi == "both" assert fss.inputs.subject_id == Undefined assert fss.inputs.subjects_dir == Undefined def test_freesurfersource_incorrectdir(): fss = nio.FreeSurferSource() - with pytest.raises(TraitError) as err: - fss.inputs.subjects_dir = 'path/to/no/existing/directory' + with pytest.raises(TraitError): + fss.inputs.subjects_dir = "path/to/no/existing/directory" def test_jsonsink_input(): - ds = nio.JSONFileSink() assert ds.inputs._outputs == {} - ds = nio.JSONFileSink(in_dict={'foo': 'var'}) - assert ds.inputs.in_dict == {'foo': 'var'} + ds = nio.JSONFileSink(in_dict={"foo": "var"}) + assert ds.inputs.in_dict == {"foo": "var"} - ds = nio.JSONFileSink(infields=['test']) - assert 'test' in ds.inputs.copyable_trait_names() + ds = nio.JSONFileSink(infields=["test"]) + assert "test" in ds.inputs.copyable_trait_names() -@pytest.mark.parametrize("inputs_attributes", [{ - 'new_entry': 'someValue' -}, { - 'new_entry': 'someValue', - 'test': 'testInfields' -}]) +@pytest.mark.parametrize( + "inputs_attributes", + [{"new_entry": "someValue"}, {"new_entry": "someValue", "test": "testInfields"}], +) def test_jsonsink(tmpdir, inputs_attributes): tmpdir.chdir() - js = nio.JSONFileSink(infields=['test'], in_dict={'foo': 'var'}) - setattr(js.inputs, 'contrasts.alt', 'someNestedValue') + js = nio.JSONFileSink(infields=["test"], in_dict={"foo": "var"}) + setattr(js.inputs, "contrasts.alt", "someNestedValue") expected_data = {"contrasts": {"alt": "someNestedValue"}, "foo": "var"} for key, val in inputs_attributes.items(): setattr(js.inputs, key, val) expected_data[key] = val res = js.run() - with open(res.outputs.out_file, 'r') as f: + with open(res.outputs.out_file) as f: data = simplejson.load(f) assert data == expected_data # There are two reasons these tests will be skipped: -@pytest.mark.skipif(not have_pybids, - reason="Pybids is not installed") -@pytest.mark.skipif(sys.version_info < (3, 0), - reason="Pybids no longer supports Python 2") -@pytest.mark.skipif(not dist_is_editable('pybids'), - reason="Pybids is not installed in editable mode") +@pytest.mark.skipif(not have_pybids, reason="Pybids is not installed") +@pytest.mark.skipif( + not dist_is_editable("pybids"), reason="Pybids is not installed in editable mode" +) def test_bids_grabber(tmpdir): tmpdir.chdir() bg = nio.BIDSDataGrabber() - bg.inputs.base_dir = os.path.join(datadir, 'ds005') - bg.inputs.subject = '01' + bg.inputs.base_dir = os.path.join(datadir, "ds005") + bg.inputs.subject = "01" results = bg.run() - assert 'sub-01_T1w.nii.gz' in 
map(os.path.basename, results.outputs.anat) - assert 'sub-01_task-mixedgamblestask_run-01_bold.nii.gz' in \ - map(os.path.basename, results.outputs.func) + assert "sub-01_T1w.nii.gz" in map(os.path.basename, results.outputs.T1w) + assert "sub-01_task-mixedgamblestask_run-01_bold.nii.gz" in map( + os.path.basename, results.outputs.bold + ) -@pytest.mark.skipif(not have_pybids, - reason="Pybids is not installed") -@pytest.mark.skipif(sys.version_info < (3, 0), - reason="Pybids no longer supports Python 2") -@pytest.mark.skipif(not dist_is_editable('pybids'), - reason="Pybids is not installed in editable mode") +@pytest.mark.skipif(not have_pybids, reason="Pybids is not installed") +@pytest.mark.skipif( + not dist_is_editable("pybids"), reason="Pybids is not installed in editable mode" +) def test_bids_fields(tmpdir): tmpdir.chdir() - bg = nio.BIDSDataGrabber(infields = ['subject'], outfields = ['dwi']) - bg.inputs.base_dir = os.path.join(datadir, 'ds005') - bg.inputs.subject = '01' - bg.inputs.output_query['dwi'] = dict(modality='dwi') + bg = nio.BIDSDataGrabber(infields=["subject"], outfields=["dwi"]) + bg.inputs.base_dir = os.path.join(datadir, "ds005") + bg.inputs.subject = "01" + bg.inputs.output_query["dwi"] = dict(datatype="dwi") results = bg.run() - assert 'sub-01_dwi.nii.gz' in map(os.path.basename, results.outputs.dwi) + assert "sub-01_dwi.nii.gz" in map(os.path.basename, results.outputs.dwi) -@pytest.mark.skipif(not have_pybids, - reason="Pybids is not installed") -@pytest.mark.skipif(sys.version_info < (3, 0), - reason="Pybids no longer supports Python 2") -@pytest.mark.skipif(not dist_is_editable('pybids'), - reason="Pybids is not installed in editable mode") +@pytest.mark.skipif(not have_pybids, reason="Pybids is not installed") +@pytest.mark.skipif( + not dist_is_editable("pybids"), reason="Pybids is not installed in editable mode" +) def test_bids_infields_outfields(tmpdir): tmpdir.chdir() - infields = ['infield1', 'infield2'] - outfields = ['outfield1', 'outfield2'] + infields = ["infield1", "infield2"] + outfields = ["outfield1", "outfield2"] bg = nio.BIDSDataGrabber(infields=infields) for outfield in outfields: - bg.inputs.output_query[outfield] = {'key': 'value'} + bg.inputs.output_query[outfield] = {"key": "value"} for infield in infields: - assert(infield in bg.inputs.traits()) - assert(not(isdefined(bg.inputs.get()[infield]))) + assert infield in bg.inputs.traits() + assert not (isdefined(bg.inputs.get()[infield])) for outfield in outfields: - assert(outfield in bg._outputs().traits()) + assert outfield in bg._outputs().traits() - # now try without defining outfields, we should get anat and func for free + # now try without defining outfields bg = nio.BIDSDataGrabber() - for outfield in ['anat', 'func']: + for outfield in ["T1w", "bold"]: assert outfield in bg._outputs().traits() @pytest.mark.skipif(no_paramiko, reason="paramiko library is not available") @pytest.mark.skipif(no_local_ssh, reason="SSH Server is not running") def test_SSHDataGrabber(tmpdir): - """Test SSHDataGrabber by connecting to localhost and collecting some data. 
- """ + """Test SSHDataGrabber by connecting to localhost and collecting some data.""" old_cwd = tmpdir.chdir() - source_dir = tmpdir.mkdir('source') - source_hdr = source_dir.join('somedata.hdr') - source_dat = source_dir.join('somedata.img') - source_hdr.ensure() # create - source_dat.ensure() # create + source_dir = tmpdir.mkdir("source") + source_hdr = source_dir.join("somedata.hdr") + source_dat = source_dir.join("somedata.img") + source_hdr.ensure() # create + source_dat.ensure() # create # ssh client that connects to localhost, current user, regardless of # ~/.ssh/config @@ -659,21 +675,20 @@ def _mock_get_ssh_client(self): client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - client.connect('127.0.0.1', username=os.getenv('USER'), sock=proxy, - timeout=10) + client.connect("127.0.0.1", username=os.getenv("USER"), sock=proxy, timeout=10) return client + MockSSHDataGrabber = copy.copy(nio.SSHDataGrabber) MockSSHDataGrabber._get_ssh_client = _mock_get_ssh_client # grabber to get files from source_dir matching test.hdr - ssh_grabber = MockSSHDataGrabber(infields=['test'], - outfields=['test_file']) + ssh_grabber = MockSSHDataGrabber(infields=["test"], outfields=["test_file"]) ssh_grabber.inputs.base_directory = str(source_dir) - ssh_grabber.inputs.hostname = '127.0.0.1' - ssh_grabber.inputs.field_template = dict(test_file='%s.hdr') - ssh_grabber.inputs.template = '' - ssh_grabber.inputs.template_args = dict(test_file=[['test']]) - ssh_grabber.inputs.test = 'somedata' + ssh_grabber.inputs.hostname = "127.0.0.1" + ssh_grabber.inputs.field_template = dict(test_file="%s.hdr") + ssh_grabber.inputs.template = "" + ssh_grabber.inputs.template_args = dict(test_file=[["test"]]) + ssh_grabber.inputs.test = "somedata" ssh_grabber.inputs.sort_filelist = True runtime = ssh_grabber.run() @@ -681,8 +696,33 @@ def _mock_get_ssh_client(self): # did we successfully get the header? assert runtime.outputs.test_file == str(tmpdir.join(source_hdr.basename)) # did we successfully get the data? - assert (tmpdir.join(source_hdr.basename) # header file - .new(ext='.img') # data file - .check(file=True, exists=True)) # exists? + assert ( + tmpdir.join(source_hdr.basename) # header file + .new(ext=".img") # data file + .check(file=True, exists=True) + ) # exists? 
old_cwd.chdir() + + +def test_ExportFile(tmp_path): + test_in = tmp_path / "in.txt" + test_in.write_text("test string", encoding='utf-8') + i = nio.ExportFile() + i.inputs.in_file = str(test_in) + i.inputs.out_file = str(tmp_path / "out.tsv") + i.inputs.check_extension = True + with pytest.raises(RuntimeError): + i.run() + i.inputs.check_extension = False + i.run() + assert (tmp_path / "out.tsv").read_text() == "test string" + i.inputs.out_file = str(tmp_path / "out.txt") + i.inputs.check_extension = True + i.run() + assert (tmp_path / "out.txt").read_text() == "test string" + with pytest.raises(FileExistsError): + i.run() + i.inputs.clobber = True + i.run() + assert (tmp_path / "out.txt").read_text() == "test string" diff --git a/nipype/interfaces/tests/test_matlab.py b/nipype/interfaces/tests/test_matlab.py index 2576a379e7..d028dd3059 100644 --- a/nipype/interfaces/tests/test_matlab.py +++ b/nipype/interfaces/tests/test_matlab.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -26,24 +25,24 @@ def clean_workspace_and_get_default_script_file(): def test_cmdline(): default_script_file = clean_workspace_and_get_default_script_file() - mi = mlab.MatlabCommand( - script='whos', script_file='testscript', mfile=False) - - assert mi.cmdline == \ - matlab_cmd + (' -nodesktop -nosplash -singleCompThread -r "fprintf(1,' - '\'Executing code at %s:\\n\',datestr(now));ver,try,' - 'whos,catch ME,fprintf(2,\'MATLAB code threw an ' - 'exception:\\n\');fprintf(2,\'%s\\n\',ME.message);if ' - 'length(ME.stack) ~= 0, fprintf(2,\'File:%s\\nName:%s\\n' - 'Line:%d\\n\',ME.stack.file,ME.stack.name,' - 'ME.stack.line);, end;end;;exit"') - - assert mi.inputs.script == 'whos' - assert mi.inputs.script_file == 'testscript' + mi = mlab.MatlabCommand(script="whos", script_file="testscript", mfile=False) + + assert mi.cmdline == matlab_cmd + ( + ' -nodesktop -nosplash -singleCompThread -r "fprintf(1,' + "'Executing code at %s:\\n',datestr(now));ver,try," + "whos,catch ME,fprintf(2,'MATLAB code threw an " + "exception:\\n');fprintf(2,'%s\\n',ME.message);if " + "length(ME.stack) ~= 0, fprintf(2,'File:%s\\nName:%s\\n" + "Line:%d\\n',ME.stack.file,ME.stack.name," + 'ME.stack.line);, end;end;;exit"' + ) + + assert mi.inputs.script == "whos" + assert mi.inputs.script_file == "testscript" + assert not os.path.exists(mi.inputs.script_file), "scriptfile should not exist" assert not os.path.exists( - mi.inputs.script_file), 'scriptfile should not exist' - assert not os.path.exists( - default_script_file), 'default scriptfile should not exist.' + default_script_file + ), "default scriptfile should not exist." 
@pytest.mark.skipif(no_matlab, reason="matlab is not available") @@ -51,8 +50,13 @@ def test_mlab_inputspec(): default_script_file = clean_workspace_and_get_default_script_file() spec = mlab.MatlabInputSpec() for k in [ - 'paths', 'script', 'nosplash', 'mfile', 'logfile', 'script_file', - 'nodesktop' + "paths", + "script", + "nosplash", + "mfile", + "logfile", + "script_file", + "nodesktop", ]: assert k in spec.copyable_trait_names() assert spec.nodesktop @@ -65,54 +69,49 @@ def test_mlab_inputspec(): def test_mlab_init(): default_script_file = clean_workspace_and_get_default_script_file() - assert mlab.MatlabCommand._cmd == 'matlab' + assert mlab.MatlabCommand._cmd == "matlab" assert mlab.MatlabCommand.input_spec == mlab.MatlabInputSpec assert mlab.MatlabCommand().cmd == matlab_cmd - mc = mlab.MatlabCommand(matlab_cmd='foo_m') - assert mc.cmd == 'foo_m' + mc = mlab.MatlabCommand(matlab_cmd="foo_m") + assert mc.cmd == "foo_m" @pytest.mark.skipif(no_matlab, reason="matlab is not available") def test_run_interface(tmpdir): default_script_file = clean_workspace_and_get_default_script_file() - mc = mlab.MatlabCommand(matlab_cmd='foo_m') - assert not os.path.exists( - default_script_file), 'scriptfile should not exist 1.' + mc = mlab.MatlabCommand(matlab_cmd="foo_m") + assert not os.path.exists(default_script_file), "scriptfile should not exist 1." with pytest.raises(ValueError): mc.run() # script is mandatory - assert not os.path.exists( - default_script_file), 'scriptfile should not exist 2.' + assert not os.path.exists(default_script_file), "scriptfile should not exist 2." if os.path.exists(default_script_file): # cleanup os.remove(default_script_file) - mc.inputs.script = 'a=1;' - assert not os.path.exists( - default_script_file), 'scriptfile should not exist 3.' + mc.inputs.script = "a=1;" + assert not os.path.exists(default_script_file), "scriptfile should not exist 3." with pytest.raises(IOError): mc.run() # foo_m is not an executable - assert os.path.exists(default_script_file), 'scriptfile should exist 3.' + assert os.path.exists(default_script_file), "scriptfile should exist 3." if os.path.exists(default_script_file): # cleanup os.remove(default_script_file) cwd = tmpdir.chdir() # bypasses ubuntu dash issue - mc = mlab.MatlabCommand(script='foo;', paths=[tmpdir.strpath], mfile=True) - assert not os.path.exists( - default_script_file), 'scriptfile should not exist 4.' + mc = mlab.MatlabCommand(script="foo;", paths=[tmpdir.strpath], mfile=True) + assert not os.path.exists(default_script_file), "scriptfile should not exist 4." with pytest.raises(RuntimeError): mc.run() - assert os.path.exists(default_script_file), 'scriptfile should exist 4.' + assert os.path.exists(default_script_file), "scriptfile should exist 4." if os.path.exists(default_script_file): # cleanup os.remove(default_script_file) # bypasses ubuntu dash issue - res = mlab.MatlabCommand( - script='a=1;', paths=[tmpdir.strpath], mfile=True).run() + res = mlab.MatlabCommand(script="a=1;", paths=[tmpdir.strpath], mfile=True).run() assert res.runtime.returncode == 0 - assert os.path.exists(default_script_file), 'scriptfile should exist 5.' + assert os.path.exists(default_script_file), "scriptfile should exist 5." cwd.chdir() @@ -121,8 +120,7 @@ def test_set_matlabcmd(): default_script_file = clean_workspace_and_get_default_script_file() mi = mlab.MatlabCommand() - mi.set_default_matlab_cmd('foo') - assert not os.path.exists( - default_script_file), 'scriptfile should not exist.' 
- assert mi._default_matlab_cmd == 'foo' + mi.set_default_matlab_cmd("foo") + assert not os.path.exists(default_script_file), "scriptfile should not exist." + assert mi._default_matlab_cmd == "foo" mi.set_default_matlab_cmd(matlab_cmd) diff --git a/nipype/interfaces/tests/test_nilearn.py b/nipype/interfaces/tests/test_nilearn.py index 79432bc180..1fed076b47 100644 --- a/nipype/interfaces/tests/test_nilearn.py +++ b/nipype/interfaces/tests/test_nilearn.py @@ -14,38 +14,38 @@ no_nilearn = True try: - __import__('nilearn') + __import__("nilearn") no_nilearn = False except ImportError: pass @pytest.mark.skipif(no_nilearn, reason="the nilearn library is not available") -class TestSignalExtraction(): - +class TestSignalExtraction: filenames = { - 'in_file': 'fmri.nii', - 'label_files': 'labels.nii', - '4d_label_file': '4dlabels.nii', - 'out_file': 'signals.tsv' + "in_file": "fmri.nii", + "label_files": "labels.nii", + "4d_label_file": "4dlabels.nii", + "out_file": "signals.tsv", } - labels = ['CSF', 'GrayMatter', 'WhiteMatter'] - global_labels = ['GlobalSignal'] + labels + labels = ["CSF", "GrayMatter", "WhiteMatter"] + global_labels = ["GlobalSignal"] + labels - @pytest.fixture(autouse=True, scope='class') + @pytest.fixture(autouse=True, scope="class") def setup_class(self, tmpdir_factory): tempdir = tmpdir_factory.mktemp("test") self.orig_dir = tempdir.chdir() - utils.save_toy_nii(self.fake_fmri_data, self.filenames['in_file']) - utils.save_toy_nii(self.fake_label_data, self.filenames['label_files']) + utils.save_toy_nii(self.fake_fmri_data, self.filenames["in_file"]) + utils.save_toy_nii(self.fake_label_data, self.filenames["label_files"]) def test_signal_extract_no_shared(self): # run iface.SignalExtraction( - in_file=self.filenames['in_file'], - label_files=self.filenames['label_files'], + in_file=self.filenames["in_file"], + label_files=self.filenames["label_files"], class_labels=self.labels, - incl_shared_variance=False).run() + incl_shared_variance=False, + ).run() # assert self.assert_expected_output(self.labels, self.base_wanted) @@ -53,44 +53,44 @@ def test_signal_extr_bad_label_list(self): # run with pytest.raises(ValueError): iface.SignalExtraction( - in_file=self.filenames['in_file'], - label_files=self.filenames['label_files'], - class_labels=['bad'], - incl_shared_variance=False).run() + in_file=self.filenames["in_file"], + label_files=self.filenames["label_files"], + class_labels=["bad"], + incl_shared_variance=False, + ).run() def test_signal_extr_equiv_4d_no_shared(self): self._test_4d_label( - self.base_wanted, - self.fake_equiv_4d_label_data, - incl_shared_variance=False) + self.base_wanted, self.fake_equiv_4d_label_data, incl_shared_variance=False + ) def test_signal_extr_4d_no_shared(self): # set up & run & assert self._test_4d_label( - self.fourd_wanted, - self.fake_4d_label_data, - incl_shared_variance=False) + self.fourd_wanted, self.fake_4d_label_data, incl_shared_variance=False + ) def test_signal_extr_global_no_shared(self): # set up - wanted_global = [[-4. / 6], [-1. / 6], [3. / 6], [-1. / 6], [-7. 
/ 6]] + wanted_global = [[-4.0 / 6], [-1.0 / 6], [3.0 / 6], [-1.0 / 6], [-7.0 / 6]] for i, vals in enumerate(self.base_wanted): wanted_global[i].extend(vals) # run iface.SignalExtraction( - in_file=self.filenames['in_file'], - label_files=self.filenames['label_files'], + in_file=self.filenames["in_file"], + label_files=self.filenames["label_files"], class_labels=self.labels, include_global=True, - incl_shared_variance=False).run() + incl_shared_variance=False, + ).run() # assert self.assert_expected_output(self.global_labels, wanted_global) def test_signal_extr_4d_global_no_shared(self): # set up - wanted_global = [[3. / 8], [-3. / 8], [1. / 8], [-7. / 8], [-9. / 8]] + wanted_global = [[3.0 / 8], [-3.0 / 8], [1.0 / 8], [-7.0 / 8], [-9.0 / 8]] for i, vals in enumerate(self.fourd_wanted): wanted_global[i].extend(vals) @@ -99,7 +99,8 @@ def test_signal_extr_4d_global_no_shared(self): wanted_global, self.fake_4d_label_data, include_global=True, - incl_shared_variance=False) + incl_shared_variance=False, + ) def test_signal_extr_shared(self): # set up @@ -109,45 +110,45 @@ def test_signal_extr_shared(self): wanted_row = [] for reg in range(self.fake_4d_label_data.shape[3]): region = self.fake_4d_label_data[:, :, :, reg].flatten() - wanted_row.append( - (volume * region).sum() / (region * region).sum()) + wanted_row.append((volume * region).sum() / (region * region).sum()) wanted.append(wanted_row) # run & assert self._test_4d_label(wanted, self.fake_4d_label_data) def test_signal_extr_traits_valid(self): - ''' Test a node using the SignalExtraction interface. + """Test a node using the SignalExtraction interface. Unlike interface.run(), node.run() checks the traits - ''' + """ # run node = pe.Node( iface.SignalExtraction( - in_file=os.path.abspath(self.filenames['in_file']), - label_files=os.path.abspath(self.filenames['label_files']), + in_file=os.path.abspath(self.filenames["in_file"]), + label_files=os.path.abspath(self.filenames["label_files"]), class_labels=self.labels, - incl_shared_variance=False), - name='SignalExtraction') + incl_shared_variance=False, + ), + name="SignalExtraction", + ) node.run() # assert # just checking that it passes trait validations - def _test_4d_label(self, - wanted, - fake_labels, - include_global=False, - incl_shared_variance=True): + def _test_4d_label( + self, wanted, fake_labels, include_global=False, incl_shared_variance=True + ): # set up - utils.save_toy_nii(fake_labels, self.filenames['4d_label_file']) + utils.save_toy_nii(fake_labels, self.filenames["4d_label_file"]) # run iface.SignalExtraction( - in_file=self.filenames['in_file'], - label_files=self.filenames['4d_label_file'], + in_file=self.filenames["in_file"], + label_files=self.filenames["4d_label_file"], class_labels=self.labels, incl_shared_variance=incl_shared_variance, - include_global=include_global).run() + include_global=include_global, + ).run() wanted_labels = self.global_labels if include_global else self.labels @@ -155,12 +156,11 @@ def _test_4d_label(self, self.assert_expected_output(wanted_labels, wanted) def assert_expected_output(self, labels, wanted): - with open(self.filenames['out_file'], 'r') as output: + with open(self.filenames["out_file"]) as output: got = [line.split() for line in output] labels_got = got.pop(0) # remove header assert labels_got == labels - assert len(got) == self.fake_fmri_data.shape[ - 3], 'num rows and num volumes' + assert len(got) == self.fake_fmri_data.shape[3], "num rows and num volumes" # convert from string to float got = [[float(num) for num in 
row] for row in got] for i, time in enumerate(got): @@ -168,33 +168,53 @@ def assert_expected_output(self, labels, wanted): for j, segment in enumerate(time): npt.assert_almost_equal(segment, wanted[i][j], decimal=1) - -# dj: self doesnt have orig_dir at this point, not sure how to change it. -# should work without it -# def teardown_class(self): -# self.orig_dir.chdir() - - fake_fmri_data = np.array([[[[2, -1, 4, -2, 3], [4, -2, -5, -1, 0]], - [[-2, 0, 1, 4, 4], [-5, 3, -3, 1, -5]]], - [[[2, -2, -1, -2, -5], [3, 0, 3, -5, -2]], - [[-4, -2, -2, 1, -2], [3, 1, 4, -3, -2]]]]) - - fake_label_data = np.array([[[1, 0], [3, 1]], [[2, 0], [1, 3]]]) + # dj: self doesn't have orig_dir at this point, not sure how to change it. + # should work without it + # def teardown_class(self): + # self.orig_dir.chdir() + + fake_fmri_data = np.array( + [ + [ + [[2, -1, 4, -2, 3], [4, -2, -5, -1, 0]], + [[-2, 0, 1, 4, 4], [-5, 3, -3, 1, -5]], + ], + [ + [[2, -2, -1, -2, -5], [3, 0, 3, -5, -2]], + [[-4, -2, -2, 1, -2], [3, 1, 4, -3, -2]], + ], + ], + np.int16, + ) + + fake_label_data = np.array([[[1, 0], [3, 1]], [[2, 0], [1, 3]]], np.uint8) fake_equiv_4d_label_data = np.array( - [[[[1., 0., 0.], [0., 0., 0.]], [[0., 0., 1.], [1., 0., 0.]]], - [[[0., 1., 0.], [0., 0., 0.]], [[1., 0., 0.], [0., 0., 1.]]]]) - - base_wanted = [[-2.33333, 2, .5], [0, -2, .5], [-.3333333, -1, 2.5], - [0, -2, .5], [-1.3333333, -5, 1]] - - fake_4d_label_data = np.array([[[[0.2, 0.3, 0.5], [0.1, 0.1, 0.8]], - [[0.1, 0.3, 0.6], [0.3, 0.4, 0.3]]], - [[[0.2, 0.2, 0.6], [0., 0.3, 0.7]], - [[0.3, 0.3, 0.4], [0.3, 0.4, 0.3]]]]) - - fourd_wanted = [[-5.0652173913, -5.44565217391, 5.50543478261], [ - -7.02173913043, 11.1847826087, -4.33152173913 - ], [-19.0869565217, 21.2391304348, - -4.57608695652], [5.19565217391, -3.66304347826, -1.51630434783], - [-12.0, 3., 0.5]] + [ + [[[1.0, 0.0, 0.0], [0.0, 0.0, 0.0]], [[0.0, 0.0, 1.0], [1.0, 0.0, 0.0]]], + [[[0.0, 1.0, 0.0], [0.0, 0.0, 0.0]], [[1.0, 0.0, 0.0], [0.0, 0.0, 1.0]]], + ] + ) + + base_wanted = [ + [-2.33333, 2, 0.5], + [0, -2, 0.5], + [-0.3333333, -1, 2.5], + [0, -2, 0.5], + [-1.3333333, -5, 1], + ] + + fake_4d_label_data = np.array( + [ + [[[0.2, 0.3, 0.5], [0.1, 0.1, 0.8]], [[0.1, 0.3, 0.6], [0.3, 0.4, 0.3]]], + [[[0.2, 0.2, 0.6], [0.0, 0.3, 0.7]], [[0.3, 0.3, 0.4], [0.3, 0.4, 0.3]]], + ] + ) + + fourd_wanted = [ + [-5.0652173913, -5.44565217391, 5.50543478261], + [-7.02173913043, 11.1847826087, -4.33152173913], + [-19.0869565217, 21.2391304348, -4.57608695652], + [5.19565217391, -3.66304347826, -1.51630434783], + [-12.0, 3.0, 0.5], + ] diff --git a/nipype/interfaces/tests/test_r.py b/nipype/interfaces/tests/test_r.py new file mode 100644 index 0000000000..6e980e61cd --- /dev/null +++ b/nipype/interfaces/tests/test_r.py @@ -0,0 +1,61 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +import os + +import pytest +from nipype.interfaces import r + +no_r = r.no_r + + +@pytest.mark.skipif(no_r, reason="R is not available") +def test_cmdline(tmp_path): + default_script_file = str(tmp_path / "testscript") + ri = r.RCommand(script="1 + 1", script_file=default_script_file, rfile=False) + r_cmd = r.get_r_command() + + assert ri.cmdline == r_cmd + (' -e "1 + 1"') + + assert ri.inputs.script == "1 + 1" + assert ri.inputs.script_file == default_script_file + assert not os.path.exists(ri.inputs.script_file), "scriptfile should not exist" + assert not os.path.exists( + default_script_file + ), "default scriptfile should not exist." 
+ + +@pytest.mark.skipif(no_r, reason="R is not available") +def test_run_interface(tmpdir): + cwd = tmpdir.chdir() + default_script_file = r.RInputSpec().script_file + + rc = r.RCommand(r_cmd="foo_m") + assert not os.path.exists(default_script_file), "scriptfile should not exist 1." + with pytest.raises(ValueError): + rc.run() # script is mandatory + assert not os.path.exists(default_script_file), "scriptfile should not exist 2." + if os.path.exists(default_script_file): # cleanup + os.remove(default_script_file) + + rc.inputs.script = "a=1;" + assert not os.path.exists(default_script_file), "scriptfile should not exist 3." + with pytest.raises(IOError): + rc.run() # foo_m is not an executable + assert os.path.exists(default_script_file), "scriptfile should exist 3." + if os.path.exists(default_script_file): # cleanup + os.remove(default_script_file) + cwd.chdir() + + +@pytest.mark.skipif(no_r, reason="R is not available") +def test_set_rcmd(tmpdir): + cwd = tmpdir.chdir() + default_script_file = r.RInputSpec().script_file + + ri = r.RCommand() + _default_r_cmd = ri._cmd + ri.set_default_r_cmd("foo") + assert not os.path.exists(default_script_file), "scriptfile should not exist." + assert ri._cmd == "foo" + ri.set_default_r_cmd(_default_r_cmd) + cwd.chdir() diff --git a/nipype/interfaces/utility/__init__.py b/nipype/interfaces/utility/__init__.py index 084acb569c..b4df1c2afb 100644 --- a/nipype/interfaces/utility/__init__.py +++ b/nipype/interfaces/utility/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -7,7 +6,6 @@ Requires Packages to be installed """ -from .base import (IdentityInterface, Rename, Select, Split, Merge, - AssertEqual) +from .base import IdentityInterface, Rename, Select, Split, Merge, AssertEqual from .csv import CSVReader from .wrappers import Function diff --git a/nipype/interfaces/utility/base.py b/nipype/interfaces/utility/base.py index 864951f36a..564966cb5b 100644 --- a/nipype/interfaces/utility/base.py +++ b/nipype/interfaces/utility/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -6,20 +5,24 @@ >>> tmp = getfixture('tmpdir') >>> old = tmp.chdir() """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range - -from future import standard_library -standard_library.install_aliases() - import os import re import numpy as np -from ..base import (traits, TraitedSpec, DynamicTraitedSpec, File, Undefined, - isdefined, OutputMultiPath, InputMultiPath, BaseInterface, - BaseInterfaceInputSpec, Str, SimpleInterface) +from ..base import ( + traits, + TraitedSpec, + DynamicTraitedSpec, + File, + Undefined, + isdefined, + OutputMultiPath, + InputMultiPath, + BaseInterface, + BaseInterfaceInputSpec, + Str, + SimpleInterface, +) from ..io import IOBase, add_traits from ...utils.filemanip import ensure_list, copyfile, split_filename @@ -49,20 +52,20 @@ class IdentityInterface(IOBase): >>> out = ii2.run() # doctest: +SKIP ValueError: IdentityInterface requires a value for input 'b' because it was listed in 'fields' Interface IdentityInterface failed to run. 
""" + input_spec = DynamicTraitedSpec output_spec = DynamicTraitedSpec def __init__(self, fields=None, mandatory_inputs=True, **inputs): - super(IdentityInterface, self).__init__(**inputs) + super().__init__(**inputs) if fields is None or not fields: - raise ValueError( - 'Identity Interface fields must be a non-empty list') + raise ValueError("Identity Interface fields must be a non-empty list") # Each input must be in the fields. for in_field in inputs: if in_field not in fields: raise ValueError( - 'Identity Interface input is not in the fields: %s' % - in_field) + "Identity Interface input is not in the fields: %s" % in_field + ) self._fields = fields self._mandatory_inputs = mandatory_inputs add_traits(self.inputs, fields) @@ -80,9 +83,11 @@ def _list_outputs(self): for key in self._fields: value = getattr(self.inputs, key) if not isdefined(value): - msg = "%s requires a value for input '%s' because it was listed in 'fields'. \ - You can turn off mandatory inputs checking by passing mandatory_inputs = False to the constructor." % \ - (self.__class__.__name__, key) + msg = ( + "%s requires a value for input '%s' because it was listed in 'fields'. \ + You can turn off mandatory inputs checking by passing mandatory_inputs = False to the constructor." + % (self.__class__.__name__, key) + ) raise ValueError(msg) outputs = self._outputs().get() @@ -95,22 +100,23 @@ def _list_outputs(self): class MergeInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): axis = traits.Enum( - 'vstack', - 'hstack', + "vstack", + "hstack", usedefault=True, - desc= - 'direction in which to merge, hstack requires same number of elements in each input' + desc="direction in which to merge, hstack requires same number of elements in each input", ) no_flatten = traits.Bool( False, usedefault=True, - desc='append to outlist instead of extending in vstack mode') + desc="append to outlist instead of extending in vstack mode", + ) ravel_inputs = traits.Bool( - False, usedefault=True, desc='ravel inputs when no_flatten is False') + False, usedefault=True, desc="ravel inputs when no_flatten is False" + ) class MergeOutputSpec(TraitedSpec): - out = traits.List(desc='Merged output') + out = traits.List(desc="Merged output") def _ravel(in_val): @@ -163,14 +169,15 @@ class Merge(IOBase): >>> out.outputs.out [[1, [2, 5], 3]] """ + input_spec = MergeInputSpec output_spec = MergeOutputSpec def __init__(self, numinputs=0, **inputs): - super(Merge, self).__init__(**inputs) + super().__init__(**inputs) self._numinputs = numinputs if numinputs >= 1: - input_names = ['in%d' % (i + 1) for i in range(numinputs)] + input_names = ["in%d" % (i + 1) for i in range(numinputs)] else: input_names = [] add_traits(self.inputs, input_names) @@ -182,46 +189,44 @@ def _list_outputs(self): if self._numinputs < 1: return outputs else: - getval = lambda idx: getattr(self.inputs, 'in%d' % (idx + 1)) + getval = lambda idx: getattr(self.inputs, "in%d" % (idx + 1)) values = [ - getval(idx) for idx in range(self._numinputs) - if isdefined(getval(idx)) + getval(idx) for idx in range(self._numinputs) if isdefined(getval(idx)) ] - if self.inputs.axis == 'vstack': + if self.inputs.axis == "vstack": for value in values: if isinstance(value, list) and not self.inputs.no_flatten: - out.extend( - _ravel(value) if self.inputs.ravel_inputs else value) + out.extend(_ravel(value) if self.inputs.ravel_inputs else value) else: out.append(value) else: lists = [ensure_list(val) for val in values] out = [[val[i] for val in lists] for i in range(len(lists[0]))] - 
outputs['out'] = out + outputs["out"] = out return outputs class RenameInputSpec(DynamicTraitedSpec): in_file = File(exists=True, mandatory=True, desc="file to rename") keep_ext = traits.Bool( - desc=("Keep in_file extension, replace " - "non-extension component of name")) + desc="Keep in_file extension, replace non-extension component of name" + ) format_string = Str( - mandatory=True, desc="Python formatting string for output template") - parse_string = Str(desc="Python regexp parse string to define " - "replacement inputs") + mandatory=True, desc="Python formatting string for output template" + ) + parse_string = Str(desc="Python regexp parse string to define replacement inputs") use_fullpath = traits.Bool( - False, usedefault=True, desc="Use full path as input to regex parser") + False, usedefault=True, desc="Use full path as input to regex parser" + ) class RenameOutputSpec(TraitedSpec): - out_file = traits.File( - exists=True, desc="softlink to original file with new name") + out_file = File(exists=True, desc="softlink to original file with new name") class Rename(SimpleInterface, IOBase): - """Change the name of a file based on a mapped format string. + r"""Change the name of a file based on a mapped format string. To use additional inputs that will be defined at run-time, the class constructor must be called with the format template, and the fields @@ -254,7 +259,7 @@ class Rename(SimpleInterface, IOBase): >>> rename3 = Rename(format_string="%(subject_id)s_%(seq)s_run%(run)02d.nii") >>> rename3.inputs.in_file = os.path.join(datadir, "func_epi_1_1.nii") - >>> rename3.inputs.parse_string = "func_(?P<seq>\w*)_.*" + >>> rename3.inputs.parse_string = r"func_(?P<seq>\w*)_.*" >>> rename3.inputs.subject_id = "subj_201" >>> rename3.inputs.run = 2 >>> res = rename3.run() # doctest: +SKIP @@ -262,11 +267,12 @@ 'subj_201_epi_run02.nii' # doctest: +SKIP """ + input_spec = RenameInputSpec output_spec = RenameOutputSpec def __init__(self, format_string=None, **inputs): - super(Rename, self).__init__(**inputs) + super().__init__(**inputs) if format_string is not None: self.inputs.format_string = format_string self.fmt_fields = re.findall(r"%\((.+?)\)", format_string) @@ -277,12 +283,12 @@ def _rename(self): fmt_dict = dict() if isdefined(self.inputs.parse_string): - if isdefined( - self.inputs.use_fullpath) and self.inputs.use_fullpath: + if isdefined(self.inputs.use_fullpath) and self.inputs.use_fullpath: m = re.search(self.inputs.parse_string, self.inputs.in_file) else: - m = re.search(self.inputs.parse_string, - os.path.split(self.inputs.in_file)[1]) + m = re.search( + self.inputs.parse_string, os.path.split(self.inputs.in_file)[1] + ) if m: fmt_dict.update(m.groupdict()) for field in self.fmt_fields: @@ -290,10 +296,9 @@ if isdefined(val): fmt_dict[field] = getattr(self.inputs, field) if self.inputs.keep_ext: - fmt_string = "".join([ - self.inputs.format_string, - split_filename(self.inputs.in_file)[2] - ]) + fmt_string = "".join( + [self.inputs.format_string, split_filename(self.inputs.in_file)[2]] + ) else: fmt_string = self.inputs.format_string return fmt_string % fmt_dict @@ -302,22 +307,20 @@ def _run_interface(self, runtime): runtime.returncode = 0 out_file = os.path.join(runtime.cwd, self._rename()) _ = copyfile(self.inputs.in_file, out_file) - self._results['out_file'] = out_file + self._results["out_file"] = out_file return runtime class SplitInputSpec(BaseInterfaceInputSpec): - inlist = 
traits.List( - traits.Any, mandatory=True, desc='list of values to split') + inlist = traits.List(traits.Any, mandatory=True, desc="list of values to split") splits = traits.List( traits.Int, mandatory=True, - desc='Number of outputs in each split - should add to number of inputs' + desc="Number of outputs in each split - should add to number of inputs", ) squeeze = traits.Bool( - False, - usedefault=True, - desc='unfold one-element splits removing the list') + False, usedefault=True, desc="unfold one-element splits removing the list" + ) class Split(IOBase): @@ -341,7 +344,7 @@ class Split(IOBase): def _add_output_traits(self, base): undefined_traits = {} for i in range(len(self.inputs.splits)): - key = 'out%d' % (i + 1) + key = "out%d" % (i + 1) base.add_trait(key, traits.Any) undefined_traits[key] = Undefined base.trait_set(trait_change_notify=False, **undefined_traits) @@ -351,28 +354,31 @@ def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.splits): if sum(self.inputs.splits) != len(self.inputs.inlist): - raise RuntimeError('sum of splits != num of list elements') + raise RuntimeError("sum of splits != num of list elements") splits = [0] splits.extend(self.inputs.splits) splits = np.cumsum(splits) for i in range(len(splits) - 1): - val = np.array( - self.inputs.inlist)[splits[i]:splits[i + 1]].tolist() + val = np.array(self.inputs.inlist, dtype=object)[ + splits[i] : splits[i + 1] + ].tolist() if self.inputs.squeeze and len(val) == 1: val = val[0] - outputs['out%d' % (i + 1)] = val + outputs["out%d" % (i + 1)] = val return outputs class SelectInputSpec(BaseInterfaceInputSpec): inlist = InputMultiPath( - traits.Any, mandatory=True, desc='list of values to choose from') + traits.Any, mandatory=True, desc="list of values to choose from" + ) index = InputMultiPath( - traits.Int, mandatory=True, desc='0-based indices of values to choose') + traits.Int, mandatory=True, desc="0-based indices of values to choose" + ) class SelectOutputSpec(TraitedSpec): - out = OutputMultiPath(traits.Any, desc='list of selected values') + out = OutputMultiPath(traits.Any, desc="list of selected values") class Select(IOBase): @@ -400,9 +406,10 @@ class Select(IOBase): def _list_outputs(self): outputs = self._outputs().get() - out = np.array(self.inputs.inlist)[np.array( - self.inputs.index)].tolist() - outputs['out'] = out + out = np.array(self.inputs.inlist, dtype=object)[ + np.array(self.inputs.index) + ].tolist() + outputs["out"] = out return outputs @@ -416,9 +423,10 @@ class AssertEqual(BaseInterface): def _run_interface(self, runtime): import nibabel as nb - data1 = nb.load(self.inputs.volume1).get_data() - data2 = nb.load(self.inputs.volume2).get_data() - if not np.all(data1 == data2): - raise RuntimeError('Input images are not exactly equal') + data1 = np.asanyarray(nb.load(self.inputs.volume1).dataobj) + data2 = np.asanyarray(nb.load(self.inputs.volume2).dataobj) + + if not np.array_equal(data1, data2): + raise RuntimeError("Input images are not exactly equal") return runtime diff --git a/nipype/interfaces/utility/csv.py b/nipype/interfaces/utility/csv.py index 16c377e3b5..979e328bb6 100644 --- a/nipype/interfaces/utility/csv.py +++ b/nipype/interfaces/utility/csv.py @@ -1,29 +1,20 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """CSV Handling utilities """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import zip, range, str, open - -from 
future import standard_library -standard_library.install_aliases() - -from ..base import (traits, TraitedSpec, DynamicTraitedSpec, File, - BaseInterface) +import csv +from ..base import traits, TraitedSpec, DynamicTraitedSpec, File, BaseInterface from ..io import add_traits class CSVReaderInputSpec(DynamicTraitedSpec, TraitedSpec): in_file = File( - exists=True, - mandatory=True, - desc='Input comma-separated value (CSV) file') + exists=True, mandatory=True, desc="Input comma-separated value (CSV) file" + ) header = traits.Bool( - False, - usedefault=True, - desc='True if the first line is a column header') + False, usedefault=True, desc="True if the first line is a column header" + ) + delimiter = traits.String(",", usedefault=True, desc="Delimiter to use.") class CSVReader(BaseInterface): @@ -53,6 +44,7 @@ class CSVReader(BaseInterface): True """ + input_spec = CSVReaderInputSpec output_spec = DynamicTraitedSpec _always_run = True @@ -62,19 +54,15 @@ def _append_entry(self, outputs, entry): outputs[key].append(value) return outputs - def _parse_line(self, line): - line = line.replace('\n', '') - entry = [x.strip() for x in line.split(',')] - return entry - def _get_outfields(self): - with open(self.inputs.in_file, 'r') as fid: - entry = self._parse_line(fid.readline()) + def _get_outfields(self): + with open(self.inputs.in_file) as fid: + reader = csv.reader(fid, delimiter=self.inputs.delimiter) + + entry = next(reader) if self.inputs.header: self._outfields = tuple(entry) else: - self._outfields = tuple( - ['column_' + str(x) for x in range(len(entry))]) + self._outfields = tuple("column_" + str(x) for x in range(len(entry))) return self._outfields def _run_interface(self, runtime): @@ -82,7 +70,7 @@ def _run_interface(self, runtime): return runtime def _outputs(self): - return self._add_output_traits(super(CSVReader, self)._outputs()) + return self._add_output_traits(super()._outputs()) def _add_output_traits(self, base): return add_traits(base, self._get_outfields()) @@ -92,11 +80,11 @@ def _list_outputs(self): outputs = self._outputs().get() isHeader = True for key in self._outfields: outputs[key] = [] # initialize outfields - with open(self.inputs.in_file, 'r') as fid: - for line in fid.readlines(): + with open(self.inputs.in_file) as fid: + reader = csv.reader(fid, delimiter=self.inputs.delimiter) + for entry in reader: if self.inputs.header and isHeader: # skip header line isHeader = False continue - entry = self._parse_line(line) outputs = self._append_entry(outputs, entry) return outputs diff --git a/nipype/interfaces/utility/tests/__init__.py b/nipype/interfaces/utility/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/utility/tests/__init__.py +++ b/nipype/interfaces/utility/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/utility/tests/test_auto_AssertEqual.py b/nipype/interfaces/utility/tests/test_auto_AssertEqual.py index 284e0f4d62..c550a5efba 100644 --- a/nipype/interfaces/utility/tests/test_auto_AssertEqual.py +++ b/nipype/interfaces/utility/tests/test_auto_AssertEqual.py @@ -1,12 +1,17 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import AssertEqual def test_AssertEqual_inputs(): input_map = dict( - volume1=dict(mandatory=True, ), - volume2=dict(mandatory=True, ), + volume1=dict( + extensions=None, + mandatory=True, + ), + volume2=dict( + extensions=None, + mandatory=True, + ), ) inputs = AssertEqual.input_spec() diff --git a/nipype/interfaces/utility/tests/test_auto_CSVReader.py 
b/nipype/interfaces/utility/tests/test_auto_CSVReader.py index 8a51ca4170..a96a4d11bf 100644 --- a/nipype/interfaces/utility/tests/test_auto_CSVReader.py +++ b/nipype/interfaces/utility/tests/test_auto_CSVReader.py @@ -1,18 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..csv import CSVReader def test_CSVReader_inputs(): input_map = dict( - header=dict(usedefault=True, ), - in_file=dict(mandatory=True, ), + delimiter=dict( + usedefault=True, + ), + header=dict( + usedefault=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), ) inputs = CSVReader.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CSVReader_outputs(): output_map = dict() outputs = CSVReader.output_spec() diff --git a/nipype/interfaces/utility/tests/test_auto_Function.py b/nipype/interfaces/utility/tests/test_auto_Function.py index 5c2505fe16..fdbccc3098 100644 --- a/nipype/interfaces/utility/tests/test_auto_Function.py +++ b/nipype/interfaces/utility/tests/test_auto_Function.py @@ -1,15 +1,20 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..wrappers import Function def test_Function_inputs(): - input_map = dict(function_str=dict(mandatory=True, ), ) + input_map = dict( + function_str=dict( + mandatory=True, + ), + ) inputs = Function.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Function_outputs(): output_map = dict() outputs = Function.output_spec() diff --git a/nipype/interfaces/utility/tests/test_auto_IdentityInterface.py b/nipype/interfaces/utility/tests/test_auto_IdentityInterface.py index 97523d0b86..7adb95ee88 100644 --- a/nipype/interfaces/utility/tests/test_auto_IdentityInterface.py +++ b/nipype/interfaces/utility/tests/test_auto_IdentityInterface.py @@ -1,5 +1,4 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import IdentityInterface @@ -10,6 +9,8 @@ def test_IdentityInterface_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_IdentityInterface_outputs(): output_map = dict() outputs = IdentityInterface.output_spec() diff --git a/nipype/interfaces/utility/tests/test_auto_Merge.py b/nipype/interfaces/utility/tests/test_auto_Merge.py index 71e7d2db0a..f9304a9897 100644 --- a/nipype/interfaces/utility/tests/test_auto_Merge.py +++ b/nipype/interfaces/utility/tests/test_auto_Merge.py @@ -1,21 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import Merge def test_Merge_inputs(): input_map = dict( - axis=dict(usedefault=True, ), - no_flatten=dict(usedefault=True, ), - ravel_inputs=dict(usedefault=True, ), + axis=dict( + usedefault=True, + ), + no_flatten=dict( + usedefault=True, + ), + ravel_inputs=dict( + usedefault=True, + ), ) inputs = Merge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Merge_outputs(): - output_map = dict(out=dict(), ) + output_map = dict( + out=dict(), + ) outputs = Merge.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/utility/tests/test_auto_Rename.py b/nipype/interfaces/utility/tests/test_auto_Rename.py index 1e6e1cab34..5b9a183796 100644 --- a/nipype/interfaces/utility/tests/test_auto_Rename.py +++ b/nipype/interfaces/utility/tests/test_auto_Rename.py @@ -1,23 +1,35 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import Rename def test_Rename_inputs(): input_map = dict( - format_string=dict(mandatory=True, ), - in_file=dict(mandatory=True, ), + format_string=dict( + mandatory=True, + ), + in_file=dict( + extensions=None, + mandatory=True, + ), keep_ext=dict(), parse_string=dict(), - use_fullpath=dict(usedefault=True, ), + use_fullpath=dict( + usedefault=True, + ), ) inputs = Rename.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Rename_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Rename.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/utility/tests/test_auto_Select.py b/nipype/interfaces/utility/tests/test_auto_Select.py index e241f7ed76..21dcb91670 100644 --- a/nipype/interfaces/utility/tests/test_auto_Select.py +++ b/nipype/interfaces/utility/tests/test_auto_Select.py @@ -1,20 +1,27 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import Select def test_Select_inputs(): input_map = dict( - index=dict(mandatory=True, ), - inlist=dict(mandatory=True, ), + index=dict( + mandatory=True, + ), + inlist=dict( + mandatory=True, + ), ) inputs = Select.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Select_outputs(): - output_map = dict(out=dict(), ) + output_map = dict( + out=dict(), + ) outputs = Select.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/utility/tests/test_auto_Split.py b/nipype/interfaces/utility/tests/test_auto_Split.py index 8acbceef99..60f46fb73c 100644 --- a/nipype/interfaces/utility/tests/test_auto_Split.py +++ b/nipype/interfaces/utility/tests/test_auto_Split.py @@ -1,19 +1,26 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import Split def test_Split_inputs(): input_map = dict( - inlist=dict(mandatory=True, ), - splits=dict(mandatory=True, ), - squeeze=dict(usedefault=True, ), + inlist=dict( + mandatory=True, + ), + splits=dict( + mandatory=True, + ), + squeeze=dict( + usedefault=True, + ), ) inputs = Split.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Split_outputs(): output_map = dict() outputs = Split.output_spec() diff --git a/nipype/interfaces/utility/tests/test_base.py b/nipype/interfaces/utility/tests/test_base.py index 159454a7fc..4a4e6d8899 100644 --- a/nipype/interfaces/utility/tests/test_base.py +++ b/nipype/interfaces/utility/tests/test_base.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, unicode_literals import os import pytest @@ -23,9 +21,8 @@ def test_rename(tmpdir): # Now a 
string-formatting version rn = utility.Rename( - in_file="file.txt", - format_string="%(field1)s_file%(field2)d", - keep_ext=True) + in_file="file.txt", format_string="%(field1)s_file%(field2)d", keep_ext=True + ) # Test .input field creation assert hasattr(rn.inputs, "field1") assert hasattr(rn.inputs, "field2") @@ -39,45 +36,42 @@ def test_rename(tmpdir): assert os.path.exists(outfile) -@pytest.mark.parametrize("args, expected", [({}, ([0], [1, 2, 3])), - ({ - "squeeze": True - }, (0, [1, 2, 3]))]) +@pytest.mark.parametrize( + "args, expected", [({}, ([0], [1, 2, 3])), ({"squeeze": True}, (0, [1, 2, 3]))] +) def test_split(tmpdir, args, expected): tmpdir.chdir() node = pe.Node( utility.Split(inlist=list(range(4)), splits=[1, 3], **args), - name='split_squeeze') + name="split_squeeze", + ) res = node.run() assert res.outputs.out1 == expected[0] assert res.outputs.out2 == expected[1] -@pytest.mark.parametrize("args, kwargs, in_lists, expected", [ - ([3], {}, [0, [1, 2], [3, 4, 5]], [0, 1, 2, 3, 4, 5]), - ([0], {}, None, None), - ([], {}, [], []), - ([], {}, [0, [1, 2], [3, 4, 5]], [0, [1, 2], [3, 4, 5]]), - ([3], { - 'axis': 'hstack' - }, [[0], [1, 2], [3, 4, 5]], [[0, 1, 3]]), - ([3], { - 'axis': 'hstack' - }, [[0, 1], [2, 3], [4, 5]], [[0, 2, 4], [1, 3, 5]]), - ([3], { - 'axis': 'hstack' - }, [[0, 1], [2, 3], [4, 5]], [[0, 2, 4], [1, 3, 5]]), -]) +@pytest.mark.parametrize( + "args, kwargs, in_lists, expected", + [ + ([3], {}, [0, [1, 2], [3, 4, 5]], [0, 1, 2, 3, 4, 5]), + ([0], {}, None, None), + ([], {}, [], []), + ([], {}, [0, [1, 2], [3, 4, 5]], [0, [1, 2], [3, 4, 5]]), + ([3], {"axis": "hstack"}, [[0], [1, 2], [3, 4, 5]], [[0, 1, 3]]), + ([3], {"axis": "hstack"}, [[0, 1], [2, 3], [4, 5]], [[0, 2, 4], [1, 3, 5]]), + ([3], {"axis": "hstack"}, [[0, 1], [2, 3], [4, 5]], [[0, 2, 4], [1, 3, 5]]), + ], +) def test_merge(tmpdir, args, kwargs, in_lists, expected): tmpdir.chdir() - node = pe.Node(utility.Merge(*args, **kwargs), name='merge') + node = pe.Node(utility.Merge(*args, **kwargs), name="merge") numinputs = args[0] if args else 0 if numinputs >= 1: for i in range(1, numinputs + 1): - setattr(node.inputs, 'in{:d}'.format(i), in_lists[i - 1]) + setattr(node.inputs, f"in{i:d}", in_lists[i - 1]) res = node.run() if numinputs < 1: diff --git a/nipype/interfaces/utility/tests/test_csv.py b/nipype/interfaces/utility/tests/test_csv.py index a5c678153e..2ce78876cc 100644 --- a/nipype/interfaces/utility/tests/test_csv.py +++ b/nipype/interfaces/utility/tests/test_csv.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, unicode_literals from nipype.interfaces import utility @@ -11,7 +9,7 @@ def test_csvReader(tmpdir): lines = ["foo,hello,300.1\n", "bar,world,5\n", "baz,goodbye,0.3\n"] for x in range(2): name = tmpdir.join("testfile.csv").strpath - with open(name, 'w') as fid: + with open(name, "w") as fid: reader = utility.CSVReader() if x % 2 == 0: fid.write(header) @@ -21,10 +19,50 @@ def test_csvReader(tmpdir): reader.inputs.in_file = name out = reader.run() if x % 2 == 0: - assert out.outputs.files == ['foo', 'bar', 'baz'] - assert out.outputs.labels == ['hello', 'world', 'goodbye'] - assert out.outputs.erosion == ['300.1', '5', '0.3'] + assert out.outputs.files == ["foo", "bar", "baz"] + assert out.outputs.labels == ["hello", "world", "goodbye"] + assert out.outputs.erosion == ["300.1", "5", "0.3"] else: - assert out.outputs.column_0 == ['foo', 
'bar', 'baz'] - assert out.outputs.column_1 == ['hello', 'world', 'goodbye'] - assert out.outputs.column_2 == ['300.1', '5', '0.3'] + assert out.outputs.column_0 == ["foo", "bar", "baz"] + assert out.outputs.column_1 == ["hello", "world", "goodbye"] + assert out.outputs.column_2 == ["300.1", "5", "0.3"] + + +def test_csvReader_quoted(tmpdir): + lines = ['foo,"hello, world",300.1\n'] + + name = tmpdir.join("testfile.csv").strpath + with open(name, "w") as fid: + reader = utility.CSVReader() + fid.writelines(lines) + fid.flush() + reader.inputs.in_file = name + out = reader.run() + + assert out.outputs.column_0 == ["foo"] + assert out.outputs.column_1 == ["hello, world"] + assert out.outputs.column_2 == ["300.1"] + + +def test_csvReader_tabs(tmpdir): + header = "files\tlabels\terosion\n" + lines = ["foo\thello\t300.1\n", "bar\tworld\t5\n", "baz\tgoodbye\t0.3\n"] + for x in range(2): + name = tmpdir.join("testfile.csv").strpath + with open(name, "w") as fid: + reader = utility.CSVReader(delimiter="\t") + if x % 2 == 0: + fid.write(header) + reader.inputs.header = True + fid.writelines(lines) + fid.flush() + reader.inputs.in_file = name + out = reader.run() + if x % 2 == 0: + assert out.outputs.files == ["foo", "bar", "baz"] + assert out.outputs.labels == ["hello", "world", "goodbye"] + assert out.outputs.erosion == ["300.1", "5", "0.3"] + else: + assert out.outputs.column_0 == ["foo", "bar", "baz"] + assert out.outputs.column_1 == ["hello", "world", "goodbye"] + assert out.outputs.column_2 == ["300.1", "5", "0.3"] diff --git a/nipype/interfaces/utility/tests/test_wrappers.py b/nipype/interfaces/utility/tests/test_wrappers.py index 392ae094b0..345d6483ad 100644 --- a/nipype/interfaces/utility/tests/test_wrappers.py +++ b/nipype/interfaces/utility/tests/test_wrappers.py @@ -1,8 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, unicode_literals -import os import pytest from nipype.interfaces import utility @@ -21,15 +18,18 @@ def test_function(tmpdir): def gen_random_array(size): import numpy as np + return np.random.rand(size, size) f1 = pe.MapNode( utility.Function( - input_names=['size'], - output_names=['random_array'], - function=gen_random_array), - name='random_array', - iterfield=['size']) + input_names=["size"], + output_names=["random_array"], + function=gen_random_array, + ), + name="random_array", + iterfield=["size"], + ) f1.inputs.size = [2, 3, 5] wf = pe.Workflow(name="test_workflow") @@ -39,19 +39,20 @@ def increment_array(in_array): f2 = pe.MapNode( utility.Function(function=increment_array), - name='increment_array', - iterfield=['in_array']) + name="increment_array", + iterfield=["in_array"], + ) - wf.connect(f1, 'random_array', f2, 'in_array') + wf.connect(f1, "random_array", f2, "in_array") f3 = pe.Node(utility.Function(function=concat_sort), name="concat_sort") - wf.connect(f2, 'out', f3, 'in_arrays') + wf.connect(f2, "out", f3, "in_arrays") wf.run() def make_random_array(size): - return np.random.randn(size, size) + return np.random.randn(size, size) # noqa def should_fail(tmp): @@ -61,14 +62,16 @@ def should_fail(tmp): utility.Function( input_names=["size"], output_names=["random_array"], - function=make_random_array), - name="should_fail") + function=make_random_array, + ), + name="should_fail", + ) node.inputs.size = 10 node.run() def test_should_fail(tmpdir): - with pytest.raises(NameError): + with 
pytest.raises(pe.nodes.NodeExecutionError): should_fail(tmpdir) @@ -80,15 +83,17 @@ def test_function_with_imports(tmpdir): input_names=["size"], output_names=["random_array"], function=make_random_array, - imports=["import numpy as np"]), - name="should_not_fail") + imports=["import numpy as np"], + ), + name="should_not_fail", + ) print(node.inputs.function_str) node.inputs.size = 10 node.run() def test_aux_connect_function(tmpdir): - """ This tests excution nodes with multiple inputs and auxiliary + """This tests execution nodes with multiple inputs and auxiliary function inside the Workflow connect function. """ tmpdir.chdir() @@ -96,9 +101,7 @@ def test_aux_connect_function(tmpdir): wf = pe.Workflow(name="test_workflow") def _gen_tuple(size): - return [ - 1, - ] * size + return [1] * size def _sum_and_sub_mul(a, b, c): return (a + b) * c, (a - b) * c @@ -106,33 +109,35 @@ def _sum_and_sub_mul(a, b, c): def _inc(x): return x + 1 - params = pe.Node( - utility.IdentityInterface(fields=['size', 'num']), name='params') + params = pe.Node(utility.IdentityInterface(fields=["size", "num"]), name="params") params.inputs.num = 42 params.inputs.size = 1 gen_tuple = pe.Node( utility.Function( - input_names=['size'], output_names=['tuple'], function=_gen_tuple), - name='gen_tuple') + input_names=["size"], output_names=["tuple"], function=_gen_tuple + ), + name="gen_tuple", + ) ssm = pe.Node( utility.Function( - input_names=['a', 'b', 'c'], - output_names=['sum', 'sub'], - function=_sum_and_sub_mul), - name='sum_and_sub_mul') - - split = pe.Node(utility.Split(splits=[1, 1], squeeze=True), name='split') - - wf.connect([ - (params, gen_tuple, [(("size", _inc), "size")]), - (params, ssm, [(("num", _inc), "c")]), - (gen_tuple, split, [("tuple", "inlist")]), - (split, ssm, [ - (("out1", _inc), "a"), - ("out2", "b"), - ]), - ]) + input_names=["a", "b", "c"], + output_names=["sum", "sub"], + function=_sum_and_sub_mul, + ), + name="sum_and_sub_mul", + ) + + split = pe.Node(utility.Split(splits=[1, 1], squeeze=True), name="split") + + wf.connect( + [ + (params, gen_tuple, [(("size", _inc), "size")]), + (params, ssm, [(("num", _inc), "c")]), + (gen_tuple, split, [("tuple", "inlist")]), + (split, ssm, [(("out1", _inc), "a"), ("out2", "b")]), + ] + ) wf.run() diff --git a/nipype/interfaces/utility/wrappers.py b/nipype/interfaces/utility/wrappers.py index 80a6f89738..db38de660c 100644 --- a/nipype/interfaces/utility/wrappers.py +++ b/nipype/interfaces/utility/wrappers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -6,27 +5,23 @@ >>> tmp = getfixture('tmpdir') >>> old = tmp.chdir() """ - -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from future import standard_library -standard_library.install_aliases() - -from builtins import str, bytes - from ... 
import logging -from ..base import (traits, DynamicTraitedSpec, Undefined, isdefined, - BaseInterfaceInputSpec) +from ..base import ( + traits, + DynamicTraitedSpec, + Undefined, + isdefined, + BaseInterfaceInputSpec, +) from ..io import IOBase, add_traits from ...utils.filemanip import ensure_list from ...utils.functions import getsource, create_function_from_source -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class FunctionInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - function_str = traits.Str(mandatory=True, desc='code for function') + function_str = traits.Str(mandatory=True, desc="code for function") class Function(IOBase): @@ -47,12 +42,14 @@ class Function(IOBase): input_spec = FunctionInputSpec output_spec = DynamicTraitedSpec - def __init__(self, - input_names=None, - output_names='out', - function=None, - imports=None, - **inputs): + def __init__( + self, + input_names=None, + output_names="out", + function=None, + imports=None, + **inputs + ): """ Parameters @@ -73,51 +70,48 @@ def __init__(self, in an otherwise empty namespace """ - super(Function, self).__init__(**inputs) + super().__init__(**inputs) if function: - if hasattr(function, '__call__'): + if callable(function): try: self.inputs.function_str = getsource(function) - except IOError: - raise Exception('Interface Function does not accept ' - 'function objects defined interactively ' - 'in a python session') + except OSError: + raise Exception( + "Interface Function does not accept " + "function objects defined interactively " + "in a python session" + ) else: if input_names is None: fninfo = function.__code__ elif isinstance(function, (str, bytes)): self.inputs.function_str = function if input_names is None: - fninfo = create_function_from_source(function, - imports).__code__ + fninfo = create_function_from_source(function, imports).__code__ else: - raise Exception('Unknown type of function') + raise Exception("Unknown type of function") if input_names is None: - input_names = fninfo.co_varnames[:fninfo.co_argcount] - self.inputs.on_trait_change(self._set_function_string, 'function_str') + input_names = fninfo.co_varnames[: fninfo.co_argcount] + self.inputs.on_trait_change(self._set_function_string, "function_str") self._input_names = ensure_list(input_names) self._output_names = ensure_list(output_names) - add_traits(self.inputs, [name for name in self._input_names]) + add_traits(self.inputs, self._input_names) self.imports = imports - self._out = {} - for name in self._output_names: - self._out[name] = None + self._out = {name: None for name in self._output_names} def _set_function_string(self, obj, name, old, new): - if name == 'function_str': - if hasattr(new, '__call__'): + if name == "function_str": + if callable(new): function_source = getsource(new) fninfo = new.__code__ elif isinstance(new, (str, bytes)): function_source = new - fninfo = create_function_from_source(new, - self.imports).__code__ + fninfo = create_function_from_source(new, self.imports).__code__ self.inputs.trait_set( - trait_change_notify=False, **{ - '%s' % name: function_source - }) + trait_change_notify=False, **{"%s" % name: function_source} + ) # Update input traits - input_names = fninfo.co_varnames[:fninfo.co_argcount] + input_names = fninfo.co_varnames[: fninfo.co_argcount] new_names = set(input_names) - set(self._input_names) add_traits(self.inputs, list(new_names)) self._input_names.extend(new_names) @@ -132,8 +126,9 @@ def _add_output_traits(self, base): def 
_run_interface(self, runtime): # Create function handle - function_handle = create_function_from_source(self.inputs.function_str, - self.imports) + function_handle = create_function_from_source( + self.inputs.function_str, self.imports + ) # Get function args args = {} for name in self._input_names: @@ -145,9 +140,8 @@ def _run_interface(self, runtime): if len(self._output_names) == 1: self._out[self._output_names[0]] = out else: - if isinstance(out, tuple) and \ - (len(out) != len(self._output_names)): - raise RuntimeError('Mismatch in number of expected outputs') + if isinstance(out, tuple) and (len(out) != len(self._output_names)): + raise RuntimeError("Mismatch in number of expected outputs") else: for idx, name in enumerate(self._output_names): diff --git a/nipype/interfaces/vista/__init__.py b/nipype/interfaces/vista/__init__.py index d0372042aa..414a002527 100644 --- a/nipype/interfaces/vista/__init__.py +++ b/nipype/interfaces/vista/__init__.py @@ -1,4 +1,4 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from .vista import (Vnifti2Image, VtoMat) +"""VistaSoft contains Matlab code to perform a variety of analyses on MRI data.""" +from .vista import Vnifti2Image, VtoMat diff --git a/nipype/interfaces/vista/tests/__init__.py b/nipype/interfaces/vista/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/vista/tests/__init__.py +++ b/nipype/interfaces/vista/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py b/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py index 785e87e8b1..f06a118fc2 100644 --- a/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py +++ b/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py @@ -1,13 +1,15 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..vista import Vnifti2Image def test_Vnifti2Image_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), attributes=dict( - argstr='-attr %s', + argstr="-attr %s", + extensions=None, position=2, ), environ=dict( @@ -15,16 +17,18 @@ def test_Vnifti2Image_inputs(): usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, position=1, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, hash_files=False, keep_extension=False, - name_source=['in_file'], - name_template='%s.v', + name_source=["in_file"], + name_template="%s.v", position=-1, ), ) @@ -33,8 +37,14 @@ def test_Vnifti2Image_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Vnifti2Image_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = Vnifti2Image.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/vista/tests/test_auto_VtoMat.py b/nipype/interfaces/vista/tests/test_auto_VtoMat.py index ee16266402..8d2b53ab0c 100644 --- a/nipype/interfaces/vista/tests/test_auto_VtoMat.py +++ b/nipype/interfaces/vista/tests/test_auto_VtoMat.py @@ -1,26 +1,29 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..vista import VtoMat def test_VtoMat_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ),
environ=dict( nohash=True, usedefault=True, ), in_file=dict( - argstr='-in %s', + argstr="-in %s", + extensions=None, mandatory=True, position=1, ), out_file=dict( - argstr='-out %s', + argstr="-out %s", + extensions=None, hash_files=False, keep_extension=False, - name_source=['in_file'], - name_template='%s.mat', + name_source=["in_file"], + name_template="%s.mat", position=-1, ), ) @@ -29,8 +32,14 @@ def test_VtoMat_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VtoMat_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict( + out_file=dict( + extensions=None, + ), + ) outputs = VtoMat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/vista/vista.py b/nipype/interfaces/vista/vista.py index 5000036d02..5e6571639a 100644 --- a/nipype/interfaces/vista/vista.py +++ b/nipype/interfaces/vista/vista.py @@ -1,33 +1,27 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) from ..base import CommandLineInputSpec, CommandLine, TraitedSpec, File class Vnifti2ImageInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='-in %s', - mandatory=True, - position=1, - desc='in file') - attributes = File( - exists=True, argstr='-attr %s', position=2, desc='attribute file') + exists=True, argstr="-in %s", mandatory=True, position=1, desc="in file" + ) + attributes = File(exists=True, argstr="-attr %s", position=2, desc="attribute file") out_file = File( name_template="%s.v", keep_extension=False, - argstr='-out %s', + argstr="-out %s", hash_files=False, position=-1, - desc='output data file', - name_source=["in_file"]) + desc="output data file", + name_source=["in_file"], + ) class Vnifti2ImageOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Output vista file') + out_file = File(exists=True, desc="Output vista file") class Vnifti2Image(CommandLine): @@ -36,38 +30,36 @@ class Vnifti2Image(CommandLine): Example ------- - >>> vimage = Vnifti2Image() >>> vimage.inputs.in_file = 'image.nii' >>> vimage.cmdline 'vnifti2image -in image.nii -out image.v' - >>> vimage.run() # doctest: +SKIP + >>> vimage.run() # doctest: +SKIP + """ - _cmd = 'vnifti2image' + _cmd = "vnifti2image" input_spec = Vnifti2ImageInputSpec output_spec = Vnifti2ImageOutputSpec class VtoMatInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='-in %s', - mandatory=True, - position=1, - desc='in file') + exists=True, argstr="-in %s", mandatory=True, position=1, desc="in file" + ) out_file = File( name_template="%s.mat", keep_extension=False, - argstr='-out %s', + argstr="-out %s", hash_files=False, position=-1, - desc='output mat file', - name_source=["in_file"]) + desc="output mat file", + name_source=["in_file"], + ) class VtoMatOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Output mat file') + out_file = File(exists=True, desc="Output mat file") class VtoMat(CommandLine): @@ -76,14 +68,14 @@ class VtoMat(CommandLine): Example ------- - >>> vimage = VtoMat() >>> vimage.inputs.in_file = 'image.v' >>> vimage.cmdline 'vtomat -in image.v -out image.mat' - >>> vimage.run() # doctest: +SKIP + >>> vimage.run() # doctest: +SKIP + """ - _cmd = 'vtomat' + _cmd = "vtomat" input_spec = VtoMatInputSpec output_spec = VtoMatOutputSpec diff --git 
a/nipype/interfaces/vtkbase.py b/nipype/interfaces/vtkbase.py index 1ec66ea614..4b49d56815 100644 --- a/nipype/interfaces/vtkbase.py +++ b/nipype/interfaces/vtkbase.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -6,63 +5,65 @@ Code using tvtk should import it through this module """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os from .. import logging -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") # Check that VTK can be imported and get version _vtk_version = None try: import vtk - _vtk_version = (vtk.vtkVersion.GetVTKMajorVersion(), - vtk.vtkVersion.GetVTKMinorVersion()) + + _vtk_version = ( + vtk.vtkVersion.GetVTKMajorVersion(), + vtk.vtkVersion.GetVTKMinorVersion(), + ) except ImportError: - iflogger.warning('VTK was not found') + iflogger.warning("VTK was not found") # Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var -old_ets = os.getenv('ETS_TOOLKIT') -os.environ['ETS_TOOLKIT'] = 'null' +old_ets = os.getenv("ETS_TOOLKIT") +os.environ["ETS_TOOLKIT"] = "null" _have_tvtk = False try: from tvtk.api import tvtk + _have_tvtk = True except ImportError: - iflogger.warning('tvtk wasn\'t found') + iflogger.warning("tvtk wasn't found") tvtk = None finally: if old_ets is not None: - os.environ['ETS_TOOLKIT'] = old_ets + os.environ["ETS_TOOLKIT"] = old_ets else: - del os.environ['ETS_TOOLKIT'] + del os.environ["ETS_TOOLKIT"] def vtk_version(): - """ Get VTK version """ + """Get VTK version""" global _vtk_version return _vtk_version def no_vtk(): - """ Checks if VTK is installed and the python wrapper is functional """ + """Checks if VTK is installed and the python wrapper is functional""" global _vtk_version return _vtk_version is None def no_tvtk(): - """ Checks if tvtk was found """ + """Checks if tvtk was found""" global _have_tvtk return not _have_tvtk def vtk_old(): - """ Checks if VTK uses the old-style pipeline (VTK<6.0) """ + """Checks if VTK uses the old-style pipeline (VTK<6.0)""" global _vtk_version if _vtk_version is None: - raise RuntimeException('VTK is not correctly installed.') + raise RuntimeError("VTK is not correctly installed.") return _vtk_version[0] < 6 @@ -78,7 +79,7 @@ def configure_input_data(obj, data): def vtk_output(obj): - """ Configure the input data for vtk pipeline object obj.""" + """Return the output of the vtk pipeline object obj.""" if vtk_old(): return obj.output return obj.get_output() diff --git a/nipype/interfaces/workbench/__init__.py b/nipype/interfaces/workbench/__init__.py index 5ced0d2fb3..693ee395a8 100644 --- a/nipype/interfaces/workbench/__init__.py +++ b/nipype/interfaces/workbench/__init__.py @@ -1,5 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: - +"""Connectome Workbench is a visualization tool for neuroimaging data, especially data
derived from HCP data.""" from .metric import MetricResample +from .cifti import CiftiSmooth diff --git a/nipype/interfaces/workbench/base.py b/nipype/interfaces/workbench/base.py index 4adc9dc69b..0cfb8624d7 100644 --- a/nipype/interfaces/workbench/base.py +++ b/nipype/interfaces/workbench/base.py @@ -1,17 +1,14 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The workbench module provides classes for interfacing with `connectome workbench `_ tools. -`Connectome Workbench is an open source, freely available visualization and - discovery tool used to map neuroimaging data, especially data generated by the - Human Connectome Project. +Connectome Workbench is an open source, freely available visualization and +discovery tool used to map neuroimaging data, especially data generated by the +Human Connectome Project. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os import re @@ -19,19 +16,17 @@ from ...utils.filemanip import split_filename from ..base import CommandLine, PackageInfo -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class Info(PackageInfo): - """ - Handle `wb_command` version information. - """ + """Handle Connectome Workbench version information.""" - version_cmd = 'wb_command -version' + version_cmd = "wb_command -version" @staticmethod def parse_version(raw_info): - m = re.search(r'\nVersion (\S+)', raw_info) + m = re.search(r"\nVersion (\S+)", raw_info) return m.groups()[0] if m else None @@ -42,7 +37,7 @@ class WBCommand(CommandLine): def version(self): return Info.version() - def _gen_filename(self, name, outdir=None, suffix='', ext=None): + def _gen_filename(self, name, outdir=None, suffix="", ext=None): """Generate a filename based on the given parameters. The filename will take the form: . Parameters @@ -65,5 +60,5 @@ def _gen_filename(self, name, outdir=None, suffix='', ext=None): if ext is None: ext = fext if outdir is None: - outdir = '.' + outdir = "." return os.path.join(outdir, fname + suffix + ext) diff --git a/nipype/interfaces/workbench/cifti.py b/nipype/interfaces/workbench/cifti.py new file mode 100644 index 0000000000..d8b0c1a721 --- /dev/null +++ b/nipype/interfaces/workbench/cifti.py @@ -0,0 +1,155 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""This module provides interfaces for workbench CIFTI commands""" +from ..base import TraitedSpec, File, traits, CommandLineInputSpec +from .base import WBCommand +from ... 
import logging + +iflogger = logging.getLogger("nipype.interface") + + +class CiftiSmoothInputSpec(CommandLineInputSpec): + in_file = File( + exists=True, + mandatory=True, + argstr="%s", + position=0, + desc="The input CIFTI file", + ) + sigma_surf = traits.Float( + mandatory=True, + argstr="%s", + position=1, + desc="the sigma for the gaussian surface smoothing kernel, in mm", + ) + sigma_vol = traits.Float( + mandatory=True, + argstr="%s", + position=2, + desc="the sigma for the gaussian volume smoothing kernel, in mm", + ) + direction = traits.Enum( + "ROW", + "COLUMN", + mandatory=True, + argstr="%s", + position=3, + desc="which dimension to smooth along, ROW or COLUMN", + ) + out_file = File( + name_source=["in_file"], + name_template="smoothed_%s.nii", + keep_extension=True, + argstr="%s", + position=4, + desc="The output CIFTI", + ) + left_surf = File( + exists=True, + mandatory=True, + position=5, + argstr="-left-surface %s", + desc="Specify the left surface to use", + ) + left_corrected_areas = File( + exists=True, + position=6, + argstr="-left-corrected-areas %s", + desc="vertex areas (as a metric) to use instead of computing them from " + "the left surface.", + ) + right_surf = File( + exists=True, + mandatory=True, + position=7, + argstr="-right-surface %s", + desc="Specify the right surface to use", + ) + right_corrected_areas = File( + exists=True, + position=8, + argstr="-right-corrected-areas %s", + desc="vertex areas (as a metric) to use instead of computing them from " + "the right surface", + ) + cerebellum_surf = File( + exists=True, + position=9, + argstr="-cerebellum-surface %s", + desc="specify the cerebellum surface to use", + ) + cerebellum_corrected_areas = File( + exists=True, + position=10, + requires=["cerebellum_surf"], + argstr="-cerebellum-corrected-areas %s", + desc="vertex areas (as a metric) to use instead of computing them from " + "the cerebellum surface", + ) + cifti_roi = File( + exists=True, + position=11, + argstr="-cifti-roi %s", + desc="CIFTI file for ROI smoothing", + ) + fix_zeros_vol = traits.Bool( + position=12, + argstr="-fix-zeros-volume", + desc="treat values of zero in the volume as missing data", + ) + fix_zeros_surf = traits.Bool( + position=13, + argstr="-fix-zeros-surface", + desc="treat values of zero on the surface as missing data", + ) + merged_volume = traits.Bool( + position=14, + argstr="-merged-volume", + desc="smooth across subcortical structure boundaries", + ) + + +class CiftiSmoothOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="output CIFTI file") + + +class CiftiSmooth(WBCommand): + """ + Smooth a CIFTI file + + The input cifti file must have a brain models mapping on the chosen + dimension, columns for .dtseries, and either for .dconn. By default, + data in different structures is smoothed independently (i.e., "parcel + constrained" smoothing), so volume structures that touch do not smooth + across this boundary. Specify ``merged_volume`` to ignore these + boundaries. Surface smoothing uses the ``GEO_GAUSS_AREA`` smoothing method. + + The ``*_corrected_areas`` options are intended for when it is unavoidable + to smooth on group average surfaces; it is only an approximate correction + for the reduction of structure in a group average surface. It is better + to smooth the data on individuals before averaging, when feasible. + + The ``fix_zeros_*`` options will treat values of zero as lack of data, and + not use that value when generating the smoothed values, but will fill + zeros with extrapolated values.
The ROI should have a brain models + mapping along columns, exactly matching the mapping of the chosen + direction in the input file. Data outside the ROI is ignored. + + >>> from nipype.interfaces.workbench import CiftiSmooth + >>> smooth = CiftiSmooth() + >>> smooth.inputs.in_file = 'sub-01_task-rest.dtseries.nii' + >>> smooth.inputs.sigma_surf = 4 + >>> smooth.inputs.sigma_vol = 4 + >>> smooth.inputs.direction = 'COLUMN' + >>> smooth.inputs.right_surf = 'sub-01.R.midthickness.32k_fs_LR.surf.gii' + >>> smooth.inputs.left_surf = 'sub-01.L.midthickness.32k_fs_LR.surf.gii' + >>> smooth.cmdline + 'wb_command -cifti-smoothing sub-01_task-rest.dtseries.nii 4.0 4.0 COLUMN \ + smoothed_sub-01_task-rest.dtseries.nii \ + -left-surface sub-01.L.midthickness.32k_fs_LR.surf.gii \ + -right-surface sub-01.R.midthickness.32k_fs_LR.surf.gii' + """ + + input_spec = CiftiSmoothInputSpec + output_spec = CiftiSmoothOutputSpec + _cmd = "wb_command -cifti-smoothing" diff --git a/nipype/interfaces/workbench/metric.py b/nipype/interfaces/workbench/metric.py index e5bbb60739..9183488f93 100644 --- a/nipype/interfaces/workbench/metric.py +++ b/nipype/interfaces/workbench/metric.py @@ -1,16 +1,13 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """This module provides interfaces for workbench surface commands""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os -from ..base import (TraitedSpec, File, traits, CommandLineInputSpec) +from ..base import TraitedSpec, File, traits, CommandLineInputSpec from .base import WBCommand from ... import logging -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class MetricResampleInputSpec(CommandLineInputSpec): @@ -19,20 +16,23 @@ class MetricResampleInputSpec(CommandLineInputSpec): mandatory=True, argstr="%s", position=0, - desc="The metric file to resample") + desc="The metric file to resample", + ) current_sphere = File( exists=True, mandatory=True, argstr="%s", position=1, - desc="A sphere surface with the mesh that the metric is currently on") + desc="A sphere surface with the mesh that the metric is currently on", + ) new_sphere = File( exists=True, mandatory=True, argstr="%s", position=2, desc="A sphere surface that is in register with and" - " has the desired output mesh") + " has the desired output mesh", + ) method = traits.Enum( "ADAP_BARY_AREA", "BARYCENTRIC", @@ -40,51 +40,60 @@ class MetricResampleInputSpec(CommandLineInputSpec): mandatory=True, position=3, desc="The method name - ADAP_BARY_AREA method is recommended for" - " ordinary metric data, because it should use all data while" - " downsampling, unlike BARYCENTRIC. If ADAP_BARY_AREA is used," - " exactly one of area_surfs or area_metrics must be specified") + " ordinary metric data, because it should use all data while" + " downsampling, unlike BARYCENTRIC. 
If ADAP_BARY_AREA is used," + " exactly one of area_surfs or area_metrics must be specified", + ) out_file = File( name_source=["new_sphere"], name_template="%s.out", keep_extension=True, argstr="%s", position=4, - desc="The output metric") + desc="The output metric", + ) area_surfs = traits.Bool( position=5, argstr="-area-surfs", xor=["area_metrics"], - desc="Specify surfaces to do vertex area correction based on") + desc="Specify surfaces to do vertex area correction based on", + ) area_metrics = traits.Bool( position=5, argstr="-area-metrics", xor=["area_surfs"], - desc="Specify vertex area metrics to do area correction based on") + desc="Specify vertex area metrics to do area correction based on", + ) current_area = File( exists=True, position=6, argstr="%s", desc="A relevant anatomical surface with mesh OR" - " a metric file with vertex areas for mesh") + " a metric file with vertex areas for mesh", + ) new_area = File( exists=True, position=7, argstr="%s", desc="A relevant anatomical surface with mesh OR" - " a metric file with vertex areas for mesh") + " a metric file with vertex areas for mesh", + ) roi_metric = File( exists=True, position=8, argstr="-current-roi %s", - desc="Input roi on the current mesh used to exclude non-data vertices") + desc="Input roi on the current mesh used to exclude non-data vertices", + ) valid_roi_out = traits.Bool( position=9, argstr="-valid-roi-out", - desc="Output the ROI of vertices that got data from valid source vertices") + desc="Output the ROI of vertices that got data from valid source vertices", + ) largest = traits.Bool( position=10, argstr="-largest", - desc="Use only the value of the vertex with the largest weight") + desc="Use only the value of the vertex with the largest weight", + ) class MetricResampleOutputSpec(TraitedSpec): @@ -131,31 +140,37 @@ class MetricResample(WBCommand): -area-metrics fsaverage5.L.midthickness_va_avg.10k_fsavg_L.shape.gii \ fs_LR.L.midthickness_va_avg.32k_fs_LR.shape.gii' """ + input_spec = MetricResampleInputSpec output_spec = MetricResampleOutputSpec - _cmd = 'wb_command -metric-resample' + _cmd = "wb_command -metric-resample" def _format_arg(self, opt, spec, val): - if opt in ['current_area', 'new_area']: + if opt in ["current_area", "new_area"]: if not self.inputs.area_surfs and not self.inputs.area_metrics: - raise ValueError("{} was set but neither area_surfs or" - " area_metrics were set".format(opt)) + raise ValueError( + "{} was set but neither area_surfs nor" + " area_metrics were set".format(opt) + ) if opt == "method": - if (val == "ADAP_BARY_AREA" and - not self.inputs.area_surfs and - not self.inputs.area_metrics): - raise ValueError("Exactly one of area_surfs or area_metrics" - " must be specified") + if ( + val == "ADAP_BARY_AREA" + and not self.inputs.area_surfs + and not self.inputs.area_metrics + ): + raise ValueError( + "Exactly one of area_surfs or area_metrics must be specified" + ) if opt == "valid_roi_out" and val: # generate a filename and add it to argstr - roi_out = self._gen_filename(self.inputs.in_file, suffix='_roi') + roi_out = self._gen_filename(self.inputs.in_file, suffix="_roi") iflogger.info("Setting roi output file as %s", roi_out) spec.argstr += " " + roi_out - return super(MetricResample, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): - outputs = super(MetricResample, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.valid_roi_out: - roi_file = self._gen_filename(self.inputs.in_file, suffix='_roi')
- outputs['roi_file'] = os.path.abspath(roi_file) + roi_file = self._gen_filename(self.inputs.in_file, suffix="_roi") + outputs["roi_file"] = os.path.abspath(roi_file) return outputs diff --git a/nipype/interfaces/workbench/tests/test_auto_CiftiSmooth.py b/nipype/interfaces/workbench/tests/test_auto_CiftiSmooth.py new file mode 100644 index 0000000000..03beac887a --- /dev/null +++ b/nipype/interfaces/workbench/tests/test_auto_CiftiSmooth.py @@ -0,0 +1,111 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ..cifti import CiftiSmooth + + +def test_CiftiSmooth_inputs(): + input_map = dict( + args=dict( + argstr="%s", + ), + cerebellum_corrected_areas=dict( + argstr="-cerebellum-corrected-areas %s", + extensions=None, + position=10, + requires=["cerebellum_surf"], + ), + cerebellum_surf=dict( + argstr="-cerebellum-surface %s", + extensions=None, + position=9, + ), + cifti_roi=dict( + argstr="-cifti-roi %s", + extensions=None, + position=11, + ), + direction=dict( + argstr="%s", + mandatory=True, + position=3, + ), + environ=dict( + nohash=True, + usedefault=True, + ), + fix_zeros_surf=dict( + argstr="-fix-zeros-surface", + position=13, + ), + fix_zeros_vol=dict( + argstr="-fix-zeros-volume", + position=12, + ), + in_file=dict( + argstr="%s", + extensions=None, + mandatory=True, + position=0, + ), + left_corrected_areas=dict( + argstr="-left-corrected-areas %s", + extensions=None, + position=6, + ), + left_surf=dict( + argstr="-left-surface %s", + extensions=None, + mandatory=True, + position=5, + ), + merged_volume=dict( + argstr="-merged-volume", + position=14, + ), + out_file=dict( + argstr="%s", + extensions=None, + keep_extension=True, + name_source=["in_file"], + name_template="smoothed_%s.nii", + position=4, + ), + right_corrected_areas=dict( + argstr="-right-corrected-areas %s", + extensions=None, + position=8, + ), + right_surf=dict( + argstr="-right-surface %s", + extensions=None, + mandatory=True, + position=7, + ), + sigma_surf=dict( + argstr="%s", + mandatory=True, + position=1, + ), + sigma_vol=dict( + argstr="%s", + mandatory=True, + position=2, + ), + ) + inputs = CiftiSmooth.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(inputs.traits()[key], metakey) == value + + +def test_CiftiSmooth_outputs(): + output_map = dict( + out_file=dict( + extensions=None, + ), + ) + outputs = CiftiSmooth.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + assert getattr(outputs.traits()[key], metakey) == value diff --git a/nipype/interfaces/workbench/tests/test_auto_MetricResample.py b/nipype/interfaces/workbench/tests/test_auto_MetricResample.py index 46a66aa728..e7a861963b 100644 --- a/nipype/interfaces/workbench/tests/test_auto_MetricResample.py +++ b/nipype/interfaces/workbench/tests/test_auto_MetricResample.py @@ -1,27 +1,30 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..metric import MetricResample def test_MetricResample_inputs(): input_map = dict( area_metrics=dict( - argstr='-area-metrics', + argstr="-area-metrics", position=5, - xor=['area_surfs'], + xor=["area_surfs"], ), area_surfs=dict( - argstr='-area-surfs', + argstr="-area-surfs", position=5, - xor=['area_metrics'], + xor=["area_metrics"], + ), + args=dict( + argstr="%s", ), - args=dict(argstr='%s', ), current_area=dict( - argstr='%s', + argstr="%s", + extensions=None, position=6, ), current_sphere=dict( - argstr='%s',
argstr="%s", + extensions=None, mandatory=True, position=1, ), @@ -30,41 +33,46 @@ def test_MetricResample_inputs(): usedefault=True, ), in_file=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=0, ), largest=dict( - argstr='-largest', + argstr="-largest", position=10, ), method=dict( - argstr='%s', + argstr="%s", mandatory=True, position=3, ), new_area=dict( - argstr='%s', + argstr="%s", + extensions=None, position=7, ), new_sphere=dict( - argstr='%s', + argstr="%s", + extensions=None, mandatory=True, position=2, ), out_file=dict( - argstr='%s', + argstr="%s", + extensions=None, keep_extension=True, - name_source=['new_sphere'], - name_template='%s.out', + name_source=["new_sphere"], + name_template="%s.out", position=4, ), roi_metric=dict( - argstr='-current-roi %s', + argstr="-current-roi %s", + extensions=None, position=8, ), valid_roi_out=dict( - argstr='-valid-roi-out', + argstr="-valid-roi-out", position=9, ), ) @@ -73,10 +81,16 @@ def test_MetricResample_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MetricResample_outputs(): output_map = dict( - out_file=dict(), - roi_file=dict(), + out_file=dict( + extensions=None, + ), + roi_file=dict( + extensions=None, + ), ) outputs = MetricResample.output_spec() diff --git a/nipype/interfaces/workbench/tests/test_auto_WBCommand.py b/nipype/interfaces/workbench/tests/test_auto_WBCommand.py index b496a270dd..d2cc9a6b96 100644 --- a/nipype/interfaces/workbench/tests/test_auto_WBCommand.py +++ b/nipype/interfaces/workbench/tests/test_auto_WBCommand.py @@ -1,11 +1,12 @@ # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from __future__ import unicode_literals from ..base import WBCommand def test_WBCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict( + argstr="%s", + ), environ=dict( nohash=True, usedefault=True, diff --git a/nipype/pipeline/__init__.py b/nipype/pipeline/__init__.py index badfda5ba0..63c5557f56 100644 --- a/nipype/pipeline/__init__.py +++ b/nipype/pipeline/__init__.py @@ -1,11 +1,8 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Package contains modules for generating pipelines using interfaces """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" from .engine import Node, MapNode, JoinNode, Workflow diff --git a/nipype/pipeline/engine/__init__.py b/nipype/pipeline/engine/__init__.py index e950086307..20829e63a7 100644 --- a/nipype/pipeline/engine/__init__.py +++ b/nipype/pipeline/engine/__init__.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -7,8 +6,7 @@ """ -from __future__ import absolute_import -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" from .workflows import Workflow from .nodes import Node, MapNode, JoinNode from .utils import generate_expanded_graph diff --git a/nipype/pipeline/engine/base.py b/nipype/pipeline/engine/base.py index 7f7afd3928..27d3426863 100644 --- a/nipype/pipeline/engine/base.py +++ b/nipype/pipeline/engine/base.py @@ -1,29 +1,24 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: 
set ft=python sts=4 ts=4 sw=4 et: -"""Defines functionality for pipelined execution of interfaces - -The `EngineBase` class implements the more general view of a task. -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import object - +"""Defines functionality for pipelined execution of interfaces.""" from copy import deepcopy import re -import numpy as np from ... import config from ...interfaces.base import DynamicTraitedSpec from ...utils.filemanip import loadpkl, savepkl -class EngineBase(object): - """Defines common attributes and functions for workflows and nodes.""" +class EngineBase: + """ + Defines common attributes and functions for workflows and nodes. + + Implements the more general view of a task. + """ def __init__(self, name=None, base_dir=None): - """ Initialize base parameters of a workflow or node + """ + Initialize base parameters of a workflow or node. Parameters ---------- @@ -35,27 +30,32 @@ def __init__(self, name=None, base_dir=None): default=None, which results in the use of mkdtemp """ + self._name = None self._hierarchy = None self.name = name - self._id = self.name # for compatibility with node expansion using iterables + self._id = self.name # for compatibility with node expansion using iterables self.base_dir = base_dir + """Define the work directory for this instance of workflow element.""" + self.config = deepcopy(config._sections) @property def name(self): + """Set the unique name of this workflow element.""" return self._name @name.setter def name(self, name): - if not name or not re.match(r'^[\w-]+$', name): + if not name or not re.match(r"^[\w-]+$", name): raise ValueError('[Workflow|Node] name "%s" is not valid.' % name) self._name = name @property def fullname(self): + """Build the full name down the hierarchy.""" if self._hierarchy: - return '%s.%s' % (self._hierarchy, self.name) + return f"{self._hierarchy}.{self.name}" return self.name @property @@ -68,27 +68,28 @@ def outputs(self): @property def itername(self): - """Name for expanded iterable""" + """Get the name of the expanded iterable.""" itername = self._id if self._hierarchy: - itername = '%s.%s' % (self._hierarchy, self._id) + itername = f"{self._hierarchy}.{self._id}" return itername def clone(self, name): - """Clone an EngineBase object + """ + Clone an EngineBase object. Parameters ---------- name : string (mandatory) A clone of node or workflow must have a new name + """ if name == self.name: - raise ValueError('Cloning requires a new name, "%s" is ' - 'in use.' % name) + raise ValueError('Cloning requires a new name, "%s" is in use.' 
% name) clone = deepcopy(self) clone.name = name - if hasattr(clone, '_id'): + if hasattr(clone, "_id"): clone._id = name return clone @@ -101,15 +102,20 @@ def _check_inputs(self, parameter): return hasattr(self.inputs, parameter) def __str__(self): + """Convert to string.""" return self.fullname def __repr__(self): + """Get Python representation.""" return self.itername def save(self, filename=None): + """Store this workflow element to a file.""" if filename is None: - filename = 'temp.pklz' + filename = "temp.pklz" savepkl(filename, self) - def load(self, filename): + @staticmethod + def load(filename): + """Load this workflow element from a file.""" return loadpkl(filename) diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index b338fd862d..31ee29e04d 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -1,18 +1,14 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Defines functionality for pipelined execution of interfaces The `Node` class provides core functionality for batch processing. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, str, bytes, open - -from collections import OrderedDict +from collections import OrderedDict, defaultdict import os import os.path as op +from pathlib import Path import shutil import socket from copy import deepcopy @@ -20,30 +16,54 @@ from logging import INFO from tempfile import mkdtemp -from future import standard_library from ... import config, logging from ...utils.misc import flatten, unflatten, str2bool, dict_diff -from ...utils.filemanip import (md5, FileNotFoundError, ensure_list, - simplify_list, copyfiles, fnames_presuffix, - loadpkl, split_filename, load_json, makedirs, - emptydirs, savepkl, to_str, indirectory) - -from ...interfaces.base import (traits, InputMultiPath, CommandLine, Undefined, - DynamicTraitedSpec, Bunch, InterfaceResult, - Interface, isdefined) +from ...utils.filemanip import ( + md5, + ensure_list, + simplify_list, + copyfiles, + fnames_presuffix, + loadpkl, + split_filename, + load_json, + emptydirs, + savepkl, + silentrm, +) + +from ...interfaces.base import ( + traits, + InputMultiPath, + Undefined, + DynamicTraitedSpec, + Bunch, + InterfaceResult, + Interface, + isdefined, +) from ...interfaces.base.specs import get_filecopy_info from .utils import ( - _parameterization_dir, save_hashfile as _save_hashfile, load_resultfile as - _load_resultfile, save_resultfile as _save_resultfile, nodelist_runner as - _node_runner, strip_temp as _strip_temp, write_report, - clean_working_directory, merge_dict, evaluate_connect_function) + _parameterization_dir, + save_hashfile as _save_hashfile, + load_resultfile as _load_resultfile, + save_resultfile as _save_resultfile, + nodelist_runner as _node_runner, + strip_temp as _strip_temp, + write_node_report, + clean_working_directory, + merge_dict, + evaluate_connect_function, +) from .base import EngineBase -standard_library.install_aliases() +logger = logging.getLogger("nipype.workflow") + -logger = logging.getLogger('nipype.workflow') +class NodeExecutionError(RuntimeError): + """A nipype-specific name for exceptions when executing a Node.""" class Node(EngineBase): @@ -68,18 +88,20 @@ class Node(EngineBase): """ - def __init__(self, - interface, - name, - iterables=None, - itersource=None, - synchronize=False, - overwrite=None, - needed_outputs=None, - 
run_without_submitting=False, - n_procs=None, - mem_gb=0.20, - **kwargs): + def __init__( + self, + interface, + name, + iterables=None, + itersource=None, + synchronize=False, + overwrite=None, + needed_outputs=None, + run_without_submitting=False, + n_procs=None, + mem_gb=0.20, + **kwargs, + ): """ Parameters ---------- @@ -150,11 +172,11 @@ def __init__(self, """ # Make sure an interface is set, and that it is an Interface if interface is None: - raise IOError('Interface must be provided') + raise OSError("Interface must be provided") if not isinstance(interface, Interface): - raise IOError('interface must be an instance of an Interface') + raise OSError("interface must be an instance of an Interface") - super(Node, self).__init__(name, kwargs.get('base_dir')) + super().__init__(name, kwargs.get("base_dir")) self._interface = interface self._hierarchy = None @@ -175,8 +197,7 @@ def __init__(self, self._n_procs = n_procs # Downstream n_procs - if hasattr(self._interface.inputs, - 'num_threads') and self._n_procs is not None: + if hasattr(self._interface.inputs, "num_threads") and self._n_procs is not None: self._interface.inputs.num_threads = self._n_procs # Initialize needed_outputs and hashes @@ -186,6 +207,9 @@ def __init__(self, self.needed_outputs = needed_outputs self.config = None + if hasattr(self._interface, "write_cmdline"): + self._interface.write_cmdline = True + @property def interface(self): """Return the underlying interface object""" @@ -194,7 +218,9 @@ def interface(self): @property def result(self): """Get result from result file (do not hold it in memory)""" - return _load_resultfile(self.output_dir(), self.name)[0] + return _load_resultfile( + op.join(self.output_dir(), "result_%s.pklz" % self.name) + ) @property def inputs(self): @@ -213,7 +239,7 @@ def needed_outputs(self): @needed_outputs.setter def needed_outputs(self, new_outputs): """Needed outputs changes the hash, refresh if changed""" - new_outputs = sorted(list(set(new_outputs or []))) + new_outputs = sorted(set(new_outputs or [])) if new_outputs != self._needed_outputs: # Reset hash self._hashvalue = None @@ -223,11 +249,12 @@ def needed_outputs(self, new_outputs): @property def mem_gb(self): """Get estimated memory (GB)""" - if hasattr(self._interface, 'estimated_memory_gb'): + if hasattr(self._interface, "estimated_memory_gb"): self._mem_gb = self._interface.estimated_memory_gb logger.warning( 'Setting "estimated_memory_gb" on Interfaces has been ' - 'deprecated as of nipype 1.0, please use Node.mem_gb.') + "deprecated as of nipype 1.0, please use Node.mem_gb." 
+ ) return self._mem_gb @@ -236,8 +263,9 @@ def n_procs(self): """Get the estimated number of processes/threads""" if self._n_procs is not None: return self._n_procs - if hasattr(self._interface.inputs, 'num_threads') and isdefined( - self._interface.inputs.num_threads): + if hasattr(self._interface.inputs, "num_threads") and isdefined( + self._interface.inputs.num_threads + ): return self._interface.inputs.num_threads return 1 @@ -247,7 +275,7 @@ def n_procs(self, value): self._n_procs = value # Overwrite interface's dynamic input of num_threads - if hasattr(self._interface.inputs, 'num_threads'): + if hasattr(self._interface.inputs, "num_threads"): self._interface.inputs.num_threads = self._n_procs def output_dir(self): @@ -261,11 +289,14 @@ def output_dir(self): self.base_dir = mkdtemp() outputdir = self.base_dir if self._hierarchy: - outputdir = op.join(outputdir, *self._hierarchy.split('.')) + outputdir = op.join(outputdir, *self._hierarchy.split(".")) if self.parameterization: - params_str = ['{}'.format(p) for p in self.parameterization] - if not str2bool(self.config['execution']['parameterize_dirs']): - params_str = [_parameterization_dir(p) for p in params_str] + maxlen = ( + 252 if str2bool(self.config["execution"]["parameterize_dirs"]) else 32 + ) + params_str = [ + _parameterization_dir(str(p), maxlen) for p in self.parameterization + ] outputdir = op.join(outputdir, *params_str) self._output_dir = op.realpath(op.join(outputdir, self.name)) @@ -273,8 +304,9 @@ def output_dir(self): def set_input(self, parameter, val): """Set interface input value""" - logger.debug('[Node] %s - setting input %s = %s', self.name, parameter, - to_str(val)) + logger.debug( + "[Node] %s - setting input %s = %s", self.name, parameter, str(val) + ) setattr(self.inputs, parameter, deepcopy(val)) def get_output(self, parameter): @@ -292,39 +324,47 @@ def is_cached(self, rm_outdated=False): """ outdir = self.output_dir() - # Update hash - hashed_inputs, hashvalue = self._get_hashval() - # The output folder does not exist: not cached - if not op.exists(outdir): - logger.debug('[Node] Directory not found "%s".', outdir) + if not op.exists(outdir) or not op.exists( + op.join(outdir, "result_%s.pklz" % self.name) + ): + logger.debug('[Node] Not cached "%s".', outdir) return False, False - hashfile = op.join(outdir, '_0x%s.json' % hashvalue) - cached = op.exists(hashfile) - - # Check if updated - globhashes = glob(op.join(outdir, '_0x*.json')) - unfinished = [ - path for path in globhashes - if path.endswith('_unfinished.json') - ] + # Check if there are hashfiles + globhashes = glob(op.join(outdir, "_0x*.json")) + unfinished = [path for path in globhashes if path.endswith("_unfinished.json")] hashfiles = list(set(globhashes) - set(unfinished)) - logger.debug('[Node] Hashes: %s, %s, %s, %s', - hashed_inputs, hashvalue, hashfile, hashfiles) + + # Update hash + hashed_inputs, hashvalue = self._get_hashval() + + hashfile = op.join(outdir, "_0x%s.json" % hashvalue) + logger.debug( + "[Node] Hashes: %s, %s, %s, %s", + hashed_inputs, + hashvalue, + hashfile, + hashfiles, + ) + + cached = hashfile in hashfiles # No previous hashfiles found, we're all set. 
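+        # One matching hashfile that is also the only hashfile means the cache is current; any extra hashfiles mark the working directory as stale and are deleted just below.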
if cached and len(hashfiles) == 1: - assert(hashfile == hashfiles[0]) + assert hashfile == hashfiles[0] logger.debug('[Node] Up-to-date cache found for "%s".', self.fullname) return True, True # Cached and updated if len(hashfiles) > 1: if cached: hashfiles.remove(hashfile) # Do not clean up the node, if cached - logger.warning('[Node] Found %d previous hashfiles indicating that the working ' - 'directory of node "%s" is stale, deleting old hashfiles.', - len(hashfiles), self.fullname) + logger.warning( + "[Node] Found %d previous hashfiles indicating that the working " + 'directory of node "%s" is stale, deleting old hashfiles.', + len(hashfiles), + self.fullname, + ) for rmfile in hashfiles: os.remove(rmfile) @@ -332,7 +372,7 @@ def is_cached(self, rm_outdated=False): if not hashfiles: logger.debug('[Node] No hashfiles found in "%s".', outdir) - assert(not cached) + assert not cached return False, False # At this point only one hashfile is in the folder @@ -345,21 +385,21 @@ def is_cached(self, rm_outdated=False): loglevel = logger.getEffectiveLevel() if loglevel < INFO: # Lazy logging: only < INFO exp_hash_file_base = split_filename(hashfiles[0])[1] - exp_hash = exp_hash_file_base[len('_0x'):] - logger.log(loglevel, "[Node] Old/new hashes = %s/%s", - exp_hash, hashvalue) + exp_hash = exp_hash_file_base[len("_0x") :] + logger.log( + loglevel, "[Node] Old/new hashes = %s/%s", exp_hash, hashvalue + ) try: prev_inputs = load_json(hashfiles[0]) except Exception: pass else: - logger.log(loglevel, - dict_diff(prev_inputs, hashed_inputs, 10)) + logger.log(loglevel, dict_diff(prev_inputs, hashed_inputs, 10)) if rm_outdated: os.remove(hashfiles[0]) - assert(cached) # At this point, node is cached (may not be up-to-date) + assert cached # At this point, node is cached (may not be up-to-date) return cached, updated def hash_exists(self, updatehash=False): @@ -373,7 +413,7 @@ def hash_exists(self, updatehash=False): cached, updated = self.is_cached(rm_outdated=True) outdir = self.output_dir() - hashfile = op.join(outdir, '_0x%s.json' % self._hashvalue) + hashfile = op.join(outdir, "_0x%s.json" % self._hashvalue) if updated: return True, self._hashvalue, hashfile, self._hashed_inputs @@ -386,103 +426,119 @@ def hash_exists(self, updatehash=False): return cached, self._hashvalue, hashfile, self._hashed_inputs def run(self, updatehash=False): - """Execute the node in its directory. + """ + Execute the node in its directory. Parameters ---------- - updatehash: boolean When the hash stored in the output directory as a result of a previous run does not match that calculated for this execution, updatehash=True only updates the hash without re-running. 
- """ + """ if self.config is None: self.config = {} self.config = merge_dict(deepcopy(config._sections), self.config) outdir = self.output_dir() - force_run = self.overwrite or (self.overwrite is None and - self._interface.always_run) + force_run = self.overwrite or ( + self.overwrite is None and self._interface.always_run + ) # Check hash, check whether run should be enforced - logger.info('[Node] Setting-up "%s" in "%s".', self.fullname, outdir) + if not isinstance(self, MapNode): + logger.info(f'[Node] Setting-up "{self.fullname}" in "{outdir}".') cached, updated = self.is_cached() # If the node is cached, check on pklz files and finish if not force_run and (updated or (not updated and updatehash)): logger.debug("Only updating node hashes or skipping execution") - inputs_file = op.join(outdir, '_inputs.pklz') + inputs_file = op.join(outdir, "_inputs.pklz") if not op.exists(inputs_file): - logger.debug('Creating inputs file %s', inputs_file) + logger.debug("Creating inputs file %s", inputs_file) savepkl(inputs_file, self.inputs.get_traitsfree()) - node_file = op.join(outdir, '_node.pklz') + node_file = op.join(outdir, "_node.pklz") if not op.exists(node_file): - logger.debug('Creating node file %s', node_file) + logger.debug("Creating node file %s", node_file) savepkl(node_file, self) - result = self._run_interface(execute=False, - updatehash=updatehash and not updated) - logger.info('[Node] "%s" found cached%s.', self.fullname, - ' (and hash updated)' * (updatehash and not updated)) + result = self._run_interface( + execute=False, updatehash=updatehash and not updated + ) + logger.info( + '[Node] "%s" found cached%s.', + self.fullname, + " (and hash updated)" * (updatehash and not updated), + ) return result if cached and updated and not isinstance(self, MapNode): logger.debug('[Node] Rerunning cached, up-to-date node "%s"', self.fullname) if not force_run and str2bool( - self.config['execution']['stop_on_first_rerun']): + self.config["execution"]["stop_on_first_rerun"] + ): raise Exception( - 'Cannot rerun when "stop_on_first_rerun" is set to True') + 'Cannot rerun when "stop_on_first_rerun" is set to True' + ) # Remove any hashfile that exists at this point (re)running. if cached: - for outdatedhash in glob(op.join(self.output_dir(), '_0x*.json')): + for outdatedhash in glob(op.join(self.output_dir(), "_0x*.json")): os.remove(outdatedhash) - + # _get_hashval needs to be called before running. When there is a valid (or seemingly + # valid cache), the is_cached() member updates the hashval via _get_hashval. + # However, if this node's folder doesn't exist or the result file is not found, then + # the hashval needs to be generated here. See #3026 for a larger context. 
+        self._get_hashval()
         # Hashfile while running
-        hashfile_unfinished = op.join(
-            outdir, '_0x%s_unfinished.json' % self._hashvalue)
+        hashfile_unfinished = op.join(outdir, "_0x%s_unfinished.json" % self._hashvalue)

         # Delete directory contents if this is not a MapNode or can't resume
         can_resume = not (self._interface.can_resume and op.isfile(hashfile_unfinished))
         if can_resume and not isinstance(self, MapNode):
             emptydirs(outdir, noexist_ok=True)
         else:
-            logger.debug('[%sNode] Resume - hashfile=%s',
-                         'Map' * int(isinstance(self, MapNode)),
-                         hashfile_unfinished)
+            logger.debug(
+                "[%sNode] Resume - hashfile=%s",
+                "Map" * int(isinstance(self, MapNode)),
+                hashfile_unfinished,
+            )

         if isinstance(self, MapNode):
             # remove old json files
-            for filename in glob(op.join(outdir, '_0x*.json')):
+            for filename in glob(op.join(outdir, "_0x*.json")):
                 os.remove(filename)

         # Make sure outdir is created
-        makedirs(outdir, exist_ok=True)
+        os.makedirs(outdir, exist_ok=True)

         # Store runtime-hashfile, pre-execution report, the node and the inputs set.
         _save_hashfile(hashfile_unfinished, self._hashed_inputs)
-        write_report(
-            self, report_type='preexec', is_mapnode=isinstance(self, MapNode))
-        savepkl(op.join(outdir, '_node.pklz'), self)
-        savepkl(op.join(outdir, '_inputs.pklz'), self.inputs.get_traitsfree())
+        write_node_report(self, is_mapnode=isinstance(self, MapNode))
+        savepkl(op.join(outdir, "_node.pklz"), self)
+        savepkl(op.join(outdir, "_inputs.pklz"), self.inputs.get_traitsfree())

         try:
             result = self._run_interface(execute=True)
         except Exception:
             logger.warning('[Node] Error on "%s" (%s)', self.fullname, outdir)
             # Tear-up after error
-            os.remove(hashfile_unfinished)
+            if not silentrm(hashfile_unfinished):
+                logger.warning(
+                    """\
+Interface finished unexpectedly and the corresponding unfinished hashfile %s \
+does not exist. Another nipype instance may be running against the same work \
+directory. Please ensure no other concurrent workflows are racing""",
+                    hashfile_unfinished,
+                )
             raise

         # Tear-up after success
-        shutil.move(hashfile_unfinished,
-                    hashfile_unfinished.replace('_unfinished', ''))
-        write_report(
-            self, report_type='postexec', is_mapnode=isinstance(self, MapNode))
-        logger.info('[Node] Finished "%s".', self.fullname)
+        shutil.move(hashfile_unfinished, hashfile_unfinished.replace("_unfinished", ""))
+        write_node_report(self, result=result, is_mapnode=isinstance(self, MapNode))
         return result

@@ -490,63 +546,93 @@ def _get_hashval(self):
         self._get_inputs()
         if self._hashvalue is None and self._hashed_inputs is None:
             self._hashed_inputs, self._hashvalue = self.inputs.get_hashval(
-                hash_method=self.config['execution']['hash_method'])
-            rm_extra = self.config['execution']['remove_unnecessary_outputs']
+                hash_method=self.config["execution"]["hash_method"]
+            )
+            rm_extra = self.config["execution"]["remove_unnecessary_outputs"]
             if str2bool(rm_extra) and self.needed_outputs:
                 hashobject = md5()
                 hashobject.update(self._hashvalue.encode())
                 hashobject.update(str(self.needed_outputs).encode())
                 self._hashvalue = hashobject.hexdigest()
-                self._hashed_inputs.append(('needed_outputs', self.needed_outputs))
+                self._hashed_inputs.append(("needed_outputs", self.needed_outputs))
         return self._hashed_inputs, self._hashvalue

     def _get_inputs(self):
-        """Retrieve inputs from pointers to results file
+        """
+        Retrieve inputs from pointers to results files.

         This mechanism can be easily extended/replaced to retrieve data from
         other data sources (e.g., XNAT, HTTP, etc.).
""" - if self._got_inputs: + if self._got_inputs: # Inputs cached + return + + if not self.input_source: # No previous nodes + self._got_inputs = True return - logger.debug('Setting node inputs') + prev_results = defaultdict(list) for key, info in list(self.input_source.items()): - logger.debug('input: %s', key) - results_file = info[0] - logger.debug('results file: %s', results_file) - results = loadpkl(results_file) - output_value = Undefined - if isinstance(info[1], tuple): - output_name = info[1][0] - value = getattr(results.outputs, output_name) - if isdefined(value): - output_value = evaluate_connect_function( - info[1][1], info[1][2], value) - else: - output_name = info[1] - try: - output_value = results.outputs.trait_get()[output_name] - except AttributeError: - output_value = results.outputs.dictcopy()[output_name] - logger.debug('output: %s', output_name) + prev_results[info[0]].append((key, info[1])) + + logger.debug( + '[Node] Setting %d connected inputs of node "%s" from %d previous nodes.', + len(self.input_source), + self.name, + len(prev_results), + ) + + for results_fname, connections in list(prev_results.items()): + outputs = None try: - self.set_input(key, deepcopy(output_value)) - except traits.TraitError as e: - msg = [ - 'Error setting node input:', - 'Node: %s' % self.name, - 'input: %s' % key, - 'results_file: %s' % results_file, - 'value: %s' % str(output_value) - ] - e.args = (e.args[0] + "\n" + '\n'.join(msg), ) - raise + outputs = _load_resultfile(results_fname).outputs + except AttributeError as e: + logger.critical("%s", e) + + if outputs is None: + raise NodeExecutionError( + """\ +Error populating the inputs of node "%s": the results file of the source node \ +(%s) does not contain any outputs.""" + % (self.name, results_fname) + ) + + for key, conn in connections: + output_value = Undefined + if isinstance(conn, tuple): + value = getattr(outputs, conn[0]) + if isdefined(value): + output_value = evaluate_connect_function( + conn[1], conn[2], value + ) + else: + output_name = conn + try: + output_value = outputs.trait_get()[output_name] + except AttributeError: + output_value = outputs.dictcopy()[output_name] + logger.debug("output: %s", output_name) + + try: + self.set_input(key, deepcopy(output_value)) + except traits.TraitError as e: + msg = ( + e.args[0], + "", + "Error setting node input:", + "Node: %s" % self.name, + "input: %s" % key, + "results_file: %s" % results_fname, + "value: %s" % str(output_value), + ) + e.args = ("\n".join(msg),) + raise # Successfully set inputs self._got_inputs = True def _update_hash(self): - for outdatedhash in glob(op.join(self.output_dir(), '_0x*.json')): + for outdatedhash in glob(op.join(self.output_dir(), "_0x*.json")): os.remove(outdatedhash) _save_hashfile(self._hashvalue, self._hashed_inputs) @@ -558,31 +644,48 @@ def _run_interface(self, execute=True, updatehash=False): def _load_results(self): cwd = self.output_dir() - result, aggregate, attribute_error = _load_resultfile(cwd, self.name) + + try: + result = _load_resultfile(op.join(cwd, "result_%s.pklz" % self.name)) + except (traits.TraitError, EOFError): + logger.debug("Error populating inputs/outputs, (re)aggregating results...") + except (AttributeError, ImportError) as err: + logger.debug( + "attribute error: %s probably using different trait pickled file", + str(err), + ) + old_inputs = loadpkl(op.join(cwd, "_inputs.pklz")) + self.inputs.trait_set(**old_inputs) + else: + return result + # try aggregating first - if aggregate: - logger.debug('aggregating 
results') - if attribute_error: - old_inputs = loadpkl(op.join(cwd, '_inputs.pklz')) - self.inputs.trait_set(**old_inputs) - if not isinstance(self, MapNode): - self._copyfiles_to_wd(linksonly=True) - aggouts = self._interface.aggregate_outputs( - needed_outputs=self.needed_outputs) - runtime = Bunch( - cwd=cwd, - returncode=0, - environ=dict(os.environ), - hostname=socket.gethostname()) - result = InterfaceResult( - interface=self._interface.__class__, - runtime=runtime, - inputs=self._interface.inputs.get_traitsfree(), - outputs=aggouts) - _save_resultfile(result, cwd, self.name) - else: - logger.debug('aggregating mapnode results') - result = self._run_interface() + if not isinstance(self, MapNode): + self._copyfiles_to_wd(linksonly=True) + aggouts = self._interface.aggregate_outputs( + needed_outputs=self.needed_outputs + ) + runtime = Bunch( + cwd=cwd, + returncode=0, + environ=dict(os.environ), + hostname=socket.gethostname(), + ) + result = InterfaceResult( + interface=self._interface.__class__, + runtime=runtime, + inputs=self._interface.inputs.get_traitsfree(), + outputs=aggouts, + ) + _save_resultfile( + result, + cwd, + self.name, + rebase=str2bool(self.config["execution"]["use_relative_paths"]), + ) + else: + logger.debug("aggregating mapnode results") + result = self._run_interface() return result def _run_command(self, execute, copyfiles=True): @@ -591,68 +694,79 @@ def _run_command(self, execute, copyfiles=True): result = self._load_results() except (FileNotFoundError, AttributeError): # if aggregation does not work, rerun the node - logger.info("[Node] Some of the outputs were not found: " - "rerunning node.") + logger.info( + "[Node] Some of the outputs were not found: rerunning node." + ) copyfiles = False # OE: this was like this before, execute = True # I'll keep them for safety else: - logger.info('[Node] Cached "%s" - collecting precomputed outputs', - self.fullname) + logger.info( + '[Node] Cached "%s" - collecting precomputed outputs', self.fullname + ) return result - outdir = self.output_dir() - # Run command: either execute is true or load_results failed. 
-        result = InterfaceResult(
-            interface=self._interface.__class__,
-            runtime=Bunch(
-                cwd=outdir,
-                returncode=1,
-                environ=dict(os.environ),
-                hostname=socket.gethostname()
-            ),
-            inputs=self._interface.inputs.get_traitsfree())
-
+        outdir = Path(self.output_dir())
         if copyfiles:
             self._originputs = deepcopy(self._interface.inputs)
             self._copyfiles_to_wd(execute=execute)

-        message = '[Node] Running "{}" ("{}.{}")'.format(
-            self.name, self._interface.__module__,
-            self._interface.__class__.__name__)
-        if issubclass(self._interface.__class__, CommandLine):
-            try:
-                with indirectory(outdir):
-                    cmd = self._interface.cmdline
-            except Exception as msg:
-                result.runtime.stderr = '{}\n\n{}'.format(
-                    getattr(result.runtime, 'stderr', ''), msg)
-                _save_resultfile(result, outdir, self.name)
-                raise
-            cmdfile = op.join(outdir, 'command.txt')
-            with open(cmdfile, 'wt') as fd:
-                print(cmd + "\n", file=fd)
-            message += ', a CommandLine Interface with command:\n{}'.format(cmd)
-        logger.info(message)
-        try:
-            result = self._interface.run(cwd=outdir)
-        except Exception as msg:
-            result.runtime.stderr = '%s\n\n%s'.format(
-                getattr(result.runtime, 'stderr', ''), msg)
-            _save_resultfile(result, outdir, self.name)
-            raise
-
-        dirs2keep = None
-        if isinstance(self, MapNode):
-            dirs2keep = [op.join(outdir, 'mapflow')]
-
-        result.outputs = clean_working_directory(
-            result.outputs,
+        # Run command: either execute is true or load_results failed.
+        logger.info(
+            f'[Node] Executing "{self.name}" <{self._interface.__module__}'
+            f".{self._interface.__class__.__name__}>"
+        )
+
+        # Invoke core run method of the interface ignoring exceptions
+        result = self._interface.run(cwd=outdir, ignore_exception=True)
+        logger.info(
+            f'[Node] Finished "{self.name}", elapsed time {result.runtime.duration}s.'
+        )
+
+        exc_tb = getattr(result.runtime, "traceback", None)
+
+        if not exc_tb:
+            # Clean working directory if no errors
+            dirs2keep = None
+            if isinstance(self, MapNode):
+                dirs2keep = [op.join(outdir, "mapflow")]
+
+            result.outputs = clean_working_directory(
+                result.outputs,
+                outdir,
+                self._interface.inputs,
+                self.needed_outputs,
+                self.config,
+                dirs2keep=dirs2keep,
+            )
+
+        # Store results file under all circumstances
+        _save_resultfile(
+            result,
             outdir,
-            self._interface.inputs,
-            self.needed_outputs,
-            self.config,
-            dirs2keep=dirs2keep)
-        _save_resultfile(result, outdir, self.name)
+            self.name,
+            rebase=str2bool(self.config["execution"]["use_relative_paths"]),
+        )
+
+        if exc_tb:
+            runtime = result.runtime
+
+            def _tab(text):
+                from textwrap import indent
+
+                if not text:
+                    return ""
+                return indent(text, '\t')
+
+            msg = f"Exception raised while executing Node {self.name}.\n\n"
+            if hasattr(runtime, 'cmdline'):
+                msg += (
+                    f"Cmdline:\n{_tab(runtime.cmdline)}\n"
+                    f"Stdout:\n{_tab(runtime.stdout)}\n"
+                    f"Stderr:\n{_tab(runtime.stderr)}\n"
+                )
+            # Always pass along the traceback
+            msg += f"Traceback:\n{_tab(runtime.traceback)}"
+            raise NodeExecutionError(msg)

         return result

@@ -663,41 +777,42 @@ def _copyfiles_to_wd(self, execute=True, linksonly=False):
             # Nothing to be done
             return

-        logger.debug('copying files to wd [execute=%s, linksonly=%s]', execute,
-                     linksonly)
+        logger.debug(
+            "copying files to wd [execute=%s, linksonly=%s]", execute, linksonly
+        )

         outdir = self.output_dir()
         if execute and linksonly:
             olddir = outdir
-            outdir = op.join(outdir, '_tempinput')
-            makedirs(outdir, exist_ok=True)
+            outdir = op.join(outdir, "_tempinput")
+            os.makedirs(outdir, exist_ok=True)

         for info in filecopy_info:
-            files = self.inputs.trait_get().get(info['key'])
+            files = self.inputs.trait_get().get(info["key"])
             if not isdefined(files) or not files:
                 continue

             infiles = ensure_list(files)
             if execute:
                 if linksonly:
-                    if not info['copy']:
+                    if not info["copy"]:
                         newfiles = copyfiles(
-                            infiles, [outdir],
-                            copy=info['copy'],
-                            create_new=True)
+                            infiles, [outdir], copy=info["copy"], create_new=True
+                        )
                     else:
                         newfiles = fnames_presuffix(infiles, newpath=outdir)
-                    newfiles = _strip_temp(newfiles,
-                                           op.abspath(olddir).split(
-                                               op.sep)[-1])
+                    newfiles = _strip_temp(
+                        newfiles, op.abspath(olddir).split(op.sep)[-1]
+                    )
                 else:
                     newfiles = copyfiles(
-                        infiles, [outdir], copy=info['copy'], create_new=True)
+                        infiles, [outdir], copy=info["copy"], create_new=True
+                    )
             else:
                 newfiles = fnames_presuffix(infiles, newpath=outdir)
             if not isinstance(files, list):
                 newfiles = simplify_list(newfiles)
-            setattr(self.inputs, info['key'], newfiles)
+            setattr(self.inputs, info["key"], newfiles)

         if execute and linksonly:
             emptydirs(outdir, noexist_ok=True)

@@ -735,13 +850,9 @@ class JoinNode(Node):

     """

-    def __init__(self,
-                 interface,
-                 name,
-                 joinsource,
-                 joinfield=None,
-                 unique=False,
-                 **kwargs):
+    def __init__(
+        self, interface, name, joinsource, joinfield=None, unique=False, **kwargs
+    ):
         """

         Parameters
@@ -759,7 +870,7 @@ def __init__(self,

         See Node docstring for additional keyword arguments.
""" - super(JoinNode, self).__init__(interface, name, **kwargs) + super().__init__(interface, name, **kwargs) self._joinsource = None # The member should be defined self.joinsource = joinsource # Let the setter do the job @@ -773,8 +884,9 @@ def __init__(self, self.joinfield = joinfield """the fields to join""" - self._inputs = self._override_join_traits(self._interface.inputs, - self.joinfield) + self._inputs = self._override_join_traits( + self._interface.inputs, self.joinfield + ) """the override inputs""" self._unique = unique @@ -824,8 +936,9 @@ def _add_join_item_fields(self): """ # create the new join item fields idx = self._next_slot_index - newfields = dict([(field, self._add_join_item_field(field, idx)) - for field in self.joinfield]) + newfields = { + field: self._add_join_item_field(field, idx) for field in self.joinfield + } # increment the join slot index logger.debug("Added the %s join item fields %s.", self, newfields) self._next_slot_index += 1 @@ -859,8 +972,10 @@ def _override_join_traits(self, basetraits, fields): # validate the fields for field in fields: if not basetraits.trait(field): - raise ValueError("The JoinNode %s does not have a field" - " named %s" % (self.name, field)) + raise ValueError( + "The JoinNode %s does not have a field" + " named %s" % (self.name, field) + ) for name, trait in list(basetraits.items()): # if a join field has a single inner trait, then the item # trait is that inner trait. Otherwise, the item trait is @@ -871,7 +986,11 @@ def _override_join_traits(self, basetraits, fields): setattr(dyntraits, name, Undefined) logger.debug( "Converted the join node %s field %s trait type from %s to %s", - self, name, trait.trait_type.info(), item_trait.info()) + self, + name, + trait.trait_type.info(), + item_trait.info(), + ) else: dyntraits.add_trait(name, traits.Any) setattr(dyntraits, name, Undefined) @@ -880,7 +999,7 @@ def _override_join_traits(self, basetraits, fields): def _run_command(self, execute, copyfiles=True): """Collates the join inputs prior to delegating to the superclass.""" self._collate_join_field_inputs() - return super(JoinNode, self)._run_command(execute, copyfiles) + return super()._run_command(execute, copyfiles) def _collate_join_field_inputs(self): """ @@ -893,17 +1012,27 @@ def _collate_join_field_inputs(self): try: setattr(self._interface.inputs, field, val) except Exception as e: - raise ValueError(">>JN %s %s %s %s %s: %s" % - (self, field, val, - self.inputs.copyable_trait_names(), - self.joinfield, e)) + raise ValueError( + ">>JN %s %s %s %s %s: %s" + % ( + self, + field, + val, + self.inputs.copyable_trait_names(), + self.joinfield, + e, + ) + ) elif hasattr(self._interface.inputs, field): # copy the non-join field val = getattr(self._inputs, field) if isdefined(val): setattr(self._interface.inputs, field, val) - logger.debug("Collated %d inputs into the %s node join fields", - self._next_slot_index, self) + logger.debug( + "Collated %d inputs into the %s node join fields", + self._next_slot_index, + self, + ) def _collate_input_value(self, field): """ @@ -917,10 +1046,7 @@ def _collate_input_value(self, field): the iterables order. If the ``unique`` flag is set, then duplicate values are removed but the iterables order is preserved. 
""" - val = [ - self._slot_value(field, idx) - for idx in range(self._next_slot_index) - ] + val = [self._slot_value(field, idx) for idx in range(self._next_slot_index)] basetrait = self._interface.inputs.trait(field) if isinstance(basetrait.trait_type, traits.Set): return set(val) @@ -937,8 +1063,9 @@ def _slot_value(self, field, index): except AttributeError as e: raise AttributeError( "The join node %s does not have a slot field %s" - " to hold the %s value at index %d: %s" % (self, slot_field, - field, index, e)) + " to hold the %s value at index %d: %s" + % (self, slot_field, field, index, e) + ) class MapNode(Node): @@ -957,13 +1084,9 @@ class MapNode(Node): """ - def __init__(self, - interface, - iterfield, - name, - serial=False, - nested=False, - **kwargs): + def __init__( + self, interface, iterfield, name, serial=False, nested=False, **kwargs + ): """ Parameters @@ -988,26 +1111,26 @@ def __init__(self, See Node docstring for additional keyword arguments. """ - super(MapNode, self).__init__(interface, name, **kwargs) + super().__init__(interface, name, **kwargs) if isinstance(iterfield, (str, bytes)): iterfield = [iterfield] self.iterfield = iterfield self.nested = nested self._inputs = self._create_dynamic_traits( - self._interface.inputs, fields=self.iterfield) + self._interface.inputs, fields=self.iterfield + ) self._inputs.on_trait_change(self._set_mapnode_input) self._got_inputs = False self._serial = serial def _create_dynamic_traits(self, basetraits, fields=None, nitems=None): - """Convert specific fields of a trait to accept multiple inputs - """ + """Convert specific fields of a trait to accept multiple inputs""" output = DynamicTraitedSpec() if fields is None: fields = basetraits.copyable_trait_names() for name, spec in list(basetraits.items()): if name in fields and ((nitems is None) or (nitems > 1)): - logger.debug('adding multipath trait: %s', name) + logger.debug("adding multipath trait: %s", name) if self.nested: output.add_trait(name, InputMultiPath(traits.Any())) else: @@ -1026,13 +1149,15 @@ def set_input(self, parameter, val): Set interface input value or nodewrapper attribute Priority goes to interface. 
""" - logger.debug('setting nodelevel(%s) input %s = %s', to_str(self), - parameter, to_str(val)) + logger.debug( + "setting nodelevel(%s) input %s = %s", str(self), parameter, str(val) + ) self._set_mapnode_input(parameter, deepcopy(val)) def _set_mapnode_input(self, name, newvalue): - logger.debug('setting mapnode(%s) input: %s -> %s', to_str(self), name, - to_str(newvalue)) + logger.debug( + "setting mapnode(%s) input: %s -> %s", str(self), name, str(newvalue) + ) if name in self.iterfield: setattr(self._inputs, name, newvalue) else: @@ -1050,25 +1175,24 @@ def _get_hashval(self): for name in self.iterfield: hashinputs.remove_trait(name) hashinputs.add_trait( - name, - InputMultiPath( - self._interface.inputs.traits()[name].trait_type)) - logger.debug('setting hashinput %s-> %s', name, - getattr(self._inputs, name)) + name, InputMultiPath(self._interface.inputs.traits()[name].trait_type) + ) + logger.debug("setting hashinput %s-> %s", name, getattr(self._inputs, name)) if self.nested: setattr(hashinputs, name, flatten(getattr(self._inputs, name))) else: setattr(hashinputs, name, getattr(self._inputs, name)) hashed_inputs, hashvalue = hashinputs.get_hashval( - hash_method=self.config['execution']['hash_method']) - rm_extra = self.config['execution']['remove_unnecessary_outputs'] + hash_method=self.config["execution"]["hash_method"] + ) + rm_extra = self.config["execution"]["remove_unnecessary_outputs"] if str2bool(rm_extra) and self.needed_outputs: hashobject = md5() hashobject.update(hashvalue.encode()) sorted_outputs = sorted(self.needed_outputs) hashobject.update(str(sorted_outputs).encode()) hashvalue = hashobject.hexdigest() - hashed_inputs.append(('needed_outputs', sorted_outputs)) + hashed_inputs.append(("needed_outputs", sorted_outputs)) self._hashed_inputs, self._hashvalue = hashed_inputs, hashvalue return self._hashed_inputs, self._hashvalue @@ -1085,14 +1209,11 @@ def _make_nodes(self, cwd=None): if cwd is None: cwd = self.output_dir() if self.nested: - nitems = len( - flatten( - ensure_list(getattr(self.inputs, self.iterfield[0])))) + nitems = len(flatten(ensure_list(getattr(self.inputs, self.iterfield[0])))) else: - nitems = len( - ensure_list(getattr(self.inputs, self.iterfield[0]))) + nitems = len(ensure_list(getattr(self.inputs, self.iterfield[0]))) for i in range(nitems): - nodename = '_%s%d' % (self.name, i) + nodename = "_%s%d" % (self.name, i) node = Node( deepcopy(self._interface), n_procs=self._n_procs, @@ -1100,47 +1221,44 @@ def _make_nodes(self, cwd=None): overwrite=self.overwrite, needed_outputs=self.needed_outputs, run_without_submitting=self.run_without_submitting, - base_dir=op.join(cwd, 'mapflow'), - name=nodename) + base_dir=op.join(cwd, "mapflow"), + name=nodename, + ) node.plugin_args = self.plugin_args node.interface.inputs.trait_set( - **deepcopy(self._interface.inputs.trait_get())) + **deepcopy(self._interface.inputs.trait_get()) + ) node.interface.resource_monitor = self._interface.resource_monitor for field in self.iterfield: if self.nested: - fieldvals = flatten( - ensure_list(getattr(self.inputs, field))) + fieldvals = flatten(ensure_list(getattr(self.inputs, field))) else: fieldvals = ensure_list(getattr(self.inputs, field)) - logger.debug('setting input %d %s %s', i, field, fieldvals[i]) + logger.debug("setting input %d %s %s", i, field, fieldvals[i]) setattr(node.inputs, field, fieldvals[i]) node.config = self.config yield i, node def _collate_results(self, nodes): finalresult = InterfaceResult( - interface=[], - runtime=[], - provenance=[], - 
inputs=[], - outputs=self.outputs) + interface=[], runtime=[], provenance=[], inputs=[], outputs=self.outputs + ) returncode = [] for i, nresult, err in nodes: finalresult.runtime.insert(i, None) returncode.insert(i, err) if nresult: - if hasattr(nresult, 'runtime'): + if hasattr(nresult, "runtime"): finalresult.interface.insert(i, nresult.interface) finalresult.inputs.insert(i, nresult.inputs) finalresult.runtime[i] = nresult.runtime - if hasattr(nresult, 'provenance'): + if hasattr(nresult, "provenance"): finalresult.provenance.insert(i, nresult.provenance) if self.outputs: for key, _ in list(self.outputs.items()): - rm_extra = ( - self.config['execution']['remove_unnecessary_outputs']) + rm_extra = self.config["execution"]["remove_unnecessary_outputs"] if str2bool(rm_extra) and self.needed_outputs: if key not in self.needed_outputs: continue @@ -1159,20 +1277,20 @@ def _collate_results(self, nodes): for key, _ in list(self.outputs.items()): values = getattr(finalresult.outputs, key) if isdefined(values): - values = unflatten(values, - ensure_list( - getattr(self.inputs, - self.iterfield[0]))) + values = unflatten( + values, ensure_list(getattr(self.inputs, self.iterfield[0])) + ) setattr(finalresult.outputs, key, values) - if returncode and any([code is not None for code in returncode]): + if returncode and any(code is not None for code in returncode): msg = [] for i, code in enumerate(returncode): if code is not None: - msg += ['Subnode %d failed' % i] - msg += ['Error: %s' % str(code)] - raise Exception('Subnodes of node: %s failed:\n%s' % - (self.name, '\n'.join(msg))) + msg += ["Subnode %d failed" % i] + msg += ["Error: %s" % str(code)] + raise NodeExecutionError( + "Subnodes of node: {} failed:\n{}".format(self.name, "\n".join(msg)) + ) return finalresult @@ -1180,7 +1298,7 @@ def get_subnodes(self): """Generate subnodes of a mapnode and write pre-execution report""" self._get_inputs() self._check_iterfield() - write_report(self, report_type='preexec', is_mapnode=True) + write_node_report(self, result=None, is_mapnode=True) return [node for _, node in self._make_nodes()] def num_subnodes(self): @@ -1190,17 +1308,16 @@ def num_subnodes(self): if self._serial: return 1 if self.nested: - return len( - ensure_list( - flatten(getattr(self.inputs, self.iterfield[0])))) + return len(ensure_list(flatten(getattr(self.inputs, self.iterfield[0])))) return len(ensure_list(getattr(self.inputs, self.iterfield[0]))) def _get_inputs(self): old_inputs = self._inputs.trait_get() self._inputs = self._create_dynamic_traits( - self._interface.inputs, fields=self.iterfield) + self._interface.inputs, fields=self.iterfield + ) self._inputs.trait_set(**old_inputs) - super(MapNode, self)._get_inputs() + super()._get_inputs() def _check_iterfield(self): """Checks iterfield @@ -1210,17 +1327,17 @@ def _check_iterfield(self): """ for iterfield in self.iterfield: if not isdefined(getattr(self.inputs, iterfield)): - raise ValueError(("Input %s was not set but it is listed " - "in iterfields.") % iterfield) + raise ValueError( + "Input %s was not set but it is listed in iterfields." 
% iterfield + ) if len(self.iterfield) > 1: - first_len = len( - ensure_list(getattr(self.inputs, self.iterfield[0]))) + first_len = len(ensure_list(getattr(self.inputs, self.iterfield[0]))) for iterfield in self.iterfield[1:]: - if first_len != len( - ensure_list(getattr(self.inputs, iterfield))): + if first_len != len(ensure_list(getattr(self.inputs, iterfield))): raise ValueError( - ("All iterfields of a MapNode have to " - "have the same length. %s") % str(self.inputs)) + "All iterfields of a MapNode have to have the same length. %s" + % str(self.inputs) + ) def _run_interface(self, execute=True, updatehash=False): """Run the mapnode interface @@ -1235,27 +1352,51 @@ def _run_interface(self, execute=True, updatehash=False): # Set up mapnode folder names if self.nested: - nitems = len( - ensure_list( - flatten(getattr(self.inputs, self.iterfield[0])))) + nitems = len(ensure_list(flatten(getattr(self.inputs, self.iterfield[0])))) else: - nitems = len( - ensure_list(getattr(self.inputs, self.iterfield[0]))) - nnametpl = '_%s{}' % self.name + nitems = len(ensure_list(getattr(self.inputs, self.iterfield[0]))) + nnametpl = "_%s{}" % self.name nodenames = [nnametpl.format(i) for i in range(nitems)] # Run mapnode - result = self._collate_results( - _node_runner( - self._make_nodes(cwd), - updatehash=updatehash, - stop_first=str2bool( - self.config['execution']['stop_on_first_crash']))) + outdir = self.output_dir() + result = InterfaceResult( + interface=self._interface.__class__, + runtime=Bunch( + cwd=outdir, + returncode=1, + environ=dict(os.environ), + hostname=socket.gethostname(), + ), + inputs=self._interface.inputs.get_traitsfree(), + ) + try: + result = self._collate_results( + _node_runner( + self._make_nodes(cwd), + updatehash=updatehash, + stop_first=str2bool( + self.config["execution"]["stop_on_first_crash"] + ), + ) + ) + except Exception as msg: + result.runtime.stderr = "{}\n\n{}".format( + getattr(result.runtime, "stderr", ""), msg + ) + _save_resultfile( + result, + outdir, + self.name, + rebase=str2bool(self.config["execution"]["use_relative_paths"]), + ) + raise + # And store results - _save_resultfile(result, cwd, self.name) + _save_resultfile(result, cwd, self.name, rebase=False) # remove any node directories no longer required dirs2remove = [] - for path in glob(op.join(cwd, 'mapflow', '*')): + for path in glob(op.join(cwd, "mapflow", "*")): if op.isdir(path): if path.split(op.sep)[-1] not in nodenames: dirs2remove.append(path) diff --git a/nipype/pipeline/engine/report_template.html b/nipype/pipeline/engine/report_template.html index 3fb66b4a02..86b2745122 100644 --- a/nipype/pipeline/engine/report_template.html +++ b/nipype/pipeline/engine/report_template.html @@ -261,4 +261,3 @@

-
diff --git a/nipype/pipeline/engine/tests/__init__.py b/nipype/pipeline/engine/tests/__init__.py
index 99fb243f19..349937997e 100644
--- a/nipype/pipeline/engine/tests/__init__.py
+++ b/nipype/pipeline/engine/tests/__init__.py
@@ -1,3 +1,2 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
diff --git a/nipype/pipeline/engine/tests/test_base.py b/nipype/pipeline/engine/tests/test_base.py
index fd87aa6878..5562a3338c 100644
--- a/nipype/pipeline/engine/tests/test_base.py
+++ b/nipype/pipeline/engine/tests/test_base.py
@@ -1,7 +1,5 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-from __future__ import print_function, unicode_literals
 import pytest
 from ..base import EngineBase
@@ -11,79 +9,82 @@


 class InputSpec(nib.TraitedSpec):
-    input1 = nib.traits.Int(desc='a random int')
-    input2 = nib.traits.Int(desc='a random int')
-    input_file = nib.traits.File(desc='Random File')
+    input1 = nib.traits.Int(desc="a random int")
+    input2 = nib.traits.Int(desc="a random int")
+    input_file = nib.File(desc="Random File")


 class OutputSpec(nib.TraitedSpec):
-    output1 = nib.traits.List(nib.traits.Int, desc='outputs')
+    output1 = nib.traits.List(nib.traits.Int, desc="outputs")


-class EngineTestInterface(nib.BaseInterface):
+class EngineTestInterface(nib.SimpleInterface):
     input_spec = InputSpec
     output_spec = OutputSpec

     def _run_interface(self, runtime):
         runtime.returncode = 0
+        self._results["output1"] = [1, self.inputs.input1]
         return runtime

-    def _list_outputs(self):
-        outputs = self._outputs().get()
-        outputs['output1'] = [1, self.inputs.input1]
-        return outputs
-

-@pytest.mark.parametrize(
-    'name', ['valid1', 'valid_node', 'valid-node', 'ValidNode0'])
+@pytest.mark.parametrize("name", ["valid1", "valid_node", "valid-node", "ValidNode0"])
 def test_create(name):
     base = EngineBase(name=name)
     assert base.name == name


 @pytest.mark.parametrize(
-    'name', ['invalid*1', 'invalid.1', 'invalid@', 'in/valid', None])
+    "name", ["invalid*1", "invalid.1", "invalid@", "in/valid", None]
+)
 def test_create_invalid(name):
     with pytest.raises(ValueError):
         EngineBase(name=name)


 def test_hierarchy():
-    base = EngineBase(name='nodename')
-    base._hierarchy = 'some.history.behind'
+    base = EngineBase(name="nodename")
+    base._hierarchy = "some.history.behind"

-    assert base.name == 'nodename'
-    assert base.fullname == 'some.history.behind.nodename'
+    assert base.name == "nodename"
+    assert base.fullname == "some.history.behind.nodename"


 def test_clone():
-    base = EngineBase(name='nodename')
-    base2 = base.clone('newnodename')
+    base = EngineBase(name="nodename")
+    base2 = base.clone("newnodename")

-    assert (base.base_dir == base2.base_dir and
-            base.config == base2.config and
-            base2.name == 'newnodename')
+    assert (
+        base.base_dir == base2.base_dir
+        and base.config == base2.config
+        and base2.name == "newnodename"
+    )

     with pytest.raises(ValueError):
-        base.clone('nodename')
+        base.clone("nodename")
+

 def test_clone_node_iterables(tmpdir):
     tmpdir.chdir()

     def addstr(string):
-        return ('%s + 2' % string)
-
-    subject_list = ['sub-001', 'sub-002']
-    inputnode = pe.Node(niu.IdentityInterface(fields=['subject']),
-                        name='inputnode')
-    inputnode.iterables = [('subject', subject_list)]
-
-    node_1 = pe.Node(niu.Function(input_names='string',
-                                  output_names='string',
-                                  function=addstr), name='node_1')
-    node_2 = node_1.clone('node_2')
-
-    workflow = pe.Workflow(name='iter_clone_wf')
-    workflow.connect([(inputnode, node_1, [('subject', 'string')]),
-                      (node_1, node_2, [('string', 'string')])])
+        return "%s + 2" % string
+
+    subject_list = ["sub-001", "sub-002"]
+    inputnode = pe.Node(niu.IdentityInterface(fields=["subject"]), name="inputnode")
+    inputnode.iterables = [("subject", subject_list)]
+
+    node_1 = pe.Node(
+        niu.Function(input_names="string", output_names="string", function=addstr),
+        name="node_1",
+    )
+    node_2 = node_1.clone("node_2")
+
+    workflow = pe.Workflow(name="iter_clone_wf")
+    workflow.connect(
+        [
+            (inputnode, node_1, [("subject", "string")]),
+            (node_1, node_2, [("string", "string")]),
+        ]
+    )
     workflow.run()
diff --git a/nipype/pipeline/engine/tests/test_engine.py b/nipype/pipeline/engine/tests/test_engine.py
index 56d05a1d56..abf9426d43 100644
--- a/nipype/pipeline/engine/tests/test_engine.py
+++ b/nipype/pipeline/engine/tests/test_engine.py
@@ -1,17 +1,11 @@
-# -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """Tests for the engine module
 """
-
-from __future__ import print_function
-from __future__ import unicode_literals
-from builtins import open
 from copy import deepcopy
 from glob import glob
 import os
-
 import pytest

 from ... import engine as pe
 from .test_base import EngineTestInterface
@@ -24,17 +18,14 @@
 @pytest.mark.parametrize(
     "iterables, expected",
     [
-        ({
-            "1": None
-        }, (1, 0)),  # test1
-        ({
-            "1": dict(input1=lambda: [1, 2], input2=lambda: [1, 2])
-        }, (4, 0))  # test2
-    ])
+        ({"1": None}, (1, 0)),  # test1
+        ({"1": dict(input1=lambda: [1, 2], input2=lambda: [1, 2])}, (4, 0)),  # test2
+    ],
+)
 def test_1mod(iterables, expected):
-    pipe = pe.Workflow(name='pipe')
-    mod1 = pe.Node(interface=EngineTestInterface(), name='mod1')
-    setattr(mod1, "iterables", iterables["1"])
+    pipe = pe.Workflow(name="pipe")
+    mod1 = pe.Node(interface=EngineTestInterface(), name="mod1")
+    mod1.iterables = iterables["1"]
     pipe.add_nodes([mod1])
     pipe._flatgraph = pipe._create_flat_graph()
     pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph))
@@ -45,26 +36,21 @@ def test_1mod(iterables, expected):
 @pytest.mark.parametrize(
     "iterables, expected",
     [
-        ({
-            "1": {},
-            "2": dict(input1=lambda: [1, 2])
-        }, (3, 2)),  # test3
-        ({
-            "1": dict(input1=lambda: [1, 2]),
-            "2": {}
-        }, (4, 2)),  # test4
-        ({
-            "1": dict(input1=lambda: [1, 2]),
-            "2": dict(input1=lambda: [1, 2])
-        }, (6, 4))  # test5
-    ])
+        ({"1": {}, "2": dict(input1=lambda: [1, 2])}, (3, 2)),  # test3
+        ({"1": dict(input1=lambda: [1, 2]), "2": {}}, (4, 2)),  # test4
+        (
+            {"1": dict(input1=lambda: [1, 2]), "2": dict(input1=lambda: [1, 2])},
+            (6, 4),
+        ),  # test5
+    ],
+)
 def test_2mods(iterables, expected):
-    pipe = pe.Workflow(name='pipe')
-    mod1 = pe.Node(interface=EngineTestInterface(), name='mod1')
-    mod2 = pe.Node(interface=EngineTestInterface(), name='mod2')
+    pipe = pe.Workflow(name="pipe")
+    mod1 = pe.Node(interface=EngineTestInterface(), name="mod1")
+    mod2 = pe.Node(interface=EngineTestInterface(), name="mod2")
     for nr in ["1", "2"]:
-        setattr(eval("mod" + nr), "iterables", iterables[nr])
-    pipe.connect([(mod1, mod2, [('output1', 'input2')])])
+        eval("mod" + nr).iterables = iterables[nr]
+    pipe.connect([(mod1, mod2, [("output1", "input2")])])
     pipe._flatgraph = pipe._create_flat_graph()
     pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph))
     assert len(pipe._execgraph.nodes()) == expected[0]
     assert len(pipe._execgraph.edges()) == expected[1]

@@ -74,95 +60,111 @@
 @pytest.mark.parametrize(
     "iterables, expected, connect",
     [
-        ({
-            "1": {},
-            "2": dict(input1=lambda: [1, 2]),
-            "3": {}
-        }, (5, 4), ("1-2", "2-3")),  # test6
-        ({
-            "1": dict(input1=lambda: [1, 2]),
-            "2": {},
-            "3": {}
-        }, (5, 4), ("1-3", "2-3")),  # test7
-        ({
-            "1": dict(input1=lambda: [1, 2]),
-            "2": dict(input1=lambda: [1, 2]),
-            "3": {}
-        }, (8, 8), ("1-3", "2-3")),  # test8
-    ])
+        (
+            {"1": {}, "2": dict(input1=lambda: [1, 2]), "3": {}},
+            (5, 4),
+            ("1-2", "2-3"),
+        ),  # test6
+        (
+            {"1": dict(input1=lambda: [1, 2]), "2": {}, "3": {}},
+            (5, 4),
+            ("1-3", "2-3"),
+        ),  # test7
+        (
+            {
+                "1": dict(input1=lambda: [1, 2]),
+                "2": dict(input1=lambda: [1, 2]),
+                "3": {},
+            },
+            (8, 8),
+            ("1-3", "2-3"),
+        ),  # test8
+    ],
+)
 def test_3mods(iterables, expected, connect):
-    pipe = pe.Workflow(name='pipe')
-    mod1 = pe.Node(interface=EngineTestInterface(), name='mod1')
-    mod2 = pe.Node(interface=EngineTestInterface(), name='mod2')
-    mod3 = pe.Node(interface=EngineTestInterface(), name='mod3')
+    pipe = pe.Workflow(name="pipe")
+    mod1 = pe.Node(interface=EngineTestInterface(), name="mod1")
+    mod2 = pe.Node(interface=EngineTestInterface(), name="mod2")
+    mod3 = pe.Node(interface=EngineTestInterface(), name="mod3")
     for nr in ["1", "2", "3"]:
-        setattr(eval("mod" + nr), "iterables", iterables[nr])
+        eval("mod" + nr).iterables = iterables[nr]
     if connect == ("1-2", "2-3"):
-        pipe.connect([(mod1, mod2, [('output1', 'input2')]),
-                      (mod2, mod3, [('output1', 'input2')])])
+        pipe.connect(
+            [
+                (mod1, mod2, [("output1", "input2")]),
+                (mod2, mod3, [("output1", "input2")]),
+            ]
+        )
     elif connect == ("1-3", "2-3"):
-        pipe.connect([(mod1, mod3, [('output1', 'input1')]),
-                      (mod2, mod3, [('output1', 'input2')])])
+        pipe.connect(
+            [
+                (mod1, mod3, [("output1", "input1")]),
+                (mod2, mod3, [("output1", "input2")]),
+            ]
+        )
     else:
         raise Exception(
-            "connect pattern is not implemented yet within the test function")
+            "connect pattern is not implemented yet within the test function"
+        )
     pipe._flatgraph = pipe._create_flat_graph()
     pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph))
     assert len(pipe._execgraph.nodes()) == expected[0]
     assert len(pipe._execgraph.edges()) == expected[1]
-    edgenum = sorted([(len(pipe._execgraph.in_edges(node)) +
-                       len(pipe._execgraph.out_edges(node)))
-                      for node in pipe._execgraph.nodes()])
+    edgenum = sorted(
+        [
+            (len(pipe._execgraph.in_edges(node)) + len(pipe._execgraph.out_edges(node)))
+            for node in pipe._execgraph.nodes()
+        ]
+    )
     assert edgenum[0] > 0


 def test_expansion():
-    pipe1 = pe.Workflow(name='pipe1')
-    mod1 = pe.Node(interface=EngineTestInterface(), name='mod1')
-    mod2 = pe.Node(interface=EngineTestInterface(), name='mod2')
-    pipe1.connect([(mod1, mod2, [('output1', 'input2')])])
-    pipe2 = pe.Workflow(name='pipe2')
-    mod3 = pe.Node(interface=EngineTestInterface(), name='mod3')
-    mod4 = pe.Node(interface=EngineTestInterface(), name='mod4')
-    pipe2.connect([(mod3, mod4, [('output1', 'input2')])])
+    pipe1 = pe.Workflow(name="pipe1")
+    mod1 = pe.Node(interface=EngineTestInterface(), name="mod1")
+    mod2 = pe.Node(interface=EngineTestInterface(), name="mod2")
+    pipe1.connect([(mod1, mod2, [("output1", "input2")])])
+    pipe2 = pe.Workflow(name="pipe2")
+    mod3 = pe.Node(interface=EngineTestInterface(), name="mod3")
+    mod4 = pe.Node(interface=EngineTestInterface(), name="mod4")
+    pipe2.connect([(mod3, mod4, [("output1", "input2")])])
     pipe3 = pe.Workflow(name="pipe3")
-    pipe3.connect([(pipe1, pipe2, [('mod2.output1', 'mod4.input1')])])
+    pipe3.connect([(pipe1, pipe2, [("mod2.output1", "mod4.input1")])])
     pipe4 = pe.Workflow(name="pipe4")
-    mod5 = pe.Node(interface=EngineTestInterface(), name='mod5')
+    mod5 = pe.Node(interface=EngineTestInterface(), name="mod5")
     pipe4.add_nodes([mod5])
     pipe5 = pe.Workflow(name="pipe5")
     pipe5.add_nodes([pipe4])
     pipe6 = pe.Workflow(name="pipe6")
-    pipe6.connect([(pipe5, pipe3, [('pipe4.mod5.output1',
-                                    'pipe2.mod3.input1')])])
+    pipe6.connect([(pipe5, pipe3, [("pipe4.mod5.output1", "pipe2.mod3.input1")])])
     pipe6._flatgraph = pipe6._create_flat_graph()


 def test_iterable_expansion():
-    wf1 = pe.Workflow(name='test')
-    node1 = pe.Node(EngineTestInterface(), name='node1')
-    node2 = pe.Node(EngineTestInterface(), name='node2')
-    node1.iterables = ('input1', [1, 2])
-    wf1.connect(node1, 'output1', node2, 'input2')
-    wf3 = pe.Workflow(name='group')
+    wf1 = pe.Workflow(name="test")
+    node1 = pe.Node(EngineTestInterface(), name="node1")
+    node2 = pe.Node(EngineTestInterface(), name="node2")
+    node1.iterables = ("input1", [1, 2])
+    wf1.connect(node1, "output1", node2, "input2")
+    wf3 = pe.Workflow(name="group")
     for i in [0, 1, 2]:
-        wf3.add_nodes([wf1.clone(name='test%d' % i)])
+        wf3.add_nodes([wf1.clone(name="test%d" % i)])
     wf3._flatgraph = wf3._create_flat_graph()
     assert len(pe.generate_expanded_graph(wf3._flatgraph).nodes()) == 12


 def test_synchronize_expansion():
-    wf1 = pe.Workflow(name='test')
-    node1 = pe.Node(EngineTestInterface(), name='node1')
-    node1.iterables = [('input1', [1, 2]), ('input2', [3, 4, 5])]
+    wf1 = pe.Workflow(name="test")
+    node1 = pe.Node(EngineTestInterface(), name="node1")
+    node1.iterables = [("input1", [1, 2]), ("input2", [3, 4, 5])]
     node1.synchronize = True
-    node2 = pe.Node(EngineTestInterface(), name='node2')
-    wf1.connect(node1, 'output1', node2, 'input2')
-    wf3 = pe.Workflow(name='group')
+    node2 = pe.Node(EngineTestInterface(), name="node2")
+    wf1.connect(node1, "output1", node2, "input2")
+    wf3 = pe.Workflow(name="group")
     for i in [0, 1, 2]:
-        wf3.add_nodes([wf1.clone(name='test%d' % i)])
+        wf3.add_nodes([wf1.clone(name="test%d" % i)])
     wf3._flatgraph = wf3._create_flat_graph()
     # Each expanded graph clone has:
     # 3 node1 expansion nodes and
@@ -173,19 +175,19 @@ def test_synchronize_expansion():


 def test_synchronize_tuples_expansion():
-    wf1 = pe.Workflow(name='test')
+    wf1 = pe.Workflow(name="test")

-    node1 = pe.Node(EngineTestInterface(), name='node1')
-    node2 = pe.Node(EngineTestInterface(), name='node2')
-    node1.iterables = [('input1', 'input2'), [(1, 3), (2, 4), (None, 5)]]
+    node1 = pe.Node(EngineTestInterface(), name="node1")
+    node2 = pe.Node(EngineTestInterface(), name="node2")
+    node1.iterables = [("input1", "input2"), [(1, 3), (2, 4), (None, 5)]]
     node1.synchronize = True

-    wf1.connect(node1, 'output1', node2, 'input2')
+    wf1.connect(node1, "output1", node2, "input2")

-    wf3 = pe.Workflow(name='group')
+    wf3 = pe.Workflow(name="group")
     for i in [0, 1, 2]:
-        wf3.add_nodes([wf1.clone(name='test%d' % i)])
+        wf3.add_nodes([wf1.clone(name="test%d" % i)])
     wf3._flatgraph = wf3._create_flat_graph()

     # Identical to test_synchronize_expansion
@@ -193,26 +195,25 @@ def test_synchronize_tuples_expansion():


 def test_itersource_expansion():
+    wf1 = pe.Workflow(name="test")
+    node1 = pe.Node(EngineTestInterface(), name="node1")
+    node1.iterables = ("input1", [1, 2])

-    wf1 = pe.Workflow(name='test')
-    node1 = pe.Node(EngineTestInterface(), name='node1')
-    node1.iterables = ('input1', [1, 2])
-
-    node2 = pe.Node(EngineTestInterface(), name='node2')
-    wf1.connect(node1, 'output1', node2, 'input1')
+    node2 = pe.Node(EngineTestInterface(), name="node2")
+    wf1.connect(node1, "output1", node2, "input1")

-    node3 = pe.Node(EngineTestInterface(), name='node3')
-    node3.itersource = ('node1', 'input1')
-    node3.iterables = [('input1', {1: [3, 4], 2: [5, 6, 7]})]
+    node3 = pe.Node(EngineTestInterface(), name="node3")
+    node3.itersource = ("node1", "input1")
+    node3.iterables = [("input1", {1: [3, 4], 2: [5, 6, 7]})]

-    wf1.connect(node2, 'output1', node3, 'input1')
-    node4 = pe.Node(EngineTestInterface(), name='node4')
+    wf1.connect(node2, "output1", node3, "input1")
+    node4 = pe.Node(EngineTestInterface(), name="node4")

-    wf1.connect(node3, 'output1', node4, 'input1')
+    wf1.connect(node3, "output1", node4, "input1")

-    wf3 = pe.Workflow(name='group')
+    wf3 = pe.Workflow(name="group")
     for i in [0, 1, 2]:
-        wf3.add_nodes([wf1.clone(name='test%d' % i)])
+        wf3.add_nodes([wf1.clone(name="test%d" % i)])

     wf3._flatgraph = wf3._create_flat_graph()
@@ -228,26 +229,24 @@ def test_itersource_expansion():


 def test_itersource_synchronize1_expansion():
-    wf1 = pe.Workflow(name='test')
-    node1 = pe.Node(EngineTestInterface(), name='node1')
-    node1.iterables = [('input1', [1, 2]), ('input2', [3, 4])]
+    wf1 = pe.Workflow(name="test")
+    node1 = pe.Node(EngineTestInterface(), name="node1")
+    node1.iterables = [("input1", [1, 2]), ("input2", [3, 4])]
     node1.synchronize = True
-    node2 = pe.Node(EngineTestInterface(), name='node2')
-    wf1.connect(node1, 'output1', node2, 'input1')
-    node3 = pe.Node(EngineTestInterface(), name='node3')
-    node3.itersource = ('node1', ['input1', 'input2'])
-    node3.iterables = [('input1', {
-        (1, 3): [5, 6]
-    }), ('input2', {
-        (1, 3): [7, 8],
-        (2, 4): [9]
-    })]
-    wf1.connect(node2, 'output1', node3, 'input1')
-    node4 = pe.Node(EngineTestInterface(), name='node4')
-    wf1.connect(node3, 'output1', node4, 'input1')
-    wf3 = pe.Workflow(name='group')
+    node2 = pe.Node(EngineTestInterface(), name="node2")
+    wf1.connect(node1, "output1", node2, "input1")
+    node3 = pe.Node(EngineTestInterface(), name="node3")
+    node3.itersource = ("node1", ["input1", "input2"])
+    node3.iterables = [
+        ("input1", {(1, 3): [5, 6]}),
+        ("input2", {(1, 3): [7, 8], (2, 4): [9]}),
+    ]
+    wf1.connect(node2, "output1", node3, "input1")
+    node4 = pe.Node(EngineTestInterface(), name="node4")
+    wf1.connect(node3, "output1", node4, "input1")
+    wf3 = pe.Workflow(name="group")
     for i in [0, 1, 2]:
-        wf3.add_nodes([wf1.clone(name='test%d' % i)])
+        wf3.add_nodes([wf1.clone(name="test%d" % i)])

     wf3._flatgraph = wf3._create_flat_graph()

     # each expanded graph clone has:
@@ -262,26 +261,26 @@ def test_itersource_synchronize1_expansion():


 def test_itersource_synchronize2_expansion():
-    wf1 = pe.Workflow(name='test')
+    wf1 = pe.Workflow(name="test")

-    node1 = pe.Node(EngineTestInterface(), name='node1')
-    node1.iterables = [('input1', [1, 2]), ('input2', [3, 4])]
+    node1 = pe.Node(EngineTestInterface(), name="node1")
+    node1.iterables = [("input1", [1, 2]), ("input2", [3, 4])]
     node1.synchronize = True
-    node2 = pe.Node(EngineTestInterface(), name='node2')
-    wf1.connect(node1, 'output1', node2, 'input1')
-    node3 = pe.Node(EngineTestInterface(), name='node3')
-    node3.itersource = ('node1', ['input1', 'input2'])
+    node2 = pe.Node(EngineTestInterface(), name="node2")
+    wf1.connect(node1, "output1", node2, "input1")
+    node3 = pe.Node(EngineTestInterface(), name="node3")
+    node3.itersource = ("node1", ["input1", "input2"])
     node3.synchronize = True
-    node3.iterables = [('input1', 'input2'), {
-        (1, 3): [(5, 7), (6, 8)],
-        (2, 4): [(None, 9)]
-    }]
-    wf1.connect(node2, 'output1', node3, 'input1')
-    node4 = pe.Node(EngineTestInterface(), name='node4')
-    wf1.connect(node3, 'output1', node4, 'input1')
-    wf3 = pe.Workflow(name='group')
+    node3.iterables = [
+        ("input1", "input2"),
+        {(1, 3): [(5, 7), (6, 8)], (2, 4): [(None, 9)]},
+    ]
+    wf1.connect(node2, "output1", node3, "input1")
+    node4 = pe.Node(EngineTestInterface(), name="node4")
+    wf1.connect(node3, "output1", node4, "input1")
+    wf3 = pe.Workflow(name="group")
     for i in [0, 1, 2]:
-        wf3.add_nodes([wf1.clone(name='test%d' % i)])
+        wf3.add_nodes([wf1.clone(name="test%d" % i)])

     wf3._flatgraph = wf3._create_flat_graph()

     # each expanded graph clone has:
@@ -295,7 +294,6 @@ def test_itersource_synchronize2_expansion():
     assert len(pe.generate_expanded_graph(wf3._flatgraph).nodes()) == 30


-
 def test_old_config(tmpdir):
     tmpdir.chdir()
     wd = os.getcwd()
@@ -308,26 +306,25 @@ def func2(a):
         return a + 1

     n1 = pe.Node(
-        Function(input_names=[], output_names=['a'], function=func1),
-        name='n1')
+        Function(input_names=[], output_names=["a"], function=func1), name="n1"
+    )
     n2 = pe.Node(
-        Function(input_names=['a'], output_names=['b'], function=func2),
-        name='n2')
-    w1 = pe.Workflow(name='test')
+        Function(input_names=["a"], output_names=["b"], function=func2), name="n2"
+    )
+    w1 = pe.Workflow(name="test")
     modify = lambda x: x + 1
     n1.inputs.a = 1
-    w1.connect(n1, ('a', modify), n2, 'a')
+    w1.connect(n1, ("a", modify), n2, "a")
     w1.base_dir = wd

-    w1.config['execution']['crashdump_dir'] = wd
+    w1.config["execution"]["crashdump_dir"] = wd
     # generate outputs
-    w1.run(plugin='Linear')
+    w1.run(plugin="Linear")


 def test_mapnode_json(tmpdir):
-    """Tests that mapnodes don't generate excess jsons
-    """
+    """Tests that mapnodes don't generate excess jsons"""
     tmpdir.chdir()
     wd = os.getcwd()
     from nipype import MapNode, Function, Workflow
@@ -336,13 +333,14 @@ def func1(in1):
         return in1 + 1

     n1 = MapNode(
-        Function(input_names=['in1'], output_names=['out'], function=func1),
-        iterfield=['in1'],
-        name='n1')
+        Function(input_names=["in1"], output_names=["out"], function=func1),
+        iterfield=["in1"],
+        name="n1",
+    )
     n1.inputs.in1 = [1]
-    w1 = Workflow(name='test')
+    w1 = Workflow(name="test")
     w1.base_dir = wd
-    w1.config['execution']['crashdump_dir'] = wd
+    w1.config["execution"]["crashdump_dir"] = wd
     w1.add_nodes([n1])
     w1.run()
     n1.inputs.in1 = [2]
@@ -352,13 +350,13 @@ def func1(in1):

     eg = w1.run()

     node = list(eg.nodes())[0]
-    outjson = glob(os.path.join(node.output_dir(), '_0x*.json'))
+    outjson = glob(os.path.join(node.output_dir(), "_0x*.json"))
     assert len(outjson) == 1

     # check that multiple json's don't trigger rerun
-    with open(os.path.join(node.output_dir(), 'test.json'), 'wt') as fp:
-        fp.write('dummy file')
-    w1.config['execution'].update(**{'stop_on_first_rerun': True})
+    with open(os.path.join(node.output_dir(), "test.json"), "w") as fp:
+        fp.write("dummy file")
+    w1.config["execution"].update(stop_on_first_rerun=True)

     w1.run()

@@ -367,18 +365,18 @@ def test_parameterize_dirs_false(tmpdir):
     from ....interfaces.utility import IdentityInterface
     from ....testing import example_data

-    input_file = example_data('fsl_motion_outliers_fd.txt')
+    input_file = example_data("fsl_motion_outliers_fd.txt")

-    n1 = pe.Node(EngineTestInterface(), name='Node1')
-    n1.iterables = ('input_file', (input_file, input_file))
+    n1 = pe.Node(EngineTestInterface(), name="Node1")
+    n1.iterables = ("input_file", (input_file, input_file))
     n1.interface.inputs.input1 = 1

-    n2 = pe.Node(IdentityInterface(fields='in1'), name='Node2')
+    n2 = pe.Node(IdentityInterface(fields="in1"), name="Node2")

-    wf = pe.Workflow(name='Test')
+    wf = pe.Workflow(name="Test")
     wf.base_dir = tmpdir.strpath
-    wf.config['execution']['parameterize_dirs'] = False
-    wf.connect([(n1, n2, [('output1', 'in1')])])
+    wf.config["execution"]["parameterize_dirs"] = False
+    wf.connect([(n1, n2, [("output1", "in1")])])

     wf.run()

@@ -392,62 +390,61 @@ def func1(in1):
         return in1

     n1 = MapNode(
-        Function(input_names=['in1'], output_names=['out'], function=func1),
-        iterfield=['in1'],
-        name='n1')
+        Function(input_names=["in1"], output_names=["out"], function=func1),
+        iterfield=["in1"],
+        name="n1",
+    )
     n1.inputs.in1 = [1, 2, 3]

-    w1 = Workflow(name='test')
+    w1 = Workflow(name="test")
     w1.base_dir = wd
     w1.add_nodes([n1])
     # set local check
-    w1.config['execution'] = {
-        'stop_on_first_crash': 'true',
-        'local_hash_check': 'true',
-        'crashdump_dir': wd,
-        'poll_sleep_duration': 2
+    w1.config["execution"] = {
+        "stop_on_first_crash": "true",
+        "local_hash_check": "true",
+        "crashdump_dir": wd,
+        "poll_sleep_duration": 2,
     }

     # test output of num_subnodes method when serial is default (False)
     assert n1.num_subnodes() == len(n1.inputs.in1)

     # test running the workflow on default conditions
-    w1.run(plugin='MultiProc')
+    w1.run(plugin="MultiProc")

     # test output of num_subnodes method when serial is True
     n1._serial = True
     assert n1.num_subnodes() == 1

     # test running the workflow on serial conditions
-    w1.run(plugin='MultiProc')
+    w1.run(plugin="MultiProc")


 def test_write_graph_runs(tmpdir):
     tmpdir.chdir()

-    for graph in ('orig', 'flat', 'exec', 'hierarchical', 'colored'):
+    for graph in ("orig", "flat", "exec", "hierarchical", "colored"):
         for simple in (True, False):
-            pipe = pe.Workflow(name='pipe')
-            mod1 = pe.Node(interface=EngineTestInterface(), name='mod1')
-            mod2 = pe.Node(interface=EngineTestInterface(), name='mod2')
-            pipe.connect([(mod1, mod2, [('output1', 'input1')])])
+            pipe = pe.Workflow(name="pipe")
+            mod1 = pe.Node(interface=EngineTestInterface(), name="mod1")
+            mod2 = pe.Node(interface=EngineTestInterface(), name="mod2")
+            pipe.connect([(mod1, mod2, [("output1", "input1")])])
             try:
-                pipe.write_graph(
-                    graph2use=graph, simple_form=simple, format='dot')
+                pipe.write_graph(graph2use=graph, simple_form=simple, format="dot")
             except Exception:
-                assert False, \
-                    'Failed to plot {} {} graph'.format(
-                        'simple' if simple else 'detailed', graph)
+                assert False, "Failed to plot {} {} graph".format(
+                    "simple" if simple else "detailed", graph
+                )
+
+            assert os.path.exists("graph.dot") or os.path.exists("graph_detailed.dot")

-            assert os.path.exists('graph.dot') or os.path.exists(
-                'graph_detailed.dot')
-
             try:
-                os.remove('graph.dot')
+                os.remove("graph.dot")
             except OSError:
                 pass
             try:
-                os.remove('graph_detailed.dot')
+                os.remove("graph_detailed.dot")
             except OSError:
                 pass

@@ -455,88 +452,93 @@ def test_write_graph_runs(tmpdir):

 def test_deep_nested_write_graph_runs(tmpdir):
     tmpdir.chdir()

-    for graph in ('orig', 'flat', 'exec', 'hierarchical', 'colored'):
+    for graph in ("orig", "flat", "exec", "hierarchical", "colored"):
         for simple in (True, False):
-            pipe = pe.Workflow(name='pipe')
+            pipe = pe.Workflow(name="pipe")
             parent = pipe
             for depth in range(10):
-                sub = pe.Workflow(name='pipe_nest_{}'.format(depth))
+                sub = pe.Workflow(name=f"pipe_nest_{depth}")
                 parent.add_nodes([sub])
                 parent = sub
-            mod1 = pe.Node(interface=EngineTestInterface(), name='mod1')
+            mod1 = pe.Node(interface=EngineTestInterface(), name="mod1")
             parent.add_nodes([mod1])
             try:
-                pipe.write_graph(
-                    graph2use=graph, simple_form=simple, format='dot')
+                pipe.write_graph(graph2use=graph, simple_form=simple, format="dot")
             except Exception as e:
-                assert False, \
-                    'Failed to plot {} {} deep graph: {!s}'.format(
-                        'simple' if simple else 'detailed', graph, e)
+                assert False, "Failed to plot {} {} deep graph: {!s}".format(
+                    "simple" if simple else "detailed", graph, e
+                )

-            assert os.path.exists('graph.dot') or os.path.exists(
-                'graph_detailed.dot')
+            assert os.path.exists("graph.dot") or os.path.exists("graph_detailed.dot")
             try:
-                os.remove('graph.dot')
+                os.remove("graph.dot")
             except OSError:
                 pass
             try:
-                os.remove('graph_detailed.dot')
+                os.remove("graph_detailed.dot")
             except OSError:
                 pass


-import networkx
-# Format of the graph has slightly changed
-graph_str = '""' if int(networkx.__version__.split('.')[0]) == 1 else ''

 # examples of dot files used in the following test
-dotfile_orig = ['strict digraph ' + graph_str + ' {\n',
-                '"mod1 (engine)";\n',
-                '"mod2 (engine)";\n',
-                '"mod1 (engine)" -> "mod2 (engine)";\n',
-                '}\n']
-
-dotfile_detailed_orig = ['digraph structs {\n',
-                         'node [shape=record];\n',
-                         'pipemod1 [label="{IN}|{ mod1 | engine | }|{OUT| output1}"];\n',
-                         'pipemod2 [label="{IN| input1}|{ mod2 | engine | }|{OUT}"];\n',
-                         'pipemod1:outoutput1:e -> pipemod2:ininput1:w;\n',
-                         '}']
-
-
-dotfile_hierarchical = ['digraph pipe{\n',
-                        '  label="pipe";\n',
-                        '  pipe_mod1[label="mod1 (engine)"];\n',
-                        '  pipe_mod2[label="mod2 (engine)"];\n',
-                        '  pipe_mod1 -> pipe_mod2;\n',
-                        '}']
-
-dotfile_colored = ['digraph pipe{\n',
-                   '  label="pipe";\n',
-                   '  pipe_mod1[label="mod1 (engine)", style=filled, fillcolor="#FFFFC8"];\n',
-                   '  pipe_mod2[label="mod2 (engine)", style=filled, fillcolor="#FFFFC8"];\n',
-                   '  pipe_mod1 -> pipe_mod2;\n',
-                   '}']
+dotfile_orig = [
+    "strict digraph  {\n",
+    '"mod1 (engine)";\n',
+    '"mod2 (engine)";\n',
+    '"mod1 (engine)" -> "mod2 (engine)";\n',
+    "}\n",
+]
+
+dotfile_detailed_orig = [
+    "digraph structs {\n",
+    "node [shape=record];\n",
+    'pipemod1 [label="{IN}|{ mod1 | engine | }|{OUT| output1}"];\n',
+    'pipemod2 [label="{IN| input1}|{ mod2 | engine | }|{OUT}"];\n',
+    "pipemod1:outoutput1:e -> pipemod2:ininput1:w;\n",
+    "}",
+]


+dotfile_hierarchical = [
+    "digraph pipe{\n",
+    '  label="pipe";\n',
+    '  pipe_mod1[label="mod1 (engine)"];\n',
+    '  pipe_mod2[label="mod2 (engine)"];\n',
+    "  pipe_mod1 -> pipe_mod2;\n",
+    "}",
+]
+
+dotfile_colored = [
+    "digraph pipe{\n",
+    '  label="pipe";\n',
+    '  pipe_mod1[label="mod1 (engine)", style=filled, fillcolor="#FFFFC8"];\n',
+    '  pipe_mod2[label="mod2 (engine)", style=filled, fillcolor="#FFFFC8"];\n',
+    "  pipe_mod1 -> pipe_mod2;\n",
+    "}",
+]

 dotfiles = {
     "orig": dotfile_orig,
     "flat": dotfile_orig,
     "exec": dotfile_orig,
     "hierarchical": dotfile_hierarchical,
-    "colored": dotfile_colored
-    }
+    "colored": dotfile_colored,
+}
+

 @pytest.mark.parametrize("simple", [True, False])
-@pytest.mark.parametrize("graph_type", ['orig', 'flat', 'exec', 'hierarchical', 'colored'])
+@pytest.mark.parametrize(
+    "graph_type", ["orig", "flat", "exec", "hierarchical", "colored"]
+)
 def test_write_graph_dotfile(tmpdir, graph_type, simple):
-    """ checking dot files for a workflow without iterables"""
+    """checking dot files for a workflow without iterables"""
     tmpdir.chdir()

-    pipe = pe.Workflow(name='pipe')
-    mod1 = pe.Node(interface=EngineTestInterface(), name='mod1')
-    mod2 = pe.Node(interface=EngineTestInterface(), name='mod2')
-    pipe.connect([(mod1, mod2, [('output1', 'input1')])])
-    pipe.write_graph(
-        graph2use=graph_type, simple_form=simple, format='dot')
+    pipe = pe.Workflow(name="pipe")
+    mod1 = pe.Node(interface=EngineTestInterface(), name="mod1")
+    mod2 = pe.Node(interface=EngineTestInterface(), name="mod2")
+    pipe.connect([(mod1, mod2, [("output1", "input1")])])
+    pipe.write_graph(graph2use=graph_type, simple_form=simple, format="dot")

     with open("graph.dot") as f:
         graph_str = f.read()
@@ -548,15 +550,22 @@ def test_write_graph_dotfile(tmpdir, graph_type, simple):
     # if simple=False graph.dot uses longer names
     for line in dotfiles[graph_type]:
         if graph_type in ["hierarchical", "colored"]:
-            assert line.replace("mod1 (engine)", "mod1.EngineTestInterface.engine").replace(
-                "mod2 (engine)", "mod2.EngineTestInterface.engine") in graph_str
+            assert (
+                line.replace(
+                    "mod1 (engine)", "mod1.EngineTestInterface.engine"
+                ).replace("mod2 (engine)", "mod2.EngineTestInterface.engine")
+                in graph_str
+            )
         else:
-            assert line.replace(
-                "mod1 (engine)", "pipe.mod1.EngineTestInterface.engine").replace(
-                    "mod2 (engine)", "pipe.mod2.EngineTestInterface.engine") in graph_str
+            assert (
+                line.replace(
+                    "mod1 (engine)", "pipe.mod1.EngineTestInterface.engine"
+                ).replace("mod2 (engine)", "pipe.mod2.EngineTestInterface.engine")
+                in graph_str
+            )

     # graph_detailed is the same for orig, flat, exec (if no iterables)
-    # graph_detailed is not created for hierachical or colored
+    # graph_detailed is not created for hierarchical or colored
     if graph_type not in ["hierarchical", "colored"]:
         with open("graph_detailed.dot") as f:
             graph_str = f.read()
@@ -566,59 +575,64 @@ def test_write_graph_dotfile(tmpdir, graph_type, simple):

 # examples of dot files used in the following test
 dotfile_detailed_iter_exec = [
-    'digraph structs {\n',
-    'node [shape=record];\n',
+    "digraph structs {\n",
+    "node [shape=record];\n",
     'pipemod1aIa1 [label="{IN}|{ a1 | engine | mod1.aI }|{OUT| output1}"];\n',
     'pipemod2a1 [label="{IN| input1}|{ a1 | engine | mod2 }|{OUT}"];\n',
     'pipemod1aIa0 [label="{IN}|{ a0 | engine | mod1.aI }|{OUT| output1}"];\n',
     'pipemod2a0 [label="{IN| input1}|{ a0 | engine | mod2 }|{OUT}"];\n',
-    'pipemod1aIa0:outoutput1:e -> pipemod2a0:ininput1:w;\n',
-    'pipemod1aIa1:outoutput1:e -> pipemod2a1:ininput1:w;\n',
-    '}']
+    "pipemod1aIa0:outoutput1:e -> pipemod2a0:ininput1:w;\n",
+    "pipemod1aIa1:outoutput1:e -> pipemod2a1:ininput1:w;\n",
+    "}",
+]

 dotfile_iter_hierarchical = [
-    'digraph pipe{\n',
+    "digraph pipe{\n",
     '  label="pipe";\n',
     '  pipe_mod1[label="mod1 (engine)", shape=box3d,style=filled, color=black, colorscheme=greys7 fillcolor=2];\n',
     '  pipe_mod2[label="mod2 (engine)"];\n',
-    '  pipe_mod1 -> pipe_mod2;\n',
-    '}']
+    "  pipe_mod1 -> pipe_mod2;\n",
+    "}",
+]

 dotfile_iter_colored = [
-    'digraph pipe{\n',
+    "digraph pipe{\n",
     '  label="pipe";\n',
     '  pipe_mod1[label="mod1 (engine)", shape=box3d,style=filled, color=black, colorscheme=greys7 fillcolor=2];\n',
     '  pipe_mod2[label="mod2 (engine)", style=filled, fillcolor="#FFFFC8"];\n',
-    '  pipe_mod1 -> pipe_mod2;\n',
-    '}']
+    "  pipe_mod1 -> pipe_mod2;\n",
+    "}",
+]

 dotfiles_iter = {
     "orig": dotfile_orig,
     "flat": dotfile_orig,
     "exec": dotfile_orig,
     "hierarchical": dotfile_iter_hierarchical,
-    "colored": dotfile_iter_colored
-    }
+    "colored": dotfile_iter_colored,
+}

 dotfiles_detailed_iter = {
     "orig": dotfile_detailed_orig,
     "flat": dotfile_detailed_orig,
-    "exec": dotfile_detailed_iter_exec
-    }
+    "exec": dotfile_detailed_iter_exec,
+}
+

 @pytest.mark.parametrize("simple", [True, False])
-@pytest.mark.parametrize("graph_type", ['orig', 'flat', 'exec', 'hierarchical', 'colored'])
+@pytest.mark.parametrize( + "graph_type", ["orig", "flat", "exec", "hierarchical", "colored"] +) def test_write_graph_dotfile_iterables(tmpdir, graph_type, simple): - """ checking dot files for a workflow with iterables""" + """checking dot files for a workflow with iterables""" tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=EngineTestInterface(), name='mod1') - mod1.iterables = ('input1', [1, 2]) - mod2 = pe.Node(interface=EngineTestInterface(), name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) - pipe.write_graph( - graph2use=graph_type, simple_form=simple, format='dot') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") + mod1.iterables = ("input1", [1, 2]) + mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) + pipe.write_graph(graph2use=graph_type, simple_form=simple, format="dot") with open("graph.dot") as f: graph_str = f.read() @@ -630,14 +644,21 @@ def test_write_graph_dotfile_iterables(tmpdir, graph_type, simple): # if simple=False graph.dot uses longer names for line in dotfiles_iter[graph_type]: if graph_type in ["hierarchical", "colored"]: - assert line.replace("mod1 (engine)", "mod1.EngineTestInterface.engine").replace( - "mod2 (engine)", "mod2.EngineTestInterface.engine") in graph_str + assert ( + line.replace( + "mod1 (engine)", "mod1.EngineTestInterface.engine" + ).replace("mod2 (engine)", "mod2.EngineTestInterface.engine") + in graph_str + ) else: - assert line.replace( - "mod1 (engine)", "pipe.mod1.EngineTestInterface.engine").replace( - "mod2 (engine)", "pipe.mod2.EngineTestInterface.engine") in graph_str - - # graph_detailed is not created for hierachical or colored + assert ( + line.replace( + "mod1 (engine)", "pipe.mod1.EngineTestInterface.engine" + ).replace("mod2 (engine)", "pipe.mod2.EngineTestInterface.engine") + in graph_str + ) + + # graph_detailed is not created for hierarchical or colored if graph_type not in ["hierarchical", "colored"]: with open("graph_detailed.dot") as f: graph_str = f.read() @@ -645,7 +666,6 @@ def test_write_graph_dotfile_iterables(tmpdir, graph_type, simple): assert line in graph_str - def test_io_subclass(): """Ensure any io subclass allows dynamic traits""" from nipype.interfaces.io import IOBase @@ -657,25 +677,26 @@ class TestKV(IOBase): def _list_outputs(self): outputs = {} - outputs['test'] = 1 - outputs['foo'] = 'bar' + outputs["test"] = 1 + outputs["foo"] = "bar" return outputs - wf = pe.Workflow('testkv') + wf = pe.Workflow("testkv") def testx2(test): return test * 2 - kvnode = pe.Node(TestKV(), name='testkv') + kvnode = pe.Node(TestKV(), name="testkv") from nipype.interfaces.utility import Function + func = pe.Node( - Function( - input_names=['test'], output_names=['test2'], function=testx2), - name='func') + Function(input_names=["test"], output_names=["test2"], function=testx2), + name="func", + ) exception_not_raised = True try: - wf.connect(kvnode, 'test', func, 'test') + wf.connect(kvnode, "test", func, "test") except Exception as e: - if 'Module testkv has no output called test' in e: + if "Module testkv has no output called test" in e: exception_not_raised = False assert exception_not_raised diff --git a/nipype/pipeline/engine/tests/test_join.py b/nipype/pipeline/engine/tests/test_join.py index 77fc0f2fdf..2fe5f70564 100644 --- a/nipype/pipeline/engine/tests/test_join.py +++ b/nipype/pipeline/engine/tests/test_join.py @@ -1,12 +1,10 @@ -# -*- coding: utf-8 -*- # 
emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for join expansion """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open +import pytest +from .... import config from ... import engine as pe from ....interfaces import base as nib from ....interfaces.utility import IdentityInterface, Function, Merge @@ -14,8 +12,7 @@ class PickFirstSpec(nib.TraitedSpec): - in_files = traits.List( - File(exists=True), argstr="%s", position=2, mandatory=True) + in_files = traits.List(File(exists=True), argstr="%s", position=2, mandatory=True) class PickFirstOutSpec(nib.TraitedSpec): @@ -32,32 +29,28 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = self.inputs.in_files[0] + outputs["output1"] = self.inputs.in_files[0] return outputs class IncrementInputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(mandatory=True, desc='input') - inc = nib.traits.Int(usedefault=True, default_value=1, desc='increment') + input1 = nib.traits.Int(mandatory=True, desc="input") + inc = nib.traits.Int(usedefault=True, default_value=1, desc="increment") class IncrementOutputSpec(nib.TraitedSpec): - output1 = nib.traits.Int(desc='ouput') + output1 = nib.traits.Int(desc="output") -class IncrementInterface(nib.BaseInterface): +class IncrementInterface(nib.SimpleInterface): input_spec = IncrementInputSpec output_spec = IncrementOutputSpec def _run_interface(self, runtime): runtime.returncode = 0 + self._results["output1"] = self.inputs.input1 + self.inputs.inc return runtime - def _list_outputs(self): - outputs = self._outputs().get() - outputs['output1'] = self.inputs.input1 + self.inputs.inc - return outputs - _sums = [] @@ -65,31 +58,27 @@ def _list_outputs(self): class SumInputSpec(nib.TraitedSpec): - input1 = nib.traits.List(nib.traits.Int, mandatory=True, desc='input') + input1 = nib.traits.List(nib.traits.Int, mandatory=True, desc="input") class SumOutputSpec(nib.TraitedSpec): - output1 = nib.traits.Int(desc='ouput') - operands = nib.traits.List(nib.traits.Int, desc='operands') + output1 = nib.traits.Int(desc="output") + operands = nib.traits.List(nib.traits.Int, desc="operands") -class SumInterface(nib.BaseInterface): +class SumInterface(nib.SimpleInterface): input_spec = SumInputSpec output_spec = SumOutputSpec def _run_interface(self, runtime): - runtime.returncode = 0 - return runtime - - def _list_outputs(self): global _sum global _sum_operands - outputs = self._outputs().get() - outputs['operands'] = self.inputs.input1 - _sum_operands.append(outputs['operands']) - outputs['output1'] = sum(self.inputs.input1) - _sums.append(outputs['output1']) - return outputs + runtime.returncode = 0 + self._results["operands"] = self.inputs.input1 + self._results["output1"] = sum(self.inputs.input1) + _sum_operands.append(self.inputs.input1) + _sums.append(sum(self.inputs.input1)) + return runtime _set_len = None @@ -97,11 +86,11 @@ def _list_outputs(self): class SetInputSpec(nib.TraitedSpec): - input1 = nib.traits.Set(nib.traits.Int, mandatory=True, desc='input') + input1 = nib.traits.Set(nib.traits.Int, mandatory=True, desc="input") class SetOutputSpec(nib.TraitedSpec): - output1 = nib.traits.Int(desc='ouput') + output1 = nib.traits.Int(desc="output") class SetInterface(nib.BaseInterface): @@ -115,7 +104,7 @@ def _run_interface(self, runtime): def _list_outputs(self): global _set_len outputs = self._outputs().get() - _set_len = 
outputs['output1'] = len(self.inputs.input1) + _set_len = outputs["output1"] = len(self.inputs.input1) return outputs @@ -124,12 +113,12 @@ def _list_outputs(self): class ProductInputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(mandatory=True, desc='input1') - input2 = nib.traits.Int(mandatory=True, desc='input2') + input1 = nib.traits.Int(mandatory=True, desc="input1") + input2 = nib.traits.Int(mandatory=True, desc="input2") class ProductOutputSpec(nib.TraitedSpec): - output1 = nib.traits.Int(mandatory=True, desc='output') + output1 = nib.traits.Int(mandatory=True, desc="output") class ProductInterface(nib.BaseInterface): @@ -143,60 +132,73 @@ def _run_interface(self, runtime): def _list_outputs(self): global _products outputs = self._outputs().get() - outputs['output1'] = self.inputs.input1 * self.inputs.input2 - _products.append(outputs['output1']) + outputs["output1"] = self.inputs.input1 * self.inputs.input2 + _products.append(outputs["output1"]) return outputs -def test_join_expansion(tmpdir): +@pytest.mark.parametrize("needed_outputs", ["true", "false"]) +def test_join_expansion(tmpdir, needed_outputs): + global _sums + global _sum_operands + global _products tmpdir.chdir() + # Clean up, just in case some other test modified them + _products = [] + _sum_operands = [] + _sums = [] + + prev_state = config.get("execution", "remove_unnecessary_outputs") + config.set("execution", "remove_unnecessary_outputs", needed_outputs) # Make the workflow. - wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2])] # a pre-join node in the iterated path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") # another pre-join node in the iterated path - pre_join2 = pe.Node(IncrementInterface(), name='pre_join2') - wf.connect(pre_join1, 'output1', pre_join2, 'input1') + pre_join2 = pe.Node(IncrementInterface(), name="pre_join2") # the join node join = pe.JoinNode( - SumInterface(), - joinsource='inputspec', - joinfield='input1', - name='join') - wf.connect(pre_join2, 'output1', join, 'input1') + SumInterface(), joinsource="inputspec", joinfield="input1", name="join" + ) # an uniterated post-join node - post_join1 = pe.Node(IncrementInterface(), name='post_join1') - wf.connect(join, 'output1', post_join1, 'input1') + post_join1 = pe.Node(IncrementInterface(), name="post_join1") # a post-join node in the iterated path - post_join2 = pe.Node(ProductInterface(), name='post_join2') - wf.connect(join, 'output1', post_join2, 'input1') - wf.connect(pre_join1, 'output1', post_join2, 'input2') - + post_join2 = pe.Node(ProductInterface(), name="post_join2") + + wf.connect( + [ + (inputspec, pre_join1, [("n", "input1")]), + (pre_join1, pre_join2, [("output1", "input1")]), + (pre_join1, post_join2, [("output1", "input2")]), + (pre_join2, join, [("output1", "input1")]), + (join, post_join1, [("output1", "input1")]), + (join, post_join2, [("output1", "input1")]), + ] + ) result = wf.run() # the two expanded pre-join predecessor nodes feed into one join node - joins = [node for node in result.nodes() if node.name == 'join'] + joins = [node for node in result.nodes() if node.name == "join"] assert len(joins) == 1, "The number of join result 
nodes is incorrect." # the expanded graph contains 2 * 2 = 4 iteration pre-join nodes, 1 join # node, 1 non-iterated post-join node and 2 * 1 iteration post-join nodes. # Nipype factors away the IdentityInterface. - assert len( - result.nodes()) == 8, "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 8, "The number of expanded nodes is incorrect." + # the join Sum result is (1 + 1 + 1) + (2 + 1 + 1) assert len(_sums) == 1, "The number of join outputs is incorrect" - assert _sums[ - 0] == 7, "The join Sum output value is incorrect: %s." % _sums[0] + assert _sums[0] == 7, "The join Sum output value is incorrect: %s." % _sums[0] # the join input preserves the iterables input order - assert _sum_operands[0] == [3, 4], \ + assert _sum_operands[0] == [3, 4], ( "The join Sum input is incorrect: %s." % _sum_operands[0] + ) # there are two iterations of the post-join node in the iterable path - assert len(_products) == 2,\ - "The number of iterated post-join outputs is incorrect" + assert len(_products) == 2, "The number of iterated post-join outputs is incorrect" + config.set("execution", "remove_unnecessary_outputs", prev_state) def test_node_joinsource(tmpdir): @@ -204,15 +206,17 @@ def test_node_joinsource(tmpdir): tmpdir.chdir() # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2])] # the join node join = pe.JoinNode( - SetInterface(), joinsource=inputspec, joinfield='input1', name='join') + SetInterface(), joinsource=inputspec, joinfield="input1", name="join" + ) # the joinsource is the inputspec name - assert join.joinsource == inputspec.name, \ - "The joinsource is not set to the node name." + assert ( + join.joinsource == inputspec.name + ), "The joinsource is not set to the node name." def test_set_join_node(tmpdir): @@ -220,26 +224,23 @@ def test_set_join_node(tmpdir): tmpdir.chdir() # Make the workflow. - wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2, 1, 3, 2])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2, 1, 3, 2])] # a pre-join node in the iterated path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # the set join node join = pe.JoinNode( - SetInterface(), - joinsource='inputspec', - joinfield='input1', - name='join') - wf.connect(pre_join1, 'output1', join, 'input1') + SetInterface(), joinsource="inputspec", joinfield="input1", name="join" + ) + wf.connect(pre_join1, "output1", join, "input1") wf.run() # the join length is the number of unique inputs - assert _set_len == 3, \ - "The join Set output value is incorrect: %s." % _set_len + assert _set_len == 3, "The join Set output value is incorrect: %s." % _set_len def test_unique_join_node(tmpdir): @@ -249,26 +250,28 @@ def test_unique_join_node(tmpdir): tmpdir.chdir() # Make the workflow. 
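# Editor's note -- an illustrative sketch, not part of the patch: with
# ``unique=True`` the JoinNode in the test below de-duplicates the joined
# values while keeping their first-occurrence order (unlike the traits.Set
# join above, which only preserves cardinality).  A pure-Python model of the
# expected join input for n=[3, 1, 2, 1, 3] after the +1 increment:
def _unique_ordered(values):
    seen = set()
    return [v for v in values if not (v in seen or seen.add(v))]

assert _unique_ordered([4, 2, 3, 2, 4]) == [4, 2, 3]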
- wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [3, 1, 2, 1, 3])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [3, 1, 2, 1, 3])] # a pre-join node in the iterated path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # the set join node join = pe.JoinNode( SumInterface(), - joinsource='inputspec', - joinfield='input1', + joinsource="inputspec", + joinfield="input1", unique=True, - name='join') - wf.connect(pre_join1, 'output1', join, 'input1') + name="join", + ) + wf.connect(pre_join1, "output1", join, "input1") wf.run() - assert _sum_operands[0] == [4, 2, 3], \ + assert _sum_operands[0] == [4, 2, 3], ( "The unique join output value is incorrect: %s." % _sum_operands[0] + ) def test_multiple_join_nodes(tmpdir): @@ -278,47 +281,48 @@ def test_multiple_join_nodes(tmpdir): tmpdir.chdir() # Make the workflow. - wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2, 3])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2, 3])] # a pre-join node in the iterated path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # the first join node join1 = pe.JoinNode( - IdentityInterface(fields=['vector']), - joinsource='inputspec', - joinfield='vector', - name='join1') - wf.connect(pre_join1, 'output1', join1, 'vector') + IdentityInterface(fields=["vector"]), + joinsource="inputspec", + joinfield="vector", + name="join1", + ) + wf.connect(pre_join1, "output1", join1, "vector") # an uniterated post-join node - post_join1 = pe.Node(SumInterface(), name='post_join1') - wf.connect(join1, 'vector', post_join1, 'input1') + post_join1 = pe.Node(SumInterface(), name="post_join1") + wf.connect(join1, "vector", post_join1, "input1") # the downstream join node connected to both an upstream join # path output and a separate input in the iterated path join2 = pe.JoinNode( - IdentityInterface(fields=['vector', 'scalar']), - joinsource='inputspec', - joinfield='vector', - name='join2') - wf.connect(pre_join1, 'output1', join2, 'vector') - wf.connect(post_join1, 'output1', join2, 'scalar') + IdentityInterface(fields=["vector", "scalar"]), + joinsource="inputspec", + joinfield="vector", + name="join2", + ) + wf.connect(pre_join1, "output1", join2, "vector") + wf.connect(post_join1, "output1", join2, "scalar") # a second post-join node - post_join2 = pe.Node(SumInterface(), name='post_join2') - wf.connect(join2, 'vector', post_join2, 'input1') + post_join2 = pe.Node(SumInterface(), name="post_join2") + wf.connect(join2, "vector", post_join2, "input1") # a third post-join node - post_join3 = pe.Node(ProductInterface(), name='post_join3') - wf.connect(post_join2, 'output1', post_join3, 'input1') - wf.connect(join2, 'scalar', post_join3, 'input2') + post_join3 = pe.Node(ProductInterface(), name="post_join3") + wf.connect(post_join2, "output1", post_join3, "input1") + 
wf.connect(join2, "scalar", post_join3, "input2") result = wf.run() # The expanded graph contains one pre_join1 replicate per inputspec # replicate and one of each remaining node = 3 + 5 = 8 nodes. # The replicated inputspec nodes are factored out of the expansion. - assert len(result.nodes()) == 8, \ - "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 8, "The number of expanded nodes is incorrect." # The outputs are: # pre_join1: [2, 3, 4] # post_join1: 9 @@ -335,33 +339,34 @@ def test_identity_join_node(tmpdir): tmpdir.chdir() # Make the workflow. - wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2, 3])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2, 3])] # a pre-join node in the iterated path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # the IdentityInterface join node join = pe.JoinNode( - IdentityInterface(fields=['vector']), - joinsource='inputspec', - joinfield='vector', - name='join') - wf.connect(pre_join1, 'output1', join, 'vector') + IdentityInterface(fields=["vector"]), + joinsource="inputspec", + joinfield="vector", + name="join", + ) + wf.connect(pre_join1, "output1", join, "vector") # an uniterated post-join node - post_join1 = pe.Node(SumInterface(), name='post_join1') - wf.connect(join, 'vector', post_join1, 'input1') + post_join1 = pe.Node(SumInterface(), name="post_join1") + wf.connect(join, "vector", post_join1, "input1") result = wf.run() # the expanded graph contains 1 * 3 iteration pre-join nodes, 1 join # node and 1 post-join node. Nipype factors away the iterable input # IdentityInterface but keeps the join IdentityInterface. - assert len(result.nodes()) == 5, \ - "The number of expanded nodes is incorrect." - assert _sum_operands[0] == [2, 3, 4], \ + assert len(result.nodes()) == 5, "The number of expanded nodes is incorrect." + assert _sum_operands[0] == [2, 3, 4], ( "The join Sum input is incorrect: %s." % _sum_operands[0] + ) def test_multifield_join_node(tmpdir): @@ -371,38 +376,38 @@ def test_multifield_join_node(tmpdir): tmpdir.chdir() # Make the workflow. 
- wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['m', 'n']), name='inputspec') - inputspec.iterables = [('m', [1, 2]), ('n', [3, 4])] + inputspec = pe.Node(IdentityInterface(fields=["m", "n"]), name="inputspec") + inputspec.iterables = [("m", [1, 2]), ("n", [3, 4])] # two pre-join nodes in a parallel iterated path - inc1 = pe.Node(IncrementInterface(), name='inc1') - wf.connect(inputspec, 'm', inc1, 'input1') - inc2 = pe.Node(IncrementInterface(), name='inc2') - wf.connect(inputspec, 'n', inc2, 'input1') + inc1 = pe.Node(IncrementInterface(), name="inc1") + wf.connect(inputspec, "m", inc1, "input1") + inc2 = pe.Node(IncrementInterface(), name="inc2") + wf.connect(inputspec, "n", inc2, "input1") # the join node join = pe.JoinNode( - IdentityInterface(fields=['vector1', 'vector2']), - joinsource='inputspec', - name='join') - wf.connect(inc1, 'output1', join, 'vector1') - wf.connect(inc2, 'output1', join, 'vector2') + IdentityInterface(fields=["vector1", "vector2"]), + joinsource="inputspec", + name="join", + ) + wf.connect(inc1, "output1", join, "vector1") + wf.connect(inc2, "output1", join, "vector2") # a post-join node - prod = pe.MapNode( - ProductInterface(), name='prod', iterfield=['input1', 'input2']) - wf.connect(join, 'vector1', prod, 'input1') - wf.connect(join, 'vector2', prod, 'input2') + prod = pe.MapNode(ProductInterface(), name="prod", iterfield=["input1", "input2"]) + wf.connect(join, "vector1", prod, "input1") + wf.connect(join, "vector2", prod, "input2") result = wf.run() # the iterables are expanded as the cartesian product of the iterables values. # thus, the expanded graph contains 2 * (2 * 2) iteration pre-join nodes, 1 join # node and 1 post-join node. - assert len(result.nodes()) == 10, \ - "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 10, "The number of expanded nodes is incorrect." # the product inputs are [2, 4], [2, 5], [3, 4], [3, 5] - assert set(_products) == set([8, 10, 12, 15]), \ + assert set(_products) == {8, 10, 12, 15}, ( "The post-join products is incorrect: %s." % _products + ) def test_synchronize_join_node(tmpdir): @@ -412,39 +417,37 @@ def test_synchronize_join_node(tmpdir): tmpdir.chdir() # Make the workflow. 
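# Editor's note -- an illustrative sketch, not part of the patch: setting
# ``inputspec.synchronize = True`` pairs the iterables positionally (zip)
# rather than crossing them (product), so the test below runs only two
# iterations:
pairs = list(zip([1, 2], [3, 4]))  # [(1, 3), (2, 4)]
# after the increments the joined vectors are [2, 3] and [4, 5], giving the
# element-wise products 2*4=8 and 3*5=15 asserted below.
assert [(m + 1) * (n + 1) for m, n in pairs] == [8, 15]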
- wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['m', 'n']), name='inputspec') - inputspec.iterables = [('m', [1, 2]), ('n', [3, 4])] + inputspec = pe.Node(IdentityInterface(fields=["m", "n"]), name="inputspec") + inputspec.iterables = [("m", [1, 2]), ("n", [3, 4])] inputspec.synchronize = True # two pre-join nodes in a parallel iterated path - inc1 = pe.Node(IncrementInterface(), name='inc1') - wf.connect(inputspec, 'm', inc1, 'input1') - inc2 = pe.Node(IncrementInterface(), name='inc2') - wf.connect(inputspec, 'n', inc2, 'input1') + inc1 = pe.Node(IncrementInterface(), name="inc1") + wf.connect(inputspec, "m", inc1, "input1") + inc2 = pe.Node(IncrementInterface(), name="inc2") + wf.connect(inputspec, "n", inc2, "input1") # the join node join = pe.JoinNode( - IdentityInterface(fields=['vector1', 'vector2']), - joinsource='inputspec', - name='join') - wf.connect(inc1, 'output1', join, 'vector1') - wf.connect(inc2, 'output1', join, 'vector2') + IdentityInterface(fields=["vector1", "vector2"]), + joinsource="inputspec", + name="join", + ) + wf.connect(inc1, "output1", join, "vector1") + wf.connect(inc2, "output1", join, "vector2") # a post-join node - prod = pe.MapNode( - ProductInterface(), name='prod', iterfield=['input1', 'input2']) - wf.connect(join, 'vector1', prod, 'input1') - wf.connect(join, 'vector2', prod, 'input2') + prod = pe.MapNode(ProductInterface(), name="prod", iterfield=["input1", "input2"]) + wf.connect(join, "vector1", prod, "input1") + wf.connect(join, "vector2", prod, "input2") result = wf.run() # there are 3 iterables expansions. # thus, the expanded graph contains 2 * 2 iteration pre-join nodes, 1 join # node and 1 post-join node. - assert len(result.nodes()) == 6, \ - "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 6, "The number of expanded nodes is incorrect." # the product inputs are [2, 3] and [4, 5] - assert _products == [8, 15], \ - "The post-join products is incorrect: %s." % _products + assert _products == [8, 15], "The post-join products is incorrect: %s." % _products def test_itersource_join_source_node(tmpdir): @@ -452,31 +455,32 @@ def test_itersource_join_source_node(tmpdir): tmpdir.chdir() # Make the workflow. 
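# Editor's note -- an illustrative sketch, not part of the patch: an
# itersource node draws its iterables from the values of an upstream
# iterable; in the test below pre_join2.input1 iterates over
# {1: [3, 4], 2: [5, 6]}, keyed on inputspec.n.  Tracing the arithmetic
# behind the _sum_operands asserts:
for n, factors, expected in ((1, [3, 4], [7, 9]), (2, [5, 6], [16, 19])):
    # pre_join1: n+1; pre_join2: factor*(n+1); pre_join3: +1
    assert [f * (n + 1) + 1 for f in factors] == expected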
- wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2])] # an intermediate node in the first iteration path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # an iterable pre-join node with an itersource - pre_join2 = pe.Node(ProductInterface(), name='pre_join2') - pre_join2.itersource = ('inputspec', 'n') - pre_join2.iterables = ('input1', {1: [3, 4], 2: [5, 6]}) - wf.connect(pre_join1, 'output1', pre_join2, 'input2') + pre_join2 = pe.Node(ProductInterface(), name="pre_join2") + pre_join2.itersource = ("inputspec", "n") + pre_join2.iterables = ("input1", {1: [3, 4], 2: [5, 6]}) + wf.connect(pre_join1, "output1", pre_join2, "input2") # an intermediate node in the second iteration path - pre_join3 = pe.Node(IncrementInterface(), name='pre_join3') - wf.connect(pre_join2, 'output1', pre_join3, 'input1') + pre_join3 = pe.Node(IncrementInterface(), name="pre_join3") + wf.connect(pre_join2, "output1", pre_join3, "input1") # the join node join = pe.JoinNode( - IdentityInterface(fields=['vector']), - joinsource='pre_join2', - joinfield='vector', - name='join') - wf.connect(pre_join3, 'output1', join, 'vector') + IdentityInterface(fields=["vector"]), + joinsource="pre_join2", + joinfield="vector", + name="join", + ) + wf.connect(pre_join3, "output1", join, "vector") # a join successor node - post_join1 = pe.Node(SumInterface(), name='post_join1') - wf.connect(join, 'vector', post_join1, 'input1') + post_join1 = pe.Node(SumInterface(), name="post_join1") + wf.connect(join, "vector", post_join1, "input1") result = wf.run() @@ -489,18 +493,19 @@ def test_itersource_join_source_node(tmpdir): # 2 + (2 * 2) + 4 + 2 + 2 = 14 expansion graph nodes. # Nipype factors away the iterable input # IdentityInterface but keeps the join IdentityInterface. - assert len(result.nodes()) == 14, \ - "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 14, "The number of expanded nodes is incorrect." # The first join inputs are: # 1 + (3 * 2) and 1 + (4 * 2) # The second join inputs are: # 1 + (5 * 3) and 1 + (6 * 3) # the post-join nodes execution order is indeterminate; # therefore, compare the lists item-wise. - assert [16, 19] in _sum_operands, \ + assert [16, 19] in _sum_operands, ( "The join Sum input is incorrect: %s." % _sum_operands - assert [7, 9] in _sum_operands, \ + ) + assert [7, 9] in _sum_operands, ( "The join Sum input is incorrect: %s." % _sum_operands + ) def test_itersource_two_join_nodes(tmpdir): @@ -509,68 +514,69 @@ def test_itersource_two_join_nodes(tmpdir): tmpdir.chdir() # Make the workflow. 
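# Editor's note -- an illustrative sketch, not part of the patch: the test
# below stacks a summary join (joinsource="inputspec") on top of the per-n
# itersource join, so the per-n sums traced above are gathered into a single
# list -- one extra node on top of the previous 14:
per_n_sums = [sum([7, 9]), sum([16, 19])]  # post_join1 results for n=1, n=2
assert per_n_sums == [16, 35]
assert 14 + 1 == 15  # expanded node count asserted below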
- wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2])] # an intermediate node in the first iteration path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # an iterable pre-join node with an itersource - pre_join2 = pe.Node(ProductInterface(), name='pre_join2') - pre_join2.itersource = ('inputspec', 'n') - pre_join2.iterables = ('input1', {1: [3, 4], 2: [5, 6]}) - wf.connect(pre_join1, 'output1', pre_join2, 'input2') + pre_join2 = pe.Node(ProductInterface(), name="pre_join2") + pre_join2.itersource = ("inputspec", "n") + pre_join2.iterables = ("input1", {1: [3, 4], 2: [5, 6]}) + wf.connect(pre_join1, "output1", pre_join2, "input2") # an intermediate node in the second iteration path - pre_join3 = pe.Node(IncrementInterface(), name='pre_join3') - wf.connect(pre_join2, 'output1', pre_join3, 'input1') + pre_join3 = pe.Node(IncrementInterface(), name="pre_join3") + wf.connect(pre_join2, "output1", pre_join3, "input1") # the first join node join1 = pe.JoinNode( - IdentityInterface(fields=['vector']), - joinsource='pre_join2', - joinfield='vector', - name='join1') - wf.connect(pre_join3, 'output1', join1, 'vector') + IdentityInterface(fields=["vector"]), + joinsource="pre_join2", + joinfield="vector", + name="join1", + ) + wf.connect(pre_join3, "output1", join1, "vector") # a join successor node - post_join1 = pe.Node(SumInterface(), name='post_join1') - wf.connect(join1, 'vector', post_join1, 'input1') + post_join1 = pe.Node(SumInterface(), name="post_join1") + wf.connect(join1, "vector", post_join1, "input1") # a summary join node join2 = pe.JoinNode( - IdentityInterface(fields=['vector']), - joinsource='inputspec', - joinfield='vector', - name='join2') - wf.connect(post_join1, 'output1', join2, 'vector') + IdentityInterface(fields=["vector"]), + joinsource="inputspec", + joinfield="vector", + name="join2", + ) + wf.connect(post_join1, "output1", join2, "vector") result = wf.run() # the expanded graph contains the 14 test_itersource_join_source_node # nodes plus the summary join node. - assert len(result.nodes()) == 15, \ - "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 15, "The number of expanded nodes is incorrect." def test_set_join_node_file_input(tmpdir): """Test collecting join inputs to a set.""" tmpdir.chdir() - open('test.nii', 'w+').close() - open('test2.nii', 'w+').close() + open("test.nii", "w+").close() + open("test2.nii", "w+").close() # Make the workflow. 
- wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [ - tmpdir.join('test.nii').strpath, - tmpdir.join('test2.nii').strpath - ])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [ + ("n", [tmpdir.join("test.nii").strpath, tmpdir.join("test2.nii").strpath]) + ] # a pre-join node in the iterated path - pre_join1 = pe.Node(IdentityInterface(fields=['n']), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'n') + pre_join1 = pe.Node(IdentityInterface(fields=["n"]), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "n") # the set join node join = pe.JoinNode( - PickFirst(), joinsource='inputspec', joinfield='in_files', name='join') - wf.connect(pre_join1, 'n', join, 'in_files') + PickFirst(), joinsource="inputspec", joinfield="in_files", name="join" + ) + wf.connect(pre_join1, "n", join, "in_files") wf.run() @@ -580,26 +586,27 @@ def test_nested_workflow_join(tmpdir): tmpdir.chdir() # Make the nested workflow - def nested_wf(i, name='smallwf'): + def nested_wf(i, name="smallwf"): # iterables with list of nums - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', i)] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", i)] # increment each iterable before joining - pre_join = pe.Node(IncrementInterface(), name='pre_join') + pre_join = pe.Node(IncrementInterface(), name="pre_join") # rejoin nums into list join = pe.JoinNode( - IdentityInterface(fields=['n']), - joinsource='inputspec', - joinfield='n', - name='join') + IdentityInterface(fields=["n"]), + joinsource="inputspec", + joinfield="n", + name="join", + ) # define and connect nested workflow - wf = pe.Workflow(name='wf_%d' % i[0]) - wf.connect(inputspec, 'n', pre_join, 'input1') - wf.connect(pre_join, 'output1', join, 'n') + wf = pe.Workflow(name="wf_%d" % i[0]) + wf.connect(inputspec, "n", pre_join, "input1") + wf.connect(pre_join, "output1", join, "n") return wf # master wf - meta_wf = pe.Workflow(name='meta', base_dir='.') + meta_wf = pe.Workflow(name="meta", base_dir=".") # add each mini-workflow to master for i in [[1, 3], [2, 4]]: mini_wf = nested_wf(i) @@ -608,24 +615,25 @@ def nested_wf(i, name='smallwf'): result = meta_wf.run() # there should be six nodes in total - assert len(result.nodes()) == 6, \ - "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 6, "The number of expanded nodes is incorrect." 
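# Editor's note -- an illustrative sketch, not part of the patch: the count
# of 6 asserted above follows from each nested mini-workflow expanding to one
# pre_join replicate per iterable value plus its JoinNode, with the iterable
# IdentityInterface factored away:
mini_iterables = [[1, 3], [2, 4]]
assert sum(len(i) + 1 for i in mini_iterables) == 6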
def test_name_prefix_join(tmpdir): tmpdir.chdir() def sq(x): - return x ** 2 - - wf = pe.Workflow('wf', base_dir=tmpdir.strpath) - square = pe.Node(Function(function=sq), name='square') - square.iterables = [('x', [1, 2])] - square_join = pe.JoinNode(Merge(1, ravel_inputs=True), - name='square_join', - joinsource='square', - joinfield=['in1']) - wf.connect(square, 'out', square_join, "in1") + return x**2 + + wf = pe.Workflow("wf", base_dir=tmpdir.strpath) + square = pe.Node(Function(function=sq), name="square") + square.iterables = [("x", [1, 2])] + square_join = pe.JoinNode( + Merge(1, ravel_inputs=True), + name="square_join", + joinsource="square", + joinfield=["in1"], + ) + wf.connect(square, "out", square_join, "in1") wf.run() @@ -633,29 +641,32 @@ def test_join_nestediters(tmpdir): tmpdir.chdir() def exponent(x, p): - return x ** p - - wf = pe.Workflow('wf', base_dir=tmpdir.strpath) - - xs = pe.Node(IdentityInterface(['x']), - iterables=[('x', [1, 2])], - name='xs') - ps = pe.Node(IdentityInterface(['p']), - iterables=[('p', [3, 4])], - name='ps') - exp = pe.Node(Function(function=exponent), name='exp') - exp_joinx = pe.JoinNode(Merge(1, ravel_inputs=True), - name='exp_joinx', - joinsource='xs', - joinfield=['in1']) - exp_joinp = pe.JoinNode(Merge(1, ravel_inputs=True), - name='exp_joinp', - joinsource='ps', - joinfield=['in1']) - wf.connect([ - (xs, exp, [('x', 'x')]), - (ps, exp, [('p', 'p')]), - (exp, exp_joinx, [('out', 'in1')]), - (exp_joinx, exp_joinp, [('out', 'in1')])]) + return x**p + + wf = pe.Workflow("wf", base_dir=tmpdir.strpath) + + xs = pe.Node(IdentityInterface(["x"]), iterables=[("x", [1, 2])], name="xs") + ps = pe.Node(IdentityInterface(["p"]), iterables=[("p", [3, 4])], name="ps") + exp = pe.Node(Function(function=exponent), name="exp") + exp_joinx = pe.JoinNode( + Merge(1, ravel_inputs=True), + name="exp_joinx", + joinsource="xs", + joinfield=["in1"], + ) + exp_joinp = pe.JoinNode( + Merge(1, ravel_inputs=True), + name="exp_joinp", + joinsource="ps", + joinfield=["in1"], + ) + wf.connect( + [ + (xs, exp, [("x", "x")]), + (ps, exp, [("p", "p")]), + (exp, exp_joinx, [("out", "in1")]), + (exp_joinx, exp_joinp, [("out", "in1")]), + ] + ) wf.run() diff --git a/nipype/pipeline/engine/tests/test_nodes.py b/nipype/pipeline/engine/tests/test_nodes.py index ea03fe69ae..19ffd714c6 100644 --- a/nipype/pipeline/engine/tests/test_nodes.py +++ b/nipype/pipeline/engine/tests/test_nodes.py @@ -1,20 +1,18 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function, unicode_literals -from builtins import str import os from copy import deepcopy import pytest from .... import config from ....interfaces import utility as niu +from ....interfaces import base as nib from ... 
import engine as pe from ..utils import merge_dict from .test_base import EngineTestInterface from .test_utils import UtilsTestInterface -''' +""" Test for order of iterables import nipype.pipeline.engine as pe @@ -44,7 +42,7 @@ wf1.run(inseries=True, createdirsonly=True) wf1.write_graph(graph2use='exec') -''' +""" ''' import nipype.pipeline.engine as pe import nipype.interfaces.spm as spm @@ -90,34 +88,42 @@ def test_node_init(): with pytest.raises(TypeError): pe.Node() with pytest.raises(IOError): - pe.Node(EngineTestInterface, name='test') + pe.Node(EngineTestInterface, name="test") def test_node_get_output(): - mod1 = pe.Node(interface=EngineTestInterface(), name='mod1') + mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") mod1.inputs.input1 = 1 mod1.run() - assert mod1.get_output('output1') == [1, 1] + assert mod1.get_output("output1") == [1, 1] mod1._result = None - assert mod1.get_output('output1') == [1, 1] + assert mod1.get_output("output1") == [1, 1] def test_mapnode_iterfield_check(): - mod1 = pe.MapNode(EngineTestInterface(), iterfield=['input1'], name='mod1') + mod1 = pe.MapNode(EngineTestInterface(), iterfield=["input1"], name="mod1") with pytest.raises(ValueError): mod1._check_iterfield() mod1 = pe.MapNode( - EngineTestInterface(), iterfield=['input1', 'input2'], name='mod1') + EngineTestInterface(), iterfield=["input1", "input2"], name="mod1" + ) mod1.inputs.input1 = [1, 2] mod1.inputs.input2 = 3 with pytest.raises(ValueError): mod1._check_iterfield() -@pytest.mark.parametrize("x_inp, f_exp", - [(3, [6]), ([2, 3], [4, 6]), ((2, 3), [4, 6]), - (range(3), [0, 2, 4]), ("Str", ["StrStr"]), - (["Str1", "Str2"], ["Str1Str1", "Str2Str2"])]) +@pytest.mark.parametrize( + "x_inp, f_exp", + [ + (3, [6]), + ([2, 3], [4, 6]), + ((2, 3), [4, 6]), + (range(3), [0, 2, 4]), + ("Str", ["StrStr"]), + (["Str1", "Str2"], ["Str1Str1", "Str2Str2"]), + ], +) def test_mapnode_iterfield_type(x_inp, f_exp): from nipype import MapNode, Function @@ -141,19 +147,21 @@ def func1(in1): return in1 + 1 n1 = MapNode( - Function(input_names=['in1'], output_names=['out'], function=func1), - iterfield=['in1'], + Function(input_names=["in1"], output_names=["out"], function=func1), + iterfield=["in1"], nested=True, - name='n1') + name="n1", + ) n1.inputs.in1 = [[1, [2]], 3, [4, 5]] n1.run() - assert n1.get_output('out') == [[2, [3]], 4, [5, 6]] + assert n1.get_output("out") == [[2, [3]], 4, [5, 6]] n2 = MapNode( - Function(input_names=['in1'], output_names=['out'], function=func1), - iterfield=['in1'], + Function(input_names=["in1"], output_names=["out"], function=func1), + iterfield=["in1"], nested=False, - name='n1') + name="n1", + ) n2.inputs.in1 = [[1, [2]], 3, [4, 5]] with pytest.raises(Exception) as excinfo: @@ -169,27 +177,25 @@ def func1(in1): return in1 + 1 mapnode = MapNode( - Function(function=func1), - iterfield='in1', - name='mapnode', - n_procs=2, - mem_gb=2) + Function(function=func1), iterfield="in1", name="mapnode", n_procs=2, mem_gb=2 + ) mapnode.inputs.in1 = [1, 2] for idx, node in mapnode._make_nodes(): - for attr in ('overwrite', 'run_without_submitting', 'plugin_args'): + for attr in ("overwrite", "run_without_submitting", "plugin_args"): + assert getattr(node, attr) == getattr(mapnode, attr) + for attr in ("_n_procs", "_mem_gb"): assert getattr(node, attr) == getattr(mapnode, attr) - for attr in ('_n_procs', '_mem_gb'): - assert (getattr(node, attr) == getattr(mapnode, attr)) def test_node_hash(tmpdir): from nipype.interfaces.utility import Function + tmpdir.chdir() 
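# Editor's note -- an illustrative sketch, not part of the patch: a
# pure-Python model of the nested=True MapNode semantics exercised in
# test_mapnode_nested above.  The function is mapped over a nested list while
# its shape is preserved; with nested=False the sub-lists themselves would be
# passed to func1 and `[1, [2]] + 1` would raise:
def _nested_map(fn, xs):
    return [_nested_map(fn, x) if isinstance(x, list) else fn(x) for x in xs]

assert _nested_map(lambda v: v + 1, [[1, [2]], 3, [4, 5]]) == [[2, [3]], 4, [5, 6]]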
config.set_default_config() - config.set('execution', 'stop_on_first_crash', True) - config.set('execution', 'crashdump_dir', os.getcwd()) + config.set("execution", "stop_on_first_crash", True) + config.set("execution", "crashdump_dir", os.getcwd()) def func1(): return 1 @@ -198,17 +204,18 @@ def func2(a): return a + 1 n1 = pe.Node( - Function(input_names=[], output_names=['a'], function=func1), - name='n1') + Function(input_names=[], output_names=["a"], function=func1), name="n1" + ) n2 = pe.Node( - Function(input_names=['a'], output_names=['b'], function=func2), - name='n2') - w1 = pe.Workflow(name='test') + Function(input_names=["a"], output_names=["b"], function=func2), name="n2" + ) + w1 = pe.Workflow(name="test") def modify(x): return x + 1 + n1.inputs.a = 1 - w1.connect(n1, ('a', modify), n2, 'a') + w1.connect(n1, ("a", modify), n2, "a") w1.base_dir = os.getcwd() # create dummy distributed plugin class @@ -221,25 +228,26 @@ class EngineTestException(Exception): class RaiseError(DistributedPluginBase): def _submit_job(self, node, updatehash=False): raise EngineTestException( - 'Submit called - cached=%s, updated=%s' % node.is_cached()) + "Submit called - cached=%s, updated=%s" % node.is_cached() + ) # check if a proper exception is raised with pytest.raises(EngineTestException) as excinfo: w1.run(plugin=RaiseError()) - assert str(excinfo.value).startswith('Submit called') + assert str(excinfo.value).startswith("Submit called") # generate outputs - w1.run(plugin='Linear') + w1.run(plugin="Linear") # ensure plugin is being called - config.set('execution', 'local_hash_check', False) + config.set("execution", "local_hash_check", False) # rerun to ensure we have outputs - w1.run(plugin='Linear') + w1.run(plugin="Linear") # set local check - config.set('execution', 'local_hash_check', True) - w1 = pe.Workflow(name='test') - w1.connect(n1, ('a', modify), n2, 'a') + config.set("execution", "local_hash_check", True) + w1 = pe.Workflow(name="test") + w1.connect(n1, ("a", modify), n2, "a") w1.base_dir = os.getcwd() w1.run(plugin=RaiseError()) @@ -247,57 +255,58 @@ def _submit_job(self, node, updatehash=False): def test_outputs_removal(tmpdir): def test_function(arg1): import os - file1 = os.path.join(os.getcwd(), 'file1.txt') - file2 = os.path.join(os.getcwd(), 'file2.txt') - with open(file1, 'wt') as fp: - fp.write('%d' % arg1) - with open(file2, 'wt') as fp: - fp.write('%d' % arg1) + + file1 = os.path.join(os.getcwd(), "file1.txt") + file2 = os.path.join(os.getcwd(), "file2.txt") + with open(file1, "w") as fp: + fp.write("%d" % arg1) + with open(file2, "w") as fp: + fp.write("%d" % arg1) return file1, file2 n1 = pe.Node( niu.Function( - input_names=['arg1'], - output_names=['file1', 'file2'], - function=test_function), + input_names=["arg1"], + output_names=["file1", "file2"], + function=test_function, + ), base_dir=tmpdir.strpath, - name='testoutputs') + name="testoutputs", + ) n1.inputs.arg1 = 1 - n1.config = {'execution': {'remove_unnecessary_outputs': True}} + n1.config = {"execution": {"remove_unnecessary_outputs": True}} n1.config = merge_dict(deepcopy(config._sections), n1.config) n1.run() - assert tmpdir.join(n1.name, 'file1.txt').check() - assert tmpdir.join(n1.name, 'file1.txt').check() - n1.needed_outputs = ['file2'] + assert tmpdir.join(n1.name, "file1.txt").check() + assert tmpdir.join(n1.name, "file2.txt").check() + n1.needed_outputs = ["file2"] n1.run() - assert not 
tmpdir.join(n1.name, "file1.txt").check() + assert tmpdir.join(n1.name, "file2.txt").check() def test_inputs_removal(tmpdir): - file1 = tmpdir.join('file1.txt') - file1.write('dummy_file') - n1 = pe.Node( - UtilsTestInterface(), base_dir=tmpdir.strpath, name='testinputs') + file1 = tmpdir.join("file1.txt") + file1.write("dummy_file") + n1 = pe.Node(UtilsTestInterface(), base_dir=tmpdir.strpath, name="testinputs") n1.inputs.in_file = file1.strpath - n1.config = {'execution': {'keep_inputs': True}} + n1.config = {"execution": {"keep_inputs": True}} n1.config = merge_dict(deepcopy(config._sections), n1.config) n1.run() - assert tmpdir.join(n1.name, 'file1.txt').check() + assert tmpdir.join(n1.name, "file1.txt").check() n1.inputs.in_file = file1.strpath - n1.config = {'execution': {'keep_inputs': False}} + n1.config = {"execution": {"keep_inputs": False}} n1.config = merge_dict(deepcopy(config._sections), n1.config) n1.overwrite = True n1.run() - assert not tmpdir.join(n1.name, 'file1.txt').check() + assert not tmpdir.join(n1.name, "file1.txt").check() def test_outputmultipath_collapse(tmpdir): """Test an OutputMultiPath whose initial value is ``[[x]]`` to ensure that it is returned as ``[x]``, regardless of how accessed.""" select_if = niu.Select(inlist=[[1, 2, 3], [4]], index=1) - select_nd = pe.Node(niu.Select(inlist=[[1, 2, 3], [4]], index=1), - name='select_nd') + select_nd = pe.Node(niu.Select(inlist=[[1, 2, 3], [4]], index=1), name="select_nd") ifres = select_if.run() ndres = select_nd.run() @@ -305,3 +314,68 @@ def test_outputmultipath_collapse(tmpdir): assert ifres.outputs.out == [4] assert ndres.outputs.out == [4] assert select_nd.result.outputs.out == [4] + + +@pytest.mark.timeout(30) +def test_mapnode_single(tmpdir): + tmpdir.chdir() + + def _producer(num=1, deadly_num=7): + if num == deadly_num: + raise RuntimeError("Got the deadly num (%d)." 
% num) + return num + 1 + + pnode = pe.MapNode( + niu.Function(function=_producer), name="ProducerNode", iterfield=["num"] + ) + pnode.inputs.num = [7] + wf = pe.Workflow(name="PC_Workflow") + wf.add_nodes([pnode]) + wf.base_dir = os.path.abspath("./test_output") + with pytest.raises(RuntimeError): + wf.run(plugin="MultiProc") + + +class FailCommandLine(nib.CommandLine): + input_spec = nib.CommandLineInputSpec + output_spec = nib.TraitedSpec + _cmd = 'nipype-node-execution-fail' + + +def test_NodeExecutionError(tmp_path, monkeypatch): + import stat + + monkeypatch.chdir(tmp_path) + + # create basic executable and add to PATH + exebin = tmp_path / 'bin' + exebin.mkdir() + exe = exebin / 'nipype-node-execution-fail' + exe.write_text( + '#!/bin/bash\necho "Running"\necho "This should fail" >&2\nexit 1', + encoding='utf-8', + ) + exe.chmod(exe.stat().st_mode | stat.S_IEXEC) + monkeypatch.setenv("PATH", str(exe.parent.absolute()), prepend=os.pathsep) + + # Test with cmdline interface + cmd = pe.Node(FailCommandLine(), name="cmd-fail", base_dir='cmd') + with pytest.raises(pe.nodes.NodeExecutionError) as exc: + cmd.run() + error_msg = str(exc.value) + + for attr in ("Cmdline:", "Stdout:", "Stderr:", "Traceback:"): + assert attr in error_msg + assert "This should fail" in error_msg + + # Test with function interface + def fail(): + raise Exception("Functions can fail too") + + func = pe.Node(niu.Function(function=fail), name='func-fail', base_dir='func') + with pytest.raises(pe.nodes.NodeExecutionError) as exc: + func.run() + error_msg = str(exc.value) + assert "Traceback:" in error_msg + assert "Cmdline:" not in error_msg + assert "Functions can fail too" in error_msg diff --git a/nipype/pipeline/engine/tests/test_utils.py b/nipype/pipeline/engine/tests/test_utils.py index 42f8b2434e..78483b6923 100644 --- a/nipype/pipeline/engine/tests/test_utils.py +++ b/nipype/pipeline/engine/tests/test_utils.py @@ -1,14 +1,8 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine utils module """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, open - import os -import sys from copy import deepcopy import pytest @@ -16,7 +10,12 @@ from ....interfaces import base as nib from ....interfaces import utility as niu from .... 
import config -from ..utils import clean_working_directory, write_workflow_prov +from ..utils import ( + clean_working_directory, + write_workflow_prov, + load_resultfile, + format_node, +) class InputSpec(nib.TraitedSpec): @@ -24,7 +23,7 @@ class InputSpec(nib.TraitedSpec): class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class UtilsTestInterface(nib.BaseInterface): @@ -37,38 +36,37 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1] + outputs["output1"] = [1] return outputs def test_identitynode_removal(tmpdir): def test_function(arg1, arg2, arg3): import numpy as np + return (np.array(arg1) + arg2 + arg3).tolist() wf = pe.Workflow(name="testidentity", base_dir=tmpdir.strpath) n1 = pe.Node( - niu.IdentityInterface(fields=['a', 'b']), - name='src', - base_dir=tmpdir.strpath) - n1.iterables = ('b', [0, 1, 2, 3]) + niu.IdentityInterface(fields=["a", "b"]), name="src", base_dir=tmpdir.strpath + ) + n1.iterables = ("b", [0, 1, 2, 3]) n1.inputs.a = [0, 1, 2, 3] - n2 = pe.Node(niu.Select(), name='selector', base_dir=tmpdir.strpath) - wf.connect(n1, ('a', test_function, 1, -1), n2, 'inlist') - wf.connect(n1, 'b', n2, 'index') + n2 = pe.Node(niu.Select(), name="selector", base_dir=tmpdir.strpath) + wf.connect(n1, ("a", test_function, 1, -1), n2, "inlist") + wf.connect(n1, "b", n2, "index") n3 = pe.Node( - niu.IdentityInterface(fields=['c', 'd']), - name='passer', - base_dir=tmpdir.strpath) + niu.IdentityInterface(fields=["c", "d"]), name="passer", base_dir=tmpdir.strpath + ) n3.inputs.c = [1, 2, 3, 4] - wf.connect(n2, 'out', n3, 'd') + wf.connect(n2, "out", n3, "d") - n4 = pe.Node(niu.Select(), name='selector2', base_dir=tmpdir.strpath) - wf.connect(n3, ('c', test_function, 1, -1), n4, 'inlist') - wf.connect(n3, 'd', n4, 'index') + n4 = pe.Node(niu.Select(), name="selector2", base_dir=tmpdir.strpath) + wf.connect(n3, ("c", test_function, 1, -1), n4, "inlist") + wf.connect(n3, "d", n4, "index") fg = wf._create_flat_graph() wf._set_needed_outputs(fg) @@ -88,29 +86,35 @@ class InputSpec(nib.TraitedSpec): inputs = InputSpec() filenames = [ - 'file.hdr', 'file.img', 'file.BRIK', 'file.HEAD', '_0x1234.json', - 'foo.txt' + "file.hdr", + "file.img", + "file.BRIK", + "file.HEAD", + "_0x1234.json", + "foo.txt", ] outfiles = [] for filename in filenames: outfile = tmpdir.join(filename) - outfile.write('dummy') + outfile.write("dummy") outfiles.append(outfile.strpath) outputs.files = outfiles[:4:2] outputs.others = outfiles[5] inputs.infile = outfiles[-1] - needed_outputs = ['files'] + needed_outputs = ["files"] config.set_default_config() assert os.path.exists(outfiles[5]) config.set_default_config() - config.set('execution', 'remove_unnecessary_outputs', False) - out = clean_working_directory(outputs, tmpdir.strpath, inputs, - needed_outputs, deepcopy(config._sections)) + config.set("execution", "remove_unnecessary_outputs", False) + out = clean_working_directory( + outputs, tmpdir.strpath, inputs, needed_outputs, deepcopy(config._sections) + ) assert os.path.exists(outfiles[5]) assert out.others == outfiles[5] - config.set('execution', 'remove_unnecessary_outputs', True) - out = clean_working_directory(outputs, tmpdir.strpath, inputs, - needed_outputs, deepcopy(config._sections)) + config.set("execution", "remove_unnecessary_outputs", True) + out = clean_working_directory( + outputs, tmpdir.strpath, inputs, needed_outputs, 
deepcopy(config._sections) + ) assert os.path.exists(outfiles[1]) assert os.path.exists(outfiles[3]) assert os.path.exists(outfiles[4]) @@ -122,39 +126,40 @@ class InputSpec(nib.TraitedSpec): def create_wf(name): """Creates a workflow for the following tests""" + def fwhm(fwhm): return fwhm pipe = pe.Workflow(name=name) process = pe.Node( - niu.Function( - input_names=['fwhm'], output_names=['fwhm'], function=fwhm), - name='proc') - process.iterables = ('fwhm', [0]) + niu.Function(input_names=["fwhm"], output_names=["fwhm"], function=fwhm), + name="proc", + ) + process.iterables = ("fwhm", [0]) process2 = pe.Node( - niu.Function( - input_names=['fwhm'], output_names=['fwhm'], function=fwhm), - name='proc2') - process2.iterables = ('fwhm', [0]) - pipe.connect(process, 'fwhm', process2, 'fwhm') + niu.Function(input_names=["fwhm"], output_names=["fwhm"], function=fwhm), + name="proc2", + ) + process2.iterables = ("fwhm", [0]) + pipe.connect(process, "fwhm", process2, "fwhm") return pipe def test_multi_disconnected_iterable(tmpdir): - metawf = pe.Workflow(name='meta') + metawf = pe.Workflow(name="meta") metawf.base_dir = tmpdir.strpath - metawf.add_nodes([create_wf('wf%d' % i) for i in range(30)]) - eg = metawf.run(plugin='Linear') + metawf.add_nodes([create_wf("wf%d" % i) for i in range(30)]) + eg = metawf.run(plugin="Linear") assert len(eg.nodes()) == 60 def test_provenance(tmpdir): - metawf = pe.Workflow(name='meta') + metawf = pe.Workflow(name="meta") metawf.base_dir = tmpdir.strpath - metawf.add_nodes([create_wf('wf%d' % i) for i in range(1)]) - eg = metawf.run(plugin='Linear') - prov_base = tmpdir.join('workflow_provenance_test').strpath - psg = write_workflow_prov(eg, prov_base, format='all') + metawf.add_nodes([create_wf("wf%d" % i) for i in range(1)]) + eg = metawf.run(plugin="Linear") + prov_base = tmpdir.join("workflow_provenance_test").strpath + psg = write_workflow_prov(eg, prov_base, format="all") assert len(psg.bundles) == 2 assert len(psg.get_records()) == 7 @@ -163,40 +168,36 @@ def dummy_func(value): return value + 1 -@pytest.mark.skipif( - sys.version_info < (3, 0), reason="the famous segfault #1788") def test_mapnode_crash(tmpdir): """Test mapnode crash when stop_on_first_crash is True""" cwd = os.getcwd() node = pe.MapNode( niu.Function( - input_names=['WRONG'], - output_names=['newstring'], - function=dummy_func), - iterfield=['WRONG'], - name='myfunc') - node.inputs.WRONG = ['string{}'.format(i) for i in range(3)] + input_names=["WRONG"], output_names=["newstring"], function=dummy_func + ), + iterfield=["WRONG"], + name="myfunc", + ) + node.inputs.WRONG = [f"string{i}" for i in range(3)] node.config = deepcopy(config._sections) - node.config['execution']['stop_on_first_crash'] = True + node.config["execution"]["stop_on_first_crash"] = True node.base_dir = tmpdir.strpath - with pytest.raises(TypeError): + with pytest.raises(pe.nodes.NodeExecutionError): node.run() os.chdir(cwd) -@pytest.mark.skipif( - sys.version_info < (3, 0), reason="the famous segfault #1788") def test_mapnode_crash2(tmpdir): """Test mapnode crash when stop_on_first_crash is False""" cwd = os.getcwd() node = pe.MapNode( niu.Function( - input_names=['WRONG'], - output_names=['newstring'], - function=dummy_func), - iterfield=['WRONG'], - name='myfunc') - node.inputs.WRONG = ['string{}'.format(i) for i in range(3)] + input_names=["WRONG"], output_names=["newstring"], function=dummy_func + ), + iterfield=["WRONG"], + name="myfunc", + ) + node.inputs.WRONG = [f"string{i}" for i in range(3)] node.base_dir = 
tmpdir.strpath with pytest.raises(Exception): @@ -204,23 +205,137 @@ def test_mapnode_crash2(tmpdir): os.chdir(cwd) -@pytest.mark.skipif( - sys.version_info < (3, 0), reason="the famous segfault #1788") def test_mapnode_crash3(tmpdir): """Test mapnode crash when mapnode is embedded in a workflow""" tmpdir.chdir() node = pe.MapNode( niu.Function( - input_names=['WRONG'], - output_names=['newstring'], - function=dummy_func), - iterfield=['WRONG'], - name='myfunc') - node.inputs.WRONG = ['string{}'.format(i) for i in range(3)] - wf = pe.Workflow('testmapnodecrash') + input_names=["WRONG"], output_names=["newstring"], function=dummy_func + ), + iterfield=["WRONG"], + name="myfunc", + ) + node.inputs.WRONG = [f"string{i}" for i in range(3)] + wf = pe.Workflow("testmapnodecrash") wf.add_nodes([node]) wf.base_dir = tmpdir.strpath - # changing crashdump dir to cwl (to avoid problems with read-only systems) + # changing crashdump dir to current working directory (to avoid problems with read-only systems) wf.config["execution"]["crashdump_dir"] = os.getcwd() with pytest.raises(RuntimeError): - wf.run(plugin='Linear') + wf.run(plugin="Linear") + + +class StrPathConfuserInputSpec(nib.TraitedSpec): + in_str = nib.traits.String() + + +class StrPathConfuserOutputSpec(nib.TraitedSpec): + out_tuple = nib.Tuple(nib.File, nib.traits.String) + out_dict_path = nib.traits.Dict(nib.traits.String, nib.File(exists=True)) + out_dict_str = nib.traits.DictStrStr() + out_list = nib.traits.List(nib.traits.String) + out_str = nib.traits.String() + out_path = nib.File(exists=True) + + +class StrPathConfuser(nib.SimpleInterface): + input_spec = StrPathConfuserInputSpec + output_spec = StrPathConfuserOutputSpec + + def _run_interface(self, runtime): + out_path = os.path.abspath(os.path.basename(self.inputs.in_str) + "_path") + open(out_path, "w").close() + self._results["out_str"] = self.inputs.in_str + self._results["out_path"] = out_path + self._results["out_tuple"] = (out_path, self.inputs.in_str) + self._results["out_dict_path"] = {self.inputs.in_str: out_path} + self._results["out_dict_str"] = {self.inputs.in_str: self.inputs.in_str} + self._results["out_list"] = [self.inputs.in_str] * 2 + return runtime + + +def test_modify_paths_bug(tmpdir): + """ + There was a bug in which, if the current working directory contained a file with the name + of an output String, the string would get transformed into a path, and generally wreak havoc. + This attempts to replicate that condition, using an object with strings and paths in various + trait configurations, to ensure that the guards added resolve the issue. + Please see https://github.com/nipy/nipype/issues/2944 for more details. 
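For readers unfamiliar with nipype#2944, here is a minimal sketch of the failure mode this test reproduces; naive_rebase is a hypothetical stand-in for the buggy rebasing heuristic, not nipype's actual code:

import os

def naive_rebase(value, basedir):
    # Hypothetical pre-fix heuristic: any value that happens to name an
    # existing file is treated as a path and rewritten. A String output
    # of "2" is silently replaced by an absolute path whenever a file
    # named "2" exists in the working directory.
    candidate = os.path.join(basedir, str(value))
    if os.path.isfile(candidate):
        return os.path.abspath(candidate)
    return value

With a file literally named "2" on disk, naive_rebase("2", os.getcwd()) returns a path instead of the original string, which is exactly what the assertions in the test below rule out.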
+ """ + tmpdir.chdir() + + spc = pe.Node(StrPathConfuser(in_str="2"), name="spc") + + open("2", "w").close() + + outputs = spc.run().outputs + + # Basic check that string was not manipulated + out_str = outputs.out_str + assert out_str == "2" + + # Check path exists and is absolute + out_path = outputs.out_path + assert os.path.isabs(out_path) + + # Assert data structures pass through correctly + assert outputs.out_tuple == (out_path, out_str) + assert outputs.out_dict_path == {out_str: out_path} + assert outputs.out_dict_str == {out_str: out_str} + assert outputs.out_list == [out_str] * 2 + + +@pytest.mark.parametrize("use_relative", [True, False]) +def test_save_load_resultfile(tmpdir, use_relative): + """Test minimally the save/load functions for result files.""" + from shutil import copytree, rmtree + + tmpdir.chdir() + + old_use_relative = config.getboolean("execution", "use_relative_paths") + config.set("execution", "use_relative_paths", use_relative) + + spc = pe.Node(StrPathConfuser(in_str="2"), name="spc") + spc.base_dir = tmpdir.mkdir("node").strpath + + result = spc.run() + + loaded_result = load_resultfile( + tmpdir.join("node").join("spc").join("result_spc.pklz").strpath + ) + + assert result.runtime.dictcopy() == loaded_result.runtime.dictcopy() + assert result.inputs == loaded_result.inputs + assert result.outputs.get() == loaded_result.outputs.get() + + # Test the mobility of the result file. + copytree(tmpdir.join("node").strpath, tmpdir.join("node2").strpath) + rmtree(tmpdir.join("node").strpath) + + if use_relative: + loaded_result2 = load_resultfile( + tmpdir.join("node2").join("spc").join("result_spc.pklz").strpath + ) + + assert result.runtime.dictcopy() == loaded_result2.runtime.dictcopy() + assert result.inputs == loaded_result2.inputs + assert loaded_result2.outputs.get() != result.outputs.get() + newpath = result.outputs.out_path.replace("/node/", "/node2/") + assert loaded_result2.outputs.out_path == newpath + assert loaded_result2.outputs.out_tuple[0] == newpath + assert loaded_result2.outputs.out_dict_path["2"] == newpath + else: + with pytest.raises(nib.TraitError): + load_resultfile( + tmpdir.join("node2").join("spc").join("result_spc.pklz").strpath + ) + + config.set("execution", "use_relative_paths", old_use_relative) + + +def test_format_node(): + node = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="node") + serialized = format_node(node) + workspace = {"Node": pe.Node} + exec("\n".join(serialized), workspace) + assert workspace["node"].interface._fields == node.interface._fields diff --git a/nipype/pipeline/engine/tests/test_workflows.py b/nipype/pipeline/engine/tests/test_workflows.py index 0cc7f2142f..12d56de285 100644 --- a/nipype/pipeline/engine/tests/test_workflows.py +++ b/nipype/pipeline/engine/tests/test_workflows.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine workflows module @@ -20,27 +19,25 @@ def test_init(): with pytest.raises(TypeError): pe.Workflow() - pipe = pe.Workflow(name='pipe') - assert type(pipe._graph) == nx.DiGraph + pipe = pe.Workflow(name="pipe") + assert type(pipe._graph) is nx.DiGraph def test_connect(): - pipe = pe.Workflow(name='pipe') - mod2 = pe.Node(EngineTestInterface(), name='mod2') - mod1 = pe.Node(EngineTestInterface(), name='mod1') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + pipe = pe.Workflow(name="pipe") + mod2 = pe.Node(EngineTestInterface(), name="mod2") + 
mod1 = pe.Node(EngineTestInterface(), name="mod1") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) assert mod1 in pipe._graph.nodes() assert mod2 in pipe._graph.nodes() - assert pipe._graph.get_edge_data(mod1, mod2) == { - 'connect': [('output1', 'input1')] - } + assert pipe._graph.get_edge_data(mod1, mod2) == {"connect": [("output1", "input1")]} def test_add_nodes(): - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(EngineTestInterface(), name='mod1') - mod2 = pe.Node(EngineTestInterface(), name='mod2') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(EngineTestInterface(), name="mod1") + mod2 = pe.Node(EngineTestInterface(), name="mod2") pipe.add_nodes([mod1, mod2]) assert mod1 in pipe._graph.nodes() @@ -48,45 +45,60 @@ def test_add_nodes(): def test_disconnect(): - a = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='a') - b = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='b') - flow1 = pe.Workflow(name='test') - flow1.connect(a, 'a', b, 'a') - flow1.disconnect(a, 'a', b, 'a') + a = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="a") + b = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="b") + flow1 = pe.Workflow(name="test") + flow1.connect(a, "a", b, "a") + flow1.disconnect(a, "a", b, "a") assert list(flow1._graph.edges()) == [] def test_workflow_add(): - n1 = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='n1') - n2 = pe.Node(niu.IdentityInterface(fields=['c', 'd']), name='n2') - n3 = pe.Node(niu.IdentityInterface(fields=['c', 'd']), name='n1') - w1 = pe.Workflow(name='test') - w1.connect(n1, 'a', n2, 'c') + n1 = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="n1") + n2 = pe.Node(niu.IdentityInterface(fields=["c", "d"]), name="n2") + n3 = pe.Node(niu.IdentityInterface(fields=["c", "d"]), name="n1") + w1 = pe.Workflow(name="test") + w1.connect(n1, "a", n2, "c") for node in [n1, n2, n3]: with pytest.raises(IOError): w1.add_nodes([node]) with pytest.raises(IOError): - w1.connect([(w1, n2, [('n1.a', 'd')])]) + w1.connect([(w1, n2, [("n1.a", "d")])]) def test_doubleconnect(): - a = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='a') - b = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='b') - flow1 = pe.Workflow(name='test') - flow1.connect(a, 'a', b, 'a') + a = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="a") + b = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="b") + flow1 = pe.Workflow(name="test") + flow1.connect(a, "a", b, "a") with pytest.raises(Exception) as excinfo: - flow1.connect(a, 'b', b, 'a') + flow1.connect(a, "b", b, "a") assert "Trying to connect" in str(excinfo.value) - c = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='c') - flow1 = pe.Workflow(name='test2') + c = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="c") + flow1 = pe.Workflow(name="test2") with pytest.raises(Exception) as excinfo: - flow1.connect([(a, c, [('b', 'b')]), (b, c, [('a', 'b')])]) + flow1.connect([(a, c, [("b", "b")]), (b, c, [("a", "b")])]) assert "Trying to connect" in str(excinfo.value) -def test_duplicate_node_check(): +def test_nested_workflow_doubleconnect(): + # double input with nested workflows + a = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="a") + b = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="b") + c = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="c") + flow1 = pe.Workflow(name="test1") + flow2 = pe.Workflow(name="test2") + flow3 = pe.Workflow(name="test3") + flow1.add_nodes([b]) + flow2.connect(a, "a", flow1, "b.a") + with 
pytest.raises(Exception) as excinfo: + flow3.connect(c, "a", flow2, "test1.b.a") + assert "Some connections were not found" in str(excinfo.value) + flow3.connect(c, "b", flow2, "test1.b.b") + +def test_duplicate_node_check(): wf = pe.Workflow(name="testidentity") original_list = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] @@ -114,29 +126,31 @@ def test_duplicate_node_check(): def _test_function(arg1): import os - file1 = os.path.join(os.getcwd(), 'file1.txt') - file2 = os.path.join(os.getcwd(), 'file2.txt') - file3 = os.path.join(os.getcwd(), 'file3.txt') - file4 = os.path.join(os.getcwd(), 'subdir', 'file4.txt') + + file1 = os.path.join(os.getcwd(), "file1.txt") + file2 = os.path.join(os.getcwd(), "file2.txt") + file3 = os.path.join(os.getcwd(), "file3.txt") + file4 = os.path.join(os.getcwd(), "subdir", "file4.txt") os.mkdir("subdir") for filename in [file1, file2, file3, file4]: - with open(filename, 'wt') as fp: - fp.write('%d' % arg1) + with open(filename, "w") as fp: + fp.write("%d" % arg1) return file1, file2, os.path.join(os.getcwd(), "subdir") def _test_function2(in_file, arg): import os - with open(in_file, 'rt') as fp: + + with open(in_file) as fp: in_arg = fp.read() - file1 = os.path.join(os.getcwd(), 'file1.txt') - file2 = os.path.join(os.getcwd(), 'file2.txt') - file3 = os.path.join(os.getcwd(), 'file3.txt') + file1 = os.path.join(os.getcwd(), "file1.txt") + file2 = os.path.join(os.getcwd(), "file2.txt") + file3 = os.path.join(os.getcwd(), "file3.txt") files = [file1, file2, file3] for filename in files: - with open(filename, 'wt') as fp: - fp.write('%d' % arg + in_arg) + with open(filename, "w") as fp: + fp.write("%d" % arg + in_arg) return file1, file2, 1 @@ -145,67 +159,69 @@ def _test_function3(arg): @pytest.mark.parametrize( - 'plugin, remove_unnecessary_outputs, keep_inputs', - list(product(['Linear', 'MultiProc'], [False, True], [True, False]))) -def test_outputs_removal_wf(tmpdir, plugin, remove_unnecessary_outputs, - keep_inputs): + "plugin, remove_unnecessary_outputs, keep_inputs", + list(product(["Linear", "MultiProc"], [False, True], [True, False])), +) +def test_outputs_removal_wf(tmpdir, plugin, remove_unnecessary_outputs, keep_inputs): config.set_default_config() - config.set('execution', 'remove_unnecessary_outputs', - remove_unnecessary_outputs) - config.set('execution', 'keep_inputs', keep_inputs) + config.set("execution", "remove_unnecessary_outputs", remove_unnecessary_outputs) + config.set("execution", "keep_inputs", keep_inputs) n1 = pe.Node( niu.Function( - output_names=['out_file1', 'out_file2', 'dir'], - function=_test_function), - name='n1', - base_dir=tmpdir.strpath) + output_names=["out_file1", "out_file2", "dir"], function=_test_function + ), + name="n1", + base_dir=tmpdir.strpath, + ) n1.inputs.arg1 = 1 n2 = pe.Node( niu.Function( - output_names=['out_file1', 'out_file2', 'n'], - function=_test_function2), - name='n2', - base_dir=tmpdir.strpath) + output_names=["out_file1", "out_file2", "n"], function=_test_function2 + ), + name="n2", + base_dir=tmpdir.strpath, + ) n2.inputs.arg = 2 n3 = pe.Node( - niu.Function( - output_names=['n'], - function=_test_function3), - name='n3', - base_dir=tmpdir.strpath) + niu.Function(output_names=["n"], function=_test_function3), + name="n3", + base_dir=tmpdir.strpath, + ) - wf = pe.Workflow( - name="node_rem_test" + plugin, base_dir=tmpdir.strpath) + wf = pe.Workflow(name="node_rem_test" + plugin, base_dir=tmpdir.strpath) wf.connect(n1, "out_file1", n2, "in_file") wf.run(plugin=plugin) # Necessary outputs HAVE to exist - 
assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n1.name, 'file1.txt')) - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, 'file1.txt')) - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, 'file2.txt')) + assert os.path.exists(os.path.join(wf.base_dir, wf.name, n1.name, "file1.txt")) + assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file1.txt")) + assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file2.txt")) # Unnecessary outputs exist only iff remove_unnecessary_outputs is True - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n1.name, - 'file2.txt')) is not remove_unnecessary_outputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n1.name, "subdir", - 'file4.txt')) is not remove_unnecessary_outputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n1.name, - 'file3.txt')) is not remove_unnecessary_outputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, - 'file3.txt')) is not remove_unnecessary_outputs - - n4 = pe.Node(UtilsTestInterface(), name='n4', base_dir=tmpdir.strpath) + assert ( + os.path.exists(os.path.join(wf.base_dir, wf.name, n1.name, "file2.txt")) + is not remove_unnecessary_outputs + ) + assert ( + os.path.exists( + os.path.join(wf.base_dir, wf.name, n1.name, "subdir", "file4.txt") + ) + is not remove_unnecessary_outputs + ) + assert ( + os.path.exists(os.path.join(wf.base_dir, wf.name, n1.name, "file3.txt")) + is not remove_unnecessary_outputs + ) + assert ( + os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file3.txt")) + is not remove_unnecessary_outputs + ) + + n4 = pe.Node(UtilsTestInterface(), name="n4", base_dir=tmpdir.strpath) wf.connect(n2, "out_file1", n4, "in_file") def pick_first(l): @@ -216,50 +232,49 @@ def pick_first(l): wf.run(plugin=plugin) # Test necessary outputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, 'file1.txt')) - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, 'file1.txt')) + assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file1.txt")) + assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file1.txt")) # Test unnecessary outputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, - 'file2.txt')) is not remove_unnecessary_outputs + assert ( + os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file2.txt")) + is not remove_unnecessary_outputs + ) # Test keep_inputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n4.name, - 'file1.txt')) is keep_inputs + assert ( + os.path.exists(os.path.join(wf.base_dir, wf.name, n4.name, "file1.txt")) + is keep_inputs + ) def _test_function4(): - raise FileNotFoundError('Generic error') + raise FileNotFoundError("Generic error") def test_config_setting(tmpdir): tmpdir.chdir() - wf = pe.Workflow('config') + wf = pe.Workflow("config") wf.base_dir = os.getcwd() - crashdir = os.path.join(os.getcwd(), 'crashdir') + crashdir = os.path.join(os.getcwd(), "crashdir") os.mkdir(crashdir) wf.config = {"execution": {"crashdump_dir": crashdir}} - n1 = pe.Node(niu.Function(function=_test_function4), - name='errorfunc') + n1 = pe.Node(niu.Function(function=_test_function4), name="errorfunc") wf.add_nodes([n1]) try: wf.run() except RuntimeError: pass - fl = glob(os.path.join(crashdir, 'crash*')) + fl = glob(os.path.join(crashdir, "crash*")) assert len(fl) == 1 # Now test node overwrite - crashdir2 = os.path.join(os.getcwd(), 'crashdir2') + crashdir2 = 
os.path.join(os.getcwd(), "crashdir2") os.mkdir(crashdir2) - crashdir3 = os.path.join(os.getcwd(), 'crashdir3') + crashdir3 = os.path.join(os.getcwd(), "crashdir3") os.mkdir(crashdir3) wf.config = {"execution": {"crashdump_dir": crashdir3}} n1.config = {"execution": {"crashdump_dir": crashdir2}} @@ -269,7 +284,7 @@ def test_config_setting(tmpdir): except RuntimeError: pass - fl = glob(os.path.join(crashdir2, 'crash*')) + fl = glob(os.path.join(crashdir2, "crash*")) assert len(fl) == 1 - fl = glob(os.path.join(crashdir3, 'crash*')) + fl = glob(os.path.join(crashdir3, "crash*")) assert len(fl) == 0 diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py index 3d961126d5..0f800aa02a 100644 --- a/nipype/pipeline/engine/utils.py +++ b/nipype/pipeline/engine/utils.py @@ -1,11 +1,6 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Utility routines for workflow graphs""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, open, next, zip, range - import os import sys import pickle @@ -13,56 +8,56 @@ import re from copy import deepcopy from glob import glob +from pathlib import Path from traceback import format_exception from hashlib import sha1 -import gzip from functools import reduce import numpy as np -from future import standard_library -from ... import logging, config, LooseVersion +from ... import logging, config from ...utils.filemanip import ( + indirectory, relpath, - makedirs, fname_presuffix, - to_str, ensure_list, get_related_files, - FileNotFoundError, save_json, savepkl, + loadpkl, write_rst_header, write_rst_dict, write_rst_list, ) from ...utils.misc import str2bool from ...utils.functions import create_function_from_source -from ...interfaces.base import (Bunch, CommandLine, isdefined, Undefined, - InterfaceResult, traits) +from ...interfaces.base.traits_extension import ( + rebase_path_traits, + resolve_path_traits, + OutputMultiPath, + isdefined, + Undefined, +) +from ...interfaces.base.support import Bunch, InterfaceResult +from ...interfaces.base import CommandLine from ...interfaces.utility import IdentityInterface from ...utils.provenance import ProvStore, pm, nipype_ns, get_id -try: - from inspect import signature -except ImportError: - from funcsigs import signature +from inspect import signature -standard_library.install_aliases() -logger = logging.getLogger('nipype.workflow') -PY3 = sys.version_info[0] > 2 +logger = logging.getLogger("nipype.workflow") -def _parameterization_dir(param): +def _parameterization_dir(param, maxlen): """ Returns the directory name for the given parameterization string as follows: - - If the parameterization is longer than 32 characters, then + - If the parameterization is longer than maxlen characters, then return the SHA-1 hex digest. - Otherwise, return the parameterization unchanged. 
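A standalone restatement of the behavior documented above; the truncation threshold was previously hard-coded to 32 and is now the maxlen argument (the values below are illustrative):

from hashlib import sha1

def parameterization_dir(param, maxlen):
    # Overlong parameterization strings become their 40-character SHA-1
    # hex digest so directory names stay within filesystem limits.
    if len(param) > maxlen:
        return sha1(param.encode()).hexdigest()
    return param

assert parameterization_dir("_fwhm_0", maxlen=32) == "_fwhm_0"
assert len(parameterization_dir("x" * 64, maxlen=32)) == 40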
""" - if len(param) > 32: + if len(param) > maxlen: return sha1(param.encode()).hexdigest() return param @@ -71,19 +66,18 @@ def save_hashfile(hashfile, hashed_inputs): """Store a hashfile""" try: save_json(hashfile, hashed_inputs) - except (IOError, TypeError): + except (OSError, TypeError): err_type = sys.exc_info()[0] if err_type is TypeError: # XXX - SG current workaround is to just # create the hashed file and not put anything # in it - with open(hashfile, 'wt') as fd: + with open(hashfile, "w") as fd: fd.writelines(str(hashed_inputs)) - logger.debug('Unable to write a particular type to the json file') + logger.debug("Unable to write a particular type to the json file") else: - logger.critical('Unable to open the file in write mode: %s', - hashfile) + logger.critical("Unable to open the file in write mode: %s", hashfile) def nodelist_runner(nodes, updatehash=False, stop_first=False): @@ -103,268 +97,218 @@ def nodelist_runner(nodes, updatehash=False, stop_first=False): result = node.result err = [] - if result.runtime and hasattr(result.runtime, 'traceback'): + if result.runtime and hasattr(result.runtime, "traceback"): err = [result.runtime.traceback] err += format_exception(*sys.exc_info()) - err = '\n'.join(err) + err = "\n".join(err) finally: yield i, result, err -def write_report(node, report_type=None, is_mapnode=False): - """Write a report file for a node""" - if not str2bool(node.config['execution']['create_report']): - return - - if report_type not in ['preexec', 'postexec']: - logger.warning('[Node] Unknown report type "%s".', report_type) +def write_node_report(node, result=None, is_mapnode=False): + """Write a report file for a node.""" + if not str2bool(node.config["execution"]["create_report"]): return cwd = node.output_dir() - report_dir = os.path.join(cwd, '_report') - report_file = os.path.join(report_dir, 'report.rst') - makedirs(report_dir, exist_ok=True) - - logger.debug('[Node] Writing %s-exec report to "%s"', report_type[:-4], - report_file) - if report_type.startswith('pre'): - lines = [ - write_rst_header('Node: %s' % get_print_name(node), level=0), - write_rst_list( - ['Hierarchy : %s' % node.fullname, - 'Exec ID : %s' % node._id]), - write_rst_header('Original Inputs', level=1), - write_rst_dict(node.inputs.trait_get()), - ] - with open(report_file, 'wt') as fp: - fp.write('\n'.join(lines)) - return + report_file = Path(cwd) / "_report" / "report.rst" + report_file.parent.mkdir(exist_ok=True, parents=True) lines = [ - write_rst_header('Execution Inputs', level=1), + write_rst_header("Node: %s" % get_print_name(node), level=0), + write_rst_list(["Hierarchy : %s" % node.fullname, "Exec ID : %s" % node._id]), + write_rst_header("Original Inputs", level=1), write_rst_dict(node.inputs.trait_get()), ] - result = node.result # Locally cache result - outputs = result.outputs + if result is None: + logger.debug('[Node] Writing pre-exec report to "%s"', report_file) + report_file.write_text("\n".join(lines), encoding='utf-8') + return + logger.debug('[Node] Writing post-exec report to "%s"', report_file) + lines += [ + write_rst_header("Execution Inputs", level=1), + write_rst_dict(node.inputs.trait_get()), + write_rst_header("Execution Outputs", level=1), + ] + + outputs = result.outputs if outputs is None: - with open(report_file, 'at') as fp: - fp.write('\n'.join(lines)) + lines += ["None"] + report_file.write_text("\n".join(lines), encoding='utf-8') return - lines.append(write_rst_header('Execution Outputs', level=1)) - if isinstance(outputs, Bunch): 
lines.append(write_rst_dict(outputs.dictcopy())) elif outputs: lines.append(write_rst_dict(outputs.trait_get())) + else: + lines += ["Outputs object was empty."] if is_mapnode: - lines.append(write_rst_header('Subnode reports', level=1)) + lines.append(write_rst_header("Subnode reports", level=1)) nitems = len(ensure_list(getattr(node.inputs, node.iterfield[0]))) subnode_report_files = [] for i in range(nitems): - nodecwd = os.path.join(cwd, 'mapflow', '_%s%d' % (node.name, i), - '_report', 'report.rst') - subnode_report_files.append('subnode %d : %s' % (i, nodecwd)) + subnode_file = ( + Path(cwd) + / "mapflow" + / ("_%s%d" % (node.name, i)) + / "_report" + / "report.rst" + ) + subnode_report_files.append("subnode %d : %s" % (i, subnode_file)) lines.append(write_rst_list(subnode_report_files)) - - with open(report_file, 'at') as fp: - fp.write('\n'.join(lines)) + report_file.write_text("\n".join(lines), encoding='utf-8') return - lines.append(write_rst_header('Runtime info', level=1)) + lines.append(write_rst_header("Runtime info", level=1)) # Init rst dictionary of runtime stats rst_dict = { - 'hostname': result.runtime.hostname, - 'duration': result.runtime.duration, - 'working_dir': result.runtime.cwd, - 'prev_wd': getattr(result.runtime, 'prevcwd', ''), + "hostname": result.runtime.hostname, + "duration": result.runtime.duration, + "working_dir": result.runtime.cwd, + "prev_wd": getattr(result.runtime, "prevcwd", ""), } - if hasattr(result.runtime, 'cmdline'): - rst_dict['command'] = result.runtime.cmdline - - # Try and insert memory/threads usage if available - if hasattr(result.runtime, 'mem_peak_gb'): - rst_dict['mem_peak_gb'] = result.runtime.mem_peak_gb - - if hasattr(result.runtime, 'cpu_percent'): - rst_dict['cpu_percent'] = result.runtime.cpu_percent + for prop in ("cmdline", "mem_peak_gb", "cpu_percent"): + if hasattr(result.runtime, prop): + rst_dict[prop] = getattr(result.runtime, prop) lines.append(write_rst_dict(rst_dict)) # Collect terminal output - if hasattr(result.runtime, 'merged'): + if hasattr(result.runtime, "merged"): lines += [ - write_rst_header('Terminal output', level=2), + write_rst_header("Terminal output", level=2), write_rst_list(result.runtime.merged), ] - if hasattr(result.runtime, 'stdout'): + if hasattr(result.runtime, "stdout"): lines += [ - write_rst_header('Terminal - standard output', level=2), + write_rst_header("Terminal - standard output", level=2), write_rst_list(result.runtime.stdout), ] - if hasattr(result.runtime, 'stderr'): + if hasattr(result.runtime, "stderr"): lines += [ - write_rst_header('Terminal - standard error', level=2), + write_rst_header("Terminal - standard error", level=2), write_rst_list(result.runtime.stderr), ] # Store environment - if hasattr(result.runtime, 'environ'): + if hasattr(result.runtime, "environ"): lines += [ - write_rst_header('Environment', level=2), + write_rst_header("Environment", level=2), write_rst_dict(result.runtime.environ), ] - with open(report_file, 'at') as fp: - fp.write('\n'.join(lines)) - return - + report_file.write_text("\n".join(lines), encoding='utf-8') -def _identify_collapses(hastraits): - """ Identify traits that will collapse when being set to themselves. - ``OutputMultiObject``s automatically unwrap a list of length 1 to directly - reference the element of that list. - If that element is itself a list of length 1, then the following will - result in modified values. 
- - hastraits.trait_set(**hastraits.trait_get()) - - Cloning performs this operation on a copy of the original traited object, - allowing us to identify traits that will be affected. - """ - raw = hastraits.trait_get() - cloned = hastraits.clone_traits().trait_get() - - collapsed = set() - for key in cloned: - orig = raw[key] - new = cloned[key] - # Allow numpy to handle the equality checks, as mixed lists and arrays - # can be problematic. - if isinstance(orig, list) and len(orig) == 1 and ( - not np.array_equal(orig, new) and np.array_equal(orig[0], new)): - collapsed.add(key) - - return collapsed - - -def _uncollapse(indexable, collapsed): - """ Wrap collapsible values in a list to prevent double-collapsing. - - Should be used with _identify_collapses to provide the following - idempotent operation: - - collapsed = _identify_collapses(hastraits) - hastraits.trait_set(**_uncollapse(hastraits.trait_get(), collapsed)) - - NOTE: Modifies object in-place, in addition to returning it. - """ +def write_report(node, report_type=None, is_mapnode=False): + """Write a report file for a node - DEPRECATED""" + if report_type not in ("preexec", "postexec"): + logger.warning('[Node] Unknown report type "%s".', report_type) + return - for key in indexable: - if key in collapsed: - indexable[key] = [indexable[key]] - return indexable + write_node_report( + node, + is_mapnode=is_mapnode, + result=node.result if report_type == "postexec" else None, + ) -def _protect_collapses(hastraits): - """ A collapse-protected replacement for hastraits.trait_get() +def save_resultfile(result, cwd, name, rebase=None): + """Save a result pklz file to ``cwd``.""" + if rebase is None: + rebase = config.getboolean("execution", "use_relative_paths") - May be used as follows to provide an idempotent trait_set: + cwd = os.path.abspath(cwd) + resultsfile = os.path.join(cwd, "result_%s.pklz" % name) + logger.debug("Saving results file: '%s'", resultsfile) - hastraits.trait_set(**_protect_collapses(hastraits)) - """ - collapsed = _identify_collapses(hastraits) - return _uncollapse(hastraits.trait_get(), collapsed) + if result.outputs is None: + logger.warning("Storing result file without outputs") + savepkl(resultsfile, result) + return + try: + output_names = result.outputs.copyable_trait_names() + except AttributeError: + logger.debug("Storing non-traited results, skipping rebase of paths") + savepkl(resultsfile, result) + return + if not rebase: + savepkl(resultsfile, result) + return -def save_resultfile(result, cwd, name): - """Save a result pklz file to ``cwd``""" - resultsfile = os.path.join(cwd, 'result_%s.pklz' % name) - if result.outputs: - try: - collapsed = _identify_collapses(result.outputs) - outputs = _uncollapse(result.outputs.trait_get(), collapsed) - # Double-protect tosave so that the original, uncollapsed trait - # is saved in the pickle file. Thus, when the loading process - # collapses, the original correct value is loaded. 
- tosave = _uncollapse(outputs.copy(), collapsed) - except AttributeError: - tosave = outputs = result.outputs.dictcopy() # outputs was a bunch - for k, v in list(modify_paths(tosave, relative=True, basedir=cwd).items()): - setattr(result.outputs, k, v) - - savepkl(resultsfile, result) - logger.debug('saved results in %s', resultsfile) - - if result.outputs: - for k, v in list(outputs.items()): - setattr(result.outputs, k, v) - - -def load_resultfile(path, name): + backup_traits = {} + try: + with indirectory(cwd): + # All the magic to fix #2944 resides here: + for key in output_names: + old = getattr(result.outputs, key) + if isdefined(old): + if result.outputs.trait(key).is_trait_type(OutputMultiPath): + old = result.outputs.trait(key).handler.get_value( + result.outputs, key + ) + backup_traits[key] = old + val = rebase_path_traits(result.outputs.trait(key), old, cwd) + setattr(result.outputs, key, val) + savepkl(resultsfile, result) + finally: + # Restore resolved paths from the outputs dict no matter what + for key, val in list(backup_traits.items()): + setattr(result.outputs, key, val) + + +def load_resultfile(results_file, resolve=True): """ - Load InterfaceResult file from path - - Parameter - --------- + Load InterfaceResult file from path. - path : base_dir of node - name : name of node + Parameters + ---------- + results_file : pathlike + Path to an existing pickle (``result_<name>.pklz``) created with + ``save_resultfile``. + Raises ``FileNotFoundError`` if ``results_file`` does not exist. + resolve : bool + Determines whether relative paths will be resolved to absolute (default is ``True``). Returns ------- + result : InterfaceResult + A Nipype object containing the runtime, inputs, outputs and other interface information + such as a traceback in the case of errors. - result : InterfaceResult structure - aggregate : boolean indicating whether node should aggregate_outputs - attribute error : boolean indicating whether there was some mismatch in - versions of traits used to store result and hence node needs to - rerun """ - aggregate = True - resultsoutputfile = os.path.join(path, 'result_%s.pklz' % name) - result = None - attribute_error = False - if os.path.exists(resultsoutputfile): - pkl_file = gzip.open(resultsoutputfile, 'rb') + results_file = Path(results_file) + if not results_file.exists(): + raise FileNotFoundError(results_file) + + result = loadpkl(results_file) + if resolve and getattr(result, "outputs", None): try: - result = pickle.load(pkl_file) - except UnicodeDecodeError: - # Was this pickle created with Python 2.x? - pickle.load(pkl_file, fix_imports=True, encoding='utf-8') - logger.warning('Successfully loaded pkl in compatibility mode') - except (traits.TraitError, AttributeError, ImportError, - EOFError) as err: - if isinstance(err, (AttributeError, ImportError)): - attribute_error = True - logger.debug('attribute error: %s probably using ' - 'different trait pickled file', str(err)) - else: - logger.debug( - 'some file does not exist.
hence trait cannot be set') - else: - if result.outputs: - try: - outputs = _protect_collapses(result.outputs) - except AttributeError: - outputs = result.outputs.dictcopy() # outputs == Bunch - try: - for k, v in list(modify_paths(outputs, relative=False, - basedir=path).items()): - setattr(result.outputs, k, v) - except FileNotFoundError: - logger.debug('conversion to full path results in ' - 'non existent file') - aggregate = False - pkl_file.close() - logger.debug('Aggregate: %s', aggregate) - return result, aggregate, attribute_error + outputs = result.outputs.get() + except TypeError: # This is a Bunch + logger.debug("Outputs object of loaded result %s is a Bunch.", results_file) + return result + + logger.debug("Resolving paths in outputs loaded from results file.") + for trait_name, old in list(outputs.items()): + if isdefined(old): + if result.outputs.trait(trait_name).is_trait_type(OutputMultiPath): + old = result.outputs.trait(trait_name).handler.get_value( + result.outputs, trait_name + ) + value = resolve_path_traits( + result.outputs.trait(trait_name), old, results_file.parent + ) + setattr(result.outputs, trait_name, value) + return result def strip_temp(files, wd): @@ -374,13 +318,13 @@ def strip_temp(files, wd): if isinstance(f, list): out.append(strip_temp(f, wd)) else: - out.append(f.replace(os.path.join(wd, '_tempinput'), wd)) + out.append(f.replace(os.path.join(wd, "_tempinput"), wd)) return out def _write_inputs(node): lines = [] - nodename = node.fullname.replace('.', '_') + nodename = node.fullname.replace(".", "_") for key, _ in list(node.inputs.items()): val = getattr(node.inputs, key) if isdefined(val): @@ -388,68 +332,65 @@ def _write_inputs(node): try: func = create_function_from_source(val) except RuntimeError: - lines.append("%s.inputs.%s = '%s'" % (nodename, key, val)) + lines.append(f"{nodename}.inputs.{key} = '{val}'") else: funcname = [ - name for name in func.__globals__ - if name != '__builtins__' + name for name in func.__globals__ if name != "__builtins__" ][0] lines.append(pickle.loads(val)) if funcname == nodename: - lines[-1] = lines[-1].replace(' %s(' % funcname, - ' %s_1(' % funcname) - funcname = '%s_1' % funcname - lines.append( - 'from nipype.utils.functions import getsource') - lines.append("%s.inputs.%s = getsource(%s)" % - (nodename, key, funcname)) + lines[-1] = lines[-1].replace( + " %s(" % funcname, " %s_1(" % funcname + ) + funcname = "%s_1" % funcname + lines.append("from nipype.utils.functions import getsource") + lines.append(f"{nodename}.inputs.{key} = getsource({funcname})") else: - lines.append('%s.inputs.%s = %s' % (nodename, key, val)) + lines.append(f"{nodename}.inputs.{key} = {val}") return lines -def format_node(node, format='python', include_config=False): +def format_node(node, format="python", include_config=False): """Format a node in a given output syntax.""" from .nodes import MapNode + lines = [] - name = node.fullname.replace('.', '_') - if format == 'python': + name = node.fullname.replace(".", "_") + if format == "python": klass = node.interface - importline = 'from %s import %s' % (klass.__module__, - klass.__class__.__name__) - comment = '# Node: %s' % node.fullname + importline = f"from {klass.__module__} import {klass.__class__.__name__}" + comment = "# Node: %s" % node.fullname spec = signature(node.interface.__init__) - args = [p.name for p in list(spec.parameters.values())] - args = args[1:] - if args: - filled_args = [] - for arg in args: - if hasattr(node.interface, '_%s' % arg): - 
filled_args.append('%s=%s' % - (arg, - getattr(node.interface, '_%s' % arg))) - args = ', '.join(filled_args) - else: - args = '' + filled_args = [] + for param in spec.parameters.values(): + val = getattr(node.interface, f"_{param.name}", None) + if val is not None: + filled_args.append(f"{param.name}={val!r}") + args = ", ".join(filled_args) klass_name = klass.__class__.__name__ if isinstance(node, MapNode): - nodedef = '%s = MapNode(%s(%s), iterfield=%s, name="%s")' \ - % (name, klass_name, args, node.iterfield, name) + nodedef = '{} = MapNode({}({}), iterfield={}, name="{}")'.format( + name, + klass_name, + args, + node.iterfield, + name, + ) else: - nodedef = '%s = Node(%s(%s), name="%s")' \ - % (name, klass_name, args, name) + nodedef = f'{name} = Node({klass_name}({args}), name="{name}")' lines = [importline, comment, nodedef] if include_config: lines = [ - importline, "from future import standard_library", - "standard_library.install_aliases()", - "from collections import OrderedDict", comment, nodedef + importline, + "from collections import OrderedDict", + comment, + nodedef, ] - lines.append('%s.config = %s' % (name, node.config)) + lines.append(f"{name}.config = {node.config}") if node.iterables is not None: - lines.append('%s.iterables = %s' % (name, node.iterables)) + lines.append(f"{name}.iterables = {node.iterables}") lines.extend(_write_inputs(node)) return lines @@ -474,32 +415,30 @@ def modify_paths(object, relative=True, basedir=None): out = {} for key, val in sorted(object.items()): if isdefined(val): - out[key] = modify_paths( - val, relative=relative, basedir=basedir) + out[key] = modify_paths(val, relative=relative, basedir=basedir) elif isinstance(object, (list, tuple)): out = [] for val in object: if isdefined(val): - out.append( - modify_paths(val, relative=relative, basedir=basedir)) + out.append(modify_paths(val, relative=relative, basedir=basedir)) if isinstance(object, tuple): out = tuple(out) else: if isdefined(object): if isinstance(object, (str, bytes)) and os.path.isfile(object): if relative: - if config.getboolean('execution', 'use_relative_paths'): + if config.getboolean("execution", "use_relative_paths"): out = relpath(object, start=basedir) else: out = object else: out = os.path.abspath(os.path.join(basedir, object)) if not os.path.exists(out): - raise IOError('File %s not found' % out) + raise OSError("File %s not found" % out) else: out = object else: - raise TypeError("Object {} is undefined".format(object)) + raise TypeError(f"Object {object} is undefined") return out @@ -511,20 +450,20 @@ def get_print_name(node, simple_form=True): """ name = node.fullname - if hasattr(node, '_interface'): - pkglist = node.interface.__class__.__module__.split('.') + if hasattr(node, "_interface"): + pkglist = node.interface.__class__.__module__.split(".") interface = node.interface.__class__.__name__ - destclass = '' + destclass = "" if len(pkglist) > 2: - destclass = '.%s' % pkglist[2] + destclass = ".%s" % pkglist[2] if simple_form: - name = node.fullname + destclass + name = f"{node.fullname}{destclass}" else: - name = '.'.join([node.fullname, interface]) + destclass + name = f"{node.fullname}.{interface}{destclass}" if simple_form: - parts = name.split('.') + parts = name.split(".") if len(parts) > 2: - return ' ('.join(parts[1:]) + ')' + return " (".join(parts[1:]) + ")" elif len(parts) == 2: return parts[1] return name @@ -535,17 +474,18 @@ def _create_dot_graph(graph, show_connectinfo=False, simple_form=True): Ensures that edge info is pickleable. 
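To make the simple_form logic of get_print_name above concrete, here is a condensed restatement with an illustrative node name; this is a sketch of the documented behavior, not the engine's actual call path:

def print_name(fullname, module, interface, simple_form=True):
    # Derive a short package tag from the interface's module path and
    # render either the "node (pkg)" short form or the qualified form.
    pkglist = module.split(".")
    destclass = "." + pkglist[2] if len(pkglist) > 2 else ""
    name = fullname + destclass if simple_form else f"{fullname}.{interface}{destclass}"
    if simple_form:
        parts = name.split(".")
        if len(parts) > 2:
            return " (".join(parts[1:]) + ")"
        if len(parts) == 2:
            return parts[1]
    return name

assert print_name("wf.bet", "nipype.interfaces.fsl.preprocess", "BET") == "bet (fsl)"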
""" - logger.debug('creating dot graph') + logger.debug("creating dot graph") import networkx as nx + pklgraph = nx.DiGraph() for edge in graph.edges(): data = graph.get_edge_data(*edge) srcname = get_print_name(edge[0], simple_form=simple_form) destname = get_print_name(edge[1], simple_form=simple_form) if show_connectinfo: - pklgraph.add_edge(srcname, destname, l=str(data['connect'])) + pklgraph.add_edge(f'"{srcname}"', f'"{destname}"', l=str(data["connect"])) else: - pklgraph.add_edge(srcname, destname) + pklgraph.add_edge(f'"{srcname}"', f'"{destname}"') return pklgraph @@ -564,67 +504,83 @@ def _write_detailed_dot(graph, dotfilename): } """ import networkx as nx - text = ['digraph structs {', 'node [shape=record];'] + + text = ["digraph structs {", "node [shape=record];"] # write nodes edges = [] for n in nx.topological_sort(graph): nodename = n.itername - inports = [] + in_ports = [] for u, v, d in graph.in_edges(nbunch=n, data=True): - for cd in d['connect']: + for cd in d["connect"]: if isinstance(cd[0], (str, bytes)): outport = cd[0] else: outport = cd[0][0] - inport = cd[1] - ipstrip = 'in%s' % _replacefunk(inport) - opstrip = 'out%s' % _replacefunk(outport) + in_port = cd[1] + ipstrip = "in%s" % _replacefunk(in_port) + opstrip = "out%s" % _replacefunk(outport) edges.append( - '%s:%s:e -> %s:%s:w;' % (u.itername.replace('.', ''), opstrip, - v.itername.replace('.', ''), ipstrip)) - if inport not in inports: - inports.append(inport) - inputstr = ['{IN'] + [ - '| %s' % (_replacefunk(ip), ip) for ip in sorted(inports) - ] + ['}'] + "%s:%s:e -> %s:%s:w;" + % ( + u.itername.replace(".", ""), + opstrip, + v.itername.replace(".", ""), + ipstrip, + ) + ) + if in_port not in in_ports: + in_ports.append(in_port) + inputstr = ( + ["{IN"] + + [f"| {ip}" for ip in sorted(in_ports)] + + ["}"] + ) outports = [] for u, v, d in graph.out_edges(nbunch=n, data=True): - for cd in d['connect']: + for cd in d["connect"]: if isinstance(cd[0], (str, bytes)): outport = cd[0] else: outport = cd[0][0] if outport not in outports: outports.append(outport) - outputstr = ['{OUT'] + [ - '| %s' % (_replacefunk(oport), oport) - for oport in sorted(outports) - ] + ['}'] - srcpackage = '' - if hasattr(n, '_interface'): - pkglist = n.interface.__class__.__module__.split('.') + outputstr = ( + ["{OUT"] + + [f"| {oport}" for oport in sorted(outports)] + + ["}"] + ) + srcpackage = "" + if hasattr(n, "_interface"): + pkglist = n.interface.__class__.__module__.split(".") if len(pkglist) > 2: srcpackage = pkglist[2] - srchierarchy = '.'.join(nodename.split('.')[1:-1]) - nodenamestr = '{ %s | %s | %s }' % (nodename.split('.')[-1], - srcpackage, srchierarchy) + srchierarchy = ".".join(nodename.split(".")[1:-1]) + nodenamestr = "{{ {} | {} | {} }}".format( + nodename.split(".")[-1], + srcpackage, + srchierarchy, + ) text += [ - '%s [label="%s|%s|%s"];' % - (nodename.replace('.', ''), ''.join(inputstr), nodenamestr, - ''.join(outputstr)) + '%s [label="%s|%s|%s"];' + % ( + nodename.replace(".", ""), + "".join(inputstr), + nodenamestr, + "".join(outputstr), + ) ] # write edges for edge in sorted(edges): text.append(edge) - text.append('}') - with open(dotfilename, 'wt') as filep: - filep.write('\n'.join(text)) + text.append("}") + with open(dotfilename, "w") as filep: + filep.write("\n".join(text)) return text def _replacefunk(x): - return x.replace('_', '').replace('.', '').replace('@', '').replace( - '-', '') + return x.replace("_", "").replace(".", "").replace("@", "").replace("-", "") # Graph manipulations for iterable 
expansion @@ -635,10 +591,10 @@ def _get_valid_pathstr(pathstr): Replaces: ',' -> '.' """ if not isinstance(pathstr, (str, bytes)): - pathstr = to_str(pathstr) - pathstr = pathstr.replace(os.sep, '..') - pathstr = re.sub(r'''[][ (){}?:<>#!|"';]''', '', pathstr) - pathstr = pathstr.replace(',', '.') + pathstr = str(pathstr) + pathstr = pathstr.replace(os.sep, "..") + pathstr = re.sub(r"""[][ (){}?:<>#!|"';]""", "", pathstr) + pathstr = pathstr.replace(",", ".") return pathstr @@ -689,8 +645,7 @@ def walk(children, level=0, path=None, usename=True): else: path[level] = child # Recurse into the next level - for child_paths in walk(tail, level + 1, path, usename): - yield child_paths + yield from walk(tail, level + 1, path, usename) def synchronize_iterables(iterables): @@ -711,8 +666,9 @@ def synchronize_iterables(iterables): True """ out_list = [] - iterable_items = [(field, iter(fvals())) - for field, fvals in sorted(iterables.items())] + iterable_items = [ + (field, iter(fvals())) for field, fvals in sorted(iterables.items()) + ] while True: cur_dict = {} for field, iter_values in iterable_items: @@ -733,17 +689,16 @@ def evaluate_connect_function(function_source, args, first_arg): try: output_value = func(first_arg, *list(args)) except NameError as e: - if e.args[0].startswith("global name") and \ - e.args[0].endswith("is not defined"): - e.args = (e.args[0], - ("Due to engine constraints all imports have to be done " - "inside each function definition")) - raise e + raise NameError( + f"{e}: Due to engine constraints all imports have to be done inside each " + "function definition." + ) return output_value def get_levels(G): import networkx as nx + levels = {} for n in nx.topological_sort(G): levels[n] = 0 @@ -752,13 +707,9 @@ def get_levels(G): return levels -def _merge_graphs(supergraph, - nodes, - subgraph, - nodeid, - iterables, - prefix, - synchronize=False): +def _merge_graphs( + supergraph, nodes, subgraph, nodeid, iterables, prefix, synchronize=False +): """Merges two graphs that share a subset of nodes. If the subgraph needs to be replicated for multiple iterables, the @@ -790,12 +741,14 @@ def _merge_graphs(supergraph, # nodes of the supergraph. supernodes = supergraph.nodes() ids = [n._hierarchy + n._id for n in supernodes] - if len(np.unique(ids)) != len(ids): + if len(set(ids)) != len(ids): # This should trap the problem of miswiring when multiple iterables are # used at the same level. The use of the template below for naming # updates to nodes is the general solution. - raise Exception(("Execution graph does not have a unique set of node " - "names. Please rerun the workflow")) + raise Exception( + "Execution graph does not have a unique set of node " + "names.
Please rerun the workflow" + ) edgeinfo = {} for n in list(subgraph.nodes()): nidx = ids.index(n._hierarchy + n._id) @@ -805,7 +758,8 @@ def _merge_graphs(supergraph, if n._hierarchy + n._id not in list(edgeinfo.keys()): edgeinfo[n._hierarchy + n._id] = [] edgeinfo[n._hierarchy + n._id].append( - (edge[0], supergraph.get_edge_data(*edge))) + (edge[0], supergraph.get_edge_data(*edge)) + ) supergraph.remove_nodes_from(nodes) # Add copies of the subgraph depending on the number of iterables iterable_params = expand_iterables(iterables, synchronize) @@ -814,20 +768,21 @@ def _merge_graphs(supergraph, return supergraph # Make an iterable subgraph node id template count = len(iterable_params) - template = '.%s%%0%dd' % (prefix, np.ceil(np.log10(count))) + template = ".%s%%0%dd" % (prefix, np.ceil(np.log10(count))) # Copy the iterable subgraphs for i, params in enumerate(iterable_params): Gc = deepcopy(subgraph) ids = [n._hierarchy + n._id for n in Gc.nodes()] nodeidx = ids.index(nodeid) rootnode = list(Gc.nodes())[nodeidx] - paramstr = '' + paramstr = "" for key, val in sorted(params.items()): - paramstr = '{}_{}_{}'.format(paramstr, _get_valid_pathstr(key), - _get_valid_pathstr(val)) + paramstr = "{}_{}_{}".format( + paramstr, _get_valid_pathstr(key), _get_valid_pathstr(val) + ) rootnode.set_input(key, val) - logger.debug('Parameterization: paramstr=%s', paramstr) + logger.debug("Parameterization: paramstr=%s", paramstr) levels = get_levels(Gc) for n in Gc.nodes(): # update parameterization of the node to reflect the location of @@ -856,14 +811,13 @@ def _merge_graphs(supergraph, def _connect_nodes(graph, srcnode, destnode, connection_info): - """Add a connection between two nodes - """ + """Add a connection between two nodes""" data = graph.get_edge_data(srcnode, destnode, default=None) if not data: - data = {'connect': connection_info} + data = {"connect": connection_info} graph.add_edges_from([(srcnode, destnode, data)]) else: - data['connect'].extend(connection_info) + data["connect"].extend(connection_info) def _remove_nonjoin_identity_nodes(graph, keep_iterables=False): @@ -875,7 +829,7 @@ def _remove_nonjoin_identity_nodes(graph, keep_iterables=False): # if keep_iterables is False, then include the iterable # and join nodes in the nodes to delete for node in _identity_nodes(graph, not keep_iterables): - if not hasattr(node, 'joinsource'): + if not hasattr(node, "joinsource"): _remove_identity_node(graph, node) return graph @@ -888,21 +842,21 @@ def _identity_nodes(graph, include_iterables): to True. 
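For reference, the copy-index template built in _merge_graphs above yields zero-padded suffixes for the replicated node ids; a small illustration with made-up values:

import numpy as np

prefix, count = "a", 30  # e.g. 30 expansions under iterable prefix "a"
template = ".%s%%0%dd" % (prefix, np.ceil(np.log10(count)))
assert template == ".a%02d"
assert template % 0 == ".a00"  # suffix appended to each replicated node id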
""" import networkx as nx + return [ - node for node in nx.topological_sort(graph) - if isinstance(node.interface, IdentityInterface) and ( - include_iterables or getattr(node, 'iterables') is None) + node + for node in nx.topological_sort(graph) + if isinstance(node.interface, IdentityInterface) + and (include_iterables or node.iterables is None) ] def _remove_identity_node(graph, node): - """Remove identity nodes from an execution graph - """ + """Remove identity nodes from an execution graph""" portinputs, portoutputs = _node_ports(graph, node) for field, connections in list(portoutputs.items()): if portinputs: - _propagate_internal_output(graph, node, field, connections, - portinputs) + _propagate_internal_output(graph, node, field, connections, portinputs) else: _propagate_root_output(graph, node, field, connections) graph.remove_nodes_from([node]) @@ -922,65 +876,62 @@ def _node_ports(graph, node): portinputs = {} portoutputs = {} for u, _, d in graph.in_edges(node, data=True): - for src, dest in d['connect']: + for src, dest in d["connect"]: portinputs[dest] = (u, src) for _, v, d in graph.out_edges(node, data=True): - for src, dest in d['connect']: + for src, dest in d["connect"]: if isinstance(src, tuple): - srcport = src[0] + src_port = src[0] else: - srcport = src - if srcport not in portoutputs: - portoutputs[srcport] = [] - portoutputs[srcport].append((v, dest, src)) + src_port = src + if src_port not in portoutputs: + portoutputs[src_port] = [] + portoutputs[src_port].append((v, dest, src)) return (portinputs, portoutputs) def _propagate_root_output(graph, node, field, connections): """Propagates the given graph root node output port field connections to the out-edge destination nodes.""" - for destnode, inport, src in connections: + for destnode, in_port, src in connections: value = getattr(node.inputs, field) if isinstance(src, tuple): value = evaluate_connect_function(src[1], src[2], value) - destnode.set_input(inport, value) + destnode.set_input(in_port, value) def _propagate_internal_output(graph, node, field, connections, portinputs): """Propagates the given graph internal node output port field connections to the out-edge source node and in-edge destination nodes.""" - for destnode, inport, src in connections: + for destnode, in_port, src in connections: if field in portinputs: - srcnode, srcport = portinputs[field] - if isinstance(srcport, tuple) and isinstance(src, tuple): - src_func = srcport[1].split("\\n")[0] + srcnode, src_port = portinputs[field] + if isinstance(src_port, tuple) and isinstance(src, tuple): + src_func = src_port[1].split("\\n")[0] dst_func = src[1].split("\\n")[0] - raise ValueError("Does not support two inline functions " - "in series ('{}' and '{}'), found when " - "connecting {} to {}. Please use a Function " - "node.".format(src_func, dst_func, srcnode, - destnode)) - - connect = graph.get_edge_data( - srcnode, destnode, default={ - 'connect': [] - }) + raise ValueError( + "Does not support two inline functions " + "in series ('{}' and '{}'), found when " + "connecting {} to {}. 
Please use a Function " + "node.".format(src_func, dst_func, srcnode, destnode) + ) + + connect = graph.get_edge_data(srcnode, destnode, default={"connect": []}) if isinstance(src, tuple): - connect['connect'].append(((srcport, src[1], src[2]), inport)) + connect["connect"].append(((src_port, src[1], src[2]), in_port)) else: - connect = {'connect': [(srcport, inport)]} + connect = {"connect": [(src_port, in_port)]} old_connect = graph.get_edge_data( - srcnode, destnode, default={ - 'connect': [] - }) - old_connect['connect'] += connect['connect'] + srcnode, destnode, default={"connect": []} + ) + old_connect["connect"] += connect["connect"] graph.add_edges_from([(srcnode, destnode, old_connect)]) else: value = getattr(node.inputs, field) if isinstance(src, tuple): value = evaluate_connect_function(src[1], src[2], value) - destnode.set_input(inport, value) + destnode.set_input(in_port, value) def generate_expanded_graph(graph_in): @@ -992,6 +943,7 @@ def generate_expanded_graph(graph_in): parameterized as (a=1,b=3), (a=1,b=4), (a=2,b=3) and (a=2,b=4). """ import networkx as nx + try: dfs_preorder = nx.dfs_preorder except AttributeError: @@ -1003,7 +955,7 @@ def generate_expanded_graph(graph_in): for node in graph_in.nodes(): if node.iterables: _standardize_iterables(node) - allprefixes = list('abcdefghijklmnopqrstuvwxyz') + allprefixes = list("abcdefghijklmnopqrstuvwxyz") # the iterable nodes inodes = _iterable_nodes(graph_in) @@ -1016,8 +968,10 @@ def generate_expanded_graph(graph_in): # the join successor nodes of the current iterable node jnodes = [ - node for node in graph_in.nodes() - if hasattr(node, 'joinsource') and inode.name == node.joinsource + node + for node in graph_in.nodes() + if hasattr(node, "joinsource") + and inode.name == node.joinsource and nx.has_path(graph_in, inode, node) ] @@ -1034,8 +988,7 @@ def generate_expanded_graph(graph_in): for src, dest in edges2remove: graph_in.remove_edge(src, dest) - logger.debug("Excised the %s -> %s join node in-edge.", src, - dest) + logger.debug("Excised the %s -> %s join node in-edge.", src, dest) if inode.itersource: # the itersource is a (node name, fields) tuple @@ -1045,23 +998,23 @@ def generate_expanded_graph(graph_in): src_fields = [src_fields] # find the unique iterable source node in the graph try: - iter_src = next((node for node in graph_in.nodes() - if node.name == src_name - and nx.has_path(graph_in, node, inode))) + iter_src = next( + node + for node in graph_in.nodes() + if node.name == src_name and nx.has_path(graph_in, node, inode) + ) except StopIteration: - raise ValueError("The node %s itersource %s was not found" - " among the iterable predecessor nodes" % - (inode, src_name)) - logger.debug("The node %s has iterable source node %s", inode, - iter_src) + raise ValueError( + "The node %s itersource %s was not found" + " among the iterable predecessor nodes" % (inode, src_name) + ) + logger.debug("The node %s has iterable source node %s", inode, iter_src) # look up the iterables for this particular itersource descendant # using the iterable source ancestor values as a key iterables = {} # the source node iterables values - src_values = [ - getattr(iter_src.inputs, field) for field in src_fields - ] - # if there is one source field, then the key is the the source value, + src_values = [getattr(iter_src.inputs, field) for field in src_fields] + # if there is one source field, then the key is the source value, # otherwise the key is the tuple of source values if len(src_values) == 1: key = src_values[0] @@ -1070,9 
+1023,9 @@ def generate_expanded_graph(graph_in): # The itersource iterables is a {field: lookup} dictionary, where the # lookup is a {source key: iteration list} dictionary. Look up the # current iterable value using the predecessor itersource input values. - iter_dict = dict([(field, lookup[key]) - for field, lookup in inode.iterables - if key in lookup]) + iter_dict = { + field: lookup[key] for field, lookup in inode.iterables if key in lookup + } # convert the iterables to the standard {field: function} format @@ -1080,37 +1033,39 @@ def make_field_func(*pair): return pair[0], lambda: pair[1] iterables = dict( - [make_field_func(*pair) for pair in list(iter_dict.items())]) + [make_field_func(*pair) for pair in list(iter_dict.items())] + ) else: iterables = inode.iterables.copy() inode.iterables = None - logger.debug('node: %s iterables: %s', inode, iterables) + logger.debug("node: %s iterables: %s", inode, iterables) # collect the subnodes to expand - subnodes = [s for s in dfs_preorder(graph_in, inode)] - prior_prefix = [re.findall(r'\.(.)I', s._id) for s in subnodes if s._id] + subnodes = list(dfs_preorder(graph_in, inode)) + prior_prefix = [re.findall(r"\.(.)I", s._id) for s in subnodes if s._id] prior_prefix = sorted([l for item in prior_prefix for l in item]) if not prior_prefix: - iterable_prefix = 'a' + iterable_prefix = "a" else: - if prior_prefix[-1] == 'z': - raise ValueError('Too many iterables in the workflow') - iterable_prefix =\ - allprefixes[allprefixes.index(prior_prefix[-1]) + 1] - logger.debug(('subnodes:', subnodes)) + if prior_prefix[-1] == "z": + raise ValueError("Too many iterables in the workflow") + iterable_prefix = allprefixes[allprefixes.index(prior_prefix[-1]) + 1] + logger.debug(("subnodes:", subnodes)) # append a suffix to the iterable node id - inode._id += '.%sI' % iterable_prefix + inode._id += ".%sI" % iterable_prefix # merge the iterated subgraphs - # dj: the behaviour of .copy changes in version 2 - if LooseVersion(nx.__version__) < LooseVersion('2'): - subgraph = graph_in.subgraph(subnodes) - else: - subgraph = graph_in.subgraph(subnodes).copy() - graph_in = _merge_graphs(graph_in, subnodes, subgraph, - inode._hierarchy + inode._id, iterables, - iterable_prefix, inode.synchronize) + subgraph = graph_in.subgraph(subnodes).copy() + graph_in = _merge_graphs( + graph_in, + subnodes, + subgraph, + inode._hierarchy + inode._id, + iterables, + iterable_prefix, + inode.synchronize, + ) # reconnect the join nodes for jnode in jnodes: @@ -1119,11 +1074,12 @@ def make_field_func(*pair): old_edge_dict = jedge_dict[jnode] # the edge source node replicates expansions = defaultdict(list) - for node in graph_in.nodes(): + for node in graph_in: for src_id in list(old_edge_dict.keys()): # Drop the original JoinNodes; only concerned with # generated Nodes - if hasattr(node, 'joinfield') and node.itername == src_id: + itername = node.itername + if hasattr(node, "joinfield") and itername == src_id: continue # Patterns: # - src_id : Non-iterable node @@ -1132,12 +1088,17 @@ def make_field_func(*pair): # - src_id.[a-z]I.[a-z]\d+ : # Non-IdentityInterface w/ iterables # - src_idJ\d+ : JoinNode(IdentityInterface) - if re.match(src_id + r'((\.[a-z](I\.[a-z])?|J)\d+)?$', - node.itername): - expansions[src_id].append(node) + if itername.startswith(src_id): + suffix = itername[len(src_id) :] + if re.fullmatch(r"((\.[a-z](I\.[a-z])?|J)\d+)?", suffix): + expansions[src_id].append(node) for in_id, in_nodes in list(expansions.items()): - logger.debug("The join node %s input %s was 
expanded" - " to %d nodes.", jnode, in_id, len(in_nodes)) + logger.debug( + "The join node %s input %s was expanded to %d nodes.", + jnode, + in_id, + len(in_nodes), + ) # preserve the node iteration order by sorting on the node id for in_nodes in list(expansions.values()): in_nodes.sort(key=lambda node: node._id) @@ -1146,9 +1107,7 @@ def make_field_func(*pair): iter_cnt = count_iterables(iterables, inode.synchronize) # make new join node fields to connect to each replicated # join in-edge source node. - slot_dicts = [ - jnode._add_join_item_fields() for _ in range(iter_cnt) - ] + slot_dicts = [jnode._add_join_item_fields() for _ in range(iter_cnt)] # for each join in-edge, connect every expanded source node # which matches on the in-edge source name to the destination # join node. Qualify each edge connect join field name by @@ -1164,11 +1123,10 @@ def make_field_func(*pair): olddata = old_edge_dict[old_id] newdata = deepcopy(olddata) # the (source, destination) field tuples - connects = newdata['connect'] + connects = newdata["connect"] # the join fields connected to the source join_fields = [ - field for _, field in connects - if field in jnode.joinfield + field for _, field in connects if field in jnode.joinfield ] # the {field: slot fields} maps assigned to the input # node, e.g. {'image': 'imageJ3', 'mask': 'maskJ3'} @@ -1183,10 +1141,18 @@ def make_field_func(*pair): connects[con_idx] = (src_field, slot_field) logger.debug( "Qualified the %s -> %s join field %s as %s.", - in_node, jnode, dest_field, slot_field) + in_node, + jnode, + dest_field, + slot_field, + ) graph_in.add_edge(in_node, jnode, **newdata) - logger.debug("Connected the join node %s subgraph to the" - " expanded join point %s", jnode, in_node) + logger.debug( + "Connected the join node %s subgraph to the" + " expanded join point %s", + jnode, + in_node, + ) # nx.write_dot(graph_in, '%s_post.dot' % node) # the remaining iterable nodes @@ -1226,6 +1192,7 @@ def _iterable_nodes(graph_in): Return the iterable nodes list """ import networkx as nx + nodes = nx.topological_sort(graph_in) inodes = [node for node in nodes if node.iterables is not None] inodes_no_src = [node for node in inodes if not node.itersource] @@ -1248,8 +1215,7 @@ def _standardize_iterables(node): if node.synchronize: if len(iterables) == 2: first, last = iterables - if all((isinstance(item, (str, bytes)) and item in fields - for item in first)): + if all(isinstance(item, (str, bytes)) and item in fields for item in first): iterables = _transpose_iterables(first, last) # Convert a tuple to a list @@ -1266,9 +1232,7 @@ def _standardize_iterables(node): def make_field_func(*pair): return pair[0], lambda: pair[1] - iter_items = [ - make_field_func(*field_value1) for field_value1 in iterables - ] + iter_items = [make_field_func(*field_value1) for field_value1 in iterables] iterables = dict(iter_items) node.iterables = iterables @@ -1285,20 +1249,23 @@ def _validate_iterables(node, iterables, fields): if isinstance(iterables, dict): iterables = list(iterables.items()) elif not isinstance(iterables, tuple) and not isinstance(iterables, list): - raise ValueError("The %s iterables type is not a list or a dictionary:" - " %s" % (node.name, iterables.__class__)) + raise ValueError( + "The %s iterables type is not a list or a dictionary:" + " %s" % (node.name, iterables.__class__) + ) for item in iterables: try: if len(item) != 2: - raise ValueError("The %s iterables is not a [(field, values)]" - " list" % node.name) + raise ValueError( + "The %s iterables is 
not a [(field, values)] list" % node.name + ) except TypeError as e: - raise TypeError("A %s iterables member is not iterable: %s" % - (node.name, e)) + raise TypeError(f"A {node.name} iterables member is not iterable: {e}") field, _ = item if field not in fields: - raise ValueError("The %s iterables field is unrecognized: %s" % - (node.name, field)) + raise ValueError( + f"The {node.name} iterables field is unrecognized: {field}" + ) def _transpose_iterables(fields, values): @@ -1312,7 +1279,7 @@ def _transpose_iterables(fields, values): Otherwise, the result is a list of (field: value list) pairs. """ if isinstance(values, dict): - transposed = dict([(field, defaultdict(list)) for field in fields]) + transposed = {field: defaultdict(list) for field in fields} for key, tuples in list(values.items()): for kvals in tuples: for idx, val in enumerate(kvals): @@ -1321,19 +1288,27 @@ def _transpose_iterables(fields, values): return list(transposed.items()) return list( - zip(fields, [[v for v in list(transpose) if v is not None] - for transpose in zip(*values)])) - - -def export_graph(graph_in, - base_dir=None, - show=False, - use_execgraph=False, - show_connectinfo=False, - dotfilename='graph.dot', - format='png', - simple_form=True): - """ Displays the graph layout of the pipeline + zip( + fields, + [ + [v for v in list(transpose) if v is not None] + for transpose in zip(*values) + ], + ) + ) + + +def export_graph( + graph_in, + base_dir=None, + show=False, + use_execgraph=False, + show_connectinfo=False, + dotfilename="graph.dot", + format="png", + simple_form=True, +): + """Displays the graph layout of the pipeline This function requires that pygraphviz and matplotlib are available on the system. @@ -1354,37 +1329,40 @@ def export_graph(graph_in, makes the graph rather cluttered. 
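# --- Editorial sketch (not part of the patch): the make_field_func(*pair)
# helper kept above exists to dodge Python's late-binding closures. A bare
# lambda defined in a loop or comprehension closes over the variable, not its
# current value; routing it through a function call freezes each value.
# Field names below are invented for illustration.
def make_field_func(*pair):
    return pair[0], lambda: pair[1]

pairs = [("subject", 1), ("session", 2)]

# naive lambdas all share the comprehension's loop variable...
naive = {field: (lambda: value) for field, value in pairs}
assert naive["subject"]() == 2  # ...so every call sees the last value

# the helper gives each lambda its own 'pair' binding
frozen = dict(make_field_func(*p) for p in pairs)
assert frozen["subject"]() == 1 and frozen["session"]() == 2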
default [False] """ import networkx as nx + graph = deepcopy(graph_in) if use_execgraph: graph = generate_expanded_graph(graph) - logger.debug('using execgraph') + logger.debug("using execgraph") else: - logger.debug('using input graph') + logger.debug("using input graph") if base_dir is None: base_dir = os.getcwd() - makedirs(base_dir, exist_ok=True) + os.makedirs(base_dir, exist_ok=True) out_dot = fname_presuffix( - dotfilename, suffix='_detailed.dot', use_ext=False, newpath=base_dir) + dotfilename, suffix="_detailed.dot", use_ext=False, newpath=base_dir + ) _write_detailed_dot(graph, out_dot) # Convert .dot if format != 'dot' outfname, res = _run_dot(out_dot, format_ext=format) if res is not None and res.runtime.returncode: - logger.warning('dot2png: %s', res.runtime.stderr) + logger.warning("dot2png: %s", res.runtime.stderr) pklgraph = _create_dot_graph(graph, show_connectinfo, simple_form) simple_dot = fname_presuffix( - dotfilename, suffix='.dot', use_ext=False, newpath=base_dir) + dotfilename, suffix=".dot", use_ext=False, newpath=base_dir + ) nx.drawing.nx_pydot.write_dot(pklgraph, simple_dot) # Convert .dot if format != 'dot' simplefname, res = _run_dot(simple_dot, format_ext=format) if res is not None and res.runtime.returncode: - logger.warning('dot2png: %s', res.runtime.stderr) + logger.warning("dot2png: %s", res.runtime.stderr) if show: - pos = nx.graphviz_layout(pklgraph, prog='dot') + pos = nx.graphviz_layout(pklgraph, prog="dot") nx.draw(pklgraph, pos) if show_connectinfo: nx.draw_networkx_edge_labels(pklgraph, pos) @@ -1392,27 +1370,26 @@ def export_graph(graph_in, return simplefname if simple_form else outfname -def format_dot(dotfilename, format='png'): +def format_dot(dotfilename, format="png"): """Dump a directed graph (Linux only; install via `brew` on OSX)""" try: formatted_dot, _ = _run_dot(dotfilename, format_ext=format) - except IOError as ioe: + except OSError as ioe: if "could not be found" in str(ioe): - raise IOError("Cannot draw directed graph; executable 'dot' is unavailable") + raise OSError("Cannot draw directed graph; executable 'dot' is unavailable") else: raise ioe return formatted_dot def _run_dot(dotfilename, format_ext): - if format_ext == 'dot': + if format_ext == "dot": return dotfilename, None - dot_base = os.path.splitext(dotfilename)[0] - formatted_dot = '{}.{}'.format(dot_base, format_ext) - cmd = 'dot -T{} -o"{}" "{}"'.format(format_ext, formatted_dot, dotfilename) - res = CommandLine(cmd, terminal_output='allatonce', - resource_monitor=False).run() + dot_base = os.path.splitext(dotfilename)[0] + formatted_dot = f"{dot_base}.{format_ext}" + cmd = f'dot -T{format_ext} -o"{formatted_dot}" "{dotfilename}"' + res = CommandLine(cmd, terminal_output="allatonce", resource_monitor=False).run() return formatted_dot, res @@ -1427,8 +1404,7 @@ def get_all_files(infile): def walk_outputs(object): - """Extract every file and directory from a python structure - """ + """Extract every file and directory from a python structure""" out = [] if isinstance(object, dict): for _, val in sorted(object.items()): @@ -1441,9 +1417,9 @@ def walk_outputs(object): else: if isdefined(object) and isinstance(object, (str, bytes)): if os.path.islink(object) or os.path.isfile(object): - out = [(filename, 'f') for filename in get_all_files(object)] + out = [(filename, "f") for filename in get_all_files(object)] elif os.path.isdir(object): - out = [(object, 'd')] + out = [(object, "d")] return out @@ -1453,69 +1429,67 @@ def walk_files(cwd): yield os.path.join(path, f) -def 
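# --- Editorial sketch (not part of the patch): what the reformatted _run_dot
# above assembles, traced with plain string ops. Paths are hypothetical; only
# the f-string command construction is being illustrated.
import os

dotfilename = "/tmp/graph_detailed.dot"
format_ext = "png"
dot_base = os.path.splitext(dotfilename)[0]
formatted_dot = f"{dot_base}.{format_ext}"
cmd = f'dot -T{format_ext} -o"{formatted_dot}" "{dotfilename}"'
assert cmd == 'dot -Tpng -o"/tmp/graph_detailed.png" "/tmp/graph_detailed.dot"'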
clean_working_directory(outputs, - cwd, - inputs, - needed_outputs, - config, - files2keep=None, - dirs2keep=None): - """Removes all files not needed for further analysis from the directory - """ +def clean_working_directory( + outputs, cwd, inputs, needed_outputs, config, files2keep=None, dirs2keep=None +): + """Removes all files not needed for further analysis from the directory""" if not outputs: return outputs_to_keep = list(outputs.trait_get().keys()) - if needed_outputs and \ - str2bool(config['execution']['remove_unnecessary_outputs']): + if needed_outputs and str2bool(config["execution"]["remove_unnecessary_outputs"]): outputs_to_keep = needed_outputs # build a list of needed files output_files = [] outputdict = outputs.trait_get() for output in outputs_to_keep: output_files.extend(walk_outputs(outputdict[output])) - needed_files = [path for path, type in output_files if type == 'f'] - if str2bool(config['execution']['keep_inputs']): + needed_files = [path for path, type in output_files if type == "f"] + if str2bool(config["execution"]["keep_inputs"]): input_files = [] inputdict = inputs.trait_get() input_files.extend(walk_outputs(inputdict)) - needed_files += [path for path, type in input_files if type == 'f'] + needed_files += [path for path, type in input_files if type == "f"] for extra in [ - '_0x*.json', 'provenance.*', 'pyscript*.m', 'pyjobs*.mat', - 'command.txt', 'result*.pklz', '_inputs.pklz', '_node.pklz', - '.proc-*', + "_0x*.json", + "provenance.*", + "pyscript*.m", + "pyjobs*.mat", + "command.txt", + "result*.pklz", + "_inputs.pklz", + "_node.pklz", + ".proc-*", ]: needed_files.extend(glob(os.path.join(cwd, extra))) if files2keep: needed_files.extend(ensure_list(files2keep)) - needed_dirs = [path for path, type in output_files if type == 'd'] + needed_dirs = [path for path, type in output_files if type == "d"] if dirs2keep: needed_dirs.extend(ensure_list(dirs2keep)) - for extra in ['_nipype', '_report']: + for extra in ["_nipype", "_report"]: needed_dirs.extend(glob(os.path.join(cwd, extra))) temp = [] for filename in needed_files: temp.extend(get_related_files(filename)) needed_files = temp - logger.debug('Needed files: %s', ';'.join(needed_files)) - logger.debug('Needed dirs: %s', ';'.join(needed_dirs)) - files2remove = [] - if str2bool(config['execution']['remove_unnecessary_outputs']): - for f in walk_files(cwd): - if f not in needed_files: - if not needed_dirs: - files2remove.append(f) - elif not any([f.startswith(dname) for dname in needed_dirs]): - files2remove.append(f) + logger.debug("Needed files: %s", ";".join(needed_files)) + logger.debug("Needed dirs: %s", ";".join(needed_dirs)) + if str2bool(config["execution"]["remove_unnecessary_outputs"]): + files2remove = [ + f + for f in walk_files(cwd) + if f not in needed_files and not f.startswith(tuple(needed_dirs)) + ] + elif not str2bool(config["execution"]["keep_inputs"]): + input_files = { + path for path, type in walk_outputs(inputs.trait_get()) if type == "f" + } + files2remove = [ + f for f in walk_files(cwd) if f in input_files and f not in needed_files + ] else: - if not str2bool(config['execution']['keep_inputs']): - input_files = [] - inputdict = inputs.trait_get() - input_files.extend(walk_outputs(inputdict)) - input_files = [path for path, type in input_files if type == 'f'] - for f in walk_files(cwd): - if f in input_files and f not in needed_files: - files2remove.append(f) - logger.debug('Removing files: %s', ';'.join(files2remove)) + files2remove = [] + logger.debug("Removing files: %s", 
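# --- Editorial sketch (not part of the patch): the rewritten filter above
# relies on str.startswith() accepting a tuple of prefixes: it returns True
# if any prefix matches, and False for the empty tuple, which preserves the
# old "no needed_dirs -> candidate for removal" branch. Paths are invented.
needed_dirs = ("/work/node/_nipype", "/work/node/_report")

assert "/work/node/_report/report.rst".startswith(needed_dirs)
assert not "/work/node/stray.txt".startswith(needed_dirs)
assert not "/work/node/stray.txt".startswith(())  # empty tuple never matches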
";".join(files2remove)) for f in files2remove: os.remove(f) for key in outputs.copyable_trait_names(): @@ -1569,11 +1543,10 @@ def merge_bundles(g1, g2): return g1 -def write_workflow_prov(graph, filename=None, format='all'): - """Write W3C PROV Model JSON file - """ +def write_workflow_prov(graph, filename=None, format="all"): + """Write W3C PROV Model JSON file""" if not filename: - filename = os.path.join(os.getcwd(), 'workflow_provenance') + filename = os.path.join(os.getcwd(), "workflow_provenance") ps = ProvStore() @@ -1585,16 +1558,15 @@ def write_workflow_prov(graph, filename=None, format='all'): _, hashval, _, _ = node.hash_exists() attrs = { pm.PROV["type"]: nipype_ns[classname], - pm.PROV["label"]: '_'.join((classname, node.name)), - nipype_ns['hashval']: hashval + pm.PROV["label"]: f"{classname}_{node.name}", + nipype_ns["hashval"]: hashval, } process = ps.g.activity(get_id(), None, None, attrs) if isinstance(result.runtime, list): process.add_attributes({pm.PROV["type"]: nipype_ns["MapNode"]}) # add info about sub processes for idx, runtime in enumerate(result.runtime): - subresult = InterfaceResult( - result.interface[idx], runtime, outputs={}) + subresult = InterfaceResult(result.interface[idx], runtime, outputs={}) if result.inputs: if idx < len(result.inputs): subresult.inputs = result.inputs[idx] @@ -1604,14 +1576,12 @@ def write_workflow_prov(graph, filename=None, format='all'): if isdefined(values) and idx < len(values): subresult.outputs[key] = values[idx] sub_doc = ProvStore().add_results(subresult) - sub_bundle = pm.ProvBundle( - sub_doc.get_records(), identifier=get_id()) + sub_bundle = pm.ProvBundle(sub_doc.get_records(), identifier=get_id()) ps.g.add_bundle(sub_bundle) bundle_entity = ps.g.entity( sub_bundle.identifier, - other_attributes={ - 'prov:type': pm.PROV_BUNDLE - }) + other_attributes={"prov:type": pm.PROV_BUNDLE}, + ) ps.g.wasGeneratedBy(bundle_entity, process) else: process.add_attributes({pm.PROV["type"]: nipype_ns["Node"]}) @@ -1619,14 +1589,11 @@ def write_workflow_prov(graph, filename=None, format='all'): prov_doc = result.provenance else: prov_doc = ProvStore().add_results(result) - result_bundle = pm.ProvBundle( - prov_doc.get_records(), identifier=get_id()) + result_bundle = pm.ProvBundle(prov_doc.get_records(), identifier=get_id()) ps.g.add_bundle(result_bundle) bundle_entity = ps.g.entity( - result_bundle.identifier, - other_attributes={ - 'prov:type': pm.PROV_BUNDLE - }) + result_bundle.identifier, other_attributes={"prov:type": pm.PROV_BUNDLE} + ) ps.g.wasGeneratedBy(bundle_entity, process) processes.append(process) @@ -1635,7 +1602,8 @@ def write_workflow_prov(graph, filename=None, format='all'): for idx, edgeinfo in enumerate(graph.in_edges()): ps.g.wasStartedBy( processes[list(nodes).index(edgeinfo[1])], - starter=processes[list(nodes).index(edgeinfo[0])]) + starter=processes[list(nodes).index(edgeinfo[0])], + ) # write provenance ps.write_provenance(filename, format=format) @@ -1650,46 +1618,49 @@ def write_workflow_resources(graph, filename=None, append=None): import simplejson as json # Overwrite filename if nipype config is set - filename = config.get('monitoring', 'summary_file', filename) + filename = config.get("monitoring", "summary_file", filename) # If filename still does not make sense, store in $PWD if not filename: - filename = os.path.join(os.getcwd(), 'resource_monitor.json') + filename = os.path.join(os.getcwd(), "resource_monitor.json") if append is None: - append = str2bool(config.get('monitoring', 'summary_append', 
'true')) + append = str2bool(config.get("monitoring", "summary_append", "true")) big_dict = { - 'time': [], - 'name': [], - 'interface': [], - 'rss_GiB': [], - 'vms_GiB': [], - 'cpus': [], - 'mapnode': [], - 'params': [], + "time": [], + "name": [], + "interface": [], + "rss_GiB": [], + "vms_GiB": [], + "cpus": [], + "mapnode": [], + "params": [], } # If file exists, just append new profile information # If we append different runs, then we will see different # "bursts" of timestamps corresponding to those executions. if append and os.path.isfile(filename): - with open(filename, 'r' if PY3 else 'rb') as rsf: + with open(filename) as rsf: big_dict = json.load(rsf) for _, node in enumerate(graph.nodes()): nodename = node.fullname classname = node.interface.__class__.__name__ - params = '' + params = "" if node.parameterization: - params = '_'.join(['{}'.format(p) for p in node.parameterization]) + params = "_".join([f"{p}" for p in node.parameterization]) try: rt_list = node.result.runtime except Exception: - logger.warning('Could not access runtime info for node %s' - ' (%s interface)', nodename, classname) + logger.warning( + "Could not access runtime info for node %s (%s interface)", + nodename, + classname, + ) continue if not isinstance(rt_list, list): @@ -1697,50 +1668,50 @@ def write_workflow_resources(graph, filename=None, append=None): for subidx, runtime in enumerate(rt_list): try: - nsamples = len(runtime.prof_dict['time']) + nsamples = len(runtime.prof_dict["time"]) except AttributeError: logger.warning( 'Could not retrieve profiling information for node "%s" ' - '(mapflow %d/%d).', nodename, subidx + 1, len(rt_list)) + "(mapflow %d/%d).", + nodename, + subidx + 1, + len(rt_list), + ) continue - for key in ['time', 'cpus', 'rss_GiB', 'vms_GiB']: + for key in ["time", "cpus", "rss_GiB", "vms_GiB"]: big_dict[key] += runtime.prof_dict[key] - big_dict['interface'] += [classname] * nsamples - big_dict['name'] += [nodename] * nsamples - big_dict['mapnode'] += [subidx] * nsamples - big_dict['params'] += [params] * nsamples + big_dict["interface"] += [classname] * nsamples + big_dict["name"] += [nodename] * nsamples + big_dict["mapnode"] += [subidx] * nsamples + big_dict["params"] += [params] * nsamples - with open(filename, 'w' if PY3 else 'wb') as rsf: + with open(filename, "w") as rsf: json.dump(big_dict, rsf, ensure_ascii=False) return filename def topological_sort(graph, depth_first=False): - """Returns a depth first sorted order if depth_first is True - """ + """Returns a depth first sorted order if depth_first is True""" import networkx as nx + nodesort = list(nx.topological_sort(graph)) if not depth_first: return nodesort, None logger.debug("Performing depth first search") nodes = [] groups = [] - group = 0 G = nx.Graph() G.add_nodes_from(graph.nodes()) G.add_edges_from(graph.edges()) components = nx.connected_components(G) - for desc in components: - group += 1 - indices = [] - for node in desc: - indices.append(nodesort.index(node)) + for group, desc in enumerate(components, start=1): + indices = [nodesort.index(node) for node in desc] nodes.extend( - np.array(nodesort)[np.array(indices)[np.argsort(indices)]] - .tolist()) + np.array(nodesort)[np.array(indices)[np.argsort(indices)]].tolist() + ) for node in desc: nodesort.remove(node) groups.extend([group] * len(desc)) diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py index 6c0757bead..54577f21b8 100644 --- a/nipype/pipeline/engine/workflows.py +++ b/nipype/pipeline/engine/workflows.py @@ 
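# --- Editorial sketch (not part of the patch): the summary_append behaviour
# above, shown end to end. Each run extends the same dict-of-lists columns,
# so successive executions show up as separate "bursts" of timestamps.
# File location and sample values are hypothetical.
import json, os, tempfile

filename = os.path.join(tempfile.mkdtemp(), "resource_monitor.json")
with open(filename, "w") as f:
    json.dump({"time": [0.0, 1.0], "rss_GiB": [0.10, 0.20]}, f)  # first run

big_dict = {"time": [], "rss_GiB": []}
if os.path.isfile(filename):       # append=True and a previous summary exists
    with open(filename) as rsf:
        big_dict = json.load(rsf)

big_dict["time"] += [10.0, 11.0]   # second run's samples, column-wise
big_dict["rss_GiB"] += [0.15, 0.25]
assert big_dict["time"] == [0.0, 1.0, 10.0, 11.0]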
-1,19 +1,13 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Defines functionality for pipelined execution of interfaces The `Workflow` class provides core functionality for batch processing. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, bytes, open - import os import os.path as op import sys -from datetime import datetime from copy import deepcopy import pickle import shutil @@ -21,24 +15,28 @@ import numpy as np from ... import config, logging +from ...utils.datetime import utcnow from ...utils.misc import str2bool -from ...utils.functions import (getsource, create_function_from_source) - -from ...interfaces.base import (traits, TraitedSpec, TraitDictObject, - TraitListObject) -from ...utils.filemanip import save_json, makedirs, to_str -from .utils import (generate_expanded_graph, export_graph, write_workflow_prov, - write_workflow_resources, format_dot, topological_sort, - get_print_name, merge_dict, format_node) +from ...utils.functions import getsource, create_function_from_source + +from ...interfaces.base import traits, TraitedSpec, TraitDictObject, TraitListObject +from ...utils.filemanip import save_json +from .utils import ( + generate_expanded_graph, + export_graph, + write_workflow_prov, + write_workflow_resources, + format_dot, + topological_sort, + get_print_name, + merge_dict, + format_node, +) from .base import EngineBase from .nodes import MapNode -# Py2 compat: http://python-future.org/compatible_idioms.html#collections-counter-and-ordereddict -from future import standard_library -standard_library.install_aliases() - -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") class Workflow(EngineBase): @@ -56,9 +54,13 @@ def __init__(self, name, base_dir=None): """ import networkx as nx - super(Workflow, self).__init__(name, base_dir) + + super().__init__(name, base_dir) self._graph = nx.DiGraph() + self._nodes_cache = set() + self._nested_workflows_cache = set() + # PUBLIC API def clone(self, name): """Clone a workflow @@ -75,7 +77,7 @@ def clone(self, name): unique name for the workflow """ - clone = super(Workflow, self).clone(name) + clone = super().clone(name) clone._reset_hierarchy() return clone @@ -117,7 +119,7 @@ def connect(self, *args, **kwargs): 'targetinput'), ...]), ...] 
sourceoutput1 will always be the first argument to func - and func will be evaluated and the results sent ot targetinput + and func will be evaluated and the results sent to targetinput currently func needs to define all its needed imports within the function as we use the inspect module to get at the source code @@ -128,29 +130,32 @@ def connect(self, *args, **kwargs): elif len(args) == 4: connection_list = [(args[0], args[2], [(args[1], args[3])])] else: - raise TypeError('connect() takes either 4 arguments, or 1 list of' - ' connection tuples (%d args given)' % len(args)) + raise TypeError( + "connect() takes either 4 arguments, or 1 list of" + " connection tuples (%d args given)" % len(args) + ) disconnect = False if kwargs: - disconnect = kwargs.get('disconnect', False) + disconnect = kwargs.get("disconnect", False) if disconnect: self.disconnect(connection_list) return - newnodes = [] + newnodes = set() for srcnode, destnode, _ in connection_list: if self in [srcnode, destnode]: - msg = ('Workflow connect cannot contain itself as node:' - ' src[%s] dest[%s] workflow[%s]') % (srcnode, destnode, - self.name) + msg = ( + "Workflow connect cannot contain itself as node:" + " src[%s] dest[%s] workflow[%s]" + ) % (srcnode, destnode, self.name) - raise IOError(msg) + raise OSError(msg) if (srcnode not in newnodes) and not self._has_node(srcnode): - newnodes.append(srcnode) + newnodes.add(srcnode) if (destnode not in newnodes) and not self._has_node(destnode): - newnodes.append(destnode) + newnodes.add(destnode) if newnodes: self._check_nodes(newnodes) for node in newnodes: @@ -160,36 +165,49 @@ def connect(self, *args, **kwargs): connected_ports = {} for srcnode, destnode, connects in connection_list: if destnode not in connected_ports: - connected_ports[destnode] = [] + connected_ports[destnode] = set() # check to see which ports of destnode are already # connected. if not disconnect and (destnode in self._graph.nodes()): for edge in self._graph.in_edges(destnode): data = self._graph.get_edge_data(*edge) - for sourceinfo, destname in data['connect']: - if destname not in connected_ports[destnode]: - connected_ports[destnode] += [destname] + connected_ports[destnode].update( + destname for _, destname in data["connect"] + ) for source, dest in connects: # Currently datasource/sink/grabber.io modules # determine their inputs/outputs depending on # connection settings. Skip these modules in the check if dest in connected_ports[destnode]: - raise Exception("""\ + raise Exception( + """\ Trying to connect %s:%s to %s:%s but input '%s' of node '%s' is already connected. 
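# --- Editorial sketch (not part of the patch): connect() above now tracks
# already-used destination ports in a set rather than a list, so the
# "already connected" membership test is O(1) and update() folds duplicates
# arriving via multiple in-edges. Names are invented stand-ins.
connected_ports = {}
destnode = "sinknode"

connected_ports.setdefault(destnode, set())
in_edges = [[("out_file", "in_file")], [("out_report", "in_file")]]
for connect_data in in_edges:
    connected_ports[destnode].update(dest for _, dest in connect_data)

assert connected_ports[destnode] == {"in_file"}  # duplicate folded away
assert "in_file" in connected_ports[destnode]    # constant-time check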
-""" % (srcnode, source, destnode, dest, dest, destnode)) - if not (hasattr(destnode, '_interface') and - ('.io' in str(destnode._interface.__class__) or any([ - '.io' in str(val) +""" + % (srcnode, source, destnode, dest, dest, destnode) + ) + if not ( + hasattr(destnode, "_interface") + and ( + ".io" in str(destnode._interface.__class__) + or any( + ".io" in str(val) for val in destnode._interface.__class__.__bases__ - ]))): + ) + ) + ): if not destnode._check_inputs(dest): - not_found.append(['in', destnode.name, dest]) - if not (hasattr(srcnode, '_interface') and - ('.io' in str(srcnode._interface.__class__) or any([ - '.io' in str(val) + not_found.append(["in", destnode.name, dest]) + if not ( + hasattr(srcnode, "_interface") + and ( + ".io" in str(srcnode._interface.__class__) + or any( + ".io" in str(val) for val in srcnode._interface.__class__.__bases__ - ]))): + ) + ) + ): if isinstance(source, tuple): # handles the case that source is specified # with a function @@ -198,26 +216,25 @@ def connect(self, *args, **kwargs): sourcename = source else: raise Exception( - ('Unknown source specification in ' - 'connection from output of %s') % srcnode.name) + ( + "Unknown source specification in " + "connection from output of %s" + ) + % srcnode.name + ) if sourcename and not srcnode._check_outputs(sourcename): - not_found.append(['out', srcnode.name, sourcename]) - connected_ports[destnode] += [dest] + not_found.append(["out", srcnode.name, sourcename]) + connected_ports[destnode].add(dest) infostr = [] for info in not_found: - infostr += [ - "Module %s has no %sput called %s\n" % (info[1], info[0], - info[2]) - ] + infostr += [f"Module {info[1]} has no {info[0]}put called {info[2]}\n"] if not_found: - raise Exception( - '\n'.join(['Some connections were not found'] + infostr)) + raise Exception("\n".join(["Some connections were not found"] + infostr)) # turn functions into strings for srcnode, destnode, connects in connection_list: for idx, (src, dest) in enumerate(connects): - if isinstance(src, - tuple) and not isinstance(src[1], (str, bytes)): + if isinstance(src, tuple) and not isinstance(src[1], (str, bytes)): function_source = getsource(src[1]) connects[idx] = ((src[0], function_source, src[2:]), dest) @@ -225,30 +242,31 @@ def connect(self, *args, **kwargs): for srcnode, destnode, connects in connection_list: edge_data = self._graph.get_edge_data(srcnode, destnode, None) if edge_data: - logger.debug('(%s, %s): Edge data exists: %s', srcnode, - destnode, to_str(edge_data)) + logger.debug( + "(%s, %s): Edge data exists: %s", srcnode, destnode, str(edge_data) + ) for data in connects: - if data not in edge_data['connect']: - edge_data['connect'].append(data) + if data not in edge_data["connect"]: + edge_data["connect"].append(data) if disconnect: - logger.debug('Removing connection: %s', to_str(data)) - edge_data['connect'].remove(data) - if edge_data['connect']: - self._graph.add_edges_from([(srcnode, destnode, - edge_data)]) + logger.debug("Removing connection: %s", str(data)) + edge_data["connect"].remove(data) + if edge_data["connect"]: + self._graph.add_edges_from([(srcnode, destnode, edge_data)]) else: # pass - logger.debug('Removing connection: %s->%s', srcnode, - destnode) + logger.debug("Removing connection: %s->%s", srcnode, destnode) self._graph.remove_edges_from([(srcnode, destnode)]) elif not disconnect: - logger.debug('(%s, %s): No edge data', srcnode, destnode) - self._graph.add_edges_from([(srcnode, destnode, { - 'connect': connects - })]) + logger.debug("(%s, %s): No 
edge data", srcnode, destnode) + self._graph.add_edges_from([(srcnode, destnode, {"connect": connects})]) edge_data = self._graph.get_edge_data(srcnode, destnode) - logger.debug('(%s, %s): new edge data: %s', srcnode, destnode, - to_str(edge_data)) + logger.debug( + "(%s, %s): new edge data: %s", srcnode, destnode, str(edge_data) + ) + + if newnodes: + self._update_node_cache() def disconnect(self, *args): """Disconnect nodes @@ -259,44 +277,45 @@ def disconnect(self, *args): elif len(args) == 4: connection_list = [(args[0], args[2], [(args[1], args[3])])] else: - raise TypeError('disconnect() takes either 4 arguments, or 1 list ' - 'of connection tuples (%d args given)' % len(args)) + raise TypeError( + "disconnect() takes either 4 arguments, or 1 list " + "of connection tuples (%d args given)" % len(args) + ) for srcnode, dstnode, conn in connection_list: - logger.debug('disconnect(): %s->%s %s', srcnode, dstnode, - to_str(conn)) + logger.debug("disconnect(): %s->%s %s", srcnode, dstnode, str(conn)) if self in [srcnode, dstnode]: - raise IOError( - 'Workflow connect cannot contain itself as node: src[%s] ' - 'dest[%s] workflow[%s]') % (srcnode, dstnode, self.name) + raise OSError( + "Workflow connect cannot contain itself as node: src[%s] " + "dest[%s] workflow[%s]" + ) % (srcnode, dstnode, self.name) # If node is not in the graph, not connected if not self._has_node(srcnode) or not self._has_node(dstnode): continue - edge_data = self._graph.get_edge_data(srcnode, dstnode, { - 'connect': [] - }) - ed_conns = [(c[0], c[1]) for c in edge_data['connect']] + edge_data = self._graph.get_edge_data(srcnode, dstnode, {"connect": []}) + ed_conns = [(c[0], c[1]) for c in edge_data["connect"]] - remove = [] - for edge in conn: - if edge in ed_conns: - # idx = ed_conns.index(edge) - remove.append((edge[0], edge[1])) + remove = [ + # idx = ed_conns.index(edge) + (edge[0], edge[1]) + for edge in conn + if edge in ed_conns + ] - logger.debug('disconnect(): remove list %s', to_str(remove)) + logger.debug("disconnect(): remove list %s", str(remove)) for el in remove: - edge_data['connect'].remove(el) - logger.debug('disconnect(): removed connection %s', to_str(el)) + edge_data["connect"].remove(el) + logger.debug("disconnect(): removed connection %s", str(el)) - if not edge_data['connect']: + if not edge_data["connect"]: self._graph.remove_edge(srcnode, dstnode) else: self._graph.add_edges_from([(srcnode, dstnode, edge_data)]) def add_nodes(self, nodes): - """ Add nodes to a workflow + """Add nodes to a workflow Parameters ---------- @@ -306,29 +325,31 @@ def add_nodes(self, nodes): newnodes = [] all_nodes = self._get_all_nodes() for node in nodes: - if self._has_node(node): - raise IOError('Node %s already exists in the workflow' % node) + if node in all_nodes: + raise OSError("Node %s already exists in the workflow" % node) if isinstance(node, Workflow): for subnode in node._get_all_nodes(): if subnode in all_nodes: - raise IOError(('Subnode %s of node %s already exists ' - 'in the workflow') % (subnode, node)) + raise OSError( + "Subnode %s of node %s already exists in the workflow" + % (subnode, node) + ) newnodes.append(node) if not newnodes: - logger.debug('no new nodes to add') + logger.debug("no new nodes to add") return for node in newnodes: if not issubclass(node.__class__, EngineBase): - raise Exception('Node %s must be a subclass of EngineBase', - node) + raise Exception("Node %s must be a subclass of EngineBase", node) self._check_nodes(newnodes) for node in newnodes: if node._hierarchy is 
None: node._hierarchy = self.name self._graph.add_nodes_from(newnodes) + self._update_node_cache() def remove_nodes(self, nodes): - """ Remove nodes from a workflow + """Remove nodes from a workflow Parameters ---------- @@ -336,6 +357,7 @@ def remove_nodes(self, nodes): A list of EngineBase-based objects """ self._graph.remove_nodes_from(nodes) + self._update_node_cache() # Input-Output access @property @@ -347,42 +369,41 @@ def outputs(self): return self._get_outputs() def get_node(self, name): - """Return an internal node by name - """ - nodenames = name.split('.') + """Return an internal node by name""" + nodenames = name.split(".") nodename = nodenames[0] outnode = [ - node for node in self._graph.nodes() - if str(node).endswith('.' + nodename) + node for node in self._graph.nodes() if str(node).endswith("." + nodename) ] if outnode: outnode = outnode[0] if nodenames[1:] and issubclass(outnode.__class__, Workflow): - outnode = outnode.get_node('.'.join(nodenames[1:])) + outnode = outnode.get_node(".".join(nodenames[1:])) else: outnode = None return outnode def list_node_names(self): - """List names of all nodes in a workflow - """ + """List names of all nodes in a workflow""" import networkx as nx + outlist = [] for node in nx.topological_sort(self._graph): if isinstance(node, Workflow): - outlist.extend([ - '.'.join((node.name, nodename)) - for nodename in node.list_node_names() - ]) + outlist.extend( + f"{node.name}.{nodename}" for nodename in node.list_node_names() + ) else: outlist.append(node.name) return sorted(outlist) - def write_graph(self, - dotfilename='graph.dot', - graph2use='hierarchical', - format="png", - simple_form=True): + def write_graph( + self, + dotfilename="graph.dot", + graph2use="hierarchical", + format="png", + simple_form=True, + ): """Generates a graphviz dot file and a png file Parameters @@ -406,69 +427,67 @@ def write_graph(self, False. """ - graphtypes = ['orig', 'flat', 'hierarchical', 'exec', 'colored'] + graphtypes = ["orig", "flat", "hierarchical", "exec", "colored"] if graph2use not in graphtypes: - raise ValueError('Unknown graph2use keyword. Must be one of: ' + - str(graphtypes)) + raise ValueError( + "Unknown graph2use keyword. 
Must be one of: " + str(graphtypes) + ) base_dir, dotfilename = op.split(dotfilename) - if base_dir == '': + if base_dir == "": if self.base_dir: base_dir = self.base_dir if self.name: base_dir = op.join(base_dir, self.name) else: base_dir = os.getcwd() - base_dir = makedirs(base_dir, exist_ok=True) - if graph2use in ['hierarchical', 'colored']: + os.makedirs(base_dir, exist_ok=True) + if graph2use in ["hierarchical", "colored"]: if self.name[:1].isdigit(): # these graphs break if int - raise ValueError('{} graph failed, workflow name cannot begin ' - 'with a number'.format(graph2use)) + raise ValueError( + "{} graph failed, workflow name cannot begin " + "with a number".format(graph2use) + ) dotfilename = op.join(base_dir, dotfilename) self.write_hierarchical_dotfile( dotfilename=dotfilename, colored=graph2use == "colored", - simple_form=simple_form) + simple_form=simple_form, + ) outfname = format_dot(dotfilename, format=format) else: graph = self._graph - if graph2use in ['flat', 'exec']: + if graph2use in ["flat", "exec"]: graph = self._create_flat_graph() - if graph2use == 'exec': + if graph2use == "exec": graph = generate_expanded_graph(deepcopy(graph)) outfname = export_graph( graph, base_dir, dotfilename=dotfilename, format=format, - simple_form=simple_form) + simple_form=simple_form, + ) logger.info( - 'Generated workflow graph: %s (graph2use=%s, simple_form=%s).' % - (outfname, graph2use, simple_form)) + "Generated workflow graph: %s (graph2use=%s, simple_form=%s)." + % (outfname, graph2use, simple_form) + ) return outfname - def write_hierarchical_dotfile(self, - dotfilename=None, - colored=False, - simple_form=True): - dotlist = ['digraph %s{' % self.name] - dotlist.append( - self._get_dot( - prefix=' ', colored=colored, simple_form=simple_form)) - dotlist.append('}') - dotstr = '\n'.join(dotlist) + def write_hierarchical_dotfile( + self, dotfilename=None, colored=False, simple_form=True + ): + dotlist = self._get_dot(prefix=" ", colored=colored, simple_form=simple_form) + dotstr = f"digraph {self.name}{{\n{dotlist}\n}}" if dotfilename: - fp = open(dotfilename, 'wt') - fp.writelines(dotstr) - fp.close() + with open(dotfilename, "w") as fp: + fp.writelines(dotstr) else: logger.info(dotstr) - def export(self, - filename=None, - prefix="output", - format="python", - include_config=False): + def export( + self, filename=None, prefix="output", format="python", include_config=False + ): """Export object into a different format Parameters @@ -484,41 +503,39 @@ def export(self, """ import networkx as nx + formats = ["python"] if format not in formats: - raise ValueError('format must be one of: %s' % '|'.join(formats)) + raise ValueError("format must be one of: %s" % "|".join(formats)) flatgraph = self._create_flat_graph() nodes = nx.topological_sort(flatgraph) all_lines = None - lines = ['# Workflow'] - importlines = [ - 'from nipype.pipeline.engine import Workflow, ' - 'Node, MapNode' - ] + lines = ["# Workflow"] + importlines = ["from nipype.pipeline.engine import Workflow, Node, MapNode"] functions = {} if format == "python": connect_template = '%s.connect(%%s, %%s, %%s, "%%s")' % self.name - connect_template2 = '%s.connect(%%s, "%%s", %%s, "%%s")' \ - % self.name - wfdef = '%s = Workflow("%s")' % (self.name, self.name) + connect_template2 = '%s.connect(%%s, "%%s", %%s, "%%s")' % self.name + wfdef = f'{self.name} = Workflow("{self.name}")' lines.append(wfdef) if include_config: - lines.append('%s.config = %s' % (self.name, self.config)) - for idx, node in enumerate(nodes): - 
nodename = node.fullname.replace('.', '_') + lines.append(f"{self.name}.config = {self.config}") + for node in nodes: + nodename = node.fullname.replace(".", "_") # write nodes nodelines = format_node( - node, format='python', include_config=include_config) + node, format="python", include_config=include_config + ) for line in nodelines: - if line.startswith('from'): + if line.startswith("from"): if line not in importlines: importlines.append(line) else: lines.append(line) # write connections for u, _, d in flatgraph.in_edges(nbunch=node, data=True): - for cd in d['connect']: + for cd in d["connect"]: if isinstance(cd[0], tuple): args = list(cd[0]) if args[1] in functions: @@ -526,34 +543,44 @@ def export(self, else: func = create_function_from_source(args[1]) funcname = [ - name for name in func.__globals__ - if name != '__builtins__' + name + for name in func.__globals__ + if name != "__builtins__" ][0] functions[args[1]] = funcname args[1] = funcname - args = tuple([arg for arg in args if arg]) - line_args = (u.fullname.replace('.', '_'), args, - nodename, cd[1]) + args = tuple(arg for arg in args if arg) + line_args = ( + u.fullname.replace(".", "_"), + args, + nodename, + cd[1], + ) line = connect_template % line_args line = line.replace("'%s'" % funcname, funcname) lines.append(line) else: - line_args = (u.fullname.replace('.', '_'), cd[0], - nodename, cd[1]) + line_args = ( + u.fullname.replace(".", "_"), + cd[0], + nodename, + cd[1], + ) lines.append(connect_template2 % line_args) - functionlines = ['# Functions'] - for function in functions: - functionlines.append(pickle.loads(function).rstrip()) + functionlines = ["# Functions"] + functionlines.extend( + pickle.loads(function).rstrip() for function in functions + ) all_lines = importlines + functionlines + lines if not filename: - filename = '%s%s.py' % (prefix, self.name) - with open(filename, 'wt') as fp: - fp.writelines('\n'.join(all_lines)) + filename = f"{prefix}{self.name}.py" + with open(filename, "w") as fp: + fp.writelines("\n".join(all_lines)) return all_lines def run(self, plugin=None, plugin_args=None, updatehash=False): - """ Execute the workflow + """Execute the workflow Parameters ---------- @@ -565,26 +592,25 @@ def run(self, plugin=None, plugin_args=None, updatehash=False): constructor. see individual plugin doc strings for details. 
""" if plugin is None: - plugin = config.get('execution', 'plugin') + plugin = config.get("execution", "plugin") if not isinstance(plugin, (str, bytes)): runner = plugin - plugin = runner.__class__.__name__[:-len('Plugin')] + plugin = runner.__class__.__name__[: -len("Plugin")] plugin_args = runner.plugin_args else: - name = '.'.join(__name__.split('.')[:-2] + ['plugins']) + name = ".".join(__name__.split(".")[:-2] + ["plugins"]) try: __import__(name) except ImportError: - msg = 'Could not import plugin module: %s' % name + msg = "Could not import plugin module: %s" % name logger.error(msg) raise ImportError(msg) else: - plugin_mod = getattr(sys.modules[name], '%sPlugin' % plugin) + plugin_mod = getattr(sys.modules[name], "%sPlugin" % plugin) runner = plugin_mod(plugin_args=plugin_args) flatgraph = self._create_flat_graph() self.config = merge_dict(deepcopy(config._sections), self.config) - logger.info('Workflow %s settings: %s', self.name, - to_str(sorted(self.config))) + logger.info("Workflow %s settings: %s", self.name, str(sorted(self.config))) self._set_needed_outputs(flatgraph) execgraph = generate_expanded_graph(deepcopy(flatgraph)) for index, node in enumerate(execgraph.nodes()): @@ -594,21 +620,21 @@ def run(self, plugin=None, plugin_args=None, updatehash=False): if isinstance(node, MapNode): node.use_plugin = (plugin, plugin_args) self._configure_exec_nodes(execgraph) - if str2bool(self.config['execution']['create_report']): + if str2bool(self.config["execution"]["create_report"]): self._write_report_info(self.base_dir, self.name, execgraph) runner.run(execgraph, updatehash=updatehash, config=self.config) - datestr = datetime.utcnow().strftime('%Y%m%dT%H%M%S') - if str2bool(self.config['execution']['write_provenance']): - prov_base = op.join(self.base_dir, - 'workflow_provenance_%s' % datestr) - logger.info('Provenance file prefix: %s' % prov_base) - write_workflow_prov(execgraph, prov_base, format='all') + datestr = utcnow().strftime("%Y%m%dT%H%M%S") + if str2bool(self.config["execution"]["write_provenance"]): + prov_base = op.join(self.base_dir, "workflow_provenance_%s" % datestr) + logger.info("Provenance file prefix: %s" % prov_base) + write_workflow_prov(execgraph, prov_base, format="all") if config.resource_monitor: base_dir = self.base_dir or os.getcwd() write_workflow_resources( execgraph, - filename=op.join(base_dir, self.name, 'resource_monitor.json')) + filename=op.join(base_dir, self.name, "resource_monitor.json"), + ) return execgraph # PRIVATE API AND FUNCTIONS @@ -617,52 +643,60 @@ def _write_report_info(self, workingdir, name, graph): if workingdir is None: workingdir = os.getcwd() report_dir = op.join(workingdir, name) - makedirs(report_dir, exist_ok=True) + os.makedirs(report_dir, exist_ok=True) shutil.copyfile( - op.join(op.dirname(__file__), 'report_template.html'), - op.join(report_dir, 'index.html')) + op.join(op.dirname(__file__), "report_template.html"), + op.join(report_dir, "index.html"), + ) shutil.copyfile( - op.join(op.dirname(__file__), '..', '..', 'external', 'd3.js'), - op.join(report_dir, 'd3.js')) + op.join(op.dirname(__file__), "..", "..", "external", "d3.js"), + op.join(report_dir, "d3.js"), + ) nodes, groups = topological_sort(graph, depth_first=True) - graph_file = op.join(report_dir, 'graph1.json') - json_dict = {'nodes': [], 'links': [], 'groups': [], 'maxN': 0} + graph_file = op.join(report_dir, "graph1.json") + json_dict = {"nodes": [], "links": [], "groups": [], "maxN": 0} for i, node in enumerate(nodes): - report_file = 
"%s/_report/report.rst" % \ - node.output_dir().replace(report_dir, '') - result_file = "%s/result_%s.pklz" % \ - (node.output_dir().replace(report_dir, ''), - node.name) - json_dict['nodes'].append( + report_file = "%s/_report/report.rst" % node.output_dir().replace( + report_dir, "" + ) + result_file = "{}/result_{}.pklz".format( + node.output_dir().replace(report_dir, ""), + node.name, + ) + json_dict["nodes"].append( dict( - name='%d_%s' % (i, node.name), + name="%d_%s" % (i, node.name), report=report_file, result=result_file, - group=groups[i])) + group=groups[i], + ) + ) maxN = 0 for gid in np.unique(groups): procs = [i for i, val in enumerate(groups) if val == gid] N = len(procs) if N > maxN: maxN = N - json_dict['groups'].append( - dict(procs=procs, total=N, name='Group_%05d' % gid)) - json_dict['maxN'] = maxN + json_dict["groups"].append( + dict(procs=procs, total=N, name="Group_%05d" % gid) + ) + json_dict["maxN"] = maxN for u, v in graph.in_edges(): - json_dict['links'].append( - dict(source=nodes.index(u), target=nodes.index(v), value=1)) + json_dict["links"].append( + dict(source=nodes.index(u), target=nodes.index(v), value=1) + ) save_json(graph_file, json_dict) - graph_file = op.join(report_dir, 'graph.json') + graph_file = op.join(report_dir, "graph.json") # Avoid RuntimeWarning: divide by zero encountered in log10 num_nodes = len(nodes) if num_nodes > 0: index_name = np.ceil(np.log10(num_nodes)).astype(int) else: index_name = 0 - template = '%%0%dd_' % index_name + template = "%%0%dd_" % index_name def getname(u, i): - name_parts = u.fullname.split('.') + name_parts = u.fullname.split(".") # return '.'.join(name_parts[:-1] + [template % i + name_parts[-1]]) return template % i + name_parts[-1] @@ -672,16 +706,13 @@ def getname(u, i): for u, v in graph.in_edges(nbunch=node): imports.append(getname(u, nodes.index(u))) json_dict.append( - dict( - name=getname(node, i), - size=1, - group=groups[i], - imports=imports)) + dict(name=getname(node, i), size=1, group=groups[i], imports=imports) + ) save_json(graph_file, json_dict) def _set_needed_outputs(self, graph): """Initialize node with list of which outputs are needed.""" - rm_outputs = self.config['execution']['remove_unnecessary_outputs'] + rm_outputs = self.config["execution"]["remove_unnecessary_outputs"] if not str2bool(rm_outputs): return for node in graph.nodes(): @@ -689,8 +720,7 @@ def _set_needed_outputs(self, graph): for edge in graph.out_edges(node): data = graph.get_edge_data(*edge) sourceinfo = [ - v1[0] if isinstance(v1, tuple) else v1 - for v1, v2 in data['connect'] + v1[0] if isinstance(v1, tuple) else v1 for v1, v2 in data["connect"] ] node.needed_outputs += [ v for v in sourceinfo if v not in node.needed_outputs @@ -699,22 +729,19 @@ def _set_needed_outputs(self, graph): node.needed_outputs = sorted(node.needed_outputs) def _configure_exec_nodes(self, graph): - """Ensure that each node knows where to get inputs from - """ + """Ensure that each node knows where to get inputs from""" for node in graph.nodes(): node.input_source = {} for edge in graph.in_edges(node): data = graph.get_edge_data(*edge) - for sourceinfo, field in data['connect']: - node.input_source[field] = \ - (op.join(edge[0].output_dir(), - 'result_%s.pklz' % edge[0].name), - sourceinfo) + for sourceinfo, field in data["connect"]: + node.input_source[field] = ( + op.join(edge[0].output_dir(), "result_%s.pklz" % edge[0].name), + sourceinfo, + ) def _check_nodes(self, nodes): - """Checks if any of the nodes are already in the graph - - """ + 
"""Checks if any of the nodes are already in the graph""" node_names = [node.name for node in self._graph.nodes()] node_lineage = [node._hierarchy for node in self._graph.nodes()] for node in nodes: @@ -723,49 +750,84 @@ def _check_nodes(self, nodes): try: this_node_lineage = node_lineage[idx] except IndexError: - raise IOError( - 'Duplicate node name "%s" found.' % node.name) + raise OSError('Duplicate node name "%s" found.' % node.name) else: if this_node_lineage in [node._hierarchy, self.name]: - raise IOError( - 'Duplicate node name "%s" found.' % node.name) + raise OSError('Duplicate node name "%s" found.' % node.name) else: node_names.append(node.name) - def _has_attr(self, parameter, subtype='in'): - """Checks if a parameter is available as an input or output - """ - if subtype == 'in': - subobject = self.inputs - else: - subobject = self.outputs - attrlist = parameter.split('.') - cur_out = subobject - for attr in attrlist: - if not hasattr(cur_out, attr): + def _has_attr(self, parameter, subtype="in"): + """Checks if a parameter is available as an input or output""" + hierarchy = parameter.split(".") + + # Connecting to a workflow needs at least two values, + # the name of the child node and the name of the input/output + if len(hierarchy) < 2: + return False + + attrname = hierarchy.pop() + nodename = hierarchy.pop() + + def _check_is_already_connected(workflow, node, attrname): + for _, _, d in workflow._graph.in_edges(nbunch=node, data=True): + for cd in d["connect"]: + if attrname == cd[1]: + return False + return True + + targetworkflow = self + while hierarchy: + workflowname = hierarchy.pop(0) + workflow = None + for node in targetworkflow._graph.nodes(): + if node.name == workflowname: + if isinstance(node, Workflow): + workflow = node + break + if workflow is None: return False - cur_out = getattr(cur_out, attr) - return True + # Verify input does not already have an incoming connection + # in the hierarchy of workflows + if subtype == "in": + hierattrname = ".".join(hierarchy + [nodename, attrname]) + if not _check_is_already_connected( + targetworkflow, workflow, hierattrname + ): + return False + targetworkflow = workflow + + targetnode = None + for node in targetworkflow._graph.nodes(): + if node.name == nodename: + if isinstance(node, Workflow): + return False + else: + targetnode = node + break + if targetnode is None: + return False - def _get_parameter_node(self, parameter, subtype='in'): - """Returns the underlying node corresponding to an input or - output parameter - """ - if subtype == 'in': - subobject = self.inputs + if subtype == "in": + if not hasattr(targetnode.inputs, attrname): + return False else: - subobject = self.outputs - attrlist = parameter.split('.') - cur_out = subobject - for attr in attrlist[:-1]: - cur_out = getattr(cur_out, attr) - return cur_out.traits()[attrlist[-1]].node + if not hasattr(targetnode.outputs, attrname): + return False + + # Verify input does not already have an incoming connection + # in the target workflow + if subtype == "in": + if not _check_is_already_connected(targetworkflow, targetnode, attrname): + return False + + return True def _check_outputs(self, parameter): - return self._has_attr(parameter, subtype='out') + return self._has_attr(parameter, subtype="out") def _check_inputs(self, parameter): - return self._has_attr(parameter, subtype='in') + return self._has_attr(parameter, subtype="in") def _get_inputs(self): """Returns the inputs of a workflow @@ -779,16 +841,15 @@ def _get_inputs(self): if isinstance(node, 
Workflow): setattr(inputdict, node.name, node.inputs) else: - taken_inputs = [] - for _, _, d in self._graph.in_edges(nbunch=node, data=True): - for cd in d['connect']: - taken_inputs.append(cd[1]) + taken_inputs = [ + cd[1] + for _, _, d in self._graph.in_edges(nbunch=node, data=True) + for cd in d["connect"] + ] unconnectedinputs = TraitedSpec() for key, trait in list(node.inputs.items()): if key not in taken_inputs: - unconnectedinputs.add_trait(key, - traits.Trait( - trait, node=node)) + unconnectedinputs.add_trait(key, traits.Trait(trait, node=node)) value = getattr(node.inputs, key) setattr(unconnectedinputs, key, value) setattr(inputdict, node.name, unconnectedinputs) @@ -796,8 +857,7 @@ def _get_inputs(self): return inputdict def _get_outputs(self): - """Returns all possible output ports that are not already connected - """ + """Returns all possible output ports that are not already connected""" outputdict = TraitedSpec() for node in self._graph.nodes(): outputdict.add_trait(node.name, traits.Instance(TraitedSpec)) @@ -812,8 +872,7 @@ def _get_outputs(self): return outputdict def _set_input(self, objekt, name, newvalue): - """Trait callback function to update a node input - """ + """Trait callback function to update a node input""" objekt.traits()[name].node.set_input(name, newvalue) def _set_node_input(self, node, param, source, sourceinfo): @@ -822,230 +881,240 @@ def _set_node_input(self, node, param, source, sourceinfo): val = source.get_output(sourceinfo) elif isinstance(sourceinfo, tuple): if callable(sourceinfo[1]): - val = sourceinfo[1](source.get_output(sourceinfo[0]), - *sourceinfo[2:]) + val = sourceinfo[1](source.get_output(sourceinfo[0]), *sourceinfo[2:]) newval = val if isinstance(val, TraitDictObject): newval = dict(val) if isinstance(val, TraitListObject): newval = val[:] - logger.debug('setting node input: %s->%s', param, to_str(newval)) + logger.debug("setting node input: %s->%s", param, str(newval)) node.set_input(param, deepcopy(newval)) def _get_all_nodes(self): - allnodes = [] - for node in self._graph.nodes(): - if isinstance(node, Workflow): - allnodes.extend(node._get_all_nodes()) - else: - allnodes.append(node) + allnodes = self._nodes_cache - self._nested_workflows_cache + for node in self._nested_workflows_cache: + allnodes |= node._get_all_nodes() return allnodes - def _has_node(self, wanted_node): - for node in self._graph.nodes(): - if wanted_node == node: - return True + def _update_node_cache(self): + nodes = set(self._graph) + + added_nodes = nodes.difference(self._nodes_cache) + removed_nodes = self._nodes_cache.difference(nodes) + + self._nodes_cache = nodes + self._nested_workflows_cache.difference_update(removed_nodes) + + for node in added_nodes: if isinstance(node, Workflow): - if node._has_node(wanted_node): - return True - return False + self._nested_workflows_cache.add(node) + + def _has_node(self, wanted_node): + return wanted_node in self._nodes_cache or any( + wf._has_node(wanted_node) for wf in self._nested_workflows_cache + ) def _create_flat_graph(self): """Make a simple DAG where no node is a workflow.""" - logger.debug('Creating flat graph for workflow: %s', self.name) + logger.debug("Creating flat graph for workflow: %s", self.name) workflowcopy = deepcopy(self) workflowcopy._generate_flatgraph() return workflowcopy._graph def _reset_hierarchy(self): - """Reset the hierarchy on a graph - """ + """Reset the hierarchy on a graph""" for node in self._graph.nodes(): if isinstance(node, Workflow): node._reset_hierarchy() for innernode 
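# --- Editorial sketch (not part of the patch): the set bookkeeping behind
# _update_node_cache above, with plain strings standing in for node objects
# and an endswith() check standing in for isinstance(node, Workflow).
graph_nodes = {"realign", "smooth", "preproc_wf"}  # current self._graph
nodes_cache = {"realign", "smooth"}                # stale cache
nested_workflows_cache = set()

added = graph_nodes - nodes_cache                  # {"preproc_wf"}
removed = nodes_cache - graph_nodes                # set()

nodes_cache = graph_nodes
nested_workflows_cache -= removed
for node in added:
    if node.endswith("_wf"):                       # stand-in Workflow test
        nested_workflows_cache.add(node)

assert nested_workflows_cache == {"preproc_wf"}
assert "smooth" in nodes_cache                     # _has_node is now a set lookup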
in node._graph.nodes(): - innernode._hierarchy = '.'.join((self.name, - innernode._hierarchy)) + innernode._hierarchy = f"{self.name}.{innernode._hierarchy}" else: node._hierarchy = self.name def _generate_flatgraph(self): - """Generate a graph containing only Nodes or MapNodes - """ + """Generate a graph containing only Nodes or MapNodes""" import networkx as nx - logger.debug('expanding workflow: %s', self) + + logger.debug("expanding workflow: %s", self) nodes2remove = [] if not nx.is_directed_acyclic_graph(self._graph): - raise Exception(('Workflow: %s is not a directed acyclic graph ' - '(DAG)') % self.name) - nodes = list(nx.topological_sort(self._graph)) + raise Exception( + ("Workflow: %s is not a directed acyclic graph (DAG)") % self.name + ) + nodes = list(self._graph.nodes) for node in nodes: - logger.debug('processing node: %s', node) + logger.debug("processing node: %s", node) if isinstance(node, Workflow): nodes2remove.append(node) # use in_edges instead of in_edges_iter to allow # disconnections to take place properly. otherwise, the # edge dict is modified. # dj: added list() for networkx ver.2 - for u, _, d in list( - self._graph.in_edges(nbunch=node, data=True)): - logger.debug('in: connections-> %s', to_str(d['connect'])) - for cd in deepcopy(d['connect']): - logger.debug("in: %s", to_str(cd)) - dstnode = node._get_parameter_node(cd[1], subtype='in') + for u, _, d in list(self._graph.in_edges(nbunch=node, data=True)): + logger.debug("in: connections-> %s", str(d["connect"])) + for cd in deepcopy(d["connect"]): + logger.debug("in: %s", str(cd)) + dstnode = node.get_node(cd[1].rsplit(".", 1)[0]) srcnode = u srcout = cd[0] - dstin = cd[1].split('.')[-1] - logger.debug('in edges: %s %s %s %s', srcnode, srcout, - dstnode, dstin) + dstin = cd[1].split(".")[-1] + logger.debug( + "in edges: %s %s %s %s", srcnode, srcout, dstnode, dstin + ) self.disconnect(u, cd[0], node, cd[1]) self.connect(srcnode, srcout, dstnode, dstin) # do not use out_edges_iter for reasons stated in in_edges # dj: for ver 2 use list(out_edges) - for _, v, d in list( - self._graph.out_edges(nbunch=node, data=True)): - logger.debug('out: connections-> %s', to_str(d['connect'])) - for cd in deepcopy(d['connect']): - logger.debug("out: %s", to_str(cd)) + for _, v, d in list(self._graph.out_edges(nbunch=node, data=True)): + logger.debug("out: connections-> %s", str(d["connect"])) + for cd in deepcopy(d["connect"]): + logger.debug("out: %s", str(cd)) dstnode = v if isinstance(cd[0], tuple): parameter = cd[0][0] else: parameter = cd[0] - srcnode = node._get_parameter_node( - parameter, subtype='out') + srcnode = node.get_node(parameter.rsplit(".", 1)[0]) if isinstance(cd[0], tuple): srcout = list(cd[0]) - srcout[0] = parameter.split('.')[-1] + srcout[0] = parameter.split(".")[-1] srcout = tuple(srcout) else: - srcout = parameter.split('.')[-1] + srcout = parameter.split(".")[-1] dstin = cd[1] - logger.debug('out edges: %s %s %s %s', srcnode, srcout, - dstnode, dstin) + logger.debug( + "out edges: %s %s %s %s", srcnode, srcout, dstnode, dstin + ) self.disconnect(node, cd[0], v, cd[1]) self.connect(srcnode, srcout, dstnode, dstin) # expand the workflow node # logger.debug('expanding workflow: %s', node) node._generate_flatgraph() for innernode in node._graph.nodes(): - innernode._hierarchy = '.'.join((self.name, - innernode._hierarchy)) + innernode._hierarchy = f"{self.name}.{innernode._hierarchy}" self._graph.add_nodes_from(node._graph.nodes()) self._graph.add_edges_from(node._graph.edges(data=True)) if 
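# --- Editorial sketch (not part of the patch): _generate_flatgraph above now
# splits a dotted connection target into a node path (for get_node) and a
# trait name with rsplit/split. Toy string only:
target = "preproc.realign.in_files"
assert target.rsplit(".", 1)[0] == "preproc.realign"  # node path for get_node
assert target.split(".")[-1] == "in_files"            # the destination trait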
nodes2remove: self._graph.remove_nodes_from(nodes2remove) - logger.debug('finished expanding workflow: %s', self) - - def _get_dot(self, - prefix=None, - hierarchy=None, - colored=False, - simple_form=True, - level=0): - """Create a dot file with connection info - """ + logger.debug("finished expanding workflow: %s", self) + + def _get_dot( + self, prefix=None, hierarchy=None, colored=False, simple_form=True, level=0 + ): + """Create a dot file with connection info""" import networkx as nx + if prefix is None: - prefix = ' ' + prefix = " " if hierarchy is None: hierarchy = [] colorset = [ - '#FFFFC8', # Y - '#0000FF', - '#B4B4FF', - '#E6E6FF', # B - '#FF0000', - '#FFB4B4', - '#FFE6E6', # R - '#00A300', - '#B4FFB4', - '#E6FFE6', # G - '#0000FF', - '#B4B4FF' + "#FFFFC8", # Y + "#0000FF", + "#B4B4FF", + "#E6E6FF", # B + "#FF0000", + "#FFB4B4", + "#FFE6E6", # R + "#00A300", + "#B4FFB4", + "#E6FFE6", # G + "#0000FF", + "#B4B4FF", ] # loop B if level > len(colorset) - 2: level = 3 # Loop back to blue - dotlist = ['%slabel="%s";' % (prefix, self.name)] + dotlist = [f'{prefix}label="{self.name}";'] for node in nx.topological_sort(self._graph): - fullname = '.'.join(hierarchy + [node.fullname]) - nodename = fullname.replace('.', '_') + fullname = ".".join(hierarchy + [node.fullname]) + nodename = fullname.replace(".", "_") if not isinstance(node, Workflow): node_class_name = get_print_name(node, simple_form=simple_form) if not simple_form: - node_class_name = '.'.join(node_class_name.split('.')[1:]) - if hasattr(node, 'iterables') and node.iterables: - dotlist.append(('%s[label="%s", shape=box3d,' - 'style=filled, color=black, colorscheme' - '=greys7 fillcolor=2];') % - (nodename, node_class_name)) + node_class_name = ".".join(node_class_name.split(".")[1:]) + if hasattr(node, "iterables") and node.iterables: + dotlist.append( + ( + '%s[label="%s", shape=box3d,' + "style=filled, color=black, colorscheme" + "=greys7 fillcolor=2];" + ) + % (nodename, node_class_name) + ) else: if colored: dotlist.append( - ('%s[label="%s", style=filled,' - ' fillcolor="%s"];') % (nodename, node_class_name, - colorset[level])) + ('%s[label="%s", style=filled, fillcolor="%s"];') + % (nodename, node_class_name, colorset[level]) + ) else: - dotlist.append(('%s[label="%s"];') % (nodename, - node_class_name)) + dotlist.append( + ('%s[label="%s"];') % (nodename, node_class_name) + ) for node in nx.topological_sort(self._graph): if isinstance(node, Workflow): - fullname = '.'.join(hierarchy + [node.fullname]) - nodename = fullname.replace('.', '_') - dotlist.append('subgraph cluster_%s {' % nodename) + fullname = ".".join(hierarchy + [node.fullname]) + nodename = fullname.replace(".", "_") + dotlist.append("subgraph cluster_%s {" % nodename) if colored: - dotlist.append(prefix + prefix + 'edge [color="%s"];' % - (colorset[level + 1])) - dotlist.append(prefix + prefix + 'style=filled;') - dotlist.append(prefix + prefix + 'fillcolor="%s";' % - (colorset[level + 2])) - dotlist.append( - node._get_dot( - prefix=prefix + prefix, - hierarchy=hierarchy + [self.name], - colored=colored, - simple_form=simple_form, - level=level + 3)) - dotlist.append('}') + dotlist.extend( + ( + f'{prefix * 2}edge [color="{colorset[level + 1]}"];', + f"{prefix * 2}style=filled;", + f'{prefix * 2}fillcolor="{colorset[level + 2]}";', + ) + ) + dotlist.extend( + ( + node._get_dot( + prefix=prefix + prefix, + hierarchy=hierarchy + [self.name], + colored=colored, + simple_form=simple_form, + level=level + 3, + ), + "}", + ) + ) else: for subnode in 
self._graph.successors(node): if node._hierarchy != subnode._hierarchy: continue if not isinstance(subnode, Workflow): - nodefullname = '.'.join(hierarchy + [node.fullname]) - subnodefullname = '.'.join( - hierarchy + [subnode.fullname]) - nodename = nodefullname.replace('.', '_') - subnodename = subnodefullname.replace('.', '_') - for _ in self._graph.get_edge_data(node, - subnode)['connect']: - dotlist.append('%s -> %s;' % (nodename, - subnodename)) - logger.debug('connection: %s', dotlist[-1]) + nodefullname = ".".join(hierarchy + [node.fullname]) + subnodefullname = ".".join(hierarchy + [subnode.fullname]) + nodename = nodefullname.replace(".", "_") + subnodename = subnodefullname.replace(".", "_") + dotlist.extend( + f"{nodename} -> {subnodename};" + for _ in self._graph.get_edge_data(node, subnode)["connect"] + ) + logger.debug("connection: %s", dotlist[-1]) # add between workflow connections for u, v, d in self._graph.edges(data=True): - uname = '.'.join(hierarchy + [u.fullname]) - vname = '.'.join(hierarchy + [v.fullname]) - for src, dest in d['connect']: + uname = ".".join(hierarchy + [u.fullname]) + vname = ".".join(hierarchy + [v.fullname]) + for src, dest in d["connect"]: uname1 = uname vname1 = vname if isinstance(src, tuple): srcname = src[0] else: srcname = src - if '.' in srcname: - uname1 += '.' + '.'.join(srcname.split('.')[:-1]) - if '.' in dest and '@' not in dest: + if "." in srcname: + uname1 += "." + ".".join(srcname.split(".")[:-1]) + if "." in dest and "@" not in dest: if not isinstance(v, Workflow): - if 'datasink' not in \ - str(v._interface.__class__).lower(): - vname1 += '.' + '.'.join(dest.split('.')[:-1]) + if "datasink" not in str(v._interface.__class__).lower(): + vname1 += "." + ".".join(dest.split(".")[:-1]) else: - vname1 += '.' + '.'.join(dest.split('.')[:-1]) - if uname1.split('.')[:-1] != vname1.split('.')[:-1]: - dotlist.append('%s -> %s;' % (uname1.replace('.', '_'), - vname1.replace('.', '_'))) - logger.debug('cross connection: %s', dotlist[-1]) - return ('\n' + prefix).join(dotlist) + vname1 += "." + ".".join(dest.split(".")[:-1]) + if uname1.split(".")[:-1] != vname1.split(".")[:-1]: + dotlist.append( + "%s -> %s;" + % (uname1.replace(".", "_"), vname1.replace(".", "_")) + ) + logger.debug("cross connection: %s", dotlist[-1]) + return ("\n" + prefix).join(dotlist) diff --git a/nipype/pipeline/plugins/__init__.py b/nipype/pipeline/plugins/__init__.py index e3c797a10a..0b1ba01637 100644 --- a/nipype/pipeline/plugins/__init__.py +++ b/nipype/pipeline/plugins/__init__.py @@ -1,8 +1,5 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) from .debug import DebugPlugin from .linear import LinearPlugin diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index a30838a323..1571ab71a9 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -1,12 +1,6 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Common graph operations for execution -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, object, open - +"""Common graph operations for execution.""" import sys from copy import deepcopy from glob import glob @@ -18,39 +12,52 @@ import numpy as np from ... 
import logging -from ...utils.filemanip import loadpkl from ...utils.misc import str2bool -from ..engine.utils import topological_sort +from ..engine.utils import topological_sort, load_resultfile from ..engine import MapNode from .tools import report_crash, report_nodes_not_run, create_pyscript -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") -class PluginBase(object): - """ - Base class for plugins +def _graph_to_lil_matrix(graph, nodelist): + """Provide a sparse linked list matrix across various NetworkX versions""" + import scipy.sparse as ssp + + try: + from networkx import to_scipy_sparse_array + except ImportError: # NetworkX < 2.7 + from networkx import to_scipy_sparse_matrix as to_scipy_sparse_array + + return ssp.lil_matrix(to_scipy_sparse_array(graph, nodelist=nodelist, format="lil")) - """ + +class PluginBase: + """Base class for plugins.""" def __init__(self, plugin_args=None): if plugin_args is None: plugin_args = {} self.plugin_args = plugin_args self._config = None - self._status_callback = plugin_args.get('status_callback') + self._status_callback = plugin_args.get("status_callback") def run(self, graph, config, updatehash=False): """ + Instruct the plugin to execute the workflow graph. + The core plugin member that should be implemented by all plugins. - graph: a networkx, flattened :abbr:`DAG (Directed Acyclic Graph)` - to be executed - - config: a nipype.config object - - updatehash: + Parameters + ---------- + graph : + a networkx, flattened :abbr:`DAG (Directed Acyclic Graph)` + to be executed + config : :obj:`~nipype.config` + a nipype.config object + updatehash : :obj:`bool` + whether cached nodes with stale hash should be just updated. """ raise NotImplementedError @@ -60,19 +67,7 @@ class DistributedPluginBase(PluginBase): """ Execute workflow with a distribution engine - Relevant class attributes - ------------------------- - - procs: list (N) of underlying interface elements to be processed - proc_done: a boolean numpy array (N,) signifying whether a process has been - submitted for execution - proc_pending: a boolean numpy array (N,) signifying whether a - process is currently running. - depidx: a boolean matrix (NxN) storing the dependency structure accross - processes. Process dependencies are derived from each column. - - Combinations of ``proc_done`` and ``proc_pending`` - -------------------------------------------------- + Combinations of ``proc_done`` and ``proc_pending``: +------------+---------------+--------------------------------+ | proc_done | proc_pending | outcome | @@ -85,6 +80,21 @@ class DistributedPluginBase(PluginBase): +------------+---------------+--------------------------------+ | False | True | INVALID COMBINATION | +------------+---------------+--------------------------------+ + + Attributes + ---------- + procs : :obj:`list` + list (N) of underlying interface elements to be processed + proc_done : :obj:`numpy.ndarray` + a boolean numpy array (N,) signifying whether a process has been + submitted for execution + proc_pending : :obj:`numpy.ndarray` + a boolean numpy array (N,) signifying whether a + process is currently running. + depidx : :obj:`numpy.matrix` + a boolean matrix (NxN) storing the dependency structure across + processes. Process dependencies are derived from each column. 
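# Editor's sketch (not part of the patch): how the _graph_to_lil_matrix helper
# above feeds the scheduler. The dependency matrix has one column per job, and
# a job is ready exactly when its column sums to zero (no unmet predecessors),
# which is the test DistributedPluginBase.run() applies on every polling loop.
# The toy graph and variable names below are illustrative only.
import networkx as nx
import numpy as np
import scipy.sparse as ssp

def graph_to_lil_matrix(graph, nodelist):
    # Same NetworkX-version shim as in the diff above.
    try:
        from networkx import to_scipy_sparse_array
    except ImportError:  # NetworkX < 2.7
        from networkx import to_scipy_sparse_matrix as to_scipy_sparse_array
    return ssp.lil_matrix(to_scipy_sparse_array(graph, nodelist=nodelist, format="lil"))

g = nx.DiGraph([("a", "b"), ("b", "c")])  # a -> b -> c
procs = list(nx.topological_sort(g))
depidx = graph_to_lil_matrix(g, nodelist=procs)
proc_done = np.zeros(len(procs), dtype=bool)
ready = np.nonzero(~proc_done & (depidx.sum(0) == 0))[1]
assert [procs[i] for i in ready] == ["a"]  # only the root has no dependencies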
+ """ def __init__(self, plugin_args=None): @@ -92,7 +102,7 @@ def __init__(self, plugin_args=None): Initialize runtime attributes to none """ - super(DistributedPluginBase, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) self.procs = None self.depidx = None self.refidx = None @@ -101,7 +111,7 @@ def __init__(self, plugin_args=None): self.proc_done = None self.proc_pending = None self.pending_tasks = [] - self.max_jobs = self.plugin_args.get('max_jobs', np.inf) + self.max_jobs = self.plugin_args.get("max_jobs", np.inf) def _prerun_check(self, graph): """Stub method to validate/massage graph and nodes before running""" @@ -115,7 +125,7 @@ def run(self, graph, config, updatehash=False): """ logger.info("Running in parallel.") self._config = config - poll_sleep_secs = float(config['execution']['poll_sleep_duration']) + poll_sleep_secs = float(config["execution"]["poll_sleep_duration"]) self._prerun_check(graph) # Generate appropriate structures for worker-manager model @@ -124,6 +134,7 @@ def run(self, graph, config, updatehash=False): self.mapnodesubids = {} # setup polling - TODO: change to threaded model notrun = [] + errors = [] old_progress_stats = None old_presub_stats = None @@ -131,19 +142,24 @@ def run(self, graph, config, updatehash=False): loop_start = time() # Check if a job is available (jobs with all dependencies run) # https://github.com/nipy/nipype/pull/2200#discussion_r141605722 - jobs_ready = np.nonzero(~self.proc_done & - (self.depidx.sum(0) == 0))[1] - - progress_stats = (len(self.proc_done), - np.sum(self.proc_done ^ self.proc_pending), - np.sum(self.proc_done & self.proc_pending), - len(jobs_ready), len(self.pending_tasks), - np.sum(~self.proc_done & ~self.proc_pending)) + jobs_ready = np.nonzero(~self.proc_done & (self.depidx.sum(0) == 0))[1] + + progress_stats = ( + len(self.proc_done), + np.sum(self.proc_done ^ self.proc_pending), + np.sum(self.proc_done & self.proc_pending), + len(jobs_ready), + len(self.pending_tasks), + np.sum(~self.proc_done & ~self.proc_pending), + ) display_stats = progress_stats != old_progress_stats if display_stats: - logger.debug('Progress: %d jobs, %d/%d/%d ' - '(done/running/ready), %d/%d ' - '(pending_tasks/waiting).', *progress_stats) + logger.debug( + "Progress: %d jobs, %d/%d/%d " + "(done/running/ready), %d/%d " + "(pending_tasks/waiting).", + *progress_stats, + ) old_progress_stats = progress_stats toappend = [] # trigger callbacks for any pending results @@ -151,38 +167,37 @@ def run(self, graph, config, updatehash=False): taskid, jobid = self.pending_tasks.pop() try: result = self._get_result(taskid) - except Exception: - notrun.append( - self._clean_queue(jobid, graph)) + except Exception as exc: + notrun.append(self._clean_queue(jobid, graph)) + errors.append(exc) else: if result: - if result['traceback']: + if result["traceback"]: notrun.append( - self._clean_queue(jobid, graph, result=result)) + self._clean_queue(jobid, graph, result=result) + ) + errors.append("".join(result["traceback"])) else: self._task_finished_cb(jobid) self._remove_node_dirs() self._clear_task(taskid) else: - assert self.proc_done[jobid] and \ - self.proc_pending[jobid] + assert self.proc_done[jobid] and self.proc_pending[jobid] toappend.insert(0, (taskid, jobid)) if toappend: self.pending_tasks.extend(toappend) num_jobs = len(self.pending_tasks) - presub_stats = (num_jobs, - np.sum(self.proc_done & self.proc_pending)) + presub_stats = (num_jobs, np.sum(self.proc_done & self.proc_pending)) display_stats = display_stats 
or presub_stats != old_presub_stats if display_stats: - logger.debug('Tasks currently running: %d. Pending: %d.', - *presub_stats) + logger.debug("Tasks currently running: %d. Pending: %d.", *presub_stats) old_presub_stats = presub_stats if num_jobs < self.max_jobs: self._send_procs_to_workers(updatehash=updatehash, graph=graph) elif display_stats: - logger.debug('Not submitting (max jobs reached)') + logger.debug("Not submitting (max jobs reached)") sleep_til = loop_start + poll_sleep_secs sleep(max(0, sleep_til - time())) @@ -193,6 +208,20 @@ def run(self, graph, config, updatehash=False): # close any open resources self._postrun_check() + if errors: + # If one or more nodes failed, re-raise the first of them + error, cause = errors[0], None + if isinstance(error, str): + error = RuntimeError(error) + + if len(errors) > 1: + error, cause = ( + RuntimeError(f"{len(errors)} raised. Re-raising first."), + error, + ) + + raise error from cause + def _get_result(self, taskid): raise NotImplementedError @@ -202,8 +231,8 @@ def _submit_job(self, node, updatehash=False): def _report_crash(self, node, result=None): tb = None if result is not None: - node._result = result['result'] - tb = result['traceback'] + node._result = result["result"] + tb = result["traceback"] node._traceback = tb return report_crash(node, traceback=tb) @@ -211,17 +240,19 @@ def _clear_task(self, taskid): raise NotImplementedError def _clean_queue(self, jobid, graph, result=None): - logger.debug('Clearing %d from queue', jobid) + logger.debug("Clearing %d from queue", jobid) if self._status_callback: - self._status_callback(self.procs[jobid], 'exception') + self._status_callback(self.procs[jobid], "exception") if result is None: - result = {'result': None, - 'traceback': '\n'.join(format_exception(*sys.exc_info()))} + result = { + "result": None, + "traceback": "\n".join(format_exception(*sys.exc_info())), + } crashfile = self._report_crash(self.procs[jobid], result=result) - if str2bool(self._config['execution']['stop_on_first_crash']): - raise RuntimeError("".join(result['traceback'])) + if str2bool(self._config["execution"]["stop_on_first_crash"]): + raise RuntimeError("".join(result["traceback"])) if jobid in self.mapnodesubids: # remove current jobid self.proc_pending[jobid] = False @@ -235,29 +266,31 @@ def _clean_queue(self, jobid, graph, result=None): def _submit_mapnode(self, jobid): import scipy.sparse as ssp + if jobid in self.mapnodes: return True self.mapnodes.append(jobid) mapnodesubids = self.procs[jobid].get_subnodes() numnodes = len(mapnodesubids) - logger.debug('Adding %d jobs for mapnode %s', numnodes, - self.procs[jobid]) + logger.debug("Adding %d jobs for mapnode %s", numnodes, self.procs[jobid]) for i in range(numnodes): self.mapnodesubids[self.depidx.shape[0] + i] = jobid self.procs.extend(mapnodesubids) self.depidx = ssp.vstack( - (self.depidx, - ssp.lil_matrix(np.zeros( - (numnodes, self.depidx.shape[1])))), 'lil') + (self.depidx, ssp.lil_matrix(np.zeros((numnodes, self.depidx.shape[1])))), + "lil", + ) self.depidx = ssp.hstack( - (self.depidx, - ssp.lil_matrix(np.zeros( - (self.depidx.shape[0], numnodes)))), 'lil') + (self.depidx, ssp.lil_matrix(np.zeros((self.depidx.shape[0], numnodes)))), + "lil", + ) self.depidx[-numnodes:, jobid] = 1 - self.proc_done = np.concatenate((self.proc_done, - np.zeros(numnodes, dtype=bool))) - self.proc_pending = np.concatenate((self.proc_pending, - np.zeros(numnodes, dtype=bool))) + self.proc_done = np.concatenate( + (self.proc_done, np.zeros(numnodes, dtype=bool)) + ) +
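# Editor's sketch (not part of the patch): the failure-handling pattern that
# the new `errors` list enables in run() above. Failures are collected while
# the loop keeps draining pending tasks; once the loop ends, the first error
# is re-raised, chained behind a summary RuntimeError when several nodes
# failed. The demo_reraise name and the sample errors are illustrative only.
def demo_reraise(errors):
    if errors:
        error, cause = errors[0], None
        if isinstance(error, str):  # tracebacks arrive as plain strings
            error = RuntimeError(error)
        if len(errors) > 1:
            error, cause = (
                RuntimeError(f"{len(errors)} raised. Re-raising first."),
                error,
            )
        raise error from cause

try:
    demo_reraise([ValueError("node A failed"), "Traceback: node B failed"])
except RuntimeError as exc:
    assert isinstance(exc.__cause__, ValueError)  # original failure preserved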
self.proc_pending = np.concatenate( + (self.proc_pending, np.zeros(numnodes, dtype=bool)) + ) return False def _send_procs_to_workers(self, updatehash=False, graph=None): @@ -271,7 +304,7 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): slots = None else: slots = max(0, self.max_jobs - num_jobs) - logger.debug('Slots available: %s', slots) + logger.debug("Slots available: %s", slots) if (num_jobs >= self.max_jobs) or (slots == 0): break @@ -281,8 +314,12 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): if len(jobids) > 0: # send all available jobs - logger.info('Pending[%d] Submitting[%d] jobs Slots[%s]', - num_jobs, len(jobids[:slots]), slots or 'inf') + logger.info( + "Pending[%d] Submitting[%d] jobs Slots[%s]", + num_jobs, + len(jobids[:slots]), + slots or "inf", + ) for jobid in jobids[:slots]: if isinstance(self.procs[jobid], MapNode): @@ -300,15 +337,15 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): self.proc_done[jobid] = True self.proc_pending[jobid] = True # Send job to task manager and add to pending tasks - logger.info('Submitting: %s ID: %d', - self.procs[jobid], jobid) + logger.info("Submitting: %s ID: %d", self.procs[jobid], jobid) if self._status_callback: - self._status_callback(self.procs[jobid], 'start') + self._status_callback(self.procs[jobid], "start") if not self._local_hash_check(jobid, graph): if self.procs[jobid].run_without_submitting: - logger.debug('Running node %s on master thread', - self.procs[jobid]) + logger.debug( + "Running node %s on master thread", self.procs[jobid] + ) try: self.procs[jobid].run() except Exception: @@ -317,69 +354,84 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): self._remove_node_dirs() else: tid = self._submit_job( - deepcopy(self.procs[jobid]), - updatehash=updatehash) + deepcopy(self.procs[jobid]), updatehash=updatehash + ) if tid is None: self.proc_done[jobid] = False self.proc_pending[jobid] = False else: self.pending_tasks.insert(0, (tid, jobid)) - logger.info('Finished submitting: %s ID: %d', - self.procs[jobid], jobid) + logger.info( + "Finished submitting: %s ID: %d", self.procs[jobid], jobid + ) else: break def _local_hash_check(self, jobid, graph): - if not str2bool( - self.procs[jobid].config['execution']['local_hash_check']): + if not str2bool(self.procs[jobid].config["execution"]["local_hash_check"]): return False try: cached, updated = self.procs[jobid].is_cached() except Exception: logger.warning( - 'Error while checking node hash, forcing re-run. ' - 'Although this error may not prevent the workflow from running, ' - 'it could indicate a major problem. Please report a new issue ' - 'at https://github.com/nipy/nipype/issues adding the following ' - 'information:\n\n\tNode: %s\n\tInterface: %s.%s\n\tTraceback:\n%s', + "Error while checking node hash, forcing re-run. " + "Although this error may not prevent the workflow from running, " + "it could indicate a major problem. 
Please report a new issue " + "at https://github.com/nipy/nipype/issues adding the following " + "information:\n\n\tNode: %s\n\tInterface: %s.%s\n\tTraceback:\n%s", self.procs[jobid], self.procs[jobid].interface.__module__, self.procs[jobid].interface.__class__.__name__, - '\n'.join(format_exception(*sys.exc_info())) + "\n".join(format_exception(*sys.exc_info())), ) return False - logger.debug('Checking hash "%s" locally: cached=%s, updated=%s.', - self.procs[jobid], cached, updated) + logger.debug( + 'Checking hash "%s" locally: cached=%s, updated=%s.', + self.procs[jobid], + cached, + updated, + ) overwrite = self.procs[jobid].overwrite always_run = self.procs[jobid].interface.always_run - if cached and updated and (overwrite is False or - overwrite is None and not always_run): - logger.debug('Skipping cached node %s with ID %s.', - self.procs[jobid], jobid) + if ( + cached + and updated + and (overwrite is False or (overwrite is None and not always_run)) + ): + logger.debug( + "Skipping cached node %s with ID %s.", self.procs[jobid], jobid + ) try: self._task_finished_cb(jobid, cached=True) self._remove_node_dirs() except Exception: - logger.debug('Error skipping cached node %s (%s).\n\n%s', - self.procs[jobid], jobid, - '\n'.join(format_exception(*sys.exc_info()))) + logger.debug( + "Error skipping cached node %s (%s).\n\n%s", + self.procs[jobid], + jobid, + "\n".join(format_exception(*sys.exc_info())), + ) self._clean_queue(jobid, graph) self.proc_pending[jobid] = False return True return False def _task_finished_cb(self, jobid, cached=False): - """ Extract outputs and assign to inputs of dependent tasks + """Extract outputs and assign to inputs of dependent tasks This is called when a job is completed. """ - logger.info('[Job %d] %s (%s).', jobid, 'Cached' - if cached else 'Completed', self.procs[jobid]) + logger.info( + "[Job %d] %s (%s).", + jobid, + "Cached" if cached else "Completed", + self.procs[jobid], + ) if self._status_callback: - self._status_callback(self.procs[jobid], 'end') + self._status_callback(self.procs[jobid], "end") # Update job and worker queues self.proc_pending[jobid] = False # update the job dependency structure @@ -389,39 +441,30 @@ def _task_finished_cb(self, jobid, cached=False): self.refidx[self.refidx[:, jobid].nonzero()[0], jobid] = 0 def _generate_dependency_list(self, graph): - """ Generates a dependency list for a list of graphs. 
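# Editor's sketch (not part of the patch): the decision implemented by the
# reformatted condition in _local_hash_check above, pulled out as a standalone
# predicate. A cached, up-to-date node is skipped unless the user forced a
# re-run (overwrite=True) or the interface is always_run with overwrite unset.
# The function name is hypothetical.
def should_skip(cached, updated, overwrite, always_run):
    return (
        cached
        and updated
        and (overwrite is False or (overwrite is None and not always_run))
    )

assert should_skip(True, True, None, False) is True    # normal cache hit
assert should_skip(True, True, None, True) is False    # always_run wins
assert should_skip(True, True, True, False) is False   # explicit re-run
assert should_skip(True, False, None, False) is False  # stale hash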
- """ - import networkx as nx - + """Generates a dependency list for a list of graphs.""" self.procs, _ = topological_sort(graph) - try: - self.depidx = nx.to_scipy_sparse_matrix( - graph, nodelist=self.procs, format='lil') - except: - self.depidx = nx.to_scipy_sparse_matrix(graph, nodelist=self.procs) - self.refidx = deepcopy(self.depidx) - self.refidx.astype = np.int + self.depidx = _graph_to_lil_matrix(graph, nodelist=self.procs) + self.refidx = self.depidx.astype(int) self.proc_done = np.zeros(len(self.procs), dtype=bool) self.proc_pending = np.zeros(len(self.procs), dtype=bool) def _remove_node_deps(self, jobid, crashfile, graph): import networkx as nx + try: dfs_preorder = nx.dfs_preorder except AttributeError: dfs_preorder = nx.dfs_preorder_nodes - subnodes = [s for s in dfs_preorder(graph, self.procs[jobid])] + subnodes = list(dfs_preorder(graph, self.procs[jobid])) for node in subnodes: idx = self.procs.index(node) self.proc_done[idx] = True self.proc_pending[idx] = False - return dict( - node=self.procs[jobid], dependents=subnodes, crashfile=crashfile) + return dict(node=self.procs[jobid], dependents=subnodes, crashfile=crashfile) def _remove_node_dirs(self): - """Removes directories whose outputs have already been used up - """ - if str2bool(self._config['execution']['remove_node_directories']): + """Removes directories whose outputs have already been used up""" + if str2bool(self._config["execution"]["remove_node_directories"]): indices = np.nonzero((self.refidx.sum(axis=1) == 0).__array__())[0] for idx in indices: if idx in self.mapnodesubids: @@ -429,43 +472,44 @@ def _remove_node_dirs(self): if self.proc_done[idx] and (not self.proc_pending[idx]): self.refidx[idx, idx] = -1 outdir = self.procs[idx].output_dir() - logger.info(('[node dependencies finished] ' - 'removing node: %s from directory %s') % - (self.procs[idx]._id, outdir)) + logger.info( + ( + "[node dependencies finished] " + "removing node: %s from directory %s" + ) + % (self.procs[idx]._id, outdir) + ) shutil.rmtree(outdir) class SGELikeBatchManagerBase(DistributedPluginBase): - """Execute workflow with SGE/OGE/PBS like batch system - """ + """Execute workflow with SGE/OGE/PBS like batch system""" def __init__(self, template, plugin_args=None): - super(SGELikeBatchManagerBase, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) self._template = template self._qsub_args = None if plugin_args: - if 'template' in plugin_args: - self._template = plugin_args['template'] + if "template" in plugin_args: + self._template = plugin_args["template"] if os.path.isfile(self._template): with open(self._template) as tpl_file: self._template = tpl_file.read() - if 'qsub_args' in plugin_args: - self._qsub_args = plugin_args['qsub_args'] + if "qsub_args" in plugin_args: + self._qsub_args = plugin_args["qsub_args"] self._pending = {} def _is_pending(self, taskid): - """Check if a task is pending in the batch system - """ + """Check if a task is pending in the batch system""" raise NotImplementedError def _submit_batchtask(self, scriptfile, node): - """Submit a task to the batch system - """ + """Submit a task to the batch system""" raise NotImplementedError def _get_result(self, taskid): if taskid not in self._pending: - raise Exception('Task %d not found' % taskid) + raise Exception("Task %d not found" % taskid) if self._is_pending(taskid): return None node_dir = self._pending[taskid] @@ -475,58 +519,55 @@ def _get_result(self, taskid): # is a disconnect when the queueing engine knows a job is # 
finished to when the directories become statable. t = time() - timeout = float(self._config['execution']['job_finished_timeout']) + timeout = float(self._config["execution"]["job_finished_timeout"]) timed_out = True while (time() - t) < timeout: try: - glob(os.path.join(node_dir, 'result_*.pklz')).pop() + glob(os.path.join(node_dir, "result_*.pklz")).pop() timed_out = False break except Exception as e: logger.debug(e) sleep(2) if timed_out: - result_data = { - 'hostname': 'unknown', - 'result': None, - 'traceback': None - } + result_data = {"hostname": "unknown", "result": None, "traceback": None} results_file = None try: - error_message = ('Job id ({0}) finished or terminated, but ' - 'results file does not exist after ({1}) ' - 'seconds. Batch dir contains crashdump file ' - 'if node raised an exception.\n' - 'Node working directory: ({2}) '.format( - taskid, timeout, node_dir)) - raise IOError(error_message) - except IOError as e: - result_data['traceback'] = '\n'.join(format_exception(*sys.exc_info())) + error_message = ( + "Job id ({}) finished or terminated, but " + "results file does not exist after ({}) " + "seconds. Batch dir contains crashdump file " + "if node raised an exception.\n" + "Node working directory: ({}) ".format(taskid, timeout, node_dir) + ) + raise OSError(error_message) + except OSError: + result_data["traceback"] = "\n".join(format_exception(*sys.exc_info())) else: - results_file = glob(os.path.join(node_dir, 'result_*.pklz'))[0] - result_data = loadpkl(results_file) + results_file = glob(os.path.join(node_dir, "result_*.pklz"))[0] + result_data = load_resultfile(results_file) result_out = dict(result=None, traceback=None) if isinstance(result_data, dict): - result_out['result'] = result_data['result'] - result_out['traceback'] = result_data['traceback'] - result_out['hostname'] = result_data['hostname'] + result_out["result"] = result_data["result"] + result_out["traceback"] = result_data["traceback"] + result_out["hostname"] = result_data["hostname"] if results_file: - crash_file = os.path.join(node_dir, 'crashstore.pklz') + crash_file = os.path.join(node_dir, "crashstore.pklz") os.rename(results_file, crash_file) else: - result_out['result'] = result_data + result_out["result"] = result_data return result_out def _submit_job(self, node, updatehash=False): - """submit job and return taskid - """ + """submit job and return taskid""" pyscript = create_pyscript(node, updatehash=updatehash) batch_dir, name = os.path.split(pyscript) - name = '.'.join(name.split('.')[:-1]) - batchscript = '\n'.join((self._template, '%s %s' % (sys.executable, - pyscript))) - batchscriptfile = os.path.join(batch_dir, 'batchscript_%s.sh' % name) - with open(batchscriptfile, 'wt') as fp: + name = ".".join(name.split(".")[:-1]) + batchscript = "\n".join( + (self._template.rstrip("\n"), f"{sys.executable} {pyscript}") + ) + batchscriptfile = os.path.join(batch_dir, "batchscript_%s.sh" % name) + with open(batchscriptfile, "w") as fp: fp.writelines(batchscript) return self._submit_batchtask(batchscriptfile, node) @@ -535,29 +576,28 @@ def _clear_task(self, taskid): class GraphPluginBase(PluginBase): - """Base class for plugins that distribute graphs to workflows - """ + """Base class for plugins that distribute graphs to workflows""" def __init__(self, plugin_args=None): - if plugin_args and plugin_args.get('status_callback'): - logger.warning('status_callback not supported for Graph submission' - ' plugins') - super(GraphPluginBase, self).__init__(plugin_args=plugin_args) + if plugin_args 
and plugin_args.get("status_callback"): + logger.warning("status_callback not supported for Graph submission plugins") + super().__init__(plugin_args=plugin_args) def run(self, graph, config, updatehash=False): import networkx as nx + pyfiles = [] dependencies = {} self._config = config nodes = list(nx.topological_sort(graph)) - logger.debug('Creating executable python files for each node') + logger.debug("Creating executable python files for each node") for idx, node in enumerate(nodes): pyfiles.append( - create_pyscript( - node, updatehash=updatehash, store_exception=False)) + create_pyscript(node, updatehash=updatehash, store_exception=False) + ) dependencies[idx] = [ - nodes.index(prevnode) - for prevnode in list(graph.predecessors(node))] + nodes.index(prevnode) for prevnode in list(graph.predecessors(node)) + ] self._submit_graph(pyfiles, dependencies, nodes) def _get_args(self, node, keywords): @@ -567,52 +607,52 @@ def _get_args(self, node, keywords): if keyword == "template" and os.path.isfile(value): with open(value) as f: value = f.read() - if (hasattr(node, "plugin_args") - and isinstance(node.plugin_args, dict) - and keyword in node.plugin_args): - if (keyword == "template" - and os.path.isfile(node.plugin_args[keyword])): + if ( + hasattr(node, "plugin_args") + and isinstance(node.plugin_args, dict) + and keyword in node.plugin_args + ): + if keyword == "template" and os.path.isfile(node.plugin_args[keyword]): with open(node.plugin_args[keyword]) as f: tmp_value = f.read() else: tmp_value = node.plugin_args[keyword] - if ('overwrite' in node.plugin_args - and node.plugin_args['overwrite']): + if node.plugin_args.get("overwrite"): value = tmp_value else: value += tmp_value - values += (value, ) + values += (value,) return values def _submit_graph(self, pyfiles, dependencies, nodes): """ pyfiles: list of files corresponding to a topological sort - dependencies: dictionary of dependencies based on the toplogical sort + dependencies: dictionary of dependencies based on the topological sort """ raise NotImplementedError def _get_result(self, taskid): if taskid not in self._pending: - raise Exception('Task %d not found' % taskid) + raise Exception("Task %d not found" % taskid) if self._is_pending(taskid): return None node_dir = self._pending[taskid] - glob(os.path.join(node_dir, 'result_*.pklz')).pop() + glob(os.path.join(node_dir, "result_*.pklz")).pop() - results_file = glob(os.path.join(node_dir, 'result_*.pklz'))[0] - result_data = loadpkl(results_file) + results_file = glob(os.path.join(node_dir, "result_*.pklz"))[0] + result_data = load_resultfile(results_file) result_out = dict(result=None, traceback=None) if isinstance(result_data, dict): - result_out['result'] = result_data['result'] - result_out['traceback'] = result_data['traceback'] - result_out['hostname'] = result_data['hostname'] + result_out["result"] = result_data["result"] + result_out["traceback"] = result_data["traceback"] + result_out["hostname"] = result_data["hostname"] if results_file: - crash_file = os.path.join(node_dir, 'crashstore.pklz') + crash_file = os.path.join(node_dir, "crashstore.pklz") os.rename(results_file, crash_file) else: - result_out['result'] = result_data + result_out["result"] = result_data return result_out diff --git a/nipype/pipeline/plugins/condor.py b/nipype/pipeline/plugins/condor.py index 9f5ca632e5..0fff477377 100644 --- a/nipype/pipeline/plugins/condor.py +++ b/nipype/pipeline/plugins/condor.py @@ -1,8 +1,5 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via Condor 
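# Editor's sketch (not part of the patch): the polling idiom shared by the
# _get_result implementations above. Batch plugins wait for a result_*.pklz
# file to appear in the node directory (there can be filesystem lag after the
# queue reports a job finished) before loading it. The timeout and poll
# interval below are illustrative; nipype reads its own values from config.
import os
from glob import glob
from time import sleep, time

def wait_for_result(node_dir, timeout=5.0, poll=0.5):
    """Return the path of the result file, or None if the wait timed out."""
    deadline = time() + timeout
    while time() < deadline:
        hits = glob(os.path.join(node_dir, "result_*.pklz"))
        if hits:
            return hits[0]
        sleep(poll)
    return None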
""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os from time import sleep @@ -10,7 +7,8 @@ from ...interfaces.base import CommandLine from ... import logging from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") class CondorPlugin(SGELikeBatchManagerBase): @@ -40,59 +38,57 @@ def __init__(self, **kwargs): """ self._retry_timeout = 2 self._max_tries = 2 - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - super(CondorPlugin, self).__init__(template, **kwargs) + if kwargs.get("plugin_args"): + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + super().__init__(template, **kwargs) def _is_pending(self, taskid): cmd = CommandLine( - 'condor_q', resource_monitor=False, terminal_output='allatonce') - cmd.inputs.args = '%d' % taskid + "condor_q", resource_monitor=False, terminal_output="allatonce" + ) + cmd.inputs.args = "%d" % taskid # check condor cluster oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) result = cmd.run(ignore_exception=True) iflogger.setLevel(oldlevel) - if result.runtime.stdout.count('\n%d' % taskid): - return True - return False + return bool(result.runtime.stdout.count("\n%d" % taskid)) def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( - 'condor_qsub', + "condor_qsub", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') + terminal_output="allatonce", + ) path = os.path.dirname(scriptfile) - qsubargs = '' + qsubargs = "" if self._qsub_args: qsubargs = self._qsub_args - if 'qsub_args' in node.plugin_args: - if 'overwrite' in node.plugin_args and\ - node.plugin_args['overwrite']: - qsubargs = node.plugin_args['qsub_args'] + if "qsub_args" in node.plugin_args: + if node.plugin_args.get("overwrite"): + qsubargs = node.plugin_args["qsub_args"] else: - qsubargs += (" " + node.plugin_args['qsub_args']) + qsubargs += " " + node.plugin_args["qsub_args"] if self._qsub_args: qsubargs = self._qsub_args - if '-o' not in qsubargs: - qsubargs = '%s -o %s' % (qsubargs, path) - if '-e' not in qsubargs: - qsubargs = '%s -e %s' % (qsubargs, path) + if "-o" not in qsubargs: + qsubargs = f"{qsubargs} -o {path}" + if "-e" not in qsubargs: + qsubargs = f"{qsubargs} -e {path}" if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname = '.'.join(jobnameitems) - cmd.inputs.args = '%s -N %s %s' % (qsubargs, jobname, scriptfile) + jobname = ".".join(jobnameitems) + cmd.inputs.args = f"{qsubargs} -N {jobname} {scriptfile}" oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: @@ -103,16 +99,20 @@ def 
_submit_batchtask(self, scriptfile, node): sleep(self._retry_timeout) # sleep 2 seconds and try again else: iflogger.setLevel(oldlevel) - raise RuntimeError('\n'.join((('Could not submit condor ' - 'cluster' - ' for node %s') % node._id, - str(e)))) + raise RuntimeError( + "\n".join( + ( + "Could not submit condor cluster for node %s" + % node._id, + str(e), + ) + ) + ) else: break iflogger.setLevel(oldlevel) # retrieve condor clusterid - taskid = int(result.runtime.stdout.split(' ')[2]) + taskid = int(result.runtime.stdout.split(" ")[2]) self._pending[taskid] = node.output_dir() - logger.debug('submitted condor cluster: %d for node %s' % (taskid, - node._id)) + logger.debug("submitted condor cluster: %d for node %s" % (taskid, node._id)) return taskid diff --git a/nipype/pipeline/plugins/dagman.py b/nipype/pipeline/plugins/dagman.py index 28b766f2ea..55f3f03bee 100644 --- a/nipype/pipeline/plugins/dagman.py +++ b/nipype/pipeline/plugins/dagman.py @@ -1,9 +1,5 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via Condor DAGMan """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open import os import sys @@ -69,111 +65,129 @@ def _get_str_or_file(self, arg): # actually have to run. would be good to be able to decide whether they # actually have to be scheduled (i.e. output already exist). def __init__(self, **kwargs): - for var, id_, val in \ - (('_template', 'submit_template', self.default_submit_template), - ('_initial_specs', 'template', ''), - ('_initial_specs', 'initial_specs', ''), - ('_override_specs', 'submit_specs', ''), - ('_override_specs', 'override_specs', ''), - ('_wrapper_cmd', 'wrapper_cmd', None), - ('_wrapper_args', 'wrapper_args', ''), - ('_block', 'block', False), - ('_dagman_args', 'dagman_args', '')): - if 'plugin_args' in kwargs \ - and not kwargs['plugin_args'] is None \ - and id_ in kwargs['plugin_args']: - if id_ == 'wrapper_cmd': - val = os.path.abspath(kwargs['plugin_args'][id_]) - elif id_ == 'block': - val = kwargs['plugin_args'][id_] + for var, id_, val in ( + ("_template", "submit_template", self.default_submit_template), + ("_initial_specs", "template", ""), + ("_initial_specs", "initial_specs", ""), + ("_override_specs", "submit_specs", ""), + ("_override_specs", "override_specs", ""), + ("_wrapper_cmd", "wrapper_cmd", None), + ("_wrapper_args", "wrapper_args", ""), + ("_block", "block", False), + ("_dagman_args", "dagman_args", ""), + ): + if ( + "plugin_args" in kwargs + and kwargs["plugin_args"] is not None + and id_ in kwargs["plugin_args"] + ): + if id_ == "wrapper_cmd": + val = os.path.abspath(kwargs["plugin_args"][id_]) + elif id_ == "block": + val = kwargs["plugin_args"][id_] else: - val = self._get_str_or_file(kwargs['plugin_args'][id_]) + val = self._get_str_or_file(kwargs["plugin_args"][id_]) setattr(self, var, val) # TODO remove after some time - if 'plugin_args' in kwargs \ - and not kwargs['plugin_args'] is None: - plugin_args = kwargs['plugin_args'] - if 'template' in plugin_args: + if "plugin_args" in kwargs and kwargs["plugin_args"] is not None: + plugin_args = kwargs["plugin_args"] + if "template" in plugin_args: warn( "the 'template' argument is deprecated, use 'initial_specs' instead" ) - if 'submit_specs' in plugin_args: + if "submit_specs" in plugin_args: warn( "the 'submit_specs' argument is deprecated, use 'override_specs' instead" ) - super(CondorDAGManPlugin, self).__init__(**kwargs) + super().__init__(**kwargs) def _submit_graph(self, pyfiles, dependencies, 
nodes): # location of all scripts, place dagman output in here too batch_dir, _ = os.path.split(pyfiles[0]) # DAG description filename - dagfilename = os.path.join(batch_dir, 'workflow-%s.dag' % uuid.uuid4()) - with open(dagfilename, 'wt') as dagfileptr: + dagfilename = os.path.join(batch_dir, "workflow-%s.dag" % uuid.uuid4()) + with open(dagfilename, "w") as dagfileptr: # loop over all scripts, create submit files, and define them # as jobs in the DAG for idx, pyscript in enumerate(pyfiles): node = nodes[idx] # XXX redundant with previous value? or could it change between # scripts? - template, initial_specs, override_specs, wrapper_cmd, wrapper_args = \ - self._get_args(node, - ["template", "initial_specs", - "override_specs", "wrapper_cmd", - "wrapper_args"]) + ( + template, + initial_specs, + override_specs, + wrapper_cmd, + wrapper_args, + ) = self._get_args( + node, + [ + "template", + "initial_specs", + "override_specs", + "wrapper_cmd", + "wrapper_args", + ], + ) # add required slots to the template - template = '%s\n%s\n%s\nqueue\n' % ('%(initial_specs)s', - template, - '%(override_specs)s') + template = "{}\n{}\n{}\nqueue\n".format( + "%(initial_specs)s", + template, + "%(override_specs)s", + ) batch_dir, name = os.path.split(pyscript) - name = '.'.join(name.split('.')[:-1]) + name = ".".join(name.split(".")[:-1]) specs = dict( # TODO make parameter for this, initial_specs=initial_specs, executable=sys.executable, nodescript=pyscript, basename=os.path.join(batch_dir, name), - override_specs=override_specs) + override_specs=override_specs, + ) if wrapper_cmd is not None: - specs['executable'] = wrapper_cmd - specs['nodescript'] = \ - '%s %s %s' % (wrapper_args % specs, # give access to variables - sys.executable, - pyscript) + specs["executable"] = wrapper_cmd + specs["nodescript"] = "{} {} {}".format( + wrapper_args % specs, # give access to variables + sys.executable, + pyscript, + ) submitspec = template % specs # write submit spec for this job - submitfile = os.path.join(batch_dir, '%s.submit' % name) - with open(submitfile, 'wt') as submitfileprt: + submitfile = os.path.join(batch_dir, "%s.submit" % name) + with open(submitfile, "w") as submitfileprt: submitfileprt.writelines(submitspec) submitfileprt.close() # define job in DAG - dagfileptr.write('JOB %i %s\n' % (idx, submitfile)) + dagfileptr.write("JOB %i %s\n" % (idx, submitfile)) # define dependencies in DAG for child in dependencies: parents = dependencies[child] if len(parents): - dagfileptr.write('PARENT %s CHILD %i\n' % - (' '.join([str(i) for i in parents]), - child)) + dagfileptr.write( + "PARENT %s CHILD %i\n" + % (" ".join([str(i) for i in parents]), child) + ) # hand over DAG to condor_dagman cmd = CommandLine( - 'condor_submit_dag', + "condor_submit_dag", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') + terminal_output="allatonce", + ) # needs -update_submit or re-running a workflow will fail - cmd.inputs.args = '%s -update_submit %s' % (self._dagman_args, - dagfilename) + cmd.inputs.args = f"{self._dagman_args} -update_submit {dagfilename}" cmd.run() - logger.info('submitted all jobs to Condor DAGMan') + logger.info("submitted all jobs to Condor DAGMan") if self._block: # wait for DAGMan to settle down, no time wasted it is already running time.sleep(10) - if not os.path.exists('%s.condor.sub' % dagfilename): - raise EnvironmentError( + if not os.path.exists("%s.condor.sub" % dagfilename): + raise OSError( "DAGMan did not create its submit file, please check the logs" ) # wait 
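# Editor's sketch (not part of the patch): the shape of the DAG description
# that _submit_graph above writes, one JOB line per submit file plus PARENT
# lines derived from the dependencies dict (child index -> parent indices).
# The file names and dependency table here are illustrative only.
dependencies = {0: [], 1: [0], 2: [0, 1]}
submitfiles = ["node0.submit", "node1.submit", "node2.submit"]

lines = ["JOB %i %s" % (idx, sub) for idx, sub in enumerate(submitfiles)]
for child, parents in dependencies.items():
    if parents:
        lines.append("PARENT %s CHILD %i" % (" ".join(str(i) for i in parents), child))

print("\n".join(lines))
# JOB 0 node0.submit
# JOB 1 node1.submit
# JOB 2 node2.submit
# PARENT 0 CHILD 1
# PARENT 0 1 CHILD 2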
for completion - logger.info('waiting for DAGMan to finish') - lockfilename = '%s.lock' % dagfilename + logger.info("waiting for DAGMan to finish") + lockfilename = "%s.lock" % dagfilename while os.path.exists(lockfilename): time.sleep(5) diff --git a/nipype/pipeline/plugins/debug.py b/nipype/pipeline/plugins/debug.py index 9921bb9cf4..1dac35cf8f 100644 --- a/nipype/pipeline/plugins/debug.py +++ b/nipype/pipeline/plugins/debug.py @@ -1,26 +1,25 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Debug plugin """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import networkx as nx from .base import PluginBase, logger class DebugPlugin(PluginBase): - """Execute workflow in series - """ + """Execute workflow in series""" def __init__(self, plugin_args=None): - super(DebugPlugin, self).__init__(plugin_args=plugin_args) - if plugin_args and "callable" in plugin_args and \ - hasattr(plugin_args['callable'], '__call__'): - self._callable = plugin_args['callable'] + super().__init__(plugin_args=plugin_args) + if ( + plugin_args + and "callable" in plugin_args + and callable(plugin_args["callable"]) + ): + self._callable = plugin_args["callable"] else: - raise ValueError('plugin_args must contain a callable function') + raise ValueError("plugin_args must contain a callable function") def run(self, graph, config, updatehash=False): """Executes a pre-defined pipeline in a serial order. @@ -33,7 +32,7 @@ def run(self, graph, config, updatehash=False): """ if not isinstance(graph, nx.DiGraph): - raise ValueError('Input must be a networkx digraph object') + raise ValueError("Input must be a networkx digraph object") logger.info("Executing debug plugin") for node in nx.topological_sort(graph): self._callable(node, graph) diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py index aa20f935c1..f52b3e6282 100644 --- a/nipype/pipeline/plugins/ipython.py +++ b/nipype/pipeline/plugins/ipython.py @@ -1,19 +1,11 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via IPython controller """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from future import standard_library -standard_library.install_aliases() -from future.utils import raise_from - from pickle import dumps import sys -from .base import (DistributedPluginBase, logger, report_crash) +from .base import DistributedPluginBase, logger, report_crash IPython_not_loaded = False try: @@ -27,19 +19,23 @@ def execute_task(pckld_task, node_config, updatehash): from socket import gethostname from traceback import format_exc from nipype import config, logging + traceback = None result = None import os + cwd = os.getcwd() try: config.update_config(node_config) logging.update_logging(config) from pickle import loads + task = loads(pckld_task) result = task.run(updatehash=updatehash) except: traceback = format_exc() from pickle import loads + task = loads(pckld_task) result = task.result os.chdir(cwd) @@ -47,19 +43,28 @@ def execute_task(pckld_task, node_config, updatehash): class IPythonPlugin(DistributedPluginBase): - """Execute workflow with ipython - """ + """Execute workflow with ipython""" def __init__(self, plugin_args=None): if IPython_not_loaded: - raise ImportError('Please install ipyparallel to use this plugin.') - 
super(IPythonPlugin, self).__init__(plugin_args=plugin_args) - valid_args = ('url_file', 'profile', 'cluster_id', 'context', 'debug', - 'timeout', 'config', 'username', 'sshserver', 'sshkey', - 'password', 'paramiko') + raise ImportError("Please install ipyparallel to use this plugin.") + super().__init__(plugin_args=plugin_args) + valid_args = ( + "url_file", + "profile", + "cluster_id", + "context", + "debug", + "timeout", + "config", + "username", + "sshserver", + "sshkey", + "password", + "paramiko", + ) self.client_args = { - arg: plugin_args[arg] - for arg in valid_args if arg in plugin_args + arg: plugin_args[arg] for arg in valid_args if arg in plugin_args } self.iparallel = None self.taskclient = None @@ -72,38 +77,35 @@ def run(self, graph, config, updatehash=False): """ # retrieve clients again try: - name = 'ipyparallel' + name = "ipyparallel" __import__(name) self.iparallel = sys.modules[name] except ImportError as e: - raise_from( - ImportError("ipyparallel not found. Parallel execution " - "will be unavailable"), e) + raise ImportError( + "ipyparallel not found. Parallel execution will be unavailable" + ) from e try: self.taskclient = self.iparallel.Client(**self.client_args) except Exception as e: if isinstance(e, TimeoutError): - raise_from(Exception("No IPython clients found."), e) + raise Exception("No IPython clients found.") from e if isinstance(e, IOError): - raise_from( - Exception("ipcluster/ipcontroller has not been started"), - e) + raise Exception("ipcluster/ipcontroller has not been started") from e if isinstance(e, ValueError): - raise_from(Exception("Ipython kernel not installed"), e) + raise Exception("Ipython kernel not installed") from e else: raise e - return super(IPythonPlugin, self).run( - graph, config, updatehash=updatehash) + return super().run(graph, config, updatehash=updatehash) def _get_result(self, taskid): if taskid not in self.taskmap: - raise ValueError('Task %d not in pending list' % taskid) + raise ValueError("Task %d not in pending list" % taskid) if self.taskmap[taskid].ready(): result, traceback, hostname = self.taskmap[taskid].get() result_out = dict(result=None, traceback=None) - result_out['result'] = result - result_out['traceback'] = traceback - result_out['hostname'] = hostname + result_out["result"] = result + result_out["traceback"] = traceback + result_out["hostname"] = hostname return result_out else: return None @@ -111,21 +113,22 @@ def _get_result(self, taskid): def _submit_job(self, node, updatehash=False): pckld_node = dumps(node, 2) result_object = self.taskclient.load_balanced_view().apply( - execute_task, pckld_node, node.config, updatehash) + execute_task, pckld_node, node.config, updatehash + ) self._taskid += 1 self.taskmap[self._taskid] = result_object return self._taskid def _report_crash(self, node, result=None): - if result and result['traceback']: - node._result = result['result'] - node._traceback = result['traceback'] - return report_crash(node, traceback=result['traceback']) + if result and result["traceback"]: + node._result = result["result"] + node._traceback = result["traceback"] + return report_crash(node, traceback=result["traceback"]) else: return report_crash(node) def _clear_task(self, taskid): - if IPyversion >= '0.11': + if IPyversion >= "0.11": logger.debug("Clearing id: %d" % taskid) self.taskclient.purge_results(self.taskmap[taskid]) del self.taskmap[taskid] diff --git a/nipype/pipeline/plugins/legacymultiproc.py b/nipype/pipeline/plugins/legacymultiproc.py index bfc1773a92..4c39be1ab2 100644 
--- a/nipype/pipeline/plugins/legacymultiproc.py +++ b/nipype/pipeline/plugins/legacymultiproc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via multiprocessing @@ -6,8 +5,6 @@ Support for child processes running as non-daemons based on http://stackoverflow.com/a/8963618/1183453 """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) # Import packages import os @@ -30,7 +27,7 @@ except ImportError: def indent(text, prefix): - """ A textwrap.indent replacement for Python < 3.3 """ + """A textwrap.indent replacement for Python < 3.3""" if not prefix: return text splittext = text.splitlines(True) @@ -38,7 +35,7 @@ def indent(text, prefix): # Init logger -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") # Run node @@ -66,75 +63,100 @@ def run_node(node, updatehash, taskid): # Try and execute the node via node.run() try: - result['result'] = node.run(updatehash=updatehash) + result["result"] = node.run(updatehash=updatehash) except: # noqa: E722, intendedly catch all here - result['traceback'] = format_exception(*sys.exc_info()) - result['result'] = node.result + result["traceback"] = format_exception(*sys.exc_info()) + result["result"] = node.result # Return the result dictionary return result + # Pythons 2.7, 3.4-3.7.0, and 3.7.1 have three different implementations of # pool.Pool().Process(), and the type of the result varies based on the default # multiprocessing context, so we need to dynamically patch the daemon property -class NonDaemonMixin(object): +class NonDaemonMixin: @property def daemon(self): return False - + @daemon.setter def daemon(self, val): pass + try: from multiprocessing import context + # Exists on all platforms class NonDaemonSpawnProcess(NonDaemonMixin, context.SpawnProcess): pass + class NonDaemonSpawnContext(context.SpawnContext): Process = NonDaemonSpawnProcess - _nondaemon_context_mapper = { - 'spawn': NonDaemonSpawnContext() - } + + _nondaemon_context_mapper = {"spawn": NonDaemonSpawnContext()} # POSIX only try: + class NonDaemonForkProcess(NonDaemonMixin, context.ForkProcess): pass + class NonDaemonForkContext(context.ForkContext): Process = NonDaemonForkProcess - _nondaemon_context_mapper['fork'] = NonDaemonForkContext() + + _nondaemon_context_mapper["fork"] = NonDaemonForkContext() except AttributeError: pass # POSIX only try: + class NonDaemonForkServerProcess(NonDaemonMixin, context.ForkServerProcess): pass + class NonDaemonForkServerContext(context.ForkServerContext): Process = NonDaemonForkServerProcess - _nondaemon_context_mapper['forkserver'] = NonDaemonForkServerContext() + + _nondaemon_context_mapper["forkserver"] = NonDaemonForkServerContext() except AttributeError: pass class NonDaemonPool(pool.Pool): - def __init__(self, processes=None, initializer=None, initargs=(), - maxtasksperchild=None, context=None): + def __init__( + self, + processes=None, + initializer=None, + initargs=(), + maxtasksperchild=None, + context=None, + ): if context is None: context = mp.get_context() context = _nondaemon_context_mapper[context._name] - super(NonDaemonPool, self).__init__(processes=processes, - initializer=initializer, - initargs=initargs, - maxtasksperchild=maxtasksperchild, - context=context) + super().__init__( + processes=processes, + initializer=initializer, + initargs=initargs, + maxtasksperchild=maxtasksperchild, + context=context, + ) 
except ImportError: + class NonDaemonProcess(NonDaemonMixin, mp.Process): pass + class NonDaemonPool(pool.Pool): Process = NonDaemonProcess +def process_initializer(cwd): + """Initializes the environment of the child process""" + os.chdir(cwd) + os.environ["NIPYPE_NO_ET"] = "1" + + class LegacyMultiProcPlugin(DistributedPluginBase): """ Execute workflow with multiprocessing, not sending more jobs at once @@ -171,7 +193,7 @@ class LegacyMultiProcPlugin(DistributedPluginBase): def __init__(self, plugin_args=None): # Init variables and instance attributes - super(LegacyMultiProcPlugin, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) self._taskresult = {} self._task_obj = {} self._taskid = 0 @@ -181,27 +203,32 @@ def __init__(self, plugin_args=None): self._cwd = os.getcwd() # Read in options or set defaults. - non_daemon = self.plugin_args.get('non_daemon', True) - maxtasks = self.plugin_args.get('maxtasksperchild', 10) - self.processors = self.plugin_args.get('n_procs', cpu_count()) + non_daemon = self.plugin_args.get("non_daemon", True) + maxtasks = self.plugin_args.get("maxtasksperchild", 10) + self.processors = self.plugin_args.get("n_procs", cpu_count()) self.memory_gb = self.plugin_args.get( - 'memory_gb', # Allocate 90% of system memory - get_system_total_memory_gb() * 0.9) - self.raise_insufficient = self.plugin_args.get('raise_insufficient', - True) + "memory_gb", # Allocate 90% of system memory + get_system_total_memory_gb() * 0.9, + ) + self.raise_insufficient = self.plugin_args.get("raise_insufficient", True) # Instantiate different thread pools for non-daemon processes - logger.debug('[LegacyMultiProc] Starting in "%sdaemon" mode (n_procs=%d, ' - 'mem_gb=%0.2f, cwd=%s)', 'non' * int(non_daemon), - self.processors, self.memory_gb, self._cwd) + logger.debug( + '[LegacyMultiProc] Starting in "%sdaemon" mode (n_procs=%d, ' + "mem_gb=%0.2f, cwd=%s)", + "non" * int(non_daemon), + self.processors, + self.memory_gb, + self._cwd, + ) NipypePool = NonDaemonPool if non_daemon else Pool try: self.pool = NipypePool( processes=self.processors, maxtasksperchild=maxtasks, - initializer=os.chdir, - initargs=(self._cwd,) + initializer=process_initializer, + initargs=(self._cwd,), ) except TypeError: # Python < 3.2 does not have maxtasksperchild @@ -214,7 +241,7 @@ def __init__(self, plugin_args=None): def _async_callback(self, args): # Make sure runtime is not left at a dubious working directory os.chdir(self._cwd) - self._taskresult[args['taskid']] = args + self._taskresult[args["taskid"]] = args def _get_result(self, taskid): return self._taskresult.get(taskid) @@ -226,19 +253,22 @@ def _submit_job(self, node, updatehash=False): self._taskid += 1 # Don't allow streaming outputs - if getattr(node.interface, 'terminal_output', '') == 'stream': - node.interface.terminal_output = 'allatonce' + if getattr(node.interface, "terminal_output", "") == "stream": + node.interface.terminal_output = "allatonce" self._task_obj[self._taskid] = self.pool.apply_async( - run_node, (node, updatehash, self._taskid), - callback=self._async_callback) - - logger.debug('[LegacyMultiProc] Submitted task %s (taskid=%d).', - node.fullname, self._taskid) + run_node, (node, updatehash, self._taskid), callback=self._async_callback + ) + + logger.debug( + "[LegacyMultiProc] Submitted task %s (taskid=%d).", + node.fullname, + self._taskid, + ) return self._taskid def _prerun_check(self, graph): - """Check if any node exeeds the available resources""" + """Check if any node exceeds the 
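# Editor's sketch (not part of the patch): why the NonDaemon* classes above
# exist. Daemonic multiprocessing workers are forbidden from spawning children,
# but nipype nodes routinely launch command-line subprocesses, so the Process
# type is patched to always report daemon=False and handed to the pool through
# a custom context. A minimal spawn-context version, with illustrative names:
from multiprocessing import context, pool

class NonDaemonMixin:
    @property
    def daemon(self):
        return False

    @daemon.setter
    def daemon(self, val):
        pass  # ignore the pool's attempt to daemonize workers

class NonDaemonSpawnProcess(NonDaemonMixin, context.SpawnProcess):
    pass

class NonDaemonSpawnContext(context.SpawnContext):
    Process = NonDaemonSpawnProcess

def make_nondaemon_pool(n_procs=2):
    # Workers from this pool may start their own subprocesses.
    return pool.Pool(processes=n_procs, context=NonDaemonSpawnContext())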
available resources""" tasks_mem_gb = [] tasks_num_th = [] for node in graph.nodes(): @@ -247,17 +277,19 @@ def _prerun_check(self, graph): if np.any(np.array(tasks_mem_gb) > self.memory_gb): logger.warning( - 'Some nodes exceed the total amount of memory available ' - '(%0.2fGB).', self.memory_gb) + "Some nodes exceed the total amount of memory available (%0.2fGB).", + self.memory_gb, + ) if self.raise_insufficient: - raise RuntimeError('Insufficient resources available for job') + raise RuntimeError("Insufficient resources available for job") if np.any(np.array(tasks_num_th) > self.processors): logger.warning( - 'Some nodes demand for more threads than available (%d).', - self.processors) + "Some nodes demand for more threads than available (%d).", + self.processors, + ) if self.raise_insufficient: - raise RuntimeError('Insufficient resources available for job') + raise RuntimeError("Insufficient resources available for job") def _postrun_check(self): self.pool.close() @@ -282,46 +314,58 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Check to see if a job is available (jobs with all dependencies run) # See https://github.com/nipy/nipype/pull/2200#discussion_r141605722 # See also https://github.com/nipy/nipype/issues/2372 - jobids = np.flatnonzero(~self.proc_done & - (self.depidx.sum(axis=0) == 0).__array__()) + jobids = np.flatnonzero( + ~self.proc_done & (self.depidx.sum(axis=0) == 0).__array__() + ) # Check available resources by summing all threads and memory used - free_memory_gb, free_processors = self._check_resources( - self.pending_tasks) - - stats = (len(self.pending_tasks), len(jobids), free_memory_gb, - self.memory_gb, free_processors, self.processors) + free_memory_gb, free_processors = self._check_resources(self.pending_tasks) + + stats = ( + len(self.pending_tasks), + len(jobids), + free_memory_gb, + self.memory_gb, + free_processors, + self.processors, + ) if self._stats != stats: - tasks_list_msg = '' + tasks_list_msg = "" if logger.level <= INFO: running_tasks = [ - ' * %s' % self.procs[jobid].fullname + " * %s" % self.procs[jobid].fullname for _, jobid in self.pending_tasks ] if running_tasks: - tasks_list_msg = '\nCurrently running:\n' - tasks_list_msg += '\n'.join(running_tasks) - tasks_list_msg = indent(tasks_list_msg, ' ' * 21) + tasks_list_msg = "\nCurrently running:\n" + tasks_list_msg += "\n".join(running_tasks) + tasks_list_msg = indent(tasks_list_msg, " " * 21) logger.info( - '[LegacyMultiProc] Running %d tasks, and %d jobs ready. Free ' - 'memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s', - len(self.pending_tasks), len(jobids), free_memory_gb, - self.memory_gb, free_processors, self.processors, - tasks_list_msg) + "[LegacyMultiProc] Running %d tasks, and %d jobs ready. Free " + "memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s", + len(self.pending_tasks), + len(jobids), + free_memory_gb, + self.memory_gb, + free_processors, + self.processors, + tasks_list_msg, + ) self._stats = stats if free_memory_gb < 0.01 or free_processors == 0: - logger.debug('No resources available') + logger.debug("No resources available") return if len(jobids) + len(self.pending_tasks) == 0: - logger.debug('No tasks are being run, and no jobs can ' - 'be submitted to the queue. Potential deadlock') + logger.debug( + "No tasks are being run, and no jobs can " + "be submitted to the queue. 
Potential deadlock" + ) return - jobids = self._sort_jobs( - jobids, scheduler=self.plugin_args.get('scheduler')) + jobids = self._sort_jobs(jobids, scheduler=self.plugin_args.get("scheduler")) # Run garbage collector before potentially submitting jobs gc.collect() @@ -335,12 +379,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( - jobid, - graph, - result={ - 'result': None, - 'traceback': traceback - }) + jobid, graph, result={"result": None, "traceback": traceback} + ) self.proc_pending[jobid] = False continue if num_subnodes > 1: @@ -354,16 +394,26 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # If node does not fit, skip at this moment if next_job_th > free_processors or next_job_gb > free_memory_gb: - logger.debug('Cannot allocate job %d (%0.2fGB, %d threads).', - jobid, next_job_gb, next_job_th) + logger.debug( + "Cannot allocate job %d (%0.2fGB, %d threads).", + jobid, + next_job_gb, + next_job_th, + ) continue free_memory_gb -= next_job_gb free_processors -= next_job_th - logger.debug('Allocating %s ID=%d (%0.2fGB, %d threads). Free: ' - '%0.2fGB, %d threads.', self.procs[jobid].fullname, - jobid, next_job_gb, next_job_th, free_memory_gb, - free_processors) + logger.debug( + "Allocating %s ID=%d (%0.2fGB, %d threads). Free: " + "%0.2fGB, %d threads.", + self.procs[jobid].fullname, + jobid, + next_job_gb, + next_job_th, + free_memory_gb, + free_processors, + ) # change job status in appropriate queues self.proc_done[jobid] = True @@ -375,19 +425,14 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # updatehash and run_without_submitting are also run locally if updatehash or self.procs[jobid].run_without_submitting: - logger.debug('Running node %s on master thread', - self.procs[jobid]) + logger.debug("Running node %s on master thread", self.procs[jobid]) try: self.procs[jobid].run(updatehash=updatehash) except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( - jobid, - graph, - result={ - 'result': None, - 'traceback': traceback - }) + jobid, graph, result={"result": None, "traceback": traceback} + ) # Release resources self._task_finished_cb(jobid) @@ -404,9 +449,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Task should be submitted to workers # Send job to task manager and add to pending tasks if self._status_callback: - self._status_callback(self.procs[jobid], 'start') - tid = self._submit_job( - deepcopy(self.procs[jobid]), updatehash=updatehash) + self._status_callback(self.procs[jobid], "start") + tid = self._submit_job(deepcopy(self.procs[jobid]), updatehash=updatehash) if tid is None: self.proc_done[jobid] = False self.proc_pending[jobid] = False @@ -415,10 +459,10 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Display stats next loop self._stats = None - def _sort_jobs(self, jobids, scheduler='tsort'): - if scheduler == 'mem_thread': + def _sort_jobs(self, jobids, scheduler="tsort"): + if scheduler == "mem_thread": return sorted( jobids, - key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs) + key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs), ) return jobids diff --git a/nipype/pipeline/plugins/linear.py b/nipype/pipeline/plugins/linear.py index 3f8ec51463..93029ee1b9 100644 --- a/nipype/pipeline/plugins/linear.py +++ b/nipype/pipeline/plugins/linear.py @@ -1,20 +1,15 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; 
py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Local serial workflow execution """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os -from .base import (PluginBase, logger, report_crash, report_nodes_not_run, - str2bool) +from .base import PluginBase, logger, report_crash, report_nodes_not_run, str2bool from ..engine.utils import topological_sort class LinearPlugin(PluginBase): - """Execute workflow in series - """ + """Execute workflow in series""" def run(self, graph, config, updatehash=False): """Executes a pre-defined pipeline in a serial order. @@ -26,46 +21,61 @@ def run(self, graph, config, updatehash=False): defines order of execution """ import networkx as nx + try: dfs_preorder = nx.dfs_preorder except AttributeError: dfs_preorder = nx.dfs_preorder_nodes if not isinstance(graph, nx.DiGraph): - raise ValueError('Input must be a networkx digraph object') + raise ValueError("Input must be a networkx digraph object") logger.info("Running serially.") old_wd = os.getcwd() notrun = [] donotrun = [] + stop_on_first_crash = str2bool(config["execution"]["stop_on_first_crash"]) + errors = [] nodes, _ = topological_sort(graph) for node in nodes: - endstatus = 'end' + endstatus = "end" try: if node in donotrun: continue if self._status_callback: - self._status_callback(node, 'start') + self._status_callback(node, "start") node.run(updatehash=updatehash) - except: - endstatus = 'exception' + except Exception as exc: + endstatus = "exception" # bare except, but i really don't know where a # node might fail crashfile = report_crash(node) - if str2bool(config['execution']['stop_on_first_crash']): - raise # remove dependencies from queue - subnodes = [s for s in dfs_preorder(graph, node)] - notrun.append({'node': node, 'dependents': subnodes, - 'crashfile': crashfile}) + subnodes = list(dfs_preorder(graph, node)) + notrun.append( + {"node": node, "dependents": subnodes, "crashfile": crashfile} + ) donotrun.extend(subnodes) # Delay raising the crash until we cleaned the house - if str2bool(config['execution']['stop_on_first_crash']): - os.chdir(old_wd) # Return wherever we were before - report_nodes_not_run(notrun) # report before raising - raise + errors.append(exc) + + if stop_on_first_crash: + break finally: if self._status_callback: self._status_callback(node, endstatus) os.chdir(old_wd) # Return wherever we were before report_nodes_not_run(notrun) + if errors: + # If one or more nodes failed, re-raise the first of them + error, cause = errors[0], None + if isinstance(error, str): + error = RuntimeError(error) + + if len(errors) > 1: + error, cause = ( + RuntimeError(f"{len(errors)} raised. Re-raising first."), + error, + ) + + raise error from cause diff --git a/nipype/pipeline/plugins/lsf.py b/nipype/pipeline/plugins/lsf.py index 7f687a69ec..cf334be051 100644 --- a/nipype/pipeline/plugins/lsf.py +++ b/nipype/pipeline/plugins/lsf.py @@ -1,8 +1,5 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via LSF """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) import os import re @@ -11,7 +8,8 @@ from ...
import logging from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") class LSFPlugin(SGELikeBatchManagerBase): @@ -32,15 +30,15 @@ def __init__(self, **kwargs): """ self._retry_timeout = 2 self._max_tries = 2 - self._bsub_args = '' - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - if 'bsub_args' in kwargs['plugin_args']: - self._bsub_args = kwargs['plugin_args']['bsub_args'] - super(LSFPlugin, self).__init__(template, **kwargs) + self._bsub_args = "" + if kwargs.get("plugin_args"): + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + if "bsub_args" in kwargs["plugin_args"]: + self._bsub_args = kwargs["plugin_args"]["bsub_args"] + super().__init__(template, **kwargs) def _is_pending(self, taskid): """LSF lists a status of 'PEND' when a job has been submitted but is @@ -48,53 +46,53 @@ def _is_pending(self, taskid): But _is_pending should return True until a job has finished and is ready to be checked for completeness. So return True if status is either 'PEND' or 'RUN'""" - cmd = CommandLine( - 'bjobs', resource_monitor=False, terminal_output='allatonce') - cmd.inputs.args = '%d' % taskid + cmd = CommandLine("bjobs", resource_monitor=False, terminal_output="allatonce") + cmd.inputs.args = "%d" % taskid # check lsf task oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) result = cmd.run(ignore_exception=True) iflogger.setLevel(oldlevel) # logger.debug(result.runtime.stdout) - if 'DONE' in result.runtime.stdout or 'EXIT' in result.runtime.stdout: - return False - else: - return True + return ( + "DONE" not in result.runtime.stdout and "EXIT" not in result.runtime.stdout + ) def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( - 'bsub', + "bsub", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') - bsubargs = '' + terminal_output="allatonce", + ) + bsubargs = "" if self._bsub_args: bsubargs = self._bsub_args - if 'bsub_args' in node.plugin_args: - if 'overwrite' in node.plugin_args and\ - node.plugin_args['overwrite']: - bsubargs = node.plugin_args['bsub_args'] + if "bsub_args" in node.plugin_args: + if node.plugin_args.get("overwrite"): + bsubargs = node.plugin_args["bsub_args"] else: - bsubargs += (" " + node.plugin_args['bsub_args']) - if '-o' not in bsubargs: # -o outfile - bsubargs = '%s -o %s' % (bsubargs, scriptfile + ".log") - if '-e' not in bsubargs: + bsubargs += " " + node.plugin_args["bsub_args"] + if "-o" not in bsubargs: # -o outfile + bsubargs = "{} -o {}".format(bsubargs, scriptfile + ".log") + if "-e" not in bsubargs: # -e error file - bsubargs = '%s -e %s' % (bsubargs, scriptfile + ".log") + bsubargs = "{} -e {}".format(bsubargs, scriptfile + ".log") if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = 
".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname = '.'.join(jobnameitems) - cmd.inputs.args = '%s -J %s sh %s' % (bsubargs, jobname, - scriptfile) # -J job_name_spec - logger.debug('bsub ' + cmd.inputs.args) + jobname = ".".join(jobnameitems) + cmd.inputs.args = "{} -J {} sh {}".format( + bsubargs, + jobname, + scriptfile, + ) # -J job_name_spec + logger.debug("bsub " + cmd.inputs.args) oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: @@ -102,23 +100,28 @@ def _submit_batchtask(self, scriptfile, node): except Exception as e: if tries < self._max_tries: tries += 1 - sleep( - self._retry_timeout) # sleep 2 seconds and try again. + sleep(self._retry_timeout) # sleep 2 seconds and try again. else: iflogger.setLevel(oldlevel) - raise RuntimeError('\n'.join((('Could not submit lsf task' - ' for node %s') % node._id, - str(e)))) + raise RuntimeError( + "\n".join( + ( + "Could not submit lsf task for node %s" % node._id, + str(e), + ) + ) + ) else: break iflogger.setLevel(oldlevel) # retrieve lsf taskid - match = re.search(r'<(\d*)>', result.runtime.stdout) + match = re.search(r"<(\d*)>", result.runtime.stdout) if match: taskid = int(match.groups()[0]) else: - raise IOError("Can't parse submission job output id: %s" % - result.runtime.stdout) + raise OSError( + "Can't parse submission job output id: %s" % result.runtime.stdout + ) self._pending[taskid] = node.output_dir() - logger.debug('submitted lsf task: %d for node %s' % (taskid, node._id)) + logger.debug("submitted lsf task: %d for node %s" % (taskid, node._id)) return taskid diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index d2ef363a34..401b01b388 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via multiprocessing @@ -6,13 +5,11 @@ Support for child processes running as non-daemons based on http://stackoverflow.com/a/8963618/1183453 """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) # Import packages import os import multiprocessing as mp -from concurrent.futures import ProcessPoolExecutor +from concurrent.futures import ProcessPoolExecutor, wait from traceback import format_exception import sys from logging import INFO @@ -30,7 +27,7 @@ except ImportError: def indent(text, prefix): - """ A textwrap.indent replacement for Python < 3.3 """ + """A textwrap.indent replacement for Python < 3.3""" if not prefix: return text splittext = text.splitlines(True) @@ -38,7 +35,7 @@ def indent(text, prefix): # Init logger -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") # Run node @@ -66,15 +63,21 @@ def run_node(node, updatehash, taskid): # Try and execute the node via node.run() try: - result['result'] = node.run(updatehash=updatehash) + result["result"] = node.run(updatehash=updatehash) except: # noqa: E722, intendedly catch all here - result['traceback'] = format_exception(*sys.exc_info()) - result['result'] = node.result + result["traceback"] = format_exception(*sys.exc_info()) + result["result"] = node.result # Return the result dictionary return result +def process_initializer(cwd): + """Initializes the environment 
of the child process""" + os.chdir(cwd) + os.environ["NIPYPE_NO_ET"] = "1" + + class MultiProcPlugin(DistributedPluginBase): """ Execute workflow with multiprocessing, not sending more jobs at once @@ -110,7 +113,7 @@ class MultiProcPlugin(DistributedPluginBase): def __init__(self, plugin_args=None): # Init variables and instance attributes - super(MultiProcPlugin, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) self._taskresult = {} self._task_obj = {} self._taskid = 0 @@ -120,36 +123,40 @@ def __init__(self, plugin_args=None): self._cwd = os.getcwd() # Read in options or set defaults. - self.processors = self.plugin_args.get('n_procs', mp.cpu_count()) + self.processors = self.plugin_args.get("n_procs", mp.cpu_count()) self.memory_gb = self.plugin_args.get( - 'memory_gb', # Allocate 90% of system memory - get_system_total_memory_gb() * 0.9) - self.raise_insufficient = self.plugin_args.get('raise_insufficient', - True) + "memory_gb", # Allocate 90% of system memory + get_system_total_memory_gb() * 0.9, + ) + self.raise_insufficient = self.plugin_args.get("raise_insufficient", True) # Instantiate different thread pools for non-daemon processes - logger.debug('[MultiProc] Starting (n_procs=%d, ' - 'mem_gb=%0.2f, cwd=%s)', - self.processors, self.memory_gb, self._cwd) + logger.debug( + "[MultiProc] Starting (n_procs=%d, mem_gb=%0.2f, cwd=%s)", + self.processors, + self.memory_gb, + self._cwd, + ) try: - mp_context = mp.context.get_context( - self.plugin_args.get('mp_context')) - self.pool = ProcessPoolExecutor(max_workers=self.processors, - initializer=os.chdir, - initargs=(self._cwd,), - mp_context=mp_context) + mp_context = mp.get_context(self.plugin_args.get("mp_context")) + self.pool = ProcessPoolExecutor( + max_workers=self.processors, + initializer=process_initializer, + initargs=(self._cwd,), + mp_context=mp_context, + ) except (AttributeError, TypeError): # Python < 3.7 does not support initialization or contexts self.pool = ProcessPoolExecutor(max_workers=self.processors) + result_future = self.pool.submit(process_initializer, self._cwd) + wait([result_future], timeout=5) self._stats = None def _async_callback(self, args): - # Make sure runtime is not left at a dubious working directory - os.chdir(self._cwd) result = args.result() - self._taskresult[result['taskid']] = result + self._taskresult[result["taskid"]] = result def _get_result(self, taskid): return self._taskresult.get(taskid) @@ -161,19 +168,20 @@ def _submit_job(self, node, updatehash=False): self._taskid += 1 # Don't allow streaming outputs - if getattr(node.interface, 'terminal_output', '') == 'stream': - node.interface.terminal_output = 'allatonce' + if getattr(node.interface, "terminal_output", "") == "stream": + node.interface.terminal_output = "allatonce" result_future = self.pool.submit(run_node, node, updatehash, self._taskid) result_future.add_done_callback(self._async_callback) self._task_obj[self._taskid] = result_future - logger.debug('[MultiProc] Submitted task %s (taskid=%d).', - node.fullname, self._taskid) + logger.debug( + "[MultiProc] Submitted task %s (taskid=%d).", node.fullname, self._taskid + ) return self._taskid def _prerun_check(self, graph): - """Check if any node exeeds the available resources""" + """Check if any node exceeds the available resources""" tasks_mem_gb = [] tasks_num_th = [] for node in graph.nodes(): @@ -182,17 +190,19 @@ def _prerun_check(self, graph): if np.any(np.array(tasks_mem_gb) > self.memory_gb): logger.warning( - 'Some nodes exceed the 
total amount of memory available ' - '(%0.2fGB).', self.memory_gb) + "Some nodes exceed the total amount of memory available (%0.2fGB).", + self.memory_gb, + ) if self.raise_insufficient: - raise RuntimeError('Insufficient resources available for job') + raise RuntimeError("Insufficient resources available for job") if np.any(np.array(tasks_num_th) > self.processors): logger.warning( - 'Some nodes demand for more threads than available (%d).', - self.processors) + "Some nodes demand for more threads than available (%d).", + self.processors, + ) if self.raise_insufficient: - raise RuntimeError('Insufficient resources available for job') + raise RuntimeError("Insufficient resources available for job") def _postrun_check(self): self.pool.shutdown() @@ -217,46 +227,58 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Check to see if a job is available (jobs with all dependencies run) # See https://github.com/nipy/nipype/pull/2200#discussion_r141605722 # See also https://github.com/nipy/nipype/issues/2372 - jobids = np.flatnonzero(~self.proc_done & - (self.depidx.sum(axis=0) == 0).__array__()) + jobids = np.flatnonzero( + ~self.proc_done & (self.depidx.sum(axis=0) == 0).__array__() + ) # Check available resources by summing all threads and memory used - free_memory_gb, free_processors = self._check_resources( - self.pending_tasks) - - stats = (len(self.pending_tasks), len(jobids), free_memory_gb, - self.memory_gb, free_processors, self.processors) + free_memory_gb, free_processors = self._check_resources(self.pending_tasks) + + stats = ( + len(self.pending_tasks), + len(jobids), + free_memory_gb, + self.memory_gb, + free_processors, + self.processors, + ) if self._stats != stats: - tasks_list_msg = '' + tasks_list_msg = "" if logger.level <= INFO: running_tasks = [ - ' * %s' % self.procs[jobid].fullname + " * %s" % self.procs[jobid].fullname for _, jobid in self.pending_tasks ] if running_tasks: - tasks_list_msg = '\nCurrently running:\n' - tasks_list_msg += '\n'.join(running_tasks) - tasks_list_msg = indent(tasks_list_msg, ' ' * 21) + tasks_list_msg = "\nCurrently running:\n" + tasks_list_msg += "\n".join(running_tasks) + tasks_list_msg = indent(tasks_list_msg, " " * 21) logger.info( - '[MultiProc] Running %d tasks, and %d jobs ready. Free ' - 'memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s', - len(self.pending_tasks), len(jobids), free_memory_gb, - self.memory_gb, free_processors, self.processors, - tasks_list_msg) + "[MultiProc] Running %d tasks, and %d jobs ready. Free " + "memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s", + len(self.pending_tasks), + len(jobids), + free_memory_gb, + self.memory_gb, + free_processors, + self.processors, + tasks_list_msg, + ) self._stats = stats if free_memory_gb < 0.01 or free_processors == 0: - logger.debug('No resources available') + logger.debug("No resources available") return if len(jobids) + len(self.pending_tasks) == 0: - logger.debug('No tasks are being run, and no jobs can ' - 'be submitted to the queue. Potential deadlock') + logger.debug( + "No tasks are being run, and no jobs can " + "be submitted to the queue. 
Potential deadlock" + ) return - jobids = self._sort_jobs( - jobids, scheduler=self.plugin_args.get('scheduler')) + jobids = self._sort_jobs(jobids, scheduler=self.plugin_args.get("scheduler")) # Run garbage collector before potentially submitting jobs gc.collect() @@ -270,12 +292,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( - jobid, - graph, - result={ - 'result': None, - 'traceback': traceback - }) + jobid, graph, result={"result": None, "traceback": traceback} + ) self.proc_pending[jobid] = False continue if num_subnodes > 1: @@ -289,16 +307,26 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # If node does not fit, skip at this moment if next_job_th > free_processors or next_job_gb > free_memory_gb: - logger.debug('Cannot allocate job %d (%0.2fGB, %d threads).', - jobid, next_job_gb, next_job_th) + logger.debug( + "Cannot allocate job %d (%0.2fGB, %d threads).", + jobid, + next_job_gb, + next_job_th, + ) continue free_memory_gb -= next_job_gb free_processors -= next_job_th - logger.debug('Allocating %s ID=%d (%0.2fGB, %d threads). Free: ' - '%0.2fGB, %d threads.', self.procs[jobid].fullname, - jobid, next_job_gb, next_job_th, free_memory_gb, - free_processors) + logger.debug( + "Allocating %s ID=%d (%0.2fGB, %d threads). Free: " + "%0.2fGB, %d threads.", + self.procs[jobid].fullname, + jobid, + next_job_gb, + next_job_th, + free_memory_gb, + free_processors, + ) # change job status in appropriate queues self.proc_done[jobid] = True @@ -310,19 +338,14 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # updatehash and run_without_submitting are also run locally if updatehash or self.procs[jobid].run_without_submitting: - logger.debug('Running node %s on master thread', - self.procs[jobid]) + logger.debug("Running node %s on master thread", self.procs[jobid]) try: self.procs[jobid].run(updatehash=updatehash) except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( - jobid, - graph, - result={ - 'result': None, - 'traceback': traceback - }) + jobid, graph, result={"result": None, "traceback": traceback} + ) # Release resources self._task_finished_cb(jobid) @@ -339,9 +362,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Task should be submitted to workers # Send job to task manager and add to pending tasks if self._status_callback: - self._status_callback(self.procs[jobid], 'start') - tid = self._submit_job( - deepcopy(self.procs[jobid]), updatehash=updatehash) + self._status_callback(self.procs[jobid], "start") + tid = self._submit_job(deepcopy(self.procs[jobid]), updatehash=updatehash) if tid is None: self.proc_done[jobid] = False self.proc_pending[jobid] = False @@ -350,10 +372,10 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Display stats next loop self._stats = None - def _sort_jobs(self, jobids, scheduler='tsort'): - if scheduler == 'mem_thread': + def _sort_jobs(self, jobids, scheduler="tsort"): + if scheduler == "mem_thread": return sorted( jobids, - key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs) + key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs), ) return jobids diff --git a/nipype/pipeline/plugins/oar.py b/nipype/pipeline/plugins/oar.py index c68b42379f..df56391bae 100644 --- a/nipype/pipeline/plugins/oar.py +++ b/nipype/pipeline/plugins/oar.py @@ -1,10 +1,6 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via OAR 
http://oar.imag.fr """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, open import os import stat from time import sleep @@ -14,7 +10,8 @@ from ... import logging from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") class OARPlugin(SGELikeBatchManagerBase): @@ -30,9 +27,9 @@ class OARPlugin(SGELikeBatchManagerBase): """ - # Addtional class variables + # Additional class variables _max_jobname_len = 15 - _oarsub_args = '' + _oarsub_args = "" def __init__(self, **kwargs): template = """ @@ -41,71 +38,75 @@ def __init__(self, **kwargs): self._retry_timeout = 2 self._max_tries = 2 self._max_jobname_length = 15 - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'oarsub_args' in kwargs['plugin_args']: - self._oarsub_args = kwargs['plugin_args']['oarsub_args'] - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - if 'max_jobname_len' in kwargs['plugin_args']: - self._max_jobname_len = \ - kwargs['plugin_args']['max_jobname_len'] - super(OARPlugin, self).__init__(template, **kwargs) + if kwargs.get("plugin_args"): + if "oarsub_args" in kwargs["plugin_args"]: + self._oarsub_args = kwargs["plugin_args"]["oarsub_args"] + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + if "max_jobname_len" in kwargs["plugin_args"]: + self._max_jobname_len = kwargs["plugin_args"]["max_jobname_len"] + super().__init__(template, **kwargs) def _is_pending(self, taskid): # subprocess.Popen requires taskid to be a string proc = subprocess.Popen( - ['oarstat', '-J', '-s', '-j', taskid], + ["oarstat", "-J", "-s", "-j", taskid], stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + stderr=subprocess.PIPE, + ) o, e = proc.communicate() parsed_result = json.loads(o)[taskid].lower() - is_pending = (('error' not in parsed_result) - and ('terminated' not in parsed_result)) + is_pending = ("error" not in parsed_result) and ( + "terminated" not in parsed_result + ) return is_pending def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( - 'oarsub', + "oarsub", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') + terminal_output="allatonce", + ) path = os.path.dirname(scriptfile) - oarsubargs = '' + oarsubargs = "" if self._oarsub_args: oarsubargs = self._oarsub_args - if 'oarsub_args' in node.plugin_args: - if ('overwrite' in node.plugin_args - and node.plugin_args['overwrite']): - oarsubargs = node.plugin_args['oarsub_args'] + if "oarsub_args" in node.plugin_args: + if node.plugin_args.get("overwrite"): + oarsubargs = node.plugin_args["oarsub_args"] else: - oarsubargs += (" " + node.plugin_args['oarsub_args']) + oarsubargs += " " + node.plugin_args["oarsub_args"] if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname 
= '.'.join(jobnameitems) - jobname = jobname[0:self._max_jobname_len] - - if '-O' not in oarsubargs: - oarsubargs = '%s -O %s' % (oarsubargs, - os.path.join(path, jobname + '.stdout')) - if '-E' not in oarsubargs: - oarsubargs = '%s -E %s' % (oarsubargs, - os.path.join(path, jobname + '.stderr')) - if '-J' not in oarsubargs: - oarsubargs = '%s -J' % (oarsubargs) + jobname = ".".join(jobnameitems) + jobname = jobname[0 : self._max_jobname_len] + + if "-O" not in oarsubargs: + oarsubargs = "{} -O {}".format( + oarsubargs, + os.path.join(path, jobname + ".stdout"), + ) + if "-E" not in oarsubargs: + oarsubargs = "{} -E {}".format( + oarsubargs, + os.path.join(path, jobname + ".stderr"), + ) + if "-J" not in oarsubargs: + oarsubargs = "%s -J" % (oarsubargs) os.chmod(scriptfile, stat.S_IEXEC | stat.S_IREAD | stat.S_IWRITE) - cmd.inputs.args = '%s -n %s -S %s' % (oarsubargs, jobname, scriptfile) + cmd.inputs.args = f"{oarsubargs} -n {jobname} -S {scriptfile}" oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: @@ -117,24 +118,29 @@ def _submit_batchtask(self, scriptfile, node): # sleep 2 seconds and try again. else: iflogger.setLevel(oldlevel) - raise RuntimeError('\n'.join((('Could not submit OAR task' - ' for node %s') % node._id, - str(e)))) + raise RuntimeError( + "\n".join( + ( + "Could not submit OAR task for node %s" % node._id, + str(e), + ) + ) + ) else: break iflogger.setLevel(oldlevel) # retrieve OAR taskid - o = '' + o = "" add = False for line in result.runtime.stdout.splitlines(): - if line.strip().startswith('{'): + if line.strip().startswith("{"): add = True if add: - o += line + '\n' - if line.strip().startswith('}'): + o += line + "\n" + if line.strip().startswith("}"): break - taskid = json.loads(o)['job_id'] + taskid = json.loads(o)["job_id"] self._pending[taskid] = node.output_dir() - logger.debug('submitted OAR task: %s for node %s' % (taskid, node._id)) + logger.debug(f"submitted OAR task: {taskid} for node {node._id}") return taskid diff --git a/nipype/pipeline/plugins/pbs.py b/nipype/pipeline/plugins/pbs.py index 0738638765..d967af0bed 100644 --- a/nipype/pipeline/plugins/pbs.py +++ b/nipype/pipeline/plugins/pbs.py @@ -1,9 +1,5 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via PBS/Torque """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, open import os from time import sleep @@ -12,7 +8,7 @@ from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class PBSPlugin(SGELikeBatchManagerBase): @@ -28,7 +24,7 @@ class PBSPlugin(SGELikeBatchManagerBase): """ - # Addtional class variables + # Additional class variables _max_jobname_len = 15 def __init__(self, **kwargs): @@ -38,65 +34,65 @@ def __init__(self, **kwargs): self._retry_timeout = 2 self._max_tries = 2 self._max_jobname_length = 15 - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - if 'max_jobname_len' in kwargs['plugin_args']: - self._max_jobname_len = kwargs['plugin_args'][ - 'max_jobname_len'] - super(PBSPlugin, self).__init__(template, **kwargs) + if kwargs.get("plugin_args"): + if 
"retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + if "max_jobname_len" in kwargs["plugin_args"]: + self._max_jobname_len = kwargs["plugin_args"]["max_jobname_len"] + super().__init__(template, **kwargs) def _is_pending(self, taskid): - result = CommandLine('qstat -f {}'.format(taskid), - environ=dict(os.environ), - terminal_output='file_split', - resource_monitor=False, - ignore_exception=True).run() + result = CommandLine( + f"qstat -f {taskid}", + environ=dict(os.environ), + terminal_output="file_split", + resource_monitor=False, + ignore_exception=True, + ).run() stdout = result.runtime.stdout stderr = result.runtime.stderr - errmsg = 'Unknown Job Id' - success = 'Job has finished' - if (success in stderr) or ('job_state = C' in stdout): + errmsg = "Unknown Job Id" + success = "Job has finished" + if (success in stderr) or ("job_state = C" in stdout): return False else: return errmsg not in stderr def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( - 'qsub', + "qsub", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') + terminal_output="allatonce", + ) path = os.path.dirname(scriptfile) - qsubargs = '' + qsubargs = "" if self._qsub_args: qsubargs = self._qsub_args - if 'qsub_args' in node.plugin_args: - if 'overwrite' in node.plugin_args and \ - node.plugin_args['overwrite']: - qsubargs = node.plugin_args['qsub_args'] + if "qsub_args" in node.plugin_args: + if node.plugin_args.get("overwrite"): + qsubargs = node.plugin_args["qsub_args"] else: - qsubargs += (" " + node.plugin_args['qsub_args']) - if '-o' not in qsubargs: - qsubargs = '%s -o %s' % (qsubargs, path) - if '-e' not in qsubargs: - qsubargs = '%s -e %s' % (qsubargs, path) + qsubargs += " " + node.plugin_args["qsub_args"] + if "-o" not in qsubargs: + qsubargs = f"{qsubargs} -o {path}" + if "-e" not in qsubargs: + qsubargs = f"{qsubargs} -e {path}" if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname = '.'.join(jobnameitems) - jobname = jobname[0:self._max_jobname_len] - cmd.inputs.args = '%s -N %s %s' % (qsubargs, jobname, scriptfile) + jobname = ".".join(jobnameitems) + jobname = jobname[0 : self._max_jobname_len] + cmd.inputs.args = f"{qsubargs} -N {jobname} {scriptfile}" oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: @@ -109,15 +105,14 @@ def _submit_batchtask(self, scriptfile, node): else: iflogger.setLevel(oldlevel) raise RuntimeError( - 'Could not submit pbs task for node {}\n{}'.format( - node._id, e)) + f"Could not submit pbs task for node {node._id}\n{e}" + ) else: break iflogger.setLevel(oldlevel) # retrieve pbs taskid - taskid = result.runtime.stdout.split('.')[0] + taskid = result.runtime.stdout.split(".")[0] self._pending[taskid] = node.output_dir() - logger.debug('submitted pbs task: {} for node {}'.format( - taskid, node._id)) + logger.debug(f"submitted pbs task: {taskid} for node {node._id}") return taskid diff --git 
a/nipype/pipeline/plugins/pbsgraph.py b/nipype/pipeline/plugins/pbsgraph.py index 68fc651f5f..4b245dedb7 100644 --- a/nipype/pipeline/plugins/pbsgraph.py +++ b/nipype/pipeline/plugins/pbsgraph.py @@ -1,8 +1,5 @@ """Parallel workflow execution via PBS/Torque """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open import os import sys @@ -23,43 +20,42 @@ class PBSGraphPlugin(SGEGraphPlugin): qsub call """ + _template = """ #PBS -V """ def _submit_graph(self, pyfiles, dependencies, nodes): batch_dir, _ = os.path.split(pyfiles[0]) - submitjobsfile = os.path.join(batch_dir, 'submit_jobs.sh') - with open(submitjobsfile, 'wt') as fp: - fp.writelines('#!/usr/bin/env sh\n') + submitjobsfile = os.path.join(batch_dir, "submit_jobs.sh") + with open(submitjobsfile, "w") as fp: + fp.writelines("#!/usr/bin/env sh\n") for idx, pyscript in enumerate(pyfiles): node = nodes[idx] - template, qsub_args = self._get_args(node, - ["template", "qsub_args"]) + template, qsub_args = self._get_args(node, ["template", "qsub_args"]) batch_dir, name = os.path.split(pyscript) - name = '.'.join(name.split('.')[:-1]) - batchscript = '\n'.join((template, '%s %s' % (sys.executable, - pyscript))) - batchscriptfile = os.path.join(batch_dir, - 'batchscript_%s.sh' % name) - with open(batchscriptfile, 'wt') as batchfp: + name = ".".join(name.split(".")[:-1]) + batchscript = "\n".join((template, f"{sys.executable} {pyscript}")) + batchscriptfile = os.path.join(batch_dir, "batchscript_%s.sh" % name) + with open(batchscriptfile, "w") as batchfp: batchfp.writelines(batchscript) batchfp.close() - deps = '' + deps = "" if idx in dependencies: - values = [ - '$job%05d' % jobid for jobid in dependencies[idx] - ] + values = ["$job%05d" % jobid for jobid in dependencies[idx]] if len(values): - deps = '-W depend=afterok:%s' % ':'.join(values) - fp.writelines('job%05d=`qsub %s %s %s`\n' % - (idx, deps, qsub_args, batchscriptfile)) + deps = "-W depend=afterok:%s" % ":".join(values) + fp.writelines( + "job%05d=`qsub %s %s %s`\n" + % (idx, deps, qsub_args, batchscriptfile) + ) cmd = CommandLine( - 'sh', + "sh", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') - cmd.inputs.args = '%s' % submitjobsfile + terminal_output="allatonce", + ) + cmd.inputs.args = "%s" % submitjobsfile cmd.run() - logger.info('submitted all jobs to queue') + logger.info("submitted all jobs to queue") diff --git a/nipype/pipeline/plugins/semaphore_singleton.py b/nipype/pipeline/plugins/semaphore_singleton.py index 96dfe657bd..1ab42de2fc 100644 --- a/nipype/pipeline/plugins/semaphore_singleton.py +++ b/nipype/pipeline/plugins/semaphore_singleton.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) import threading + semaphore = threading.Semaphore(0) diff --git a/nipype/pipeline/plugins/sge.py b/nipype/pipeline/plugins/sge.py index a4ce28297c..38079e947d 100644 --- a/nipype/pipeline/plugins/sge.py +++ b/nipype/pipeline/plugins/sge.py @@ -1,10 +1,5 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via SGE """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from builtins import object import os import pwd @@ -19,45 +14,56 @@ from ... 
import logging from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") DEBUGGING_PREFIX = str(int(random.uniform(100, 999))) def sge_debug_print(message): - """ Needed for debugging on big jobs. Once this is fully vetted, it can be removed. - """ + """Needed for debugging on big jobs. Once this is fully vetted, it can be removed.""" logger.debug(DEBUGGING_PREFIX + " " + "=!" * 3 + " " + message) # print DEBUGGING_PREFIX + " " + "=!" * 3 + " " + message -class QJobInfo(object): +class QJobInfo: """Information about a single job created by OGE/SGE or similar Each job is responsible for knowing it's own refresh state :author Hans J. Johnson """ - def __init__(self, job_num, job_queue_state, job_time, job_queue_name, - job_slots, qsub_command_line): + def __init__( + self, + job_num, + job_queue_state, + job_time, + job_queue_name, + job_slots, + qsub_command_line, + ): # self._jobName = None # Ascii text name of job not unique self._job_num = int( job_num ) # The primary unique identifier for this job, must be an integer! # self._jobOwn = None # Who owns this job - self._job_queue_state = str( - job_queue_state) # ["running","zombie",...??] + self._job_queue_state = str(job_queue_state) # ["running","zombie",...??] # self._jobActionState = str(jobActionState) # ['r','qw','S',...??] self._job_time = job_time # The job start time - self._job_info_creation_time = time.time( - ) # When this job was created (for comparing against initalization) + self._job_info_creation_time = ( + time.time() + ) # When this job was created (for comparing against initialization) self._job_queue_name = job_queue_name # Where the job is running self._job_slots = int(job_slots) # How many slots are being used self._qsub_command_line = qsub_command_line def __repr__(self): - return '{:<8d}{:12}{:<3d}{:20}{:8}{}'.format( - self._job_num, self._job_queue_state, self._job_slots, + return "{:<8d}{:12}{:<3d}{:20}{:8}{}".format( + self._job_num, + self._job_queue_state, + self._job_slots, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(self._job_time)), - self._job_queue_name, self._qsub_command_line) + self._job_queue_name, + self._qsub_command_line, + ) def is_initializing(self): return self._job_queue_state == "initializing" @@ -72,28 +78,28 @@ def is_pending(self): return self._job_queue_state == "pending" def is_job_state_pending(self): - """ Return True, unless job is in the "zombie" status - """ - time_diff = (time.time() - self._job_info_creation_time) + """Return True, unless job is in the "zombie" status""" + time_diff = time.time() - self._job_info_creation_time if self.is_zombie(): sge_debug_print( - "DONE! QJobInfo.IsPending found in 'zombie' list, returning False so claiming done!\n{0}". - format(self)) + "DONE! QJobInfo.IsPending found in 'zombie' list, returning False so claiming done!\n{}".format( + self + ) + ) is_pending_status = False # Job explicitly found as being completed! elif self.is_initializing() and (time_diff > 600): # if initializing for more than 5 minute, failure due to # initialization and completion before registration sge_debug_print( - "FAILURE! QJobInfo.IsPending found long running at {1} seconds" - "'initializing' returning False for to break loop!\n{0}". - format(self, time_diff)) + f"FAILURE! 
QJobInfo.IsPending found long running at {time_diff} seconds " + f"'initializing'; returning False to break the loop!\n{self}" + ) is_pending_status = True # Job initialization took too long, so report! else: # self.is_running() || self.is_pending(): is_pending_status = True # Job cache last listed as running return is_pending_status # The job is in one of the hold states - def update_info(self, job_queue_state, job_time, job_queue_name, - job_slots): + def update_info(self, job_queue_state, job_time, job_queue_name, job_slots): self._job_queue_state = job_queue_state self._job_time = job_time self._job_queue_name = job_queue_name @@ -103,13 +109,13 @@ def set_state(self, new_state): self._job_queue_state = new_state -class QstatSubstitute(object): +class QstatSubstitute: """A wrapper for Qstat to avoid overloading the SGE/OGS server with rapid continuous qstat requests""" - def __init__(self, - qstat_instant_executable='qstat', - qstat_cached_executable='qstat'): + def __init__( + self, qstat_instant_executable="qstat", qstat_cached_executable="qstat" + ): """ :param qstat_instant_executable: :param qstat_cached_executable: @@ -117,12 +123,13 @@ def __init__(self, self._qstat_instant_executable = qstat_instant_executable self._qstat_cached_executable = qstat_cached_executable self._out_of_scope_jobs = list() # Initialize first - self._task_dictionary = dict( + self._task_dictionary = ( + dict() ) # {'taskid': QJobInfo(), .... } The dictionaryObject self._remove_old_jobs() def _remove_old_jobs(self): - """ This is only called during initialization of the function for the purpose + """This is only called during initialization of the function for the purpose of identifying jobs that are not part of this run of nipype. They are jobs that existed prior to starting a new jobs, so they are irrelevant.
""" @@ -133,24 +140,26 @@ def _remove_old_jobs(self): def add_startup_job(self, taskid, qsub_command_line): """ :param taskid: The job id - :param qsub_command_line: When initializing, re-use the job_queue_name + :param qsub_command_line: When initializing, reuse the job_queue_name :return: NONE """ taskid = int(taskid) # Ensure that it is an integer - self._task_dictionary[taskid] = QJobInfo(taskid, "initializing", - time.time(), "noQueue", 1, - qsub_command_line) + self._task_dictionary[taskid] = QJobInfo( + taskid, "initializing", time.time(), "noQueue", 1, qsub_command_line + ) @staticmethod def _qacct_verified_complete(taskid): - """ request definitive job completion information for the current job - from the qacct report + """request definitive job completion information for the current job + from the qacct report """ - sge_debug_print("WARNING: " - "CONTACTING qacct for finished jobs, " - "{0}: {1}".format(time.time(), "Verifying Completion")) + sge_debug_print( + "WARNING: " + "CONTACTING qacct for finished jobs, " + "{}: {}".format(time.time(), "Verifying Completion") + ) - this_command = 'qacct' + this_command = "qacct" qacct_retries = 10 is_complete = False while qacct_retries > 0: @@ -158,22 +167,23 @@ def _qacct_verified_complete(taskid): try: proc = subprocess.Popen( [ - this_command, '-o', - pwd.getpwuid(os.getuid())[0], '-j', - str(taskid) + this_command, + "-o", + pwd.getpwuid(os.getuid())[0], + "-j", + str(taskid), ], stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + stderr=subprocess.PIPE, + ) qacct_result, _ = proc.communicate() if qacct_result.find(str(taskid)): is_complete = True - sge_debug_print( - "NOTE: qacct for jobs\n{0}".format(qacct_result)) + sge_debug_print(f"NOTE: qacct for jobs\n{qacct_result}") break except: sge_debug_print("NOTE: qacct call failed") time.sleep(5) - pass return is_complete def _parse_qstat_job_list(self, xml_job_list): @@ -183,36 +193,46 @@ def _parse_qstat_job_list(self, xml_job_list): # jobown = # current_job_element.getElementsByTagName('JB_owner')[0].childNodes[0].data try: - job_queue_name = current_job_element.getElementsByTagName( - 'queue_name')[0].childNodes[0].data + job_queue_name = ( + current_job_element.getElementsByTagName("queue_name")[0] + .childNodes[0] + .data + ) except: job_queue_name = "unknown" try: job_slots = int( - current_job_element.getElementsByTagName('slots')[0] - .childNodes[0].data) + current_job_element.getElementsByTagName("slots")[0] + .childNodes[0] + .data + ) except: job_slots = -1 - job_queue_state = current_job_element.getAttribute('state') + job_queue_state = current_job_element.getAttribute("state") job_num = int( - current_job_element.getElementsByTagName('JB_job_number')[0] - .childNodes[0].data) + current_job_element.getElementsByTagName("JB_job_number")[0] + .childNodes[0] + .data + ) try: - job_time_text = current_job_element.getElementsByTagName( - 'JAT_start_time')[0].childNodes[0].data + job_time_text = ( + current_job_element.getElementsByTagName("JAT_start_time")[0] + .childNodes[0] + .data + ) job_time = float( - time.mktime( - time.strptime(job_time_text, "%Y-%m-%dT%H:%M:%S"))) + time.mktime(time.strptime(job_time_text, "%Y-%m-%dT%H:%M:%S")) + ) except: - job_time = float(0.0) + job_time = 0.0 # Make job entry task_id = int(job_num) if task_id in self._task_dictionary: self._task_dictionary[task_id].update_info( - job_queue_state, job_time, job_queue_name, job_slots) - sge_debug_print("Updating job: {0}".format( - self._task_dictionary[task_id])) + job_queue_state, job_time, 
job_queue_name, job_slots + ) + sge_debug_print(f"Updating job: {self._task_dictionary[task_id]}") current_jobs_parsed.append(task_id) # Changed from job_num as "in" is used to check which does not cast else: @@ -232,33 +252,36 @@ def _parse_qstat_job_list(self, xml_job_list): if is_completed: self._task_dictionary[dictionary_job].set_state("zombie") else: - sge_debug_print("ERROR: Job not in current parselist, " - "and not in done list {0}: {1}".format( - dictionary_job, - self._task_dictionary[dictionary_job])) - pass + sge_debug_print( + "ERROR: Job not in current parselist, " + "and not in done list {}: {}".format( + dictionary_job, self._task_dictionary[dictionary_job] + ) + ) if self._task_dictionary[dictionary_job].is_initializing(): is_completed = self._qacct_verified_complete(dictionary_job) if is_completed: self._task_dictionary[dictionary_job].set_state("zombie") else: sge_debug_print( - "ERROR: Job not in still in intializing mode, " - "and not in done list {0}: {1}".format( - dictionary_job, - self._task_dictionary[dictionary_job])) - pass + "ERROR: Job not in still in initialization mode, " + "and not in done list {}: {}".format( + dictionary_job, self._task_dictionary[dictionary_job] + ) + ) def _run_qstat(self, reason_for_qstat, force_instant=True): - """ request all job information for the current user in xmlformat. - See documentation from java documentation: - http://arc.liv.ac.uk/SGE/javadocs/jgdi/com/sun/grid/jgdi/monitoring/filter/JobStateFilter.html - -s r gives running jobs - -s z gives recently completed jobs (**recently** is very ambiguous) - -s s suspended jobs + """request all job information for the current user in xmlformat. + See documentation from java documentation: + http://arc.liv.ac.uk/SGE/javadocs/jgdi/com/sun/grid/jgdi/monitoring/filter/JobStateFilter.html + -s r gives running jobs + -s z gives recently completed jobs (**recently** is very ambiguous) + -s s suspended jobs """ - sge_debug_print("WARNING: CONTACTING qmaster for jobs, " - "{0}: {1}".format(time.time(), reason_for_qstat)) + sge_debug_print( + "WARNING: CONTACTING qmaster for jobs, " + "{}: {}".format(time.time(), reason_for_qstat) + ) if force_instant: this_command = self._qstat_instant_executable else: @@ -270,26 +293,30 @@ def _run_qstat(self, reason_for_qstat, force_instant=True): try: proc = subprocess.Popen( [ - this_command, '-u', - pwd.getpwuid(os.getuid())[0], '-xml', '-s', 'psrz' + this_command, + "-u", + pwd.getpwuid(os.getuid())[0], + "-xml", + "-s", + "psrz", ], stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + stderr=subprocess.PIPE, + ) qstat_xml_result, _ = proc.communicate() dom = xml.dom.minidom.parseString(qstat_xml_result) - jobs = dom.getElementsByTagName('job_info') + jobs = dom.getElementsByTagName("job_info") run = jobs[0] - runjobs = run.getElementsByTagName('job_list') + runjobs = run.getElementsByTagName("job_list") self._parse_qstat_job_list(runjobs) break except Exception as inst: - exception_message = "QstatParsingError:\n\t{0}\n\t{1}\n".format( + exception_message = "QstatParsingError:\n\t{}\n\t{}\n".format( type(inst), # the exception instance - inst # __str__ allows args to printed directly + inst, # __str__ allows args to printed directly ) sge_debug_print(exception_message) time.sleep(5) - pass def print_dictionary(self): """For debugging""" @@ -301,41 +328,38 @@ def is_job_pending(self, task_id): # Check if the task is in the dictionary first (before running qstat) if task_id in self._task_dictionary: # Trust the cache, only False if state='zombie' - 
job_is_pending = self._task_dictionary[ - task_id].is_job_state_pending() + job_is_pending = self._task_dictionary[task_id].is_job_state_pending() # Double check pending jobs in case of change (since we don't check at the beginning) if job_is_pending: - self._run_qstat( - "checking job pending status {0}".format(task_id), False) - job_is_pending = self._task_dictionary[ - task_id].is_job_state_pending() + self._run_qstat(f"checking job pending status {task_id}", False) + job_is_pending = self._task_dictionary[task_id].is_job_state_pending() else: - self._run_qstat("checking job pending status {0}".format(task_id), - True) + self._run_qstat(f"checking job pending status {task_id}", True) if task_id in self._task_dictionary: # Trust the cache, only False if state='zombie' - job_is_pending = self._task_dictionary[ - task_id].is_job_state_pending() + job_is_pending = self._task_dictionary[task_id].is_job_state_pending() else: - sge_debug_print("ERROR: Job {0} not in task list, " - "even after forced qstat!".format(task_id)) + sge_debug_print( + "ERROR: Job {} not in task list, " + "even after forced qstat!".format(task_id) + ) job_is_pending = False if not job_is_pending: - sge_debug_print( - "DONE! Returning for {0} claiming done!".format(task_id)) + sge_debug_print(f"DONE! Returning for {task_id} claiming done!") if task_id in self._task_dictionary: - sge_debug_print( - "NOTE: Adding {0} to OutOfScopeJobs list!".format(task_id)) + sge_debug_print(f"NOTE: Adding {task_id} to OutOfScopeJobs list!") self._out_of_scope_jobs.append(int(task_id)) self._task_dictionary.pop(task_id) else: - sge_debug_print("ERROR: Job {0} not in task list, " - "but attempted to be removed!".format(task_id)) + sge_debug_print( + "ERROR: Job {} not in task list, " + "but attempted to be removed!".format(task_id) + ) return job_is_pending def qsub_sanitize_job_name(testjobname): - """ Ensure that qsub job names must begin with a letter. + """Ensure that qsub job names begin with a letter. Numbers and punctuation are not allowed.
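Editor's note (not part of the patch): a quick illustration of why this sanitizer exists. The SGE plugin assembles a job name from `LOGNAME.hierarchy.node_id` and reverses the dotted pieces, so the node id comes first, and qsub rejects job names whose first character is not a letter. The login and node names below are invented for the example.

```python
def qsub_sanitize_job_name(testjobname):
    # Same rule as the helper in this hunk: prefix a letter when needed
    return testjobname if testjobname[0].isalpha() else "J" + testjobname


jobname = ".".join(reversed("circleci.l1pipeline.0_realign".split(".")))
print(jobname)                          # 0_realign.l1pipeline.circleci
print(qsub_sanitize_job_name(jobname))  # J0_realign.l1pipeline.circleci
```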
@@ -347,7 +371,7 @@ def qsub_sanitize_job_name(testjobname): if testjobname[0].isalpha(): return testjobname else: - return 'J' + testjobname + return "J" + testjobname class SGEPlugin(SGELikeBatchManagerBase): @@ -369,57 +393,56 @@ def __init__(self, **kwargs): """ self._retry_timeout = 2 self._max_tries = 2 - instant_qstat = 'qstat' - cached_qstat = 'qstat' - - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - if 'qstatProgramPath' in kwargs['plugin_args']: - instant_qstat = kwargs['plugin_args']['qstatProgramPath'] - if 'qstatCachedProgramPath' in kwargs['plugin_args']: - cached_qstat = kwargs['plugin_args']['qstatCachedProgramPath'] + instant_qstat = "qstat" + cached_qstat = "qstat" + + if kwargs.get("plugin_args"): + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + if "qstatProgramPath" in kwargs["plugin_args"]: + instant_qstat = kwargs["plugin_args"]["qstatProgramPath"] + if "qstatCachedProgramPath" in kwargs["plugin_args"]: + cached_qstat = kwargs["plugin_args"]["qstatCachedProgramPath"] self._refQstatSubstitute = QstatSubstitute(instant_qstat, cached_qstat) - super(SGEPlugin, self).__init__(template, **kwargs) + super().__init__(template, **kwargs) def _is_pending(self, taskid): return self._refQstatSubstitute.is_job_pending(int(taskid)) def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( - 'qsub', + "qsub", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') + terminal_output="allatonce", + ) path = os.path.dirname(scriptfile) - qsubargs = '' + qsubargs = "" if self._qsub_args: qsubargs = self._qsub_args - if 'qsub_args' in node.plugin_args: - if 'overwrite' in node.plugin_args and \ - node.plugin_args['overwrite']: - qsubargs = node.plugin_args['qsub_args'] + if "qsub_args" in node.plugin_args: + if node.plugin_args.get("overwrite"): + qsubargs = node.plugin_args["qsub_args"] else: - qsubargs += (" " + node.plugin_args['qsub_args']) - if '-o' not in qsubargs: - qsubargs = '%s -o %s' % (qsubargs, path) - if '-e' not in qsubargs: - qsubargs = '%s -e %s' % (qsubargs, path) + qsubargs += " " + node.plugin_args["qsub_args"] + if "-o" not in qsubargs: + qsubargs = f"{qsubargs} -o {path}" + if "-e" not in qsubargs: + qsubargs = f"{qsubargs} -e {path}" if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname = '.'.join(jobnameitems) + jobname = ".".join(jobnameitems) jobname = qsub_sanitize_job_name(jobname) - cmd.inputs.args = '%s -N %s %s' % (qsubargs, jobname, scriptfile) + cmd.inputs.args = f"{qsubargs} -N {jobname} {scriptfile}" oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 result = list() while True: @@ -428,23 +451,29 @@ def _submit_batchtask(self, scriptfile, node): except Exception as e: if tries < self._max_tries: tries += 
1 - time.sleep( - self._retry_timeout) # sleep 2 seconds and try again. + time.sleep(self._retry_timeout) # sleep 2 seconds and try again. else: iflogger.setLevel(oldlevel) - raise RuntimeError('\n'.join((('Could not submit sge task' - ' for node %s') % node._id, - str(e)))) + raise RuntimeError( + "\n".join( + ( + "Could not submit sge task for node %s" % node._id, + str(e), + ) + ) + ) else: break iflogger.setLevel(oldlevel) # retrieve sge taskid - lines = [line for line in result.runtime.stdout.split('\n') if line] + lines = [line for line in result.runtime.stdout.split("\n") if line] taskid = int( - re.match("Your job ([0-9]*) .* has been submitted", - lines[-1]).groups()[0]) + re.match("Your job ([0-9]*) .* has been submitted", lines[-1]).groups()[0] + ) self._pending[taskid] = node.output_dir() self._refQstatSubstitute.add_startup_job(taskid, cmd.cmdline) - logger.debug('submitted sge task: %d for node %s with %s' % - (taskid, node._id, cmd.cmdline)) + logger.debug( + "submitted sge task: %d for node %s with %s" + % (taskid, node._id, cmd.cmdline) + ) return taskid diff --git a/nipype/pipeline/plugins/sgegraph.py b/nipype/pipeline/plugins/sgegraph.py index fa07d6a436..5cd1c7bfb7 100644 --- a/nipype/pipeline/plugins/sgegraph.py +++ b/nipype/pipeline/plugins/sgegraph.py @@ -1,15 +1,11 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via SGE """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open import os import sys from ...interfaces.base import CommandLine -from .base import (GraphPluginBase, logger) +from .base import GraphPluginBase, logger def node_completed_status(checknode): @@ -19,15 +15,15 @@ def node_completed_status(checknode): :return: boolean value True indicates that the node does not need to be run. 
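    Usage of the surrounding SGEGraph plugin, for context (a minimal
    sketch; ``wf`` is any Workflow, and the queue in ``qsub_args`` is
    illustrative, not taken from this patch):

        wf.run(
            plugin="SGEGraph",
            plugin_args={
                "qsub_args": "-q long.q",  # hypothetical queue
                "dont_resubmit_completed_jobs": True,
            },
        )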
""" """ TODO: place this in the base.py file and refactor """ - node_state_does_not_require_overwrite = ( - checknode.overwrite is False or - (checknode.overwrite is None and not checknode._interface.always_run)) + node_state_does_not_require_overwrite = checknode.overwrite is False or ( + checknode.overwrite is None and not checknode._interface.always_run + ) hash_exists = False try: hash_exists, _, _, _ = checknode.hash_exists() except Exception: hash_exists = False - return (hash_exists and node_state_does_not_require_overwrite) + return hash_exists and node_state_does_not_require_overwrite class SGEGraphPlugin(GraphPluginBase): @@ -41,6 +37,7 @@ class SGEGraphPlugin(GraphPluginBase): qsub call """ + _template = """ #!/bin/bash #$ -V @@ -48,20 +45,21 @@ class SGEGraphPlugin(GraphPluginBase): """ def __init__(self, **kwargs): - self._qsub_args = '' + self._qsub_args = "" self._dont_resubmit_completed_jobs = False - if 'plugin_args' in kwargs and kwargs['plugin_args']: - plugin_args = kwargs['plugin_args'] - if 'template' in plugin_args: - self._template = plugin_args['template'] + if kwargs.get("plugin_args"): + plugin_args = kwargs["plugin_args"] + if "template" in plugin_args: + self._template = plugin_args["template"] if os.path.isfile(self._template): self._template = open(self._template).read() - if 'qsub_args' in plugin_args: - self._qsub_args = plugin_args['qsub_args'] - if 'dont_resubmit_completed_jobs' in plugin_args: + if "qsub_args" in plugin_args: + self._qsub_args = plugin_args["qsub_args"] + if "dont_resubmit_completed_jobs" in plugin_args: self._dont_resubmit_completed_jobs = plugin_args[ - 'dont_resubmit_completed_jobs'] - super(SGEGraphPlugin, self).__init__(**kwargs) + "dont_resubmit_completed_jobs" + ] + super().__init__(**kwargs) def _submit_graph(self, pyfiles, dependencies, nodes): def make_job_name(jobnumber, nodeslist): @@ -70,93 +68,97 @@ def make_job_name(jobnumber, nodeslist): - nodeslist: The name of the node being processed - return: A string representing this job to be displayed by SGE """ - job_name = 'j{0}_{1}'.format(jobnumber, nodeslist[jobnumber]._id) + job_name = f"j{jobnumber}_{nodeslist[jobnumber]._id}" # Condition job_name to be a valid bash identifier (i.e. 
- is invalid) - job_name = job_name.replace('-', '_').replace('.', '_').replace( - ':', '_') + job_name = job_name.replace("-", "_").replace(".", "_").replace(":", "_") return job_name batch_dir, _ = os.path.split(pyfiles[0]) - submitjobsfile = os.path.join(batch_dir, 'submit_jobs.sh') + submitjobsfile = os.path.join(batch_dir, "submit_jobs.sh") cache_doneness_per_node = dict() - if self._dont_resubmit_completed_jobs: # A future parameter for controlling this behavior could be added here + if ( + self._dont_resubmit_completed_jobs + ): # A future parameter for controlling this behavior could be added here for idx, pyscript in enumerate(pyfiles): node = nodes[idx] node_status_done = node_completed_status(node) # if the node itself claims done, then check to ensure all - # dependancies are also done + # dependencies are also done if node_status_done and idx in dependencies: for child_idx in dependencies[idx]: if child_idx in cache_doneness_per_node: - child_status_done = cache_doneness_per_node[ - child_idx] + child_status_done = cache_doneness_per_node[child_idx] else: - child_status_done = node_completed_status( - nodes[child_idx]) + child_status_done = node_completed_status(nodes[child_idx]) node_status_done = node_status_done and child_status_done cache_doneness_per_node[idx] = node_status_done - with open(submitjobsfile, 'wt') as fp: - fp.writelines('#!/usr/bin/env bash\n') - fp.writelines('# Condense format attempted\n') + with open(submitjobsfile, "w") as fp: + fp.writelines("#!/usr/bin/env bash\n") + fp.writelines("# Condense format attempted\n") for idx, pyscript in enumerate(pyfiles): node = nodes[idx] if cache_doneness_per_node.get(idx, False): continue else: template, qsub_args = self._get_args( - node, ["template", "qsub_args"]) + node, ["template", "qsub_args"] + ) batch_dir, name = os.path.split(pyscript) - name = '.'.join(name.split('.')[:-1]) - batchscript = '\n'.join( - (template, '%s %s' % (sys.executable, pyscript))) - batchscriptfile = os.path.join(batch_dir, - 'batchscript_%s.sh' % name) + name = ".".join(name.split(".")[:-1]) + batchscript = "\n".join((template, f"{sys.executable} {pyscript}")) + batchscriptfile = os.path.join( + batch_dir, "batchscript_%s.sh" % name + ) - batchscriptoutfile = batchscriptfile + '.o' - batchscripterrfile = batchscriptfile + '.e' + batchscriptoutfile = batchscriptfile + ".o" + batchscripterrfile = batchscriptfile + ".e" - with open(batchscriptfile, 'wt') as batchfp: + with open(batchscriptfile, "w") as batchfp: batchfp.writelines(batchscript) batchfp.close() - deps = '' + deps = "" if idx in dependencies: - values = ' ' + values = " " for jobid in dependencies[idx]: - # Avoid dependancies of done jobs - if not self._dont_resubmit_completed_jobs or not cache_doneness_per_node[jobid]: - values += "${{{0}}},".format( - make_job_name(jobid, nodes)) - if values != ' ': # i.e. if some jobs were added to dependency list - values = values.rstrip(',') - deps = '-hold_jid%s' % values + # Avoid dependencies of done jobs + if ( + not self._dont_resubmit_completed_jobs + or not cache_doneness_per_node[jobid] + ): + values += f"${{{make_job_name(jobid, nodes)}}}," + if ( + values != " " + ): # i.e. 
if some jobs were added to dependency list + values = values.rstrip(",") + deps = "-hold_jid%s" % values jobname = make_job_name(idx, nodes) # Do not use default output locations if they are set in self._qsub_args - stderrFile = '' - if self._qsub_args.count('-e ') == 0: - stderrFile = '-e {errFile}'.format( - errFile=batchscripterrfile) - stdoutFile = '' - if self._qsub_args.count('-o ') == 0: - stdoutFile = '-o {outFile}'.format( - outFile=batchscriptoutfile) - full_line = '{jobNm}=$(qsub {outFileOption} {errFileOption} {extraQSubArgs} {dependantIndex} -N {jobNm} {batchscript} | awk \'{{print $3}}\')\n'.format( + stderrFile = "" + if self._qsub_args.count("-e ") == 0: + stderrFile = f"-e {batchscripterrfile}" + stdoutFile = "" + if self._qsub_args.count("-o ") == 0: + stdoutFile = f"-o {batchscriptoutfile}" + full_line = "{jobNm}=$(qsub {outFileOption} {errFileOption} {extraQSubArgs} {dependantIndex} -N {jobNm} {batchscript} | awk '/^Your job/{{print $3}}')\n".format( jobNm=jobname, outFileOption=stdoutFile, errFileOption=stderrFile, extraQSubArgs=qsub_args, dependantIndex=deps, - batchscript=batchscriptfile) + batchscript=batchscriptfile, + ) fp.writelines(full_line) cmd = CommandLine( - 'bash', + "bash", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') - cmd.inputs.args = '%s' % submitjobsfile + terminal_output="allatonce", + ) + cmd.inputs.args = "%s" % submitjobsfile cmd.run() - logger.info('submitted all jobs to queue') + logger.info("submitted all jobs to queue") diff --git a/nipype/pipeline/plugins/slurm.py b/nipype/pipeline/plugins/slurm.py index 4645e52fba..bdc142059f 100644 --- a/nipype/pipeline/plugins/slurm.py +++ b/nipype/pipeline/plugins/slurm.py @@ -1,13 +1,10 @@ -''' +""" Created on Aug 2, 2013 @author: chadcumba Parallel workflow execution with SLURM -''' -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open +""" import os import re @@ -17,11 +14,11 @@ from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class SLURMPlugin(SGELikeBatchManagerBase): - ''' + """ Execute using SLURM The plugin_args input to run can be used to control the SLURM execution. 
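    For context, these plugin_args reach SLURMPlugin.__init__ below via
    Workflow.run; a minimal sketch (``wf`` is any Workflow, and the
    sbatch flags and retry values are illustrative, not taken from this
    patch):

        wf.run(
            plugin="SLURM",
            plugin_args={
                "sbatch_args": "--time=01:00:00 --mem=4G",  # hypothetical flags
                "max_tries": 3,
                "retry_timeout": 5,
            },
        )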
@@ -32,10 +29,9 @@ class SLURMPlugin(SGELikeBatchManagerBase):
 
     - sbatch_args: arguments to prepend to the sbatch call
 
-    '''
+    """
 
     def __init__(self, **kwargs):
-
         template = "#!/bin/bash"
 
         self._retry_timeout = 2
@@ -44,42 +40,46 @@ def __init__(self, **kwargs):
         self._sbatch_args = None
         self._jobid_re = "Submitted batch job ([0-9]*)"
 
-        if 'plugin_args' in kwargs and kwargs['plugin_args']:
-            if 'retry_timeout' in kwargs['plugin_args']:
-                self._retry_timeout = kwargs['plugin_args']['retry_timeout']
-            if 'max_tries' in kwargs['plugin_args']:
-                self._max_tries = kwargs['plugin_args']['max_tries']
-            if 'jobid_re' in kwargs['plugin_args']:
-                self._jobid_re = kwargs['plugin_args']['jobid_re']
-            if 'template' in kwargs['plugin_args']:
-                self._template = kwargs['plugin_args']['template']
+        if kwargs.get("plugin_args"):
+            if "retry_timeout" in kwargs["plugin_args"]:
+                self._retry_timeout = kwargs["plugin_args"]["retry_timeout"]
+            if "max_tries" in kwargs["plugin_args"]:
+                self._max_tries = kwargs["plugin_args"]["max_tries"]
+            if "jobid_re" in kwargs["plugin_args"]:
+                self._jobid_re = kwargs["plugin_args"]["jobid_re"]
+            if "template" in kwargs["plugin_args"]:
+                self._template = kwargs["plugin_args"]["template"]
                 if os.path.isfile(self._template):
                     with open(self._template) as f:
                         self._template = f.read()
-            if 'sbatch_args' in kwargs['plugin_args']:
-                self._sbatch_args = kwargs['plugin_args']['sbatch_args']
+            if "sbatch_args" in kwargs["plugin_args"]:
+                self._sbatch_args = kwargs["plugin_args"]["sbatch_args"]
         self._pending = {}
-        super(SLURMPlugin, self).__init__(self._template, **kwargs)
+        super().__init__(self._template, **kwargs)
 
     def _is_pending(self, taskid):
         try:
             res = CommandLine(
-                'squeue',
-                args=' '.join(['-j', '%s' % taskid]),
+                "squeue",
+                args=" ".join(["-j", "%s" % taskid]),
                 resource_monitor=False,
-                terminal_output='allatonce').run()
+                terminal_output="allatonce",
+            ).run()
             return res.runtime.stdout.find(str(taskid)) > -1
         except RuntimeError as e:
-            if any(ss in str(e) for ss
-                   in ['Socket timed out', 'not available at the moment']):
+            if any(
+                ss in str(e)
+                for ss in ["Socket timed out", "not available at the moment"]
+            ):
                 # do not raise error and allow recheck
                 logger.warning(
                     "SLURM timeout encountered while checking job status,"
-                    " treating job %d as pending", taskid
+                    " treating job %d as pending",
+                    taskid,
                 )
                 return True
-            if 'Invalid job id' not in str(e):
-                raise(e)
+            if "Invalid job id" not in str(e):
+                raise e
             return False
 
     def _submit_batchtask(self, scriptfile, node):
@@ -89,38 +89,39 @@ def _submit_batchtask(self, scriptfile, node):
         formatting/processing
         """
         cmd = CommandLine(
-            'sbatch',
+            "sbatch",
             environ=dict(os.environ),
             resource_monitor=False,
-            terminal_output='allatonce')
+            terminal_output="allatonce",
+        )
         path = os.path.dirname(scriptfile)
-        sbatch_args = ''
+        sbatch_args = ""
         if self._sbatch_args:
             sbatch_args = self._sbatch_args
-        if 'sbatch_args' in node.plugin_args:
-            if 'overwrite' in node.plugin_args and\
-               node.plugin_args['overwrite']:
-                sbatch_args = node.plugin_args['sbatch_args']
+        if "sbatch_args" in node.plugin_args:
+            if node.plugin_args.get("overwrite"):
+                sbatch_args = node.plugin_args["sbatch_args"]
             else:
-                sbatch_args += (" " + node.plugin_args['sbatch_args'])
-        if '-o' not in sbatch_args:
-            sbatch_args = '%s -o %s' % (sbatch_args,
-                                        os.path.join(path, 'slurm-%j.out'))
-        if '-e' not in sbatch_args:
-            sbatch_args = '%s -e %s' % (sbatch_args,
-                                        os.path.join(path, 'slurm-%j.out'))
+                sbatch_args += " " + node.plugin_args["sbatch_args"]
+        if "-o" 
not in sbatch_args: + sbatch_args = "{} -o {}".format( + sbatch_args, os.path.join(path, "slurm-%j.out") + ) + if "-e" not in sbatch_args: + sbatch_args = "{} -e {}".format( + sbatch_args, os.path.join(path, "slurm-%j.out") + ) if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname = '.'.join(jobnameitems) - cmd.inputs.args = '%s -J %s %s' % (sbatch_args, jobname, scriptfile) + jobname = ".".join(jobnameitems) + cmd.inputs.args = f"{sbatch_args} -J {jobname} {scriptfile}" oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: @@ -132,17 +133,21 @@ def _submit_batchtask(self, scriptfile, node): sleep(self._retry_timeout) else: iflogger.setLevel(oldlevel) - raise RuntimeError('\n'.join( - (('Could not submit sbatch task' - ' for node %s') % node._id, str(e)))) + raise RuntimeError( + "\n".join( + ( + "Could not submit sbatch task for node %s" % node._id, + str(e), + ) + ) + ) else: break - logger.debug('Ran command ({0})'.format(cmd.cmdline)) + logger.debug(f"Ran command ({cmd.cmdline})") iflogger.setLevel(oldlevel) # retrieve taskid - lines = [line for line in result.runtime.stdout.split('\n') if line] + lines = [line for line in result.runtime.stdout.split("\n") if line] taskid = int(re.match(self._jobid_re, lines[-1]).groups()[0]) self._pending[taskid] = node.output_dir() - logger.debug('submitted sbatch task: %d for node %s' % (taskid, - node._id)) + logger.debug("submitted sbatch task: %d for node %s" % (taskid, node._id)) return taskid diff --git a/nipype/pipeline/plugins/slurmgraph.py b/nipype/pipeline/plugins/slurmgraph.py index b4013163cb..c74ab05a87 100644 --- a/nipype/pipeline/plugins/slurmgraph.py +++ b/nipype/pipeline/plugins/slurmgraph.py @@ -1,15 +1,11 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via SLURM """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open import os import sys from ...interfaces.base import CommandLine -from .base import (GraphPluginBase, logger) +from .base import GraphPluginBase, logger def node_completed_status(checknode): @@ -19,15 +15,15 @@ def node_completed_status(checknode): :return: boolean value True indicates that the node does not need to be run. 
""" """ TODO: place this in the base.py file and refactor """ - node_state_does_not_require_overwrite = ( - checknode.overwrite is False or - (checknode.overwrite is None and not checknode._interface.always_run)) + node_state_does_not_require_overwrite = checknode.overwrite is False or ( + checknode.overwrite is None and not checknode._interface.always_run + ) hash_exists = False try: hash_exists, _, _, _ = checknode.hash_exists() except Exception: hash_exists = False - return (hash_exists and node_state_does_not_require_overwrite) + return hash_exists and node_state_does_not_require_overwrite class SLURMGraphPlugin(GraphPluginBase): @@ -41,27 +37,29 @@ class SLURMGraphPlugin(GraphPluginBase): qsub call """ + _template = "#!/bin/bash" def __init__(self, **kwargs): - self._sbatch_args = '' - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - if 'template' in kwargs['plugin_args']: - self._template = kwargs['plugin_args']['template'] + self._sbatch_args = "" + if kwargs.get("plugin_args"): + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + if "template" in kwargs["plugin_args"]: + self._template = kwargs["plugin_args"]["template"] if os.path.isfile(self._template): self._template = open(self._template).read() - if 'sbatch_args' in kwargs['plugin_args']: - self._sbatch_args = kwargs['plugin_args']['sbatch_args'] - if 'dont_resubmit_completed_jobs' in kwargs['plugin_args']: - self._dont_resubmit_completed_jobs = kwargs['plugin_args'][ - 'dont_resubmit_completed_jobs'] + if "sbatch_args" in kwargs["plugin_args"]: + self._sbatch_args = kwargs["plugin_args"]["sbatch_args"] + if "dont_resubmit_completed_jobs" in kwargs["plugin_args"]: + self._dont_resubmit_completed_jobs = kwargs["plugin_args"][ + "dont_resubmit_completed_jobs" + ] else: self._dont_resubmit_completed_jobs = False - super(SLURMGraphPlugin, self).__init__(**kwargs) + super().__init__(**kwargs) def _submit_graph(self, pyfiles, dependencies, nodes): def make_job_name(jobnumber, nodeslist): @@ -70,93 +68,97 @@ def make_job_name(jobnumber, nodeslist): - nodeslist: The name of the node being processed - return: A string representing this job to be displayed by SLURM """ - job_name = 'j{0}_{1}'.format(jobnumber, nodeslist[jobnumber]._id) + job_name = f"j{jobnumber}_{nodeslist[jobnumber]._id}" # Condition job_name to be a valid bash identifier (i.e. 
- is invalid) - job_name = job_name.replace('-', '_').replace('.', '_').replace( - ':', '_') + job_name = job_name.replace("-", "_").replace(".", "_").replace(":", "_") return job_name batch_dir, _ = os.path.split(pyfiles[0]) - submitjobsfile = os.path.join(batch_dir, 'submit_jobs.sh') + submitjobsfile = os.path.join(batch_dir, "submit_jobs.sh") cache_doneness_per_node = dict() - if self._dont_resubmit_completed_jobs: # A future parameter for controlling this behavior could be added here + if ( + self._dont_resubmit_completed_jobs + ): # A future parameter for controlling this behavior could be added here for idx, pyscript in enumerate(pyfiles): node = nodes[idx] node_status_done = node_completed_status(node) # if the node itself claims done, then check to ensure all - # dependancies are also done + # dependencies are also done if node_status_done and idx in dependencies: for child_idx in dependencies[idx]: if child_idx in cache_doneness_per_node: - child_status_done = cache_doneness_per_node[ - child_idx] + child_status_done = cache_doneness_per_node[child_idx] else: - child_status_done = node_completed_status( - nodes[child_idx]) + child_status_done = node_completed_status(nodes[child_idx]) node_status_done = node_status_done and child_status_done cache_doneness_per_node[idx] = node_status_done - with open(submitjobsfile, 'wt') as fp: - fp.writelines('#!/usr/bin/env bash\n') - fp.writelines('# Condense format attempted\n') + with open(submitjobsfile, "w") as fp: + fp.writelines("#!/usr/bin/env bash\n") + fp.writelines("# Condense format attempted\n") for idx, pyscript in enumerate(pyfiles): node = nodes[idx] if cache_doneness_per_node.get(idx, False): continue else: template, sbatch_args = self._get_args( - node, ["template", "sbatch_args"]) + node, ["template", "sbatch_args"] + ) batch_dir, name = os.path.split(pyscript) - name = '.'.join(name.split('.')[:-1]) - batchscript = '\n'.join( - (template, '%s %s' % (sys.executable, pyscript))) - batchscriptfile = os.path.join(batch_dir, - 'batchscript_%s.sh' % name) + name = ".".join(name.split(".")[:-1]) + batchscript = "\n".join((template, f"{sys.executable} {pyscript}")) + batchscriptfile = os.path.join( + batch_dir, "batchscript_%s.sh" % name + ) - batchscriptoutfile = batchscriptfile + '.o' - batchscripterrfile = batchscriptfile + '.e' + batchscriptoutfile = batchscriptfile + ".o" + batchscripterrfile = batchscriptfile + ".e" - with open(batchscriptfile, 'wt') as batchfp: + with open(batchscriptfile, "w") as batchfp: batchfp.writelines(batchscript) batchfp.close() - deps = '' + deps = "" if idx in dependencies: - values = '' + values = "" for jobid in dependencies[idx]: - # Avoid dependancies of done jobs - if not self._dont_resubmit_completed_jobs or not cache_doneness_per_node[jobid]: - values += "${{{0}}}:".format( - make_job_name(jobid, nodes)) - if values != '': # i.e. if some jobs were added to dependency list - values = values.rstrip(':') - deps = '--dependency=afterok:%s' % values + # Avoid dependencies of done jobs + if ( + not self._dont_resubmit_completed_jobs + or not cache_doneness_per_node[jobid] + ): + values += f"${{{make_job_name(jobid, nodes)}}}:" + if ( + values != "" + ): # i.e. 
if some jobs were added to dependency list
+                            values = values.rstrip(":")
+                            deps = "--dependency=afterok:%s" % values
                     jobname = make_job_name(idx, nodes)
                     # Do not use default output locations if they are set in self._sbatch_args
-                    stderrFile = ''
-                    if self._sbatch_args.count('-e ') == 0:
-                        stderrFile = '-e {errFile}'.format(
-                            errFile=batchscripterrfile)
-                    stdoutFile = ''
-                    if self._sbatch_args.count('-o ') == 0:
-                        stdoutFile = '-o {outFile}'.format(
-                            outFile=batchscriptoutfile)
-                    full_line = '{jobNm}=$(sbatch {outFileOption} {errFileOption} {extraSBatchArgs} {dependantIndex} -J {jobNm} {batchscript} | awk \'/^Submitted/ {{print $4}}\')\n'.format(
+                    stderrFile = ""
+                    if self._sbatch_args.count("-e ") == 0:
+                        stderrFile = f"-e {batchscripterrfile}"
+                    stdoutFile = ""
+                    if self._sbatch_args.count("-o ") == 0:
+                        stdoutFile = f"-o {batchscriptoutfile}"
+                    full_line = "{jobNm}=$(sbatch {outFileOption} {errFileOption} {extraSBatchArgs} {dependantIndex} -J {jobNm} {batchscript} | awk '/^Submitted/ {{print $4}}')\n".format(
                         jobNm=jobname,
                         outFileOption=stdoutFile,
                         errFileOption=stderrFile,
                         extraSBatchArgs=sbatch_args,
                         dependantIndex=deps,
-                        batchscript=batchscriptfile)
+                        batchscript=batchscriptfile,
+                    )
                     fp.writelines(full_line)
         cmd = CommandLine(
-            'bash',
+            "bash",
             environ=dict(os.environ),
             resource_monitor=False,
-            terminal_output='allatonce')
-        cmd.inputs.args = '%s' % submitjobsfile
+            terminal_output="allatonce",
+        )
+        cmd.inputs.args = "%s" % submitjobsfile
         cmd.run()
-        logger.info('submitted all jobs to queue')
+        logger.info("submitted all jobs to queue")
diff --git a/nipype/pipeline/plugins/somaflow.py b/nipype/pipeline/plugins/somaflow.py
index 174b277c6f..2105204979 100644
--- a/nipype/pipeline/plugins/somaflow.py
+++ b/nipype/pipeline/plugins/somaflow.py
@@ -1,45 +1,44 @@
-# -*- coding: utf-8 -*-
-"""Parallel workflow execution via PBS/Torque
-"""
+"""Parallel workflow execution via Soma-workflow
+"""
-from __future__ import (print_function, division, unicode_literals,
-                        absolute_import)
 import os
 import sys
 
-from .base import (GraphPluginBase, logger)
+from .base import GraphPluginBase, logger
 
 soma_not_loaded = False
 try:
-    from soma.workflow.client import (Job, Workflow, WorkflowController,
-                                      Helper)
+    from soma.workflow.client import Job, Workflow, WorkflowController, Helper
 except:
     soma_not_loaded = True
 
 
 class SomaFlowPlugin(GraphPluginBase):
-    """Execute using Soma workflow
-    """
+    """Execute using Soma workflow"""
 
     def __init__(self, plugin_args=None):
         if soma_not_loaded:
-            raise ImportError('SomaFlow could not be imported')
-        super(SomaFlowPlugin, self).__init__(plugin_args=plugin_args)
+            raise ImportError("SomaFlow could not be imported")
+        super().__init__(plugin_args=plugin_args)
 
     def _submit_graph(self, pyfiles, dependencies, nodes):
-        jobs = []
-        soma_deps = []
-        for idx, fname in enumerate(pyfiles):
-            name = os.path.splitext(os.path.split(fname)[1])[0]
-            jobs.append(Job(command=[sys.executable, fname], name=name))
-        for key, values in list(dependencies.items()):
-            for val in values:
-                soma_deps.append((jobs[val], jobs[key]))
+        jobs = [
+            Job(
+                command=[sys.executable, fname],
+                name=os.path.splitext(os.path.split(fname)[1])[0],
+            )
+            for fname in pyfiles
+        ]
+        soma_deps = [
+            (jobs[val], jobs[key])
+            for key, values in dependencies.items()
+            for val in values
+        ]
 
         wf = Workflow(jobs, soma_deps)
-        logger.info('serializing workflow')
-        Helper.serialize('workflow', wf)
+        logger.info("serializing workflow")
+        Helper.serialize("workflow", wf)
         controller = WorkflowController()
-        logger.info('submitting workflow')
+        logger.info("submitting 
workflow") wf_id = controller.submit_workflow(wf) Helper.wait_workflow(wf_id, controller) diff --git a/nipype/pipeline/plugins/tests/__init__.py b/nipype/pipeline/plugins/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/pipeline/plugins/tests/__init__.py +++ b/nipype/pipeline/plugins/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/pipeline/plugins/tests/test_base.py b/nipype/pipeline/plugins/tests/test_base.py index cdc55b668b..43471a7d64 100644 --- a/nipype/pipeline/plugins/tests/test_base.py +++ b/nipype/pipeline/plugins/tests/test_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine module @@ -14,7 +13,7 @@ def test_scipy_sparse(): assert foo[0, 1] == 0 -''' +""" Can use the following code to test that a mapnode crash continues successfully Need to put this into a unit-test with a timeout @@ -38,4 +37,4 @@ def func(arg1): wf.base_dir = '/tmp' wf.run(plugin='MultiProc') -''' +""" diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index 6b9525071e..f7606708c7 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -1,11 +1,7 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for workflow callbacks """ - -from builtins import object - from time import sleep import pytest import nipype.interfaces.utility as niu @@ -20,7 +16,7 @@ def bad_func(): raise Exception -class Status(object): +class Status: def __init__(self): self.statuses = [] @@ -28,42 +24,39 @@ def callback(self, node, status, result=None): self.statuses.append((node.name, status)) -@pytest.mark.parametrize("plugin", ['Linear', 'MultiProc', 'LegacyMultiProc']) +@pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) def test_callback_normal(tmpdir, plugin): tmpdir.chdir() so = Status() - wf = pe.Workflow(name='test', base_dir=tmpdir.strpath) + wf = pe.Workflow(name="test", base_dir=tmpdir.strpath) f_node = pe.Node( - niu.Function(function=func, input_names=[], output_names=[]), - name='f_node') + niu.Function(function=func, input_names=[], output_names=[]), name="f_node" + ) wf.add_nodes([f_node]) - wf.config['execution'] = { - 'crashdump_dir': wf.base_dir, - 'poll_sleep_duration': 2 - } - wf.run(plugin=plugin, plugin_args={'status_callback': so.callback}) - assert so.statuses == [('f_node', 'start'), ('f_node', 'end')] + wf.config["execution"] = {"crashdump_dir": wf.base_dir, "poll_sleep_duration": 2} + wf.run(plugin=plugin, plugin_args={"status_callback": so.callback}) + assert so.statuses == [("f_node", "start"), ("f_node", "end")] -@pytest.mark.parametrize("plugin", ['Linear', 'MultiProc', 'LegacyMultiProc']) +@pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) @pytest.mark.parametrize("stop_on_first_crash", [False, True]) def test_callback_exception(tmpdir, plugin, stop_on_first_crash): tmpdir.chdir() so = Status() - wf = pe.Workflow(name='test', base_dir=tmpdir.strpath) + wf = pe.Workflow(name="test", base_dir=tmpdir.strpath) f_node = pe.Node( - niu.Function(function=bad_func, input_names=[], output_names=[]), - name='f_node') + niu.Function(function=bad_func, 
input_names=[], output_names=[]), name="f_node" + ) wf.add_nodes([f_node]) - wf.config['execution'] = { - 'crashdump_dir': wf.base_dir, - 'stop_on_first_crash': stop_on_first_crash, - 'poll_sleep_duration': 2 + wf.config["execution"] = { + "crashdump_dir": wf.base_dir, + "stop_on_first_crash": stop_on_first_crash, + "poll_sleep_duration": 2, } with pytest.raises(Exception): - wf.run(plugin=plugin, plugin_args={'status_callback': so.callback}) + wf.run(plugin=plugin, plugin_args={"status_callback": so.callback}) sleep(0.5) # Wait for callback to be called (python 2.7) - assert so.statuses == [('f_node', 'start'), ('f_node', 'exception')] + assert so.statuses == [("f_node", "start"), ("f_node", "exception")] diff --git a/nipype/pipeline/plugins/tests/test_debug.py b/nipype/pipeline/plugins/tests/test_debug.py index bd06ecb775..fafb6a276d 100644 --- a/nipype/pipeline/plugins/tests/test_debug.py +++ b/nipype/pipeline/plugins/tests/test_debug.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os import nipype.interfaces.base as nib @@ -7,12 +6,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class DebugTestInterface(nib.BaseInterface): @@ -25,7 +24,7 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] + outputs["output1"] = [1, self.inputs.input1] return outputs @@ -36,11 +35,11 @@ def callme(node, graph): def test_debug(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(DebugTestInterface(), name='mod1') - mod2 = pe.MapNode(DebugTestInterface(), iterfield=['input1'], name='mod2') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(DebugTestInterface(), name="mod1") + mod2 = pe.MapNode(DebugTestInterface(), iterfield=["input1"], name="mod2") - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 @@ -50,8 +49,8 @@ def test_debug(tmpdir): exc = None try: - pipe.run(plugin="Debug", plugin_args={'callable': callme}) + pipe.run(plugin="Debug", plugin_args={"callable": callme}) except Exception as e: exc = e - assert exc is None, 'unexpected exception caught' + assert exc is None, "unexpected exception caught" diff --git a/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py b/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py index a83d426ada..2f35579a40 100644 --- a/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py +++ b/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py @@ -1,12 +1,7 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Testing module for functions and classes from multiproc.py """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, open - # Import packages import os import sys @@ -19,9 +14,9 @@ def mytestFunction(insum=0): - ''' + """ Run a multiprocessing job and spawn child processes. 
- ''' + """ # need to import here since this is executed as an external process import multiprocessing @@ -41,38 +36,35 @@ def mytestFunction(insum=0): f = [None] * numberOfThreads def dummyFunction(filename): - ''' + """ This function writes the value 45 to the given filename. - ''' + """ j = 0 - for i in range(0, 10): + for i in range(10): j += i # j is now 45 (0+1+2+3+4+5+6+7+8+9) - with open(filename, 'w') as f: + with open(filename, "w") as f: f.write(str(j)) for n in range(numberOfThreads): - # mark thread as alive a[n] = True # create a temp file to use as the data exchange container - tmpFile = tempfile.mkstemp('.txt', 'test_engine_')[1] + tmpFile = tempfile.mkstemp(".txt", "test_engine_")[1] f[n] = tmpFile # keep track of the temp file - t[n] = multiprocessing.Process(target=dummyFunction, args=(tmpFile, )) + t[n] = multiprocessing.Process(target=dummyFunction, args=(tmpFile,)) # fire up the job t[n].start() # block until all processes are done allDone = False while not allDone: - time.sleep(1) for n in range(numberOfThreads): - a[n] = t[n].is_alive() if not any(a): @@ -92,65 +84,64 @@ def dummyFunction(filename): def run_multiproc_nondaemon_with_flag(nondaemon_flag): - ''' + """ Start a pipe with two nodes using the resource multiproc plugin and passing the nondaemon_flag. - ''' + """ cur_dir = os.getcwd() - temp_dir = mkdtemp(prefix='test_engine_') + temp_dir = mkdtemp(prefix="test_engine_") os.chdir(temp_dir) - pipe = pe.Workflow(name='pipe') + pipe = pe.Workflow(name="pipe") f1 = pe.Node( interface=Function( - function=mytestFunction, - input_names=['insum'], - output_names=['sum_out']), - name='f1') + function=mytestFunction, input_names=["insum"], output_names=["sum_out"] + ), + name="f1", + ) f2 = pe.Node( interface=Function( - function=mytestFunction, - input_names=['insum'], - output_names=['sum_out']), - name='f2') + function=mytestFunction, input_names=["insum"], output_names=["sum_out"] + ), + name="f2", + ) - pipe.connect([(f1, f2, [('sum_out', 'insum')])]) + pipe.connect([(f1, f2, [("sum_out", "insum")])]) pipe.base_dir = os.getcwd() f1.inputs.insum = 0 - pipe.config['execution']['stop_on_first_crash'] = True + pipe.config["execution"]["stop_on_first_crash"] = True # execute the pipe using the LegacyMultiProc plugin with 2 processes and the # non_daemon flag to enable child processes which start other # multiprocessing jobs execgraph = pipe.run( plugin="LegacyMultiProc", - plugin_args={ - 'n_procs': 2, - 'non_daemon': nondaemon_flag - }) - - names = [ - '.'.join((node._hierarchy, node.name)) for node in execgraph.nodes() - ] - node = list(execgraph.nodes())[names.index('pipe.f2')] - result = node.get_output('sum_out') + plugin_args={"n_procs": 2, "non_daemon": nondaemon_flag}, + ) + + names = [f"{node._hierarchy}.{node.name}" for node in execgraph.nodes()] + node = list(execgraph.nodes())[names.index("pipe.f2")] + result = node.get_output("sum_out") os.chdir(cur_dir) rmtree(temp_dir) return result +@pytest.mark.skipif( + sys.version_info >= (3, 8), reason="multiprocessing issues in Python 3.8" +) def test_run_multiproc_nondaemon_false(): - ''' + """ This is the entry point for the test. Two times a pipe of several multiprocessing jobs gets executed. First, without the nondaemon flag. Second, with the nondaemon flag. Since the processes of the pipe start child processes, the execution only succeeds when the non_daemon flag is on. 
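    The restriction these tests exercise comes from the standard library:
    a daemonic process may not start children of its own, which is why
    LegacyMultiProc grows a non_daemon option. A self-contained sketch of
    the failure mode (plain multiprocessing, no nipype involved):

        import multiprocessing

        def child():
            pass

        def parent():
            # Fails with "daemonic processes are not allowed to have
            # children" because the enclosing process is daemonic.
            multiprocessing.Process(target=child).start()

        if __name__ == "__main__":
            proc = multiprocessing.Process(target=parent, daemon=True)
            proc.start()
            proc.join()  # parent exits non-zero with an AssertionError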
- ''' + """ shouldHaveFailed = False try: # with nondaemon_flag = False, the execution should fail @@ -160,6 +151,9 @@ def test_run_multiproc_nondaemon_false(): assert shouldHaveFailed +@pytest.mark.skipif( + sys.version_info >= (3, 8), reason="multiprocessing issues in Python 3.8" +) def test_run_multiproc_nondaemon_true(): # with nondaemon_flag = True, the execution should succeed result = run_multiproc_nondaemon_with_flag(True) diff --git a/nipype/pipeline/plugins/tests/test_linear.py b/nipype/pipeline/plugins/tests/test_linear.py index 6484432baa..519d978de6 100644 --- a/nipype/pipeline/plugins/tests/test_linear.py +++ b/nipype/pipeline/plugins/tests/test_linear.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os import nipype.interfaces.base as nib @@ -6,12 +5,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class LinearTestInterface(nib.BaseInterface): @@ -24,24 +23,23 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] + outputs["output1"] = [1, self.inputs.input1] return outputs def test_run_in_series(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=LinearTestInterface(), name='mod1') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=LinearTestInterface(), name="mod1") mod2 = pe.MapNode( - interface=LinearTestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + interface=LinearTestInterface(), iterfield=["input1"], name="mod2" + ) + pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="Linear") - names = [ - '.'.join((node._hierarchy, node.name)) for node in execgraph.nodes() - ] - node = list(execgraph.nodes())[names.index('pipe.mod1')] - result = node.get_output('output1') + names = [f"{node._hierarchy}.{node.name}" for node in execgraph.nodes()] + node = list(execgraph.nodes())[names.index("pipe.mod1")] + result = node.get_output("output1") assert result == [1, 1] diff --git a/nipype/pipeline/plugins/tests/test_multiproc.py b/nipype/pipeline/plugins/tests/test_multiproc.py index 7ba9001c39..938e1aab9e 100644 --- a/nipype/pipeline/plugins/tests/test_multiproc.py +++ b/nipype/pipeline/plugins/tests/test_multiproc.py @@ -1,9 +1,9 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Test the resource management of MultiProc """ +import sys import os import pytest from nipype.pipeline import engine as pe @@ -11,12 +11,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class MultiprocTestInterface(nib.BaseInterface): @@ -29,35 +29,37 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] 
+ outputs["output1"] = [1, self.inputs.input1] return outputs +@pytest.mark.skipif( + sys.version_info >= (3, 8), reason="multiprocessing issues in Python 3.8" +) def test_run_multiproc(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(MultiprocTestInterface(), name='mod1') - mod2 = pe.MapNode( - MultiprocTestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(MultiprocTestInterface(), name="mod1") + mod2 = pe.MapNode(MultiprocTestInterface(), iterfield=["input1"], name="mod2") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 - pipe.config['execution']['poll_sleep_duration'] = 2 + pipe.config["execution"]["poll_sleep_duration"] = 2 execgraph = pipe.run(plugin="MultiProc") names = [node.fullname for node in execgraph.nodes()] - node = list(execgraph.nodes())[names.index('pipe.mod1')] - result = node.get_output('output1') + node = list(execgraph.nodes())[names.index("pipe.mod1")] + result = node.get_output("output1") assert result == [1, 1] class InputSpecSingleNode(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpecSingleNode(nib.TraitedSpec): - output1 = nib.traits.Int(desc='a random int') + output1 = nib.traits.Int(desc="a random int") class SingleNodeTestInterface(nib.BaseInterface): @@ -70,68 +72,68 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = self.inputs.input1 + outputs["output1"] = self.inputs.input1 return outputs def test_no_more_memory_than_specified(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - n1 = pe.Node(SingleNodeTestInterface(), name='n1', mem_gb=1) - n2 = pe.Node(SingleNodeTestInterface(), name='n2', mem_gb=1) - n3 = pe.Node(SingleNodeTestInterface(), name='n3', mem_gb=1) - n4 = pe.Node(SingleNodeTestInterface(), name='n4', mem_gb=1) - - pipe.connect(n1, 'output1', n2, 'input1') - pipe.connect(n1, 'output1', n3, 'input1') - pipe.connect(n2, 'output1', n4, 'input1') - pipe.connect(n3, 'output1', n4, 'input2') + pipe = pe.Workflow(name="pipe") + n1 = pe.Node(SingleNodeTestInterface(), name="n1", mem_gb=1) + n2 = pe.Node(SingleNodeTestInterface(), name="n2", mem_gb=1) + n3 = pe.Node(SingleNodeTestInterface(), name="n3", mem_gb=1) + n4 = pe.Node(SingleNodeTestInterface(), name="n4", mem_gb=1) + + pipe.connect(n1, "output1", n2, "input1") + pipe.connect(n1, "output1", n3, "input1") + pipe.connect(n2, "output1", n4, "input1") + pipe.connect(n3, "output1", n4, "input2") n1.inputs.input1 = 1 max_memory = 0.5 with pytest.raises(RuntimeError): pipe.run( - plugin='MultiProc', - plugin_args={ - 'memory_gb': max_memory, - 'n_procs': 2 - }) + plugin="MultiProc", plugin_args={"memory_gb": max_memory, "n_procs": 2} + ) def test_no_more_threads_than_specified(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - n1 = pe.Node(SingleNodeTestInterface(), name='n1', n_procs=2) - n2 = pe.Node(SingleNodeTestInterface(), name='n2', n_procs=2) - n3 = pe.Node(SingleNodeTestInterface(), name='n3', n_procs=4) - n4 = pe.Node(SingleNodeTestInterface(), name='n4', n_procs=2) + pipe = pe.Workflow(name="pipe") + n1 = pe.Node(SingleNodeTestInterface(), name="n1", n_procs=2) + n2 = pe.Node(SingleNodeTestInterface(), name="n2", n_procs=2) + n3 = 
pe.Node(SingleNodeTestInterface(), name="n3", n_procs=4) + n4 = pe.Node(SingleNodeTestInterface(), name="n4", n_procs=2) - pipe.connect(n1, 'output1', n2, 'input1') - pipe.connect(n1, 'output1', n3, 'input1') - pipe.connect(n2, 'output1', n4, 'input1') - pipe.connect(n3, 'output1', n4, 'input2') + pipe.connect(n1, "output1", n2, "input1") + pipe.connect(n1, "output1", n3, "input1") + pipe.connect(n2, "output1", n4, "input1") + pipe.connect(n3, "output1", n4, "input2") n1.inputs.input1 = 4 max_threads = 2 with pytest.raises(RuntimeError): - pipe.run(plugin='MultiProc', plugin_args={'n_procs': max_threads}) + pipe.run(plugin="MultiProc", plugin_args={"n_procs": max_threads}) +@pytest.mark.skipif( + sys.version_info >= (3, 8), reason="multiprocessing issues in Python 3.8" +) def test_hold_job_until_procs_available(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - n1 = pe.Node(SingleNodeTestInterface(), name='n1', n_procs=2) - n2 = pe.Node(SingleNodeTestInterface(), name='n2', n_procs=2) - n3 = pe.Node(SingleNodeTestInterface(), name='n3', n_procs=2) - n4 = pe.Node(SingleNodeTestInterface(), name='n4', n_procs=2) + pipe = pe.Workflow(name="pipe") + n1 = pe.Node(SingleNodeTestInterface(), name="n1", n_procs=2) + n2 = pe.Node(SingleNodeTestInterface(), name="n2", n_procs=2) + n3 = pe.Node(SingleNodeTestInterface(), name="n3", n_procs=2) + n4 = pe.Node(SingleNodeTestInterface(), name="n4", n_procs=2) - pipe.connect(n1, 'output1', n2, 'input1') - pipe.connect(n1, 'output1', n3, 'input1') - pipe.connect(n2, 'output1', n4, 'input1') - pipe.connect(n3, 'output1', n4, 'input2') + pipe.connect(n1, "output1", n2, "input1") + pipe.connect(n1, "output1", n3, "input1") + pipe.connect(n2, "output1", n4, "input1") + pipe.connect(n3, "output1", n4, "input2") n1.inputs.input1 = 4 max_threads = 2 - pipe.run(plugin='MultiProc', plugin_args={'n_procs': max_threads}) + pipe.run(plugin="MultiProc", plugin_args={"n_procs": max_threads}) diff --git a/nipype/pipeline/plugins/tests/test_oar.py b/nipype/pipeline/plugins/tests/test_oar.py index fd4f0b950c..fcb1efac6e 100644 --- a/nipype/pipeline/plugins/tests/test_oar.py +++ b/nipype/pipeline/plugins/tests/test_oar.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- import os -from shutil import rmtree -from tempfile import mkdtemp +from shutil import which import nipype.interfaces.base as nib import pytest @@ -9,12 +7,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class OarTestInterface(nib.BaseInterface): @@ -27,29 +25,21 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] + outputs["output1"] = [1, self.inputs.input1] return outputs -@pytest.mark.xfail(reason="not known") -def test_run_oar(): - cur_dir = os.getcwd() - temp_dir = mkdtemp(prefix='test_engine_', dir=os.getcwd()) - os.chdir(temp_dir) - - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=OarTestInterface(), name='mod1') - mod2 = pe.MapNode( - interface=OarTestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) +@pytest.mark.skipif(which("oarsub") is None, reason="OAR not installed") +@pytest.mark.timeout(60) +def 
test_run_oargraph(tmp_path): + pipe = pe.Workflow(name="pipe", base_dir=str(tmp_path)) + mod1 = pe.Node(interface=OarTestInterface(), name="mod1") + mod2 = pe.MapNode(interface=OarTestInterface(), iterfield=["input1"], name="mod2") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="OAR") - names = [ - '.'.join((node._hierarchy, node.name)) for node in execgraph.nodes() - ] - node = list(execgraph.nodes())[names.index('pipe.mod1')] - result = node.get_output('output1') + names = [f"{node._hierarchy}.{node.name}" for node in execgraph.nodes()] + node = list(execgraph.nodes())[names.index("pipe.mod1")] + result = node.get_output("output1") assert result == [1, 1] - os.chdir(cur_dir) - rmtree(temp_dir) diff --git a/nipype/pipeline/plugins/tests/test_pbs.py b/nipype/pipeline/plugins/tests/test_pbs.py index f6aa6c88e0..7a44ba3dc7 100644 --- a/nipype/pipeline/plugins/tests/test_pbs.py +++ b/nipype/pipeline/plugins/tests/test_pbs.py @@ -1,8 +1,4 @@ -# -*- coding: utf-8 -*- -import os -from shutil import rmtree -from tempfile import mkdtemp -from time import sleep +from shutil import which import nipype.interfaces.base as nib import pytest @@ -10,12 +6,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class PbsTestInterface(nib.BaseInterface): @@ -28,29 +24,20 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] + outputs["output1"] = [1, self.inputs.input1] return outputs -@pytest.mark.xfail(reason="not known") -def test_run_pbsgraph(): - cur_dir = os.getcwd() - temp_dir = mkdtemp(prefix='test_engine_') - os.chdir(temp_dir) - - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=PbsTestInterface(), name='mod1') - mod2 = pe.MapNode( - interface=PbsTestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) - pipe.base_dir = os.getcwd() +@pytest.mark.skipif(which("qsub") is None, reason="PBS not installed") +@pytest.mark.timeout(60) +def test_run_pbsgraph(tmp_path): + pipe = pe.Workflow(name="pipe", base_dir=str(tmp_path)) + mod1 = pe.Node(interface=PbsTestInterface(), name="mod1") + mod2 = pe.MapNode(interface=PbsTestInterface(), iterfield=["input1"], name="mod2") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="PBSGraph") - names = [ - '.'.join((node._hierarchy, node.name)) for node in execgraph.nodes() - ] - node = list(execgraph.nodes())[names.index('pipe.mod1')] - result = node.get_output('output1') + names = [f"{node._hierarchy}.{node.name}" for node in execgraph.nodes()] + node = list(execgraph.nodes())[names.index("pipe.mod1")] + result = node.get_output("output1") assert result == [1, 1] - os.chdir(cur_dir) - rmtree(temp_dir) diff --git a/nipype/pipeline/plugins/tests/test_sgelike.py b/nipype/pipeline/plugins/tests/test_sgelike.py new file mode 100644 index 0000000000..26c3d70a06 --- /dev/null +++ b/nipype/pipeline/plugins/tests/test_sgelike.py @@ -0,0 +1,36 @@ +from nipype.pipeline.plugins.base import SGELikeBatchManagerBase +from nipype.interfaces.utility import Function +import 
nipype.pipeline.engine as pe +import pytest +from unittest.mock import patch +import subprocess + + +def crasher(): + raise ValueError + + +def submit_batchtask(self, scriptfile, node): + self._pending[1] = node.output_dir() + subprocess.call(["bash", scriptfile]) + return 1 + + +def is_pending(self, taskid): + return False + + +@patch.object(SGELikeBatchManagerBase, "_submit_batchtask", new=submit_batchtask) +@patch.object(SGELikeBatchManagerBase, "_is_pending", new=is_pending) +def test_crashfile_creation(tmp_path): + pipe = pe.Workflow(name="pipe", base_dir=str(tmp_path)) + pipe.config["execution"]["crashdump_dir"] = str(tmp_path) + pipe.add_nodes([pe.Node(interface=Function(function=crasher), name="crasher")]) + sgelike_plugin = SGELikeBatchManagerBase("") + with pytest.raises(RuntimeError): + assert pipe.run(plugin=sgelike_plugin) + + crashfiles = list(tmp_path.glob("crash*crasher*.pklz")) + list( + tmp_path.glob("crash*crasher*.txt") + ) + assert len(crashfiles) == 1 diff --git a/nipype/pipeline/plugins/tests/test_somaflow.py b/nipype/pipeline/plugins/tests/test_somaflow.py index 68cefcdc17..4c074522d8 100644 --- a/nipype/pipeline/plugins/tests/test_somaflow.py +++ b/nipype/pipeline/plugins/tests/test_somaflow.py @@ -1,6 +1,4 @@ -# -*- coding: utf-8 -*- import os -from time import sleep import nipype.interfaces.base as nib import pytest @@ -10,12 +8,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class SomaTestInterface(nib.BaseInterface): @@ -28,7 +26,7 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] + outputs["output1"] = [1, self.inputs.input1] return outputs @@ -36,17 +34,14 @@ def _list_outputs(self): def test_run_somaflow(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=SomaTestInterface(), name='mod1') - mod2 = pe.MapNode( - interface=SomaTestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=SomaTestInterface(), name="mod1") + mod2 = pe.MapNode(interface=SomaTestInterface(), iterfield=["input1"], name="mod2") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="SomaFlow") - names = [ - '.'.join((node._hierarchy, node.name)) for node in execgraph.nodes() - ] - node = list(execgraph.nodes())[names.index('pipe.mod1')] - result = node.get_output('output1') + names = [f"{node._hierarchy}.{node.name}" for node in execgraph.nodes()] + node = list(execgraph.nodes())[names.index("pipe.mod1")] + result = node.get_output("output1") assert result == [1, 1] diff --git a/nipype/pipeline/plugins/tests/test_tools.py b/nipype/pipeline/plugins/tests/test_tools.py index ee9b2e4de7..e21ef42072 100644 --- a/nipype/pipeline/plugins/tests/test_tools.py +++ b/nipype/pipeline/plugins/tests/test_tools.py @@ -1,42 +1,37 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine module """ -import numpy as np -import scipy.sparse as ssp import re 
-import mock +from unittest import mock from nipype.pipeline.plugins.tools import report_crash def test_report_crash(): - with mock.patch('pickle.dump', mock.MagicMock()) as mock_pickle_dump: - with mock.patch('nipype.pipeline.plugins.tools.format_exception', - mock.MagicMock()): # see iss 1517 + with mock.patch("pickle.dump", mock.MagicMock()) as mock_pickle_dump: + with mock.patch( + "nipype.pipeline.plugins.tools.format_exception", mock.MagicMock() + ): # see iss 1517 mock_pickle_dump.return_value = True - mock_node = mock.MagicMock(name='mock_node') - mock_node._id = 'an_id' + mock_node = mock.MagicMock(name="mock_node") + mock_node._id = "an_id" mock_node.config = { - 'execution': { - 'crashdump_dir': '.', - 'crashfile_format': 'pklz', - } + "execution": {"crashdump_dir": ".", "crashfile_format": "pklz"} } actual_crashfile = report_crash(mock_node) - expected_crashfile = re.compile( - '.*/crash-.*-an_id-[0-9a-f\-]*.pklz') + expected_crashfile = re.compile(r".*/crash-.*-an_id-[0-9a-f\-]*.pklz") - assert expected_crashfile.match( - actual_crashfile).group() == actual_crashfile + assert ( + expected_crashfile.match(actual_crashfile).group() == actual_crashfile + ) assert mock_pickle_dump.call_count == 1 -''' +""" Can use the following code to test that a mapnode crash continues successfully Need to put this into a unit-test with a timeout @@ -60,4 +55,4 @@ def func(arg1): wf.base_dir = '/tmp' wf.run(plugin='MultiProc') -''' +""" diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py index 54fffd2398..bce3eb82da 100644 --- a/nipype/pipeline/plugins/tools.py +++ b/nipype/pipeline/plugins/tools.py @@ -1,12 +1,7 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Common graph operations for execution """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open - import os import getpass from socket import gethostname @@ -16,50 +11,66 @@ from traceback import format_exception from ... 
import logging -from ...utils.filemanip import savepkl, crash2txt, makedirs +from ...utils.filemanip import savepkl, crash2txt -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") def report_crash(node, traceback=None, hostname=None): - """Writes crash related information to a file - """ + """Writes crash related information to a file""" name = node._id host = None - if node.result and getattr(node.result, 'runtime'): - if isinstance(node.result.runtime, list): - host = node.result.runtime[0].hostname - else: - host = node.result.runtime.hostname + traceback = traceback or format_exception(*sys.exc_info()) + + try: + result = node.result + except FileNotFoundError: + traceback += """ + +When creating this crashfile, the results file corresponding +to the node could not be found.""".splitlines( + keepends=True + ) + except Exception as exc: + traceback += """ + +During the creation of this crashfile triggered by the above exception, +another exception occurred:\n\n{}.""".format( + exc + ).splitlines( + keepends=True + ) + else: + if getattr(result, "runtime", None): + if isinstance(result.runtime, list): + host = result.runtime[0].hostname + else: + host = result.runtime.hostname # Try everything to fill in the host host = host or hostname or gethostname() - logger.error('Node %s failed to run on host %s.', name, host) - if not traceback: - traceback = format_exception(*sys.exc_info()) - timeofcrash = strftime('%Y%m%d-%H%M%S') + logger.error("Node %s failed to run on host %s.", name, host) + timeofcrash = strftime("%Y%m%d-%H%M%S") try: login_name = getpass.getuser() except KeyError: - login_name = 'UID{:d}'.format(os.getuid()) - crashfile = 'crash-%s-%s-%s-%s' % (timeofcrash, login_name, name, - str(uuid.uuid4())) - crashdir = node.config['execution'].get('crashdump_dir', os.getcwd()) + login_name = f"UID{os.getuid():d}" + crashfile = f"crash-{timeofcrash}-{login_name}-{name}-{uuid.uuid4()}" + crashdir = node.config["execution"].get("crashdump_dir", os.getcwd()) - makedirs(crashdir, exist_ok=True) + os.makedirs(crashdir, exist_ok=True) crashfile = os.path.join(crashdir, crashfile) - if node.config['execution']['crashfile_format'].lower() in ['text', 'txt']: - crashfile += '.txt' + if node.config["execution"]["crashfile_format"].lower() in ("text", "txt", ".txt"): + crashfile += ".txt" else: - crashfile += '.pklz' + crashfile += ".pklz" - logger.error('Saving crash info to %s\n%s', crashfile, ''.join(traceback)) - if crashfile.endswith('.txt'): + logger.error("Saving crash info to %s\n%s", crashfile, "".join(traceback)) + if crashfile.endswith(".txt"): crash2txt(crashfile, dict(node=node, traceback=traceback)) else: - savepkl(crashfile, dict(node=node, traceback=traceback), - versioning=True) + savepkl(crashfile, dict(node=node, traceback=traceback), versioning=True) return crashfile @@ -72,30 +83,27 @@ def report_nodes_not_run(notrun): if notrun: logger.info("***********************************") for info in notrun: - logger.error("could not run node: %s" % '.'.join( - (info['node']._hierarchy, info['node']._id))) - logger.info("crashfile: %s" % info['crashfile']) + node = info["node"] + logger.error(f"could not run node: {node._hierarchy}.{node._id}") + logger.info("crashfile: %s" % info["crashfile"]) logger.debug("The following dependent nodes were not run") - for subnode in info['dependents']: + for subnode in info["dependents"]: logger.debug(subnode._id) logger.info("***********************************") - raise RuntimeError(('Workflow did not 
execute cleanly. ' - 'Check log for details')) def create_pyscript(node, updatehash=False, store_exception=True): # pickle node - timestamp = strftime('%Y%m%d_%H%M%S') + timestamp = strftime("%Y%m%d_%H%M%S") if node._hierarchy: - suffix = '%s_%s_%s' % (timestamp, node._hierarchy, node._id) - batch_dir = os.path.join(node.base_dir, - node._hierarchy.split('.')[0], 'batch') + suffix = f"{timestamp}_{node._hierarchy}_{node._id}" + batch_dir = os.path.join(node.base_dir, node._hierarchy.split(".")[0], "batch") else: - suffix = '%s_%s' % (timestamp, node._id) - batch_dir = os.path.join(node.base_dir, 'batch') + suffix = f"{timestamp}_{node._id}" + batch_dir = os.path.join(node.base_dir, "batch") if not os.path.exists(batch_dir): os.makedirs(batch_dir) - pkl_file = os.path.join(batch_dir, 'node_%s.pklz' % suffix) + pkl_file = os.path.join(batch_dir, "node_%s.pklz" % suffix) savepkl(pkl_file, dict(node=node, updatehash=updatehash)) mpl_backend = node.config["execution"]["matplotlib_backend"] # create python script to load and trap exception @@ -110,7 +118,13 @@ def create_pyscript(node, updatehash=False, store_exception=True): can_import_matplotlib = False pass +import os +value = os.environ.get('NIPYPE_NO_ET', None) +if value is None: + # disable ET for any submitted job + os.environ['NIPYPE_NO_ET'] = "1" from nipype import config, logging + from nipype.utils.filemanip import loadpkl, savepkl from socket import gethostname from traceback import format_exception @@ -119,8 +133,7 @@ def create_pyscript(node, updatehash=False, store_exception=True): batchdir = '%s' from nipype.utils.filemanip import loadpkl, savepkl try: - if not sys.version_info < (2, 7): - from collections import OrderedDict + from collections import OrderedDict config_dict=%s config.update_config(config_dict) ## Only configure matplotlib if it was successfully imported, @@ -159,7 +172,7 @@ def create_pyscript(node, updatehash=False, store_exception=True): raise Exception(e) """ cmdstr = cmdstr % (mpl_backend, pkl_file, batch_dir, node.config, suffix) - pyscript = os.path.join(batch_dir, 'pyscript_%s.py' % suffix) - with open(pyscript, 'wt') as fp: + pyscript = os.path.join(batch_dir, "pyscript_%s.py" % suffix) + with open(pyscript, "w") as fp: fp.writelines(cmdstr) return pyscript diff --git a/nipype/pkg_info.py b/nipype/pkg_info.py index 8492d237d7..4370f495f8 100644 --- a/nipype/pkg_info.py +++ b/nipype/pkg_info.py @@ -1,22 +1,14 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from future import standard_library -standard_library.install_aliases() -from builtins import open import configparser import os import sys import subprocess -COMMIT_INFO_FNAME = 'COMMIT_INFO.txt' -PY3 = sys.version_info[0] >= 3 +COMMIT_INFO_FNAME = "COMMIT_INFO.txt" def pkg_commit_hash(pkg_path): - ''' Get short form of commit hash given directory `pkg_path` + """Get short form of commit hash given directory `pkg_path` There should be a file called 'COMMIT_INFO.txt' in `pkg_path`. 
This is a file in INI file format, with at least one section: ``commit hash`` and two @@ -44,40 +36,36 @@ def pkg_commit_hash(pkg_path): Where we got the hash from - description hash_str : str short form of hash - ''' + """ # Try and get commit from written commit text file pth = os.path.join(pkg_path, COMMIT_INFO_FNAME) if not os.path.isfile(pth): - raise IOError('Missing commit info file %s' % pth) + raise OSError("Missing commit info file %s" % pth) cfg_parser = configparser.RawConfigParser() - with open(pth, encoding='utf-8') as fp: - if sys.version_info >= (3, 2): - cfg_parser.read_file(fp) - else: - cfg_parser.readfp(fp) - archive_subst = cfg_parser.get('commit hash', 'archive_subst_hash') - if not archive_subst.startswith('$Format'): # it has been substituted - return 'archive substitution', archive_subst - install_subst = cfg_parser.get('commit hash', 'install_hash') - if install_subst != '': - return 'installation', install_subst + with open(pth, encoding="utf-8") as fp: + cfg_parser.read_file(fp) + archive_subst = cfg_parser.get("commit hash", "archive_subst_hash") + if not archive_subst.startswith("$Format"): # it has been substituted + return "archive substitution", archive_subst + install_subst = cfg_parser.get("commit hash", "install_hash") + if install_subst != "": + return "installation", install_subst # maybe we are in a repository proc = subprocess.Popen( - 'git rev-parse --short HEAD', + "git rev-parse --short HEAD", stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=pkg_path, - shell=True) + shell=True, + ) repo_commit, _ = proc.communicate() if repo_commit: - if PY3: - repo_commit = repo_commit.decode() - return 'repository', repo_commit.strip() - return '(none found)', '' + return "repository", repo_commit.decode().strip() + return "(none found)", "" def get_pkg_info(pkg_path): - ''' Return dict describing the context of this package + """Return dict describing the context of this package Parameters ---------- @@ -88,16 +76,15 @@ def get_pkg_info(pkg_path): ------- context : dict with named parameters of interest - ''' + """ src, hsh = pkg_commit_hash(pkg_path) from .info import VERSION - if not PY3: - src, hsh, VERSION = src.encode(), hsh.encode(), VERSION.encode() import networkx import nibabel import numpy import scipy import traits + return dict( pkg_path=pkg_path, commit_source=src, @@ -110,4 +97,5 @@ def get_pkg_info(pkg_path): scipy_version=scipy.__version__, networkx_version=networkx.__version__, nibabel_version=nibabel.__version__, - traits_version=traits.__version__) + traits_version=traits.__version__, + ) diff --git a/nipype/pytest.ini b/nipype/pytest.ini index 70f12b64aa..b963665c4a 100644 --- a/nipype/pytest.ini +++ b/nipype/pytest.ini @@ -1,6 +1,7 @@ [pytest] norecursedirs = .git build dist doc nipype/external tools examples src -addopts = --doctest-modules -n auto +addopts = --doctest-modules doctest_optionflags = ALLOW_UNICODE NORMALIZE_WHITESPACE env = PYTHONHASHSEED=0 +junit_family=xunit2 diff --git a/nipype/refs.py b/nipype/refs.py index 0478d7ceed..9d81b314eb 100644 --- a/nipype/refs.py +++ b/nipype/refs.py @@ -1,7 +1,7 @@ # Use duecredit (duecredit.org) to provide a citation to relevant work to # be cited. 
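A brief usage sketch of the two helpers above; the values shown in the comments are illustrative only::

    import os.path as op

    import nipype
    from nipype.pkg_info import get_pkg_info, pkg_commit_hash

    pkg_path = op.dirname(nipype.__file__)
    src, hsh = pkg_commit_hash(pkg_path)  # e.g. ("repository", "abc1234")
    info = get_pkg_info(pkg_path)  # also records numpy/scipy/nibabel versions
    print(src, hsh, info["commit_hash"])
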
This does nothing, unless the user has duecredit installed, # And calls this with duecredit (as in `python -m duecredit script.py`): -from .external.due import due, Doi, BibTeX +from .external.due import due, Doi due.cite( Doi("10.3389/fninf.2011.00013"), diff --git a/nipype/scripts/cli.py b/nipype/scripts/cli.py index 59d8672cfb..8c544b8967 100644 --- a/nipype/scripts/cli.py +++ b/nipype/scripts/cli.py @@ -1,8 +1,6 @@ #!python # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from io import open - import click from .instance import list_interfaces @@ -27,13 +25,14 @@ def cli(): @cli.command(context_settings=CONTEXT_SETTINGS) -@click.argument('logdir', type=ExistingDirPath, callback=check_not_none) +@click.argument("logdir", type=ExistingDirPath, callback=check_not_none) @click.option( - '-r', - '--regex', + "-r", + "--regex", type=RegularExpression(), callback=check_not_none, - help='Regular expression to be searched in each traceback.') + help="Regular expression to be searched in each traceback.", +) def search(logdir, regex): """Search for tracebacks content. @@ -54,26 +53,27 @@ def search(logdir, regex): @cli.command(context_settings=CONTEXT_SETTINGS) -@click.argument('crashfile', type=ExistingFilePath, callback=check_not_none) +@click.argument("crashfile", type=ExistingFilePath, callback=check_not_none) @click.option( - '-r', '--rerun', is_flag=True, flag_value=True, help='Rerun crashed node.') + "-r", "--rerun", is_flag=True, flag_value=True, help="Rerun crashed node." +) @click.option( - '-d', - '--debug', + "-d", + "--debug", is_flag=True, flag_value=True, - help='Enable Python debugger when re-executing.') + help="Enable Python debugger when re-executing.", +) @click.option( - '-i', - '--ipydebug', + "-i", + "--ipydebug", is_flag=True, flag_value=True, - help='Enable IPython debugger when re-executing.') + help="Enable IPython debugger when re-executing.", +) @click.option( - '-w', - '--dir', - type=ExistingDirPath, - help='Directory where to run the node in.') + "-w", "--dir", type=ExistingDirPath, help="Directory where to run the node in." +) def crash(crashfile, rerun, debug, ipydebug, dir): """Display Nipype crash files. @@ -85,17 +85,19 @@ def crash(crashfile, rerun, debug, ipydebug, dir): """ from .crash_files import display_crash_file - debug = 'ipython' if ipydebug else debug - if debug == 'ipython': + debug = "ipython" if ipydebug else debug + if debug == "ipython": import sys from IPython.core import ultratb + sys.excepthook = ultratb.FormattedTB( - mode='Verbose', color_scheme='Linux', call_pdb=1) + mode="Verbose", color_scheme="Linux", call_pdb=1 + ) display_crash_file(crashfile, rerun, debug, dir) @cli.command(context_settings=CONTEXT_SETTINGS) -@click.argument('pklz_file', type=ExistingFilePath, callback=check_not_none) +@click.argument("pklz_file", type=ExistingFilePath, callback=check_not_none) def show(pklz_file): """Print the content of Nipype node .pklz file. 
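For orientation, a minimal sketch of what `show` reads from disk, assuming a crash file written by `report_crash` above; the filename is hypothetical::

    from nipype.utils.filemanip import loadcrash

    data = loadcrash("crash-20200101-120000-user-an_id-0000.pklz")
    print("".join(data["traceback"]))  # the saved traceback lines
    print(data["node"].inputs)  # the pickled node
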
@@ -110,20 +112,17 @@ def show(pklz_file): @cli.command(context_settings=UNKNOWN_OPTIONS) -@click.argument( - 'module', type=PythonModule(), required=False, callback=check_not_none) -@click.argument('interface', type=str, required=False) +@click.argument("module", type=PythonModule(), required=False, callback=check_not_none) +@click.argument("interface", type=str, required=False) @click.option( - '--list', + "--list", is_flag=True, flag_value=True, - help='List the available Interfaces inside the given module.') + help="List the available Interfaces inside the given module.", +) @click.option( - '-h', - '--help', - is_flag=True, - flag_value=True, - help='Show help message and exit.') + "-h", "--help", is_flag=True, flag_value=True, help="Show help message and exit." +) @click.pass_context def run(ctx, module, interface, list, help): """Run a Nipype Interface. @@ -144,18 +143,16 @@ def run(ctx, module, interface, list, help): # print the list of available interfaces for the given module elif (module_given and list) or (module_given and not interface): iface_names = list_interfaces(module) - click.echo('Available Interfaces:') + click.echo("Available Interfaces:") for if_name in iface_names: - click.echo(' {}'.format(if_name)) + click.echo(f" {if_name}") # check the interface - elif (module_given and interface): + elif module_given and interface: # create the argument parser - description = "Run {}".format(interface) - prog = " ".join( - [ctx.command_path, module.__name__, interface] + ctx.args) - iface_parser = argparse.ArgumentParser( - description=description, prog=prog) + description = f"Run {interface}" + prog = " ".join([ctx.command_path, module.__name__, interface] + ctx.args) + iface_parser = argparse.ArgumentParser(description=description, prog=prog) # instantiate the interface node = getattr(module, interface)() @@ -166,8 +163,10 @@ def run(ctx, module, interface, list, help): try: iface_parser.print_help() except: - print('An error ocurred when trying to print the full' - 'command help, printing usage.') + print( + "An error occurred when trying to print the full " + "command help, printing usage." + ) finally: iface_parser.print_usage() else: @@ -194,65 +193,94 @@ def convert(): "--interface", type=str, required=True, - help="Name of the Nipype interface to export.") + help="Name of the Nipype interface to export.", +) @click.option( "-m", "--module", type=PythonModule(), required=True, callback=check_not_none, - help="Module where the interface is defined.") + help="Module where the interface is defined.", +) @click.option( "-o", "--output", type=UnexistingFilePath, required=True, callback=check_not_none, - help="JSON file name where the Boutiques descriptor will be " - "written.") + help="JSON file name where the Boutiques descriptor will be written.", +) @click.option( - "-t", - "--ignored-template-inputs", + "-c", + "--container-image", + required=True, type=str, - multiple=True, - help="Interface inputs ignored in path template creations.") + help="Name of the container image where the tool is installed.", +) @click.option( - "-d", - "--docker-image", + "-p", + "--container-type", + required=True, type=str, - help="Name of the Docker image where the Nipype interface is " - "available.") + help="Type of container image (Docker or Singularity).", +) @click.option( - "-r", - "--docker-index", + "-x", + "--container-index", type=str, - help="Docker index where the Docker image is stored (e.g. 
" - "http://index.docker.io).") + help="Optional index where the image is available (e.g. " + "http://index.docker.io).", +) @click.option( - "-n", - "--ignore-template-numbers", - is_flag=True, - flag_value=True, - help="Ignore all numbers in path template creations.") + "-g", + "--ignore-inputs", + type=str, + multiple=True, + help="List of interface inputs to not include in the descriptor.", +) @click.option( - "-v", - "--verbose", - is_flag=True, - flag_value=True, - help="Enable verbose output.") -def boutiques(interface, module, output, ignored_template_inputs, docker_image, - docker_index, ignore_template_numbers, verbose): + "-v", "--verbose", is_flag=True, flag_value=True, help="Print information messages." +) +@click.option( + "-a", "--author", type=str, help="Author of the tool (required for publishing)." +) +@click.option( + "-t", + "--tags", + type=str, + help="JSON string containing tags to include in the descriptor," + 'e.g. "{"key1": "value1"}"', +) +def boutiques( + module, + interface, + container_image, + container_type, + output, + container_index, + verbose, + author, + ignore_inputs, + tags, +): """Nipype to Boutiques exporter. See Boutiques specification at https://github.com/boutiques/schema. """ from nipype.utils.nipype2boutiques import generate_boutiques_descriptor - # Generates JSON string - json_string = generate_boutiques_descriptor( - module, interface, ignored_template_inputs, docker_image, docker_index, - verbose, ignore_template_numbers) - - # Writes JSON string to file - with open(output, 'w') as f: - f.write(json_string) + # Generates JSON string and saves it to file + generate_boutiques_descriptor( + module, + interface, + container_image, + container_type, + container_index, + verbose, + True, + output, + author, + ignore_inputs, + tags, + ) diff --git a/nipype/scripts/crash_files.py b/nipype/scripts/crash_files.py index b7b83dff5c..1caa0c430b 100644 --- a/nipype/scripts/crash_files.py +++ b/nipype/scripts/crash_files.py @@ -1,7 +1,5 @@ """Utilities to manipulate and search through .pklz crash files.""" -import re -import sys import os.path as op from glob import glob @@ -18,7 +16,7 @@ def load_pklz_traceback(crash_filepath): except: raise else: - return '\n'.join(data['traceback']) + return "\n".join(data["traceback"]) def iter_tracebacks(logdir): @@ -38,7 +36,7 @@ def iter_tracebacks(logdir): traceback: str """ - crash_files = sorted(glob(op.join(logdir, '*.pkl*'))) + crash_files = sorted(glob(op.join(logdir, "*.pkl*"))) for cf in crash_files: yield cf, load_pklz_traceback(cf) @@ -50,9 +48,9 @@ def display_crash_file(crashfile, rerun, debug, directory): crash_data = loadcrash(crashfile) node = None - if 'node' in crash_data: - node = crash_data['node'] - tb = crash_data['traceback'] + if "node" in crash_data: + node = crash_data["node"] + tb = crash_data["traceback"] print("\n") print("File: %s" % crashfile) @@ -67,7 +65,7 @@ def display_crash_file(crashfile, rerun, debug, directory): print(node.inputs) print("\n") print("Traceback: ") - print(''.join(tb)) + print("".join(tb)) print("\n") if rerun: @@ -76,12 +74,13 @@ def display_crash_file(crashfile, rerun, debug, directory): return print("Rerunning node") node.base_dir = directory - node.config = {'execution': {'crashdump_dir': '/tmp'}} + node.config = {"execution": {"crashdump_dir": "/tmp"}} try: node.run() except: - if debug and debug != 'ipython': + if debug and debug != "ipython": import pdb + pdb.post_mortem() else: raise diff --git a/nipype/scripts/instance.py b/nipype/scripts/instance.py 
index 1f44a43bda..d9cc425e9a 100644 --- a/nipype/scripts/instance.py +++ b/nipype/scripts/instance.py @@ -1,7 +1,7 @@ -# -*- coding: utf-8 -*- """ Import lib and class meta programming utilities. """ + import inspect import importlib @@ -29,8 +29,7 @@ def import_module(module_path): try: mod = importlib.import_module(module_path) except: - raise ImportError( - 'Error when importing object {}.'.format(module_path)) + raise ImportError(f"Error when importing object {module_path}.") else: return mod @@ -40,7 +39,7 @@ def list_interfaces(module): the given module. """ iface_names = [] - for k, v in sorted(list(module.__dict__.items())): + for k, v in sorted(module.__dict__.items()): if inspect.isclass(v) and issubclass(v, Interface): iface_names.append(k) return iface_names diff --git a/nipype/scripts/utils.py b/nipype/scripts/utils.py index ce9acde7fd..8d8dc52627 100644 --- a/nipype/scripts/utils.py +++ b/nipype/scripts/utils.py @@ -1,11 +1,6 @@ -# -*- coding: utf-8 -*- """ Utilities for the CLI functions. """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from builtins import bytes, str import re import click @@ -16,7 +11,7 @@ from ..interfaces.base.support import get_trait_desc # different context options -CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help']) +CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) UNKNOWN_OPTIONS = dict(allow_extra_args=True, ignore_unknown_options=True) # specification of existing ParamTypes @@ -28,32 +23,31 @@ # validators def check_not_none(ctx, param, value): if value is None: - raise click.BadParameter('got {}.'.format(value)) + raise click.BadParameter(f"got {value}.") return value # declare custom click.ParamType class RegularExpression(click.ParamType): - name = 'regex' + name = "regex" def convert(self, value, param, ctx): try: rex = re.compile(value, re.IGNORECASE) except ValueError: - self.fail('%s is not a valid regular expression.' % value, param, - ctx) + self.fail("%s is not a valid regular expression." % value, param, ctx) else: return rex class PythonModule(click.ParamType): - name = 'Python module path' + name = "Python module path" def convert(self, value, param, ctx): try: module = import_module(value) except ValueError: - self.fail('%s is not a valid Python module.' % value, param, ctx) + self.fail("%s is not a valid Python module." 
% value, param, ctx) else: return module @@ -62,27 +56,27 @@ def add_args_options(arg_parser, interface): """Add arguments to `arg_parser` to create a CLI for `interface`.""" inputs = interface.input_spec() for name, spec in sorted(interface.inputs.traits(transient=None).items()): - desc = "\n".join(get_trait_desc(inputs, name, spec))[len(name) + 2:] + desc = "\n".join(get_trait_desc(inputs, name, spec))[len(name) + 2 :] # Escape any % signs with a % - desc = desc.replace('%', '%%') + desc = desc.replace("%", "%%") args = {} has_multiple_inner_traits = False if spec.is_trait_type(traits.Bool): args["default"] = getattr(inputs, name) - args["action"] = 'store_true' + args["action"] = "store_true" # current support is for simple trait types if not spec.inner_traits: if not spec.is_trait_type(traits.TraitCompound): trait_type = type(spec.trait_type.default_value) if trait_type in (bytes, str, int, float): - if trait_type == bytes: + if trait_type is bytes: trait_type = str args["type"] = trait_type elif len(spec.inner_traits) == 1: trait_type = type(spec.inner_traits[0].trait_type.default_value) - if trait_type == bytes: + if trait_type is bytes: trait_type = str if trait_type in (bytes, bool, str, int, float): args["type"] = trait_type @@ -95,8 +89,9 @@ def add_args_options(arg_parser, interface): if spec.is_trait_type(InputMultiPath): args["nargs"] = "+" elif spec.is_trait_type(traits.List): - if (spec.trait_type.minlen == spec.trait_type.maxlen) and \ - spec.trait_type.maxlen: + if ( + spec.trait_type.minlen == spec.trait_type.maxlen + ) and spec.trait_type.maxlen: args["nargs"] = spec.trait_type.maxlen else: args["nargs"] = "+" @@ -105,22 +100,23 @@ def add_args_options(arg_parser, interface): if has_multiple_inner_traits: raise NotImplementedError( - ('This interface cannot be used. via the' - ' command line as multiple inner traits' - ' are currently not supported for mandatory' - ' argument: {}.'.format(name))) + "This interface cannot be used
via the" + " command line as multiple inner traits" + " are currently not supported for mandatory" + " argument: {}.".format(name) + ) arg_parser.add_argument(name, help=desc, **args) else: if spec.is_trait_type(InputMultiPath): args["nargs"] = "*" elif spec.is_trait_type(traits.List): - if (spec.trait_type.minlen == spec.trait_type.maxlen) and \ - spec.trait_type.maxlen: + if ( + spec.trait_type.minlen == spec.trait_type.maxlen + ) and spec.trait_type.maxlen: args["nargs"] = spec.trait_type.maxlen else: args["nargs"] = "*" if not has_multiple_inner_traits: - arg_parser.add_argument( - "--%s" % name, dest=name, help=desc, **args) + arg_parser.add_argument("--%s" % name, dest=name, help=desc, **args) return arg_parser diff --git a/nipype/sphinxext/__init__.py b/nipype/sphinxext/__init__.py index 5f8ba4da2b..c30bc66036 100644 --- a/nipype/sphinxext/__init__.py +++ b/nipype/sphinxext/__init__.py @@ -1,6 +1,3 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, absolute_import, - unicode_literals) diff --git a/nipype/sphinxext/apidoc/__init__.py b/nipype/sphinxext/apidoc/__init__.py new file mode 100644 index 0000000000..151011bdfc --- /dev/null +++ b/nipype/sphinxext/apidoc/__init__.py @@ -0,0 +1,185 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Settings for sphinxext.interfaces and connection to sphinx-apidoc.""" +import re +from sphinx.ext.napoleon import ( + Config as NapoleonConfig, + _patch_python_domain, + _skip_member as _napoleon_skip_member, +) + +from ... import __version__ +from ...interfaces.base import BaseInterface, TraitedSpec +from .docstring import NipypeDocstring, InterfaceDocstring + + +class Config(NapoleonConfig): + r""" + Sphinx-nipype extension settings in ``conf.py``. + + Listed below are all the settings used by this extension + and their default values. + These settings can be changed in the Sphinx's ``conf.py`` file. + Make sure that ``nipype.sphinxext.interfaces`` is enabled + in ``conf.py``:: + + # conf.py + + # Add this extension to the corresponding list: + extensions = ['nipype.sphinxext.interfaces'] + + # NiPype settings + nipype_references = False + + Attributes + ---------- + nipype_skip_classes: :obj:`bool` (Defaults to True) + True to include referenced publications with the interface + (requires duecredit to be installed). + + """ + + _config_values = { + "nipype_skip_classes": ( + ["Tester", "InputSpec", "OutputSpec", "Numpy", "NipypeTester"], + "env", + ), + **NapoleonConfig._config_values, + } + + +def setup(app): + # type: (Sphinx) -> Dict[unicode, Any] + """ + Sphinx extension setup function. + + When the extension is loaded, Sphinx imports this module and executes + the ``setup()`` function, which in turn notifies Sphinx of everything + the extension offers. 
+ + Parameters + ---------- + app : sphinx.application.Sphinx + Application object representing the Sphinx process + + See Also + -------- + `The Sphinx documentation on Extensions + <http://sphinx-doc.org/extensions.html>`_ + `The Extension Tutorial <http://sphinx-doc.org/extdev/tutorial.html>`_ + `The Extension API <http://sphinx-doc.org/extdev/appapi.html>`_ + + """ + from sphinx.application import Sphinx + + if not isinstance(app, Sphinx): + # probably called by tests + return {"version": __version__, "parallel_read_safe": True} + + _patch_python_domain() + + app.setup_extension("sphinx.ext.autodoc") + app.connect("autodoc-process-docstring", _process_docstring) + app.connect("autodoc-skip-member", _skip_member) + + for name, (default, rebuild) in Config._config_values.items(): + app.add_config_value(name, default, rebuild) + return {"version": __version__, "parallel_read_safe": True} + + +def _process_docstring(app, what, name, obj, options, lines): + # type: (Sphinx, unicode, unicode, Any, Any, List[unicode]) -> None + """Process the docstring for a given python object. + Called when autodoc has read and processed a docstring. `lines` is a list + of docstring lines that `_process_docstring` modifies in place to change + what Sphinx outputs. + The following settings in conf.py control what styles of docstrings will + be parsed: + * ``napoleon_google_docstring`` -- parse Google style docstrings + * ``napoleon_numpy_docstring`` -- parse NumPy style docstrings + Parameters + ---------- + app : sphinx.application.Sphinx + Application object representing the Sphinx process. + what : str + A string specifying the type of the object to which the docstring + belongs. Valid values: "module", "class", "exception", "function", + "method", "attribute". + name : str + The fully qualified name of the object. + obj : module, class, exception, function, method, or attribute + The object to which the docstring belongs. + options : sphinx.ext.autodoc.Options + The options given to the directive: an object with attributes + inherited_members, undoc_members, show_inheritance and noindex that + are True if the flag option of same name was given to the auto + directive. + lines : list of str + The lines of the docstring, see above. + .. note:: `lines` is modified *in place* + """ + result_lines = lines + # Parse Nipype Interfaces + if what == "class" and issubclass(obj, BaseInterface): + result_lines[:] = InterfaceDocstring( + result_lines, app.config, app, what, name, obj, options + ).lines() + + result_lines = NipypeDocstring( + result_lines, app.config, app, what, name, obj, options + ).lines() + lines[:] = result_lines[:] + + +def _skip_member(app, what, name, obj, skip, options): + # type: (Sphinx, unicode, unicode, Any, bool, Any) -> bool + """ + Determine if private and special class members are included in docs. + + Parameters + ---------- + app : sphinx.application.Sphinx + Application object representing the Sphinx process + what : str + A string specifying the type of the object to which the member + belongs. Valid values: "module", "class", "exception", "function", + "method", "attribute". + name : str + The name of the member. + obj : module, class, exception, function, method, or attribute. + For example, if the member is the __init__ method of class A, then + `obj` will be `A.__init__`. 
+ skip : bool + A boolean indicating if autodoc will skip this member if `_skip_member` + does not override the decision + options : sphinx.ext.autodoc.Options + The options given to the directive: an object with attributes + inherited_members, undoc_members, show_inheritance and noindex that + are True if the flag option of same name was given to the auto + directive. + Returns + ------- + bool + True if the member should be skipped during creation of the docs, + False if it should be included in the docs. + + """ + # Parse Nipype Interfaces + patterns = [ + pat if hasattr(pat, "search") else re.compile(pat) + for pat in app.config.nipype_skip_classes + ] + isbase = False + try: + isbase = issubclass(obj, BaseInterface) + if issubclass(obj, TraitedSpec): + return True + except TypeError: + pass + + if isbase: + for pattern in patterns: + if pattern.search(name): + return True + + return _napoleon_skip_member(app, what, name, obj, skip, options) diff --git a/nipype/sphinxext/apidoc/docstring.py b/nipype/sphinxext/apidoc/docstring.py new file mode 100644 index 0000000000..cbecc0a5de --- /dev/null +++ b/nipype/sphinxext/apidoc/docstring.py @@ -0,0 +1,175 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Reformat interface docstrings.""" +import re +from sphinx.locale import _ +from sphinx.ext.napoleon.docstring import NumpyDocstring + + +class NipypeDocstring(NumpyDocstring): + """Patch the NumpyDocstring from napoleon to get special section headers.""" + + def _parse_parameters_section(self, section): + # type: (unicode) -> List[unicode] + labels = { + "args": _("Parameters"), + "arguments": _("Parameters"), + "parameters": _("Parameters"), + } # type: Dict[unicode, unicode] + label = labels.get(section.lower(), section) + + fields = self._consume_fields() + if self._config.napoleon_use_param: + return self._format_docutils_params(fields) + + return self._format_fields(label, fields) + + +class InterfaceDocstring(NipypeDocstring): + """ + Convert docstrings of Nipype Interfaces to reStructuredText. + + Parameters + ---------- + docstring : :obj:`str` or :obj:`list` of :obj:`str` + The docstring to parse, given either as a string or split into + individual lines. + config: :obj:`sphinx.ext.napoleon.Config` or :obj:`sphinx.config.Config` + The configuration settings to use. If not given, defaults to the + config object on `app`; or if `app` is not given defaults to + a new :class:`nipype.sphinxext.apidoc.Config` object. + + Other Parameters + ---------------- + app : :class:`sphinx.application.Sphinx`, optional + Application object representing the Sphinx process. + what : :obj:`str`, optional + A string specifying the type of the object to which the docstring + belongs. Valid values: "module", "class", "exception", "function", + "method", "attribute". + name : :obj:`str`, optional + The fully qualified name of the object. + obj : module, class, exception, function, method, or attribute + The object to which the docstring belongs. + options : :class:`sphinx.ext.autodoc.Options`, optional + The options given to the directive: an object with attributes + inherited_members, undoc_members, show_inheritance and noindex that + are True if the flag option of same name was given to the auto + directive. 
+ + """ + + _name_rgx = re.compile( + r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" + r" (?P[a-zA-Z0-9_.-]+))\s*", + re.VERBOSE, + ) + + def __init__( + self, docstring, config=None, app=None, what="", name="", obj=None, options=None + ): + # type: (Union[unicode, List[unicode]], SphinxConfig, Sphinx, unicode, unicode, Any, Any) -> None # NOQA + super().__init__(docstring, config, app, what, name, obj, options) + + cmd = getattr(obj, "_cmd", "") + if cmd and cmd.strip(): + self._parsed_lines = [ + "Wrapped executable: ``%s``." % cmd.strip(), + "", + ] + self._parsed_lines + + if obj is not None: + self._parsed_lines += _parse_interface(obj) + + +def _parse_interface(obj): + """Print description for input parameters.""" + parsed = [] + if obj.input_spec: + inputs = obj.input_spec() + mandatory_items = sorted(inputs.traits(mandatory=True).items()) + if mandatory_items: + parsed += ["", "Mandatory Inputs"] + parsed += ["-" * len(parsed[-1])] + for name, spec in mandatory_items: + parsed += _parse_spec(inputs, name, spec) + + mandatory_keys = {item[0] for item in mandatory_items} + optional_items = sorted( + [ + (name, val) + for name, val in inputs.traits(transient=None).items() + if name not in mandatory_keys + ] + ) + if optional_items: + parsed += ["", "Optional Inputs"] + parsed += ["-" * len(parsed[-1])] + for name, spec in optional_items: + parsed += _parse_spec(inputs, name, spec) + + if obj.output_spec: + outputs = sorted(obj.output_spec().traits(transient=None).items()) + if outputs: + parsed += ["", "Outputs"] + parsed += ["-" * len(parsed[-1])] + for name, spec in outputs: + parsed += _parse_spec(inputs, name, spec) + + return parsed + + +def _indent(lines, n=4): + # type: (List[unicode], int) -> List[unicode] + return [(" " * n) + line for line in lines] + + +def _parse_spec(inputs, name, spec): + """Parse a HasTraits object into a Numpy-style docstring.""" + desc_lines = [] + if spec.desc: + desc = "".join([spec.desc[0].capitalize(), spec.desc[1:]]) + if not desc.endswith(".") and not desc.endswith("\n"): + desc = "%s." % desc + desc_lines += desc.splitlines() + + argstr = spec.argstr + if argstr and argstr.strip(): + pos = spec.position + if pos is None: + desc_lines += [ + """Maps to a command-line argument: :code:`{arg}`.""".format( + arg=argstr.strip() + ) + ] + else: + desc_lines += [ + """Maps to a command-line argument: :code:`{arg}` (position: {pos}).""".format( + arg=argstr.strip(), pos=pos + ) + ] + + xor = spec.xor + if xor: + desc_lines += [ + "Mutually **exclusive** with inputs: %s." + % ", ".join(["``%s``" % x for x in xor]) + ] + + requires = spec.requires + if requires: + desc_lines += [ + "**Requires** inputs: %s." % ", ".join(["``%s``" % x for x in requires]) + ] + + if spec.usedefault: + default = spec.default_value()[1] + if isinstance(default, (bytes, str)) and not default: + default = '""' + + desc_lines += ["(Nipype **default** value: ``%s``)" % str(default)] + + out_rst = [f"{name} : {spec.full_info(inputs, name, None)}"] + out_rst += _indent(desc_lines, 4) + + return out_rst diff --git a/nipype/sphinxext/documenter.py b/nipype/sphinxext/documenter.py new file mode 100644 index 0000000000..eee0f626b9 --- /dev/null +++ b/nipype/sphinxext/documenter.py @@ -0,0 +1,83 @@ +"""sphinx autodoc ext.""" + +from sphinx.locale import _ +from sphinx.ext import autodoc +from nipype.interfaces.base import BaseInterface +from .gh import get_url + +_ClassDocumenter = autodoc.ClassDocumenter +RST_CLASS_BLOCK = """ +.. index:: {name} + +.. 
_{module}.{name}: + +{name} +{underline} +`Link to code <{code_url}>`__ + +""" + + +class NipypeClassDocumenter(_ClassDocumenter): # type: ignore + priority = 20 + + def add_directive_header(self, sig: str) -> None: + if self.doc_as_attr: + self.directivetype = "attribute" + + # Copied from super + domain = getattr(self, "domain", "py") + directive = getattr(self, "directivetype", self.objtype) + name = self.format_name() + sourcename = self.get_sourcename() + + is_interface = False + try: + is_interface = issubclass(self.object, BaseInterface) + except TypeError: + pass + + if is_interface is True: + lines = RST_CLASS_BLOCK.format( + code_url=get_url(self.object), + module=self.modname, + name=name, + underline="=" * len(name), + ) + for line in lines.splitlines(): + self.add_line(line, sourcename) + else: + self.add_line(f".. {domain}:{directive}:: {name}{sig}", sourcename) + if self.options.noindex: + self.add_line(" :noindex:", sourcename) + if self.objpath: + # Be explicit about the module, this is necessary since .. class:: + # etc. don't support a prepended module name + self.add_line(" :module: %s" % self.modname, sourcename) + + # add inheritance info, if wanted + if not self.doc_as_attr and self.options.show_inheritance: + sourcename = self.get_sourcename() + self.add_line("", sourcename) + bases = getattr(self.object, "__bases__", []) + bases_links = [] + + for b in bases: + based_interface = False + try: + based_interface = issubclass(b, BaseInterface) + except TypeError: + pass + + if b.__module__ in ("__builtin__", "builtins"): + bases_links.append(":class:`%s`" % b.__name__) + elif based_interface: + bases_links.append(f":ref:`{b.__module__}.{b.__name__}`") + else: + bases_links.append(f":class:`{b.__module__}.{b.__name__}`") + + self.add_line(" " + _("Bases: %s") % ", ".join(bases_links), sourcename) + + +def setup(app): + app.add_autodocumenter(NipypeClassDocumenter) diff --git a/nipype/sphinxext/gh.py b/nipype/sphinxext/gh.py new file mode 100644 index 0000000000..07a6513fb4 --- /dev/null +++ b/nipype/sphinxext/gh.py @@ -0,0 +1,30 @@ +"""Build a file URL.""" + +import os +import inspect +import subprocess + +REVISION_CMD = "git rev-parse --short HEAD" + + +def _get_git_revision(): + # Comes from scikit-learn + # https://github.com/scikit-learn/scikit-learn/blob/master/doc/sphinxext/github_link.py + try: + revision = subprocess.check_output(REVISION_CMD.split()).strip() + except (subprocess.CalledProcessError, OSError): + return None + return revision.decode("utf-8") + + +def get_url(obj): + """Return local or remote url for an object.""" + filename = inspect.getsourcefile(obj) + uri = "file://%s" % filename + revision = _get_git_revision() + if revision is not None: + shortfile = os.path.join("nipype", filename.split("nipype/")[-1]) + uri = f"http://github.com/nipy/nipype/blob/{revision}/{shortfile}" + lines, lstart = inspect.getsourcelines(obj) + lend = len(lines) + lstart + return "%s#L%d-L%d" % (uri, lstart, lend) diff --git a/nipype/sphinxext/plot_workflow.py b/nipype/sphinxext/plot_workflow.py index 7fa0769401..74745f99cb 100644 --- a/nipype/sphinxext/plot_workflow.py +++ b/nipype/sphinxext/plot_workflow.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -21,7 +20,7 @@ :graph2use: flat :simple_form: no - from nipype.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline + from 
niflow.nipype1.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline wf = create_connectivity_pipeline() @@ -32,7 +31,7 @@ :graph2use: flat :simple_form: no - from nipype.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline + from niflow.nipype1.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline wf = create_connectivity_pipeline() @@ -106,7 +105,6 @@ Provide a customized template for preparing restructured text. """ -from __future__ import print_function, division, absolute_import, unicode_literals import sys import os @@ -115,13 +113,13 @@ import re import textwrap from os.path import relpath -from errno import EEXIST import traceback missing_imports = [] try: - from docutils.parsers.rst import directives + from docutils.parsers.rst import directives, Directive from docutils.parsers.rst.directives.images import Image + align = Image.align except ImportError as e: missing_imports = [str(e)] @@ -132,6 +130,7 @@ def format_template(template, **kw): return jinja2.Template(template).render(**kw) + except ImportError as e: missing_imports.append(str(e)) try: @@ -144,70 +143,37 @@ def format_template(template, **kw): except ImportError as e: missing_imports.append(str(e)) -from builtins import str, bytes - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - - -def _mkdirp(folder): - """ - Equivalent to bash's mkdir -p - """ - if sys.version_info > (3, 4, 1): - os.makedirs(folder, exist_ok=True) - return folder - - try: - os.makedirs(folder) - except OSError as exc: - if exc.errno != EEXIST or not os.path.isdir(folder): - raise - - return folder - - -def wf_directive(name, arguments, options, content, lineno, content_offset, - block_text, state, state_machine): - if len(missing_imports) == 0: - return run(arguments, content, options, state_machine, state, lineno) - else: - raise ImportError('\n'.join(missing_imports)) - - -wf_directive.__doc__ = __doc__ - def _option_boolean(arg): if not arg or not arg.strip(): # no argument given, assume used as a flag return True - elif arg.strip().lower() in ('no', '0', 'false'): + elif arg.strip().lower() in ("no", "0", "false"): return False - elif arg.strip().lower() in ('yes', '1', 'true'): + elif arg.strip().lower() in ("yes", "1", "true"): return True else: raise ValueError('"%s" unknown boolean' % arg) def _option_graph2use(arg): - return directives.choice( - arg, ('hierarchical', 'colored', 'flat', 'orig', 'exec')) + return directives.choice(arg, ("hierarchical", "colored", "flat", "orig", "exec")) def _option_context(arg): - if arg in [None, 'reset', 'close-figs']: + if arg in [None, "reset", "close-figs"]: return arg raise ValueError("argument should be None or 'reset' or 'close-figs'") def _option_format(arg): - return directives.choice(arg, ('python', 'doctest')) + return directives.choice(arg, ("python", "doctest")) def _option_align(arg): return directives.choice( - arg, ("top", "middle", "bottom", "left", "center", "right")) + arg, ("top", "middle", "bottom", "left", "center", "right") + ) def mark_wf_labels(app, document): @@ -223,63 +189,285 @@ def mark_wf_labels(app, document): if labelid is None: continue node = document.ids[labelid] - if node.tagname in ('html_only', 'latex_only'): + if node.tagname in ("html_only", "latex_only"): for n in node: - if n.tagname == 'figure': + if n.tagname == "figure": sectname = name for c in n: - if c.tagname == 'caption': + if c.tagname == "caption": sectname = c.astext() break - node['ids'].remove(labelid) - 
node['names'].remove(name) - n['ids'].append(labelid) - n['names'].append(name) - document.settings.env.labels[name] = \ - document.settings.env.docname, labelid, sectname + node["ids"].remove(labelid) + node["names"].remove(name) + n["ids"].append(labelid) + n["names"].append(name) + document.settings.env.labels[name] = ( + document.settings.env.docname, + labelid, + sectname, + ) break +class WorkflowDirective(Directive): + has_content = True + required_arguments = 0 + optional_arguments = 2 + final_argument_whitespace = False + option_spec = { + "alt": directives.unchanged, + "height": directives.length_or_unitless, + "width": directives.length_or_percentage_or_unitless, + "scale": directives.nonnegative_int, + "align": _option_align, + "class": directives.class_option, + "include-source": _option_boolean, + "format": _option_format, + "context": _option_context, + "nofigs": directives.flag, + "encoding": directives.encoding, + "graph2use": _option_graph2use, + "simple_form": _option_boolean, + } + + def run(self): + if missing_imports: + raise ImportError("\n".join(missing_imports)) + + document = self.state_machine.document + config = document.settings.env.config + nofigs = "nofigs" in self.options + + formats = get_wf_formats(config) + default_fmt = formats[0][0] + + graph2use = self.options.get("graph2use", "hierarchical") + simple_form = self.options.get("simple_form", True) + + self.options.setdefault("include-source", config.wf_include_source) + keep_context = "context" in self.options + context_opt = None if not keep_context else self.options["context"] + + rst_file = document.attributes["source"] + rst_dir = os.path.dirname(rst_file) + + if len(self.arguments): + if not config.wf_basedir: + source_file_name = os.path.join( + setup.app.builder.srcdir, directives.uri(self.arguments[0]) + ) + else: + source_file_name = os.path.join( + setup.confdir, config.wf_basedir, directives.uri(self.arguments[0]) + ) + + # If there is content, it will be passed as a caption. + caption = "\n".join(self.content) + + # If the optional function name is provided, use it + if len(self.arguments) == 2: + function_name = self.arguments[1] + else: + function_name = None + + with open(source_file_name, encoding="utf-8") as fd: + code = fd.read() + output_base = os.path.basename(source_file_name) + else: + source_file_name = rst_file + code = textwrap.dedent("\n".join([str(c) for c in self.content])) + counter = document.attributes.get("_wf_counter", 0) + 1 + document.attributes["_wf_counter"] = counter + base, _ = os.path.splitext(os.path.basename(source_file_name)) + output_base = "%s-%d.py" % (base, counter) + function_name = None + caption = "" + + base, source_ext = os.path.splitext(output_base) + if source_ext in (".py", ".rst", ".txt"): + output_base = base + else: + source_ext = "" + + # ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames + output_base = output_base.replace(".", "-") + + # is it in doctest format? 
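# contains_doctest (defined further down in this module) first checks whether
# the text compiles as plain Python and only then looks for '>>>' prompts:
#   contains_doctest("x = 1")      -> False  (valid Python as-is)
#   contains_doctest(">>> x = 1")  -> True   (doctest prompt found)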
+ is_doctest = contains_doctest(code) + if "format" in self.options: + if self.options["format"] == "python": + is_doctest = False + else: + is_doctest = True + + # determine output directory name fragment + source_rel_name = relpath(source_file_name, setup.confdir) + source_rel_dir = os.path.dirname(source_rel_name) + while source_rel_dir.startswith(os.path.sep): + source_rel_dir = source_rel_dir[1:] + + # build_dir: where to place output files (temporarily) + build_dir = os.path.join( + os.path.dirname(setup.app.doctreedir), "wf_directive", source_rel_dir + ) + # get rid of .. in paths, also changes pathsep + # see note in Python docs for warning about symbolic links on Windows. + # need to compare source and dest paths at end + build_dir = os.path.normpath(build_dir) + + if not os.path.exists(build_dir): + os.makedirs(build_dir) + + # output_dir: final location in the builder's directory + dest_dir = os.path.abspath( + os.path.join(setup.app.builder.outdir, source_rel_dir) + ) + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) # no problem here for me, but just use built-ins + + # how to link to files from the RST file + dest_dir_link = os.path.join( + relpath(setup.confdir, rst_dir), source_rel_dir + ).replace(os.path.sep, "/") + try: + build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, "/") + except ValueError: + # on Windows, relpath raises ValueError when path and start are on + # different mounts/drives + build_dir_link = build_dir + source_link = dest_dir_link + "/" + output_base + source_ext + + # make figures + try: + results = render_figures( + code, + source_file_name, + build_dir, + output_base, + keep_context, + function_name, + config, + graph2use, + simple_form, + context_reset=context_opt == "reset", + close_figs=context_opt == "close-figs", + ) + errors = [] + except GraphError as err: + reporter = self.state.memo.reporter + sm = reporter.system_message( + 2, + "Exception occurred in plotting %s\n from %s:\n%s" + % (output_base, source_file_name, err), + line=self.lineno, + ) + results = [(code, [])] + errors = [sm] + + # Properly indent the caption + caption = "\n".join(" " + line.strip() for line in caption.split("\n")) + + # generate output restructuredtext + total_lines = [] + for j, (code_piece, images) in enumerate(results): + if self.options["include-source"]: + if is_doctest: + lines = [""] + lines += [row.rstrip() for row in code_piece.split("\n")] + else: + lines = [".. code-block:: python", ""] + lines += [" %s" % row.rstrip() for row in code_piece.split("\n")] + source_code = "\n".join(lines) + else: + source_code = "" + + if nofigs: + images = [] + + opts = [ + f":{key}: {val}" + for key, val in list(self.options.items()) + if key in ("alt", "height", "width", "scale", "align", "class") + ] + + only_html = ".. only:: html" + only_latex = ".. only:: latex" + only_texinfo = ".. 
only:: texinfo" + + # Not-None src_link signals the need for a source link in the generated + # html + if j == 0 and config.wf_html_show_source_link: + src_link = source_link + else: + src_link = None + + result = format_template( + config.wf_template or TEMPLATE, + default_fmt=default_fmt, + dest_dir=dest_dir_link, + build_dir=build_dir_link, + source_link=src_link, + multi_image=len(images) > 1, + only_html=only_html, + only_latex=only_latex, + only_texinfo=only_texinfo, + options=opts, + images=images, + source_code=source_code, + html_show_formats=config.wf_html_show_formats and len(images), + caption=caption, + ) + + total_lines.extend(result.split("\n")) + total_lines.extend("\n") + + if total_lines: + self.state_machine.insert_input(total_lines, source=source_file_name) + + # copy image files to builder's output directory, if necessary + os.makedirs(dest_dir, exist_ok=True) + for code_piece, images in results: + for img in images: + for fn in img.filenames(): + destimg = os.path.join(dest_dir, os.path.basename(fn)) + if fn != destimg: + shutil.copyfile(fn, destimg) + + # copy script (if necessary) + target_name = os.path.join(dest_dir, output_base + source_ext) + with open(target_name, "w", encoding="utf-8") as f: + if source_file_name == rst_file: + code_escaped = unescape_doctest(code) + else: + code_escaped = code + f.write(code_escaped) + + return errors + + def setup(app): setup.app = app setup.config = app.config setup.confdir = app.confdir - options = { - 'alt': directives.unchanged, - 'height': directives.length_or_unitless, - 'width': directives.length_or_percentage_or_unitless, - 'scale': directives.nonnegative_int, - 'align': _option_align, - 'class': directives.class_option, - 'include-source': _option_boolean, - 'format': _option_format, - 'context': _option_context, - 'nofigs': directives.flag, - 'encoding': directives.encoding, - 'graph2use': _option_graph2use, - 'simple_form': _option_boolean - } - - app.add_directive('workflow', wf_directive, True, (0, 2, False), **options) - app.add_config_value('graph2use', 'hierarchical', 'html') - app.add_config_value('simple_form', True, 'html') - app.add_config_value('wf_pre_code', None, True) - app.add_config_value('wf_include_source', False, True) - app.add_config_value('wf_html_show_source_link', True, True) - app.add_config_value('wf_formats', ['png', 'svg', 'pdf'], True) - app.add_config_value('wf_basedir', None, True) - app.add_config_value('wf_html_show_formats', True, True) - app.add_config_value('wf_rcparams', {}, True) - app.add_config_value('wf_apply_rcparams', False, True) - app.add_config_value('wf_working_directory', None, True) - app.add_config_value('wf_template', None, True) - - app.connect('doctree-read'.encode() - if PY2 else 'doctree-read', mark_wf_labels) - - metadata = {'parallel_read_safe': True, 'parallel_write_safe': True} + app.add_directive("workflow", WorkflowDirective) + app.add_config_value("graph2use", "hierarchical", "html") + app.add_config_value("simple_form", True, "html") + app.add_config_value("wf_pre_code", None, True) + app.add_config_value("wf_include_source", False, True) + app.add_config_value("wf_html_show_source_link", True, True) + app.add_config_value("wf_formats", ["png", "svg", "pdf"], True) + app.add_config_value("wf_basedir", None, True) + app.add_config_value("wf_html_show_formats", True, True) + app.add_config_value("wf_rcparams", {}, True) + app.add_config_value("wf_apply_rcparams", False, True) + app.add_config_value("wf_working_directory", None, True) + 
app.add_config_value("wf_template", None, True) + + app.connect("doctree-read", mark_wf_labels) + + metadata = {"parallel_read_safe": True, "parallel_write_safe": True} return metadata @@ -291,11 +479,11 @@ def setup(app): def contains_doctest(text): try: # check if it's valid Python as-is - compile(text, '', 'exec') + compile(text, "", "exec") return False except SyntaxError: pass - r = re.compile(r'^\s*>>>', re.M) + r = re.compile(r"^\s*>>>", re.MULTILINE) m = r.search(text) return bool(m) @@ -310,7 +498,7 @@ def unescape_doctest(text): code = "" for line in text.split("\n"): - m = re.match(r'^\s*(>>>|\.\.\.) (.*)$', line) + m = re.match(r"^\s*(>>>|\.\.\.) (.*)$", line) if m: code += m.group(2) + "\n" elif line.strip(): @@ -324,7 +512,7 @@ def remove_coding(text): """ Remove the coding comment, which exec doesn't like. """ - sub_re = re.compile("^#\s*-\*-\s*coding:\s*.*-\*-$", flags=re.MULTILINE) + sub_re = re.compile(r"^#\s*-\*-\s*coding:\s*.*-\*-$", flags=re.MULTILINE) return sub_re.sub("", text) @@ -395,14 +583,14 @@ def remove_coding(text): wf_context = dict() -class ImageFile(object): +class ImageFile: def __init__(self, basename, dirname): self.basename = basename self.dirname = dirname self.formats = [] def filename(self, fmt): - return os.path.join(self.dirname, "%s.%s" % (self.basename, fmt)) + return os.path.join(self.dirname, f"{self.basename}.{fmt}") def filenames(self): return [self.filename(fmt) for fmt in self.formats] @@ -413,9 +601,10 @@ def out_of_date(original, derived): Returns True if derivative is out-of-date wrt original, both of which are full file paths. """ - return (not os.path.exists(derived) - or (os.path.exists(original) - and os.stat(derived).st_mtime < os.stat(original).st_mtime)) + return not os.path.exists(derived) or ( + os.path.exists(original) + and os.stat(derived).st_mtime < os.stat(original).st_mtime + ) class GraphError(RuntimeError): @@ -438,14 +627,16 @@ def run_code(code, code_path, ns=None, function_name=None): os.chdir(setup.config.wf_working_directory) except OSError as err: raise OSError( - str(err) + '\n`wf_working_directory` option in' - 'Sphinx configuration file must be a valid ' - 'directory path') + str(err) + "\n`wf_working_directory` option in" + "Sphinx configuration file must be a valid " + "directory path" + ) except TypeError as err: raise TypeError( - str(err) + '\n`wf_working_directory` option in ' - 'Sphinx configuration file must be a string or ' - 'None') + str(err) + "\n`wf_working_directory` option in " + "Sphinx configuration file must be a string or " + "None" + ) sys.path.insert(0, setup.config.wf_working_directory) elif code_path is not None: dirname = os.path.abspath(os.path.dirname(code_path)) @@ -458,11 +649,7 @@ def run_code(code, code_path, ns=None, function_name=None): # Redirect stdout stdout = sys.stdout - if PY3: - sys.stdout = io.StringIO() - else: - from cStringIO import StringIO - sys.stdout = StringIO() + sys.stdout = io.StringIO() # Assign a do-nothing print function to the namespace. 
There # doesn't seem to be any other way to provide a way to (not) print @@ -478,14 +665,14 @@ def _dummy_print(*arg, **kwarg): if not ns: if setup.config.wf_pre_code is not None: exec(str(setup.config.wf_pre_code), ns) - ns['print'] = _dummy_print + ns["print"] = _dummy_print if "__main__" in code: exec("__name__ = '__main__'", ns) code = remove_coding(code) exec(code, ns) if function_name is not None: exec(function_name + "()", ns) - except (Exception, SystemExit) as err: + except (Exception, SystemExit): raise GraphError(traceback.format_exc()) finally: os.chdir(pwd) @@ -496,18 +683,18 @@ def _dummy_print(*arg, **kwarg): def get_wf_formats(config): - default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 200} + default_dpi = {"png": 80, "hires.png": 200, "pdf": 200} formats = [] wf_formats = config.wf_formats if isinstance(wf_formats, (str, bytes)): # String Sphinx < 1.3, Split on , to mimic # Sphinx 1.3 and later. Sphinx 1.3 always # returns a list. - wf_formats = wf_formats.split(',') + wf_formats = wf_formats.split(",") for fmt in wf_formats: if isinstance(fmt, (str, bytes)): - if ':' in fmt: - suffix, dpi = fmt.split(':') + if ":" in fmt: + suffix, dpi = fmt.split(":") formats.append((str(suffix), int(dpi))) else: formats.append((fmt, default_dpi.get(fmt, 80))) @@ -518,17 +705,19 @@ def get_wf_formats(config): return formats -def render_figures(code, - code_path, - output_dir, - output_base, - context, - function_name, - config, - graph2use, - simple_form, - context_reset=False, - close_figs=False): +def render_figures( + code, + code_path, + output_dir, + output_base, + context, + function_name, + config, + graph2use, + simple_form, + context_reset=False, + close_figs=False, +): """ Run a nipype workflow creation script and save the graph in *output_dir*. Save the images under *output_dir* with file names derived from @@ -546,12 +735,10 @@ def render_figures(code, try: img_path = img.filename(fmt) imgname, ext = os.path.splitext(os.path.basename(img_path)) - ns['wf'].base_dir = output_dir - src = ns['wf'].write_graph( - imgname, - format=ext[1:], - graph2use=graph2use, - simple_form=simple_form) + ns["wf"].base_dir = output_dir + src = ns["wf"].write_graph( + imgname, format=ext[1:], graph2use=graph2use, simple_form=simple_form + ) shutil.move(src, img_path) except Exception: raise GraphError(traceback.format_exc()) @@ -559,210 +746,3 @@ def render_figures(code, img.formats.append(fmt) return [(code, [img])] - - -def run(arguments, content, options, state_machine, state, lineno): - document = state_machine.document - config = document.settings.env.config - nofigs = 'nofigs' in options - - formats = get_wf_formats(config) - default_fmt = formats[0][0] - - graph2use = options.get('graph2use', 'hierarchical') - simple_form = options.get('simple_form', True) - - options.setdefault('include-source', config.wf_include_source) - keep_context = 'context' in options - context_opt = None if not keep_context else options['context'] - - rst_file = document.attributes['source'] - rst_dir = os.path.dirname(rst_file) - - if len(arguments): - if not config.wf_basedir: - source_file_name = os.path.join(setup.app.builder.srcdir, - directives.uri(arguments[0])) - else: - source_file_name = os.path.join(setup.confdir, config.wf_basedir, - directives.uri(arguments[0])) - - # If there is content, it will be passed as a caption. 
- caption = '\n'.join(content) - - # If the optional function name is provided, use it - if len(arguments) == 2: - function_name = arguments[1] - else: - function_name = None - - with io.open(source_file_name, 'r', encoding='utf-8') as fd: - code = fd.read() - output_base = os.path.basename(source_file_name) - else: - source_file_name = rst_file - code = textwrap.dedent("\n".join([str(c) for c in content])) - counter = document.attributes.get('_wf_counter', 0) + 1 - document.attributes['_wf_counter'] = counter - base, _ = os.path.splitext(os.path.basename(source_file_name)) - output_base = '%s-%d.py' % (base, counter) - function_name = None - caption = '' - - base, source_ext = os.path.splitext(output_base) - if source_ext in ('.py', '.rst', '.txt'): - output_base = base - else: - source_ext = '' - - # ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames - output_base = output_base.replace('.', '-') - - # is it in doctest format? - is_doctest = contains_doctest(code) - if 'format' in options: - if options['format'] == 'python': - is_doctest = False - else: - is_doctest = True - - # determine output directory name fragment - source_rel_name = relpath(source_file_name, setup.confdir) - source_rel_dir = os.path.dirname(source_rel_name) - while source_rel_dir.startswith(os.path.sep): - source_rel_dir = source_rel_dir[1:] - - # build_dir: where to place output files (temporarily) - build_dir = os.path.join( - os.path.dirname(setup.app.doctreedir), 'wf_directive', source_rel_dir) - # get rid of .. in paths, also changes pathsep - # see note in Python docs for warning about symbolic links on Windows. - # need to compare source and dest paths at end - build_dir = os.path.normpath(build_dir) - - if not os.path.exists(build_dir): - os.makedirs(build_dir) - - # output_dir: final location in the builder's directory - dest_dir = os.path.abspath( - os.path.join(setup.app.builder.outdir, source_rel_dir)) - if not os.path.exists(dest_dir): - os.makedirs(dest_dir) # no problem here for me, but just use built-ins - - # how to link to files from the RST file - dest_dir_link = os.path.join( - relpath(setup.confdir, rst_dir), source_rel_dir).replace( - os.path.sep, '/') - try: - build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/') - except ValueError: - # on Windows, relpath raises ValueError when path and start are on - # different mounts/drives - build_dir_link = build_dir - source_link = dest_dir_link + '/' + output_base + source_ext - - # make figures - try: - results = render_figures( - code, - source_file_name, - build_dir, - output_base, - keep_context, - function_name, - config, - graph2use, - simple_form, - context_reset=context_opt == 'reset', - close_figs=context_opt == 'close-figs') - errors = [] - except GraphError as err: - reporter = state.memo.reporter - sm = reporter.system_message( - 2, - "Exception occurred in plotting %s\n from %s:\n%s" % - (output_base, source_file_name, err), - line=lineno) - results = [(code, [])] - errors = [sm] - - # Properly indent the caption - caption = '\n'.join( - ' ' + line.strip() for line in caption.split('\n')) - - # generate output restructuredtext - total_lines = [] - for j, (code_piece, images) in enumerate(results): - if options['include-source']: - if is_doctest: - lines = [''] - lines += [row.rstrip() for row in code_piece.split('\n')] - else: - lines = ['.. 
code-block:: python', ''] - lines += [ - ' %s' % row.rstrip() for row in code_piece.split('\n') - ] - source_code = "\n".join(lines) - else: - source_code = "" - - if nofigs: - images = [] - - opts = [ - ':%s: %s' % (key, val) for key, val in list(options.items()) - if key in ('alt', 'height', 'width', 'scale', 'align', 'class') - ] - - only_html = ".. only:: html" - only_latex = ".. only:: latex" - only_texinfo = ".. only:: texinfo" - - # Not-None src_link signals the need for a source link in the generated - # html - if j == 0 and config.wf_html_show_source_link: - src_link = source_link - else: - src_link = None - - result = format_template( - config.wf_template or TEMPLATE, - default_fmt=default_fmt, - dest_dir=dest_dir_link, - build_dir=build_dir_link, - source_link=src_link, - multi_image=len(images) > 1, - only_html=only_html, - only_latex=only_latex, - only_texinfo=only_texinfo, - options=opts, - images=images, - source_code=source_code, - html_show_formats=config.wf_html_show_formats and len(images), - caption=caption) - - total_lines.extend(result.split("\n")) - total_lines.extend("\n") - - if total_lines: - state_machine.insert_input(total_lines, source=source_file_name) - - # copy image files to builder's output directory, if necessary - _mkdirp(dest_dir) - for code_piece, images in results: - for img in images: - for fn in img.filenames(): - destimg = os.path.join(dest_dir, os.path.basename(fn)) - if fn != destimg: - shutil.copyfile(fn, destimg) - - # copy script (if necessary) - target_name = os.path.join(dest_dir, output_base + source_ext) - with io.open(target_name, 'w', encoding="utf-8") as f: - if source_file_name == rst_file: - code_escaped = unescape_doctest(code) - else: - code_escaped = code - f.write(code_escaped) - - return errors diff --git a/nipype/testing/__init__.py b/nipype/testing/__init__.py index 9d57ba87af..e3fbd80e6a 100644 --- a/nipype/testing/__init__.py +++ b/nipype/testing/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The testing directory contains a small set of imaging files to be @@ -11,25 +10,22 @@ filepath = os.path.abspath(__file__) basedir = os.path.dirname(filepath) -funcfile = os.path.join(basedir, 'data', 'functional.nii') -anatfile = os.path.join(basedir, 'data', 'structural.nii') +funcfile = os.path.join(basedir, "data", "functional.nii") +anatfile = os.path.join(basedir, "data", "structural.nii") template = funcfile transfm = funcfile -from . 
import decorators as dec from .utils import package_check, TempFATFS -skipif = dec.skipif - -def example_data(infile='functional.nii'): +def example_data(infile="functional.nii"): """returns path to empty example data files for doc tests it will raise an exception if filename is not in the directory""" filepath = os.path.abspath(__file__) basedir = os.path.dirname(filepath) - outfile = os.path.join(basedir, 'data', infile) + outfile = os.path.join(basedir, "data", infile) if not os.path.exists(outfile): - raise IOError('%s empty data file does NOT exist' % outfile) + raise OSError("%s empty data file does NOT exist" % outfile) return outfile diff --git a/nipype/testing/data/5tt_in.mif b/nipype/testing/data/5tt_in.mif new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/README b/nipype/testing/data/README index 550854c57e..ed70b57e43 100644 --- a/nipype/testing/data/README +++ b/nipype/testing/data/README @@ -1,5 +1,5 @@ This directory contains empty, dummy files which are meant to be used -in the doctests of nipype. For verion 0.3 of nipype, we're using +in the doctests of nipype. For version 0.3 of nipype, we're using Traits and for input files, the code checks to confirm the assigned files actually exist. It doesn't matter what the files are, or even if they contain "real data", only that they exist. Again, these files diff --git a/nipype/testing/data/T2.nii b/nipype/testing/data/T2.nii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/csffod.mif b/nipype/testing/data/csffod.mif new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/design.mat b/nipype/testing/data/design.mat index e69de29bb2..5f27af3198 100644 --- a/nipype/testing/data/design.mat +++ b/nipype/testing/data/design.mat @@ -0,0 +1,6 @@ +/NumWaves 3 +/NumPoints 3 +/Matrix +0 0 0 +0 0 0 +0 0 0 diff --git a/nipype/testing/data/design.txt b/nipype/testing/data/design.txt new file mode 100644 index 0000000000..d5de7d6a40 --- /dev/null +++ b/nipype/testing/data/design.txt @@ -0,0 +1,3 @@ +0 0 0 +0 0 0 +0 0 0 diff --git a/nipype/testing/data/epi_slspec.txt b/nipype/testing/data/epi_slspec.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/events.tsv b/nipype/testing/data/events.tsv new file mode 100644 index 0000000000..9f9fba67ef --- /dev/null +++ b/nipype/testing/data/events.tsv @@ -0,0 +1,9 @@ +onset duration frequency pulse_width amplitude +183.75 20.0 20.0 0.005 1.0 +313.75 20.0 20.0 0.005 1.0 +483.75 20.0 20.0 0.005 1.0 +633.75 20.0 20.0 0.005 1.0 +783.75 20.0 20.0 0.005 1.0 +933.75 20.0 20.0 0.005 1.0 +1083.75 20.0 20.0 0.005 1.0 +1233.75 20.0 20.0 0.005 1.0 diff --git a/nipype/testing/data/gmfod.mif b/nipype/testing/data/gmfod.mif new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/grad.b b/nipype/testing/data/grad.b new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/grads.txt b/nipype/testing/data/grads.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/gtmseg.mgz b/nipype/testing/data/gtmseg.mgz new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/gtmseg.nii b/nipype/testing/data/gtmseg.nii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/lh.aparc_a2009s.freesurfer.annot b/nipype/testing/data/lh.aparc_a2009s.freesurfer.annot new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/lh.area.structural 
b/nipype/testing/data/lh.area.structural new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/lh.central.structural.gii b/nipype/testing/data/lh.central.structural.gii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/lh.pbt.structural b/nipype/testing/data/lh.pbt.structural new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/lh.sphere.reg.structural.gii b/nipype/testing/data/lh.sphere.reg.structural.gii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/lh.sphere.structural.gii b/nipype/testing/data/lh.sphere.structural.gii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/nipype2boutiques_example.json b/nipype/testing/data/nipype2boutiques_example.json new file mode 100644 index 0000000000..45359f49ed --- /dev/null +++ b/nipype/testing/data/nipype2boutiques_example.json @@ -0,0 +1,549 @@ +{ + "name": "FLIRT", + "command-line": "FLIRT [IN_FILE] [REFERENCE] [OUT_FILE] [OUT_MATRIX_FILE] [ANGLE_REP] [APPLY_ISOXFM] [APPLY_XFM] [BBRSLOPE] [BBRTYPE] [BGVALUE] [BINS] [COARSE_SEARCH] [COST] [COST_FUNC] [DATATYPE] [DISPLAY_INIT] [DOF] [ECHOSPACING] [FIELDMAP] [FIELDMAPMASK] [FINE_SEARCH] [FORCE_SCALING] [IN_MATRIX_FILE] [IN_WEIGHT] [INTERP] [MIN_SAMPLING] [NO_CLAMP] [NO_RESAMPLE] [NO_RESAMPLE_BLUR] [NO_SEARCH] [OUT_LOG] [PADDING_SIZE] [PEDIR] [REF_WEIGHT] [RIGID2D] [SAVE_LOG] [SCHEDULE] [SEARCHR_X] [SEARCHR_Y] [SEARCHR_Z] [SINC_WIDTH] [SINC_WINDOW] [USES_QFORM] [VERBOSE] [WM_SEG] [WMCOORDS] [WMNORMS]", + "author": "Nipype (interface), Oxford Centre for Functional MRI of the Brain (FMRIB) (tool)", + "description": "FLIRT, as implemented in Nipype (module: nipype.interfaces.fsl, interface: FLIRT).", + "inputs": [ + { + "id": "angle_rep", + "name": "Angle rep", + "type": "String", + "value-key": "[ANGLE_REP]", + "command-line-flag": "-anglerep", + "description": "'quaternion' or 'euler'. Representation of rotation angles.", + "optional": true, + "value-choices": [ + "quaternion", + "euler" + ] + }, + { + "id": "apply_isoxfm", + "name": "Apply isoxfm", + "type": "Number", + "value-key": "[APPLY_ISOXFM]", + "command-line-flag": "-applyisoxfm", + "description": "A float. As applyxfm but forces isotropic resampling.", + "optional": true + }, + { + "id": "apply_xfm", + "name": "Apply xfm", + "type": "Flag", + "value-key": "[APPLY_XFM]", + "command-line-flag": "-applyxfm", + "description": "A boolean. Apply transformation supplied by in_matrix_file or uses_qform to use the affine matrix stored in the reference header.", + "optional": true + }, + { + "id": "bbrslope", + "name": "Bbrslope", + "type": "Number", + "value-key": "[BBRSLOPE]", + "command-line-flag": "-bbrslope", + "description": "A float. Value of bbr slope.", + "optional": true + }, + { + "id": "bbrtype", + "name": "Bbrtype", + "type": "String", + "value-key": "[BBRTYPE]", + "command-line-flag": "-bbrtype", + "description": "'signed' or 'global_abs' or 'local_abs'. Type of bbr cost function: signed [default], global_abs, local_abs.", + "optional": true, + "value-choices": [ + "signed", + "global_abs", + "local_abs" + ] + }, + { + "id": "bgvalue", + "name": "Bgvalue", + "type": "Number", + "value-key": "[BGVALUE]", + "command-line-flag": "-setbackground", + "description": "A float. 
Use specified background value for points outside fov.", + "optional": true + }, + { + "id": "bins", + "name": "Bins", + "type": "Number", + "integer": true, + "value-key": "[BINS]", + "command-line-flag": "-bins", + "description": "An integer (int or long). Number of histogram bins.", + "optional": true + }, + { + "id": "coarse_search", + "name": "Coarse search", + "type": "Number", + "integer": true, + "value-key": "[COARSE_SEARCH]", + "command-line-flag": "-coarsesearch", + "description": "An integer (int or long). Coarse search delta angle.", + "optional": true + }, + { + "id": "cost", + "name": "Cost", + "type": "String", + "value-key": "[COST]", + "command-line-flag": "-cost", + "description": "'mutualinfo' or 'corratio' or 'normcorr' or 'normmi' or 'leastsq' or 'labeldiff' or 'bbr'. Cost function.", + "optional": true, + "value-choices": [ + "mutualinfo", + "corratio", + "normcorr", + "normmi", + "leastsq", + "labeldiff", + "bbr" + ] + }, + { + "id": "cost_func", + "name": "Cost func", + "type": "String", + "value-key": "[COST_FUNC]", + "command-line-flag": "-searchcost", + "description": "'mutualinfo' or 'corratio' or 'normcorr' or 'normmi' or 'leastsq' or 'labeldiff' or 'bbr'. Cost function.", + "optional": true, + "value-choices": [ + "mutualinfo", + "corratio", + "normcorr", + "normmi", + "leastsq", + "labeldiff", + "bbr" + ] + }, + { + "id": "datatype", + "name": "Datatype", + "type": "String", + "value-key": "[DATATYPE]", + "command-line-flag": "-datatype", + "description": "'char' or 'short' or 'int' or 'float' or 'double'. Force output data type.", + "optional": true, + "value-choices": [ + "char", + "short", + "int", + "float", + "double" + ] + }, + { + "id": "display_init", + "name": "Display init", + "type": "Flag", + "value-key": "[DISPLAY_INIT]", + "command-line-flag": "-displayinit", + "description": "A boolean. Display initial matrix.", + "optional": true + }, + { + "id": "dof", + "name": "Dof", + "type": "Number", + "integer": true, + "value-key": "[DOF]", + "command-line-flag": "-dof", + "description": "An integer (int or long). Number of transform degrees of freedom.", + "optional": true + }, + { + "id": "echospacing", + "name": "Echospacing", + "type": "Number", + "value-key": "[ECHOSPACING]", + "command-line-flag": "-echospacing", + "description": "A float. Value of epi echo spacing - units of seconds.", + "optional": true + }, + { + "id": "fieldmap", + "name": "Fieldmap", + "type": "File", + "value-key": "[FIELDMAP]", + "command-line-flag": "-fieldmap", + "description": "A file name. Fieldmap image in rads/s - must be already registered to the reference image.", + "optional": true + }, + { + "id": "fieldmapmask", + "name": "Fieldmapmask", + "type": "File", + "value-key": "[FIELDMAPMASK]", + "command-line-flag": "-fieldmapmask", + "description": "A file name. Mask for fieldmap image.", + "optional": true + }, + { + "id": "fine_search", + "name": "Fine search", + "type": "Number", + "integer": true, + "value-key": "[FINE_SEARCH]", + "command-line-flag": "-finesearch", + "description": "An integer (int or long). Fine search delta angle.", + "optional": true + }, + { + "id": "force_scaling", + "name": "Force scaling", + "type": "Flag", + "value-key": "[FORCE_SCALING]", + "command-line-flag": "-forcescaling", + "description": "A boolean. Force rescaling even for low-res images.", + "optional": true + }, + { + "id": "in_file", + "name": "In file", + "type": "File", + "value-key": "[IN_FILE]", + "command-line-flag": "-in", + "description": "An existing file name. 
Input file.", + "optional": false + }, + { + "id": "in_matrix_file", + "name": "In matrix file", + "type": "File", + "value-key": "[IN_MATRIX_FILE]", + "command-line-flag": "-init", + "description": "A file name. Input 4x4 affine matrix.", + "optional": true + }, + { + "id": "in_weight", + "name": "In weight", + "type": "File", + "value-key": "[IN_WEIGHT]", + "command-line-flag": "-inweight", + "description": "An existing file name. File for input weighting volume.", + "optional": true + }, + { + "id": "interp", + "name": "Interp", + "type": "String", + "value-key": "[INTERP]", + "command-line-flag": "-interp", + "description": "'trilinear' or 'nearestneighbour' or 'sinc' or 'spline'. Final interpolation method used in reslicing.", + "optional": true, + "value-choices": [ + "trilinear", + "nearestneighbour", + "sinc", + "spline" + ] + }, + { + "id": "min_sampling", + "name": "Min sampling", + "type": "Number", + "value-key": "[MIN_SAMPLING]", + "command-line-flag": "-minsampling", + "description": "A float. Set minimum voxel dimension for sampling.", + "optional": true + }, + { + "id": "no_clamp", + "name": "No clamp", + "type": "Flag", + "value-key": "[NO_CLAMP]", + "command-line-flag": "-noclamp", + "description": "A boolean. Do not use intensity clamping.", + "optional": true + }, + { + "id": "no_resample", + "name": "No resample", + "type": "Flag", + "value-key": "[NO_RESAMPLE]", + "command-line-flag": "-noresample", + "description": "A boolean. Do not change input sampling.", + "optional": true + }, + { + "id": "no_resample_blur", + "name": "No resample blur", + "type": "Flag", + "value-key": "[NO_RESAMPLE_BLUR]", + "command-line-flag": "-noresampblur", + "description": "A boolean. Do not use blurring on downsampling.", + "optional": true + }, + { + "id": "no_search", + "name": "No search", + "type": "Flag", + "value-key": "[NO_SEARCH]", + "command-line-flag": "-nosearch", + "description": "A boolean. Set all angular searches to ranges 0 to 0.", + "optional": true + }, + { + "id": "padding_size", + "name": "Padding size", + "type": "Number", + "integer": true, + "value-key": "[PADDING_SIZE]", + "command-line-flag": "-paddingsize", + "description": "An integer (int or long). For applyxfm: interpolates outside image by size.", + "optional": true + }, + { + "id": "pedir", + "name": "Pedir", + "type": "Number", + "integer": true, + "value-key": "[PEDIR]", + "command-line-flag": "-pedir", + "description": "An integer (int or long). Phase encode direction of epi - 1/2/3=x/y/z & -1/-2/-3=-x/-y/-z.", + "optional": true + }, + { + "id": "ref_weight", + "name": "Ref weight", + "type": "File", + "value-key": "[REF_WEIGHT]", + "command-line-flag": "-refweight", + "description": "An existing file name. File for reference weighting volume.", + "optional": true + }, + { + "id": "reference", + "name": "Reference", + "type": "File", + "value-key": "[REFERENCE]", + "command-line-flag": "-ref", + "description": "An existing file name. Reference file.", + "optional": false + }, + { + "id": "rigid2D", + "name": "Rigid2d", + "type": "Flag", + "value-key": "[RIGID2D]", + "command-line-flag": "-2D", + "description": "A boolean. Use 2d rigid body mode - ignores dof.", + "optional": true + }, + { + "id": "save_log", + "name": "Save log", + "type": "Flag", + "value-key": "[SAVE_LOG]", + "command-line-flag": "--save_log", + "description": "A boolean. 
Save to log file.", + "optional": true + }, + { + "id": "schedule", + "name": "Schedule", + "type": "File", + "value-key": "[SCHEDULE]", + "command-line-flag": "-schedule", + "description": "An existing file name. Replaces default schedule.", + "optional": true + }, + { + "id": "searchr_x", + "name": "Searchr x", + "type": "Number", + "list": true, + "integer": true, + "min-list-entries": 2, + "max-list-entries": 2, + "value-key": "[SEARCHR_X]", + "command-line-flag": "-searchrx", + "description": "A list of from 2 to 2 items which are an integer (int or long). Search angles along x-axis, in degrees.", + "optional": true + }, + { + "id": "searchr_y", + "name": "Searchr y", + "type": "Number", + "list": true, + "integer": true, + "min-list-entries": 2, + "max-list-entries": 2, + "value-key": "[SEARCHR_Y]", + "command-line-flag": "-searchry", + "description": "A list of from 2 to 2 items which are an integer (int or long). Search angles along y-axis, in degrees.", + "optional": true + }, + { + "id": "searchr_z", + "name": "Searchr z", + "type": "Number", + "list": true, + "integer": true, + "min-list-entries": 2, + "max-list-entries": 2, + "value-key": "[SEARCHR_Z]", + "command-line-flag": "-searchrz", + "description": "A list of from 2 to 2 items which are an integer (int or long). Search angles along z-axis, in degrees.", + "optional": true + }, + { + "id": "sinc_width", + "name": "Sinc width", + "type": "Number", + "integer": true, + "value-key": "[SINC_WIDTH]", + "command-line-flag": "-sincwidth", + "description": "An integer (int or long). Full-width in voxels.", + "optional": true + }, + { + "id": "sinc_window", + "name": "Sinc window", + "type": "String", + "value-key": "[SINC_WINDOW]", + "command-line-flag": "-sincwindow", + "description": "'rectangular' or 'hanning' or 'blackman'. Sinc window.", + "optional": true, + "value-choices": [ + "rectangular", + "hanning", + "blackman" + ] + }, + { + "id": "uses_qform", + "name": "Uses qform", + "type": "Flag", + "value-key": "[USES_QFORM]", + "command-line-flag": "-usesqform", + "description": "A boolean. Initialize using sform or qform.", + "optional": true + }, + { + "id": "verbose", + "name": "Verbose", + "type": "Number", + "integer": true, + "value-key": "[VERBOSE]", + "command-line-flag": "-verbose", + "description": "An integer (int or long). Verbose mode, 0 is least.", + "optional": true + }, + { + "id": "wm_seg", + "name": "Wm seg", + "type": "File", + "value-key": "[WM_SEG]", + "command-line-flag": "-wmseg", + "description": "A file name. White matter segmentation volume needed by bbr cost function.", + "optional": true + }, + { + "id": "wmcoords", + "name": "Wmcoords", + "type": "File", + "value-key": "[WMCOORDS]", + "command-line-flag": "-wmcoords", + "description": "A file name. White matter boundary coordinates for bbr cost function.", + "optional": true + }, + { + "id": "wmnorms", + "name": "Wmnorms", + "type": "File", + "value-key": "[WMNORMS]", + "command-line-flag": "-wmnorms", + "description": "A file name. White matter boundary normals for bbr cost function.", + "optional": true + } + ], + "output-files": [ + { + "name": "Out file", + "id": "out_file", + "optional": true, + "description": "A file name. Registered output file.", + "path-template": "[IN_FILE]_flirt", + "value-key": "[OUT_FILE]", + "command-line-flag": "-out" + }, + { + "name": "Out log", + "id": "out_log", + "optional": true, + "description": "A file name. 
Output log.", + "path-template": "[IN_FILE]_flirt.log", + "value-key": "[OUT_LOG]" + }, + { + "name": "Out matrix file", + "id": "out_matrix_file", + "optional": true, + "description": "A file name. Output affine matrix in 4x4 asciii format.", + "path-template": "[IN_FILE]_flirt.mat", + "value-key": "[OUT_MATRIX_FILE]", + "command-line-flag": "-omat" + }, + { + "name": "Out file", + "id": "out_file", + "path-template": "out_file", + "optional": true, + "description": "An existing file name. Path/name of registered file (if generated)." + }, + { + "name": "Out log", + "id": "out_log", + "path-template": "out_log", + "optional": true, + "description": "A file name. Path/name of output log (if generated)." + }, + { + "name": "Out matrix file", + "id": "out_matrix_file", + "path-template": "out_matrix_file", + "optional": true, + "description": "An existing file name. Path/name of calculated affine transform (if generated)." + } + ], + "groups": [ + { + "id": "all_or_none_group", + "name": "All or none group", + "members": [ + "save_log", + "out_log" + ], + "all-or-none": true + }, + { + "id": "mutex_group", + "name": "Mutex group", + "members": [ + "apply_isoxfm", + "apply_xfm" + ], + "mutually-exclusive": true + } + ], + "tool-version": "1.0.0", + "schema-version": "0.5", + "container-image": { + "image": "mcin/docker-fsl:latest", + "type": "docker", + "index": "index.docker.io" + }, + "tags": { + "domain": "neuroinformatics", + "source": "nipype-interface" + } +} \ No newline at end of file diff --git a/nipype/testing/data/ref_tac.dat b/nipype/testing/data/ref_tac.dat new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/rh.aparc_a2009s.freesurfer.annot b/nipype/testing/data/rh.aparc_a2009s.freesurfer.annot new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/rh.central.structural.gii b/nipype/testing/data/rh.central.structural.gii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/rh.pbt.structural b/nipype/testing/data/rh.pbt.structural new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/rh.sphere.reg.structural.gii b/nipype/testing/data/rh.sphere.reg.structural.gii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/rh.sphere.structural.gii b/nipype/testing/data/rh.sphere.structural.gii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/sh.mif b/nipype/testing/data/sh.mif new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/sub-01.L.midthickness.32k_fs_LR.surf.gii b/nipype/testing/data/sub-01.L.midthickness.32k_fs_LR.surf.gii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/sub-01.R.midthickness.32k_fs_LR.surf.gii b/nipype/testing/data/sub-01.R.midthickness.32k_fs_LR.surf.gii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/sub-01_ses-baseline_pet.nii.gz b/nipype/testing/data/sub-01_ses-baseline_pet.nii.gz new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/sub-01_ses-baseline_pet_mean_reg.lta b/nipype/testing/data/sub-01_ses-baseline_pet_mean_reg.lta new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/sub-01_task-rest.dtseries.nii b/nipype/testing/data/sub-01_task-rest.dtseries.nii new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/tac.nii b/nipype/testing/data/tac.nii new file mode 100644 index 
0000000000..e69de29bb2 diff --git a/nipype/testing/data/timing.dat b/nipype/testing/data/timing.dat new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/data/wmfod.mif b/nipype/testing/data/wmfod.mif new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nipype/testing/decorators.py b/nipype/testing/decorators.py deleted file mode 100644 index f849815700..0000000000 --- a/nipype/testing/decorators.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -""" -Extend numpy's decorators to use nipype's gui and data labels. -""" - -from numpy.testing.decorators import knownfailureif, skipif - -from nibabel.data import DataError - - -def make_label_dec(label, ds=None): - """Factory function to create a decorator that applies one or more labels. - - Parameters - ---------- - label : str or sequence - One or more labels that will be applied by the decorator to the - functions it decorates. Labels are attributes of the decorated function - with their value set to True. - ds : str - An optional docstring for the resulting decorator. If not given, a - default docstring is auto-generated. - - Returns - ------- - ldec : function - A decorator. - - Examples - -------- - >>> slow = make_label_dec('slow') - >>> slow.__doc__ - "Labels a test as 'slow'" - - >>> rare = make_label_dec(['slow','hard'], - ... "Mix labels 'slow' and 'hard' for rare tests") - >>> @rare - ... def f(): pass - ... - >>> - >>> f.slow - True - >>> f.hard - True - """ - if isinstance(label, str): - labels = [label] - else: - labels = label - # Validate that the given label(s) are OK for use in setattr() by doing a - # dry run on a dummy function. - tmp = lambda: None - for label in labels: - setattr(tmp, label, True) - # This is the actual decorator we'll return - - def decor(f): - for label in labels: - setattr(f, label, True) - return f - - # Apply the user's docstring - if ds is None: - ds = "Labels a test as %r" % label - decor.__doc__ = ds - return decor - - -# For tests that need further review - - -def needs_review(msg): - """ Skip a test that needs further review. - - Parameters - ---------- - msg : string - msg regarding the review that needs to be done - """ - - def skip_func(func): - return skipif(True, msg)(func) - - return skip_func - - -# Easier version of the numpy knownfailure -def knownfailure(f): - return knownfailureif(True)(f) - - -def if_datasource(ds, msg): - try: - ds.get_filename() - except DataError: - return skipif(True, msg) - return lambda f: f diff --git a/nipype/testing/fixtures.py b/nipype/testing/fixtures.py index 6d8b3b0874..b28741b9d8 100644 --- a/nipype/testing/fixtures.py +++ b/nipype/testing/fixtures.py @@ -1,20 +1,13 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Pytest fixtures used in tests. 
""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) - import os import pytest import numpy as np import nibabel as nb -from io import open -from builtins import str - from nipype.utils.filemanip import ensure_list from nipype.interfaces.fsl import Info from nipype.interfaces.fsl.base import FSLCommand @@ -32,14 +25,13 @@ def analyze_pair_image_files(outdir, filelist, shape): def nifti_image_files(outdir, filelist, shape): for f in ensure_list(filelist): img = np.random.random(shape) - nb.Nifti1Image(img, np.eye(4), None).to_filename( - os.path.join(outdir, f)) + nb.Nifti1Image(img, np.eye(4), None).to_filename(os.path.join(outdir, f)) @pytest.fixture() def create_files_in_directory(request, tmpdir): cwd = tmpdir.chdir() - filelist = ['a.nii', 'b.nii'] + filelist = ["a.nii", "b.nii"] nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) def change_directory(): @@ -52,7 +44,7 @@ def change_directory(): @pytest.fixture() def create_analyze_pair_file_in_directory(request, tmpdir): cwd = tmpdir.chdir() - filelist = ['a.hdr'] + filelist = ["a.hdr"] analyze_pair_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) def change_directory(): @@ -65,11 +57,11 @@ def change_directory(): @pytest.fixture() def create_files_in_directory_plus_dummy_file(request, tmpdir): cwd = tmpdir.chdir() - filelist = ['a.nii', 'b.nii'] + filelist = ["a.nii", "b.nii"] nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) - tmpdir.join('reg.dat').write('dummy file') - filelist.append('reg.dat') + tmpdir.join("reg.dat").write("dummy file") + filelist.append("reg.dat") def change_directory(): cwd.chdir() @@ -81,7 +73,7 @@ def change_directory(): @pytest.fixture() def create_surf_file_in_directory(request, tmpdir): cwd = tmpdir.chdir() - surf = 'lh.a.nii' + surf = "lh.a.nii" nifti_image_files(tmpdir.strpath, filelist=surf, shape=(1, 100, 1)) def change_directory(): @@ -92,12 +84,12 @@ def change_directory(): def set_output_type(fsl_output_type): - prev_output_type = os.environ.get('FSLOUTPUTTYPE', None) + prev_output_type = os.environ.get("FSLOUTPUTTYPE", None) if fsl_output_type is not None: - os.environ['FSLOUTPUTTYPE'] = fsl_output_type - elif 'FSLOUTPUTTYPE' in os.environ: - del os.environ['FSLOUTPUTTYPE'] + os.environ["FSLOUTPUTTYPE"] = fsl_output_type + elif "FSLOUTPUTTYPE" in os.environ: + del os.environ["FSLOUTPUTTYPE"] FSLCommand.set_default_output_type(Info.output_type()) return prev_output_type @@ -107,7 +99,7 @@ def set_output_type(fsl_output_type): def create_files_in_directory_plus_output_type(request, tmpdir): func_prev_type = set_output_type(request.param) origdir = tmpdir.chdir() - filelist = ['a.nii', 'b.nii'] + filelist = ["a.nii", "b.nii"] nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) out_ext = Info.output_type_to_ext(Info.output_type()) diff --git a/nipype/testing/tests/test_utils.py b/nipype/testing/tests/test_utils.py index 798f640805..9217d54694 100644 --- a/nipype/testing/tests/test_utils.py +++ b/nipype/testing/tests/test_utils.py @@ -1,45 +1,44 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Test testing utilities """ import os -import warnings import subprocess -from mock import patch, MagicMock +from unittest.mock import patch, MagicMock +from unittest import SkipTest from nipype.testing.utils import TempFATFS def test_tempfatfs(): try: fatfs = TempFATFS() - except (IOError, OSError): - warnings.warn("Cannot mount FAT 
filesystems with FUSE") - else: - with fatfs as tmp_dir: - assert os.path.exists(tmp_dir) + except OSError: + raise SkipTest("Cannot mount FAT filesystems with FUSE") + with fatfs as tmp_dir: + assert os.path.exists(tmp_dir) @patch( - 'subprocess.check_call', - MagicMock(side_effect=subprocess.CalledProcessError('', ''))) + "subprocess.check_call", + MagicMock(side_effect=subprocess.CalledProcessError("", "")), +) def test_tempfatfs_calledprocesserror(): try: TempFATFS() - except IOError as e: + except OSError as e: assert isinstance(e, IOError) assert isinstance(e.__cause__, subprocess.CalledProcessError) else: assert False -@patch('subprocess.check_call', MagicMock()) -@patch('subprocess.Popen', MagicMock(side_effect=OSError())) +@patch("subprocess.check_call", MagicMock()) +@patch("subprocess.Popen", MagicMock(side_effect=OSError())) def test_tempfatfs_oserror(): try: TempFATFS() - except IOError as e: + except OSError as e: assert isinstance(e, IOError) assert isinstance(e.__cause__, OSError) else: diff --git a/nipype/testing/utils.py b/nipype/testing/utils.py index 716b16da78..71a75a41c7 100644 --- a/nipype/testing/utils.py +++ b/nipype/testing/utils.py @@ -1,12 +1,7 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Additional handy utilities for testing """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import range, object, open - import os import time import shutil @@ -14,16 +9,15 @@ import subprocess from subprocess import CalledProcessError from tempfile import mkdtemp -from future.utils import raise_from from ..utils.misc import package_check -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" import numpy as np import nibabel as nb -class TempFATFS(object): +class TempFATFS: def __init__(self, size_in_mbytes=8, delay=0.5): """Temporary filesystem for testing non-POSIX filesystems on a POSIX system. 
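For context on the fixture being modernized here, a minimal usage sketch of TempFATFS, mirroring test_tempfatfs above (the import path and constructor signature are taken from this diff; the OSError branch is the new skip path, since construction now raises OSError rather than IOError when FUSE is unavailable):

    import os
    from unittest import SkipTest
    from nipype.testing.utils import TempFATFS

    try:
        fatfs = TempFATFS(size_in_mbytes=8)  # zero-filled backing file + fusefat mount
    except OSError:
        raise SkipTest("Cannot mount FAT filesystems with FUSE")
    with fatfs as tmp_dir:  # __enter__ returns the VFAT mount point
        assert os.path.exists(tmp_dir)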
@@ -42,37 +36,39 @@ def __init__(self, size_in_mbytes=8, delay=0.5): """ self.delay = delay self.tmpdir = mkdtemp() - self.dev_null = open(os.devnull, 'wb') + self.dev_null = open(os.devnull, "wb") - vfatfile = os.path.join(self.tmpdir, 'vfatblock') - self.vfatmount = os.path.join(self.tmpdir, 'vfatmount') - self.canary = os.path.join(self.vfatmount, '.canary') + vfatfile = os.path.join(self.tmpdir, "vfatblock") + self.vfatmount = os.path.join(self.tmpdir, "vfatmount") + self.canary = os.path.join(self.vfatmount, ".canary") - with open(vfatfile, 'wb') as fobj: - fobj.write(b'\x00' * (int(size_in_mbytes) << 20)) + with open(vfatfile, "wb") as fobj: + fobj.write(b"\x00" * (int(size_in_mbytes) << 20)) os.mkdir(self.vfatmount) - mkfs_args = ['mkfs.vfat', vfatfile] - mount_args = ['fusefat', '-o', 'rw+', '-f', vfatfile, self.vfatmount] + mkfs_args = ["mkfs.vfat", vfatfile] + mount_args = ["fusefat", "-o", "rw+", "-f", vfatfile, self.vfatmount] try: subprocess.check_call( - args=mkfs_args, stdout=self.dev_null, stderr=self.dev_null) + args=mkfs_args, stdout=self.dev_null, stderr=self.dev_null + ) except CalledProcessError as e: - raise_from(IOError("mkfs.vfat failed"), e) + raise OSError("mkfs.vfat failed") from e try: self.fusefat = subprocess.Popen( - args=mount_args, stdout=self.dev_null, stderr=self.dev_null) + args=mount_args, stdout=self.dev_null, stderr=self.dev_null + ) except OSError as e: - raise_from(IOError("fusefat is not installed"), e) + raise OSError("fusefat is not installed") from e time.sleep(self.delay) if self.fusefat.poll() is not None: - raise IOError("fusefat terminated too soon") + raise OSError("fusefat terminated too soon") - open(self.canary, 'wb').close() + open(self.canary, "wb").close() def __enter__(self): return self.vfatmount diff --git a/nipype/tests/test_nipype.py b/nipype/tests/test_nipype.py index 01fd081bc9..3f103b5529 100644 --- a/nipype/tests/test_nipype.py +++ b/nipype/tests/test_nipype.py @@ -1,3 +1,4 @@ +import os from .. 
import get_info from ..info import get_nipype_gitversion import pytest @@ -12,8 +13,85 @@ def test_nipype_info(): assert exception_not_raised -@pytest.mark.skipif(not get_nipype_gitversion(), - reason="not able to get version from get_nipype_gitversion") +@pytest.mark.skipif( + not get_nipype_gitversion(), + reason="not able to get version from get_nipype_gitversion", +) def test_git_hash(): # removing the first "g" from gitversion - get_nipype_gitversion()[1:] == get_info()['commit_hash'] + get_nipype_gitversion()[1:] == get_info()["commit_hash"] + + +def _check_no_et(): + import os + from unittest.mock import patch + + et = os.getenv("NIPYPE_NO_ET") is None + + with patch.dict("os.environ", {"NIPYPE_NO_ET": "1"}): + from nipype.interfaces.base import BaseInterface + + ver_data = BaseInterface._etelemetry_version_data + + if et and ver_data is None: + raise ValueError( + "etelemetry enabled and version data missing - double hits likely" + ) + + return et + + +def test_no_et_bare(tmp_path): + from unittest.mock import patch + from nipype.pipeline import engine as pe + from nipype.interfaces import utility as niu + from nipype.interfaces.base import BaseInterface + + et = os.getenv("NIPYPE_NO_ET") is None + + # Pytest doesn't trigger this, so let's pretend it's there + with patch.object(BaseInterface, "_etelemetry_version_data", {}): + # Direct function call - environment not set + f = niu.Function(function=_check_no_et) + res = f.run() + assert res.outputs.out == et + + # Basic node - environment not set + n = pe.Node( + niu.Function(function=_check_no_et), name="n", base_dir=str(tmp_path) + ) + res = n.run() + assert res.outputs.out == et + + # Linear run - environment not set + wf1 = pe.Workflow(name="wf1", base_dir=str(tmp_path)) + wf1.add_nodes([pe.Node(niu.Function(function=_check_no_et), name="n")]) + res = wf1.run() + assert next(iter(res.nodes)).result.outputs.out == et + + +@pytest.mark.parametrize("plugin", ("MultiProc", "LegacyMultiProc")) +@pytest.mark.parametrize("run_without_submitting", (True, False)) +def test_no_et_multiproc(tmp_path, plugin, run_without_submitting): + from unittest.mock import patch + from nipype.pipeline import engine as pe + from nipype.interfaces import utility as niu + from nipype.interfaces.base import BaseInterface + + et = os.getenv("NIPYPE_NO_ET") is None + + # Multiprocessing runs initialize new processes with NIPYPE_NO_ET + # This does not apply to unsubmitted jobs, run by the main thread + expectation = et if run_without_submitting else False + + # Pytest doesn't trigger this, so let's pretend it's there + with patch.object(BaseInterface, "_etelemetry_version_data", {}): + wf = pe.Workflow(name="wf2", base_dir=str(tmp_path)) + n = pe.Node( + niu.Function(function=_check_no_et), + run_without_submitting=run_without_submitting, + name="n", + ) + wf.add_nodes([n]) + res = wf.run(plugin=plugin, plugin_args={"n_procs": 1}) + assert next(iter(res.nodes)).result.outputs.out is expectation diff --git a/nipype/utils/__init__.py b/nipype/utils/__init__.py index 4a0741e48e..56d7dfb2c7 100644 --- a/nipype/utils/__init__.py +++ b/nipype/utils/__init__.py @@ -1,6 +1,2 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import - -from .config import NUMPY_MMAP from .onetime import OneTimeProperty, setattr_on_read from .tmpdirs import TemporaryDirectory, InTemporaryDirectory diff --git a/nipype/utils/config.py b/nipype/utils/config.py index 79c0bf6b51..8317270d83 100644 --- a/nipype/utils/config.py +++ b/nipype/utils/config.py @@ -1,40 +1,31 @@ -# -*- 
coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' +""" Created on 20 Apr 2010 logging options : INFO, DEBUG hash_method : content, timestamp @author: Chris Filo Gorgolewski -''' -from __future__ import (print_function, division, unicode_literals, - absolute_import) +""" import os import sys import errno import atexit from warnings import warn -from distutils.version import LooseVersion +from looseversion import LooseVersion import configparser -import numpy as np -from builtins import bytes, str, object, open from simplejson import load, dump -from future import standard_library from .misc import str2bool -from ..external import portalocker - -standard_library.install_aliases() +from filelock import SoftFileLock CONFIG_DEPRECATIONS = { - 'profile_runtime': ('monitoring.enabled', '1.0'), - 'filemanip_level': ('logging.utils_level', '1.0'), + "profile_runtime": ("monitoring.enabled", "1.0"), + "filemanip_level": ("logging.utils_level", "1.0"), } -NUMPY_MMAP = LooseVersion(np.__version__) >= LooseVersion('1.12.0') DEFAULT_CONFIG_TPL = """\ [logging] @@ -68,6 +59,7 @@ parameterize_dirs = true poll_sleep_duration = 2 xvfb_max_wait = 10 +check_version = true [monitoring] enabled = false @@ -76,7 +68,7 @@ [check] interval = 1209600 -""".format +""" def mkdir_p(path): @@ -89,33 +81,31 @@ def mkdir_p(path): raise -class NipypeConfig(object): +class NipypeConfig: """Base nipype config class""" def __init__(self, *args, **kwargs): self._config = configparser.ConfigParser() self._cwd = None - config_dir = os.path.expanduser('~/.nipype') - self.data_file = os.path.join(config_dir, 'nipype.json') + config_dir = os.path.expanduser( + os.getenv("NIPYPE_CONFIG_DIR", default="~/.nipype") + ) + self.data_file = os.path.join(config_dir, "nipype.json") self.set_default_config() self._display = None self._resource_monitor = None - if os.path.exists(config_dir): - self._config.read( - [os.path.join(config_dir, 'nipype.cfg'), 'nipype.cfg']) + self._config.read([os.path.join(config_dir, "nipype.cfg"), "nipype.cfg"]) for option in CONFIG_DEPRECATIONS: - for section in ['execution', 'logging', 'monitoring']: + for section in ["execution", "logging", "monitoring"]: if self.has_option(section, option): - new_section, new_option = CONFIG_DEPRECATIONS[option][ - 0].split('.') + new_section, new_option = CONFIG_DEPRECATIONS[option][0].split(".") if not self.has_option(new_section, new_option): # Warn implicit in get - self.set(new_section, new_option, - self.get(section, option)) + self.set(new_section, new_option, self.get(section, option)) @property def cwd(self): @@ -126,34 +116,39 @@ def cwd(self): try: self._cwd = os.getcwd() except OSError: - warn('Trying to run Nipype from a nonexistent directory "{}".'. 
- format(os.getenv('PWD', 'unknown')), RuntimeWarning) + warn( + 'Trying to run Nipype from a nonexistent directory "{}".'.format( + os.getenv("PWD", "unknown") + ), + RuntimeWarning, + ) raise return self._cwd def set_default_config(self): """Read default settings template and set into config object""" - default_cfg = DEFAULT_CONFIG_TPL( - log_dir=os.path.expanduser( - '~'), # Get $HOME in a platform-agnostic way - crashdump_dir=self.cwd # Read cached cwd + default_cfg = DEFAULT_CONFIG_TPL.format( + log_dir=os.path.expanduser("~"), # Get $HOME in a platform-agnostic way + crashdump_dir=self.cwd, # Read cached cwd ) try: self._config.read_string(default_cfg) # Python >= 3.2 except AttributeError: from io import StringIO + self._config.readfp(StringIO(default_cfg)) def enable_debug_mode(self): """Enables debug configuration""" from .. import logging - self._config.set('execution', 'stop_on_first_crash', 'true') - self._config.set('execution', 'remove_unnecessary_outputs', 'false') - self._config.set('execution', 'keep_inputs', 'true') - self._config.set('logging', 'workflow_level', 'DEBUG') - self._config.set('logging', 'interface_level', 'DEBUG') - self._config.set('logging', 'utils_level', 'DEBUG') + + self._config.set("execution", "stop_on_first_crash", "true") + self._config.set("execution", "remove_unnecessary_outputs", "false") + self._config.set("execution", "keep_inputs", "true") + self._config.set("logging", "workflow_level", "DEBUG") + self._config.set("logging", "interface_level", "DEBUG") + self._config.set("logging", "utils_level", "DEBUG") logging.update_logging(self._config) def set_log_dir(self, log_dir): @@ -162,17 +157,17 @@ def set_log_dir(self, log_dir): This should be the first thing that is done before any nipype class with logging is imported. """ - self._config.set('logging', 'log_directory', log_dir) + self._config.set("logging", "log_directory", log_dir) def get(self, section, option, default=None): """Get an option""" if option in CONFIG_DEPRECATIONS: - msg = ('Config option "%s" has been deprecated as of nipype %s. ' - 'Please use "%s" instead.') % ( - option, CONFIG_DEPRECATIONS[option][1], - CONFIG_DEPRECATIONS[option][0]) + msg = ( + 'Config option "%s" has been deprecated as of nipype %s. ' + 'Please use "%s" instead.' + ) % (option, CONFIG_DEPRECATIONS[option][1], CONFIG_DEPRECATIONS[option][0]) warn(msg) - section, option = CONFIG_DEPRECATIONS[option][0].split('.') + section, option = CONFIG_DEPRECATIONS[option][0].split(".") if self._config.has_option(section, option): return self._config.get(section, option) @@ -184,12 +179,12 @@ def set(self, section, option, value): value = str(value) if option in CONFIG_DEPRECATIONS: - msg = ('Config option "%s" has been deprecated as of nipype %s. ' - 'Please use "%s" instead.') % ( - option, CONFIG_DEPRECATIONS[option][1], - CONFIG_DEPRECATIONS[option][0]) + msg = ( + 'Config option "%s" has been deprecated as of nipype %s. ' + 'Please use "%s" instead.' 
+ ) % (option, CONFIG_DEPRECATIONS[option][1], CONFIG_DEPRECATIONS[option][0]) warn(msg) - section, option = CONFIG_DEPRECATIONS[option][0].split('.') + section, option = CONFIG_DEPRECATIONS[option][0].split(".") return self._config.set(section, option, value) @@ -209,46 +204,47 @@ def get_data(self, key): """Read options file""" if not os.path.exists(self.data_file): return None - with open(self.data_file, 'rt') as file: - portalocker.lock(file, portalocker.LOCK_EX) - datadict = load(file) + with SoftFileLock("%s.lock" % self.data_file): + with open(self.data_file) as file: + datadict = load(file) if key in datadict: return datadict[key] return None def save_data(self, key, value): - """Store config flie""" + """Store config file""" datadict = {} if os.path.exists(self.data_file): - with open(self.data_file, 'rt') as file: - portalocker.lock(file, portalocker.LOCK_EX) - datadict = load(file) + with SoftFileLock("%s.lock" % self.data_file): + with open(self.data_file) as file: + datadict = load(file) else: dirname = os.path.dirname(self.data_file) if not os.path.exists(dirname): mkdir_p(dirname) - with open(self.data_file, 'wt') as file: - portalocker.lock(file, portalocker.LOCK_EX) - datadict[key] = value - dump(datadict, file) + with SoftFileLock("%s.lock" % self.data_file): + with open(self.data_file, "w") as file: + datadict[key] = value + dump(datadict, file) def update_config(self, config_dict): """Extend internal dictionary with config_dict""" - for section in ['execution', 'logging', 'check']: + for section in ["execution", "logging", "check"]: if section in config_dict: for key, val in list(config_dict[section].items()): - if not key.startswith('__'): + if not key.startswith("__"): self._config.set(section, key, str(val)) def update_matplotlib(self): """Set backend on matplotlib from options""" import matplotlib - matplotlib.use(self.get('execution', 'matplotlib_backend')) + + matplotlib.use(self.get("execution", "matplotlib_backend")) def enable_provenance(self): """Sets provenance storing on""" - self._config.set('execution', 'write_provenance', 'true') - self._config.set('execution', 'hash_method', 'content') + self._config.set("execution", "write_provenance", "true") + self._config.set("execution", "hash_method", "content") @property def resource_monitor(self): @@ -257,8 +253,9 @@ def resource_monitor(self): return self._resource_monitor # Cache config from nipype config - self.resource_monitor = str2bool( - self._config.get('monitoring', 'enabled')) or False + self.resource_monitor = ( + str2bool(self._config.get("monitoring", "enabled")) or False + ) return self._resource_monitor @resource_monitor.setter @@ -276,16 +273,21 @@ def resource_monitor(self, value): self._resource_monitor = False try: import psutil + self._resource_monitor = LooseVersion( - psutil.__version__) >= LooseVersion('5.0') + psutil.__version__ + ) >= LooseVersion("5.0") except ImportError: pass finally: if not self._resource_monitor: - warn('Could not enable the resource monitor: ' - 'psutil>=5.0 could not be imported.') - self._config.set('monitoring', 'enabled', - ('%s' % self._resource_monitor).lower()) + warn( + "Could not enable the resource monitor: " + "psutil>=5.0 could not be imported." 
+ ) + self._config.set( + "monitoring", "enabled", ("%s" % self._resource_monitor).lower() + ) def enable_resource_monitor(self): """Sets the resource monitor on""" @@ -306,13 +308,13 @@ def get_display(self): # shell=True, stdout=sp.DEVNULL)) if self._display is not None: - return ':%d' % self._display.new_display + return ":%d" % self._display.new_display sysdisplay = None - if self._config.has_option('execution', 'display_variable'): - sysdisplay = self._config.get('execution', 'display_variable') + if self._config.has_option("execution", "display_variable"): + sysdisplay = self._config.get("execution", "display_variable") - sysdisplay = sysdisplay or os.getenv('DISPLAY') + sysdisplay = sysdisplay or os.getenv("DISPLAY") if sysdisplay: from collections import namedtuple @@ -320,49 +322,51 @@ def _mock(): pass # Store a fake Xvfb object. Format - :[.] - ndisp = sysdisplay.split(':')[-1].split('.')[0] - Xvfb = namedtuple('Xvfb', ['new_display', 'stop']) + ndisp = sysdisplay.split(":")[-1].split(".")[0] + Xvfb = namedtuple("Xvfb", ["new_display", "stop"]) self._display = Xvfb(int(ndisp), _mock) return self.get_display() else: - if 'darwin' in sys.platform: + if "darwin" in sys.platform: raise RuntimeError( - 'Xvfb requires root permissions to run in OSX. Please ' - 'make sure that an X server is listening and set the ' - 'appropriate config on either $DISPLAY or nipype\'s ' + "Xvfb requires root permissions to run in OSX. Please " + "make sure that an X server is listening and set the " + "appropriate config on either $DISPLAY or nipype's " '"display_variable" config. Valid X servers include ' - 'VNC, XQuartz, or manually started Xvfb.') + "VNC, XQuartz, or manually started Xvfb." + ) # If $DISPLAY is empty, it confuses Xvfb so unset - if sysdisplay == '': - del os.environ['DISPLAY'] + if sysdisplay == "": + del os.environ["DISPLAY"] try: from xvfbwrapper import Xvfb except ImportError: raise RuntimeError( - 'A display server was required, but $DISPLAY is not ' - 'defined and Xvfb could not be imported.') + "A display server was required, but $DISPLAY is not " + "defined and Xvfb could not be imported." + ) - self._display = Xvfb(nolisten='tcp') + self._display = Xvfb(nolisten="tcp") self._display.start() # Older versions of xvfbwrapper used vdisplay_num - if not hasattr(self._display, 'new_display'): - setattr(self._display, 'new_display', - self._display.vdisplay_num) + if not hasattr(self._display, "new_display"): + self._display.new_display = self._display.vdisplay_num return self.get_display() def stop_display(self): """Closes the display if started""" if self._display is not None: from .. import logging + self._display.stop() - logging.getLogger('nipype.interface').debug( - 'Closing display (if virtual)') + logging.getLogger("nipype.interface").debug("Closing display (if virtual)") @atexit.register def free_display(): """Stop virtual display (if it is up)""" from .. 
import config + config.stop_display() diff --git a/nipype/utils/datetime.py b/nipype/utils/datetime.py new file mode 100644 index 0000000000..4a9779f20f --- /dev/null +++ b/nipype/utils/datetime.py @@ -0,0 +1,19 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +""" +Utilities for dates and time +""" + +from datetime import datetime as dt +import sys + +if sys.version_info >= (3, 11): + from datetime import UTC + + def utcnow(): + """Adapter since 3.12 prior utcnow is deprecated, + but not EOLed 3.8 does not have datetime.UTC""" + return dt.now(UTC) + +else: + utcnow = dt.utcnow diff --git a/nipype/utils/docparse.py b/nipype/utils/docparse.py index 1df779f2ce..7731329265 100644 --- a/nipype/utils/docparse.py +++ b/nipype/utils/docparse.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Utilities to pull in documentation from command-line tools. @@ -13,10 +12,6 @@ docstring = docparse.get_doc(better.cmd, better.opt_map) """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, open, bytes - import subprocess from ..interfaces.base import CommandLine from .misc import is_container @@ -39,12 +34,13 @@ def grab_doc(cmd, trap_error=True): """ proc = subprocess.Popen( - cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True + ) stdout, stderr = proc.communicate() if trap_error and proc.returncode: - msg = 'Attempting to run %s. Returned Error: %s' % (cmd, stderr) - raise IOError(msg) + msg = f"Attempting to run {cmd}. Returned Error: {stderr}" + raise OSError(msg) if stderr: # A few programs, like fast and fnirt, send their help to @@ -80,7 +76,7 @@ def reverse_opt_map(opt_map): # The value is a tuple where the first element is the # format string and the second element is a docstring. value = value[0] - if (key != 'flags' and value is not None): + if key != "flags" and value is not None: revdict[value.split()[0]] = key return revdict @@ -108,21 +104,21 @@ def format_params(paramlist, otherlist=None): The formatted docstring. """ - hdr = 'Parameters' - delim = '----------' + hdr = "Parameters" + delim = "----------" paramlist.insert(0, delim) paramlist.insert(0, hdr) - params = '\n'.join(paramlist) + params = "\n".join(paramlist) otherparams = [] - doc = ''.join(params) + doc = "".join(params) if otherlist: - hdr = 'Others Parameters' - delim = '-----------------' + hdr = "Others Parameters" + delim = "-----------------" otherlist.insert(0, delim) otherlist.insert(0, hdr) - otherlist.insert(0, '\n') - otherparams = '\n'.join(otherlist) - doc = ''.join([doc, otherparams]) + otherlist.insert(0, "\n") + otherparams = "\n".join(otherlist) + doc = f"{doc}{otherparams}" return doc @@ -135,7 +131,7 @@ def insert_doc(doc, new_items): Parameters ---------- doc : str - The existing docstring we're inserting docmentation into. + The existing docstring we're inserting documentation into. new_items : list List of strings to be inserted in the ``doc``. 
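A brief usage note on the nipype/utils/datetime.py shim added above, assuming the module is importable as shown: on Python >= 3.11, utcnow() returns a timezone-aware datetime (tzinfo set to UTC), while on older interpreters it falls back to the naive datetime.utcnow(). Callers should not compare the two forms directly:

    from nipype.utils.datetime import utcnow

    stamp = utcnow()
    # On Python >= 3.11, stamp.tzinfo is datetime.UTC; on earlier versions it is
    # None (naive), matching the deprecated datetime.utcnow(). Mixing naive and
    # aware datetimes in comparisons raises TypeError, so pick one form per codebase.
    print(stamp.isoformat())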
@@ -163,20 +159,14 @@ def insert_doc(doc, new_items): """ # Insert new_items after the Parameters header - doclist = doc.split('\n') + doclist = doc.split("\n") tmpdoc = doclist[:2] # Add new_items tmpdoc.extend(new_items) # Add rest of documents tmpdoc.extend(doclist[2:]) # Insert newlines - newdoc = [] - for line in tmpdoc: - newdoc.append(line) - newdoc.append('\n') - # We add one too many newlines, remove it. - newdoc.pop(-1) - return ''.join(newdoc) + return "\n".join(tmpdoc) def build_doc(doc, opts): @@ -194,13 +184,13 @@ def build_doc(doc, opts): ------- newdoc : string The docstring with flags replaced with attribute names and - formated to match nipy standards (as best we can). + formatted to match nipy standards (as best we can). """ # Split doc into line elements. Generally, each line is an # individual flag/option. - doclist = doc.split('\n') + doclist = doc.split("\n") newdoc = [] flags_doc = [] for line in doclist: @@ -209,24 +199,24 @@ def build_doc(doc, opts): # Probably an empty line continue # For lines we care about, the first item is the flag - if ',' in linelist[0]: # sometimes flags are only seperated by comma - flag = linelist[0].split(',')[0] + if "," in linelist[0]: # sometimes flags are only separated by comma + flag = linelist[0].split(",")[0] else: flag = linelist[0] attr = opts.get(flag) if attr is not None: # newline = line.replace(flag, attr) # Replace the flag with our attribute name - linelist[0] = '%s :\n ' % str(attr) + linelist[0] = "%s :\n " % str(attr) # Add some line formatting - newline = ' '.join(linelist) + newline = " ".join(linelist) newdoc.append(newline) else: if line[0].isspace(): # For all the docs I've looked at, the flags all have # indentation (spaces) at the start of the line. # Other parts of the docs, like 'usage' statements - # start with alpha-numeric characters. We only care + # start with alphanumeric characters. We only care # about the flags. flags_doc.append(line) return format_params(newdoc, flags_doc) @@ -249,24 +239,25 @@ def get_doc(cmd, opt_map, help_flag=None, trap_error=True): Returns ------- doc : string - The formated docstring + The formatted docstring """ res = CommandLine( - 'which %s' % cmd.split(' ')[0], + "which %s" % cmd.split(" ")[0], resource_monitor=False, - terminal_output='allatonce').run() + terminal_output="allatonce", + ).run() cmd_path = res.runtime.stdout.strip() - if cmd_path == '': - raise Exception('Command %s not found' % cmd.split(' ')[0]) + if cmd_path == "": + raise Exception("Command %s not found" % cmd.split(" ")[0]) if help_flag: - cmd = ' '.join((cmd, help_flag)) + cmd = f"{cmd} {help_flag}" doc = grab_doc(cmd, trap_error) opts = reverse_opt_map(opt_map) return build_doc(doc, opts) -def _parse_doc(doc, style=['--']): +def _parse_doc(doc, style=["--"]): """Parses a help doc for inputs Parameters @@ -283,16 +274,16 @@ def _parse_doc(doc, style=['--']): # Split doc into line elements. Generally, each line is an # individual flag/option. 
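# Illustrative aside, not part of the patch: the insert_doc hunk above swaps a
# manual "append line, append newline, pop the extra newline" loop for a single
# str.join call. Assuming behavior is meant to be unchanged, a tiny check with
# hypothetical data that the two spellings agree:
_parts = ["Parameters", "----------", "foo :", "    bar"]
_manual = ""
for _line in _parts:
    _manual += _line + "\n"
assert _manual[:-1] == "\n".join(_parts)
# End of aside.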
- doclist = doc.split('\n') + doclist = doc.split("\n") optmap = {} if isinstance(style, (str, bytes)): style = [style] for line in doclist: linelist = line.split() flag = [ - item for i, item in enumerate(linelist) - if i < 2 and any([item.startswith(s) - for s in style]) and len(item) > 1 + item + for i, item in enumerate(linelist) + if i < 2 and item.startswith(tuple(style)) and len(item) > 1 ] if flag: if len(flag) == 1: @@ -307,11 +298,11 @@ def _parse_doc(doc, style=['--']): break flag = flag[style_idx.index(min(style_idx))] style_idx = min(style_idx) - optmap[flag.split(style[style_idx])[1]] = '%s %%s' % flag + optmap[flag.split(style[style_idx])[1]] = "%s %%s" % flag return optmap -def get_params_from_doc(cmd, style='--', help_flag=None, trap_error=True): +def get_params_from_doc(cmd, style="--", help_flag=None, trap_error=True): """Auto-generate option map from command line help Parameters @@ -333,14 +324,15 @@ def get_params_from_doc(cmd, style='--', help_flag=None, trap_error=True): """ res = CommandLine( - 'which %s' % cmd.split(' ')[0], + "which %s" % cmd.split(" ")[0], resource_monitor=False, - terminal_output='allatonce').run() + terminal_output="allatonce", + ).run() cmd_path = res.runtime.stdout.strip() - if cmd_path == '': - raise Exception('Command %s not found' % cmd.split(' ')[0]) + if cmd_path == "": + raise Exception("Command %s not found" % cmd.split(" ")[0]) if help_flag: - cmd = ' '.join((cmd, help_flag)) + cmd = f"{cmd} {help_flag}" doc = grab_doc(cmd, trap_error) return _parse_doc(doc, style) diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index 7a52205090..3ae4b77246 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -1,37 +1,28 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Module to draw an html gantt chart from logfile produced by ``nipype.utils.profiler.log_nodes_cb()`` """ -from __future__ import (print_function, division, unicode_literals, - absolute_import) - # Import packages -import sys import random import datetime import simplejson as json -from builtins import str, range, open -# Py2 compat: http://python-future.org/compatible_idioms.html#collections-counter-and-ordereddict -from future import standard_library -standard_library.install_aliases() + from collections import OrderedDict # Pandas try: import pandas as pd except ImportError: - print('Pandas not found; in order for full functionality of this module ' - 'install the pandas package') - pass - -PY3 = sys.version_info[0] > 2 + print( + "Pandas not found; in order for full functionality of this module " + "install the pandas package" + ) def create_event_dict(start_time, nodes_list): - ''' + """ Function to generate a dictionary of event (start/finish) nodes from the nodes list @@ -47,7 +38,7 @@ def create_event_dict(start_time, nodes_list): events : dictionary a dictionary where the key is the timedelta from the start of the pipeline execution to the value node it accompanies - ''' + """ # Import packages import copy @@ -55,28 +46,28 @@ def create_event_dict(start_time, nodes_list): events = {} for node in nodes_list: # Format node fields - estimated_threads = node.get('num_threads', 1) - estimated_memory_gb = node.get('estimated_memory_gb', 1.0) - runtime_threads = node.get('runtime_threads', 0) - runtime_memory_gb = node.get('runtime_memory_gb', 0.0) + estimated_threads = node.get("num_threads", 1) + estimated_memory_gb = 
node.get("estimated_memory_gb", 1.0) + runtime_threads = node.get("runtime_threads", 0) + runtime_memory_gb = node.get("runtime_memory_gb", 0.0) # Init and format event-based nodes - node['estimated_threads'] = estimated_threads - node['estimated_memory_gb'] = estimated_memory_gb - node['runtime_threads'] = runtime_threads - node['runtime_memory_gb'] = runtime_memory_gb + node["estimated_threads"] = estimated_threads + node["estimated_memory_gb"] = estimated_memory_gb + node["runtime_threads"] = runtime_threads + node["runtime_memory_gb"] = runtime_memory_gb start_node = node finish_node = copy.deepcopy(node) - start_node['event'] = 'start' - finish_node['event'] = 'finish' + start_node["event"] = "start" + finish_node["event"] = "finish" # Get dictionary key - start_delta = (node['start'] - start_time).total_seconds() - finish_delta = (node['finish'] - start_time).total_seconds() + start_delta = (node["start"] - start_time).total_seconds() + finish_delta = (node["finish"] - start_time).total_seconds() # Populate dictionary if events.get(start_delta) or events.get(finish_delta): - err_msg = 'Event logged twice or events started at exact same time!' + err_msg = "Event logged twice or events started at exact same time!" raise KeyError(err_msg) events[start_delta] = start_node events[finish_delta] = finish_node @@ -86,7 +77,7 @@ def create_event_dict(start_time, nodes_list): def log_to_dict(logfile): - ''' + """ Function to extract log node dictionaries into a list of python dictionaries and return the list as well as the final node @@ -101,21 +92,39 @@ def log_to_dict(logfile): nodes_list : list a list of python dictionaries containing the runtime info for each nipype node - ''' + """ # Init variables - with open(logfile, 'r') as content: + with open(logfile) as content: # read file separating each line lines = content.readlines() nodes_list = [json.loads(l) for l in lines] + def _convert_string_to_datetime(datestring): + try: + datetime_object: datetime.datetime = datetime.datetime.strptime( + datestring, "%Y-%m-%dT%H:%M:%S.%f" + ) + return datetime_object + except Exception as _: + pass + return datestring + + date_object_node_list: list = list() + for n in nodes_list: + if "start" in n: + n["start"] = _convert_string_to_datetime(n["start"]) + if "finish" in n: + n["finish"] = _convert_string_to_datetime(n["finish"]) + date_object_node_list.append(n) + # Return list of nodes - return nodes_list + return date_object_node_list def calculate_resource_timeseries(events, resource): - ''' + """ Given as event dictionary, calculate the resources used as a timeseries @@ -133,7 +142,7 @@ def calculate_resource_timeseries(events, resource): time_series : pandas Series a pandas Series object that contains timestamps as the indices and the resource amount as values - ''' + """ # Import packages import pandas as pd @@ -144,14 +153,14 @@ def calculate_resource_timeseries(events, resource): # Iterate through the events for _, event in sorted(events.items()): - if event['event'] == "start": - if resource in event and event[resource] != 'Unknown': + if event["event"] == "start": + if resource in event and event[resource] != "Unknown": all_res += float(event[resource]) - current_time = event['start'] - elif event['event'] == "finish": - if resource in event and event[resource] != 'Unknown': + current_time = event["start"] + elif event["event"] == "finish": + if resource in event and event[resource] != "Unknown": all_res -= float(event[resource]) - current_time = event['finish'] + current_time = 
event["finish"] res[current_time] = all_res # Formulate the pandas timeseries @@ -165,7 +174,7 @@ def calculate_resource_timeseries(events, resource): def draw_lines(start, total_duration, minute_scale, scale): - ''' + """ Function to draw the minute line markers and timestamps Parameters @@ -186,10 +195,10 @@ def draw_lines(start, total_duration, minute_scale, scale): result : string the html-formatted string for producing the minutes-based time line markers - ''' + """ # Init variables - result = '' + result = "" next_line = 220 next_time = start num_lines = int(((total_duration // 60) // minute_scale) + 2) @@ -200,8 +209,11 @@ def draw_lines(start, total_duration, minute_scale, scale): new_line = "
" % next_line result += new_line # Time digits - time = "

%02d:%02d

" % \ - (next_line-20, next_time.hour, next_time.minute) + time = "

%02d:%02d

" % ( + next_line - 20, + next_time.hour, + next_time.minute, + ) result += time # Increment line spacing and digits next_line += minute_scale * scale @@ -211,9 +223,8 @@ def draw_lines(start, total_duration, minute_scale, scale): return result -def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, - colors): - ''' +def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, colors): + """ Function to return the html-string of the node drawings for the gantt chart @@ -243,28 +254,30 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, result : string the html-formatted string for producing the minutes-based time line markers - ''' + """ # Init variables - result = '' + result = "" scale = space_between_minutes / minute_scale space_between_minutes = space_between_minutes / scale end_times = [ - datetime.datetime(start.year, start.month, start.day, start.hour, - start.minute, start.second) for core in range(cores) + datetime.datetime( + start.year, start.month, start.day, start.hour, start.minute, start.second + ) + for core in range(cores) ] # For each node in the pipeline for node in nodes_list: # Get start and finish times - node_start = node['start'] - node_finish = node['finish'] + node_start = node["start"] + node_finish = node["finish"] # Calculate an offset and scale duration - offset = ((node_start - start).total_seconds() / 60) * scale * \ - space_between_minutes + 220 + offset = ( + (node_start - start).total_seconds() / 60 + ) * scale * space_between_minutes + 220 # Scale duration - scale_duration = ( - node['duration'] / 60) * scale * space_between_minutes + scale_duration = (node["duration"] / 60) * scale * space_between_minutes if scale_duration < 5: scale_duration = 5 scale_duration -= 2 @@ -274,32 +287,38 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, if end_times[core] < node_start: left += core * 30 end_times[core] = datetime.datetime( - node_finish.year, node_finish.month, node_finish.day, - node_finish.hour, node_finish.minute, node_finish.second) + node_finish.year, + node_finish.month, + node_finish.day, + node_finish.hour, + node_finish.minute, + node_finish.second, + ) break # Get color for node object color = random.choice(colors) - if 'error' in node: - color = 'red' + if "error" in node: + color = "red" # Setup dictionary for node html string insertion node_dict = { - 'left': left, - 'offset': offset, - 'scale_duration': scale_duration, - 'color': color, - 'node_name': node['name'], - 'node_dur': node['duration'] / 60.0, - 'node_start': node_start.strftime("%Y-%m-%d %H:%M:%S"), - 'node_finish': node_finish.strftime("%Y-%m-%d %H:%M:%S") + "left": left, + "offset": offset, + "scale_duration": scale_duration, + "color": color, + "node_name": node["name"], + "node_dur": node["duration"] / 60.0, + "node_start": node_start.strftime("%Y-%m-%d %H:%M:%S"), + "node_finish": node_finish.strftime("%Y-%m-%d %H:%M:%S"), } # Create new node string - new_node = "
" % \ - node_dict + new_node = ( + "
" % node_dict + ) # Append to output result result += new_node @@ -308,24 +327,26 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, return result -def draw_resource_bar(start_time, finish_time, time_series, - space_between_minutes, minute_scale, color, left, - resource): - ''' - ''' +def draw_resource_bar( + start_time, + finish_time, + time_series, + space_between_minutes, + minute_scale, + color, + left, + resource, +): + """ """ # Memory header - result = "

%s

" \ - % (left, resource) + result = "

%s

" % (left, resource) # Image scaling factors scale = space_between_minutes / minute_scale space_between_minutes = space_between_minutes / scale # Iterate through time series - if PY3: - ts_items = time_series.items() - else: - ts_items = time_series.iteritems() + ts_items = time_series.items() ts_len = len(time_series) for idx, (ts_start, amount) in enumerate(ts_items): @@ -334,8 +355,9 @@ def draw_resource_bar(start_time, finish_time, time_series, else: ts_end = finish_time # Calculate offset from start at top - offset = ((ts_start-start_time).total_seconds() / 60.0) * scale * \ - space_between_minutes + 220 + offset = ( + (ts_start - start_time).total_seconds() / 60.0 + ) * scale * space_between_minutes + 220 # Scale duration duration_mins = (ts_end - ts_start).total_seconds() / 60.0 height = duration_mins * scale * space_between_minutes @@ -346,29 +368,31 @@ def draw_resource_bar(start_time, finish_time, time_series, # Bar width is proportional to resource amount width = amount * 20 - if resource.lower() == 'memory': - label = '%.3f GB' % amount + if resource.lower() == "memory": + label = "%.3f GB" % amount else: - label = '%d threads' % amount + label = "%d threads" % amount # Setup dictionary for bar html string insertion bar_dict = { - 'color': color, - 'height': height, - 'width': width, - 'offset': offset, - 'left': left, - 'label': label, - 'duration': duration_mins, - 'start': ts_start.strftime('%Y-%m-%d %H:%M:%S'), - 'finish': ts_end.strftime('%Y-%m-%d %H:%M:%S') + "color": color, + "height": height, + "width": width, + "offset": offset, + "left": left, + "label": label, + "duration": duration_mins, + "start": ts_start.strftime("%Y-%m-%d %H:%M:%S"), + "finish": ts_end.strftime("%Y-%m-%d %H:%M:%S"), } - bar_html = "
" + bar_html = ( + "
" + ) # Add another bar to html line result += bar_html % bar_dict @@ -376,12 +400,14 @@ def draw_resource_bar(start_time, finish_time, time_series, return result -def generate_gantt_chart(logfile, - cores, - minute_scale=10, - space_between_minutes=50, - colors=["#7070FF", "#4E4EB2", "#2D2D66", "#9B9BFF"]): - ''' +def generate_gantt_chart( + logfile, + cores, + minute_scale=10, + space_between_minutes=50, + colors=["#7070FF", "#4E4EB2", "#2D2D66", "#9B9BFF"], +): + """ Generates a gantt chart in html showing the workflow execution based on a callback log file. This script was intended to be used with the MultiprocPlugin. The following code shows how to set up the workflow in order to generate the log file: @@ -429,10 +455,10 @@ def generate_gantt_chart(logfile, # plugin_args={'n_procs':8, 'memory':12, 'status_callback': log_nodes_cb}) # generate_gantt_chart('callback.log', 8) - ''' + """ # add the html header - html_string = ''' + html_string = """