From 4fcdb3cd571acb33bdd15c3980359a5d31455ace Mon Sep 17 00:00:00 2001 From: Joshua Teves Date: Fri, 8 Nov 2019 09:45:03 -0500 Subject: [PATCH] [ENH][TST] Overhauls CI completely (#418) * Adds integration and five echo skipping * Style fixes * Updates config for CircleCI * Attempts to fix YML * [TEST] Update Dockerfile to match new integr tests * [TEST] Fixes integration tests in Docker image * [FIX] Remove intermediate IO files * Resolves merge conflict, adds output check * Some fixes * [TEST] Updates dev_tool testing infra * [TEST] Fixes pytest integration path checking * [TEST] CircleCI uses Docker image to run tests * [FIX] Minor dev_tool issues for CircleCI * [TEST] Use variable for integration test filename * Attempts to fix CircleCI style check * Revert "Attempts to fix CircleCI style check" This reverts commit 769f4b7b69a639fd265ca2402c592f94d8bb69bb. * Attempt to fix tput call * Adds checkout to code in YML * [TEST] Integration tests run in parallel * [TEST] Separate data downloads from Docker build * [TEST] Update integration test data path * [TEST] CircleCI uses good Docker * [TEST] No version check in circleci * [TEST] Checkout for get_data / style check * Attempts to fix integration test inclusion * [TEST] Checkout for get_data / style check * [FIX] Fix circleci config hopefully * [FIX] No / workdir for circleci machine * [FIX] Use ~ for coverage in circleci * Switches integration tests to truncated length data * [FIX] Actually merge coverage files * [FIX] Coverage cache path circleci * [TEST] Integration test outputs in tests/data * [FIX] circleci config bug * [TEST] Major testing infra overhaul Docker image considerably slimmed down (only test python 3.6 locally), added new dev_requirements.txt to make conda yaml files obsolete, added Makefile to make testing easier locally (if you aren't using the Docker image), and removed integration test data downloads from separate script and into the integration tests themselves * [TEST] Massive CircleCI config regression @tsalo had it right --- moving towards a fully Dockerized implementation was not the way forward for a simple Python package. * [TEST] Better integration testing? At least, more equivalent to what was happening before, where we check that ONLY the expected output files are generated (no more, no less) * [FIX] CircleCI workflow issue * [MNT] No flake8-putty * [FLK] New flake8 error detected * [TEST] Run style check separately @leej3 said it's not fair to stop running tests for a few minor style errors, and he's usually right so.... 
* [TEST] Py37 for all non-unit test stuff --- .circleci/config.yml | 369 +++++------------- .circleci/tedana_outputs.txt | 141 ------- .dockerignore | 3 + Dockerfile_dev | 219 ++--------- Makefile | 19 + dev_requirements.txt | 6 + dev_tools/envs/py35_env.yml | 20 - dev_tools/envs/py36_env.yml | 36 -- dev_tools/envs/py37_env.yml | 20 - dev_tools/local_testing.sh | 218 ----------- dev_tools/run_tests.sh | 63 +++ requirements.txt | 15 +- setup.cfg | 6 +- tedana/tests/conftest.py | 27 ++ tedana/tests/data/tedana_outputs.txt | 57 +++ .../tests/data}/tedana_outputs_verbose.txt | 0 tedana/tests/test_integration.py | 116 ++++++ tedana/tests/test_integration_five_echo.py | 12 - tedana/tests/test_integration_three_echo.py | 12 - tedana/tests/test_io.py | 34 +- tedana/viz.py | 4 +- 21 files changed, 451 insertions(+), 946 deletions(-) delete mode 100644 .circleci/tedana_outputs.txt create mode 100644 Makefile create mode 100644 dev_requirements.txt delete mode 100644 dev_tools/envs/py35_env.yml delete mode 100644 dev_tools/envs/py36_env.yml delete mode 100644 dev_tools/envs/py37_env.yml delete mode 100644 dev_tools/local_testing.sh create mode 100644 dev_tools/run_tests.sh create mode 100644 tedana/tests/conftest.py create mode 100644 tedana/tests/data/tedana_outputs.txt rename {.circleci => tedana/tests/data}/tedana_outputs_verbose.txt (100%) create mode 100644 tedana/tests/test_integration.py delete mode 100644 tedana/tests/test_integration_five_echo.py delete mode 100644 tedana/tests/test_integration_three_echo.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 1c9eb66b2..e84694f77 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -7,302 +7,159 @@ orbs: codecov: codecov/codecov@1.0.5 jobs: - build: - working_directory: /tmp/src/tedana + unittest_35: docker: - # Ubuntu 14.04 with Docker 17.10.0-ce - image: continuumio/miniconda3 - steps: - - checkout - - persist_to_workspace: - root: /tmp - paths: - - src/tedana/ - - restore_cache: # ensure this step occurs *before* installing dependencies - key: test-v1-{{ checksum "requirements.txt" }} - - run: - name: Generate environment - command: | - if [[ -e /opt/conda/envs/venv ]]; then - echo "Restoring environment from cache" - source activate venv - else - conda create -n venv python=3.6 lxml nose cython mkl sphinx patsy boto3 pillow -yq - conda install -n venv nose-timer -c conda-forge -yq - source activate venv - pip install pytest pytest-cov codecov coverage coveralls 'flake8<3.0' flake8-putty - pip install -r requirements.txt - fi - - save_cache: - key: test-v1-{{ checksum "requirements.txt" }} - paths: - - "/opt/conda/envs/venv" - - get_data: working_directory: /tmp/src/tedana - docker: - # Ubuntu 14.04 with Docker 17.10.0-ce - - image: continuumio/miniconda3 steps: + - checkout - restore_cache: - keys: - - data-v1-{{ .Branch }} - - data-v1- + key: conda-py35-v1-{{ checksum "dev_requirements.txt" }} - run: - name: Download test three-echo data + name: Generate environment command: | - if [[ -e /tmp/data/three-echo ]]; then - echo "Restoring three-echo data from cache" - else - mkdir -p /tmp/data - apt-get install -y curl - curl -L --create-dirs -o \ - /tmp/data/three-echo/three_echo_Cornell_zcat.nii.gz https://osf.io/8fzse/download + apt-get install -y make + if [ ! 
-d /opt/conda/envs/tedana_py35 ]; then + conda create -yq -n tedana_py35 python=3.5 + source activate tedana_py35 + pip install -r dev_requirements.txt fi - run: - name: Download test five-echo data + name: Running unit tests command: | - if [[ -e /tmp/data/five-echo ]]; then - echo "Restoring five-echo data from cache" - else - mkdir /tmp/data/five-echo - apt-get install -y curl - curl -L -o five_echo_NIH.tar.xz https://osf.io/ea5v3/download - tar xf five_echo_NIH.tar.xz -C /tmp/data/five-echo - fi + source activate tedana_py35 + make unittest + mkdir /tmp/src/coverage + mv /tmp/src/tedana/.coverage /tmp/src/coverage/.coverage.py35 + - save_cache: + key: conda-py35-v1-{{ checksum "dev_requirements.txt" }} + paths: + - /opt/conda/envs/tedana_py35 - persist_to_workspace: root: /tmp paths: - - data/three-echo/ - - data/five-echo/ - - save_cache: - key: data-v1-{{ .Branch }} - paths: - - /tmp/data + - src/coverage/.coverage.py35 - get_regression_data: - working_directory: /tmp/src/tedana + unittest_36: docker: - # Ubuntu 14.04 with Docker 17.10.0-ce - image: continuumio/miniconda3 + working_directory: /tmp/src/tedana steps: + - checkout - restore_cache: - keys: - - test-v1-{{ .Branch }} - - test-v1- + key: conda-py36-v1-{{ checksum "dev_requirements.txt" }} - run: - name: Download expected output for three-echo data + name: Generate environment command: | - if [[ -e /tmp/test/three-echo ]]; then - echo "Restoring three-echo regression data from cache" - else - mkdir -p /tmp/test/three-echo/ - apt-get install -y curl - curl -L -o TED.Cornell_processed_three_echo_dataset.tar.xz https://osf.io/u65sq/download - tar xf TED.Cornell_processed_three_echo_dataset.tar.xz --no-same-owner -C /tmp/test/three-echo/ + apt-get install -y make + if [ ! -d /opt/conda/envs/tedana_py36 ]; then + conda create -yq -n tedana_py36 python=3.6 + source activate tedana_py36 + pip install -r dev_requirements.txt fi - run: - name: Download expected output for five-echo data + name: Running unit tests command: | - if [[ -e /tmp/test/five-echo ]]; then - echo "Restoring five-echo regression data from cache" - else - mkdir -p /tmp/test/five-echo/ - apt-get install -y curl - curl -L -o TED.p06.tar.xz https://osf.io/fr6mx/download - tar xf TED.p06.tar.xz --no-same-owner -C /tmp/test/five-echo/ - fi - - persist_to_workspace: - root: /tmp - paths: - - test/three-echo/ - - test/five-echo/ + source activate tedana_py36 + make unittest + mkdir /tmp/src/coverage + mv /tmp/src/tedana/.coverage /tmp/src/coverage/.coverage.py36 - save_cache: - key: test-v1-{{ .Branch }} + key: conda-py36-v1-{{ checksum "dev_requirements.txt" }} paths: - - /tmp/test - - three_echo_rest: - working_directory: /tmp/src/tedana - docker: - - image: continuumio/miniconda3 - steps: - - attach_workspace: - at: /tmp - - restore_cache: # load environment - key: test-v1-{{ checksum "requirements.txt" }} - - run: - name: Run three-echo dataset - no_output_timeout: 40m - command: | - source activate venv - python setup.py install - py.test tedana/tests/test_integration_three_echo.py \ - --cov-append --cov-report term-missing --cov=tedana - mkdir /tmp/src/coverage/ - mv /tmp/src/tedana/.coverage /tmp/src/coverage/.coverage.1 - - run: - name: Checking outputs - command: | - # find file - find /tmp/data/three-echo/TED.three-echo/* \ - -exec basename {} \; > /tmp/data/three-echo/TED.three-echo/outputs.out - # set filenames - f1=/tmp/src/tedana/.circleci/tedana_outputs.txt - f2=/tmp/data/three-echo/TED.three-echo/outputs.out - # sort both files, pipe into grep to check 
for tedana - # logfile format; should only see one - comm -13 <(sort -u $f1) <(sort -u $f2) | grep -E -e '^tedana_[12][0-9]{3}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.txt$' - numlogs=$(comm -13 <(sort -u $f1) <(sort -u $f2) | grep -E -e '^tedana_[12][0-9]{3}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.txt$' | wc -l) - if [[ ! $numlogs -eq 1 ]]; - then - printf "Incorrect number of logfiles: %s" $numlogs - fi - # verify non-log outputs match exactly - find /tmp/data/three-echo/TED.three-echo/* \ - -exec basename {} \; | grep -v outputs.out | sort > /tmp/data/three-echo/TED.three-echo/outputs.out - f3=/tmp/data/three-echo/TED.three-echo/outputs_nolog.txt - cat $f2 | grep -v -E -e '^tedana_[12][0-9]{3}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.txt$' > $f3 - diff $f1 $f3 - - store_artifacts: - path: /tmp/data/three-echo - - store_artifacts: - path: /tmp/src/coverage + - /opt/conda/envs/tedana_py36 - persist_to_workspace: root: /tmp paths: - - src/coverage/.coverage.1 + - src/coverage/.coverage.py36 - five_echo_task: - working_directory: /tmp/src/tedana + unittest_37: docker: - image: continuumio/miniconda3 + working_directory: /tmp/src/tedana steps: - - attach_workspace: - at: /tmp - - restore_cache: # load environment - key: test-v1-{{ checksum "requirements.txt" }} - - run: - name: Run five-echo dataset - no_output_timeout: 40m - command: | - source activate venv - python setup.py install - py.test tedana/tests/test_integration_five_echo.py \ - --cov-append --cov-report term-missing --cov=tedana - mkdir /tmp/src/coverage/ - mv /tmp/src/tedana/.coverage /tmp/src/coverage/.coverage.2 + - checkout + - restore_cache: + key: conda-py37-v1-{{ checksum "dev_requirements.txt" }} - run: - name: Checking outputs + name: Generate environment command: | - # find file - find /tmp/data/five-echo/TED.five-echo/* \ - -exec basename {} \; > /tmp/data/five-echo/TED.five-echo/outputs.out - # set filenames - f1=/tmp/src/tedana/.circleci/tedana_outputs_verbose.txt - f2=/tmp/data/five-echo/TED.five-echo/outputs.out - # sort both files, pipe into grep to check for tedana - # logfile format; should only see one - comm -13 <(sort -u $f1) <(sort -u $f2) | grep -E -e '^tedana_[12][0-9]{3}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.txt$' - numlogs=$(comm -13 <(sort -u $f1) <(sort -u $f2) | grep -E -e '^tedana_[12][0-9]{3}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.txt$' | wc -l) - if [[ ! $numlogs -eq 1 ]]; - then - printf "Incorrect number of logfiles: %s" $numlogs + apt-get install -y make + if [ ! 
-d /opt/conda/envs/tedana_py37 ]; then
+              conda create -yq -n tedana_py37 python=3.7
+              source activate tedana_py37
+              pip install -r dev_requirements.txt
             fi
-            # verify that non-log outputs match exactly
-            f3=/tmp/data/five-echo/TED.five-echo/outputs_nolog.txt
-            find /tmp/data/five-echo/TED.five-echo/* \
-              -exec basename {} \; | grep -v outputs.out | sort > /tmp/data/five-echo/TED.five-echo/outputs.out
-            cat $f2 | grep -v -E -e '^tedana_[12][0-9]{3}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.txt$' > $f3
-            diff $f1 $f3
-
-      - store_artifacts:
-          path: /tmp/data/five-echo
-      - store_artifacts:
-          path: /tmp/src/coverage
-      - persist_to_workspace:
-          root: /tmp
-          paths:
-            - src/coverage/.coverage.2
-
-  py36_unit_tests_and_coverage:
-    working_directory: /tmp/src/tedana
-    docker:
-      - image: continuumio/miniconda3
-    steps:
-      - attach_workspace:
-          at: /tmp
-      - restore_cache:  # load environment
-          key: test-v1-{{ checksum "requirements.txt" }}
       - run:
-          name: Run unit tests
+          name: Running unit tests
           command: |
-            source activate venv
-            python setup.py install
-            py.test --ignore-glob=tedana/tests/test_integration*.py \
-              --cov-append --cov-report term-missing --cov=tedana tedana
-            mkdir /tmp/src/coverage/
-            mv /tmp/src/tedana/.coverage /tmp/src/coverage/.coverage.3
-      - store_artifacts:
-          path: /tmp/src/coverage
+            source activate tedana_py37
+            make unittest
+            mkdir /tmp/src/coverage
+            mv /tmp/src/tedana/.coverage /tmp/src/coverage/.coverage.py37
+      - save_cache:
+          key: conda-py37-v1-{{ checksum "dev_requirements.txt" }}
+          paths:
+            - /opt/conda/envs/tedana_py37
       - persist_to_workspace:
           root: /tmp
           paths:
-            - src/coverage/.coverage.3
+            - src/coverage/.coverage.py37
 
-  py35_unit_tests:
-    working_directory: /tmp/src/tedana
+  style_check:
     docker:
       - image: continuumio/miniconda3
+    working_directory: /tmp/src/tedana
     steps:
       - checkout
+      - restore_cache:
+          key: conda-py37-v1-{{ checksum "dev_requirements.txt" }}
       - run:
           name: Generate environment
           command: |
-            conda create -n py35_env python=3.5 pytest -yq
-            source activate py35_env
-            pip install -r requirements.txt
+            apt-get install -y make
+            if [ ! -d /opt/conda/envs/tedana_py37 ]; then
+              conda create -yq -n tedana_py37 python=3.7
+              source activate tedana_py37
+              pip install -r dev_requirements.txt
+            fi
       - run:
-          name: Run unit tests
+          name: Style check
           command: |
-            source activate py35_env
-            python setup.py install
-            py.test --ignore=tedana/tests/test_integration_five_echo.py \
-              --ignore=tedana/tests/test_integration_three_echo.py tedana
+            source activate tedana_py37
+            make lint
 
-  py37_unit_tests:
+  integration:
     docker:
       - image: continuumio/miniconda3
+    working_directory: /tmp/src/tedana
     steps:
       - checkout
+      - restore_cache:
+          key: conda-py37-v1-{{ checksum "dev_requirements.txt" }}
       - run:
           name: Generate environment
           command: |
-            conda create -n py37_env python=3.7 pytest -yq
-            source activate py37_env
-            pip install -r requirements.txt
-      - run:
-          name: Run unit tests
-          command: |
-            source activate py37_env
-            python setup.py install
-            py.test --ignore-glob=tedana/tests/test_integration*.py tedana
-
-  style_check:
-    working_directory: /tmp/src/tedana
-    docker:
-      - image: continuumio/miniconda3
-    steps:
-      - attach_workspace:
-          at: /tmp
-      - restore_cache:  # load environment
-          key: test-v1-{{ checksum "requirements.txt" }}
+            apt-get install -y make
+            if [ ! -d /opt/conda/envs/tedana_py37 ]; then
+              conda create -yq -n tedana_py37 python=3.7
+              source activate tedana_py37
+              pip install -r dev_requirements.txt
+            fi
       - run:
-          name: Run style check
+          name: Run integration tests
+          no_output_timeout: 40m
           command: |
-            source activate venv
-            flake8 tedana
+            source activate tedana_py37
+            make integration
+            mkdir /tmp/src/coverage
+            mv /tmp/src/tedana/.coverage /tmp/src/coverage/.coverage.integration
+      - store_artifacts:
+          path: /tmp/data
+      - persist_to_workspace:
+          root: /tmp
+          paths:
+            - src/coverage/.coverage.integration
 
   merge_coverage:
     working_directory: /tmp/src/tedana
@@ -311,13 +168,14 @@ jobs:
     steps:
       - attach_workspace:
           at: /tmp
-      - restore_cache:  # load environment
-          key: test-v1-{{ checksum "requirements.txt" }}
+      - checkout
+      - restore_cache:
+          key: conda-py37-v1-{{ checksum "dev_requirements.txt" }}
       - run:
           name: Merge coverage files
           command: |
             apt-get install -y curl
-            source activate venv
+            source activate tedana_py37
             cd /tmp/src/coverage/
             coverage combine
             coverage xml
@@ -330,29 +188,14 @@ workflows:
   version: 2.1
   build_test:
     jobs:
-      - build
-      - get_data
-      - get_regression_data
-      - py35_unit_tests
-      - py36_unit_tests_and_coverage:
-          requires:
-            - build
-      - py37_unit_tests
-      - style_check:
-          requires:
-            - build
-      - three_echo_rest:
-          requires:
-            - build
-            - get_data
-            - get_regression_data
-      - five_echo_task:
-          requires:
-            - build
-            - get_data
-            - get_regression_data
+      - unittest_35
+      - unittest_36
+      - unittest_37
+      - style_check
+      - integration
       - merge_coverage:
           requires:
-            - py36_unit_tests_and_coverage
-            - three_echo_rest
-            - five_echo_task
+            - unittest_35
+            - unittest_36
+            - unittest_37
+            - integration
diff --git a/.circleci/tedana_outputs.txt b/.circleci/tedana_outputs.txt
deleted file mode 100644
index e822ddb2d..000000000
--- a/.circleci/tedana_outputs.txt
+++ /dev/null
@@ -1,141 +0,0 @@
-Component_Overview.png
-Kappa_vs_Rho_Scatter.png
-betas_OC.nii.gz
-betas_hik_OC.nii.gz
-comp_000.png
-comp_001.png
-comp_002.png
-comp_003.png
-comp_004.png
-comp_005.png
-comp_006.png
-comp_007.png
-comp_008.png
-comp_009.png
-comp_010.png
-comp_011.png
-comp_012.png
-comp_013.png
-comp_014.png
-comp_015.png
-comp_016.png
-comp_017.png
-comp_018.png
-comp_019.png
-comp_020.png
-comp_021.png
-comp_022.png
-comp_023.png
-comp_024.png
-comp_025.png
-comp_026.png
-comp_027.png
-comp_028.png
-comp_029.png
-comp_030.png
-comp_031.png
-comp_032.png
-comp_033.png
-comp_034.png
-comp_035.png
-comp_036.png
-comp_037.png
-comp_038.png
-comp_039.png
-comp_040.png
-comp_041.png
-comp_042.png
-comp_043.png
-comp_044.png
-comp_045.png
-comp_046.png
-comp_047.png
-comp_048.png
-comp_049.png
-comp_050.png
-comp_051.png
-comp_052.png
-comp_053.png
-comp_054.png
-comp_055.png
-comp_056.png
-comp_057.png
-comp_058.png
-comp_059.png
-comp_060.png
-comp_061.png
-comp_062.png
-comp_063.png
-comp_064.png
-comp_065.png
-comp_066.png
-comp_067.png
-comp_068.png
-comp_069.png
-comp_070.png
-comp_071.png
-comp_072.png
-comp_073.png
-comp_074.png
-comp_075.png
-comp_076.png
-comp_077.png
-comp_078.png
-comp_079.png
-comp_080.png
-comp_081.png
-comp_082.png
-comp_083.png
-comp_084.png
-comp_085.png
-comp_086.png
-comp_087.png
-comp_088.png
-comp_089.png
-comp_090.png
-comp_091.png
-comp_092.png
-comp_093.png
-comp_094.png
-comp_095.png
-comp_096.png
-comp_097.png
-comp_098.png
-comp_099.png
-comp_100.png
-comp_101.png
-comp_102.png
-comp_103.png
-comp_104.png
-comp_105.png
-comp_106.png
-comp_107.png
-comp_108.png
-comp_109.png
-comp_110.png
-comp_111.png
-comp_112.png
-comp_113.png -comp_114.png -comp_115.png -comp_116.png -comp_117.png -comp_118.png -comp_119.png -comp_120.png -comp_121.png -comp_122.png -comp_table_ica.txt -comp_table_pca.txt -dn_ts_OC.nii.gz -feats_OC2.nii.gz -figures -hik_ts_OC.nii.gz -lowk_ts_OC.nii.gz -meica_mix.1D -mepca_OC_components.nii.gz -mepca_mix.1D -report.txt -s0v.nii.gz -t2sv.nii.gz -ts_OC.nii.gz diff --git a/.dockerignore b/.dockerignore index 3f77a7624..3001c6108 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1 +1,4 @@ tedana/ +dist/ +.git/ +docs/ diff --git a/Dockerfile_dev b/Dockerfile_dev index 67e84c6bf..780f385ad 100644 --- a/Dockerfile_dev +++ b/Dockerfile_dev @@ -1,16 +1,4 @@ -# Your version: 0.6.0 Latest version: 0.6.0 -# Generated by Neurodocker version 0.6.0 -# Timestamp: 2019-11-06 17:06:07 UTC -# -# Thank you for using Neurodocker. If you discover any issues -# or ways to improve this software, please submit an issue or -# pull request on our GitHub repository: -# -# https://github.com/kaczmarj/neurodocker - -FROM debian:latest - -ARG DEBIAN_FRONTEND="noninteractive" +FROM continuumio/miniconda3 ENV LANG="en_US.UTF-8" \ LC_ALL="en_US.UTF-8" \ @@ -24,6 +12,13 @@ RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \ curl \ locales \ unzip \ + curl \ + git \ + wget \ + gzip \ + bzip2 \ + sed \ + make \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* \ && sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen \ @@ -44,196 +39,30 @@ ENTRYPOINT ["/neurodocker/startup.sh"] ENV LANG="C.UTF-8" \ LC_ALL="C.UTF-8" -RUN apt-get update -qq \ - && apt-get install -y -q --no-install-recommends \ - curl \ - git \ - wget \ - bzip2 \ - ca-certificates \ - sed \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -RUN mkdir -p /dev_tools/envs /tedana - -COPY ["./dev_tools/envs/py35_env.yml", "/dev_tools/envs/py35_env.yml"] - -COPY ["./dev_tools/envs/py36_env.yml", "/dev_tools/envs/py36_env.yml"] +RUN git clone https://github.com/me-ica/tedana.git /tedana -COPY ["./dev_tools/envs/py37_env.yml", "/dev_tools/envs/py37_env.yml"] +COPY ["./requirements.txt", "/tedana/requirements.txt"] -ENV CONDA_DIR="/opt/conda" \ - PATH="/opt/conda/bin:$PATH" -RUN export PATH="/opt/conda/bin:$PATH" \ - && echo "Downloading Miniconda installer ..." 
\ - && conda_installer="/tmp/miniconda.sh" \ - && curl -fsSL --retry 5 -o "$conda_installer" https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \ - && bash "$conda_installer" -b -p /opt/conda \ - && rm -f "$conda_installer" \ - && conda update -yq -nbase conda \ - && conda config --system --prepend channels conda-forge \ - && conda config --system --set auto_update_conda false \ - && conda config --system --set show_channel_urls true \ - && sync && conda clean --all && sync \ - && conda env create -q --name tedana_py35 --file /dev_tools/envs/py35_env.yml \ - && rm -rf ~/.cache/pip/* +COPY ["./dev_requirements.txt", "/tedana/dev_requirements.txt"] -RUN conda env create -q --name tedana_py36 --file /dev_tools/envs/py36_env.yml \ - && rm -rf ~/.cache/pip/* +RUN bash -c "conda create -yq --name tedana_py36 python=3.6 pip \ + && source activate tedana_py36 \ + && pip install -r /tedana/dev_requirements.txt \ + && pip install ipython \ + && rm -rf ~/.cache/pip/* \ + && conda clean --all" -RUN conda env create -q --name tedana_py37 --file /dev_tools/envs/py37_env.yml \ - && rm -rf ~/.cache/pip/* +RUN /opt/conda/envs/tedana_py36/bin/ipython profile create \ + && sed -i 's/#c.InteractiveShellApp.extensions = \[\]/ \ + c.InteractiveShellApp.extensions = \['\''autoreload'\''\]/g' \ + /root/.ipython/profile_default/ipython_config.py -RUN \ - mkdir -p /data/three-echo \ - && curl -L -o /data/three-echo/three_echo_Cornell_zcat.nii.gz https://osf.io/8fzse/download +RUN mkdir -p /tedana/dev_tools -RUN \ - mkdir /data/five-echo \ - && curl -L -o five_echo_NIH.tar.xz https://osf.io/ea5v3/download \ - && tar xf five_echo_NIH.tar.xz -C /data/five-echo \ - && rm -f five_echo_NIH.tar.xz - -RUN \ - mkdir -p /data/test/three-echo \ - && curl -L -o TED.Cornell_processed_three_echo_dataset.tar.xz https://osf.io/u65sq/download \ - && tar xf TED.Cornell_processed_three_echo_dataset.tar.xz --no-same-owner -C /data/test/three-echo/ \ - && rm -f TED.Cornell_processed_three_echo_dataset.tar.xz - -RUN \ - mkdir -p /data/test/five-echo \ - && curl -L -o TED.p06.tar.xz https://osf.io/fr6mx/download \ - && tar xf TED.p06.tar.xz --no-same-owner -C /data/test/five-echo/ \ - && rm -f TED.p06.tar.xz - -RUN \ - /opt/conda/envs/tedana_py36/bin/ipython profile create \ - && sed -i 's/#c.InteractiveShellApp.extensions = \[\]/c.InteractiveShellApp.extensions = \['\''autoreload'\''\]/g' /root/.ipython/profile_default/ipython_config.py - -COPY ["./dev_tools/local_testing.sh", "/dev_tools/local_testing.sh"] +COPY ["./dev_tools", "/tedana/dev_tools"] RUN sed -i '$isource activate tedana_py36' $ND_ENTRYPOINT -RUN sed -i '$isource /dev_tools/local_testing.sh' $ND_ENTRYPOINT +RUN sed -i '$isource /tedana/dev_tools/run_tests.sh' $ND_ENTRYPOINT WORKDIR /tedana - -RUN echo '{ \ - \n "pkg_manager": "apt", \ - \n "instructions": [ \ - \n [ \ - \n "base", \ - \n "debian:latest" \ - \n ], \ - \n [ \ - \n "env", \ - \n { \ - \n "LANG": "C.UTF-8", \ - \n "LC_ALL": "C.UTF-8" \ - \n } \ - \n ], \ - \n [ \ - \n "install", \ - \n [ \ - \n "curl", \ - \n "git", \ - \n "wget", \ - \n "bzip2", \ - \n "ca-certificates", \ - \n "sed" \ - \n ] \ - \n ], \ - \n [ \ - \n "run", \ - \n "mkdir -p /dev_tools/envs /tedana" \ - \n ], \ - \n [ \ - \n "copy", \ - \n [ \ - \n "./dev_tools/envs/py35_env.yml", \ - \n "/dev_tools/envs/py35_env.yml" \ - \n ] \ - \n ], \ - \n [ \ - \n "copy", \ - \n [ \ - \n "./dev_tools/envs/py36_env.yml", \ - \n "/dev_tools/envs/py36_env.yml" \ - \n ] \ - \n ], \ - \n [ \ - \n "copy", \ - \n [ \ - \n 
"./dev_tools/envs/py37_env.yml", \ - \n "/dev_tools/envs/py37_env.yml" \ - \n ] \ - \n ], \ - \n [ \ - \n "miniconda", \ - \n { \ - \n "create_env": "tedana_py35", \ - \n "install_path": "/opt/conda", \ - \n "yaml_file": "/dev_tools/envs/py35_env.yml", \ - \n "activate_env": "false" \ - \n } \ - \n ], \ - \n [ \ - \n "miniconda", \ - \n { \ - \n "create_env": "tedana_py36", \ - \n "install_path": "/opt/conda", \ - \n "yaml_file": "/dev_tools/envs/py36_env.yml", \ - \n "activate_env": "true" \ - \n } \ - \n ], \ - \n [ \ - \n "miniconda", \ - \n { \ - \n "create_env": "tedana_py37", \ - \n "install_path": "/opt/conda", \ - \n "yaml_file": "/dev_tools/envs/py37_env.yml", \ - \n "activate_env": "false" \ - \n } \ - \n ], \ - \n [ \ - \n "run", \ - \n "\\n mkdir -p /data/three-echo\\n && curl -L -o /data/three-echo/three_echo_Cornell_zcat.nii.gz https://osf.io/8fzse/download" \ - \n ], \ - \n [ \ - \n "run", \ - \n "\\n mkdir /data/five-echo\\n && curl -L -o five_echo_NIH.tar.xz https://osf.io/ea5v3/download\\n && tar xf five_echo_NIH.tar.xz -C /data/five-echo\\n && rm -f five_echo_NIH.tar.xz" \ - \n ], \ - \n [ \ - \n "run", \ - \n "\\n mkdir -p /data/test/three-echo\\n && curl -L -o TED.Cornell_processed_three_echo_dataset.tar.xz https://osf.io/u65sq/download\\n && tar xf TED.Cornell_processed_three_echo_dataset.tar.xz --no-same-owner -C /data/test/three-echo/\\n && rm -f TED.Cornell_processed_three_echo_dataset.tar.xz" \ - \n ], \ - \n [ \ - \n "run", \ - \n "\\n mkdir -p /data/test/five-echo\\n && curl -L -o TED.p06.tar.xz https://osf.io/fr6mx/download\\n && tar xf TED.p06.tar.xz --no-same-owner -C /data/test/five-echo/\\n && rm -f TED.p06.tar.xz" \ - \n ], \ - \n [ \ - \n "run", \ - \n "\\n /opt/conda/envs/tedana_py36/bin/ipython profile create\\n && sed -i '"'"'s/#c.InteractiveShellApp.extensions = \\[\\]/c.InteractiveShellApp.extensions = \\['"'"'\\'"'"''"'"'autoreload'"'"'\\'"'"''"'"'\\]/g'"'"' /root/.ipython/profile_default/ipython_config.py" \ - \n ], \ - \n [ \ - \n "copy", \ - \n [ \ - \n "./dev_tools/local_testing.sh", \ - \n "/dev_tools/local_testing.sh" \ - \n ] \ - \n ], \ - \n [ \ - \n "add_to_entrypoint", \ - \n "source activate tedana_py36" \ - \n ], \ - \n [ \ - \n "add_to_entrypoint", \ - \n "source /dev_tools/local_testing.sh" \ - \n ], \ - \n [ \ - \n "workdir", \ - \n "/tedana" \ - \n ] \ - \n ] \ - \n}' > /neurodocker/neurodocker_specs.json diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..4ad309fd7 --- /dev/null +++ b/Makefile @@ -0,0 +1,19 @@ +.PHONY: all lint + +all_tests: lint unittest integration + +help: + @echo "Please use 'make ' where is one of:" + @echo " lint to run flake8 on all Python files" + @echo " unittest to run unit tests on tedana" + @echo " integration to run integration tests on tedana" + @echo " all_tests to run 'lint', 'unittest', and 'integration'" + +lint: + @flake8 tedana + +unittest: + @py.test --skipintegration --cov-append --cov-report term-missing --cov=tedana tedana/ + +integration: + @py.test --cov-append --cov-report term-missing --cov=tedana tedana/tests/test_integration.py diff --git a/dev_requirements.txt b/dev_requirements.txt new file mode 100644 index 000000000..0766e720f --- /dev/null +++ b/dev_requirements.txt @@ -0,0 +1,6 @@ +-r requirements.txt +sphinx +coverage +flake8>=3.7 +pytest +pytest-cov diff --git a/dev_tools/envs/py35_env.yml b/dev_tools/envs/py35_env.yml deleted file mode 100644 index f1d7ef0d5..000000000 --- a/dev_tools/envs/py35_env.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: tedana_py35 
-channels: - - defaults - - conda-forge -dependencies: - - python=3.5 - - pip - - pip: - - duecredit - - matplotlib - - nilearn - - nibabel>=2.1.0 - - numpy>=1.14 - - numpydoc - - pandas - - pytest - - scikit-learn - - scipy - - sphinx-argparse - - versioneer diff --git a/dev_tools/envs/py36_env.yml b/dev_tools/envs/py36_env.yml deleted file mode 100644 index 6c79f5c83..000000000 --- a/dev_tools/envs/py36_env.yml +++ /dev/null @@ -1,36 +0,0 @@ -name: tedana_py36 -channels: - - defaults - - conda-forge -dependencies: - - python=3.6 - - boto3 - - cython - - ipython - - lxml - - mkl - - nose - - nose-timer - - patsy - - pillow - - pip - - sphinx - - pip: - - codecov - - coverage - - coveralls - - duecredit - - flake8<3.0 - - flake8-putty - - matplotlib - - nibabel>=2.1.0 - - nilearn - - numpy>=1.14 - - numpydoc - - pandas - - pytest - - pytest-cov - - scikit-learn - - scipy - - sphinx-argparse - - versioneer diff --git a/dev_tools/envs/py37_env.yml b/dev_tools/envs/py37_env.yml deleted file mode 100644 index 59433d058..000000000 --- a/dev_tools/envs/py37_env.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: tedana_py37 -channels: - - defaults - - conda-forge -dependencies: - - python=3.7 - - pip - - pip: - - duecredit - - matplotlib - - nilearn - - nibabel>=2.1.0 - - numpy>=1.14 - - numpydoc - - pandas - - pytest - - scikit-learn - - scipy - - sphinx-argparse - - versioneer diff --git a/dev_tools/local_testing.sh b/dev_tools/local_testing.sh deleted file mode 100644 index 457c04b3c..000000000 --- a/dev_tools/local_testing.sh +++ /dev/null @@ -1,218 +0,0 @@ -#!/usr/bin/env bash - -generate_tedana_dockerfile() { - # - # Generates Dockerfile to build Docker image for local tedana testing - # - - get_three_echo_data=' - mkdir -p /data/three-echo - && curl -L -o /data/three-echo/three_echo_Cornell_zcat.nii.gz https://osf.io/8fzse/download' - get_five_echo_data=' - mkdir /data/five-echo - && curl -L -o five_echo_NIH.tar.xz https://osf.io/ea5v3/download - && tar xf five_echo_NIH.tar.xz -C /data/five-echo - && rm -f five_echo_NIH.tar.xz' - get_three_echo_reg=' - mkdir -p /data/test/three-echo - && curl -L -o TED.Cornell_processed_three_echo_dataset.tar.xz https://osf.io/u65sq/download - && tar xf TED.Cornell_processed_three_echo_dataset.tar.xz --no-same-owner -C /data/test/three-echo/ - && rm -f TED.Cornell_processed_three_echo_dataset.tar.xz' - get_five_echo_reg=' - mkdir -p /data/test/five-echo - && curl -L -o TED.p06.tar.xz https://osf.io/fr6mx/download - && tar xf TED.p06.tar.xz --no-same-owner -C /data/test/five-echo/ - && rm -f TED.p06.tar.xz' - generate_ipython_config=" - /opt/conda/envs/tedana_py36/bin/ipython profile create - && sed -i 's/#c.InteractiveShellApp.extensions = \[\]/c.InteractiveShellApp.extensions = \['\''autoreload'\''\]/g' /root/.ipython/profile_default/ipython_config.py" - - docker run --rm kaczmarj/neurodocker:0.6.0 generate docker \ - --base debian:latest \ - --pkg-manager apt \ - --env LANG=C.UTF-8 LC_ALL=C.UTF-8 \ - --install curl git wget bzip2 ca-certificates sed \ - --run "mkdir -p /dev_tools/envs /tedana" \ - --copy ./dev_tools/envs/py35_env.yml /dev_tools/envs/py35_env.yml \ - --copy ./dev_tools/envs/py36_env.yml /dev_tools/envs/py36_env.yml \ - --copy ./dev_tools/envs/py37_env.yml /dev_tools/envs/py37_env.yml \ - --miniconda create_env=tedana_py35 \ - install_path=/opt/conda \ - yaml_file=/dev_tools/envs/py35_env.yml \ - activate_env=false \ - --miniconda create_env=tedana_py36 \ - install_path=/opt/conda \ - yaml_file=/dev_tools/envs/py36_env.yml \ - activate_env=true \ - 
--miniconda create_env=tedana_py37 \ - install_path=/opt/conda \ - yaml_file=/dev_tools/envs/py37_env.yml \ - activate_env=false \ - --run "${get_three_echo_data}" \ - --run "${get_five_echo_data}" \ - --run "${get_three_echo_reg}" \ - --run "${get_five_echo_reg}" \ - --run "${generate_ipython_config}" \ - --copy "./dev_tools/local_testing.sh" /dev_tools/local_testing.sh \ - --add-to-entrypoint "source activate tedana_py36" \ - --add-to-entrypoint "source /dev_tools/local_testing.sh" \ - --workdir /tedana \ - > ./Dockerfile_dev -} - - -build_tedana_image() { - # - # Recreates local Dockerfile and builds tedana/tedana-dev:local Docker image - # - - if [ ! -z "${1}" ]; then - tag="${1}" - else - tag=local - fi - - generate_tedana_dockerfile - docker build --tag tedana/tedana-dev:${tag} -f Dockerfile_dev . -} - - -cprint() { - # - # Prints all supplied arguments as a green string - # - - if [[ -t 0 ]]; then - COLS=$( tput cols ) - else - COLS=80 - fi - - msg=${*} - eq=$( python -c "print('=' * ((${COLS} - len('${msg}') - 4) // 2))" ) - python -c "print('\033[1m\033[92m${eq} ${msg} ${eq}\033[0m')" -} - - -_check_tedana_outputs() { - # - # Runs tedana unit tests for specified Python version / virtual environment - # - # Required argments: - # dataset name of dataset to use for testing. should be one of - # [three-echo, five-echo] - - # confirm specification of three-echo or five-echo input - if [ -z "${1}" ] || { [ "${1}" != "three-echo" ] && [ "${1}" != "five-echo" ]; }; then - printf 'Must supply dataset name for checking integration test ' >&2 - printf 'outputs; must be one of [three-echo, five-echo]\n' >&2 - return - fi - - # find file - find /data/"${1}"/TED."${1}"/* \ - -exec basename {} \; > /data/"${1}"/TED."${1}"/outputs.out - - # set filenames - f1=/tedana/.circleci/tedana_outputs.txt - f2=/data/"${1}"/TED."${1}"/outputs.out - - # sort both files, pipe into grep to check for tedana - # logfile format; should only see one - comm -13 <(sort -u $f1) <(sort -u $f2) | grep -E -e '^tedana_[12][0-9]{3}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.txt$' - numlogs=$(comm -13 <(sort -u $f1) <(sort -u $f2) | grep -E -e '^tedana_[12][0-9]{3}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.txt$' | wc -l) - if [[ ! $numlogs -eq 1 ]]; then - printf "Incorrect number of logfiles: %s" $numlogs - fi - - # verify non-log outputs match exactly - f3=/data/"${1}"/TED."${1}"/outputs_nolog.txt - find /data/"${1}"/TED."${1}"/* \ - -exec basename {} \; | grep -v outputs.out | sort > /data/"${1}"/TED."${1}"/outputs.out - cat $f2 | grep -v -E -e '^tedana_[12][0-9]{3}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.txt$' > $f3 - diff $f1 $f3 -} - - -_run_integration_test() { - # - # Runs tedana integration tests for specified dataset - # - # Required argments: - # dataset name of dataset to use for testing. 
should be one of - # [three-echo, five-echo] - - if [ -z "${1}" ] || { [ "${1}" != "three-echo" ] && [ "${1}" != "five-echo" ]; }; then - printf 'Must supply dataset name for running integration test; ' >&2 - printf 'must be one of [three-echo, five-echo]\n' >&2 - return - fi - ds=${1} - cprint "RUNNING INTEGRATION TESTS FOR DATASET: ${ds}" - source activate tedana_py36 - python setup.py -q install - py.test tedana/tests/test_integration_${ds/-/_}.py - _check_tedana_outputs "${ds}" -} - - -run_integration_tests() { - # - # Runs tedana integration tests for both three-echo and five-echo datasets - # - - for ds in three-echo five-echo; do - _run_integration_test ${ds} - done -} - - -run_unit_tests() { - # - # Runs tedana unit tests for Python 3.5, 3.6, and 3.7 environments - # - - for pyenv in tedana_py35 tedana_py36 tedana_py37; do - cprint "RUNNING UNIT TESTS FOR PYTHON ENVIRONMENT: ${pyenv}" - source activate ${pyenv} - python setup.py -q install - py.test --ignore-glob=tedana/tests/test_integration*.py tedana - done -} - - -run_lint_tests() { - # - # Lints the tedana codebase - # - - cprint "LINTING TEDANA CODEBASE" - source activate tedana_py36 - flake8 tedana -} - - -run_tests() { - # - # Runs tedana test suite EXCLUDING FIVE-ECHO TEST - # - - run_lint_tests - run_unit_tests - _run_integration_test three-echo - - cprint "FINISHED RUNNING TESTS! GREAT SUCCESS" -} - - -run_all_tests() { - # - # Runs entire tedana test suite - # - - run_lint_tests - run_unit_tests - run_integration_tests - - cprint "FINISHED RUNNING ALL TESTS! GREAT SUCCESS" -} diff --git a/dev_tools/run_tests.sh b/dev_tools/run_tests.sh new file mode 100644 index 000000000..a6fd7625b --- /dev/null +++ b/dev_tools/run_tests.sh @@ -0,0 +1,63 @@ +#!/usr/bin/env bash + +cprint() { + # + # Prints all supplied arguments as a bold, green string + # + + if [[ -t 0 ]] && [ ! -z "${TERM}" ]; then + COLS=$( tput -T screen cols ) + else + COLS=80 + fi + + msg="${*}" + eq=$( python -c "print('=' * ((${COLS} - len('${msg}') - 4) // 2))" ) + python -c "print('\033[1m\033[92m${eq} ${msg} ${eq}\033[0m')" +} + + +run_integration_tests() { + # + # Runs tedana integration tests + # + + cprint "RUNNING INTEGRATION TESTS" + make integration + cprint "INTEGRATION TESTS PASSED !" +} + + +run_unit_tests() { + # + # Runs tedana unit tests + # + + cprint "RUNNING UNIT TESTS" + make unittest + cprint "UNIT TESTS PASSED !" +} + + +run_lint_tests() { + # + # Lints the tedana codebase + # + + cprint "RUNNING FLAKE8 TO LINT CODEBASE" + make lint + cprint "CODEBASE LINTED SUCCESSFULLY !" +} + + +run_all_tests() { + # + # Runs tedana test suite + # + + run_lint_tests + run_unit_tests + run_integration_tests + + cprint "FINISHED RUNNING ALL TESTS -- GREAT SUCCESS !" 
+} diff --git a/requirements.txt b/requirements.txt index 4b4303b94..23345c00b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,11 +1,12 @@ +duecredit +matplotlib +nibabel>=2.1.0 +nilearn numpy>=1.14 +numpydoc +pandas +requests scikit-learn scipy -nilearn -nibabel>=2.1.0 -versioneer -pandas -matplotlib -duecredit sphinx-argparse -numpydoc +versioneer diff --git a/setup.cfg b/setup.cfg index 2d9d8378e..836fbe363 100644 --- a/setup.cfg +++ b/setup.cfg @@ -9,6 +9,6 @@ parentdir_prefix = [flake8] max-line-length = 99 exclude=*build/,tests -putty-ignore = - */__init__.py : +F401 -ignore = E126,E402 +ignore = E126,E402,W504 +per-file-ignores = + */__init__.py:F401 diff --git a/tedana/tests/conftest.py b/tedana/tests/conftest.py new file mode 100644 index 000000000..6725a08b0 --- /dev/null +++ b/tedana/tests/conftest.py @@ -0,0 +1,27 @@ +import pytest + + +def pytest_addoption(parser): + parser.addoption('--skipintegration', action='store_true', + default=False, help='Skip integration tests.') + + +@pytest.fixture +def skip_integration(request): + return request.config.getoption('--skipintegration') + + +def three_echo_location(): + return 'three_echo_location' + + +def three_echo_outputs(): + return 'three_echo_outputs' + + +def five_echo_location(): + return 'five_echo_location' + + +def five_echo_outputs(): + return 'five_echo_outputs' diff --git a/tedana/tests/data/tedana_outputs.txt b/tedana/tests/data/tedana_outputs.txt new file mode 100644 index 000000000..45d2efd61 --- /dev/null +++ b/tedana/tests/data/tedana_outputs.txt @@ -0,0 +1,57 @@ +figures/Component_Overview.png +figures/Kappa_vs_Rho_Scatter.png +betas_OC.nii.gz +betas_hik_OC.nii.gz +figures/comp_000.png +figures/comp_001.png +figures/comp_002.png +figures/comp_003.png +figures/comp_004.png +figures/comp_005.png +figures/comp_006.png +figures/comp_007.png +figures/comp_008.png +figures/comp_009.png +figures/comp_010.png +figures/comp_011.png +figures/comp_012.png +figures/comp_013.png +figures/comp_014.png +figures/comp_015.png +figures/comp_016.png +figures/comp_017.png +figures/comp_018.png +figures/comp_019.png +figures/comp_020.png +figures/comp_021.png +figures/comp_022.png +figures/comp_023.png +figures/comp_024.png +figures/comp_025.png +figures/comp_026.png +figures/comp_027.png +figures/comp_028.png +figures/comp_029.png +figures/comp_030.png +figures/comp_031.png +figures/comp_032.png +figures/comp_033.png +figures/comp_034.png +figures/comp_035.png +figures/comp_036.png +figures/comp_037.png +figures/comp_038.png +comp_table_ica.txt +comp_table_pca.txt +dn_ts_OC.nii.gz +feats_OC2.nii.gz +figures +hik_ts_OC.nii.gz +lowk_ts_OC.nii.gz +meica_mix.1D +mepca_OC_components.nii.gz +mepca_mix.1D +report.txt +s0v.nii.gz +t2sv.nii.gz +ts_OC.nii.gz diff --git a/.circleci/tedana_outputs_verbose.txt b/tedana/tests/data/tedana_outputs_verbose.txt similarity index 100% rename from .circleci/tedana_outputs_verbose.txt rename to tedana/tests/data/tedana_outputs_verbose.txt diff --git a/tedana/tests/test_integration.py b/tedana/tests/test_integration.py new file mode 100644 index 000000000..1bf216596 --- /dev/null +++ b/tedana/tests/test_integration.py @@ -0,0 +1,116 @@ +""" +Integration tests for "real" data +""" + +from io import BytesIO +import glob +from gzip import GzipFile +import os +from pkg_resources import resource_filename +import re +import shutil +import tarfile + +import pytest +import requests + +from tedana.workflows import tedana_workflow + + +def check_integration_outputs(fname, outpath): + """ + Checks outputs 
of integration tests + + Parameters + ---------- + fname : str + Path to file with expected outputs + outpath : str + Path to output directory generated from integration tests + """ + + # Gets filepaths generated by integration test + existing = [os.path.relpath(f, outpath) for f in + glob.glob(os.path.join(outpath, '**'), recursive=True)[1:]] + + # Checks for log file + log_regex = ('^tedana_' + '[12][0-9]{3}-[0-9]{2}-[0-9]{2}T[0-9]{2}:' + '[0-9]{2}:[0-9]{2}.txt$') + logfiles = [out for out in existing if re.match(log_regex, out)] + assert len(logfiles) == 1 + + # Removes logfile from list of existing files + existing.remove(logfiles[0]) + + # Compares remaining files with those expected + with open(fname, 'r') as f: + tocheck = f.read().splitlines() + assert sorted(tocheck) == sorted(existing) + + +def download_test_data(osf, outpath): + """ + Downloads tar.gz data stored at `osf` and unpacks into `outpath` + + Parameters + ---------- + osf : str + URL to OSF file that contains data to be downloaded + outpath : str + Path to directory where OSF data should be extracted + """ + + req = requests.get(osf) + req.raise_for_status() + t = tarfile.open(fileobj=GzipFile(fileobj=BytesIO(req.content))) + os.makedirs(outpath, exist_ok=True) + t.extractall(outpath) + + +def test_integration_five_echo(skip_integration): + """ Integration test of the full tedana workflow using five-echo test data + """ + + if skip_integration: + pytest.skip('Skipping five-echo integration test') + out_dir = '/tmp/data/five-echo/TED.five-echo' + if os.path.exists(out_dir): + shutil.rmtree(out_dir) + + # download data and run the test + download_test_data('https://osf.io/9c42e/download', + os.path.dirname(out_dir)) + tedana_workflow( + data='/tmp/data/five-echo/p06.SBJ01_S09_Task11_e[1,2,3,4,5].sm.nii.gz', + tes=[15.4, 29.7, 44.0, 58.3, 72.6], + out_dir=out_dir, + debug=True, verbose=True) + + # compare the generated output files + fn = resource_filename('tedana', 'tests/data/tedana_outputs_verbose.txt') + check_integration_outputs(fn, out_dir) + + +def test_integration_three_echo(skip_integration): + """ Integration test of the full tedana workflow using three-echo test data + """ + + if skip_integration: + pytest.skip('Skipping three-echo integration test') + out_dir = '/tmp/data/three-echo/TED.three-echo' + if os.path.exists(out_dir): + shutil.rmtree(out_dir) + + # download data and run the test + download_test_data('https://osf.io/rqhfc/download', + os.path.dirname(out_dir)) + tedana_workflow( + data='/tmp/data/three-echo/three_echo_Cornell_zcat.nii.gz', + tes=[14.5, 38.5, 62.5], + out_dir=out_dir, + tedpca='kundu', png=True) + + # compare the generated output files + fn = resource_filename('tedana', 'tests/data/tedana_outputs.txt') + check_integration_outputs(fn, out_dir) diff --git a/tedana/tests/test_integration_five_echo.py b/tedana/tests/test_integration_five_echo.py deleted file mode 100644 index 49b1112ff..000000000 --- a/tedana/tests/test_integration_five_echo.py +++ /dev/null @@ -1,12 +0,0 @@ -from tedana.workflows import tedana_workflow - - -def test_integration_five_echo(): - """ - An integration test of the full tedana workflow using five-echo test data. 
- """ - tedana_workflow( - data='/tmp/data/five-echo/p06.SBJ01_S09_Task11_e[1,2,3,4,5].sm.nii.gz', - tes=[15.4, 29.7, 44.0, 58.3, 72.6], - out_dir='/tmp/data/five-echo/TED.five-echo', - debug=True, verbose=True) diff --git a/tedana/tests/test_integration_three_echo.py b/tedana/tests/test_integration_three_echo.py deleted file mode 100644 index b00b13006..000000000 --- a/tedana/tests/test_integration_three_echo.py +++ /dev/null @@ -1,12 +0,0 @@ -from tedana.workflows import tedana_workflow - - -def test_integration_three_echo(): - """ - An integration test of the full tedana workflow using three-echo test data. - """ - tedana_workflow( - data='/tmp/data/three-echo/three_echo_Cornell_zcat.nii.gz', - tes=[14.5, 38.5, 62.5], - out_dir='/tmp/data/three-echo/TED.three-echo', - tedpca='kundu', png=True) diff --git a/tedana/tests/test_io.py b/tedana/tests/test_io.py index 214140e0d..b1ccc2683 100644 --- a/tedana/tests/test_io.py +++ b/tedana/tests/test_io.py @@ -12,7 +12,7 @@ from tedana.tests.utils import get_test_data_path -import os +import os data_dir = get_test_data_path() @@ -62,13 +62,13 @@ def test_smoke_split_ts(): Note: classification is ["accepted", "rejected", "ignored"] """ np.random.seed(0) # seeded because comptable MUST have accepted components - n_samples = 100 + n_samples = 100 n_times = 20 n_components = 6 data = np.random.random((n_samples, n_times)) mmix = np.random.random((n_times, n_components)) mask = np.random.randint(2, size=n_samples) - + # creating the component table with component as random floats, a "metric," and random classification component = np.random.random((n_components)) metric = np.random.random((n_components)) @@ -82,7 +82,7 @@ def test_smoke_split_ts(): assert resid is not None -def test_smoke_write_split_ts(): +def test_smoke_write_split_ts(): """ Ensures that write_split_ts writes out the expected files with random input and tear them down """ @@ -101,12 +101,12 @@ def test_smoke_write_split_ts(): comptable = pd.DataFrame(df_data, columns=['component', 'metric', 'classification']) assert me.write_split_ts(data, mmix, mask, comptable, ref_img) is not None - + # TODO: midk_ts.nii is never generated? 
- for filename in ["hik_ts_.nii", "lowk_ts_.nii", "dn_ts_.nii"]: # remove all files generated - try: + for filename in ["hik_ts_.nii.gz", "lowk_ts_.nii.gz", "dn_ts_.nii.gz"]: # remove all files generated + try: os.remove(filename) - except OSError: + except OSError: print(filename + " not generated") pass @@ -122,10 +122,10 @@ def test_smoke_writefeats(): ref_img = os.path.join(data_dir, 'mask.nii.gz') assert me.writefeats(data, mmix, mask, ref_img) is not None - - # this only generates feats_.nii, so delete that - try: - os.remove("feats_.nii") + + # this only generates feats_.nii, so delete that + try: + os.remove("feats_.nii.gz") except OSError: print("feats_.nii not generated") pass @@ -144,15 +144,15 @@ def test_smoke_filewrite(): assert me.filewrite(data_1d, filename, ref_img) is not None assert me.filewrite(data_2d, filename, ref_img) is not None - try: - os.remove(".nii") - except OSError: + try: + os.remove(".nii.gz") + except OSError: print(".nii not generated") def test_smoke_load_data(): - """ - Ensures that data is loaded when given a random neuroimage + """ + Ensures that data is loaded when given a random neuroimage """ data = os.path.join(data_dir, 'mask.nii.gz') n_echos = 1 diff --git a/tedana/viz.py b/tedana/viz.py index a1da110db..92c664872 100644 --- a/tedana/viz.py +++ b/tedana/viz.py @@ -220,8 +220,8 @@ def write_kappa_scatter(comptable, out_dir): # Prebuild legend so that the marker sizes are uniform for kind in mkr_dict: - plt.scatter([], [], s=1, marker=mkr_dict[kind][0], - c=mkr_dict[kind][1], label=kind, alpha=0.5) + plt.scatter([], [], s=1, marker=mkr_dict[kind][0], + c=mkr_dict[kind][1], label=kind, alpha=0.5) # Create legend ax_scatter.legend(markerscale=10)
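
Note for local testing (not part of the diff itself): the overhauled suite is driven entirely by the Makefile targets and the --skipintegration pytest option added above. A minimal usage sketch, assuming a checkout of this branch with conda or pip available; everything shown comes from the Makefile, conftest.py, and dev_requirements.txt in this patch:

    pip install -r dev_requirements.txt               # also pulls requirements.txt via the -r include
    make lint                                         # flake8 over the tedana package
    make unittest                                     # unit tests only; integration skipped via --skipintegration
    make integration                                  # three- and five-echo runs; downloads OSF test data
    py.test --skipintegration --cov=tedana tedana/    # direct pytest equivalent of 'make unittest'

The same entry points back the CircleCI jobs (make unittest / make lint / make integration) and the Docker image's run_tests.sh helpers, so local runs and CI exercise identical commands.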