Merge branch 'main' into 98BT_refac
bclenet authored Mar 6, 2024
2 parents 38af98f + 009a972 commit 8741d5e
Showing 12 changed files with 91 additions and 123 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -1,6 +1,6 @@

# to avoid commiting data
./data/
data/

# neuro user in docker image
neuro
10 changes: 5 additions & 5 deletions INSTALL.md
@@ -63,7 +63,7 @@ Start a Docker container from the Docker image :

```bash
# Replace PATH_TO_THE_REPOSITORY in the following command (e.g.: with /home/user/dev/narps_open_pipelines/)
docker run -it -v PATH_TO_THE_REPOSITORY:/home/neuro/code/ nipype/nipype:py38
docker run -it -v PATH_TO_THE_REPOSITORY:/work/ nipype/nipype:py38
```

Optionally edit the configuration file `narps_open/utils/configuration/default_config.toml` so that the paths it refers to match the ones inside the container. E.g.: if using the previous command line, the `directories` part of the configuration file should be :
@@ -73,9 +73,9 @@ Optionally edit the configuration file `narps_open/utils/configuration/default_c
# ...

[directories]
dataset = "/home/neuro/code/data/original/ds001734/"
reproduced_results = "/home/neuro/code/data/reproduced/"
narps_results = "/home/neuro/code/data/results/"
dataset = "/work/data/original/ds001734/"
reproduced_results = "/work/data/reproduced/"
narps_results = "/work/data/results/"

# ...
```
@@ -87,7 +87,7 @@ Install NARPS Open Pipelines inside the container :

```bash
source activate neuro
cd /home/neuro/code/
cd /work/
pip install .
```

4 changes: 2 additions & 2 deletions docs/environment.md
@@ -15,7 +15,7 @@ From this command line, you need to add volumes to be able to link with your loc
```bash
# Replace PATH_TO_THE_REPOSITORY in the following command (e.g.: with /home/user/dev/narps_open_pipelines/)
docker run -it \
-v PATH_TO_THE_REPOSITORY:/home/neuro/code/ \
-v PATH_TO_THE_REPOSITORY:/work/ \
nipype/nipype:py38
```

@@ -25,7 +25,7 @@ If you wish to use [Jupyter](https://jupyter.org/) to run the code, a port forwa

```bash
docker run -it \
-v PATH_TO_THE_REPOSITORY:/home/neuro/code/ \
-v PATH_TO_THE_REPOSITORY:/work/ \
-p 8888:8888 \
nipype/nipype:py38
```
6 changes: 3 additions & 3 deletions narps_open/utils/configuration/default_config.toml
@@ -3,9 +3,9 @@ title = "Default configuration for the NARPS open pipelines project"
config_type = "default"

[directories]
dataset = "data/original/ds001734/"
reproduced_results = "data/reproduced/"
narps_results = "data/results/"
dataset = "/work/data/original/ds001734/"
reproduced_results = "/work/run/reproduced/"
narps_results = "/work/data/results/"

[runner]
nb_procs = 8 # Maximum number of threads executed by the runner
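For reference, a minimal sketch of how these configured values are read elsewhere in the project, based on the dictionary-style access visible in the tests further down this diff (`Configuration()['directories']['test_runs']`); the `['runner']` lookup and the `sub-001` path are illustrative assumptions, not part of this commit:

```python
# Sketch only: the ['directories'] access pattern is taken from the tests in this
# commit; the ['runner'] access is assumed to follow the same pattern.
from os.path import join

from narps_open.utils.configuration import Configuration

config = Configuration()  # current configuration (default, unless another config_type was selected)

dataset_dir = config['directories']['dataset']             # "/work/data/original/ds001734/"
results_dir = config['directories']['reproduced_results']  # "/work/run/reproduced/"
nb_procs = config['runner']['nb_procs']                     # 8, per the [runner] section above

# Hypothetical usage: build a path inside the configured dataset directory
subject_dir = join(dataset_dir, 'sub-001')
```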
10 changes: 5 additions & 5 deletions narps_open/utils/configuration/testing_config.toml
@@ -3,11 +3,11 @@ title = "Testing configuration for the NARPS open pipelines project"
config_type = "testing"

[directories]
dataset = "data/original/ds001734/"
reproduced_results = "run/data/reproduced/"
narps_results = "data/results/"
test_data = "tests/test_data/"
test_runs = "run/"
dataset = "/work/data/original/ds001734/"
reproduced_results = "/work/run/reproduced/"
narps_results = "/work/data/results/"
test_data = "/work/tests/test_data/"
test_runs = "/work/run/"

[runner]
nb_procs = 8 # Maximum number of threads executed by the runner
15 changes: 7 additions & 8 deletions tests/conftest.py
@@ -6,8 +6,9 @@
pytest on (a) test file(s) in the same directory.
"""

from os import remove
from os import remove, mkdir
from os.path import join, isfile
from tempfile import mkdtemp
from shutil import rmtree

from numpy import isclose
@@ -26,13 +27,11 @@
Configuration(config_type='testing')

@fixture
def remove_test_dir(directory_path):
""" A fixture to remove temporary directory created by tests """

rmtree(directory_path, ignore_errors = True)
mkdir(directory_path)
yield # test runs here
rmtree(directory_path, ignore_errors = True)
def temporary_data_dir():
""" A fixture to create and remove a temporary directory for the tests """
data_dir = mkdtemp()
yield data_dir
rmtree(data_dir, ignore_errors = True)

@helpers.register
def compare_float_2d_arrays(array_1, array_2):
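For context, a minimal sketch (not part of this diff) of how a test consumes the new fixture: pytest injects it by argument name, the test receives the temporary path created by `mkdtemp()`, and the directory is removed by the fixture's `rmtree()` once the test finishes. The test and file names below are hypothetical:

```python
from os.path import exists, join
from pathlib import Path

from pytest import mark

@mark.unit_test
def test_example(temporary_data_dir):
    # Write a file inside the fixture-provided directory (hypothetical file name)
    file_path = join(temporary_data_dir, 'example.txt')
    Path(file_path).touch()

    # The whole directory is removed by the fixture after the test returns
    assert exists(file_path)
```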
65 changes: 29 additions & 36 deletions tests/core/test_common.py
@@ -10,38 +10,25 @@
pytest -q test_common.py
pytest -q test_common.py -k <selected_test>
"""
from os import mkdir, makedirs
from os import makedirs
from os.path import join, exists, abspath
from shutil import rmtree
from pathlib import Path

from pytest import mark, fixture
from pytest import mark
from nipype import Node, Function, Workflow

from narps_open.utils.configuration import Configuration
import narps_open.core.common as co

TEMPORARY_DIR = join(Configuration()['directories']['test_runs'], 'test_common')

@fixture
def remove_test_dir():
""" A fixture to remove temporary directory created by tests """

rmtree(TEMPORARY_DIR, ignore_errors = True)
mkdir(TEMPORARY_DIR)
yield # test runs here
rmtree(TEMPORARY_DIR, ignore_errors = True)

class TestCoreCommon:
""" A class that contains all the unit tests for the common module."""

@staticmethod
@mark.unit_test
def test_remove_file(remove_test_dir):
def test_remove_file(temporary_data_dir):
""" Test the remove_file function """

# Create a single file
test_file_path = abspath(join(TEMPORARY_DIR, 'file1.txt'))
test_file_path = abspath(join(temporary_data_dir, 'file1.txt'))
Path(test_file_path).touch()

# Check file exist
@@ -62,15 +49,15 @@ def test_remove_file(remove_test_dir):

@staticmethod
@mark.unit_test
def test_remove_directory(remove_test_dir):
def test_remove_directory(temporary_data_dir):
""" Test the remove_directory function """

# Create a single inside dir tree
dir_path = abspath(join(TEMPORARY_DIR, 'dir_1', 'dir_2'))
dir_path = abspath(join(temporary_data_dir, 'dir_1', 'dir_2'))
makedirs(dir_path)
file_path = abspath(join(TEMPORARY_DIR, 'dir_1', 'dir_2', 'file1.txt'))
file_path = abspath(join(temporary_data_dir, 'dir_1', 'dir_2', 'file1.txt'))
Path(file_path).touch()
test_dir_path = abspath(join(TEMPORARY_DIR, 'dir_1'))
test_dir_path = abspath(join(temporary_data_dir, 'dir_1'))

# Check file exist
assert exists(file_path)
@@ -90,13 +77,13 @@ def test_remove_directory(remove_test_dir):

@staticmethod
@mark.unit_test
def test_remove_parent_directory(remove_test_dir):
def test_remove_parent_directory(temporary_data_dir):
""" Test the remove_parent_directory function """

# Create a single inside dir tree
dir_path = abspath(join(TEMPORARY_DIR, 'dir_1', 'dir_2'))
dir_path = abspath(join(temporary_data_dir, 'dir_1', 'dir_2'))
makedirs(dir_path)
file_path = abspath(join(TEMPORARY_DIR, 'dir_1', 'dir_2', 'file1.txt'))
file_path = abspath(join(temporary_data_dir, 'dir_1', 'dir_2', 'file1.txt'))
Path(file_path).touch()

# Check file exist
@@ -151,7 +138,7 @@ def test_node_elements_in_string():

@staticmethod
@mark.unit_test
def test_connect_elements_in_string(remove_test_dir):
def test_connect_elements_in_string(temporary_data_dir):
""" Test the elements_in_string function as evaluated in a connect """

# Inputs
@@ -180,7 +167,7 @@ def test_connect_elements_in_string(remove_test_dir):

# Create Workflow
test_workflow = Workflow(
base_dir = TEMPORARY_DIR,
base_dir = temporary_data_dir,
name = 'test_workflow'
)
test_workflow.connect([
@@ -193,11 +180,13 @@ def test_connect_elements_in_string(remove_test_dir):

test_workflow.run()

test_file_t = join(TEMPORARY_DIR, 'test_workflow', 'node_true', '_report', 'report.rst')
test_file_t = join(temporary_data_dir,
'test_workflow', 'node_true', '_report', 'report.rst')
with open(test_file_t, 'r', encoding = 'utf-8') as file:
assert '* out_value : test_string' in file.read()

test_file_f = join(TEMPORARY_DIR, 'test_workflow', 'node_false', '_report', 'report.rst')
test_file_f = join(temporary_data_dir,
'test_workflow', 'node_false', '_report', 'report.rst')
with open(test_file_f, 'r', encoding = 'utf-8') as file:
assert '* out_value : None' in file.read()

@@ -238,7 +227,7 @@ def test_node_clean_list():

@staticmethod
@mark.unit_test
def test_connect_clean_list(remove_test_dir):
def test_connect_clean_list(temporary_data_dir):
""" Test the clean_list function as evaluated in a connect """

# Inputs
@@ -269,7 +258,7 @@ def test_connect_clean_list(remove_test_dir):

# Create Workflow
test_workflow = Workflow(
base_dir = TEMPORARY_DIR,
base_dir = temporary_data_dir,
name = 'test_workflow'
)
test_workflow.connect([
@@ -279,11 +268,13 @@ def test_connect_clean_list(remove_test_dir):
])
test_workflow.run()

test_file_1 = join(TEMPORARY_DIR, 'test_workflow', 'node_1', '_report', 'report.rst')
test_file_1 = join(temporary_data_dir,
'test_workflow', 'node_1', '_report', 'report.rst')
with open(test_file_1, 'r', encoding = 'utf-8') as file:
assert f'* out_value : {output_list_1}' in file.read()

test_file_2 = join(TEMPORARY_DIR, 'test_workflow', 'node_2', '_report', 'report.rst')
test_file_2 = join(temporary_data_dir,
'test_workflow', 'node_2', '_report', 'report.rst')
with open(test_file_2, 'r', encoding = 'utf-8') as file:
assert f'* out_value : {output_list_2}' in file.read()

@@ -324,7 +315,7 @@ def test_node_list_intersection():

@staticmethod
@mark.unit_test
def test_connect_list_intersection(remove_test_dir):
def test_connect_list_intersection(temporary_data_dir):
""" Test the list_intersection function as evaluated in a connect """

# Inputs / outputs
@@ -355,7 +346,7 @@ def test_connect_list_intersection(remove_test_dir):

# Create Workflow
test_workflow = Workflow(
base_dir = TEMPORARY_DIR,
base_dir = temporary_data_dir,
name = 'test_workflow'
)
test_workflow.connect([
@@ -365,11 +356,13 @@ def test_connect_list_intersection(remove_test_dir):
])
test_workflow.run()

test_file_1 = join(TEMPORARY_DIR, 'test_workflow', 'node_1', '_report', 'report.rst')
test_file_1 = join(temporary_data_dir,
'test_workflow', 'node_1', '_report', 'report.rst')
with open(test_file_1, 'r', encoding = 'utf-8') as file:
assert f'* out_value : {output_list_1}' in file.read()

test_file_2 = join(TEMPORARY_DIR, 'test_workflow', 'node_2', '_report', 'report.rst')
test_file_2 = join(temporary_data_dir,
'test_workflow', 'node_2', '_report', 'report.rst')
with open(test_file_2, 'r', encoding = 'utf-8') as file:
assert f'* out_value : {output_list_2}' in file.read()

9 changes: 3 additions & 6 deletions tests/pipelines/test_team_J7F9.py
@@ -20,8 +20,6 @@
from narps_open.utils.configuration import Configuration
from narps_open.pipelines.team_J7F9 import PipelineTeamJ7F9

TEMPORARY_DIR = join(Configuration()['directories']['test_runs'], 'test_J7F9')

class TestPipelinesTeamJ7F9:
""" A class that contains all the unit tests for the PipelineTeamJ7F9 class."""

@@ -177,8 +175,7 @@ def test_subject_information():

@staticmethod
@mark.unit_test
@mark.parametrize('remove_test_dir', TEMPORARY_DIR)
def test_confounds_file(remove_test_dir):
def test_confounds_file(temporary_data_dir):
""" Test the get_confounds_file method """

confounds_file = join(
@@ -187,11 +184,11 @@ def test_confounds_file(remove_test_dir):
Configuration()['directories']['test_data'], 'pipelines', 'team_J7F9', 'confounds.tsv')

# Get new confounds file
PipelineTeamJ7F9.get_confounds_file(confounds_file, 'sid', 'rid', TEMPORARY_DIR)
PipelineTeamJ7F9.get_confounds_file(confounds_file, 'sid', 'rid', temporary_data_dir)

# Check confounds file was created
created_confounds_file = join(
TEMPORARY_DIR, 'confounds_files', 'confounds_file_sub-sid_run-rid.tsv')
temporary_data_dir, 'confounds_files', 'confounds_file_sub-sid_run-rid.tsv')
assert exists(created_confounds_file)

# Check contents
9 changes: 3 additions & 6 deletions tests/pipelines/test_team_T54A.py
@@ -19,8 +19,6 @@
from narps_open.pipelines.team_T54A import PipelineTeamT54A
from narps_open.utils.configuration import Configuration

TEMPORARY_DIR = join(Configuration()['directories']['test_runs'], 'test_T54A')

class TestPipelinesTeamT54A:
""" A class that contains all the unit tests for the PipelineTeamT54A class."""

@@ -137,8 +135,7 @@ def test_subject_information():

@staticmethod
@mark.unit_test
@mark.parametrize('remove_test_dir', TEMPORARY_DIR)
def test_parameters_file(remove_test_dir):
def test_parameters_file(temporary_data_dir):
""" Test the get_parameters_file method """

confounds_file_path = join(
@@ -148,12 +145,12 @@ def test_parameters_file(remove_test_dir):
confounds_file_path,
'fake_subject_id',
'fake_run_id',
TEMPORARY_DIR
temporary_data_dir
)

# Check parameter file was created
assert exists(join(
TEMPORARY_DIR,
temporary_data_dir,
'parameters_file',
'parameters_file_sub-fake_subject_id_run-fake_run_id.tsv')
)