Skip to content

Commit

Permalink
[REFAC] using narps_open.core functions [TEST] events data in a file
Browse files Browse the repository at this point in the history
  • Loading branch information
bclenet committed Nov 20, 2023
1 parent c6ae521 commit 7433106
Show file tree
Hide file tree
Showing 4 changed files with 67 additions and 87 deletions.
74 changes: 26 additions & 48 deletions narps_open/pipelines/team_08MQ.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
from narps_open.data.task import TaskInformation
from narps_open.data.participants import get_group
from narps_open.core.common import remove_file, list_intersection, elements_in_string, clean_list

# Setup FSL
FSLCommand.set_default_output_type('NIFTI_GZ')

Expand All @@ -45,29 +46,6 @@ def __init__(self):
('negative_effect_loss', 'T', ['gain', 'loss'], [0, -1])
]

def remove_files(_, files):
    """
    This method is used in a Function node to fully remove
    files generated by a Node, once they aren't needed anymore.

    Parameters:
    - _: Node input only used for triggering the Node
    - files: str or list, a single filename or a list of filenames to remove
    """
    # Import inside the function: Nipype Function nodes serialize the
    # function source, so all names must resolve locally at run time.
    from os import remove

    # Accept a single filename for convenience
    if isinstance(files, str):
        files = [files]

    # Remove files one by one so that a failure on one file does not
    # prevent the removal of the remaining ones (the original wrapped
    # the whole loop in a single try, aborting on the first error).
    removed = []
    for file in files:
        try:
            remove(file)
        except OSError as error:
            print(error)
        else:
            removed.append(file)

    # Only report files that were actually deleted
    if removed:
        print('The following files were successfully deleted.')
        print(removed)

def get_preprocessing(self):
""" Return a Nipype workflow describing the prerpocessing part of the pipeline """

Expand Down Expand Up @@ -237,35 +215,35 @@ def get_preprocessing(self):
compute_confounds.inputs.repetition_time = TaskInformation()['RepetitionTime']

# Function Nodes remove_files - Remove sizeable files once they aren't needed
remove_func_0 = Node(Function(
function = self.remove_files,
input_names = ['_', 'files'],
remove_func_0 = MapNode(Function(
function = remove_file,
input_names = ['_', 'file'],
output_names = []
), name = 'remove_func_0')
), name = 'remove_func_0', iterfield = 'file')

remove_func_1 = Node(Function(
function = self.remove_files,
input_names = ['_', 'files'],
remove_func_1 = MapNode(Function(
function = remove_file,
input_names = ['_', 'file'],
output_names = []
), name = 'remove_func_1')
), name = 'remove_func_1', iterfield = 'file')

remove_func_2 = Node(Function(
function = self.remove_files,
input_names = ['_', 'files'],
remove_func_2 = MapNode(Function(
function = remove_file,
input_names = ['_', 'file'],
output_names = []
), name = 'remove_func_2')
), name = 'remove_func_2', iterfield = 'file')

remove_func_3 = Node(Function(
function = self.remove_files,
input_names = ['_', 'files'],
remove_func_3 = MapNode(Function(
function = remove_file,
input_names = ['_', 'file'],
output_names = []
), name = 'remove_func_3')
), name = 'remove_func_3', iterfield = 'file')

remove_func_4 = Node(Function(
function = self.remove_files,
input_names = ['_', 'files'],
remove_func_4 = MapNode(Function(
function = remove_file,
input_names = ['_', 'file'],
output_names = []
), name = 'remove_func_4')
), name = 'remove_func_4', iterfield = 'file')

preprocessing = Workflow(base_dir = self.directories.working_dir, name = 'preprocessing')
preprocessing.config['execution']['stop_on_first_crash'] = 'true'
Expand Down Expand Up @@ -326,15 +304,15 @@ def get_preprocessing(self):
(alignment_func_to_mni, data_sink, [('output_image', 'preprocessing.@output_image')]),

# File removals
(motion_correction, remove_func_0, [('out_file', 'files')]),
(motion_correction, remove_func_0, [('out_file', 'file')]),
(data_sink, remove_func_0, [('out_file', '_')]),
(slice_time_correction, remove_func_1, [('slice_time_corrected_file', 'files')]),
(slice_time_correction, remove_func_1, [('slice_time_corrected_file', 'file')]),
(data_sink, remove_func_1, [('out_file', '_')]),
(smoothing, remove_func_2, [('smoothed_file', 'files')]),
(smoothing, remove_func_2, [('smoothed_file', 'file')]),
(data_sink, remove_func_2, [('out_file', '_')]),
(alignment_func_to_anat, remove_func_3, [('out_file', 'files')]),
(alignment_func_to_anat, remove_func_3, [('out_file', 'file')]),
(data_sink, remove_func_3, [('out_file', '_')]),
(alignment_func_to_mni, remove_func_4, [('output_image', 'files')]),
(alignment_func_to_mni, remove_func_4, [('output_image', 'file')]),
(data_sink, remove_func_4, [('out_file', '_')])
])

Expand Down
31 changes: 0 additions & 31 deletions tests/pipelines/__init__.py

This file was deleted.

43 changes: 35 additions & 8 deletions tests/pipelines/test_team_08MQ.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,14 @@
pytest -q test_team_08MQ.py
pytest -q test_team_08MQ.py -k <selected_test>
"""
from os.path import join

from pytest import helpers, mark
from numpy import isclose
from nipype import Workflow
from nipype.interfaces.base import Bunch

from narps_open.utils.configuration import Configuration
from narps_open.pipelines.team_08MQ import PipelineTeam08MQ

class TestPipelinesTeam08MQ:
Expand Down Expand Up @@ -70,20 +73,44 @@ def test_outputs():

@staticmethod
@mark.unit_test
def test_subject_information(mocker):
def test_subject_information():
""" Test the get_subject_information method """

helpers.mock_event_data(mocker)

information = PipelineTeam08MQ.get_subject_information('fake_event_file_path')[0]
information = PipelineTeam08MQ.get_subject_information(join(
Configuration()['directories']['test_data'],
'pipelines',
'events.tsv'
))[0]

assert isinstance(information, Bunch)
assert information.amplitudes == [[1.0, 1.0], [14.0, 34.0], [6.0, 14.0], [1.0, 1.0]]
assert information.durations == [[4.0, 4.0], [2.388, 2.289], [2.388, 2.289], [4.0, 4.0]]
assert information.conditions == ['event', 'gain', 'loss', 'response']
assert information.onsets == [
[4.071, 11.834], [4.071, 11.834], [4.071, 11.834], [4.071, 11.834]

reference_amplitudes = [
[1.0, 1.0, 1.0, 1.0, 1.0],
[14.0, 34.0, 38.0, 10.0, 16.0],
[6.0, 14.0, 19.0, 15.0, 17.0],
[1.0, 1.0, 0.0, -1.0, -1.0]
]
for reference_array, test_array in zip(reference_amplitudes, information.amplitudes):
assert isclose(reference_array, test_array).all()

reference_durations = [
[4.0, 4.0, 4.0, 4.0, 4.0],
[2.388, 2.289, 0.0, 2.08, 2.288],
[2.388, 2.289, 0.0, 2.08, 2.288],
[4.0, 4.0, 4.0, 4.0, 4.0]
]
for reference_array, test_array in zip(reference_durations, information.durations):
assert isclose(reference_array, test_array).all()

reference_onsets = [
[4.071, 11.834, 19.535, 27.535, 36.435],
[4.071, 11.834, 19.535, 27.535, 36.435],
[4.071, 11.834, 19.535, 27.535, 36.435],
[4.071, 11.834, 19.535, 27.535, 36.435]
]
for reference_array, test_array in zip(reference_onsets, information.onsets):
assert isclose(reference_array, test_array).all()

@staticmethod
@mark.unit_test
Expand Down
6 changes: 6 additions & 0 deletions tests/test_data/pipelines/events.tsv
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
onset duration gain loss RT participant_response
4.071 4 14 6 2.388 weakly_accept
11.834 4 34 14 2.289 strongly_accept
19.535 4 38 19 0 NoResp
27.535 4 10 15 2.08 strongly_reject
36.435 4 16 17 2.288 weakly_reject

0 comments on commit 7433106

Please sign in to comment.