def detect_ushape(streamlines, ufactor=(0.5, 1)):
    """
    Detect U-shaped streamlines, based on the approach used in TrackVis.

    Four points are sampled along each streamline (the two endpoints and
    the points at 1/3 and 2/3 of its length) and joined by three
    normalized vectors v1, v2 and v3. The "u-factor" is the dot product
    of (v1 x v2) and (v2 x v3): close to 1 for a planar U-shape, close
    to -1 for an S-shape, and close to 0 for a straight streamline.

    Parameters
    ----------
    streamlines: list of ndarray
        The streamlines (sequences of 3D points) to classify.
    ufactor: sequence of 2 floats
        Open interval (min, max) of u-factor values for which a
        streamline is considered U-shaped. Default: (0.5, 1).

    Returns
    -------
    list: the ids of U-shaped streamlines
        Only the ids are returned so proper filtering can be done
        afterwards.
    """
    ids = []
    low, high = min(ufactor), max(ufactor)
    for i, s in enumerate(streamlines):
        # At least 4 points are needed to sample 4 locations.
        if len(s) < 4:
            continue

        first_point = s[0]
        last_point = s[-1]
        second_point = s[round(len(s) / 3)]
        third_point = s[round(2 * len(s) / 3)]

        v1 = first_point - second_point
        v2 = second_point - third_point
        v3 = third_point - last_point

        norms = [np.linalg.norm(v) for v in (v1, v2, v3)]
        # Degenerate segment (e.g. for very short streamlines the 2/3
        # sample can coincide with an endpoint): u-factor is undefined,
        # so the streamline is simply not considered U-shaped.
        if any(n == 0 for n in norms):
            continue
        v1 = v1 / norms[0]
        v2 = v2 / norms[1]
        v3 = v3 / norms[2]

        val = np.dot(np.cross(v1, v2), np.cross(v2, v3))
        if low < val < high:
            ids.append(i)

    return ids
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
This script can be used to extract U-Shape streamlines.
The main idea comes from trackvis code:

pt 1: 1st end point
pt 2: 1/3 location on the track
pt 3: 2/3 location on the track
pt 4: 2nd end point

Compute 3 normalized vectors:
v1: pt1 -> pt2
v2: pt2 -> pt3
v3: pt3 -> pt4

U-factor: dot product of v1 X v2 and v2 X v3.
X is the cross product of two vectors.
----------------------------------------------------------------------------
"""

import argparse
import json
import logging

from dipy.io.streamline import save_tractogram
import numpy as np

from scilpy.io.streamlines import load_tractogram_with_reference
from scilpy.io.utils import (add_json_args,
                             add_overwrite_arg,
                             add_reference_arg,
                             assert_inputs_exist,
                             assert_outputs_exist,
                             check_tracts_same_format)
from scilpy.utils.streamlines import filter_tractogram_data
from scilpy.tractanalysis.features import detect_ushape


def _build_arg_parser():
    """Build the argument parser for the script."""
    p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
                                description=__doc__)
    p.add_argument('in_tractogram',
                   help='Tractogram input file name.')
    p.add_argument('out_tractogram',
                   help='Output tractogram with ushape.')
    p.add_argument('--ufactor', default=[0.5, 1], type=float, nargs=2,
                   help='U factor defines U-shapeness of the track.')
    p.add_argument('--remaining_tractogram',
                   help='If set, saves remaining streamlines.')
    p.add_argument('--display_counts', action='store_true',
                   help='Print streamline count before and after filtering')

    add_overwrite_arg(p)
    add_reference_arg(p)
    add_json_args(p)

    return p


def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, args.in_tractogram)
    assert_outputs_exist(parser, args, args.out_tractogram,
                         optional=args.remaining_tractogram)
    check_tracts_same_format(parser, [args.in_tractogram, args.out_tractogram,
                                      args.remaining_tractogram])

    # The u-factor is a dot product of unit vectors, so a meaningful
    # U-shape interval must lie in (0, 1].
    if min(args.ufactor) <= 0 or max(args.ufactor) > 1:
        parser.error('U-factor "{}" '.format(args.ufactor) +
                     'must be greater than 0 and lower or equal to 1.')

    tractogram = load_tractogram_with_reference(
        parser, args, args.in_tractogram)

    streamlines = tractogram.streamlines

    if len(streamlines) <= 1:
        parser.error(
            'Zero or one streamline in {}'.format(args.in_tractogram) +
            '. The file must have more than one streamline.')

    ids_c = detect_ushape(streamlines, args.ufactor)
    ids_l = np.setdiff1d(np.arange(len(streamlines)), ids_c)

    sft_c = None
    if len(ids_c) > 0:
        sft_c = filter_tractogram_data(tractogram, ids_c)
        save_tractogram(sft_c, args.out_tractogram)
    else:
        logging.warning(
            'No U-shaped streamlines in {}'.format(args.in_tractogram))

    if args.display_counts:
        sc_bf = len(tractogram.streamlines)
        # sft_c is None when nothing was kept; report 0 instead of
        # raising a NameError.
        sc_af = len(sft_c.streamlines) if sft_c is not None else 0
        print(json.dumps({'streamline_count_before_filtering': int(sc_bf),
                          'streamline_count_after_filtering': int(sc_af)},
                         indent=args.indent))

    if len(ids_l) == 0:
        logging.warning(
            'No remaining streamlines in {}'.format(args.in_tractogram))
    elif args.remaining_tractogram:
        sft_l = filter_tractogram_data(tractogram, ids_l)
        save_tractogram(sft_l, args.remaining_tractogram)


if __name__ == "__main__":
    main()
From: Arnaud Bore Date: Mon, 18 Jan 2021 15:31:45 -0500 Subject: [PATCH 03/45] fix message for ufactor --- scripts/scil_extract_ushape.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 68216e773..453cb5dc9 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -69,9 +69,9 @@ def main(): check_tracts_same_format(parser, [args.in_tractogram, args.out_tractogram, args.remaining_tractogram]) - #if args.threshold <= 0: - # parser.error('Threshold "{}" '.format(args.ufactor) + - # 'must be greater than 0') + if min(args.ufactor) <= 0 or max(args.ufactor) > 1: + parser.error('U-factor "{}" '.format(args.ufactor) + + 'must be greater than 0 and lower or equal to 1.') tractogram = load_tractogram_with_reference( parser, args, args.in_tractogram) From 3ebdbe5997696c20bec9f41edac492dcf513a7ab Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Mon, 18 Jan 2021 16:02:01 -0500 Subject: [PATCH 04/45] fix scil_remove_loop + pep8 --- scilpy/tractanalysis/features.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/scilpy/tractanalysis/features.py b/scilpy/tractanalysis/features.py index a5a2d887d..7efbd9f45 100644 --- a/scilpy/tractanalysis/features.py +++ b/scilpy/tractanalysis/features.py @@ -8,7 +8,7 @@ from dipy.segment.metric import AveragePointwiseEuclideanMetric from dipy.tracking import metrics as tm import numpy as np -from sklearn.preprocessing import normalize + def detect_ushape(streamlines, ufactor=[0.5, 1]): """ @@ -27,7 +27,7 @@ def detect_ushape(streamlines, ufactor=[0.5, 1]): """ ids = [] for i, s in enumerate(streamlines): - if len(s)>=4: + if len(s) >= 4: first_point = s[0] last_point = s[-1] second_point = s[round(len(s)/3)] @@ -41,7 +41,7 @@ def detect_ushape(streamlines, ufactor=[0.5, 1]): v2 = v2 / np.linalg.norm(v2) v3 = v3 / np.linalg.norm(v3) - val = np.dot(np.cross(v1,v2), np.cross(v2,v3)) + val = 
np.dot(np.cross(v1, v2), np.cross(v2, v3)) if min(ufactor) < val < max(ufactor): ids.append(i) @@ -84,6 +84,8 @@ def remove_loops_and_sharp_turns(streamlines, if tm.winding(s) < max_angle: ids.append(i) streamlines_clean.append(s) + else: + streamlines_clean.append = np.empty((1,3)) if use_qb: ids = [] From c41ef5c226aba234ed18ca5c5a0868893b6494ac Mon Sep 17 00:00:00 2001 From: "charles.poirier" Date: Sat, 6 Mar 2021 18:16:53 -0500 Subject: [PATCH 05/45] RF: Update scil_visualize_fodf to use new fury odf_slicer --- requirements.txt | 2 +- scilpy/viz/scene_utils.py | 65 +++++++++++++++++++++------------- scripts/scil_visualize_fodf.py | 53 +++++++-------------------- 3 files changed, 54 insertions(+), 66 deletions(-) diff --git a/requirements.txt b/requirements.txt index bf073508f..bb4750ea2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ coloredlogs==10.0.* cycler==0.10.* Cython==0.29.* dipy==1.3.* -fury==0.6.* +-e git+https://github.com/fury-gl/fury.git@c7505e207105684ba4353389a5a4fe7d8c10bcf0 #egg=fury future==0.17.* h5py==2.10.* kiwisolver==1.0.* diff --git a/scilpy/viz/scene_utils.py b/scilpy/viz/scene_utils.py index 2a4f493ae..40496f549 100644 --- a/scilpy/viz/scene_utils.py +++ b/scilpy/viz/scene_utils.py @@ -3,11 +3,15 @@ from enum import Enum import numpy as np -from dipy.reconst.shm import sh_to_sf +from dipy.reconst.shm import sh_to_sf_matrix from fury import window, actor from scilpy.io.utils import snapshot +orient_to_axis_dict = {'sagittal': 'xaxis', + 'coronal': 'yaxis', + 'axial': 'zaxis'} + class CamParams(Enum): """ @@ -19,7 +23,7 @@ class CamParams(Enum): ZOOM_FACTOR = 'zoom_factor' -def initialize_camera(orientation, volume_shape): +def initialize_camera(orientation, slice_index, volume_shape): """ Initialize a camera for a given orientation. 
""" @@ -28,55 +32,69 @@ def initialize_camera(orientation, volume_shape): camera[CamParams.ZOOM_FACTOR] = 2.0 / max(volume_shape) eye_distance = max(volume_shape) if orientation == 'sagittal': + if slice_index is None: + slice_index = volume_shape[0] // 2 camera[CamParams.VIEW_POS] = np.array([-eye_distance, (volume_shape[1] - 1) / 2.0, (volume_shape[2] - 1) / 2.0]) - camera[CamParams.VIEW_CENTER] = np.array([0.0, + camera[CamParams.VIEW_CENTER] = np.array([slice_index, (volume_shape[1] - 1) / 2.0, (volume_shape[2] - 1) / 2.0]) camera[CamParams.VIEW_UP] = np.array([0.0, 0.0, 1.0]) elif orientation == 'coronal': + if slice_index is None: + slice_index = volume_shape[1] // 2 camera[CamParams.VIEW_POS] = np.array([(volume_shape[0] - 1) / 2.0, eye_distance, (volume_shape[2] - 1) / 2.0]) camera[CamParams.VIEW_CENTER] = np.array([(volume_shape[0] - 1) / 2.0, - 0.0, + slice_index, (volume_shape[2] - 1) / 2.0]) camera[CamParams.VIEW_UP] = np.array([0.0, 0.0, 1.0]) elif orientation == 'axial': + if slice_index is None: + slice_index = volume_shape[2] // 2 camera[CamParams.VIEW_POS] = np.array([(volume_shape[0] - 1) / 2.0, (volume_shape[1] - 1) / 2.0, -eye_distance]) camera[CamParams.VIEW_CENTER] = np.array([(volume_shape[0] - 1) / 2.0, (volume_shape[1] - 1) / 2.0, - 0.0]) + slice_index]) camera[CamParams.VIEW_UP] = np.array([0.0, 1.0, 0.0]) else: raise ValueError('Invalid axis name: {0}'.format(orientation)) return camera -def set_display_extent(slicer_actor, orientation, volume_shape): +def set_display_extent(slicer_actor, orientation, volume_shape, slice_index): """ Set the display extent for a fury actor in ``orientation``. 
""" if orientation == 'sagittal': - slicer_actor.display_extent(0, 0, 0, volume_shape[1], + if slice_index is None: + slice_index = volume_shape[0] // 2 + slicer_actor.display_extent(slice_index, slice_index, + 0, volume_shape[1], 0, volume_shape[2]) elif orientation == 'coronal': - slicer_actor.display_extent(0, volume_shape[0], 0, 0, + if slice_index is None: + slice_index = volume_shape[1] // 2 + slicer_actor.display_extent(0, volume_shape[0], + slice_index, slice_index, 0, volume_shape[2]) elif orientation == 'axial': + if slice_index is None: + slice_index = volume_shape[2] // 2 slicer_actor.display_extent(0, volume_shape[0], 0, volume_shape[1], - 0, 0) + slice_index, slice_index) else: raise ValueError('Invalid axis name : {0}'.format(orientation)) def create_odf_slicer(sh_fodf, mask, sphere, nb_subdivide, sh_order, sh_basis, full_basis, orientation, - scale, radial_scale, norm, colormap): + scale, radial_scale, norm, colormap, slice_index): """ Create a ODF slicer actor displaying a fODF slice. The input volume is a 3-dimensional grid containing the SH coefficients of the fODF for each @@ -87,20 +105,16 @@ def create_odf_slicer(sh_fodf, mask, sphere, nb_subdivide, if nb_subdivide is not None: sphere = sphere.subdivide(nb_subdivide) - # Convert SH coefficients to SF coefficients - fodf = sh_to_sf(sh_fodf, sphere, sh_order, sh_basis, - full_basis=full_basis) - - # Get mask if supplied, otherwise create a mask discarding empty voxels - if mask is None: - mask = np.linalg.norm(fodf, axis=-1) > 0. 
+ # SH coefficients to SF coefficients matrix + B_mat = sh_to_sf_matrix(sphere, sh_order, sh_basis, + full_basis, return_inv=False) - odf_actor = actor.odf_slicer(fodf, mask=mask, norm=norm, + odf_actor = actor.odf_slicer(sh_fodf, mask=mask, norm=norm, radial_scale=radial_scale, sphere=sphere, colormap=colormap, - scale=scale) - set_display_extent(odf_actor, orientation, fodf.shape) + scale=scale, B_matrix=B_mat) + set_display_extent(odf_actor, orientation, sh_fodf.shape[:3], slice_index) return odf_actor @@ -124,8 +138,9 @@ def _get_affine_for_texture(orientation, offset): return affine -def create_texture_slicer(texture, value_range=None, orientation='axial', - opacity=1.0, offset=0.5, interpolation='nearest'): +def create_texture_slicer(texture, slice_index, value_range=None, + orientation='axial', opacity=1.0, offset=0.5, + interpolation='nearest'): """ Create a texture displayed behind the fODF. The texture is applied on a plane with a given offset for the fODF grid. @@ -136,7 +151,7 @@ def create_texture_slicer(texture, value_range=None, orientation='axial', value_range=value_range, opacity=opacity, interpolation=interpolation) - set_display_extent(slicer_actor, orientation, texture.shape) + set_display_extent(slicer_actor, orientation, texture.shape, slice_index) return slicer_actor @@ -159,14 +174,14 @@ def create_peaks_slicer(data, orientation, peak_values=None, mask=None, return peaks_slicer -def create_scene(actors, orientation, volume_shape): +def create_scene(actors, orientation, slice_index, volume_shape): """ Create a 3D scene containing actors fitting inside a grid. The camera is placed based on the orientation supplied by the user. The projection mode is parallel. 
""" # Configure camera - camera = initialize_camera(orientation, volume_shape) + camera = initialize_camera(orientation, slice_index, volume_shape) scene = window.Scene() scene.projection('parallel') diff --git a/scripts/scil_visualize_fodf.py b/scripts/scil_visualize_fodf.py index ae526f4ce..1ae8162e7 100755 --- a/scripts/scil_visualize_fodf.py +++ b/scripts/scil_visualize_fodf.py @@ -173,52 +173,25 @@ def _parse_args(parser): return args -def _crop_along_axis(data, index, axis_name): - """ - Extract a 2-dimensional slice from a 3-dimensional data volume - """ - if axis_name == 'sagittal': - if index is None: - data_slice = data[data.shape[0]//2, :, :] - else: - data_slice = data[index, :, :] - return data_slice[None, ...] - elif axis_name == 'coronal': - if index is None: - data_slice = data[:, data.shape[1]//2, :] - else: - data_slice = data[:, index, :] - return data_slice[:, None, ...] - elif axis_name == 'axial': - if index is None: - data_slice = data[:, :, data.shape[2]//2] - else: - data_slice = data[:, :, index] - return data_slice[:, :, None] - - def _get_data_from_inputs(args): """ Load data given by args. Perform checks to ensure dimensions agree between the data for mask, background, peaks and fODF. 
""" fodf = nib.nifti1.load(args.in_fodf).get_fdata(dtype=np.float32) - data = {'fodf': _crop_along_axis(fodf, args.slice_index, - args.axis_name)} + data = {'fodf': fodf} if args.background: bg = nib.nifti1.load(args.background).get_fdata(dtype=np.float32) if bg.shape[:3] != fodf.shape[:-1]: raise ValueError('Background dimensions {0} do not agree with fODF' ' dimensions {1}.'.format(bg.shape, fodf.shape)) - data['bg'] = _crop_along_axis(bg, args.slice_index, - args.axis_name) + data['bg'] = bg if args.mask: mask = get_data_as_mask(nib.nifti1.load(args.mask), dtype=bool) if mask.shape != fodf.shape[:-1]: raise ValueError('Mask dimensions {0} do not agree with fODF ' 'dimensions {1}.'.format(mask.shape, fodf.shape)) - data['mask'] = _crop_along_axis(mask, args.slice_index, - args.axis_name) + data['mask'] = mask if args.peaks: peaks = nib.nifti1.load(args.peaks).get_fdata(dtype=np.float32) if peaks.shape[:3] != fodf.shape[:-1]: @@ -234,8 +207,7 @@ def _get_data_from_inputs(args): raise ValueError('Peaks volume last dimension ({0}) cannot ' 'be reshaped as (npeaks, 3).' .format(peaks.shape[-1])) - data['peaks'] = _crop_along_axis(peaks, args.slice_index, - args.axis_name) + data['peaks'] = peaks if args.peaks_values: peak_vals =\ nib.nifti1.load(args.peaks_values).get_fdata(dtype=np.float32) @@ -243,12 +215,9 @@ def _get_data_from_inputs(args): raise ValueError('Peaks volume dimensions {0} do not agree ' 'with fODF dimensions {1}.' 
.format(peak_vals.shape, fodf.shape)) - data['peaks_values'] =\ - _crop_along_axis(peak_vals, args.slice_index, - args.axis_name) + data['peaks_values'] = peak_vals - grid_shape = data['fodf'].shape[:3] - return data, grid_shape + return data def validate_order(sh_order, ncoeffs, full_basis): @@ -266,7 +235,7 @@ def validate_order(sh_order, ncoeffs, full_basis): def main(): parser = _build_arg_parser() args = _parse_args(parser) - data, grid_shape = _get_data_from_inputs(args) + data = _get_data_from_inputs(args) sph = get_sphere(args.sphere) sh_order = order_from_ncoef(data['fodf'].shape[-1], args.full_basis) if not validate_order(sh_order, data['fodf'].shape[-1], args.full_basis): @@ -288,12 +257,14 @@ def main(): args.sh_basis, args.full_basis, args.axis_name, args.scale, not args.radial_scale_off, - not args.norm_off, args.colormap) + not args.norm_off, args.colormap, + args.slice_index) actors.append(odf_actor) # Instantiate a texture slicer actor if a background image is supplied if 'bg' in data: bg_actor = create_texture_slicer(data['bg'], + args.slice_index, args.bg_range, args.axis_name, args.bg_opacity, @@ -319,7 +290,9 @@ def main(): actors.append(peaks_actor) # Prepare and display the scene - scene = create_scene(actors, args.axis_name, grid_shape) + scene = create_scene(actors, args.axis_name, + args.slice_index, + data['fodf'].shape[:3]) render_scene(scene, args.win_dims, args.interactor, args.output, args.silent) From 8fa07091a82da467d688698bdafd5cf548abb721 Mon Sep 17 00:00:00 2001 From: "charles.poirier" Date: Wed, 10 Mar 2021 11:35:40 -0500 Subject: [PATCH 06/45] NF: Script to visualize nifti images (peaks, texture or fodf) --- scilpy/viz/scene_utils.py | 112 ++++++++++-- scripts/scil_visualize_fodf.py | 3 +- scripts/scil_visualize_nifti.py | 290 ++++++++++++++++++++++++++++++++ 3 files changed, 392 insertions(+), 13 deletions(-) create mode 100755 scripts/scil_visualize_nifti.py diff --git a/scilpy/viz/scene_utils.py b/scilpy/viz/scene_utils.py 
index 40496f549..9bac584dd 100644 --- a/scilpy/viz/scene_utils.py +++ b/scilpy/viz/scene_utils.py @@ -8,10 +8,6 @@ from scilpy.io.utils import snapshot -orient_to_axis_dict = {'sagittal': 'xaxis', - 'coronal': 'yaxis', - 'axial': 'zaxis'} - class CamParams(Enum): """ @@ -23,6 +19,84 @@ class CamParams(Enum): ZOOM_FACTOR = 'zoom_factor' +class InteractableSlicer(): + def __init__(self, slicer, shape, orientation, slice_index): + self.slicer = slicer + self.shape = shape + if orientation not in ['sagittal', 'coronal', 'axial']: + raise ValueError('Invalid orientation for slicer.') + self.orientation = orientation + if slice_index is None: + slice_index = get_middle_slice_index(orientation, shape) + self.slice_index = slice_index + + def next_slice(self): + if self.orientation == 'sagittal': + index = min(self.slice_index + 1, self.shape[0] - 1) + self.slicer.display_extent(index, index, 0, + self.shape[1] - 1, + 0, self.shape[2] - 1) + elif self.orientation == 'coronal': + index = min(self.slice_index + 1, self.shape[1] - 1) + self.slicer.display_extent(0, self.shape[0] - 1, + index, index, + 0, self.shape[2] - 1) + elif self.orientation == 'axial': + index = min(self.slice_index + 1, self.shape[2] - 1) + self.slicer.display_extent(0, self.shape[0] - 1, + 0, self.shape[1] - 1, + index, index) + self.slice_index = index + + def prev_slice(self): + if self.orientation == 'sagittal': + index = max(self.slice_index - 1, 0) + self.slicer.display_extent(index, index, 0, + self.shape[1] - 1, + 0, self.shape[2] - 1) + elif self.orientation == 'coronal': + index = max(self.slice_index - 1, 0) + self.slicer.display_extent(0, self.shape[0] - 1, + index, index, + 0, self.shape[2] - 1) + elif self.orientation == 'axial': + index = max(self.slice_index - 1, 0) + self.slicer.display_extent(0, self.shape[0] - 1, + 0, self.shape[1] - 1, + index, index) + self.slice_index = index + + +class KeyUpDownCallback(object): + """ + Callback for keyboard up/down interaction. 
+ """ + def __init__(self, interactables): + self.interactables = interactables + + def __call__(self, caller, ev): + key = caller.GetKeySym() + if key == 'Up': + for i in self.interactables: + i.next_slice() + elif key == 'Down': + for i in self.interactables: + i.prev_slice() + caller.Render() + + +def get_middle_slice_index(orientation, shape): + if orientation == 'sagittal': + slice_index = shape[0] // 2 + elif orientation == 'coronal': + slice_index = shape[1] // 2 + elif orientation == 'axial': + slice_index = shape[2] // 2 + else: + raise ValueError('Invalid axis name: {0}'.format(orientation)) + return slice_index + + def initialize_camera(orientation, slice_index, volume_shape): """ Initialize a camera for a given orientation. @@ -30,6 +104,8 @@ def initialize_camera(orientation, slice_index, volume_shape): camera = {} # Tighten the view around the data camera[CamParams.ZOOM_FACTOR] = 2.0 / max(volume_shape) + # heuristic for setting the camera position at a distance + # proportional to the scale of the scene eye_distance = max(volume_shape) if orientation == 'sagittal': if slice_index is None: @@ -138,7 +214,7 @@ def _get_affine_for_texture(orientation, offset): return affine -def create_texture_slicer(texture, slice_index, value_range=None, +def create_texture_slicer(texture, mask, slice_index, value_range=None, orientation='axial', opacity=1.0, offset=0.5, interpolation='nearest'): """ @@ -147,17 +223,22 @@ def create_texture_slicer(texture, slice_index, value_range=None, """ affine = _get_affine_for_texture(orientation, offset) - slicer_actor = actor.slicer(texture, affine=affine, + if mask is not None: + masked_texture = np.zeros_like(texture) + masked_texture[mask] = texture[mask] + else: + masked_texture = texture + + slicer_actor = actor.slicer(masked_texture, affine=affine, value_range=value_range, opacity=opacity, interpolation=interpolation) set_display_extent(slicer_actor, orientation, texture.shape, slice_index) - return slicer_actor -def 
create_peaks_slicer(data, orientation, peak_values=None, mask=None, - color=None, peaks_width=1.0): +def create_peaks_slicer(data, orientation, slice_index, peak_values=None, + mask=None, color=None, peaks_width=1.0): """ Create a peaks slicer actor rendering a slice of the fODF peaks """ @@ -169,7 +250,7 @@ def create_peaks_slicer(data, orientation, peak_values=None, mask=None, peaks_slicer = actor.peak_slicer(data, peaks_values=peak_values, mask=mask, colors=color, linewidth=peaks_width) - set_display_extent(peaks_slicer, orientation, data.shape) + set_display_extent(peaks_slicer, orientation, data.shape, slice_index) return peaks_slicer @@ -197,15 +278,22 @@ def create_scene(actors, orientation, slice_index, volume_shape): return scene -def render_scene(scene, window_size, interactor, output, silent): +def render_scene(scene, window_size, interactor, + output, silent, interactables=[], title='Viewer'): """ Render a scene. If a output is supplied, a snapshot of the rendered scene is taken. 
""" if not silent: - showm = window.ShowManager(scene, size=window_size, + showm = window.ShowManager(scene, title=title, + size=window_size, reset_camera=False, interactor_style=interactor) + + if len(interactables) > 0: + showm.iren.AddObserver('KeyReleaseEvent', + KeyUpDownCallback(interactables)) + showm.initialize() showm.start() diff --git a/scripts/scil_visualize_fodf.py b/scripts/scil_visualize_fodf.py index 1ae8162e7..3afa3efa2 100755 --- a/scripts/scil_visualize_fodf.py +++ b/scripts/scil_visualize_fodf.py @@ -263,7 +263,7 @@ def main(): # Instantiate a texture slicer actor if a background image is supplied if 'bg' in data: - bg_actor = create_texture_slicer(data['bg'], + bg_actor = create_texture_slicer(data['bg'], mask, args.slice_index, args.bg_range, args.axis_name, @@ -282,6 +282,7 @@ def main(): np.ones(data['peaks'].shape[:-1]) * args.peaks_length peaks_actor = create_peaks_slicer(data['peaks'], args.axis_name, + args.slice_index, peaks_values, mask, args.peaks_color, diff --git a/scripts/scil_visualize_nifti.py b/scripts/scil_visualize_nifti.py new file mode 100755 index 000000000..50926638e --- /dev/null +++ b/scripts/scil_visualize_nifti.py @@ -0,0 +1,290 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Visualize 2-dimensional fODF slice loaded from disk. + +Given an image of SH coefficients, this script displays a slice in a +given orientation. The user can also add a background on top of which the +fODF are to be displayed. Using a full SH basis, the script can be used to +visualize asymmetric fODF. The user can supply a peaks image to visualize +peaks on top of fODF. 
+""" + +import argparse +import warnings + +import nibabel as nib +import numpy as np + +from dipy.data import get_sphere, SPHERE_FILES +from dipy.reconst.shm import order_from_ncoef + +from scilpy.io.utils import (add_sh_basis_args, add_overwrite_arg, + assert_inputs_exist, assert_outputs_exist) +from scilpy.io.image import get_data_as_mask +from scilpy.viz.scene_utils import (InteractableSlicer, + create_odf_slicer, + create_texture_slicer, + create_peaks_slicer, + create_scene, + render_scene) + + +def _build_arg_parser(): + p = argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + + # SH visualization arguments + p.add_argument('--sh', default=None, help='SH image file.') + + p.add_argument('--full_basis', action='store_true', + help='Use full SH basis to reconstruct SH from ' + 'coefficients.') + + p.add_argument('--sphere', default='symmetric724', + choices=sorted(SPHERE_FILES.keys()), + help='Name of the sphere used to reconstruct SF. ' + '[%(default)s]') + + p.add_argument('--sph_subdivide', type=int, + help='Number of subdivisions for given sphere. If not ' + 'supplied, use the given sphere as is.') + + p.add_argument('--mask', + help='Optional mask file. Only fODF inside ' + 'the mask are displayed.') + + p.add_argument('--colormap', default=None, + help='Colormap for the ODF slicer. If None, ' + 'then a RGB colormap will be used. [%(default)s]') + + p.add_argument('--scale', default=0.5, type=float, + help='Scaling factor for ODF. [%(default)s]') + + p.add_argument('--radial_scale_off', action='store_true', + help='Disable radial scale for ODF slicer.') + + p.add_argument('--norm_off', action='store_true', + help='Disable normalization of ODF slicer.') + + add_sh_basis_args(p) + + # Texture image options + p.add_argument('--texture', + help='Texture image file. 
If RGB, values must ' + 'be between 0 and 255.') + + p.add_argument('--tex_range', nargs=2, metavar=('MIN', 'MAX'), type=float, + help='The range of values mapped to range [0, 1] ' + 'for texture image. [(bg.min(), bg.max())]') + + p.add_argument('--tex_opacity', type=float, default=1.0, + help='The opacity of the texture image. Opacity of 0.0 ' + 'means transparent and 1.0 is completely visible. ' + '[%(default)s]') + + p.add_argument('--tex_offset', type=float, default=0.5, + help='The offset of the texture image with regard to the' + ' other images shown. [%(default)s]') + + p.add_argument('--tex_interpolation', + default='nearest', choices={'linear', 'nearest'}, + help='Interpolation mode for the Texture image. ' + '[%(default)s]') + + # Peaks input file options + p.add_argument('--peaks', help='Peaks image file.') + + p.add_argument('--peaks_color', nargs=3, type=float, + help='Color used for peaks. if None, ' + 'then a RGB colormap is used. [%(default)s]') + + p.add_argument('--peaks_width', default=1.0, type=float, + help='Width of peaks segments. [%(default)s]') + + peaks_scale_group = p.add_mutually_exclusive_group() + peaks_scale_group.add_argument('--peaks_vals', + help='Peaks values file.') + + peaks_scale_group.add_argument('--peaks_length', default=0.65, type=float, + help='Length of the peaks segments. ' + '[%(default)s]') + + # Window configuration options + p.add_argument('--slice_index', type=int, + help='Index of the slice to visualize along a given axis. ' + 'Defaults to middle of volume.') + + p.add_argument('--win_dims', nargs=2, metavar=('WIDTH', 'HEIGHT'), + default=(768, 768), type=int, + help='The dimensions for the vtk window. [%(default)s]') + + p.add_argument('--vtk_interactor_mode', default='trackball', + choices={'image', 'trackball'}, + help='Specify interactor mode for vtk window. 
' + '[%(default)s]') + + p.add_argument('--orientation', default='axial', + choices={'axial', 'coronal', 'sagittal'}, + help='Name of the axis to visualize. [%(default)s]') + + p.add_argument('--silent', action='store_true', + help='Disable interactive visualization.') + + p.add_argument('--output', help='Path to output file.') + + add_overwrite_arg(p) + + return p + + +def _parse_args(parser): + args = parser.parse_args() + inputs = [] + output = [] + if args.sh: + inputs.append(args.sh) + if args.texture: + inputs.append(args.texture) + if args.peaks: + if args.full_basis: + # FURY doesn't support asymmetric peaks visualization + warnings.warn('Asymmetric peaks visualization is not supported ' + 'by FURY. Peaks shown as symmetric peaks.', + UserWarning) + inputs.append(args.peaks) + if args.peaks_vals: + inputs.append(args.peaks_vals) + else: + if args.peaks_vals: + parser.error('Peaks values image supplied without peaks. Specify ' + 'a peaks image with --peaks to use this feature.') + if len(inputs) == 0: + parser.error('No input is provided. Please specify at least ' + 'one input to continue.') + if args.mask: + inputs.append(args.mask) + + if args.output: + output.append(args.output) + else: + if args.silent: + parser.error('Silent mode is enabled but no output is specified.' + 'Specify an output with --output to use silent mode.') + + assert_inputs_exist(parser, inputs) + assert_outputs_exist(parser, args, output) + + return args + + +def validate_order(parser, sh_order, ncoeffs, full_basis): + """ + Check that the sh order agrees with the number + of coefficients in the input + """ + if full_basis: + expected_ncoeffs = (sh_order + 1)**2 + else: + expected_ncoeffs = (sh_order + 1) * (sh_order + 2) // 2 + if ncoeffs != expected_ncoeffs: + parser.error('Invalid number of coefficients for fODF. 
' + 'Use --full_basis if your input is in ' + 'full SH basis.') + + +def validate_mask_matches_volume(parser, volume, mask): + if mask is None: + return + if volume.shape[:3] != mask.shape: + parser.error('Dimensions mismatch. {0} is not {1}' + .format(volume.shape, mask.shape)) + + +def main(): + parser = _build_arg_parser() + args = _parse_args(parser) + + sph = get_sphere(args.sphere) + mask = None + grid_shape = None + + if args.mask: + mask = get_data_as_mask(nib.load(args.mask), dtype=bool) + grid_shape = mask.shape + if args.sh: + sh = nib.load(args.sh).get_fdata(dtype=np.float32) + sh_order = order_from_ncoef(sh.shape[-1], args.full_basis) + validate_order(parser, sh_order, sh.shape[-1], args.full_basis) + validate_mask_matches_volume(parser, sh, mask) + if grid_shape is None: + grid_shape = sh.shape[:3] + if args.texture: + tex = nib.load(args.texture).get_fdata(dtype=np.float32) + validate_mask_matches_volume(parser, tex, mask) + if grid_shape is None: + grid_shape = tex.shape[:3] + if args.peaks: + peaks = nib.load(args.peaks).get_fdata(dtype=np.float32) + peaks = peaks.reshape(peaks.shape[:3] + (-1, 3)) + validate_mask_matches_volume(parser, peaks, mask) + if args.peaks_vals: + peaks_vals = nib.load(args.peaks_vals).get_fdata(dtype=np.float32) + if peaks_vals.shape[:4] != peaks.shape[:4]: + raise ValueError('Peaks values dimensions {0} do not agree ' + 'with peaks dimensions {1}.'.format( + peaks_vals.shape, + peaks.shape)) + if grid_shape is None: + grid_shape = peaks.shape[:3] + + actors = [] + if args.sh: + # Instantiate the ODF slicer actor + odf_actor = create_odf_slicer(sh, mask, sph, + args.sph_subdivide, sh_order, + args.sh_basis, args.full_basis, + args.orientation, args.scale, + not args.radial_scale_off, + not args.norm_off, args.colormap, + args.slice_index) + actors.append(odf_actor) + + # Instantiate a texture slicer actor if a texture image is supplied + if args.texture: + tex_actor = create_texture_slicer(tex, mask, + args.slice_index, + 
args.tex_range, + args.orientation, + args.tex_opacity, + args.tex_offset, + args.tex_interpolation) + actors.append(tex_actor) + + # Instantiate a peaks slicer actor if peaks are supplied + if args.peaks: + if not args.peaks_vals: + peaks_vals = np.ones(peaks.shape[:-1]) * args.peaks_length + peaks_actor = create_peaks_slicer(peaks, + args.orientation, + args.slice_index, + peaks_vals, + mask, + args.peaks_color, + args.peaks_width) + actors.append(peaks_actor) + + # Prepare and display the scene + scene = create_scene(actors, args.orientation, + args.slice_index, grid_shape) + interactables = [] + for actor in actors: + interactables.append(InteractableSlicer(actor, grid_shape, + args.orientation, + args.slice_index)) + render_scene(scene, args.win_dims, args.vtk_interactor_mode, + args.output, args.silent, interactables) + + +if __name__ == '__main__': + main() From 54cb316ae3b4c7ac3fa943b5c310d3e3ec88f9e1 Mon Sep 17 00:00:00 2001 From: "charles.poirier" Date: Tue, 13 Apr 2021 22:44:05 -0400 Subject: [PATCH 07/45] RF: Remove visualize_nifti script --- scripts/scil_visualize_nifti.py | 290 -------------------------------- 1 file changed, 290 deletions(-) delete mode 100755 scripts/scil_visualize_nifti.py diff --git a/scripts/scil_visualize_nifti.py b/scripts/scil_visualize_nifti.py deleted file mode 100755 index 50926638e..000000000 --- a/scripts/scil_visualize_nifti.py +++ /dev/null @@ -1,290 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -""" -Visualize 2-dimensional fODF slice loaded from disk. - -Given an image of SH coefficients, this script displays a slice in a -given orientation. The user can also add a background on top of which the -fODF are to be displayed. Using a full SH basis, the script can be used to -visualize asymmetric fODF. The user can supply a peaks image to visualize -peaks on top of fODF. 
-""" - -import argparse -import warnings - -import nibabel as nib -import numpy as np - -from dipy.data import get_sphere, SPHERE_FILES -from dipy.reconst.shm import order_from_ncoef - -from scilpy.io.utils import (add_sh_basis_args, add_overwrite_arg, - assert_inputs_exist, assert_outputs_exist) -from scilpy.io.image import get_data_as_mask -from scilpy.viz.scene_utils import (InteractableSlicer, - create_odf_slicer, - create_texture_slicer, - create_peaks_slicer, - create_scene, - render_scene) - - -def _build_arg_parser(): - p = argparse.ArgumentParser(description=__doc__, - formatter_class=argparse.RawTextHelpFormatter) - - # SH visualization arguments - p.add_argument('--sh', default=None, help='SH image file.') - - p.add_argument('--full_basis', action='store_true', - help='Use full SH basis to reconstruct SH from ' - 'coefficients.') - - p.add_argument('--sphere', default='symmetric724', - choices=sorted(SPHERE_FILES.keys()), - help='Name of the sphere used to reconstruct SF. ' - '[%(default)s]') - - p.add_argument('--sph_subdivide', type=int, - help='Number of subdivisions for given sphere. If not ' - 'supplied, use the given sphere as is.') - - p.add_argument('--mask', - help='Optional mask file. Only fODF inside ' - 'the mask are displayed.') - - p.add_argument('--colormap', default=None, - help='Colormap for the ODF slicer. If None, ' - 'then a RGB colormap will be used. [%(default)s]') - - p.add_argument('--scale', default=0.5, type=float, - help='Scaling factor for ODF. [%(default)s]') - - p.add_argument('--radial_scale_off', action='store_true', - help='Disable radial scale for ODF slicer.') - - p.add_argument('--norm_off', action='store_true', - help='Disable normalization of ODF slicer.') - - add_sh_basis_args(p) - - # Texture image options - p.add_argument('--texture', - help='Texture image file. 
If RGB, values must ' - 'be between 0 and 255.') - - p.add_argument('--tex_range', nargs=2, metavar=('MIN', 'MAX'), type=float, - help='The range of values mapped to range [0, 1] ' - 'for texture image. [(bg.min(), bg.max())]') - - p.add_argument('--tex_opacity', type=float, default=1.0, - help='The opacity of the texture image. Opacity of 0.0 ' - 'means transparent and 1.0 is completely visible. ' - '[%(default)s]') - - p.add_argument('--tex_offset', type=float, default=0.5, - help='The offset of the texture image with regard to the' - ' other images shown. [%(default)s]') - - p.add_argument('--tex_interpolation', - default='nearest', choices={'linear', 'nearest'}, - help='Interpolation mode for the Texture image. ' - '[%(default)s]') - - # Peaks input file options - p.add_argument('--peaks', help='Peaks image file.') - - p.add_argument('--peaks_color', nargs=3, type=float, - help='Color used for peaks. if None, ' - 'then a RGB colormap is used. [%(default)s]') - - p.add_argument('--peaks_width', default=1.0, type=float, - help='Width of peaks segments. [%(default)s]') - - peaks_scale_group = p.add_mutually_exclusive_group() - peaks_scale_group.add_argument('--peaks_vals', - help='Peaks values file.') - - peaks_scale_group.add_argument('--peaks_length', default=0.65, type=float, - help='Length of the peaks segments. ' - '[%(default)s]') - - # Window configuration options - p.add_argument('--slice_index', type=int, - help='Index of the slice to visualize along a given axis. ' - 'Defaults to middle of volume.') - - p.add_argument('--win_dims', nargs=2, metavar=('WIDTH', 'HEIGHT'), - default=(768, 768), type=int, - help='The dimensions for the vtk window. [%(default)s]') - - p.add_argument('--vtk_interactor_mode', default='trackball', - choices={'image', 'trackball'}, - help='Specify interactor mode for vtk window. 
' - '[%(default)s]') - - p.add_argument('--orientation', default='axial', - choices={'axial', 'coronal', 'sagittal'}, - help='Name of the axis to visualize. [%(default)s]') - - p.add_argument('--silent', action='store_true', - help='Disable interactive visualization.') - - p.add_argument('--output', help='Path to output file.') - - add_overwrite_arg(p) - - return p - - -def _parse_args(parser): - args = parser.parse_args() - inputs = [] - output = [] - if args.sh: - inputs.append(args.sh) - if args.texture: - inputs.append(args.texture) - if args.peaks: - if args.full_basis: - # FURY doesn't support asymmetric peaks visualization - warnings.warn('Asymmetric peaks visualization is not supported ' - 'by FURY. Peaks shown as symmetric peaks.', - UserWarning) - inputs.append(args.peaks) - if args.peaks_vals: - inputs.append(args.peaks_vals) - else: - if args.peaks_vals: - parser.error('Peaks values image supplied without peaks. Specify ' - 'a peaks image with --peaks to use this feature.') - if len(inputs) == 0: - parser.error('No input is provided. Please specify at least ' - 'one input to continue.') - if args.mask: - inputs.append(args.mask) - - if args.output: - output.append(args.output) - else: - if args.silent: - parser.error('Silent mode is enabled but no output is specified.' - 'Specify an output with --output to use silent mode.') - - assert_inputs_exist(parser, inputs) - assert_outputs_exist(parser, args, output) - - return args - - -def validate_order(parser, sh_order, ncoeffs, full_basis): - """ - Check that the sh order agrees with the number - of coefficients in the input - """ - if full_basis: - expected_ncoeffs = (sh_order + 1)**2 - else: - expected_ncoeffs = (sh_order + 1) * (sh_order + 2) // 2 - if ncoeffs != expected_ncoeffs: - parser.error('Invalid number of coefficients for fODF. 
' - 'Use --full_basis if your input is in ' - 'full SH basis.') - - -def validate_mask_matches_volume(parser, volume, mask): - if mask is None: - return - if volume.shape[:3] != mask.shape: - parser.error('Dimensions mismatch. {0} is not {1}' - .format(volume.shape, mask.shape)) - - -def main(): - parser = _build_arg_parser() - args = _parse_args(parser) - - sph = get_sphere(args.sphere) - mask = None - grid_shape = None - - if args.mask: - mask = get_data_as_mask(nib.load(args.mask), dtype=bool) - grid_shape = mask.shape - if args.sh: - sh = nib.load(args.sh).get_fdata(dtype=np.float32) - sh_order = order_from_ncoef(sh.shape[-1], args.full_basis) - validate_order(parser, sh_order, sh.shape[-1], args.full_basis) - validate_mask_matches_volume(parser, sh, mask) - if grid_shape is None: - grid_shape = sh.shape[:3] - if args.texture: - tex = nib.load(args.texture).get_fdata(dtype=np.float32) - validate_mask_matches_volume(parser, tex, mask) - if grid_shape is None: - grid_shape = tex.shape[:3] - if args.peaks: - peaks = nib.load(args.peaks).get_fdata(dtype=np.float32) - peaks = peaks.reshape(peaks.shape[:3] + (-1, 3)) - validate_mask_matches_volume(parser, peaks, mask) - if args.peaks_vals: - peaks_vals = nib.load(args.peaks_vals).get_fdata(dtype=np.float32) - if peaks_vals.shape[:4] != peaks.shape[:4]: - raise ValueError('Peaks values dimensions {0} do not agree ' - 'with peaks dimensions {1}.'.format( - peaks_vals.shape, - peaks.shape)) - if grid_shape is None: - grid_shape = peaks.shape[:3] - - actors = [] - if args.sh: - # Instantiate the ODF slicer actor - odf_actor = create_odf_slicer(sh, mask, sph, - args.sph_subdivide, sh_order, - args.sh_basis, args.full_basis, - args.orientation, args.scale, - not args.radial_scale_off, - not args.norm_off, args.colormap, - args.slice_index) - actors.append(odf_actor) - - # Instantiate a texture slicer actor if a texture image is supplied - if args.texture: - tex_actor = create_texture_slicer(tex, mask, - args.slice_index, - 
args.tex_range, - args.orientation, - args.tex_opacity, - args.tex_offset, - args.tex_interpolation) - actors.append(tex_actor) - - # Instantiate a peaks slicer actor if peaks are supplied - if args.peaks: - if not args.peaks_vals: - peaks_vals = np.ones(peaks.shape[:-1]) * args.peaks_length - peaks_actor = create_peaks_slicer(peaks, - args.orientation, - args.slice_index, - peaks_vals, - mask, - args.peaks_color, - args.peaks_width) - actors.append(peaks_actor) - - # Prepare and display the scene - scene = create_scene(actors, args.orientation, - args.slice_index, grid_shape) - interactables = [] - for actor in actors: - interactables.append(InteractableSlicer(actor, grid_shape, - args.orientation, - args.slice_index)) - render_scene(scene, args.win_dims, args.vtk_interactor_mode, - args.output, args.silent, interactables) - - -if __name__ == '__main__': - main() From bac45bb0a19eaa0ee2cb4e84755a336639bd23c4 Mon Sep 17 00:00:00 2001 From: "charles.poirier" Date: Tue, 13 Apr 2021 22:48:45 -0400 Subject: [PATCH 08/45] NF: Update requirements for fury 0.7 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index bb4750ea2..c491d1cdf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ coloredlogs==10.0.* cycler==0.10.* Cython==0.29.* dipy==1.3.* --e git+https://github.com/fury-gl/fury.git@c7505e207105684ba4353389a5a4fe7d8c10bcf0 #egg=fury +fury==0.7.* future==0.17.* h5py==2.10.* kiwisolver==1.0.* From 55f73de7cd06a76ef2a54ccefc783845bceb5cc6 Mon Sep 17 00:00:00 2001 From: "charles.poirier" Date: Wed, 12 May 2021 10:29:21 -0400 Subject: [PATCH 09/45] ENH: Add support for asym peaks visualization --- scilpy/viz/scene_utils.py | 6 ++++-- scripts/scil_visualize_fodf.py | 8 ++------ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/scilpy/viz/scene_utils.py b/scilpy/viz/scene_utils.py index 9bac584dd..2cc763556 100644 --- a/scilpy/viz/scene_utils.py +++ 
b/scilpy/viz/scene_utils.py @@ -238,7 +238,8 @@ def create_texture_slicer(texture, mask, slice_index, value_range=None, def create_peaks_slicer(data, orientation, slice_index, peak_values=None, - mask=None, color=None, peaks_width=1.0): + mask=None, color=None, peaks_width=1.0, + symmetric=False): """ Create a peaks slicer actor rendering a slice of the fODF peaks """ @@ -249,7 +250,8 @@ def create_peaks_slicer(data, orientation, slice_index, peak_values=None, # Instantiate peaks slicer peaks_slicer = actor.peak_slicer(data, peaks_values=peak_values, mask=mask, colors=color, - linewidth=peaks_width) + linewidth=peaks_width, + symmetric=symmetric) set_display_extent(peaks_slicer, orientation, data.shape, slice_index) return peaks_slicer diff --git a/scripts/scil_visualize_fodf.py b/scripts/scil_visualize_fodf.py index 3afa3efa2..0117e6040 100755 --- a/scripts/scil_visualize_fodf.py +++ b/scripts/scil_visualize_fodf.py @@ -154,11 +154,6 @@ def _parse_args(parser): inputs.append(args.background) if args.peaks: - if args.full_basis: - # FURY doesn't support asymmetric peaks visualization - warnings.warn('Asymmetric peaks visualization is not supported ' - 'by FURY. 
Peaks shown as symmetric peaks.', - UserWarning) inputs.append(args.peaks) if args.peaks_values: inputs.append(args.peaks_values) @@ -286,7 +281,8 @@ def main(): peaks_values, mask, args.peaks_color, - args.peaks_width) + args.peaks_width, + not args.full_basis) actors.append(peaks_actor) From 1e583d331d0607fd9abaa51d42b9b26e8419cae5 Mon Sep 17 00:00:00 2001 From: "charles.poirier" Date: Wed, 12 May 2021 10:36:57 -0400 Subject: [PATCH 10/45] RF: Delete unused methods --- scilpy/viz/scene_utils.py | 84 +-------------------------------------- 1 file changed, 1 insertion(+), 83 deletions(-) diff --git a/scilpy/viz/scene_utils.py b/scilpy/viz/scene_utils.py index 2cc763556..5c53d4935 100644 --- a/scilpy/viz/scene_utils.py +++ b/scilpy/viz/scene_utils.py @@ -19,84 +19,6 @@ class CamParams(Enum): ZOOM_FACTOR = 'zoom_factor' -class InteractableSlicer(): - def __init__(self, slicer, shape, orientation, slice_index): - self.slicer = slicer - self.shape = shape - if orientation not in ['sagittal', 'coronal', 'axial']: - raise ValueError('Invalid orientation for slicer.') - self.orientation = orientation - if slice_index is None: - slice_index = get_middle_slice_index(orientation, shape) - self.slice_index = slice_index - - def next_slice(self): - if self.orientation == 'sagittal': - index = min(self.slice_index + 1, self.shape[0] - 1) - self.slicer.display_extent(index, index, 0, - self.shape[1] - 1, - 0, self.shape[2] - 1) - elif self.orientation == 'coronal': - index = min(self.slice_index + 1, self.shape[1] - 1) - self.slicer.display_extent(0, self.shape[0] - 1, - index, index, - 0, self.shape[2] - 1) - elif self.orientation == 'axial': - index = min(self.slice_index + 1, self.shape[2] - 1) - self.slicer.display_extent(0, self.shape[0] - 1, - 0, self.shape[1] - 1, - index, index) - self.slice_index = index - - def prev_slice(self): - if self.orientation == 'sagittal': - index = max(self.slice_index - 1, 0) - self.slicer.display_extent(index, index, 0, - self.shape[1] - 1, 
- 0, self.shape[2] - 1) - elif self.orientation == 'coronal': - index = max(self.slice_index - 1, 0) - self.slicer.display_extent(0, self.shape[0] - 1, - index, index, - 0, self.shape[2] - 1) - elif self.orientation == 'axial': - index = max(self.slice_index - 1, 0) - self.slicer.display_extent(0, self.shape[0] - 1, - 0, self.shape[1] - 1, - index, index) - self.slice_index = index - - -class KeyUpDownCallback(object): - """ - Callback for keyboard up/down interaction. - """ - def __init__(self, interactables): - self.interactables = interactables - - def __call__(self, caller, ev): - key = caller.GetKeySym() - if key == 'Up': - for i in self.interactables: - i.next_slice() - elif key == 'Down': - for i in self.interactables: - i.prev_slice() - caller.Render() - - -def get_middle_slice_index(orientation, shape): - if orientation == 'sagittal': - slice_index = shape[0] // 2 - elif orientation == 'coronal': - slice_index = shape[1] // 2 - elif orientation == 'axial': - slice_index = shape[2] // 2 - else: - raise ValueError('Invalid axis name: {0}'.format(orientation)) - return slice_index - - def initialize_camera(orientation, slice_index, volume_shape): """ Initialize a camera for a given orientation. @@ -281,7 +203,7 @@ def create_scene(actors, orientation, slice_index, volume_shape): def render_scene(scene, window_size, interactor, - output, silent, interactables=[], title='Viewer'): + output, silent, title='Viewer'): """ Render a scene. If a output is supplied, a snapshot of the rendered scene is taken. 
@@ -292,10 +214,6 @@ def render_scene(scene, window_size, interactor, reset_camera=False, interactor_style=interactor) - if len(interactables) > 0: - showm.iren.AddObserver('KeyReleaseEvent', - KeyUpDownCallback(interactables)) - showm.initialize() showm.start() From eb9a50ecce6e7484d21f3c009d3891579189530d Mon Sep 17 00:00:00 2001 From: "charles.poirier" Date: Wed, 12 May 2021 10:38:32 -0400 Subject: [PATCH 11/45] TEST: Update tests --- scripts/tests/test_visualize_fodf.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/scripts/tests/test_visualize_fodf.py b/scripts/tests/test_visualize_fodf.py index dce8956dd..679fb76c5 100644 --- a/scripts/tests/test_visualize_fodf.py +++ b/scripts/tests/test_visualize_fodf.py @@ -16,25 +16,6 @@ def test_help_option(script_runner): assert ret.success -def test_peaks_full_basis(script_runner): - os.chdir(os.path.expanduser(tmp_dir.name)) - in_fodf = os.path.join(get_home(), 'tracking', 'fodf.nii.gz') - in_peaks = os.path.join(get_home(), 'tracking', 'peaks.nii.gz') - # Tests that the use of a full SH basis with peaks raises a warning - with warnings.catch_warnings(record=True) as w: - ret = script_runner.run('scil_visualize_fodf.py', in_fodf, - '--full_basis', '--peaks', in_peaks) - assert(len(w) > 0) - assert(issubclass(w[0].category, UserWarning)) - assert('Asymmetric peaks visualization is not supported ' - 'by FURY. Peaks shown as symmetric peaks.' 
in - str(w[0].message)) - - # The whole execution should fail because - # the input fODF is not in full basis - assert (not ret.success) - - def test_full_basis_input_without_arg(script_runner): os.chdir(os.path.expanduser(tmp_dir.name)) in_fodf = os.path.join(get_home(), 'tracking', 'fodf_full.nii.gz') From 4cae12bd7d04a7e37c4ec0a85ac0a454c743ffef Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Thu, 13 May 2021 20:01:02 -0400 Subject: [PATCH 12/45] pep8 --- scilpy/tractanalysis/features.py | 2 +- scripts/tests/test_extract_ushape.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scilpy/tractanalysis/features.py b/scilpy/tractanalysis/features.py index 7efbd9f45..066c4c0cf 100644 --- a/scilpy/tractanalysis/features.py +++ b/scilpy/tractanalysis/features.py @@ -85,7 +85,7 @@ def remove_loops_and_sharp_turns(streamlines, ids.append(i) streamlines_clean.append(s) else: - streamlines_clean.append = np.empty((1,3)) + streamlines_clean.append = np.empty((1, 3)) if use_qb: ids = [] diff --git a/scripts/tests/test_extract_ushape.py b/scripts/tests/test_extract_ushape.py index 5cfe6069d..5255fcaac 100644 --- a/scripts/tests/test_extract_ushape.py +++ b/scripts/tests/test_extract_ushape.py @@ -8,8 +8,8 @@ # If they already exist, this only takes 5 seconds (check md5sum) -#fetch_data(get_testing_files_dict(), keys=['connectivity.zip']) -#tmp_dir = tempfile.TemporaryDirectory() +# fetch_data(get_testing_files_dict(), keys=['connectivity.zip']) +# tmp_dir = tempfile.TemporaryDirectory() def test_help_option(script_runner): From 653cf0109c89e8484af17b18a770b313771af619 Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Thu, 3 Jun 2021 11:49:41 -0400 Subject: [PATCH 13/45] remove boundary ushape --- scripts/scil_extract_ushape.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 453cb5dc9..4ed094284 100755 --- a/scripts/scil_extract_ushape.py +++ 
b/scripts/scil_extract_ushape.py @@ -69,9 +69,9 @@ def main(): check_tracts_same_format(parser, [args.in_tractogram, args.out_tractogram, args.remaining_tractogram]) - if min(args.ufactor) <= 0 or max(args.ufactor) > 1: - parser.error('U-factor "{}" '.format(args.ufactor) + - 'must be greater than 0 and lower or equal to 1.') + #if min(args.ufactor) <= 0 or max(args.ufactor) > 1: + # parser.error('U-factor "{}" '.format(args.ufactor) + + # 'must be greater than 0 and lower or equal to 1.') tractogram = load_tractogram_with_reference( parser, args, args.in_tractogram) From 822e0dd74c8eae70d4cbc460eb3ab700a65d5672 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antoine=20Th=C3=A9berge?= Date: Thu, 3 Jun 2021 13:30:44 -0400 Subject: [PATCH 14/45] FIX: Fix GT bundle reference --- scripts/scil_score_tractogram.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/scripts/scil_score_tractogram.py b/scripts/scil_score_tractogram.py index 721d57517..5f481ed90 100755 --- a/scripts/scil_score_tractogram.py +++ b/scripts/scil_score_tractogram.py @@ -150,7 +150,13 @@ def main(): logging.info("Verifying compatibility with ground-truth") for gt in args.gt_bundles: - compatible = is_header_compatible(sft, gt) + _, gt_ext = os.path.splitext(gt) + if gt_ext in ['.trk', '.tck']: + gt_bundle = load_tractogram_with_reference( + parser, args, gt, bbox_check=False) + else: + gt_bundle = gt + compatible = is_header_compatible(sft, gt_bundle) if not compatible: parser.error("Input tractogram incompatible with" " {}".format(gt)) From 5a357ed27535aeff595a35906b645ebcc51959f0 Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Fri, 4 Jun 2021 11:13:55 -0400 Subject: [PATCH 15/45] fix francois comments --- scilpy/tractanalysis/features.py | 17 +++++++++-------- scripts/scil_extract_ushape.py | 14 ++++++-------- 2 files changed, 15 insertions(+), 16 deletions(-) diff --git a/scilpy/tractanalysis/features.py b/scilpy/tractanalysis/features.py index 066c4c0cf..c9e8ecfb4 100644 --- 
a/scilpy/tractanalysis/features.py +++ b/scilpy/tractanalysis/features.py @@ -7,16 +7,16 @@ from dipy.segment.metric import ResampleFeature from dipy.segment.metric import AveragePointwiseEuclideanMetric from dipy.tracking import metrics as tm +from scilpy.tracking.tools import resample_streamlines_num_points import numpy as np -def detect_ushape(streamlines, ufactor=[0.5, 1]): +def detect_ushape(sft, ufactor=[0.5, 1]): """ Remove loops and sharp turns from a list of streamlines. Parameters ---------- - streamlines: list of ndarray - The list of streamlines from which to remove loops and sharp turns. + sft: State Full tractogram used to remove loops and sharp turns. threshold: float Threshold bla bla bla @@ -26,12 +26,13 @@ def detect_ushape(streamlines, ufactor=[0.5, 1]): Only the ids are returned so proper filtering can be done afterwards """ ids = [] - for i, s in enumerate(streamlines): - if len(s) >= 4: + new_sft = resample_streamlines_num_points(sft, 4) + for i, s in enumerate(new_sft.streamlines): + if len(s) == 4: first_point = s[0] last_point = s[-1] - second_point = s[round(len(s)/3)] - third_point = s[round(2*len(s)/3)] + second_point = s[1] + third_point = s[2] v1 = first_point - second_point v2 = second_point - third_point @@ -43,7 +44,7 @@ def detect_ushape(streamlines, ufactor=[0.5, 1]): val = np.dot(np.cross(v1, v2), np.cross(v2, v3)) - if min(ufactor) < val < max(ufactor): + if min(ufactor) <= val <= max(ufactor): ids.append(i) return ids diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 4ed094284..3f4b51d85 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -69,22 +69,20 @@ def main(): check_tracts_same_format(parser, [args.in_tractogram, args.out_tractogram, args.remaining_tractogram]) - #if min(args.ufactor) <= 0 or max(args.ufactor) > 1: - # parser.error('U-factor "{}" '.format(args.ufactor) + - # 'must be greater than 0 and lower or equal to 1.') + if not( -1 <= 
min(args.ufactor) <= 1 and -1 <= max(args.ufactor) <= 1): + parser.error('U-factor "{}" '.format(args.ufactor) + + 'must be between -1 and 1.') tractogram = load_tractogram_with_reference( parser, args, args.in_tractogram) - streamlines = tractogram.streamlines - ids_c = [] ids_l = [] - if len(streamlines) > 1: - ids_c = detect_ushape(streamlines, args.ufactor) - ids_l = np.setdiff1d(np.arange(len(streamlines)), ids_c) + if len(tractogram.streamlines) > 1: + ids_c = detect_ushape(tractogram, args.ufactor) + ids_l = np.setdiff1d(np.arange(len(tractogram.streamlines)), ids_c) else: parser.error( 'Zero or one streamline in {}'.format(args.in_tractogram) + From 47ef3e96a076382c96925e78342d14888a412fcd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antoine=20Th=C3=A9berge?= Date: Mon, 7 Jun 2021 11:05:22 -0400 Subject: [PATCH 16/45] FIX: Todi chunk bug --- scilpy/tractanalysis/todi.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scilpy/tractanalysis/todi.py b/scilpy/tractanalysis/todi.py index 7e12cfa53..a1e385313 100644 --- a/scilpy/tractanalysis/todi.py +++ b/scilpy/tractanalysis/todi.py @@ -200,7 +200,8 @@ def smooth_todi_spatial(self, sigma=0.5): new_todi = deepcopy(tmp_todi) else: new_todi = np.hstack((new_todi, tmp_todi)) - self.todi = np.delete(self.todi, range(0, chunk_size), axis=1) + self.todi = np.delete(self.todi, range( + 0, min(self.todi.shape[1], chunk_size)), axis=1) chunk_count -= 1 self.mask = new_mask From f1436b310143d78fa8d9e90db1470c828d8bfc98 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antoine=20Th=C3=A9berge?= Date: Mon, 7 Jun 2021 11:26:23 -0400 Subject: [PATCH 17/45] FIX: Add reference --- scripts/scil_generate_priors_from_bundle.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/scripts/scil_generate_priors_from_bundle.py b/scripts/scil_generate_priors_from_bundle.py index b6d864c6a..ef53b88e6 100755 --- a/scripts/scil_generate_priors_from_bundle.py +++ b/scripts/scil_generate_priors_from_bundle.py @@ 
-12,13 +12,14 @@ import os from dipy.data import get_sphere -from dipy.io.streamline import load_tractogram from dipy.reconst.shm import sf_to_sh, sh_to_sf import nibabel as nib import numpy as np from scilpy.io.image import get_data_as_mask +from scilpy.io.streamlines import load_tractogram_with_reference from scilpy.io.utils import (add_overwrite_arg, + add_reference_arg, add_sh_basis_args, assert_inputs_exist, assert_outputs_exist) @@ -60,6 +61,7 @@ def _build_arg_parser(): 'default is current directory.') add_overwrite_arg(p) + add_reference_arg(p) return p @@ -93,8 +95,7 @@ def main(): sh_order = find_order_from_nb_coeff(sh_shape) img_mask = nib.load(args.in_mask) - sft = load_tractogram(args.in_bundle, args.in_fodf, - trk_header_check=True) + sft = load_tractogram_with_reference(parser, args, args.in_bundle) sft.to_vox() if len(sft.streamlines) < 1: raise ValueError('The input bundle contains no streamline.') @@ -108,7 +109,8 @@ def main(): # Fancy masking of 1d indices to limit spatial dilation to WM sub_mask_3d = np.logical_and(get_data_as_mask(img_mask), - todi_obj.reshape_to_3d(todi_obj.get_mask())) + todi_obj.reshape_to_3d( + todi_obj.get_mask())) sub_mask_1d = sub_mask_3d.flatten()[todi_obj.get_mask()] todi_sf = todi_obj.get_todi()[sub_mask_1d] ** 2 From 4bc2a046c73dc1979f6c72a2d013f5bc257a19fb Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Mon, 7 Jun 2021 13:45:24 -0400 Subject: [PATCH 18/45] test condition review --- Jenkinsfile | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index dbc7910db..29b14ecd4 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -55,7 +55,12 @@ pipeline { cleanWs() script { if (env.CHANGE_ID) { - pullRequest.createReviewRequests(['arnaudbore']) + if (pullRequest.createdBy != "arnaudbore"){ + pullRequest.createReviewRequests(['arnaudbore']) + } + else{ + pullRequest.createReviewRequests(['GuillaumeTh']) + } } } } From 1ca9a1b354e7580a95aa31a38ea23be912296225 Mon Sep 17 00:00:00 2001 
From: Arnaud Bore Date: Mon, 7 Jun 2021 14:45:40 -0400 Subject: [PATCH 19/45] fix most of Francois comments + add test --- scilpy/tractanalysis/features.py | 2 -- scripts/scil_extract_ushape.py | 17 ++++++++--------- scripts/tests/test_extract_ushape.py | 15 +++++++++++++-- 3 files changed, 21 insertions(+), 13 deletions(-) diff --git a/scilpy/tractanalysis/features.py b/scilpy/tractanalysis/features.py index c9e8ecfb4..e0b6fdb01 100644 --- a/scilpy/tractanalysis/features.py +++ b/scilpy/tractanalysis/features.py @@ -85,8 +85,6 @@ def remove_loops_and_sharp_turns(streamlines, if tm.winding(s) < max_angle: ids.append(i) streamlines_clean.append(s) - else: - streamlines_clean.append = np.empty((1, 3)) if use_qb: ids = [] diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 3f4b51d85..8ae1fb99b 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -73,30 +73,29 @@ def main(): parser.error('U-factor "{}" '.format(args.ufactor) + 'must be between -1 and 1.') - tractogram = load_tractogram_with_reference( + sft = load_tractogram_with_reference( parser, args, args.in_tractogram) ids_c = [] - ids_l = [] - if len(tractogram.streamlines) > 1: - ids_c = detect_ushape(tractogram, args.ufactor) - ids_l = np.setdiff1d(np.arange(len(tractogram.streamlines)), ids_c) + if len(sft.streamlines) > 1: + ids_c = detect_ushape(sft, args.ufactor) + ids_l = np.setdiff1d(np.arange(len(sft.streamlines)), ids_c) else: parser.error( 'Zero or one streamline in {}'.format(args.in_tractogram) + '. 
The file must have more than one streamline.') if len(ids_c) > 0: - sft_c = filter_tractogram_data(tractogram, ids_c) + sft_c = sft[ids_c] save_tractogram(sft_c, args.out_tractogram) else: logging.warning( - 'No clean streamlines in {}'.format(args.in_tractogram)) + 'No u-shape streamlines in {}'.format(args.in_tractogram)) if args.display_counts: - sc_bf = len(tractogram.streamlines) + sc_bf = len(sft.streamlines) sc_af = len(sft_c.streamlines) print(json.dumps({'streamline_count_before_filtering': int(sc_bf), 'streamline_count_after_filtering': int(sc_af)}, @@ -105,7 +104,7 @@ def main(): if len(ids_l) == 0: logging.warning('No loops in {}'.format(args.in_tractogram)) elif args.remaining_tractogram: - sft_l = filter_tractogram_data(tractogram, ids_l) + sft_l = sft[ids_l] save_tractogram(sft_l, args.remaining_tractogram) diff --git a/scripts/tests/test_extract_ushape.py b/scripts/tests/test_extract_ushape.py index 5255fcaac..e1593d736 100644 --- a/scripts/tests/test_extract_ushape.py +++ b/scripts/tests/test_extract_ushape.py @@ -8,11 +8,22 @@ # If they already exist, this only takes 5 seconds (check md5sum) -# fetch_data(get_testing_files_dict(), keys=['connectivity.zip']) -# tmp_dir = tempfile.TemporaryDirectory() +fetch_data(get_testing_files_dict(), keys=['tracking.zip']) +tmp_dir = tempfile.TemporaryDirectory() def test_help_option(script_runner): ret = script_runner.run('scil_extract_ushape.py', '--help') assert ret.success + +def test_execution_processing(script_runner): + os.chdir(os.path.expanduser(tmp_dir.name)) + in_trk = os.path.join(get_home(), 'tracking', 'union.trk') + out_trk = 'ushape.trk' + remaining_trk = 'remaining.trk' + ret = script_runner.run('scil_extract_ushape.py', in_trk, out_trk, + '--ufactor', '0.5', '1', + '--remaining_tractogram' , remaining_trk, + '--display_counts') + assert ret.success From c7854c9a22a8e50e9e7c5e214b9ec5d3a25f2584 Mon Sep 17 00:00:00 2001 From: ppoulin91 Date: Tue, 8 Jun 2021 10:24:09 -0400 Subject: [PATCH 20/45] 
Remove python3.6 from Jenkinsfile Also update numpy to 1.20 - Add python3.8 in the future --- Jenkinsfile | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index dbc7910db..412707502 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -4,21 +4,11 @@ pipeline { stages { stage('Build') { stages { - stage('Python3.6') { - steps { - withPythonEnv('CPython-3.6') { - sh ''' - pip3 install numpy==1.18.* wheel - pip3 install -e . - ''' - } - } - } stage('Python3.7') { steps { withPythonEnv('CPython-3.7') { sh ''' - pip3 install numpy==1.18.* wheel + pip3 install numpy==1.20.* wheel pip3 install -e . ''' } @@ -31,7 +21,7 @@ pipeline { steps { withPythonEnv('CPython-3.7') { sh ''' - pip3 install numpy==1.18.* wheel + pip3 install numpy==1.20.* wheel pip3 install -e . export MPLBACKEND="agg" export OPENBLAS_NUM_THREADS=1 From 9bfa101b9a3abc838a93cd596d20bfffc7c9f347 Mon Sep 17 00:00:00 2001 From: Alex Valcourt Caron Date: Thu, 25 Feb 2021 00:46:57 -0500 Subject: [PATCH 21/45] Add possibility to block_size image generator to only extract between bounds --- scilpy/image/utils.py | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/scilpy/image/utils.py b/scilpy/image/utils.py index fd7c6f0c6..80aa3646d 100644 --- a/scilpy/image/utils.py +++ b/scilpy/image/utils.py @@ -34,7 +34,7 @@ def count_non_zero_voxels(image): return nb_voxels -def volume_iterator(img, blocksize=1): +def volume_iterator(img, blocksize=1, start=0, end=0): """Generator that iterates on volumes of data. 
Parameters @@ -49,18 +49,23 @@ def volume_iterator(img, blocksize=1): tuple of (list of int, ndarray) The ids of the selected volumes, and the selected data as a 4D array """ + assert end <= img.shape[-1], "End limit provided is greater than the " \ + "total number of volumes in image" + nb_volumes = img.shape[-1] + end = end if end else img.shape[-1] if blocksize == nb_volumes: - yield list(range(nb_volumes)), img.get_fdata(dtype=np.float32) + yield list(range(start, end)), \ + img.get_fdata(dtype=np.float32)[..., start:end] else: - start, end = 0, 0 - for i in range(0, nb_volumes - blocksize, blocksize): - start, end = i, i + blocksize - logging.info("Loading volumes {} to {}.".format(start, end - 1)) - yield list(range(start, end)), img.dataobj[..., start:end] + stop = start + for i in range(start, end - blocksize, blocksize): + start, stop = i, i + blocksize + logging.info("Loading volumes {} to {}.".format(start, stop - 1)) + yield list(range(start, stop)), img.dataobj[..., start:stop] - if end < nb_volumes: + if stop < end: logging.info( - "Loading volumes {} to {}.".format(end, nb_volumes - 1)) - yield list(range(end, nb_volumes)), img.dataobj[..., end:] + "Loading volumes {} to {}.".format(stop, end - 1)) + yield list(range(stop, end)), img.dataobj[..., stop:end] From 2bbd32fe1e5e5989ea67e003127316e604cca559 Mon Sep 17 00:00:00 2001 From: Alex Valcourt Caron Date: Thu, 25 Feb 2021 01:04:06 -0500 Subject: [PATCH 22/45] Extract b0 volumes from image, either : all the b0s, the mean of b0s or the first. Works also on clusters of b0s : can extract the first b0 of each cluster or the mean of each cluster. 
--- scilpy/utils/bvec_bval_tools.py | 96 +++++++++++++++++++++++++++++++-- scripts/scil_extract_b0.py | 65 ++++++++++++---------- 2 files changed, 129 insertions(+), 32 deletions(-) diff --git a/scilpy/utils/bvec_bval_tools.py b/scilpy/utils/bvec_bval_tools.py index 788f87175..7fff301c6 100644 --- a/scilpy/utils/bvec_bval_tools.py +++ b/scilpy/utils/bvec_bval_tools.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- import logging +from enum import Enum import numpy as np @@ -11,6 +12,12 @@ DEFAULT_B0_THRESHOLD = 20 +class B0ExtractionStrategy(Enum): + FIRST = "first" + MEAN = "mean" + ALL = "all" + + def is_normalized_bvecs(bvecs): """ Check if b-vectors are normalized. @@ -282,11 +289,11 @@ def extract_dwi_shell(dwi, bvals, bvecs, bvals_to_extract, tol=20, bvals_to_extract : list of int The list of b-values to extract. tol : int - Loads the data using this block size. Useful when the data is too - large to be loaded in memory. - block_size : int The tolerated gap between the b-values to extract and the actual b-values. + block_size : int + Loads the data using this block size. Useful when the data is too + large to be loaded in memory. Returns ------- @@ -335,6 +342,89 @@ def extract_dwi_shell(dwi, bvals, bvecs, bvals_to_extract, tol=20, return indices, shell_data, output_bvals, output_bvecs +def extract_b0(dwi, b0_mask, extract_in_cluster=False, + strategy=B0ExtractionStrategy.MEAN, block_size=None): + """ + Extract a set of b0 volumes from a dwi dataset + + Parameters + ---------- + dwi : nib.Nifti1Image + Original multi-shell volume. + b0_mask: array of bool + Mask over the time dimension (4th) identifying b0 volumes. + extract_in_cluster: bool + Specify to extract b0's in each continuous sets of b0 volumes + appearing in the input data. + strategy: Enum + The extraction strategy, of either select the first b0 found, select + them all or average them. 
When used in conjunction with the batch + parameter set to True, the strategy is applied individually on each + continuous set found. + block_size : int + Loads the data using this block size. Useful when the data is too + large to be loaded in memory. + + Returns + ------- + b0_data : ndarray + Extracted b0 volumes. + """ + + indices = np.where(b0_mask)[0] + + if block_size is None: + block_size = dwi.shape[-1] + + if not extract_in_cluster and strategy == B0ExtractionStrategy.FIRST: + idx = np.min(indices) + output_b0 = dwi.dataobj[..., idx:idx + 1].squeeze() + else: + # Generate list of clustered b0 in the data + mask = np.ma.masked_array(b0_mask) + mask[~b0_mask] = np.ma.masked + b0_clusters = np.ma.notmasked_contiguous(mask, axis=0) + + if extract_in_cluster or strategy == B0ExtractionStrategy.ALL: + if strategy == B0ExtractionStrategy.ALL: + time_d = len(indices) + else: + time_d = len(b0_clusters) + + output_b0 = np.zeros(dwi.shape[:-1] + (time_d,)) + + for idx, cluster in enumerate(b0_clusters): + if strategy == B0ExtractionStrategy.FIRST: + data = dwi.dataobj[..., cluster.start:cluster.start + 1] + output_b0[..., idx] = data.squeeze() + else: + vol_it = volume_iterator(dwi, block_size, + cluster.start, cluster.stop) + + for vi, data in vol_it: + if strategy == B0ExtractionStrategy.ALL: + in_volume = np.array([i in vi for i in indices]) + output_b0[..., in_volume] = data + elif strategy == B0ExtractionStrategy.MEAN: + output_b0[..., idx] += np.sum(data, -1) + + if strategy == B0ExtractionStrategy.MEAN: + output_b0[..., idx] /= cluster.stop - cluster.start + + else: + output_b0 = np.zeros(dwi.shape[:-1]) + for cluster in b0_clusters: + vol_it = volume_iterator(dwi, block_size, + cluster.start, cluster.stop) + + for _, data in vol_it: + output_b0 += np.sum(data, -1) + + output_b0 /= len(indices) + + return output_b0 + + def flip_mrtrix_gradient_sampling(gradient_sampling_filename, gradient_sampling_flipped_filename, axes): """ diff --git 
a/scripts/scil_extract_b0.py b/scripts/scil_extract_b0.py index e2e093971..db78495aa 100755 --- a/scripts/scil_extract_b0.py +++ b/scripts/scil_extract_b0.py @@ -19,7 +19,8 @@ from scilpy.io.utils import (add_verbose_arg, assert_inputs_exist, add_force_b0_arg) -from scilpy.utils.bvec_bval_tools import check_b0_threshold +from scilpy.utils.bvec_bval_tools import (check_b0_threshold, extract_b0, + B0ExtractionStrategy) from scilpy.utils.filenames import split_name_with_nii logger = logging.getLogger(__file__) @@ -47,38 +48,36 @@ def _build_arg_parser(): 'the output file.') group.add_argument('--mean', action='store_true', help='Extract mean b0.') + p.add_argument('--cluster', action='store_true', + help='Apply the extraction strategy to clusters of ' + 'continuous b0 volumes in the data') + + p.add_argument('--block-size', '-s', + metavar='INT', type=int, + help='Loads the data using this block size. ' + 'Useful\nwhen the data is too large to be ' + 'loaded in memory.') + + p.add_argument('--single-image', action='store_true', + help='If output b0 volume has multiple time points, only ' + 'outputs a single image instead of a numbered series ' + 'of images.') + add_force_b0_arg(p) add_verbose_arg(p) return p -def _keep_time_step(dwi, time, output): - image = nib.load(dwi) - data = image.get_fdata(dtype=np.float32) - +def _split_time_steps(b0, affine, header, output): fname, fext = split_name_with_nii(os.path.basename(output)) - multi_b0 = len(time) > 1 - for t in time: - t_data = data[..., t] - + multiple_b0 = b0.shape[-1] > 1 + for t in range(b0.shape[-1]): out_name = os.path.join( os.path.dirname(os.path.abspath(output)), - '{}_{}{}'.format(fname, t, fext)) if multi_b0 else output - nib.save(nib.Nifti1Image(t_data, image.affine, image.header), - out_name) - - -def _mean_in_time(dwi, time, output): - image = nib.load(dwi) - - data = image.get_fdata(dtype=np.float32) - data = data[..., time] - data = np.mean(data, axis=3, dtype=data.dtype) - - 
nib.save(nib.Nifti1Image(data, image.affine, image.header), - output) + '{}_{}{}'.format(fname, t, fext)) if multiple_b0 else output + nib.save(nib.Nifti1Image(b0[..., t], affine, header), out_name) def main(): @@ -103,14 +102,22 @@ def main(): logger.info('Number of b0 images in the data: {}'.format(len(b0_idx))) + strategy = B0ExtractionStrategy.FIRST if args.mean: - logger.info('Using mean of indices {} for b0'.format(b0_idx)) - _mean_in_time(args.in_dwi, b0_idx, args.out_b0) + strategy = B0ExtractionStrategy.MEAN + elif args.all: + strategy = B0ExtractionStrategy.ALL + + image = nib.load(args.in_dwi) + + b0_volumes = extract_b0( + image, gtab.b0s_mask, args.cluster, strategy, args.block_size) + + if len(b0_volumes.shape) > 3 and not args.single_image: + _split_time_steps(b0_volumes, image.affine, image.header, args.out_b0) else: - if not args.all: - b0_idx = [b0_idx[0]] - logger.info("Keeping {} for b0".format(b0_idx)) - _keep_time_step(args.in_dwi, b0_idx, args.out_b0) + nib.save(nib.Nifti1Image(b0_volumes, image.affine, image.header), + args.out_b0) if __name__ == '__main__': From 26b1078ea2b51d60cde4e2656bbcecfed69f96f8 Mon Sep 17 00:00:00 2001 From: Alex Valcourt Caron Date: Tue, 23 Mar 2021 11:06:07 -0400 Subject: [PATCH 23/45] Reformat arguments for cluster extraction of b0s --- scripts/scil_extract_b0.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/scripts/scil_extract_b0.py b/scripts/scil_extract_b0.py index db78495aa..48a799664 100755 --- a/scripts/scil_extract_b0.py +++ b/scripts/scil_extract_b0.py @@ -47,10 +47,11 @@ def _build_arg_parser(): help='Extract all b0. 
Index number will be appended to ' 'the output file.') group.add_argument('--mean', action='store_true', help='Extract mean b0.') - - p.add_argument('--cluster', action='store_true', - help='Apply the extraction strategy to clusters of ' - 'continuous b0 volumes in the data') + group.add_argument('--cluster-mean', action='store_true', + help='Extracts mean of each continuous cluster of b0s') + group.add_argument('--cluster-first', action='store_true', + help='Extracts first b0 of each ' + 'continuous cluster of b0s') p.add_argument('--block-size', '-s', metavar='INT', type=int, @@ -102,16 +103,19 @@ def main(): logger.info('Number of b0 images in the data: {}'.format(len(b0_idx))) - strategy = B0ExtractionStrategy.FIRST - if args.mean: + strategy, extract_in_cluster = B0ExtractionStrategy.FIRST, False + if args.mean or args.cluster_mean: strategy = B0ExtractionStrategy.MEAN + extract_in_cluster = args.cluster_mean elif args.all: strategy = B0ExtractionStrategy.ALL + elif args.cluster_first: + extract_in_cluster = True image = nib.load(args.in_dwi) b0_volumes = extract_b0( - image, gtab.b0s_mask, args.cluster, strategy, args.block_size) + image, gtab.b0s_mask, extract_in_cluster, strategy, args.block_size) if len(b0_volumes.shape) > 3 and not args.single_image: _split_time_steps(b0_volumes, image.affine, image.header, args.out_b0) From e842876610cc7f82abd713a7e96537bbe2c59850 Mon Sep 17 00:00:00 2001 From: Alex Valcourt Caron Date: Wed, 12 May 2021 13:42:51 -0400 Subject: [PATCH 24/45] Add default to b0 threshold --- scripts/scil_extract_b0.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/scil_extract_b0.py b/scripts/scil_extract_b0.py index 48a799664..e13950247 100755 --- a/scripts/scil_extract_b0.py +++ b/scripts/scil_extract_b0.py @@ -40,7 +40,7 @@ def _build_arg_parser(): p.add_argument('--b0_thr', type=float, default=0.0, help='All b-values with values less than or equal ' 'to b0_thr are considered as b0s i.e. 
without ' - 'diffusion weighting.') + 'diffusion weighting. [%(default)s]') group = p.add_mutually_exclusive_group() group.add_argument('--all', action='store_true', From e744e2cee8aafe0c6ad9939d370586fac51144bf Mon Sep 17 00:00:00 2001 From: Alex Valcourt Caron Date: Tue, 8 Jun 2021 12:50:23 -0400 Subject: [PATCH 25/45] Fix typos and documentation --- scilpy/image/utils.py | 5 +++++ scripts/scil_extract_b0.py | 8 ++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/scilpy/image/utils.py b/scilpy/image/utils.py index 80aa3646d..73141d09d 100644 --- a/scilpy/image/utils.py +++ b/scilpy/image/utils.py @@ -43,6 +43,11 @@ def volume_iterator(img, blocksize=1, start=0, end=0): Image of a 4D volume with shape X,Y,Z,N blocksize : int, optional Number of volumes to return in a single batch + start : int, optional + Starting iteration index in the 4D volume + end : int, optional + Stopping iteration index in the 4D volume + (the volume at this index is excluded) Yields ------- diff --git a/scripts/scil_extract_b0.py b/scripts/scil_extract_b0.py index e13950247..44d7e3f9f 100755 --- a/scripts/scil_extract_b0.py +++ b/scripts/scil_extract_b0.py @@ -48,14 +48,14 @@ def _build_arg_parser(): 'the output file.') group.add_argument('--mean', action='store_true', help='Extract mean b0.') group.add_argument('--cluster-mean', action='store_true', - help='Extracts mean of each continuous cluster of b0s') + help='Extract mean of each continuous cluster of b0s.') group.add_argument('--cluster-first', action='store_true', - help='Extracts first b0 of each ' - 'continuous cluster of b0s') + help='Extract first b0 of each ' + 'continuous cluster of b0s.') p.add_argument('--block-size', '-s', metavar='INT', type=int, - help='Loads the data using this block size. ' + help='Load the data using this block size. 
' 'Useful\nwhen the data is too large to be ' 'loaded in memory.') From 1fcaadf14d289a865e8d5eed627542a03c663941 Mon Sep 17 00:00:00 2001 From: Alex Valcourt Caron Date: Tue, 8 Jun 2021 13:24:19 -0400 Subject: [PATCH 26/45] Fix typos --- scilpy/utils/bvec_bval_tools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scilpy/utils/bvec_bval_tools.py b/scilpy/utils/bvec_bval_tools.py index 7fff301c6..e115fc04f 100644 --- a/scilpy/utils/bvec_bval_tools.py +++ b/scilpy/utils/bvec_bval_tools.py @@ -292,7 +292,7 @@ def extract_dwi_shell(dwi, bvals, bvecs, bvals_to_extract, tol=20, The tolerated gap between the b-values to extract and the actual b-values. block_size : int - Loads the data using this block size. Useful when the data is too + Load the data using this block size. Useful when the data is too large to be loaded in memory. Returns @@ -362,7 +362,7 @@ def extract_b0(dwi, b0_mask, extract_in_cluster=False, parameter set to True, the strategy is applied individually on each continuous set found. block_size : int - Loads the data using this block size. Useful when the data is too + Load the data using this block size. Useful when the data is too large to be loaded in memory. 
Returns From d1d948608dfaf40b370eb3d6f8c0bb09c1da4787 Mon Sep 17 00:00:00 2001 From: "charles.poirier" Date: Tue, 8 Jun 2021 14:03:28 -0400 Subject: [PATCH 27/45] ENH: Autodetect SH order and fullness from ncoeffs --- fibercup | 1 + scilpy/reconst/utils.py | 16 ++++++++++++++++ scripts/scil_visualize_fodf.py | 27 ++++----------------------- 3 files changed, 21 insertions(+), 23 deletions(-) create mode 120000 fibercup diff --git a/fibercup b/fibercup new file mode 120000 index 000000000..4ce0d2ba7 --- /dev/null +++ b/fibercup @@ -0,0 +1 @@ +/home/charles/research/data/inputs/fibercup_clean/ssst_fodf/sh_6/ \ No newline at end of file diff --git a/scilpy/reconst/utils.py b/scilpy/reconst/utils.py index f239a6361..d23a89fc9 100644 --- a/scilpy/reconst/utils.py +++ b/scilpy/reconst/utils.py @@ -15,6 +15,22 @@ def find_order_from_nb_coeff(data): return int((-3 + np.sqrt(1 + 8 * shape[-1])) / 2) +def get_sh_order_and_fullness(ncoeffs): + """ + Get the order of the SH basis from the number of SH coefficients + as well as a boolean indicating if the basis is full. + """ + # the two curves (sym and full) intersect at ncoeffs = 1, in what + # case both bases correspond to order 1. 
+ sym_order = (-3.0 + np.sqrt(1.0 + 8.0 * ncoeffs)) / 2.0 + if sym_order.is_integer(): + return sym_order, False + full_order = np.sqrt(ncoeffs) - 1.0 + if full_order.is_integer(): + return full_order, True + raise ValueError('Invalid number of coefficients for SH basis.') + + def _honor_authorsnames_sh_basis(sh_basis_type): sh_basis = sh_basis_type if sh_basis_type == 'fibernav': diff --git a/scripts/scil_visualize_fodf.py b/scripts/scil_visualize_fodf.py index 0117e6040..09a11f78b 100755 --- a/scripts/scil_visualize_fodf.py +++ b/scripts/scil_visualize_fodf.py @@ -19,6 +19,7 @@ from dipy.data import get_sphere from dipy.reconst.shm import order_from_ncoef +from scilpy.reconst.utils import get_sh_order_and_fullness from scilpy.io.utils import (add_sh_basis_args, add_overwrite_arg, assert_inputs_exist, assert_outputs_exist) from scilpy.io.image import get_data_as_mask @@ -62,10 +63,6 @@ def _build_arg_parser(): # Optional FODF personalization arguments add_sh_basis_args(p) - p.add_argument('--full_basis', action='store_true', - help='Use full SH basis to reconstruct fODF from ' - 'coefficients.') - sphere_choices = {'symmetric362', 'symmetric642', 'symmetric724', 'repulsion724', 'repulsion100', 'repulsion200'} p.add_argument('--sphere', default='symmetric724', choices=sphere_choices, @@ -215,28 +212,12 @@ def _get_data_from_inputs(args): return data -def validate_order(sh_order, ncoeffs, full_basis): - """ - Check that the sh order agrees with the number - of coefficients in the input - """ - if full_basis: - expected_ncoeffs = (sh_order + 1)**2 - else: - expected_ncoeffs = (sh_order + 1) * (sh_order + 2) // 2 - return ncoeffs == expected_ncoeffs - - def main(): parser = _build_arg_parser() args = _parse_args(parser) data = _get_data_from_inputs(args) sph = get_sphere(args.sphere) - sh_order = order_from_ncoef(data['fodf'].shape[-1], args.full_basis) - if not validate_order(sh_order, data['fodf'].shape[-1], args.full_basis): - parser.error('Invalid number of 
coefficients for fODF. ' - 'Use --full_basis if your input is in ' - 'full SH basis.') + sh_order, full_basis = get_sh_order_and_fullness(data['fodf'].shape[-1]) actors = [] @@ -249,7 +230,7 @@ def main(): # Instantiate the ODF slicer actor odf_actor = create_odf_slicer(data['fodf'], mask, sph, args.sph_subdivide, sh_order, - args.sh_basis, args.full_basis, + args.sh_basis, full_basis, args.axis_name, args.scale, not args.radial_scale_off, not args.norm_off, args.colormap, @@ -282,7 +263,7 @@ def main(): mask, args.peaks_color, args.peaks_width, - not args.full_basis) + not full_basis) actors.append(peaks_actor) From 64e829f98e06b0b90d494a85f020c6c5c970ae21 Mon Sep 17 00:00:00 2001 From: "charles.poirier" Date: Tue, 8 Jun 2021 14:04:46 -0400 Subject: [PATCH 28/45] Remove accidental ADD on symlink --- fibercup | 1 - 1 file changed, 1 deletion(-) delete mode 120000 fibercup diff --git a/fibercup b/fibercup deleted file mode 120000 index 4ce0d2ba7..000000000 --- a/fibercup +++ /dev/null @@ -1 +0,0 @@ -/home/charles/research/data/inputs/fibercup_clean/ssst_fodf/sh_6/ \ No newline at end of file From 1f1d6c3407fddebed37656365308ef1af4d2aa7b Mon Sep 17 00:00:00 2001 From: "charles.poirier" Date: Tue, 8 Jun 2021 14:06:29 -0400 Subject: [PATCH 29/45] TEST: Remove full_basis arg testing --- scripts/tests/test_visualize_fodf.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/scripts/tests/test_visualize_fodf.py b/scripts/tests/test_visualize_fodf.py index 679fb76c5..1f4d04c17 100644 --- a/scripts/tests/test_visualize_fodf.py +++ b/scripts/tests/test_visualize_fodf.py @@ -16,15 +16,6 @@ def test_help_option(script_runner): assert ret.success -def test_full_basis_input_without_arg(script_runner): - os.chdir(os.path.expanduser(tmp_dir.name)) - in_fodf = os.path.join(get_home(), 'tracking', 'fodf_full.nii.gz') - ret = script_runner.run('scil_visualize_fodf.py', in_fodf) - - # Using a full SH basis without --full_basis argument should fail - assert (not ret.success) - - 
def test_silent_without_output(script_runner): os.chdir(os.path.expanduser(tmp_dir.name)) in_fodf = os.path.join(get_home(), 'tracking', 'fodf.nii.gz') From 77ffb901b37fba4a994650a4fd2740cf661126e1 Mon Sep 17 00:00:00 2001 From: ppoulin91 Date: Tue, 8 Jun 2021 10:07:10 -0400 Subject: [PATCH 30/45] Update numpy version for CVXPY - Fixes error : `ModuleNotFoundError: No module named 'cvxpy.cvxcore.python._cvxcore' During handling of the above exception, another exception occurred: RuntimeError: module compiled against API version 0xe but this version of numpy is 0xd` --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 5237362ae..800be85ed 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ kiwisolver==1.0.* matplotlib==2.2.* nibabel==3.0.* nilearn==0.6.* -numpy==1.18.* +numpy==1.20.* Pillow==7.1.* bids-validator==1.6.0 pybids==0.10.* From 12734356534b818e1308cd1e7bd80adfcd79076c Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Thu, 10 Jun 2021 17:59:48 -0400 Subject: [PATCH 31/45] update with two values for ufactor --- scilpy/tractanalysis/features.py | 17 ++++++++++------- scripts/scil_extract_ushape.py | 26 ++++++++++++++++++-------- scripts/tests/test_extract_ushape.py | 6 ++++-- 3 files changed, 32 insertions(+), 17 deletions(-) diff --git a/scilpy/tractanalysis/features.py b/scilpy/tractanalysis/features.py index e0b6fdb01..9d955b0b9 100644 --- a/scilpy/tractanalysis/features.py +++ b/scilpy/tractanalysis/features.py @@ -11,19 +11,22 @@ import numpy as np -def detect_ushape(sft, ufactor=[0.5, 1]): +def detect_ushape(sft, minU, maxU): """ - Remove loops and sharp turns from a list of streamlines. + Extract streamlines depending of their "u-shapeness". Parameters ---------- - sft: State Full tractogram used to remove loops and sharp turns. 
- threshold: float - Threshold bla bla bla + sft: Statefull tractogram + Tractogram used to extract streamlines depending on their ushapeness. + minU: Float + Minimum ufactor of a streamline. + maxU: Float + Maximum ufactor of a streamline. Returns ------- list: the ids of clean streamlines - Only the ids are returned so proper filtering can be done afterwards + Only the ids are returned so proper filtering can be done afterwards. """ ids = [] new_sft = resample_streamlines_num_points(sft, 4) @@ -44,7 +47,7 @@ def detect_ushape(sft, ufactor=[0.5, 1]): val = np.dot(np.cross(v1, v2), np.cross(v2, v3)) - if min(ufactor) <= val <= max(ufactor): + if minU <= val <= maxU: ids.append(i) return ids diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 8ae1fb99b..8f8e27111 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- """ -This script can be used to extract U-Shape streamlines. +This script extracts streamlines depending on the U-shapeness. The main idea comes from trackvis code: pt 1: 1 st end point @@ -15,8 +15,13 @@ v2: pt2 -> pt3 v3: pt3 -> pt4 -U-factor:dot product of v1 X v2 and v2 X v3. +ufactor:dot product of v1 X v2 and v2 X v3. X is the cross product of two vectors. + +When ufactor is close to: +* 0 it defines straight streamlines +* 1 it defines U-fibers +* -1 it defines S-fibers ---------------------------------------------------------------------------- """ @@ -44,9 +49,14 @@ def _build_arg_parser(): p.add_argument('in_tractogram', help='Tractogram input file name.') p.add_argument('out_tractogram', - help='Output tractogram with ushape.') - p.add_argument('--ufactor', default=[0.5, 1], type=float, nargs=2, - help='U factor defines U-shapeness of the track.') + help='Output tractogram file name.') + p.add_argument('--minU', + default=0.5, type=float, + help='Min ufactor value. 
[%(default)s]') + p.add_argument('--maxU', + default=1.0, type=float, + help='Max ufactor value. [%(default)s]') + p.add_argument('--remaining_tractogram', help='If set, saves remaining streamlines.') p.add_argument('--display_counts', action='store_true', @@ -69,8 +79,8 @@ def main(): check_tracts_same_format(parser, [args.in_tractogram, args.out_tractogram, args.remaining_tractogram]) - if not( -1 <= min(args.ufactor) <= 1 and -1 <= max(args.ufactor) <= 1): - parser.error('U-factor "{}" '.format(args.ufactor) + + if not( -1 <= args.minU <= 1 and -1 <= args.maxU <= 1): + parser.error('Min-Max ufactor "{},{}" '.format(args.minU, args.maxU) + 'must be between -1 and 1.') sft = load_tractogram_with_reference( @@ -80,7 +90,7 @@ def main(): ids_l = [] if len(sft.streamlines) > 1: - ids_c = detect_ushape(sft, args.ufactor) + ids_c = detect_ushape(sft, args.minU, args.maxU) ids_l = np.setdiff1d(np.arange(len(sft.streamlines)), ids_c) else: parser.error( diff --git a/scripts/tests/test_extract_ushape.py b/scripts/tests/test_extract_ushape.py index e1593d736..83307902b 100644 --- a/scripts/tests/test_extract_ushape.py +++ b/scripts/tests/test_extract_ushape.py @@ -17,13 +17,15 @@ def test_help_option(script_runner): '--help') assert ret.success + def test_execution_processing(script_runner): os.chdir(os.path.expanduser(tmp_dir.name)) in_trk = os.path.join(get_home(), 'tracking', 'union.trk') out_trk = 'ushape.trk' remaining_trk = 'remaining.trk' ret = script_runner.run('scil_extract_ushape.py', in_trk, out_trk, - '--ufactor', '0.5', '1', - '--remaining_tractogram' , remaining_trk, + '--minU', '0.5', + '--maxU', '1', + '--remaining_tractogram', remaining_trk, '--display_counts') assert ret.success From 8f51c447123a361f5e38fe9843e9162a56f5198c Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Thu, 10 Jun 2021 18:13:07 -0400 Subject: [PATCH 32/45] pep8 --- scripts/scil_extract_ushape.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 8f8e27111..754dc144a 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -79,7 +79,7 @@ def main(): check_tracts_same_format(parser, [args.in_tractogram, args.out_tractogram, args.remaining_tractogram]) - if not( -1 <= args.minU <= 1 and -1 <= args.maxU <= 1): + if not(-1 <= args.minU <= 1 and -1 <= args.maxU <= 1): parser.error('Min-Max ufactor "{},{}" '.format(args.minU, args.maxU) + 'must be between -1 and 1.') From 7cd23b7ca8fb1a1fff9037d05b01b5e2067cb397 Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Fri, 11 Jun 2021 10:35:58 -0400 Subject: [PATCH 33/45] answer francois comments --- scripts/scil_extract_ushape.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 754dc144a..715f54276 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -22,7 +22,6 @@ * 0 it defines straight streamlines * 1 it defines U-fibers * -1 it defines S-fibers ----------------------------------------------------------------------------- """ import argparse @@ -98,8 +97,7 @@ def main(): '. 
The file must have more than one streamline.') if len(ids_c) > 0: - sft_c = sft[ids_c] - save_tractogram(sft_c, args.out_tractogram) + save_tractogram(sft[ids_c], args.out_tractogram) else: logging.warning( 'No u-shape streamlines in {}'.format(args.in_tractogram)) @@ -112,10 +110,9 @@ def main(): indent=args.indent)) if len(ids_l) == 0: - logging.warning('No loops in {}'.format(args.in_tractogram)) + logging.warning('No u-shape streamlines in {}'.format(args.in_tractogram)) elif args.remaining_tractogram: - sft_l = sft[ids_l] - save_tractogram(sft_l, args.remaining_tractogram) + save_tractogram(sft[ids_l], args.remaining_tractogram) if __name__ == "__main__": From 6bfbbd46b062431df073976f4699798083e67e96 Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Fri, 11 Jun 2021 10:37:46 -0400 Subject: [PATCH 34/45] rename warning message --- scripts/scil_extract_ushape.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 715f54276..250b0a1e9 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -110,7 +110,7 @@ def main(): indent=args.indent)) if len(ids_l) == 0: - logging.warning('No u-shape streamlines in {}'.format(args.in_tractogram)) + logging.warning('No remaining streamlines in {}'.format(args.remaining_tractogram)) elif args.remaining_tractogram: save_tractogram(sft[ids_l], args.remaining_tractogram) From ef8ed57cd9b0918c0262d47fdd36768892f8f217 Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Fri, 11 Jun 2021 11:23:24 -0400 Subject: [PATCH 35/45] fix error test --- scripts/scil_extract_ushape.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 250b0a1e9..70e780e06 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -104,7 +104,7 @@ def main(): if args.display_counts: sc_bf = len(sft.streamlines) - sc_af = len(sft_c.streamlines) + 
sc_af = len(ids_c) print(json.dumps({'streamline_count_before_filtering': int(sc_bf), 'streamline_count_after_filtering': int(sc_af)}, indent=args.indent)) From 8eabf510ee688ad5326a655fe066495973f4934a Mon Sep 17 00:00:00 2001 From: frheault Date: Fri, 11 Jun 2021 10:45:48 -0500 Subject: [PATCH 36/45] First pass from Slack comments --- scripts/scil_run_commit.py | 103 ++++++++++++++----------------------- 1 file changed, 39 insertions(+), 64 deletions(-) diff --git a/scripts/scil_run_commit.py b/scripts/scil_run_commit.py index bd1e9ca2b..bf1bbcda4 100755 --- a/scripts/scil_run_commit.py +++ b/scripts/scil_run_commit.py @@ -20,9 +20,9 @@ fiting error (Root Mean Square Error) - results.pickle Dictionary containing the experiment parameters and final weights -- compartment_EC.nii.gz (Extra-Cellular) -- compartment_IC.nii.gz (Intra-Cellular) -- compartment_ISO.nii.gz (isotropic volume fraction (freewater comportment)) +- compartment_EC.nii.gz (est. Extra-Cellular signal fraction) +- compartment_IC.nii.gz (est. Intra-Cellular signal fraction) +- compartment_ISO.nii.gz (est. isotropic signal fraction (freewater comportment)) Each of COMMIT compartments - commit_weights.txt Text file containing the commit weights for each streamline of the @@ -32,12 +32,9 @@ - tot_commit_weights Text file containing the total commit weights of each streamline. Equal to commit_weights * streamlines_length (W_i * L_i) -- commit_weights.txt - Text file containing the commit weights for each streamline of the - input tractogram. - essential.trk / non_essential.trk Tractograms containing the streamlines below or equal (essential) and - above (non_essential) the --threshold_weights argument. + above (non_essential) a threshold_weights of 0. This script can divide the input tractogram in two using a threshold to apply on the streamlines' weight. 
Typically, the threshold should be 0, keeping only @@ -117,13 +114,13 @@ def _build_arg_parser(): help='Number of directions, on the half of the sphere,\n' 'representing the possible orientations of the ' 'response functions [%(default)s].') - p.add_argument('--nbr_iter', type=int, default=500, + p.add_argument('--nbr_iter', type=int, default=1000, help='Maximum number of iterations [%(default)s].') p.add_argument('--in_peaks', help='Peaks file representing principal direction(s) ' 'locally,\n typically coming from fODFs. This file is ' 'mandatory for the default\n stick-zeppelin-ball ' - 'model, when used with multi-shell data.') + 'model.') p.add_argument('--in_tracking_mask', help='Binary mask where tratography was allowed.\n' 'If not set, uses a binary mask computed from ' @@ -138,8 +135,9 @@ def _build_arg_parser(): g1 = p.add_argument_group(title='Model options') g1.add_argument('--ball_stick', action='store_true', - help='Use the ball&Stick model.\nDisable ' - 'the zeppelin compartment for single-shell data.') + help='Use the ball&Stick model, disable the zeppelin ' + 'compartment.\nOnly model suitable for single-shell ' + 'data.') g1.add_argument('--para_diff', type=float, help='Parallel diffusivity in mm^2/s.\n' 'Default for ball_stick: 1.7E-3\n' @@ -147,7 +145,7 @@ def _build_arg_parser(): g1.add_argument('--perp_diff', nargs='+', type=float, help='Perpendicular diffusivity in mm^2/s.\n' 'Default for ball_stick: None\n' - 'Default for stick_zeppelin_ball: [0.85E-3, 0.51E-3]') + 'Default for stick_zeppelin_ball: [0.51E-3]') g1.add_argument('--iso_diff', nargs='+', type=float, help='Istropic diffusivity in mm^2/s.\n' 'Default for ball_stick: [2.0E-3]\n' @@ -157,11 +155,6 @@ def _build_arg_parser(): g2.add_argument('--keep_whole_tractogram', action='store_true', help='Save a tractogram copy with streamlines weights in ' 'the data_per_streamline\n[%(default)s].') - g2.add_argument('--threshold_weights', metavar='THRESHOLD', - default=0., - help='Split the 
tractogram in two; essential and\n' - 'nonessential, based on the provided threshold ' - '[%(default)s].\n Use None to skip this step.') g3 = p.add_argument_group(title='Kernels options') kern = g3.add_mutually_exclusive_group() @@ -226,10 +219,8 @@ def _save_results_wrapper(args, tmp_dir, ext, hdf5_file, offsets_list, tmp_commit_weights = \ commit_weights[offsets_list[i]:offsets_list[i+1]] - if args.threshold_weights is None: - args.threshold_weights = -1 essential_ind = np.where( - tmp_commit_weights > args.threshold_weights)[0] + tmp_commit_weights > 0)[0] tmp_commit_weights = tmp_commit_weights[essential_ind] tmp_streamlines = reconstruct_streamlines(old_group['data'], @@ -267,41 +258,34 @@ def _save_results_wrapper(args, tmp_dir, ext, hdf5_file, offsets_list, for f in files: shutil.copy(os.path.join(commit_results_dir, f), out_dir) - # Save split tractogram (essential/nonessential) and/or saving the - # tractogram with data_per_streamline updated - if args.keep_whole_tractogram or args.threshold_weights is not None: - dps_key = 'commit2_weights' if is_commit_2 else \ - 'commit1_weights' - dps_key_tot = 'tot_commit2_weights' if is_commit_2 else \ - 'tot_commit1_weights' - # Reload is needed because of COMMIT handling its file by itself - sft.data_per_streamline[dps_key] = commit_weights - sft.data_per_streamline[dps_key_tot] = commit_weights*length_list - - if args.threshold_weights is None: - args.threshold_weights = -1 - essential_ind = np.where( - commit_weights > args.threshold_weights)[0] - nonessential_ind = np.where( - commit_weights <= args.threshold_weights)[0] - logging.debug('{} essential streamlines were kept at ' - 'threshold {}'.format(len(essential_ind), - args.threshold_weights)) - logging.debug('{} nonessential streamlines were kept at ' - 'threshold {}'.format(len(nonessential_ind), - args.threshold_weights)) - - save_tractogram(sft[essential_ind], - os.path.join(out_dir, - 'essential_tractogram.trk')) - save_tractogram(sft[nonessential_ind], 
- os.path.join(out_dir, - 'nonessential_tractogram.trk')) - if args.keep_whole_tractogram: - output_filename = os.path.join(out_dir, 'tractogram.trk') - logging.debug('Saving tractogram with weights as {}'.format( - output_filename)) - save_tractogram(sft, output_filename) + dps_key = 'commit2_weights' if is_commit_2 else \ + 'commit1_weights' + dps_key_tot = 'tot_commit2_weights' if is_commit_2 else \ + 'tot_commit1_weights' + # Reload is needed because of COMMIT handling its file by itself + sft.data_per_streamline[dps_key] = commit_weights + sft.data_per_streamline[dps_key_tot] = commit_weights*length_list + + essential_ind = np.where( + commit_weights > 0)[0] + nonessential_ind = np.where( + commit_weights <= 0)[0] + logging.debug('{} essential streamlines were kept at'.format( + len(essential_ind))) + logging.debug('{} nonessential streamlines were kept'.format( + len(nonessential_ind))) + + save_tractogram(sft[essential_ind], + os.path.join(out_dir, + 'essential_tractogram.trk')) + save_tractogram(sft[nonessential_ind], + os.path.join(out_dir, + 'nonessential_tractogram.trk')) + if args.keep_whole_tractogram: + output_filename = os.path.join(out_dir, 'tractogram.trk') + logging.debug('Saving tractogram with weights as {}'.format( + output_filename)) + save_tractogram(sft, output_filename) def main(): @@ -346,15 +330,6 @@ def main(): parser.error('{} does not have a compatible header with {}'.format( args.in_tractogram, args.in_dwi)) - if args.threshold_weights == 'None' or args.threshold_weights == 'none': - args.threshold_weights = None - if not args.keep_whole_tractogram and ext != '.h5': - logging.warning('Not thresholding weight with trk file without ' - 'the --keep_whole_tractogram will not save a ' - 'tractogram.') - else: - args.threshold_weights = float(args.threshold_weights) - # COMMIT has some c-level stdout and non-logging print that cannot # be easily stopped. 
Manual redirection of all printed output if args.verbose: From 3481938a7b4eb95ea55f49dca1a70b39102d13c4 Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Fri, 11 Jun 2021 11:58:39 -0400 Subject: [PATCH 37/45] pep8 --- scripts/scil_extract_ushape.py | 21 ++++----------------- 1 file changed, 4 insertions(+), 17 deletions(-) diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 70e780e06..7ad07e8bd 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -2,21 +2,8 @@ # -*- coding: utf-8 -*- """ -This script extracts streamlines depending on the U-shapeness. -The main idea comes from trackvis code: - -pt 1: 1 st end point -pt 2: 1/3 location on the track -pt 3: 2/3 location on the track -pt 4: 2nd end point - -Compute 3 normalized vectors: -v1: pt1 -> pt2 -v2: pt2 -> pt3 -v3: pt3 -> pt4 - -ufactor:dot product of v1 X v2 and v2 X v3. -X is the cross product of two vectors. +This script extracts streamlines depending on their U-shapeness. +This script is a replica of Trackvis method. 
When ufactor is close to: * 0 it defines straight streamlines @@ -38,7 +25,6 @@ assert_inputs_exist, assert_outputs_exist, check_tracts_same_format) -from scilpy.utils.streamlines import filter_tractogram_data from scilpy.tractanalysis.features import detect_ushape @@ -110,7 +96,8 @@ def main(): indent=args.indent)) if len(ids_l) == 0: - logging.warning('No remaining streamlines in {}'.format(args.remaining_tractogram)) + logging.warning('No remaining streamlines ' + 'in {}'.format(args.remaining_tractogram)) elif args.remaining_tractogram: save_tractogram(sft[ids_l], args.remaining_tractogram) From e06af3a57130ee7674fa113ca09881faa6ee9bf6 Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Fri, 11 Jun 2021 12:55:04 -0400 Subject: [PATCH 38/45] typo --- scripts/scil_extract_ushape.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 7ad07e8bd..7ec72453f 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -45,7 +45,7 @@ def _build_arg_parser(): p.add_argument('--remaining_tractogram', help='If set, saves remaining streamlines.') p.add_argument('--display_counts', action='store_true', - help='Print streamline count before and after filtering') + help='Print streamline count before and after filtering.') add_overwrite_arg(p) add_reference_arg(p) From 1574c74cf4ec86d08fd918220af906f4bbbd60e2 Mon Sep 17 00:00:00 2001 From: frheault Date: Mon, 14 Jun 2021 13:41:34 -0500 Subject: [PATCH 39/45] Post meeting update --- scripts/scil_run_commit.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/scripts/scil_run_commit.py b/scripts/scil_run_commit.py index bf1bbcda4..a5fd1b43a 100755 --- a/scripts/scil_run_commit.py +++ b/scripts/scil_run_commit.py @@ -10,8 +10,10 @@ multi-shell data and a peak file (principal fiber directions in each voxel, typically from a field of fODFs). 
-It is possible to use the ball-and-stick model for single-shell data. In this -case, the peak file is not mandatory. +It is possible to use the ball-and-stick model for single-shell and multi-shell +data. In this case, the peak file is not mandatory. Multi-shell should follow a +"NODDI protocol" (low and high b-values), multiple shells with similar b-values +should not be used with COMMIT. The output from COMMIT is: - fit_NRMSE.nii.gz @@ -37,14 +39,23 @@ above (non_essential) a threshold_weights of 0. This script can divide the input tractogram in two using a threshold to apply -on the streamlines' weight. Typically, the threshold should be 0, keeping only +on the streamlines' weight. The threshold used is 0.0, keeping only streamlines that have non-zero weight and that contribute to explain the DWI signal. Streamlines with 0 weight are essentially not necessary according to COMMIT. COMMIT2 is available only for HDF5 data from scil_decompose_connectivity.py and with the --ball_stick option. Use the --commit2 option to activite it, slightly -longer computation time. +longer computation time. This wrapper offers a simplify way to call COMMIT, but +does not allow to use (or fine-tune) every parameters. 
If you want to use COMMIT +with full access to all parameters, visit: https://github.com/daducci/COMMIT + +When tunning parameters, such as --iso_diff, --para_diff, --perp_diff or +--lambda_commit_2 you should evaluate the quality of results by: + - Looking at the 'density' (GTM) of the connnectome (essential tractogram) + - Confirm the quality of WM bundles reconstruction (essential tractogram) + - Inspect the (N)RMSE map and look for peaks or anomalies + - Compare the density map before and after (essential tractogram) """ import argparse From 880971c31b84e919853b161ade7d68ccd9266793 Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Tue, 15 Jun 2021 10:07:20 -0400 Subject: [PATCH 40/45] upgrade pillow --- docs/requirements.txt | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 9b8e74a8a..c551aac3e 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -6,7 +6,7 @@ matplotlib==2.2.* nibabel==3.0.* nilearn==0.6.* numpy==1.18.* -Pillow==7.1.* +Pillow==8.2.* pybids==0.10.* pyparsing==2.2.* python-dateutil==2.7.* diff --git a/requirements.txt b/requirements.txt index 787245905..eca25ebb9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,7 +11,7 @@ matplotlib==2.2.* nibabel==3.0.* nilearn==0.6.* numpy==1.20.* -Pillow==7.1.* +Pillow==8.2.* bids-validator==1.6.0 pybids==0.10.* pyparsing==2.2.* From faaaf7656ae095a387fb63e4075216035ddf9619 Mon Sep 17 00:00:00 2001 From: Guillaume Theaud Date: Tue, 15 Jun 2021 10:24:48 -0400 Subject: [PATCH 41/45] Add action --- .github/workflows/build.yml | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000..b2c3c96d9 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,28 @@ +name: Build Docker with selected version + +on: + workflow_dispatch: + inputs: 
+ scilpy_commit: + description: Scilpy commit id + required: true + +jobs: + Build_Docker: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + name: Check out repository + - name: Change scilpy version + run: sed -i '/ENV SCILPY_VERSION=/c\ENV SCILPY_VERSION=${{ github.event.inputs.scilpy_commit }}' containers/Dockerfile + - uses: mr-smithers-excellent/docker-build-push@v3.1 + name: Docker Build & Push + with: + image: scilus/scilpy + tag: dev + dockerfile: containers/Dockerfile + registry: docker.io + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} From 2ae86de565425423d41a610365bb145db5db5665 Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Tue, 15 Jun 2021 20:22:03 -0400 Subject: [PATCH 42/45] add no_empty --- scripts/scil_extract_ushape.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 7ec72453f..9183ce295 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -44,6 +44,8 @@ def _build_arg_parser(): p.add_argument('--remaining_tractogram', help='If set, saves remaining streamlines.') + p.add_argument('--no_empty', action='store_true', + help='Do not write file if there is no streamline.') p.add_argument('--display_counts', action='store_true', help='Print streamline count before and after filtering.') @@ -82,11 +84,16 @@ def main(): 'Zero or one streamline in {}'.format(args.in_tractogram) + '. 
The file must have more than one streamline.') - if len(ids_c) > 0: - save_tractogram(sft[ids_c], args.out_tractogram) - else: - logging.warning( - 'No u-shape streamlines in {}'.format(args.in_tractogram)) + if len(ids_c) == 0: + if args.no_empty: + logging.debug("The file {} won't be written " + "(0 streamline).".format(args.out_tractogram)) + return + + logging.debug('The file {} contains 0 streamline.'.format( + args.out_tractogram)) + + save_tractogram(sft[ids_c], args.out_tractogram) if args.display_counts: sc_bf = len(sft.streamlines) From 0b536b313c02e619d94a183547aaf829a1ce89d5 Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Wed, 16 Jun 2021 06:28:10 -0400 Subject: [PATCH 43/45] Better management of remaining streamlines --- scripts/scil_extract_ushape.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/scripts/scil_extract_ushape.py b/scripts/scil_extract_ushape.py index 9183ce295..553d6b0a0 100755 --- a/scripts/scil_extract_ushape.py +++ b/scripts/scil_extract_ushape.py @@ -73,16 +73,8 @@ def main(): sft = load_tractogram_with_reference( parser, args, args.in_tractogram) - ids_c = [] - ids_l = [] - - if len(sft.streamlines) > 1: - ids_c = detect_ushape(sft, args.minU, args.maxU) - ids_l = np.setdiff1d(np.arange(len(sft.streamlines)), ids_c) - else: - parser.error( - 'Zero or one streamline in {}'.format(args.in_tractogram) + - '. 
The file must have more than one streamline.') + ids_c = detect_ushape(sft, args.minU, args.maxU) + ids_l = np.setdiff1d(np.arange(len(sft.streamlines)), ids_c) if len(ids_c) == 0: if args.no_empty: @@ -102,10 +94,15 @@ def main(): 'streamline_count_after_filtering': int(sc_af)}, indent=args.indent)) - if len(ids_l) == 0: - logging.warning('No remaining streamlines ' - 'in {}'.format(args.remaining_tractogram)) - elif args.remaining_tractogram: + if args.remaining_tractogram: + if len(ids_l) == 0: + if args.no_empty: + logging.debug("The file {} won't be written (0 streamline" + ").".format(args.remaining_tractogram)) + return + + logging.warning('No remaining streamlines.') + save_tractogram(sft[ids_l], args.remaining_tractogram) From 09e144770166b4e8fe34723719b277757fdf6b15 Mon Sep 17 00:00:00 2001 From: frheault Date: Fri, 25 Jun 2021 09:49:19 -0500 Subject: [PATCH 44/45] Reconstruct on the fly --- scilpy/segment/voting_scheme.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/scilpy/segment/voting_scheme.py b/scilpy/segment/voting_scheme.py index d12c3efea..f435bb18c 100644 --- a/scilpy/segment/voting_scheme.py +++ b/scilpy/segment/voting_scheme.py @@ -288,13 +288,13 @@ def __call__(self, input_tractogram_path, nbr_processes=1, seeds=None): slr_transform_type, seed]) tmp_dir, tmp_memmap_filenames = streamlines_to_memmap(wb_streamlines) + del wb_streamlines comb_param_cluster = product(self.tractogram_clustering_thr, seeds) # Clustring is now parallelize pool = multiprocessing.Pool(nbr_processes) all_rbx_dict = pool.map(single_clusterize_and_rbx_init, - zip(repeat(wb_streamlines), - repeat(tmp_memmap_filenames), + zip(repeat(tmp_memmap_filenames), comb_param_cluster, repeat(self.nb_points))) pool.close() @@ -363,8 +363,6 @@ def single_clusterize_and_rbx_init(args): Parameters ---------- - wb_streamlines : list or ArraySequence - All streamlines of the tractogram to segment. 
tmp_memmap_filename: tuple (3) Temporary filename for the data, offsets and lengths. @@ -381,11 +379,11 @@ def single_clusterize_and_rbx_init(args): rbx : dict Initialisation of the recobundles class using specific parameters. """ - wb_streamlines = args[0] - tmp_memmap_filename = args[1] - clustering_thr = args[2][0] - seed = args[2][1] - nb_points = args[3] + tmp_memmap_filename = args[0] + wb_streamlines = reconstruct_streamlines_from_memmap(tmp_memmap_filename) + clustering_thr = args[1][0] + seed = args[1][1] + nb_points = args[2] rbx = {} base_thresholds = [45, 35, 25] From 67f69136ac76f76b665357a91285b979f4d102b1 Mon Sep 17 00:00:00 2001 From: frheault Date: Thu, 8 Jul 2021 10:10:16 -0500 Subject: [PATCH 45/45] fix_dsi --- scripts/scil_fix_dsi_studio_trk.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/scil_fix_dsi_studio_trk.py b/scripts/scil_fix_dsi_studio_trk.py index a7ccc4537..4bed9d7cd 100755 --- a/scripts/scil_fix_dsi_studio_trk.py +++ b/scripts/scil_fix_dsi_studio_trk.py @@ -23,6 +23,9 @@ This script was tested on various datasets and worked on all of them. However, always verify the results and if a specific case does not work. Open an issue on the Scilpy GitHub repository. + +WARNING: This script is still experimental, DSI-Studio evolves quickly and +results may vary depending on the data itself as well as DSI-studio version. """ import argparse