Skip to content

Commit

Permalink
Merge branch 'master' of https://github.com/scilus/scilpy into new_co…
Browse files Browse the repository at this point in the history
…nnectivity_pairwise_metric
  • Loading branch information
frheault committed Jul 8, 2021
2 parents cc7645c + c44e5c2 commit e26a03d
Show file tree
Hide file tree
Showing 20 changed files with 550 additions and 274 deletions.
28 changes: 28 additions & 0 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Manually-triggered workflow: build and push a scilpy Docker image
# pinned to a user-selected commit.
name: Build Docker with selected version

on:
  workflow_dispatch:
    inputs:
      scilpy_commit:
        description: Scilpy commit id
        required: true

jobs:
  Build_Docker:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
        name: Check out repository
      - name: Change scilpy version
        # Rewrite the ENV SCILPY_VERSION line so the image is built
        # from the requested commit instead of the default.
        run: sed -i '/ENV SCILPY_VERSION=/c\ENV SCILPY_VERSION=${{ github.event.inputs.scilpy_commit }}' containers/Dockerfile
      - uses: mr-smithers-excellent/[email protected]
        name: Docker Build & Push
        with:
          image: scilus/scilpy
          tag: dev
          dockerfile: containers/Dockerfile
          registry: docker.io
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
21 changes: 8 additions & 13 deletions Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -4,21 +4,11 @@ pipeline {
stages {
stage('Build') {
stages {
stage('Python3.6') {
steps {
withPythonEnv('CPython-3.6') {
sh '''
pip3 install numpy==1.18.* wheel
pip3 install -e .
'''
}
}
}
stage('Python3.7') {
steps {
withPythonEnv('CPython-3.7') {
sh '''
pip3 install numpy==1.18.* wheel
pip3 install numpy==1.20.* wheel
pip3 install -e .
'''
}
Expand All @@ -31,7 +21,7 @@ pipeline {
steps {
withPythonEnv('CPython-3.7') {
sh '''
pip3 install numpy==1.18.* wheel
pip3 install numpy==1.20.* wheel
pip3 install -e .
export MPLBACKEND="agg"
export OPENBLAS_NUM_THREADS=1
Expand All @@ -55,7 +45,12 @@ pipeline {
cleanWs()
script {
if (env.CHANGE_ID) {
pullRequest.createReviewRequests(['arnaudbore'])
if (pullRequest.createdBy != "arnaudbore"){
pullRequest.createReviewRequests(['arnaudbore'])
}
else{
pullRequest.createReviewRequests(['GuillaumeTh'])
}
}
}
}
Expand Down
2 changes: 1 addition & 1 deletion docs/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ matplotlib==2.2.*
nibabel==3.0.*
nilearn==0.6.*
numpy==1.18.*
Pillow==7.1.*
Pillow==8.2.*
pybids==0.10.*
pyparsing==2.2.*
python-dateutil==2.7.*
Expand Down
6 changes: 3 additions & 3 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,15 @@ coloredlogs==10.0.*
cycler==0.10.*
Cython==0.29.*
dipy==1.3.*
fury==0.6.*
fury==0.7.*
future==0.17.*
h5py==2.10.*
kiwisolver==1.0.*
matplotlib==2.2.*
nibabel==3.0.*
nilearn==0.6.*
numpy==1.18.*
Pillow==7.1.*
numpy==1.20.*
Pillow==8.2.*
bids-validator==1.6.0
pybids==0.10.*
pyparsing==2.2.*
Expand Down
30 changes: 20 additions & 10 deletions scilpy/image/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def count_non_zero_voxels(image):
return nb_voxels


def volume_iterator(img, blocksize=1, start=0, end=0):
    """Generator that iterates on volumes of data.

    Parameters
    ----------
    img : nib.Nifti1Image
        Image of a 4D volume with shape X,Y,Z,N
    blocksize : int, optional
        Number of volumes to return in a single batch
    start : int, optional
        Starting iteration index in the 4D volume
    end : int, optional
        Stopping iteration index in the 4D volume
        (the volume at this index is excluded); 0 means "up to the
        last volume".

    Yields
    -------
    tuple of (list of int, ndarray)
        The ids of the selected volumes, and the selected data as a 4D array
    """
    assert end <= img.shape[-1], "End limit provided is greater than the " \
                                 "total number of volumes in image"

    nb_volumes = img.shape[-1]
    # end == 0 is the sentinel for "iterate to the last volume".
    end = end if end else img.shape[-1]

    if blocksize == nb_volumes:
        # Single batch: load everything at once as float32.
        yield list(range(start, end)), \
            img.get_fdata(dtype=np.float32)[..., start:end]
    else:
        # Lazily slice img.dataobj so only `blocksize` volumes are in
        # memory at a time.
        stop = start
        for i in range(start, end - blocksize, blocksize):
            start, stop = i, i + blocksize
            logging.info("Loading volumes {} to {}.".format(start, stop - 1))
            yield list(range(start, stop)), img.dataobj[..., start:stop]

        # Remaining tail (possibly smaller than, or equal to, blocksize).
        if stop < end:
            logging.info(
                "Loading volumes {} to {}.".format(stop, end - 1))
            yield list(range(stop, end)), img.dataobj[..., stop:end]
16 changes: 16 additions & 0 deletions scilpy/reconst/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,22 @@ def find_order_from_nb_coeff(data):
return int((-3 + np.sqrt(1 + 8 * shape[-1])) / 2)


def get_sh_order_and_fullness(ncoeffs):
    """
    Get the order of the SH basis from the number of SH coefficients
    as well as a boolean indicating if the basis is full.

    Parameters
    ----------
    ncoeffs : int
        Number of SH coefficients.

    Returns
    -------
    tuple of (float, bool)
        The SH order, and True if the basis is full, False if symmetric.

    Raises
    ------
    ValueError
        If `ncoeffs` fits neither a symmetric nor a full basis.
    """
    # Invert ncoeffs = (n + 1)(n + 2) / 2 for the symmetric basis first.
    # The sym and full curves intersect only at ncoeffs = 1, where both
    # bases correspond to order 1, so trying sym first is safe.
    order = (np.sqrt(8.0 * ncoeffs + 1.0) - 3.0) / 2.0
    if order.is_integer():
        return order, False
    # Otherwise invert ncoeffs = (n + 1) ** 2 for the full basis.
    order = np.sqrt(ncoeffs) - 1.0
    if order.is_integer():
        return order, True
    raise ValueError('Invalid number of coefficients for SH basis.')


def _honor_authorsnames_sh_basis(sh_basis_type):
sh_basis = sh_basis_type
if sh_basis_type == 'fibernav':
Expand Down
16 changes: 7 additions & 9 deletions scilpy/segment/voting_scheme.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,13 +288,13 @@ def __call__(self, input_tractogram_path, nbr_processes=1, seeds=None):
slr_transform_type, seed])

tmp_dir, tmp_memmap_filenames = streamlines_to_memmap(wb_streamlines)
del wb_streamlines
comb_param_cluster = product(self.tractogram_clustering_thr, seeds)

# Clustering is now parallelized
pool = multiprocessing.Pool(nbr_processes)
all_rbx_dict = pool.map(single_clusterize_and_rbx_init,
zip(repeat(wb_streamlines),
repeat(tmp_memmap_filenames),
zip(repeat(tmp_memmap_filenames),
comb_param_cluster,
repeat(self.nb_points)))
pool.close()
Expand Down Expand Up @@ -363,8 +363,6 @@ def single_clusterize_and_rbx_init(args):
Parameters
----------
wb_streamlines : list or ArraySequence
All streamlines of the tractogram to segment.
tmp_memmap_filename: tuple (3)
Temporary filename for the data, offsets and lengths.
Expand All @@ -381,11 +379,11 @@ def single_clusterize_and_rbx_init(args):
rbx : dict
Initialisation of the recobundles class using specific parameters.
"""
wb_streamlines = args[0]
tmp_memmap_filename = args[1]
clustering_thr = args[2][0]
seed = args[2][1]
nb_points = args[3]
tmp_memmap_filename = args[0]
wb_streamlines = reconstruct_streamlines_from_memmap(tmp_memmap_filename)
clustering_thr = args[1][0]
seed = args[1][1]
nb_points = args[2]

rbx = {}
base_thresholds = [45, 35, 25]
Expand Down
43 changes: 43 additions & 0 deletions scilpy/tractanalysis/features.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,52 @@
from dipy.segment.metric import ResampleFeature
from dipy.segment.metric import AveragePointwiseEuclideanMetric
from dipy.tracking import metrics as tm
from scilpy.tracking.tools import resample_streamlines_num_points
import numpy as np


def detect_ushape(sft, minU, maxU):
    """
    Extract streamlines depending on their "u-shapeness".

    Parameters
    ----------
    sft : StatefulTractogram
        Tractogram used to extract streamlines depending on their
        u-shapeness.
    minU : float
        Minimum ufactor of a streamline.
    maxU : float
        Maximum ufactor of a streamline.

    Returns
    -------
    list
        The ids of clean streamlines. Only the ids are returned so
        proper filtering can be done afterwards.
    """
    kept_ids = []
    resampled = resample_streamlines_num_points(sft, 4)
    for idx, strl in enumerate(resampled.streamlines):
        if len(strl) != 4:
            continue
        # Three consecutive segment vectors along the 4-point streamline.
        # np.diff gives forward differences; the original backward
        # differences only negate each vector, and both negations cancel
        # inside each cross product, so the ufactor is unchanged.
        segments = np.diff(strl, axis=0)
        u0, u1, u2 = (seg / np.linalg.norm(seg) for seg in segments)

        ufactor = np.dot(np.cross(u0, u1), np.cross(u1, u2))
        if minU <= ufactor <= maxU:
            kept_ids.append(idx)

    return kept_ids


def remove_loops_and_sharp_turns(streamlines,
max_angle,
use_qb=False,
Expand Down
3 changes: 2 additions & 1 deletion scilpy/tractanalysis/todi.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,8 @@ def smooth_todi_spatial(self, sigma=0.5):
new_todi = deepcopy(tmp_todi)
else:
new_todi = np.hstack((new_todi, tmp_todi))
self.todi = np.delete(self.todi, range(0, chunk_size), axis=1)
self.todi = np.delete(self.todi, range(
0, min(self.todi.shape[1], chunk_size)), axis=1)
chunk_count -= 1

self.mask = new_mask
Expand Down
Loading

0 comments on commit e26a03d

Please sign in to comment.