
Commit

Merge pull request #371 from frheault/lesions_analysis
Lesions analysis
arnaudbore authored Jul 8, 2021
2 parents f4ea3d3 + d7cda04 commit e9280d2
Showing 7 changed files with 445 additions and 35 deletions.
2 changes: 1 addition & 1 deletion scilpy/io/utils.py
@@ -313,7 +313,7 @@ def check(path):
         if check_dir_exists:
             path_dir = os.path.dirname(path)
             if path_dir and not os.path.isdir(path_dir):
-                parser.error('Directory {} \n for a given output file '
+                parser.error('Directory {}/ \n for a given output file '
                              'does not exists.'.format(path_dir))

     if isinstance(required, str):
92 changes: 83 additions & 9 deletions scilpy/utils/metrics_tools.py
@@ -11,6 +11,80 @@
from scilpy.utils.filenames import split_name_with_nii


def compute_lesion_stats(map_data, lesion_atlas, single_label=True,
                         voxel_sizes=[1.0, 1.0, 1.0], min_lesion_vol=7,
                         precomputed_lesion_labels=None):
    """
    Returns information related to lesions inside a binary mask or voxel
    labels map (bundle, for tractometry).

    Parameters
    ------------
    map_data : np.ndarray
        Either a binary mask (uint8) or a voxel labels map (int16).
    lesion_atlas : np.ndarray (3)
        Labelled atlas of lesions. Should be int16.
    single_label : boolean
        If true, does not add an extra layer for number of labels.
    voxel_sizes : np.ndarray (3)
        If not specified, returns voxel count (instead of volume).
    min_lesion_vol : float
        Minimum lesion volume in mm3 (default: 7, cross-shape).
    precomputed_lesion_labels : np.ndarray (N)
        For connectivity analysis, when the unique lesion labels are known,
        providing a pre-computed list of labels saves computation.

    Returns
    ---------
    lesion_load_dict : dict
        For each label, volume and lesion count.
    """
    voxel_vol = np.prod(voxel_sizes)

    if single_label:
        labels_list = [1]
    else:
        labels_list = np.unique(map_data)[1:].astype(np.int32)

    section_dict = {'lesion_total_volume': {}, 'lesion_volume': {},
                    'lesion_count': {}}
    for label in labels_list:
        zlabel = str(label).zfill(3)
        if not single_label:
            tmp_mask = np.zeros(map_data.shape, dtype=np.int16)
            tmp_mask[map_data == label] = 1
            tmp_mask *= lesion_atlas
        else:
            tmp_mask = lesion_atlas * map_data

        lesion_vols = []
        if precomputed_lesion_labels is None:
            computed_lesion_labels = np.unique(tmp_mask)[1:]
        else:
            computed_lesion_labels = precomputed_lesion_labels

        for lesion in computed_lesion_labels:
            curr_vol = np.count_nonzero(tmp_mask[tmp_mask == lesion]) \
                * voxel_vol
            if curr_vol >= min_lesion_vol:
                lesion_vols.append(curr_vol)
        if lesion_vols:
            section_dict['lesion_total_volume'][zlabel] = round(
                np.sum(lesion_vols), 3)
            section_dict['lesion_volume'][zlabel] = np.round(
                lesion_vols, 3).tolist()
            section_dict['lesion_count'][zlabel] = float(len(lesion_vols))
        else:
            section_dict['lesion_total_volume'][zlabel] = 0.0
            section_dict['lesion_volume'][zlabel] = [0.0]
            section_dict['lesion_count'][zlabel] = 0.0

    if single_label:
        section_dict = {'lesion_total_volume': section_dict['lesion_total_volume']['001'],
                        'lesion_volume': section_dict['lesion_volume']['001'],
                        'lesion_count': section_dict['lesion_count']['001']}

    return section_dict


def get_bundle_metrics_profiles(sft, metrics_files):
"""
Returns the profile of each metric along each streamline from a sft.
@@ -161,11 +235,11 @@ def get_bundle_metrics_mean_std_per_point(streamlines, bundle_name,
     unique_labels = np.unique(labels)
     num_digits_labels = 3
     if density_weighting:
-        track_count = compute_tract_counts_map(streamlines,
-                                               metrics[0].shape)
+        streamlines_count = compute_tract_counts_map(streamlines,
+                                                     metrics[0].shape)
     else:
-        track_count = np.ones(metrics[0].shape)
-    track_count = track_count.astype(np.float64)
+        streamlines_count = np.ones(metrics[0].shape)
+    streamlines_count = streamlines_count.astype(np.float64)

     # Bigger weight near the centroid streamline
     distances_to_centroid_streamline = 1.0 / distances_to_centroid_streamline
@@ -190,9 +264,9 @@ def get_bundle_metrics_mean_std_per_point(streamlines, bundle_name,
             label_metric = metric_data[label_indices[:, 0],
                                        label_indices[:, 1],
                                        label_indices[:, 2]]
-            track_weight = track_count[label_indices[:, 0],
-                                       label_indices[:, 1],
-                                       label_indices[:, 2]]
+            track_weight = streamlines_count[label_indices[:, 0],
+                                             label_indices[:, 1],
+                                             label_indices[:, 2]]
             label_weight = track_weight
             if distance_weighting:
                 label_weight *= distances_to_centroid_streamline[labels == i]
@@ -259,8 +333,8 @@ def plot_metrics_stats(means, stds, title=None, xlabel=None,
         std = np.std(means, axis=1)
         alpha = 0.5
     else:
-        mean = np.array(means)
-        std = np.array(stds)
+        mean = np.array(means).ravel()
+        std = np.array(stds).ravel()
         alpha = 0.9

     dim = np.arange(1, len(mean)+1, 1)
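
Not part of the diff: a minimal usage sketch of the compute_lesion_stats helper added above, run on synthetic arrays. The toy volumes, lesion placement and 2 mm voxel size are invented for illustration.

import numpy as np
import scipy.ndimage as ndi

from scilpy.utils.metrics_tools import compute_lesion_stats

# Hypothetical toy data: a 3x3x3 bundle mask and a binary lesion mask.
bundle_mask = np.zeros((3, 3, 3), dtype=np.uint8)
bundle_mask[1, :, :] = 1      # bundle occupies the middle slab
lesion_mask = np.zeros((3, 3, 3), dtype=np.uint8)
lesion_mask[1, 1, :] = 1      # small lesion crossing the bundle

# Label connected components, as the new script does before calling the helper.
lesion_atlas, _ = ndi.label(lesion_mask)

stats = compute_lesion_stats(bundle_mask, lesion_atlas,
                             single_label=True,
                             voxel_sizes=[2.0, 2.0, 2.0],
                             min_lesion_vol=7)
# With single_label=True the returned dict holds scalars, roughly:
# {'lesion_total_volume': 24.0, 'lesion_volume': [24.0], 'lesion_count': 1.0}
print(stats)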
157 changes: 157 additions & 0 deletions scripts/scil_analyse_lesions_load.py
@@ -0,0 +1,157 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
This script will output informations about lesion load in bundle(s).
The input can either be streamlines, binary bundle map, or a bundle voxel
label map.
To be considered a valid lesion, the lesion volume must be at least
min_lesion_vol mm3. This avoid the detection of thousand of single voxel
lesions if an automatic lesion segmentation tool is used.
"""

import argparse
import json
import os

import nibabel as nib
import numpy as np
import scipy.ndimage as ndi


from scilpy.io.image import get_data_as_mask, get_data_as_label
from scilpy.io.streamlines import load_tractogram_with_reference
from scilpy.io.utils import (add_overwrite_arg,
                             assert_inputs_exist,
                             add_json_args,
                             assert_outputs_exist,
                             add_reference_arg)
from scilpy.segment.streamlines import filter_grid_roi
from scilpy.tractanalysis.streamlines_metrics import compute_tract_counts_map
from scilpy.utils.filenames import split_name_with_nii
from scilpy.utils.metrics_tools import compute_lesion_stats


def _build_arg_parser():
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter)

    p.add_argument('in_lesion',
                   help='Binary mask of the lesion(s) (.nii.gz).')
    p.add_argument('out_json',
                   help='Output file for lesion information (.json).')
    p1 = p.add_mutually_exclusive_group()
    p1.add_argument('--bundle',
                    help='Path of the bundle file (.trk).')
    p1.add_argument('--bundle_mask',
                    help='Path of the bundle binary mask (.nii.gz).')
    p1.add_argument('--bundle_labels_map',
                    help='Path of the bundle labels map (.nii.gz).')

    p.add_argument('--min_lesion_vol', type=float, default=7,
                   help='Minimum lesion volume in mm3 [%(default)s].')
    p.add_argument('--out_lesion_atlas', metavar='FILE',
                   help='Save the labelized lesion(s) map (.nii.gz).')
    p.add_argument('--out_lesion_stats', metavar='FILE',
                   help='Save the lesion-wise volume measure (.json).')
    p.add_argument('--out_streamlines_stats', metavar='FILE',
                   help='Save the lesion-wise streamline count (.json).')

    add_json_args(p)
    add_overwrite_arg(p)
    add_reference_arg(p)

    return p


def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    if (not args.bundle) and (not args.bundle_mask) \
            and (not args.bundle_labels_map):
        parser.error('One of the options --bundle, --bundle_mask or '
                     '--bundle_labels_map must be used.')

    assert_inputs_exist(parser, [args.in_lesion],
                        optional=[args.bundle, args.bundle_mask,
                                  args.bundle_labels_map])
    assert_outputs_exist(parser, args, args.out_json,
                         optional=[args.out_lesion_stats,
                                   args.out_streamlines_stats])

    lesion_img = nib.load(args.in_lesion)
    lesion_data = get_data_as_mask(lesion_img, dtype=np.bool)

    if args.bundle:
        bundle_name, _ = split_name_with_nii(os.path.basename(args.bundle))
        sft = load_tractogram_with_reference(parser, args, args.bundle)
        sft.to_vox()
        sft.to_corner()
        streamlines = sft.get_streamlines_copy()
        map_data = compute_tract_counts_map(streamlines,
                                            lesion_data.shape)
        map_data[map_data > 0] = 1
    elif args.bundle_mask:
        bundle_name, _ = split_name_with_nii(
            os.path.basename(args.bundle_mask))
        map_img = nib.load(args.bundle_mask)
        map_data = get_data_as_mask(map_img)
    else:
        bundle_name, _ = split_name_with_nii(os.path.basename(
            args.bundle_labels_map))
        map_img = nib.load(args.bundle_labels_map)
        map_data = get_data_as_label(map_img)

    is_single_label = args.bundle_labels_map is None
    voxel_sizes = lesion_img.header.get_zooms()[0:3]
    lesion_atlas, _ = ndi.label(lesion_data)

    lesion_load_dict = compute_lesion_stats(
        map_data, lesion_atlas, single_label=is_single_label,
        voxel_sizes=voxel_sizes, min_lesion_vol=args.min_lesion_vol)

    if args.out_lesion_atlas:
        # lesion_atlas *= map_data.astype(np.bool)
        nib.save(nib.Nifti1Image(lesion_atlas, lesion_img.affine),
                 args.out_lesion_atlas)

    volume_dict = {bundle_name: lesion_load_dict}
    with open(args.out_json, 'w') as outfile:
        json.dump(volume_dict, outfile,
                  sort_keys=args.sort_keys, indent=args.indent)

    if args.out_streamlines_stats or args.out_lesion_stats:
        lesion_dict = {}
        for lesion in np.unique(lesion_atlas)[1:]:
            curr_vol = np.count_nonzero(lesion_atlas[lesion_atlas == lesion]) \
                * np.prod(voxel_sizes)
            if curr_vol >= args.min_lesion_vol:
                key = str(lesion).zfill(4)
                lesion_dict[key] = {'volume': curr_vol}
                if args.bundle:
                    tmp = np.zeros(lesion_atlas.shape)
                    tmp[lesion_atlas == lesion] = 1
                    new_sft, _ = filter_grid_roi(sft, tmp, 'any', False)
                    lesion_dict[key]['strs_count'] = len(new_sft)

        lesion_vol_dict = {bundle_name: {}}
        streamlines_count_dict = {bundle_name: {'streamlines_count': {}}}
        for key in lesion_dict.keys():
            lesion_vol_dict[bundle_name][key] = lesion_dict[key]['volume']
            if args.bundle:
                streamlines_count_dict[bundle_name]['streamlines_count'][key] = \
                    lesion_dict[key]['strs_count']

        if args.out_lesion_stats:
            with open(args.out_lesion_stats, 'w') as outfile:
                json.dump(lesion_vol_dict, outfile,
                          sort_keys=args.sort_keys, indent=args.indent)
        if args.out_streamlines_stats:
            with open(args.out_streamlines_stats, 'w') as outfile:
                json.dump(streamlines_count_dict, outfile,
                          sort_keys=args.sort_keys, indent=args.indent)


if __name__ == "__main__":
    main()
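
For context, and not part of the PR: a small self-contained sketch of the connected-component labeling and minimum-volume filtering that main() applies to the lesion mask. The toy mask, voxel size and threshold values are invented for illustration.

import numpy as np
import scipy.ndimage as ndi

# Hypothetical binary lesion mask, e.g. from an automatic segmentation tool:
# one plausible lesion plus an isolated single-voxel false positive.
lesion_data = np.zeros((10, 10, 10), dtype=np.uint8)
lesion_data[2:5, 2:5, 2:5] = 1      # 27-voxel lesion
lesion_data[8, 8, 8] = 1            # 1-voxel speck

voxel_sizes = (1.0, 1.0, 1.0)       # assumed isotropic 1 mm voxels
min_lesion_vol = 7.0                # same default threshold as the script, in mm3

# Same step as the script: each connected component becomes a lesion label.
lesion_atlas, nb_lesions = ndi.label(lesion_data)

for lesion in np.unique(lesion_atlas)[1:]:
    curr_vol = np.count_nonzero(lesion_atlas == lesion) * np.prod(voxel_sizes)
    key = str(lesion).zfill(4)      # same zero-padded key as in the script
    print(key, curr_vol, 'kept' if curr_vol >= min_lesion_vol else 'discarded')
# Expected: 0001 is kept (27.0 mm3), 0002 is discarded (1.0 mm3).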