From f32f6dd169d955c793b5db977152215fe90e6d7f Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Wed, 20 Mar 2024 16:20:29 -0400 Subject: [PATCH] Add band over censored volumes in executive summary carpet plots (#1077) --- .circleci/config.yml | 16 +- xcp_d/cli/run.py | 2 +- xcp_d/interfaces/plotting.py | 44 +++-- xcp_d/tests/test_utils_plotting.py | 28 ++- xcp_d/utils/bids.py | 2 +- xcp_d/utils/plotting.py | 276 ++++++++++++----------------- xcp_d/workflows/plotting.py | 1 + 7 files changed, 170 insertions(+), 199 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d001166f5..41812221a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -155,7 +155,7 @@ jobs: paths: - .coverage.fmriprep_without_freesurfer - store_artifacts: - path: /src/xcp_d/.circleci/out/test_fmriprep_without_freesurfer/xcp_d/ + path: /src/xcp_d/.circleci/out/test_fmriprep_without_freesurfer/ ds001419_nifti: <<: *dockersetup @@ -186,7 +186,7 @@ jobs: paths: - .coverage.ds001419_nifti - store_artifacts: - path: /src/xcp_d/.circleci/out/test_ds001419_nifti/xcp_d/ + path: /src/xcp_d/.circleci/out/test_ds001419_nifti/ ds001419_cifti: <<: *dockersetup @@ -217,7 +217,7 @@ jobs: paths: - .coverage.ds001419_cifti - store_artifacts: - path: /src/xcp_d/.circleci/out/test_ds001419_cifti/xcp_d/ + path: /src/xcp_d/.circleci/out/test_ds001419_cifti/ ukbiobank: <<: *dockersetup @@ -235,7 +235,7 @@ jobs: key: ukbiobank-08 - run: *runinstall - run: - name: Run full xcp_d on nifti with freesurfer + name: Run full xcp_d on UK Biobank data no_output_timeout: 1h command: | pytest -rP -o log_cli=true -m "ukbiobank" --cov-append --cov-report term-missing --cov=xcp_d --data_dir=/src/xcp_d/.circleci/data --output_dir=/src/xcp_d/.circleci/out --working_dir=/src/xcp_d/.circleci/work xcp_d @@ -248,7 +248,7 @@ jobs: paths: - .coverage.ukbiobank - store_artifacts: - path: /src/xcp_d/.circleci/out/test_ukbiobank/xcp_d/ + path: /src/xcp_d/.circleci/out/test_ukbiobank/ nibabies: <<: *dockersetup @@ -279,7 +279,7 @@ jobs: paths: - .coverage.nibabies - store_artifacts: - path: /src/xcp_d/.circleci/out/test_nibabies/xcp_d/ + path: /src/xcp_d/.circleci/out/test_nibabies/ pnc_cifti: <<: *dockersetup @@ -310,7 +310,7 @@ jobs: paths: - .coverage.pnc_cifti - store_artifacts: - path: /src/xcp_d/.circleci/out/test_pnc_cifti/xcp_d/ + path: /src/xcp_d/.circleci/out/test_pnc_cifti/ pnc_cifti_t2wonly: <<: *dockersetup @@ -341,7 +341,7 @@ jobs: paths: - .coverage.pnc_cifti_t2wonly - store_artifacts: - path: /src/xcp_d/.circleci/out/test_pnc_cifti_t2wonly/xcp_d/ + path: /src/xcp_d/.circleci/out/test_pnc_cifti_t2wonly/ pytests: <<: *dockersetup diff --git a/xcp_d/cli/run.py b/xcp_d/cli/run.py index 323832231..157e7f3e3 100644 --- a/xcp_d/cli/run.py +++ b/xcp_d/cli/run.py @@ -893,7 +893,7 @@ def build_workflow(opts, retval): ``multiprocessing.Process`` that allows fmriprep to enforce a hard-limited memory-scope. 
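# Editor's note: the docstring above describes running the workflow builder inside a
# separate ``multiprocessing.Process`` so the parent process keeps a hard memory
# boundary. The sketch below is a generic, hypothetical illustration of that pattern,
# not xcp_d's actual builder API; the ``_build`` function and ``retval`` keys are
# placeholders.
import multiprocessing as mp


def _build(opts, retval):
    # In xcp_d this is where the Nipype workflow would be constructed.
    retval["return_code"] = 0
    retval["workflow"] = None  # placeholder for the built workflow object


if __name__ == "__main__":
    with mp.Manager() as manager:
        retval = manager.dict()
        proc = mp.Process(target=_build, args=(None, retval))
        proc.start()
        proc.join()
        print(retval.get("return_code"))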
""" - from bids import BIDSLayout + from bids.layout import BIDSLayout from nipype import config as ncfg from nipype import logging as nlogging diff --git a/xcp_d/interfaces/plotting.py b/xcp_d/interfaces/plotting.py index d74aa5006..443c9a71d 100644 --- a/xcp_d/interfaces/plotting.py +++ b/xcp_d/interfaces/plotting.py @@ -84,17 +84,18 @@ def _run_interface(self, runtime): # The number of colors in the palette depends on whether there are random censors or not palette = sns.color_palette("colorblind", 4 + censoring_df.shape[1]) - fig, ax = plt.subplots(figsize=(16, 8)) - time_array = np.arange(preproc_fd_timeseries.size) * self.inputs.TR - ax.plot( - time_array, - preproc_fd_timeseries, - label="Raw Framewise Displacement", - color=palette[0], - ) - ax.axhline(self.inputs.fd_thresh, label="Outlier Threshold", color="gray", alpha=0.5) + with sns.axes_style("whitegrid"): + fig, ax = plt.subplots(figsize=(8, 4)) + + ax.plot( + time_array, + preproc_fd_timeseries, + label="Raw Framewise Displacement", + color=palette[0], + ) + ax.axhline(self.inputs.fd_thresh, label="Outlier Threshold", color="salmon", alpha=0.5) dummy_scans = self.inputs.dummy_scans # This check is necessary, because init_prepare_confounds_wf connects dummy_scans from the @@ -182,9 +183,9 @@ def _run_interface(self, runtime): alpha=0.5, ) - ax.set_xlabel("Time (seconds)", fontsize=20) - ax.set_ylabel("Movement (millimeters)", fontsize=20) - ax.legend(fontsize=20) + ax.set_xlabel("Time (seconds)", fontsize=10) + ax.set_ylabel("Movement (millimeters)", fontsize=10) + ax.legend(fontsize=10) fig.tight_layout() self._results["out_file"] = fname_presuffix( @@ -563,14 +564,19 @@ class _QCPlotsESInputSpec(BaseInterfaceInputSpec): mandatory=True, desc="TSV file with filtered motion parameters.", ) + temporal_mask = File( + exists=True, + mandatory=True, + desc="TSV file with temporal mask.", + ) TR = traits.Float(default_value=1, desc="Repetition time") standardize = traits.Bool( mandatory=True, desc=( "Whether to standardize the data or not. " "If False, then the preferred DCAN version of the plot will be generated, " - "where the BOLD data are not rescaled, and the carpet plot has color limits of -600 " - "and 600. " + "where the BOLD data are not rescaled, and the carpet plot has color limits from " + "the 2.5th percentile to the 97.5th percentile. " "If True, then the BOLD data will be z-scored and the color limits will be -2 and 2." 
), ) @@ -610,14 +616,14 @@ class QCPlotsES(SimpleInterface): output_spec = _QCPlotsESOutputSpec def _run_interface(self, runtime): - preprocessed_bold_figure = fname_presuffix( + preprocessed_figure = fname_presuffix( "carpetplot_before_", suffix="file.svg", newpath=runtime.cwd, use_ext=False, ) - denoised_bold_figure = fname_presuffix( + denoised_figure = fname_presuffix( "carpetplot_after_", suffix="file.svg", newpath=runtime.cwd, @@ -635,12 +641,12 @@ def _run_interface(self, runtime): self._results["before_process"], self._results["after_process"] = plot_fmri_es( preprocessed_bold=self.inputs.preprocessed_bold, - denoised_censored_bold=self.inputs.denoised_interpolated_bold, denoised_interpolated_bold=self.inputs.denoised_interpolated_bold, TR=self.inputs.TR, filtered_motion=self.inputs.filtered_motion, - preprocessed_bold_figure=preprocessed_bold_figure, - denoised_bold_figure=denoised_bold_figure, + temporal_mask=self.inputs.temporal_mask, + preprocessed_figure=preprocessed_figure, + denoised_figure=denoised_figure, standardize=self.inputs.standardize, temporary_file_dir=runtime.cwd, mask=mask_file, diff --git a/xcp_d/tests/test_utils_plotting.py b/xcp_d/tests/test_utils_plotting.py index 2cb2c70ca..037ea54d4 100644 --- a/xcp_d/tests/test_utils_plotting.py +++ b/xcp_d/tests/test_utils_plotting.py @@ -2,6 +2,9 @@ import os +import numpy as np +import pandas as pd + from xcp_d.utils import plotting @@ -10,25 +13,32 @@ def test_plot_fmri_es(ds001419_data, tmp_path_factory): tmpdir = tmp_path_factory.mktemp("test_plot_fmri_es") preprocessed_bold = ds001419_data["cifti_file"] - denoised_censored_bold = ds001419_data["cifti_file"] denoised_interpolated_bold = ds001419_data["cifti_file"] # Using unfiltered FD instead of calculating filtered version. 
filtered_motion = ds001419_data["confounds_file"] - preprocessed_bold_figure = os.path.join(tmpdir, "unprocessed.svg") - denoised_bold_figure = os.path.join(tmpdir, "processed.svg") + preprocessed_figure = os.path.join(tmpdir, "unprocessed.svg") + denoised_figure = os.path.join(tmpdir, "processed.svg") t_r = 2 + n_volumes = pd.read_table(filtered_motion).shape[0] + tmask_arr = np.zeros(n_volumes, dtype=bool) + tmask_arr[:10] = True # flag first 10 volumes as bad + tmask_arr = tmask_arr.astype(int) + temporal_mask = os.path.join(tmpdir, "temporal_mask.tsv") + pd.DataFrame(columns=["framewise_displacement"], data=tmask_arr).to_csv( + temporal_mask, sep="\t", index=False + ) out_file1, out_file2 = plotting.plot_fmri_es( preprocessed_bold=preprocessed_bold, - denoised_censored_bold=denoised_censored_bold, denoised_interpolated_bold=denoised_interpolated_bold, filtered_motion=filtered_motion, - preprocessed_bold_figure=preprocessed_bold_figure, - denoised_bold_figure=denoised_bold_figure, + preprocessed_figure=preprocessed_figure, + denoised_figure=denoised_figure, TR=t_r, standardize=False, temporary_file_dir=tmpdir, + temporal_mask=temporal_mask, ) assert os.path.isfile(out_file1) assert os.path.isfile(out_file2) @@ -38,14 +48,14 @@ def test_plot_fmri_es(ds001419_data, tmp_path_factory): out_file1, out_file2 = plotting.plot_fmri_es( preprocessed_bold=preprocessed_bold, - denoised_censored_bold=denoised_censored_bold, denoised_interpolated_bold=denoised_interpolated_bold, filtered_motion=filtered_motion, - preprocessed_bold_figure=preprocessed_bold_figure, - denoised_bold_figure=denoised_bold_figure, + preprocessed_figure=preprocessed_figure, + denoised_figure=denoised_figure, TR=t_r, standardize=True, temporary_file_dir=tmpdir, + temporal_mask=temporal_mask, ) assert os.path.isfile(out_file1) assert os.path.isfile(out_file2) diff --git a/xcp_d/utils/bids.py b/xcp_d/utils/bids.py index ae3476843..fd18dd5d4 100644 --- a/xcp_d/utils/bids.py +++ b/xcp_d/utils/bids.py @@ -11,7 +11,7 @@ import nibabel as nb import yaml -from bids import BIDSLayout +from bids.layout import BIDSLayout from nipype import logging from packaging.version import Version diff --git a/xcp_d/utils/plotting.py b/xcp_d/utils/plotting.py index e327b1a29..30e9ed214 100644 --- a/xcp_d/utils/plotting.py +++ b/xcp_d/utils/plotting.py @@ -21,26 +21,34 @@ from xcp_d.utils.write_save import read_ndata, write_ndata -def _decimate_data(data, seg_data, size): +def _decimate_data(data, seg_data, temporal_mask, size): """Decimate timeseries data. Parameters ---------- data : ndarray - 2 element array of timepoints and samples + 2D array of timepoints and samples seg_data : ndarray - 1 element array of samples + 1D array of samples + temporal_mask : ndarray or None + 1D array of timepoints. May be None. 
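# Editor's note: a toy demonstration (not xcp_d code) of the stride-based decimation
# performed by ``_decimate_data``. The point of passing ``temporal_mask`` through is
# that it must be subsampled with the same temporal stride as the data so the
# censoring bands still line up after decimation. Array sizes are illustrative.
import numpy as np

data = np.arange(200 * 50, dtype=float).reshape(200, 50)  # samples x timepoints
tmask = np.zeros(50, dtype=bool)
tmask[10:15] = True  # pretend volumes 10-14 were censored

size = (100, 25)
p_dec = 1 + data.shape[0] // size[0]  # spatial stride
t_dec = 1 + data.shape[1] // size[1]  # temporal stride
data = data[::p_dec, ::t_dec]
tmask = tmask[::t_dec]
assert data.shape[1] == tmask.size  # mask still matches the decimated time axis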
size : tuple 2 element for P/T decimation """ + # Decimate the data in the spatial dimension p_dec = 1 + data.shape[0] // size[0] if p_dec: data = data[::p_dec, :] seg_data = seg_data[::p_dec] + + # Decimate the data in the temporal dimension t_dec = 1 + data.shape[1] // size[1] if t_dec: data = data[:, ::t_dec] - return data, seg_data + if temporal_mask is not None: + temporal_mask = temporal_mask[::t_dec] + + return data, seg_data, temporal_mask def plot_confounds( @@ -77,7 +85,6 @@ def plot_confounds( time_series_axis grid_specification """ - sns.set_style("whitegrid") # Define TR and number of frames no_repetition_time = False if TR is None: # Set default Repetition Time @@ -248,8 +255,6 @@ def plot_confounds( def plot_dvars_es(time_series, ax, run_index=None): """Create DVARS plot for the executive summary.""" - sns.set_style("whitegrid") - ax.grid(False) ntsteps = time_series.shape[0] @@ -269,7 +274,6 @@ def plot_dvars_es(time_series, ax, run_index=None): colors = { "Pre regression": "#68AC57", - "Post regression": "#8E549F", "Post all": "#EF8532", } for c in columns: @@ -280,7 +284,7 @@ def plot_dvars_es(time_series, ax, run_index=None): if run_index is not None: for run_location in run_index: - ax.axvline(run_location, color="yellow") + ax.axvline(run_location, color="black", linestyle="--") # Set limits and format minimum_x_value = [abs(x) for x in minimum_values] @@ -304,8 +308,6 @@ def plot_dvars_es(time_series, ax, run_index=None): def plot_global_signal_es(time_series, ax, run_index=None): """Create global signal plot for the executive summary.""" - sns.set_style("whitegrid") - ntsteps = time_series.shape[0] ax.grid(False) @@ -353,7 +355,7 @@ def plot_global_signal_es(time_series, ax, run_index=None): if run_index is not None: for run_location in run_index: - ax.axvline(run_location, color="yellow") + ax.axvline(run_location, color="black", linestyle="--") ax.set_xlim((0, ntsteps - 1)) @@ -382,8 +384,6 @@ def plot_framewise_displacement_es( run_index=None, ): """Create framewise displacement plot for the executive summary.""" - sns.set_style("whitegrid") - ntsteps = time_series.shape[0] ax.grid(axis="y") @@ -392,7 +392,7 @@ def plot_framewise_displacement_es( xticks = list(range(0, ntsteps)[::interval]) ax.set_xticks(xticks) - # Set the x-axis labels + # Set the x-axis labels based on time, not index ax.set_xlabel("Time (s)") labels = TR * np.array(xticks) labels = labels.astype(int) @@ -458,9 +458,9 @@ def plot_framewise_displacement_es( ) if run_index is not None: - run_index = run_index * TR + # FD plots use time series index, not time, as x-axis for run_location in run_index: - ax.axvline(run_location, color="yellow") + ax.axvline(run_location, color="black", linestyle="--") ax.set_xlim((0, ntsteps - 1)) ax.set_ylim(0, ymax) @@ -480,13 +480,14 @@ def plot_framewise_displacement_es( @fill_doc def plot_fmri_es( + *, preprocessed_bold, - denoised_censored_bold, denoised_interpolated_bold, TR, filtered_motion, - preprocessed_bold_figure, - denoised_bold_figure, + temporal_mask, + preprocessed_figure, + denoised_figure, standardize, temporary_file_dir, mask=None, @@ -499,18 +500,21 @@ def plot_fmri_es( ---------- preprocessed_bold : :obj:`str` Preprocessed BOLD file, dummy scan removal. - denoised_censored_bold %(denoised_interpolated_bold)s %(TR)s %(filtered_motion)s - preprocessed_bold_figure : :obj:`str` + %(temporal_mask)s + Only non-outlier (low-motion) volumes in the temporal mask will be used to scale + the carpet plot. 
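# Editor's note: a short self-contained sketch of the scaling behavior described
# above, mirroring the lines this patch adds to ``plot_fmri_es``/``plot_carpet``.
# It assumes the temporal mask has a ``framewise_displacement`` column of 0/1 flags
# (1 = censored volume); the toy BOLD array is an assumption for demonstration only.
import numpy as np
import pandas as pd

tmask_df = pd.DataFrame({"framewise_displacement": [0] * 40 + [1] * 10})
tmask = tmask_df["framewise_displacement"].values.astype(bool)
bold = np.random.default_rng(1).normal(600, 50, size=(500, tmask.size))  # voxels x volumes

# Percentile-based color limits computed from non-outlier (low-motion) volumes only
vlimits = tuple(np.percentile(bold[:, ~tmask], q=(2.5, 97.5)))
print(vlimits)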
+ preprocessed_figure : :obj:`str` output file svg before processing - denoised_bold_figure : :obj:`str` + denoised_figure : :obj:`str` output file svg after processing standardize : :obj:`bool` Whether to standardize the data or not. If False, then the preferred DCAN version of the plot will be generated, - where the BOLD data are not rescaled, and the carpet plot has color limits of -600 and 600. + where the BOLD data are not rescaled, and the carpet plot has color limits from the + 2.5th percentile to the 97.5th percentile. If True, then the BOLD data will be z-scored and the color limits will be -2 and 2. temporary_file_dir : :obj:`str` Path in which to store temporary files. @@ -524,79 +528,58 @@ def plot_fmri_es( If not None, this should be an array/list of integers, indicating the volumes. """ # Compute dvars correctly if not already done - preprocessed_bold_arr = read_ndata(datafile=preprocessed_bold, maskfile=mask) - uncensored_denoised_bold_arr = read_ndata(datafile=denoised_censored_bold, maskfile=mask) - filtered_denoised_bold_arr = read_ndata(datafile=denoised_interpolated_bold, maskfile=mask) - - preprocessed_bold_dvars = compute_dvars(preprocessed_bold_arr) - uncensored_denoised_bold_dvars = compute_dvars(uncensored_denoised_bold_arr) - filtered_denoised_bold_dvars = compute_dvars(filtered_denoised_bold_arr) - - if not ( - preprocessed_bold_dvars.shape - == uncensored_denoised_bold_dvars.shape - == filtered_denoised_bold_dvars.shape - ): - raise ValueError( - "Shapes do not match:\n" - f"\t{preprocessed_bold}: {preprocessed_bold_arr.shape}\n" - f"\t{denoised_censored_bold}: {uncensored_denoised_bold_arr.shape}\n" - f"\t{denoised_interpolated_bold}: {filtered_denoised_bold_arr.shape}\n\n" - ) + preprocessed_arr = read_ndata(datafile=preprocessed_bold, maskfile=mask) + denoised_interpolated_arr = read_ndata(datafile=denoised_interpolated_bold, maskfile=mask) - if not ( - preprocessed_bold_arr.shape - == uncensored_denoised_bold_arr.shape - == filtered_denoised_bold_arr.shape - ): + preprocessed_dvars = compute_dvars(preprocessed_arr) + denoised_interpolated_dvars = compute_dvars(denoised_interpolated_arr) + + if preprocessed_arr.shape != denoised_interpolated_arr.shape: raise ValueError( "Shapes do not match:\n" - f"\t{preprocessed_bold}: {preprocessed_bold_arr.shape}\n" - f"\t{denoised_censored_bold}: {uncensored_denoised_bold_arr.shape}\n" - f"\t{denoised_interpolated_bold}: {filtered_denoised_bold_arr.shape}\n\n" + f"\t{preprocessed_bold}: {preprocessed_arr.shape}\n" + f"\t{denoised_interpolated_bold}: {denoised_interpolated_arr.shape}\n\n" ) - # Formatting & setting of files - sns.set_style("whitegrid") - # Create dataframes for the bold_data DVARS, FD dvars_regressors = pd.DataFrame( { - "Pre regression": preprocessed_bold_dvars, - "Post regression": uncensored_denoised_bold_dvars, - "Post all": filtered_denoised_bold_dvars, + "Pre regression": preprocessed_dvars, + "Post all": denoised_interpolated_dvars, } ) fd_regressor = pd.read_table(filtered_motion)["framewise_displacement"].values + tmask_arr = pd.read_table(temporal_mask)["framewise_displacement"].values.astype(bool) # The mean and standard deviation of the preprocessed data, # after mean-centering and detrending. 
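# Editor's note: ``compute_dvars`` above is xcp_d's own helper; the following is only
# a simplified stand-in showing the basic idea (root-mean-square of the
# volume-to-volume signal change across voxels), not the exact implementation used in
# this patch.
import numpy as np


def simple_dvars(bold):
    """bold: 2D array, voxels/vertices x timepoints."""
    diff = np.diff(bold, axis=1)
    dvars = np.sqrt(np.mean(diff**2, axis=0))
    # Prepend a zero so the series has one value per volume, as the plots expect.
    return np.concatenate([[0.0], dvars])


bold = np.random.default_rng(2).normal(600, 50, size=(1000, 120))
print(simple_dvars(bold).shape)  # (120,)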
- preprocessed_bold_timeseries = pd.DataFrame( + preprocessed_timeseries = pd.DataFrame( { - "Mean": np.nanmean(preprocessed_bold_arr, axis=0), - "Std": np.nanstd(preprocessed_bold_arr, axis=0), + "Mean": np.nanmean(preprocessed_arr, axis=0), + "Std": np.nanstd(preprocessed_arr, axis=0), } ) # The mean and standard deviation of the denoised data, with bad volumes included. - uncensored_denoised_bold_timeseries = pd.DataFrame( + denoised_interpolated_timeseries = pd.DataFrame( { - "Mean": np.nanmean(uncensored_denoised_bold_arr, axis=0), - "Std": np.nanstd(uncensored_denoised_bold_arr, axis=0), + "Mean": np.nanmean(denoised_interpolated_arr, axis=0), + "Std": np.nanstd(denoised_interpolated_arr, axis=0), } ) + atlaslabels = None if seg_data is not None: atlaslabels = nb.load(seg_data).get_fdata() - else: - atlaslabels = None + rm_temp_file = False + temp_preprocessed_file = preprocessed_bold if not standardize: # The plot going to carpet plot will be mean-centered and detrended, # but will not otherwise be rescaled. - detrended_preprocessed_bold_arr = clean( - preprocessed_bold_arr.T, + detrended_preprocessed_arr = clean( + preprocessed_arr.T, t_r=TR, detrend=True, filter=False, @@ -612,19 +595,16 @@ def plot_fmri_es( # Write out the scaled data temp_preprocessed_file = write_ndata( - data_matrix=detrended_preprocessed_bold_arr, - template=denoised_censored_bold, # residuals file is censored, so length matches + data_matrix=detrended_preprocessed_arr, + template=preprocessed_bold, filename=temp_preprocessed_file, mask=mask, TR=TR, ) - else: - rm_temp_file = False - temp_preprocessed_file = preprocessed_bold - files_for_carpet = [temp_preprocessed_file, denoised_censored_bold] - figure_names = [preprocessed_bold_figure, denoised_bold_figure] - data_arrays = [preprocessed_bold_timeseries, uncensored_denoised_bold_timeseries] + files_for_carpet = [temp_preprocessed_file, denoised_interpolated_bold] + figure_names = [preprocessed_figure, denoised_figure] + data_arrays = [preprocessed_timeseries, denoised_interpolated_timeseries] for i_fig, figure_name in enumerate(figure_names): file_for_carpet = files_for_carpet[i_fig] data_arr = data_arrays[i_fig] @@ -668,11 +648,15 @@ def plot_fmri_es( plot_carpet( func=file_for_carpet, atlaslabels=atlaslabels, - TR=TR, + standardize=standardize, # Data are already detrended if standardize is False + size=(950, 800), + labelsize=30, subplot=grid[2], # Use grid for now. 
- detrend=standardize, # Data are already detrended if standardize is False - legend=False, + output_file=None, + TR=TR, colorbar=True, + lut=None, + temporal_mask=tmask_arr, ) # The FD plot at the bottom @@ -684,7 +668,7 @@ def plot_fmri_es( wspace=0.0, ) ax3 = plt.subplot(gridspec3[1]) - plot_framewise_displacement_es(fd_regressor, ax3, run_index=run_index, TR=TR) + plot_framewise_displacement_es(fd_regressor, ax3, TR=TR, run_index=run_index) # Save out the before processing file fig.savefig(figure_name, bbox_inches="tight", pad_inches=None, dpi=300) @@ -695,7 +679,7 @@ def plot_fmri_es( os.remove(temp_preprocessed_file) # Save out the after processing file - return preprocessed_bold_figure, denoised_bold_figure + return preprocessed_figure, denoised_figure @fill_doc @@ -737,7 +721,6 @@ def __init__( self.TR = TR or _get_tr(func_img) self.mask_data = None self.seg_data = None - sns.set_style("whitegrid") if not isinstance(func_img, nb.Cifti2Image): # If Nifti self.mask_data = nb.fileslice.strided_scalar(func_img.shape[:3], np.uint8(1)) @@ -770,7 +753,6 @@ def __init__( def plot(self, labelsize, figure=None): """Perform main plotting step.""" # Layout settings - sns.set_style("whitegrid") sns.set_context("paper", font_scale=1) if figure is None: @@ -810,29 +792,34 @@ def plot(self, labelsize, figure=None): # Carpet plot plot_carpet( - self.func_file, + func=self.func_file, atlaslabels=self.seg_data, + standardize=True, + size=(950, 800), + labelsize=labelsize, subplot=grid[-1], - detrend=True, + output_file=None, TR=self.TR, - labelsize=labelsize, colorbar=False, + lut=None, + temporal_mask=None, ) return figure def plot_carpet( + *, func, - atlaslabels=None, - detrend=True, + atlaslabels, + TR, + standardize, + temporal_mask=None, size=(950, 800), labelsize=30, subplot=None, - output_file=None, - legend=True, - TR=None, lut=None, colorbar=False, + output_file=None, ): """Plot an image representation of voxel intensities across time. @@ -847,7 +834,7 @@ def plot_carpet( A 3D array of integer labels from an atlas, resampled into ``img`` space. Required if ``func`` is a NIfTI image. Unused if ``func`` is a CIFTI. - detrend : bool, optional + standardize : bool, optional Detrend and standardize the data prior to plotting. size : tuple, optional Size of figure. @@ -868,11 +855,9 @@ def plot_carpet( colorbar : bool, optional Default is False. 
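# Editor's note: the patch puts ``*`` first in the ``plot_carpet`` (and
# ``plot_fmri_es``) signatures, making all arguments keyword-only. A tiny generic
# illustration of that pattern follows; ``plot_example`` is a hypothetical function,
# not xcp_d code.
def plot_example(*, func, atlaslabels, TR, standardize, temporal_mask=None):
    return func, atlaslabels, TR, standardize, temporal_mask


plot_example(func="bold.nii.gz", atlaslabels=None, TR=0.8, standardize=False)
# plot_example("bold.nii.gz", None, 0.8, False)  # would raise TypeError: positional args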
""" - epinii = None - segnii = None img = nb.load(func) - sns.set_style("whitegrid") - if isinstance(img, nb.Cifti2Image): # Cifti + + if isinstance(img, nb.Cifti2Image): # CIFTI assert ( img.nifti_header.get_intent()[0] == "ConnDenseSeries" ), f"Not a dense timeseries: {img.nifti_header.get_intent()[0]}, {func}" @@ -899,24 +884,8 @@ def plot_carpet( seg_data[brain_model.index_offset : index_final] = lidx assert len(seg_data[seg_data < 1]) == 0, "Unassigned labels" - # Decimate data - data, seg_data = _decimate_data(data, seg_data, size) - # Preserve continuity - order = seg_data.argsort(kind="stable") - # Get color maps - cmap = ListedColormap([cm.get_cmap("Paired").colors[i] for i in (1, 0, 7, 3)]) - assert len(cmap.colors) == len( - struct_map - ), "Mismatch between expected # of structures and colors" - - # ensure no legend for CIFTI - legend = False - else: # Volumetric NIfTI - img_nii = check_niimg_4d( - img, - dtype="auto", - ) # Check the image is in nifti format + img_nii = check_niimg_4d(img, dtype="auto") # Check the image is in nifti format func_data = safe_get_data(img_nii, ensure_finite=True) ntsteps = func_data.shape[-1] data = func_data[atlaslabels > 0].reshape(-1, ntsteps) @@ -932,59 +901,35 @@ def plot_carpet( # Apply lookup table seg_data = lut[oseg.astype(int)] - # Decimate data - data, seg_data = _decimate_data(data, seg_data, size) + # Decimate data + data, seg_data, temporal_mask = _decimate_data(data, seg_data, temporal_mask, size) + + if isinstance(img, nb.Cifti2Image): + # Preserve continuity + order = seg_data.argsort(kind="stable") + # Get color maps + cmap = ListedColormap([cm.get_cmap("Paired").colors[i] for i in (1, 0, 7, 3)]) + assert len(cmap.colors) == len( + struct_map + ), "Mismatch between expected # of structures and colors" + else: # Order following segmentation labels order = np.argsort(seg_data)[::-1] # Set colormap cmap = ListedColormap(cm.get_cmap("tab10").colors[:4][::-1]) - if legend: - epiavg = func_data.mean(3) - epinii = nb.Nifti1Image(epiavg, img_nii.affine, img_nii.header) - segnii = nb.Nifti1Image(lut[atlaslabels.astype(int)], epinii.affine, epinii.header) - segnii.set_data_dtype("uint8") - - return _carpet( - func, - data, - seg_data, - order, - cmap, - labelsize, - TR=TR, - detrend=detrend, - colorbar=colorbar, - subplot=subplot, - output_file=output_file, - ) - - -def _carpet( - func, - data, - seg_data, - order, - cmap, - labelsize, - TR=None, - detrend=True, - colorbar=False, - subplot=None, - output_file=None, -): - """Build carpetplot for volumetric / CIFTI plots.""" - if TR is None: - TR = 1.0 # Default TR - - sns.set_style("white") - # Detrend and z-score data - if detrend: + if standardize: + # This does not account for the temporal mask. data = clean(data.T, t_r=TR, detrend=True, filter=False, standardize="zscore_sample").T vlimits = (-2, 2) + elif temporal_mask is not None: + # If standardize is False and a temporal mask is provided, + # then we use only low-motion timepoints to define the vlimits. + # The executive summary uses the following range for native BOLD units. + vlimits = tuple(np.percentile(data[:, ~temporal_mask], q=(2.5, 97.5))) else: - # If detrend is False, then the data are assumed to have native BOLD units. + # If standardize is False, then the data are assumed to have native BOLD units. # The executive summary uses the following range for native BOLD units. 
vlimits = tuple(np.percentile(data, q=(2.5, 97.5))) @@ -1056,6 +1001,19 @@ def _carpet( ax1.set_xticks([]) ax1.set_xticklabels([]) + if temporal_mask is not None: + # Add color bands to the carpet plot corresponding to censored volumes + outlier_idx = list(np.where(temporal_mask)[0]) + gaps = [ + [start, end] for start, end in zip(outlier_idx, outlier_idx[1:]) if start + 1 < end + ] + edges = iter(outlier_idx[:1] + sum(gaps, []) + outlier_idx[-1:]) + consecutive_outliers_idx = list(zip(edges, edges)) + for band in consecutive_outliers_idx: + start = band[0] - 0.5 + end = band[1] + 0.5 + ax1.axvspan(start, end, color="red", alpha=0.5) + # Remove and redefine spines for side in ["top", "right"]: # Toggle the spine objects @@ -1075,11 +1033,7 @@ def _carpet( ax2.set_xticks([]) ax2.set_yticks([]) fig = ax2.get_figure() - cbar = fig.colorbar( - pos, - cax=ax2, - ticks=vlimits, - ) + cbar = fig.colorbar(pos, cax=ax2, ticks=vlimits) cbar.ax.tick_params(size=0, labelsize=20) # Write out file diff --git a/xcp_d/workflows/plotting.py b/xcp_d/workflows/plotting.py index 0ead0d6ce..8938a5225 100644 --- a/xcp_d/workflows/plotting.py +++ b/xcp_d/workflows/plotting.py @@ -365,6 +365,7 @@ def init_qc_report_wf( ("preprocessed_bold", "preprocessed_bold"), ("denoised_interpolated_bold", "denoised_interpolated_bold"), ("filtered_motion", "filtered_motion"), + ("temporal_mask", "temporal_mask"), ("run_index", "run_index"), ]), ])
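For readers who want to try the new censoring band outside of xcp_d, the following self-contained sketch reproduces the same run-grouping and axvspan shading this patch adds to the carpet plot. The toy carpet array, censored indices, and output filename are illustrative assumptions only.

import matplotlib
matplotlib.use("Agg")  # render without a display
import matplotlib.pyplot as plt
import numpy as np

rng = np.random.default_rng(3)
carpet = rng.normal(0, 1, size=(80, 120))   # rows x volumes
tmask = np.zeros(120, dtype=bool)
tmask[[10, 11, 12, 40, 41, 90]] = True      # censored volumes

fig, ax = plt.subplots(figsize=(8, 3))
ax.imshow(carpet, aspect="auto", cmap="gray", interpolation="none")

# Group consecutive censored volumes into (start, end) runs, then shade each run.
outlier_idx = list(np.where(tmask)[0])
gaps = [[s, e] for s, e in zip(outlier_idx, outlier_idx[1:]) if s + 1 < e]
edges = iter(outlier_idx[:1] + sum(gaps, []) + outlier_idx[-1:])
for start, end in zip(edges, edges):
    ax.axvspan(start - 0.5, end + 0.5, color="red", alpha=0.5)

fig.savefig("carpet_with_censor_bands.png", dpi=150)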