Merge pull request #169 from dvm-shlee/main
Implement New Plugin Architecture and Prepare for 0.4.0 Release
dvm-shlee authored May 6, 2024
2 parents 1176d25 + 458c0e7 commit 4ddcc16
Showing 52 changed files with 3,202 additions and 935 deletions.
4 changes: 3 additions & 1 deletion .dockerignore
@@ -6,6 +6,7 @@ env

**/__pycache__
**/.pytest_cache
**/.mypy_cache

.idea/**

@@ -16,4 +17,5 @@ env
tests/*
paper

.DS_Store
.DS_Store

5 changes: 4 additions & 1 deletion .flake8
@@ -7,4 +7,7 @@ exclude =
env,
venv,
max-line-length = 127
max-complexity=10
max-complexity = 10
ignore = W291, W293
docstring-convention = google
mypy-config = ./mypy.ini
2 changes: 1 addition & 1 deletion .github/pull_request_template.md
@@ -6,4 +6,4 @@ Changes proposed in this pull request:
-


@BrkRaw/Bruker
@BrkRaw/brkraw
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
@@ -81,7 +81,7 @@ jobs:
run: |
python -m pip install --upgrade pip
pip install .[dev]
pip install .[SimpleITK]
pip install .[legacy]
- name: Install tutorial
run: make tests/tutorials
11 changes: 8 additions & 3 deletions .gitignore
@@ -11,7 +11,12 @@ build
*.egg-info
*.egg-info/*
.DS_Store
.mypy_cache
.pytest_cache

tests/.brkraw
tests/_*.ipynb
tests/tutorials
_test*.py
_*.ipynb
_*.log
tests/_datasets

.python-version
1 change: 0 additions & 1 deletion README.md
@@ -4,7 +4,6 @@

## BrkRaw: A comprehensive tool to access raw Bruker Biospin MRI data
#### Version: 0.3.11

### Description

The ‘BrkRaw’ is a python module designed to provide a comprehensive tool to access raw data acquired from
9 changes: 7 additions & 2 deletions brkraw/__init__.py
@@ -1,8 +1,13 @@
from .lib import *
from xnippet import XnippetManager

__version__ = '0.3.11'
__all__ = ['BrukerLoader', '__version__', 'config']
__version__ = '0.4.0'
config = XnippetManager(package_name=__package__,
package_version=__version__,
package__file__=__file__,
config_filename='config.yaml')

__all__ = ['BrukerLoader', '__version__', 'config']

def load(path):
return BrukerLoader(path)
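
For reference, a minimal usage sketch of the updated top-level API; the study path is hypothetical, and `config` is the XnippetManager instance constructed above:

    import brkraw

    print(brkraw.__version__)            # '0.4.0' after this release
    print(type(brkraw.config).__name__)  # 'XnippetManager', built from config.yaml

    # load() still returns a BrukerLoader; the path below is a placeholder.
    rawdata = brkraw.load('path/to/bruker_study')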
7 changes: 4 additions & 3 deletions brkraw/api/__init__.py
@@ -1,4 +1,5 @@
from .data import Study
from ..config import ConfigManager
from xnippet.snippet.plugin import PlugIn as PlugInSnippet
from xnippet.formatter import PathFormatter

__all__ = ['Study', 'ConfigManager']

__all__ = ['PlugInSnippet', 'PathFormatter']
13 changes: 13 additions & 0 deletions brkraw/api/analyzer/__init__.py
@@ -1,3 +1,16 @@
"""Analyzer module initialization.
This module imports and exposes various analyzer classes used to parse and process
information from raw datasets into more readable formats. Each analyzer provides
specific functionalities tailored to different aspects of data processing and analysis.
Exposed Classes:
BaseAnalyzer: Provides common features and utilities shared among all analyzers.
ScanInfoAnalyzer: Specializes in parsing and analyzing scan information from raw datasets.
AffineAnalyzer: Handles the computation and analysis of affine matrices from dataset parameters.
DataArrayAnalyzer: Focuses on parsing and returning structured data arrays and related metadata.
"""

from .base import BaseAnalyzer
from .scaninfo import ScanInfoAnalyzer
from .affine import AffineAnalyzer
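
For reference, the exposed classes can be imported directly from this subpackage (a minimal sketch; DataArrayAnalyzer is listed in the module docstring and assumed to be re-exported as well):

    from brkraw.api.analyzer import (
        BaseAnalyzer,       # shared utilities such as to_dict()
        ScanInfoAnalyzer,   # parses scan metadata from parameter files
        AffineAnalyzer,     # computes and corrects affine matrices
        DataArrayAnalyzer,  # structures the raw data array and its metadata
    )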
39 changes: 39 additions & 0 deletions brkraw/api/analyzer/affine.py
@@ -1,3 +1,11 @@
"""Affine Matrix Analyzer Module.
This module focuses on analyzing and processing affine matrices derived from imaging data.
It provides functionalities to calculate, adjust, and standardize affine transformations based
on specific imaging parameters and subject orientations, thereby facilitating accurate spatial
orientation and alignment of imaging data.
"""

from __future__ import annotations
from brkraw.api import helper
from .base import BaseAnalyzer
@@ -22,7 +30,24 @@


class AffineAnalyzer(BaseAnalyzer):
"""Processes affine matrices from raw dataset parameters to ensure proper spatial orientation.
This analyzer calculates affine matrices based on imaging data and subject configurations.
It supports various adjustments based on subject type and pose, ensuring the matrices are
suitable for specific analysis and visualization requirements.
Args:
infoobj (ScanInfo): The information object containing imaging parameters and subject orientation.
Attributes:
resolution (list[tuple]): Resolution details extracted from imaging data.
affine (np.ndarray or list[np.ndarray]): The calculated affine matrices.
subj_type (str): The type of the subject (e.g., Biped, Quadruped).
subj_position (str): The position of the subject during the scan.
"""
def __init__(self, infoobj: 'ScanInfo'):
"""Initialize the AffineAnalyzer with an information object.
"""
infoobj = copy(infoobj)
if infoobj.image['dim'] == 2:
xr, yr = infoobj.image['resolution']
@@ -43,6 +68,8 @@ def __init__(self, infoobj: 'ScanInfo'):
self.subj_position = infoobj.orientation['subject_position'] if hasattr(infoobj, 'orientation') else None

def get_affine(self, subj_type: Optional[str] = None, subj_position: Optional[str] = None):
"""Retrieve the affine matrix, applying corrections based on subject type and position.
"""
subj_type = subj_type or self.subj_type
subj_position = subj_position or self.subj_position
if isinstance(self.affine, list):
@@ -52,6 +79,8 @@ def get_affine(self, subj_type: Optional[str] = None, subj_position: Optional[st
return affine

def _calculate_affine(self, infoobj: 'ScanInfo', slicepack_id: Optional[int] = None):
"""Calculate the initial affine matrix based on the imaging data and subject orientation.
"""
sidx = infoobj.orientation['orientation_desc'][slicepack_id].index(2) \
if slicepack_id else infoobj.orientation['orientation_desc'].index(2)
slice_orient = SLICEORIENT[sidx]
@@ -69,12 +98,16 @@

@staticmethod
def _correct_origin(orientation, volume_origin, slice_distance):
"""Adjust the origin of the volume based on slice orientation and distance.
"""
new_origin = orientation.dot(volume_origin)
new_origin[-1] += slice_distance
return orientation.T.dot(new_origin)

@staticmethod
def _compose_affine(resolution, orientation, volume_origin, slice_orient):
"""Compose the affine transformation matrix using the provided resolution, orientation, and origin.
"""
resol = np.array(resolution)
if slice_orient in ['axial', 'sagital']:
resol = np.diag(resol)
@@ -86,6 +119,8 @@ def _compose_affine(resolution, orientation, volume_origin, slice_orient):

@staticmethod
def _est_rotate_angle(subj_pose):
"""Estimate the rotation angle needed based on the subject's pose.
"""
rotate_angle = {'rad_x':0, 'rad_y':0, 'rad_z':0}
if subj_pose:
if subj_pose == 'Head_Supine':
@@ -112,6 +147,8 @@ def _est_rotate_angle(subj_pose):

@classmethod
def _correct_orientation(cls, affine, subj_pose, subj_type):
"""Correct the orientation of the affine matrix based on the subject's type and pose.
"""
cls._inspect_subj_info(subj_pose, subj_type)
rotate_angle = cls._est_rotate_angle(subj_pose)
affine = helper.rotate_affine(affine, **rotate_angle)
@@ -122,6 +159,8 @@ def _correct_orientation(cls, affine, subj_pose, subj_type):

@staticmethod
def _inspect_subj_info(subj_pose, subj_type):
"""Validate subject type and pose information.
"""
if subj_pose:
part, side = subj_pose.split('_')
assert part in SUBJPOSE['part'], 'Invalid subject position'
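
A usage sketch for AffineAnalyzer, assuming `scaninfo` is a ScanInfo object already built from a scan's parameter files; the subject values shown are examples drawn from the docstrings above:

    import numpy as np
    from brkraw.api.analyzer import AffineAnalyzer

    analyzer = AffineAnalyzer(scaninfo)
    # Stored subject info is used by default; both arguments can override it.
    affine = analyzer.get_affine(subj_type='Biped', subj_position='Head_Supine')
    # A single slice pack yields one matrix; multiple slice packs yield a list.
    assert isinstance(affine, (np.ndarray, list))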
20 changes: 20 additions & 0 deletions brkraw/api/analyzer/base.py
@@ -1,3 +1,23 @@
"""Base components for data analysis.
This module provides foundational classes and utilities that are shared across different
analyzers within the helper module. These components serve as the base for more specialized
data processing and analysis tasks.
"""

class BaseAnalyzer:
"""A base class providing common functionalities for data analyzers.
This class serves as a parent to various specialized analyzers, providing shared methods
and utility functions to assist in data analysis tasks.
Methods:
to_dict: Returns a dictionary representation of the instance's attributes.
"""
def to_dict(self):
"""Convert the analyzer's attributes to a dictionary format.
Returns:
dict: A dictionary containing all attributes of the analyzer instance.
"""
return self.__dict__
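
Because to_dict simply returns the instance __dict__, any subclass exposes its parsed attributes the same way; a tiny, hypothetical illustration:

    from brkraw.api.analyzer import BaseAnalyzer

    class DemoAnalyzer(BaseAnalyzer):          # hypothetical subclass for illustration only
        def __init__(self):
            self.resolution = (0.1, 0.1, 0.5)  # made-up values, in mm
            self.num_slices = 30

    print(DemoAnalyzer().to_dict())
    # {'resolution': (0.1, 0.1, 0.5), 'num_slices': 30}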
36 changes: 34 additions & 2 deletions brkraw/api/analyzer/dataarray.py
@@ -1,21 +1,49 @@
"""Data Array Analyzer Module.
This module is dedicated to the analysis of data arrays, focusing on extracting and structuring
data array information from raw datasets. It provides functionalities to interpret and convert
data arrays into more accessible formats, complementing the broader data processing framework.
"""

from __future__ import annotations
from .base import BaseAnalyzer
import numpy as np
from copy import copy
from typing import TYPE_CHECKING, Union
from .base import BaseAnalyzer
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from ..data import ScanInfo
from typing import Union
from io import BufferedReader
from zipfile import ZipExtFile


class DataArrayAnalyzer(BaseAnalyzer):
"""Analyzes specific data array information and returns structured data arrays and related metadata.
This analyzer takes raw data array inputs and processes them to extract significant array metadata,
such as data type and shape, and prepares the data array for further analytical processing.
Args:
infoobj (ScanInfo): The information object containing metadata related to data arrays.
fileobj (Union[BufferedReader, ZipExtFile]): The file object from which the data array is read.
Attributes:
slope (float): The scaling factor applied to the data array values.
offset (float): The offset added to the data array values.
dtype (type): The data type of the data array.
shape (list[int]): The dimensions of the data array.
shape_desc (list[str]): Descriptions of the data array dimensions.
"""
def __init__(self, infoobj: 'ScanInfo', fileobj: Union[BufferedReader, ZipExtFile]):
"""Initialize the DataArrayAnalyzer with an information object and a file object.
"""
infoobj = copy(infoobj)
self._parse_info(infoobj)
self.buffer = fileobj

def _parse_info(self, infoobj: 'ScanInfo'):
"""Parse the information object to set the data array properties such as slope, offset, and data type.
"""
if not hasattr(infoobj, 'dataarray'):
raise AttributeError
self.slope = infoobj.dataarray['slope']
@@ -27,10 +55,14 @@ def _parse_info(self, infoobj: 'ScanInfo'):
self._calc_array_shape(infoobj)

def _calc_array_shape(self, infoobj: 'ScanInfo'):
"""Calculate and extend the shape and description of the data array based on frame group information.
"""
self.shape.extend(infoobj.frame_group['shape'][:])
self.shape_desc.extend([fgid.replace('FG_', '').lower() for fgid in infoobj.frame_group['id']])

def get_dataarray(self):
"""Read and return the structured data array from the buffer, applying data type and shape transformations.
"""
self.buffer.seek(0)
return np.frombuffer(self.buffer.read(), self.dtype).reshape(self.shape, order='F')
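
A sketch of driving DataArrayAnalyzer, assuming `scaninfo` carries the dataarray and frame-group information and the 2dseq path below is hypothetical:

    from brkraw.api.analyzer import DataArrayAnalyzer

    with open('path/to/pdata/1/2dseq', 'rb') as fileobj:
        analyzer = DataArrayAnalyzer(scaninfo, fileobj)
        array = analyzer.get_dataarray()          # numpy array, Fortran-order reshape
        print(array.shape, analyzer.shape_desc)   # dimensions plus their labels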

33 changes: 28 additions & 5 deletions brkraw/api/analyzer/scaninfo.py
@@ -1,3 +1,10 @@
"""Scan information analysis module.
This module defines the ScanInfoAnalyzer, which is essential for parsing and interpreting
metadata from multiple parameter files, making it more human-readable and accessible
for further processing and analysis tasks.
"""

from __future__ import annotations
from collections import OrderedDict
from brkraw.api import helper
@@ -10,18 +17,27 @@
class ScanInfoAnalyzer(BaseAnalyzer):
"""Helps parse metadata from multiple parameter files to make it more human-readable.
This analyzer is crucial for reconstructing and interpreting various scan parameters
from raw dataset files, supporting enhanced data insights and accessibility.
Args:
pvobj (PvScan): The PvScan object containing acquisition and method parameters.
reco_id (int, optional): The reconstruction ID. Defaults to None.
pvobj (Union[PvScan, PvReco, PvFiles]): The PvObject containing various acquisition
and method parameters.
reco_id (int, optional): Specifies the reconstruction ID for targeted analysis.
Defaults to None.
debug (bool): Flag to enable debugging outputs for detailed tracing.
Raises:
NotImplementedError: If an operation is not implemented.
Attributes:
info_protocol (dict): Stores protocol-related information.
info_fid (dict): Contains information extracted from FID files.
visu_pars (OrderedDict): Visualization parameters extracted for analysis.
"""
def __init__(self,
pvobj: Union['PvScan', 'PvReco', 'PvFiles'],
reco_id:Optional[int] = None,
debug:bool = False):

"""Initialize the ScanInfoAnalyzer with specified parameters and optionally in debug mode.
"""
self._set_pars(pvobj, reco_id)
if not debug:
self.info_protocol = helper.Protocol(self).get_info()
@@ -30,6 +46,7 @@ def __init__(self,
self._parse_info()

def _set_pars(self, pvobj: Union['PvScan', 'PvReco', 'PvFiles'], reco_id: Optional[int]):
"""Set parameters from the PvObject for internal use."""
for p in ['acqp', 'method']:
try:
vals = getattr(pvobj, p)
@@ -49,6 +66,8 @@ def _set_pars(self, pvobj: Union['PvScan', 'PvReco', 'PvFiles'], reco_id: Option
setattr(self, 'visu_pars', visu_pars)

def _parse_info(self):
"""Parse and process detailed information from the visualization parameters and other sources.
"""
self.info_dataarray = helper.DataArray(self).get_info()
self.info_frame_group = helper.FrameGroup(self).get_info()
self.info_image = helper.Image(self).get_info()
@@ -59,7 +78,11 @@ def _parse_info(self):
self.info_orientation = helper.Orientation(self).get_info()

def __dir__(self):
"""List dynamic attributes of the instance related to informational properties.
"""
return [attr for attr in self.__dict__.keys() if 'info_' in attr]

def get(self, key):
"""Retrieve information properties based on a specified key.
"""
return getattr(self, key) if key in self.__dir__() else None
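
A usage sketch for ScanInfoAnalyzer, assuming `pvscan` is a PvScan object obtained from a loaded dataset:

    from brkraw.api.analyzer import ScanInfoAnalyzer

    info = ScanInfoAnalyzer(pvscan, reco_id=1)   # reco_id is optional
    print(dir(info))                             # lists only the parsed info_* attributes
    image_info = info.get('info_image')          # returns None for unknown keys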
