
Commit

Merge pull request #156 from rolfverberg/main
fix: bugs associated with new detector configuration
rolfverberg authored Nov 1, 2024
2 parents 99c8808 + b0536b9 commit 79772a2
Showing 5 changed files with 51 additions and 54 deletions.
6 changes: 3 additions & 3 deletions CHAP/common/reader.py
@@ -521,9 +521,9 @@ def read(
nxscans[scan_number].data = nxdata
# nxpaths.append(
# f'spec_scans/{nxscans.nxname}/{scan_number}/data')
for detector_id in detector_ids:
nxdata[detector_id] = NXfield(
value=scanparser.get_detector_data(detector_id))
for detector in detectors.detectors:
nxdata[detector.id] = NXfield(
value=scanparser.get_detector_data(detector.id))

if detectors is None and config.experiment_type == 'EDD':
detectors = DetectorConfig(
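The reader change above replaces the flat list of detector ids with the detector objects held by the new configuration: the loop now walks detectors.detectors and reads each detector's id. A minimal sketch of that pattern, assuming a pydantic-style DetectorConfig whose entries each expose an id field (the classes below are illustrative stand-ins, not the CHAP models; in the real reader the stored value comes from scanparser.get_detector_data(detector.id)):

    # Illustrative stand-ins for the detector configuration models.
    from dataclasses import dataclass, field

    @dataclass
    class Detector:
        id: str                      # detector identifier, e.g. 'sim'

    @dataclass
    class DetectorConfig:
        detectors: list = field(default_factory=list)

    detectors = DetectorConfig(detectors=[Detector(id='sim')])
    for detector in detectors.detectors:
        # The reader stores scanparser.get_detector_data(detector.id)
        # under nxdata[detector.id]; here we only show the iteration.
        print(detector.id)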
63 changes: 30 additions & 33 deletions CHAP/tomo/processor.py
@@ -38,13 +38,13 @@
def get_nxroot(data, schema=None, remove=True):
"""Look through `data` for an item whose value for the `'schema'`
key matches `schema` (if supplied) and whose value for the `'data'`
key matches a nexusformat.nexus.NXobject object and return this
key matches a `nexusformat.nexus.NXobject` object and return this
object.
:param data: Input list of `PipelineData` objects.
:type data: list[PipelineData]
:param schema: Name associated with the nexusformat.nexus.NXobject
object to match in `data`.
:param schema: Name associated with the
`nexusformat.nexus.NXobject` object to match in `data`.
:type schema: str, optional
:param remove: Removes the matching entry in `data` when found,
defaults to `True`.
@@ -59,6 +59,7 @@ def get_nxroot(data, schema=None, remove=True):

# Local modules
from nexusformat.nexus import NXobject

nxobject = None
if isinstance(data, list):
for i, item in enumerate(deepcopy(data)):
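The docstring above spells out the lookup contract: scan the list of PipelineData items, match on the 'schema' key when one is given, return the value stored under 'data', and pop the matched entry when remove is true. A hedged sketch of that logic, treating each item as a plain dict (the real implementation also checks that the value is a nexusformat.nexus.NXobject; the helper name below is made up):

    # Illustrative version of the lookup described in the docstring.
    from copy import deepcopy

    def find_matching_data(data, schema=None, remove=True):
        result = None
        for i, item in enumerate(deepcopy(data)):
            matches = schema is None or item.get('schema') == schema
            if matches and item.get('data') is not None:
                result = item['data']
                if remove:
                    data.pop(i)   # drop the matched entry from the caller's list
                break
        return result

    pipeline_data = [{'schema': 'tomofields', 'data': {'title': 'scan 1'}}]
    print(find_matching_data(pipeline_data, schema='tomofields'))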
@@ -83,26 +84,25 @@
class TomoCHESSMapConverter(Processor):
"""
A processor to convert a CHESS style tomography map with dark and
bright field configurations to an nexusformat.nexus.NXtomo style
input format.
bright field configurations to a NeXus style input format.
"""

def process(self, data):
"""
Process the input map and configuration and return a
nexusformat.nexus.NXroot object based on the
nexusformat.nexus.NXtomo style format.
`nexusformat.nexus.NXroot` object based on the
`nexusformat.nexus.NXtomo` style format.
:param data: Input map and configuration for tomographic image
reduction/reconstruction.
:type data: list[PipelineData]
:raises ValueError: Invalid input or configuration parameter.
:return: nexusformat.nexus.NXtomo style tomography input
configuration.
:return: NeXus style tomography input configuration.
:rtype: nexusformat.nexus.NXroot
"""
# System modules
from copy import deepcopy
from json import dumps

# Third party modules
from json import loads
@@ -149,7 +149,10 @@ def process(self, data):
num_tomo_stack = len(map_config.spec_scans[0].scan_numbers)

# Check available independent dimensions
independent_dimensions = tomofields.data.attrs['axes']
if 'axes' in tomofields.data.attrs:
independent_dimensions = tomofields.data.attrs['axes']
else:
independent_dimensions = tomofields.data.attrs['unstructured_axes']
if isinstance(independent_dimensions, str):
independent_dimensions = [independent_dimensions]
matched_dimensions = deepcopy(independent_dimensions)
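The new branch above lets the converter accept maps whose NXdata advertises its independent dimensions under 'unstructured_axes' rather than 'axes'. A small sketch of the same fallback against an in-memory nexusformat NXdata group (the attribute names come from the diff; the sample field and values are made up):

    # Hedged sketch of the axes / unstructured_axes fallback.
    from nexusformat.nexus import NXdata, NXfield

    nxdata = NXdata()
    nxdata.theta = NXfield([0.0, 0.5, 1.0])
    nxdata.attrs['unstructured_axes'] = 'theta'   # this map has no 'axes' attribute

    if 'axes' in nxdata.attrs:
        independent_dimensions = nxdata.attrs['axes']
    else:
        independent_dimensions = nxdata.attrs['unstructured_axes']
    if isinstance(independent_dimensions, str):
        independent_dimensions = [independent_dimensions]
    print(independent_dimensions)                 # ['theta']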
@@ -198,7 +201,7 @@ def process(self, data):

# Add configuration fields
nxentry.definition = 'NXtomo'
nxentry.map_config = tomofields.map_config
nxentry.map_config = dumps(map_config.dict())

# Add an NXinstrument to the NXentry
nxinstrument = NXinstrument()
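The hunk above now stores the validated map configuration as a JSON string on the entry (dumps(map_config.dict())) instead of copying the raw map_config field from the input. A hedged sketch of that pattern, using a stand-in for the pydantic-style MapConfig model (the stub class and its contents are illustrative; the NXentry fields mirror the diff):

    # Hedged sketch: keep the validated configuration as a JSON string field.
    from json import dumps
    from nexusformat.nexus import NXentry, NXinstrument

    class MapConfigStub:
        # Stand-in for the validated MapConfig model (illustrative only).
        def dict(self):
            return {'title': 'hollow_brick'}

    map_config = MapConfigStub()
    nxentry = NXentry()
    nxentry.definition = 'NXtomo'
    nxentry.map_config = dumps(map_config.dict())   # JSON string, easy to round-trip
    nxentry.instrument = NXinstrument()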
@@ -219,12 +222,7 @@

# Add an NXdetector to the NXinstrument
# (do not fill in data fields yet)
detector_names = list(np.asarray(tomofields.detector_names, dtype=str))
detector_prefix = detector_config.prefix
if detector_prefix not in detector_names:
raise ValueError(
f'Data for detector {detector_prefix} is unavailable '
f'(available detectors: {detector_names})')
nxdetector = NXdetector()
nxinstrument.detector = nxdetector
nxdetector.local_name = detector_prefix
@@ -340,8 +338,7 @@ def process(self, data):
num_image*[smb_pars[z_translation_name]]

# Collect tomography fields data
tomo_stacks = tomofields.data.detector_data.nxdata[
detector_names.index(detector_prefix)]
tomo_stacks = tomofields.data[detector_prefix].nxdata
tomo_stack_shape = tomo_stacks.shape
assert len(tomo_stack_shape) == 3
assert tomo_stack_shape[-2] == detector_config.rows
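With the detector data now stored per detector id in the map's NXdata group, the stack is looked up by name (tomofields.data[detector_prefix].nxdata) rather than by position in a packed detector_data array. A small hedged sketch of that access pattern (the 'sim' name and array shape are illustrative):

    # Hedged sketch: fetch a detector's image stack from NXdata by field name.
    import numpy as np
    from nexusformat.nexus import NXdata, NXfield

    detector_prefix = 'sim'
    nxdata = NXdata()
    nxdata[detector_prefix] = NXfield(np.zeros((3, 16, 16)))   # (image, row, column)

    tomo_stacks = nxdata[detector_prefix].nxdata               # plain numpy array
    assert len(tomo_stacks.shape) == 3
    assert tomo_stacks.shape == (3, 16, 16)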
@@ -408,7 +405,7 @@
class TomoDataProcessor(Processor):
"""
A processor to reconstruct a set of tomographic images returning
either a dictionary or a nexusformat.nexus.NXroot object
either a dictionary or a `nexusformat.nexus.NXroot` object
containing the (meta) data after processing each individual step.
"""

@@ -419,7 +416,7 @@ def process(
"""
Process the input map or configuration with the step specific
instructions and return either a dictionary or a
nexusformat.nexus.NXroot object with the processed result.
`nexusformat.nexus.NXroot` object with the processed result.
:param data: Input configuration and specific step instructions
for tomographic image reduction.
@@ -808,7 +805,7 @@ def reduce_data(
reduced_data = self._gen_tomo(
nxentry, reduced_data, image_key, calibrate_center_rows)

# Create a copy of the input Nexus object and remove raw and
# Create a copy of the input NeXus object and remove raw and
# any existing reduced data
exclude_items = [
f'{nxentry.nxname}/reduced_data/data',
@@ -870,7 +867,7 @@ def find_centers(self, nxroot, tool_config, calibrate_center_rows=False):

# Select the image stack to find the calibrated center axis
# reduced data axes order: stack,theta,row,column
# Note: Nexus can't follow a link if the data it points to is
# Note: NeXus can't follow a link if the data it points to is
# too big get the data from the actual place, not from
# nxentry.data
num_tomo_stacks = nxentry.reduced_data.data.tomo_fields.shape[0]
@@ -1051,7 +1048,7 @@ def reconstruct_data(self, nxroot, center_info, tool_config):
# Reconstruct tomography data
# - reduced data axes order: stack,theta,row,column
# - reconstructed data axes order: row/-z,y,x
# Note: Nexus can't follow a link if the data it points to is
# Note: NeXus can't follow a link if the data it points to is
# too big get the data from the actual place, not from
# nxentry.data
if 'zoom_perc' in nxentry.reduced_data:
@@ -1250,7 +1247,7 @@ def reconstruct_data(self, nxroot, center_info, tool_config):
nxprocess.data = NXdata(
NXfield(tomo_recon_stacks, 'reconstructed_data'))

# Create a copy of the input Nexus object and remove reduced
# Create a copy of the input NeXus object and remove reduced
# data
exclude_items = [
f'{nxentry.nxname}/reduced_data/data',
@@ -1259,7 +1256,7 @@ def reconstruct_data(self, nxroot, center_info, tool_config):
]
nxroot = nxcopy(nxroot, exclude_nxpaths=exclude_items)

# Add the reconstructed data NXprocess to the new Nexus object
# Add the reconstructed data NXprocess to the new NeXus object
nxentry = nxroot[nxroot.default]
nxentry.reconstructed_data = nxprocess
if 'data' not in nxentry:
@@ -1323,10 +1320,10 @@ def combine_data(self, nxroot, tool_config):

# Get and combine the reconstructed stacks
# - reconstructed axis data order: stack,row/-z,y,x
# Note: Nexus can't follow a link if the data it points to is
# Note: NeXus can't follow a link if the data it points to is
# too big. So get the data from the actual place, not from
# nxentry.data
# Also load one stack at a time to reduce risk of hitting Nexus
# Also load one stack at a time to reduce risk of hitting NeXus
# data access limit
t0 = time()
tomo_recon_combined = \
@@ -1485,15 +1482,15 @@ def combine_data(self, nxroot, tool_config):
NXfield(
x, 'x', attrs={'units': detector.column_pixel_size.units}),))

# Create a copy of the input Nexus object and remove
# Create a copy of the input NeXus object and remove
# reconstructed data
exclude_items = [
f'{nxentry.nxname}/reconstructed_data/data',
f'{nxentry.nxname}/data/reconstructed_data',
]
nxroot = nxcopy(nxroot, exclude_nxpaths=exclude_items)

# Add the combined data NXprocess to the new Nexus object
# Add the combined data NXprocess to the new NeXus object
nxentry = nxroot[nxroot.default]
nxentry.combined_data = nxprocess
if 'data' not in nxentry:
@@ -3116,7 +3113,7 @@ def process(self, data):
if num_tomo_stack == 1:
tomo_fields_stack = tomo_fields_stack[0]

# Create Nexus object and write to file
# Create a NeXus object and write to file
nxroot = NXroot()
nxroot.entry = NXentry()
nxroot.entry.sample = NXsample()
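The comment above pairs building the in-memory NeXus tree with writing it out to file. A hedged sketch of that step with nexusformat (the sample field and output filename are illustrative; the save call may need adjusting to the installed nexusformat version):

    # Hedged sketch: build a minimal NeXus tree and write it to an HDF5 file.
    from nexusformat.nexus import NXentry, NXroot, NXsample

    nxroot = NXroot()
    nxroot.entry = NXentry()
    nxroot.entry.sample = NXsample(name='simulated sample')
    nxroot.save('simulated_tomo.nxs', mode='w')   # illustrative output path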
@@ -3247,7 +3244,7 @@ def process(self, data, num_image=5):
dark_field = int(background_intensity) * np.ones(
(num_image, detector_size[0], detector_size[1]), dtype=np.int64)

# Create Nexus object and write to file
# Create a NeXus object and write to file
nxdark = NXroot()
nxdark.entry = NXentry()
nxdark.entry.sample = nxroot.entry.sample
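The dark-field simulator above builds a constant-background stack of num_image frames sized to the detector. A tiny hedged sketch of that construction with made-up numbers (the variable names mirror the diff):

    # Hedged sketch of the synthetic dark-field stack.
    import numpy as np

    num_image = 5
    background_intensity = 100
    detector_size = (32, 64)        # (rows, columns), illustrative
    dark_field = int(background_intensity) * np.ones(
        (num_image, detector_size[0], detector_size[1]), dtype=np.int64)
    assert dark_field.shape == (5, 32, 64)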
@@ -3336,7 +3333,7 @@ def process(self, data, num_image=5):
outer_indices = np.where(abs(img_row_coords) > slit_size/2)[0]
bright_field[:,outer_indices,:] = 0

# Create Nexus object and write to file
# Create a NeXus object and write to file
nxbright = NXroot()
nxbright.entry = NXentry()
nxbright.entry.sample = nxroot.entry.sample
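The bright-field simulator zeroes out detector rows whose coordinates fall outside the beam-defining slit, as shown in the hunk above. A hedged sketch of that masking step (row coordinates, slit size, and array sizes are made up):

    # Hedged sketch: zero detector rows outside the slit opening.
    import numpy as np

    num_image, num_row, num_col = 3, 8, 10
    bright_field = np.ones((num_image, num_row, num_col))
    img_row_coords = np.linspace(-1.0, 1.0, num_row)   # row positions about the beam center
    slit_size = 1.0
    outer_indices = np.where(abs(img_row_coords) > slit_size/2)[0]
    bright_field[:, outer_indices, :] = 0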
@@ -3440,7 +3437,7 @@ def process(self, data, scan_numbers=None):
f'Inconsistent number of scans ({num_scan}), '
f'len(scan_numbers) = {len(scan_numbers)})')

# Create the output data structure in Nexus format
# Create the output data structure in NeXus format
nxentry = NXentry()

# Create the SPEC file header
12 changes: 6 additions & 6 deletions examples/tomo/pipeline_id3a_brick.yaml
@@ -14,8 +14,8 @@ pipeline:
schema: common.models.map.MapConfig
- common.MapProcessor:
num_proc: 1
detector_names:
- sim
detectors:
- id: sim
schema: tomofields
- pipeline.MultiplePipelineItem:
items:
@@ -27,8 +27,8 @@
- spec_file: ../data/hollow_brick/spec.log
scan_numbers:
- 1
detector_names:
- sim
detectors:
- id: sim
schema: darkfield
- common.SpecReader:
inputdir: ../data/hollow_brick
@@ -39,8 +39,8 @@
- spec_file: spec.log
scan_numbers:
- 2
detector_names:
- sim
detectors:
- id: sim
schema: brightfield
- common.YAMLReader:
filename: detector.yaml
12 changes: 6 additions & 6 deletions examples/tomo/pipeline_id3a_pyramid.yaml
@@ -36,8 +36,8 @@ pipeline:
# data_type: smb_par
# name: ramsz
num_proc: 1
detector_names:
- sim
detectors:
- id: sim
schema: tomofields
# - common.NexusWriter:
# filename: map_hollow_pyramid.nxs
@@ -55,8 +55,8 @@
- spec_file: ../data/hollow_pyramid/spec.log
scan_numbers:
- 1
detector_names:
- sim
detectors:
- id: sim
schema: darkfield
- common.SpecReader:
inputdir: ../data/hollow_pyramid
@@ -67,8 +67,8 @@
- spec_file: spec.log
scan_numbers:
- 2
detector_names:
- sim
detectors:
- id: sim
schema: brightfield
- common.YAMLReader:
filename: detector_pyramid.yaml
12 changes: 6 additions & 6 deletions examples/tomo/pipeline_id3b.yaml
@@ -14,8 +14,8 @@ pipeline:
schema: common.models.map.MapConfig
- common.MapProcessor:
num_proc: 1
detector_names:
- sim
detectors:
- id: sim
schema: tomofields
# - common.NexusWriter:
# filename: map_hollow_cube.nxs
@@ -33,8 +33,8 @@
- spec_file: ../data/hollow_cube/hollow_cube
scan_numbers:
- 1
detector_names:
- sim
detectors:
- id: sim
schema: darkfield
- common.SpecReader:
inputdir: ../data/hollow_cube
@@ -45,8 +45,8 @@
- spec_file: hollow_cube
scan_numbers:
- 2
detector_names:
- sim
detectors:
- id: sim
schema: brightfield
- common.YAMLReader:
filename: detector.yaml
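All three example pipelines above switch from a bare detector_names list to a detectors list whose entries carry an id. A quick hedged check of what the two YAML shapes parse to (standard PyYAML; the 'sim' id comes from the examples):

    # Hedged sketch: old vs new detector blocks as parsed by PyYAML.
    import yaml

    old_block = yaml.safe_load("detector_names:\n  - sim\n")
    new_block = yaml.safe_load("detectors:\n  - id: sim\n")
    print(old_block)   # {'detector_names': ['sim']}
    print(new_block)   # {'detectors': [{'id': 'sim'}]}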
