diff --git a/CHAP/common/reader.py b/CHAP/common/reader.py index c51987c..e98b346 100755 --- a/CHAP/common/reader.py +++ b/CHAP/common/reader.py @@ -521,9 +521,9 @@ def read( nxscans[scan_number].data = nxdata # nxpaths.append( # f'spec_scans/{nxscans.nxname}/{scan_number}/data') - for detector_id in detector_ids: - nxdata[detector_id] = NXfield( - value=scanparser.get_detector_data(detector_id)) + for detector in detectors.detectors: + nxdata[detector.id] = NXfield( + value=scanparser.get_detector_data(detector.id)) if detectors is None and config.experiment_type == 'EDD': detectors = DetectorConfig( diff --git a/CHAP/tomo/processor.py b/CHAP/tomo/processor.py index 6577ee3..8f719f8 100755 --- a/CHAP/tomo/processor.py +++ b/CHAP/tomo/processor.py @@ -38,13 +38,13 @@ def get_nxroot(data, schema=None, remove=True): """Look through `data` for an item whose value for the `'schema'` key matches `schema` (if supplied) and whose value for the `'data'` - key matches a nexusformat.nexus.NXobject object and return this + key matches a `nexusformat.nexus.NXobject` object and return this object. :param data: Input list of `PipelineData` objects. :type data: list[PipelineData] - :param schema: Name associated with the nexusformat.nexus.NXobject - object to match in `data`. + :param schema: Name associated with the + `nexusformat.nexus.NXobject` object to match in `data`. :type schema: str, optional :param remove: Removes the matching entry in `data` when found, defaults to `True`. @@ -59,6 +59,7 @@ def get_nxroot(data, schema=None, remove=True): # Local modules from nexusformat.nexus import NXobject + nxobject = None if isinstance(data, list): for i, item in enumerate(deepcopy(data)): @@ -83,26 +84,25 @@ def get_nxroot(data, schema=None, remove=True): class TomoCHESSMapConverter(Processor): """ A processor to convert a CHESS style tomography map with dark and - bright field configurations to an nexusformat.nexus.NXtomo style - input format. 
+ bright field configurations to a NeXus style input format. """ def process(self, data): """ Process the input map and configuration and return a - nexusformat.nexus.NXroot object based on the - nexusformat.nexus.NXtomo style format. + `nexusformat.nexus.NXroot` object based on the + `nexusformat.nexus.NXtomo` style format. :param data: Input map and configuration for tomographic image reduction/reconstruction. :type data: list[PipelineData] :raises ValueError: Invalid input or configuration parameter. - :return: nexusformat.nexus.NXtomo style tomography input - configuration. + :return: NeXus style tomography input configuration. :rtype: nexusformat.nexus.NXroot """ # System modules from copy import deepcopy + from json import dumps # Third party modules from json import loads @@ -149,7 +149,10 @@ def process(self, data): num_tomo_stack = len(map_config.spec_scans[0].scan_numbers) # Check available independent dimensions - independent_dimensions = tomofields.data.attrs['axes'] + if 'axes' in tomofields.data.attrs: + independent_dimensions = tomofields.data.attrs['axes'] + else: + independent_dimensions = tomofields.data.attrs['unstructured_axes'] if isinstance(independent_dimensions, str): independent_dimensions = [independent_dimensions] matched_dimensions = deepcopy(independent_dimensions) @@ -198,7 +201,7 @@ def process(self, data): # Add configuration fields nxentry.definition = 'NXtomo' - nxentry.map_config = tomofields.map_config + nxentry.map_config = dumps(map_config.dict()) # Add an NXinstrument to the NXentry nxinstrument = NXinstrument() @@ -219,12 +222,7 @@ def process(self, data): # Add an NXdetector to the NXinstrument # (do not fill in data fields yet) - detector_names = list(np.asarray(tomofields.detector_names, dtype=str)) detector_prefix = detector_config.prefix - if detector_prefix not in detector_names: - raise ValueError( - f'Data for detector {detector_prefix} is unavailable ' - f'(available detectors: {detector_names})') nxdetector = 
NXdetector() nxinstrument.detector = nxdetector nxdetector.local_name = detector_prefix @@ -340,8 +338,7 @@ def process(self, data): num_image*[smb_pars[z_translation_name]] # Collect tomography fields data - tomo_stacks = tomofields.data.detector_data.nxdata[ - detector_names.index(detector_prefix)] + tomo_stacks = tomofields.data[detector_prefix].nxdata tomo_stack_shape = tomo_stacks.shape assert len(tomo_stack_shape) == 3 assert tomo_stack_shape[-2] == detector_config.rows @@ -408,7 +405,7 @@ def process(self, data): class TomoDataProcessor(Processor): """ A processor to reconstruct a set of tomographic images returning - either a dictionary or a nexusformat.nexus.NXroot object + either a dictionary or a `nexusformat.nexus.NXroot` object containing the (meta) data after processing each individual step. """ @@ -419,7 +416,7 @@ def process( """ Process the input map or configuration with the step specific instructions and return either a dictionary or a - nexusformat.nexus.NXroot object with the processed result. + `nexusformat.nexus.NXroot` object with the processed result. :param data: Input configuration and specific step instructions for tomographic image reduction. 
@@ -808,7 +805,7 @@ def reduce_data( reduced_data = self._gen_tomo( nxentry, reduced_data, image_key, calibrate_center_rows) - # Create a copy of the input Nexus object and remove raw and + # Create a copy of the input NeXus object and remove raw and # any existing reduced data exclude_items = [ f'{nxentry.nxname}/reduced_data/data', @@ -870,7 +867,7 @@ def find_centers(self, nxroot, tool_config, calibrate_center_rows=False): # Select the image stack to find the calibrated center axis # reduced data axes order: stack,theta,row,column - # Note: Nexus can't follow a link if the data it points to is + # Note: NeXus can't follow a link if the data it points to is # too big get the data from the actual place, not from # nxentry.data num_tomo_stacks = nxentry.reduced_data.data.tomo_fields.shape[0] @@ -1051,7 +1048,7 @@ def reconstruct_data(self, nxroot, center_info, tool_config): # Reconstruct tomography data # - reduced data axes order: stack,theta,row,column # - reconstructed data axes order: row/-z,y,x - # Note: Nexus can't follow a link if the data it points to is + # Note: NeXus can't follow a link if the data it points to is # too big get the data from the actual place, not from # nxentry.data if 'zoom_perc' in nxentry.reduced_data: @@ -1250,7 +1247,7 @@ def reconstruct_data(self, nxroot, center_info, tool_config): nxprocess.data = NXdata( NXfield(tomo_recon_stacks, 'reconstructed_data')) - # Create a copy of the input Nexus object and remove reduced + # Create a copy of the input NeXus object and remove reduced # data exclude_items = [ f'{nxentry.nxname}/reduced_data/data', @@ -1259,7 +1256,7 @@ def reconstruct_data(self, nxroot, center_info, tool_config): ] nxroot = nxcopy(nxroot, exclude_nxpaths=exclude_items) - # Add the reconstructed data NXprocess to the new Nexus object + # Add the reconstructed data NXprocess to the new NeXus object nxentry = nxroot[nxroot.default] nxentry.reconstructed_data = nxprocess if 'data' not in nxentry: @@ -1323,10 +1320,10 @@ def 
combine_data(self, nxroot, tool_config): # Get and combine the reconstructed stacks # - reconstructed axis data order: stack,row/-z,y,x - # Note: Nexus can't follow a link if the data it points to is + # Note: NeXus can't follow a link if the data it points to is # too big. So get the data from the actual place, not from # nxentry.data - # Also load one stack at a time to reduce risk of hitting Nexus + # Also load one stack at a time to reduce risk of hitting NeXus # data access limit t0 = time() tomo_recon_combined = \ @@ -1485,7 +1482,7 @@ def combine_data(self, nxroot, tool_config): NXfield( x, 'x', attrs={'units': detector.column_pixel_size.units}),)) - # Create a copy of the input Nexus object and remove + # Create a copy of the input NeXus object and remove # reconstructed data exclude_items = [ f'{nxentry.nxname}/reconstructed_data/data', @@ -1493,7 +1490,7 @@ def combine_data(self, nxroot, tool_config): ] nxroot = nxcopy(nxroot, exclude_nxpaths=exclude_items) - # Add the combined data NXprocess to the new Nexus object + # Add the combined data NXprocess to the new NeXus object nxentry = nxroot[nxroot.default] nxentry.combined_data = nxprocess if 'data' not in nxentry: @@ -3116,7 +3113,7 @@ def process(self, data): if num_tomo_stack == 1: tomo_fields_stack = tomo_fields_stack[0] - # Create Nexus object and write to file + # Create a NeXus object and write to file nxroot = NXroot() nxroot.entry = NXentry() nxroot.entry.sample = NXsample() @@ -3247,7 +3244,7 @@ def process(self, data, num_image=5): dark_field = int(background_intensity) * np.ones( (num_image, detector_size[0], detector_size[1]), dtype=np.int64) - # Create Nexus object and write to file + # Create a NeXus object and write to file nxdark = NXroot() nxdark.entry = NXentry() nxdark.entry.sample = nxroot.entry.sample @@ -3336,7 +3333,7 @@ def process(self, data, num_image=5): outer_indices = np.where(abs(img_row_coords) > slit_size/2)[0] bright_field[:,outer_indices,:] = 0 - # Create Nexus object 
and write to file + # Create a NeXus object and write to file nxbright = NXroot() nxbright.entry = NXentry() nxbright.entry.sample = nxroot.entry.sample @@ -3440,7 +3437,7 @@ def process(self, data, scan_numbers=None): f'Inconsistent number of scans ({num_scan}), ' f'len(scan_numbers) = {len(scan_numbers)})') - # Create the output data structure in Nexus format + # Create the output data structure in NeXus format nxentry = NXentry() # Create the SPEC file header diff --git a/examples/tomo/pipeline_id3a_brick.yaml b/examples/tomo/pipeline_id3a_brick.yaml index 726914d..1f44a29 100644 --- a/examples/tomo/pipeline_id3a_brick.yaml +++ b/examples/tomo/pipeline_id3a_brick.yaml @@ -14,8 +14,8 @@ pipeline: schema: common.models.map.MapConfig - common.MapProcessor: num_proc: 1 - detector_names: - - sim + detectors: + - id: sim schema: tomofields - pipeline.MultiplePipelineItem: items: @@ -27,8 +27,8 @@ pipeline: - spec_file: ../data/hollow_brick/spec.log scan_numbers: - 1 - detector_names: - - sim + detectors: + - id: sim schema: darkfield - common.SpecReader: inputdir: ../data/hollow_brick @@ -39,8 +39,8 @@ pipeline: - spec_file: spec.log scan_numbers: - 2 - detector_names: - - sim + detectors: + - id: sim schema: brightfield - common.YAMLReader: filename: detector.yaml diff --git a/examples/tomo/pipeline_id3a_pyramid.yaml b/examples/tomo/pipeline_id3a_pyramid.yaml index 2f0a54c..44e08b9 100644 --- a/examples/tomo/pipeline_id3a_pyramid.yaml +++ b/examples/tomo/pipeline_id3a_pyramid.yaml @@ -36,8 +36,8 @@ pipeline: # data_type: smb_par # name: ramsz num_proc: 1 - detector_names: - - sim + detectors: + - id: sim schema: tomofields # - common.NexusWriter: # filename: map_hollow_pyramid.nxs @@ -55,8 +55,8 @@ pipeline: - spec_file: ../data/hollow_pyramid/spec.log scan_numbers: - 1 - detector_names: - - sim + detectors: + - id: sim schema: darkfield - common.SpecReader: inputdir: ../data/hollow_pyramid @@ -67,8 +67,8 @@ pipeline: - spec_file: spec.log scan_numbers: - 2 - 
detector_names: - - sim + detectors: + - id: sim schema: brightfield - common.YAMLReader: filename: detector_pyramid.yaml diff --git a/examples/tomo/pipeline_id3b.yaml b/examples/tomo/pipeline_id3b.yaml index 7c37a00..3d40b35 100644 --- a/examples/tomo/pipeline_id3b.yaml +++ b/examples/tomo/pipeline_id3b.yaml @@ -14,8 +14,8 @@ pipeline: schema: common.models.map.MapConfig - common.MapProcessor: num_proc: 1 - detector_names: - - sim + detectors: + - id: sim schema: tomofields # - common.NexusWriter: # filename: map_hollow_cube.nxs @@ -33,8 +33,8 @@ pipeline: - spec_file: ../data/hollow_cube/hollow_cube scan_numbers: - 1 - detector_names: - - sim + detectors: + - id: sim schema: darkfield - common.SpecReader: inputdir: ../data/hollow_cube @@ -45,8 +45,8 @@ pipeline: - spec_file: hollow_cube scan_numbers: - 2 - detector_names: - - sim + detectors: + - id: sim schema: brightfield - common.YAMLReader: filename: detector.yaml