Merge pull request #9 from LCOGT/fix/output_file_data
Fix/output file data
jnation3406 authored May 2, 2024
2 parents befb4aa + 5e902a3 commit 3216981
Showing 3 changed files with 17 additions and 9 deletions.
7 changes: 6 additions & 1 deletion datalab/datalab_session/data_operations/data_operation.py
@@ -122,7 +122,12 @@ def create_and_store_fits(self, hdu_list: fits.HDUList, percent=None, cur_percent
             large_jpg_url = add_file_to_bucket(f'{self.cache_key}/{self.cache_key}-{index}-large.jpg', large_jpg_path)
             thumbnail_jpg_url = add_file_to_bucket(f'{self.cache_key}/{self.cache_key}-{index}-small.jpg', thumbnail_jpg_path)
 
-            output.append({'large_url': large_jpg_url, 'thumbnail_url': thumbnail_jpg_url})
+            output.append({
+                'large_url': large_jpg_url,
+                'thumbnail_url': thumbnail_jpg_url,
+                'basename': f'{self.cache_key}-{index}',
+                'source': 'datalab'}
+            )
 
             if percent is not None and cur_percent is not None:
                 self.set_percent_completion(cur_percent + index/total_files * percent)
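
Note: a minimal sketch (not part of the commit) of the entry shape that create_and_store_fits now appends per output image; the cache key, index, and bucket URLs below are hypothetical placeholders for the values produced at runtime.

    # Hypothetical values standing in for the real cache key, image index, and bucket URLs.
    cache_key = 'abc123'
    index = 0
    large_jpg_url = f'https://example-bucket/{cache_key}/{cache_key}-{index}-large.jpg'
    thumbnail_jpg_url = f'https://example-bucket/{cache_key}/{cache_key}-{index}-small.jpg'

    # Shape of the dict appended to output for each image after this change:
    entry = {
        'large_url': large_jpg_url,
        'thumbnail_url': thumbnail_jpg_url,
        'basename': f'{cache_key}-{index}',  # new field: identifies the generated file
        'source': 'datalab',                 # new field: marks the file as datalab-generated
    }
    print(entry['basename'])  # -> 'abc123-0'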
2 changes: 1 addition & 1 deletion datalab/datalab_session/data_operations/long.py
@@ -37,7 +37,7 @@ def wizard_description():
         }
 
     def operate(self):
-        num_files = len(self.input_data.get('input_files', []))
+        num_files = max(len(self.input_data.get('input_files', [])), 1)
         per_image_timeout = ceil(float(self.input_data.get('duration', 60.0)) / num_files)
         for i, file in enumerate(self.input_data.get('input_files', [])):
             print(f"Processing long operation on file {file.get('basename', 'No basename found')}")
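
Note: a minimal sketch (not part of the commit) of why the max(..., 1) clamp matters: with no input files the old code divided by zero when computing the per-image timeout. The input_data dict below is a hypothetical stand-in for the operation's real input.

    from math import ceil

    input_data = {}  # hypothetical: no 'input_files' and no 'duration' provided

    # Before the fix, num_files could be 0 and the division below would raise
    # ZeroDivisionError; clamping to at least 1 keeps the calculation safe.
    num_files = max(len(input_data.get('input_files', [])), 1)
    per_image_timeout = ceil(float(input_data.get('duration', 60.0)) / num_files)
    print(per_image_timeout)  # -> 60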
17 changes: 10 additions & 7 deletions datalab/datalab_session/data_operations/median.py
@@ -44,18 +44,21 @@ def operate(self):
 
         log.info(f'Executing median operation on {len(input)} files')
 
-        image_data_list = self.get_fits_npdata(input, percent=0.4, cur_percent=0.0)
+        if len(input) > 0:
+            image_data_list = self.get_fits_npdata(input, percent=0.4, cur_percent=0.0)
 
-        stacked_data = stack_arrays(image_data_list)
+            stacked_data = stack_arrays(image_data_list)
 
-        # using the numpy library's median method
-        median = np.median(stacked_data, axis=2)
+            # using the numpy library's median method
+            median = np.median(stacked_data, axis=2)
 
-        hdu_list = create_fits(self.cache_key, median)
+            hdu_list = create_fits(self.cache_key, median)
 
-        output = self.create_and_store_fits(hdu_list, percent=0.6, cur_percent=0.4)
+            output = self.create_and_store_fits(hdu_list, percent=0.6, cur_percent=0.4)
 
-        output = {'output_files': output}
+            output = {'output_files': output}
+        else:
+            output = {'output_files': []}
 
         log.info(f'Median operation output: {output}')
         self.set_percent_completion(1.0)
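
Note: a minimal sketch (not part of the commit) of the guarded median path, with np.dstack standing in for the project's stack_arrays helper (an assumption) and small in-memory arrays in place of FITS image data.

    import numpy as np

    image_data_list = [np.ones((2, 2)), np.full((2, 2), 3.0)]  # hypothetical image data

    if len(image_data_list) > 0:
        stacked_data = np.dstack(image_data_list)  # shape (2, 2, 2): images stacked along axis 2
        median = np.median(stacked_data, axis=2)   # per-pixel median across the input images
        output = {'output_files': ['<entries from create_and_store_fits>']}  # placeholder
    else:
        # With no inputs the operation now returns an empty result instead of
        # failing inside get_fits_npdata or the stacking step.
        output = {'output_files': []}

    print(median)  # [[2. 2.]
                   #  [2. 2.]]
    print(output)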
