Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/histogram slider #41

Merged
merged 2 commits into the base branch from the feature branch (branch names not captured in this extract)
Nov 20, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 34 additions & 2 deletions datalab/datalab_session/analysis/raw_data.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import numpy as np
import math
from PIL import Image
from datalab.datalab_session.s3_utils import get_fits
from datalab.datalab_session.file_utils import get_hdu
Expand All @@ -15,7 +16,7 @@ def raw_data(input: dict):
# Compute the fits2image autoscale params to send with the image
samples = extract_samples(image_data, sci_hdu.header, 2000)
median = np.median(samples)
_, zmax, _ = calc_zscale_min_max(samples, contrast=0.1, iterations=1)
zmin, zmax, _ = calc_zscale_min_max(samples, contrast=0.1, iterations=1)

# resize the image to max. 500 pixels on an axis by default for the UI
max_size = input.get('max_size', 500)
Expand All @@ -25,17 +26,48 @@ def raw_data(input: dict):
match bitpix:
case 8:
datatype = np.uint8
max_value = np.iinfo(datatype).max
case 16:
datatype = np.float16
max_value = np.finfo(datatype).max
case 32:
datatype = np.float32
max_value = np.finfo(datatype).max
scaled_array = np.asarray(newImage).astype(datatype)
scaled_array_flipped = np.flip(scaled_array, axis=0)

# Here we do a crazy histogram scaling to stretch the points in between zmin and zmax since that is where most detail is
# We have 10 bins before zmin, 100 between zmin and zmax and 10 after zmax.
lower_bound = int(zmin * 0.8) # Increase resolution slightly below zmin
upper_bound = int(zmax*1.2) # Increase resolution slightly beyond zmax
lower_step = int(lower_bound / 10)
upper_step = int((max_value - upper_bound) / 10)
step = int((upper_bound - lower_bound) / 100)
bins = np.arange(0, lower_bound, lower_step).tolist()
bins += np.arange(lower_bound, upper_bound, step).tolist()
bins += np.arange(upper_bound, max_value, upper_step).tolist()
histogram, bin_edges = np.histogram(samples, bins=bins)
bin_middles = []
previous_edge = 0
for edge in bin_edges:
if edge != 0:
bin_middles.append(previous_edge + int((edge-previous_edge) / 2.0))
previous_edge = edge

# Using np.log10 on the histogram made some wild results, so just apply log10 to each value
hist = []
for h in histogram:
if h > 0:
hist.append(math.log10(h))
else:
hist.append(0)

return {'data': scaled_array_flipped.flatten().tolist(),
'height': scaled_array.shape[0],
'width': scaled_array.shape[1],
'histogram': hist,
'bins': bin_middles,
'zmin': int(median),
'zmax': int(zmax),
'bitdepth': 16
'bitdepth': bitpix
}
Loading