Add option for different backends #452

Open · wants to merge 7 commits into base: main

44 changes: 25 additions & 19 deletions nuztf/base_scanner.py
@@ -53,10 +53,12 @@ def __init__(
        filter_class=DecentFilter,
        cone_nside=64,
        cones_to_scan=None,
+        output_nside: None | int = None,
        logger=None,
    ):
        self.cone_nside = cone_nside
        self.t_min = t_min
+
        (
            self.map_coords,
            self.pixel_nos,
@@ -65,7 +67,7 @@ def __init__(
            self.data,
            self.total_pixel_area,
            self.key,
-        ) = self.unpack_skymap()
+        ) = self.unpack_skymap(output_nside)

        if not hasattr(self, "prob_threshold"):
            self.prob_threshold = None
@@ -141,7 +143,7 @@ def get_cache_dir(self) -> Path:
        cache_dir.mkdir(exist_ok=True, parents=True)
        return cache_dir

-    def unpack_skymap(self):
+    def unpack_skymap(self, output_nside: None | int = None):
        raise NotImplementedError

    @staticmethod
@@ -846,8 +848,21 @@ def text_summary(self):
        return text

    def calculate_overlap_with_observations(
-        self, first_det_window_days=3.0, min_sep=0.01, fields=None
+        self,
+        first_det_window_days: float = 3.0,
+        min_sep: float = 0.01,
+        fields: list[int] | None = None,
+        backend: str = "best",
    ):
+        """
+        Calculate the overlap of the skymap with observations
+
+        :param first_det_window_days: First detection window in days
+        :param min_sep: Minimum separation between detections in days
+        :param fields: Fields to consider (if None, all fields are considered)
+        :param backend: Backend to use for coverage calculation
+        :return:
+        """

        if fields is not None:
            new = []
Expand All @@ -874,32 +889,24 @@ def calculate_overlap_with_observations(
data = pd.concat(new)

else:
mns = get_obs_summary(t_min=self.t_min, max_days=first_det_window_days)
mns = get_obs_summary(
t_min=self.t_min, max_days=first_det_window_days, backend=backend
)

if mns is None:
return None, None, None

data = mns.data.copy()

mask = data["status"] == 0

self.logger.info(
f"Found {mask.sum()} successful observations in the depot, "
f"corresponding to {np.mean(mask)*100:.2f}% of the total."
)

self.logger.info("Unpacking observations")

if self.nside > 256:
(
self.map_coords,
self.pixel_nos,
self.nside,
self.map_probs,
self.data,
self.total_pixel_area,
self.key,
) = self.unpack_skymap(output_nside=256)

pix_map = dict()
pix_obs_times = dict()

@@ -1014,17 +1021,15 @@
        )

    def plot_overlap_with_observations(
-        self,
-        first_det_window_days=None,
-        min_sep=0.01,
-        fields=None,
+        self, first_det_window_days=None, min_sep=0.01, fields=None, backend="best"
    ):
        """
        Function to plot the overlap of the field with observations.

        :param first_det_window_days: Window of time in days to consider for the first detection.
        :param min_sep: Minimum separation between observations to consider them as separate.
        :param fields: Fields to consider.
+        :param backend: Backend to use for coverage calculation

        """
@@ -1036,6 +1041,7 @@ def plot_overlap_with_observations(
            first_det_window_days=first_det_window_days,
            min_sep=min_sep,
            fields=fields,
+            backend=backend,
        )

        if coverage_df is None:
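Taken together, the base-scanner changes thread a `backend` choice from the public methods down to `get_obs_summary`. A minimal caller-side sketch of the new keyword, not part of the PR: the event name, and the assumption that the scanner takes it as its first argument, are purely illustrative, and accepted backend strings other than the default `"best"` are not shown in this diff.

```python
from nuztf.neutrino_scanner import NeutrinoScanner

# Hypothetical alert identifier, used here only for illustration.
nu = NeutrinoScanner("IC220624A")

# `backend` (new in this PR) is forwarded to get_obs_summary() and selects
# the source of the observation log used for the coverage calculation.
# Per the diff, the method returns a 3-tuple (None, None, None on failure).
result = nu.calculate_overlap_with_observations(
    first_det_window_days=3.0,  # first detection window, in days
    min_sep=0.01,               # minimum separation between observations, in days
    backend="best",             # the default; alternatives live in get_obs_summary
)
```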
25 changes: 19 additions & 6 deletions nuztf/neutrino_scanner.py
@@ -25,6 +25,7 @@ def __init__(
        cone_nside: int = 128,
        t_precursor: float = None,
        config: dict = None,
+        output_nside: int = 1024,
    ):
        self.logger = logging.getLogger(__name__)

@@ -87,6 +88,7 @@ def __init__(
            t_min=nu_time,
            run_config=self.config,
            cone_nside=cone_nside,
+            output_nside=output_nside,
        )
        self.prob_threshold = 0.9
        self.rectangular_area = (
@@ -150,15 +152,19 @@ def filter_f_no_prv(self, res: dict):
        # Require 2 detections separated by 15 mins
        if (endhist - starthist) < 0.01:
            self.logger.debug(
-                f"❌ {ztf_id}: Does have 2 detections, but these are not separated by >15 mins (delta t = {(endhist-starthist)*1440:.0f} min)"
+                f"❌ {ztf_id}: Does have 2 detections, but these are not separated by "
+                f">15 mins (delta t = {(endhist-starthist)*1440:.0f} min)"
            )
            return False

        self.logger.debug(f"✅ {ztf_id}: Passes first filtering stage (no prv).")
        return True

    def filter_f_history(self, res: dict):
-        """Filter based on 2 detection requirement and probability contour requirement"""
+        """
+        Filter based on 2 detection requirement
+        and probability contour requirement
+        """

        ztf_id = res["objectId"]
@@ -213,17 +219,24 @@ def in_contour(self, ra_deg, dec_deg):

        return np.logical_and(in_ra, in_dec)

-    def unpack_skymap(self, skymap=None, output_nside: None | int = None):
-        """ """
-        output_nside = 1024
+    def unpack_skymap(self, output_nside: None | int = None):
+        """
+        Unpack the skymap and return the pixel coordinates and probabilities
+
+        :param output_nside: Nside of the output map
+        :return: Map coordinates, pixel numbers, output nside,
+            map probabilities, data, total pixel area, key
+        """
+        output_nside = 2048 if output_nside is None else output_nside

        map_coords = []
        pixel_nos = []

        center_ra = np.radians(np.mean([self.ra_max, self.ra_min]))
        center_dec = np.radians(np.mean([self.dec_max, self.dec_min]))
        # Take the larger of the two sides and convert to radians
-        # To make sure to include all pixels until the edge of the rectangle, we have to devide by sqrt(2)
+        # To make sure to include all pixels until the edge of the rectangle,
+        # we have to divide by sqrt(2)
        # (not 2 as previously done here!)
        rad = np.radians(
            max(self.ra_max - self.ra_min, self.dec_max - self.dec_min)
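The corrected sqrt(2) in the comment above merits a quick sanity check. A standalone sketch in flat small-angle geometry, not part of the PR: a cone of radius max(Δra, Δdec)/sqrt(2) centered on the rectangle always reaches its corners, because the half-diagonal sqrt(Δra² + Δdec²)/2 never exceeds that radius, whereas dividing by 2 would under-cover any rectangle.

```python
import numpy as np

# The covering-cone radius used in unpack_skymap is max(a, b) / sqrt(2),
# where a and b are the rectangle's sides. The farthest pixel from the
# center sits at a corner, at half-diagonal distance sqrt(a**2 + b**2) / 2.
for a, b in [(3.0, 1.0), (2.0, 2.0), (5.0, 0.5)]:  # example extents in degrees
    radius = max(a, b) / np.sqrt(2)
    half_diagonal = np.hypot(a, b) / 2
    assert radius >= half_diagonal        # sqrt(2) always covers the corners
    assert max(a, b) / 2 < half_diagonal  # dividing by 2 would under-cover
    print(f"a={a}, b={b}: radius {radius:.3f} >= half-diagonal {half_diagonal:.3f}")
```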
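For context on the nside values in this PR (the removed downsampling to 256 in the base scanner, the `NeutrinoScanner` default of 1024, and the 2048 fallback in `unpack_skymap`), the HEALPix pixel count and pixel scale follow directly from nside. A quick sketch assuming `healpy`, which nuztf already uses for its maps:

```python
import healpy as hp

# npix = 12 * nside**2, so the pixel scale shrinks linearly with nside.
for nside in (256, 1024, 2048):
    npix = hp.nside2npix(nside)
    resol_arcmin = hp.nside2resol(nside, arcmin=True)
    print(f"nside={nside}: {npix} pixels, ~{resol_arcmin:.1f} arcmin per pixel")
```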