Fix missing import (#189)
* Fix missing import

* Update filter test (#190)
JulienPeloton authored Jul 29, 2024
1 parent 58e8801 commit 318271e
Showing 2 changed files with 52 additions and 30 deletions.
80 changes: 50 additions & 30 deletions fink_filters/filter_anomaly_notification/filter.py
@@ -25,10 +25,16 @@


 def anomaly_notification_(
-        df_proc, threshold=10,
-        send_to_tg=False, channel_id=None,
-        send_to_slack=False, channel_name=None,
-        trick_par=10, cut_coords=False, history_period=90):
+    df_proc,
+    threshold=10,
+    send_to_tg=False,
+    channel_id=None,
+    send_to_slack=False,
+    channel_name=None,
+    trick_par=10,
+    cut_coords=False,
+    history_period=90,
+):
     """ Create event notifications with a high `anomaly_score` value

     Notes
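
The signature change above is pure reformatting; the call pattern is unchanged. For reference, a call mirroring the docstring's doctest (partially visible in the next hunk; df_proc is assumed to be a Spark DataFrame carrying an anomaly_score column):

# Sketch of a typical call, following the doctest shown below; df_proc is
# assumed given, and all other values match the test's keyword arguments.
pdf_anomalies = anomaly_notification_(
    df_proc, threshold=10,
    send_to_tg=False, channel_id=None,
    send_to_slack=False, channel_name=None,
)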
@@ -115,7 +121,7 @@ def anomaly_notification_(
     ...     send_to_tg=False, channel_id=None,
     ...     send_to_slack=False, channel_name=None)
     >>> print(sorted(pdf_anomalies['objectId'].values))
-    ['ZTF18aaakhsv', 'ZTF18aabeyfi', 'ZTF18aapgymv', 'ZTF18aapoack', 'ZTF18abbtxsx', 'ZTF18abgjtxx', 'ZTF18abzvnya', 'ZTF19aboujyi', 'ZTF19acevxhv', 'ZTF21acoshvy']
+    ['ZTF18aabeyfi', 'ZTF18aapgymv', 'ZTF18aaypnnd', 'ZTF18abbtxsx', 'ZTF18abgjtxx', 'ZTF18abhxigz', 'ZTF18abjuixy', 'ZTF18abtrvkm', 'ZTF18acaksuq', 'ZTF21acoshvy']

     # Check cut_coords
     >>> pdf_anomalies = anomaly_notification_(df_proc, threshold=10,
@@ -127,71 +133,85 @@ def anomaly_notification_(
     """
     # Filtering by coordinates
     if cut_coords:
-        df_proc = df_proc.filter('dec <= 20 AND (ra >= 160 AND ra <= 240)')
+        df_proc = df_proc.filter("dec <= 20 AND (ra >= 160 AND ra <= 240)")
         # We need to know the total number of objects per night which satisfy the condition on coordinates
         cut_count = df_proc.count()
         if cut_count == 0:
             return pd.DataFrame()

     # Compute the median for the night
-    med = df_proc.select('anomaly_score').approxQuantile('anomaly_score', [0.5], 0.05)
+    med = df_proc.select("anomaly_score").approxQuantile("anomaly_score", [0.5], 0.05)
     med = round(med[0], 2)

     # Extract anomalous objects

-    pdf_anomalies_ext = df_proc.sort(['anomaly_score'], ascending=True).limit(trick_par * threshold).toPandas()
-    pdf_anomalies_ext = pdf_anomalies_ext.drop_duplicates(['objectId'])
-    upper_bound = np.max(pdf_anomalies_ext['anomaly_score'].values[:threshold])
-    pdf_anomalies = pdf_anomalies_ext[pdf_anomalies_ext['anomaly_score'] <= upper_bound]
+    pdf_anomalies_ext = (
+        df_proc.sort(["anomaly_score"], ascending=True)
+        .limit(trick_par * threshold)
+        .toPandas()
+    )
+    pdf_anomalies_ext = pdf_anomalies_ext.drop_duplicates(["objectId"])
+    upper_bound = np.max(pdf_anomalies_ext["anomaly_score"].values[:threshold])
+    pdf_anomalies = pdf_anomalies_ext[pdf_anomalies_ext["anomaly_score"] <= upper_bound]

     history_objects = filter_utils.get_an_history(history_period)

     tg_data, slack_data = [], []
     for _, row in pdf_anomalies.iterrows():
-        gal = SkyCoord(ra=row.ra * u.degree, dec=row.dec * u.degree, frame='icrs').galactic
+        gal = SkyCoord(
+            ra=row.ra * u.degree, dec=row.dec * u.degree, frame="icrs"
+        ).galactic
         oid = filter_utils.get_OID(row.ra, row.dec)
-        t1a = f'ID: [{row.objectId}](https://fink-portal.org/{row.objectId})'
-        t1b = f'ID: <https://fink-portal.org/{row.objectId}|{row.objectId}>'
+        t1a = f"ID: [{row.objectId}](https://fink-portal.org/{row.objectId})"
+        t1b = f"ID: <https://fink-portal.org/{row.objectId}|{row.objectId}>"
         t_oid_1a = f"DR OID (<1''): [{oid}](https://ztf.snad.space/view/{oid})"
         t_oid_1b = f"DR OID (<1''): <https://ztf.snad.space/view/{oid}|{oid}>"
-        t2_ = f'GAL coordinates: {round(gal.l.deg, 6)}, {round(gal.b.deg, 6)}'
-        t_ = f'''
-EQU: {row.ra}, {row.dec}'''
+        t2_ = f"GAL coordinates: {round(gal.l.deg, 6)}, {round(gal.b.deg, 6)}"
+        t_ = f"""
+EQU: {row.ra}, {row.dec}"""
         t2_ += t_
-        t3_ = f'UTC: {str(row.timestamp)[:-3]}'
-        t4_ = f'Real bogus: {round(row.rb, 2)}'
-        t5_ = f'Anomaly score: {round(row.anomaly_score, 2)}'
+        t3_ = f"UTC: {str(row.timestamp)[:-3]}"
+        t4_ = f"Real bogus: {round(row.rb, 2)}"
+        t5_ = f"Anomaly score: {round(row.anomaly_score, 2)}"
         if row.objectId in history_objects:
-            t5_ += f'''
-Detected as top-{threshold} in the last {history_period} days: {history_objects[row.objectId]} {'times' if history_objects[row.objectId] > 1 else 'time'}.'''
-        cutout, curve, cutout_perml, curve_perml = filter_utils.get_data_permalink_slack(row.objectId)
+            t5_ += f"""
+Detected as top-{threshold} in the last {history_period} days: {history_objects[row.objectId]} {'times' if history_objects[row.objectId] > 1 else 'time'}."""
+        cutout, curve, cutout_perml, curve_perml = (
+            filter_utils.get_data_permalink_slack(row.objectId)
+        )
         curve.seek(0)
         cutout.seek(0)
         cutout_perml = f"<{cutout_perml}|{' '}>"
         curve_perml = f"<{curve_perml}|{' '}>"
-        tg_data.append((f'''{t1a}
+        tg_data.append(
+            (
+                f"""{t1a}
 {t_oid_1a}
 {t2_}
 {t3_}
 {t4_}
-{t5_}''', cutout, curve))
-        slack_data.append(f'''==========================
+{t5_}""",
+                cutout,
+                curve,
+            )
+        )
+        slack_data.append(f"""==========================
 {t1b}
 {t_oid_1b}
 {t2_}
 {t3_}
 {t4_}
 {t5_}
-{cutout_perml}{curve_perml}''')
-    init_msg = f'Median anomaly score overnight: {med}.'
+{cutout_perml}{curve_perml}""")
+    init_msg = f"Median anomaly score overnight: {med}."
     if cut_coords:
-        init_msg += f'''
+        init_msg += f"""
 (of the objects in the sky area)
 Sky area:
 1) delta <= 20°
 2) alpha ∈ (160°, 240°)
 Total number of objects per night in the area: {cut_count}.
-'''
+"""
     if send_to_slack:
         filter_utils.msg_handler_slack(slack_data, channel_name, init_msg)
     if send_to_tg:
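
A note on the candidate selection reshaped in this hunk: the function over-fetches trick_par * threshold of the lowest-scoring rows, drops duplicate objectIds, then keeps every row up to the threshold-th best remaining score, so roughly threshold distinct objects survive. A minimal pandas-only sketch of that trick with made-up data (the real function operates on a Spark DataFrame):

import numpy as np
import pandas as pd

threshold, trick_par = 2, 3

# Over-fetch trick_par * threshold rows so that, after dropping duplicate
# objectIds, at least ~threshold distinct objects remain.
pdf = pd.DataFrame({
    "objectId": ["a", "a", "b", "c", "c", "d"],
    "anomaly_score": [-0.9, -0.85, -0.8, -0.7, -0.65, -0.1],
}).sort_values("anomaly_score").head(trick_par * threshold)

pdf = pdf.drop_duplicates(["objectId"])
upper_bound = np.max(pdf["anomaly_score"].values[:threshold])
print(pdf[pdf["anomaly_score"] <= upper_bound])  # rows for objects 'a' and 'b'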
2 changes: 2 additions & 0 deletions fink_filters/filter_dwarf_agn/filter.py
@@ -18,6 +18,8 @@
 from pyspark.sql.types import StringType

 from fink_science.xmatch.utils import cross_match_astropy
+from fink_filters import __file__
+
 from astropy.coordinates import SkyCoord
 from astropy import units as u

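This two-line addition is the missing import the commit title refers to. The diff does not show where filter_dwarf_agn uses it, but a common reason to import __file__ from a package is to rebind the module-level name so bundled data resolves relative to the package root; a minimal sketch under that assumption (the data path below is hypothetical):

import os

from fink_filters import __file__  # rebinds __file__ to .../fink_filters/__init__.py

# Hypothetical use: resolve a catalogue shipped at the package root,
# independent of which module performs the lookup.
catalog_path = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "data", "dwarf_agn.parquet"
)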
