O4b realtime (#58)
Small changes for v1.2.2: scripts to calculate bkg, schema v4.0.0, small bugfixes
jessiethw authored Jul 26, 2024
1 parent b0f474c commit 25f3609
Showing 10 changed files with 243 additions and 25 deletions.
2 changes: 1 addition & 1 deletion doc/source/conf.py
@@ -16,7 +16,7 @@
project = 'FastResponseAnalysis'
copyright = '2023, Alex Pizzuto, Jessie Thwaites'
author = 'Alex Pizzuto, Jessie Thwaites'
release = '1.2.1'
release = '1.2.2'

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
9 changes: 7 additions & 2 deletions fast_response/FastResponseAnalysis.py
@@ -1220,7 +1220,12 @@ def unblind_TS(self):

def find_coincident_events(self):
r"""Find "coincident events" for the analysis.
These are ontime events that have a spatial times energy weight greater than 10
These are ontime events that satisfy:
Spatial weight * energy weight [* temporal weight] > 10
(Note that for this box time window, all events in the ontime window
have the same temporal weight.)
"""
spatial_weights = self.llh.llh_model.signal(
self.ra, self.dec, self.llh._events,
@@ -1294,7 +1299,7 @@ def upper_limit(self, n_per_sig=100, p0=None):
Returns
--------
upperlimit: float
Value of E^2 dN / dE in units of TeV / cm^2
Flux upper limit in [TeV cm^2 s]^-1
"""
if self.inj is None:
self.initialize_injector()
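
For reference, the coincident-event criterion described in the updated find_coincident_events docstring is just a per-event product of weights compared against a fixed threshold of 10. A minimal sketch with made-up arrays (the real weights come from the skylab likelihood model; the names here are illustrative, not the actual API):

import numpy as np

# Illustrative per-event weights; in the analysis these come from the
# likelihood model's spatial and energy terms.
spatial_weights = np.array([15.2, 0.4, 8.0])
energy_weights = np.array([1.1, 2.0, 1.5])
temporal_weight = 1.0  # identical for every on-time event in a box time window

combined = spatial_weights * energy_weights * temporal_weight
coincident = combined > 10
print(np.flatnonzero(coincident))  # indices of "coincident" events -> [0 2]
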
3 changes: 2 additions & 1 deletion fast_response/listeners/gcn_listener.py
@@ -105,7 +105,8 @@ def process_gcn(payload, root):
print('--skymap={} --time={} --alert_id={}'.format(skymap, str(event_mjd), run_id+':'+event_id))
return

print('Running {}'.format(command))
print('\nRunning {} --skymap={} --time={} --alert_id={} --suffix={}'.format(
command, skymap, str(event_mjd), run_id+':'+event_id, suffix))
subprocess.call([command, '--skymap={}'.format(skymap),
'--time={}'.format(str(event_mjd)),
'--alert_id={}'.format(run_id+':'+event_id),
2 changes: 1 addition & 1 deletion fast_response/listeners/gw_gcn_listener.py
@@ -191,7 +191,7 @@ def process_gcn(payload, root):
webhook = f.readline().rstrip('\n')
bot_name = f.readline().rstrip('\n')

for channel in ['#fra-shifting','#gwnu']:
for channel in ['#fra-shifting','#gwnu-heartbeat']:
bot = slackbot(channel, bot_name, webhook)
bot.send_message(slack_message,'gw')

100 changes: 100 additions & 0 deletions fast_response/precomputed_background/alert_bkg_trials_withprior.py
@@ -0,0 +1,100 @@
#!/usr/bin/env python

r"""
Run trials for background, with a given prior.
Record TS, best-fit location, and fitted events
around best fit location
"""
import numpy as np
import healpy as hp
import os, argparse
from astropy.time import Time
import pickle

from fast_response.AlertFollowup import AlertFollowup

parser = argparse.ArgumentParser(description='Fast Response Analysis')
parser.add_argument('--deltaT', type=float, default=None,
help='Time Window in seconds')
parser.add_argument('--ntrials', type=int, default = 10000,
help='Trials')
parser.add_argument("--alert_mjd", default=60298.0, type=float,
help="mjd of alert event (default Dec 20, 2023)")
parser.add_argument("--skymap", default=None, type=str,
help='skymap link')
parser.add_argument('--seed', default=1, type=int,
help='Unique seed for running on the cluster')
parser.add_argument('--outdir',type=str, default=None,
help='Output directory to save npz (default = FAST_RESPONSE_OUTPUT env variable or cwd)')
args = parser.parse_args()

if args.outdir is None:
    # os.environ.get returns None rather than raising, so supply the fallback directly
    outdir = os.environ.get('FAST_RESPONSE_OUTPUT', './')
else:
outdir = args.outdir
if not os.path.exists(outdir+'/trials/'):
os.mkdir(outdir+'/trials/')
outdir=outdir+'/trials/'

start_mjd = args.alert_mjd - (args.deltaT / 86400. /2.)
stop_mjd = start_mjd + (args.deltaT / 86400.)
start_iso = Time(start_mjd, format='mjd').iso
stop_iso = Time(stop_mjd, format='mjd').iso
deltaT = args.deltaT / 86400.

f = AlertFollowup('Precompute_trials_test', args.skymap,
start_iso, stop_iso, save=False)
# f.llh.nbackground=args.bkg*args.deltaT/1000.
inj = f.initialize_injector() #just put this here to initialize f.spatial_prior
# print(f.llh.nbackground)

ntrials = args.ntrials
stop = ntrials * (args.seed+1)
start = stop-ntrials
seed_counter = start

npix = hp.nside2npix(f.nside)

ra, dec, TS_list =[], [], []
ns, seed =[], []

for jj in range(ntrials):
# seed_counter += 1
seed_counter = np.random.randint(0, 1000000)
val = f.llh.scan(0.0, 0.0, scramble=True, seed = seed_counter,
spatial_prior = f.spatial_prior,
time_mask = [deltaT / 2., (start_mjd + stop_mjd) / 2.],
pixel_scan = [f.nside, f._pixel_scan_nsigma], inject = None)

try:
maxLoc = np.argmax(val['TS_spatial_prior_0']) #pick out max of all likelihood ratios at diff pixels
except ValueError:
continue

TS_list.append(val['TS_spatial_prior_0'].max())
ns.append(val['nsignal'][maxLoc])
ra.append(val['ra'][maxLoc])
dec.append(val['dec'][maxLoc])
# gamma.append(val['gamma'][maxLoc]) #fixed gamma in llh
seed.append(seed_counter)

print("DONE")

outfilename = outdir+'seed_delta_t_{:.1e}_{}.npz'.format(args.deltaT, seed_counter)
results={
'TS_List':TS_list,
'ns_fit':ns,
# 'gamma_fit':gamma,
'ra':ra,
'dec':dec,
'seed':seed
}
print('saving everything')
with open(outfilename, 'wb') as f:
pickle.dump(results, f)

print("Saved to {}".format(outfilename))
90 changes: 90 additions & 0 deletions fast_response/precomputed_background/submit_prior_trials.py
@@ -0,0 +1,90 @@
'''
Script to submit batches of background trials with random seeds
and a given spatial prior, as needed for realtime background estimation.
'''

import pycondor
import argparse
import pwd, os
import fast_response

parser = argparse.ArgumentParser(
description='Submit script')
parser.add_argument(
'--outdir', type=str, required=True,
help = 'Output directory (a trials/ subdirectory will be created here for the output)')
parser.add_argument(
'--skymap', type=str, required=True,
help='Skymap path or link')
parser.add_argument(
'--alert_mjd', type=float, required=True,
help='MJD of alert event')
parser.add_argument(
'--deltaT', type=float, default=172800., #[-0.1, +14]day: 1218240
help='time window to use (in sec)')
parser.add_argument(
'--ntrials', type=int, default=10000,
help='Number of precomputed trials to run (default: 10,000)')
parser.add_argument(
'--seed_start', type=int,default=0,
help='Seed to start with when running trials')
parser.add_argument(
'--n_per_batch', default=500, type=int,
help='Number of trials to run in each set (default: 500)')
# parser.add_argument(
# '--type', default='alert', type=str,
# help='run alert or GW trials. options are alert (default) or gw')
args = parser.parse_args()

if not os.path.exists(os.path.join(args.outdir, 'trials')):
os.mkdir(os.path.join(args.outdir, 'trials'))
print('Output trials to directory: {}/trials'.format(args.outdir))

fra_dir = os.path.dirname(fast_response.__file__)
script = os.path.join(fra_dir, 'precomputed_background/alert_bkg_trials_withprior.py')
deltaT = args.deltaT
skymap = args.skymap
alert_mjd = args.alert_mjd

username = pwd.getpwuid(os.getuid())[0]
if not os.path.exists(f'/scratch/{username}/'):
os.mkdir(f'/scratch/{username}/')
if not os.path.exists(f'/scratch/{username}/fra/'):
os.mkdir(f'/scratch/{username}/fra/')
if not os.path.exists(f'/scratch/{username}/fra/condor/'):
os.mkdir(f'/scratch/{username}/fra/condor')

error = f'/scratch/{username}/fra/condor/error'
output = f'/scratch/{username}/fra/condor/output'
log = f'/scratch/{username}/fra/condor/log'
submit = f'/scratch/{username}/fra/condor/submit'

### Create Dagman to submit jobs to cluster
job = pycondor.Job(
'alert_precomp_trials',
script,
error=error,
output=output,
log=log,
submit=submit,
getenv=True,
universe='vanilla',
verbose=2,
request_cpus=8,#10,
request_memory=8000,#10000,
extra_lines=[
'should_transfer_files = YES',
'when_to_transfer_output = ON_EXIT']
)

for trial in range(int(args.ntrials / args.n_per_batch)):
job.add_arg(f'--deltaT {deltaT} --ntrials {args.n_per_batch} --outdir {args.outdir} --skymap {skymap} --alert_mjd {alert_mjd}')

dagman = pycondor.Dagman(
'fra_precompbg',
submit=submit, verbose=2)

dagman.add_job(job)
dagman.build_submit()
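
The batching above is simple integer division: ntrials is split into ntrials / n_per_batch identical Condor jobs, and the trial script draws its own random seeds internally (note the commented-out seed_counter increment in alert_bkg_trials_withprior.py). A hypothetical variant of the same loop that instead passes an explicit per-batch seed via the trial script's --seed argument, shown only as a sketch and not what the submitted script does:

# Hypothetical variant: hand each batch a distinct seed instead of
# relying on the trial script's internal np.random.randint() seeding.
n_batches = int(args.ntrials / args.n_per_batch)  # e.g. 10000 / 500 = 20 jobs
for batch in range(n_batches):
    seed = args.seed_start + batch
    job.add_arg(f'--deltaT {deltaT} --ntrials {args.n_per_batch} '
                f'--outdir {args.outdir} --skymap {skymap} '
                f'--alert_mjd {alert_mjd} --seed {seed}')
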

40 changes: 29 additions & 11 deletions fast_response/scripts/combine_results_kafka.py
@@ -93,8 +93,7 @@ def format_ontime_events_uml(events, event_mjd):
'localization':{
'ra' : round(np.rad2deg(event['ra']), 2),
'dec' : round(np.rad2deg(event['dec']), 2),
"uncertainty_shape": "circle",
'ra_uncertainty': [round(np.rad2deg(event['sigma']*2.145966),2)],
'ra_dec_error': round(np.rad2deg(event['sigma']*2.145966),2),
"containment_probability": 0.9,
"systematic_included": False
},
@@ -114,8 +113,7 @@ def format_ontime_events_llama(events):
'localization':{
'ra' : round(event['ra'], 2),
'dec' : round(event['dec'], 2),
"uncertainty_shape": "circle",
'ra_uncertainty': [round(np.rad2deg(np.deg2rad(event['sigma'])*2.145966),3)],
'ra_dec_error': round(np.rad2deg(np.deg2rad(event['sigma'])*2.145966),2),
"containment_probability": 0.9,
"systematic_included": False
},
@@ -184,10 +182,10 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
if int(params['Significant'])==0:
subthreshold=True
logger.warning('low-significance alert found. ')
if params['Group'] == 'Burst':
if params['Group'] == 'Burst' or params["Pipeline"] =='CWB':
wait_for_llama = False
m = 'Significant' if not subthreshold else 'Subthreshold'
logger.warning('{} burst alert found. '.format(m))
logger.warning('{} burst or CWB alert found. '.format(m))
if len(params['Instruments'].split(','))==1:
#wait_for_llama = False
logger.warning('One detector event found. ')
@@ -198,7 +196,7 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
return

collected_results = {}
collected_results["$schema"]= "https://gcn.nasa.gov/schema/v3.0.0/gcn/notices/icecube/lvk_nu_track_search.schema.json"
collected_results["$schema"]= "https://gcn.nasa.gov/schema/v4.0.0/gcn/notices/icecube/lvk_nu_track_search.schema.json"
collected_results["type"]= "IceCube LVK Alert Nu Track Search"

eventtime = record.find('.//ISOTime').text
@@ -304,11 +302,14 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
else:
logger.warning('Both analyses not finished after {:.0f} min wait.'.format(max_wait))
logger.warning('Not sending GCN.')

if record.attrib['role']=='observation' and not heartbeat:
err_msg = '--missing_llama=True --missing_uml=True' if not subthreshold else '--missing_llama=True'
err_msg = ['/home/jthwaites/private/make_call.py', '--troubleshoot_gcn=True',
'--missing_llama=True']
if not subthreshold: err_msg.append('--missing_uml=True')

try:
subprocess.call(['/home/jthwaites/private/make_call.py',
'--troubleshoot_gcn=True', err_msg])
subprocess.call(err_msg)
except:
logger.warning('Failed to send alert to shifters: Issue finding both results. ')
return
@@ -482,7 +483,7 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
my_key = f.readline()

if not subthreshold:
channels = ['#gwnu-heartbeat', '#gwnu','#alerts']
channels = ['#gwnu-heartbeat', '#alerts']#, '#gwnu']
else:
channels = ['#gwnu-heartbeat']
for channel in channels:
@@ -516,6 +517,23 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
logger.info('Sent alert to ROC for p<0.01')
except:
logger.warning('Failed to send email/SMS notification.')

# try:
# if params['Group'] == 'Burst':
# merger_type = 'Burst'
# else:
# k = ['BNS','NSBH','BBH']
# probs = {j: float(params[j]) for j in k}
# merger_type = max(zip(probs.values(), probs.keys()))[1]
# except:
# logger.info('Could not determine type of event')
# merger_type = None

# try:
# subprocess.call(['/home/jthwaites/private/make_call.py', f'--type={merger_type}', '--call_anyway'])
# except Exception as e:
# logger.warning('Call for p<0.01 failed.')

else:
logger.info('p>0.01: no email/sms sent')
else:
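
For context, the schema bump from v3.0.0 to v4.0.0 replaces each event's uncertainty_shape / ra_uncertainty pair with a single ra_dec_error value. A sketch of the resulting localization block for one on-time event, with made-up numbers (only the field names and the 2.145966 scaling are taken from the code above):

import numpy as np

sigma = np.deg2rad(0.5)  # hypothetical per-event angular uncertainty, in radians
localization = {
    'ra': 210.73,
    'dec': -12.04,
    # 2.145966 * sigma scales the 1-sigma error to ~90% containment,
    # consistent with containment_probability = 0.9 below
    'ra_dec_error': round(np.rad2deg(sigma * 2.145966), 2),
    'containment_probability': 0.9,
    'systematic_included': False,
}
print(localization['ra_dec_error'])  # -> 1.07
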
14 changes: 9 additions & 5 deletions fast_response/scripts/run_track_followup.py
@@ -6,7 +6,7 @@
Author: Alex Pizzuto
May 2020'''

import os, argparse
import os, argparse, glob
from astropy.time import Time

from fast_response.AlertFollowup import TrackFollowup
@@ -49,10 +49,14 @@

# look for contour files
base_skymap_path = '/home/followup/output_plots/'
contour_fs = [base_skymap_path \
+ f'run{run_id:08d}.evt{ev_id:012d}.HESE.contour_{containment}.txt' \
for containment in ['50', '90']]
contour_fs = [f for f in contour_fs if os.path.exists(f)]
contour_fs = []
for containment in ['50', '90']:
contour_fs = contour_fs + glob.glob(base_skymap_path +
f'run{run_id:08d}.evt{ev_id:012d}.*.contour_{containment}.txt')
# contour_fs = [base_skymap_path \
# + f'run{run_id:08d}.evt{ev_id:012d}.HESE.contour_{containment}.txt' \
# for containment in ['50', '90']]
# contour_fs = [f for f in contour_fs if os.path.exists(f)]
if len(contour_fs) == 0:
contour_fs = None

6 changes: 3 additions & 3 deletions fast_response/web_utils.py
@@ -310,7 +310,7 @@ def updateFastResponsePlots(gw=False):
plt.gca().invert_xaxis()
plt.grid(which = 'both', alpha = 0.2)
plt.xlim(1.1e0,1e-3)
plt.ylim(3e-3, 1e0)
plt.ylim(1e-3, 1e0)
plt.xlabel('p-value', fontsize = 18)
plt.ylabel('Fraction of Analyses', fontsize = 18)
plt.tick_params(labelsize = 18)
@@ -324,7 +324,7 @@ def updateFastResponsePlots(gw=False):
pval_dist_path=f'/home/{username}/public_html/FastResponse/webpage/output/pvalue_distribution_liveupdate.png'
plt.title("{} Fast Response Analyses as of {}".format(len(df), today), fontsize = 20)
#plt.text(7e-3, 5e-2, "IceCube\nPreliminary", fontsize = 20, color = 'r')
plt.ylim(3e-3, 1e0)
# plt.ylim(3e-3, 1e0)

plt.savefig(pval_dist_path, dpi=200, bbox_inches='tight')
#plt.savefig(f'/home/{username}/public_html/FastResponse/webpage/output/pvalue_distribution_liveupdate.png', dpi=200, bbox_inches='tight')
@@ -490,7 +490,7 @@ def createGWEventPage(analysis):
</table>
</table>
'''.format(e, event['event_dt'], event['localization']['ra'], event['localization']['dec'],
event['localization']['ra_uncertainty'][0],
event['localization']['ra_dec_error'],
event['event_pval_generic'], event['event_pval_bayesian'])
e+=1
