Skip to content

Commit

Permalink
Update realtime (#57)
Browse files Browse the repository at this point in the history
* add argument for type of trials

* fix bug in nbackground

* docs fixes (#56)

* fix naming, add readthedocs config

* fix build err

* fix failing imports

* mistake in path

* fix input error

* avoid some latex issues

* change to 3 hours, rather than 2

* mark bad runs with no stop time as 0 livetime

* add warning to not include underscores

* fix syntax error

* fix glob missing last file

* find correct file names

* correct centertime for scan

* improve help message

* fix seed bug

* handle low pval rounding

* save sets of trials for large time windows

* load multiple files for background trials

* fix typo

* include prints
  • Loading branch information
jessiethw authored Apr 12, 2024
1 parent 1bc1eb8 commit fb59063
Show file tree
Hide file tree
Showing 13 changed files with 190 additions and 121 deletions.
28 changes: 22 additions & 6 deletions fast_response/FastResponseAnalysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -508,7 +508,11 @@ def plot_ontime(self, with_contour=False, contour_files=None, label_events=False
self.plot_skymap_zoom(with_contour=with_contour, contour_files=contour_files)
except Exception as e:
print('Failed to make skymap zoom plot')
self.plot_skymap(with_contour=with_contour, contour_files=contour_files, label_events=label_events)

try:
self.plot_skymap(with_contour=with_contour, contour_files=contour_files, label_events=label_events)
except Exception as e:
print('Failed to make FULL skymap plot')

def plot_skymap_zoom(self, with_contour=False, contour_files=None):
r"""Make a zoomed in portion of a skymap with
Expand Down Expand Up @@ -643,9 +647,9 @@ def plot_skymap(self, with_contour=False, contour_files=None, label_events=False
max_val = max(skymap)
min_val = min(skymap)
moll_cbar = True if self.skymap is not None else None

try:
hp.mollview(skymap, coord='C', cmap=cmap, rot=180, cbar=moll_cbar)
hp.mollview(skymap, coord='C', cmap=cmap, rot=180, cbar=moll_cbar) #cbar=None) #
except Exception as e:
if min_val<1.0e-16:
#for some reason, sometimes have an underflow issue here
Expand Down Expand Up @@ -719,11 +723,14 @@ def plot_skymap(self, with_contour=False, contour_files=None, label_events=False
handles.append(Line2D([0], [0], lw=2, c='k', label=r"Skymap (90\% cont.)"))
for i in range(1, len(theta)):
hp.projplot(theta[i], phi[i], linewidth=2., c='k')

# plt.title('Fast Response Skymap')

plt.title(self.name.replace('_', ' '))
plt.legend(loc=1, handles=handles)
plt.savefig(self.analysispath + '/' + self.analysisid + 'unblinded_skymap.png',bbox_inches='tight')
try:
plt.savefig(self.analysispath + '/' + self.analysisid + 'unblinded_skymap.png',bbox_inches='tight')
except:
plt.title('Fast Response Skymap')
plt.savefig(self.analysispath + '/' + self.analysisid + 'unblinded_skymap.png',bbox_inches='tight')
plt.close()

def generate_report(self):
Expand Down Expand Up @@ -877,6 +884,15 @@ def find_coincident_events(self):
exp_pix = hp.ang2pix(self.nside, exp_theta, exp_phi)
overlap = np.isin(exp_pix, self.ipix_90)
events = events[overlap]

# print nearby events, as a check (if needed)
# msk1 = (self.llh.exp[t_mask]['ra'] < (self.skymap_fit_ra+np.radians(5)))*(self.llh.exp[t_mask]['ra'] > (self.skymap_fit_ra-np.radians(5)))
# msk2 = (self.llh.exp[t_mask]['dec'] < (self.skymap_fit_dec+np.radians(5)))*((self.llh.exp[t_mask]['dec'] > self.skymap_fit_dec-np.radians(5)))
# msk3 = msk1*msk2
# print('Nearby events:')
# print("[run, event, ra, dec, sigma, logE, time]")
# for e in self.llh.exp[t_mask][msk3]: print([e[k] for k in ['run', 'event', 'ra', 'dec', 'sigma', 'logE', 'time']])

if len(events) == 0:
coincident_events = []
else:
Expand Down
51 changes: 31 additions & 20 deletions fast_response/GWFollowup.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
mpl.use('agg')
import matplotlib.pyplot as plt
import pickle
import glob

from .FastResponseAnalysis import PriorFollowup
from .reports import GravitationalWaveReport
Expand Down Expand Up @@ -77,27 +78,37 @@ def run_background_trials(self, month=None, ntrials=1000):
bg_trial_dir = '/data/ana/analyses/NuSources/' \
+ '2023_realtime_gw_analysis/fast_response/' \
+ 'precomputed_background/'

pre_ts_array = sparse.load_npz(
bg_trial_dir
+ 'gw_precomputed_trials_delta_t_'
+ '{:.2e}_trials_rate_{:.1f}_low_stats.npz'.format(
self.duration * 86400., closest_rate, self.duration * 86400.))

#check for nside mismatch
#if hp.pixelfunc.get_nside(pre_ts_array.shape[1]) != self.nside:
# print('Error! Analysis uses nside of %i '.format(self.nside)+
# 'while precomputed BG is nside %i'.format(hp.pixelfunc.get_nside(pre_ts_array.shape[1])))

ts_norm = np.log(np.amax(self.skymap))
ts_prior = pre_ts_array.copy()
ts_prior.data += 2.*(np.log(self.skymap[pre_ts_array.indices]) - ts_norm)
ts_prior.data[~np.isfinite(ts_prior.data)] = 0.
ts_prior.data[ts_prior.data < 0] = 0.
tsd = ts_prior.max(axis=1).A
tsd = np.array(tsd)
self.tsd = tsd
return tsd
files = glob.glob(bg_trial_dir +
'gw_precomputed_trials_delta_t_{:.2e}_trials_rate_{:.1f}_low_stats*.npz'.format(
self.duration * 86400., closest_rate))

i=0
for fi in files:
print('Loading {}/{} files for bkg trials'.format(i+1, len(files)), end='')
pre_ts_array = sparse.load_npz(fi)

#check for nside mismatch
#if hp.pixelfunc.get_nside(pre_ts_array.shape[1]) != self.nside:
# print('Error! Analysis uses nside of %i '.format(self.nside)+
# 'while precomputed BG is nside %i'.format(hp.pixelfunc.get_nside(pre_ts_array.shape[1])))

ts_norm = np.log(np.amax(self.skymap))
ts_prior = pre_ts_array.copy()
ts_prior.data += 2.*(np.log(self.skymap[pre_ts_array.indices]) - ts_norm)
ts_prior.data[~np.isfinite(ts_prior.data)] = 0.
ts_prior.data[ts_prior.data < 0] = 0.
tsd = ts_prior.max(axis=1).A
tsd = np.array(tsd)
if i ==0:
tss = tsd
else:
tss = np.concatenate([tss, tsd])
i+=1
print(' . . . Done')

self.tsd = tss
return tss

else:
#Case: load 1000s precomputed trials (run by Raamis in O3)
Expand Down
7 changes: 2 additions & 5 deletions fast_response/MonitoringAndMocks/Data_Display.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,14 +61,11 @@ def dial_up(who="jessie"):
all_dictionary['Time_Stamp'].append(c_datestamp)
print('Finished loading all latencies.')

#Setting conditions for call to be made if script fails. Condition: if more than 2 hours pass between alerts
if (Time(datetime.datetime.utcnow()).mjd - max(Time(all_dictionary["Time_Stamp"]).mjd)) > 3600.*2/86400:
#print(datetime.datetime.utcnow())
#print(max(all_dictionary["Time_Stamp"]))
#Setting conditions for call to be made if script fails. Condition: if more than 3 hours pass between alerts
if (Time(datetime.datetime.utcnow()).mjd - max(Time(all_dictionary["Time_Stamp"]).mjd)) > 3600.*3/86400:
dial_up()
x = (Time(datetime.datetime.utcnow()).mjd - max(Time(all_dictionary["Time_Stamp"]).mjd))*24
print("It has been " +str(x) +" hours since last update to gw mocks.")
#print("Uh oh... spaghetti-o's")

#Sorting pickle files by date created and by most correct version
if pwd.getpwuid(os.getuid())[0] == 'realtime':
Expand Down
82 changes: 44 additions & 38 deletions fast_response/precomputed_background/glob_precomputed_trials.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,63 +16,69 @@
help='directory for where trials are, will save npz to dir+/glob_trials/')
parser.add_argument('--nside',type=int, default=256,
help='nside used when running trials (default 256)')
parser.add_argument('--type', type=str, default='alert',
help='type of trials run (alert or gw)')
parser.add_argument('--mult', action='store_true', default=False,
help='Raise this flag to save sets of trials (rather than one large file)')
args = parser.parse_args()

def glob_allsky_scans(delta_t, rate, dir, low_stats=False):
def glob_allsky_scans(delta_t, rate, dir, low_stats=False, typ='alert', save_sets=False):
"""
Glob the all-sky scans together
"""
#n_per_job = {1000.: 1000, 172800.: 50, 2678400.: 30}
#jobs_per_window = {1000.: 20, 172800.: 100, 2678400.: 100}

files = glob(dir+'/gw_{:.1f}_mHz_delta_t_{:.1e}_seed_*.npz'.format(rate, delta_t))
if typ == 'gw':
files = sorted(glob(dir+'/gw_{:.1f}_mHz_delta_t_{:.1e}_seed_*.npz'.format(rate, delta_t)))
elif typ =='alert':
files = glob(dir+'/{:.1f}_mHz_seed_*_delta_t_{:.1e}.npz'.format(rate, delta_t))
else:
print('Error: type is not alert or gw')
return

nside = args.nside
print('Nside: {}'.format(args.nside))
npix = hp.nside2npix(nside)
maps = sparse.csr_matrix((0, npix), dtype=float)

#print len(files)
#return None
print('Found {} files to load'.format(len(files)))
if len(files)==0: return None

print('Starting to load at {}'.format(time.ctime()))
final_ind = -1 if not low_stats else 10
for f in files[:final_ind]:
scan = sparse.load_npz(f)
maps = sparse.vstack((maps, scan))

# Change format of sparse array
print("Starting to change from COO to CSR at {}".format(time.ctime()))
scans = maps.tocsr()
print("Finished at {}".format(time.ctime()))
nfiles = range(len(files)//20) if save_sets and low_stats else range(1)

# Save the sparse array
stats_str = '' if not low_stats else '_low_stats'
outfilename = 'gw_precomputed_trials_delta_t_{:.2e}_trials_rate_{:.1f}{}.npz'.format(delta_t, rate, stats_str)
save_dir = os.path.join(dir, 'glob_trials')
if not os.path.exists(save_dir):
os.mkdir(save_dir)
#scan_path = '/data/user/apizzuto/fast_response_skylab/fast-response/fast_response/precomputed_background/glob_trials/'
sparse.save_npz(os.path.join(save_dir,outfilename), scans)
for n in nfiles:
first_ind = n*20
maps = sparse.csr_matrix((0, npix), dtype=float)
final_ind = len(files) if not low_stats else 20 + first_ind
for f in files[first_ind:final_ind]:
scan = sparse.load_npz(f)
maps = sparse.vstack((maps, scan))

# Change format of sparse array
print("Starting to change from COO to CSR at {}".format(time.ctime()))
scans = maps.tocsr()
print("Finished at {}".format(time.ctime()))

# Save the sparse array
stats_str = '' if not low_stats else '_low_stats'
if save_sets: stats_str = stats_str + '_{}'.format(n)
outfilename = '{}_precomputed_trials_delta_t_{:.2e}_trials_rate_{:.1f}{}.npz'.format(
typ, delta_t, rate, stats_str)
save_dir = os.path.join(dir, 'glob_trials')

if not os.path.exists(save_dir):
os.mkdir(save_dir)
sparse.save_npz(os.path.join(save_dir,outfilename), scans)

return maps

for rate in [6.0, 6.2, 6.4, 6.6, 6.8, 7.0, 7.2]:
for low_stats in [True, False]:
for rate in [6.0, 6.8, 7.0, 7.2, 7.4]: # [6.2, 6.4, 6.6, 6.8, 7.0, 7.2, 7.4]:
for low_stats in [True]:# [True, False]:
print("Rate: {} mHz, low stats: {}".format(rate, low_stats))
maps = glob_allsky_scans(args.deltaT, rate, args.dir, low_stats=low_stats)
maps = glob_allsky_scans(args.deltaT, rate, args.dir, typ=args.type, low_stats=low_stats,
save_sets=args.mult)
del maps
print ('done')


#for rate in [6.2, 6.4, 6.6, 6.8, 7.0, 7.2]:
# for low_stats in [True, False]:
# print("Rate: {} mHz, low stats: {}".format(rate, low_stats))
# maps = glob_allsky_scans(1000., rate, low_stats=low_stats)
# del maps
# print ('')

#for rate in [7.0, 7.2]:
# for low_stats in [True]:
# print("Rate: {} mHz, low stats: {}".format(rate, low_stats))
# maps = glob_allsky_scans(2.*86400., rate, low_stats=low_stats)
# del maps
# print ('')
19 changes: 11 additions & 8 deletions fast_response/precomputed_background/precompute_ts.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,27 +41,30 @@

skymap_files = glob('/data/ana/realtime/alert_catalog_v2/2yr_prelim/fits_files/Run13*.fits.gz')

start_mjd = 58484.0 #Jan 1, 2019
#start_mjd = 58484.0 #Jan 1, 2019
start_mjd = 60298.0 #Dec 20, 2023
stop_mjd = start_mjd + (args.deltaT / 86400.)
start = Time(start_mjd, format='mjd').iso
stop = Time(stop_mjd, format='mjd').iso
start_iso = Time(start_mjd, format='mjd').iso
stop_iso = Time(stop_mjd, format='mjd').iso
deltaT = args.deltaT / 86400.

trials_per_sig = args.ntrials
seed_counter = args.seed

#skymap required for initialization, but not used here
f = AlertFollowup('Precompute_trials_test', skymap_files[0],
start, stop, save=False)
start_iso, stop_iso, save=False)
f.llh.nbackground=args.bkg*args.deltaT/1000.
#inj = f.initialize_injector(gamma=2.5) #just put this here to initialize f.spatial_prior
#print f.llh.nbackground
#results_array = []

ntrials = args.ntrials
stop = ntrials * (args.seed+1)
start = stop-ntrials
seed_counter = start

npix = hp.nside2npix(f.nside)
shape = (args.ntrials, npix)
maps = sparse.lil_matrix(shape, dtype=float)
for jj in range(trials_per_sig):
for jj in range(ntrials):
seed_counter += 1
val = f.llh.scan(0.0, 0.0, scramble=True, seed = seed_counter,
#spatial_prior = f.spatial_prior,
Expand Down
7 changes: 4 additions & 3 deletions fast_response/precomputed_background/precompute_ts_gw.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
p = argparse.ArgumentParser(description="Calculates Sensitivity and Discovery"
" Potential Fluxes for Background Gravitational wave/Neutrino Coincidence study",
formatter_class=argparse.RawTextHelpFormatter)
p.add_argument("--ntrials", default=1000, type=int,
p.add_argument("--ntrials", default=100, type=int,
help="Number of trials (default=1000")
p.add_argument("--seed", default=0, type=int,
help="Process ID to save unique numpy array after running (Default=0)")
Expand Down Expand Up @@ -110,7 +110,7 @@ def config_llh(self, scramble = True):

f.llh = config_llh(f)

f.llh.nbackground=args.bkg
f.llh.nbackground=args.bkg*args.deltaT/1000.
print('nside = {}'.format(f.nside))

ntrials = args.ntrials
Expand All @@ -126,7 +126,8 @@ def config_llh(self, scramble = True):
t1 = time.time()
val = f.llh.scan(0.0, 0.0, seed = seed_counter, scramble=True,
#spatial_prior = f.spatial_prior,
time_mask = [delta_t_days / 2., (start_mjd.mjd + stop_mjd.mjd) / 2.],
#time_mask = [delta_t_days / 2., gw_time.mjd],
time_mask = [f.duration/2., f.centertime],
pixel_scan = [f.nside, f._pixel_scan_nsigma], inject = None)

if val['TS'] is not None:
Expand Down
16 changes: 11 additions & 5 deletions fast_response/precomputed_background/submit_glob.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,15 @@
parser = argparse.ArgumentParser(
description='Submit script')
parser.add_argument(
'--tw', type=float, default=1000., #[-0.1, +14]day: 1218240
'--tw', type=float, default=1000., #[-0.1, +14]day: 1218240, +/-1day: 172800
help='time window to use (in sec)')
parser.add_argument(
'--dir', type=str, default='./',
help='directory where trials are loaded from, saves output to dir+/glob_trials/')
parser.add_argument('--nside',type=int, default=256,
help='nside used when running trials (default 256)')
parser.add_argument('--type', default='alert', type=str,
help='type of trial to load (gw or alert - default)')
args = parser.parse_args()

username = pwd.getpwuid(os.getuid())[0]
Expand All @@ -35,7 +37,7 @@

### Create Dagman to submit jobs to cluster
job = pycondor.Job(
'gw_precomp_trials',
'fra_glob_trials',
'/data/user/jthwaites/FastResponseAnalysis/fast_response/precomputed_background/glob_precomputed_trials.py',
error=error,
output=output,
Expand All @@ -44,14 +46,18 @@
getenv=True,
universe='vanilla',
verbose=2,
request_cpus=8,
request_memory=10000,
request_cpus=6,
request_memory=20000,
extra_lines=[
'should_transfer_files = YES',
'when_to_transfer_output = ON_EXIT']
)

job.add_arg('--deltaT {} --dir {} --nside {}'.format(args.tw, args.dir, args.nside))
if args.tw > 1000000.:
#save in sets, so that these are loadable
job.add_arg('--deltaT {} --dir {} --nside {} --type {} --mult'.format(args.tw, args.dir, args.nside, args.type))
else:
job.add_arg('--deltaT {} --dir {} --nside {} --type {}'.format(args.tw, args.dir, args.nside, args.type))

job.build_submit()

15 changes: 10 additions & 5 deletions fast_response/precomputed_background/submit_precomputed_trials.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
parser = argparse.ArgumentParser(
description='Submit script')
parser.add_argument(
'--deltaT', type=float, default=1000., #[-0.1, +14]day: 1218240
'--deltaT', type=float, default=1000., #[-0.1, +14]day: 1218240, +/-1day: 172800
help='time window to use (in sec)')
parser.add_argument(
'--ntrials', type=int, default=30000,
Expand Down Expand Up @@ -80,13 +80,18 @@
'when_to_transfer_output = ON_EXIT']
)

for bg_rate in [6.0, 6.2, 6.4, 6.6, 6.8, 7.0, 7.2]:
for seed in range(args.seed_start, int(args.ntrials/args.n_per_batch)+1):
for bg_rate in [6.0, 6.2, 6.4, 6.6, 6.8, 7.0, 7.2, 7.4]:
for seed in range(args.seed_start, int(args.ntrials/args.n_per_batch)):
if args.no_overwrite==1:
if args.type == 'gw':
filename = 'trials/gw_{:.1f}_mHz_delta_t_{:.1e}_seed_{}.npz'.format(bg_rate, deltaT, seed)
else:
filename = 'trials/{:.1f}_mHz_seed_{}_delta_t_{:.1e}.npz'.format(bg_rate, seed, deltaT)

import glob
already_run = glob.glob(os.path.join(args.outdir,
'trials/gw_{:.1f}_mHz_delta_t_{:.1e}_seed_{}.npz'.format(bg_rate, deltaT, seed)))
already_run = glob.glob(os.path.join(args.outdir, filename))
if len(already_run) != 0: continue

job.add_arg('--deltaT {} --ntrials {} --seed {} --bkg {} --outdir {}'.format(
deltaT, args.n_per_batch, seed, bg_rate, args.outdir))

Expand Down
Loading

0 comments on commit fb59063

Please sign in to comment.