Updated scripts #76

Merged (2 commits, Sep 13, 2023)
25 changes: 22 additions & 3 deletions src/nectarchain/user_scripts/vmarandon/CalibrationData.py
@@ -13,12 +13,14 @@ class CalibrationCameraDisplay(CameraDisplay):

def __init__(self,*args, **kwargs):
super().__init__(*args,**kwargs)
self.clickfunc = None

def set_function(self,func_name):
self.clickfunc = func_name

def on_pixel_clicked(self, pix_id):
self.clickfunc(pix_id)
if self.clickfunc is not None:
self.clickfunc(pix_id)


class CalibInfo:
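With this change, `clickfunc` always exists (initialised to `None` in `__init__`) and `on_pixel_clicked` silently ignores clicks until a callback has been registered via `set_function`, instead of failing on a missing or unset attribute. A self-contained sketch of the pattern, using a stand-in class so it runs without ctapipe's `CameraDisplay` base:

```python
# Self-contained sketch of the callback guard added above; ClickDispatcher is a
# stand-in so the example runs without ctapipe's CameraDisplay base class.
class ClickDispatcher:
    def __init__(self):
        self.clickfunc = None            # attribute now always exists

    def set_function(self, func_name):
        self.clickfunc = func_name

    def on_pixel_clicked(self, pix_id):
        if self.clickfunc is not None:   # guard: ignore clicks until a callback is set
            self.clickfunc(pix_id)

d = ClickDispatcher()
d.on_pixel_clicked(7)                                      # ignored, no callback yet
d.set_function(lambda pid: print(f"pixel {pid} clicked"))
d.on_pixel_clicked(7)                                      # prints "pixel 7 clicked"
```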
@@ -156,8 +158,25 @@ def ShowPedestal(self):

plt.show()



class XYTableDataElement(TimedInfo):
'''Class to store waveforms for each position of the XY tables'''
def __init__(self,startTime=None, endTime=None,bloc=None):
super().__init__(startTime,endTime)
self.waveforms = None
self.masks = None
self.averaged_waveform = None
self.bloc_number = bloc

class XYTableDataIntegratedElement(TimedInfo):
'''Class to store integrated waveforms for each position of the XY tables'''
def __init__(self,startTime=None,endTime=None,bloc=None):
super().__init__(startTime,endTime)
self.pixels = None
self.times = None
self.pedestals = None
self.pedwidths = None
self.integrated = None
self.bloc_number = bloc


class FlatFieldInfo(TimedInfo):
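The two new XY-table containers only declare their fields, so a short sketch helps show how they are meant to be filled. Everything below is illustrative: `TimedInfo` is replaced by a minimal stand-in, and the array shapes and times are placeholders, not values from the PR.

```python
# Hypothetical filling of an XYTableDataElement; TimedInfo is a minimal stand-in so
# the sketch runs on its own, and the (events, pixels, samples) shape is a placeholder.
import numpy as np

class TimedInfo:                                      # stand-in for the real base class
    def __init__(self, startTime=None, endTime=None):
        self.startTime = startTime
        self.endTime = endTime

class XYTableDataElement(TimedInfo):                  # mirrors the class in the diff
    def __init__(self, startTime=None, endTime=None, bloc=None):
        super().__init__(startTime, endTime)
        self.waveforms = None
        self.masks = None
        self.averaged_waveform = None
        self.bloc_number = bloc

elem = XYTableDataElement(startTime=0.0, endTime=10.0, bloc=3)
elem.waveforms = np.random.poisson(5.0, size=(50, 1855, 60)).astype(float)
elem.masks = np.ones(elem.waveforms.shape, dtype=bool)
elem.averaged_waveform = elem.waveforms.mean(axis=0)  # average waveform per pixel
print(elem.averaged_waveform.shape)                   # (1855, 60)
```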
4 changes: 2 additions & 2 deletions src/nectarchain/user_scripts/vmarandon/DBHandler.py
@@ -96,14 +96,14 @@ def __get_selection_condition__(self,table):
#df1b = pd.read_sql(f"SELECT * FROM 'monitoring_drawer_temperatures' WHERE time>datetime({int(test_time.timestamp())}, 'unixepoch')", con=sqlite3.connect(db_url))
time_cond = ""
if self.time_start is not None:
if isinstance( self.time_start, datetime ):
if isinstance( self.time_start, datetime.datetime ):
time_cond = f" WHERE time >= datetime({self.time_start.timestamp()}, 'unixepoch') "
else:
print(f"WARNING> {self.time_start} of type {type(self.time_start)} is of a non handled type ==> Won't be used (please correct code)")


if self.time_end is not None:
if isinstance( self.time_end, datetime ):
if isinstance( self.time_end, datetime.datetime ):
link_word = " WHERE " if not time_cond else " AND "
time_cond = f" {link_word} time <= datetime({self.time_end.timestamp()}, 'unixepoch') "
else:
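These two changes matter because, under `import datetime` (which this fix implies DBHandler.py uses), the bare name `datetime` is the module and `isinstance()` rejects it with a `TypeError`; the class is `datetime.datetime`. A small standalone sketch:

```python
# Why the isinstance checks needed datetime.datetime: with "import datetime" the bare
# name is the module, which isinstance() refuses, while datetime.datetime is the class.
import datetime

time_start = datetime.datetime(2023, 9, 13, 12, 0, 0)

print(isinstance(time_start, datetime.datetime))       # True
try:
    isinstance(time_start, datetime)                    # module, not a type
except TypeError as err:
    print(f"TypeError: {err}")

# The SQLite condition built from it, as in __get_selection_condition__:
time_cond = f" WHERE time >= datetime({time_start.timestamp()}, 'unixepoch') "
print(time_cond)
```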
38 changes: 20 additions & 18 deletions src/nectarchain/user_scripts/vmarandon/DataUtils.py
@@ -108,7 +108,8 @@ def GetLongRunTimeEdges(run,path=None,event_type=None,delta_t_second=10.):
#print(nEvents)

data = DataReader(run,path=path)
data.Connect("trigger")
if not data.Connect("trigger"):
data = GetNectarCamEvents(run=run,path=path,applycalib=False)

times_edges = list()

@@ -119,26 +120,27 @@

# if there is a time gap of more than delta_t seconds, then we consider that this is the end of a data block
delta_t = TimeDelta(delta_t_second,format="sec")

for evt in tqdm(data,total=nEvents):
current_time = data.trigger.time

if time_start is None:
time_start = current_time

if evt.trigger.event_type == EventType.SKY_PEDESTAL:

if previous_time is None:
previous_time = current_time
try:
for evt in tqdm(data,total=nEvents):
current_time = evt.trigger.time

if current_time - previous_time > delta_t:
#print(f"time: {time} previous time: {previous_time} delta: {(time - previous_time).to_value('s')}")
#if (previous_time - time) > delta_t:
times_edges.append( (time_start,previous_time) )
if time_start is None:
time_start = current_time

if evt.trigger.event_type == event_type:

previous_time = current_time

if previous_time is None:
previous_time = current_time

if current_time - previous_time > delta_t:
#print(f"time: {time} previous time: {previous_time} delta: {(time - previous_time).to_value('s')}")
#if (previous_time - time) > delta_t:
times_edges.append( (time_start,previous_time) )
time_start = current_time

previous_time = current_time
except Exception as err:
print(f"Error while reading file: [{err}]")
times_edges.append( (time_start,current_time) )
# write the last time
#print(f"There is : {len(times_edges)} intervals")
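The reworked loop reads the time from `evt.trigger.time`, selects on the `event_type` argument instead of a hard-coded `SKY_PEDESTAL`, and still closes the last interval if reading fails. The core gap-splitting idea can be reproduced in isolation; the timestamps and the 10 s threshold below are made up for illustration, and only astropy is assumed:

```python
# Stand-alone sketch of the gap-splitting logic in GetLongRunTimeEdges: events are
# grouped into blocks, and a block is closed whenever two consecutive selected events
# are separated by more than delta_t.
from astropy.time import Time, TimeDelta

times = Time("2023-09-13T12:00:00") + TimeDelta([0, 1, 2, 30, 31, 70], format="sec")
delta_t = TimeDelta(10.0, format="sec")

times_edges = []
time_start = times[0]
previous_time = times[0]
for current_time in times:
    if current_time - previous_time > delta_t:
        times_edges.append((time_start, previous_time))   # close the current block
        time_start = current_time                         # start a new one
    previous_time = current_time
times_edges.append((time_start, previous_time))           # close the final block

for start, end in times_edges:
    print(start.isot, "->", end.isot)
# three blocks: 0-2 s, 30-31 s, 70-70 s after the start time
```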
29 changes: 23 additions & 6 deletions src/nectarchain/user_scripts/vmarandon/ExtractInformation2.py
@@ -16,6 +16,7 @@
from ctapipe.containers import EventType

from multiprocessing.dummy import Pool as ThreadPool
#from multiprocessing import Pool as ThreadPool



@@ -33,7 +34,7 @@

# data_path = FindDataPath(run,args.dataPath)

def ExtractInformationSingleRun(run,data_path,dest_path,data_block,applycalib=True,keepR1=True,nnint=False):
def ExtractInformationSingleRun(run,data_path,dest_path,data_block,applycalib=True,keepR1=True,nnint=False,onlytrigger=False):

sleep_time = random.uniform(0.,1.)
#print(sleep_time)
Expand Down Expand Up @@ -68,6 +69,9 @@ def ExtractInformationSingleRun(run,data_path,dest_path,data_block,applycalib=Tr

doIntegration = nnint

if onlytrigger:
doIntegration = False

for evt in tqdm(events):
#if data_block != 42 and data_block!=8:
# break
@@ -113,7 +117,10 @@ def ExtractInformationSingleRun(run,data_path,dest_path,data_block,applycalib=Tr

for k in evt.keys():
if k not in exclusion:
dd[k].dump( copy.deepcopy(getattr(evt,k)), time=event_time )
if onlytrigger and k!="trigger":
continue
else:
dd[k].dump( copy.deepcopy(getattr(evt,k)), time=event_time )

def TrueOrFalse(arg):
ua = str(arg).upper()
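With `--only-trigger`, the dump loop above writes only the `trigger` container and skips everything else. A self-contained sketch of that filter (the container names, the exclusion set, and the return list are stand-ins for the real event containers and `dd[k].dump(...)` writers):

```python
# Sketch of the per-container filter added to the dump loop: when onlytrigger is set,
# only the "trigger" container is kept. Names below are hypothetical stand-ins.
def select_containers(containers, exclusion, onlytrigger):
    kept = []
    for k in containers:
        if k in exclusion:
            continue                       # never dumped
        if onlytrigger and k != "trigger":
            continue                       # trigger-only mode: skip everything else
        kept.append(k)                     # the real code calls dd[k].dump(...) here
    return kept

containers = ["trigger", "r0", "r1", "mon"]
exclusion = {"count"}

print(select_containers(containers, exclusion, onlytrigger=False))  # ['trigger', 'r0', 'r1', 'mon']
print(select_containers(containers, exclusion, onlytrigger=True))   # ['trigger']
```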
@@ -140,6 +147,7 @@ def ExtractInformation(arglist):
p.add_argument("--keep-r1",dest='keepR1',type=str,default="True",help="Save the R1 data if True")
p.add_argument("--split",dest='split',action='store_true',help='Split the files per groups. 0-1 together in a file, 2-3 in another, etc... Need the ctapipe_io_nectarcam version compatible with ctapipe 0.18')
p.add_argument("--nnint",dest='nnint',action='store_true',help='Do an integration of the data using Next Neighbor Peak Search. At the moment hard coded to be 10 ns -4 and +6 ns after the max. Will create charge and TO data set')
p.add_argument("--only-trigger",dest='onlytrig',action='store_true',help='Extract only the trigger information to a file. Useful for big runs')

args = p.parse_args(arglist)
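The new `--only-trigger` switch is a plain store-true flag alongside the existing ones. A minimal sketch of how it parses; only the options visible in this hunk are reproduced, and the run/path options of the real script are omitted because they are not shown in the diff:

```python
# Minimal reproduction of the flags visible in this hunk; the real parser also takes
# run and path options that are not part of this diff and are omitted here.
import argparse

p = argparse.ArgumentParser()
p.add_argument("--keep-r1", dest="keepR1", type=str, default="True")
p.add_argument("--split", dest="split", action="store_true")
p.add_argument("--nnint", dest="nnint", action="store_true")
p.add_argument("--only-trigger", dest="onlytrig", action="store_true")

args = p.parse_args(["--split", "--only-trigger"])
print(args.split, args.nnint, args.onlytrig)   # True False True
```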

@@ -164,7 +172,7 @@ def ExtractInformation(arglist):
dest_path = args.destPath


if args.split:
if False and args.split:

runs = list()
paths = list()
@@ -173,6 +181,7 @@ def ExtractInformation(arglist):
calib = list()
keepR1 = list()
nnints = list()
trigonly = list()

for block in range(GetNumberOfDataBlocks(run,data_path)):
#for block in range(8):
@@ -184,21 +193,29 @@ def ExtractInformation(arglist):
calib.append(applyCalib)
keepR1.append( keepR1)
nnints.append(args.nnint)
trigonly.append(args.onlytrig)

# Make the Pool of workers
pool = ThreadPool(1)
pool = ThreadPool(4)

# Open the URLs in their own threads
# and return the results
results = pool.starmap(ExtractInformationSingleRun, zip(runs,paths,dest_paths,blocks,calib,keepR1,nnints) )
results = pool.starmap(ExtractInformationSingleRun, zip(runs,paths,dest_paths,blocks,calib,keepR1,nnints,trigonly) )

# Close the pool and wait for the work to finish
pool.close()
pool.join()
#ExtractInformationSingleRun(run,args.dataPath,args.destPath,data_block=block)

else:
ExtractInformationSingleRun(run=run,data_path=data_path,dest_path=dest_path,data_block=-1,applycalib=applyCalib,keepR1=keepR1,nnint=args.nnint)
if args.split:
nBlocks = GetNumberOfDataBlocks(run,data_path)
for block in range(nBlocks):
print(f'block: {block+1}/{nBlocks}')
ExtractInformationSingleRun(run=run,data_path=data_path,dest_path=dest_path,data_block=block,applycalib=applyCalib,keepR1=keepR1,nnint=args.nnint,onlytrigger=args.onlytrig)
else:
ExtractInformationSingleRun(run=run,data_path=data_path,dest_path=dest_path,data_block=-1,applycalib=applyCalib,keepR1=keepR1,nnint=args.nnint,onlytrigger=args.onlytrig)

#def ExtractInformationSingleRun(run,data_path,dest_path,data_block,applycalib=True,keepR1=True):
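Because the pool branch now sits behind `if False and args.split`, a `--split` run is handled by the new sequential loop rather than the `ThreadPool`. The contrast between the two paths, with a stand-in worker and placeholder run/block numbers:

```python
# The two execution paths after this change, with a stand-in worker: the
# ThreadPool/starmap branch is kept but short-circuited ("if False and args.split"),
# so blocks of a split run are now processed one after another.
from multiprocessing.dummy import Pool as ThreadPool

def process_block(run, block, onlytrigger):   # stand-in for ExtractInformationSingleRun
    return f"run {run} block {block} trigger-only={onlytrigger}"

run, n_blocks = 1234, 4                       # placeholder run number and block count
use_pool = False                              # mirrors "False and args.split"

if use_pool:                                  # disabled, parallel path
    with ThreadPool(4) as pool:
        results = pool.starmap(process_block, [(run, b, True) for b in range(n_blocks)])
else:                                         # active, sequential path
    results = [process_block(run, b, True) for b in range(n_blocks)]

print(results)
```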

