Skip to content
This repository has been archived by the owner on Jun 5, 2024. It is now read-only.

Commit

Permalink
Strax interface (#19)
Browse files Browse the repository at this point in the history
* Updates for a strax plugin
* Updates
* Delete fast_simulator.py
* Delete settings.json
* Delete load_resource.py
* strax_interface cleanup
* Updates
* refactored file
* Added doc string, moved empty quanta removal from run_epix here
* Added EntryStart to options
* Added entry_start and n_simulated_events as return
* Separated ttree from loader, in case we want to read nveto data
* Removed strax interface
* Added offset based on number of simulated events
* Added event start to epix.loader, refactored timing, added sorting at
end
* Added JobId option, default is zero
* Changed source rate default to zero and updated help
* Added possibility to add no time delay to the events
* Added strax exit
* Fixed sorting
* Removed old init
* Addressed simple comments
* Fixed timings
* Added option to select by eventid
* Added support for eventid selection
* Small fixes
* fix cut_by_eventid flag

Co-authored-by: Peter Gaemers <[email protected]>
Co-authored-by: ramirezdiego <[email protected]>
  • Loading branch information
3 people authored Mar 24, 2021
1 parent e3cc08f commit 6f7ab39
Show file tree
Hide file tree
Showing 5 changed files with 335 additions and 246 deletions.
223 changes: 27 additions & 196 deletions bin/run_epix
Original file line number Diff line number Diff line change
@@ -1,236 +1,67 @@
#!/usr/bin/env python3

import os
import argparse
import time
import sys
import awkward as ak
import numpy as np
import pandas as pd
import warnings

import epix

def pars_args():
parser = argparse.ArgumentParser(description="Electron and Photon Instructions generator for XENON (wfsim)")
parser.add_argument('--InputFile', dest='input_file',
action='store', required=True,
help='Input Geant4 ROOT file')
help='Input Geant4 ROOT file.')
parser.add_argument('--Detector', dest='detector', type=str,
action='store', default='XENONnT',
help='Detector which should be used. Has to be defined in epix.detectors.')
parser.add_argument('--DetectorConfig', dest='detector_config', type=str,
parser.add_argument('--DetectorConfigOverride', dest='detector_config_override', type=str,
action='store', default='',
help='Config file to overwrite default detector settings.')
parser.add_argument('--CutOnEventid', dest='cut_by_eventid',
action='store_true', default=False,
help='If true eventy start/stop acts on eventid instead of rows.'),
parser.add_argument('--EntryStart', dest='entry_start', type=int,
action='store',
help='First event to be read. Defaulted is zero.'),
parser.add_argument('--EntryStop', dest='entry_stop', type=int,
action='store',
help='Number of entries to read from first. Defaulted to all')
help='Number of entries to read from first. Defaulted to all.')
parser.add_argument('--MicroSeparation', dest='micro_separation', type=float,
action='store', default=0.05,
help='Spatial resolution for DBSCAN micro-clustering [mm]')
help='Spatial resolution for DBSCAN micro-clustering [mm].')
parser.add_argument('--MicroSeparationTime', dest='micro_separation_time', type=float,
action='store', default=10,
help='Time resolution for DBSCAN micro-clustering [ns]')
help='Time resolution for DBSCAN micro-clustering [ns].')
parser.add_argument('--TagClusterBy', dest='tag_cluster_by', type=str,
action='store', default='time',
help=('Classification of the type of particle of a cluster, '
'based on most energetic contributor ("energy") or first '
'depositing particle ("time")'),
'depositing particle ("time").'),
choices={'time', 'energy'})
parser.add_argument('--MaxDelay', dest='max_delay', type=float,
action='store', default=1e7, #ns
help='Maximal time delay to first interaction which will be stored [ns]')
parser.add_argument('--EventRate', dest='event_rate', type=float,
action='store', default=-1,
help='Event rate for event separation. Use -1 for clean simulations'
'or give a rate >0 to space events randomly.')
help='Maximal time delay to first interaction which will be stored [ns].')
parser.add_argument('--SourceRate', dest='source_rate', type=float,
action='store', default=0,
help='Event rate for event separation. 0 (default) means no time shift is applied for the '
'different events. Use -1 for clean spacing or give a rate >0 to space events randomly.')
parser.add_argument('--JobNumber', dest='job_number', type=int,
action='store', default=0,
help='Job number in full chain simulation. Offset is computed as '
'"Job number * n_simulated_events/SourceRate", n_simulated_events '
'is read from file.')
parser.add_argument('--OutputPath', dest='output_path',
action='store', default="",
help=('Optional output path. If not specified the result will be saved'
'in the same dir as the input file.'))
parser.add_argument('--Debug', dest='debug',
action='store_true', default="",
help=('If specifed additional information is printed to the consol.')
action='store_true', default=False,
help=('If specifed additional information is printed to the console.')
)

args = parser.parse_args(sys.argv[1:])
return args


def main(args, return_df=False):
# TODO: remove setup from main for strax
path, file_name, detector, outer_cylinder = setup(args)

if args.debug:
print("epix configuration: ", args)
# TODO: also add memory information see starxer and change this to debug
# Getting time information:
starttime = time.time()
tnow = starttime

# Loading data:
inter = epix.loader(path, file_name, args.debug,
outer_cylinder=outer_cylinder,
kwargs_uproot_arrays={'entry_stop': args.entry_stop}
)

if args.debug:
tnow = monitor_time(tnow, 'load data.')
print((f'Finding clusters of interactions with a dr = {args.micro_separation} mm'
f' and dt = {args.micro_separation_time} ns'))

# Cluster finding and clustering:
inter = epix.find_cluster(inter, args.micro_separation/10, args.micro_separation_time)

if args.debug:
tnow = monitor_time(tnow, 'cluster finding.')

result = epix.cluster(inter, args.tag_cluster_by == 'energy')

if args.debug:
tnow = monitor_time(tnow, 'cluster merging.')

# Add eventid again:
result['evtid'] = ak.broadcast_arrays(inter['evtid'][:, 0], result['ed'])[0]

# Sort detector volumes and keep interactions in selected ones:
if args.debug:
print('Removing clusters not in volumes:', *[v.name for v in detector])
print(f'Number of clusters before: {np.sum(ak.num(result["ed"]))}')

# Returns all interactions which are inside in one of the volumes,
# Checks for volume overlap, assigns Xe density and create S2 to
# interactions. EField comes later since interpolated maps cannot be
# called inside numba functions.
res_det = epix.in_sensitive_volume(result, detector)

# Adding new fields to result:
for field in res_det.fields:
result[field] = res_det[field]
m = result['vol_id'] > 0 # All volumes have an id larger zero
result = result[m]

# Removing now empty events as a result of the selection above:
m = ak.num(result['ed']) > 0
result = result[m]

if args.debug:
print(f'Number of clusters after: {np.sum(ak.num(result["ed"]))}')
print('Assigning electric field to clusters')

if not ak.any(m):
# There are not any events left so return empty array:
warnings.warn('No interactions left, return empty DataFrame.')
if return_df:
if args.output_path and not os.path.isdir(args.output_path):
os.makedirs(args.output_path)

output_path_and_name = os.path.join(args.output_path, file_name[:-5] + "_wfsim_instructions.csv")
df = pd.DataFrame()
df.to_csv(output_path_and_name, index=False)
return

# Add electric field to array:
efields = np.zeros(np.sum(ak.num(result)), np.float32)
# Loop over volume and assign values:
for volume in detector:
if isinstance(volume.electric_field, (float, int)):
ids = epix.awkward_to_flat_numpy(result['vol_id'])
m = ids == volume.volume_id
efields[m] = volume.electric_field
else:
efields = volume.electric_field(epix.awkward_to_flat_numpy(result.x),
epix.awkward_to_flat_numpy(result.y),
epix.awkward_to_flat_numpy(result.z)
)

result['e_field'] = epix.reshape_awkward(efields, ak.num(result))

# Sort in time and set first cluster to t=0, then chop all delayed
# events which are too far away from the rest.
# (This is a requirement of WFSim)
result = result[ak.argsort(result['t'])]
result['t'] = result['t'] - result['t'][:, 0]
result = result[result['t'] <= args.max_delay]

#Separate event in time
number_of_events = len(result["t"])
if args.event_rate == -1:
dt = epix.times_for_clean_separation(number_of_events, args.max_delay)
if args.debug:
print('Clean event separation')
else:
dt = epix.times_from_fixed_rate(args.event_rate, number_of_events, args.max_delay)
if args.debug:
print(f'Fixed event rate of {args.event_rate} Hz')
result['t'] = result['t'][:, :] + dt

if args.debug:
print('Generating photons and electrons for events')
# Generate quanta:
photons, electrons = epix.quanta_from_NEST(epix.awkward_to_flat_numpy(result['ed']),
epix.awkward_to_flat_numpy(result['nestid']),
epix.awkward_to_flat_numpy(result['e_field']),
epix.awkward_to_flat_numpy(result['A']),
epix.awkward_to_flat_numpy(result['Z']),
epix.awkward_to_flat_numpy(result['create_S2']),
density=epix.awkward_to_flat_numpy(result['xe_density']))
result['photons'] = epix.reshape_awkward(photons, ak.num(result['ed']))
result['electrons'] = epix.reshape_awkward(electrons, ak.num(result['ed']))
if args.debug:
_ = monitor_time(tnow, 'get quanta.')

# Reshape instructions:
instructions = epix.awkward_to_wfsim_row_style(result)

# Remove entries with no quanta
instructions = instructions[instructions['amp'] > 0]
ins_df = pd.DataFrame(instructions)

if return_df:
if args.output_path and not os.path.isdir(args.output_path):
os.makedirs(args.output_path)

output_path_and_name = os.path.join(args.output_path, file_name[:-5] + "_wfsim_instructions.csv")
if os.path.isfile(output_path_and_name):
warnings.warn("Output file already exists - Overwriting")
ins_df.to_csv(output_path_and_name, index=False)

print('Done')
print('Instructions saved to ', output_path_and_name)
if args.debug:
_ = monitor_time(starttime, 'run epix.')


def monitor_time(prev_time, task):
t = time.time()
print(f'It took {(t - prev_time):.4f} sec to {task}')
return t


def setup(args):
"""
Function which sets-up configurations. (for strax conversion)
:return:
"""
# Getting file path and split it into directory and file name:
p = args.input_file
p = p.split('/')
if p[0] == "":
p[0] = "/"
path = os.path.join(*p[:-1])
file_name = p[-1]

# Init detector volume according to settings and get outer_cylinder
# for data reduction of non-relevant interactions.
detector = epix.init_detector(args.detector.lower(), args.detector_config)
outer_cylinder = getattr(epix.detectors, args.detector.lower())
_, outer_cylinder = outer_cylinder()
return path, file_name, detector, outer_cylinder


if __name__ == "__main__":
args = pars_args()
main(args, return_df=True)

args = vars(pars_args())
args = epix.run_epix.setup(args)
epix.run_epix.main(args, return_df=True)
3 changes: 2 additions & 1 deletion epix/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,5 @@
from .clustering import *
from .ElectricFieldHandler import *
from .event_separation import *
from .detectors import *
from .detectors import *
from .run_epix import *
14 changes: 8 additions & 6 deletions epix/event_separation.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ def times_for_clean_separation(n_events, MaxDelay):
Args:
n_events (int): Number of events
MaxDelay (float): Time difference between events. Should be large enought to
MaxDelay (float): Time difference between events. Should be large enough to
prevent event pile-up.
Returns:
Expand All @@ -18,28 +18,30 @@ def times_for_clean_separation(n_events, MaxDelay):

return dt

def times_from_fixed_rate(rate, n_events, offset):
def times_from_fixed_rate(rate, n_events, n_simulated_events, offset=0):
"""
Function to generate event times with a fixed rate.
The event times are drawn from a uniform distribution.
For higher rates pile-up is possible. The normalization
is achived by a variable overall simulation length in time.
is achieved by a variable overall simulation length in time.
!Rate normalization is only valid for one simualtion job!
Args:
rate (int or float): Mean event rate in Hz.
n_events (int): Number of events
n_events (int): Number of events in file
n_events_simulated (int): True number of events which were
simulated.
offset (int or float): Time offset to shift all event times to larger values.
Returns:
event_times (numpy.array): Array containing the start times of the events.
"""

simulation_time = n_events/rate
simulation_time = n_simulated_events/rate
simulation_time *= 1e9

event_times = np.sort(np.random.uniform(low=offset, high=simulation_time+offset, size=n_events))

return event_times
return event_times
Loading

0 comments on commit 6f7ab39

Please sign in to comment.