diff --git a/bin/run_epix b/bin/run_epix
index 2cddbc3..8cb3589 100755
--- a/bin/run_epix
+++ b/bin/run_epix
@@ -1,13 +1,7 @@
 #!/usr/bin/env python3
-import os
 import argparse
-import time
 import sys
-import awkward as ak
-import numpy as np
-import pandas as pd
-import warnings
 
 import epix
 
@@ -15,222 +9,59 @@
 def pars_args():
     parser = argparse.ArgumentParser(description="Electron and Photon Instructions generator for XENON (wfsim)")
     parser.add_argument('--InputFile', dest='input_file',
                         action='store', required=True,
-                        help='Input Geant4 ROOT file')
+                        help='Input Geant4 ROOT file.')
     parser.add_argument('--Detector', dest='detector', type=str,
                         action='store', default='XENONnT',
                         help='Detector which should be used. Has to be defined in epix.detectors.')
-    parser.add_argument('--DetectorConfig', dest='detector_config', type=str,
+    parser.add_argument('--DetectorConfigOverride', dest='detector_config_override', type=str,
                         action='store', default='',
                         help='Config file to overwrite default detector settings.')
+    parser.add_argument('--CutOnEventid', dest='cut_by_eventid',
+                        action='store_true', default=False,
+                        help='If true, event start/stop acts on eventid instead of rows.')
+    parser.add_argument('--EntryStart', dest='entry_start', type=int,
+                        action='store',
+                        help='First event to be read. Default is zero.')
     parser.add_argument('--EntryStop', dest='entry_stop', type=int,
                         action='store',
-                        help='Number of entries to read from first. Defaulted to all')
+                        help='Number of entries to read from first. Defaults to all.')
     parser.add_argument('--MicroSeparation', dest='micro_separation', type=float,
                         action='store', default=0.05,
-                        help='Spatial resolution for DBSCAN micro-clustering [mm]')
+                        help='Spatial resolution for DBSCAN micro-clustering [mm].')
     parser.add_argument('--MicroSeparationTime', dest='micro_separation_time', type=float,
                         action='store', default=10,
-                        help='Time resolution for DBSCAN micro-clustering [ns]')
+                        help='Time resolution for DBSCAN micro-clustering [ns].')
     parser.add_argument('--TagClusterBy', dest='tag_cluster_by', type=str,
                         action='store', default='time',
                         help=('Classification of the type of particle of a cluster, '
                               'based on most energetic contributor ("energy") or first '
-                              'depositing particle ("time")'),
+                              'depositing particle ("time").'),
                         choices={'time', 'energy'})
     parser.add_argument('--MaxDelay', dest='max_delay', type=float,
                         action='store', default=1e7,  #ns
-                        help='Maximal time delay to first interaction which will be stored [ns]')
-    parser.add_argument('--EventRate', dest='event_rate', type=float,
-                        action='store', default=-1,
-                        help='Event rate for event separation. Use -1 for clean simulations'
-                             'or give a rate >0 to space events randomly.')
+                        help='Maximal time delay to first interaction which will be stored [ns].')
+    parser.add_argument('--SourceRate', dest='source_rate', type=float,
+                        action='store', default=0,
+                        help='Event rate for event separation. 0 (default) means no time shift is applied for the '
+                             'different events. Use -1 for clean spacing or give a rate >0 to space events randomly.')
+    parser.add_argument('--JobNumber', dest='job_number', type=int,
+                        action='store', default=0,
+                        help='Job number in full chain simulation. Offset is computed as '
+                             '"Job number * n_simulated_events/SourceRate", n_simulated_events '
+                             'is read from file.')
     parser.add_argument('--OutputPath', dest='output_path', action='store',
                         default="",
                         help=('Optional output path. If not specified the result will be saved'
                               'in the same dir as the input file.'))
     parser.add_argument('--Debug', dest='debug',
-                        action='store_true', default="",
-                        help=('If specifed additional information is printed to the consol.')
+                        action='store_true', default=False,
+                        help=('If specified, additional information is printed to the console.')
                         )
 
     args = parser.parse_args(sys.argv[1:])
     return args
 
-
-def main(args, return_df=False):
-    # TODO: remove setup from main for strax
-    path, file_name, detector, outer_cylinder = setup(args)
-
-    if args.debug:
-        print("epix configuration: ", args)
-    # TODO: also add memory information see starxer and change this to debug
-    # Getting time information:
-    starttime = time.time()
-    tnow = starttime
-
-    # Loading data:
-    inter = epix.loader(path, file_name, args.debug,
-                        outer_cylinder=outer_cylinder,
-                        kwargs_uproot_arrays={'entry_stop': args.entry_stop}
-                        )
-
-    if args.debug:
-        tnow = monitor_time(tnow, 'load data.')
-        print((f'Finding clusters of interactions with a dr = {args.micro_separation} mm'
-               f' and dt = {args.micro_separation_time} ns'))
-
-    # Cluster finding and clustering:
-    inter = epix.find_cluster(inter, args.micro_separation/10, args.micro_separation_time)
-
-    if args.debug:
-        tnow = monitor_time(tnow, 'cluster finding.')
-
-    result = epix.cluster(inter, args.tag_cluster_by == 'energy')
-
-    if args.debug:
-        tnow = monitor_time(tnow, 'cluster merging.')
-
-    # Add eventid again:
-    result['evtid'] = ak.broadcast_arrays(inter['evtid'][:, 0], result['ed'])[0]
-
-    # Sort detector volumes and keep interactions in selected ones:
-    if args.debug:
-        print('Removing clusters not in volumes:', *[v.name for v in detector])
-        print(f'Number of clusters before: {np.sum(ak.num(result["ed"]))}')
-
-    # Returns all interactions which are inside in one of the volumes,
-    # Checks for volume overlap, assigns Xe density and create S2 to
-    # interactions. EField comes later since interpolated maps cannot be
-    # called inside numba functions.
-    res_det = epix.in_sensitive_volume(result, detector)
-
-    # Adding new fields to result:
-    for field in res_det.fields:
-        result[field] = res_det[field]
-    m = result['vol_id'] > 0  # All volumes have an id larger zero
-    result = result[m]
-
-    # Removing now empty events as a result of the selection above:
-    m = ak.num(result['ed']) > 0
-    result = result[m]
-
-    if args.debug:
-        print(f'Number of clusters after: {np.sum(ak.num(result["ed"]))}')
-        print('Assigning electric field to clusters')
-
-    if not ak.any(m):
-        # There are not any events left so return empty array:
-        warnings.warn('No interactions left, return empty DataFrame.')
-        if return_df:
-            if args.output_path and not os.path.isdir(args.output_path):
-                os.makedirs(args.output_path)
-
-            output_path_and_name = os.path.join(args.output_path, file_name[:-5] + "_wfsim_instructions.csv")
-            df = pd.DataFrame()
-            df.to_csv(output_path_and_name, index=False)
-        return
-
-    # Add electric field to array:
-    efields = np.zeros(np.sum(ak.num(result)), np.float32)
-    # Loop over volume and assign values:
-    for volume in detector:
-        if isinstance(volume.electric_field, (float, int)):
-            ids = epix.awkward_to_flat_numpy(result['vol_id'])
-            m = ids == volume.volume_id
-            efields[m] = volume.electric_field
-        else:
-            efields = volume.electric_field(epix.awkward_to_flat_numpy(result.x),
-                                            epix.awkward_to_flat_numpy(result.y),
-                                            epix.awkward_to_flat_numpy(result.z)
-                                            )
-
-    result['e_field'] = epix.reshape_awkward(efields, ak.num(result))
-
-    # Sort in time and set first cluster to t=0, then chop all delayed
-    # events which are too far away from the rest.
-    # (This is a requirement of WFSim)
-    result = result[ak.argsort(result['t'])]
-    result['t'] = result['t'] - result['t'][:, 0]
-    result = result[result['t'] <= args.max_delay]
-
-    #Separate event in time
-    number_of_events = len(result["t"])
-    if args.event_rate == -1:
-        dt = epix.times_for_clean_separation(number_of_events, args.max_delay)
-        if args.debug:
-            print('Clean event separation')
-    else:
-        dt = epix.times_from_fixed_rate(args.event_rate, number_of_events, args.max_delay)
-        if args.debug:
-            print(f'Fixed event rate of {args.event_rate} Hz')
-    result['t'] = result['t'][:, :] + dt
-
-    if args.debug:
-        print('Generating photons and electrons for events')
-    # Generate quanta:
-    photons, electrons = epix.quanta_from_NEST(epix.awkward_to_flat_numpy(result['ed']),
-                                               epix.awkward_to_flat_numpy(result['nestid']),
-                                               epix.awkward_to_flat_numpy(result['e_field']),
-                                               epix.awkward_to_flat_numpy(result['A']),
-                                               epix.awkward_to_flat_numpy(result['Z']),
-                                               epix.awkward_to_flat_numpy(result['create_S2']),
-                                               density=epix.awkward_to_flat_numpy(result['xe_density']))
-    result['photons'] = epix.reshape_awkward(photons, ak.num(result['ed']))
-    result['electrons'] = epix.reshape_awkward(electrons, ak.num(result['ed']))
-    if args.debug:
-        _ = monitor_time(tnow, 'get quanta.')
-
-    # Reshape instructions:
-    instructions = epix.awkward_to_wfsim_row_style(result)
-
-    # Remove entries with no quanta
-    instructions = instructions[instructions['amp'] > 0]
-    ins_df = pd.DataFrame(instructions)
-
-    if return_df:
-        if args.output_path and not os.path.isdir(args.output_path):
-            os.makedirs(args.output_path)
-
-        output_path_and_name = os.path.join(args.output_path, file_name[:-5] + "_wfsim_instructions.csv")
-        if os.path.isfile(output_path_and_name):
-            warnings.warn("Output file already exists - Overwriting")
-        ins_df.to_csv(output_path_and_name, index=False)
-
-        print('Done')
-        print('Instructions saved to ', output_path_and_name)
-        if args.debug:
-            _ = monitor_time(starttime, 'run epix.')
-
-
-def monitor_time(prev_time, task):
-    t = time.time()
-    print(f'It took {(t - prev_time):.4f} sec to {task}')
-    return t
-
-
-def setup(args):
-    """
-    Function which sets-up configurations. (for strax conversion)
-
-    :return:
-    """
-    # Getting file path and split it into directory and file name:
-    p = args.input_file
-    p = p.split('/')
-    if p[0] == "":
-        p[0] = "/"
-    path = os.path.join(*p[:-1])
-    file_name = p[-1]
-
-    # Init detector volume according to settings and get outer_cylinder
-    # for data reduction of non-relevant interactions.
-    detector = epix.init_detector(args.detector.lower(), args.detector_config)
-    outer_cylinder = getattr(epix.detectors, args.detector.lower())
-    _, outer_cylinder = outer_cylinder()
-    return path, file_name, detector, outer_cylinder
-
-
 if __name__ == "__main__":
-    args = pars_args()
-    main(args, return_df=True)
-
+    args = vars(pars_args())
+    args = epix.run_epix.setup(args)
+    epix.run_epix.main(args, return_df=True)
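With this change bin/run_epix is only a thin wrapper around the library code added in epix/run_epix.py below, so the same pipeline can also be driven from Python. A minimal sketch, assuming a hypothetical input file; the dict keys mirror the argparse destinations above:

    import epix

    args = {'input_file': '/data/mc/example_g4.root',  # hypothetical path
            'detector': 'XENONnT',
            'detector_config_override': '',
            'cut_by_eventid': False,
            'entry_start': None,
            'entry_stop': None,
            'micro_separation': 0.05,       # mm
            'micro_separation_time': 10,    # ns
            'tag_cluster_by': 'time',
            'max_delay': 1e7,               # ns
            'source_rate': 0,
            'job_number': 0,
            'output_path': '',
            'debug': True}
    args = epix.run_epix.setup(args)          # resolves path, detector_config, outer_cylinder
    epix.run_epix.main(args, return_df=True)  # writes *_wfsim_instructions.csv
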
""" - simulation_time = n_events/rate + simulation_time = n_simulated_events/rate simulation_time *= 1e9 event_times = np.sort(np.random.uniform(low=offset, high=simulation_time+offset, size=n_events)) - return event_times \ No newline at end of file + return event_times diff --git a/epix/io.py b/epix/io.py index b57a09b..8bfa7c8 100755 --- a/epix/io.py +++ b/epix/io.py @@ -4,6 +4,7 @@ import os import warnings +import wfsim import configparser from .common import awkward_to_flat_numpy, offset_range @@ -59,51 +60,66 @@ def load_config(config_file_path): return settings -def loader(directory, file_name, arg_debug=False, outer_cylinder=None, kwargs_uproot_arrays={}): +def loader(directory, + file_name, + arg_debug=False, + outer_cylinder=None, + kwargs_uproot_arrays={}, + cut_by_eventid=False, + ): """ Function which loads geant4 interactions from a root file via uproot4. - Beside loading the a simple data selection is performed. Units are + Besides loading, a simple data selection is performed. Units are already converted into strax conform values. mm -> cm and s -> ns. Args: directory (str): Directory in which the data is stored. file_name (str): File name - - Kwargs: - arg_debug: If true, print out loading information. - outer_cylinder: If specified will cut all events outside of the + arg_debug (bool): If true, print out loading information. + outer_cylinder (dict): If specified will cut all events outside of the given cylinder. - kwargs_uproot_arrays: Keyword arguments passed to .arrays of + kwargs_uproot_arrays (dict): Keyword arguments passed to .arrays of uproot4. + cut_by_eventid (bool): If true event start/stop are applied to + eventids, instead of rows. Returns: awkward1.records: Interactions (eventids, parameters, types). - - Note: - We process eventids and the rest of the data in two different - arrays due to different array structures. Also the type strings - are split off since they suck. All arrays are finally merged. + integer: Number of events simulated. """ - root_dir = uproot.open(os.path.join(directory, file_name)) - - if root_dir.classname_of('events') == 'TTree': - ttree = root_dir['events'] - elif root_dir.classname_of('events/events') == 'TTree': - ttree = root_dir['events/events'] - else: - ttrees = [] - for k, v in root_dir.classnames().items(): - if v == 'TTree': - ttrees.append(k) - raise ValueError(f'Cannot find ttree object of "{file_name}".' - 'I tried to search in events and events/events.' - f'Found a ttree in {ttrees}?') + ttree, n_simulated_events = _get_ttree(directory, file_name) + if arg_debug: print(f'Total entries in input file = {ttree.num_entries}') - if kwargs_uproot_arrays['entry_stop']!=None: - print(f'... 
diff --git a/epix/io.py b/epix/io.py
index b57a09b..8bfa7c8 100755
--- a/epix/io.py
+++ b/epix/io.py
@@ -4,6 +4,7 @@
 import os
 import warnings
 
+import wfsim
 import configparser
 
 from .common import awkward_to_flat_numpy, offset_range
@@ -59,51 +60,66 @@
     return settings
 
 
-def loader(directory, file_name, arg_debug=False, outer_cylinder=None, kwargs_uproot_arrays={}):
+def loader(directory,
+           file_name,
+           arg_debug=False,
+           outer_cylinder=None,
+           kwargs_uproot_arrays={},
+           cut_by_eventid=False,
+           ):
     """
     Function which loads geant4 interactions from a root file via
     uproot4.
-    Beside loading the a simple data selection is performed. Units are
+    Besides loading, a simple data selection is performed. Units are
     already converted into strax conform values. mm -> cm and s -> ns.
 
     Args:
         directory (str): Directory in which the data is stored.
         file_name (str): File name
-
-    Kwargs:
-        arg_debug: If true, print out loading information.
-        outer_cylinder: If specified will cut all events outside of the
+        arg_debug (bool): If true, print out loading information.
+        outer_cylinder (dict): If specified will cut all events outside of the
            given cylinder.
-        kwargs_uproot_arrays: Keyword arguments passed to .arrays of
+        kwargs_uproot_arrays (dict): Keyword arguments passed to .arrays of
            uproot4.
+        cut_by_eventid (bool): If true, event start/stop are applied to
+            eventids, instead of rows.
 
     Returns:
         awkward1.records: Interactions (eventids, parameters, types).
-
-    Note:
-        We process eventids and the rest of the data in two different
-        arrays due to different array structures. Also the type strings
-        are split off since they suck. All arrays are finally merged.
+        integer: Number of events simulated.
     """
-    root_dir = uproot.open(os.path.join(directory, file_name))
-
-    if root_dir.classname_of('events') == 'TTree':
-        ttree = root_dir['events']
-    elif root_dir.classname_of('events/events') == 'TTree':
-        ttree = root_dir['events/events']
-    else:
-        ttrees = []
-        for k, v in root_dir.classnames().items():
-            if v == 'TTree':
-                ttrees.append(k)
-        raise ValueError(f'Cannot find ttree object of "{file_name}".'
-                         'I tried to search in events and events/events.'
-                         f'Found a ttree in {ttrees}?')
+    ttree, n_simulated_events = _get_ttree(directory, file_name)
+
     if arg_debug:
         print(f'Total entries in input file = {ttree.num_entries}')
-        if kwargs_uproot_arrays['entry_stop']!=None:
-            print(f'... from which {kwargs_uproot_arrays["entry_stop"]} are read')
+        cutby_string = 'output file entry'
+        if cut_by_eventid:
+            cutby_string = 'g4 eventid'
+
+        if kwargs_uproot_arrays['entry_start'] is not None:
+            print(f'Starting to read from {cutby_string} {kwargs_uproot_arrays["entry_start"]}')
+        if kwargs_uproot_arrays['entry_stop'] is not None:
+            print(f'Ending read in at {cutby_string} {kwargs_uproot_arrays["entry_stop"]}')
+
+    # If user specified entry start/stop we have to update number of
+    # events for source rate computation:
+    if kwargs_uproot_arrays['entry_start'] is not None:
+        start = kwargs_uproot_arrays['entry_start']
+    else:
+        start = 0
+
+    if kwargs_uproot_arrays['entry_stop'] is not None:
+        stop = kwargs_uproot_arrays['entry_stop']
+    else:
+        stop = n_simulated_events
+    n_simulated_events = stop - start
+
+    if cut_by_eventid:
+        # Start/stop refers to eventid, so drop start/stop from the
+        # kwargs dict if specified; otherwise we would cut on rows again.
+        kwargs_uproot_arrays.pop('entry_start', None)
+        kwargs_uproot_arrays.pop('entry_stop', None)
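+        # Note: in this case the full file is read and the eventid
+        # selection is applied further below; entry_start/entry_stop
+        # only limit the rows uproot reads when cut_by_eventid is False.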
 
     # Columns to be read from the root_file:
     column_names = ["x", "y", "z", "t", "ed",
@@ -139,33 +155,63 @@
     # Removing all events with zero energy deposit
     m = interactions['ed'] > 0
+    if cut_by_eventid:
+        # ufunc does not work here...
+        m2 = (interactions['evtid'] >= start) & (interactions['evtid'] < stop)
+        m = m & m2
     interactions = interactions[m]
 
     # Removing all events with no interactions:
     m = ak.num(interactions['ed']) > 0
     interactions = interactions[m]
 
-    return interactions
+    return interactions, n_simulated_events
+
+
+def _get_ttree(directory, file_name):
+    """
+    Function which searches for the correct ttree in MC root file.
+
+    :param directory: Directory where file is
+    :param file_name: Name of the file
+    :return: root ttree and number of simulated events
+    """
+    root_dir = uproot.open(os.path.join(directory, file_name))
+
+    # Searching for TTree according to old/new MC file structure:
+    if root_dir.classname_of('events') == 'TTree':
+        ttree = root_dir['events']
+        n_simulated_events = root_dir['nEVENTS'].members['fVal']
+    elif root_dir.classname_of('events/events') == 'TTree':
+        ttree = root_dir['events/events']
+        n_simulated_events = root_dir['events/nbevents'].members['fVal']
+    else:
+        ttrees = []
+        for k, v in root_dir.classnames().items():
+            if v == 'TTree':
+                ttrees.append(k)
+        raise ValueError(f'Cannot find ttree object of "{file_name}".'
+                         'I tried to search in events and events/events.'
+                         f'Found a ttree in {ttrees}?')
+    return ttree, n_simulated_events
 
 
 # ----------------------
 # Outputing wfsim instructions:
 # ----------------------
-int_dtype = [(('Waveform simulator event number.', 'event_number'), np.int32),
-             (('Quanta type (S1 photons or S2 electrons)', 'type'), np.int8),
-             (('Time of the interaction [ns]', 'time'), np.int64),
-             (('X position of the cluster[cm]', 'x'), np.float32),
-             (('Y position of the cluster[cm]', 'y'), np.float32),
-             (('Z position of the cluster[cm]', 'z'), np.float32),
-             (('Number of quanta', 'amp'), np.int32),
-             (('Recoil type of interaction.', 'recoil'), np.int8),
-             (('Energy deposit of interaction', 'e_dep'), np.float32),
-             (('Eventid like in geant4 output rootfile', 'g4id'), np.int32),
-             (('Volume id giving the detector subvolume', 'vol_id'), np.int32)
-             ]
+int_dtype = wfsim.instruction_dtype
 
 
 def awkward_to_wfsim_row_style(interactions):
+    """
+    Converts awkward array instructions into instructions required by
+    WFSim.
+
+    :param interactions: awkward.Array containing GEANT4 simulation
+        information.
+    :return: Structured numpy.array. Each row represents either an S1
+        or an S2.
+    """
     ninteractions = np.sum(ak.num(interactions['ed']))
     res = np.zeros(2 * ninteractions, dtype=int_dtype)
 
@@ -190,6 +236,6 @@
         else:
             res['amp'][i::2] = awkward_to_flat_numpy(interactions['photons'])
 
-
-    #TODO: Add a function which generates a new event if interactions are too far apart
+    # Remove entries with no quanta
+    res = res[res['amp'] > 0]
     return res
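The reworked loader can also be called on its own. Note that it indexes both 'entry_start' and 'entry_stop' in kwargs_uproot_arrays, so both keys should be passed explicitly (None disables one). A minimal sketch with a hypothetical file:

    import epix

    interactions, n_simulated_events = epix.loader(
        '/data/mc', 'example_g4.root',   # hypothetical directory and file
        arg_debug=True,
        kwargs_uproot_arrays={'entry_start': 0, 'entry_stop': 1000},
        cut_by_eventid=True)             # 0 and 1000 refer to g4 eventids
    print(n_simulated_events)            # 1000, i.e. stop - start
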
diff --git a/epix/run_epix.py b/epix/run_epix.py
new file mode 100644
index 0000000..2393470
--- /dev/null
+++ b/epix/run_epix.py
@@ -0,0 +1,209 @@
+import os
+import time
+import awkward as ak
+import numpy as np
+import pandas as pd
+import warnings
+
+import epix
+
+
+def main(args, return_df=False, return_wfsim_instructions=False, strax=False):
+    """Call this function from the run_epix script"""
+
+    if args['debug']:
+        print("epix configuration: ", args)
+    # TODO: also add memory information (see straxer) and change this to debug
+    # Getting time information:
+    starttime = time.time()
+    tnow = starttime
+
+    # Loading data:
+    inter, n_simulated_events = epix.loader(args['path'],
+                                            args['file_name'],
+                                            args['debug'],
+                                            outer_cylinder=args['outer_cylinder'],
+                                            kwargs_uproot_arrays={'entry_start': args['entry_start'],
+                                                                  'entry_stop': args['entry_stop']},
+                                            cut_by_eventid=args['cut_by_eventid']
+                                            )
+
+    if args['debug']:
+        tnow = monitor_time(tnow, 'load data.')
+        print(f"Finding clusters of interactions with a dr = {args['micro_separation']} mm"
+              f" and dt = {args['micro_separation_time']} ns")
+
+    # Cluster finding and clustering (convert micro_separation mm -> cm):
+    inter = epix.find_cluster(inter, args['micro_separation']/10, args['micro_separation_time'])
+
+    if args['debug']:
+        tnow = monitor_time(tnow, 'find clusters.')
+
+    result = epix.cluster(inter, args['tag_cluster_by'] == 'energy')
+
+    if args['debug']:
+        tnow = monitor_time(tnow, 'merge clusters.')
+
+    # Add eventid again:
+    result['evtid'] = ak.broadcast_arrays(inter['evtid'][:, 0], result['ed'])[0]
+
+    # Sort detector volumes and keep interactions in selected ones:
+    if args['debug']:
+        print('Removing clusters not in volumes:', *[v.name for v in args['detector_config']])
+        print(f'Number of clusters before: {np.sum(ak.num(result["ed"]))}')
+
+    # Returns all interactions which are inside one of the volumes,
+    # checks for volume overlap, assigns Xe density and create_S2 to
+    # interactions. EField comes later since interpolated maps cannot be
+    # called inside numba functions.
+    res_det = epix.in_sensitive_volume(result, args['detector_config'])
+
+    # Adding new fields to result:
+    for field in res_det.fields:
+        result[field] = res_det[field]
+    m = result['vol_id'] > 0  # All volumes have an id larger zero
+    result = result[m]
+
+    # Removing now empty events as a result of the selection above:
+    m = ak.num(result['ed']) > 0
+    result = result[m]
+
+    if args['debug']:
+        print(f'Number of clusters after: {np.sum(ak.num(result["ed"]))}')
+        print('Assigning electric field to clusters')
+
+    if not ak.any(m):
+        # There are not any events left so return empty array:
+        #TODO: Add option for WFSim
+        warnings.warn('No interactions left, return empty DataFrame.')
+        if return_df:
+            if args['output_path'] and not os.path.isdir(args['output_path']):
+                os.makedirs(args['output_path'])
+
+            output_path_and_name = os.path.join(args['output_path'],
+                                                args['file_name'][:-5] + "_wfsim_instructions.csv")
+            df = pd.DataFrame()
+            df.to_csv(output_path_and_name, index=False)
+        return
+
+    # Add electric field to array:
+    efields = np.zeros(np.sum(ak.num(result)), np.float32)
+    # Loop over volume and assign values:
+    for volume in args['detector_config']:
+        if isinstance(volume.electric_field, (float, int)):
+            ids = epix.awkward_to_flat_numpy(result['vol_id'])
+            m = ids == volume.volume_id
+            efields[m] = volume.electric_field
+        else:
+            efields = volume.electric_field(epix.awkward_to_flat_numpy(result.x),
+                                            epix.awkward_to_flat_numpy(result.y),
+                                            epix.awkward_to_flat_numpy(result.z)
+                                            )
+
+    result['e_field'] = epix.reshape_awkward(efields, ak.num(result))
+
+    # Sort entries (within an event) by time, then chop all delayed
+    # events which are too far away from the rest.
+    # (This is a requirement of WFSim)
+    result = result[ak.argsort(result['t'])]
+    dt = result['t'] - result['t'][:, 0]
+    result = result[dt <= args['max_delay']]
+
+    if args['debug']:
+        print('Generating photons and electrons for events')
+    # Generate quanta:
+    photons, electrons = epix.quanta_from_NEST(epix.awkward_to_flat_numpy(result['ed']),
+                                               epix.awkward_to_flat_numpy(result['nestid']),
+                                               epix.awkward_to_flat_numpy(result['e_field']),
+                                               epix.awkward_to_flat_numpy(result['A']),
+                                               epix.awkward_to_flat_numpy(result['Z']),
+                                               epix.awkward_to_flat_numpy(result['create_S2']),
+                                               density=epix.awkward_to_flat_numpy(result['xe_density']))
+    result['photons'] = epix.reshape_awkward(photons, ak.num(result['ed']))
+    result['electrons'] = epix.reshape_awkward(electrons, ak.num(result['ed']))
+    if args['debug']:
+        _ = monitor_time(tnow, 'get quanta.')
+
+    # Separate events in time
+    number_of_events = len(result["t"])
+    if args['source_rate'] == -1:
+        # Only needed for a clean separation:
+        result['t'] = result['t'] - result['t'][:, 0]
+
+        dt = epix.times_for_clean_separation(number_of_events, args['max_delay'])
+        if args['debug']:
+            print('Clean event separation')
+    elif args['source_rate'] == 0:
+        # In case no delay should be applied we just add zeros
+        dt = np.zeros(number_of_events)
+    else:
+        # Rate offset computed based on the specified rate and job_id.
+        # Assumes all jobs were started with the same number of events.
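+        # (E.g. job_number=2, n_simulated_events=1000 and a source
+        # rate of 10 Hz shift this job's window by 2*1000/10 = 200 s.)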
+        offset = (args['job_number']*n_simulated_events)/args['source_rate'] * 1e9  # s -> ns, event times are in ns
+        dt = epix.times_from_fixed_rate(args['source_rate'],
+                                        number_of_events,
+                                        n_simulated_events,
+                                        offset
+                                        )
+        if args['debug']:
+            print(f"Fixed event rate of {args['source_rate']} Hz")
+
+    result['t'] = result['t'][:, :] + dt
+
+    # Reshape instructions:
+    instructions = epix.awkward_to_wfsim_row_style(result)
+    if args['source_rate'] != 0:
+        # Only sort by time again if source rates were applied, otherwise
+        # things are already sorted within the events and should stay this way.
+        instructions = np.sort(instructions, order='time')
+
+    ins_df = pd.DataFrame(instructions)
+
+    if return_df:
+        if args['output_path'] and not os.path.isdir(args['output_path']):
+            os.makedirs(args['output_path'])
+
+        output_path_and_name = os.path.join(args['output_path'], args['file_name'][:-5] + "_wfsim_instructions.csv")
+        if os.path.isfile(output_path_and_name):
+            warnings.warn("Output file already exists - Overwriting")
+        ins_df.to_csv(output_path_and_name, index=False)
+
+        print('Done')
+        print('Instructions saved to ', output_path_and_name)
+    if args['debug']:
+        _ = monitor_time(starttime, 'run epix.')
+
+    if return_wfsim_instructions:
+        return instructions
+
+
+def monitor_time(prev_time, task):
+    t = time.time()
+    print(f'It took {(t - prev_time):.4f} sec to {task}')
+    return t
+
+
+def setup(args):
+    """
+    Function which sets up configurations (for strax conversion).
+    Is returning the dict like this necessary?
+    :return:
+    """
+    # Getting file path and split it into directory and file name:
+    if not ('path' in args and 'file_name' in args):
+        p = args['input_file']
+        if '/' in p:
+            p = p.split('/')
+        else:
+            p = [".", p]  # no directory given -> use the current working directory
+        if p[0] == "":
+            p[0] = "/"
+        args['path'] = os.path.join(*p[:-1])
+        args['file_name'] = p[-1]
+
+    # Init detector volume according to settings and get outer_cylinder
+    # for data reduction of non-relevant interactions.
+    args['detector_config'] = epix.init_detector(args['detector'].lower(), args['detector_config_override'])
+    outer_cylinder = getattr(epix.detectors, args['detector'].lower())
+    _, args['outer_cylinder'] = outer_cylinder()
+    return args
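Reusing the hypothetical args dict from the sketch after bin/run_epix, the new keyword arguments of main() also allow keeping the result in memory instead of writing a CSV, e.g. for handing it to WFSim directly:

    instructions = epix.run_epix.main(args, return_wfsim_instructions=True)
    # Structured numpy array with wfsim.instruction_dtype fields:
    print(instructions[['time', 'type', 'amp']][:5])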