Merge branch 'main' into lineage_clustering
cfuselli authored May 30, 2024
2 parents ba07114 + d5dde95 commit ad7fa7e
Showing 5 changed files with 79 additions and 44 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -11,3 +11,4 @@ resource_cache
dist/*
docs/source/release_notes.rst
docs/source/plugins/*
.ipynb_checkpoints
6 changes: 2 additions & 4 deletions fuse/plugins/detector_physics/csv_input.py
@@ -36,7 +36,7 @@ class ChunkCsvInput(FuseBasePlugin):
"""Plugin which reads a CSV file containing instructions for the detector
physics simulation and returns the data in chunks."""

__version__ = "0.2.1"
__version__ = "0.2.2"

depends_on: Tuple = tuple()
provides = "microphysics_summary"
@@ -102,9 +102,7 @@ def compute(self):

self.source_done = source_done

return self.chunk(
start=chunk_left, end=chunk_right, data=data, data_type="geant4_interactions"
)
return self.chunk(start=chunk_left, end=chunk_right, data=data)

except StopIteration:
raise RuntimeError("Bug in chunk building!")
4 changes: 1 addition & 3 deletions fuse/plugins/micro_physics/input.py
@@ -127,9 +127,7 @@ def compute(self):

self.source_done = source_done

return self.chunk(
start=chunk_left, end=chunk_right, data=chunk_data, data_type="geant4_interactions"
)
return self.chunk(start=chunk_left, end=chunk_right, data=chunk_data)

except StopIteration:
raise RuntimeError("Bug in chunk building!")
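Both csv_input.py and input.py now return their chunks without an explicit data_type. The sketch below is a minimal, self-contained illustration of that pattern; ToyChunkSource and its chunk() method are hypothetical stand-ins for the fuse plugins and strax.Plugin.chunk(), which (judging by the removed lines) also accepts an optional data_type and presumably falls back to the plugin's single provides entry when it is omitted.

```python
import numpy as np

# Toy stand-in (hypothetical class, not the real fuse/strax plugins) for the
# chunk-building pattern simplified above: compute() pulls the next block of
# rows from an iterator, flags the source as exhausted once the last block has
# been handed out, and wraps the data in a chunk without naming the data_type.
class ToyChunkSource:
    def __init__(self, data, rows_per_chunk):
        self.data = data
        self.rows_per_chunk = rows_per_chunk
        self._starts = iter(range(0, len(data), rows_per_chunk))
        self.source_done = False

    def chunk(self, *, start, end, data):
        # Stand-in for strax.Plugin.chunk(); the real method also accepts an
        # optional data_type, which both plugins above no longer pass.
        return {"start": start, "end": end, "data": data}

    def compute(self):
        try:
            chunk_left = next(self._starts)
            chunk_right = min(chunk_left + self.rows_per_chunk, len(self.data))
            self.source_done = chunk_right >= len(self.data)
            return self.chunk(
                start=chunk_left, end=chunk_right, data=self.data[chunk_left:chunk_right]
            )
        except StopIteration:
            raise RuntimeError("Bug in chunk building!")


source = ToyChunkSource(np.arange(10), rows_per_chunk=4)
while not source.source_done:
    print(source.compute())
```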
39 changes: 18 additions & 21 deletions fuse/plugins/truth_information/cluster_tagging.py
@@ -10,25 +10,25 @@ class ClusterTagging(strax.Plugin):
"""Plugin to tag if clusters contribute to the main or alternative s1/s2 in
an event, or successfully reconstructed as s0/s1/s2 peaks."""

__version__ = "0.0.4"
__version__ = "0.0.5"

depends_on = ("microphysics_summary", "photon_summary", "peak_basics", "event_basics")
provides = "tagged_clusters"
data_kind = "interactions_in_roi"

dtype = [
# Tags by events
("in_main_s1", np.bool_),
("in_main_s2", np.bool_),
("in_alt_s1", np.bool_),
("in_alt_s2", np.bool_),
("photons_in_main_s1", np.int32),
("photons_in_main_s2", np.int32),
("photons_in_alt_s1", np.int32),
("photons_in_alt_s2", np.int32),
(("Cluster in the main S1 of an event", "in_main_s1"), np.bool_),
(("Cluster in the main S2 of an event", "in_main_s2"), np.bool_),
(("Cluster in the alternative S1 of an event", "in_alt_s1"), np.bool_),
(("Cluster in the alternative S2 of an event", "in_alt_s2"), np.bool_),
(("Number of photons in the main S1", "photons_in_main_s1"), np.int32),
(("Number of photons in the main S2", "photons_in_main_s2"), np.int32),
(("Number of photons in the alternative S1", "photons_in_alt_s1"), np.int32),
(("Number of photons in the alternative S2", "photons_in_alt_s2"), np.int32),
# Tags by S1 peaks
("has_s1", np.bool_),
("s1_tight_coincidence", np.int32),
(("Cluster results in an S1", "has_s1"), np.bool_),
(("S1 Channel within tight range of mean", "s1_tight_coincidence"), np.int32),
] + strax.time_fields

photon_finding_window = straxen.URLConfig(
@@ -71,26 +71,23 @@ def compute(self, interactions_in_roi, propagated_photons, peaks, events):
"alt_s2": "alt_s2_index",
}

for peak_name in peak_name_dict.keys():
peak_idx = event_i[peak_name_dict[peak_name]]
for peak_name_cluster, peak_name_event in peak_name_dict.items():
peak_idx = event_i[peak_name_event]
if peak_idx >= 0:
photons_of_peak = peak_photons[event_i[peak_name_dict[peak_name]]]
photons_of_peak = peak_photons[event_i[peak_name_event]]

mask = photons_of_peak["cluster_id"] > 0
contributing_clusters_of_peak, photons_in_peak = np.unique(
photons_of_peak["cluster_id"], return_counts=True
photons_of_peak["cluster_id"][mask], return_counts=True
)
photons_in_peak = photons_in_peak[contributing_clusters_of_peak > 0]
contributing_clusters_of_peak = contributing_clusters_of_peak[
contributing_clusters_of_peak > 0
]

matching_clusters = np.argwhere(
np.isin(interactions_in_roi["cluster_id"], contributing_clusters_of_peak)
)
result["in_" + peak_name][matching_clusters] = True
result["in_" + peak_name_cluster][matching_clusters] = True

for cluster_i, photons_i in zip(contributing_clusters_of_peak, photons_in_peak):
mask = interactions_in_roi["cluster_id"] == cluster_i
result["photons_in_" + peak_name][mask] = photons_i
result["photons_in_" + peak_name_cluster][mask] = photons_i

return result
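Two things change in cluster_tagging.py: each dtype field now carries a human-readable description through the ((description, name), type) convention, and the clusters contributing to a peak are found by masking cluster_id > 0 before np.unique rather than filtering the unique values and counts afterwards. A small numpy sketch with toy arrays (not the real fuse data) of both ideas:

```python
import numpy as np

# Field descriptions become numpy field titles when given as (description, name).
dtype = [
    (("Cluster in the main S1 of an event", "in_main_s1"), np.bool_),
    (("Number of photons in the main S1", "photons_in_main_s1"), np.int32),
]
result = np.zeros(3, dtype=dtype)
print(result.dtype.fields["in_main_s1"])  # (dtype('bool'), 0, 'Cluster in the main S1 of an event')

# Toy cluster ids; ids <= 0 stand for photons not tied to a physical cluster.
cluster_id = np.array([-1, 0, 2, 2, 3, 0, 2])

# Old approach: count everything, then drop the non-physical ids.
clusters_old, counts_old = np.unique(cluster_id, return_counts=True)
counts_old = counts_old[clusters_old > 0]
clusters_old = clusters_old[clusters_old > 0]

# New approach: mask first, then count. Same result, fewer entries to sort.
mask = cluster_id > 0
clusters_new, counts_new = np.unique(cluster_id[mask], return_counts=True)

assert np.array_equal(clusters_old, clusters_new)
assert np.array_equal(counts_old, counts_new)
```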
73 changes: 57 additions & 16 deletions fuse/plugins/truth_information/peak_truth.py
@@ -9,7 +9,7 @@

@export
class PeakTruth(strax.OverlapWindowPlugin):
__version__ = "0.0.5"
__version__ = "0.0.6"

depends_on = (
"photon_summary",
@@ -23,10 +23,38 @@ class PeakTruth(strax.OverlapWindowPlugin):
data_kind = "peaks"

dtype = [
("s1_photons_in_peak", np.int32),
("s2_photons_in_peak", np.int32),
("ap_photons_in_peak", np.int32),
("pi_photons_in_peak", np.int32),
(("Number of photons from S1 scintillation in the peak.", "s1_photons_in_peak"), np.int32),
(("Number of photons from S2 scintillation in the peak.", "s2_photons_in_peak"), np.int32),
(("Number of photons from PMT afterpulses in the peak.", "ap_photons_in_peak"), np.int32),
(("Number of photons from photoionization in the peak.", "pi_photons_in_peak"), np.int32),
(
(
"Number of photoelectrons from S1 scintillation in the peak.",
"s1_photoelectrons_in_peak",
),
np.int32,
),
(
(
"Number of photoelectrons from S2 scintillation in the peak.",
"s2_photoelectrons_in_peak",
),
np.int32,
),
(
(
"Number of photoelectrons from PMT afterpulses in the peak.",
"ap_photoelectrons_in_peak",
),
np.int32,
),
(
(
"Number of photoelectrons from photoionization in the peak.",
"pi_photoelectrons_in_peak",
),
np.int32,
),
("raw_area_truth", np.float32),
("observable_energy_truth", np.float32),
("number_of_contributing_clusters_s1", np.int16),
@@ -108,7 +136,7 @@ def compute(self, interactions_in_roi, propagated_photons, peaks):
result["time"] = peaks["time"]
result["endtime"] = peaks["endtime"]

photons_in_peaks = strax.split_by_containment(propagated_photons, peaks)
photons_in_peak = strax.split_by_containment(propagated_photons, peaks)

photon_type_dict = {
"s1": 1,
@@ -122,15 +150,33 @@ def compute(self, interactions_in_roi, propagated_photons, peaks):
photons_per_cluster_s1 = np.zeros(0, dtype=int)
photons_per_cluster_s2 = np.zeros(0, dtype=int)

photons = photons_in_peak[i]

for photon_type in photon_type_dict.keys():
photon_cut = photons_in_peaks[i]["photon_type"] == photon_type_dict[photon_type]
is_from_type = photons["photon_type"] == photon_type_dict[photon_type]
is_from_pi = (photons["cluster_id"] < 0) & (photons["photon_type"] == "s2")
has_dpe = photons["dpe"]

# For S1 S2 AP photons in peak, we want to exclude PI photons and PEs
# This is because we want to treat the PI as part of the bias
result[photon_type + "_photons_in_peak"][i] = np.sum(~is_from_pi & is_from_type)
result[photon_type + "_photoelectrons_in_peak"][i] = result[
photon_type + "_photons_in_peak"
][i]
result[photon_type + "_photoelectrons_in_peak"][i] += np.sum(
~is_from_pi & is_from_type & has_dpe
)

result[photon_type + "_photons_in_peak"][i] = np.sum(photon_cut)
# For PI photons they are generated following S2s.
if photon_type == "s2":
result["pi_photons_in_peak"][i] = np.sum(is_from_pi)
result["pi_photoelectrons_in_peak"][i] = (
np.sum(is_from_pi & has_dpe) + result["pi_photons_in_peak"][i]
)

unique_contributing_clusters, photons_per_cluster = np.unique(
photons_in_peaks[i][photon_cut]["cluster_id"], return_counts=True
photons[is_from_type]["cluster_id"], return_counts=True
)

if photon_type == "s1":
result["number_of_contributing_clusters_s1"][i] = np.sum(
unique_contributing_clusters != 0
@@ -151,10 +197,6 @@ def compute(self, interactions_in_roi, propagated_photons, peaks):
)
photons_per_cluster_s2 = photons_per_cluster

# Get the number of photons from delayed electrons
photon_cut &= photons_in_peaks[i]["cluster_id"] < 0
result["pi_photons_in_peak"][i] = np.sum(photon_cut)

if (result["s1_photons_in_peak"][i] + result["s2_photons_in_peak"][i]) > 0:
positions_to_evaluate = ["x", "y", "z", "x_obs", "y_obs", "z_obs"]

@@ -189,15 +231,14 @@ def compute(self, interactions_in_roi, propagated_photons, peaks):

# Calculate the raw area truth
# exclude PMT AP photons as well as photons from delayed electrons
masked_photons = photons_in_peaks[i]
masked_photons = photons_in_peak[i]
masked_photons = masked_photons[
(masked_photons["photon_type"] != 0) & (masked_photons["cluster_id"] > 0)
]

result["raw_area_truth"][i] = np.sum(
masked_photons["photon_gain"] / self.gains[masked_photons["channel"]]
)

return result


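peak_truth.py now books photons from photoionization (negative cluster_id) separately from the S1/S2/afterpulse photons and adds per-type photoelectron counts, where every photon flagged as a double photoelectron (dpe) contributes one extra photoelectron. A toy numpy sketch of that bookkeeping (hypothetical arrays and type codes, not the real propagated_photons dtype):

```python
import numpy as np

photons = np.zeros(6, dtype=[("photon_type", np.int8),
                             ("cluster_id", np.int32),
                             ("dpe", np.bool_)])
photons["photon_type"] = [2, 2, 2, 2, 1, 1]    # assumed toy codes: 1 = S1, 2 = S2
photons["cluster_id"] = [5, 5, -1, -2, 7, 7]   # negative ids: photoionization
photons["dpe"] = [True, False, True, False, False, True]

is_s2 = photons["photon_type"] == 2
is_from_pi = (photons["cluster_id"] < 0) & is_s2
has_dpe = photons["dpe"]

# Photons: PI photons are excluded from the S2 count and counted on their own.
s2_photons = np.sum(is_s2 & ~is_from_pi)
pi_photons = np.sum(is_from_pi)

# Photoelectrons: photon count plus one extra PE for every dpe-flagged photon.
s2_photoelectrons = s2_photons + np.sum(is_s2 & ~is_from_pi & has_dpe)
pi_photoelectrons = pi_photons + np.sum(is_from_pi & has_dpe)

print(s2_photons, s2_photoelectrons)  # 2 3
print(pi_photons, pi_photoelectrons)  # 2 3
```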