From 80185ab99cfd57b8c0596b3c836dba32fc3cf72a Mon Sep 17 00:00:00 2001
From: samuel fogarty
Date: Fri, 2 Jun 2023 16:36:17 +0200
Subject: [PATCH] Add various modifications to support updated input config,
 plus small additions/improvements

---
 reco/__pycache__/build_events.cpython-39.pyc  | Bin 6747 -> 4963 bytes
 reco/__pycache__/calibrate.cpython-39.pyc     | Bin 3201 -> 2829 bytes
 reco/__pycache__/consts.cpython-39.pyc        | Bin 864 -> 450 bytes
 reco/__pycache__/preclustering.cpython-39.pyc | Bin 956 -> 1124 bytes
 reco/build_events.py                          |  16 +-
 reco/calibrate.py                             |  53 ++----
 reco/light.py                                 |  42 +++--
 reco/matching.py                              |  21 ++-
 reco/reco.py                                  | 162 +++++++++---------
 9 files changed, 135 insertions(+), 159 deletions(-)

diff --git a/reco/__pycache__/build_events.cpython-39.pyc b/reco/__pycache__/build_events.cpython-39.pyc
index 394ccb0eae0fc31ac6cef94f69b1af5007e53223..1382973893d10e59b04548585bc263fb88dbce50 100644
Binary files a/reco/__pycache__/build_events.cpython-39.pyc and b/reco/__pycache__/build_events.cpython-39.pyc differ
diff --git a/reco/__pycache__/calibrate.cpython-39.pyc b/reco/__pycache__/calibrate.cpython-39.pyc
Binary files a/reco/__pycache__/calibrate.cpython-39.pyc and b/reco/__pycache__/calibrate.cpython-39.pyc differ
diff --git a/reco/__pycache__/consts.cpython-39.pyc b/reco/__pycache__/consts.cpython-39.pyc
Binary files a/reco/__pycache__/consts.cpython-39.pyc and b/reco/__pycache__/consts.cpython-39.pyc differ
diff --git a/reco/__pycache__/preclustering.cpython-39.pyc b/reco/__pycache__/preclustering.cpython-39.pyc
index 0448285a087eebed3923429d01817594d6eabb38..2bf076056f37bc2afa3398878f5571781069e7e8 100644
Binary files a/reco/__pycache__/preclustering.cpython-39.pyc and b/reco/__pycache__/preclustering.cpython-39.pyc differ
zUx3x_W9%#N0zPx$#49jP5sdZCd^4Zz@z1mO-<^+YwONAp+Vl8dI-XyHV7$G#z_@)+ z&KY;Oi_somK0$91!16&Wxs;I{`1_sgCqGh|jAG$`PeoHEH)7|9kVTe89lx9LK#F$e zr{b3=RM^^+ht1YE|2<9w)NgHUTR5gU%)u5&bB?xU@^^DFn&Lj9_=ipr4ojp&aJocB zR!l}rfube3gF9%Fi{B%Acv51{trb$*2*TM1Gx@aUAUj5Sh2qF5*`rljm4x?2$>Cn* zXI4ne{s53u(%S@L#^+oxaLKT`KygccPPS_uuyQ&D2wA_a2n<0XfI2v^LFFn|8_ok;cdKWYtCN7$=gfW{euhUq`wjNb Bdb$7r diff --git a/reco/build_events.py b/reco/build_events.py index aa5d89f..16b6b3c 100644 --- a/reco/build_events.py +++ b/reco/build_events.py @@ -6,7 +6,6 @@ from calibrate import * from preclustering import * import matplotlib.pyplot as plt -import scipy.stats import h5py def cluster_packets(eps,min_samples,txyz): @@ -19,14 +18,15 @@ def cluster_packets(eps,min_samples,txyz): def getEventIDs(txyz, mc_assn, tracks, event_ids): for i in range(len(txyz)): index = int(mc_assn[i][0][0]) + tracks_index = tracks[index] try: - event_id = tracks[index]['eventID'] + event_id = tracks_index['eventID'] except: - event_id = tracks[index]['spillID'] + event_id = tracks_index['spillID'] event_ids[i] = event_id def build_charge_events_clusters(labels,dataword,txyz,v_ref,v_cm,v_ped,gain,unix,io_group,unique_ids,event_dtype,\ - hits_size, hits_dtype,second,mc_assn,tracks): + hits_size, hits_dtype, second, mc_assn, tracks): ### Make hits and clusters datasets from DBSCAN clusters and corresponding hits # Inputs: # labels: list of labels from DBSCAN @@ -122,7 +122,7 @@ def build_charge_events_clusters(labels,dataword,txyz,v_ref,v_cm,v_ped,gain,unix results['light_index'] = np.ones(len(n_vals), dtype='i4')*-1 return results, hits -def analysis(packets,pixel_xy,mc_assn,tracks,detector,hits_clusters_max_cindex,sec): +def analysis(packets,pixel_xy,mc_assn,tracks,module,hits_clusters_max_cindex,sec): ## do charge reconstruction packet_type = packets['packet_type'] pkt_7_mask = packet_type == 7 @@ -130,7 +130,7 @@ def analysis(packets,pixel_xy,mc_assn,tracks,detector,hits_clusters_max_cindex,s pkt_0_mask = packet_type == 0 # grab the PPS timestamps of pkt type 7s and correct for PACMAN clock drift - PPS_pt7 = PACMAN_drift(packets, detector,mc_assn)[pkt_7_mask].astype('i8')*1e-1*1e3 # ns + PPS_pt7 = PACMAN_drift(packets, module)[pkt_7_mask].astype('i8')*1e-1*1e3 # ns # assign a unix timestamp to each packet based on the timestamp of the previous packet type 4 timestamps = packets['timestamp'].astype('i8') @@ -142,14 +142,14 @@ def analysis(packets,pixel_xy,mc_assn,tracks,detector,hits_clusters_max_cindex,s unix = unix_timestamps[pkt_0_mask].astype('i8') # apply a few PPS timestamp corrections, and select only data packets for analysis - ts, packets, mc_assn, unix = timestamp_corrector(packets, mc_assn, unix, detector) + ts, packets, mc_assn, unix = timestamp_corrector(packets, mc_assn, unix, module) dataword = packets['dataword'] io_group = packets['io_group'] # zip up y, z, and t values for clustering txyz = zip_pixel_tyz(packets,ts, pixel_xy) - v_ped, v_cm, v_ref, gain, unique_ids = calibrations(packets, mc_assn, detector) + v_ped, v_cm, v_ref, gain, unique_ids = calibrations(packets, mc_assn, module) # cluster packets to find track-like charge events db = cluster_packets(eps, min_samples, txyz) diff --git a/reco/calibrate.py b/reco/calibrate.py index 6716cab..b6e991e 100644 --- a/reco/calibrate.py +++ b/reco/calibrate.py @@ -3,13 +3,13 @@ from collections import defaultdict from consts import * -def calibrations(packets, mc_assn, detector): +def calibrations(packets, mc_assn, module): # unique id for each pixel, not to be confused with larnd-sim's pixel 
     unique_ids = ((((packets['io_group'].astype(int)) * 256 \
         + packets['io_channel'].astype(int)) * 256 \
         + packets['chip_id'].astype(int)) * 64 \
         + packets['channel_id'].astype(int)).astype(str)
-    v_ped, v_cm, v_ref, gain = pedestal_and_config(unique_ids, mc_assn, detector)
+    v_ped, v_cm, v_ref, gain = pedestal_and_config(unique_ids, mc_assn, module)
     return v_ped, v_cm, v_ref, gain, unique_ids
 
 def adcs_to_ke(adcs, v_ref, v_cm, v_ped, gain):
@@ -22,32 +22,17 @@ def adcs_to_ke(adcs, v_ref, v_cm, v_ped, gain):
     charge = (adcs.astype('float64')/float(ADC_COUNTS)*(v_ref - v_cm)+v_cm-v_ped)/gain * 1e-3
     return charge
 
-def PACMAN_drift(packets, detector, mc_assn):
-    # only supports module-0
+def PACMAN_drift(packets, module):
+    # clock-drift corrections are taken from the input config module
     ts = packets['timestamp'].astype('i8')
-    if detector == 'module-0':
-        correction1 = [-9.597, 4.0021e-6]
-        correction2 = [-9.329, 1.1770e-6]
-    elif detector == 'module-1':
-        correction1 = [0.,0.]
-        correction2 = [0., 0.]
-    elif detector == 'module-2':
-        correction1 = [0.,0.]
-        correction2 = [0., 0.]
-    elif detector == 'module-3':
-        # assuming correction[0] is 0, certainly isn't exactly true
-        correction1 = [0., 3.267e-6]
-        correction2 = [0., -8.9467e-7]
-    if mc_assn is not None:
-        correction1 = [0.,0.]
-        correction2 = [0., 0.]
+
     mask_io1 = packets['io_group'] == 1
     mask_io2 = packets['io_group'] == 2
-    ts[mask_io1] = (packets[mask_io1]['timestamp'].astype('i8') - correction1[0]) / (1. + correction1[1])
-    ts[mask_io2] = (packets[mask_io2]['timestamp'].astype('i8') - correction2[0]) / (1. + correction2[1])
+    ts[mask_io1] = (packets[mask_io1]['timestamp'].astype('i8') - module.PACMAN_clock_correction1[0]) / (1. + module.PACMAN_clock_correction1[1])
+    ts[mask_io2] = (packets[mask_io2]['timestamp'].astype('i8') - module.PACMAN_clock_correction2[0]) / (1. + module.PACMAN_clock_correction2[1])
     return ts
 
-def timestamp_corrector(packets, mc_assn, unix, detector):
+def timestamp_corrector(packets, mc_assn, unix, module):
     # Corrects larpix clock timestamps due to slightly different PACMAN clock frequencies
     # (from module0_flow timestamp_corrector.py)
     ts = packets['timestamp'].astype('i8')
@@ -57,7 +42,7 @@ def timestamp_corrector(packets, mc_assn, unix, detector):
     if mc_assn is not None:
         mc_assn = mc_assn[packet_type_0]
 
-    if mc_assn is None and timestamp_cut:
+    if mc_assn is None and module.timestamp_cut:
         # cut needed due to noisy packets too close to PPS pulse
         # (unless this problem has been fixed in the hardware)
         timestamps = packets['timestamp']
@@ -65,12 +50,12 @@ def timestamp_corrector(packets, mc_assn, unix, detector):
         ts = ts[timestamp_data_cut]
         packets = packets[timestamp_data_cut]
         unix = unix[timestamp_data_cut]
-    if mc_assn is None and PACMAN_clock_correction:
-        ts = PACMAN_drift(packets, detector, mc_assn).astype('i8')
+    if mc_assn is None and module.PACMAN_clock_correction:
+        ts = PACMAN_drift(packets, module).astype('i8')
 
     return ts, packets, mc_assn, unix
 
-def pedestal_and_config(unique_ids, mc_assn, detector):
+def pedestal_and_config(unique_ids, mc_assn, module):
     # function to open the pedestal and configuration files to get the dictionaries
     # for the values of v_ped, v_cm, and v_ref for individual pixels.
     # Values are fixed in simulation but vary in data depending on pixel.
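Note: the per-detector calibration constants that calibrate.py used to hardcode now have to be supplied by the input config module. A minimal sketch of the corresponding block of such a config, reusing the module-0 numbers from the PACMAN branches deleted above and the module-0 pedestal/config paths deleted in the next hunk (attribute names are exactly the ones calibrate.py reads; the flag values are illustrative):

    # calibration block of an input config module (sketch; module-0 values
    # copied from the hardcoded branches this patch removes)
    timestamp_cut = True                            # drop noisy packets near the PPS pulse
    PACMAN_clock_correction = True                  # apply PACMAN_drift() to data
    PACMAN_clock_correction1 = [-9.597, 4.0021e-6]  # io_group 1: [offset, slope]
    PACMAN_clock_correction2 = [-9.329, 1.1770e-6]  # io_group 2: [offset, slope]
    use_ped_config_files = True
    pedestal_file = 'pedestal/module-0/datalog_2021_04_02_19_00_46_CESTevd_ped.json'
    config_file = 'config/module-0/evd_config_21-03-31_12-36-13.json'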
@@ -82,18 +67,6 @@ def pedestal_and_config(unique_ids, mc_assn, detector): # note: mc_truth information for simulation (None for data) # Returns: # v_ped, v_cm, v_ref, gain arrays; size of packets dataset - if detector == 'module-0' and use_ped_config_files: - pedestal_file = 'pedestal/module-0/datalog_2021_04_02_19_00_46_CESTevd_ped.json' - config_file = 'config/module-0/evd_config_21-03-31_12-36-13.json' - elif detector == 'module-1' and use_ped_config_files: - pedestal_file = 'pedestal/module-1/packet_2022_02_08_01_40_31_CETevd_ped.json' - config_file = 'config/module-1/config_22-02-08_13-37-39.json' - elif detector == 'module-2' and use_ped_config_files: - pedestal_file = 'pedestal/module-2/ped-evd-2022_11_18_04_35_CET.json' - config_file = 'config/module-2/config-evd-2022_11_18_04_35_CET.json' - elif detector == 'module-3' and use_ped_config_files: - pedestal_file = 'pedestal/module-3/pedestal-diagnostic-packet-2023_01_28_22_33_CETevd_ped.json' - config_file = 'config/module-3/evd_config_23-01-29_11-12-16.json' config_dict = defaultdict(lambda: dict( vref_mv=1300, @@ -102,7 +75,9 @@ def pedestal_and_config(unique_ids, mc_assn, detector): pedestal_dict = defaultdict(lambda: dict( pedestal_mv=580 )) - if use_ped_config_files: + if module.use_ped_config_files: + pedestal_file = module.pedestal_file + config_file = module.config_file # reading the data from the file with open(pedestal_file,'r') as infile: for key, value in json.load(infile).items(): diff --git a/reco/light.py b/reco/light.py index 6cc6e68..5c4775b 100644 --- a/reco/light.py +++ b/reco/light.py @@ -4,27 +4,23 @@ from tqdm import tqdm from consts import * -def ADC_drift(timestamps, adc, detector): +def ADC_drift(timestamps, adc, module): ## correct for the clock drift in the different ADCs # input: tai_ns - if detector == 'module-0': - slope_0 = -1.18e-7 # sn 175780172 (#314C) - slope_1 = 1.18e-7 # sn 175854781 (#54BD) - elif detector == 'module-3': - slope_0 = 0 - slope_1 = 0 - clock_correction_factor = 0.625 + ADC_drift_slope_0 = module.ADC_drift_slope_0 + ADC_drift_slope_1 = module.ADC_drift_slope_1 + clock_correction_factor = module.clock_correction_factor if adc == 0: - slope = slope_0 + slope = ADC_drift_slope_0 elif adc == 1: - slope = slope_1 + slope = ADC_drift_slope_1 elif adc > 1 or adc < 0: raise ValueError('adc must be either 0 or 1.') return timestamps.astype('f8')/(1.0 + slope) * clock_correction_factor -def into_array(events_file_0, events_file_1, batch_size, event_dtype, detector,adc_sn_1, adc_sn_2): +def into_array(events_file_0, events_file_1, batch_size, event_dtype, module, adc_sn_1, adc_sn_2): events = np.zeros((0,), dtype=event_dtype) # loop through events in batch for i in range(batch_size): @@ -34,9 +30,9 @@ def into_array(events_file_0, events_file_1, batch_size, event_dtype, detector,a if event_time_0['tai_s'] == event_time_1['tai_s']: # get relevant info about event from each ADC event_tai_s = event_time_0[0]['tai_s'] - if detector == 'module-0': - event_tai_ns = (0.5*(ADC_drift(event_time_0[0]['tai_ns'] + event_time_0[0]['tai_s']*1e9, 0, detector) + \ - ADC_drift(event_time_1[0]['tai_ns'] + event_time_1[0]['tai_s']*1e9, 1, detector))).astype('i8') + if module.detector == 'module-0': + event_tai_ns = (0.5*(ADC_drift(event_time_0[0]['tai_ns'] + event_time_0[0]['tai_s']*1e9, 0, module) + \ + ADC_drift(event_time_1[0]['tai_ns'] + event_time_1[0]['tai_s']*1e9, 1, module))).astype('i8') else: event_tai_ns = (0.5*(event_time_0[0]['tai_ns'] + event_time_1[0]['tai_ns'])).astype('i8') @@ -62,12 +58,20 @@ 
def into_array(events_file_0, events_file_1, batch_size, event_dtype, detector,adc_sn_1, adc_sn_2):
     return events
 
 # go through relevant seconds in .data files for LRS
-def read_light_files(adc_file_1, adc_file_2, nSec_start, nSec_end, detector, adc_steps, nchannels_adc1, nchannels_adc2, adc_sn_1,adc_sn_2):
+def read_light_files(module):
+    
+    input_light_filename_1 = module.adc_folder + module.input_light_filename_1
+    input_light_filename_2 = module.adc_folder + module.input_light_filename_2
+    nSec_start = module.nSec_start_light
+    nSec_end = module.nSec_end_light
+    adc_sn_1 = (module.input_light_filename_1).split('_')[0]
+    adc_sn_2 = (module.input_light_filename_2).split('_')[0]
+    
     go = True
-    event_dtype = np.dtype([('tai_s', '<i8'), [...]
         if current_sec >= nSec_start and current_sec <= nSec_end:
@@ -83,7 +87,7 @@ def read_light_files(adc_file_1, adc_file_2, nSec_start, nSec_end, detector, adc_steps, nchannels_adc1, nchannels_adc2, adc_sn_1,adc_sn_2):
         set_of_tai_s = np.unique(light_events['tai_s'])
 
-        if detector == 'module-0':
+        if module.detector == 'module-0':
             if len(set_of_tai_s) > 1 and np.max(set_of_tai_s) == 1:
                 if current_sec >= nSec_start and current_sec <= nSec_end:
                     pbar.update(1)
diff --git a/reco/matching.py b/reco/matching.py
index 5f39b70..a51006d 100644
--- a/reco/matching.py
+++ b/reco/matching.py
@@ -4,12 +4,14 @@
 from tqdm import tqdm
 
 # match the light triggers to packet type 7s in the packets dataset
-def match_light_to_ext_trigger(events, PPS_charge, unix_charge, matching_tolerance_unix, matching_tolerance_PPS):
+def match_light_to_ext_trigger(events, PPS_charge, unix_charge, module):
     ## INPUT
     # events: events array produced by read_light_files()
     # PPS_charge: time since last PPS of each packet type 7
     # unix_charge: unix time of each packet type 7
-    
+    matching_tolerance_unix = module.ext_trig_matching_tolerance_unix
+    matching_tolerance_PPS = module.ext_trig_matching_tolerance_PPS
+    
     # loop through light triggers and match to packet type 7s
     matched_triggers_unix = np.zeros_like(unix_charge, dtype=bool)
     matched_triggers_PPS = np.zeros_like(unix_charge, dtype=bool)
@@ -39,33 +41,36 @@ def match_light_to_ext_trigger(events, PPS_charge, unix_charge, matching_tolerance_unix, matching_tolerance_PPS):
     print('Total matched triggers based on PPS only = ', np.sum(matched_triggers_PPS), ' out of ', len(unix_charge), ' total triggers.')
     return indices_in_ext_triggers
 
-def match_light_to_charge(light_events, charge_events, PPS_ext_triggers, unix_ext_triggers):
+def match_light_to_charge(light_events, charge_events, PPS_ext_triggers, unix_ext_triggers, module):
     ### match light triggers to charge events
     charge_event_ns_min = charge_events['t_min']
     charge_event_ns_max = charge_events['t_max']
     charge_event_unix = charge_events['unix']
-    PPS_window = int(drift_distance / v_drift * 1e3)
-    unix_window = 1 # s
+    charge_light_matching_PPS_window = module.charge_light_matching_PPS_window
+    charge_light_matching_unix_window = module.charge_light_matching_unix_window
     matched_light_index = 0
     indices_in_light_events = []
+    
     # loop through light events and check for charge event within a drift window
-    for i in tqdm(range(len(light_events)), desc = ' Matching light events to charge events: '):
+    for i in tqdm(range(len(light_events)), desc = ' Matching light triggers to charge clusters: '):
         light_event = light_events[i]
         PPS_ext_trigger = PPS_ext_triggers[i]
         unix_ext_trigger = unix_ext_triggers[i]
         light_event['light_unique_id'] = i
-        matched_events_PPS = (charge_event_ns_min > PPS_ext_trigger - 0.25*PPS_window) & (charge_event_ns_max < PPS_ext_trigger + 1.25*PPS_window)
-        matched_events_unix = (charge_event_unix >
unix_ext_trigger-unix_window) & (charge_event_unix < unix_ext_trigger + unix_window) + matched_events_PPS = (charge_event_ns_min > PPS_ext_trigger - 0.25*charge_light_matching_PPS_window) & (charge_event_ns_max < PPS_ext_trigger + 1.25*charge_light_matching_PPS_window) + matched_events_unix = (charge_event_unix > unix_ext_trigger-charge_light_matching_unix_window) & (charge_event_unix < unix_ext_trigger + charge_light_matching_unix_window) matched_events = (matched_events_PPS) & (matched_events_unix) matched_events_indices = np.where(matched_events)[0] + if len(matched_events_indices) > 0: indices_in_light_events.append(i) for index in matched_events_indices: charge_events[index]['matched'] = 1 charge_events[index]['light_index'] = matched_light_index matched_light_index += 1 + results_light_events = light_events[np.array(indices_in_light_events)] return charge_events, results_light_events diff --git a/reco/reco.py b/reco/reco.py index 96dcead..fc2afdb 100644 --- a/reco/reco.py +++ b/reco/reco.py @@ -15,7 +15,7 @@ import importlib.util def reco_loop(nSec_start, nSec_end, PPS_indices, packets,\ - mc_assn, tracks, pixel_xy, detector, hits_clusters_start_cindex): + mc_assn, tracks, pixel_xy, module, hits_clusters_start_cindex): ## loop through seconds of data and do charge reconstruction for sec in tqdm(range(nSec_start,int(nSec_end)+1),desc=" Seconds Processed: "): # Grab 1s at a time to analyze, plus the next 1s. @@ -42,7 +42,7 @@ def reco_loop(nSec_start, nSec_end, PPS_indices, packets,\ if sec == nSec_start: results_clusters, unix_pt7, PPS_pt7,\ hits_clusters = analysis(packets_1sec, pixel_xy,\ - mc_assn, tracks, detector, hits_clusters_start_cindex,sec) + mc_assn, tracks, module, hits_clusters_start_cindex,sec) elif sec > nSec_start: # making sure to continously increment cluster_index as we go onto the next PPS if np.size(hits_clusters['cluster_index']) > 0: @@ -52,7 +52,7 @@ def reco_loop(nSec_start, nSec_end, PPS_indices, packets,\ # run reconstruction and save temporary arrays of results results_clusters_temp, unix_pt7_temp, PPS_pt7_temp,\ hits_clusters_temp\ - = analysis(packets_1sec, pixel_xy, mc_assn, tracks, detector,\ + = analysis(packets_1sec, pixel_xy, mc_assn, tracks, module,\ hits_clusters_max_cindex,sec) # concatenate temp arrays to main arrays results_clusters = np.concatenate((results_clusters, results_clusters_temp)) @@ -62,9 +62,9 @@ def reco_loop(nSec_start, nSec_end, PPS_indices, packets,\ hits_clusters_max_cindex = np.max(hits_clusters['cluster_index'])+1 return results_clusters,unix_pt7,PPS_pt7, hits_clusters, hits_clusters_max_cindex -def reco_MC(packets, mc_assn, tracks, pixel_xy, detector): +def reco_MC(packets, mc_assn, tracks, pixel_xy, module): results_clusters, unix_pt7, PPS_pt7,\ - hits_clusters = analysis(packets, pixel_xy, mc_assn, tracks, detector, 0, 0) + hits_clusters = analysis(packets, pixel_xy, mc_assn, tracks, module, 0, 0) return results_clusters, unix_pt7,PPS_pt7, hits_clusters def run_reconstruction(input_config_filename): @@ -79,55 +79,55 @@ def run_reconstruction(input_config_filename): # set variables from config file detector = module.detector - input_packets_filename = module.input_packets_filename - input_light_filename_1 = module.input_light_filename_1 - input_light_filename_2 = module.input_light_filename_2 - output_events_filename = module.output_events_filename - nSec_start = module.nSec_start_packets - nSec_end = module.nSec_end_packets - nSec_start_light = module.nSec_start_light - nSec_end_light = module.nSec_end_light - 
sync_filename = module.sync_filename - light_time_steps = module.light_time_steps - nchannels_adc1 = module.nchannels_adc1 - nchannels_adc2 = module.nchannels_adc2 - matching_tolerance_unix = module.matching_tolerance_unix - matching_tolerance_PPS = module.matching_tolerance_PPS - disabled_channels_list = module.disabled_channels_list - - input_packets_filepath = input_packets_filename - output_events_filepath = output_events_filename - - if detector not in ['module-0','module-1','module-2', 'module-3','2x2']: - raise Exception("Possible values of 'detector' are only 'module-0', 'module-1','module-2', 'module-3', and '2x2' (without quotes).") - if os.path.exists(output_events_filepath): - raise Exception('Output file '+ str(output_events_filepath) + ' already exists.') - if nSec_start <= 0 or nSec_start - int(nSec_start) or nSec_end < -1 or nSec_end - int(nSec_end) > 0: - raise ValueError('nSec_start and nSec_end must be greater than zero and be an integer.') - if input_packets_filename.split('.')[-1] != 'h5': + data_type = module.data_type + + if data_type == 'data': + nSec_start = module.nSec_start_packets + nSec_end = module.nSec_end_packets + nSec_start_light = module.nSec_start_light + nSec_end_light = module.nSec_end_light + elif data_type == 'MC': + pass + else: + raise ValueError(f'Data type {data_type} not supported. Choose data or MC.') + + # do various file / parameter checks + if os.path.exists(module.output_events_filename): + raise Exception('Output file '+ str(module.output_events_filename) + ' already exists.') + if not os.path.exists(module.detector_dict_path): + raise Exception(f'Dictionary file {module.detector_dict_path} does not exist.') + else: + print('Using pixel layout dictionary: ', module.detector_dict_path) + if not os.path.exists(module.input_packets_filename): + raise Exception(f'Packets file {module.input_packets_filename} does not exist.') + else: + print('Opening packets file: ', module.input_packets_filename) + if module.input_packets_filename.split('.')[-1] != 'h5': raise Exception('Input file must be an h5 file.') - - # load dictionary for calculating hit position on pixel plane. Made using larpix-readout-parser. 
-    if detector == 'module-0':
-        dict_path = 'layout/module-0/multi_tile_layout-2.3.16.pkl'
-    elif detector == 'module-1':
-        dict_path = 'layout/module-1/module1_layout-2.3.16.pkl'
-    elif detector == 'module-2':
-        dict_path = 'layout/module-2/multi_tile_layout-2022_11_18_04_35_CET.pkl'
-    elif detector == 'module-3':
-        dict_path = 'layout/module-3/multi_tile_layout-module3.pkl'
-    elif detector == '2x2':
-        dict_path = 'layout/2x2/multi_tile_layout-2.3.16_2x2.pkl'
-    pixel_xy = load_geom_dict(dict_path)
-    print('Using pixel layout dictionary: ', dict_path)
+    if data_type == 'data':
+        if nSec_start <= 0 or nSec_start - int(nSec_start) or nSec_end < -1 or nSec_end - int(nSec_end) > 0:
+            raise ValueError('nSec_start must be a positive integer and nSec_end must be an integer >= -1.')
+        if module.use_disabled_channels_list:
+            if not os.path.exists(module.disabled_channels_list):
+                raise Exception(f'Disabled channels file {module.disabled_channels_list} does not exist.')
+            else:
+                print('Using disabled channels list: ', module.disabled_channels_list)
+    if module.do_match_of_charge_to_light and data_type == 'data':
+        if not os.path.exists(module.adc_folder + module.input_light_filename_1):
+            raise Exception(f'Input light file {module.adc_folder + module.input_light_filename_1} does not exist.')
+        if not os.path.exists(module.adc_folder + module.input_light_filename_2):
+            raise Exception(f'Input light file {module.adc_folder + module.input_light_filename_2} does not exist.')
+    # detector dict file must be a .pkl file made with larpix-readout-parser
+    pixel_xy = load_geom_dict(module.detector_dict_path)
 
     # open packets file
-    print('Opening packets file: ', input_packets_filepath)
-    f_packets = h5py.File(input_packets_filepath)
+    f_packets = h5py.File(module.input_packets_filename)
     try:
         f_packets['packets']
     except:
-        raise KeyError('Packets not found in ' + input_packets_filepath)
+        raise KeyError('Packets not found in ' + module.input_packets_filename)
 
     analysis_start = time.time()
 
@@ -138,35 +138,34 @@ def run_reconstruction(input_config_filename):
         mc_assn = f_packets['mc_packets_assn']
         tracks = f_packets['tracks']
     except:
         mc_assn=None
+        tracks = None
         print("No 'mc_packets_assn' dataset found, processing as real data.")
 
     # get packets and indices of PPS pulses
     packets = f_packets['packets']
-    if nSec_end == -1 and mc_assn is None:
-        print('nSec_end was set to -1, so processing entire file.')
-    
-    if use_disabled_channels_list:
-        print('Using disabled channels list: ', disabled_channels_list)
-        disabled_channels = np.load(disabled_channels_list)
-        keys = disabled_channels['keys']
-        
-        unique_ids_packets = ((((packets['io_group'].astype(int)) * 256 \
-            + packets['io_channel'].astype(int)) * 256 \
-            + packets['chip_id'].astype(int)) * 64 \
-            + packets['channel_id'].astype(int)).astype(int)
+    if data_type == 'data':
+        if nSec_end == -1:
+            print('nSec_end was set to -1, so processing entire file.')
 
-    nonType0_mask = packets['packet_type'] != 0
-    unique_ids_packets[nonType0_mask] = -1 # just to make sure we don't remove non data packets
+        if module.use_disabled_channels_list:
+            disabled_channels = np.load(module.disabled_channels_list)
+            keys = disabled_channels['keys']
+            
+            unique_ids_packets = ((((packets['io_group'].astype(int)) * 256 \
+                + packets['io_channel'].astype(int)) * 256 \
+                + packets['chip_id'].astype(int)) * 64 \
+                + packets['channel_id'].astype(int)).astype(int)
+            
+            nonType0_mask = packets['packet_type'] != 0
+            unique_ids_packets[nonType0_mask] = -1 # just to make sure we don't remove non data packets
 
-    print('Removing noisy packets...')
-    packets_to_keep_mask = np.isin(unique_ids_packets, keys, invert=True)
-    packets = packets[packets_to_keep_mask]
-    print('Finished. Removed ', 100 - np.sum(packets['packet_type'] == 0)/np.sum(np.invert(nonType0_mask)) * 100, ' % of data packets.')
+            print('Removing noisy packets...')
+            packets_to_keep_mask = np.isin(unique_ids_packets, keys, invert=True)
+            packets = packets[packets_to_keep_mask]
+            print('Finished. Removed ', 100 - np.sum(packets['packet_type'] == 0)/np.sum(np.invert(nonType0_mask)) * 100, ' % of data packets.')
 
     # run reconstruction
-    if mc_assn is None:
+    if mc_assn is None or data_type == 'data':
         hits_clusters_start_cindex = 0
         io_groups = np.unique(packets['io_group'])
         for io in tqdm(io_groups, desc = 'Processing io_groups: '):
@@ -185,13 +184,13 @@
             results_clusters, unix_pt7, \
             PPS_pt7, hits_clusters,\
             hits_clusters_start_cindex = \
-                reco_loop(nSec_start, nSec_end, PPS_indices, packets_io, mc_assn, tracks, pixel_xy, detector,\
+                reco_loop(nSec_start, nSec_end, PPS_indices, packets_io, mc_assn, tracks, pixel_xy, module,\
                 hits_clusters_start_cindex)
             else:
                 results_clusters_temp, unix_pt7_temp, \
                 PPS_pt7_temp, hits_clusters_temp,\
                 hits_clusters_start_cindex = \
-                reco_loop(nSec_start, nSec_end, PPS_indices, packets_io, mc_assn, tracks, pixel_xy, detector,\
+                reco_loop(nSec_start, nSec_end, PPS_indices, packets_io, mc_assn, tracks, pixel_xy, module,\
                 hits_clusters_start_cindex)
                 results_clusters = np.concatenate((results_clusters, results_clusters_temp))
                 unix_pt7 = np.concatenate((unix_pt7, unix_pt7_temp))
@@ -199,28 +198,21 @@
             hits_clusters = np.concatenate((hits_clusters, hits_clusters_temp))
     else:
         results_clusters, unix_pt7, PPS_pt7, hits_clusters = \
-            reco_MC(packets, mc_assn, tracks, pixel_xy, detector)
+            reco_MC(packets, mc_assn, tracks, pixel_xy, module)
 
     # do cuts on charge events. See toggles for cuts in consts.py.
     # if all toggles are False then this command simply returns `results` unchanged.
 #results_small_clusters = charge_event_cuts.all_charge_event_cuts(results_small_clusters)
 
-    if do_match_of_charge_to_light and mc_assn is None:
+    if module.do_match_of_charge_to_light and mc_assn is None:
         # loop through the light files and only select triggers within the second ranges specified
         # note that not all these light events will have a match within the packets data selection
-        input_light_filepath_1 = adc_folder + input_light_filename_1
-        input_light_filepath_2 = adc_folder + input_light_filename_2
-        adc_sn_1 = input_light_filename_1.split('_')[0]
-        adc_sn_2 = input_light_filename_2.split('_')[0]
-        print('Using light file ', input_light_filepath_1)
-        print('Using light file ', input_light_filepath_2)
         print('Loading light files with a batch size of ', batch_size, ' ...')
-        light_events_all = read_light_files(input_light_filepath_1, input_light_filepath_2, nSec_start_light, nSec_end_light, detector, light_time_steps, nchannels_adc1, nchannels_adc2,adc_sn_1, adc_sn_2)
+        light_events_all = read_light_files(module)
 
         # match light events to ext triggers in packets (packet type 7)
         light_events_all = light_events_all[light_events_all['unix'] != 0]
         print('Matching light triggers to external triggers in packets...')
-        indices_in_ext_triggers = matching.match_light_to_ext_trigger(light_events_all, PPS_pt7, unix_pt7, \
-            matching_tolerance_unix, matching_tolerance_PPS) # length of light_events_all
+        indices_in_ext_triggers = matching.match_light_to_ext_trigger(light_events_all, PPS_pt7, unix_pt7, module) # length of light_events_all
         evt_mask = indices_in_ext_triggers != -1
         indices_in_ext_triggers = indices_in_ext_triggers[evt_mask]
         light_events_all = light_events_all[evt_mask]
@@ -229,14 +221,14 @@
 
         # match ext triggers / light events to charge events for the large clusters
         print('Performing charge-light matching for the charge clusters...')
-        results_clusters, results_clusters_light_events = matching.match_light_to_charge(light_events_all, results_clusters, PPS_pt7_light, unix_pt7_light)
+        results_clusters, results_clusters_light_events = matching.match_light_to_charge(light_events_all, results_clusters, PPS_pt7_light, unix_pt7_light, module)
     else:
         print('Skipping getting light events and doing charge-light matching...')
 
-    print('Saving events to ', output_events_filepath)
-    with h5py.File(output_events_filepath, 'w') as f:
+    print('Saving clusters and light data to ', module.output_events_filename)
+    with h5py.File(module.output_events_filename, 'w') as f:
         dset_hits_clusters = f.create_dataset('clusters_hits', data=hits_clusters, dtype=hits_clusters.dtype)
-        if do_match_of_charge_to_light:
+        if module.do_match_of_charge_to_light and mc_assn is None:
             dset_light_events = f.create_dataset('clusters_matched_light', data=results_clusters_light_events, dtype=results_clusters_light_events.dtype)
         dset_clusters = f.create_dataset('clusters', data=results_clusters, dtype=results_clusters.dtype)
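With these changes, run_reconstruction() expects its input_config_filename argument to point at a plain Python file whose module-level attributes carry everything the pipeline needs. This diff only shows `import importlib.util` in reco.py, so the loader below is an assumed sketch of the standard pattern, not code from this patch:

    # sketch: load an input config .py file as a module object (assumed pattern
    # built on importlib.util; the actual loader is outside this diff)
    import importlib.util

    def load_input_config(input_config_filename):
        spec = importlib.util.spec_from_file_location('input_config', input_config_filename)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)  # executes the config file, filling the module namespace
        return module

An illustrative data-taking config for module-0 would then look like the sketch below. The attribute names are the ones read off `module` throughout this patch; the layout path and ADC drift constants come from the branches the patch deletes; the remaining file names, shapes, and tolerances are hypothetical placeholders:

    detector = 'module-0'
    data_type = 'data'                        # 'data' or 'MC'

    input_packets_filename = 'packets.h5'     # hypothetical path; must be an .h5 file
    output_events_filename = 'events.h5'      # hypothetical; must not exist yet
    detector_dict_path = 'layout/module-0/multi_tile_layout-2.3.16.pkl'

    nSec_start_packets = 1                    # positive integer
    nSec_end_packets = -1                     # -1 -> process the whole file
    nSec_start_light = 1
    nSec_end_light = -1

    use_disabled_channels_list = False
    disabled_channels_list = 'disabled_channels.npz'   # hypothetical .npz with a 'keys' array

    do_match_of_charge_to_light = True
    adc_folder = 'light/'                                         # hypothetical
    input_light_filename_1 = '0a7a1234_2021_04_02_19_00_46.data'  # hypothetical; ADC serial prefix before first '_'
    input_light_filename_2 = '0a7b5678_2021_04_02_19_00_46.data'  # hypothetical; ADC serial prefix before first '_'
    light_time_steps = 256                    # illustrative waveform length
    nchannels_adc1 = 58                       # illustrative
    nchannels_adc2 = 58                       # illustrative
    ext_trig_matching_tolerance_unix = 1      # s, illustrative
    ext_trig_matching_tolerance_PPS = 1e3     # ns, illustrative
    charge_light_matching_PPS_window = 200e3  # ns, roughly one drift window; illustrative
    charge_light_matching_unix_window = 1     # s, illustrative

    ADC_drift_slope_0 = -1.18e-7              # sn 175780172 (#314C)
    ADC_drift_slope_1 = 1.18e-7               # sn 175854781 (#54BD)
    clock_correction_factor = 0.625
    # plus the calibrate.py block sketched earlier (timestamp_cut,
    # PACMAN_clock_correction*, use_ped_config_files, pedestal_file, config_file)

A call like run_reconstruction('module0_run1_config.py') (hypothetical file name) would then pick all of these up through the loader.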