Try ipac depot
robertdstein committed May 2, 2023
1 parent 3925a67 commit 9d4f809
Showing 6 changed files with 417 additions and 13 deletions.
157 changes: 150 additions & 7 deletions nuztf/base_scanner.py
@@ -36,9 +36,10 @@
ensure_cutouts,
)
from nuztf.cat_match import ampel_api_tns, get_cross_match_info, query_ned_for_z
from nuztf.flatpix import get_flatpix
from nuztf.flatpix import get_flatpix, get_nested_pix
from nuztf.fritz import save_source_to_group
from nuztf.observations import get_obs_summary
from nuztf.observations_depot import get_obs_summary as alt_get_obs_summary
from nuztf.plot import lightcurve_from_alert
from nuztf.utils import cosmo

@@ -722,7 +723,7 @@ def text_summary(self):
def calculate_overlap_with_observations(
self, fields=None, pid=None, first_det_window_days=3.0, min_sep=0.01
):
print(fields)

if fields is None:
mns = self.get_multi_night_summary(first_det_window_days)

@@ -933,14 +934,156 @@ def __init__(self, data):
overlapping_fields,
)

def plot_overlap_with_observations(
self, fields=None, pid=None, first_det_window_days=None, min_sep=0.01
def calculate_overlap_with_depot_observations(
self, first_det_window_days=3.0, min_sep=0.01
):

mns = alt_get_obs_summary(t_min=self.t_min, max_days=first_det_window_days)

data = mns.data.copy()

mask = data["status"] == 0
self.logger.info(
f"Found {mask.sum()} successful observations in the depot, "
f"corresponding to {np.mean(mask)*100:.2f}% of the total."
)

self.logger.info("Unpacking observations")

pix_map = dict()
pix_obs_times = dict()

# field_pix = get_flatpix(nside=self.nside, logger=self.logger)
nested_pix = get_nested_pix(nside=self.nside, logger=self.logger)
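# nested_pix maps each ZTF field ID to per-quadrant (qid) lists of healpix
# indices; it is indexed below as nested_pix[field][qid]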

for i, obs_time in enumerate(tqdm(list(set(data["obsjd"])))):

obs = data[data["obsjd"] == obs_time]

field = obs["field_id"].iloc[0]

flat_pix = nested_pix[field]

mask = obs["status"] == 0
indices = obs["qid"].values[mask]

for qid in indices:

pixels = flat_pix[qid]

for p in pixels:

if p not in pix_obs_times.keys():
pix_obs_times[p] = [obs_time]
else:
pix_obs_times[p] += [obs_time]

if p not in pix_map.keys():
pix_map[p] = [field]
else:
pix_map[p] += [field]

npix = hp.nside2npix(self.nside)
theta, phi = hp.pix2ang(self.nside, np.arange(npix), nest=False)
radecs = SkyCoord(ra=phi * u.rad, dec=(0.5 * np.pi - theta) * u.rad)
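# select the (ring-ordered) healpix pixels lying within 10 deg of the galactic plane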
idx = np.where(np.abs(radecs.galactic.b.deg) <= 10.0)[0]

double_in_plane_pixels = []
double_in_plane_probs = []
single_in_plane_pixels = []
single_in_plane_prob = []
veto_pixels = []
plane_pixels = []
plane_probs = []
times = []
double_no_plane_prob = []
double_no_plane_pixels = []
single_no_plane_prob = []
single_no_plane_pixels = []

overlapping_fields = []

for i, p in enumerate(tqdm(hp.nest2ring(self.nside, self.pixel_nos))):
if p in pix_obs_times.keys():
if p in idx:
plane_pixels.append(p)
plane_probs.append(self.map_probs[i])

obs = pix_obs_times[p]

# check which healpix are observed twice
if max(obs) - min(obs) > min_sep:
# is it in galactic plane or not?
if p not in idx:
double_no_plane_prob.append(self.map_probs[i])
double_no_plane_pixels.append(p)
else:
double_in_plane_probs.append(self.map_probs[i])
double_in_plane_pixels.append(p)

else:
if p not in idx:
single_no_plane_pixels.append(p)
single_no_plane_prob.append(self.map_probs[i])
else:
single_in_plane_prob.append(self.map_probs[i])
single_in_plane_pixels.append(p)

overlapping_fields += pix_map[p]

times += list(obs)
else:
veto_pixels.append(p)

overlapping_fields = sorted(list(set(overlapping_fields)))

_observations = data.query("obsjd in @times").reset_index(drop=True)[
["obsjd", "exposure_time", "filter_id"]
]
bands = [self.fid_to_band(fid) for fid in _observations["filter_id"].values]
_observations["band"] = bands
_observations.drop(columns=["filter_id"], inplace=True)
self.observations = _observations

self.logger.info("All observations:")
self.logger.info(f"\n{self.observations}")

try:
self.first_obs = Time(min(times), format="jd")
self.first_obs.utc.format = "isot"
self.last_obs = Time(max(times), format="jd")
self.last_obs.utc.format = "isot"

except ValueError:
# `times` is empty when min()/max() fail, so avoid indexing it in the message
err = (
f"No observations were found at any time between {self.t_min} and the end "
f"of the {first_det_window_days}-day search window. Coverage overlap is 0%, "
f"but recent observations might be missing!"
)
self.logger.error(err)
raise ValueError(err)

self.logger.info(f"Observations started at {self.first_obs.jd}")

return (
double_in_plane_pixels,
double_in_plane_probs,
single_in_plane_pixels,
single_in_plane_prob,
veto_pixels,
plane_pixels,
plane_probs,
times,
double_no_plane_prob,
double_no_plane_pixels,
single_no_plane_prob,
single_no_plane_pixels,
overlapping_fields,
)

def plot_overlap_with_observations(self, first_det_window_days=None, min_sep=0.01):
""" """

overlap_res = self.calculate_overlap_with_observations(
fields=fields,
pid=pid,
overlap_res = self.calculate_overlap_with_depot_observations(
first_det_window_days=first_det_window_days,
min_sep=min_sep,
)
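For orientation, here is a minimal usage sketch of the new depot-based method. The scanner class, import path and event identifier below are assumptions for illustration only and are not part of this commit.

# Hypothetical usage sketch: class name, import path and event ID are assumptions
from nuztf.skymap_scanner import SkymapScanner

scanner = SkymapScanner("S230518h")  # placeholder gravitational-wave event name

# New in this commit: skymap overlap is computed from IPAC depot records
overlap_res = scanner.calculate_overlap_with_depot_observations(
    first_det_window_days=3.0,
    min_sep=0.01,
)

The return value has the same 13-element structure as calculate_overlap_with_observations, so downstream plotting code can consume it unchanged.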
13 changes: 13 additions & 0 deletions nuztf/credentials.py
@@ -39,6 +39,19 @@ def load_credentials(name: str, token_based: bool = False):
'No Credentials for "skyvision" found in environment' "Assuming they are set."
)

try:
io.set_account(
"ipacdepot",
username=os.environ["DEPOT_USER"],
password=os.environ["DEPOT_PASSWORD"],
)
logging.info('Set up "DEPOT" credentials')

except KeyError:
logging.info(
'No Credentials for "DEPOT" found in environment' "Assuming they are set."
)

try:
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=UserWarning)
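As a hedged sketch of how the new depot credentials are expected to be supplied: only the DEPOT_USER and DEPOT_PASSWORD variable names come from this commit; the values and the in-process export below are placeholders.

import os

# Placeholder values; normally these would be exported in the shell or an .env file
os.environ["DEPOT_USER"] = "my_depot_username"
os.environ["DEPOT_PASSWORD"] = "my_depot_password"

# Importing nuztf's credentials module should then register the "ipacdepot"
# account with ztfquery via io.set_account, as added above
import nuztf.credentials  # noqa: F401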
34 changes: 34 additions & 0 deletions nuztf/flatpix.py
@@ -16,6 +16,15 @@ def get_flatpix_path(nside: int):
return outfile


def get_nested_pix_path(nside: int):
outdir = os.path.join(os.path.dirname(__file__), "data")
if not os.path.exists(outdir):
os.makedirs(outdir)

outfile = os.path.join(outdir, f"ztf_fields_nested_ipix_nside={nside}.pickle")
return outfile


def generate_flatpix_file(nside: int, logger=logging.getLogger(__name__)):
"""
Generate and save the fields-healpix lookup table
@@ -30,6 +39,7 @@ def generate_flatpix_file(nside: int, logger=logging.getLogger(__name__)):
decs = field_dataframe["Dec"].values

flat_pix_dict = dict()
nested_pix_dict = dict()

for i, field in tqdm(enumerate(fields), total=len(fields)):
ra = ras[i]
@@ -44,6 +54,7 @@ def generate_flatpix_file(nside: int, logger=logging.getLogger(__name__)):

flat_pix = list(set(flat_pix))
flat_pix_dict[field] = flat_pix
nested_pix_dict[field] = pix

outfile = get_flatpix_path(nside=nside)

@@ -52,6 +63,13 @@ def generate_flatpix_file(nside: int, logger=logging.getLogger(__name__)):
with open(outfile, "wb") as f:
pickle.dump(flat_pix_dict, f)

outfile = get_nested_pix_path(nside=nside)

logger.info(f"Saving to {outfile}")

with open(outfile, "wb") as f:
pickle.dump(nested_pix_dict, f)


def get_flatpix(nside: int, logger=logging.getLogger(__name__)):
infile = get_flatpix_path(nside=nside)
@@ -67,3 +85,19 @@ def get_flatpix(nside: int, logger=logging.getLogger(__name__)):
field_pix = pickle.load(f)

return field_pix


def get_nested_pix(nside: int, logger=logging.getLogger(__name__)):
infile = get_nested_pix_path(nside=nside)

# Generate a lookup table for field healpix
# if none exists (because this is computationally costly)
if not os.path.isfile(infile):
generate_flatpix_file(nside=nside, logger=logger)

logger.info(f"Loading from {infile}")

with open(infile, "rb") as f:
field_pix = pickle.load(f)

return field_pix
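
A short sketch of how the new lookup table might be consumed, mirroring its use in base_scanner.py above. The nside, field ID and quadrant values are arbitrary examples, and the field -> quadrant -> pixel structure is inferred from that usage rather than documented here.

from nuztf.flatpix import get_nested_pix

nside = 256  # example resolution; the scanner passes its own self.nside
nested_pix = get_nested_pix(nside=nside)

field_id = 500  # arbitrary example ZTF field ID
qid = 1  # arbitrary example readout-quadrant index
pixels = nested_pix[field_id][qid]  # assumed structure: field -> quadrant -> healpix indices
print(f"Field {field_id}, quadrant {qid} covers {len(pixels)} healpix pixels")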
