#!/usr/bin/env python

import argparse
import logging
import os
import smtplib
import ssl
from argparse import Namespace
from datetime import datetime
from pathlib import Path

import astropy.units as u
import matplotlib.pyplot as plt
import numpy as np
import scipy.optimize  # "import scipy" alone does not load the optimize/special submodules
import scipy.special
from astropy.coordinates import SkyCoord
from astropy.time import Time
from gammapy.data import DataStore, Observations
from gammapy.datasets import Datasets, FluxPointsDataset, SpectrumDataset
from gammapy.estimators import FluxPointsEstimator, LightCurveEstimator
from gammapy.estimators.points.core import FluxPoints
from gammapy.makers import ReflectedRegionsBackgroundMaker, SpectrumDatasetMaker
from gammapy.maps import MapAxis, RegionGeom, WcsGeom
from gammapy.modeling import Fit
from gammapy.modeling.models import LogParabolaSpectralModel, PowerLawSpectralModel, SkyModel
from regions import CircleSkyRegion
from scipy.stats import chi2
def build_argparser():
    parser = argparse.ArgumentParser(
        description="For DL3 files of one observation night for a given source, search if the flux is variable."
    )

    parser.add_argument(
        "--dl3_folder", default=None, dest="dl3_folder", type=str, required=True, help="Path of the DL3 files to use."
    )
    parser.add_argument(
        "--output_dir", default=None, dest="output_dir", type=str, required=True, help="Path where results are saved."
    )
    parser.add_argument(
        "--source",
        "-s",
        default="???",
        dest="source_name",
        type=str,
        required=False,
        help="Source name of the target.",
    )
    parser.add_argument("-ra", default=None, dest="ra", type=float, required=True, help="RA coordinate of the target.")
    parser.add_argument(
        "-dec", default=None, dest="dec", type=float, required=True, help="DEC coordinate of the target."
    )
    parser.add_argument(
        "--date", "-d", default=None, dest="date", type=str, required=True, help="Date of the observation night."
    )
    parser.add_argument(
        "--runlist",
        "-rl",
        default=None,
        dest="runlist",
        type=str,
        required=False,
        help="Run list to be analysed, from a txt file (--rl file.txt).",
    )
    parser.add_argument(
        "--distance",
        "-dis",
        default=3,
        dest="distance",
        type=float,
        required=False,
        help="Max distance in degrees between the target position and the run pointing position "
        "for the run selection; a negative value means no selection using this parameter.",
    )

    return parser
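
# A minimal sketch of how the parser above is typically driven; the folder and
# output paths here are hypothetical placeholders, not real data:
#
#   args = build_argparser().parse_args(
#       ["--dl3_folder", "/path/to/dl3", "--output_dir", "/tmp/out",
#        "-ra", "83.63", "-dec", "22.01", "--date", "20240111"]
#   )
#   assert args.distance == 3  # default run-selection radius in degrees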
class SourceData:
    def __init__(self, args: Namespace) -> None:
        self.path = args.dl3_folder
        self.date = args.date
        self.runlist = args.runlist
        self.source_position = SkyCoord(args.ra, args.dec, unit="deg")
        self.distance = args.distance
        self.obs_ids = None
        self.datastore = None
        self.obs_collection = None
        self.required_irf = ["aeff", "edisp"]

        self.compute()

    def get_runs_database(self) -> list:
        # A run list, when given, takes precedence over the distance-based selection.
        if self.runlist:
            obs_ids = np.loadtxt(self.runlist, unpack=True, dtype=int, ndmin=1)

        elif self.distance > 0.0:
            obs_table = self.datastore.obs_table
            obs_table = obs_table[obs_table["LIVETIME"].data > 5 * 60]  # drop runs shorter than 5 minutes
            obs_ids = obs_table.select_sky_circle(self.source_position, self.distance * u.deg, inverted=False)[
                "OBS_ID"
            ].tolist()

        else:
            raise RuntimeError("Cannot make a run selection. Either (and only) runlist or distance>0 is needed.")

        if len(obs_ids) == 0:
            raise RuntimeError("No run selected with current input.")

        return obs_ids
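
    # The runlist file loaded above is plain whitespace-separated text read with
    # np.loadtxt(dtype=int), i.e. one integer obs_id per line, for example
    # (hypothetical ids):
    #   16342
    #   16343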

    def compute(self) -> None:
        self.datastore = DataStore.from_dir(self.path)
        self.obs_ids = self.get_runs_database()
        self.obs_collection = self.datastore.get_observations(self.obs_ids, required_irf=self.required_irf)

    def get_datastore(self) -> DataStore:
        return self.datastore

    def get_obs_collection(self) -> Observations:
        if not self.obs_collection:
            self.compute()
        return self.obs_collection

    def get_obs_ids(self) -> list:
        if not self.obs_ids:
            self.compute()
        return self.obs_ids
#########################


class Significance:
    def __init__(
        self, source_position: SkyCoord, e_min_reco: float = 0.01, e_max_reco: float = 10, on_radius: float = 0.2
    ) -> None:
        self.source_position = source_position
        self.e_min_reco = e_min_reco * u.TeV
        self.e_max_reco = e_max_reco * u.TeV
        self.on_radius = on_radius
        self.obs_collection = None
        self.initialize()

    def initialize(self) -> None:
        on_radius = self.on_radius * u.deg
        on_region = CircleSkyRegion(center=self.source_position, radius=on_radius)
        exclusion_radius = 0.35 * u.deg
        exclude_region = CircleSkyRegion(center=self.source_position, radius=exclusion_radius)
        # A single energy bin covering the full reconstructed-energy range.
        energy_axis = MapAxis.from_edges(
            np.logspace(np.log10(self.e_min_reco.to_value(u.TeV)), np.log10(self.e_max_reco.to_value(u.TeV)), 2),
            unit="TeV",
            name="energy",
            interp="log",
        )

        geom_on_region = RegionGeom.create(region=on_region, axes=[energy_axis])
        geom = WcsGeom.create(
            skydir=self.source_position, npix=(200, 200), binsz=0.02, frame="icrs", axes=[energy_axis]
        )
        geom_image = geom.to_image()
        exclusion_mask = ~geom_image.region_mask([exclude_region])

        self.dataset_maker_spectrum_significance = SpectrumDatasetMaker(
            selection=["counts", "exposure"], use_region_center=True
        )
        self.spectrum_dataset_empty_significance = SpectrumDataset.create(geom=geom_on_region)
        self.bkg_maker_spectrum_significance = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask)

    def compute(self, obs_collection: Observations) -> None:
        datasets_significance = Datasets()

        for obs in obs_collection:
            dataset_spectrum_significance = self.dataset_maker_spectrum_significance.run(
                self.spectrum_dataset_empty_significance.copy(name=f"obs-{obs.obs_id}"),
                obs,
            )
            dataset_on_off_spectrum_significance = self.bkg_maker_spectrum_significance.run(
                observation=obs,
                dataset=dataset_spectrum_significance,
            )
            datasets_significance.append(dataset_on_off_spectrum_significance)

        self.info_table = datasets_significance.info_table(cumulative=True)

    def get_significance(self, obs_collection: Observations) -> float:
        self.compute(obs_collection)
        if self.info_table is not None:
            significance = self.info_table["sqrt_ts"][-1]
            if np.isnan(significance):  # "x == np.nan" is always False; np.isnan is the correct test
                return 0.0
            else:
                report.add("Significance (Li&Ma) = %.2f" % significance)
                return significance
        else:
            return 0.0

    def is_significant(self, obs_collection: Observations) -> bool:
        bool_sign = self.get_significance(obs_collection) > 5.0
        report.add("Is significant (>5σ) : %s" % bool_sign)
        return bool_sign
#########################


class Flux:
    def __init__(self, source_position: SkyCoord, e_min_reco: float = 0.2, e_max_reco: float = 10) -> None:
        self.source_position = source_position
        self.e_min_reco = e_min_reco * u.TeV
        self.e_max_reco = e_max_reco * u.TeV
        self.datasets_spectrum_joint_with_model = None
        self.model = None
        self.selected_model = None
        self.energy_edges_flux_point = None
        self.required_irf = ["aeff", "edisp"]

    def initialize(self):
        n_bin_per_decade_e_reco = 10
        e_min_true = 0.005 * u.TeV
        e_max_true = 40.0 * u.TeV
        n_bin_per_decade_e_true = 20

        e_min_flux_point = self.e_min_reco
        e_max_flux_point = self.e_max_reco
        n_bin_per_decade_flux_point = 10

        n_bin_energy_reco = int(
            (np.log10(self.e_max_reco.to_value(u.TeV)) - np.log10(self.e_min_reco.to_value(u.TeV)))
            * n_bin_per_decade_e_reco
        )
        energy_axis_reco = MapAxis.from_edges(
            np.logspace(
                np.log10(self.e_min_reco.to_value(u.TeV)), np.log10(self.e_max_reco.to_value(u.TeV)), n_bin_energy_reco
            ),
            unit="TeV",
            name="energy",
            interp="log",
        )

        n_bin_energy_true = int(
            (np.log10(e_max_true.to_value(u.TeV)) - np.log10(e_min_true.to_value(u.TeV))) * n_bin_per_decade_e_true
        )
        energy_axis_true = MapAxis.from_edges(
            np.logspace(np.log10(e_min_true.to_value(u.TeV)), np.log10(e_max_true.to_value(u.TeV)), n_bin_energy_true),
            unit="TeV",
            name="energy_true",
            interp="log",
        )

        on_radius = 0.2 * u.deg
        on_region = CircleSkyRegion(center=self.source_position, radius=on_radius)
        geom_on_region = RegionGeom.create(region=on_region, axes=[energy_axis_reco])

        geom = WcsGeom.create(
            skydir=self.source_position, npix=(200, 200), binsz=0.02, frame="icrs", axes=[energy_axis_reco]
        )
        geom_image = geom.to_image()

        exclusion_radius = 0.35 * u.deg
        exclude_region = CircleSkyRegion(center=self.source_position, radius=exclusion_radius)
        exclusion_mask = ~geom_image.region_mask([exclude_region])

        n_bin_energy_flux_point = int(
            (np.log10(e_max_flux_point.to_value(u.TeV)) - np.log10(e_min_flux_point.to_value(u.TeV)))
            * n_bin_per_decade_flux_point
        )
        energy_edges_flux_point = (
            np.logspace(
                np.log10(e_min_flux_point.to_value(u.TeV)),
                np.log10(e_max_flux_point.to_value(u.TeV)),
                n_bin_energy_flux_point,
            )
            * u.TeV
        )
        return geom_on_region, energy_axis_true, exclusion_mask, energy_axis_reco, energy_edges_flux_point
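
    # Sanity check on the binning arithmetic above: the counts passed to
    # np.logspace are numbers of *edges*, so each axis gets one bin fewer.
    # For the default 0.2-10 TeV range, int(log10(10/0.2) * 10) = int(16.99) = 16
    # edges, i.e. 15 log-spaced reconstructed-energy bins.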

    def get_datasets(self, obs_ids: list, obs_collection: Observations):
        geom_on_region, energy_axis_true, exclusion_mask, energy_axis_reco, energy_edges_flux_point = self.initialize()
        datasets_spectrum_joint = Datasets()
        dataset_maker_spectrum = SpectrumDatasetMaker(
            selection=["counts", "exposure", "edisp"], use_region_center=True
        )
        spectrum_dataset_empty = SpectrumDataset.create(geom=geom_on_region, energy_axis_true=energy_axis_true)
        bkg_maker_spectrum = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask)

        for i, obs in enumerate(obs_collection):
            dataset_spectrum = dataset_maker_spectrum.run(spectrum_dataset_empty.copy(name=f"obs-{i}"), obs)
            dataset_on_off_spectrum = bkg_maker_spectrum.run(observation=obs, dataset=dataset_spectrum)
            datasets_spectrum_joint.append(dataset_on_off_spectrum)

        return datasets_spectrum_joint, energy_axis_reco, energy_edges_flux_point

    def compute_models(self, energy_axis_reco):
        reference_energy = energy_axis_reco.center[int(energy_axis_reco.nbin / 2)]
        spectral_model = {
            "power_law": PowerLawSpectralModel(
                amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"), index=2, reference=reference_energy
            ),
            "log_parabola": LogParabolaSpectralModel(
                amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"), alpha=2, beta=0, reference=reference_energy
            ),
        }

        model = {}
        for k in spectral_model.keys():
            model[k] = SkyModel(spectral_model=spectral_model[k], name=k)

        return model, spectral_model

    def select_model(self, khi2_flux) -> None:
        """Select the spectral model for the flux fit.

        The power law is the H0 hypothesis; the challenger model is adopted only
        if the likelihood-ratio test prefers it at more than 5 sigma.
        """
        challenger_model = min(khi2_flux, key=khi2_flux.get)
        LR_statistic = khi2_flux["power_law"] - khi2_flux[challenger_model]
        dof = 1
        p_val = chi2.sf(LR_statistic, dof)
        significance_challenger = np.sqrt(2) * scipy.special.erfinv(1 - p_val)
        if significance_challenger > 5.0:
            self.selected_model = challenger_model
        else:
            self.selected_model = "power_law"
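
    # Worked example for the test above: with dof=1, a likelihood-ratio
    # statistic of 25 gives p_val = chi2.sf(25, 1), and
    # np.sqrt(2) * scipy.special.erfinv(1 - p_val) == 5.0, since for one
    # extra free parameter the Gaussian significance is sqrt(LR_statistic).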

    def get_time_unix_from_lst_epoch(self, time):
        # NOTE: self.version is never set in __init__; it must be assigned
        # externally before this method is called.
        if self.version == "lstchain":
            return time
        else:
            LST_EPOCH = Time("2018-10-01T00:00:00", scale="utc")
            time_lst_unix = Time(LST_EPOCH, format="unix", scale="utc").value
            if isinstance(time, list):
                return [t + time_lst_unix for t in time]
            else:
                return time + time_lst_unix
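
    # For reference, the conversion above offsets timestamps counted from the
    # LST epoch (2018-10-01T00:00:00 UTC, unix time 1538352000) back to the
    # standard unix epoch by adding that constant.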

    def get_dataset_joint(self, obs_collection: Observations, obs_ids: list) -> dict:
        datasets_spectrum_joint, energy_axis_reco, energy_edges_flux_point = self.get_datasets(obs_ids, obs_collection)
        model, spectral_model = self.compute_models(energy_axis_reco)
        datasets_spectrum_joint_with_model = {}
        fit_joint = {}
        result_fit_joint = {}
        khi2_flux = {}

        for k in spectral_model.keys():
            datasets_spectrum_joint_with_model[k] = datasets_spectrum_joint.copy()
            datasets_spectrum_joint_with_model[k].models = [model[k]]
            fit_joint[k] = Fit()
            result_fit_joint[k] = fit_joint[k].run(datasets=datasets_spectrum_joint_with_model[k])
            khi2_flux[k] = result_fit_joint[k].total_stat

        for k in spectral_model.keys():
            if not result_fit_joint[k].success:
                logging.warning("Fit of %s did not converge.", k)

        # Select the model once, after reporting any non-converged fits.
        self.select_model(khi2_flux)
        self.datasets_spectrum_joint_with_model = datasets_spectrum_joint_with_model
        self.model = model
        self.energy_edges_flux_point = energy_edges_flux_point
        return datasets_spectrum_joint_with_model

    def get_plot(self, data: SourceData, output_file: Path):
        self.required_irf = data.required_irf
        obs_collection = data.get_obs_collection()
        obs_ids = data.get_obs_ids()
        self.get_dataset_joint(obs_collection, obs_ids)
        fpe = FluxPointsEstimator(
            energy_edges=self.energy_edges_flux_point,
            source=self.selected_model,
            selection_optional="all",
            n_sigma_ul=3,
            reoptimize=False,
        )
        flux_points = fpe.run(datasets=self.datasets_spectrum_joint_with_model[self.selected_model])
        flux_point_dataset_loop = FluxPointsDataset(data=flux_points, models=self.model[self.selected_model])
        flux_point_dataset_loop.plot_spectrum()
        plt.legend(
            ["{} fit".format(self.selected_model), "Stat. err.", "Flux"], loc=0, frameon=True, prop={"size": 12}
        )
        plt.tight_layout()
        if output_file:
            plt.savefig(output_file)
        # plt.show()
        plt.close()
#######################


class LightCurve:
    def __init__(self, flux: Flux, obs_collection: Observations, obs_ids: list, output_path: Path = None) -> None:
        self.output_path = output_path
        self.obs_collection = obs_collection
        self.obs_ids = obs_ids
        self.flux = flux
        self.energy_range = [0.01 * u.TeV, 10.0 * u.TeV]
        self.name = "light_curve"
        self.flux_points = None
        self.flux_points_err = None
        self.time = None
        self.time_plot = None
        self.tbin_l_plot = None
        self.tbin_r_plot = None
        self.up_lim = None

        self.lc = self.compute()

    def convert_mjd_unix(self, time):
        return Time(time, format="mjd").to_value("unix")

    def set_time_lc(self, t_init, time):
        return self.convert_mjd_unix(time) - self.convert_mjd_unix(t_init)

    def compute(self) -> FluxPoints:
        if self.flux.datasets_spectrum_joint_with_model is None:
            datasets_spectrum_joint_with_model = self.flux.get_dataset_joint(self.obs_collection, self.obs_ids)

        else:
            datasets_spectrum_joint_with_model = self.flux.datasets_spectrum_joint_with_model

        selected_model = self.flux.selected_model

        light_curve_maker = LightCurveEstimator(
            energy_edges=self.energy_range, reoptimize=False, n_sigma_ul=3, selection_optional="all"
        )
        lc = light_curve_maker.run(datasets_spectrum_joint_with_model[selected_model])

        return lc
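
    # Note: no time_intervals are passed to LightCurveEstimator above, so (per
    # gammapy's documented default) the light curve is binned by the GTIs of
    # the individual datasets, i.e. one flux point per run in this analysis.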

    def get_plot(self) -> None:
        plt.figure(figsize=(6, 4))
        ax = self.lc.plot(sed_type="flux")
        ax.tick_params(axis="x", labelsize=12)
        ax.get_legend().remove()

        plt.tight_layout()
        if self.output_path:
            plt.savefig(Path(self.output_path))
        # plt.show()
        plt.close()

    def lc_extraction(self, lc) -> None:
        lc_table = lc.to_table(sed_type="flux", format="lightcurve")
        lc_table = lc_table[~np.isnan(np.concatenate(lc_table["flux"]))]  # drop NaN flux values

        self.flux_points = np.concatenate(lc_table["flux"].data)
        flux_err = np.concatenate(lc_table["flux_err"].data)
        self.up_lim = np.concatenate(lc_table["is_ul"].data)

        # For upper-limit points, use the positive error as the uncertainty.
        flux_points_errp = np.concatenate(lc_table["flux_errp"].data)
        self.flux_points_err = [
            flux_points_errp[i] if self.up_lim[i] else flux_err[i] for i in range(len(flux_err))
        ]

        tmin = lc_table["time_min"].data
        tmax = lc_table["time_max"].data
        self.time = (tmin + tmax) / 2

        self.time_plot = self.set_time_lc(tmin[0], self.time)
        self.tbin_l_plot = self.set_time_lc(tmin, self.time)
        self.tbin_r_plot = self.set_time_lc(self.time, tmax)

    def get_plot_var_fit(self, par: list, p_value: float, output_file: str) -> None:
        def constant(x, a):
            return a

        fig, ax = plt.subplots(figsize=(6, 4))
        ax.errorbar(
            self.time_plot,
            self.flux_points,
            uplims=self.up_lim,
            yerr=self.flux_points_err,
            fmt="o",
            label="flux error",
        )
        ax.errorbar(
            self.time_plot,
            self.flux_points,
            uplims=self.up_lim,
            xerr=np.array(list(zip(self.tbin_l_plot, self.tbin_r_plot))).T,
            fmt="none",
            capsize=4,
            label="run",
            color="darkblue",
        )
        ax.axhline(
            constant(self.time_plot, par),
            label="Fit with constant\np-value={}".format(np.format_float_scientific(p_value, precision=1)),
            color="orange",
        )
        ax.set_ylabel("Integrated flux (cm-2 s-1)")
        ax.set_xlabel("Time (s)")
        ax.legend(prop={"size": 12})
        fig.tight_layout()
        if output_file:
            fig.savefig(Path(output_file))
            report.add("saving variability lc in %s" % output_file)
        # plt.show()
        plt.close()

    def variability(self) -> float:
        self.lc_extraction(self.lc)

        if len(self.flux_points) <= 1:
            logging.error("Not enough light-curve points for fitting.")
            return np.nan

        def constant(x, a):
            return a

        par, cov = scipy.optimize.curve_fit(
            constant, self.time, self.flux_points, sigma=self.flux_points_err, absolute_sigma=True
        )

        syst_lst_source_indep = 0.06  # source-independent systematic, LST-1 Crab paper (2023)
        syst_err = syst_lst_source_indep * np.array(self.flux_points)
        khi2 = np.sum(
            np.square((constant(self.time, par[0]) - self.flux_points))
            / (np.square(self.flux_points_err) + np.square(syst_err))
        )

        ndof = len(self.time) - len(par)
        p_value = chi2.sf(khi2, ndof)

        if self.output_path:
            self.get_plot_var_fit(par, p_value, self.output_path)
        report.add("p-value = %f" % p_value)

        return p_value
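
    # Shape of the statistic above: for flux points f_i with statistical errors
    # s_i, a fitted constant a, and the 6% systematic term, khi2 is
    #   sum_i (a - f_i)^2 / (s_i^2 + (0.06 * f_i)^2)
    # and the p-value is chi2.sf(khi2, N - 1) for N points and one fitted parameter.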

    def is_variable(self) -> bool:
        # 5e-7 is approximately the two-sided p-value of a 5 sigma Gaussian fluctuation.
        bool_pvalue = self.variability() < 5 * 10 ** (-7)
        report.add("Is variable (<5σ) : %s" % bool_pvalue)

        return bool_pvalue
#######################


class report_manager:
    def __init__(self, data: SourceData, args: Namespace):
        self.date = data.date
        self.txt = (
            "creation date : " + datetime.today().strftime("%Y-%m-%d %H:%M") + "\n\n"
            "observation night : " + "%s-%s-%s" % (self.date[:4], self.date[4:6], self.date[6:]) + "\n\n"
            "source : " + args.source_name + "\n"
            "ra : " + str(args.ra) + "\n"
            "dec : " + str(args.dec) + "\n"
            "Max distance : " + str(args.distance) + "°\n"
            "obs_ids : " + str(data.obs_ids) + "\n"
        )

    def add(self, txtline) -> None:
        self.txt += txtline
        self.txt += "\n"

    def print(self) -> None:
        print(self.txt)

    def save(self, filename) -> None:
        # Use a context manager: a bare "file.close" (without parentheses)
        # would never actually close the file.
        with open(filename, "w") as file:
            file.write(self.txt)

    def send(self) -> None:
        smtp_server = "smtp.cnrs.fr"
        port = 587  # for STARTTLS

        # sender_login = "login"
        # sender_email = "mail"
        # password = 'password'
        # receiver_email = 'mail'

        sender_login = "cyann.buisson@ods.services"
        sender_email = "cyann.plard@cnrs.fr"
        password = input("password")
        receiver_email = "cyann.plard@lapp.in2p3.fr"

        context = ssl.create_default_context()

        theme = "RTA report of %s night" % self.date
        msg = f'From: {sender_email}\r\nTo: {receiver_email}\r\nContent-Type: text/plain; charset="utf-8"\r\nSubject: {theme}\r\n\r\n'
        msg += self.txt

        server = None
        try:
            server = smtplib.SMTP(smtp_server, port)
            server.ehlo()  # can be omitted
            server.starttls(context=context)  # secure the connection
            server.ehlo()  # can be omitted
            server.login(sender_login, password)
            server.sendmail(sender_email, receiver_email, msg.encode("utf8"))
        except Exception as e:
            print(e)
        finally:
            if server is not None:  # guard: smtplib.SMTP() itself may have raised
                server.quit()
#######################


def main():
    parser = build_argparser()
    args = parser.parse_args()
    data = SourceData(args)
    global report  # read by Significance/Flux/LightCurve through this module-level name
    report = report_manager(data, args)

    if Significance(data.source_position).is_significant(data.get_obs_collection()):
        # Optional diagnostic plots:
        # Flux(data.source_position).get_plot(data, os.path.join(args.output_dir, "%s_flux.pdf" % args.date))
        # LightCurve(Flux(data.source_position), data.obs_collection, data.obs_ids, os.path.join(args.output_dir, "%s_lc.pdf" % args.date)).get_plot()
        LightCurve(
            Flux(data.source_position),
            data.obs_collection,
            data.obs_ids,
            os.path.join(args.output_dir, "%s_lc_variability.pdf" % args.date),
        ).is_variable()

    report.print()

if __name__ == "__main__":
    main()

# Example invocation:
# python rta_var.py --dl3_folder /home/plard/Documents/code/gamma/data/DL3_RTA/20240111/16342/reco/dl3/ --source crab -ra 83.63 -dec 22.01 --distance 3 --date 20240111 --output_dir /home/plard/Documents/code/gamma/figures-results/tests/RTA/