Coverage for lst_auto_rta/Spectra.py: 0%

193 statements  

coverage.py v7.6.1, created at 2024-09-18 19:29 +0000

#!/usr/bin/env python

import astropy
import gammapy
import matplotlib
import numpy as np
import regions

print("gammapy:", gammapy.__version__)
print("numpy:", np.__version__)
print("astropy:", astropy.__version__)
print("regions:", regions.__version__)
print("matplotlib:", matplotlib.__version__)

import argparse
import os
import pickle
from pathlib import Path

import astropy.units as u
import matplotlib.pyplot as plt
import matplotlib.style as style
import scipy.special  # explicit submodule imports: a bare `import scipy` does not expose scipy.special/scipy.stats
import scipy.stats
from astropy.coordinates import SkyCoord

from gammapy.data import DataStore
from gammapy.datasets import Datasets, FluxPointsDataset, MapDataset, SpectrumDataset
from gammapy.estimators import FluxPointsEstimator
from gammapy.makers import (
    MapDatasetMaker,
    ReflectedRegionsBackgroundMaker,
    SafeMaskMaker,
    SpectrumDatasetMaker,
)
from gammapy.makers.utils import make_theta_squared_table
from gammapy.maps import Map, MapAxis, RegionGeom, WcsGeom
from gammapy.modeling import Fit
from gammapy.modeling.models import (
    ExpCutoffPowerLawSpectralModel,
    LogParabolaSpectralModel,
    PowerLawSpectralModel,
    SkyModel,
)
from gammapy.visualization import plot_spectrum_datasets_off_regions, plot_theta_squared_table
from matplotlib.offsetbox import AnchoredText
from regions import CircleSkyRegion

style.use("tableau-colorblind10")
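# Workflow: select the DL3 runs pointing near the target, build ON/OFF spectrum
# datasets with a reflected-regions background, jointly fit three nested spectral
# models, pick the best model with a likelihood-ratio test, then export flux
# points and diagnostic plots.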

parser = argparse.ArgumentParser(
    description="Automatic script for the spectral analysis of DL3 data",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("-d", "--directory", default="/fefs/onsite/pipeline/rta/data/", help="Directory for data")
parser.add_argument("-da", "--date", default="20230705", help="Date of the run to check")
parser.add_argument("-r", "--run-id", default="13600", help="Run id to check")
parser.add_argument("-add", "--add-string", default="", help="Additional string to append to the path")
parser.add_argument("-RA", "--right-ascension", default="270.19042", help="Right ascension in deg")
parser.add_argument("-DEC", "--declination", default="78.46806", help="Declination in deg")

args = parser.parse_args()
config = vars(args)

print(config)

location_data = (
    config["directory"] + config["date"] + "/" + config["run_id"] + "/" + config["add_string"] + "/DL3"
)  # path to the DL3 folder
source_name = config["run_id"]  # run id used as the source name (e.g., Crab, GRB210807A for named targets)
cut_type = "standard"  # e.g., loose, hard, ...
filename_output = "{name}_{cut}".format(name=source_name, cut=cut_type)


source_position = SkyCoord(ra=config["right_ascension"], dec=config["declination"], unit="deg", frame="icrs")
max_offset_run = 5 * u.deg
work_directory = location_data
path_plot = Path(work_directory + "/../plots")
print(work_directory + "/../plots")
path_plot.mkdir(exist_ok=True)
path_background = Path(work_directory + "/../plots")  # note: currently the same directory as path_plot
path_background.mkdir(exist_ok=True)

on_radius = 0.2 * u.deg
exclusion_radius = 0.35 * u.deg

# reconstructed-energy binning
e_min_reco = 0.02 * u.TeV
e_max_reco = 10.0 * u.TeV
n_bin_per_decade_e_reco = 10

# true-energy binning
e_min_true = 0.005 * u.TeV
e_max_true = 40.0 * u.TeV
n_bin_per_decade_e_true = 20

# flux-point binning
e_min_flux_point = 0.02 * u.TeV
e_max_flux_point = 10.0 * u.TeV
n_bin_per_decade_flux_point = 5
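# The true-energy axis is deliberately wider (0.005-40 TeV) than the reconstructed
# axis (0.02-10 TeV), so that energy dispersion can migrate events into the
# reconstructed range without edge effects.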

def plot_excess(dataset_to_plot, ax=None, kwargs_excess=None, kwargs_npred_signal=None, **kwargs):
    from gammapy.stats import CashCountsStatistic, WStatCountsStatistic

    kwargs_excess = kwargs_excess or {}
    kwargs_npred_signal = kwargs_npred_signal or {}

    counts, npred = dataset_to_plot.counts.copy(), dataset_to_plot.npred()

    if dataset_to_plot.mask is None:
        # no mask defined: build an all-ones map with the counts geometry
        mask = dataset_to_plot.counts.copy()
        mask.data = 1
    else:
        mask = dataset_to_plot.mask
    counts *= mask
    npred *= mask

    if dataset_to_plot.stat_type == "wstat":
        counts_off = dataset_to_plot.counts_off * mask
        with np.errstate(invalid="ignore"):
            alpha = dataset_to_plot.alpha * mask

        stat = WStatCountsStatistic(
            n_on=counts,
            n_off=counts_off,
            alpha=alpha,
            # mu_sig=npred,
        )
    elif dataset_to_plot.stat_type == "cash":
        stat = CashCountsStatistic(counts.data, npred.data)

    # symmetric 1-sigma error on the excess counts
    yerr = stat.error

    plot_kwargs = kwargs.copy()
    plot_kwargs.update(kwargs_excess)
    plot_kwargs.setdefault("label", "Excess counts")
    ax = dataset_to_plot.excess.plot(ax, yerr=yerr, **plot_kwargs)

    plot_kwargs = kwargs.copy()
    plot_kwargs.update(kwargs_npred_signal)
    plot_kwargs.setdefault("label", "Predicted signal counts")
    dataset_to_plot.npred_signal().plot_hist(ax, **plot_kwargs)

    ax.legend(numpoints=1)
    return ax
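# Minimal usage sketch for plot_excess (hypothetical variable name
# `stacked_dataset`, assumed to be a SpectrumDatasetOnOff with a fitted model
# attached):
#
#     ax = plot_excess(stacked_dataset, kwargs_excess={"color": "tab:blue"})
#     ax.figure.savefig("excess_counts.png")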

def plot_fit(
    dataset_to_plot,
    ax_spectrum=None,
    ax_residuals=None,
    kwargs_spectrum=None,
    kwargs_residuals=None,
):
    from gammapy.datasets.utils import get_axes
    from matplotlib.gridspec import GridSpec

    gs = GridSpec(7, 1)
    ax_spectrum, ax_residuals = get_axes(
        ax_spectrum,
        ax_residuals,
        8,
        7,
        [gs[:5, :]],
        [gs[5:, :]],
        kwargs2={"sharex": ax_spectrum},
    )
    kwargs_spectrum = kwargs_spectrum or {}
    kwargs_residuals = kwargs_residuals or {}

    plot_excess(dataset_to_plot, ax_spectrum, **kwargs_spectrum)

    dataset_to_plot.plot_residuals_spectral(ax_residuals, **kwargs_residuals)

    method = kwargs_residuals.get("method", "diff")
    label = dataset_to_plot._residuals_labels[method]
    ax_residuals.set_ylabel(f"Residuals\n{label}")

    return ax_spectrum, ax_residuals
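# plot_fit draws the excess/predicted-counts panel on top of a spectral-residuals
# panel sharing its x-axis; pass kwargs_residuals={"method": "diff/model"} for
# relative residuals instead of the default "diff".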

data_store = DataStore.from_dir(location_data)
obs_ids = data_store.obs_table[source_position.separation(data_store.obs_table.pointing_radec) < max_offset_run][
    "OBS_ID"
]
obs_collection = data_store.get_observations(obs_ids, required_irf="point-like")

on_region = CircleSkyRegion(center=source_position, radius=on_radius)
exclude_region = CircleSkyRegion(center=source_position, radius=exclusion_radius)
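# Only runs pointing within max_offset_run of the source are kept, and
# "point-like" IRFs are required because the analysis extracts counts from a
# fixed-radius ON region.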

n_bin_energy_reco = int(
    (np.log10(e_max_reco.to_value(u.TeV)) - np.log10(e_min_reco.to_value(u.TeV))) * n_bin_per_decade_e_reco
)
# n bins require n + 1 edges
energy_axis_reco = MapAxis.from_edges(
    np.logspace(np.log10(e_min_reco.to_value(u.TeV)), np.log10(e_max_reco.to_value(u.TeV)), n_bin_energy_reco + 1),
    unit="TeV",
    name="energy",
    interp="log",
)
n_bin_energy_true = int(
    (np.log10(e_max_true.to_value(u.TeV)) - np.log10(e_min_true.to_value(u.TeV))) * n_bin_per_decade_e_true
)
energy_axis_true = MapAxis.from_edges(
    np.logspace(np.log10(e_min_true.to_value(u.TeV)), np.log10(e_max_true.to_value(u.TeV)), n_bin_energy_true + 1),
    unit="TeV",
    name="energy_true",
    interp="log",
)
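# Equivalent, more compact axis construction (a sketch, assuming gammapy >= 1.0):
#
#     energy_axis_reco = MapAxis.from_energy_bounds(
#         e_min_reco, e_max_reco, nbin=n_bin_per_decade_e_reco, per_decade=True, name="energy"
#     )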

geom_on_region = RegionGeom.create(region=on_region, axes=[energy_axis_reco])
geom = WcsGeom.create(
    skydir=source_position,
    npix=(200, 200),
    binsz=0.05,
    frame="icrs",
    axes=[energy_axis_reco],
)

geom_image = geom.to_image()
exclusion_mask = ~geom_image.region_mask([exclude_region])
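# The reflected-regions background maker below only places OFF regions where this
# mask is True, i.e. outside the exclusion circle around the source.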

dataset_maker_spectrum = SpectrumDatasetMaker(selection=["counts", "exposure", "edisp"], use_region_center=True)
spectrum_dataset_empty = SpectrumDataset.create(geom=geom_on_region, energy_axis_true=energy_axis_true)
bkg_maker_spectrum = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask)

dataset_stacked_map = MapDataset.create(geom=geom, name=source_name + "_stacked")
map_dataset_maker = MapDatasetMaker(selection=["counts"])
map_maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max="4 deg")

datasets_spectrum_joint = Datasets()
for obs in obs_collection:
    dataset_spectrum = dataset_maker_spectrum.run(spectrum_dataset_empty.copy(name=f"obs-{obs.obs_id}"), obs)
    dataset_on_off_spectrum = bkg_maker_spectrum.run(observation=obs, dataset=dataset_spectrum)
    datasets_spectrum_joint.append(dataset_on_off_spectrum)

    map_cutout = dataset_stacked_map.cutout(obs.pointing_radec, width="6.5 deg")
    map_dataset = map_dataset_maker.run(map_cutout, obs)
    map_dataset = map_maker_safe_mask.run(map_dataset, obs)
    dataset_stacked_map.stack(map_dataset)

info_table = datasets_spectrum_joint.info_table(cumulative=True)
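# With cumulative=True the info table accumulates livetime, counts and excess run
# by run, which is handy for checking that the significance grows with exposure.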

reference_energy = energy_axis_reco.center[energy_axis_reco.nbin // 2]

spectral_model = {
    "power_law": PowerLawSpectralModel(
        amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"), index=2, reference=reference_energy
    ),
    "log_parabola": LogParabolaSpectralModel(
        amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"), alpha=2, beta=0, reference=reference_energy
    ),
    "power_law_exponential_cutoff": ExpCutoffPowerLawSpectralModel(
        amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"), index=2, lambda_=1.0 / reference_energy, reference=reference_energy
    ),
}
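# Both alternative models are nested in the power law (beta=0 recovers it from the
# log parabola, lambda_=0 from the exponential cutoff), which is what makes the
# likelihood-ratio test below valid with one extra degree of freedom.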

model = {}
for k in spectral_model:
    model[k] = SkyModel(spectral_model=spectral_model[k], name=source_name + "_" + k)

datasets_spectrum_joint_with_model = {}
datasets_spectrum_joint_with_model_stacked = {}
fit_joint = {}
result_fit_joint = {}
for k in spectral_model:
    datasets_spectrum_joint_with_model[k] = datasets_spectrum_joint.copy()
    datasets_spectrum_joint_with_model[k].models = [model[k]]
    fit_joint[k] = Fit()
    result_fit_joint[k] = fit_joint[k].run(datasets=datasets_spectrum_joint_with_model[k])

    datasets_spectrum_joint_with_model_stacked[k] = datasets_spectrum_joint_with_model[k].stack_reduce()
    datasets_spectrum_joint_with_model_stacked[k].models = [model[k]]
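# Each model is fitted jointly across all runs; the stacked dataset is kept for
# plotting only, with the already-fitted model re-attached after stack_reduce().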

if result_fit_joint["power_law_exponential_cutoff"].total_stat > result_fit_joint["log_parabola"].total_stat:
    challenger_model = "log_parabola"
    print("Log parabola performs better than power law with exponential cutoff")
else:
    challenger_model = "power_law_exponential_cutoff"
    print("Power law with exponential cutoff performs better than log parabola")


print("\n\nLikelihood Ratio test\n")
LR_statistic = result_fit_joint["power_law"].total_stat - result_fit_joint[challenger_model].total_stat
dof = 1  # both alternatives add exactly one free parameter to the power law
p_val = scipy.stats.chi2.sf(LR_statistic, dof)

significance_challenger = np.sqrt(2) * scipy.special.erfinv(1 - p_val)
print(
    "The significance of the {} model over power law is {:.5f} sigma".format(challenger_model, significance_challenger)
)

if significance_challenger > 5.0:
    selected_model = challenger_model
    print("The spectral model " + challenger_model + " is selected as significant")
else:
    selected_model = "power_law"
    print("The spectral model " + challenger_model + " is rejected as not significant over power law")
    print("The selected spectral model is the power law")
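# Note on the conversion above: sqrt(2) * erfinv(1 - p) is the two-sided Gaussian
# significance of the p-value; an equivalent one-liner is
# scipy.stats.norm.isf(p_val / 2).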

# summary plot for the selected model
ax1, ax2 = plot_fit(datasets_spectrum_joint_with_model_stacked[selected_model])
excess = datasets_spectrum_joint_with_model_stacked[selected_model].excess.data
n_on = datasets_spectrum_joint_with_model_stacked[selected_model].counts.data
n_off = datasets_spectrum_joint_with_model_stacked[selected_model].counts_off.data
alpha = datasets_spectrum_joint_with_model_stacked[selected_model].alpha.data
# 1-sigma lower edge of the excess, used to set a sensible lower y-limit
low_excess = excess - np.sqrt(n_on + alpha * alpha * n_off)
low_excess = low_excess[low_excess > 0.0]
ax1.set_ylim(bottom=np.min(0.8 * low_excess))

print(datasets_spectrum_joint_with_model[selected_model].models)

n_bin_energy_flux_point = int(
    (np.log10(e_max_flux_point.to_value(u.TeV)) - np.log10(e_min_flux_point.to_value(u.TeV)))
    * n_bin_per_decade_flux_point
)
# n bins require n + 1 edges
energy_edges_flux_point = (
    np.logspace(
        np.log10(e_min_flux_point.to_value(u.TeV)),
        np.log10(e_max_flux_point.to_value(u.TeV)),
        n_bin_energy_flux_point + 1,
    )
    * u.TeV
)

fpe = FluxPointsEstimator(
    energy_edges=energy_edges_flux_point,
    source=source_name + "_" + selected_model,
    selection_optional="all",
    n_sigma_ul=3,
    reoptimize=False,
)
flux_points = fpe.run(datasets=datasets_spectrum_joint_with_model[selected_model])

print(flux_points.to_table(sed_type="e2dnde", formatted=True))

plt.figure(figsize=(8, 5))
ax = flux_points.plot(sed_type="e2dnde", color="darkorange")
flux_points.plot_ts_profiles(ax=ax, sed_type="e2dnde")
plt.savefig(os.path.join(path_plot, "{}__flux_point.png".format(filename_output)), dpi=300)

flux_points_dataset = FluxPointsDataset(data=flux_points, models=model[selected_model])
flux_points_dataset.plot_fit()
plt.savefig(os.path.join(path_plot, "{}__spectra.png".format(filename_output)), dpi=300)
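# Both figures ({name}_{cut}__flux_point.png and {name}_{cut}__spectra.png) are
# written to the plots/ directory created next to the DL3 folder.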