Coverage for lst_auto_rta/Ring_Background_Maps.py: 0%

149 statements  


#!/usr/bin/env python

import astropy
import gammapy
import matplotlib
import numpy as np
import regions

# Print the versions of the main dependencies, for the record.
print("gammapy:", gammapy.__version__)
print("numpy:", np.__version__)
print("astropy:", astropy.__version__)
print("regions:", regions.__version__)
print("matplotlib:", matplotlib.__version__)

import os

import astropy.units as u
import matplotlib.pyplot as plt
import matplotlib.style as style
from astropy.coordinates import SkyCoord

style.use("tableau-colorblind10")
import argparse
from pathlib import Path

from acceptance_modelisation import RadialAcceptanceMapCreator
from gammapy.data import DataStore
from gammapy.datasets import (
    Datasets,
    MapDataset,
    SpectrumDataset,
)
from gammapy.estimators import ExcessMapEstimator
from gammapy.estimators.utils import find_peaks
from gammapy.makers import (
    MapDatasetMaker,
    ReflectedRegionsBackgroundMaker,
    RingBackgroundMaker,
    SafeMaskMaker,
    SpectrumDatasetMaker,
)
from gammapy.maps import Map, MapAxis, RegionGeom, WcsGeom
from matplotlib.offsetbox import AnchoredText
from regions import CircleSkyRegion
from scipy.stats import norm
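
# Command-line configuration: location of the RTA data, the night and run to
# process, an optional extra path component, and the target coordinates (RA, Dec).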

parser = argparse.ArgumentParser(
    description="Automatic script for the DL3 ring background check",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("-d", "--directory", default="/fefs/onsite/pipeline/rta/data/", help="Directory for data")
parser.add_argument("-da", "--date", default="20230705", help="Date of the run to check")
parser.add_argument("-r", "--run-id", default="13600", help="Run ID to check")
parser.add_argument("-add", "--add-string", default="", help="Additional string appended to the data path")
parser.add_argument("-RA", "--right-ascension", default="270.19042", help="Right ascension in deg")
parser.add_argument("-DEC", "--declination", default="78.46806", help="Declination in deg")

args = parser.parse_args()
config = vars(args)
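
# Build the path to the DL3 folder of the selected run and the output name stem
# ("<name>_<cut>"), and create the directories used for plots and background files.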

location_data = (
    config["directory"] + config["date"] + "/" + config["run_id"] + "/" + config["add_string"] + "/DL3"
)  # path to the DL3 folder
source_name = config["run_id"]  # run ID used as the source label (e.g., Crab, GRB210807A for a named target)
cut_type = "standard"  # e.g., loose, hard, ...
filename_output = "{name}_{cut}".format(name=source_name, cut=cut_type)

source_position = SkyCoord(ra=config["right_ascension"], dec=config["declination"], unit="deg", frame="icrs")
max_offset_run = 5 * u.deg
work_directory = location_data
path_plot = Path(work_directory + "/../plots")
print(work_directory + "/../plots")
path_plot.mkdir(exist_ok=True)
path_background = Path(work_directory + "/../plots")  # note: background files currently go to the plots directory
path_background.mkdir(exist_ok=True)
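
# Analysis settings: energy range and binning, ON and exclusion radii, assumed
# field of view, ring-background geometry (r_in, width), correlation radius for
# the excess map, and the binning of the radial acceptance model.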

e_min = 0.05 * u.TeV
e_max = 10.0 * u.TeV
n_bin_per_decade = 10
on_radius = 0.2 * u.deg
exclusion_radius = 0.0 * u.deg
fov_observation = 4.5 * u.deg

r_in = 0.5 * u.deg
width = 0.4 * u.deg
correlation_radius = 0.2 * u.deg

n_bin_per_decade_acceptance = 2.5
offset_bin_size_acceptance = 0.4 * u.deg
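
# Open the DL3 data store and select the runs whose pointing lies within
# max_offset_run of the target; the exclusion region (radius 0 deg here) is
# centred on the target position.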

data_store = DataStore.from_dir(location_data)
data_store.info()

obs_ids = data_store.obs_table[source_position.separation(data_store.obs_table.pointing_radec) < max_offset_run][
    "OBS_ID"
]
obs_collection = data_store.get_observations(obs_ids, required_irf=None)

exclude_region = CircleSkyRegion(center=source_position, radius=exclusion_radius)
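
# Counts-map geometry: a log-spaced energy axis (with these settings,
# n_bin_energy = int(log10(10 / 0.05) * 10) = 23 bins) and a WCS geometry centred
# on the target, sized to 1.5 x (largest pointing separation + field of view).
# The exclusion mask is the complement of the exclusion region on the 2D geometry.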

n_bin_energy = int((np.log10(e_max.to_value(u.TeV)) - np.log10(e_min.to_value(u.TeV))) * n_bin_per_decade)
energy_axis = MapAxis.from_edges(
    np.logspace(np.log10(e_min.to_value(u.TeV)), np.log10(e_max.to_value(u.TeV)), n_bin_energy + 1),
    unit="TeV",
    name="energy",
    interp="log",
)
maximal_run_separation = np.max(
    source_position.separation(data_store.obs_table[np.isin(data_store.obs_table["OBS_ID"], obs_ids)].pointing_radec)
)
geom = WcsGeom.create(
    skydir=source_position,
    width=((maximal_run_separation + fov_observation) * 1.5, (maximal_run_separation + fov_observation) * 1.5),
    binsz=0.02,
    frame="icrs",
    axes=[energy_axis],
)

geom_image = geom.to_image()
exclusion_mask = ~geom_image.region_mask([exclude_region])
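
# First pass: stack the counts of every selected run into one MapDataset, using
# only an offset-max safe mask (no background model is available yet).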

stacked = MapDataset.create(geom=geom, name=source_name + "_stacked")
unstacked = Datasets()
maker = MapDatasetMaker(selection=["counts"])
maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=fov_observation)

for obs in obs_collection:
    cutout = stacked.cutout(obs.pointing_radec, width="6.5 deg")
    dataset = maker.run(cutout, obs)
    dataset = maker_safe_mask.run(dataset, obs)
    stacked.stack(dataset)
    unstacked.append(dataset)
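
# Radial acceptance (background) model, built per observation with the external
# acceptance_modelisation package on coarser axes (here int(2.5 * log10(10 / 0.05)) = 5
# energy bins and int(4.5 / 0.4) = 11 offset bins), then written out as one
# background FITS file per run.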

n_bin_energy_acceptance = int(
    (np.log10(e_max.to_value(u.TeV)) - np.log10(e_min.to_value(u.TeV))) * n_bin_per_decade_acceptance
)
energyAxisAcceptance = MapAxis.from_edges(
    np.logspace(np.log10(e_min.to_value(u.TeV)), np.log10(e_max.to_value(u.TeV)), 1 + n_bin_energy_acceptance),
    unit="TeV",
    name="energy",
    interp="log",
)
n_bin_offset_acceptance = int(fov_observation.to_value(u.deg) / offset_bin_size_acceptance.to_value(u.deg))
offsetAxisAcceptance = MapAxis.from_edges(
    np.linspace(0.0, fov_observation.to_value(u.deg), 1 + n_bin_offset_acceptance),
    unit="deg",
    name="offset",
    interp="lin",
)

background_creator = RadialAcceptanceMapCreator(
    energyAxisAcceptance,
    offsetAxisAcceptance,
    exclude_regions=[
        exclude_region,
    ],
    oversample_map=10,
)
background = background_creator.create_radial_acceptance_map_per_observation(obs_collection)
# background[list(background.keys())[0]].peek()
for obs_id in background.keys():
    hdu_background = background[obs_id].to_table_hdu()
    hdu_background.writeto(
        os.path.join(path_background, filename_output + "_" + str(obs_id) + "_background.fits"), overwrite=True
    )
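
# Register the new background models in the data store's HDU index table so that
# they are picked up as IRFs when the observations are reloaded below.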

data_store.hdu_table.remove_rows(data_store.hdu_table["HDU_TYPE"] == "bkg")

for obs_id in np.unique(data_store.hdu_table["OBS_ID"]):
    data_store.hdu_table.add_row(
        {
            "OBS_ID": obs_id,
            "HDU_TYPE": "bkg",
            "HDU_CLASS": "bkg_2d",
            "FILE_DIR": "",
            "FILE_NAME": os.path.join(path_background, filename_output + "_" + str(obs_id) + "_background.fits"),
            "HDU_NAME": "BACKGROUND",
            # note: the size is taken from the last background HDU written in the loop above
            "SIZE": hdu_background.size,
        }
    )

data_store.hdu_table = data_store.hdu_table.copy()
obs_collection = data_store.get_observations(obs_ids, required_irf=None)
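
# Second pass: re-stack the runs, this time filling both the counts and the
# background predicted by the acceptance model.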

stacked = MapDataset.create(geom=geom)
unstacked = Datasets()
maker = MapDatasetMaker(selection=["counts", "background"])
maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=fov_observation)

for obs in obs_collection:
    cutout = stacked.cutout(obs.pointing_radec, width="6.5 deg")
    dataset = maker.run(cutout, obs)
    dataset = maker_safe_mask.run(dataset, obs)
    stacked.stack(dataset)
    unstacked.append(dataset)
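
# Ring background estimation on the stacked image, followed by correlated excess
# and significance (sqrt_ts) maps from the ExcessMapEstimator.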

ring_bkg_maker = RingBackgroundMaker(r_in=r_in, width=width)  # , exclusion_mask=exclusion_mask)
stacked_ring = ring_bkg_maker.run(stacked.to_image())
estimator = ExcessMapEstimator(correlation_radius, correlate_off=False)
lima_maps = estimator.run(stacked_ring)

significance_all = lima_maps["sqrt_ts"].data[np.isfinite(lima_maps["sqrt_ts"].data)]
significance_background = lima_maps["sqrt_ts"].data[
    np.logical_and(np.isfinite(lima_maps["sqrt_ts"].data), exclusion_mask.data)
]
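
# Significance distribution: histogram all map bins and the off (background) bins
# outside the exclusion region, with roughly 3 bins per unit of significance, and
# fit the off distribution with a Gaussian; for a well-modelled background the fit
# should be close to mean 0 and width 1.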

bins = np.linspace(
    np.min(significance_all),
    np.max(significance_all),
    num=int((np.max(significance_all) - np.min(significance_all)) * 3),
)

# Now, fit the off distribution with a Gaussian
mu, std = norm.fit(significance_background)
x = np.linspace(-8, 8, 50)
p = norm.pdf(x, mu, std)
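
# Summary figure: excess map (top), significance map with the target position and
# any peaks above the detection threshold marked (middle), and the significance
# distribution (bottom).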

plt.figure(figsize=(8, 21))
ax1 = plt.subplot(3, 1, 1, projection=lima_maps["sqrt_ts"].geom.wcs)
ax2 = plt.subplot(3, 1, 2, projection=lima_maps["sqrt_ts"].geom.wcs)
ax3 = plt.subplot(3, 1, 3)

ax2.set_title("Significance map")
lima_maps["sqrt_ts"].plot(ax=ax2, add_cbar=True)
ax2.scatter(
    source_position.ra,
    source_position.dec,
    transform=ax2.get_transform("world"),
    marker="+",
    c="red",
    label=filename_output,
    s=[300],
    linewidths=3,
)
ax2.legend()

sources = find_peaks(
    lima_maps["sqrt_ts"].get_image_by_idx((0,)),
    threshold=7,
    min_distance="0.2 deg",
)
print(sources)
# now = dt.datetime.now()
# timestamp_str = now.strftime("%Y-%m-%d %H:%M:%S")
# ax1.text(0.02, 0.98, timestamp_str, transform=ax1.transAxes,
#          fontsize=11, fontweight='bold', va='top', ha='left')
# ax2.text(0.02, 0.98, timestamp_str, transform=ax2.transAxes,
#          fontsize=11, fontweight='bold', va='top', ha='left')
if len(sources) > 0:
    ax2.scatter(
        sources["ra"],
        sources["dec"],
        # use the WCS transform of the significance map axes (ax2) rather than plt.gca(),
        # which at this point is the non-WCS histogram axes
        transform=ax2.get_transform("icrs"),
        color="none",
        edgecolor="white",
        marker="o",
        s=300,
        lw=1.5,
    )

ax1.set_title("Excess map")
lima_maps["npred_excess"].plot(ax=ax1, add_cbar=True)
ax1.scatter(
    source_position.ra,
    source_position.dec,
    transform=ax1.get_transform("world"),
    marker="+",
    c="red",
    label=filename_output,
    s=[300],
    linewidths=3,
)
ax1.legend()

ax3.set_title("Significance distribution")
ax3.hist(significance_all, density=True, alpha=0.5, color="red", label="All bins", bins=bins)
ax3.hist(significance_background, density=True, alpha=0.5, color="blue", label="Background bins", bins=bins)

ax3.plot(x, p, lw=2, color="black")
ax3.legend()
ax3.set_xlabel("Significance")
ax3.set_yscale("log")
ax3.set_ylim(1e-5, 1)
xmin, xmax = np.min(significance_all), np.max(significance_all)
ax3.set_xlim(xmin, xmax)

text = (r"$\mu$ = {:.2f}" + "\n" + r"$\sigma$ = {:.2f}").format(mu, std)
box_prop = dict(boxstyle="Round", facecolor="white", alpha=0.5)
text_prop = dict(fontsize="x-large", bbox=box_prop)
# txt = AnchoredText(text, loc=2, transform=ax3.transAxes, prop=text_prop, frameon=False)
txt = AnchoredText(text, loc=2, prop=text_prop, frameon=False)
ax3.add_artist(txt)

plt.savefig(os.path.join(path_plot, "{}__sky_map.png".format(filename_output)), dpi=300)
print(f"Fit results: mu = {mu:.2f}, std = {std:.2f}")