#!/usr/bin/env python
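"""Ring background sky maps for the LST auto RTA (lst_auto_rta/Ring_Background_Maps.py).

Builds a per-observation radial acceptance (background) model from the DL3 data of a run,
then produces ring-background excess and significance maps and a significance
distribution plot.
"""
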
import astropy
import gammapy
import matplotlib
import numpy as np
import regions

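# Report the versions of the main dependencies for provenance.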
9print("gammapy:", gammapy.__version__)
10print("numpy:", np.__version__)
11print("astropy", astropy.__version__)
12print("regions", regions.__version__)
13print("matplotlib", matplotlib.__version__)
import os

import astropy.units as u
import matplotlib.pyplot as plt
import matplotlib.style as style
import numpy as np
from astropy.coordinates import SkyCoord

style.use("tableau-colorblind10")

import argparse
from pathlib import Path

from acceptance_modelisation import RadialAcceptanceMapCreator
from gammapy.data import DataStore
from gammapy.datasets import (
    Datasets,
    MapDataset,
    SpectrumDataset,
)
from gammapy.estimators import ExcessMapEstimator
from gammapy.estimators.utils import find_peaks
from gammapy.makers import (
    MapDatasetMaker,
    ReflectedRegionsBackgroundMaker,
    RingBackgroundMaker,
    SafeMaskMaker,
    SpectrumDatasetMaker,
)
from gammapy.maps import Map, MapAxis, RegionGeom, WcsGeom
from matplotlib.offsetbox import AnchoredText
from regions import CircleSkyRegion
from scipy.stats import norm

parser = argparse.ArgumentParser(
    description="Automatic script producing ring-background sky maps from DL3 data",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("-d", "--directory", default="/fefs/onsite/pipeline/rta/data/", help="Directory for data")
parser.add_argument("-da", "--date", default="20230705", help="Date of the run to check")
parser.add_argument("-r", "--run-id", default="13600", help="Run id to check")
parser.add_argument("-add", "--add-string", default="", help="Additional string appended to the data path")
parser.add_argument("-RA", "--right-ascension", default="270.19042", help="Right ascension of the source in deg")
parser.add_argument("-DEC", "--declination", default="78.46806", help="Declination of the source in deg")

args = parser.parse_args()
config = vars(args)

location_data = (
    config["directory"] + config["date"] + "/" + config["run_id"] + "/" + config["add_string"] + "/DL3"
)  # path to the DL3 folder
source_name = config["run_id"]  # label used in output file names (here the run id; e.g., Crab, GRB210807A for named sources)
cut_type = "standard"  # e.g., loose, hard, ...
filename_output = "{name}_{cut}".format(name=source_name, cut=cut_type)

source_position = SkyCoord(ra=config["right_ascension"], dec=config["declination"], unit="deg", frame="icrs")
max_offset_run = 10 * u.deg
work_directory = location_data
path_plot = Path(work_directory + "/../plots")
print(work_directory + "/../plots")
path_plot.mkdir(exist_ok=True)
path_background = Path(work_directory + "/../plots")  # background model files are written next to the plots
path_background.mkdir(exist_ok=True)

# Map energy range and region sizes
e_min = 0.05 * u.TeV
e_max = 10.0 * u.TeV
n_bin_per_decade = 10
on_radius = 0.2 * u.deg
exclusion_radius = 0.0 * u.deg
fov_observation = 7 * u.deg

# Ring background and correlation parameters
r_in = 0.5 * u.deg
width = 0.4 * u.deg
correlation_radius = 0.2 * u.deg

# Binning of the radial acceptance (background) model
n_bin_per_decade_acceptance = 1
offset_bin_size_acceptance = 0.4 * u.deg

data_store = DataStore.from_dir(location_data)
data_store.info()

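# Keep only the observations pointing within max_offset_run of the target position.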
obs_ids = data_store.obs_table[source_position.separation(data_store.obs_table.pointing_radec) < max_offset_run][
    "OBS_ID"
]
obs_collection = data_store.get_observations(obs_ids, required_irf=None)

exclude_region = CircleSkyRegion(center=source_position, radius=exclusion_radius)

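# Energy axis of the maps: n_bin_per_decade logarithmic bins between e_min and e_max.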
n_bin_energy = int((np.log10(e_max.to_value(u.TeV)) - np.log10(e_min.to_value(u.TeV))) * n_bin_per_decade)
energy_axis = MapAxis.from_edges(
    np.logspace(np.log10(e_min.to_value(u.TeV)), np.log10(e_max.to_value(u.TeV)), n_bin_energy + 1),
    unit="TeV",
    name="energy",
    interp="log",
)

# Alternative geometry centred on the source position:
# maximal_run_separation = np.max(
#     source_position.separation(data_store.obs_table[np.isin(data_store.obs_table["OBS_ID"], obs_ids)].pointing_radec)
# )
# geom = WcsGeom.create(
#     skydir=source_position,
#     width=((maximal_run_separation + fov_observation) * 1.2, (maximal_run_separation + fov_observation) * 1.2),
#     binsz=0.05,
#     frame="icrs",
#     axes=[energy_axis],
# )
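
# Centre the map on the mean pointing direction: average the pointing unit vectors
# on the sphere, renormalise, and convert back to RA/Dec. The map width is chosen
# to contain every pointing plus the instrument field of view.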
pointing_coords = SkyCoord([obs.pointing_radec for obs in obs_collection])

x = np.cos(pointing_coords.ra.radian) * np.cos(pointing_coords.dec.radian)
y = np.sin(pointing_coords.ra.radian) * np.cos(pointing_coords.dec.radian)
z = np.sin(pointing_coords.dec.radian)

x_mean = np.mean(x)
y_mean = np.mean(y)
z_mean = np.mean(z)

r = np.sqrt(x_mean**2 + y_mean**2 + z_mean**2)
x_mean /= r
y_mean /= r
z_mean /= r

ra_mean = np.arctan2(y_mean, x_mean) * u.rad
dec_mean = np.arcsin(z_mean) * u.rad

center_coord = SkyCoord(ra=ra_mean, dec=dec_mean, frame="icrs")
max_separation = center_coord.separation(pointing_coords).max()
map_width = 2 * max_separation + fov_observation

geom = WcsGeom.create(
    skydir=center_coord,
    width=(map_width, map_width),
    binsz=0.05,
    frame="icrs",
    axes=[energy_axis],
)

geom_image = geom.to_image()
exclusion_mask = ~geom_image.region_mask([exclude_region])  # True outside the exclusion region (all True for radius 0 deg)
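
# First pass: fill the counts maps only (no background model is available yet).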
stacked = MapDataset.create(geom=geom, name=source_name + "_stacked")
unstacked = Datasets()
maker = MapDatasetMaker(selection=["counts"])
maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=fov_observation)

for obs in obs_collection:
    cutout = stacked.cutout(obs.pointing_radec, width="6.5 deg")
    dataset = maker.run(cutout, obs)
    dataset = maker_safe_mask.run(dataset, obs)
    stacked.stack(dataset)
    unstacked.append(dataset)

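# Coarse energy and offset axes used to build the radial acceptance (background) model.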
n_bin_energy_acceptance = int(
    (np.log10(e_max.to_value(u.TeV)) - np.log10(e_min.to_value(u.TeV))) * n_bin_per_decade_acceptance
)
energyAxisAcceptance = MapAxis.from_edges(
    np.logspace(np.log10(e_min.to_value(u.TeV)), np.log10(e_max.to_value(u.TeV)), 1 + n_bin_energy_acceptance),
    unit="TeV",
    name="energy",
    interp="log",
)
n_bin_offset_acceptance = int(fov_observation.to_value(u.deg) / offset_bin_size_acceptance.to_value(u.deg))
offsetAxisAcceptance = MapAxis.from_edges(
    np.linspace(0.0, fov_observation.to_value(u.deg), 1 + n_bin_offset_acceptance),
    unit="deg",
    name="offset",
    interp="lin",
)

background_creator = RadialAcceptanceMapCreator(
    energyAxisAcceptance,
    offsetAxisAcceptance,
    exclude_regions=[
        exclude_region,
    ],
    oversample_map=10,
)
background = background_creator.create_radial_acceptance_map_per_observation(obs_collection)
# background[list(background.keys())[0]].peek()

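# Write one 2D background model FITS file per observation.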
for obs_id in background.keys():
    hdu_background = background[obs_id].to_table_hdu()
    hdu_background.writeto(
        os.path.join(path_background, filename_output + "_" + str(obs_id) + "_background.fits"), overwrite=True
    )

# Drop any pre-existing background HDUs and register the newly written ones in the HDU index.
data_store.hdu_table.remove_rows(data_store.hdu_table["HDU_TYPE"] == "bkg")

for obs_id in np.unique(data_store.hdu_table["OBS_ID"]):
    data_store.hdu_table.add_row(
        {
            "OBS_ID": obs_id,
            "HDU_TYPE": "bkg",
            "HDU_CLASS": "bkg_2d",
            "FILE_DIR": "",
            "FILE_NAME": os.path.join(path_background, filename_output + "_" + str(obs_id) + "_background.fits"),
            "HDU_NAME": "BACKGROUND",
            "SIZE": hdu_background.size,  # size of the last background HDU written above
        }
    )

data_store.hdu_table = data_store.hdu_table.copy()
obs_collection = data_store.get_observations(obs_ids, required_irf=None)
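
# Second pass: re-fill the maps, now also selecting the background predicted by the model.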
stacked = MapDataset.create(geom=geom)
unstacked = Datasets()
maker = MapDatasetMaker(selection=["counts", "background"])
maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=fov_observation)

for obs in obs_collection:
    cutout = stacked.cutout(obs.pointing_radec, width="6.5 deg")
    dataset = maker.run(cutout, obs)
    dataset = maker_safe_mask.run(dataset, obs)
    stacked.stack(dataset)
    unstacked.append(dataset)
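
# Ring background estimation on the stacked image, then correlated excess and
# significance (sqrt_ts) maps from the ExcessMapEstimator.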
ring_bkg_maker = RingBackgroundMaker(r_in=r_in, width=width)  # , exclusion_mask=exclusion_mask)
stacked_ring = ring_bkg_maker.run(stacked.to_image())
estimator = ExcessMapEstimator(correlation_radius, correlate_off=False)
lima_maps = estimator.run(stacked_ring)

significance_all = lima_maps["sqrt_ts"].data[np.isfinite(lima_maps["sqrt_ts"].data)]
significance_background = lima_maps["sqrt_ts"].data[
    np.logical_and(np.isfinite(lima_maps["sqrt_ts"].data), exclusion_mask.data)
]

bins = np.linspace(
    np.min(significance_all),
    np.max(significance_all),
    num=int((np.max(significance_all) - np.min(significance_all)) * 3),
)
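
# For a well-modelled background the off-region significance distribution should be
# close to a standard normal; the fitted mu and std quantify residual deviations.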
# Now, fit the off distribution with a Gaussian
mu, std = norm.fit(significance_background)
x = np.linspace(-8, 8, 50)
p = norm.pdf(x, mu, std)

plt.figure(figsize=(8, 21))
ax1 = plt.subplot(3, 1, 1, projection=lima_maps["sqrt_ts"].geom.wcs)
ax2 = plt.subplot(3, 1, 2, projection=lima_maps["sqrt_ts"].geom.wcs)
ax3 = plt.subplot(3, 1, 3)

ax2.set_title("Significance map")
lima_maps["sqrt_ts"].plot(ax=ax2, add_cbar=True)
ax2.scatter(
    source_position.ra,
    source_position.dec,
    transform=ax2.get_transform("world"),
    marker="+",
    c="red",
    label=filename_output,
    s=[300],
    linewidths=3,
)
ax2.legend()
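
# Search for significant (> 5 sigma) hotspots in the significance map.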
sources = find_peaks(
    lima_maps["sqrt_ts"].get_image_by_idx((0,)),
    threshold=5,
    min_distance="0.2 deg",
)
# sources = find_peaks(
#     lima_maps["npred_excess"].get_image_by_idx((0,)),
#     threshold=150,
#     min_distance="0.2 deg",
# )
print(sources)
# now = dt.datetime.now()
# timestamp_str = now.strftime("%Y-%m-%d %H:%M:%S")
# ax1.text(0.02, 0.98, timestamp_str, transform=ax1.transAxes,
#          fontsize=11, fontweight='bold', va='top', ha='left')
# ax2.text(0.02, 0.98, timestamp_str, transform=ax2.transAxes,
#          fontsize=11, fontweight='bold', va='top', ha='left')

if len(sources) > 0:
    ax2.scatter(
        sources["ra"],
        sources["dec"],
        # transform=plt.gca().get_transform("icrs"),
        transform=ax2.get_transform("icrs"),
        color="none",
        edgecolor="red",
        marker="o",
        s=300,
        lw=1.5,
    )

for obs in obs_collection:
    ax2.scatter(
        obs.pointing_radec.ra,
        obs.pointing_radec.dec,
        transform=ax2.get_transform("icrs"),
        marker="x",
        c="black",
        s=100,
        linewidths=1.5,
    )

# Dummy entry for legend
ax2.scatter([], [], marker="x", c="black", label="Pointings")
ax2.legend()  # redraw the legend so the pointing marker is included

ax1.set_title("Excess map")
lima_maps["npred_excess"].plot(ax=ax1, add_cbar=True)
ax1.scatter(
    source_position.ra,
    source_position.dec,
    transform=ax1.get_transform("icrs"),
    marker="+",
    c="red",
    label=filename_output,
    s=[300],
    linewidths=3,
)

for obs in obs_collection:
    ax1.scatter(
        obs.pointing_radec.ra,
        obs.pointing_radec.dec,
        transform=ax1.get_transform("icrs"),
        marker="x",
        c="black",
        s=100,
        linewidths=1.5,
    )

ax1.scatter([], [], marker="x", c="black", label="Pointings")
ax1.legend()

ax3.set_title("Significance distribution")
ax3.hist(significance_all, density=True, alpha=0.5, color="red", label="All bins", bins=bins)
ax3.hist(significance_background, density=True, alpha=0.5, color="blue", label="Background bins", bins=bins)
ax3.plot(x, p, lw=2, color="black")
ax3.legend()
ax3.set_xlabel("Significance")
ax3.set_yscale("log")
ax3.set_ylim(1e-5, 1)
xmin, xmax = np.min(significance_all), np.max(significance_all)
ax3.set_xlim(xmin, xmax)

text = "$\\mu$ = {:.2f}\n$\\sigma$ = {:.2f}".format(mu, std)
box_prop = dict(boxstyle="Round", facecolor="white", alpha=0.5)
text_prop = dict(fontsize="x-large", bbox=box_prop)
# txt = AnchoredText(text, loc=2, transform=ax3.transAxes, prop=text_prop, frameon=False)
txt = AnchoredText(text, loc=2, prop=text_prop, frameon=False)
ax3.add_artist(txt)

plt.savefig(os.path.join(path_plot, "{}__sky_map.png".format(filename_output)), dpi=300)
print(f"Fit results: mu = {mu:.2f}, std = {std:.2f}")