Coverage for lst_auto_rta/Ring_Background_Maps_ACADA.py: 0%
144 statements
« prev ^ index » next coverage.py v7.6.7, created at 2024-11-17 14:47 +0000
#!/usr/bin/env python
"""Ring-background sky-map production for the LST auto real-time analysis.

Reads a DL3 data store for one run, fits a radial acceptance (background)
model per observation, applies a ring-background estimate, and saves
significance/excess sky maps plus the significance distribution.
"""
import argparse
import os
from pathlib import Path

import astropy
import astropy.units as u
import gammapy
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.style as style
import numpy as np
import regions
from astropy.coordinates import SkyCoord
from matplotlib.offsetbox import AnchoredText
from regions import CircleSkyRegion
from scipy.stats import norm

from acceptance_modelisation import RadialAcceptanceMapCreator
from gammapy.data import DataStore
from gammapy.datasets import (
    Datasets,
    MapDataset,
    SpectrumDataset,
)
from gammapy.estimators import ExcessMapEstimator
from gammapy.makers import (
    MapDatasetMaker,
    ReflectedRegionsBackgroundMaker,
    RingBackgroundMaker,
    SafeMaskMaker,
    SpectrumDatasetMaker,
)
from gammapy.maps import Map, MapAxis, RegionGeom, WcsGeom

# Version banner: helps debug environment mismatches in the RTA pipeline.
print("gammapy:", gammapy.__version__)
print("numpy:", np.__version__)
print("astropy", astropy.__version__)
print("regions", regions.__version__)
print("matplotlib", matplotlib.__version__)

style.use("tableau-colorblind10")
# Command-line interface: data location, run identification and source position.
parser = argparse.ArgumentParser(
    description="Automatic Script for the DL1 check", formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument("-d", "--directory", default="/fefs/onsite/pipeline/rta/data/", help="Directory for data")
parser.add_argument("-da", "--date", default="20230705", help="Date of the run to check")
parser.add_argument("-r", "--run-id", default="13600", help="run id to check")
parser.add_argument("-add", "--add-string", default="reco/", help="add a string to the path")
parser.add_argument("-RA", "--right-ascension", default="270.19042", help="right-ascension in deg")
parser.add_argument("-DEC", "--declination", default="78.46806", help="declination in deg")
args = parser.parse_args()
config = vars(args)

# Path to the DL3 folder of the requested run.
location_data = (
    config["directory"] + config["date"] + "/" + config["run_id"] + "/" + config["add_string"] + "/plots"
)
source_name = config["run_id"]  # e.g., Crab, GRB210807A
cut_type = "standard"  # e.g., loose, hard, ...
filename_output = "{name}_{cut}".format(name=source_name, cut=cut_type)

source_position = SkyCoord(ra=config["right_ascension"], dec=config["declination"], unit="deg", frame="icrs")
# Keep only observations pointing within this radius of the source.
max_offset_run = 5 * u.deg

# Plots and background models are written next to the DL3 data.  The original
# code built and mkdir'ed the same directory twice under two names; keep both
# names (used further down) but create the directory once.
work_directory = location_data
print(work_directory + "/")
path_plot = Path(work_directory)
path_plot.mkdir(parents=True, exist_ok=True)  # parents=True: robust if tree is missing
path_background = path_plot
# --- Analysis configuration ---------------------------------------------
e_min = 0.05 * u.TeV  # lower edge of the energy axis
e_max = 10.0 * u.TeV  # upper edge of the energy axis
n_bin_per_decade = 10  # energy binning of the counts maps
on_radius = 0.2 * u.deg  # ON-region radius
exclusion_radius = 0.35 * u.deg  # source exclusion for background estimation
fov_observation = 4.5 * u.deg  # maximum accepted offset in the field of view

# Ring-background parameters
r_in = 0.5 * u.deg  # inner ring radius
width = 0.4 * u.deg  # ring width
correlation_radius = 0.2 * u.deg  # correlation radius of the excess estimator

# Binning used when fitting the radial acceptance model
n_bin_per_decade_acceptance = 2.5
offset_bin_size_acceptance = 0.4 * u.deg
# Open the DL3 data store and select the observations pointing close enough
# to the target position.
data_store = DataStore.from_dir(location_data)
data_store.info()

pointing_separation = source_position.separation(data_store.obs_table.pointing_radec)
obs_ids = data_store.obs_table[pointing_separation < max_offset_run]["OBS_ID"]
obs_collection = data_store.get_observations(obs_ids, required_irf=None)

# Region around the source excluded from all background estimates.
exclude_region = CircleSkyRegion(center=source_position, radius=exclusion_radius)
# Energy axis: n_bin_per_decade logarithmic bins between e_min and e_max.
log_e_min = np.log10(e_min.to_value(u.TeV))
log_e_max = np.log10(e_max.to_value(u.TeV))
n_bin_energy = int((log_e_max - log_e_min) * n_bin_per_decade)
energy_axis = MapAxis.from_edges(
    np.logspace(log_e_min, log_e_max, n_bin_energy + 1),
    unit="TeV",
    name="energy",
    interp="log",
)

# Make the sky map wide enough to contain every selected run's full FoV.
maximal_run_separation = np.max(
    source_position.separation(data_store.obs_table[np.isin(data_store.obs_table["OBS_ID"], obs_ids)].pointing_radec)
)
map_width = (maximal_run_separation + fov_observation) * 1.5
geom = WcsGeom.create(
    skydir=source_position,
    width=(map_width, map_width),
    binsz=0.02,
    frame="icrs",
    axes=[energy_axis],
)

geom_image = geom.to_image()
# True everywhere except inside the source exclusion region.
exclusion_mask = ~geom_image.region_mask([exclude_region])
# First pass: stack the counts only (no background model available yet).
stacked = MapDataset.create(geom=geom, name=source_name + "_stacked")
unstacked = Datasets()
maker = MapDatasetMaker(selection=["counts"])
maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=fov_observation)

for obs in obs_collection:
    # NOTE(review): cutout width is hard-coded; presumably meant to cover the
    # 4.5 deg FoV with some margin — confirm against fov_observation.
    cutout = stacked.cutout(obs.pointing_radec, width="6.5 deg")
    dataset = maker_safe_mask.run(maker.run(cutout, obs), obs)
    stacked.stack(dataset)
    unstacked.append(dataset)
# Coarser energy/offset binning used to fit the radial acceptance model.
# (Renamed the camelCase axis variables to snake_case for consistency with
# the rest of the file; they are only used here.)
n_bin_energy_acceptance = int(
    (np.log10(e_max.to_value(u.TeV)) - np.log10(e_min.to_value(u.TeV))) * n_bin_per_decade_acceptance
)
energy_axis_acceptance = MapAxis.from_edges(
    np.logspace(np.log10(e_min.to_value(u.TeV)), np.log10(e_max.to_value(u.TeV)), 1 + n_bin_energy_acceptance),
    unit="TeV",
    name="energy",
    interp="log",
)
n_bin_offset_acceptance = int(fov_observation.to_value(u.deg) / offset_bin_size_acceptance.to_value(u.deg))
offset_axis_acceptance = MapAxis.from_edges(
    np.linspace(0.0, fov_observation.to_value(u.deg), 1 + n_bin_offset_acceptance),
    unit="deg",
    name="offset",
    interp="lin",
)

# Fit one radial acceptance (background) model per observation, masking the
# source region so it does not bias the fit.
background_creator = RadialAcceptanceMapCreator(
    energy_axis_acceptance,
    offset_axis_acceptance,
    exclude_regions=[exclude_region],
    oversample_map=10,
)
background = background_creator.create_radial_acceptance_map_per_observation(obs_collection)
# Write each per-observation background model to FITS.  The 5-digit
# zero-padded obs_id in the file name is the contract the HDU index rows
# added below must follow.
for obs_id in background.keys():
    hdu_background = background[obs_id].to_table_hdu()
    hdu_background.writeto(
        os.path.join(path_background, filename_output + "_" + str(obs_id).zfill(5) + "_background.fits"),
        overwrite=True,
    )
# Re-point the data store's HDU index at the freshly written background models:
# drop any pre-existing "bkg" rows, then add one row per observation.
data_store.hdu_table.remove_rows(data_store.hdu_table["HDU_TYPE"] == "bkg")

for obs_id in np.unique(data_store.hdu_table["OBS_ID"]):
    data_store.hdu_table.add_row(
        {
            "OBS_ID": obs_id,
            "HDU_TYPE": "bkg",
            "HDU_CLASS": "bkg_2d",
            "FILE_DIR": "",
            # BUGFIX: pad obs_id to 5 digits so FILE_NAME matches the files
            # written above with str(obs_id).zfill(5); the unpadded name
            # pointed at non-existent files for obs_id < 10000.
            "FILE_NAME": os.path.join(
                path_background, filename_output + "_" + str(obs_id).zfill(5) + "_background.fits"
            ),
            "HDU_NAME": "BACKGROUND",
            # NOTE(review): hdu_background leaks from the write loop above, so
            # every row records the size of the last-written HDU — confirm.
            "SIZE": hdu_background.size,
        }
    )

# NOTE(review): the copy presumably forces the data store to pick up the new
# rows (invalidates cached state) — confirm against the gammapy version used.
data_store.hdu_table = data_store.hdu_table.copy()
obs_collection = data_store.get_observations(obs_ids, required_irf=None)
# Second pass: stack counts together with the newly attached background model.
stacked = MapDataset.create(geom=geom)
unstacked = Datasets()
maker = MapDatasetMaker(selection=["counts", "background"])
maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=fov_observation)

for observation in obs_collection:
    cutout = stacked.cutout(observation.pointing_radec, width="6.5 deg")
    dataset = maker.run(cutout, observation)
    dataset = maker_safe_mask.run(dataset, observation)
    stacked.stack(dataset)
    unstacked.append(dataset)

# Ring-background estimate and Li&Ma significance/excess maps.
# NOTE(review): the exclusion mask is not passed to the ring maker (it was
# commented out in the original) — confirm whether it should be re-enabled.
ring_bkg_maker = RingBackgroundMaker(r_in=r_in, width=width)
stacked_ring = ring_bkg_maker.run(stacked.to_image())
estimator = ExcessMapEstimator(correlation_radius, correlate_off=False)
lima_maps = estimator.run(stacked_ring)
# Significance values of all finite map bins, and of the background-only bins
# (finite bins lying outside the source exclusion region).
sqrt_ts = lima_maps["sqrt_ts"].data
finite = np.isfinite(sqrt_ts)
significance_all = sqrt_ts[finite]
significance_background = sqrt_ts[np.logical_and(finite, exclusion_mask.data)]

# Histogram binning: ~3 bins per unit of significance.  Guard with max(..., 2)
# so a degenerate (< 1 sigma wide) distribution still yields valid bin edges.
bins = np.linspace(
    np.min(significance_all),
    np.max(significance_all),
    num=max(int((np.max(significance_all) - np.min(significance_all)) * 3), 2),
)

# Fit the background-only significance distribution with a Gaussian; an
# unbiased background model should give mu ~ 0 and std ~ 1.
mu, std = norm.fit(significance_background)
x = np.linspace(-8, 8, 50)
p = norm.pdf(x, mu, std)
def _mark_source(ax):
    """Overlay a red cross at the source position, labelled with the output name."""
    ax.scatter(
        source_position.ra,
        source_position.dec,
        transform=ax.get_transform("world"),
        marker="+",
        c="red",
        label=filename_output,
        s=[300],
        linewidths=3,
    )
    ax.legend()


# Three stacked panels: excess map, significance map, significance histogram.
plt.figure(figsize=(8, 21))
ax1 = plt.subplot(3, 1, 1, projection=lima_maps["sqrt_ts"].geom.wcs)
ax2 = plt.subplot(3, 1, 2, projection=lima_maps["sqrt_ts"].geom.wcs)
ax3 = plt.subplot(3, 1, 3)

ax2.set_title("Significance map")
lima_maps["sqrt_ts"].plot(ax=ax2, add_cbar=True)
_mark_source(ax2)

ax1.set_title("Excess map")
lima_maps["npred_excess"].plot(ax=ax1, add_cbar=True)
_mark_source(ax1)

ax3.set_title("Significance distribution")
ax3.hist(significance_all, density=True, alpha=0.5, color="red", label="All bins", bins=bins)
ax3.hist(significance_background, density=True, alpha=0.5, color="blue", label="Background bins", bins=bins)
ax3.plot(x, p, lw=2, color="black")  # fitted Gaussian over the background bins
ax3.legend()
ax3.set_xlabel("Significance")
ax3.set_yscale("log")
ax3.set_ylim(1e-5, 1)
xmin, xmax = np.min(significance_all), np.max(significance_all)
ax3.set_xlim(xmin, xmax)

# Annotate the fitted Gaussian parameters (fixed the redundant `text = text =`
# double assignment; the concatenated template is formatted as a whole).
text = (r"$\mu$ = {:.2f}" + "\n" + r"$\sigma$ = {:.2f}").format(mu, std)
box_prop = dict(boxstyle="Round", facecolor="white", alpha=0.5)
text_prop = dict(fontsize="x-large", bbox=box_prop)
txt = AnchoredText(text, loc=2, prop=text_prop, frameon=False)
ax3.add_artist(txt)

plt.savefig(os.path.join(path_plot, "{}__sky_map.png".format(filename_output)), dpi=300)
print(f"Fit results: mu = {mu:.2f}, std = {std:.2f}")