## Script

# %matplotlib inline
### Licensed under a 3-clause BSD style license - see LICENSE.rst
import collections
import datetime as dt
import logging
import os
from glob import iglob
from pathlib import Path

import astropy.io.fits as fits
import astropy.units as u
import gammapy
import matplotlib
import matplotlib.colors as mcolors
import matplotlib.pyplot as plt
import numpy as np
import regions
from acceptanceCalculation import create_radial_acceptance_map
from astropy.convolution import Gaussian2DKernel
from astropy.coordinates import AltAz, Angle, SkyCoord
from astropy.coordinates.angle_utilities import angular_separation
from astropy.io import fits
from astropy.modeling import fitting, models
from astropy.table import Table
from astropy.table import vstack as vstack_tables
from astropy.units import Quantity, Unit
from gammapy.analysis import Analysis, AnalysisConfig
from gammapy.data import DataStore, EventList
from gammapy.datasets import (
    Datasets,
    FluxPointsDataset,
    MapDataset,
    MapDatasetOnOff,
    SpectrumDataset,
    SpectrumDatasetOnOff,
)
from gammapy.estimators import ExcessMapEstimator, FluxPointsEstimator, TSMapEstimator
from gammapy.estimators.utils import find_peaks
from gammapy.irf import EffectiveAreaTable2D, load_cta_irfs
from gammapy.makers import (
    MapDatasetMaker,
    ReflectedRegionsBackgroundMaker,
    RingBackgroundMaker,
    SafeMaskMaker,
    SpectrumDatasetMaker,
)
from gammapy.makers.utils import make_theta_squared_table
from gammapy.maps import Map, MapAxis, MapCoord, RegionGeom, WcsGeom, WcsNDMap
from gammapy.modeling import Fit
from gammapy.modeling.models import (
    PowerLawSpectralModel,
    SkyModel,
    create_crab_spectral_model,
)
from gammapy.stats import WStatCountsStatistic
from gammapy.utils.fits import earth_location_from_dict
from gammapy.utils.scripts import make_path
from gammapy.utils.testing import Checker
from gammapy.utils.time import time_ref_from_dict
from gammapy.visualization import (
    plot_spectrum_datasets_off_regions,
    plot_theta_squared_table,
)
from regions import CircleAnnulusSkyRegion, CircleSkyRegion
from scipy.stats import norm
# import astropy

__all__ = ["event"]

log = logging.getLogger(__name__)
matplotlib.rcParams.update({"font.size": 17})
class event:
    def __init__(self, datastore_path, runid):
        # self.table = table
        self.table = 0
        self.count_scatter = 0
        self.info_table = 0
        self.stats = 0
        self.sourcePos = 0
        self.ringbg_exclude_region = 0
        self.reflectedbg_exclude_region = 0
        self.reflected_exclusion_mask = 0
        self.skydir = 0
        self.exclusion_mask = 0
        self.on_region = 0
        self.datasets = 0
        self.datastore = 0
        self.obs_id = 0
        self.observations = 0
        self.create_datastore(datastore_path, runid)
        self.dataset_maker = 0
        self.dataset_empty = 0
        self.bkg_maker = 0
        self.safe_mask_masker = 0
        self.signal_table = 0
        self.model_best_joint = 0
        self.flux_points = 0
        self.model = 0
        self.model_best_stacked = 0
        self.obsCollection = 0
        self.stacked = 0
        self.npix_x = 0
        self.npix_y = 0
        self.ring_exclusion_mask = 0
        self.significance_map = 0
        self.excess_map = 0
        self.axis = 0
        self.exclusion_map = 0
    def create_datastore(self, datastore_path, runid):
        datastore = DataStore.from_dir(datastore_path)
        obs_id = runid
        observations = datastore.get_observations(obs_id)

        self.obs_id = obs_id
        self.datastore = datastore
        self.observations = observations
        self.table = []
        for i in obs_id:
            self.table.append(datastore.obs(obs_id=i).events.table)
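    # Note (editorial sketch): `DataStore.from_dir` expects a directory containing the
    # gammapy index files (typically `hdu-index.fits.gz` and `obs-index.fits.gz`) that
    # point at the DL3 event/IRF files, and `runid` is passed straight to
    # `get_observations`, so it should be an iterable of observation IDs, e.g.:
    #
    #     ev = event("/path/to/DL3", [2008, 2009])   # hypothetical path and run numbers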
    def plot_timee(self, id_=0, ax=None):
        """Plot the event rate versus time for each run.

        Parameters
        ----------
        ax : `~matplotlib.axes.Axes` or None
            Axes

        Returns
        -------
        x, y : `~numpy.ndarray`
            Bin centers and counts of the last run plotted
        """
        plt.figure(figsize=(16, 8))
        for i in range(len(self.table)):
            ax = plt.gca() if ax is None else ax
            # Note the events are not necessarily in time order
            # print(self.table[i])
            time = self.table[i]["TIME"]
            time = time - np.min(time)

            ax.set_xlabel("Time (sec)")
            ax.set_ylabel("Counts")
            y, x_edges = np.histogram(time, bins=np.linspace(0, 2400, 100))
            # Drop the last bin so that x, y and the error arrays end up with matching lengths
            y = y[:-1]
            x_edges = x_edges[:-1]

            xerr = np.diff(x_edges) / 2
            x = x_edges[:-1] + xerr
            yerr = np.sqrt(y)
            ax.errorbar(x=x, y=y, xerr=xerr, yerr=yerr, label="run" + str(self.obs_id[i]))

        plt.legend(
            shadow=True, bbox_to_anchor=(1.05, 1), loc="upper left", borderaxespad=0, handlelength=1.5, fontsize=10
        )
        prefix = (5 - len(str(id_))) * "0"
        plt.savefig("../plots/event_rate" + prefix + str(id_) + ".png")
        plt.show()

        return x, y
    def create_counts_scatter(self, index=0):
        # self.table is a list with one event table per run; pick the requested one
        time = self.table[index]["TIME"]
        time = time - np.min(time)
        y, x_edges = np.histogram(time, bins=np.linspace(0, 1200, 20))
        y = y[:-1]

        self.count_scatter = y
    def statisticals_parameters(self, runid):
        info_table = self.datastore.obs_table
        self.create_counts_scatter(runid)

        variation_max = (
            np.abs((self.count_scatter.max() - self.count_scatter.mean()) / self.count_scatter.mean()) * 100
        )
        variation_min = (
            np.abs((self.count_scatter.min() - self.count_scatter.mean()) / self.count_scatter.mean()) * 100
        )
        general_variation = self.count_scatter.std() / self.count_scatter.mean() * 100
        variability = self.count_scatter.std() / (np.sqrt(self.count_scatter.mean()))
        mean_trigger_rate = self.count_scatter.mean() / info_table["LIVETIME"][runid]
        zenith_angle = info_table["ZEN_PNT"][runid]
        livetime = info_table["LIVETIME"][runid]

        print(f"Mean = {self.count_scatter.mean()}")
        print(f"Standard deviation = {self.count_scatter.std()}")
        print(f"Localized variation on the maximum = {variation_max} %")
        print(f"Localized variation on the minimum = {variation_min} %")
        print(f"General variation = {general_variation}")
        print(f"Variability = {variability}")
        print(f"Mean_trigger_rate = {mean_trigger_rate}")
        print(f"Zenith_angle = {zenith_angle}")
        print(f"Livetime = {livetime}")

        self.info_table = info_table

        return (
            self.count_scatter.mean(),
            self.count_scatter.std(),
            variation_max,
            variation_min,
            general_variation,
            variability,
            mean_trigger_rate,
            zenith_angle,
            livetime,
        )
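    # Worked example of the statistics above (editorial illustration, not from the data):
    # for binned counts [90, 100, 110], mean = 100 and std (np.std, ddof=0) ~= 8.16, so
    # general_variation ~= 8.2 %, variability = 8.16 / sqrt(100) ~= 0.82, and
    # variation_max = |110 - 100| / 100 * 100 = 10 %.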
    def create_on_region(self, ra=83.633, dec=22.014, on_radius_angle=0.2, radius_excluded=0.35):
        # Default ra/dec point at the Crab Nebula (ICRS)
        # def create_on_exclusion_regions(self, ra=238.929, dec=11.190, on_radius_angle=0.14, radius_excluded=0.2):
        # def create_on_exclusion_regions(self, ra=187.706, dec=12.391, on_radius_angle=0.14, radius_excluded=0.2):

        sourcePos = SkyCoord(ra=ra * u.deg, dec=dec * u.deg, frame="icrs")

        on_radius = on_radius_angle * u.deg
        on_region = CircleSkyRegion(center=sourcePos, radius=on_radius)

        # Create exclusion region
        reflectedbg_exclude_region = CircleSkyRegion(center=sourcePos, radius=radius_excluded * u.deg)
        ringbg_exclude_region = CircleSkyRegion(center=sourcePos, radius=on_radius_angle * 3 * u.deg)

        self.sourcePos = sourcePos
        self.on_region = on_region
        self.reflectedbg_exclude_region = reflectedbg_exclude_region
        self.ringbg_exclude_region = ringbg_exclude_region
    # Reflected Background Method

    def create_exclusion_mask(self, plot=False, binsz=0.02, npix_x=400, npix_y=400):
        skydir = self.sourcePos.galactic

        reflected_exclusion_mask = Map.create(
            npix=(npix_x, npix_y), binsz=binsz, skydir=skydir, proj="TAN", frame="icrs"
        )
        mask = reflected_exclusion_mask.geom.region_mask([self.reflectedbg_exclude_region], inside=False)
        reflected_exclusion_mask.data = mask

        self.skydir = skydir
        self.reflected_exclusion_mask = reflected_exclusion_mask

        if plot:
            mask.plot()
            plt.show()
    def reduction_chain(
        self, e_reco_min=0.1, e_reco_max=40, e_reco_bin=40, e_true_min=0.05, e_true_max=100, e_true_bin=200
    ):
        # Run data reduction chain, e_=energy

        e_reco = MapAxis.from_energy_bounds(e_reco_min, e_reco_max, e_reco_bin, unit="TeV", name="energy")
        e_true = MapAxis.from_energy_bounds(e_true_min, e_true_max, e_true_bin, unit="TeV", name="energy_true")
        dataset_empty = SpectrumDataset.create(e_reco=e_reco, e_true=e_true, region=self.on_region)
        self.dataset_empty = dataset_empty
    def data_maker(self):
        dataset_maker = SpectrumDatasetMaker(containment_correction=False, selection=["counts", "exposure", "edisp"])
        bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=self.reflected_exclusion_mask)
        safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

        self.dataset_maker = dataset_maker
        self.bkg_maker = bkg_maker
        self.safe_mask_masker = safe_mask_masker
    def data_run(self):
        datasets = Datasets()

        observations = self.datastore.get_observations(self.obs_id)

        for obs_id, observation in zip(self.obs_id, observations):
            dataset = self.dataset_maker.run(self.dataset_empty.copy(name=str(obs_id)), observation)
            dataset_on_off = self.bkg_maker.run(dataset, observation)
            dataset_on_off = self.safe_mask_masker.run(dataset_on_off, observation)
            datasets.append(dataset_on_off)

        self.datasets = datasets
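    # Minimal driver sketch for the reflected-regions chain (editorial illustration; the
    # DataStore path and run list are placeholders, not values from this script):
    #
    #     ev = event("/path/to/DL3", [2008])
    #     ev.create_on_region()          # defaults to the Crab position
    #     ev.create_exclusion_mask()
    #     ev.reduction_chain()
    #     ev.data_maker()
    #     ev.data_run()
    #     ev.signal_info()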
    def plot_off_region(self):
        plt.figure(figsize=(8, 8))
        _, ax, _ = self.reflected_exclusion_mask.plot()
        self.on_region.to_pixel(ax.wcs).plot(ax=ax, edgecolor="k")
        plot_spectrum_datasets_off_regions(ax=ax, datasets=self.datasets)

        plt.show()
    def signal_info(self):
        info_table = self.datastore.obs_table
        print(info_table[0])
        signal_table = self.datasets.info_table(cumulative=True)
        alpha = signal_table["alpha"]

        excess = signal_table["excess"]
        # sigma_excess = sqrt(N_on + alpha**2 * N_off), with N_off = background / alpha
        uncertainty_excess = np.sqrt(
            signal_table["counts"] + ((1 / alpha) * signal_table["background"]) / ((1 / alpha) ** 2)
        )

        bg = signal_table["background"]
        uncertainty_bg = np.sqrt((1 / alpha) * signal_table["background"]) / (1 / alpha)

        self.signal_table = signal_table

        print(f"reflected_excess = {excess[-1]}")
        print(f"reflected_uncertainty_excess = {uncertainty_excess[-1]}")
        print(f"reflected_livetime = {self.signal_table['livetime'].to('h')[-1]} hours")
        print(f"sqrt_ts = {self.signal_table['sqrt_ts'][-1]}")
        # print(f"reflected_bg = {bg}")
        # print(f"reflected_uncertainty_bg = {uncertainty_bg}")

        return (
            excess[-1],
            uncertainty_excess[-1],
            bg[-1],
            uncertainty_bg[-1],
            self.signal_table["sqrt_ts"][-1],
            self.signal_table["livetime"][-1],
        )
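    # Worked example of the error propagation above (editorial illustration): with
    # counts = 60, background = 10 and alpha = 0.2 (so N_off = background / alpha = 50),
    # excess = 50 and sigma_excess = sqrt(60 + 0.2 * 10) = sqrt(62) ~= 7.9,
    # i.e. sqrt(N_on + alpha**2 * N_off).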
    def statistic_source_excess(self, plot=False):
        if plot:
            plt.plot(
                # self.signal_table["livetime"].to("h"),
                self.signal_table["name"],
                self.signal_table["excess"],
                marker="o",
                ls="none",
            )
            plt.xlabel("Dataset")
            plt.ylabel("Excess")

            plt.show()
    def statistic_source_ts(self, plot=False):
        if plot:
            plt.plot(self.signal_table["livetime"].to("h"), self.signal_table["sqrt_ts"], marker="o", ls="none")
            plt.xlabel("Livetime [h]")
            plt.ylabel("Sqrt(TS)")

            plt.show()
    def fit_spectrum(self):
        # Fit spectrum
        spectral_model = PowerLawSpectralModel(
            index=2, amplitude=2e-11 * u.Unit("cm-2 s-1 TeV-1"), reference=1 * u.TeV
        )
        model = SkyModel(spectral_model=spectral_model, name="crab")

        for dataset in self.datasets:
            dataset.models = model

        fit_joint = Fit(self.datasets)
        result_joint = fit_joint.run()

        # Make a copy here to compare it later
        model_best_joint = model.copy()

        self.model = model
        self.model_best_joint = model_best_joint
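    # The fitted model is a simple power law, dN/dE = amplitude * (E / reference)**(-index).
    # The index = 2 and amplitude = 2e-11 cm-2 s-1 TeV-1 at 1 TeV set above are only the
    # starting values; the joint fit updates the model parameters in place.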
    def fit_quality(self, plot=False):
        if plot:
            ax_spectrum, ax_residuals = self.datasets[0].plot_fit()
            ax_spectrum.set_ylim(0.1, 40)
            plt.show()
    def flux_points_plot(self, e_min=0.7, e_max=30, plot=False):
        # Compute Flux Points
        energy_edges = np.logspace(np.log10(e_min), np.log10(e_max), 11) * u.TeV

        # Create an instance of the FluxPointsEstimator
        fpe = FluxPointsEstimator(energy_edges=energy_edges, source="crab")
        flux_points = fpe.run(datasets=self.datasets)

        print(flux_points.table_formatted)

        # Plot the flux points and their likelihood profile, flagging points with TS < 4
        if plot:
            plt.figure(figsize=(8, 5))
            flux_points.table["is_ul"] = flux_points.table["ts"] < 4
            ax = flux_points.plot(energy_power=2, flux_unit="erg-1 cm-2 s-1", color="darkorange")
            flux_points.to_sed_type("e2dnde").plot_ts_profiles(ax=ax)
            plt.show()

        self.flux_points = flux_points
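    # The "is_ul" flag above marks flux points with TS < 4 (i.e. below ~2 sigma) so they
    # are drawn as upper limits rather than detections.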
    def model_fit_plot(self, plot=False):
        # Final plot with the best fit model

        flux_points_dataset = FluxPointsDataset(data=self.flux_points, models=self.model_best_joint)

        if plot:
            flux_points_dataset.plot_fit()
            plt.show()
    def flux_plot(self, plot=False, id_=0):
        dataset_stacked = Datasets(self.datasets).stack_reduce()

        dataset_stacked.models = self.model
        stacked_fit = Fit([dataset_stacked])
        result_stacked = stacked_fit.run()

        # Make a copy to compare later
        model_best_stacked = self.model.copy()

        if plot:
            plt.figure(figsize=(16, 8))
            plot_kwargs = {
                "energy_range": [0.1, 30] * u.TeV,
                "energy_power": 2,
                "flux_unit": "erg-1 cm-2 s-1",
            }

            # Curve of the stacked model
            model_best_stacked.spectral_model.plot(**plot_kwargs, label="run" + str(self.obs_id))
            model_best_stacked.spectral_model.plot_error(**plot_kwargs)

            # Reference curve
            create_crab_spectral_model("hess_pl").plot(**plot_kwargs, label="Crab reference")

            plt.legend()
            prefix = (5 - len(str(id_))) * "0"
            plt.savefig("./Plots/flux_plot" + prefix + str(id_) + ".png")
            plt.show()

        self.model_best_stacked = model_best_stacked
    def flux_parameters(self):
        parameters_table = self.model_best_stacked.parameters.to_table()

        # Positional indexing: row 0 is the spectral index, row 1 the amplitude;
        # column 1 holds the value and column 6 the fitted error in this table layout.
        spectral_index = parameters_table[0][1]
        uncertainty_spectral_index = parameters_table[0][6]
        amplitude = parameters_table[1][1]
        uncertainty_amplitude = parameters_table[1][6]

        return spectral_index, uncertainty_spectral_index, amplitude, uncertainty_amplitude
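    # A less position-dependent way to read the same numbers inside flux_parameters
    # (sketch; relies on the standard gammapy parameter API) would be, e.g.:
    #
    #     pars = self.model_best_stacked.parameters
    #     spectral_index, uncertainty_spectral_index = pars["index"].value, pars["index"].error
    #     amplitude, uncertainty_amplitude = pars["amplitude"].value, pars["amplitude"].error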
443 """Ring Background Method"""
445 def calculate_acceptance_model(
446 self,
447 energyaxis_min=np.log10(0.1),
448 energyaxis_max=np.log10(2.0),
449 energy_nbin=5,
450 offset_min=0.0,
451 offset_max=5.0,
452 offset_nbin=8,
453 plot=False,
454 ):
455 obsCollection = self.datastore.get_observations(self.obs_id)
456 energyAxisAcceptance = MapAxis.from_edges(
457 np.logspace(energyaxis_min, energyaxis_max, energy_nbin), unit="TeV", name="energy", interp="log"
458 )
459 offsetAxisAcceptance = MapAxis.from_edges(
460 np.linspace(offset_min, offset_max, offset_nbin), unit="deg", name="offset", interp="lin"
461 )
462 background = create_radial_acceptance_map(
463 obsCollection,
464 energyAxisAcceptance,
465 offsetAxisAcceptance,
466 exclude_regions=[self.ringbg_exclude_region],
467 oversample_map=10,
468 )
470 if plot:
471 background.peek()
472 plt.show
474 hduBackground = background.to_table_hdu()
475 hduBackground.writeto("background.fits", overwrite=True)
477 self.obsCollection = obsCollection
    def add_acceptance_map(self):
        # Add acceptance map to observations

        listIDObs = []
        for obs in self.obsCollection:
            listIDObs.append(obs.obs_id)
            self.datastore.hdu_table.add_row(
                {
                    "OBS_ID": listIDObs[-1],
                    "HDU_TYPE": "bkg",
                    "HDU_CLASS": "bkg_2d",
                    "FILE_DIR": "",
                    "FILE_NAME": "background.fits",
                    "HDU_NAME": "BACKGROUND",
                }
            )
        self.datastore.hdu_table = self.datastore.hdu_table.copy()

        self.obsCollection = self.datastore.get_observations(listIDObs)
    def map_geometry(
        self,
        RA,
        DEC,
        plot=False,
        npix_x=200,
        npix_y=200,
        binsize=0.02,
        axis_min=np.log10(0.1),
        axis_max=np.log10(2.0),
        axis_nbin=5,
    ):
        sourcePos = SkyCoord(ra=RA * u.deg, dec=DEC * u.deg, frame="icrs")
        self.axis = MapAxis.from_edges(
            np.logspace(axis_min, axis_max, axis_nbin), unit="TeV", name="energy", interp="log"
        )
        geom = WcsGeom.create(skydir=sourcePos, npix=(npix_x, npix_y), binsz=binsize, frame="icrs", axes=[self.axis])

        ring_exclusion_mask = geom.to_image().region_mask([self.ringbg_exclude_region], inside=False)
        self.exclusion_map = WcsNDMap(geom.to_image(), ring_exclusion_mask)

        self.ring_exclusion_mask = ring_exclusion_mask

        if plot:
            self.exclusion_map.plot()
            plt.show()

        stacked = MapDataset.create(geom=geom)
        unstacked = Datasets()
        maker = MapDatasetMaker(selection=["counts", "background"])
        # maker = MapDatasetMaker(selection=["counts"])
        maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=3.0 * u.deg)

        for obs in self.obsCollection:
            cutout = stacked.cutout(obs.pointing_radec, width="10 deg")
            dataset = maker.run(cutout, obs)
            dataset = maker_safe_mask.run(dataset, obs)
            stacked.stack(dataset)
            unstacked.append(dataset)

        self.stacked = stacked
        self.npix_x = npix_x
        self.npix_y = npix_y
    def ring_data_estimation(self):
        # Note: the RingBackgroundMaker below is instantiated but not applied here; the
        # excess and significance maps are estimated directly from the stacked dataset.
        ring_maker = RingBackgroundMaker(r_in="0.5 deg", width="0.3 deg", exclusion_mask=self.ring_exclusion_mask)

        estimator = ExcessMapEstimator(0.2 * u.deg)
        lima_maps = estimator.run(self.stacked)

        significance_map = lima_maps["sqrt_ts"]
        excess_map = lima_maps["excess"]

        # Pick the central pixel of the maps (the source position)
        npix_x = int(self.npix_x / 2)
        npix_y = int(self.npix_y / 2)

        ring_sqrt_ts = lima_maps["sqrt_ts"].data[0][npix_x][npix_y]

        ring_excess = lima_maps["excess"].data[0][npix_x][npix_y]
        ring_bg = lima_maps["background"].data[0][npix_x][npix_y]

        uncertainty_excess_ring = lima_maps["err"].data[0][npix_x][npix_y]
        # uncertainty_ring_bg = np.sqrt((1/self.alpha)*ring_bg)/(1/self.alpha)

        self.significance_map = significance_map
        self.excess_map = excess_map

        print(f"ring_excess = {ring_excess}")
        print(f"ring_sqrt_ts = {ring_sqrt_ts}")
        print(f"ring_bg = {ring_bg}")
        print(f"uncertainty_excess_ring = {uncertainty_excess_ring}")
        # print(f"uncertainty_ring_bg{uncertainty_ring_bg}")

        return ring_excess, ring_sqrt_ts, ring_bg, uncertainty_excess_ring
        # uncertainty_ring_bg
    def excess_significance_plot(self, directory, plot=False, id_=0):
        if plot:
            plt.figure(figsize=(16, 8))
            ax1 = plt.subplot(121, projection=self.significance_map.geom.wcs)
            ax2 = plt.subplot(122, projection=self.excess_map.geom.wcs)

            ax2.set_title("Significance map")
            self.significance_map.plot(ax=ax2, add_cbar=True)
            sources = find_peaks(
                self.significance_map.get_image_by_idx((0,)),
                threshold=5,
                min_distance="0.2 deg",
            )
            print("Found sources")
            print(sources)
            f = open(directory + "../plots/results.txt", "w")
            f.write("Found sources")
            f.write(str(sources))
            f.close()
            sources_2 = find_peaks(
                self.significance_map.get_image_by_idx((0,)),
                threshold=7,
                min_distance="0.2 deg",
            )

            now = dt.datetime.now()
            timestamp_str = now.strftime("%Y-%m-%d %H:%M:%S")
            ax1.text(
                0.02, 0.98, timestamp_str, transform=ax1.transAxes, fontsize=11, fontweight="bold", va="top", ha="left"
            )
            ax2.text(
                0.02, 0.98, timestamp_str, transform=ax2.transAxes, fontsize=11, fontweight="bold", va="top", ha="left"
            )
            if len(sources) > 0:
                ax2.scatter(
                    sources["ra"],
                    sources["dec"],
                    transform=plt.gca().get_transform("icrs"),
                    color="none",
                    edgecolor="white",
                    marker="o",
                    s=300,
                    lw=1.5,
                )
            if len(sources_2) > 0:
                ax2.scatter(
                    sources_2["ra"],
                    sources_2["dec"],
                    transform=plt.gca().get_transform("icrs"),
                    color="none",
                    edgecolor="blue",
                    marker="o",
                    s=300,
                    lw=1.5,
                )

            ax1.set_title("Excess map")
            self.excess_map.plot(ax=ax1, add_cbar=True)

            prefix = (5 - len(str(id_))) * "0"
            plt.savefig(directory + "../plots/sig_excess_plot" + prefix + str(id_) + ".png")
            plt.show()
            return sources
    def off_distribution_plot(self, plot=False):
        # create a 2D mask for the images
        exclusion_map_ring = self.significance_map.geom.region_mask([self.ringbg_exclude_region], inside=False)
        significance_map_off = self.significance_map * exclusion_map_ring
        significance_all = self.significance_map.data[np.isfinite(self.significance_map.data)]
        significance_off = significance_map_off.data[np.isfinite(significance_map_off.data)]

        bins = np.linspace(
            np.min(significance_all),
            np.max(significance_all),
            num=int((np.max(significance_all) - np.min(significance_all)) * 3),
        )
        mu, std = norm.fit(significance_off)

        if plot:
            plt.hist(
                significance_all,
                density=True,
                alpha=0.5,
                color="red",
                label="all bins",
                bins=bins,
            )

            plt.hist(
                significance_off,
                density=True,
                alpha=0.5,
                color="blue",
                label="off bins",
                bins=bins,
            )

            # Now, fit the off distribution with a Gaussian
            x = np.linspace(-8, 8, 50)
            p = norm.pdf(x, mu, std)
            plt.plot(x, p, lw=2, color="black")
            plt.legend()
            plt.xlabel("Significance")
            plt.yscale("log")
            plt.ylim(1e-5, 1)
            xmin, xmax = np.min(significance_all), np.max(significance_all)
            plt.xlim(xmin, xmax)

            plt.show()
        print(mu, std)
        return mu, std
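    # For a well-modelled background the "off bins" significance distribution should be
    # close to a standard normal, so the fitted (mu, std) returned above are expected to
    # be near (0, 1); larger values hint at unmodelled background or residual sources.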
    def thetaSquarePlot(self):
        # theta2Edge = np.linspace(0.0, 0.35**2, num=15)
        theta2Edge = np.linspace(0.0, 0.35**2, num=15)
        thetaEdge = np.sqrt(theta2Edge)
        theta2 = (theta2Edge[1:] + theta2Edge[:-1]) / 2.0
        count = np.zeros(theta2.shape)
        countBack = np.zeros(theta2.shape)
        alpha = np.zeros(theta2.shape)
        significance = np.zeros(theta2.shape)
        sb = np.zeros(theta2.shape)

        for i in range(len(theta2)):
            # ON counts for the histogram are taken in annuli (theta2 rings), while the
            # significance and S/B are computed in cumulative circles of radius thetaEdge[i + 1]
            on_region_theta2_ring = CircleAnnulusSkyRegion(
                center=self.sourcePos, inner_radius=thetaEdge[i] * u.deg, outer_radius=thetaEdge[i + 1] * u.deg
            )
            on_region_theta2_circle = CircleSkyRegion(center=self.sourcePos, radius=thetaEdge[i + 1] * u.deg)
            dataset_maker_spectrum_significance_theta2 = SpectrumDatasetMaker(
                selection=["counts", "exposure", "edisp"]
            )
            spectrum_dataset_empty_significance_theta2_ring = SpectrumDataset.create(
                e_reco=self.axis, region=on_region_theta2_ring
            )
            spectrum_dataset_empty_significance_theta2_circle = SpectrumDataset.create(
                e_reco=self.axis, region=on_region_theta2_circle
            )
            bkg_maker_spectrum_significance_theta2 = ReflectedRegionsBackgroundMaker(exclusion_mask=self.exclusion_map)

            countsRing = np.zeros(len(self.obsCollection))
            countsOffRing = np.zeros(len(self.obsCollection))
            alphaRing = np.zeros(len(self.obsCollection))
            exposureRing = np.zeros(len(self.obsCollection))
            countsCircle = np.zeros(len(self.obsCollection))
            countsOffCircle = np.zeros(len(self.obsCollection))
            alphaCircle = np.zeros(len(self.obsCollection))
            exposureCircle = np.zeros(len(self.obsCollection))
            for j, obs in enumerate(self.obsCollection):
                print(i)
                dataset_spectrum_significance_theta2_ring = dataset_maker_spectrum_significance_theta2.run(
                    spectrum_dataset_empty_significance_theta2_ring.copy(name=f"obs-{obs.obs_id}"), obs
                )
                dataset_on_off_spectrum_significance_theta2_ring = bkg_maker_spectrum_significance_theta2.run(
                    observation=obs, dataset=dataset_spectrum_significance_theta2_ring
                )
                countsRing[j] = dataset_on_off_spectrum_significance_theta2_ring.counts.get_by_idx([0])[0, 0, 0]
                countsOffRing[j] = dataset_on_off_spectrum_significance_theta2_ring.counts_off.get_by_idx([0])[0, 0, 0]
                alphaRing[j] = dataset_on_off_spectrum_significance_theta2_ring.alpha.get_by_idx([0])[0, 0, 0]
                exposureRing[j] = dataset_on_off_spectrum_significance_theta2_ring.exposure.get_by_idx([0])[0, 0, 0]

                dataset_spectrum_significance_theta2_circle = dataset_maker_spectrum_significance_theta2.run(
                    spectrum_dataset_empty_significance_theta2_circle.copy(name=f"obs-{obs.obs_id}"), obs
                )
                dataset_on_off_spectrum_significance_theta2_circle = bkg_maker_spectrum_significance_theta2.run(
                    observation=obs, dataset=dataset_spectrum_significance_theta2_circle
                )
                countsCircle[j] = dataset_on_off_spectrum_significance_theta2_circle.counts.get_by_idx([0])[0, 0, 0]
                countsOffCircle[j] = dataset_on_off_spectrum_significance_theta2_circle.counts_off.get_by_idx([0])[
                    0, 0, 0
                ]
                alphaCircle[j] = dataset_on_off_spectrum_significance_theta2_circle.alpha.get_by_idx([0])[0, 0, 0]
                exposureCircle[j] = dataset_on_off_spectrum_significance_theta2_circle.exposure.get_by_idx([0])[
                    0, 0, 0
                ]

            # Sum counts over observations; alpha is averaged weighted by exposure
            countsRing = np.sum(countsRing)
            countsOffRing = np.sum(countsOffRing)
            alphaRing = np.sum(alphaRing * exposureRing / np.sum(exposureRing))

            countsCircle = np.sum(countsCircle)
            countsOffCircle = np.sum(countsOffCircle)
            alphaCircle = np.sum(alphaCircle * exposureCircle / np.sum(exposureCircle))

            wstatCircle = WStatCountsStatistic(n_on=countsCircle, n_off=countsOffCircle, alpha=alphaCircle)

            count[i] = countsRing
            countBack[i] = countsOffRing
            alpha[i] = alphaRing
            significance[i] = wstatCircle.sqrt_ts
            sb[i] = wstatCircle.n_sig / wstatCircle.n_bkg

        plt.figure(figsize=(30, 10))
        plt.subplot(1, 3, 1)
        plt.errorbar(
            x=theta2,
            xerr=[theta2 - theta2Edge[:-1], theta2Edge[1:] - theta2],
            y=count,
            yerr=np.sqrt(count),
            label="On",
            fmt=".",
        )
        plt.errorbar(
            x=theta2,
            xerr=[theta2 - theta2Edge[:-1], theta2Edge[1:] - theta2],
            y=countBack * alpha,
            yerr=np.sqrt(countBack) * alpha,
            label="Off",
            fmt=".",
        )
        plt.legend()
        plt.xlabel("Theta^2")
        plt.ylabel("Count")
        plt.axvline(x=0.14**2, ls="--", c="k")
        plt.subplot(1, 3, 2)
        plt.plot(theta2Edge[:-1], significance, "+", mew=3.0)
        plt.xlabel("Theta^2")
        plt.ylabel("Significance")
        plt.axvline(x=0.14**2, ls="--", c="k")
        plt.subplot(1, 3, 3)
        plt.plot(theta2Edge[:-1], sb, "+", mew=3.0)
        plt.xlabel("Theta^2")
        plt.ylabel("S/B")
        plt.axvline(x=0.14**2, ls="--", c="k")
        return 0
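
# Editorial usage sketch (not part of the original script): one possible way to drive the
# ring-background chain end to end. The DL3 path, run numbers and output directory are
# placeholders, not values taken from this file.
if __name__ == "__main__":
    ev = event("/path/to/DL3", [2008, 2009])       # hypothetical DataStore and runs
    ev.create_on_region()                          # defaults to the Crab position
    ev.calculate_acceptance_model()                # writes background.fits
    ev.add_acceptance_map()                        # attaches it to the observations
    ev.map_geometry(RA=83.633, DEC=22.014)         # same default target as create_on_region
    ev.ring_data_estimation()
    ev.excess_significance_plot(directory="./", plot=True, id_=1)
    ev.off_distribution_plot(plot=True)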