Coverage for lst_auto_rta / merge_DL3.py: 0%
201 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-03-22 14:48 +0000
« prev ^ index » next coverage.py v7.13.5, created at 2026-03-22 14:48 +0000
1#!/usr/bin/env python
2# S. Caroff
4"""
5Script to merge DL3 and analyse them.
7Usage:
8$> python merge_DL3.py \
9 --input-filter "dl3_LST-1.Run04190.*.fits" \
10 --input-directory "./DL3/" \
11 --output-dir "./DL3/" \
12 --run_id 9864
13"""
15import argparse
16import glob
17import logging
18import os
19import sys
20import time
21import warnings
22from pathlib import Path
24import astropy.units as u
25import matplotlib.pyplot as plt
26import numpy as np
27from astropy.io import fits
28from astropy.table import Column, QTable, Table, vstack
29from gammapy.data import DataStore, EventList
30from SourceAnalyse import event
# Silence every Python warning process-wide (astropy/gammapy emit many).
# NOTE(review): this also hides genuinely useful warnings (deprecations,
# numerical issues) — consider scoping to specific categories instead.
warnings.filterwarnings("ignore")
warnings.simplefilter("ignore")
def setup_logging(output_dir, run_id):
    """
    Configure logging with one log file per execution.

    If output_dir does not exist, write the log in a fallback location
    and raise FileNotFoundError so the script crashes immediately.

    Parameters
    ----------
    output_dir : str or Path
        Directory under which a ``logs/`` subdirectory is created.
    run_id : int
        Run identifier embedded in the log file name.

    Returns
    -------
    tuple
        ``(logger, log_file)`` — the configured logger and the Path of the
        log file that was opened.

    Raises
    ------
    FileNotFoundError
        If *output_dir* does not exist.
    """
    stamp = time.strftime("%Y-%m-%d_%H-%M-%S")
    logger = logging.getLogger("merge_DL3")
    logger.setLevel(logging.INFO)
    # Drop handlers left over from any previous call so records are not duplicated.
    logger.handlers.clear()

    formatter = logging.Formatter(
        fmt="%(asctime)s | %(levelname)s | %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    def _attach(handler):
        # Every handler gets the same level and format.
        handler.setLevel(logging.INFO)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    # Console output is always kept, regardless of where the file log goes.
    _attach(logging.StreamHandler(sys.stdout))

    target_dir = Path(output_dir)
    log_name = f"merge_DL3_run{run_id}_{stamp}.log"

    if not target_dir.exists():
        # Log into the current working directory instead, record why, then crash.
        fallback_log = Path.cwd() / log_name
        _attach(logging.FileHandler(fallback_log))

        logger.info("============================================================")
        logger.error("Output directory does not exist: %s", target_dir)
        logger.error("Fallback log file: %s", fallback_log)
        logger.error("Crashing because output directory is missing.")
        logger.info("============================================================")

        raise FileNotFoundError(f"Output directory does not exist: {target_dir}")

    log_dir = target_dir / "logs"
    log_dir.mkdir(exist_ok=True)
    log_file = log_dir / log_name
    _attach(logging.FileHandler(log_file))

    logger.info("============================================================")
    logger.info("Starting merge_DL3.py")
    logger.info("Log file: %s", log_file)

    return logger, log_file
def CreatePlots(directory, run_id, RA, DEC, RA_map, DEC_map, logger=None):
    """
    Run the post-merge analysis chain on a DL3 directory and produce plots.

    Drives the ``event`` analysis object (from ``SourceAnalyse``) through its
    full pipeline: on-region, exclusion mask, reduction chain, data maker,
    acceptance model/map, map geometry, ring background estimation and the
    excess-significance plot.

    Parameters
    ----------
    directory : str
        DL3 directory readable by ``gammapy.data.DataStore.from_dir``.
    run_id :
        Run identifier forwarded to ``event``; NOTE(review): the commented-out
        caller in ``main`` passes a *list* here — confirm the expected type.
    RA, DEC : float
        Coordinates (deg, presumably ICRS — confirm) for the on region.
    RA_map, DEC_map : float
        Center coordinates for the output map geometry.
    logger : logging.Logger, optional
        If given, progress messages are emitted before each pipeline step.

    Returns
    -------
    tuple
        ``(sources[0][3], sources[0][4])`` — two fields of the first entry
        returned by ``excess_significance_plot``; their exact meaning is
        defined in ``SourceAnalyse`` (not visible here).
    """
    # NOTE(review): `datastore` is never used afterwards; kept because
    # DataStore.from_dir may serve as an early validation of `directory`
    # (it raises if index files are missing) — confirm before removing.
    datastore = DataStore.from_dir(directory)

    Analysis = event(directory, run_id)
    if logger:
        logger.info("Create On Region...")
    Analysis.create_on_region(RA, DEC)

    if logger:
        logger.info("Create Exclusion Mask...")
    Analysis.create_exclusion_mask()

    if logger:
        logger.info("Create reduction chain...")
    Analysis.reduction_chain()

    if logger:
        logger.info("Create data maker...")
    Analysis.data_maker()

    if logger:
        logger.info("Compute Acceptance model...")
    Analysis.calculate_acceptance_model(plot=False)

    if logger:
        logger.info("Add acceptance map...")
    Analysis.add_acceptance_map()

    if logger:
        logger.info("Compute map geometry...")
    # 0.05 deg bins over a 150x150 pixel map (7.5 deg field of view).
    Analysis.map_geometry(RA_map, DEC_map, plot=False, binsize=0.05, npix_x=150, npix_y=150)

    # Ring-background estimation; only `sources` below is used for the return.
    ring_excess, ring_sqrt_ts, ring_bg, uncertainty_excess_ring = Analysis.ring_data_estimation()

    if logger:
        logger.info("Create excess significance plot...")
    sources = Analysis.excess_significance_plot(directory=directory, plot=True)

    if logger:
        logger.info("CreatePlots finished successfully.")

    return (sources[0][3], sources[0][4])
def main():
    """
    Merge a set of DL3 FITS files into a single run and prepare the analysis.

    Steps: parse CLI arguments, glob the input DL3 files, merge their EVENTS
    tables (averaging pointing headers weighted by event counts), apply a
    time-window cut, rewrite headers, write the merged file, copy DL3 files
    from recent runs, and build the lstchain DL3 index files.

    Exits with status 1 when no input files match; any other failure is
    logged with a traceback and re-raised.
    """
    parser = argparse.ArgumentParser(description="DL3 merger")

    parser.add_argument(
        "--input-filter",
        "-f",
        type=str,
        dest="input_data",
        help='DL3 filter selection, example: "dl3_LST-1.Run04190.*.fits"',
        default=None,
        required=True,
    )

    parser.add_argument(
        "--input-directory",
        "-d",
        type=str,
        dest="input_dir",
        help="Path to input DL3 files directory",
        default=None,
        required=True,
    )

    parser.add_argument(
        "--output-dir",
        "-o",
        type=str,
        dest="output_fits_dir",
        help="Path to output files",
        default=None,
        required=True,
    )

    parser.add_argument(
        "--run_id",
        "-r",
        type=int,
        dest="run_id",
        help="Run ID to assign to merged file",
        default=None,
        required=True,
    )

    args = parser.parse_args()

    # setup_logging raises FileNotFoundError if the output dir is missing.
    logger, log_file = setup_logging(args.output_fits_dir, args.run_id)

    try:
        logger.info("Arguments:")
        logger.info(" input_filter = %s", args.input_data)
        logger.info(" input_dir = %s", args.input_dir)
        logger.info(" output_fits_dir= %s", args.output_fits_dir)
        logger.info(" run_id = %s", args.run_id)

        file_filter = args.input_data
        directory = args.input_dir

        # NOTE(review): plain string concatenation — assumes `directory`
        # ends with a path separator (as in the docstring example "./DL3/").
        search_pattern = directory + file_filter
        logger.info("Searching files with pattern: %s", search_pattern)
        filelist = glob.glob(search_pattern)
        filelist = sorted(filelist)

        logger.info("Found %d input files.", len(filelist))
        for f in filelist:
            logger.info(" input file: %s", f)

        if len(filelist) == 0:
            logger.error("No input files found. Exiting.")
            sys.exit(1)

        # The first file becomes the merge accumulator; all others are
        # appended into its EVENTS HDU.
        logger.info("Opening first FITS file: %s", filelist[0])
        hdu1 = fits.open(filelist[0])

        for file in filelist[1:]:
            logger.info("Processing file: %s", file)
            hdu2 = fits.open(file)

            nev_hdu1 = len(hdu1["EVENTS"].data)
            nev_hdu2 = len(hdu2["EVENTS"].data)

            logger.info(
                "Current merge status: nev_hdu1=%d, nev_hdu2=%d, RA1=%.6f, RA2=%.6f",
                nev_hdu1,
                nev_hdu2,
                hdu1["EVENTS"].header["RA_PNT"],
                hdu2["EVENTS"].header["RA_PNT"],
            )

            # Accept the file only if its pointing RA is within 2 deg of the
            # accumulated pointing (squared comparison avoids abs()).
            # NOTE(review): no RA wrap-around handling near 0/360 deg.
            if (hdu1["EVENTS"].header["RA_PNT"] - hdu2["EVENTS"].header["RA_PNT"]) ** 2 < 2**2:
                logger.info("File accepted for merge: %s", file)

                # Pointing/deadtime headers become event-count-weighted means.
                hdu1["EVENTS"].header["RA_PNT"] = (
                    hdu1["EVENTS"].header["RA_PNT"] * nev_hdu1 + hdu2["EVENTS"].header["RA_PNT"] * nev_hdu2
                ) / (nev_hdu1 + nev_hdu2)

                hdu1["EVENTS"].header["DEC_PNT"] = (
                    hdu1["EVENTS"].header["DEC_PNT"] * nev_hdu1 + hdu2["EVENTS"].header["DEC_PNT"] * nev_hdu2
                ) / (nev_hdu1 + nev_hdu2)

                hdu1["EVENTS"].header["ALT_PNT"] = (
                    hdu1["EVENTS"].header["ALT_PNT"] * nev_hdu1 + hdu2["EVENTS"].header["ALT_PNT"] * nev_hdu2
                ) / (nev_hdu1 + nev_hdu2)

                hdu1["EVENTS"].header["AZ_PNT"] = (
                    hdu1["EVENTS"].header["AZ_PNT"] * nev_hdu1 + hdu2["EVENTS"].header["AZ_PNT"] * nev_hdu2
                ) / (nev_hdu1 + nev_hdu2)

                hdu1["EVENTS"].header["DEADC"] = (
                    hdu1["EVENTS"].header["DEADC"] * nev_hdu1 + hdu2["EVENTS"].header["DEADC"] * nev_hdu2
                ) / (nev_hdu1 + nev_hdu2)

                if hdu1["EVENTS"].header["OBS_ID"] != hdu2["EVENTS"].header["OBS_ID"]:
                    logger.warning(
                        "Merging DL3 events with different OBS_ID values: %s vs %s",
                        hdu1["EVENTS"].header["OBS_ID"],
                        hdu2["EVENTS"].header["OBS_ID"],
                    )

                hdu1["EVENTS"].data = np.append(hdu1["EVENTS"].data, hdu2["EVENTS"].data)
                logger.info("Merge done. New event count: %d", len(hdu1["EVENTS"].data))
            else:
                logger.info("File rejected by RA_PNT criterion: %s", file)

            hdu2.close()

        logger.info("Sorting merged EVENTS by TIME...")
        hdu1["EVENTS"].data = hdu1["EVENTS"].data[np.argsort(hdu1["EVENTS"].data["TIME"])]

        # Keep only events within 8 hours of the earliest event (one night).
        logger.info("Applying 8-hour time window selection...")
        initial_count = len(hdu1["EVENTS"].data)
        hdu1["EVENTS"].data = hdu1["EVENTS"].data[
            (hdu1["EVENTS"].data["TIME"] - hdu1["EVENTS"].data["TIME"][0]) < 8 * 3600
        ]
        logger.info(
            "Time selection kept %d / %d events.",
            len(hdu1["EVENTS"].data),
            initial_count,
        )

        # Rewrite bookkeeping headers to describe the merged run.
        logger.info("Updating FITS headers...")
        hdu1["EVENTS"].header["OBS_ID"] = args.run_id
        hdu1["EVENTS"].header["N_TELS"] = 1
        hdu1["EVENTS"].header["TELLIST"] = "LST-1"
        hdu1["EVENTS"].header["TSTART"] = hdu1["EVENTS"].data["TIME"][0]
        hdu1["EVENTS"].header["TSTOP"] = hdu1["EVENTS"].data["TIME"][-1]
        hdu1["EVENTS"].header["ONTIME"] = hdu1["EVENTS"].data["TIME"][-1] - hdu1["EVENTS"].data["TIME"][0]
        hdu1["EVENTS"].header["LIVETIME"] = hdu1["EVENTS"].header["ONTIME"] * hdu1["EVENTS"].header["DEADC"]
        hdu1["EVENTS"].header["INSTRUME"] = "LST"
        # Date/time-of-day headers are zeroed rather than recomputed.
        hdu1["EVENTS"].header["DATE-OBS"] = 0
        hdu1["EVENTS"].header["TIME-OBS"] = 0
        hdu1["EVENTS"].header["DATE-END"] = 0
        hdu1["EVENTS"].header["TIME-END"] = 0
        hdu1["EVENTS"].header["TELAPSE"] = 0

        hdu1["POINTING"].header["ALT_PNT"] = hdu1["EVENTS"].header["ALT_PNT"]
        hdu1["POINTING"].header["AZ_PNT"] = hdu1["EVENTS"].header["AZ_PNT"]
        hdu1["POINTING"].header["TIME"] = hdu1["EVENTS"].header["TSTART"]

        # Zero-pad the run id to 5 digits for the output file name.
        prefix = (5 - len(str(hdu1["EVENTS"].header["OBS_ID"]))) * "0"

        hdu1["EVENTS"].columns["TIME"].unit = "s"
        hdu1["EVENTS"].columns["ENERGY"].unit = "TeV"
        hdu1["EVENTS"].columns["RA"].unit = "deg"
        hdu1["EVENTS"].columns["DEC"].unit = "deg"

        # Reset the single GTI row to span first..last event.
        # NOTE(review): assumes TIME is column index 1 of the EVENTS rows —
        # confirm against the DL3 column layout.
        hdu1["GTI"].data[0][0] = hdu1["EVENTS"].data[0][1]
        hdu1["GTI"].data[0][1] = hdu1["EVENTS"].data[-1][1]

        output_file = (
            args.output_fits_dir
            + "dl3_LST-1.Run"
            + prefix
            + str(hdu1["EVENTS"].header["OBS_ID"])
            + ".fits"
        )

        logger.info("Writing merged FITS file to: %s", output_file)
        hdu1.writeto(output_file, overwrite=True)
        logger.info("Merged FITS file written successfully.")

        # NOTE(review): `directory[:-10]` assumes a fixed-length trailing
        # path component (e.g. ".../<run>/DL3/"); fragile — verify layout.
        plots_dir = directory[:-10] + str(int(args.run_id)) + "/plots"
        logger.info("Checking plots directory: %s", plots_dir)
        if not os.path.exists(plots_dir):
            os.mkdir(plots_dir)
            logger.info("Created plots directory: %s", plots_dir)
        else:
            logger.info("Plots directory already exists.")

        # Gather DL3 files from the previous 150 run directories so the
        # index files below cover them too. Failures of individual `cp`
        # calls (missing runs) are tolerated — only the exit code is logged.
        logger.info("Copying previous DL3 files from the last 150 runs...")
        for i in range(150):
            src_cmd = (
                "cp -f "
                + directory[:-10]
                + str(int(args.run_id) - i)
                + "/DL3/dl3_LST* "
                + directory
                + "/."
            )
            logger.info("Executing command: %s", src_cmd)
            ret = os.system(src_cmd)
            logger.info("Command exit code: %s", ret)

        filelist_runs = glob.glob(directory + "dl3_LST*")
        logger.info("Found %d DL3 files in working directory after copy.", len(filelist_runs))

        # Select runs pointing within 3 deg (squared < 9) in both RA and Dec
        # of the merged run's pointing.
        runlist_to_analyse = []
        for i in range(len(filelist_runs)):
            logger.info("Inspecting candidate run file: %s", filelist_runs[i])
            hdutemp = fits.open(filelist_runs[i])

            if (hdutemp["EVENTS"].header["RA_PNT"] - hdu1["EVENTS"].header["RA_PNT"]) ** 2 < 9 and (
                hdutemp["EVENTS"].header["DEC_PNT"] - hdu1["EVENTS"].header["DEC_PNT"]
            ) ** 2 < 9:
                # Run id is parsed from the file name (chars -10..-5),
                # i.e. the 5 digits before ".fits".
                run_id_candidate = int(filelist_runs[i][-10:-5])
                runlist_to_analyse.append(run_id_candidate)
                logger.info("Run selected for analysis: %s", run_id_candidate)

            hdutemp.close()

        cmd_index = (
            "lstchain_create_dl3_index_files --file-pattern='dl3_LST*' "
            "--input-dl3-dir='"
            + directory
            + "' --overwrite "
        )
        logger.info("Executing index command: %s", cmd_index)
        ret = os.system(cmd_index)
        logger.info("Index command exit code: %s", ret)

        logger.info("Final RA_PNT: %s", hdu1["EVENTS"].header["RA_PNT"])
        logger.info("Final run list for analysis: %s", runlist_to_analyse)

        # Uncomment if needed
        # RA, DEC = CreatePlots(
        #     directory,
        #     [runlist_to_analyse[-1]],
        #     hdu1["EVENTS"].header["RA_PNT"],
        #     hdu1["EVENTS"].header["DEC_PNT"],
        #     hdu1["EVENTS"].header["RA_PNT"],
        #     hdu1["EVENTS"].header["DEC_PNT"],
        #     logger=logger,
        # )

        hdu1.close()

        logger.info("merge_DL3.py finished successfully.")
        logger.info("============================================================")

    except Exception as e:
        # Log the full traceback, then re-raise so the caller sees the failure.
        logger.exception("Fatal error while running merge_DL3.py: %s", e)
        logger.info("============================================================")
        raise
if __name__ == "__main__":
    # Script entry point: parse CLI args, merge DL3 files, build index files.
    main()