ctools  2.0.0
 All Classes Namespaces Files Functions Variables Macros Pages
csroot2caldb.py
Go to the documentation of this file.
1 #! /usr/bin/env python
2 # ==========================================================================
3 # Generate IRFs in CALDB format from a ROOT offaxis performance file
4 #
5 # Copyright (C) 2016-2022 Juergen Knoedlseder
6 #
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation, either version 3 of the License, or
10 # (at your option) any later version.
11 #
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
16 #
17 # You should have received a copy of the GNU General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
19 #
20 # ==========================================================================
21 import os
22 import sys
23 import math
24 import copy
25 from datetime import datetime
26 import gammalib
27 import ctools
28 from cscripts import calutils
29 
30 
31 # ================== #
32 # csroot2caldb class #
33 # ================== #
34 class csroot2caldb(ctools.cscript):
35  """
36  Generates IRFs in CALDB from a ROOT offaxis performance file
37  """
38 
39  # Constructor
40  def __init__(self, *argv):
41  """
42  Constructor
43  """
44  # Initialise application by calling the base class constructor
45  self._init_cscript(self.__class__.__name__, ctools.__version__, argv)
46 
47  # Return
48  return
49 
50 
51  # Private methods
52  def _get_parameters(self):
53  """
54  Get parameters from parfile
55  """
56  # Query parameters
57  self['infile'].filename()
58  self['outdir'].string()
59  self['inst'].string()
60  self['id'].string()
61  self['analysis'].string()
62  self['zenith'].real()
63  self['azimuth'].real()
64  self['version'].string()
65  self['psftype'].string()
66  self['split'].boolean()
67  self['norm1d'].boolean()
68  self['rebin'].boolean()
69  self['eascale'].real()
70  self['bgdscale'].real()
71  self['bgdoversample'].integer()
72  self['bgdinfill'].boolean()
73 
74  # Write input parameters into logger
75  self._log_parameters(gammalib.TERSE)
76 
77  # Return
78  return
79 
80  def _init_metadata(self):
81  """
82  Initialise dictionary containing the IRF metadata
83 
84  Returns
85  -------
86  irf : dict
87  IRF metadata dictionary
88  """
89  # Initialise IRF metadata dictionary
90  irf = {}
91 
92  # Set calibration information
93  cal_name = 'NAME('+self['id'].string()+')'
94  cal_version = 'VERSION('+self['version'].string()+')'
95  cal_cut = 'CLASS(BEST)'
96  cal_analysis = 'ANALYSIS('+self['analysis'].string()+')'
97  cal_zenith = 'ZENITH(%.3f)deg' % self['zenith'].real()
98  cal_azimuth = 'AZIMUTH(%.3f)deg' % self['azimuth'].real()
99  cal_bounds = [cal_name, cal_version, cal_cut, cal_analysis, \
100  cal_zenith, cal_azimuth]
101 
102  # Set IRF information
103  irf['CAL_TEL'] = 'CTA'
104  irf['CAL_INST'] = self['inst'].string().upper()
105  irf['CAL_OBSID'] = self['id'].string()
106  irf['CAL_DET'] = 'NONE'
107  irf['CAL_FLT'] = 'NONE'
108  irf['CAL_CLASS'] = 'BCF'
109  irf['CAL_TYPE'] = 'DATA'
110  irf['CAL_QUAL'] = 0 # 0=good, 1=bad, 2=dubious, ...
111  irf['CAL_DATE'] = '14/01/30'
112  irf['VAL_DATE'] = '2014-01-30'
113  irf['VAL_TIME'] = '00:00:00'
114  irf['REF_TIME'] = 51544.0
115  irf['EA_NAME'] = 'EFF_AREA'
116  irf['EA_HDUCLAS3'] = 'FULL-ENCLOSURE'
117  irf['EA_HDUCLAS4'] = '2D'
118  irf['EA_DOC'] = 'CAL/GEN/92-019'
119  irf['EA_BOUNDS'] = copy.deepcopy(cal_bounds)
120  irf['EA_DESC'] = 'CTA effective area'
121  irf['PSF_NAME'] = 'RPSF'
122  irf['PSF_HDUCLAS3'] = 'FULL-ENCLOSURE'
123  irf['PSF_HDUCLAS4'] = '3GAUSS'
124  irf['PSF_DOC'] = 'CAL/GEN/92-020'
125  irf['PSF_BOUNDS'] = copy.deepcopy(cal_bounds)
126  irf['PSF_DESC'] = 'CTA point spread function'
127  irf['EDISP_NAME'] = 'EDISP'
128  irf['EDISP_HDUCLAS3'] = 'FULL-ENCLOSURE'
129  irf['EDISP_HDUCLAS4'] = '2D'
130  irf['EDISP_DOC'] = '???'
131  irf['EDISP_BOUNDS'] = copy.deepcopy(cal_bounds)
132  irf['EDISP_DESC'] = 'CTA energy dispersion'
133  irf['BKG_NAME'] = 'BKG'
134  irf['BKG_HDUCLAS3'] = 'FULL-ENCLOSURE'
135  irf['BKG_HDUCLAS4'] = '3D'
136  irf['BKG_DOC'] = '???'
137  irf['BKG_BOUNDS'] = copy.deepcopy(cal_bounds)
138  irf['BKG_DESC'] = 'CTA background'
139 
140  # Set parameter dependent IRF information
141  if self['psftype'].string() == 'King':
142  irf['PSF_HDUCLAS4'] = 'KING'
143  else:
144  irf['PSF_HDUCLAS4'] = '3GAUSS'
145 
146  # Return metadata
147  return irf
148 
149  def _make_dirs(self, version, irf):
150  """
151  Generate CALDB directory structure for one observation identifier
152 
153  The structure is given by
154 
155  data/<tel>/<inst>/bcf/<obsid>
156 
157  where <tel> is "cta" and <inst> is the instrument specified in the
158  CALDB constructor (the instrument may be used for different array
159  configurations).
160 
161  Parameters
162  ----------
163  version : str
164  Version string
165  irf : dict
166  IRF metadata dictionary
167 
168  Returns
169  -------
170  ds : dict
171  Directory structure information
172  """
173  # Initialise directory structure dictionary
174  ds = {}
175 
176  # Set base directory
177  base_dir = 'data'
178  base_dir += '/'+irf['CAL_TEL'].lower()
179  base_dir += '/'+irf['CAL_INST'].lower()
180 
181  # Set directory names
182  obs_dir = base_dir+'/bcf/'+irf['CAL_OBSID']
183  ds['EA_DIR'] = obs_dir
184  ds['PSF_DIR'] = obs_dir
185  ds['EDISP_DIR'] = obs_dir
186  ds['BKG_DIR'] = obs_dir
187 
188  # Set path to response components. If the "outdir" parameter is set
189  # then prefix the value to the path of the response components
190  outdir = self['outdir'].string()
191  if len(outdir) > 0:
192  ds['BASE_PATH'] = outdir+'/'+base_dir
193  ds['EA_PATH'] = outdir+'/'+ds['EA_DIR']
194  ds['PSF_PATH'] = outdir+'/'+ds['PSF_DIR']
195  ds['EDISP_PATH'] = outdir+'/'+ds['EDISP_DIR']
196  ds['BKG_PATH'] = outdir+'/'+ds['BKG_DIR']
197  else:
198  ds['BASE_PATH'] = base_dir
199  ds['EA_PATH'] = ds['EA_DIR']
200  ds['PSF_PATH'] = ds['PSF_DIR']
201  ds['EDISP_PATH'] = ds['EDISP_DIR']
202  ds['BKG_PATH'] = ds['BKG_DIR']
203 
204  # Set IRF component file names. If the "split" parameter is "yes" then
205  # split the IRF components over several files.
206  if self['split'].boolean():
207  ds['EA_FILE'] = 'ea_'+version+'.fits'
208  ds['PSF_FILE'] = 'psf_'+version+'.fits'
209  ds['EDISP_FILE'] = 'edisp_'+version+'.fits'
210  ds['BKG_FILE'] = 'bgd_'+version+'.fits'
211  else:
212  ds['EA_FILE'] = 'irf_'+version+'.fits'
213  ds['PSF_FILE'] = 'irf_'+version+'.fits'
214  ds['EDISP_FILE'] = 'irf_'+version+'.fits'
215  ds['BKG_FILE'] = 'irf_'+version+'.fits'
216 
217  # Create directory structure
218  if not os.path.isdir(ds['EA_PATH']):
219  os.makedirs(ds['EA_PATH'])
220  if not os.path.isdir(ds['PSF_PATH']):
221  os.makedirs(ds['PSF_PATH'])
222  if not os.path.isdir(ds['EDISP_PATH']):
223  os.makedirs(ds['EDISP_PATH'])
224  if not os.path.isdir(ds['BKG_PATH']):
225  os.makedirs(ds['BKG_PATH'])
226 
227  # Return ds
228  return ds
229 
    def _open(self, irf, ds):
        """
        Open existing or create new calibration

        The actual version will put all calibrations in the same file, although
        each part of the response function will have its own logical name. We
        can thus easily modify the script to put each calibration information
        in a separate file

        Parameters
        ----------
        irf : dict
            IRF metadata dictionary
        ds : dict
            Directory structure dictionary
        """
        # Open calibration database index (second GFits argument creates
        # the file if it does not yet exist)
        ds['CIF'] = gammalib.GFits(ds['BASE_PATH']+'/caldb.indx', True)

        # If file has no CIF extension then create it now
        if not ds['CIF'].contains('CIF'):
            ds['CIF'].append(calutils.create_cif_table())
        ds['HDU_CIF'] = ds['CIF'].table('CIF')

        # Set IRF component filenames
        ea_filename    = ds['EA_PATH']+'/'+ds['EA_FILE']
        psf_filename   = ds['PSF_PATH']+'/'+ds['PSF_FILE']
        edisp_filename = ds['EDISP_PATH']+'/'+ds['EDISP_FILE']
        bgd_filename   = ds['BKG_PATH']+'/'+ds['BKG_FILE']

        # Open IRF component files. In split mode each component gets its
        # own FITS file object; otherwise all four entries alias the same
        # effective area file object, so a single save/close handles all.
        if self['split'].boolean():
            ds['EA_FITS']    = gammalib.GFits(ea_filename, True)
            ds['PSF_FITS']   = gammalib.GFits(psf_filename, True)
            ds['EDISP_FITS'] = gammalib.GFits(edisp_filename, True)
            ds['BKG_FITS']   = gammalib.GFits(bgd_filename, True)
        else:
            ds['EA_FITS']    = gammalib.GFits(ea_filename, True)
            ds['PSF_FITS']   = ds['EA_FITS']
            ds['EDISP_FITS'] = ds['EA_FITS']
            ds['BKG_FITS']   = ds['EA_FITS']

        # Open HDUs (created and appended to the file if missing)
        ds['HDU_EA'] = self._open_hdu(ds['EA_FITS'], "EFFECTIVE AREA",
                                      irf['EA_NAME'], irf['EA_HDUCLAS3'],
                                      irf['EA_HDUCLAS4'], irf['EA_DOC'],
                                      irf)
        ds['HDU_PSF'] = self._open_hdu(ds['PSF_FITS'], "POINT SPREAD FUNCTION",
                                       irf['PSF_NAME'], irf['PSF_HDUCLAS3'],
                                       irf['PSF_HDUCLAS4'], irf['PSF_DOC'],
                                       irf)
        ds['HDU_EDISP'] = self._open_hdu(ds['EDISP_FITS'], "ENERGY DISPERSION",
                                         irf['EDISP_NAME'], irf['EDISP_HDUCLAS3'],
                                         irf['EDISP_HDUCLAS4'], irf['EDISP_DOC'],
                                         irf)
        ds['HDU_BKG'] = self._open_hdu(ds['BKG_FITS'], "BACKGROUND",
                                       irf['BKG_NAME'], irf['BKG_HDUCLAS3'],
                                       irf['BKG_HDUCLAS4'], irf['BKG_DOC'],
                                       irf)

        # Return
        return
292 
    def _close(self, irf, ds):
        """
        Close calibration FITS files

        Parameters
        ----------
        irf : dict
            IRF metadata dictionary
        ds : dict
            Directory structure dictionary
        """
        # Add information to CIF. We do this now as before we did not
        # necessarily have all the information at hand (in particular about
        # the boundaries)
        self._add_cif_info(irf, ds)

        # Save and close CIF (True forces overwriting an existing file)
        ds['CIF'].save(True)
        ds['CIF'].close()

        # Close all IRF components. If the files have been split all components
        # need to be closed, otherwise only the effective area component needs
        # to be closed as representative for all components (the other
        # entries alias the same GFits object, see _open()).
        ds['EA_FITS'].save(True)
        ds['EA_FITS'].close()
        if self['split'].boolean():
            ds['PSF_FITS'].save(True)
            ds['EDISP_FITS'].save(True)
            ds['BKG_FITS'].save(True)
            ds['PSF_FITS'].close()
            ds['EDISP_FITS'].close()
            ds['BKG_FITS'].close()

        # Return
        return
328 
329  def _open_hdu(self, fits, extname, name, hduclas3, hduclas4, doc, irf):
330  """
331  Open HDU
332 
333  Opens a FITS binary table with given "extname". If HDU does not exist
334  in the FITS file it will be created and appended to the FITS file.
335 
336  Parameters
337  ----------
338  fits : `~gammalib.GFits`
339  FITS file
340  extname : str
341  Extension name
342  name : str
343  Name string
344  hduclas3 : str
345  HDU class 3 string
346  hduclas4 : str
347  HDU class 4 string
348  doc : str
349  Document string
350  irf : dict
351  IRF metadata dictionary
352 
353  Returns
354  -------
355  table : `~gammalib.GFitsBinTable`
356  FITS binary table
357  """
358  # Create table if it does not yet exist
359  if not fits.contains(extname):
360 
361  # Create binary table
362  table = gammalib.GFitsBinTable()
363 
364  # Set extension name
365  table.extname(extname)
366 
367  # Set OGIP keywords
368  self._set_ogip_keywords(table, doc,
369  ['RESPONSE', name, hduclas3, hduclas4],
370  irf)
371 
372  # Append table to FITS file
373  fits.append(table)
374 
375  # Return FITS table
376  return fits.table(extname)
377 
378  def _set_ogip_keywords(self, hdu, hdudoc, hduclas, irf):
379  """
380  Set standard OGIP keywords for extension
381 
382  Parameters
383  ----------
384  hdu : `~gammalig.GFitsHDU`
385  Header Data Unit
386  hdudoc : str
387  Documentation reference string
388  hduclas : list of str
389  List of HDUCLAS fields
390  irf : dict
391  IRF dictonary
392  """
393  # Set UTC date of file creation
394  utc = datetime.utcnow().isoformat()[:19]
395 
396  # Set keywords
397  hdu.card('ORIGIN', 'IRAP', 'Name of organization making this file')
398  hdu.card('DATE', utc, 'File creation date (YYYY-MM-DDThh:mm:ss UTC)')
399  hdu.card('TELESCOP', irf['CAL_TEL'], 'Name of telescope')
400  hdu.card('INSTRUME', irf['CAL_INST'], 'Name of instrument')
401  hdu.card('DETNAM', irf['CAL_DET'], 'Name of detector')
402  hdu.card('HDUCLASS', 'OGIP', 'HDU class')
403  hdu.card('HDUDOC', hdudoc, 'HDU documentation')
404  for i, item in enumerate(hduclas):
405  key = 'HDUCLAS%d' % (i+1)
406  hdu.card(key, item, 'HDU class')
407  hdu.card('HDUVERS', '0.2', 'HDU version')
408 
409  # Stamp HDU
410  self._stamp(hdu)
411 
412  # Return
413  return
414 
    def _add_cif_info(self, irf, ds):
        """
        Add information to CIF extension

        Appends one row per IRF component (effective area, PSF, energy
        dispersion, background) to the Calibration Index File table and
        fills each row from the IRF metadata and directory structure.

        Parameters
        ----------
        irf : dict
            IRF metadata dictionary
        ds : dict
            Directory structure dictionary
        """
        # Set list of IRF component names (used as key prefixes into the
        # irf and ds dictionaries)
        names = ['EA', 'PSF', 'EDISP', 'BKG']

        # Initialise CIF row index at the end of the existing table so
        # that new rows are appended after any previous entries
        row = ds['HDU_CIF'].nrows()

        # Append rows for all components to CIF extension
        ds['HDU_CIF'].append_rows(len(names))

        # Add information for all components
        for name in names:

            # Set dictionary keys for this component
            key_dir    = '%s_DIR' % name
            key_file   = '%s_FILE' % name
            key_name   = '%s_NAME' % name
            key_desc   = '%s_DESC' % name
            key_bounds = '%s_BOUNDS' % name

            # Set generic information (identical for every component)
            ds['HDU_CIF']['TELESCOP'][row] = irf['CAL_TEL']
            ds['HDU_CIF']['INSTRUME'][row] = irf['CAL_INST']
            ds['HDU_CIF']['DETNAM'][row]   = irf['CAL_DET']
            ds['HDU_CIF']['FILTER'][row]   = irf['CAL_FLT']
            ds['HDU_CIF']['CAL_DEV'][row]  = 'ONLINE'
            ds['HDU_CIF']['CAL_CLAS'][row] = irf['CAL_CLASS']
            ds['HDU_CIF']['CAL_DTYP'][row] = irf['CAL_TYPE']
            ds['HDU_CIF']['CAL_VSD'][row]  = irf['VAL_DATE']
            ds['HDU_CIF']['CAL_VST'][row]  = irf['VAL_TIME']
            ds['HDU_CIF']['REF_TIME'][row] = irf['REF_TIME']
            ds['HDU_CIF']['CAL_QUAL'][row] = irf['CAL_QUAL']
            ds['HDU_CIF']['CAL_DATE'][row] = irf['CAL_DATE']

            # Set component specific information. The CAL_CBD vector cell
            # holds 9 entries; unused entries are padded with 'NONE'.
            ds['HDU_CIF']['CAL_DIR'][row]  = ds[key_dir]
            ds['HDU_CIF']['CAL_FILE'][row] = ds[key_file]
            ds['HDU_CIF']['CAL_CNAM'][row] = irf[key_name]
            ds['HDU_CIF']['CAL_DESC'][row] = irf[key_desc]
            ds['HDU_CIF']['CAL_XNO'][row]  = 1
            for i in range(9):
                if i >= len(irf[key_bounds]):
                    ds['HDU_CIF']['CAL_CBD'][row,i] = 'NONE'
                else:
                    ds['HDU_CIF']['CAL_CBD'][row,i] = irf[key_bounds][i]

            # Increment row index
            row += 1

        # Return
        return
476 
477  def _set_cif_keywords(self, hdu, name, bounds, desc, irf):
478  """
479  Set standard CIF keywords for extension
480 
481  Parameters
482  ----------
483  hdu : `~gammalib.GFitsHDU`
484  FITS HDU
485  name : str
486  Calibration name
487  bounds : list of str
488  Calibration boundaries
489  desc : str
490  Calibration description
491  irf : dict
492  IRF metadata dictionary
493  """
494  # Set standard CIF keywords
495  hdu.card('CSYSNAME', 'XMA_POL', '')
496  hdu.card('CCLS0001', irf['CAL_CLASS'], 'Calibration class')
497  hdu.card('CDTP0001', irf['CAL_TYPE'], 'Calibration type')
498  hdu.card('CCNM0001', name, 'Calibration name')
499 
500  # Set boundary keywords
501  for i in range(9):
502  keyname = 'CBD%d0001' % (i+1)
503  if i >= len(bounds):
504  value = 'NONE'
505  else:
506  value = bounds[i]
507  hdu.card(keyname, value, 'Calibration boundary')
508 
509  # Set validity keywords
510  hdu.card('CVSD0001', irf['VAL_DATE'],
511  'Calibration validity start date (UTC)')
512  hdu.card('CVST0001', irf['VAL_TIME'],
513  'Calibration validity start time (UTC)')
514  hdu.card('CDEC0001', desc, 'Calibration description')
515 
516  # Return
517  return
518 
519 
520  # ROOT specific private members
521  def _root2caldb(self, irf, ds):
522  """
523  Translate ROOT to CALDB information
524 
525  Parameters
526  ----------
527  irf : dict
528  IRF metadata dictionary
529  ds : dict
530  Directory structure dictionary
531  """
532  # Import TFile from ROOT module
533  from ROOT import TFile
534 
535  # Open ROOT performance file
536  tfile = TFile(self['infile'].filename().url())
537 
538  # Create effective area
539  self._root2ea(tfile, irf, ds)
540 
541  # Create point spread function
542  self._root2psf(tfile, irf, ds)
543 
544  # Create energy dispersion
545  self._root2edisp(tfile, irf, ds)
546 
547  # Create background
548  self._root2bgd(tfile, irf, ds)
549 
550  # Return
551  return
552 
553  def _root2ea(self, tfile, irf, ds):
554  """
555  Translate ROOT to CALDB effective area extension
556 
557  The following ROOT histograms are used:
558  - EffectiveAreaEtrueNoTheta2cut_offaxis -> EFFAREA
559  - EffectiveAreaEtrueNoTheta2cut (for normalization)
560 
561  Parameters
562  ----------
563  tfile : `~ROOT.TFile`
564  ROOT file
565  irf : dict
566  IRF metadata dictionary
567  ds : dict
568  Directory structure dictionary
569  """
570  # Write header
571  self._log_header1(gammalib.TERSE, 'Generate effective area extension')
572 
573  # Get relevant ROOT histograms
574  etrue = tfile.Get('EffectiveAreaEtrueNoTheta2cut_offaxis')
575 
576  # If requested then normalize the 2D histogram on the on-axis 1D
577  # histogram. This assures that the 2D histogram has the same on-axis
578  # effective area dependence as the 1D histogram.
579  if self['norm1d'].boolean():
580  etrue_1D = tfile.Get('EffectiveAreaEtrueNoTheta2cut')
581  self._renorm_onaxis(etrue, etrue_1D)
582 
583  # If requested then rebin the effective area Etrue histogram. This
584  # increases the number of bins by a factor 10.
585  if self['rebin'].boolean():
586  etrue.RebinX(10)
587  neng = etrue.GetXaxis().GetNbins()
588  noffset = etrue.GetYaxis().GetNbins()
589  for ioff in range(noffset):
590  for ieng in range(neng):
591  value = etrue.GetBinContent(ieng+1,ioff+1) / 10.0
592  etrue.SetBinContent(ieng+1,ioff+1,value)
593 
594  # Get effective area multiplicator. This allows renormalising the
595  # effective area.
596  eascale = self['eascale'].real()
597 
598  # If renormalisation has been requested then do it now
599  if eascale != 1.0:
600  neng = etrue.GetXaxis().GetNbins()
601  noffset = etrue.GetYaxis().GetNbins()
602  for ioff in range(noffset):
603  for ieng in range(neng):
604  value = etrue.GetBinContent(ieng+1,ioff+1) * eascale
605  etrue.SetBinContent(ieng+1,ioff+1,value)
606 
607  # Write boundary keywords
608  self._set_cif_keywords(ds['HDU_EA'], irf['EA_NAME'],
609  irf['EA_BOUNDS'], irf['EA_DESC'], irf)
610 
611  # Optionally write energy thresholds
612  if self['emin'].is_valid():
613  emin = self['emin'].real()
614  ds['HDU_EA'].card('LO_THRES', emin, '[TeV] Low energy threshold')
615  if self['emax'].is_valid():
616  emin = self['emax'].real()
617  ds['HDU_EA'].card('HI_THRES', emin, '[TeV] High energy threshold')
618 
619  # Create "EFFAREA" data column
620  self._make_2D(etrue, ds['HDU_EA'], 'EFFAREA', 'm**2')
621 
622  # Return
623  return
624 
625  def _root2psf(self, tfile, irf, ds):
626  """
627  Translate ROOT to CALDB point spread function extension
628 
629  Parameters
630  ----------
631  tfile : `~ROOT.TFile`
632  ROOT file
633  irf : dict
634  IRF metadata dictionary
635  ds : dict
636  Directory structure dictionary
637  """
638  # King profile PSF
639  if self['psftype'].string() == 'King':
640  self._root2psf_king(tfile, irf, ds)
641 
642  # ... otherwise use Gaussian profile PSF
643  else:
644  self._root2psf_gauss(tfile, irf, ds)
645 
646  # Return
647  return
648 
    def _root2psf_get_r68_and_r80(self, tfile):
        """
        Generates 68% and 80% containment radii histograms from
        AngularPSF2DEtrue_offaxis histogram

        Parameters
        ----------
        tfile : `~ROOT.TFile`
            ROOT file

        Returns
        -------
        r68, r80 : Tuple of `~ROOT.TH2F`
            68% and 80% containment radius ROOT histograms
        """
        # Get relevant ROOT histograms
        psf = tfile.Get('AngularPSF2DEtrue_offaxis')

        # Extract axes (X: energy, Y: angular distance, Z: offset angle)
        energies = psf.GetXaxis()
        dists    = psf.GetYaxis()
        offsets  = psf.GetZaxis()

        # Extract number of bins in histogram
        neng    = energies.GetNbins()
        ndist   = dists.GetNbins()
        noffset = offsets.GetNbins()

        # Import TH2F histograms from ROOT module
        from ROOT import TH2F

        # Create r68 and r80 histograms with the energy and offset axes
        # of the input histogram
        r68 = TH2F('AngRes68Etrue_offaxis',
                   'Angular resolution (68% containment)',
                   neng, energies.GetBinLowEdge(1), energies.GetBinUpEdge(neng),
                   noffset, offsets.GetBinLowEdge(1), offsets.GetBinUpEdge(noffset))
        r80 = TH2F('AngRes80Etrue_offaxis',
                   'Angular resolution (80% containment)',
                   neng, energies.GetBinLowEdge(1), energies.GetBinUpEdge(neng),
                   noffset, offsets.GetBinLowEdge(1), offsets.GetBinUpEdge(noffset))

        # Loop over all energy and offset angle bins
        for ieng in range(neng):
            for ioff in range(noffset):

                # Compute PDF integral over all distance bins
                pdf_integral = 0.0
                for idist in range(ndist):
                    pdf_integral += psf.GetBinContent(ieng+1,idist+1,ioff+1)

                # Compute 68% containment radius: accumulate the PDF until
                # 68% of the integral is exceeded and take the lower edge
                # of that distance bin
                pdf_68    = 0.0
                r68_angle = 0.0
                for idist in range(ndist):
                    pdf_68 += psf.GetBinContent(ieng+1,idist+1,ioff+1)
                    if pdf_68 > 0.68*pdf_integral:
                        r68_angle = dists.GetBinLowEdge(idist+1)
                        break

                # Compute 80% containment radius in the same way
                pdf_80    = 0.0
                r80_angle = 0.0
                for idist in range(ndist):
                    pdf_80 += psf.GetBinContent(ieng+1,idist+1,ioff+1)
                    if pdf_80 > 0.80*pdf_integral:
                        r80_angle = dists.GetBinLowEdge(idist+1)
                        break

                # Set histogram values
                r68.SetBinContent(ieng+1,ioff+1,r68_angle)
                r80.SetBinContent(ieng+1,ioff+1,r80_angle)

        # Return
        return r68, r80
723 
    def _root2psf_gauss(self, tfile, irf, ds):
        """
        Translate ROOT to CALDB point spread function extension

        The Gaussian PSF columns are filled as follows:
        - SCALE   : 1/(2*pi*SIGMA_1)
        - SIGMA_1 : 68% containment radius (derived from the
                    AngularPSF2DEtrue_offaxis histogram) scaled by 0.6624305
        - AMPL_2, SIGMA_2, AMPL_3, SIGMA_3 : 0.0 (single Gaussian only)

        Parameters
        ----------
        tfile : `~ROOT.TFile`
            ROOT file
        irf : dict
            IRF metadata dictionary
        ds : dict
            Directory structure dictionary
        """
        # Write header
        self._log_header1(gammalib.TERSE,
                          'Generate Gaussian point spread function extension')

        # Get 68% containment radius histogram (the 80% radius is not
        # needed for the Gaussian profile)
        r68, _ = self._root2psf_get_r68_and_r80(tfile)

        # Extract number of bins in histogram
        neng    = r68.GetXaxis().GetNbins()
        noffset = r68.GetYaxis().GetNbins()

        # Converts 68% -> 1 sigma (the conversion is applied in place, so
        # from here on the r68 histogram holds sigma values)
        r68_to_sigma = 0.6624305
        for ioff in range(noffset):
            for ieng in range(neng):
                sigma = r68.GetBinContent(ieng+1,ioff+1) * r68_to_sigma
                r68.SetBinContent(ieng+1,ioff+1,sigma)

        # Compute scale histogram as 1/(2*pi*sigma); bins with zero sigma
        # get a zero scale
        scale = r68.Clone()
        for ioff in range(noffset):
            for ieng in range(neng):
                integral = 2.0 * math.pi * r68.GetBinContent(ieng+1,ioff+1)
                if integral > 0.0:
                    value = 1.0 / integral
                else:
                    value = 0.0
                scale.SetBinContent(ieng+1,ioff+1,value)

        # Set zero histogram used for the unused Gaussian components
        zero = r68.Clone()
        for ioff in range(noffset):
            for ieng in range(neng):
                zero.SetBinContent(ieng+1,ioff+1,0.0)

        # Set boundaries
        irf['PSF_BOUNDS'].append('PSF(GAUSS)')

        # Write boundary keywords
        self._set_cif_keywords(ds['HDU_PSF'], irf['PSF_NAME'],
                               irf['PSF_BOUNDS'], irf['PSF_DESC'], irf)

        # Create "SCALE" data column
        self._make_2D(scale, ds['HDU_PSF'], 'SCALE', '')

        # Create "SIGMA_1" data column
        self._make_2D(r68, ds['HDU_PSF'], 'SIGMA_1', 'deg')

        # Create "AMPL_2" data column
        self._make_2D(zero, ds['HDU_PSF'], 'AMPL_2', '')

        # Create "SIGMA_2" data column
        self._make_2D(zero, ds['HDU_PSF'], 'SIGMA_2', 'deg')

        # Create "AMPL_3" data column
        self._make_2D(zero, ds['HDU_PSF'], 'AMPL_3', '')

        # Create "SIGMA_3" data column
        self._make_2D(zero, ds['HDU_PSF'], 'SIGMA_3', 'deg')

        # Return
        return
807 
    def _root2psf_king(self, tfile, irf, ds):
        """
        Translate ROOT to CALDB point spread function extension

        Computes the King profile parameters GAMMA and SIGMA from the 68%
        and 80% containment radius histograms that are derived from the
        AngularPSF2DEtrue_offaxis histogram in the ROOT file.

        Parameters
        ----------
        tfile : `~ROOT.TFile`
            ROOT file
        irf : dict
            IRF metadata dictionary
        ds : dict
            Directory structure dictionary
        """
        # Write header
        self._log_header1(gammalib.TERSE,
                          'Generate King point spread function extension')

        # Get 68% and 80% containment radius histograms
        r68, r80 = self._root2psf_get_r68_and_r80(tfile)

        # Extract number of bins in histogram
        neng    = r68.GetXaxis().GetNbins()
        noffset = r68.GetYaxis().GetNbins()

        # Initialise parameter maps by cloning the r68 2D map
        gamma2D = r68.Clone()
        sigma2D = r68.Clone()

        # Compute gamma and sigma values
        for ioff in range(noffset):

            # Initialise last results (used as fallback for ill-defined
            # bins further up in energy)
            last_gamma = 0.0
            last_sigma = 0.0

            # Loop over all energies
            for ieng in range(neng):

                # Extract radii
                r_68 = r68.GetBinContent(ieng+1,ioff+1)
                r_80 = r80.GetBinContent(ieng+1,ioff+1)

                # Initialise results
                gamma = 0.0
                sigma = 0.0

                # Continue only if both radii are positive
                if r_68 > 0 and r_80 > 0:

                    # Derive constants for equation to solve
                    a = 1.0 - 0.68
                    b = 1.0 - 0.80
                    c = r_68*r_68/(r_80*r_80)

                    # Solve equation (a^x-1)/(b^x-1)=c for x using secant
                    # method. Stop when we are better than 1e-6.
                    x1 = -0.5
                    x2 = -1.0
                    f1 = (math.pow(a,x1) - 1.0)/(math.pow(b,x1) - 1.0) - c
                    f2 = (math.pow(a,x2) - 1.0)/(math.pow(b,x2) - 1.0) - c
                    while True:
                        x = x1 - f1 * (x1-x2)/(f1-f2)
                        f = (math.pow(a,x) - 1.0)/(math.pow(b,x) - 1.0) - c
                        if abs(f) < 1.0e-6:
                            break
                        else:
                            f2 = f1
                            x2 = x1
                            f1 = f
                            x1 = x

                    # Compute gamma.
                    if x < 0.0:
                        gamma = 1.0 - 1.0/x
                    else:
                        gamma = 1.0

                    # Compute sigma from r_68; if that is not possible,
                    # fall back to r_80, and finally flag the bin as
                    # ill-defined by zeroing both parameters
                    denominator = 2.0 * gamma * (math.pow(a, x) - 1.0)
                    if denominator > 0.0:
                        sigma = r_68 * math.sqrt(1.0/denominator)
                    else:
                        denominator = 2.0 * gamma * (math.pow(b, x) - 1.0)
                        if denominator > 0.0:
                            sigma = r_80 * math.sqrt(1.0/denominator)
                        else:
                            gamma = 0.0
                            sigma = 0.0

                # Handle special case that no results were found.
                # This takes care of pixels that are ill defined
                # in the MC file.
                if gamma == 0.0 and sigma == 0.0:
                    gamma  = last_gamma
                    sigma  = last_sigma
                    status = ' (use last)'
                else:
                    status = ''

                # Log results
                self._log_value(gammalib.EXPLICIT,
                                'ieng=%d ioff=%d%s' % (ieng,ioff,status),
                                'r68=%f r80=%f gamma=%f sigma=%f' %
                                (r_68, r_80, gamma, sigma))

                # Store current result as last result
                last_gamma = gamma
                last_sigma = sigma

                # Set bin contents
                gamma2D.SetBinContent(ieng+1,ioff+1,gamma)
                sigma2D.SetBinContent(ieng+1,ioff+1,sigma)

        # Set boundaries
        irf['PSF_BOUNDS'].append('PSF(KING)')

        # Write boundary keywords
        self._set_cif_keywords(ds['HDU_PSF'], irf['PSF_NAME'],
                               irf['PSF_BOUNDS'], irf['PSF_DESC'], irf)

        # Create "GAMMA" data column
        self._make_2D(gamma2D, ds['HDU_PSF'], 'GAMMA', '')

        # Create "SIGMA" data column
        self._make_2D(sigma2D, ds['HDU_PSF'], 'SIGMA', 'deg')

        # Return
        return
940 
    def _root2edisp_get_migra(self, tfile):
        """
        Generates migration matrix histogram from MigMatrixNoTheta2cut_offaxis
        histogram

        Parameters
        ----------
        tfile : `~ROOT.TFile`
            ROOT file

        Returns
        -------
        migra : `~ROOT.TH3F`
            Migration matrix (axes: true energy, migration, offset angle)
        """
        # Get relevant ROOT histograms
        matrix = tfile.Get('MigMatrixNoTheta2cut_offaxis')

        # Extract axes (X: reconstructed energy, Y: true energy, Z: offset
        # angle)
        ereco   = matrix.GetXaxis()
        etrue   = matrix.GetYaxis()
        offsets = matrix.GetZaxis()

        # Extract number of bins in histogram
        nereco  = ereco.GetNbins()
        netrue  = etrue.GetNbins()
        noffset = offsets.GetNbins()

        # Set number of migration bins, covering the range 0-3 in
        # Ereco/Etrue
        nmigra = 300

        # Import TH3F histogram from ROOT module
        from ROOT import TH3F

        # Create migra histogram with axes (true energy, migration, offset)
        migra = TH3F('EestOverEtrue_offaxis',
                     'Migration',
                     netrue, etrue.GetBinLowEdge(1), etrue.GetBinUpEdge(netrue),
                     nmigra, 0.0, 3.0,
                     noffset, offsets.GetBinLowEdge(1), offsets.GetBinUpEdge(noffset))

        # Loop over true energies
        for ietrue in range(netrue):

            # Get true energy in TeV (the energy axes store log10(energy))
            e_true = math.pow(10.0, migra.GetXaxis().GetBinCenter(ietrue+1))

            # Loop over migration values
            for imigra in range(nmigra):

                # Get migration value (Ereco/Etrue)
                migration = migra.GetYaxis().GetBinCenter(imigra+1)

                # Compute reconstructed energy in TeV
                e_reco = e_true * migration

                # Continue only if reconstructed energy is positive
                if e_reco > 0.0:

                    # Find the reconstructed energy bin in the input matrix
                    ireco = ereco.FindFixBin(math.log10(e_reco))

                    # Continue only if bin is valid
                    if ireco > 0 and ireco <= nereco:

                        # Loop over offset angles
                        for ioff in range(noffset):

                            # Get bin value from the input matrix
                            value = matrix.GetBinContent(ireco,ietrue+1,ioff+1)

                            # Set bin value in the migration histogram
                            migra.SetBinContent(ietrue+1,imigra+1,ioff+1,value)

        # Return
        return migra
1017 
1018  def _root2edisp(self, tfile, irf, ds):
1019  """
1020  Translate ROOT to CALDB energy dispersion extension
1021 
1022  The following ROOT histograms are used:
1023  - EestOverEtrue_offaxis -> MATRIX
1024 
1025  Parameters
1026  ----------
1027  tfile : `~ROOT.TFile`
1028  ROOT file
1029  irf : dict
1030  IRF metadata dictionary
1031  ds : dict
1032  Directory structure dictionary
1033  """
1034  # Write header
1035  self._log_header1(gammalib.TERSE, 'Generate energy dispersion extension')
1036 
1037  # Get relevant ROOT histograms
1038  matrix = self._root2edisp_get_migra(tfile)
1039 
1040  # Write boundary keywords
1041  self._set_cif_keywords(ds['HDU_EDISP'], irf['EDISP_NAME'],
1042  irf['EDISP_BOUNDS'], irf['EDISP_DESC'], irf)
1043 
1044  # Create "MATRIX" data column
1045  self._make_3D_migra(matrix, ds['HDU_EDISP'], 'MATRIX', '')
1046 
1047  # Return
1048  return
1049 
1050  def _root2bgd(self, tfile, irf, ds):
1051  """
1052  Translate ROOT to CALDB background extension.
1053 
1054  The following ROOT histograms are used:
1055  - BGRatePerSqDeg_offaxis -> BKG
1056 
1057  Parameters
1058  ----------
1059  tfile : `~ROOT.TFile`
1060  ROOT file
1061  irf : dict
1062  IRF metadata dictionary
1063  ds : dict
1064  Directory structure dictionary
1065  """
1066  # Write header
1067  self._log_header1(gammalib.TERSE, 'Generate 3D background extension')
1068 
1069  # Get relevant ROOT histograms
1070  array = tfile.Get('BGRatePerSqDeg_offaxis')
1071 
1072  # Replace 2D histogram values by power law extrapolation
1073  self._plaw_replace(array, self['bgdethres'].real())
1074 
1075  # If requested then fill-in empty values in 2D histogram
1076  if self['bgdinfill'].boolean():
1077  self._infill_bgd(array)
1078 
1079  # If requested then normalize the 2D histogram on the on-axis 1D
1080  # histogram. This assures that the 2D histogram has the same on-axis
1081  # effective area dependence as the 1D histogram.
1082  if self['norm1d'].boolean():
1083  array_1D = tfile.Get('BGRatePerSqDeg')
1084  self._renorm_onaxis(array, array_1D)
1085 
1086  # Write boundary keywords
1087  self._set_cif_keywords(ds['HDU_BKG'], irf['BKG_NAME'],
1088  irf['BKG_BOUNDS'], irf['BKG_DESC'], irf)
1089 
1090  # Create "BKG" data column
1091  self._make_3D(array, ds['HDU_BKG'], 'BKG', '1/(MeV s sr)')
1092 
1093  # Return
1094  return
1095 
1096  def _append_column_axis(self, hdu, name, unit, axis, log=False):
1097  """
1098  Append column of ROOT axis values to HDU
1099 
1100  Parameters
1101  ----------
1102  hdu : `~gammalib.GFitsHDU`
1103  FITS HDU
1104  name : str
1105  Column name
1106  unit : str
1107  Column unit
1108  axis : `~ROOT.TAxis`
1109  ROOT histogram axis
1110  log : bool, optional
1111  Axis is logarithmic
1112  """
1113  # Continue only if columns does not yet exist
1114  if not hdu.contains(name):
1115 
1116  # Write header
1117  self._log_header3(gammalib.TERSE, 'Append axis column "%s"' % name)
1118 
1119  # Get number of axis bins
1120  nbins = axis.GetNbins()
1121 
1122  # Append column and set column unit
1123  hdu.append(gammalib.GFitsTableFloatCol(name, 1, nbins))
1124  hdu[name].unit(unit)
1125 
1126  # Do we have a LO axis? If not we have a HI axis
1127  if name[-2:] == 'LO':
1128  low = True
1129  else:
1130  low = False
1131 
1132  # Fill column
1133  for i in range(nbins):
1134  if low:
1135  value = axis.GetBinLowEdge(i+1)
1136  else:
1137  value = axis.GetBinUpEdge(i+1)
1138  if log:
1139  value = pow(10.0, value)
1140  hdu[name][0,i] = value
1141 
1142  # Log values
1143  self._log_value(gammalib.NORMAL, 'Number of axis bins', nbins)
1144  self._log_value(gammalib.NORMAL, 'Unit', unit)
1145 
1146  # Return
1147  return
1148 
1149  def _append_column_values(self, hdu, name, unit, values):
1150  """
1151  Append column of values to HDU
1152 
1153  Parameters
1154  ----------
1155  hdu : `~gammalib.GFitsHDU`
1156  FITS HDU
1157  name : str
1158  Column name
1159  unit : str
1160  Column unit
1161  values : list of float
1162  Axis values
1163  """
1164  # Continue only if columns does not yet exist
1165  if not hdu.contains(name):
1166 
1167  # Write header
1168  self._log_header3(gammalib.TERSE, 'Append value column "%s"' % name)
1169 
1170  # Get number of values
1171  nbins = len(values)
1172 
1173  # Append column and set column unit
1174  hdu.append(gammalib.GFitsTableFloatCol(name, 1, nbins))
1175  hdu[name].unit(unit)
1176 
1177  # Fill column
1178  for i in range(nbins):
1179  hdu[name][0,i] = values[i]
1180 
1181  # Log values
1182  self._log_value(gammalib.NORMAL, 'Number of values', nbins)
1183  self._log_value(gammalib.NORMAL, 'Unit', unit)
1184 
1185  # Return
1186  return
1187 
    def _renorm_onaxis(self, hist2D, hist1D):
        """
        Renormalise 2D histogram (energy,offset) on 1D histogram (energy)

        This method makes sure that a 2D histogram has the same onaxis values
        as the corresponding 1D histogram. The 2D histogram is modified in
        place; the 1D histogram is only read. If the energy binning of the
        two histograms differs, the method logs the problem and leaves the
        2D histogram untouched.

        Parameters
        ----------
        hist2D : `~ROOT.TH2F`
            ROOT 2D histogram (energy on X, offset angle on Y)
        hist1D : `~ROOT.TH1F`
            ROOT 1D histogram (energy) providing the on-axis reference
        """
        # Get 2D dimensions
        neng = hist2D.GetXaxis().GetNbins()
        noffset = hist2D.GetYaxis().GetNbins()

        # Continue only if the 1D and 2D histograms have the same number
        # of energy bins
        if neng != hist1D.GetXaxis().GetNbins():

            # Log if energy binning differs
            self._log_value(gammalib.TERSE, 'On-axis normalisation',
                            'Impossible since energy binning of 1D histogram '
                            '"%s" does not match that of 2D histogram "%s".' %
                            (hist1D.GetName(), hist2D.GetName()))
        else:
            # Log on-axis normalisation
            self._log_value(gammalib.TERSE, 'On-axis normalisation',
                            'On-axis values of 1D histogram "%s" imposed on '
                            '2D histogram "%s".' %
                            (hist1D.GetName(), hist2D.GetName()))

            # Get a copy of 2D histogram
            hist2D_copy = hist2D.Clone()

            # Divide 2D histogram by onaxis values to remove the energy
            # dependence. Energy bins with a zero on-axis value are left
            # unchanged to avoid a division by zero.
            for ieng in range(neng):
                onaxis = hist1D.GetBinContent(ieng+1)
                if onaxis > 0.0:
                    for ioff in range(noffset):
                        value = hist2D.GetBinContent(ieng+1,ioff+1) / onaxis
                        hist2D.SetBinContent(ieng+1,ioff+1,value)

            # Smooth in energy direction to reduce statistical fluctuations.
            # This is a weighted average (2:1) with the neighbouring energy
            # bins; edge bins use their single neighbour. Results are written
            # into the copy so that reads always see the unsmoothed input.
            for ieng in range(neng):
                for ioff in range(noffset):
                    if ieng == 0:
                        value = (2.0 * hist2D.GetBinContent(ieng+1,ioff+1) +
                                 1.0 * hist2D.GetBinContent(ieng+2,ioff+1)) / 3.0
                    elif ieng == neng-1:
                        value = (2.0 * hist2D.GetBinContent(ieng+1,ioff+1) +
                                 1.0 * hist2D.GetBinContent(ieng+0,ioff+1)) / 3.0
                    else:
                        value = (2.0 * hist2D.GetBinContent(ieng+1,ioff+1) +
                                 1.0 * hist2D.GetBinContent(ieng+0,ioff+1) +
                                 1.0 * hist2D.GetBinContent(ieng+2,ioff+1)) / 4.0
                    hist2D_copy.SetBinContent(ieng+1,ioff+1,value)

            # Normalize now for an onaxis value of 1. The first offset bin
            # is used as the on-axis reference.
            for ieng in range(neng):
                onaxis = hist2D_copy.GetBinContent(ieng+1,1)
                if onaxis > 0.0:
                    for ioff in range(noffset):
                        value = hist2D_copy.GetBinContent(ieng+1,ioff+1) / onaxis
                        hist2D_copy.SetBinContent(ieng+1,ioff+1,value)

            # Put back the energy dependence of the 1D histogram
            for ieng in range(neng):
                onaxis = hist1D.GetBinContent(ieng+1)
                for ioff in range(noffset):
                    value = hist2D_copy.GetBinContent(ieng+1,ioff+1) * onaxis
                    hist2D.SetBinContent(ieng+1,ioff+1,value)

        # Return
        return
1266 
    def _plaw_replace(self, hist2D, ethres):
        """
        Replace background rate values by power law values

        For each offset-angle column of the 2D histogram, a power law is
        derived from the measured rates (anchored at the rate maximum and
        at the threshold energy, see _plaw_coefficients) and the histogram
        content above the threshold energy is replaced by the power law
        value. The histogram is modified in place.

        Parameters
        ----------
        hist2D : `~ROOT.TH2F`
            ROOT 2D histogram (log10(energy) on X, offset angle on Y)
        ethres : float
            Energy threshold (TeV) above which values are replaced
        """
        # Extract energy and offset angle vectors
        energies = hist2D.GetXaxis()
        neng = energies.GetNbins()
        offsets = hist2D.GetYaxis()
        noffset = offsets.GetNbins()

        # Continue only if threshold is low enough, i.e. below the maximum
        # histogram energy (the X axis stores log10(energy))
        if ethres < math.pow(10.0, energies.GetBinUpEdge(neng)):

            # Determine mean energy values (arithmetic mean of the linear
            # bin edges)
            engs = []
            for ieng in range(neng):
                energy = 0.5 * (math.pow(10.0, energies.GetBinLowEdge(ieng+1)) +
                                math.pow(10.0, energies.GetBinUpEdge(ieng+1)))
                engs.append(energy)

            # Loop over all offaxis angles
            for ioff in range(noffset):

                # Determine offset angle (for logging)
                offset = 0.5 * (offsets.GetBinLowEdge(ioff+1) +
                                offsets.GetBinUpEdge(ioff+1))

                # Initialise vectors
                axis = []
                rates = []

                # Extract values
                for ieng in range(neng):
                    energy = engs[ieng]
                    rate = hist2D.GetBinContent(ieng+1,ioff+1)
                    axis.append(energy)
                    rates.append(rate)

                # Determine minimum energy (ignoring the maximum energy as
                # we do not need it)
                emin = self._plaw_energy_range(axis, rates)[0]

                # Get power law coefficients
                coeff = self._plaw_coefficients(axis, rates, emin, ethres)

                # Replace histogram values by power law values if coefficients
                # are valid (m == t == 0 signals an invalid fit).
                # NOTE(review): a genuine fit with m == 0.0 or t == 0.0 would
                # also be skipped by this test — confirm this is intended.
                if coeff['m'] != 0.0 and coeff['t'] != 0.0:
                    for ieng in range(neng):
                        energy = engs[ieng]
                        if energy > ethres:
                            value = self._plaw_value(coeff, energy)
                            hist2D.SetBinContent(ieng+1,ioff+1,value)

                            # Log power law replacement
                            self._log_value(gammalib.NORMAL,
                                            'Power law replacement',
                                            '%e cts/(s deg**2) (E=%8.4f TeV (%d) '
                                            'Off=%.2f deg (%d)' %
                                            (value,engs[ieng],ieng,offset,ioff))

        # Return
        return
1337 
1338  def _plaw_energy_range(self, energies, rates):
1339  """
1340  Determine the energy range over which the power law is valid
1341 
1342  Parameters
1343  ----------
1344  energies : list of float
1345  Energy values (TeV)
1346  rates : list of float
1347  Background rate values
1348  """
1349  # Find energy at which background rate takes a maximum
1350  min_energy = 0.0
1351  max_rate = 0.0
1352  for ieng, energy in enumerate(energies):
1353  if rates[ieng] > max_rate:
1354  max_rate = rates[ieng]
1355  min_energy = energy
1356 
1357  # Find last data point
1358  max_energy = 0.0
1359  for ieng in range(len(energies)-1,-1,-1):
1360  if rates[ieng] > 0.0:
1361  max_energy = energies[ieng]
1362  break
1363 
1364  # Return
1365  return (min_energy, max_energy)
1366 
1367  def _plaw_coefficients(self, energies, rates, emin, emax):
1368  """
1369  Determines the power law coefficients
1370 
1371  Parameters
1372  ----------
1373  energies : list of float
1374  Energy values (TeV)
1375  rates : list of float
1376  Background rate values
1377  emin : float
1378  Minimum energy (TeV)
1379  emax : float
1380  Maximum energy (TeV)
1381  """
1382  # Determine nearest energy indices
1383  i_emin = 0
1384  i_emax = 0
1385  delta_emin = 1.0e30
1386  delta_emax = 1.0e30
1387  for ieng, energy in enumerate(energies):
1388  if abs(energy-emin) < delta_emin:
1389  delta_emin = abs(energy-emin)
1390  i_emin = ieng
1391  if abs(energy-emax) < delta_emax:
1392  delta_emax = abs(energy-emax)
1393  i_emax = ieng
1394 
1395  # Determine coefficients of power law
1396  while rates[i_emax] == 0.0 and i_emax > i_emin:
1397  i_emax -= 1
1398  if rates[i_emax] > 0.0:
1399  x1 = math.log10(energies[i_emin])
1400  x2 = math.log10(energies[i_emax])
1401  y1 = math.log10(rates[i_emin])
1402  y2 = math.log10(rates[i_emax])
1403  m = (y2-y1)/(x2-x1)
1404  t = y1 - m * x1
1405  else:
1406  m = 0.0
1407  t = 0.0
1408 
1409  # Return coefficients
1410  return {'m': m, 't': t}
1411 
1412  def _plaw_value(self, coeff, energy):
1413  """
1414  Determine the power law value
1415 
1416  Parameters
1417  ----------
1418  coeff : dict
1419  Dictionary of coefficients
1420  energy : float
1421  Energy value (TeV)
1422  """
1423  # Compute value
1424  value = math.pow(10.0, coeff['m'] * math.log10(energy) + coeff['t'])
1425 
1426  # Return value
1427  return value
1428 
    def _infill_bgd(self, hist2D):
        """
        Fill all empty bins in 2D histogram by preceeding values in energy

        For each offset-angle column, empty (zero) energy bins that lie
        between the first and last non-zero bins are filled with the value
        of the nearest preceding non-zero bin. The histogram is modified in
        place; leading and trailing empty bins are left untouched.

        Parameters
        ----------
        hist2D : `~ROOT.TH2F`
            ROOT 2D histogram (log10(energy) on X, offset angle on Y)
        """
        # Extract energy and offset angle vectors
        energies = hist2D.GetXaxis()
        neng = energies.GetNbins()
        offsets = hist2D.GetYaxis()
        noffset = offsets.GetNbins()

        # Determine mean energy values (for logging)
        engs = []
        for ieng in range(neng):
            energy = 0.5 * (math.pow(10.0, energies.GetBinLowEdge(ieng+1)) +
                            math.pow(10.0, energies.GetBinUpEdge(ieng+1)))
            engs.append(energy)

        # Loop over all offaxis angles
        for ioff in range(noffset):

            # Determine offset angle (for logging)
            offset = 0.5 * (offsets.GetBinLowEdge(ioff+1) +
                            offsets.GetBinUpEdge(ioff+1))

            # Initialise indices and value
            i_start = -1
            i_stop = -1
            value = 0.0

            # First start and stop indices for infill: the first and last
            # energy bins with non-zero content. "value" is seeded with the
            # content of the first non-zero bin.
            for ieng in range(neng):
                if hist2D.GetBinContent(ieng+1,ioff+1) > 0.0:
                    i_start = ieng
                    value = hist2D.GetBinContent(ieng+1,ioff+1)
                    break
            for ieng in range(neng-1, -1, -1):
                if hist2D.GetBinContent(ieng+1,ioff+1) > 0.0:
                    i_stop = ieng
                    break

            # If indices are valid then fill in background rates
            if i_start > -1 and i_stop > -1:
                for ieng in range(i_start, i_stop+1):
                    if hist2D.GetBinContent(ieng+1,ioff+1) == 0.0:
                        hist2D.SetBinContent(ieng+1,ioff+1,value)

                        # Log background infill
                        self._log_value(gammalib.NORMAL,
                                        'Background infill',
                                        '%e cts/(s deg**2) (E=%8.4f TeV (%d) '
                                        'Off=%.2f deg (%d)' %
                                        (value,engs[ieng],ieng,offset,ioff))

                    else:
                        # Non-empty bin: remember its content as the infill
                        # value for subsequent empty bins
                        value = hist2D.GetBinContent(ieng+1,ioff+1)

        # Return
        return
1492 
1493  def _make_2D(self, array, hdu, name, unit, scale=1.0):
1494  """
1495  Make 2D data column as function of energy and offset angle
1496 
1497  If the HDU has already the energy and offset angle columns, this method
1498  will simply add another data column. If name==None, the method will not
1499  append any data column.
1500 
1501  Parameters
1502  ----------
1503  array : `~ROOT.TH2F`
1504  ROOT 2D histogram
1505  hdu : `~gammalib.GFitsHDU`
1506  FITS HDU
1507  name : str
1508  Data column name
1509  unit : str
1510  Data unit
1511  scale : float, optional
1512  Scaling factor for histogram values
1513  """
1514  # Write header
1515  self._log_header3(gammalib.TERSE, 'Make 2D data column "%s"' % name)
1516 
1517  # Extract energy and offset angle vectors
1518  energies = array.GetXaxis()
1519  offsets = array.GetYaxis()
1520  neng = energies.GetNbins()
1521  noffset = offsets.GetNbins()
1522 
1523  # Log parameters
1524  self._log_value(gammalib.NORMAL, 'Number of energies', neng)
1525  self._log_value(gammalib.NORMAL, 'Number of offsets', noffset)
1526 
1527  # Append axis columns to HDU
1528  self._append_column_axis(hdu, 'ENERG_LO', 'TeV', energies, log=True)
1529  self._append_column_axis(hdu, 'ENERG_HI', 'TeV', energies, log=True)
1530  self._append_column_axis(hdu, 'THETA_LO', 'deg', offsets)
1531  self._append_column_axis(hdu, 'THETA_HI', 'deg', offsets)
1532 
1533  # Append array column
1534  if name != None and not hdu.contains(name):
1535  hdu.append(gammalib.GFitsTableFloatCol(name, 1, neng*noffset))
1536  hdu[name].unit(unit)
1537  hdu[name].dim([neng, noffset])
1538  for ioff in range(noffset):
1539  for ieng in range(neng):
1540  index = ieng + ioff * neng
1541  value = array.GetBinContent(ieng+1,ioff+1)
1542  hdu[name][0,index] = value * scale
1543 
1544  # Collect boundary information
1545  bd_eng = 'ENERG(%.4f-%.2f)TeV' % \
1546  (pow(10.0, energies.GetBinLowEdge(1)), \
1547  pow(10.0, energies.GetBinUpEdge(neng)))
1548  bd_off = 'THETA(%.2f-%.2f)deg' % \
1549  (offsets.GetBinLowEdge(1), \
1550  offsets.GetBinUpEdge(noffset))
1551  bd_phi = 'PHI(0-360)deg'
1552  bounds = [bd_eng, bd_off, bd_phi]
1553 
1554  # Return boundary information
1555  return bounds
1556 
    def _make_3D(self, array, hdu, name, unit):
        """
        Make 3D data column as function of DETX, DETY and energy

        If the HDU has already the energy and offset angle columns, this method
        will simply add another data column. If name==None, the method will not
        append any data column.

        The radially symmetric (energy, offset) histogram is resampled onto
        a square DETX/DETY grid by interpolating the histogram at the radial
        distance theta = sqrt(DETX^2 + DETY^2) of each grid cell centre.

        Parameters
        ----------
        array : `~ROOT.TH2F`
            ROOT 2D histogram (log10(energy) on X, offset angle on Y)
        hdu : `~gammalib.GFitsHDU`
            FITS HDU
        name : str
            Data column name (None to skip appending the data column)
        unit : str
            Data unit
        """
        # Write header
        self._log_header3(gammalib.TERSE, 'Make 3D data column "%s"' % name)

        # Get User parameters
        scale = self['bgdscale'].real()
        oversample = self['bgdoversample'].integer()

        # Set constants (square of the deg->rad factor, converts deg**2
        # to steradian)
        deg2sr = 0.01745329*0.01745329

        # Extract energy and offset angle vectors
        energies = array.GetXaxis()
        neng = energies.GetNbins()
        offsets = array.GetYaxis()
        noffset = offsets.GetNbins()
        theta_max = offsets.GetBinUpEdge(noffset) # Maximum theta
        # Energy bin widths in MeV; the axis stores log10(TeV) and the
        # "+6.0" converts TeV to MeV in log space
        ewidth = [] # in MeV
        for ieng in range(neng):
            ewidth.append(pow(10.0, energies.GetBinUpEdge(ieng+1) +6.0) -
                          pow(10.0, energies.GetBinLowEdge(ieng+1)+6.0))

        # Log parameters
        self._log_value(gammalib.NORMAL, 'Scale', scale)
        self._log_value(gammalib.NORMAL, 'Oversample', oversample)
        self._log_value(gammalib.NORMAL, 'Number of energies', neng)
        self._log_value(gammalib.NORMAL, 'Number of offsets', noffset)
        self._log_value(gammalib.NORMAL, 'Maximum offset', theta_max)

        # Build DETX and DETY axes: the grid spans [-det_max, det_max] with
        # "oversample" subdivisions per original offset bin
        ndet = array.GetYaxis().GetNbins()
        ndets = 2*ndet*oversample
        det_max = array.GetYaxis().GetBinUpEdge(ndet)
        det_bin = det_max/float(ndet*oversample)
        dets_lo = []
        dets_hi = []
        dets2 = []
        for i in range(ndets):
            det_lo = -det_max + i*det_bin
            det_hi = det_lo + det_bin
            det_val = 0.5*(det_lo + det_hi)
            dets_lo.append(det_lo)
            dets_hi.append(det_hi)
            dets2.append(det_val*det_val)  # squared bin centre, for theta

        # Log parameters
        self._log_value(gammalib.NORMAL, 'Number of DETXY bins', ndets)
        self._log_value(gammalib.NORMAL, 'Maximum DETXY', det_max)
        self._log_value(gammalib.NORMAL, 'DETXY binsize', det_bin)

        # Append DETX_LO, DETX_HI, DETY_LO and DETY_HI columns
        self._append_column_values(hdu, 'DETX_LO', 'deg', dets_lo)
        self._append_column_values(hdu, 'DETX_HI', 'deg', dets_hi)
        self._append_column_values(hdu, 'DETY_LO', 'deg', dets_lo)
        self._append_column_values(hdu, 'DETY_HI', 'deg', dets_hi)

        # Append ENERG_LO and ENERG_HI columns (axis stores log10(TeV))
        self._append_column_axis(hdu, 'ENERG_LO', 'TeV', energies, log=True)
        self._append_column_axis(hdu, 'ENERG_HI', 'TeV', energies, log=True)

        # Append array column
        if name != None and not hdu.contains(name):
            hdu.append(gammalib.GFitsTableFloatCol(name, 1, ndets*ndets*neng))
            hdu[name].unit(unit)
            hdu[name].dim([ndets,ndets,neng])
            for ix in range(ndets):
                for iy in range(ndets):
                    for ieng in range(neng):
                        # DETX is the fastest-varying index in the column
                        index = ix + (iy + ieng * ndets) * ndets
                        theta = math.sqrt(dets2[ix] + dets2[iy])
                        # Cells beyond the maximum offset angle are not
                        # filled (left at the column's initial value)
                        if theta < theta_max:
                            binsq = array.Interpolate(energies.GetBinCenter(ieng+1),
                                                      theta)
                            # Convert per deg**2 to per sr and per MeV
                            value = binsq / deg2sr / (ewidth[ieng])
                            hdu[name][0,index] = value * scale

        # Collect boundary information
        bd_detx = 'DETX(%.2f-%.2f)deg' % (-det_max, det_max)
        bd_dety = 'DETY(%.2f-%.2f)deg' % (-det_max, det_max)
        bd_eng = 'ENERG(%.4f-%.2f)TeV' % \
                 (pow(10.0, energies.GetBinLowEdge(1)),
                  pow(10.0, energies.GetBinUpEdge(neng)))
        bounds = [bd_detx, bd_dety, bd_eng]

        # Return boundary information
        return bounds
1661 
1662  def _make_3D_migra(self, array, hdu, name, unit, scale=1.0):
1663  """
1664  Make 3D data column as function of ENERG, MIGRA and THETA
1665 
1666  If the HDU has already the energy and offset angle columns, this method
1667  will simply add another data column. If name==None, the method will not
1668  append any data column.
1669 
1670  Parameters
1671  ----------
1672  array : `~ROOT.TH3F`
1673  ROOT 2D histogram
1674  hdu : `~gammalib.GFitsHDU`
1675  FITS HDU
1676  name : str
1677  Data column name
1678  unit : str
1679  Data unit
1680  scale : float, optional
1681  Scaling factor for histogram values
1682  """
1683  # Write header
1684  self._log_header3(gammalib.TERSE,
1685  'Make 3D migration matrix data column "%s"' % name)
1686 
1687  # Extract Etrue, Eobs/Etrue and offset angle vectors
1688  etrue = array.GetXaxis()
1689  netrue = etrue.GetNbins()
1690  migra = array.GetYaxis()
1691  nmigra = migra.GetNbins()
1692  offsets = array.GetZaxis()
1693  noffset = offsets.GetNbins()
1694  ewidth = [] # in MeV
1695  for ieng in range(netrue):
1696  ewidth.append(pow(10.0, etrue.GetBinUpEdge(ieng+1) +6.0) - \
1697  pow(10.0, etrue.GetBinLowEdge(ieng+1)+6.0))
1698 
1699  # Log parameters
1700  self._log_value(gammalib.NORMAL, 'Number of energies', netrue)
1701  self._log_value(gammalib.NORMAL, 'Number of migrations', nmigra)
1702  self._log_value(gammalib.NORMAL, 'Number of offsets', noffset)
1703 
1704  # Append axis columns to HDU
1705  self._append_column_axis(hdu, 'ENERG_LO', 'TeV', etrue, log=True)
1706  self._append_column_axis(hdu, 'ENERG_HI', 'TeV', etrue, log=True)
1707  self._append_column_axis(hdu, 'MIGRA_LO', '', migra)
1708  self._append_column_axis(hdu, 'MIGRA_HI', '', migra)
1709  self._append_column_axis(hdu, 'THETA_LO', 'deg', offsets)
1710  self._append_column_axis(hdu, 'THETA_HI', 'deg', offsets)
1711 
1712  # Append migration matrix to HDU
1713  if name != None and not hdu.contains(name):
1714  hdu.append(gammalib.GFitsTableFloatCol(name, 1, netrue*nmigra*noffset))
1715  hdu[name].unit(unit)
1716  hdu[name].dim([netrue, nmigra, noffset])
1717  for ioff in range(noffset):
1718  for imigra in range(nmigra):
1719  for ieng in range(netrue):
1720  index = ieng + (imigra + ioff * nmigra) * netrue
1721  value = array.GetBinContent(ieng+1,imigra+1,ioff+1)
1722  hdu[name][0,index] = value * scale
1723 
1724  # Collect boundary information
1725  bd_eng = 'ENERG(%.4f-%.2f)TeV' % \
1726  (pow(10.0, etrue.GetBinLowEdge(1)),
1727  pow(10.0, etrue.GetBinUpEdge(netrue)))
1728  bd_migra = 'MIGRA(%.3f-%.3f)' % \
1729  (migra.GetBinLowEdge(1),
1730  migra.GetBinUpEdge(nmigra))
1731  bd_off = 'THETA(%.2f-%.2f)deg' % \
1732  (offsets.GetBinLowEdge(1),
1733  offsets.GetBinUpEdge(noffset))
1734  bd_phi = 'PHI(0-360)deg'
1735  bounds = [bd_eng, bd_migra, bd_off, bd_phi]
1736 
1737  # Return boundary information
1738  return bounds
1739 
1740 
1741  # Public methods
1742  def process(self):
1743  """
1744  Process the script
1745  """
1746  # Optional ROOT import
1747  try:
1748  from ROOT import TFile, TH2F, TH3F
1749  _has_root = True
1750  except ImportError:
1751  _has_root = False
1752 
1753  # Warn if ROOT module is missing
1754  if not _has_root:
1755  gammalib.warning('csroot2caldb', 'ROOT Python module not present, '
1756  'script will create empty IRF.')
1757 
1758  # Get parameters
1759  self._get_parameters()
1760 
1761  # Initialise IRF metadata
1762  irf = self._init_metadata()
1763 
1764  # Create directory structure
1765  ds = self._make_dirs('file', irf)
1766 
1767  # Open calibration files
1768  self._open(irf, ds)
1769 
1770  # Translate ROOT to CALDB information
1771  if _has_root:
1772  self._root2caldb(irf, ds)
1773 
1774  # Close calibration files
1775  self._close(irf, ds)
1776 
1777  # Return
1778  return
1779 
1780 
1781 # ======================== #
1782 # Main routine entry point #
1783 # ======================== #
if __name__ == '__main__':

    # Instantiate the application from the command line arguments and
    # execute it
    application = csroot2caldb(sys.argv)
    application.execute()