lsst.obs.base  17.0.1-11-g20c7f65+4
cameraMapper.py
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 import copy
24 import os
25 from astropy.io import fits # required by bypass_defects until defects are written as AFW tables
26 import re
27 import weakref
28 import lsst.daf.persistence as dafPersist
29 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
30 import lsst.daf.base as dafBase
31 import lsst.afw.geom as afwGeom
32 import lsst.afw.image as afwImage
33 import lsst.afw.table as afwTable
34 from lsst.afw.fits import readMetadata
35 import lsst.afw.cameraGeom as afwCameraGeom
36 import lsst.log as lsstLog
37 import lsst.pex.exceptions as pexExcept
38 from .exposureIdInfo import ExposureIdInfo
39 from .makeRawVisitInfo import MakeRawVisitInfo
40 from lsst.utils import getPackageDir
41 
42 __all__ = ["CameraMapper", "exposureFromImage"]
43 
44 
45 class CameraMapper(dafPersist.Mapper):
46 
47  """CameraMapper is a base class for mappers that handle images from a
48  camera and products derived from them. This provides an abstraction layer
49  between the data on disk and the code.
50 
51  Public methods: keys, queryMetadata, getDatasetTypes, map,
52  canStandardize, standardize
53 
54  Mappers for specific data sources (e.g., CFHT Megacam, LSST
55  simulations, etc.) should inherit this class.
56 
57  The CameraMapper manages datasets within a "root" directory. Note that
58  writing to a dataset present in the input root will hide the existing
59  dataset but not overwrite it. See #2160 for design discussion.
60 
61  A camera is assumed to consist of one or more rafts, each composed of
62  multiple CCDs. Each CCD is in turn composed of one or more amplifiers
63  (amps). A camera is also assumed to have a camera geometry description
64  (CameraGeom object) as a policy file, a filter description (Filter class
65  static configuration) as another policy file, and an optional defects
66  description directory.
67 
68  Information from the camera geometry and defects are inserted into all
69  Exposure objects returned.
70 
71  The mapper uses one or two registries to retrieve metadata about the
72  images. The first is a registry of all raw exposures. This must contain
73  the time of the observation. One or more tables (or the equivalent)
74  within the registry are used to look up data identifier components that
75  are not specified by the user (e.g. filter) and to return results for
76  metadata queries. The second is an optional registry of all calibration
77  data. This should contain validity start and end entries for each
78  calibration dataset in the same timescale as the observation time.
79 
80  Subclasses will typically set MakeRawVisitInfoClass:
81 
82  MakeRawVisitInfoClass: a class variable that points to a subclass of
83  MakeRawVisitInfo, a functor that creates an
84  lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
85 
86  Subclasses must provide the following methods:
87 
88  _extractDetectorName(self, dataId): returns the detector name for a CCD
89  (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
90  a dataset identifier referring to that CCD or a subcomponent of it.
91 
92  _computeCcdExposureId(self, dataId): see below
93 
94  _computeCoaddExposureId(self, dataId, singleFilter): see below
95 
96  Subclasses may also need to override the following methods:
97 
98  _transformId(self, dataId): transformation of a data identifier
99  from colloquial usage (e.g., "ccdname") to proper/actual usage
100  (e.g., "ccd"), including making suitable for path expansion (e.g. removing
101  commas). The default implementation does nothing. Note that this
102  method should not modify its input parameter.
103 
104  getShortCcdName(self, ccdName): a static method that returns a shortened
105  name suitable for use as a filename. The default version converts spaces
106  to underscores.
107 
108  _getCcdKeyVal(self, dataId): return a CCD key and value
109  by which to look up defects in the defects registry.
110  The default implementation returns ("ccd", detector name).
111 
112  _mapActualToPath(self, template, actualId): convert a template path to an
113  actual path, using the actual dataset identifier.
114 
115  The mapper's behaviors are largely specified by the policy file.
116  See the MapperDictionary.paf for descriptions of the available items.
117 
118  The 'exposures', 'calibrations', and 'datasets' subpolicies configure
119  mappings (see Mappings class).
120 
121  Common default mappings for all subclasses can be specified in the
122  "policy/{images,exposures,calibrations,datasets}.yaml" files. This
123  provides a simple way to add a product to all camera mappers.
124 
125  Functions to map (provide a path to the data given a dataset
126  identifier dictionary) and standardize (convert data into some standard
127  format or type) may be provided in the subclass as "map_{dataset type}"
128  and "std_{dataset type}", respectively.
129 
130  If non-Exposure datasets cannot be retrieved using standard
131  daf_persistence methods alone, a "bypass_{dataset type}" function may be
132  provided in the subclass to return the dataset instead of using the
133  "datasets" subpolicy.
134 
135  Implementations of map_camera and bypass_camera that should typically be
136  sufficient are provided in this base class.
137 
138  Notes
139  -----
140  TODO:
141 
142  - Handle defects the same way as all other calibration products, using the
143  calibration registry
144  - Instead of auto-loading the camera at construction time, load it from
145  the calibration registry
146  - Rewrite defects as AFW tables so we don't need astropy.io.fits to
147  unpersist them; then remove all mention of astropy.io.fits from this
148  package.
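
 Examples
 --------
 A minimal subclass sketch (``MyCamMapper``, ``obs_mycam`` and the policy
 file name are hypothetical illustrations, not part of this package)::

     class MyCamMapper(CameraMapper):
         packageName = "obs_mycam"

         def __init__(self, inputPolicy=None, **kwargs):
             policyFile = dafPersist.Policy.defaultPolicyFile(
                 "obs_mycam", "MyCamMapper.yaml", "policy")
             policy = dafPersist.Policy(policyFile)
             super().__init__(policy, os.path.dirname(policyFile), **kwargs)

         def _extractDetectorName(self, dataId):
             return "ccd%02d" % (dataId["ccd"],)

         def _computeCcdExposureId(self, dataId):
             return dataId["visit"] * 64 + dataId["ccd"]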
149  """
150  packageName = None
151 
152  # a class or subclass of MakeRawVisitInfo, a functor that makes an
153  # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
154  MakeRawVisitInfoClass = MakeRawVisitInfo
155 
156  # a class or subclass of PupilFactory
157  PupilFactoryClass = afwCameraGeom.PupilFactory
158 
159  def __init__(self, policy, repositoryDir,
160  root=None, registry=None, calibRoot=None, calibRegistry=None,
161  provided=None, parentRegistry=None, repositoryCfg=None):
162  """Initialize the CameraMapper.
163 
164  Parameters
165  ----------
166  policy : daf_persistence.Policy
167  Policy with per-camera defaults already merged.
168  repositoryDir : string
169  Policy repository for the subclassing module (obtained with
170  getRepositoryPath() on the per-camera default dictionary).
171  root : string, optional
172  Path to the root directory for data.
173  registry : string, optional
174  Path to registry with data's metadata.
175  calibRoot : string, optional
176  Root directory for calibrations.
177  calibRegistry : string, optional
178  Path to registry with calibrations' metadata.
179  provided : list of string, optional
180  Keys provided by the mapper.
181  parentRegistry : Registry subclass, optional
182  Registry from a parent repository that may be used to look up
183  data's metadata.
184  repositoryCfg : daf_persistence.RepositoryCfg or None, optional
185  The configuration information for the repository this mapper is
186  being used with.
187  """
188 
189  dafPersist.Mapper.__init__(self)
190 
191  self.log = lsstLog.Log.getLogger("CameraMapper")
192 
193  if root:
194  self.root = root
195  elif repositoryCfg:
196  self.root = repositoryCfg.root
197  else:
198  self.root = None
199 
200  repoPolicy = repositoryCfg.policy if repositoryCfg else None
201  if repoPolicy is not None:
202  policy.update(repoPolicy)
203 
204  # Levels
205  self.levels = dict()
206  if 'levels' in policy:
207  levelsPolicy = policy['levels']
208  for key in levelsPolicy.names(True):
209  self.levels[key] = set(levelsPolicy.asArray(key))
210  self.defaultLevel = policy['defaultLevel']
211  self.defaultSubLevels = dict()
212  if 'defaultSubLevels' in policy:
213  self.defaultSubLevels = policy['defaultSubLevels']
214 
215  # Root directories
216  if root is None:
217  root = "."
218  root = dafPersist.LogicalLocation(root).locString()
219 
220  self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
221 
222  # If the calibRoot is passed in, use that. If not and it's indicated in
223  # the policy, use that. And otherwise, the calibs are in the regular
224  # root.
225  # If the location indicated by the calib root does not exist, do not
226  # create it.
227  calibStorage = None
228  if calibRoot is not None:
229  calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
230  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
231  create=False)
232  else:
233  calibRoot = policy.get('calibRoot', None)
234  if calibRoot:
235  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
236  create=False)
237  if calibStorage is None:
238  calibStorage = self.rootStorage
239 
240  self.root = root
241 
242  # Registries
243  self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
244  self.rootStorage, searchParents=False,
245  posixIfNoSql=(not parentRegistry))
246  if not self.registry:
247  self.registry = parentRegistry
248  needCalibRegistry = policy.get('needCalibRegistry', None)
249  if needCalibRegistry:
250  if calibStorage:
251  self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
252  "calibRegistryPath", calibStorage,
253  posixIfNoSql=False) # NB never use posix for calibs
254  else:
255  raise RuntimeError(
256  "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
257  "calibRoot ivar:%s or policy['calibRoot']:%s" %
258  (calibRoot, policy.get('calibRoot', None)))
259  else:
260  self.calibRegistry = None
261 
262  # Dict of valid keys and their value types
263  self.keyDict = dict()
264 
265  self._initMappings(policy, self.rootStorage, calibStorage, provided=provided)
266  self._initWriteRecipes()
267 
268  # Camera geometry
269  self.cameraDataLocation = None # path to camera geometry config file
270  self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)
271 
272  # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
273  # camera package, which is on the local filesystem.
274  self.defectRegistry = None
275  if 'defects' in policy:
276  self.defectPath = os.path.join(repositoryDir, policy['defects'])
277  defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
278  self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
279 
280  # Filter translation table
281  self.filters = None
282 
283  # verify that the class variable packageName is set before attempting
284  # to instantiate an instance
285  if self.packageName is None:
286  raise ValueError('class variable packageName must not be None')
287 
289 
290  def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
291  """Initialize mappings
292 
293  For each of the dataset types that we want to be able to read, there
294  are methods that can be created to support them:
295  * map_<dataset> : determine the path for dataset
296  * std_<dataset> : standardize the retrieved dataset
297  * bypass_<dataset> : retrieve the dataset (bypassing the usual
298  retrieval machinery)
299  * query_<dataset> : query the registry
300 
301  Besides the dataset types explicitly listed in the policy, we create
302  additional, derived datasets for additional conveniences,
303  e.g., reading the header of an image, retrieving only the size of a
304  catalog.
305 
306  Parameters
307  ----------
308  policy : `lsst.daf.persistence.Policy`
309  Policy with per-camera defaults already merged
310  rootStorage : `Storage subclass instance`
311  Interface to persisted repository data.
312  calibStorage : `Storage subclass instance`
313  Interface to persisted calib repository data.
314  provided : `list` of `str`
315  Keys provided by the mapper
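
 Notes
 -----
 As an illustration, the derived dataset types created here let a butler
 client read pieces of a dataset cheaply; the "calexp", "src" and "raw"
 dataset types and ``dataId`` below are assumed examples::

     md = butler.get("calexp_md", dataId)        # FITS header only
     wcs = butler.get("calexp_wcs", dataId)      # persisted SkyWcs only
     nRows = butler.get("src_len", dataId)       # catalog length from NAXIS2
     paths = butler.get("raw_filename", dataId)  # filename(s) on disk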
316  """
317  # Sub-dictionaries (for exposure/calibration/dataset types)
318  imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
319  "obs_base", "ImageMappingDefaults.yaml", "policy"))
320  expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
321  "obs_base", "ExposureMappingDefaults.yaml", "policy"))
322  calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
323  "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
324  dsMappingPolicy = dafPersist.Policy()
325 
326  # Mappings
327  mappingList = (
328  ("images", imgMappingPolicy, ImageMapping),
329  ("exposures", expMappingPolicy, ExposureMapping),
330  ("calibrations", calMappingPolicy, CalibrationMapping),
331  ("datasets", dsMappingPolicy, DatasetMapping)
332  )
333  self.mappings = dict()
334  for name, defPolicy, cls in mappingList:
335  if name in policy:
336  datasets = policy[name]
337 
338  # Centrally-defined datasets
339  defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
340  if os.path.exists(defaultsPath):
341  datasets.merge(dafPersist.Policy(defaultsPath))
342 
343  mappings = dict()
344  setattr(self, name, mappings)
345  for datasetType in datasets.names(True):
346  subPolicy = datasets[datasetType]
347  subPolicy.merge(defPolicy)
348 
349  if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
350  def compositeClosure(dataId, write=False, mapper=None, mapping=None,
351  subPolicy=subPolicy):
352  components = subPolicy.get('composite')
353  assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
354  disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
355  python = subPolicy['python']
356  butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
357  disassembler=disassembler,
358  python=python,
359  dataId=dataId,
360  mapper=self)
361  for name, component in components.items():
362  butlerComposite.add(id=name,
363  datasetType=component.get('datasetType'),
364  setter=component.get('setter', None),
365  getter=component.get('getter', None),
366  subset=component.get('subset', False),
367  inputOnly=component.get('inputOnly', False))
368  return butlerComposite
369  setattr(self, "map_" + datasetType, compositeClosure)
370  # for now at least, don't set up any other handling for this dataset type.
371  continue
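 # An example "datasets" subpolicy entry that would take this composite
 # path (the dataset and component names are hypothetical):
 #
 #   myCoadd:
 #     composite:
 #       image: {datasetType: "myCoadd_image"}
 #       background: {datasetType: "myCoadd_background", subset: true}
 #     assembler: "lsst.mypkg.assembleMyCoadd"
 #     python: "lsst.afw.image.ExposureF"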
372 
373  if name == "calibrations":
374  mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
375  provided=provided, dataRoot=rootStorage)
376  else:
377  mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
378  self.keyDict.update(mapping.keys())
379  mappings[datasetType] = mapping
380  self.mappings[datasetType] = mapping
381  if not hasattr(self, "map_" + datasetType):
382  def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
383  return mapping.map(mapper, dataId, write)
384  setattr(self, "map_" + datasetType, mapClosure)
385  if not hasattr(self, "query_" + datasetType):
386  def queryClosure(format, dataId, mapping=mapping):
387  return mapping.lookup(format, dataId)
388  setattr(self, "query_" + datasetType, queryClosure)
389  if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
390  def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
391  return mapping.standardize(mapper, item, dataId)
392  setattr(self, "std_" + datasetType, stdClosure)
393 
394  def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
395  """Set convenience methods on CameraMapper"""
396  mapName = "map_" + datasetType + "_" + suffix
397  bypassName = "bypass_" + datasetType + "_" + suffix
398  queryName = "query_" + datasetType + "_" + suffix
399  if not hasattr(self, mapName):
400  setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
401  if not hasattr(self, bypassName):
402  if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
403  bypassImpl = getattr(self, "bypass_" + datasetType)
404  if bypassImpl is not None:
405  setattr(self, bypassName, bypassImpl)
406  if not hasattr(self, queryName):
407  setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
408 
409  # Filename of dataset
410  setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
411  [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
412  # Metadata from FITS file
413  if subPolicy["storage"] == "FitsStorage": # a FITS image
414  setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
415  readMetadata(location.getLocationsWithRoot()[0]))
416 
417  # Add support for configuring FITS compression
418  addName = "add_" + datasetType
419  if not hasattr(self, addName):
420  setattr(self, addName, self.getImageCompressionSettings)
421 
422  if name == "exposures":
423  def getSkyWcs(datasetType, pythonType, location, dataId):
424  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
425  return fitsReader.readWcs()
426 
427  setMethods("wcs", bypassImpl=getSkyWcs)
428 
429  def getPhotoCalib(datasetType, pythonType, location, dataId):
430  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
431  return fitsReader.readPhotoCalib()
432 
433  setMethods("photoCalib", bypassImpl=getPhotoCalib)
434 
435  def getVisitInfo(datasetType, pythonType, location, dataId):
436  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
437  return fitsReader.readVisitInfo()
438 
439  setMethods("visitInfo", bypassImpl=getVisitInfo)
440 
441  def getFilter(datasetType, pythonType, location, dataId):
442  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
443  return fitsReader.readFilter()
444 
445  setMethods("filter", bypassImpl=getFilter)
446 
447  setMethods("detector",
448  mapImpl=lambda dataId, write=False:
449  dafPersist.ButlerLocation(
450  pythonType="lsst.afw.cameraGeom.CameraConfig",
451  cppType="Config",
452  storageName="Internal",
453  locationList="ignored",
454  dataId=dataId,
455  mapper=self,
456  storage=None,
457  ),
458  bypassImpl=lambda datasetType, pythonType, location, dataId:
459  self.camera[self._extractDetectorName(dataId)]
460  )
461  setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
462  afwImage.bboxFromMetadata(
463  readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
464 
465  elif name == "images":
466  setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
467  afwImage.bboxFromMetadata(
468  readMetadata(location.getLocationsWithRoot()[0])))
469 
470  if subPolicy["storage"] == "FitsCatalogStorage": # a FITS catalog
471  setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
472  readMetadata(os.path.join(location.getStorage().root,
473  location.getLocations()[0]), hdu=1))
474 
475  # Sub-images
476  if subPolicy["storage"] == "FitsStorage":
477  def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
478  subId = dataId.copy()
479  del subId['bbox']
480  loc = mapping.map(mapper, subId, write)
481  bbox = dataId['bbox']
482  llcX = bbox.getMinX()
483  llcY = bbox.getMinY()
484  width = bbox.getWidth()
485  height = bbox.getHeight()
486  loc.additionalData.set('llcX', llcX)
487  loc.additionalData.set('llcY', llcY)
488  loc.additionalData.set('width', width)
489  loc.additionalData.set('height', height)
490  if 'imageOrigin' in dataId:
491  loc.additionalData.set('imageOrigin',
492  dataId['imageOrigin'])
493  return loc
494 
495  def querySubClosure(key, format, dataId, mapping=mapping):
496  subId = dataId.copy()
497  del subId['bbox']
498  return mapping.lookup(format, subId)
499  setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
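 # Hypothetical usage of the derived "<type>_sub" dataset: the cutout box
 # travels in the data ID, e.g.
 #   butler.get("calexp_sub", dataId, bbox=box, imageOrigin="LOCAL")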
500 
501  if subPolicy["storage"] == "FitsCatalogStorage":
502  # Length of catalog
503  setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
504  readMetadata(os.path.join(location.getStorage().root,
505  location.getLocations()[0]),
506  hdu=1).getScalar("NAXIS2"))
507 
508  # Schema of catalog
509  if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
510  setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
511  afwTable.Schema.readFits(os.path.join(location.getStorage().root,
512  location.getLocations()[0])))
513 
514  def _computeCcdExposureId(self, dataId):
515  """Compute the 64-bit (long) identifier for a CCD exposure.
516 
517  Subclasses must override
518 
519  Parameters
520  ----------
521  dataId : `dict`
522  Data identifier with visit, ccd.
523  """
524  raise NotImplementedError()
525 
526  def _computeCoaddExposureId(self, dataId, singleFilter):
527  """Compute the 64-bit (long) identifier for a coadd.
528 
529  Subclasses must override
530 
531  Parameters
532  ----------
533  dataId : `dict`
534  Data identifier with tract and patch.
535  singleFilter : `bool`
536  True means the desired ID is for a single-filter coadd, in which
537  case dataId must contain filter.
538  """
539  raise NotImplementedError()
540 
541  def _search(self, path):
542  """Search for path in the associated repository's storage.
543 
544  Parameters
545  ----------
546  path : string
547  Path that describes an object in the repository associated with
548  this mapper.
549  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
550  indicator will be stripped when searching and so will match
551  filenames without the HDU indicator, e.g. 'foo.fits'. The path
552  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
553 
554  Returns
555  -------
556  string
557  The path for this object in the repository. Will return None if the
558  object can't be found. If the input argument path contained an HDU
559  indicator, the returned path will also contain the HDU indicator.
560  """
561  return self.rootStorage.search(path)
562 
563  def backup(self, datasetType, dataId):
564  """Rename any existing object with the given type and dataId.
565 
566  The CameraMapper implementation saves objects in a sequence of e.g.:
567 
568  - foo.fits
569  - foo.fits~1
570  - foo.fits~2
571 
572  All of the backups will be placed in the output repo, however, and will
573  not be removed if they are found elsewhere in the _parent chain. This
574  means that the same file will be stored twice if the previous version
575  was found in an input repo.
576  """
577 
578  # Calling PosixStorage directly is not the long term solution in this
579  # function, this is work-in-progress on epic DM-6225. The plan is for
580  # parentSearch to be changed to 'search', and search only the storage
581  # associated with this mapper. All searching of parents will be handled
582  # by traversing the container of repositories in Butler.
583 
584  def firstElement(list):
585  """Get the first element in the list, or None if that can't be
586  done.
587  """
588  return list[0] if list is not None and len(list) else None
589 
590  n = 0
591  newLocation = self.map(datasetType, dataId, write=True)
592  newPath = newLocation.getLocations()[0]
593  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
594  path = firstElement(path)
595  oldPaths = []
596  while path is not None:
597  n += 1
598  oldPaths.append((n, path))
599  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
600  path = firstElement(path)
601  for n, oldPath in reversed(oldPaths):
602  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
603 
604  def keys(self):
605  """Return supported keys.
606 
607  Returns
608  -------
609  iterable
610  List of keys usable in a dataset identifier
611  """
612  return iter(self.keyDict.keys())
613 
614  def getKeys(self, datasetType, level):
615  """Return a dict of supported keys and their value types for a given
616  dataset type at a given level of the key hierarchy.
617 
618  Parameters
619  ----------
620  datasetType : `str`
621  Dataset type or None for all dataset types.
622  level : `str` or None
623  Level or None for all levels or '' for the default level for the
624  camera.
625 
626  Returns
627  -------
628  `dict`
629  Keys are strings usable in a dataset identifier, values are their
630  value types.
631  """
632 
633  # not sure if this is how we want to do this. what if None was intended?
634  if level == '':
635  level = self.getDefaultLevel()
636 
637  if datasetType is None:
638  keyDict = copy.copy(self.keyDict)
639  else:
640  keyDict = self.mappings[datasetType].keys()
641  if level is not None and level in self.levels:
642  keyDict = copy.copy(keyDict)
643  for l in self.levels[level]:
644  if l in keyDict:
645  del keyDict[l]
646  return keyDict
647 
648  def getDefaultLevel(self):
649  return self.defaultLevel
650 
651  def getDefaultSubLevel(self, level):
652  if level in self.defaultSubLevels:
653  return self.defaultSubLevels[level]
654  return None
655 
656  @classmethod
657  def getCameraName(cls):
658  """Return the name of the camera that this CameraMapper is for."""
659  className = str(cls)
660  className = className[className.find('.'):-1]
661  m = re.search(r'(\w+)Mapper', className)
662  if m is None:
663  m = re.search(r"class '[\w.]*?(\w+)'", className)
664  name = m.group(1)
665  return name[:1].lower() + name[1:] if name else ''
666 
667  @classmethod
668  def getPackageName(cls):
669  """Return the name of the package containing this CameraMapper."""
670  if cls.packageName is None:
671  raise ValueError('class variable packageName must not be None')
672  return cls.packageName
673 
674  @classmethod
675  def getPackageDir(cls):
676  """Return the base directory of this package"""
677  return getPackageDir(cls.getPackageName())
678 
679  def map_camera(self, dataId, write=False):
680  """Map a camera dataset."""
681  if self.camera is None:
682  raise RuntimeError("No camera dataset available.")
683  actualId = self._transformId(dataId)
684  return dafPersist.ButlerLocation(
685  pythonType="lsst.afw.cameraGeom.CameraConfig",
686  cppType="Config",
687  storageName="ConfigStorage",
688  locationList=self.cameraDataLocation or "ignored",
689  dataId=actualId,
690  mapper=self,
691  storage=self.rootStorage
692  )
693 
694  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
695  """Return the (preloaded) camera object.
696  """
697  if self.camera is None:
698  raise RuntimeError("No camera dataset available.")
699  return self.camera
700 
701  def map_defects(self, dataId, write=False):
702  """Map defects dataset.
703 
704  Returns
705  -------
706  `lsst.daf.persistence.ButlerLocation`
707  Minimal ButlerLocation containing just the locationList field
708  (just enough information that bypass_defects can use it).
709  """
710  defectFitsPath = self._defectLookup(dataId=dataId)
711  if defectFitsPath is None:
712  raise RuntimeError("No defects available for dataId=%s" % (dataId,))
713 
714  return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
715  dataId, self,
716  storage=self.rootStorage)
717 
718  def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
719  """Return a defect based on the butler location returned by map_defects
720 
721  Parameters
722  ----------
723  butlerLocation : `lsst.daf.persistence.ButlerLocation`
724  locationList = path to defects FITS file
725  dataId : `dict`
726  Butler data ID; "ccd" must be set.
727 
728  Note: the name "bypass_XXX" means the butler makes no attempt to
729  convert the ButlerLocation into an object, which is what we want for
730  now, since that conversion is a bit tricky.
731  """
732  detectorName = self._extractDetectorName(dataId)
733  defectsFitsPath = butlerLocation.locationList[0]
734 
735  with fits.open(defectsFitsPath) as hduList:
736  for hdu in hduList[1:]:
737  if hdu.header["name"] != detectorName:
738  continue
739 
740  defectList = []
741  for data in hdu.data:
742  bbox = afwGeom.Box2I(
743  afwGeom.Point2I(int(data['x0']), int(data['y0'])),
744  afwGeom.Extent2I(int(data['width']), int(data['height'])),
745  )
746  defectList.append(afwImage.DefectBase(bbox))
747  return defectList
748 
749  raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
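
 # A defects file readable by bypass_defects can be written with astropy
 # (a sketch; the detector name "ccd00" and the box values are assumed):
 #
 #     import numpy as np
 #     from astropy.io import fits
 #     rec = np.rec.fromrecords([(10, 20, 3, 4)],
 #                              names=["x0", "y0", "width", "height"])
 #     hdu = fits.BinTableHDU(rec)
 #     hdu.header["name"] = "ccd00"
 #     fits.HDUList([fits.PrimaryHDU(), hdu]).writeto("defects.fits")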
750 
751  def map_expIdInfo(self, dataId, write=False):
752  return dafPersist.ButlerLocation(
753  pythonType="lsst.obs.base.ExposureIdInfo",
754  cppType=None,
755  storageName="Internal",
756  locationList="ignored",
757  dataId=dataId,
758  mapper=self,
759  storage=self.rootStorage
760  )
761 
762  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
763  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
764  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
765  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
766  return ExposureIdInfo(expId=expId, expBits=expBits)
767 
768  def std_bfKernel(self, item, dataId):
769  """Disable standardization for bfKernel
770 
771  bfKernel is a calibration product that is numpy array,
772  unlike other calibration products that are all images;
773  all calibration images are sent through _standardizeExposure
774  due to CalibrationMapping, but we don't want that to happen to bfKernel
775  """
776  return item
777 
778  def std_raw(self, item, dataId):
779  """Standardize a raw dataset by converting it to an Exposure instead
780  of an Image"""
781  return self._standardizeExposure(self.exposures['raw'], item, dataId,
782  trimmed=False, setVisitInfo=True)
783 
784  def map_skypolicy(self, dataId):
785  """Map a sky policy."""
786  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
787  "Internal", None, None, self,
788  storage=self.rootStorage)
789 
790  def std_skypolicy(self, item, dataId):
791  """Standardize a sky policy by returning the one we use."""
792  return self.skypolicy
793 
794 
795  ###############################################################################
796  #
797  # Utility functions
798  #
799 
800  def _getCcdKeyVal(self, dataId):
801  """Return CCD key and value used to look up a defect in the defect
802  registry
803 
804  The default implementation simply returns ("ccd", full detector name)
805  """
806  return ("ccd", self._extractDetectorName(dataId))
807 
808  def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
809  posixIfNoSql=True):
810  """Set up a registry (usually SQLite3), trying a number of possible
811  paths.
812 
813  Parameters
814  ----------
815  name : string
816  Name of registry.
817  description : `str`
818  Description of registry (for log messages)
819  path : string
820  Path for registry.
821  policy : string
822  Policy that contains the registry name, used if path is None.
823  policyKey : string
824  Key in policy for registry path.
825  storage : Storage subclass
826  Repository Storage to look in.
827  searchParents : bool, optional
828  True if the search for a registry should follow any Butler v1
829  _parent symlinks.
830  posixIfNoSql : bool, optional
831  If an sqlite registry is not found, will create a posix registry if
832  this is True.
833 
834  Returns
835  -------
836  lsst.daf.persistence.Registry
837  Registry object
838  """
839  if path is None and policyKey in policy:
840  path = dafPersist.LogicalLocation(policy[policyKey]).locString()
841  if os.path.isabs(path):
842  raise RuntimeError("Policy should not indicate an absolute path for registry.")
843  if not storage.exists(path):
844  newPath = storage.instanceSearch(path)
845 
846  newPath = newPath[0] if newPath is not None and len(newPath) else None
847  if newPath is None:
848  self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
849  path)
850  path = newPath
851  else:
852  self.log.warn("Unable to locate registry at policy path: %s", path)
853  path = None
854 
855  # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
856  # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
857  # root from path. Currently only works with PosixStorage
858  try:
859  root = storage.root
860  if path and (path.startswith(root)):
861  path = path[len(root + '/'):]
862  except AttributeError:
863  pass
864 
865  # determine if there is an sqlite registry and if not, try the posix registry.
866  registry = None
867 
868  def search(filename, description):
869  """Search for file in storage
870 
871  Parameters
872  ----------
873  filename : `str`
874  Filename to search for
875  description : `str`
876  Description of file, for error message.
877 
878  Returns
879  -------
880  path : `str` or `None`
881  Path to file, or None
882  """
883  result = storage.instanceSearch(filename)
884  if result:
885  return result[0]
886  self.log.debug("Unable to locate %s: %s", description, filename)
887  return None
888 
889  # Search for a suitable registry database
890  if path is None:
891  path = search("%s.pgsql" % name, "%s in root" % description)
892  if path is None:
893  path = search("%s.sqlite3" % name, "%s in root" % description)
894  if path is None:
895  path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
896 
897  if path is not None:
898  if not storage.exists(path):
899  newPath = storage.instanceSearch(path)
900  newPath = newPath[0] if newPath is not None and len(newPath) else None
901  if newPath is not None:
902  path = newPath
903  localFileObj = storage.getLocalFile(path)
904  self.log.info("Loading %s registry from %s", description, localFileObj.name)
905  registry = dafPersist.Registry.create(localFileObj.name)
906  localFileObj.close()
907  elif not registry and posixIfNoSql:
908  try:
909  self.log.info("Loading Posix %s registry from %s", description, storage.root)
910  registry = dafPersist.PosixRegistry(storage.root)
911  except Exception:
912  registry = None
913 
914  return registry
915 
916  def _transformId(self, dataId):
917  """Generate a standard ID dict from a camera-specific ID dict.
918 
919  Canonical keys include:
920  - amp: amplifier name
921  - ccd: CCD name (in LSST this is a combination of raft and sensor)
922  The default implementation returns a copy of its input.
923 
924  Parameters
925  ----------
926  dataId : `dict`
927  Dataset identifier; this must not be modified
928 
929  Returns
930  -------
931  `dict`
932  Transformed dataset identifier.
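
 A subclass override might canonicalize colloquial keys (a sketch; the
 "ccdname" key is an assumed example)::

     def _transformId(self, dataId):
         actualId = dataId.copy()
         if "ccdname" in actualId:
             actualId["ccd"] = actualId.pop("ccdname").replace(",", "")
         return actualId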
933  """
934 
935  return dataId.copy()
936 
937  def _mapActualToPath(self, template, actualId):
938  """Convert a template path to an actual path, using the actual data
939  identifier. This implementation is usually sufficient but can be
940  overridden by the subclass.
941 
942  Parameters
943  ----------
944  template : `str`
945  Template path
946  actualId : `dict`
947  Dataset identifier
948 
949  Returns
950  -------
951  `str`
952  Pathname
953  """
954 
955  try:
956  transformedId = self._transformId(actualId)
957  return template % transformedId
958  except Exception as e:
959  raise RuntimeError("Failed to format %r with data %r: %s" % (template, actualId, e))
960 
961  @staticmethod
962  def getShortCcdName(ccdName):
963  """Convert a CCD name to a form useful as a filename
964 
965  The default implementation converts spaces to underscores.
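
 For example::

     >>> CameraMapper.getShortCcdName("R:1,2 S:3,4")
     'R:1,2_S:3,4'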
966  """
967  return ccdName.replace(" ", "_")
968 
969  def _extractDetectorName(self, dataId):
970  """Extract the detector (CCD) name from the dataset identifier.
971 
972  The name in question is the detector name used by lsst.afw.cameraGeom.
973 
974  Parameters
975  ----------
976  dataId : `dict`
977  Dataset identifier.
978 
979  Returns
980  -------
981  `str`
982  Detector name
983  """
984  raise NotImplementedError("No _extractDetectorName() function specified")
985 
986  def _extractAmpId(self, dataId):
987  """Extract the amplifier identifier from a dataset identifier.
988 
989  .. note:: Deprecated in 11_0
990 
991  The amplifier identifier has two parts: the detector name for the CCD
992  containing the amplifier and the index of the amplifier in the detector.
993 
994  Parameters
995  ----------
996  dataId : `dict`
997  Dataset identifier
998 
999  Returns
1000  -------
1001  `tuple`
1002  Amplifier identifier
1003  """
1004 
1005  trDataId = self._transformId(dataId)
1006  return (trDataId["ccd"], int(trDataId['amp']))
1007 
1008  def _setAmpDetector(self, item, dataId, trimmed=True):
1009  """Set the detector object in an Exposure for an amplifier.
1010 
1011  Defects are also added to the Exposure based on the detector object.
1012 
1013  Parameters
1014  ----------
1015  item : `lsst.afw.image.Exposure`
1016  Exposure to set the detector in.
1017  dataId : `dict`
1018  Dataset identifier
1019  trimmed : `bool`
1020  Should detector be marked as trimmed? (ignored)
1021  """
1022 
1023  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
1024 
1025  def _setCcdDetector(self, item, dataId, trimmed=True):
1026  """Set the detector object in an Exposure for a CCD.
1027 
1028  Parameters
1029  ----------
1030  item : `lsst.afw.image.Exposure`
1031  Exposure to set the detector in.
1032  dataId : `dict`
1033  Dataset identifier
1034  trimmed : `bool`
1035  Should detector be marked as trimmed? (ignored)
1036  """
1037  if item.getDetector() is not None:
1038  return
1039 
1040  detectorName = self._extractDetectorName(dataId)
1041  detector = self.camera[detectorName]
1042  item.setDetector(detector)
1043 
1044  def _setFilter(self, mapping, item, dataId):
1045  """Set the filter object in an Exposure. If the Exposure had a FILTER
1046  keyword, this was already processed during load. But if it didn't,
1047  use the filter from the registry.
1048 
1049  Parameters
1050  ----------
1051  mapping : `lsst.obs.base.Mapping`
1052  Where to get the filter from.
1053  item : `lsst.afw.image.Exposure`
1054  Exposure to set the filter in.
1055  dataId : `dict`
1056  Dataset identifier.
1057  """
1058 
1059  if not isinstance(item, (afwImage.ExposureU, afwImage.ExposureI,
1060  afwImage.ExposureF, afwImage.ExposureD)):
1061  return
1062 
1063  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1064  return
1065 
1066  actualId = mapping.need(['filter'], dataId)
1067  filterName = actualId['filter']
1068  if self.filters is not None and filterName in self.filters:
1069  filterName = self.filters[filterName]
1070  item.setFilter(afwImage.Filter(filterName))
1071 
1072  # Default standardization function for exposures
1073  def _standardizeExposure(self, mapping, item, dataId, filter=True,
1074  trimmed=True, setVisitInfo=True):
1075  """Default standardization function for images.
1076 
1077  This sets the Detector from the camera geometry
1078  and optionally sets the Filter. In both cases this saves
1079  having to persist some data in each exposure (or image).
1080 
1081  Parameters
1082  ----------
1083  mapping : `lsst.obs.base.Mapping`
1084  Where to get the values from.
1085  item : image-like object
1086  Can be any of lsst.afw.image.Exposure,
1087  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
1088  or lsst.afw.image.MaskedImage
1089 
1090  dataId : `dict`
1091  Dataset identifier
1092  filter : `bool`
1093  Set filter? Ignored if item is already an exposure
1094  trimmed : `bool`
1095  Should detector be marked as trimmed?
1096  setVisitInfo : `bool`
1097  Should Exposure have its VisitInfo filled out from the metadata?
1098 
1099  Returns
1100  -------
1101  `lsst.afw.image.Exposure`
1102  The standardized Exposure.
1103  """
1104  try:
1105  item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
1106  except Exception as e:
1107  self.log.error("Could not turn item=%r into an exposure: %s" % (item, e))
1108  raise
1109 
1110  if mapping.level.lower() == "amp":
1111  self._setAmpDetector(item, dataId, trimmed)
1112  elif mapping.level.lower() == "ccd":
1113  self._setCcdDetector(item, dataId, trimmed)
1114 
1115  if filter:
1116  self._setFilter(mapping, item, dataId)
1117 
1118  return item
1119 
1120  def _defectLookup(self, dataId, dateKey='taiObs'):
1121  """Find the defects for a given CCD.
1122 
1123  Parameters
1124  ----------
1125  dataId : `dict`
1126  Dataset identifier
1127 
1128  Returns
1129  -------
1130  `str`
1131  Path to the defects file or None if not available.
1132  """
1133  if self.defectRegistry is None:
1134  return None
1135  if self.registry is None:
1136  raise RuntimeError("No registry for defect lookup")
1137 
1138  ccdKey, ccdVal = self._getCcdKeyVal(dataId)
1139 
1140  dataIdForLookup = {'visit': dataId['visit']}
1141  # .lookup will fail in a posix registry because there is no template to provide.
1142  rows = self.registry.lookup((dateKey,), ('raw_visit',), dataIdForLookup)
1143  if len(rows) == 0:
1144  return None
1145  assert len(rows) == 1
1146  dayObs = rows[0][0]
1147 
1148  # Lookup the defects for this CCD serial number that are valid at the exposure midpoint.
1149  rows = self.defectRegistry.executeQuery(("path",), ("defect",),
1150  [(ccdKey, "?")],
1151  ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
1152  (ccdVal, dayObs))
1153  if not rows:
1154  return None
1155  if len(rows) == 1:
1156  return os.path.join(self.defectPath, rows[0][0])
1157  else:
1158  raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
1159  (ccdVal, dayObs, len(rows), ", ".join([_[0] for _ in rows])))
1160 
1161  def _makeCamera(self, policy, repositoryDir):
1162  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
1163  the camera geometry
1164 
1165  Also set self.cameraDataLocation, if relevant (else it can be left
1166  None).
1167 
1168  This implementation assumes that policy contains an entry "camera"
1169  that points to the subdirectory in this package of camera data;
1170  specifically, that subdirectory must contain:
1171  - a file named `camera.py` that contains persisted camera config
1172  - ampInfo table FITS files, as required by
1173  lsst.afw.cameraGeom.makeCameraFromPath
1174 
1175  Parameters
1176  ----------
1177  policy : `lsst.daf.persistence.Policy`
1178  Policy with per-camera defaults already merged
1179  (PexPolicy only for backward compatibility).
1180  repositoryDir : `str`
1181  Policy repository for the subclassing module (obtained with
1182  getRepositoryPath() on the per-camera default dictionary).
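
 For example, with ``camera: "camera"`` in the policy, the layout under
 ``repositoryDir`` would be (an illustrative sketch; file names vary per
 camera)::

     <repositoryDir>/
         camera/
             camera.py    # persisted CameraConfig
             ccd00.fits   # one ampInfo table per detector
             ccd01.fits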
1183  """
1184  if 'camera' not in policy:
1185  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1186  cameraDataSubdir = policy['camera']
1187  self.cameraDataLocation = os.path.normpath(
1188  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1189  cameraConfig = afwCameraGeom.CameraConfig()
1190  cameraConfig.load(self.cameraDataLocation)
1191  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1192  return afwCameraGeom.makeCameraFromPath(
1193  cameraConfig=cameraConfig,
1194  ampInfoPath=ampInfoPath,
1195  shortNameFunc=self.getShortCcdName,
1196  pupilFactoryClass=self.PupilFactoryClass
1197  )
1198 
1199  def getRegistry(self):
1200  """Get the registry used by this mapper.
1201 
1202  Returns
1203  -------
1204  Registry or None
1205  The registry used by this mapper for this mapper's repository.
1206  """
1207  return self.registry
1208 
1209  def getImageCompressionSettings(self, datasetType, dataId):
1210  """Stuff image compression settings into a daf.base.PropertySet
1211 
1212  This goes into the ButlerLocation's "additionalData", which gets
1213  passed into the boost::persistence framework.
1214 
1215  Parameters
1216  ----------
1217  datasetType : `str`
1218  Type of dataset for which to get the image compression settings.
1219  dataId : `dict`
1220  Dataset identifier.
1221 
1222  Returns
1223  -------
1224  additionalData : `lsst.daf.base.PropertySet`
1225  Image compression settings.
1226  """
1227  mapping = self.mappings[datasetType]
1228  recipeName = mapping.recipe
1229  storageType = mapping.storage
1230  if storageType not in self._writeRecipes:
1231  return dafBase.PropertySet()
1232  if recipeName not in self._writeRecipes[storageType]:
1233  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1234  (datasetType, storageType, recipeName))
1235  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
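 # Any plane whose recipe left "seed" at 0 gets a value derived from the
 # data ID below, so fuzzing varies between data IDs but is repeatable
 # for the same data ID within a run.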
1236  seed = hash(tuple(dataId.items())) % 2**31
1237  for plane in ("image", "mask", "variance"):
1238  if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
1239  recipe.set(plane + ".scaling.seed", seed)
1240  return recipe
1241 
1242  def _initWriteRecipes(self):
1243  """Read the recipes for writing files
1244 
1245  These recipes are currently used for configuring FITS compression,
1246  but they could have wider uses for configuring different flavors
1247  of the storage types. A recipe is referred to by a symbolic name,
1248  which has associated settings. These settings are stored as a
1249  `PropertySet` so they can easily be passed down to the
1250  boost::persistence framework as the "additionalData" parameter.
1251 
1252  The list of recipes is written in YAML. A default recipe and
1253  some other convenient recipes are in obs_base/policy/writeRecipes.yaml
1254  and these may be overridden or supplemented by the individual obs_*
1255  packages' own policy/writeRecipes.yaml files.
1256 
1257  Recipes are grouped by the storage type. Currently, only the
1258  ``FitsStorage`` storage type uses recipes, which uses it to
1259  configure FITS image compression.
1260 
1261  Each ``FitsStorage`` recipe for FITS compression should define
1262  "image", "mask" and "variance" entries, each of which may contain
1263  "compression" and "scaling" entries. Defaults will be provided for
1264  any missing elements under "compression" and "scaling".
1265 
1266  The allowed entries under "compression" are:
1267 
1268  * algorithm (string): compression algorithm to use
1269  * rows (int): number of rows per tile (0 = entire dimension)
1270  * columns (int): number of columns per tile (0 = entire dimension)
1271  * quantizeLevel (float): cfitsio quantization level
1272 
1273  The allowed entries under "scaling" are:
1274 
1275  * algorithm (string): scaling algorithm to use
1276  * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
1277  * fuzz (bool): fuzz the values when quantising floating-point values?
1278  * seed (long): seed for random number generator when fuzzing
1279  * maskPlanes (list of string): mask planes to ignore when doing
1280  statistics
1281  * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
1282  * quantizePad: number of stdev to allow on the low side (for
1283  STDEV_POSITIVE/NEGATIVE)
1284  * bscale: manually specified BSCALE (for MANUAL scaling)
1285  * bzero: manually specified BZERO (for MANUAL scaling)
1286 
1287  A very simple example YAML recipe:
1288 
1289  FitsStorage:
1290  default:
1291  image: &default
1292  compression:
1293  algorithm: GZIP_SHUFFLE
1294  mask: *default
1295  variance: *default
1296  """
1297  recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
1298  recipes = dafPersist.Policy(recipesFile)
1299  supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
1300  validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
1301  if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
1302  supplements = dafPersist.Policy(supplementsFile)
1303  # Don't allow overrides, only supplements
1304  for entry in validationMenu:
1305  intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
1306  if intersection:
1307  raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
1308  (supplementsFile, entry, recipesFile, intersection))
1309  recipes.update(supplements)
1310 
1311  self._writeRecipes = {}
1312  for storageType in recipes.names(True):
1313  if "default" not in recipes[storageType]:
1314  raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
1315  (storageType, recipesFile))
1316  self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1317 
1318 
1319 def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
1320  """Generate an Exposure from an image-like object
1321 
1322  If the image is a DecoratedImage then its WCS and metadata are also set
1323  (Image and MaskedImage are missing the necessary metadata,
1324  while an Exposure already has those set).
1325 
1326  Parameters
1327  ----------
1328  image : Image-like object
1329  Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
1330  Exposure.
1331 
1332  Returns
1333  -------
1334  `lsst.afw.image.Exposure`
1335  Exposure containing input image.
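
 For example::

     >>> import lsst.afw.image as afwImage
     >>> exp = exposureFromImage(afwImage.ImageF(10, 10))
     >>> type(exp).__name__
     'ExposureF'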
1336  """
1337  metadata = None
1338  if isinstance(image, afwImage.MaskedImage):
1339  exposure = afwImage.makeExposure(image)
1340  elif isinstance(image, afwImage.DecoratedImage):
1341  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1342  metadata = image.getMetadata()
1343  try:
1344  wcs = afwGeom.makeSkyWcs(metadata, strip=True)
1345  exposure.setWcs(wcs)
1346  except pexExcept.TypeError as e:
1347  # raised on failure to create a wcs (and possibly others)
1348  if logger is None:
1349  logger = lsstLog.Log.getLogger("CameraMapper")
1350  logger.debug("wcs set to None; insufficient information found in metadata to create a valid wcs:"
1351  " %s", e.args[0])
1352 
1353  exposure.setMetadata(metadata)
1354  elif isinstance(image, afwImage.Exposure):
1355  # Exposure
1356  exposure = image
1357  metadata = exposure.getMetadata()
1358  else:
1359  # Image
1360  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1361  #
1362  # set VisitInfo if we can
1363  #
1364  if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
1365  if metadata is not None:
1366  if mapper is None:
1367  if not logger:
1368  logger = lsstLog.Log.getLogger("CameraMapper")
1369  logger.warn("I can only set the VisitInfo if you provide a mapper")
1370  else:
1371  exposureId = mapper._computeCcdExposureId(dataId)
1372  visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1373 
1374  exposure.getInfo().setVisitInfo(visitInfo)
1375 
1376  return exposure
1377 
1378 
1379 def validateRecipeFitsStorage(recipes):
1380  """Validate recipes for FitsStorage
1381 
1382  The recipes are supplemented with default values where appropriate.
1383 
1384  TODO: replace this custom validation code with Cerberus (DM-11846)
1385 
1386  Parameters
1387  ----------
1388  recipes : `lsst.daf.persistence.Policy`
1389  FitsStorage recipes to validate.
1390 
1391  Returns
1392  -------
1393  validated : `lsst.daf.base.PropertySet`
1394  Validated FitsStorage recipe.
1395 
1396  Raises
1397  ------
1398  `RuntimeError`
1399  If validation fails.
1400  """
1401  # Schemas define what should be there and the default values (the default
1402  # value also implies the expected type).
1403  compressionSchema = {
1404  "algorithm": "NONE",
1405  "rows": 1,
1406  "columns": 0,
1407  "quantizeLevel": 0.0,
1408  }
1409  scalingSchema = {
1410  "algorithm": "NONE",
1411  "bitpix": 0,
1412  "maskPlanes": ["NO_DATA"],
1413  "seed": 0,
1414  "quantizeLevel": 4.0,
1415  "quantizePad": 5.0,
1416  "fuzz": True,
1417  "bscale": 1.0,
1418  "bzero": 0.0,
1419  }
1420 
1421  def checkUnrecognized(entry, allowed, description):
1422  """Check to see if the entry contains unrecognised keywords"""
1423  unrecognized = set(entry.keys()) - set(allowed)
1424  if unrecognized:
1425  raise RuntimeError(
1426  "Unrecognized entries when parsing image compression recipe %s: %s" %
1427  (description, unrecognized))
1428 
1429  validated = {}
1430  for name in recipes.names(True):
1431  checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
1432  rr = dafBase.PropertySet()
1433  validated[name] = rr
1434  for plane in ("image", "mask", "variance"):
1435  checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
1436  name + "->" + plane)
1437 
1438  for settings, schema in (("compression", compressionSchema),
1439  ("scaling", scalingSchema)):
1440  prefix = plane + "." + settings
1441  if settings not in recipes[name][plane]:
1442  for key in schema:
1443  rr.set(prefix + "." + key, schema[key])
1444  continue
1445  entry = recipes[name][plane][settings]
1446  checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
1447  for key in schema:
1448  value = type(schema[key])(entry[key]) if key in entry else schema[key]
1449  rr.set(prefix + "." + key, value)
1450  return validated