lsst.obs.base  18.1.0-20-g08a3639+2
cameraMapper.py
#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#

import copy
import os
import re
import traceback
import weakref

from deprecated.sphinx import deprecated

from astro_metadata_translator import fix_header
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from lsst.utils import getPackageDir

__all__ = ["CameraMapper", "exposureFromImage"]


class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file and a filter description (Filter
    class static configuration) as another policy file.

    Information from the camera geometry and defects is inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass and optionally the
    metadata translator class:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    translatorClass: The `~astro_metadata_translator.MetadataTranslator`
    class to use for fixing metadata values. If it is not set, an attempt
    will be made to infer the class from ``MakeRawVisitInfoClass``; failing
    that, the metadata fixup will try to infer the translator class from the
    header itself.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making suitable for path expansion (e.g. removing
    commas). The default implementation does nothing. Note that this
    method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename. The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    .. todo::

        Instead of auto-loading the camera at construction time, load it from
        the calibration registry

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
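
    Examples
    --------
    A minimal sketch of a concrete subclass, shown only to illustrate the
    conventions described above; ``MyCamMapper``, its detector naming and
    its dataId packing scheme are hypothetical, not part of obs_base:

    >>> class MyCamMapper(CameraMapper):
    ...     packageName = "obs_mycam"
    ...     MakeRawVisitInfoClass = MakeRawVisitInfo
    ...
    ...     def _extractDetectorName(self, dataId):
    ...         return "CCD%(ccd)02d" % dataId
    ...
    ...     def _computeCcdExposureId(self, dataId):
    ...         # assume at most 100 CCDs per visit (illustrative only)
    ...         return dataId["visit"]*100 + dataId["ccd"]
    ...
    ...     def _computeCoaddExposureId(self, dataId, singleFilter):
    ...         raise NotImplementedError()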
175  """
176  packageName = None
177 
178  # a class or subclass of MakeRawVisitInfo, a functor that makes an
179  # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
180  MakeRawVisitInfoClass = MakeRawVisitInfo
181 
182  # a class or subclass of PupilFactory
183  PupilFactoryClass = afwCameraGeom.PupilFactory
184 
185  # Class to use for metadata translations
186  translatorClass = None
187 
188  def __init__(self, policy, repositoryDir,
189  root=None, registry=None, calibRoot=None, calibRegistry=None,
190  provided=None, parentRegistry=None, repositoryCfg=None):
191 
192  dafPersist.Mapper.__init__(self)
193 
194  self.log = lsstLog.Log.getLogger("CameraMapper")
195 
196  if root:
197  self.root = root
198  elif repositoryCfg:
199  self.root = repositoryCfg.root
200  else:
201  self.root = None
202 
203  repoPolicy = repositoryCfg.policy if repositoryCfg else None
204  if repoPolicy is not None:
205  policy.update(repoPolicy)
206 
207  # Levels
208  self.levels = dict()
209  if 'levels' in policy:
210  levelsPolicy = policy['levels']
211  for key in levelsPolicy.names(True):
212  self.levels[key] = set(levelsPolicy.asArray(key))
213  self.defaultLevel = policy['defaultLevel']
214  self.defaultSubLevels = dict()
215  if 'defaultSubLevels' in policy:
216  self.defaultSubLevels = policy['defaultSubLevels']
217 
218  # Root directories
219  if root is None:
220  root = "."
221  root = dafPersist.LogicalLocation(root).locString()
222 
223  self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
224 
225  # If the calibRoot is passed in, use that. If not and it's indicated in
226  # the policy, use that. And otherwise, the calibs are in the regular
227  # root.
228  # If the location indicated by the calib root does not exist, do not
229  # create it.
230  calibStorage = None
231  if calibRoot is not None:
232  calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
233  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
234  create=False)
235  else:
236  calibRoot = policy.get('calibRoot', None)
237  if calibRoot:
238  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
239  create=False)
240  if calibStorage is None:
241  calibStorage = self.rootStorage
242 
243  self.root = root
244 
245  # Registries
246  self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
247  self.rootStorage, searchParents=False,
248  posixIfNoSql=(not parentRegistry))
249  if not self.registry:
250  self.registry = parentRegistry
251  needCalibRegistry = policy.get('needCalibRegistry', None)
252  if needCalibRegistry:
253  if calibStorage:
254  self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
255  "calibRegistryPath", calibStorage,
256  posixIfNoSql=False) # NB never use posix for calibs
257  else:
258  raise RuntimeError(
259  "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
260  "calibRoot ivar:%s or policy['calibRoot']:%s" %
261  (calibRoot, policy.get('calibRoot', None)))
262  else:
263  self.calibRegistry = None
264 
265  # Dict of valid keys and their value types
266  self.keyDict = dict()
267 
268  self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
269  self._initWriteRecipes()
270 
271  # Camera geometry
272  self.cameraDataLocation = None # path to camera geometry config file
273  self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)
274 
275  # Filter translation table
276  self.filters = None
277 
278  # verify that the class variable packageName is set before attempting
279  # to instantiate an instance
280  if self.packageName is None:
281  raise ValueError('class variable packageName must not be None')
282 
284 
285  # Assign a metadata translator if one has not been defined by
286  # subclass. We can sometimes infer one from the RawVisitInfo
287  # class.
288  if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
289  self.translatorClass = self.makeRawVisitInfo.metadataTranslator
290 
291  def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
292  """Initialize mappings
293 
294  For each of the dataset types that we want to be able to read, there
295  are methods that can be created to support them:
296  * map_<dataset> : determine the path for dataset
297  * std_<dataset> : standardize the retrieved dataset
298  * bypass_<dataset> : retrieve the dataset (bypassing the usual
299  retrieval machinery)
300  * query_<dataset> : query the registry
301 
302  Besides the dataset types explicitly listed in the policy, we create
303  additional, derived datasets for additional conveniences,
304  e.g., reading the header of an image, retrieving only the size of a
305  catalog.
306 
307  Parameters
308  ----------
309  policy : `lsst.daf.persistence.Policy`
310  Policy with per-camera defaults already merged
311  rootStorage : `Storage subclass instance`
312  Interface to persisted repository data.
313  calibRoot : `Storage subclass instance`
314  Interface to persisted calib repository data
315  provided : `list` of `str`
316  Keys provided by the mapper
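
        Examples
        --------
        A sketch of the generated methods, assuming the policy defines a
        ``calexp`` exposure dataset (``mapper`` is a constructed instance of
        a concrete subclass; the dataId keys are illustrative):

        >>> mapper.map_calexp({"visit": 1, "ccd": 2})            # doctest: +SKIP
        >>> mapper.query_calexp(["visit"], {"ccd": 2})           # doctest: +SKIP
        >>> mapper.map_calexp_filename({"visit": 1, "ccd": 2})   # doctest: +SKIP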
317  """
318  # Sub-dictionaries (for exposure/calibration/dataset types)
319  imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
320  "obs_base", "ImageMappingDefaults.yaml", "policy"))
321  expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
322  "obs_base", "ExposureMappingDefaults.yaml", "policy"))
323  calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
324  "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
325  dsMappingPolicy = dafPersist.Policy()
326 
327  # Mappings
328  mappingList = (
329  ("images", imgMappingPolicy, ImageMapping),
330  ("exposures", expMappingPolicy, ExposureMapping),
331  ("calibrations", calMappingPolicy, CalibrationMapping),
332  ("datasets", dsMappingPolicy, DatasetMapping)
333  )
334  self.mappings = dict()
335  for name, defPolicy, cls in mappingList:
336  if name in policy:
337  datasets = policy[name]
338 
339  # Centrally-defined datasets
340  defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
341  if os.path.exists(defaultsPath):
342  datasets.merge(dafPersist.Policy(defaultsPath))
343 
344  mappings = dict()
345  setattr(self, name, mappings)
346  for datasetType in datasets.names(True):
347  subPolicy = datasets[datasetType]
348  subPolicy.merge(defPolicy)
349 
350  if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
351  def compositeClosure(dataId, write=False, mapper=None, mapping=None,
352  subPolicy=subPolicy):
353  components = subPolicy.get('composite')
354  assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
355  disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
356  python = subPolicy['python']
357  butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
358  disassembler=disassembler,
359  python=python,
360  dataId=dataId,
361  mapper=self)
362  for name, component in components.items():
363  butlerComposite.add(id=name,
364  datasetType=component.get('datasetType'),
365  setter=component.get('setter', None),
366  getter=component.get('getter', None),
367  subset=component.get('subset', False),
368  inputOnly=component.get('inputOnly', False))
369  return butlerComposite
370  setattr(self, "map_" + datasetType, compositeClosure)
371  # for now at least, don't set up any other handling for this dataset type.
372  continue
373 
374  if name == "calibrations":
375  mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
376  provided=provided, dataRoot=rootStorage)
377  else:
378  mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
379 
380  if datasetType in self.mappings:
381  raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
382  self.keyDict.update(mapping.keys())
383  mappings[datasetType] = mapping
384  self.mappings[datasetType] = mapping
385  if not hasattr(self, "map_" + datasetType):
386  def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
387  return mapping.map(mapper, dataId, write)
388  setattr(self, "map_" + datasetType, mapClosure)
389  if not hasattr(self, "query_" + datasetType):
390  def queryClosure(format, dataId, mapping=mapping):
391  return mapping.lookup(format, dataId)
392  setattr(self, "query_" + datasetType, queryClosure)
393  if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
394  def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
395  return mapping.standardize(mapper, item, dataId)
396  setattr(self, "std_" + datasetType, stdClosure)
397 
398  def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
399  """Set convenience methods on CameraMapper"""
400  mapName = "map_" + datasetType + "_" + suffix
401  bypassName = "bypass_" + datasetType + "_" + suffix
402  queryName = "query_" + datasetType + "_" + suffix
403  if not hasattr(self, mapName):
404  setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
405  if not hasattr(self, bypassName):
406  if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
407  bypassImpl = getattr(self, "bypass_" + datasetType)
408  if bypassImpl is not None:
409  setattr(self, bypassName, bypassImpl)
410  if not hasattr(self, queryName):
411  setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
412 
413  # Filename of dataset
414  setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
415  [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
416  # Metadata from FITS file
417  if subPolicy["storage"] == "FitsStorage": # a FITS image
418  def getMetadata(datasetType, pythonType, location, dataId):
419  md = readMetadata(location.getLocationsWithRoot()[0])
420  fix_header(md, translator_class=self.translatorClass)
421  return md
422 
423  setMethods("md", bypassImpl=getMetadata)
424 
425  # Add support for configuring FITS compression
426  addName = "add_" + datasetType
427  if not hasattr(self, addName):
428  setattr(self, addName, self.getImageCompressionSettings)
429 
430  if name == "exposures":
431  def getSkyWcs(datasetType, pythonType, location, dataId):
432  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
433  return fitsReader.readWcs()
434 
435  setMethods("wcs", bypassImpl=getSkyWcs)
436 
437  def getPhotoCalib(datasetType, pythonType, location, dataId):
438  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
439  return fitsReader.readPhotoCalib()
440 
441  setMethods("photoCalib", bypassImpl=getPhotoCalib)
442 
443  def getVisitInfo(datasetType, pythonType, location, dataId):
444  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
445  return fitsReader.readVisitInfo()
446 
447  setMethods("visitInfo", bypassImpl=getVisitInfo)
448 
449  def getFilter(datasetType, pythonType, location, dataId):
450  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
451  return fitsReader.readFilter()
452 
453  setMethods("filter", bypassImpl=getFilter)
454 
455  setMethods("detector",
456  mapImpl=lambda dataId, write=False:
457  dafPersist.ButlerLocation(
458  pythonType="lsst.afw.cameraGeom.CameraConfig",
459  cppType="Config",
460  storageName="Internal",
461  locationList="ignored",
462  dataId=dataId,
463  mapper=self,
464  storage=None,
465  ),
466  bypassImpl=lambda datasetType, pythonType, location, dataId:
467  self.camera[self._extractDetectorName(dataId)]
468  )
469 
470  def getBBox(datasetType, pythonType, location, dataId):
471  md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
472  fix_header(md, translator_class=self.translatorClass)
473  return afwImage.bboxFromMetadata(md)
474 
475  setMethods("bbox", bypassImpl=getBBox)
476 
477  elif name == "images":
478  def getBBox(datasetType, pythonType, location, dataId):
479  md = readMetadata(location.getLocationsWithRoot()[0])
480  fix_header(md, translator_class=self.translatorClass)
481  return afwImage.bboxFromMetadata(md)
482  setMethods("bbox", bypassImpl=getBBox)
483 
484  if subPolicy["storage"] == "FitsCatalogStorage": # a FITS catalog
485 
486  def getMetadata(datasetType, pythonType, location, dataId):
487  md = readMetadata(os.path.join(location.getStorage().root,
488  location.getLocations()[0]), hdu=1)
489  fix_header(md, translator_class=self.translatorClass)
490  return md
491 
492  setMethods("md", bypassImpl=getMetadata)
493 
494  # Sub-images
495  if subPolicy["storage"] == "FitsStorage":
496  def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
497  subId = dataId.copy()
498  del subId['bbox']
499  loc = mapping.map(mapper, subId, write)
500  bbox = dataId['bbox']
501  llcX = bbox.getMinX()
502  llcY = bbox.getMinY()
503  width = bbox.getWidth()
504  height = bbox.getHeight()
505  loc.additionalData.set('llcX', llcX)
506  loc.additionalData.set('llcY', llcY)
507  loc.additionalData.set('width', width)
508  loc.additionalData.set('height', height)
509  if 'imageOrigin' in dataId:
510  loc.additionalData.set('imageOrigin',
511  dataId['imageOrigin'])
512  return loc
513 
514  def querySubClosure(key, format, dataId, mapping=mapping):
515  subId = dataId.copy()
516  del subId['bbox']
517  return mapping.lookup(format, subId)
518  setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
519 
520  if subPolicy["storage"] == "FitsCatalogStorage":
521  # Length of catalog
522 
523  def getLen(datasetType, pythonType, location, dataId):
524  md = readMetadata(os.path.join(location.getStorage().root,
525  location.getLocations()[0]), hdu=1)
526  fix_header(md, translator_class=self.translatorClass)
527  return md["NAXIS2"]
528 
529  setMethods("len", bypassImpl=getLen)
530 
531  # Schema of catalog
532  if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
533  setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
534  afwTable.Schema.readFits(os.path.join(location.getStorage().root,
535  location.getLocations()[0])))
536 
537  def _computeCcdExposureId(self, dataId):
538  """Compute the 64-bit (long) identifier for a CCD exposure.
539 
540  Subclasses must override
541 
542  Parameters
543  ----------
544  dataId : `dict`
545  Data identifier with visit, ccd.
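
        Examples
        --------
        A sketch of a typical override, assuming a hypothetical camera with
        at most 100 CCDs per visit (the packing scheme is illustrative, not
        an obs_base convention):

        >>> def _computeCcdExposureId(self, dataId):
        ...     dataId = self._transformId(dataId)
        ...     return dataId["visit"]*100 + dataId["ccd"]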
546  """
547  raise NotImplementedError()
548 
549  def _computeCoaddExposureId(self, dataId, singleFilter):
550  """Compute the 64-bit (long) identifier for a coadd.
551 
552  Subclasses must override
553 
554  Parameters
555  ----------
556  dataId : `dict`
557  Data identifier with tract and patch.
558  singleFilter : `bool`
559  True means the desired ID is for a single-filter coadd, in which
560  case dataIdmust contain filter.
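
        Examples
        --------
        A sketch of an override for a hypothetical skymap with at most
        8192x8192 patches per tract, ignoring the filter packing for
        brevity (the bit budget is illustrative only):

        >>> def _computeCoaddExposureId(self, dataId, singleFilter):
        ...     tract = int(dataId["tract"])
        ...     patchX, patchY = (int(c) for c in dataId["patch"].split(","))
        ...     return (tract*8192 + patchX)*8192 + patchY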
561  """
562  raise NotImplementedError()
563 
564  def _search(self, path):
565  """Search for path in the associated repository's storage.
566 
567  Parameters
568  ----------
569  path : string
570  Path that describes an object in the repository associated with
571  this mapper.
572  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
573  indicator will be stripped when searching and so will match
574  filenames without the HDU indicator, e.g. 'foo.fits'. The path
575  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
576 
577  Returns
578  -------
579  string
580  The path for this object in the repository. Will return None if the
581  object can't be found. If the input argument path contained an HDU
582  indicator, the returned path will also contain the HDU indicator.
583  """
584  return self.rootStorage.search(path)
585 
586  def backup(self, datasetType, dataId):
587  """Rename any existing object with the given type and dataId.
588 
589  The CameraMapper implementation saves objects in a sequence of e.g.:
590 
591  - foo.fits
592  - foo.fits~1
593  - foo.fits~2
594 
595  All of the backups will be placed in the output repo, however, and will
596  not be removed if they are found elsewhere in the _parent chain. This
597  means that the same file will be stored twice if the previous version
598  was found in an input repo.
599  """
600 
601  # Calling PosixStorage directly is not the long term solution in this
602  # function, this is work-in-progress on epic DM-6225. The plan is for
603  # parentSearch to be changed to 'search', and search only the storage
604  # associated with this mapper. All searching of parents will be handled
605  # by traversing the container of repositories in Butler.
606 
607  def firstElement(list):
608  """Get the first element in the list, or None if that can't be
609  done.
610  """
611  return list[0] if list is not None and len(list) else None
612 
613  n = 0
614  newLocation = self.map(datasetType, dataId, write=True)
615  newPath = newLocation.getLocations()[0]
616  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
617  path = firstElement(path)
618  oldPaths = []
619  while path is not None:
620  n += 1
621  oldPaths.append((n, path))
622  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
623  path = firstElement(path)
624  for n, oldPath in reversed(oldPaths):
625  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
626 
627  def keys(self):
628  """Return supported keys.
629 
630  Returns
631  -------
632  iterable
633  List of keys usable in a dataset identifier
634  """
635  return iter(self.keyDict.keys())
636 
637  def getKeys(self, datasetType, level):
638  """Return a dict of supported keys and their value types for a given
639  dataset type at a given level of the key hierarchy.
640 
641  Parameters
642  ----------
643  datasetType : `str`
644  Dataset type or None for all dataset types.
645  level : `str` or None
646  Level or None for all levels or '' for the default level for the
647  camera.
648 
649  Returns
650  -------
651  `dict`
652  Keys are strings usable in a dataset identifier, values are their
653  value types.
654  """
655 
656  # not sure if this is how we want to do this. what if None was intended?
657  if level == '':
658  level = self.getDefaultLevel()
659 
660  if datasetType is None:
661  keyDict = copy.copy(self.keyDict)
662  else:
663  keyDict = self.mappings[datasetType].keys()
664  if level is not None and level in self.levels:
665  keyDict = copy.copy(keyDict)
666  for l in self.levels[level]:
667  if l in keyDict:
668  del keyDict[l]
669  return keyDict
670 
671  def getDefaultLevel(self):
672  return self.defaultLevel
673 
674  def getDefaultSubLevel(self, level):
675  if level in self.defaultSubLevels:
676  return self.defaultSubLevels[level]
677  return None
678 
679  @classmethod
680  def getCameraName(cls):
681  """Return the name of the camera that this CameraMapper is for."""
682  className = str(cls)
683  className = className[className.find('.'):-1]
684  m = re.search(r'(\w+)Mapper', className)
685  if m is None:
686  m = re.search(r"class '[\w.]*?(\w+)'", className)
687  name = m.group(1)
688  return name[:1].lower() + name[1:] if name else ''
689 
690  @classmethod
691  def getPackageName(cls):
692  """Return the name of the package containing this CameraMapper."""
693  if cls.packageName is None:
694  raise ValueError('class variable packageName must not be None')
695  return cls.packageName
696 
697  @classmethod
698  def getPackageDir(cls):
699  """Return the base directory of this package"""
700  return getPackageDir(cls.getPackageName())
701 
702  def map_camera(self, dataId, write=False):
703  """Map a camera dataset."""
704  if self.camera is None:
705  raise RuntimeError("No camera dataset available.")
706  actualId = self._transformId(dataId)
707  return dafPersist.ButlerLocation(
708  pythonType="lsst.afw.cameraGeom.CameraConfig",
709  cppType="Config",
710  storageName="ConfigStorage",
711  locationList=self.cameraDataLocation or "ignored",
712  dataId=actualId,
713  mapper=self,
714  storage=self.rootStorage
715  )
716 
717  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
718  """Return the (preloaded) camera object.
719  """
720  if self.camera is None:
721  raise RuntimeError("No camera dataset available.")
722  return self.camera
723 
724  def map_expIdInfo(self, dataId, write=False):
725  return dafPersist.ButlerLocation(
726  pythonType="lsst.obs.base.ExposureIdInfo",
727  cppType=None,
728  storageName="Internal",
729  locationList="ignored",
730  dataId=dataId,
731  mapper=self,
732  storage=self.rootStorage
733  )
734 
735  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
736  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
737  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
738  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
739  return ExposureIdInfo(expId=expId, expBits=expBits)
740 
741  def std_bfKernel(self, item, dataId):
742  """Disable standardization for bfKernel
743 
744  bfKernel is a calibration product that is numpy array,
745  unlike other calibration products that are all images;
746  all calibration images are sent through _standardizeExposure
747  due to CalibrationMapping, but we don't want that to happen to bfKernel
748  """
749  return item
750 
751  def std_raw(self, item, dataId):
752  """Standardize a raw dataset by converting it to an Exposure instead
753  of an Image"""
754  return self._standardizeExposure(self.exposures['raw'], item, dataId,
755  trimmed=False, setVisitInfo=True)
756 
757  def map_skypolicy(self, dataId):
758  """Map a sky policy."""
759  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
760  "Internal", None, None, self,
761  storage=self.rootStorage)
762 
763  def std_skypolicy(self, item, dataId):
764  """Standardize a sky policy by returning the one we use."""
765  return self.skypolicy
766 
767 
772 
773  def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
774  posixIfNoSql=True):
775  """Set up a registry (usually SQLite3), trying a number of possible
776  paths.
777 
778  Parameters
779  ----------
780  name : string
781  Name of registry.
782  description: `str`
783  Description of registry (for log messages)
784  path : string
785  Path for registry.
786  policy : string
787  Policy that contains the registry name, used if path is None.
788  policyKey : string
789  Key in policy for registry path.
790  storage : Storage subclass
791  Repository Storage to look in.
792  searchParents : bool, optional
793  True if the search for a registry should follow any Butler v1
794  _parent symlinks.
795  posixIfNoSql : bool, optional
796  If an sqlite registry is not found, will create a posix registry if
797  this is True.
798 
799  Returns
800  -------
801  lsst.daf.persistence.Registry
802  Registry object
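
        Examples
        --------
        A sketch of how the constructor calls this method (``registry`` and
        ``policy`` as passed to `__init__`):

        >>> self.registry = self._setupRegistry("registry", "exposure", registry,
        ...                                     policy, "registryPath",
        ...                                     self.rootStorage,
        ...                                     searchParents=False)  # doctest: +SKIP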
803  """
804  if path is None and policyKey in policy:
805  path = dafPersist.LogicalLocation(policy[policyKey]).locString()
806  if os.path.isabs(path):
807  raise RuntimeError("Policy should not indicate an absolute path for registry.")
808  if not storage.exists(path):
809  newPath = storage.instanceSearch(path)
810 
811  newPath = newPath[0] if newPath is not None and len(newPath) else None
812  if newPath is None:
813  self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
814  path)
815  path = newPath
816  else:
817  self.log.warn("Unable to locate registry at policy path: %s", path)
818  path = None
819 
820  # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
821  # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
822  # root from path. Currently only works with PosixStorage
823  try:
824  root = storage.root
825  if path and (path.startswith(root)):
826  path = path[len(root + '/'):]
827  except AttributeError:
828  pass
829 
830  # determine if there is an sqlite registry and if not, try the posix registry.
831  registry = None
832 
833  def search(filename, description):
834  """Search for file in storage
835 
836  Parameters
837  ----------
838  filename : `str`
839  Filename to search for
840  description : `str`
841  Description of file, for error message.
842 
843  Returns
844  -------
845  path : `str` or `None`
846  Path to file, or None
847  """
848  result = storage.instanceSearch(filename)
849  if result:
850  return result[0]
851  self.log.debug("Unable to locate %s: %s", description, filename)
852  return None
853 
854  # Search for a suitable registry database
855  if path is None:
856  path = search("%s.pgsql" % name, "%s in root" % description)
857  if path is None:
858  path = search("%s.sqlite3" % name, "%s in root" % description)
859  if path is None:
860  path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
861 
862  if path is not None:
863  if not storage.exists(path):
864  newPath = storage.instanceSearch(path)
865  newPath = newPath[0] if newPath is not None and len(newPath) else None
866  if newPath is not None:
867  path = newPath
868  localFileObj = storage.getLocalFile(path)
869  self.log.info("Loading %s registry from %s", description, localFileObj.name)
870  registry = dafPersist.Registry.create(localFileObj.name)
871  localFileObj.close()
872  elif not registry and posixIfNoSql:
873  try:
874  self.log.info("Loading Posix %s registry from %s", description, storage.root)
875  registry = dafPersist.PosixRegistry(storage.root)
876  except Exception:
877  registry = None
878 
879  return registry
880 
881  def _transformId(self, dataId):
882  """Generate a standard ID dict from a camera-specific ID dict.
883 
884  Canonical keys include:
885  - amp: amplifier name
886  - ccd: CCD name (in LSST this is a combination of raft and sensor)
887  The default implementation returns a copy of its input.
888 
889  Parameters
890  ----------
891  dataId : `dict`
892  Dataset identifier; this must not be modified
893 
894  Returns
895  -------
896  `dict`
897  Transformed dataset identifier.
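
        Examples
        --------
        A sketch of a typical override for a camera whose users say
        "ccdname" instead of "ccd" (the key names are illustrative):

        >>> def _transformId(self, dataId):
        ...     actualId = dataId.copy()
        ...     if "ccdname" in actualId:
        ...         actualId["ccd"] = actualId.pop("ccdname")
        ...     return actualId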
898  """
899 
900  return dataId.copy()
901 
902  def _mapActualToPath(self, template, actualId):
903  """Convert a template path to an actual path, using the actual data
904  identifier. This implementation is usually sufficient but can be
905  overridden by the subclass.
906 
907  Parameters
908  ----------
909  template : `str`
910  Template path
911  actualId : `dict`
912  Dataset identifier
913 
914  Returns
915  -------
916  `str`
917  Pathname
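
        Examples
        --------
        The template is expanded with ``%``-style substitution after
        `_transformId` has been applied; a minimal illustration (the
        template string is illustrative, not from any policy):

        >>> "raw/v%(visit)07d_f%(filter)s.fits" % {"visit": 12, "filter": "g"}
        'raw/v0000012_fg.fits'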
918  """
919 
920  try:
921  transformedId = self._transformId(actualId)
922  return template % transformedId
923  except Exception as e:
924  raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
925 
926  @staticmethod
927  def getShortCcdName(ccdName):
928  """Convert a CCD name to a form useful as a filename
929 
930  The default implementation converts spaces to underscores.
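
        Examples
        --------
        >>> CameraMapper.getShortCcdName("R:1,2 S:3,4")
        'R:1,2_S:3,4'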
931  """
932  return ccdName.replace(" ", "_")
933 
934  def _extractDetectorName(self, dataId):
935  """Extract the detector (CCD) name from the dataset identifier.
936 
937  The name in question is the detector name used by lsst.afw.cameraGeom.
938 
939  Parameters
940  ----------
941  dataId : `dict`
942  Dataset identifier.
943 
944  Returns
945  -------
946  `str`
947  Detector name
948  """
949  raise NotImplementedError("No _extractDetectorName() function specified")
950 
951  @deprecated("This method is no longer used for ISR (will be removed after v11)", category=FutureWarning)
952  def _extractAmpId(self, dataId):
953  """Extract the amplifier identifer from a dataset identifier.
954 
955  .. note:: Deprecated in 11_0
956 
957  amplifier identifier has two parts: the detector name for the CCD
958  containing the amplifier and index of the amplifier in the detector.
959 
960  Parameters
961  ----------
962  dataId : `dict`
963  Dataset identifer
964 
965  Returns
966  -------
967  `tuple`
968  Amplifier identifier
969  """
970 
971  trDataId = self._transformId(dataId)
972  return (trDataId["ccd"], int(trDataId['amp']))
973 
974  def _setAmpDetector(self, item, dataId, trimmed=True):
975  """Set the detector object in an Exposure for an amplifier.
976 
977  Defects are also added to the Exposure based on the detector object.
978 
979  Parameters
980  ----------
981  item : `lsst.afw.image.Exposure`
982  Exposure to set the detector in.
983  dataId : `dict`
984  Dataset identifier
985  trimmed : `bool`
986  Should detector be marked as trimmed? (ignored)
987  """
988 
989  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
990 
991  def _setCcdDetector(self, item, dataId, trimmed=True):
992  """Set the detector object in an Exposure for a CCD.
993 
994  Parameters
995  ----------
996  item : `lsst.afw.image.Exposure`
997  Exposure to set the detector in.
998  dataId : `dict`
999  Dataset identifier
1000  trimmed : `bool`
1001  Should detector be marked as trimmed? (ignored)
1002  """
1003  if item.getDetector() is not None:
1004  return
1005 
1006  detectorName = self._extractDetectorName(dataId)
1007  detector = self.camera[detectorName]
1008  item.setDetector(detector)
1009 
1010  def _setFilter(self, mapping, item, dataId):
1011  """Set the filter object in an Exposure. If the Exposure had a FILTER
1012  keyword, this was already processed during load. But if it didn't,
1013  use the filter from the registry.
1014 
1015  Parameters
1016  ----------
1017  mapping : `lsst.obs.base.Mapping`
1018  Where to get the filter from.
1019  item : `lsst.afw.image.Exposure`
1020  Exposure to set the filter in.
1021  dataId : `dict`
1022  Dataset identifier.
1023  """
1024 
1025  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
1026  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
1027  return
1028 
1029  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1030  return
1031 
1032  actualId = mapping.need(['filter'], dataId)
1033  filterName = actualId['filter']
1034  if self.filters is not None and filterName in self.filters:
1035  filterName = self.filters[filterName]
1036  try:
1037  item.setFilter(afwImage.Filter(filterName))
1038  except pexExcept.NotFoundError:
1039  self.log.warn("Filter %s not defined. Set to UNKNOWN." % (filterName))
1040 
1041  def _standardizeExposure(self, mapping, item, dataId, filter=True,
1042  trimmed=True, setVisitInfo=True):
1043  """Default standardization function for images.
1044 
1045  This sets the Detector from the camera geometry
1046  and optionally set the Filter. In both cases this saves
1047  having to persist some data in each exposure (or image).
1048 
1049  Parameters
1050  ----------
1051  mapping : `lsst.obs.base.Mapping`
1052  Where to get the values from.
1053  item : image-like object
1054  Can be any of lsst.afw.image.Exposure,
1055  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
1056  or lsst.afw.image.MaskedImage
1057 
1058  dataId : `dict`
1059  Dataset identifier
1060  filter : `bool`
1061  Set filter? Ignored if item is already an exposure
1062  trimmed : `bool`
1063  Should detector be marked as trimmed?
1064  setVisitInfo : `bool`
1065  Should Exposure have its VisitInfo filled out from the metadata?
1066 
1067  Returns
1068  -------
1069  `lsst.afw.image.Exposure`
1070  The standardized Exposure.
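
        Examples
        --------
        A sketch of how a subclass standardization hook delegates here,
        mirroring `std_raw` defined above:

        >>> def std_raw(self, item, dataId):
        ...     return self._standardizeExposure(self.exposures['raw'], item, dataId,
        ...                                      trimmed=False, setVisitInfo=True)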
1071  """
1072  try:
1073  exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
1074  setVisitInfo=setVisitInfo)
1075  except Exception as e:
1076  self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
1077  raise
1078 
1079  if mapping.level.lower() == "amp":
1080  self._setAmpDetector(exposure, dataId, trimmed)
1081  elif mapping.level.lower() == "ccd":
1082  self._setCcdDetector(exposure, dataId, trimmed)
1083 
1084  # We can only create a WCS if it doesn't already have one and
1085  # we have either a VisitInfo or exposure metadata.
1086  # Do not calculate a WCS if this is an amplifier exposure
1087  if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
1088  (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
1089  self._createInitialSkyWcs(exposure)
1090 
1091  if filter:
1092  self._setFilter(mapping, exposure, dataId)
1093 
1094  return exposure
1095 
1096  def _createSkyWcsFromMetadata(self, exposure):
1097  """Create a SkyWcs from the FITS header metadata in an Exposure.
1098 
1099  Parameters
1100  ----------
1101  exposure : `lsst.afw.image.Exposure`
1102  The exposure to get metadata from, and attach the SkyWcs to.
1103  """
1104  metadata = exposure.getMetadata()
1105  try:
1106  wcs = afwGeom.makeSkyWcs(metadata, strip=True)
1107  exposure.setWcs(wcs)
1108  except pexExcept.TypeError as e:
1109  # See DM-14372 for why this is debug and not warn (e.g. calib files without wcs metadata).
1110  self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
1111  " %s", e.args[0])
1112  # ensure any WCS values stripped from the metadata are removed in the exposure
1113  exposure.setMetadata(metadata)
1114 
1115  def _createInitialSkyWcs(self, exposure):
1116  """Create a SkyWcs from the boresight and camera geometry.
1117 
1118  If the boresight or camera geometry do not support this method of
1119  WCS creation, this falls back on the header metadata-based version
1120  (typically a purely linear FITS crval/crpix/cdmatrix WCS).
1121 
1122  Parameters
1123  ----------
1124  exposure : `lsst.afw.image.Exposure`
1125  The exposure to get data from, and attach the SkyWcs to.
1126  """
1127  # Always use try to use metadata first, to strip WCS keys from it.
1128  self._createSkyWcsFromMetadata(exposure)
1129 
1130  if exposure.getInfo().getVisitInfo() is None:
1131  msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
1132  self.log.warn(msg)
1133  return
1134  try:
1135  newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
1136  exposure.setWcs(newSkyWcs)
1137  except InitialSkyWcsError as e:
1138  msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
1139  self.log.warn(msg, e)
1140  self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
1141  if e.__context__ is not None:
1142  self.log.debug("Root-cause Exception was: %s",
1143  traceback.TracebackException.from_exception(e.__context__))
1144 
1145  def _makeCamera(self, policy, repositoryDir):
1146  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
1147  the camera geometry
1148 
1149  Also set self.cameraDataLocation, if relevant (else it can be left
1150  None).
1151 
1152  This implementation assumes that policy contains an entry "camera"
1153  that points to the subdirectory in this package of camera data;
1154  specifically, that subdirectory must contain:
1155  - a file named `camera.py` that contains persisted camera config
1156  - ampInfo table FITS files, as required by
1157  lsst.afw.cameraGeom.makeCameraFromPath
1158 
1159  Parameters
1160  ----------
1161  policy : `lsst.daf.persistence.Policy`
1162  Policy with per-camera defaults already merged
1163  (PexPolicy only for backward compatibility).
1164  repositoryDir : `str`
1165  Policy repository for the subclassing module (obtained with
1166  getRepositoryPath() on the per-camera default dictionary).
1167  """
1168  if 'camera' not in policy:
1169  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1170  cameraDataSubdir = policy['camera']
1171  self.cameraDataLocation = os.path.normpath(
1172  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1173  cameraConfig = afwCameraGeom.CameraConfig()
1174  cameraConfig.load(self.cameraDataLocation)
1175  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1176  return afwCameraGeom.makeCameraFromPath(
1177  cameraConfig=cameraConfig,
1178  ampInfoPath=ampInfoPath,
1179  shortNameFunc=self.getShortCcdName,
1180  pupilFactoryClass=self.PupilFactoryClass
1181  )
1182 
1183  def getRegistry(self):
1184  """Get the registry used by this mapper.
1185 
1186  Returns
1187  -------
1188  Registry or None
1189  The registry used by this mapper for this mapper's repository.
1190  """
1191  return self.registry
1192 
1193  def getImageCompressionSettings(self, datasetType, dataId):
1194  """Stuff image compression settings into a daf.base.PropertySet
1195 
1196  This goes into the ButlerLocation's "additionalData", which gets
1197  passed into the boost::persistence framework.
1198 
1199  Parameters
1200  ----------
1201  datasetType : `str`
1202  Type of dataset for which to get the image compression settings.
1203  dataId : `dict`
1204  Dataset identifier.
1205 
1206  Returns
1207  -------
1208  additionalData : `lsst.daf.base.PropertySet`
1209  Image compression settings.
1210  """
1211  mapping = self.mappings[datasetType]
1212  recipeName = mapping.recipe
1213  storageType = mapping.storage
1214  if storageType not in self._writeRecipes:
1215  return dafBase.PropertySet()
1216  if recipeName not in self._writeRecipes[storageType]:
1217  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1218  (datasetType, storageType, recipeName))
1219  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1220  seed = hash(tuple(dataId.items())) % 2**31
1221  for plane in ("image", "mask", "variance"):
1222  if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
1223  recipe.set(plane + ".scaling.seed", seed)
1224  return recipe
1225 
1226  def _initWriteRecipes(self):
1227  """Read the recipes for writing files
1228 
1229  These recipes are currently used for configuring FITS compression,
1230  but they could have wider uses for configuring different flavors
1231  of the storage types. A recipe is referred to by a symbolic name,
1232  which has associated settings. These settings are stored as a
1233  `PropertySet` so they can easily be passed down to the
1234  boost::persistence framework as the "additionalData" parameter.
1235 
1236  The list of recipes is written in YAML. A default recipe and
1237  some other convenient recipes are in obs_base/policy/writeRecipes.yaml
1238  and these may be overridden or supplemented by the individual obs_*
1239  packages' own policy/writeRecipes.yaml files.
1240 
1241  Recipes are grouped by the storage type. Currently, only the
1242  ``FitsStorage`` storage type uses recipes, which uses it to
1243  configure FITS image compression.
1244 
1245  Each ``FitsStorage`` recipe for FITS compression should define
1246  "image", "mask" and "variance" entries, each of which may contain
1247  "compression" and "scaling" entries. Defaults will be provided for
1248  any missing elements under "compression" and "scaling".
1249 
1250  The allowed entries under "compression" are:
1251 
1252  * algorithm (string): compression algorithm to use
1253  * rows (int): number of rows per tile (0 = entire dimension)
1254  * columns (int): number of columns per tile (0 = entire dimension)
1255  * quantizeLevel (float): cfitsio quantization level
1256 
1257  The allowed entries under "scaling" are:
1258 
1259  * algorithm (string): scaling algorithm to use
1260  * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
1261  * fuzz (bool): fuzz the values when quantising floating-point values?
1262  * seed (long): seed for random number generator when fuzzing
1263  * maskPlanes (list of string): mask planes to ignore when doing
1264  statistics
1265  * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
1266  * quantizePad: number of stdev to allow on the low side (for
1267  STDEV_POSITIVE/NEGATIVE)
1268  * bscale: manually specified BSCALE (for MANUAL scaling)
1269  * bzero: manually specified BSCALE (for MANUAL scaling)
1270 
1271  A very simple example YAML recipe:
1272 
1273  FitsStorage:
1274  default:
1275  image: &default
1276  compression:
1277  algorithm: GZIP_SHUFFLE
1278  mask: *default
1279  variance: *default
1280  """
1281  recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
1282  recipes = dafPersist.Policy(recipesFile)
1283  supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
1284  validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
1285  if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
1286  supplements = dafPersist.Policy(supplementsFile)
1287  # Don't allow overrides, only supplements
1288  for entry in validationMenu:
1289  intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
1290  if intersection:
1291  raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
1292  (supplementsFile, entry, recipesFile, intersection))
1293  recipes.update(supplements)
1294 
1295  self._writeRecipes = {}
1296  for storageType in recipes.names(True):
1297  if "default" not in recipes[storageType]:
1298  raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
1299  (storageType, recipesFile))
1300  self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1301 
1302 
1303 def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
1304  """Generate an Exposure from an image-like object
1305 
1306  If the image is a DecoratedImage then also set its WCS and metadata
1307  (Image and MaskedImage are missing the necessary metadata
1308  and Exposure already has those set)
1309 
1310  Parameters
1311  ----------
1312  image : Image-like object
1313  Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
1314  Exposure.
1315 
1316  Returns
1317  -------
1318  `lsst.afw.image.Exposure`
1319  Exposure containing input image.
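
    Examples
    --------
    A minimal sketch wrapping a plain image, without attempting to set a
    VisitInfo:

    >>> import lsst.afw.image as afwImage
    >>> exp = exposureFromImage(afwImage.ImageF(10, 10), setVisitInfo=False)
    >>> isinstance(exp, afwImage.ExposureF)
    True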
1320  """
1321  metadata = None
1322  if isinstance(image, afwImage.MaskedImage):
1323  exposure = afwImage.makeExposure(image)
1324  elif isinstance(image, afwImage.DecoratedImage):
1325  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1326  metadata = image.getMetadata()
1327  exposure.setMetadata(metadata)
1328  elif isinstance(image, afwImage.Exposure):
1329  exposure = image
1330  metadata = exposure.getMetadata()
1331  else: # Image
1332  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1333 
1334  # set VisitInfo if we can
1335  if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
1336  if metadata is not None:
1337  if mapper is None:
1338  if not logger:
1339  logger = lsstLog.Log.getLogger("CameraMapper")
1340  logger.warn("I can only set the VisitInfo if you provide a mapper")
1341  else:
1342  exposureId = mapper._computeCcdExposureId(dataId)
1343  visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1344 
1345  exposure.getInfo().setVisitInfo(visitInfo)
1346 
1347  return exposure
1348 
1349 
1351  """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the
    # default value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognized keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated