lsst.obs.base  18.1.0-16-g069e110
cameraMapper.py
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 import copy
24 import os
25 import re
26 import traceback
27 import weakref
28 
29 from astro_metadata_translator import fix_header
30 import lsst.daf.persistence as dafPersist
31 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
32 import lsst.daf.base as dafBase
33 import lsst.afw.geom as afwGeom
34 import lsst.afw.image as afwImage
35 import lsst.afw.table as afwTable
36 from lsst.afw.fits import readMetadata
37 import lsst.afw.cameraGeom as afwCameraGeom
38 import lsst.log as lsstLog
39 import lsst.pex.exceptions as pexExcept
40 from .exposureIdInfo import ExposureIdInfo
41 from .makeRawVisitInfo import MakeRawVisitInfo
42 from .utils import createInitialSkyWcs, InitialSkyWcsError
43 from lsst.utils import getPackageDir
44 
45 __all__ = ["CameraMapper", "exposureFromImage"]
46 
47 
48 class CameraMapper(dafPersist.Mapper):
49 
50  """CameraMapper is a base class for mappers that handle images from a
51  camera and products derived from them. This provides an abstraction layer
52  between the data on disk and the code.
53 
54  Public methods: keys, queryMetadata, getDatasetTypes, map,
55  canStandardize, standardize
56 
57  Mappers for specific data sources (e.g., CFHT Megacam, LSST
58  simulations, etc.) should inherit this class.
59 
60  The CameraMapper manages datasets within a "root" directory. Note that
61  writing to a dataset present in the input root will hide the existing
62  dataset but not overwrite it. See #2160 for design discussion.
63 
64  A camera is assumed to consist of one or more rafts, each composed of
65  multiple CCDs. Each CCD is in turn composed of one or more amplifiers
66  (amps). A camera is also assumed to have a camera geometry description
67  (CameraGeom object) as a policy file and a filter description (Filter
68  class static configuration) as another policy file.
69 
70  Information from the camera geometry and defects is inserted into all
71  Exposure objects returned.
72 
73  The mapper uses one or two registries to retrieve metadata about the
74  images. The first is a registry of all raw exposures. This must contain
75  the time of the observation. One or more tables (or the equivalent)
76  within the registry are used to look up data identifier components that
77  are not specified by the user (e.g. filter) and to return results for
78  metadata queries. The second is an optional registry of all calibration
79  data. This should contain validity start and end entries for each
80  calibration dataset in the same timescale as the observation time.
81 
82  Subclasses will typically set MakeRawVisitInfoClass and optionally the
83  metadata translator class:
84 
85  MakeRawVisitInfoClass: a class variable that points to a subclass of
86  MakeRawVisitInfo, a functor that creates an
87  lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
88 
89  translatorClass: The `~astro_metadata_translator.MetadataTranslator`
90  class to use for fixing metadata values. If it is not set, an attempt
91  will be made to infer the class from ``MakeRawVisitInfoClass``; failing
92  that, the metadata fixup will try to infer the translator class from
93  the header itself.
94 
95  Subclasses must provide the following methods:
96 
97  _extractDetectorName(self, dataId): returns the detector name for a CCD
98  (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
99  a dataset identifier referring to that CCD or a subcomponent of it.
100 
101  _computeCcdExposureId(self, dataId): see below
102 
103  _computeCoaddExposureId(self, dataId, singleFilter): see below
104 
105  Subclasses may also need to override the following methods:
106 
107  _transformId(self, dataId): transformation of a data identifier
108  from colloquial usage (e.g., "ccdname") to proper/actual usage
109  (e.g., "ccd"), including making suitable for path expansion (e.g. removing
110  commas). The default implementation does nothing. Note that this
111  method should not modify its input parameter.
112 
113  getShortCcdName(self, ccdName): a static method that returns a shortened
114  name suitable for use as a filename. The default version converts spaces
115  to underscores.
116 
117  _mapActualToPath(self, template, actualId): convert a template path to an
118  actual path, using the actual dataset identifier.
119 
120  The mapper's behaviors are largely specified by the policy file.
121  See the MapperDictionary.paf for descriptions of the available items.
122 
123  The 'exposures', 'calibrations', and 'datasets' subpolicies configure
124  mappings (see Mappings class).
125 
126  Common default mappings for all subclasses can be specified in the
127  "policy/{images,exposures,calibrations,datasets}.yaml" files. This
128  provides a simple way to add a product to all camera mappers.
129 
130  Functions to map (provide a path to the data given a dataset
131  identifier dictionary) and standardize (convert data into some standard
132  format or type) may be provided in the subclass as "map_{dataset type}"
133  and "std_{dataset type}", respectively.
134 
135  If non-Exposure datasets cannot be retrieved using standard
136  daf_persistence methods alone, a "bypass_{dataset type}" function may be
137  provided in the subclass to return the dataset instead of using the
138  "datasets" subpolicy.
139 
140  Implementations of map_camera and bypass_camera that should typically be
141  sufficient are provided in this base class.
142 
143  Notes
144  -----
145  .. todo::
146 
147  Instead of auto-loading the camera at construction time, load it from
148  the calibration registry
149 
150  Parameters
151  ----------
152  policy : daf_persistence.Policy
153  Policy with per-camera defaults already merged.
154  repositoryDir : string
155  Policy repository for the subclassing module (obtained with
156  getRepositoryPath() on the per-camera default dictionary).
157  root : string, optional
158  Path to the root directory for data.
159  registry : string, optional
160  Path to registry with data's metadata.
161  calibRoot : string, optional
162  Root directory for calibrations.
163  calibRegistry : string, optional
164  Path to registry with calibrations' metadata.
165  provided : list of string, optional
166  Keys provided by the mapper.
167  parentRegistry : Registry subclass, optional
168  Registry from a parent repository that may be used to look up
169  data's metadata.
170  repositoryCfg : daf_persistence.RepositoryCfg or None, optional
171  The configuration information for the repository this mapper is
172  being used with.
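
Examples
--------
A minimal subclass sketch; ``MyCamMapper``, ``obs_mycam`` and the policy
file name are hypothetical, and the ID packings are purely
illustrative::

    class MyCamMapper(CameraMapper):
        packageName = "obs_mycam"

        def __init__(self, **kwargs):
            policyFile = dafPersist.Policy.defaultPolicyFile(
                "obs_mycam", "MyCamMapper.yaml", "policy")
            policy = dafPersist.Policy(policyFile)
            super().__init__(policy, os.path.dirname(policyFile), **kwargs)

        def _extractDetectorName(self, dataId):
            return "ccd%02d" % dataId["ccd"]

        def _computeCcdExposureId(self, dataId):
            return 64 * dataId["visit"] + dataId["ccd"]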
173  """
174  packageName = None
175 
176  # a class or subclass of MakeRawVisitInfo, a functor that makes an
177  # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
178  MakeRawVisitInfoClass = MakeRawVisitInfo
179 
180  # a class or subclass of PupilFactory
181  PupilFactoryClass = afwCameraGeom.PupilFactory
182 
183  # Class to use for metadata translations
184  translatorClass = None
185 
186  def __init__(self, policy, repositoryDir,
187  root=None, registry=None, calibRoot=None, calibRegistry=None,
188  provided=None, parentRegistry=None, repositoryCfg=None):
189 
190  dafPersist.Mapper.__init__(self)
191 
192  self.log = lsstLog.Log.getLogger("CameraMapper")
193 
194  if root:
195  self.root = root
196  elif repositoryCfg:
197  self.root = repositoryCfg.root
198  else:
199  self.root = None
200 
201  repoPolicy = repositoryCfg.policy if repositoryCfg else None
202  if repoPolicy is not None:
203  policy.update(repoPolicy)
204 
205  # Levels
206  self.levels = dict()
207  if 'levels' in policy:
208  levelsPolicy = policy['levels']
209  for key in levelsPolicy.names(True):
210  self.levels[key] = set(levelsPolicy.asArray(key))
211  self.defaultLevel = policy['defaultLevel']
212  self.defaultSubLevels = dict()
213  if 'defaultSubLevels' in policy:
214  self.defaultSubLevels = policy['defaultSubLevels']
215 
216  # Root directories
217  if root is None:
218  root = "."
219  root = dafPersist.LogicalLocation(root).locString()
220 
221  self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
222 
223  # If the calibRoot is passed in, use that. If not and it's indicated in
224  # the policy, use that. Otherwise, the calibs are in the regular
225  # root.
226  # If the location indicated by the calib root does not exist, do not
227  # create it.
228  calibStorage = None
229  if calibRoot is not None:
230  calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
231  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
232  create=False)
233  else:
234  calibRoot = policy.get('calibRoot', None)
235  if calibRoot:
236  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
237  create=False)
238  if calibStorage is None:
239  calibStorage = self.rootStorage
240 
241  self.root = root
242 
243  # Registries
244  self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
245  self.rootStorage, searchParents=False,
246  posixIfNoSql=(not parentRegistry))
247  if not self.registry:
248  self.registry = parentRegistry
249  needCalibRegistry = policy.get('needCalibRegistry', None)
250  if needCalibRegistry:
251  if calibStorage:
252  self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
253  "calibRegistryPath", calibStorage,
254  posixIfNoSql=False) # NB never use posix for calibs
255  else:
256  raise RuntimeError(
257  "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
258  "calibRoot ivar:%s or policy['calibRoot']:%s" %
259  (calibRoot, policy.get('calibRoot', None)))
260  else:
261  self.calibRegistry = None
262 
263  # Dict of valid keys and their value types
264  self.keyDict = dict()
265 
266  self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
267  self._initWriteRecipes()
268 
269  # Camera geometry
270  self.cameraDataLocation = None # path to camera geometry config file
271  self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)
272 
273  # Filter translation table
274  self.filters = None
275 
276  # verify that the class variable packageName is set before attempting
277  # to instantiate an instance
278  if self.packageName is None:
279  raise ValueError('class variable packageName must not be None')
280 
281  self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)
282 
283  # Assign a metadata translator if one has not been defined by
284  # subclass. We can sometimes infer one from the RawVisitInfo
285  # class.
286  if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
287  self.translatorClass = self.makeRawVisitInfo.metadataTranslator
288 
289  def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
290  """Initialize mappings
291 
292  For each of the dataset types that we want to be able to read, there
293  are methods that can be created to support them:
294  * map_<dataset> : determine the path for dataset
295  * std_<dataset> : standardize the retrieved dataset
296  * bypass_<dataset> : retrieve the dataset (bypassing the usual
297  retrieval machinery)
298  * query_<dataset> : query the registry
299 
300  Besides the dataset types explicitly listed in the policy, we create
301  additional, derived datasets for convenience,
302  e.g., reading the header of an image, retrieving only the size of a
303  catalog.
304 
305  Parameters
306  ----------
307  policy : `lsst.daf.persistence.Policy`
308  Policy with per-camera defaults already merged
309  rootStorage : `Storage subclass instance`
310  Interface to persisted repository data.
311  calibStorage : `Storage subclass instance`
312  Interface to persisted calib repository data.
313  provided : `list` of `str`
314  Keys provided by the mapper
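
For example, given a ``raw`` dataset defined in the policy with
``FitsStorage``, the following methods would be created (a sketch; the
``raw`` dataset itself must come from the policy)::

    mapper.map_raw(dataId)            # path to the file
    mapper.query_raw(format, dataId)  # registry lookup
    mapper.bypass_raw_filename(...)   # filename(s) on disk
    mapper.bypass_raw_md(...)         # FITS header as a PropertyList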
315  """
316  # Sub-dictionaries (for exposure/calibration/dataset types)
317  imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
318  "obs_base", "ImageMappingDefaults.yaml", "policy"))
319  expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
320  "obs_base", "ExposureMappingDefaults.yaml", "policy"))
321  calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
322  "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
323  dsMappingPolicy = dafPersist.Policy()
324 
325  # Mappings
326  mappingList = (
327  ("images", imgMappingPolicy, ImageMapping),
328  ("exposures", expMappingPolicy, ExposureMapping),
329  ("calibrations", calMappingPolicy, CalibrationMapping),
330  ("datasets", dsMappingPolicy, DatasetMapping)
331  )
332  self.mappings = dict()
333  for name, defPolicy, cls in mappingList:
334  if name in policy:
335  datasets = policy[name]
336 
337  # Centrally-defined datasets
338  defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
339  if os.path.exists(defaultsPath):
340  datasets.merge(dafPersist.Policy(defaultsPath))
341 
342  mappings = dict()
343  setattr(self, name, mappings)
344  for datasetType in datasets.names(True):
345  subPolicy = datasets[datasetType]
346  subPolicy.merge(defPolicy)
347 
348  if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
349  def compositeClosure(dataId, write=False, mapper=None, mapping=None,
350  subPolicy=subPolicy):
351  components = subPolicy.get('composite')
352  assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
353  disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
354  python = subPolicy['python']
355  butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
356  disassembler=disassembler,
357  python=python,
358  dataId=dataId,
359  mapper=self)
360  for name, component in components.items():
361  butlerComposite.add(id=name,
362  datasetType=component.get('datasetType'),
363  setter=component.get('setter', None),
364  getter=component.get('getter', None),
365  subset=component.get('subset', False),
366  inputOnly=component.get('inputOnly', False))
367  return butlerComposite
368  setattr(self, "map_" + datasetType, compositeClosure)
369  # for now at least, don't set up any other handling for this dataset type.
370  continue
371 
372  if name == "calibrations":
373  mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
374  provided=provided, dataRoot=rootStorage)
375  else:
376  mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
377 
378  if datasetType in self.mappings:
379  raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
380  self.keyDict.update(mapping.keys())
381  mappings[datasetType] = mapping
382  self.mappings[datasetType] = mapping
383  if not hasattr(self, "map_" + datasetType):
384  def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
385  return mapping.map(mapper, dataId, write)
386  setattr(self, "map_" + datasetType, mapClosure)
387  if not hasattr(self, "query_" + datasetType):
388  def queryClosure(format, dataId, mapping=mapping):
389  return mapping.lookup(format, dataId)
390  setattr(self, "query_" + datasetType, queryClosure)
391  if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
392  def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
393  return mapping.standardize(mapper, item, dataId)
394  setattr(self, "std_" + datasetType, stdClosure)
395 
396  def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
397  """Set convenience methods on CameraMapper"""
398  mapName = "map_" + datasetType + "_" + suffix
399  bypassName = "bypass_" + datasetType + "_" + suffix
400  queryName = "query_" + datasetType + "_" + suffix
401  if not hasattr(self, mapName):
402  setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
403  if not hasattr(self, bypassName):
404  if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
405  bypassImpl = getattr(self, "bypass_" + datasetType)
406  if bypassImpl is not None:
407  setattr(self, bypassName, bypassImpl)
408  if not hasattr(self, queryName):
409  setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
410 
411  # Filename of dataset
412  setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
413  [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
414  # Metadata from FITS file
415  if subPolicy["storage"] == "FitsStorage": # a FITS image
416  def getMetadata(datasetType, pythonType, location, dataId):
417  md = readMetadata(location.getLocationsWithRoot()[0])
418  fix_header(md, translator_class=self.translatorClass)
419  return md
420 
421  setMethods("md", bypassImpl=getMetadata)
422 
423  # Add support for configuring FITS compression
424  addName = "add_" + datasetType
425  if not hasattr(self, addName):
426  setattr(self, addName, self.getImageCompressionSettings)
427 
428  if name == "exposures":
429  def getSkyWcs(datasetType, pythonType, location, dataId):
430  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
431  return fitsReader.readWcs()
432 
433  setMethods("wcs", bypassImpl=getSkyWcs)
434 
435  def getPhotoCalib(datasetType, pythonType, location, dataId):
436  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
437  return fitsReader.readPhotoCalib()
438 
439  setMethods("photoCalib", bypassImpl=getPhotoCalib)
440 
441  def getVisitInfo(datasetType, pythonType, location, dataId):
442  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
443  return fitsReader.readVisitInfo()
444 
445  setMethods("visitInfo", bypassImpl=getVisitInfo)
446 
447  def getFilter(datasetType, pythonType, location, dataId):
448  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
449  return fitsReader.readFilter()
450 
451  setMethods("filter", bypassImpl=getFilter)
452 
453  setMethods("detector",
454  mapImpl=lambda dataId, write=False:
455  dafPersist.ButlerLocation(
456  pythonType="lsst.afw.cameraGeom.CameraConfig",
457  cppType="Config",
458  storageName="Internal",
459  locationList="ignored",
460  dataId=dataId,
461  mapper=self,
462  storage=None,
463  ),
464  bypassImpl=lambda datasetType, pythonType, location, dataId:
465  self.camera[self._extractDetectorName(dataId)]
466  )
467 
468  def getBBox(datasetType, pythonType, location, dataId):
469  md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
470  fix_header(md, translator_class=self.translatorClass)
471  return afwImage.bboxFromMetadata(md)
472 
473  setMethods("bbox", bypassImpl=getBBox)
474 
475  elif name == "images":
476  def getBBox(datasetType, pythonType, location, dataId):
477  md = readMetadata(location.getLocationsWithRoot()[0])
478  fix_header(md, translator_class=self.translatorClass)
479  return afwImage.bboxFromMetadata(md)
480  setMethods("bbox", bypassImpl=getBBox)
481 
482  if subPolicy["storage"] == "FitsCatalogStorage": # a FITS catalog
483 
484  def getMetadata(datasetType, pythonType, location, dataId):
485  md = readMetadata(os.path.join(location.getStorage().root,
486  location.getLocations()[0]), hdu=1)
487  fix_header(md, translator_class=self.translatorClass)
488  return md
489 
490  setMethods("md", bypassImpl=getMetadata)
491 
492  # Sub-images
493  if subPolicy["storage"] == "FitsStorage":
494  def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
495  subId = dataId.copy()
496  del subId['bbox']
497  loc = mapping.map(mapper, subId, write)
498  bbox = dataId['bbox']
499  llcX = bbox.getMinX()
500  llcY = bbox.getMinY()
501  width = bbox.getWidth()
502  height = bbox.getHeight()
503  loc.additionalData.set('llcX', llcX)
504  loc.additionalData.set('llcY', llcY)
505  loc.additionalData.set('width', width)
506  loc.additionalData.set('height', height)
507  if 'imageOrigin' in dataId:
508  loc.additionalData.set('imageOrigin',
509  dataId['imageOrigin'])
510  return loc
511 
512  def querySubClosure(key, format, dataId, mapping=mapping):
513  subId = dataId.copy()
514  del subId['bbox']
515  return mapping.lookup(format, subId)
516  setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
517 
518  if subPolicy["storage"] == "FitsCatalogStorage":
519  # Length of catalog
520 
521  def getLen(datasetType, pythonType, location, dataId):
522  md = readMetadata(os.path.join(location.getStorage().root,
523  location.getLocations()[0]), hdu=1)
524  fix_header(md, translator_class=self.translatorClass)
525  return md["NAXIS2"]
526 
527  setMethods("len", bypassImpl=getLen)
528 
529  # Schema of catalog
530  if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
531  setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
532  afwTable.Schema.readFits(os.path.join(location.getStorage().root,
533  location.getLocations()[0])))
534 
535  def _computeCcdExposureId(self, dataId):
536  """Compute the 64-bit (long) identifier for a CCD exposure.
537 
538  Subclasses must override
539 
540  Parameters
541  ----------
542  dataId : `dict`
543  Data identifier with visit, ccd.
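
A typical override packs the ID components into an integer, e.g.
(purely illustrative)::

    return 64 * dataId["visit"] + dataId["ccd"]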
544  """
545  raise NotImplementedError()
546 
547  def _computeCoaddExposureId(self, dataId, singleFilter):
548  """Compute the 64-bit (long) identifier for a coadd.
549 
550  Subclasses must override
551 
552  Parameters
553  ----------
554  dataId : `dict`
555  Data identifier with tract and patch.
556  singleFilter : `bool`
557  True means the desired ID is for a single-filter coadd, in which
558  case dataId must contain filter.
559  """
560  raise NotImplementedError()
561 
562  def _search(self, path):
563  """Search for path in the associated repository's storage.
564 
565  Parameters
566  ----------
567  path : string
568  Path that describes an object in the repository associated with
569  this mapper.
570  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
571  indicator will be stripped when searching and so will match
572  filenames without the HDU indicator, e.g. 'foo.fits'. The path
573  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
574 
575  Returns
576  -------
577  string
578  The path for this object in the repository. Will return None if the
579  object can't be found. If the input argument path contained an HDU
580  indicator, the returned path will also contain the HDU indicator.
581  """
582  return self.rootStorage.search(path)
583 
584  def backup(self, datasetType, dataId):
585  """Rename any existing object with the given type and dataId.
586 
587  The CameraMapper implementation saves objects in a sequence of e.g.:
588 
589  - foo.fits
590  - foo.fits~1
591  - foo.fits~2
592 
593  All of the backups will be placed in the output repo, however, and will
594  not be removed if they are found elsewhere in the _parent chain. This
595  means that the same file will be stored twice if the previous version
596  was found in an input repo.
597  """
598 
599  # Calling PosixStorage directly is not the long term solution in this
600  # function, this is work-in-progress on epic DM-6225. The plan is for
601  # parentSearch to be changed to 'search', and search only the storage
602  # associated with this mapper. All searching of parents will be handled
603  # by traversing the container of repositories in Butler.
604 
605  def firstElement(lst):
606  """Get the first element in the list, or None if that can't be
607  done.
608  """
609  return lst[0] if lst is not None and len(lst) else None
610 
611  n = 0
612  newLocation = self.map(datasetType, dataId, write=True)
613  newPath = newLocation.getLocations()[0]
614  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
615  path = firstElement(path)
616  oldPaths = []
617  while path is not None:
618  n += 1
619  oldPaths.append((n, path))
620  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
621  path = firstElement(path)
622  for n, oldPath in reversed(oldPaths):
623  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
624 
625  def keys(self):
626  """Return supported keys.
627 
628  Returns
629  -------
630  iterable
631  List of keys usable in a dataset identifier
632  """
633  return iter(self.keyDict.keys())
634 
635  def getKeys(self, datasetType, level):
636  """Return a dict of supported keys and their value types for a given
637  dataset type at a given level of the key hierarchy.
638 
639  Parameters
640  ----------
641  datasetType : `str`
642  Dataset type or None for all dataset types.
643  level : `str` or None
644  Level or None for all levels or '' for the default level for the
645  camera.
646 
647  Returns
648  -------
649  `dict`
650  Keys are strings usable in a dataset identifier, values are their
651  value types.
652  """
653 
654  # not sure if this is how we want to do this. what if None was intended?
655  if level == '':
656  level = self.getDefaultLevel()
657 
658  if datasetType is None:
659  keyDict = copy.copy(self.keyDict)
660  else:
661  keyDict = self.mappings[datasetType].keys()
662  if level is not None and level in self.levels:
663  keyDict = copy.copy(keyDict)
664  for lev in self.levels[level]:
665  if lev in keyDict:
666  del keyDict[lev]
667  return keyDict
668 
669  def getDefaultLevel(self):
670  return self.defaultLevel
671 
672  def getDefaultSubLevel(self, level):
673  if level in self.defaultSubLevels:
674  return self.defaultSubLevels[level]
675  return None
676 
677  @classmethod
678  def getCameraName(cls):
679  """Return the name of the camera that this CameraMapper is for."""
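# e.g. for "<class 'lsst.obs.cfht.MegacamMapper'>" the first regex
# extracts "Megacam", which is returned as "megacam" (class path
# illustrative).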
680  className = str(cls)
681  className = className[className.find('.'):-1]
682  m = re.search(r'(\w+)Mapper', className)
683  if m is None:
684  m = re.search(r"class '[\w.]*?(\w+)'", className)
685  name = m.group(1)
686  return name[:1].lower() + name[1:] if name else ''
687 
688  @classmethod
689  def getPackageName(cls):
690  """Return the name of the package containing this CameraMapper."""
691  if cls.packageName is None:
692  raise ValueError('class variable packageName must not be None')
693  return cls.packageName
694 
695  @classmethod
696  def getPackageDir(cls):
697  """Return the base directory of this package"""
698  return getPackageDir(cls.getPackageName())
699 
700  def map_camera(self, dataId, write=False):
701  """Map a camera dataset."""
702  if self.camera is None:
703  raise RuntimeError("No camera dataset available.")
704  actualId = self._transformId(dataId)
705  return dafPersist.ButlerLocation(
706  pythonType="lsst.afw.cameraGeom.CameraConfig",
707  cppType="Config",
708  storageName="ConfigStorage",
709  locationList=self.cameraDataLocation or "ignored",
710  dataId=actualId,
711  mapper=self,
712  storage=self.rootStorage
713  )
714 
715  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
716  """Return the (preloaded) camera object.
717  """
718  if self.camera is None:
719  raise RuntimeError("No camera dataset available.")
720  return self.camera
721 
722  def map_expIdInfo(self, dataId, write=False):
723  return dafPersist.ButlerLocation(
724  pythonType="lsst.obs.base.ExposureIdInfo",
725  cppType=None,
726  storageName="Internal",
727  locationList="ignored",
728  dataId=dataId,
729  mapper=self,
730  storage=self.rootStorage
731  )
732 
733  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
734  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
735  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
736  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
737  return ExposureIdInfo(expId=expId, expBits=expBits)
738 
739  def std_bfKernel(self, item, dataId):
740  """Disable standardization for bfKernel
741 
742  bfKernel is a calibration product that is numpy array,
743  unlike other calibration products that are all images;
744  all calibration images are sent through _standardizeExposure
745  due to CalibrationMapping, but we don't want that to happen to bfKernel
746  """
747  return item
748 
749  def std_raw(self, item, dataId):
750  """Standardize a raw dataset by converting it to an Exposure instead
751  of an Image"""
752  return self._standardizeExposure(self.exposures['raw'], item, dataId,
753  trimmed=False, setVisitInfo=True)
754 
755  def map_skypolicy(self, dataId):
756  """Map a sky policy."""
757  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
758  "Internal", None, None, self,
759  storage=self.rootStorage)
760 
761  def std_skypolicy(self, item, dataId):
762  """Standardize a sky policy by returning the one we use."""
763  return self.skypolicy
764 
765 ###############################################################################
766 #
767 # Utility functions
768 #
769 ###############################################################################
770 
771  def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
772  posixIfNoSql=True):
773  """Set up a registry (usually SQLite3), trying a number of possible
774  paths.
775 
776  Parameters
777  ----------
778  name : string
779  Name of registry.
780  description : `str`
781  Description of registry (for log messages)
782  path : string
783  Path for registry.
784  policy : `lsst.daf.persistence.Policy`
785  Policy that contains the registry name, used if path is None.
786  policyKey : string
787  Key in policy for registry path.
788  storage : Storage subclass
789  Repository Storage to look in.
790  searchParents : bool, optional
791  True if the search for a registry should follow any Butler v1
792  _parent symlinks.
793  posixIfNoSql : bool, optional
794  If an sqlite registry is not found, will create a posix registry if
795  this is True.
796 
797  Returns
798  -------
799  lsst.daf.persistence.Registry
800  Registry object
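
Notes
-----
The search order is: the ``path`` argument, the policy entry named by
``policyKey``, then ``<name>.pgsql``, ``<name>.sqlite3`` and
``./<name>.sqlite3`` within ``storage``; failing all of those, a posix
registry is tried if ``posixIfNoSql`` is set.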
801  """
802  if path is None and policyKey in policy:
803  path = dafPersist.LogicalLocation(policy[policyKey]).locString()
804  if os.path.isabs(path):
805  raise RuntimeError("Policy should not indicate an absolute path for registry.")
806  if not storage.exists(path):
807  newPath = storage.instanceSearch(path)
808 
809  newPath = newPath[0] if newPath is not None and len(newPath) else None
810  if newPath is None:
811  self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
812  path)
813  path = newPath
814  else:
815  self.log.warn("Unable to locate registry at policy path: %s", path)
816  path = None
817 
818  # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
819  # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
820  # root from path. Currently only works with PosixStorage
821  try:
822  root = storage.root
823  if path and (path.startswith(root)):
824  path = path[len(root + '/'):]
825  except AttributeError:
826  pass
827 
828  # determine if there is an sqlite registry and if not, try the posix registry.
829  registry = None
830 
831  def search(filename, description):
832  """Search for file in storage
833 
834  Parameters
835  ----------
836  filename : `str`
837  Filename to search for
838  description : `str`
839  Description of file, for error message.
840 
841  Returns
842  -------
843  path : `str` or `None`
844  Path to file, or None
845  """
846  result = storage.instanceSearch(filename)
847  if result:
848  return result[0]
849  self.log.debug("Unable to locate %s: %s", description, filename)
850  return None
851 
852  # Search for a suitable registry database
853  if path is None:
854  path = search("%s.pgsql" % name, "%s in root" % description)
855  if path is None:
856  path = search("%s.sqlite3" % name, "%s in root" % description)
857  if path is None:
858  path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
859 
860  if path is not None:
861  if not storage.exists(path):
862  newPath = storage.instanceSearch(path)
863  newPath = newPath[0] if newPath is not None and len(newPath) else None
864  if newPath is not None:
865  path = newPath
866  localFileObj = storage.getLocalFile(path)
867  self.log.info("Loading %s registry from %s", description, localFileObj.name)
868  registry = dafPersist.Registry.create(localFileObj.name)
869  localFileObj.close()
870  elif not registry and posixIfNoSql:
871  try:
872  self.log.info("Loading Posix %s registry from %s", description, storage.root)
873  registry = dafPersist.PosixRegistry(storage.root)
874  except Exception:
875  registry = None
876 
877  return registry
878 
879  def _transformId(self, dataId):
880  """Generate a standard ID dict from a camera-specific ID dict.
881 
882  Canonical keys include:
883  - amp: amplifier name
884  - ccd: CCD name (in LSST this is a combination of raft and sensor)
885  The default implementation returns a copy of its input.
886 
887  Parameters
888  ----------
889  dataId : `dict`
890  Dataset identifier; this must not be modified
891 
892  Returns
893  -------
894  `dict`
895  Transformed dataset identifier.
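
For example, a subclass might canonicalize a colloquial key
(hypothetical keys and renaming)::

    transformed = dict(dataId)
    if "ccdname" in transformed:
        transformed["ccd"] = transformed.pop("ccdname").replace(",", "")
    return transformed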
896  """
897 
898  return dataId.copy()
899 
900  def _mapActualToPath(self, template, actualId):
901  """Convert a template path to an actual path, using the actual data
902  identifier. This implementation is usually sufficient but can be
903  overridden by the subclass.
904 
905  Parameters
906  ----------
907  template : `str`
908  Template path
909  actualId : `dict`
910  Dataset identifier
911 
912  Returns
913  -------
914  `str`
915  Pathname
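
For example (illustrative template and values)::

    "raw/v%(visit)d_f%(filter)s.fits" % {"visit": 1, "filter": "g"}
    # -> 'raw/v1_fg.fits'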
916  """
917 
918  try:
919  transformedId = self._transformId(actualId)
920  return template % transformedId
921  except Exception as e:
922  raise RuntimeError("Failed to format %r with data %r: %s" % (template, actualId, e))
923 
924  @staticmethod
925  def getShortCcdName(ccdName):
926  """Convert a CCD name to a form useful as a filename
927 
928  The default implementation converts spaces to underscores.
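
For example, ``"R:1,2 S:3,4"`` becomes ``"R:1,2_S:3,4"``.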
929  """
930  return ccdName.replace(" ", "_")
931 
932  def _extractDetectorName(self, dataId):
933  """Extract the detector (CCD) name from the dataset identifier.
934 
935  The name in question is the detector name used by lsst.afw.cameraGeom.
936 
937  Parameters
938  ----------
939  dataId : `dict`
940  Dataset identifier.
941 
942  Returns
943  -------
944  `str`
945  Detector name
946  """
947  raise NotImplementedError("No _extractDetectorName() function specified")
948 
949  def _extractAmpId(self, dataId):
950  """Extract the amplifier identifier from a dataset identifier.
951 
952  .. note:: Deprecated in 11_0
953 
954  The amplifier identifier has two parts: the detector name for the CCD
955  containing the amplifier, and the index of the amplifier in the detector.
956 
957  Parameters
958  ----------
959  dataId : `dict`
960  Dataset identifier
961 
962  Returns
963  -------
964  `tuple`
965  Amplifier identifier
966  """
967 
968  trDataId = self._transformId(dataId)
969  return (trDataId["ccd"], int(trDataId['amp']))
970 
971  def _setAmpDetector(self, item, dataId, trimmed=True):
972  """Set the detector object in an Exposure for an amplifier.
973 
974  Defects are also added to the Exposure based on the detector object.
975 
976  Parameters
977  ----------
978  item : `lsst.afw.image.Exposure`
979  Exposure to set the detector in.
980  dataId : `dict`
981  Dataset identifier
982  trimmed : `bool`
983  Should detector be marked as trimmed? (ignored)
984  """
985 
986  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
987 
988  def _setCcdDetector(self, item, dataId, trimmed=True):
989  """Set the detector object in an Exposure for a CCD.
990 
991  Parameters
992  ----------
993  item : `lsst.afw.image.Exposure`
994  Exposure to set the detector in.
995  dataId : `dict`
996  Dataset identifier
997  trimmed : `bool`
998  Should detector be marked as trimmed? (ignored)
999  """
1000  if item.getDetector() is not None:
1001  return
1002 
1003  detectorName = self._extractDetectorName(dataId)
1004  detector = self.camera[detectorName]
1005  item.setDetector(detector)
1006 
1007  def _setFilter(self, mapping, item, dataId):
1008  """Set the filter object in an Exposure. If the Exposure had a FILTER
1009  keyword, this was already processed during load. But if it didn't,
1010  use the filter from the registry.
1011 
1012  Parameters
1013  ----------
1014  mapping : `lsst.obs.base.Mapping`
1015  Where to get the filter from.
1016  item : `lsst.afw.image.Exposure`
1017  Exposure to set the filter in.
1018  dataId : `dict`
1019  Dataset identifier.
1020  """
1021 
1022  if not isinstance(item, (afwImage.ExposureU, afwImage.ExposureI,
1023  afwImage.ExposureF, afwImage.ExposureD)):
1024  return
1025 
1026  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1027  return
1028 
1029  actualId = mapping.need(['filter'], dataId)
1030  filterName = actualId['filter']
1031  if self.filters is not None and filterName in self.filters:
1032  filterName = self.filters[filterName]
1033  try:
1034  item.setFilter(afwImage.Filter(filterName))
1035  except pexExcept.NotFoundError:
1036  self.log.warn("Filter %s not defined. Set to UNKNOWN.", filterName)
1037 
1038  def _standardizeExposure(self, mapping, item, dataId, filter=True,
1039  trimmed=True, setVisitInfo=True):
1040  """Default standardization function for images.
1041 
1042  This sets the Detector from the camera geometry
1043  and optionally sets the Filter. In both cases this saves
1044  having to persist some data in each exposure (or image).
1045 
1046  Parameters
1047  ----------
1048  mapping : `lsst.obs.base.Mapping`
1049  Where to get the values from.
1050  item : image-like object
1051  Can be any of lsst.afw.image.Exposure,
1052  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
1053  or lsst.afw.image.MaskedImage
1054 
1055  dataId : `dict`
1056  Dataset identifier
1057  filter : `bool`
1058  Set filter? Ignored if item is already an exposure
1059  trimmed : `bool`
1060  Should detector be marked as trimmed?
1061  setVisitInfo : `bool`
1062  Should Exposure have its VisitInfo filled out from the metadata?
1063 
1064  Returns
1065  -------
1066  `lsst.afw.image.Exposure`
1067  The standardized Exposure.
1068  """
1069  try:
1070  exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
1071  setVisitInfo=setVisitInfo)
1072  except Exception as e:
1073  self.log.error("Could not turn item=%r into an exposure: %s", item, e)
1074  raise
1075 
1076  if mapping.level.lower() == "amp":
1077  self._setAmpDetector(exposure, dataId, trimmed)
1078  elif mapping.level.lower() == "ccd":
1079  self._setCcdDetector(exposure, dataId, trimmed)
1080 
1081  # We can only create a WCS if it doesn't already have one and
1082  # we have either a VisitInfo or exposure metadata.
1083  if exposure.getWcs() is None and \
1084  (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict() != {}):
1085  self._createInitialSkyWcs(exposure)
1086 
1087  if filter:
1088  self._setFilter(mapping, exposure, dataId)
1089 
1090  return exposure
1091 
1092  def _createSkyWcsFromMetadata(self, exposure):
1093  """Create a SkyWcs from the FITS header metadata in an Exposure.
1094 
1095  Parameters
1096  ----------
1097  exposure : `lsst.afw.image.Exposure`
1098  The exposure to get metadata from, and attach the SkyWcs to.
1099  """
1100  metadata = exposure.getMetadata()
1101  try:
1102  wcs = afwGeom.makeSkyWcs(metadata, strip=True)
1103  exposure.setWcs(wcs)
1104  except pexExcept.TypeError as e:
1105  # See DM-14372 for why this is debug and not warn (e.g. calib files without wcs metadata).
1106  self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
1107  " %s", e.args[0])
1108  # ensure any WCS values stripped from the metadata are removed in the exposure
1109  exposure.setMetadata(metadata)
1110 
1111  def _createInitialSkyWcs(self, exposure):
1112  """Create a SkyWcs from the boresight and camera geometry.
1113 
1114  If the boresight or camera geometry do not support this method of
1115  WCS creation, this falls back on the header metadata-based version
1116  (typically a purely linear FITS crval/crpix/cdmatrix WCS).
1117 
1118  Parameters
1119  ----------
1120  exposure : `lsst.afw.image.Exposure`
1121  The exposure to get data from, and attach the SkyWcs to.
1122  """
1123  # Always try to use the metadata first, to strip WCS keys from it.
1124  self._createSkyWcsFromMetadata(exposure)
1125 
1126  if exposure.getInfo().getVisitInfo() is None:
1127  msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
1128  self.log.warn(msg)
1129  return
1130  try:
1131  newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
1132  exposure.setWcs(newSkyWcs)
1133  except InitialSkyWcsError as e:
1134  msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
1135  self.log.warn(msg, e)
1136  self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
1137  if e.__context__ is not None:
1138  self.log.debug("Root-cause Exception was: %s",
1139  traceback.TracebackException.from_exception(e.__context__))
1140 
1141  def _makeCamera(self, policy, repositoryDir):
1142  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
1143  the camera geometry
1144 
1145  Also set self.cameraDataLocation, if relevant (else it can be left
1146  None).
1147 
1148  This implementation assumes that policy contains an entry "camera"
1149  that points to the subdirectory in this package of camera data;
1150  specifically, that subdirectory must contain:
1151  - a file named `camera.py` that contains persisted camera config
1152  - ampInfo table FITS files, as required by
1153  lsst.afw.cameraGeom.makeCameraFromPath
1154 
1155  Parameters
1156  ----------
1157  policy : `lsst.daf.persistence.Policy`
1158  Policy with per-camera defaults already merged
1159  (PexPolicy only for backward compatibility).
1160  repositoryDir : `str`
1161  Policy repository for the subclassing module (obtained with
1162  getRepositoryPath() on the per-camera default dictionary).
1163  """
1164  if 'camera' not in policy:
1165  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1166  cameraDataSubdir = policy['camera']
1167  self.cameraDataLocation = os.path.normpath(
1168  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1169  cameraConfig = afwCameraGeom.CameraConfig()
1170  cameraConfig.load(self.cameraDataLocation)
1171  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1172  return afwCameraGeom.makeCameraFromPath(
1173  cameraConfig=cameraConfig,
1174  ampInfoPath=ampInfoPath,
1175  shortNameFunc=self.getShortCcdName,
1176  pupilFactoryClass=self.PupilFactoryClass
1177  )
1178 
1179  def getRegistry(self):
1180  """Get the registry used by this mapper.
1181 
1182  Returns
1183  -------
1184  Registry or None
1185  The registry used by this mapper for this mapper's repository.
1186  """
1187  return self.registry
1188 
1189  def getImageCompressionSettings(self, datasetType, dataId):
1190  """Stuff image compression settings into a daf.base.PropertySet
1191 
1192  This goes into the ButlerLocation's "additionalData", which gets
1193  passed into the boost::persistence framework.
1194 
1195  Parameters
1196  ----------
1197  datasetType : `str`
1198  Type of dataset for which to get the image compression settings.
1199  dataId : `dict`
1200  Dataset identifier.
1201 
1202  Returns
1203  -------
1204  additionalData : `lsst.daf.base.PropertySet`
1205  Image compression settings.
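
The returned names are dotted per-plane paths built by
`validateRecipeFitsStorage`, e.g. (illustrative)::

    recipe.getScalar("image.compression.algorithm")  # e.g. "NONE"
    recipe.getScalar("image.scaling.seed")           # per-dataId seed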
1206  """
1207  mapping = self.mappings[datasetType]
1208  recipeName = mapping.recipe
1209  storageType = mapping.storage
1210  if storageType not in self._writeRecipes:
1211  return dafBase.PropertySet()
1212  if recipeName not in self._writeRecipes[storageType]:
1213  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1214  (datasetType, storageType, recipeName))
1215  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
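# Derive a deterministic per-dataId seed and apply it only where the
# recipe left the fuzzing seed unset (zero).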
1216  seed = hash(tuple(dataId.items())) % 2**31
1217  for plane in ("image", "mask", "variance"):
1218  if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
1219  recipe.set(plane + ".scaling.seed", seed)
1220  return recipe
1221 
1222  def _initWriteRecipes(self):
1223  """Read the recipes for writing files
1224 
1225  These recipes are currently used for configuring FITS compression,
1226  but they could have wider uses for configuring different flavors
1227  of the storage types. A recipe is referred to by a symbolic name,
1228  which has associated settings. These settings are stored as a
1229  `PropertySet` so they can easily be passed down to the
1230  boost::persistence framework as the "additionalData" parameter.
1231 
1232  The list of recipes is written in YAML. A default recipe and
1233  some other convenient recipes are in obs_base/policy/writeRecipes.yaml
1234  and these may be overridden or supplemented by the individual obs_*
1235  packages' own policy/writeRecipes.yaml files.
1236 
1237  Recipes are grouped by the storage type. Currently, only the
1238  ``FitsStorage`` storage type uses recipes, which are used to
1239  configure FITS image compression.
1240 
1241  Each ``FitsStorage`` recipe for FITS compression should define
1242  "image", "mask" and "variance" entries, each of which may contain
1243  "compression" and "scaling" entries. Defaults will be provided for
1244  any missing elements under "compression" and "scaling".
1245 
1246  The allowed entries under "compression" are:
1247 
1248  * algorithm (string): compression algorithm to use
1249  * rows (int): number of rows per tile (0 = entire dimension)
1250  * columns (int): number of columns per tile (0 = entire dimension)
1251  * quantizeLevel (float): cfitsio quantization level
1252 
1253  The allowed entries under "scaling" are:
1254 
1255  * algorithm (string): scaling algorithm to use
1256  * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
1257  * fuzz (bool): fuzz the values when quantising floating-point values?
1258  * seed (long): seed for random number generator when fuzzing
1259  * maskPlanes (list of string): mask planes to ignore when doing
1260  statistics
1261  * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
1262  * quantizePad: number of stdev to allow on the low side (for
1263  STDEV_POSITIVE/NEGATIVE)
1264  * bscale: manually specified BSCALE (for MANUAL scaling)
1265  * bzero: manually specified BZERO (for MANUAL scaling)
1266 
1267  A very simple example YAML recipe:
1268 
1269  FitsStorage:
1270    default:
1271      image: &default
1272        compression:
1273          algorithm: GZIP_SHUFFLE
1274      mask: *default
1275      variance: *default
1276  """
1277  recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
1278  recipes = dafPersist.Policy(recipesFile)
1279  supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
1280  validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
1281  if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
1282  supplements = dafPersist.Policy(supplementsFile)
1283  # Don't allow overrides, only supplements
1284  for entry in validationMenu:
1285  intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
1286  if intersection:
1287  raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
1288  (supplementsFile, entry, recipesFile, intersection))
1289  recipes.update(supplements)
1290 
1291  self._writeRecipes = {}
1292  for storageType in recipes.names(True):
1293  if "default" not in recipes[storageType]:
1294  raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
1295  (storageType, recipesFile))
1296  self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1297 
1298 
1299 def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
1300  """Generate an Exposure from an image-like object
1301 
1302  If the image is a DecoratedImage then also set its WCS and metadata
1303  (Image and MaskedImage are missing the necessary metadata
1304  and Exposure already has those set)
1305 
1306  Parameters
1307  ----------
1308  image : Image-like object
1309  Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
1310  Exposure.
dataId : `dict`, optional
Dataset identifier; used (with ``mapper``) to compute the exposure ID.
mapper : `CameraMapper`, optional
Mapper used to compute the exposure ID and build the VisitInfo.
logger : `lsst.log.Log`, optional
Logger for warnings; one is created if needed.
setVisitInfo : `bool`, optional
If True, attach a VisitInfo built from the metadata when possible.
1311 
1312  Returns
1313  -------
1314  `lsst.afw.image.Exposure`
1315  Exposure containing input image.
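
Examples
--------
A sketch, assuming ``decoratedImage`` is an
`lsst.afw.image.DecoratedImage` already in hand::

    exposure = exposureFromImage(decoratedImage, setVisitInfo=False)
    metadata = exposure.getMetadata()  # carried over from the image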
1316  """
1317  metadata = None
1318  if isinstance(image, afwImage.MaskedImage):
1319  exposure = afwImage.makeExposure(image)
1320  elif isinstance(image, afwImage.DecoratedImage):
1321  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1322  metadata = image.getMetadata()
1323  exposure.setMetadata(metadata)
1324  elif isinstance(image, afwImage.Exposure):
1325  exposure = image
1326  metadata = exposure.getMetadata()
1327  else: # Image
1328  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1329 
1330  # set VisitInfo if we can
1331  if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
1332  if metadata is not None:
1333  if mapper is None:
1334  if not logger:
1335  logger = lsstLog.Log.getLogger("CameraMapper")
1336  logger.warn("I can only set the VisitInfo if you provide a mapper")
1337  else:
1338  exposureId = mapper._computeCcdExposureId(dataId)
1339  visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1340 
1341  exposure.getInfo().setVisitInfo(visitInfo)
1342 
1343  return exposure
1344 
1345 
1346 def validateRecipeFitsStorage(recipes):
1347  """Validate recipes for FitsStorage
1348 
1349  The recipes are supplemented with default values where appropriate.
1350 
1351  TODO: replace this custom validation code with Cerberus (DM-11846)
1352 
1353  Parameters
1354  ----------
1355  recipes : `lsst.daf.persistence.Policy`
1356  FitsStorage recipes to validate.
1357 
1358  Returns
1359  -------
1360  validated : `lsst.daf.base.PropertySet`
1361  Validated FitsStorage recipe.
1362 
1363  Raises
1364  ------
1365  `RuntimeError`
1366  If validation fails.
1367  """
1368  # Schemas define what should be there, and the default values (and, via
1369  # the default value, the expected type).
1370  compressionSchema = {
1371  "algorithm": "NONE",
1372  "rows": 1,
1373  "columns": 0,
1374  "quantizeLevel": 0.0,
1375  }
1376  scalingSchema = {
1377  "algorithm": "NONE",
1378  "bitpix": 0,
1379  "maskPlanes": ["NO_DATA"],
1380  "seed": 0,
1381  "quantizeLevel": 4.0,
1382  "quantizePad": 5.0,
1383  "fuzz": True,
1384  "bscale": 1.0,
1385  "bzero": 0.0,
1386  }
1387 
1388  def checkUnrecognized(entry, allowed, description):
1389  """Check to see if the entry contains unrecognised keywords"""
1390  unrecognized = set(entry.keys()) - set(allowed)
1391  if unrecognized:
1392  raise RuntimeError(
1393  "Unrecognized entries when parsing image compression recipe %s: %s" %
1394  (description, unrecognized))
1395 
1396  validated = {}
1397  for name in recipes.names(True):
1398  checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
1399  rr = dafBase.PropertySet()
1400  validated[name] = rr
1401  for plane in ("image", "mask", "variance"):
1402  checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
1403  name + "->" + plane)
1404 
1405  for settings, schema in (("compression", compressionSchema),
1406  ("scaling", scalingSchema)):
1407  prefix = plane + "." + settings
1408  if settings not in recipes[name][plane]:
1409  for key in schema:
1410  rr.set(prefix + "." + key, schema[key])
1411  continue
1412  entry = recipes[name][plane][settings]
1413  checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
1414  for key in schema:
1415  value = type(schema[key])(entry[key]) if key in entry else schema[key]
1416  rr.set(prefix + "." + key, value)
1417  return validated