lsst.obs.base  17.0.1-21-ga02730c+3
cameraMapper.py
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 import copy
24 import os
25 import re
26 import weakref
27 import lsst.daf.persistence as dafPersist
28 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
29 import lsst.daf.base as dafBase
30 import lsst.afw.geom as afwGeom
31 import lsst.afw.image as afwImage
32 import lsst.afw.table as afwTable
33 from lsst.afw.fits import readMetadata
34 import lsst.afw.cameraGeom as afwCameraGeom
35 import lsst.log as lsstLog
36 import lsst.pex.exceptions as pexExcept
37 from .exposureIdInfo import ExposureIdInfo
38 from .makeRawVisitInfo import MakeRawVisitInfo
39 from lsst.utils import getPackageDir
40 
41 __all__ = ["CameraMapper", "exposureFromImage"]
42 
43 
44 class CameraMapper(dafPersist.Mapper):
45 
46  """CameraMapper is a base class for mappers that handle images from a
47  camera and products derived from them. This provides an abstraction layer
48  between the data on disk and the code.
49 
50  Public methods: keys, queryMetadata, getDatasetTypes, map,
51  canStandardize, standardize
52 
53  Mappers for specific data sources (e.g., CFHT Megacam, LSST
54  simulations, etc.) should inherit this class.
55 
56  The CameraMapper manages datasets within a "root" directory. Note that
57  writing to a dataset present in the input root will hide the existing
58  dataset but not overwrite it. See #2160 for design discussion.
59 
60  A camera is assumed to consist of one or more rafts, each composed of
61  multiple CCDs. Each CCD is in turn composed of one or more amplifiers
62  (amps). A camera is also assumed to have a camera geometry description
63  (CameraGeom object) as a policy file, and a filter description (Filter
64  class static configuration) as another policy file.
65 
66  Information from the camera geometry and defects is inserted into all
67  returned Exposure objects.
68 
69  The mapper uses one or two registries to retrieve metadata about the
70  images. The first is a registry of all raw exposures. This must contain
71  the time of the observation. One or more tables (or the equivalent)
72  within the registry are used to look up data identifier components that
73  are not specified by the user (e.g. filter) and to return results for
74  metadata queries. The second is an optional registry of all calibration
75  data. This should contain validity start and end entries for each
76  calibration dataset in the same timescale as the observation time.
77 
78  Subclasses will typically set MakeRawVisitInfoClass:
79 
80  MakeRawVisitInfoClass: a class variable that points to a subclass of
81  MakeRawVisitInfo, a functor that creates an
82  lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
83 
84  Subclasses must provide the following methods:
85 
86  _extractDetectorName(self, dataId): returns the detector name for a CCD
87  (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
88  a dataset identifier referring to that CCD or a subcomponent of it.
89 
90  _computeCcdExposureId(self, dataId): see below
91 
92  _computeCoaddExposureId(self, dataId, singleFilter): see below
93 
94  Subclasses may also need to override the following methods:
95 
96  _transformId(self, dataId): transformation of a data identifier
97  from colloquial usage (e.g., "ccdname") to proper/actual usage
98  (e.g., "ccd"), including making it suitable for path expansion (e.g. removing
99  commas). The default implementation does nothing. Note that this
100  method should not modify its input parameter.
101 
102  getShortCcdName(self, ccdName): a static method that returns a shortened
103  name suitable for use as a filename. The default version converts spaces
104  to underscores.
105 
106  _mapActualToPath(self, template, actualId): convert a template path to an
107  actual path, using the actual dataset identifier.
108 
109  The mapper's behaviors are largely specified by the policy file.
110  See the MapperDictionary.paf for descriptions of the available items.
111 
112  The 'exposures', 'calibrations', and 'datasets' subpolicies configure
113  mappings (see Mappings class).
114 
115  Common default mappings for all subclasses can be specified in the
116  "policy/{images,exposures,calibrations,datasets}.yaml" files. This
117  provides a simple way to add a product to all camera mappers.
118 
119  Functions to map (provide a path to the data given a dataset
120  identifier dictionary) and standardize (convert data into some standard
121  format or type) may be provided in the subclass as "map_{dataset type}"
122  and "std_{dataset type}", respectively.
123 
124  If non-Exposure datasets cannot be retrieved using standard
125  daf_persistence methods alone, a "bypass_{dataset type}" function may be
126  provided in the subclass to return the dataset instead of using the
127  "datasets" subpolicy.
128 
129  Implementations of map_camera and bypass_camera that should typically be
130  sufficient are provided in this base class.
131 
132  Notes
133  -----
134  TODO:
135 
136  - Instead of auto-loading the camera at construction time, load it from
137  the calibration registry
138  """
139  packageName = None
140 
141  # a class or subclass of MakeRawVisitInfo, a functor that makes an
142  # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
143  MakeRawVisitInfoClass = MakeRawVisitInfo
144 
145  # a class or subclass of PupilFactory
146  PupilFactoryClass = afwCameraGeom.PupilFactory
147 
148  def __init__(self, policy, repositoryDir,
149  root=None, registry=None, calibRoot=None, calibRegistry=None,
150  provided=None, parentRegistry=None, repositoryCfg=None):
151  """Initialize the CameraMapper.
152 
153  Parameters
154  ----------
155  policy : daf_persistence.Policy
156  Policy with per-camera defaults already merged.
157  repositoryDir : string
158  Policy repository for the subclassing module (obtained with
159  getRepositoryPath() on the per-camera default dictionary).
160  root : string, optional
161  Path to the root directory for data.
162  registry : string, optional
163  Path to registry with data's metadata.
164  calibRoot : string, optional
165  Root directory for calibrations.
166  calibRegistry : string, optional
167  Path to registry with calibrations' metadata.
168  provided : list of string, optional
169  Keys provided by the mapper.
170  parentRegistry : Registry subclass, optional
171  Registry from a parent repository that may be used to look up
172  data's metadata.
173  repositoryCfg : daf_persistence.RepositoryCfg or None, optional
174  The configuration information for the repository this mapper is
175  being used with.
176  """
177 
178  dafPersist.Mapper.__init__(self)
179 
180  self.log = lsstLog.Log.getLogger("CameraMapper")
181 
182  if root:
183  self.root = root
184  elif repositoryCfg:
185  self.root = repositoryCfg.root
186  else:
187  self.root = None
188 
189  repoPolicy = repositoryCfg.policy if repositoryCfg else None
190  if repoPolicy is not None:
191  policy.update(repoPolicy)
192 
193  # Levels
194  self.levels = dict()
195  if 'levels' in policy:
196  levelsPolicy = policy['levels']
197  for key in levelsPolicy.names(True):
198  self.levels[key] = set(levelsPolicy.asArray(key))
199  self.defaultLevel = policy['defaultLevel']
200  self.defaultSubLevels = dict()
201  if 'defaultSubLevels' in policy:
202  self.defaultSubLevels = policy['defaultSubLevels']
203 
204  # Root directories
205  if root is None:
206  root = "."
207  root = dafPersist.LogicalLocation(root).locString()
208 
209  self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
210 
211  # If calibRoot is passed in, use it. Otherwise, if the policy indicates
212  # a calibRoot, use that. Failing both, the calibs are assumed to be in
213  # the regular root.
214  # If the location indicated by the calib root does not exist, do not
215  # create it.
216  calibStorage = None
217  if calibRoot is not None:
218  calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
219  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
220  create=False)
221  else:
222  calibRoot = policy.get('calibRoot', None)
223  if calibRoot:
224  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
225  create=False)
226  if calibStorage is None:
227  calibStorage = self.rootStorage
228 
229  self.root = root
230 
231  # Registries
232  self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
233  self.rootStorage, searchParents=False,
234  posixIfNoSql=(not parentRegistry))
235  if not self.registry:
236  self.registry = parentRegistry
237  needCalibRegistry = policy.get('needCalibRegistry', None)
238  if needCalibRegistry:
239  if calibStorage:
240  self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
241  "calibRegistryPath", calibStorage,
242  posixIfNoSql=False) # NB never use posix for calibs
243  else:
244  raise RuntimeError(
245  "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
246  "calibRoot ivar:%s or policy['calibRoot']:%s" %
247  (calibRoot, policy.get('calibRoot', None)))
248  else:
249  self.calibRegistry = None
250 
251  # Dict of valid keys and their value types
252  self.keyDict = dict()
253 
254  self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
255  self._initWriteRecipes()
256 
257  # Camera geometry
258  self.cameraDataLocation = None # path to camera geometry config file
259  self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)
260 
261  # Filter translation table
262  self.filters = None
263 
264  # verify that the class variable packageName is set before attempting
265  # to instantiate an instance
266  if self.packageName is None:
267  raise ValueError('class variable packageName must not be None')
268 
270 
271  def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
272  """Initialize mappings
273 
274  For each of the dataset types that we want to be able to read, there
275  are methods that can be created to support them:
276  * map_<dataset> : determine the path for dataset
277  * std_<dataset> : standardize the retrieved dataset
278  * bypass_<dataset> : retrieve the dataset (bypassing the usual
279  retrieval machinery)
280  * query_<dataset> : query the registry
281 
282  Besides the dataset types explicitly listed in the policy, we create
283  additional, derived datasets for additional conveniences,
284  e.g., reading the header of an image, retrieving only the size of a
285  catalog.
286 
287  Parameters
288  ----------
289  policy : `lsst.daf.persistence.Policy`
290  Policy with per-camera defaults already merged
291  rootStorage : `Storage subclass instance`
292  Interface to persisted repository data.
293  calibStorage : `Storage subclass instance`
294  Interface to persisted calib repository data.
295  provided : `list` of `str`
296  Keys provided by the mapper
297  """
298  # Sub-dictionaries (for exposure/calibration/dataset types)
299  imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
300  "obs_base", "ImageMappingDefaults.yaml", "policy"))
301  expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
302  "obs_base", "ExposureMappingDefaults.yaml", "policy"))
303  calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
304  "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
305  dsMappingPolicy = dafPersist.Policy()
306 
307  # Mappings
308  mappingList = (
309  ("images", imgMappingPolicy, ImageMapping),
310  ("exposures", expMappingPolicy, ExposureMapping),
311  ("calibrations", calMappingPolicy, CalibrationMapping),
312  ("datasets", dsMappingPolicy, DatasetMapping)
313  )
314  self.mappings = dict()
315  for name, defPolicy, cls in mappingList:
316  if name in policy:
317  datasets = policy[name]
318 
319  # Centrally-defined datasets
320  defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
321  if os.path.exists(defaultsPath):
322  datasets.merge(dafPersist.Policy(defaultsPath))
323 
324  mappings = dict()
325  setattr(self, name, mappings)
326  for datasetType in datasets.names(True):
327  subPolicy = datasets[datasetType]
328  subPolicy.merge(defPolicy)
329 
330  if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
331  def compositeClosure(dataId, write=False, mapper=None, mapping=None,
332  subPolicy=subPolicy):
333  components = subPolicy.get('composite')
334  assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
335  disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
336  python = subPolicy['python']
337  butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
338  disassembler=disassembler,
339  python=python,
340  dataId=dataId,
341  mapper=self)
342  for name, component in components.items():
343  butlerComposite.add(id=name,
344  datasetType=component.get('datasetType'),
345  setter=component.get('setter', None),
346  getter=component.get('getter', None),
347  subset=component.get('subset', False),
348  inputOnly=component.get('inputOnly', False))
349  return butlerComposite
350  setattr(self, "map_" + datasetType, compositeClosure)
351  # for now at least, don't set up any other handling for this dataset type.
352  continue
353 
354  if name == "calibrations":
355  mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
356  provided=provided, dataRoot=rootStorage)
357  else:
358  mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
359 
360  if datasetType in self.mappings:
361  raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
362  self.keyDict.update(mapping.keys())
363  mappings[datasetType] = mapping
364  self.mappings[datasetType] = mapping
365  if not hasattr(self, "map_" + datasetType):
366  def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
367  return mapping.map(mapper, dataId, write)
368  setattr(self, "map_" + datasetType, mapClosure)
369  if not hasattr(self, "query_" + datasetType):
370  def queryClosure(format, dataId, mapping=mapping):
371  return mapping.lookup(format, dataId)
372  setattr(self, "query_" + datasetType, queryClosure)
373  if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
374  def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
375  return mapping.standardize(mapper, item, dataId)
376  setattr(self, "std_" + datasetType, stdClosure)
377 
378  def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
379  """Set convenience methods on CameraMapper"""
380  mapName = "map_" + datasetType + "_" + suffix
381  bypassName = "bypass_" + datasetType + "_" + suffix
382  queryName = "query_" + datasetType + "_" + suffix
383  if not hasattr(self, mapName):
384  setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
385  if not hasattr(self, bypassName):
386  if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
387  bypassImpl = getattr(self, "bypass_" + datasetType)
388  if bypassImpl is not None:
389  setattr(self, bypassName, bypassImpl)
390  if not hasattr(self, queryName):
391  setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
392 
393  # Filename of dataset
394  setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
395  [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
396  # Metadata from FITS file
397  if subPolicy["storage"] == "FitsStorage": # a FITS image
398  setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
399  readMetadata(location.getLocationsWithRoot()[0]))
400 
401  # Add support for configuring FITS compression
402  addName = "add_" + datasetType
403  if not hasattr(self, addName):
404  setattr(self, addName, self.getImageCompressionSettings)
405 
406  if name == "exposures":
407  def getSkyWcs(datasetType, pythonType, location, dataId):
408  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
409  return fitsReader.readWcs()
410 
411  setMethods("wcs", bypassImpl=getSkyWcs)
412 
413  def getPhotoCalib(datasetType, pythonType, location, dataId):
414  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
415  return fitsReader.readPhotoCalib()
416 
417  setMethods("photoCalib", bypassImpl=getPhotoCalib)
418 
419  def getVisitInfo(datasetType, pythonType, location, dataId):
420  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
421  return fitsReader.readVisitInfo()
422 
423  setMethods("visitInfo", bypassImpl=getVisitInfo)
424 
425  def getFilter(datasetType, pythonType, location, dataId):
426  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
427  return fitsReader.readFilter()
428 
429  setMethods("filter", bypassImpl=getFilter)
430 
431  setMethods("detector",
432  mapImpl=lambda dataId, write=False:
433  dafPersist.ButlerLocation(
434  pythonType="lsst.afw.cameraGeom.CameraConfig",
435  cppType="Config",
436  storageName="Internal",
437  locationList="ignored",
438  dataId=dataId,
439  mapper=self,
440  storage=None,
441  ),
442  bypassImpl=lambda datasetType, pythonType, location, dataId:
443  self.camera[self._extractDetectorName(dataId)]
444  )
445  setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
446  afwImage.bboxFromMetadata(
447  readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
448 
449  elif name == "images":
450  setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
451  afwImage.bboxFromMetadata(
452  readMetadata(location.getLocationsWithRoot()[0])))
453 
454  if subPolicy["storage"] == "FitsCatalogStorage": # a FITS catalog
455  setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
456  readMetadata(os.path.join(location.getStorage().root,
457  location.getLocations()[0]), hdu=1))
458 
459  # Sub-images
460  if subPolicy["storage"] == "FitsStorage":
461  def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
462  subId = dataId.copy()
463  del subId['bbox']
464  loc = mapping.map(mapper, subId, write)
465  bbox = dataId['bbox']
466  llcX = bbox.getMinX()
467  llcY = bbox.getMinY()
468  width = bbox.getWidth()
469  height = bbox.getHeight()
470  loc.additionalData.set('llcX', llcX)
471  loc.additionalData.set('llcY', llcY)
472  loc.additionalData.set('width', width)
473  loc.additionalData.set('height', height)
474  if 'imageOrigin' in dataId:
475  loc.additionalData.set('imageOrigin',
476  dataId['imageOrigin'])
477  return loc
478 
479  def querySubClosure(key, format, dataId, mapping=mapping):
480  subId = dataId.copy()
481  del subId['bbox']
482  return mapping.lookup(format, subId)
483  setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
484 
485  if subPolicy["storage"] == "FitsCatalogStorage":
486  # Length of catalog
487  setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
488  readMetadata(os.path.join(location.getStorage().root,
489  location.getLocations()[0]),
490  hdu=1).getScalar("NAXIS2"))
491 
492  # Schema of catalog
493  if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
494  setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
495  afwTable.Schema.readFits(os.path.join(location.getStorage().root,
496  location.getLocations()[0])))
497 
498  def _computeCcdExposureId(self, dataId):
499  """Compute the 64-bit (long) identifier for a CCD exposure.
500 
501  Subclasses must override
502 
503  Parameters
504  ----------
505  dataId : `dict`
506  Data identifier with visit, ccd.
507  """
508  raise NotImplementedError()
509 
510  def _computeCoaddExposureId(self, dataId, singleFilter):
511  """Compute the 64-bit (long) identifier for a coadd.
512 
513  Subclasses must override
514 
515  Parameters
516  ----------
517  dataId : `dict`
518  Data identifier with tract and patch.
519  singleFilter : `bool`
520  True means the desired ID is for a single-filter coadd, in which
521  case dataId must contain filter.
522  """
523  raise NotImplementedError()
524 
525  def _search(self, path):
526  """Search for path in the associated repository's storage.
527 
528  Parameters
529  ----------
530  path : string
531  Path that describes an object in the repository associated with
532  this mapper.
533  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
534  indicator will be stripped when searching and so will match
535  filenames without the HDU indicator, e.g. 'foo.fits'. The path
536  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
537 
538  Returns
539  -------
540  string
541  The path for this object in the repository. Will return None if the
542  object can't be found. If the input argument path contained an HDU
543  indicator, the returned path will also contain the HDU indicator.
544  """
545  return self.rootStorage.search(path)
546 
547  def backup(self, datasetType, dataId):
548  """Rename any existing object with the given type and dataId.
549 
550  The CameraMapper implementation saves objects in a sequence of e.g.:
551 
552  - foo.fits
553  - foo.fits~1
554  - foo.fits~2
555 
556  All of the backups will be placed in the output repo, however, and will
557  not be removed if they are found elsewhere in the _parent chain. This
558  means that the same file will be stored twice if the previous version
559  was found in an input repo.
560  """
561 
562  # Calling PosixStorage directly is not the long term solution in this
563  # function, this is work-in-progress on epic DM-6225. The plan is for
564  # parentSearch to be changed to 'search', and search only the storage
565  # associated with this mapper. All searching of parents will be handled
566  # by traversing the container of repositories in Butler.
567 
568  def firstElement(list):
569  """Get the first element in the list, or None if that can't be
570  done.
571  """
572  return list[0] if list is not None and len(list) else None
573 
574  n = 0
575  newLocation = self.map(datasetType, dataId, write=True)
576  newPath = newLocation.getLocations()[0]
577  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
578  path = firstElement(path)
579  oldPaths = []
580  while path is not None:
581  n += 1
582  oldPaths.append((n, path))
583  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
584  path = firstElement(path)
585  for n, oldPath in reversed(oldPaths):
586  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
587 
588  def keys(self):
589  """Return supported keys.
590 
591  Returns
592  -------
593  iterable
594  List of keys usable in a dataset identifier
595  """
596  return iter(self.keyDict.keys())
597 
598  def getKeys(self, datasetType, level):
599  """Return a dict of supported keys and their value types for a given
600  dataset type at a given level of the key hierarchy.
601 
602  Parameters
603  ----------
604  datasetType : `str`
605  Dataset type or None for all dataset types.
606  level : `str` or None
607  Level or None for all levels or '' for the default level for the
608  camera.
609 
610  Returns
611  -------
612  `dict`
613  Keys are strings usable in a dataset identifier, values are their
614  value types.
615  """
616 
617  # not sure if this is how we want to do this. what if None was intended?
618  if level == '':
619  level = self.getDefaultLevel()
620 
621  if datasetType is None:
622  keyDict = copy.copy(self.keyDict)
623  else:
624  keyDict = self.mappings[datasetType].keys()
625  if level is not None and level in self.levels:
626  keyDict = copy.copy(keyDict)
627  for l in self.levels[level]:
628  if l in keyDict:
629  del keyDict[l]
630  return keyDict
631 
632  def getDefaultLevel(self):
633  return self.defaultLevel
634 
635  def getDefaultSubLevel(self, level):
636  if level in self.defaultSubLevels:
637  return self.defaultSubLevels[level]
638  return None
639 
640  @classmethod
641  def getCameraName(cls):
642  """Return the name of the camera that this CameraMapper is for."""
643  className = str(cls)
644  className = className[className.find('.'):-1]
645  m = re.search(r'(\w+)Mapper', className)
646  if m is None:
647  m = re.search(r"class '[\w.]*?(\w+)'", className)
648  name = m.group(1)
649  return name[:1].lower() + name[1:] if name else ''
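# For example, a subclass named "MyCamMapper" yields "myCam": the regex
# captures "MyCam" and the first character is lower-cased:
#
#     MyCamMapper.getCameraName()   # -> "myCam"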
650 
651  @classmethod
652  def getPackageName(cls):
653  """Return the name of the package containing this CameraMapper."""
654  if cls.packageName is None:
655  raise ValueError('class variable packageName must not be None')
656  return cls.packageName
657 
658  @classmethod
659  def getPackageDir(cls):
660  """Return the base directory of this package"""
661  return getPackageDir(cls.getPackageName())
662 
663  def map_camera(self, dataId, write=False):
664  """Map a camera dataset."""
665  if self.camera is None:
666  raise RuntimeError("No camera dataset available.")
667  actualId = self._transformId(dataId)
668  return dafPersist.ButlerLocation(
669  pythonType="lsst.afw.cameraGeom.CameraConfig",
670  cppType="Config",
671  storageName="ConfigStorage",
672  locationList=self.cameraDataLocation or "ignored",
673  dataId=actualId,
674  mapper=self,
675  storage=self.rootStorage
676  )
677 
678  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
679  """Return the (preloaded) camera object.
680  """
681  if self.camera is None:
682  raise RuntimeError("No camera dataset available.")
683  return self.camera
684 
685  def map_expIdInfo(self, dataId, write=False):
686  return dafPersist.ButlerLocation(
687  pythonType="lsst.obs.base.ExposureIdInfo",
688  cppType=None,
689  storageName="Internal",
690  locationList="ignored",
691  dataId=dataId,
692  mapper=self,
693  storage=self.rootStorage
694  )
695 
696  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
697  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
698  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
699  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
700  return ExposureIdInfo(expId=expId, expBits=expBits)
701 
702  def std_bfKernel(self, item, dataId):
703  """Disable standardization for bfKernel
704 
705  bfKernel is a calibration product that is a numpy array,
706  unlike other calibration products that are all images;
707  all calibration images are sent through _standardizeExposure
708  due to CalibrationMapping, but we don't want that to happen to bfKernel
709  """
710  return item
711 
712  def std_raw(self, item, dataId):
713  """Standardize a raw dataset by converting it to an Exposure instead
714  of an Image"""
715  return self._standardizeExposure(self.exposures['raw'], item, dataId,
716  trimmed=False, setVisitInfo=True)
717 
718  def map_skypolicy(self, dataId):
719  """Map a sky policy."""
720  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
721  "Internal", None, None, self,
722  storage=self.rootStorage)
723 
724  def std_skypolicy(self, item, dataId):
725  """Standardize a sky policy by returning the one we use."""
726  return self.skypolicy
727 
728 
733 
734  def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
735  posixIfNoSql=True):
736  """Set up a registry (usually SQLite3), trying a number of possible
737  paths.
738 
739  Parameters
740  ----------
741  name : string
742  Name of registry.
743  description : `str`
744  Description of registry (for log messages)
745  path : string
746  Path for registry.
747  policy : string
748  Policy that contains the registry name, used if path is None.
749  policyKey : string
750  Key in policy for registry path.
751  storage : Storage subclass
752  Repository Storage to look in.
753  searchParents : bool, optional
754  True if the search for a registry should follow any Butler v1
755  _parent symlinks.
756  posixIfNoSql : bool, optional
757  If an sqlite registry is not found, will create a posix registry if
758  this is True.
759 
760  Returns
761  -------
762  lsst.daf.persistence.Registry
763  Registry object
764  """
765  if path is None and policyKey in policy:
766  path = dafPersist.LogicalLocation(policy[policyKey]).locString()
767  if os.path.isabs(path):
768  raise RuntimeError("Policy should not indicate an absolute path for registry.")
769  if not storage.exists(path):
770  newPath = storage.instanceSearch(path)
771 
772  newPath = newPath[0] if newPath is not None and len(newPath) else None
773  if newPath is None:
774  self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
775  path)
776  path = newPath
777  else:
778  self.log.warn("Unable to locate registry at policy path: %s", path)
779  path = None
780 
781  # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
782  # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
783  # root from path. Currently only works with PosixStorage
784  try:
785  root = storage.root
786  if path and (path.startswith(root)):
787  path = path[len(root + '/'):]
788  except AttributeError:
789  pass
790 
791  # determine if there is an sqlite registry and if not, try the posix registry.
792  registry = None
793 
794  def search(filename, description):
795  """Search for file in storage
796 
797  Parameters
798  ----------
799  filename : `str`
800  Filename to search for
801  description : `str`
802  Description of file, for error message.
803 
804  Returns
805  -------
806  path : `str` or `None`
807  Path to file, or None
808  """
809  result = storage.instanceSearch(filename)
810  if result:
811  return result[0]
812  self.log.debug("Unable to locate %s: %s", description, filename)
813  return None
814 
815  # Search for a suitable registry database
816  if path is None:
817  path = search("%s.pgsql" % name, "%s in root" % description)
818  if path is None:
819  path = search("%s.sqlite3" % name, "%s in root" % description)
820  if path is None:
821  path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
822 
823  if path is not None:
824  if not storage.exists(path):
825  newPath = storage.instanceSearch(path)
826  newPath = newPath[0] if newPath is not None and len(newPath) else None
827  if newPath is not None:
828  path = newPath
829  localFileObj = storage.getLocalFile(path)
830  self.log.info("Loading %s registry from %s", description, localFileObj.name)
831  registry = dafPersist.Registry.create(localFileObj.name)
832  localFileObj.close()
833  elif not registry and posixIfNoSql:
834  try:
835  self.log.info("Loading Posix %s registry from %s", description, storage.root)
836  registry = dafPersist.PosixRegistry(storage.root)
837  except Exception:
838  registry = None
839 
840  return registry
841 
842  def _transformId(self, dataId):
843  """Generate a standard ID dict from a camera-specific ID dict.
844 
845  Canonical keys include:
846  - amp: amplifier name
847  - ccd: CCD name (in LSST this is a combination of raft and sensor)
848  The default implementation returns a copy of its input.
849 
850  Parameters
851  ----------
852  dataId : `dict`
853  Dataset identifier; this must not be modified
854 
855  Returns
856  -------
857  `dict`
858  Transformed dataset identifier.
859  """
860 
861  return dataId.copy()
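# An override might map colloquial keys to canonical ones and sanitize
# values for path expansion, e.g. (a sketch for a hypothetical camera):
#
#     def _transformId(self, dataId):
#         actualId = dataId.copy()
#         if "ccdname" in actualId:
#             actualId["ccd"] = actualId.pop("ccdname").replace(",", "")
#         return actualId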
862 
863  def _mapActualToPath(self, template, actualId):
864  """Convert a template path to an actual path, using the actual data
865  identifier. This implementation is usually sufficient but can be
866  overridden by the subclass.
867 
868  Parameters
869  ----------
870  template : `str`
871  Template path
872  actualId : `dict`
873  Dataset identifier
874 
875  Returns
876  -------
877  `str`
878  Pathname
879  """
880 
881  try:
882  transformedId = self._transformId(actualId)
883  return template % transformedId
884  except Exception as e:
885  raise RuntimeError("Failed to format %r with data %r: %s" % (template, actualId, e))
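# For example, with template "raw/v%(visit)d/%(ccd)s.fits" (illustrative),
# the path is produced by plain %-interpolation of the transformed id:
#
#     "raw/v%(visit)d/%(ccd)s.fits" % {"visit": 12, "ccd": "1_2"}
#     # -> 'raw/v12/1_2.fits'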
886 
887  @staticmethod
888  def getShortCcdName(ccdName):
889  """Convert a CCD name to a form useful as a filename
890 
891  The default implementation converts spaces to underscores.
892  """
893  return ccdName.replace(" ", "_")
894 
895  def _extractDetectorName(self, dataId):
896  """Extract the detector (CCD) name from the dataset identifier.
897 
898  The name in question is the detector name used by lsst.afw.cameraGeom.
899 
900  Parameters
901  ----------
902  dataId : `dict`
903  Dataset identifier.
904 
905  Returns
906  -------
907  `str`
908  Detector name
909  """
910  raise NotImplementedError("No _extractDetectorName() function specified")
911 
912  def _extractAmpId(self, dataId):
913  """Extract the amplifier identifer from a dataset identifier.
914 
915  .. note:: Deprecated in 11_0
916 
917  amplifier identifier has two parts: the detector name for the CCD
918  containing the amplifier and index of the amplifier in the detector.
919 
920  Parameters
921  ----------
922  dataId : `dict`
923  Dataset identifier
924 
925  Returns
926  -------
927  `tuple`
928  Amplifier identifier
929  """
930 
931  trDataId = self._transformId(dataId)
932  return (trDataId["ccd"], int(trDataId['amp']))
933 
934  def _setAmpDetector(self, item, dataId, trimmed=True):
935  """Set the detector object in an Exposure for an amplifier.
936 
937  Defects are also added to the Exposure based on the detector object.
938 
939  Parameters
940  ----------
941  item : `lsst.afw.image.Exposure`
942  Exposure to set the detector in.
943  dataId : `dict`
944  Dataset identifier
945  trimmed : `bool`
946  Should detector be marked as trimmed? (ignored)
947  """
948 
949  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
950 
951  def _setCcdDetector(self, item, dataId, trimmed=True):
952  """Set the detector object in an Exposure for a CCD.
953 
954  Parameters
955  ----------
956  item : `lsst.afw.image.Exposure`
957  Exposure to set the detector in.
958  dataId : `dict`
959  Dataset identifier
960  trimmed : `bool`
961  Should detector be marked as trimmed? (ignored)
962  """
963  if item.getDetector() is not None:
964  return
965 
966  detectorName = self._extractDetectorName(dataId)
967  detector = self.camera[detectorName]
968  item.setDetector(detector)
969 
970  def _setFilter(self, mapping, item, dataId):
971  """Set the filter object in an Exposure. If the Exposure had a FILTER
972  keyword, this was already processed during load. But if it didn't,
973  use the filter from the registry.
974 
975  Parameters
976  ----------
977  mapping : `lsst.obs.base.Mapping`
978  Where to get the filter from.
979  item : `lsst.afw.image.Exposure`
980  Exposure to set the filter in.
981  dataId : `dict`
982  Dataset identifier.
983  """
984 
985  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
986  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
987  return
988 
989  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
990  return
991 
992  actualId = mapping.need(['filter'], dataId)
993  filterName = actualId['filter']
994  if self.filters is not None and filterName in self.filters:
995  filterName = self.filters[filterName]
996  item.setFilter(afwImage.Filter(filterName))
997 
998  # Default standardization function for exposures
999  def _standardizeExposure(self, mapping, item, dataId, filter=True,
1000  trimmed=True, setVisitInfo=True):
1001  """Default standardization function for images.
1002 
1003  This sets the Detector from the camera geometry
1004  and optionally sets the Filter. In both cases this saves
1005  having to persist some data in each exposure (or image).
1006 
1007  Parameters
1008  ----------
1009  mapping : `lsst.obs.base.Mapping`
1010  Where to get the values from.
1011  item : image-like object
1012  Can be any of lsst.afw.image.Exposure,
1013  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
1014  or lsst.afw.image.MaskedImage
1015 
1016  dataId : `dict`
1017  Dataset identifier
1018  filter : `bool`
1019  Set filter? Ignored if item is already an exposure
1020  trimmed : `bool`
1021  Should detector be marked as trimmed?
1022  setVisitInfo : `bool`
1023  Should Exposure have its VisitInfo filled out from the metadata?
1024 
1025  Returns
1026  -------
1027  `lsst.afw.image.Exposure`
1028  The standardized Exposure.
1029  """
1030  try:
1031  item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
1032  except Exception as e:
1033  self.log.error("Could not turn item=%r into an exposure: %s" % (item, e))
1034  raise
1035 
1036  if mapping.level.lower() == "amp":
1037  self._setAmpDetector(item, dataId, trimmed)
1038  elif mapping.level.lower() == "ccd":
1039  self._setCcdDetector(item, dataId, trimmed)
1040 
1041  if filter:
1042  self._setFilter(mapping, item, dataId)
1043 
1044  return item
1045 
1046  def _makeCamera(self, policy, repositoryDir):
1047  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
1048  the camera geometry
1049 
1050  Also set self.cameraDataLocation, if relevant (else it can be left
1051  None).
1052 
1053  This implementation assumes that policy contains an entry "camera"
1054  that points to the subdirectory in this package of camera data;
1055  specifically, that subdirectory must contain:
1056  - a file named `camera.py` that contains persisted camera config
1057  - ampInfo table FITS files, as required by
1058  lsst.afw.cameraGeom.makeCameraFromPath
1059 
1060  Parameters
1061  ----------
1062  policy : `lsst.daf.persistence.Policy`
1063  Policy with per-camera defaults already merged
1064  (PexPolicy only for backward compatibility).
1065  repositoryDir : `str`
1066  Policy repository for the subclassing module (obtained with
1067  getRepositoryPath() on the per-camera default dictionary).
1068  """
1069  if 'camera' not in policy:
1070  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1071  cameraDataSubdir = policy['camera']
1072  self.cameraDataLocation = os.path.normpath(
1073  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1074  cameraConfig = afwCameraGeom.CameraConfig()
1075  cameraConfig.load(self.cameraDataLocation)
1076  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1077  return afwCameraGeom.makeCameraFromPath(
1078  cameraConfig=cameraConfig,
1079  ampInfoPath=ampInfoPath,
1080  shortNameFunc=self.getShortCcdName,
1081  pupilFactoryClass=self.PupilFactoryClass
1082  )
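# The expected on-disk layout, relative to the policy repository, is
# roughly the following (filenames other than camera.py are illustrative;
# one ampInfo FITS file per detector, named via getShortCcdName):
#
#     policy: camera: "camera"
#     <repositoryDir>/camera/camera.py    # persisted CameraConfig
#     <repositoryDir>/camera/ccd01.fits   # ampInfo table for "ccd01"
#     <repositoryDir>/camera/ccd02.fits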
1083 
1084  def getRegistry(self):
1085  """Get the registry used by this mapper.
1086 
1087  Returns
1088  -------
1089  Registry or None
1090  The registry used by this mapper for this mapper's repository.
1091  """
1092  return self.registry
1093 
1094  def getImageCompressionSettings(self, datasetType, dataId):
1095  """Stuff image compression settings into a daf.base.PropertySet
1096 
1097  This goes into the ButlerLocation's "additionalData", which gets
1098  passed into the boost::persistence framework.
1099 
1100  Parameters
1101  ----------
1102  datasetType : `str`
1103  Type of dataset for which to get the image compression settings.
1104  dataId : `dict`
1105  Dataset identifier.
1106 
1107  Returns
1108  -------
1109  additionalData : `lsst.daf.base.PropertySet`
1110  Image compression settings.
1111  """
1112  mapping = self.mappings[datasetType]
1113  recipeName = mapping.recipe
1114  storageType = mapping.storage
1115  if storageType not in self._writeRecipes:
1116  return dafBase.PropertySet()
1117  if recipeName not in self._writeRecipes[storageType]:
1118  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1119  (datasetType, storageType, recipeName))
1120  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1121  seed = hash(tuple(dataId.items())) % 2**31
1122  for plane in ("image", "mask", "variance"):
1123  if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
1124  recipe.set(plane + ".scaling.seed", seed)
1125  return recipe
1126 
1127  def _initWriteRecipes(self):
1128  """Read the recipes for writing files
1129 
1130  These recipes are currently used for configuring FITS compression,
1131  but they could have wider uses for configuring different flavors
1132  of the storage types. A recipe is referred to by a symbolic name,
1133  which has associated settings. These settings are stored as a
1134  `PropertySet` so they can easily be passed down to the
1135  boost::persistence framework as the "additionalData" parameter.
1136 
1137  The list of recipes is written in YAML. A default recipe and
1138  some other convenient recipes are in obs_base/policy/writeRecipes.yaml
1139  and these may be overridden or supplemented by the individual obs_*
1140  packages' own policy/writeRecipes.yaml files.
1141 
1142  Recipes are grouped by the storage type. Currently, only the
1143  ``FitsStorage`` storage type uses recipes, and it uses them to
1144  configure FITS image compression.
1145 
1146  Each ``FitsStorage`` recipe for FITS compression should define
1147  "image", "mask" and "variance" entries, each of which may contain
1148  "compression" and "scaling" entries. Defaults will be provided for
1149  any missing elements under "compression" and "scaling".
1150 
1151  The allowed entries under "compression" are:
1152 
1153  * algorithm (string): compression algorithm to use
1154  * rows (int): number of rows per tile (0 = entire dimension)
1155  * columns (int): number of columns per tile (0 = entire dimension)
1156  * quantizeLevel (float): cfitsio quantization level
1157 
1158  The allowed entries under "scaling" are:
1159 
1160  * algorithm (string): scaling algorithm to use
1161  * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
1162  * fuzz (bool): fuzz the values when quantising floating-point values?
1163  * seed (long): seed for random number generator when fuzzing
1164  * maskPlanes (list of string): mask planes to ignore when doing
1165  statistics
1166  * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
1167  * quantizePad: number of stdev to allow on the low side (for
1168  STDEV_POSITIVE/NEGATIVE)
1169  * bscale: manually specified BSCALE (for MANUAL scaling)
1170  * bzero: manually specified BZERO (for MANUAL scaling)
1171 
1172  A very simple example YAML recipe:
1173 
1174  FitsStorage:
1175  default:
1176  image: &default
1177  compression:
1178  algorithm: GZIP_SHUFFLE
1179  mask: *default
1180  variance: *default
1181  """
1182  recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
1183  recipes = dafPersist.Policy(recipesFile)
1184  supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
1185  validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
1186  if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
1187  supplements = dafPersist.Policy(supplementsFile)
1188  # Don't allow overrides, only supplements
1189  for entry in validationMenu:
1190  intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
1191  if intersection:
1192  raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
1193  (supplementsFile, entry, recipesFile, intersection))
1194  recipes.update(supplements)
1195 
1196  self._writeRecipes = {}
1197  for storageType in recipes.names(True):
1198  if "default" not in recipes[storageType]:
1199  raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
1200  (storageType, recipesFile))
1201  self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
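# A fuller (hypothetical) recipe exercising the entries documented above,
# as an obs package might place in its policy/writeRecipes.yaml:
#
#     FitsStorage:
#       lossyBasic:
#         image:
#           compression:
#             algorithm: RICE
#             quantizeLevel: 10.0
#           scaling:
#             algorithm: STDEV_POSITIVE
#             bitpix: 32
#             seed: 0            # 0: derived from the dataId at write time
#         mask: &gzip
#           compression:
#             algorithm: GZIP_SHUFFLE
#         variance: *gzip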
1202 
1203 
1204 def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
1205  """Generate an Exposure from an image-like object
1206 
1207  If the image is a DecoratedImage then also set its WCS and metadata
1208  (Image and MaskedImage are missing the necessary metadata
1209  and Exposure already has those set)
1210 
1211  Parameters
1212  ----------
1213  image : Image-like object
1214  Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
1215  Exposure.
1216 
1217  Returns
1218  -------
1219  `lsst.afw.image.Exposure`
1220  Exposure containing input image.
1221  """
1222  metadata = None
1223  if isinstance(image, afwImage.MaskedImage):
1224  exposure = afwImage.makeExposure(image)
1225  elif isinstance(image, afwImage.DecoratedImage):
1226  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1227  metadata = image.getMetadata()
1228  try:
1229  wcs = afwGeom.makeSkyWcs(metadata, strip=True)
1230  exposure.setWcs(wcs)
1231  except pexExcept.TypeError as e:
1232  # raised on failure to create a wcs (and possibly others)
1233  if logger is None:
1234  logger = lsstLog.Log.getLogger("CameraMapper")
1235  logger.debug("wcs set to None; insufficient information found in metadata to create a valid wcs:"
1236  " %s", e.args[0])
1237 
1238  exposure.setMetadata(metadata)
1239  elif isinstance(image, afwImage.Exposure):
1240  # Exposure
1241  exposure = image
1242  metadata = exposure.getMetadata()
1243  else:
1244  # Image
1245  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1246  #
1247  # set VisitInfo if we can
1248  #
1249  if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
1250  if metadata is not None:
1251  if mapper is None:
1252  if not logger:
1253  logger = lsstLog.Log.getLogger("CameraMapper")
1254  logger.warn("I can only set the VisitInfo if you provide a mapper")
1255  else:
1256  exposureId = mapper._computeCcdExposureId(dataId)
1257  visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1258 
1259  exposure.getInfo().setVisitInfo(visitInfo)
1260 
1261  return exposure
1262 
1263 
1265  """Validate recipes for FitsStorage
1266 
1267  The recipes are supplemented with default values where appropriate.
1268 
1269  TODO: replace this custom validation code with Cerberus (DM-11846)
1270 
1271  Parameters
1272  ----------
1273  recipes : `lsst.daf.persistence.Policy`
1274  FitsStorage recipes to validate.
1275 
1276  Returns
1277  -------
1278  validated : `lsst.daf.base.PropertySet`
1279  Validated FitsStorage recipe.
1280 
1281  Raises
1282  ------
1283  `RuntimeError`
1284  If validation fails.
1285  """
1286  # Schemas define what should be there, and the default values (and by the default
1287  # value, the expected type).
1288  compressionSchema = {
1289  "algorithm": "NONE",
1290  "rows": 1,
1291  "columns": 0,
1292  "quantizeLevel": 0.0,
1293  }
1294  scalingSchema = {
1295  "algorithm": "NONE",
1296  "bitpix": 0,
1297  "maskPlanes": ["NO_DATA"],
1298  "seed": 0,
1299  "quantizeLevel": 4.0,
1300  "quantizePad": 5.0,
1301  "fuzz": True,
1302  "bscale": 1.0,
1303  "bzero": 0.0,
1304  }
1305 
1306  def checkUnrecognized(entry, allowed, description):
1307  """Check to see if the entry contains unrecognised keywords"""
1308  unrecognized = set(entry.keys()) - set(allowed)
1309  if unrecognized:
1310  raise RuntimeError(
1311  "Unrecognized entries when parsing image compression recipe %s: %s" %
1312  (description, unrecognized))
1313 
1314  validated = {}
1315  for name in recipes.names(True):
1316  checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
1317  rr = dafBase.PropertySet()
1318  validated[name] = rr
1319  for plane in ("image", "mask", "variance"):
1320  checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
1321  name + "->" + plane)
1322 
1323  for settings, schema in (("compression", compressionSchema),
1324  ("scaling", scalingSchema)):
1325  prefix = plane + "." + settings
1326  if settings not in recipes[name][plane]:
1327  for key in schema:
1328  rr.set(prefix + "." + key, schema[key])
1329  continue
1330  entry = recipes[name][plane][settings]
1331  checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
1332  for key in schema:
1333  value = type(schema[key])(entry[key]) if key in entry else schema[key]
1334  rr.set(prefix + "." + key, value)
1335  return validated
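# The validated result maps each recipe name to a PropertySet with dotted
# keys and defaults filled in, e.g. (a sketch):
#
#     validated = validateRecipeFitsStorage(recipes)
#     ps = validated["default"]
#     ps.getScalar("image.compression.algorithm")   # e.g. "NONE"
#     ps.getScalar("image.scaling.bitpix")          # e.g. 0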