lsst.obs.base  18.1.0-11-g311e899+6
cameraMapper.py
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 import copy
24 import os
25 import re
26 import weakref
27 from astro_metadata_translator import fix_header
28 import lsst.daf.persistence as dafPersist
29 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
30 import lsst.daf.base as dafBase
31 import lsst.afw.geom as afwGeom
32 import lsst.afw.image as afwImage
33 import lsst.afw.table as afwTable
34 from lsst.afw.fits import readMetadata
35 import lsst.afw.cameraGeom as afwCameraGeom
36 import lsst.log as lsstLog
37 import lsst.pex.exceptions as pexExcept
38 from .exposureIdInfo import ExposureIdInfo
39 from .makeRawVisitInfo import MakeRawVisitInfo
40 from lsst.utils import getPackageDir
41 
42 __all__ = ["CameraMapper", "exposureFromImage"]
43 
44 
45 class CameraMapper(dafPersist.Mapper):
46 
47  """CameraMapper is a base class for mappers that handle images from a
48  camera and products derived from them. This provides an abstraction layer
49  between the data on disk and the code.
50 
51  Public methods: keys, queryMetadata, getDatasetTypes, map,
52  canStandardize, standardize
53 
54  Mappers for specific data sources (e.g., CFHT Megacam, LSST
55  simulations, etc.) should inherit this class.
56 
57  The CameraMapper manages datasets within a "root" directory. Note that
58  writing to a dataset present in the input root will hide the existing
59  dataset but not overwrite it. See #2160 for design discussion.
60 
61  A camera is assumed to consist of one or more rafts, each composed of
62  multiple CCDs. Each CCD is in turn composed of one or more amplifiers
63  (amps). A camera is also assumed to have a camera geometry description
 64  (CameraGeom object) as a policy file and a filter description (Filter class
 65  static configuration) as another policy file.
66 
67  Information from the camera geometry and defects are inserted into all
68  Exposure objects returned.
69 
70  The mapper uses one or two registries to retrieve metadata about the
71  images. The first is a registry of all raw exposures. This must contain
72  the time of the observation. One or more tables (or the equivalent)
73  within the registry are used to look up data identifier components that
74  are not specified by the user (e.g. filter) and to return results for
75  metadata queries. The second is an optional registry of all calibration
76  data. This should contain validity start and end entries for each
77  calibration dataset in the same timescale as the observation time.
78 
79  Subclasses will typically set MakeRawVisitInfoClass and optionally the
80  metadata translator class:
81 
82  MakeRawVisitInfoClass: a class variable that points to a subclass of
83  MakeRawVisitInfo, a functor that creates an
84  lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
85 
86  translatorClass: The `~astro_metadata_translator.MetadataTranslator`
 87  class to use for fixing metadata values. If it is not set, an attempt
 88  will be made to infer the class from ``MakeRawVisitInfoClass``; failing
 89  that, the metadata fixup will try to infer the translator class from the
 90  header itself.
91 
92  Subclasses must provide the following methods:
93 
94  _extractDetectorName(self, dataId): returns the detector name for a CCD
95  (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
96  a dataset identifier referring to that CCD or a subcomponent of it.
97 
98  _computeCcdExposureId(self, dataId): see below
99 
100  _computeCoaddExposureId(self, dataId, singleFilter): see below
101 
102  Subclasses may also need to override the following methods:
103 
104  _transformId(self, dataId): transformation of a data identifier
105  from colloquial usage (e.g., "ccdname") to proper/actual usage
106  (e.g., "ccd"), including making suitable for path expansion (e.g. removing
107  commas). The default implementation does nothing. Note that this
108  method should not modify its input parameter.
109 
110  getShortCcdName(self, ccdName): a static method that returns a shortened
111  name suitable for use as a filename. The default version converts spaces
112  to underscores.
113 
114  _mapActualToPath(self, template, actualId): convert a template path to an
115  actual path, using the actual dataset identifier.
116 
117  The mapper's behaviors are largely specified by the policy file.
118  See the MapperDictionary.paf for descriptions of the available items.
119 
120  The 'exposures', 'calibrations', and 'datasets' subpolicies configure
121  mappings (see Mappings class).
122 
123  Common default mappings for all subclasses can be specified in the
124  "policy/{images,exposures,calibrations,datasets}.yaml" files. This
125  provides a simple way to add a product to all camera mappers.
126 
127  Functions to map (provide a path to the data given a dataset
128  identifier dictionary) and standardize (convert data into some standard
129  format or type) may be provided in the subclass as "map_{dataset type}"
130  and "std_{dataset type}", respectively.
131 
132  If non-Exposure datasets cannot be retrieved using standard
133  daf_persistence methods alone, a "bypass_{dataset type}" function may be
134  provided in the subclass to return the dataset instead of using the
135  "datasets" subpolicy.
136 
137  Implementations of map_camera and bypass_camera that should typically be
138  sufficient are provided in this base class.
139 
140  Notes
141  -----
142  .. todo::
143 
144  Instead of auto-loading the camera at construction time, load it from
145  the calibration registry
146 
147  Parameters
148  ----------
 149  policy : daf_persistence.Policy
150  Policy with per-camera defaults already merged.
151  repositoryDir : string
152  Policy repository for the subclassing module (obtained with
153  getRepositoryPath() on the per-camera default dictionary).
154  root : string, optional
155  Path to the root directory for data.
156  registry : string, optional
157  Path to registry with data's metadata.
158  calibRoot : string, optional
159  Root directory for calibrations.
160  calibRegistry : string, optional
161  Path to registry with calibrations' metadata.
162  provided : list of string, optional
163  Keys provided by the mapper.
164  parentRegistry : Registry subclass, optional
165  Registry from a parent repository that may be used to look up
166  data's metadata.
167  repositoryCfg : daf_persistence.RepositoryCfg or None, optional
168  The configuration information for the repository this mapper is
169  being used with.
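
 Examples
 --------
 A minimal subclass sketch; the package name, policy file, and ID
 packing scheme below are illustrative, not taken from any real obs
 package::

     class MyCamMapper(CameraMapper):
         packageName = "obs_mycam"  # hypothetical obs package

         def __init__(self, **kwargs):
             policyFile = dafPersist.Policy.defaultPolicyFile(
                 "obs_mycam", "MyCamMapper.yaml", "policy")
             policy = dafPersist.Policy(policyFile)
             super().__init__(policy, os.path.dirname(policyFile), **kwargs)

         def _extractDetectorName(self, dataId):
             return "ccd%02d" % dataId["ccd"]

         def _computeCcdExposureId(self, dataId):
             # Pack visit and ccd into a single integer (illustrative).
             return 200 * dataId["visit"] + dataId["ccd"]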
170  """
171  packageName = None
172 
173  # a class or subclass of MakeRawVisitInfo, a functor that makes an
174  # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
175  MakeRawVisitInfoClass = MakeRawVisitInfo
176 
177  # a class or subclass of PupilFactory
178  PupilFactoryClass = afwCameraGeom.PupilFactory
179 
180  # Class to use for metadata translations
181  translatorClass = None
182 
183  def __init__(self, policy, repositoryDir,
184  root=None, registry=None, calibRoot=None, calibRegistry=None,
185  provided=None, parentRegistry=None, repositoryCfg=None):
186 
187  dafPersist.Mapper.__init__(self)
188 
189  self.log = lsstLog.Log.getLogger("CameraMapper")
190 
191  if root:
192  self.root = root
193  elif repositoryCfg:
194  self.root = repositoryCfg.root
195  else:
196  self.root = None
197 
198  repoPolicy = repositoryCfg.policy if repositoryCfg else None
199  if repoPolicy is not None:
200  policy.update(repoPolicy)
201 
202  # Levels
203  self.levels = dict()
204  if 'levels' in policy:
205  levelsPolicy = policy['levels']
206  for key in levelsPolicy.names(True):
207  self.levels[key] = set(levelsPolicy.asArray(key))
208  self.defaultLevel = policy['defaultLevel']
209  self.defaultSubLevels = dict()
210  if 'defaultSubLevels' in policy:
211  self.defaultSubLevels = policy['defaultSubLevels']
212 
213  # Root directories
214  if root is None:
215  root = "."
216  root = dafPersist.LogicalLocation(root).locString()
217 
218  self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
219 
220  # If the calibRoot is passed in, use that. If not and it's indicated in
 221  # the policy, use that. Otherwise, the calibs are in the regular
222  # root.
223  # If the location indicated by the calib root does not exist, do not
224  # create it.
225  calibStorage = None
226  if calibRoot is not None:
227  calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
228  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
229  create=False)
230  else:
231  calibRoot = policy.get('calibRoot', None)
232  if calibRoot:
233  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
234  create=False)
235  if calibStorage is None:
236  calibStorage = self.rootStorage
237 
238  self.root = root
239 
240  # Registries
241  self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
242  self.rootStorage, searchParents=False,
243  posixIfNoSql=(not parentRegistry))
244  if not self.registry:
245  self.registry = parentRegistry
246  needCalibRegistry = policy.get('needCalibRegistry', None)
247  if needCalibRegistry:
248  if calibStorage:
249  self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
250  "calibRegistryPath", calibStorage,
251  posixIfNoSql=False) # NB never use posix for calibs
252  else:
253  raise RuntimeError(
254  "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
255  "calibRoot ivar:%s or policy['calibRoot']:%s" %
256  (calibRoot, policy.get('calibRoot', None)))
257  else:
258  self.calibRegistry = None
259 
260  # Dict of valid keys and their value types
261  self.keyDict = dict()
262 
263  self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
264  self._initWriteRecipes()
265 
266  # Camera geometry
267  self.cameraDataLocation = None # path to camera geometry config file
268  self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)
269 
270  # Filter translation table
271  self.filters = None
272 
273  # verify that the class variable packageName is set before attempting
274  # to instantiate an instance
275  if self.packageName is None:
276  raise ValueError('class variable packageName must not be None')
277 
 277 
 278  self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)
 279 
280  # Assign a metadata translator if one has not been defined by
281  # subclass. We can sometimes infer one from the RawVisitInfo
282  # class.
283  if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
284  self.translatorClass = self.makeRawVisitInfo.metadataTranslator
285 
286  def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
287  """Initialize mappings
288 
289  For each of the dataset types that we want to be able to read, there
290  are methods that can be created to support them:
291  * map_<dataset> : determine the path for dataset
292  * std_<dataset> : standardize the retrieved dataset
293  * bypass_<dataset> : retrieve the dataset (bypassing the usual
294  retrieval machinery)
295  * query_<dataset> : query the registry
296 
297  Besides the dataset types explicitly listed in the policy, we create
 298  additional, derived datasets for convenience,
299  e.g., reading the header of an image, retrieving only the size of a
300  catalog.
301 
302  Parameters
303  ----------
304  policy : `lsst.daf.persistence.Policy`
305  Policy with per-camera defaults already merged
306  rootStorage : `Storage subclass instance`
307  Interface to persisted repository data.
 308  calibStorage : `Storage subclass instance`
 309  Interface to persisted calib repository data.
310  provided : `list` of `str`
311  Keys provided by the mapper
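
 Examples
 --------
 The derived convenience types become ordinary Butler dataset types;
 e.g., for a FITS ``raw`` dataset (the dataId keys are illustrative)::

     md = butler.get("raw_md", visit=1, ccd=2)        # header only
     filename = butler.get("raw_filename", visit=1, ccd=2)[0]
     wcs = butler.get("raw_wcs", visit=1, ccd=2)      # exposures only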
312  """
313  # Sub-dictionaries (for exposure/calibration/dataset types)
314  imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
315  "obs_base", "ImageMappingDefaults.yaml", "policy"))
316  expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
317  "obs_base", "ExposureMappingDefaults.yaml", "policy"))
318  calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
319  "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
320  dsMappingPolicy = dafPersist.Policy()
321 
322  # Mappings
323  mappingList = (
324  ("images", imgMappingPolicy, ImageMapping),
325  ("exposures", expMappingPolicy, ExposureMapping),
326  ("calibrations", calMappingPolicy, CalibrationMapping),
327  ("datasets", dsMappingPolicy, DatasetMapping)
328  )
329  self.mappings = dict()
330  for name, defPolicy, cls in mappingList:
331  if name in policy:
332  datasets = policy[name]
333 
334  # Centrally-defined datasets
335  defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
336  if os.path.exists(defaultsPath):
337  datasets.merge(dafPersist.Policy(defaultsPath))
338 
339  mappings = dict()
340  setattr(self, name, mappings)
341  for datasetType in datasets.names(True):
342  subPolicy = datasets[datasetType]
343  subPolicy.merge(defPolicy)
344 
345  if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
346  def compositeClosure(dataId, write=False, mapper=None, mapping=None,
347  subPolicy=subPolicy):
348  components = subPolicy.get('composite')
349  assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
350  disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
351  python = subPolicy['python']
352  butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
353  disassembler=disassembler,
354  python=python,
355  dataId=dataId,
356  mapper=self)
357  for name, component in components.items():
358  butlerComposite.add(id=name,
359  datasetType=component.get('datasetType'),
360  setter=component.get('setter', None),
361  getter=component.get('getter', None),
362  subset=component.get('subset', False),
363  inputOnly=component.get('inputOnly', False))
364  return butlerComposite
365  setattr(self, "map_" + datasetType, compositeClosure)
366  # for now at least, don't set up any other handling for this dataset type.
367  continue
368 
369  if name == "calibrations":
370  mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
371  provided=provided, dataRoot=rootStorage)
372  else:
373  mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
374 
375  if datasetType in self.mappings:
376  raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
377  self.keyDict.update(mapping.keys())
378  mappings[datasetType] = mapping
379  self.mappings[datasetType] = mapping
380  if not hasattr(self, "map_" + datasetType):
381  def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
382  return mapping.map(mapper, dataId, write)
383  setattr(self, "map_" + datasetType, mapClosure)
384  if not hasattr(self, "query_" + datasetType):
385  def queryClosure(format, dataId, mapping=mapping):
386  return mapping.lookup(format, dataId)
387  setattr(self, "query_" + datasetType, queryClosure)
388  if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
389  def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
390  return mapping.standardize(mapper, item, dataId)
391  setattr(self, "std_" + datasetType, stdClosure)
392 
393  def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
394  """Set convenience methods on CameraMapper"""
395  mapName = "map_" + datasetType + "_" + suffix
396  bypassName = "bypass_" + datasetType + "_" + suffix
397  queryName = "query_" + datasetType + "_" + suffix
398  if not hasattr(self, mapName):
399  setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
400  if not hasattr(self, bypassName):
401  if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
402  bypassImpl = getattr(self, "bypass_" + datasetType)
403  if bypassImpl is not None:
404  setattr(self, bypassName, bypassImpl)
405  if not hasattr(self, queryName):
406  setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
407 
408  # Filename of dataset
409  setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
410  [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
411  # Metadata from FITS file
412  if subPolicy["storage"] == "FitsStorage": # a FITS image
413  def getMetadata(datasetType, pythonType, location, dataId):
414  md = readMetadata(location.getLocationsWithRoot()[0])
415  fix_header(md, translator_class=self.translatorClass)
416  return md
417 
418  setMethods("md", bypassImpl=getMetadata)
419 
420  # Add support for configuring FITS compression
421  addName = "add_" + datasetType
422  if not hasattr(self, addName):
423  setattr(self, addName, self.getImageCompressionSettings)
424 
425  if name == "exposures":
426  def getSkyWcs(datasetType, pythonType, location, dataId):
427  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
428  return fitsReader.readWcs()
429 
430  setMethods("wcs", bypassImpl=getSkyWcs)
431 
432  def getPhotoCalib(datasetType, pythonType, location, dataId):
433  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
434  return fitsReader.readPhotoCalib()
435 
436  setMethods("photoCalib", bypassImpl=getPhotoCalib)
437 
438  def getVisitInfo(datasetType, pythonType, location, dataId):
439  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
440  return fitsReader.readVisitInfo()
441 
442  setMethods("visitInfo", bypassImpl=getVisitInfo)
443 
444  def getFilter(datasetType, pythonType, location, dataId):
445  fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
446  return fitsReader.readFilter()
447 
448  setMethods("filter", bypassImpl=getFilter)
449 
450  setMethods("detector",
451  mapImpl=lambda dataId, write=False:
452  dafPersist.ButlerLocation(
453  pythonType="lsst.afw.cameraGeom.CameraConfig",
454  cppType="Config",
455  storageName="Internal",
456  locationList="ignored",
457  dataId=dataId,
458  mapper=self,
459  storage=None,
460  ),
461  bypassImpl=lambda datasetType, pythonType, location, dataId:
462  self.camera[self._extractDetectorName(dataId)]
463  )
464 
465  def getBBox(datasetType, pythonType, location, dataId):
466  md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
467  fix_header(md, translator_class=self.translatorClass)
468  return afwImage.bboxFromMetadata(md)
469 
470  setMethods("bbox", bypassImpl=getBBox)
471 
472  elif name == "images":
473  def getBBox(datasetType, pythonType, location, dataId):
474  md = readMetadata(location.getLocationsWithRoot()[0])
475  fix_header(md, translator_class=self.translatorClass)
476  return afwImage.bboxFromMetadata(md)
477  setMethods("bbox", bypassImpl=getBBox)
478 
479  if subPolicy["storage"] == "FitsCatalogStorage": # a FITS catalog
480 
481  def getMetadata(datasetType, pythonType, location, dataId):
482  md = readMetadata(os.path.join(location.getStorage().root,
483  location.getLocations()[0]), hdu=1)
484  fix_header(md, translator_class=self.translatorClass)
485  return md
486 
487  setMethods("md", bypassImpl=getMetadata)
488 
489  # Sub-images
490  if subPolicy["storage"] == "FitsStorage":
491  def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
492  subId = dataId.copy()
493  del subId['bbox']
494  loc = mapping.map(mapper, subId, write)
495  bbox = dataId['bbox']
496  llcX = bbox.getMinX()
497  llcY = bbox.getMinY()
498  width = bbox.getWidth()
499  height = bbox.getHeight()
500  loc.additionalData.set('llcX', llcX)
501  loc.additionalData.set('llcY', llcY)
502  loc.additionalData.set('width', width)
503  loc.additionalData.set('height', height)
504  if 'imageOrigin' in dataId:
505  loc.additionalData.set('imageOrigin',
506  dataId['imageOrigin'])
507  return loc
508 
509  def querySubClosure(key, format, dataId, mapping=mapping):
510  subId = dataId.copy()
511  del subId['bbox']
512  return mapping.lookup(format, subId)
513  setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
514 
515  if subPolicy["storage"] == "FitsCatalogStorage":
516  # Length of catalog
517 
518  def getLen(datasetType, pythonType, location, dataId):
519  md = readMetadata(os.path.join(location.getStorage().root,
520  location.getLocations()[0]), hdu=1)
521  fix_header(md, translator_class=self.translatorClass)
522  return md["NAXIS2"]
523 
524  setMethods("len", bypassImpl=getLen)
525 
526  # Schema of catalog
527  if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
528  setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
529  afwTable.Schema.readFits(os.path.join(location.getStorage().root,
530  location.getLocations()[0])))
531 
532  def _computeCcdExposureId(self, dataId):
533  """Compute the 64-bit (long) identifier for a CCD exposure.
534 
535  Subclasses must override
536 
537  Parameters
538  ----------
539  dataId : `dict`
540  Data identifier with visit, ccd.
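
 Examples
 --------
 A typical override packs the components of the data identifier into a
 single integer; the packing scheme below is illustrative only::

     def _computeCcdExposureId(self, dataId):
         return 200 * dataId["visit"] + dataId["ccd"]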
541  """
542  raise NotImplementedError()
543 
544  def _computeCoaddExposureId(self, dataId, singleFilter):
545  """Compute the 64-bit (long) identifier for a coadd.
546 
547  Subclasses must override
548 
549  Parameters
550  ----------
551  dataId : `dict`
552  Data identifier with tract and patch.
553  singleFilter : `bool`
554  True means the desired ID is for a single-filter coadd, in which
 555  case dataId must contain filter.
556  """
557  raise NotImplementedError()
558 
559  def _search(self, path):
560  """Search for path in the associated repository's storage.
561 
562  Parameters
563  ----------
564  path : string
565  Path that describes an object in the repository associated with
566  this mapper.
567  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
568  indicator will be stripped when searching and so will match
569  filenames without the HDU indicator, e.g. 'foo.fits'. The path
570  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
571 
572  Returns
573  -------
574  string
575  The path for this object in the repository. Will return None if the
576  object can't be found. If the input argument path contained an HDU
577  indicator, the returned path will also contain the HDU indicator.
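
 For example, ``mapper._search("foo.fits[1]")`` matches a file named
 ``foo.fits`` and returns ``"foo.fits[1]"``, or None if no such file
 exists (the filename is illustrative).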
578  """
579  return self.rootStorage.search(path)
580 
581  def backup(self, datasetType, dataId):
582  """Rename any existing object with the given type and dataId.
583 
584  The CameraMapper implementation saves objects in a sequence of e.g.:
585 
586  - foo.fits
587  - foo.fits~1
588  - foo.fits~2
589 
590  All of the backups will be placed in the output repo, however, and will
591  not be removed if they are found elsewhere in the _parent chain. This
592  means that the same file will be stored twice if the previous version
593  was found in an input repo.
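
 For example, ``self.backup("calexp", dataId)`` copies an existing
 ``calexp`` output to ``<path>~1`` before a new one is written (the
 dataset type is illustrative).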
594  """
595 
596  # Calling PosixStorage directly is not the long term solution in this
597  # function, this is work-in-progress on epic DM-6225. The plan is for
598  # parentSearch to be changed to 'search', and search only the storage
599  # associated with this mapper. All searching of parents will be handled
600  # by traversing the container of repositories in Butler.
601 
 602  def firstElement(lst):
 603  """Get the first element in the list, or None if that can't be
 604  done.
 605  """
 606  return lst[0] if lst is not None and len(lst) else None
607 
608  n = 0
609  newLocation = self.map(datasetType, dataId, write=True)
610  newPath = newLocation.getLocations()[0]
611  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
612  path = firstElement(path)
613  oldPaths = []
614  while path is not None:
615  n += 1
616  oldPaths.append((n, path))
617  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
618  path = firstElement(path)
619  for n, oldPath in reversed(oldPaths):
620  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
621 
622  def keys(self):
623  """Return supported keys.
624 
625  Returns
626  -------
627  iterable
628  List of keys usable in a dataset identifier
629  """
630  return iter(self.keyDict.keys())
631 
632  def getKeys(self, datasetType, level):
633  """Return a dict of supported keys and their value types for a given
634  dataset type at a given level of the key hierarchy.
635 
636  Parameters
637  ----------
638  datasetType : `str`
639  Dataset type or None for all dataset types.
640  level : `str` or None
641  Level or None for all levels or '' for the default level for the
642  camera.
643 
644  Returns
645  -------
646  `dict`
647  Keys are strings usable in a dataset identifier, values are their
648  value types.
649  """
650 
651  # not sure if this is how we want to do this. what if None was intended?
652  if level == '':
653  level = self.getDefaultLevel()
654 
655  if datasetType is None:
656  keyDict = copy.copy(self.keyDict)
657  else:
658  keyDict = self.mappings[datasetType].keys()
659  if level is not None and level in self.levels:
660  keyDict = copy.copy(keyDict)
661  for l in self.levels[level]:
662  if l in keyDict:
663  del keyDict[l]
664  return keyDict
665 
666  def getDefaultLevel(self):
667  return self.defaultLevel
668 
669  def getDefaultSubLevel(self, level):
670  if level in self.defaultSubLevels:
671  return self.defaultSubLevels[level]
672  return None
673 
674  @classmethod
675  def getCameraName(cls):
676  """Return the name of the camera that this CameraMapper is for."""
677  className = str(cls)
678  className = className[className.find('.'):-1]
679  m = re.search(r'(\w+)Mapper', className)
680  if m is None:
681  m = re.search(r"class '[\w.]*?(\w+)'", className)
682  name = m.group(1)
683  return name[:1].lower() + name[1:] if name else ''
684 
685  @classmethod
686  def getPackageName(cls):
687  """Return the name of the package containing this CameraMapper."""
688  if cls.packageName is None:
689  raise ValueError('class variable packageName must not be None')
690  return cls.packageName
691 
692  @classmethod
693  def getPackageDir(cls):
694  """Return the base directory of this package"""
695  return getPackageDir(cls.getPackageName())
696 
697  def map_camera(self, dataId, write=False):
698  """Map a camera dataset."""
699  if self.camera is None:
700  raise RuntimeError("No camera dataset available.")
701  actualId = self._transformId(dataId)
702  return dafPersist.ButlerLocation(
703  pythonType="lsst.afw.cameraGeom.CameraConfig",
704  cppType="Config",
705  storageName="ConfigStorage",
706  locationList=self.cameraDataLocation or "ignored",
707  dataId=actualId,
708  mapper=self,
709  storage=self.rootStorage
710  )
711 
712  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
713  """Return the (preloaded) camera object.
714  """
715  if self.camera is None:
716  raise RuntimeError("No camera dataset available.")
717  return self.camera
718 
719  def map_expIdInfo(self, dataId, write=False):
720  return dafPersist.ButlerLocation(
721  pythonType="lsst.obs.base.ExposureIdInfo",
722  cppType=None,
723  storageName="Internal",
724  locationList="ignored",
725  dataId=dataId,
726  mapper=self,
727  storage=self.rootStorage
728  )
729 
730  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
731  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
732  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
733  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
734  return ExposureIdInfo(expId=expId, expBits=expBits)
735 
736  def std_bfKernel(self, item, dataId):
737  """Disable standardization for bfKernel
738 
 739  bfKernel is a calibration product that is a numpy array,
740  unlike other calibration products that are all images;
741  all calibration images are sent through _standardizeExposure
742  due to CalibrationMapping, but we don't want that to happen to bfKernel
743  """
744  return item
745 
746  def std_raw(self, item, dataId):
747  """Standardize a raw dataset by converting it to an Exposure instead
748  of an Image"""
749  return self._standardizeExposure(self.exposures['raw'], item, dataId,
750  trimmed=False, setVisitInfo=True)
751 
752  def map_skypolicy(self, dataId):
753  """Map a sky policy."""
754  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
755  "Internal", None, None, self,
756  storage=self.rootStorage)
757 
758  def std_skypolicy(self, item, dataId):
759  """Standardize a sky policy by returning the one we use."""
760  return self.skypolicy
761 
 762 
763 ###############################################################################
764 # Utility functions
765 ###############################################################################
766 
 767 
768  def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
769  posixIfNoSql=True):
770  """Set up a registry (usually SQLite3), trying a number of possible
771  paths.
772 
773  Parameters
774  ----------
775  name : string
776  Name of registry.
 777  description : `str`
778  Description of registry (for log messages)
779  path : string
780  Path for registry.
 781  policy : Policy
782  Policy that contains the registry name, used if path is None.
783  policyKey : string
784  Key in policy for registry path.
785  storage : Storage subclass
786  Repository Storage to look in.
787  searchParents : bool, optional
788  True if the search for a registry should follow any Butler v1
789  _parent symlinks.
790  posixIfNoSql : bool, optional
791  If an sqlite registry is not found, will create a posix registry if
792  this is True.
793 
794  Returns
795  -------
796  lsst.daf.persistence.Registry
797  Registry object
798  """
799  if path is None and policyKey in policy:
800  path = dafPersist.LogicalLocation(policy[policyKey]).locString()
801  if os.path.isabs(path):
802  raise RuntimeError("Policy should not indicate an absolute path for registry.")
803  if not storage.exists(path):
804  newPath = storage.instanceSearch(path)
805 
806  newPath = newPath[0] if newPath is not None and len(newPath) else None
807  if newPath is None:
808  self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
809  path)
810  path = newPath
811  else:
812  self.log.warn("Unable to locate registry at policy path: %s", path)
813  path = None
814 
 815  # The old Butler API indicated the registry path including the repo folder; the new Butler
 816  # expects the registry to be inside the repo folder. To support the old API, check whether path
 817  # starts with root and, if so, strip root from the path. Currently this only works with PosixStorage.
818  try:
819  root = storage.root
820  if path and (path.startswith(root)):
821  path = path[len(root + '/'):]
822  except AttributeError:
823  pass
824 
825  # determine if there is an sqlite registry and if not, try the posix registry.
826  registry = None
827 
828  def search(filename, description):
829  """Search for file in storage
830 
831  Parameters
832  ----------
833  filename : `str`
834  Filename to search for
835  description : `str`
836  Description of file, for error message.
837 
838  Returns
839  -------
840  path : `str` or `None`
841  Path to file, or None
842  """
843  result = storage.instanceSearch(filename)
844  if result:
845  return result[0]
846  self.log.debug("Unable to locate %s: %s", description, filename)
847  return None
848 
849  # Search for a suitable registry database
850  if path is None:
851  path = search("%s.pgsql" % name, "%s in root" % description)
852  if path is None:
853  path = search("%s.sqlite3" % name, "%s in root" % description)
854  if path is None:
855  path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
856 
857  if path is not None:
858  if not storage.exists(path):
859  newPath = storage.instanceSearch(path)
860  newPath = newPath[0] if newPath is not None and len(newPath) else None
861  if newPath is not None:
862  path = newPath
863  localFileObj = storage.getLocalFile(path)
864  self.log.info("Loading %s registry from %s", description, localFileObj.name)
865  registry = dafPersist.Registry.create(localFileObj.name)
866  localFileObj.close()
867  elif not registry and posixIfNoSql:
868  try:
869  self.log.info("Loading Posix %s registry from %s", description, storage.root)
870  registry = dafPersist.PosixRegistry(storage.root)
871  except Exception:
872  registry = None
873 
874  return registry
875 
876  def _transformId(self, dataId):
877  """Generate a standard ID dict from a camera-specific ID dict.
878 
879  Canonical keys include:
880  - amp: amplifier name
881  - ccd: CCD name (in LSST this is a combination of raft and sensor)
882  The default implementation returns a copy of its input.
883 
884  Parameters
885  ----------
886  dataId : `dict`
887  Dataset identifier; this must not be modified
888 
889  Returns
890  -------
891  `dict`
892  Transformed dataset identifier.
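
 Examples
 --------
 A subclass might map a colloquial key to the canonical one; the key
 names here are illustrative::

     def _transformId(self, dataId):
         actualId = dataId.copy()
         if "ccdnum" in actualId:
             actualId["ccd"] = actualId.pop("ccdnum")
         return actualId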
893  """
894 
895  return dataId.copy()
896 
897  def _mapActualToPath(self, template, actualId):
898  """Convert a template path to an actual path, using the actual data
899  identifier. This implementation is usually sufficient but can be
900  overridden by the subclass.
901 
902  Parameters
903  ----------
904  template : `str`
905  Template path
906  actualId : `dict`
907  Dataset identifier
908 
909  Returns
910  -------
911  `str`
912  Pathname
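
 For example, the template ``"raw/v%(visit)d/ccd%(ccd)02d.fits"`` with
 ``{"visit": 1, "ccd": 2}`` yields ``"raw/v1/ccd02.fits"`` (the template
 and keys are illustrative).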
913  """
914 
915  try:
916  transformedId = self._transformId(actualId)
917  return template % transformedId
918  except Exception as e:
 919  raise RuntimeError("Failed to format %r with data %r: %s" % (template, actualId, e))
920 
921  @staticmethod
922  def getShortCcdName(ccdName):
923  """Convert a CCD name to a form useful as a filename
924 
925  The default implementation converts spaces to underscores.
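
 For example, ``getShortCcdName("R:1,2 S:3,4")`` returns
 ``"R:1,2_S:3,4"``.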
926  """
927  return ccdName.replace(" ", "_")
928 
929  def _extractDetectorName(self, dataId):
930  """Extract the detector (CCD) name from the dataset identifier.
931 
932  The name in question is the detector name used by lsst.afw.cameraGeom.
933 
934  Parameters
935  ----------
936  dataId : `dict`
937  Dataset identifier.
938 
939  Returns
940  -------
941  `str`
942  Detector name
943  """
944  raise NotImplementedError("No _extractDetectorName() function specified")
945 
946  def _extractAmpId(self, dataId):
947  """Extract the amplifier identifer from a dataset identifier.
948 
949  .. note:: Deprecated in 11_0
950 
 951  The amplifier identifier has two parts: the detector name for the CCD
 952  containing the amplifier and the index of the amplifier in the detector.
953 
954  Parameters
955  ----------
956  dataId : `dict`
 957  Dataset identifier
958 
959  Returns
960  -------
961  `tuple`
962  Amplifier identifier
963  """
964 
965  trDataId = self._transformId(dataId)
966  return (trDataId["ccd"], int(trDataId['amp']))
967 
968  def _setAmpDetector(self, item, dataId, trimmed=True):
969  """Set the detector object in an Exposure for an amplifier.
970 
971  Defects are also added to the Exposure based on the detector object.
972 
973  Parameters
974  ----------
975  item : `lsst.afw.image.Exposure`
976  Exposure to set the detector in.
977  dataId : `dict`
978  Dataset identifier
979  trimmed : `bool`
980  Should detector be marked as trimmed? (ignored)
981  """
982 
983  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
984 
985  def _setCcdDetector(self, item, dataId, trimmed=True):
986  """Set the detector object in an Exposure for a CCD.
987 
988  Parameters
989  ----------
990  item : `lsst.afw.image.Exposure`
991  Exposure to set the detector in.
992  dataId : `dict`
993  Dataset identifier
994  trimmed : `bool`
995  Should detector be marked as trimmed? (ignored)
996  """
997  if item.getDetector() is not None:
998  return
999 
1000  detectorName = self._extractDetectorName(dataId)
1001  detector = self.camera[detectorName]
1002  item.setDetector(detector)
1003 
1004  def _setFilter(self, mapping, item, dataId):
1005  """Set the filter object in an Exposure. If the Exposure had a FILTER
1006  keyword, this was already processed during load. But if it didn't,
1007  use the filter from the registry.
1008 
1009  Parameters
1010  ----------
1011  mapping : `lsst.obs.base.Mapping`
1012  Where to get the filter from.
1013  item : `lsst.afw.image.Exposure`
1014  Exposure to set the filter in.
1015  dataId : `dict`
1016  Dataset identifier.
1017  """
1018 
1019  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
1020  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
1021  return
1022 
1023  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1024  return
1025 
1026  actualId = mapping.need(['filter'], dataId)
1027  filterName = actualId['filter']
1028  if self.filters is not None and filterName in self.filters:
1029  filterName = self.filters[filterName]
1030  item.setFilter(afwImage.Filter(filterName))
1031 
1032  # Default standardization function for exposures
1033  def _standardizeExposure(self, mapping, item, dataId, filter=True,
1034  trimmed=True, setVisitInfo=True):
1035  """Default standardization function for images.
1036 
1037  This sets the Detector from the camera geometry
 1038  and optionally sets the Filter. In both cases this saves
1039  having to persist some data in each exposure (or image).
1040 
1041  Parameters
1042  ----------
1043  mapping : `lsst.obs.base.Mapping`
1044  Where to get the values from.
1045  item : image-like object
1046  Can be any of lsst.afw.image.Exposure,
1047  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
1048  or lsst.afw.image.MaskedImage
1049 
1050  dataId : `dict`
1051  Dataset identifier
1052  filter : `bool`
1053  Set filter? Ignored if item is already an exposure
1054  trimmed : `bool`
1055  Should detector be marked as trimmed?
1056  setVisitInfo : `bool`
1057  Should Exposure have its VisitInfo filled out from the metadata?
1058 
1059  Returns
1060  -------
1061  `lsst.afw.image.Exposure`
1062  The standardized Exposure.
1063  """
1064  try:
1065  item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
1066  except Exception as e:
 1067  self.log.error("Could not turn item=%r into an exposure: %s" % (item, e))
1068  raise
1069 
1070  if mapping.level.lower() == "amp":
1071  self._setAmpDetector(item, dataId, trimmed)
1072  elif mapping.level.lower() == "ccd":
1073  self._setCcdDetector(item, dataId, trimmed)
1074 
1075  if filter:
1076  self._setFilter(mapping, item, dataId)
1077 
1078  return item
1079 
1080  def _makeCamera(self, policy, repositoryDir):
1081  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
1082  the camera geometry
1083 
1084  Also set self.cameraDataLocation, if relevant (else it can be left
1085  None).
1086 
1087  This implementation assumes that policy contains an entry "camera"
1088  that points to the subdirectory in this package of camera data;
1089  specifically, that subdirectory must contain:
1090  - a file named `camera.py` that contains persisted camera config
1091  - ampInfo table FITS files, as required by
1092  lsst.afw.cameraGeom.makeCameraFromPath
1093 
1094  Parameters
1095  ----------
1096  policy : `lsst.daf.persistence.Policy`
1097  Policy with per-camera defaults already merged
1098  (PexPolicy only for backward compatibility).
1099  repositoryDir : `str`
1100  Policy repository for the subclassing module (obtained with
1101  getRepositoryPath() on the per-camera default dictionary).
1102  """
1103  if 'camera' not in policy:
1104  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1105  cameraDataSubdir = policy['camera']
1106  self.cameraDataLocation = os.path.normpath(
1107  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1108  cameraConfig = afwCameraGeom.CameraConfig()
1109  cameraConfig.load(self.cameraDataLocation)
1110  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1111  return afwCameraGeom.makeCameraFromPath(
1112  cameraConfig=cameraConfig,
1113  ampInfoPath=ampInfoPath,
1114  shortNameFunc=self.getShortCcdName,
1115  pupilFactoryClass=self.PupilFactoryClass
1116  )
1117 
1118  def getRegistry(self):
1119  """Get the registry used by this mapper.
1120 
1121  Returns
1122  -------
1123  Registry or None
1124  The registry used by this mapper for this mapper's repository.
1125  """
1126  return self.registry
1127 
1128  def getImageCompressionSettings(self, datasetType, dataId):
1129  """Stuff image compression settings into a daf.base.PropertySet
1130 
1131  This goes into the ButlerLocation's "additionalData", which gets
1132  passed into the boost::persistence framework.
1133 
1134  Parameters
1135  ----------
1136  datasetType : `str`
1137  Type of dataset for which to get the image compression settings.
1138  dataId : `dict`
1139  Dataset identifier.
1140 
1141  Returns
1142  -------
1143  additionalData : `lsst.daf.base.PropertySet`
1144  Image compression settings.
1145  """
1146  mapping = self.mappings[datasetType]
1147  recipeName = mapping.recipe
1148  storageType = mapping.storage
1149  if storageType not in self._writeRecipes:
1150  return dafBase.PropertySet()
1151  if recipeName not in self._writeRecipes[storageType]:
1152  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1153  (datasetType, storageType, recipeName))
1154  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1155  seed = hash(tuple(dataId.items())) % 2**31
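 # NB: hash() of a tuple containing str keys is salted per process
 # (PYTHONHASHSEED), so this seed is reproducible within a run but not
 # across runs unless hashing is fixed.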
1156  for plane in ("image", "mask", "variance"):
1157  if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
1158  recipe.set(plane + ".scaling.seed", seed)
1159  return recipe
1160 
1161  def _initWriteRecipes(self):
1162  """Read the recipes for writing files
1163 
1164  These recipes are currently used for configuring FITS compression,
1165  but they could have wider uses for configuring different flavors
1166  of the storage types. A recipe is referred to by a symbolic name,
1167  which has associated settings. These settings are stored as a
1168  `PropertySet` so they can easily be passed down to the
1169  boost::persistence framework as the "additionalData" parameter.
1170 
1171  The list of recipes is written in YAML. A default recipe and
1172  some other convenient recipes are in obs_base/policy/writeRecipes.yaml
1173  and these may be overridden or supplemented by the individual obs_*
1174  packages' own policy/writeRecipes.yaml files.
1175 
 1176  Recipes are grouped by the storage type. Currently, only the
 1177  ``FitsStorage`` storage type uses recipes, which configure
 1178  FITS image compression.
1179 
1180  Each ``FitsStorage`` recipe for FITS compression should define
1181  "image", "mask" and "variance" entries, each of which may contain
1182  "compression" and "scaling" entries. Defaults will be provided for
1183  any missing elements under "compression" and "scaling".
1184 
1185  The allowed entries under "compression" are:
1186 
1187  * algorithm (string): compression algorithm to use
1188  * rows (int): number of rows per tile (0 = entire dimension)
1189  * columns (int): number of columns per tile (0 = entire dimension)
1190  * quantizeLevel (float): cfitsio quantization level
1191 
1192  The allowed entries under "scaling" are:
1193 
1194  * algorithm (string): scaling algorithm to use
1195  * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
1196  * fuzz (bool): fuzz the values when quantising floating-point values?
1197  * seed (long): seed for random number generator when fuzzing
1198  * maskPlanes (list of string): mask planes to ignore when doing
1199  statistics
1200  * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
1201  * quantizePad: number of stdev to allow on the low side (for
1202  STDEV_POSITIVE/NEGATIVE)
1203  * bscale: manually specified BSCALE (for MANUAL scaling)
 1204  * bzero: manually specified BZERO (for MANUAL scaling)
1205 
1206  A very simple example YAML recipe:
1207 
1208  FitsStorage:
1209  default:
1210  image: &default
1211  compression:
1212  algorithm: GZIP_SHUFFLE
1213  mask: *default
1214  variance: *default
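
 A dataset's mapping policy typically selects a recipe by name through
 its ``recipe`` entry (defaulting to "default"); the dataset type and
 recipe name below are illustrative::

     calexp:
         recipe: lossyBasic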
1215  """
1216  recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
1217  recipes = dafPersist.Policy(recipesFile)
1218  supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
1219  validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
1220  if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
1221  supplements = dafPersist.Policy(supplementsFile)
1222  # Don't allow overrides, only supplements
1223  for entry in validationMenu:
1224  intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
1225  if intersection:
1226  raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
1227  (supplementsFile, entry, recipesFile, intersection))
1228  recipes.update(supplements)
1229 
1230  self._writeRecipes = {}
1231  for storageType in recipes.names(True):
1232  if "default" not in recipes[storageType]:
1233  raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
1234  (storageType, recipesFile))
1235  self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1236 
1237 
1238 def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
1239  """Generate an Exposure from an image-like object
1240 
1241  If the image is a DecoratedImage then also set its WCS and metadata
1242  (Image and MaskedImage are missing the necessary metadata
1243  and Exposure already has those set)
1244 
1245  Parameters
1246  ----------
 1247  image : Image-like object
 1248  Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
 1249  Exposure.
 dataId : `dict`, optional
 Dataset identifier; used to compute the exposure ID when creating the
 VisitInfo.
 mapper : `CameraMapper`, optional
 Mapper used to make the raw VisitInfo; if None, the VisitInfo is not set.
 logger : `lsst.log.Log`, optional
 Logger for warning messages.
 setVisitInfo : `bool`, optional
 If True, set the VisitInfo from the metadata when possible.
1250 
1251  Returns
1252  -------
1253  `lsst.afw.image.Exposure`
1254  Exposure containing input image.
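
 Examples
 --------
 A plain image is wrapped into an Exposure with no extra metadata::

     exp = exposureFromImage(afwImage.ImageF(10, 10))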
1255  """
1256  metadata = None
1257  if isinstance(image, afwImage.MaskedImage):
1258  exposure = afwImage.makeExposure(image)
1259  elif isinstance(image, afwImage.DecoratedImage):
1260  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1261  metadata = image.getMetadata()
1262  try:
1263  wcs = afwGeom.makeSkyWcs(metadata, strip=True)
1264  exposure.setWcs(wcs)
1265  except pexExcept.TypeError as e:
1266  # raised on failure to create a wcs (and possibly others)
1267  if logger is None:
1268  logger = lsstLog.Log.getLogger("CameraMapper")
1269  logger.debug("wcs set to None; insufficient information found in metadata to create a valid wcs:"
1270  " %s", e.args[0])
1271 
1272  exposure.setMetadata(metadata)
1273  elif isinstance(image, afwImage.Exposure):
1274  # Exposure
1275  exposure = image
1276  metadata = exposure.getMetadata()
1277  else:
1278  # Image
1279  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1280  #
1281  # set VisitInfo if we can
1282  #
1283  if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
1284  if metadata is not None:
1285  if mapper is None:
1286  if not logger:
1287  logger = lsstLog.Log.getLogger("CameraMapper")
1288  logger.warn("I can only set the VisitInfo if you provide a mapper")
1289  else:
1290  exposureId = mapper._computeCcdExposureId(dataId)
1291  visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1292 
1293  exposure.getInfo().setVisitInfo(visitInfo)
1294 
1295  return exposure
1296 
 1297 
1298 def validateRecipeFitsStorage(recipes):
1299  """Validate recipes for FitsStorage
1300 
1301  The recipes are supplemented with default values where appropriate.
1302 
1303  TODO: replace this custom validation code with Cerberus (DM-11846)
1304 
1305  Parameters
1306  ----------
1307  recipes : `lsst.daf.persistence.Policy`
1308  FitsStorage recipes to validate.
1309 
1310  Returns
1311  -------
1312  validated : `lsst.daf.base.PropertySet`
1313  Validated FitsStorage recipe.
1314 
1315  Raises
1316  ------
1317  `RuntimeError`
1318  If validation fails.
1319  """
 1320  # Schemas define what should be there and the default values; the type of each
 1321  # default value also defines the expected type of the entry.
1322  compressionSchema = {
1323  "algorithm": "NONE",
1324  "rows": 1,
1325  "columns": 0,
1326  "quantizeLevel": 0.0,
1327  }
1328  scalingSchema = {
1329  "algorithm": "NONE",
1330  "bitpix": 0,
1331  "maskPlanes": ["NO_DATA"],
1332  "seed": 0,
1333  "quantizeLevel": 4.0,
1334  "quantizePad": 5.0,
1335  "fuzz": True,
1336  "bscale": 1.0,
1337  "bzero": 0.0,
1338  }
1339 
1340  def checkUnrecognized(entry, allowed, description):
1341  """Check to see if the entry contains unrecognised keywords"""
1342  unrecognized = set(entry.keys()) - set(allowed)
1343  if unrecognized:
1344  raise RuntimeError(
1345  "Unrecognized entries when parsing image compression recipe %s: %s" %
1346  (description, unrecognized))
1347 
1348  validated = {}
1349  for name in recipes.names(True):
1350  checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
1351  rr = dafBase.PropertySet()
1352  validated[name] = rr
1353  for plane in ("image", "mask", "variance"):
1354  checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
1355  name + "->" + plane)
1356 
1357  for settings, schema in (("compression", compressionSchema),
1358  ("scaling", scalingSchema)):
1359  prefix = plane + "." + settings
1360  if settings not in recipes[name][plane]:
1361  for key in schema:
1362  rr.set(prefix + "." + key, schema[key])
1363  continue
1364  entry = recipes[name][plane][settings]
1365  checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
1366  for key in schema:
1367  value = type(schema[key])(entry[key]) if key in entry else schema[key]
1368  rr.set(prefix + "." + key, value)
1369  return validated