lsst.obs.base  14.0-23-g83b7482+1
cameraMapper.py
Go to the documentation of this file.
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 from builtins import str
24 import copy
25 import os
26 import pyfits # required by _makeDefectsDict until defects are written as AFW tables
27 import re
28 import weakref
29 import lsst.daf.persistence as dafPersist
30 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
31 import lsst.daf.base as dafBase
32 import lsst.afw.geom as afwGeom
33 import lsst.afw.image as afwImage
34 import lsst.afw.table as afwTable
35 from lsst.afw.fits import readMetadata
36 import lsst.afw.cameraGeom as afwCameraGeom
37 import lsst.log as lsstLog
38 import lsst.pex.policy as pexPolicy
39 import lsst.pex.exceptions as pexExcept
40 from .exposureIdInfo import ExposureIdInfo
41 from .makeRawVisitInfo import MakeRawVisitInfo
42 from lsst.utils import getPackageDir
43 
44 """This module defines the CameraMapper base class."""
45 
46 
47 class CameraMapper(dafPersist.Mapper):
48 
49  """CameraMapper is a base class for mappers that handle images from a
50  camera and products derived from them. This provides an abstraction layer
51  between the data on disk and the code.
52 
53  Public methods: keys, queryMetadata, getDatasetTypes, map,
54  canStandardize, standardize
55 
56  Mappers for specific data sources (e.g., CFHT Megacam, LSST
57  simulations, etc.) should inherit this class.
58 
59  The CameraMapper manages datasets within a "root" directory. Note that
60  writing to a dataset present in the input root will hide the existing
61  dataset but not overwrite it. See #2160 for design discussion.
62 
63  A camera is assumed to consist of one or more rafts, each composed of
64  multiple CCDs. Each CCD is in turn composed of one or more amplifiers
65  (amps). A camera is also assumed to have a camera geometry description
66  (CameraGeom object) as a policy file, a filter description (Filter class
67  static configuration) as another policy file, and an optional defects
68  description directory.
69 
70  Information from the camera geometry and defects are inserted into all
71  Exposure objects returned.
72 
73  The mapper uses one or two registries to retrieve metadata about the
74  images. The first is a registry of all raw exposures. This must contain
75  the time of the observation. One or more tables (or the equivalent)
76  within the registry are used to look up data identifier components that
77  are not specified by the user (e.g. filter) and to return results for
78  metadata queries. The second is an optional registry of all calibration
79  data. This should contain validity start and end entries for each
80  calibration dataset in the same timescale as the observation time.
81 
82  Subclasses will typically set MakeRawVisitInfoClass:
83 
84  MakeRawVisitInfoClass: a class variable that points to a subclass of
85  MakeRawVisitInfo, a functor that creates an
86  lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
87 
88  Subclasses must provide the following methods:
89 
90  _extractDetectorName(self, dataId): returns the detector name for a CCD
91  (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
92  a dataset identifier referring to that CCD or a subcomponent of it.
93 
94  _computeCcdExposureId(self, dataId): see below
95 
96  _computeCoaddExposureId(self, dataId, singleFilter): see below
97 
98  Subclasses may also need to override the following methods:
99 
100  _transformId(self, dataId): transformation of a data identifier
101  from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"),
102  including making suitable for path expansion (e.g. removing commas).
103  The default implementation does nothing. Note that this
104  method should not modify its input parameter.
105 
106  getShortCcdName(self, ccdName): a static method that returns a shortened name
107  suitable for use as a filename. The default version converts spaces to underscores.
108 
109  _getCcdKeyVal(self, dataId): return a CCD key and value
110  by which to look up defects in the defects registry.
111  The default value returns ("ccd", detector name)
112 
113  _mapActualToPath(self, template, actualId): convert a template path to an
114  actual path, using the actual dataset identifier.
115 
116  The mapper's behaviors are largely specified by the policy file.
117  See the MapperDictionary.paf for descriptions of the available items.
118 
119  The 'exposures', 'calibrations', and 'datasets' subpolicies configure
120  mappings (see Mappings class).
121 
122  Common default mappings for all subclasses can be specified in the
123  "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides
124  a simple way to add a product to all camera mappers.
125 
126  Functions to map (provide a path to the data given a dataset
127  identifier dictionary) and standardize (convert data into some standard
128  format or type) may be provided in the subclass as "map_{dataset type}"
129  and "std_{dataset type}", respectively.
130 
131  If non-Exposure datasets cannot be retrieved using standard
132  daf_persistence methods alone, a "bypass_{dataset type}" function may be
133  provided in the subclass to return the dataset instead of using the
134  "datasets" subpolicy.
135 
136  Implementations of map_camera and bypass_camera that should typically be
137  sufficient are provided in this base class.
138 
139  @todo
 140  * Handle defects the same way as all other calibration products, using the calibration registry
141  * Instead of auto-loading the camera at construction time, load it from the calibration registry
142  * Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention
143  of pyfits from this package.
144  """
145  packageName = None
146 
147  # a class or subclass of MakeRawVisitInfo, a functor that makes an
148  # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
149  MakeRawVisitInfoClass = MakeRawVisitInfo
150 
151  # a class or subclass of PupilFactory
152  PupilFactoryClass = afwCameraGeom.PupilFactory
153 
154  def __init__(self, policy, repositoryDir,
155  root=None, registry=None, calibRoot=None, calibRegistry=None,
156  provided=None, parentRegistry=None, repositoryCfg=None):
157  """Initialize the CameraMapper.
158 
159  Parameters
160  ----------
161  policy : daf_persistence.Policy,
162  Can also be pexPolicy.Policy, only for backward compatibility.
163  Policy with per-camera defaults already merged.
164  repositoryDir : string
165  Policy repository for the subclassing module (obtained with
166  getRepositoryPath() on the per-camera default dictionary).
167  root : string, optional
168  Path to the root directory for data.
169  registry : string, optional
170  Path to registry with data's metadata.
171  calibRoot : string, optional
172  Root directory for calibrations.
173  calibRegistry : string, optional
174  Path to registry with calibrations' metadata.
175  provided : list of string, optional
176  Keys provided by the mapper.
177  parentRegistry : Registry subclass, optional
178  Registry from a parent repository that may be used to look up
179  data's metadata.
180  repositoryCfg : daf_persistence.RepositoryCfg or None, optional
181  The configuration information for the repository this mapper is
182  being used with.
183  """
184 
185  dafPersist.Mapper.__init__(self)
186 
187  self.log = lsstLog.Log.getLogger("CameraMapper")
188 
189  if root:
190  self.root = root
191  elif repositoryCfg:
192  self.root = repositoryCfg.root
193  else:
194  self.root = None
195  if isinstance(policy, pexPolicy.Policy):
196  policy = dafPersist.Policy(policy)
197 
198  repoPolicy = repositoryCfg.policy if repositoryCfg else None
199  if repoPolicy is not None:
200  policy.update(repoPolicy)
201 
202  defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
203  "MapperDictionary.paf",
204  "policy")
205  dictPolicy = dafPersist.Policy(defaultPolicyFile)
206  policy.merge(dictPolicy)
207 
208  # Levels
209  self.levels = dict()
210  if 'levels' in policy:
211  levelsPolicy = policy['levels']
212  for key in levelsPolicy.names(True):
213  self.levels[key] = set(levelsPolicy.asArray(key))
214  self.defaultLevel = policy['defaultLevel']
215  self.defaultSubLevels = dict()
216  if 'defaultSubLevels' in policy:
217  self.defaultSubLevels = policy['defaultSubLevels']
218 
219  # Root directories
220  if root is None:
221  root = "."
222  root = dafPersist.LogicalLocation(root).locString()
223 
224  self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
225 
226  # If the calibRoot is passed in, use that. If not and it's indicated in
227  # the policy, use that. And otherwise, the calibs are in the regular
228  # root.
229  # If the location indicated by the calib root does not exist, do not
230  # create it.
231  calibStorage = None
232  if calibRoot is not None:
233  calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
234  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
235  create=False)
236  else:
237  calibRoot = policy.get('calibRoot', None)
238  if calibRoot:
239  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
240  create=False)
241  if calibStorage is None:
242  calibStorage = self.rootStorage
243 
244  self.root = root
245 
246  # Registries
247  self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
248  self.rootStorage, searchParents=False,
249  posixIfNoSql=(not parentRegistry))
250  if not self.registry:
251  self.registry = parentRegistry
252  needCalibRegistry = policy.get('needCalibRegistry', None)
253  if needCalibRegistry:
254  if calibStorage:
255  self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
256  "calibRegistryPath", calibStorage,
257  posixIfNoSql=False) # NB never use posix for calibs
258  else:
259  raise RuntimeError(
260  "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
261  "calibRoot ivar:%s or policy['calibRoot']:%s" %
262  (calibRoot, policy.get('calibRoot', None)))
263  else:
264  self.calibRegistry = None
265 
266  # Dict of valid keys and their value types
267  self.keyDict = dict()
268 
269  self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
270  self._initWriteRecipes()
271 
272  # Camera geometry
273  self.cameraDataLocation = None # path to camera geometry config file
274  self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)
275 
276  # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
277  # camera package, which is on the local filesystem.
278  self.defectRegistry = None
279  if 'defects' in policy:
280  self.defectPath = os.path.join(repositoryDir, policy['defects'])
281  defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
282  self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
283 
284  # Filter translation table
285  self.filters = None
286 
287  # Skytile policy
288  self.skypolicy = policy['skytiles']
289 
290  # verify that the class variable packageName is set before attempting
291  # to instantiate an instance
292  if self.packageName is None:
293  raise ValueError('class variable packageName must not be None')
294 
296 
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there are
        methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g., reading
        the header of an image, retrieving only the size of a catalog.

        @param policy       (Policy) Policy with per-camera defaults already merged
        @param rootStorage  (Storage subclass instance) Interface to persisted repository data
        @param calibStorage (Storage subclass instance) Interface to persisted calib repository data
        @param provided     (list of strings) Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))

        # Mappings: each policy section name is paired with its defaults and
        # the Mapping subclass that handles it.
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                # Per-category dict of mappings, also exposed as an attribute
                # (self.images, self.exposures, ...).
                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    # Composite datasets get a map_ method that returns a
                    # ButlerComposite describing the components; no other
                    # handlers are installed for them.
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        # NOTE: subPolicy=subPolicy binds the current loop
                        # value at definition time (avoids late binding).
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    # Calibration mappings also need the calib registry/storage.
                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping

                    # Install default map_/query_/std_ handlers unless the
                    # subclass already defines them. mapping=mapping binds the
                    # current mapping; weakref.proxy avoids a reference cycle
                    # between the mapper and its bound closures.
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            # Fall back to the dataset's own bypass_ method if
                            # no specific implementation was supplied.
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(location.getLocationsWithRoot()[0]))

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            # Derived datasets reading individual pieces of an
                            # exposure's FITS header.
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("filter",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
                            # "detector" is served from the in-memory camera,
                            # not from disk; the map impl is a stub location.
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                       dafPersist.ButlerLocation(
                                           pythonType="lsst.afw.cameraGeom.CameraConfig",
                                           cppType="Config",
                                           storageName="Internal",
                                           locationList="ignored",
                                           dataId=dataId,
                                           mapper=self,
                                           storage=None,
                                       ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       self.camera[self._extractDetectorName(dataId)]
                                       )
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0], hdu=1)))

                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]),
                                                hdu=1).get("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
499 
500  def _computeCcdExposureId(self, dataId):
501  """Compute the 64-bit (long) identifier for a CCD exposure.
502 
503  Subclasses must override
504 
505  @param dataId (dict) Data identifier with visit, ccd
506  """
507  raise NotImplementedError()
508 
509  def _computeCoaddExposureId(self, dataId, singleFilter):
510  """Compute the 64-bit (long) identifier for a coadd.
511 
512  Subclasses must override
513 
514  @param dataId (dict) Data identifier with tract and patch.
515  @param singleFilter (bool) True means the desired ID is for a single-
516  filter coadd, in which case dataId
517  must contain filter.
518  """
519  raise NotImplementedError()
520 
521  def _search(self, path):
522  """Search for path in the associated repository's storage.
523 
524  Parameters
525  ----------
526  path : string
527  Path that describes an object in the repository associated with
528  this mapper.
529  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
530  indicator will be stripped when searching and so will match
531  filenames without the HDU indicator, e.g. 'foo.fits'. The path
532  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
533 
534  Returns
535  -------
536  string
537  The path for this object in the repository. Will return None if the
538  object can't be found. If the input argument path contained an HDU
539  indicator, the returned path will also contain the HDU indicator.
540  """
541  return self.rootStorage.search(path)
542 
543  def backup(self, datasetType, dataId):
544  """Rename any existing object with the given type and dataId.
545 
546  The CameraMapper implementation saves objects in a sequence of e.g.:
547  foo.fits
548  foo.fits~1
549  foo.fits~2
550  All of the backups will be placed in the output repo, however, and will
551  not be removed if they are found elsewhere in the _parent chain. This
552  means that the same file will be stored twice if the previous version was
553  found in an input repo.
554  """
555 
556  # Calling PosixStorage directly is not the long term solution in this
557  # function, this is work-in-progress on epic DM-6225. The plan is for
558  # parentSearch to be changed to 'search', and search only the storage
559  # associated with this mapper. All searching of parents will be handled
560  # by traversing the container of repositories in Butler.
561 
562  def firstElement(list):
563  """Get the first element in the list, or None if that can't be done.
564  """
565  return list[0] if list is not None and len(list) else None
566 
567  n = 0
568  newLocation = self.map(datasetType, dataId, write=True)
569  newPath = newLocation.getLocations()[0]
570  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
571  path = firstElement(path)
572  oldPaths = []
573  while path is not None:
574  n += 1
575  oldPaths.append((n, path))
576  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
577  path = firstElement(path)
578  for n, oldPath in reversed(oldPaths):
579  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
580 
581  def keys(self):
582  """Return supported keys.
583  @return (iterable) List of keys usable in a dataset identifier"""
584  return iter(self.keyDict.keys())
585 
586  def getKeys(self, datasetType, level):
587  """Return a dict of supported keys and their value types for a given dataset
588  type at a given level of the key hierarchy.
589 
590  @param datasetType (str) dataset type or None for all dataset types
591  @param level (str) level or None for all levels or '' for the default level for the camera
592  @return (dict) dict keys are strings usable in a dataset identifier; values are their value types"""
593 
594  # not sure if this is how we want to do this. what if None was intended?
595  if level == '':
596  level = self.getDefaultLevel()
597 
598  if datasetType is None:
599  keyDict = copy.copy(self.keyDict)
600  else:
601  keyDict = self.mappings[datasetType].keys()
602  if level is not None and level in self.levels:
603  keyDict = copy.copy(keyDict)
604  for l in self.levels[level]:
605  if l in keyDict:
606  del keyDict[l]
607  return keyDict
608 
609  def getDefaultLevel(self):
610  return self.defaultLevel
611 
612  def getDefaultSubLevel(self, level):
613  if level in self.defaultSubLevels:
614  return self.defaultSubLevels[level]
615  return None
616 
617  @classmethod
618  def getCameraName(cls):
619  """Return the name of the camera that this CameraMapper is for."""
620  className = str(cls)
621  className = className[className.find('.'):-1]
622  m = re.search(r'(\w+)Mapper', className)
623  if m is None:
624  m = re.search(r"class '[\w.]*?(\w+)'", className)
625  name = m.group(1)
626  return name[:1].lower() + name[1:] if name else ''
627 
628  @classmethod
629  def getPackageName(cls):
630  """Return the name of the package containing this CameraMapper."""
631  if cls.packageName is None:
632  raise ValueError('class variable packageName must not be None')
633  return cls.packageName
634 
635  @classmethod
636  def getPackageDir(cls):
637  """Return the base directory of this package"""
638  return getPackageDir(cls.getPackageName())
639 
640  def map_camera(self, dataId, write=False):
641  """Map a camera dataset."""
642  if self.camera is None:
643  raise RuntimeError("No camera dataset available.")
644  actualId = self._transformId(dataId)
645  return dafPersist.ButlerLocation(
646  pythonType="lsst.afw.cameraGeom.CameraConfig",
647  cppType="Config",
648  storageName="ConfigStorage",
649  locationList=self.cameraDataLocation or "ignored",
650  dataId=actualId,
651  mapper=self,
652  storage=self.rootStorage
653  )
654 
655  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
656  """Return the (preloaded) camera object.
657  """
658  if self.camera is None:
659  raise RuntimeError("No camera dataset available.")
660  return self.camera
661 
662  def map_defects(self, dataId, write=False):
663  """Map defects dataset.
664 
665  @return a very minimal ButlerLocation containing just the locationList field
666  (just enough information that bypass_defects can use it).
667  """
668  defectFitsPath = self._defectLookup(dataId=dataId)
669  if defectFitsPath is None:
670  raise RuntimeError("No defects available for dataId=%s" % (dataId,))
671 
672  return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
673  dataId, self,
674  storage=self.rootStorage)
675 
676  def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
677  """Return a defect based on the butler location returned by map_defects
678 
679  @param[in] butlerLocation: a ButlerLocation with locationList = path to defects FITS file
680  @param[in] dataId: the usual data ID; "ccd" must be set
681 
682  Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation
683  into an object, which is what we want for now, since that conversion is a bit tricky.
684  """
685  detectorName = self._extractDetectorName(dataId)
686  defectsFitsPath = butlerLocation.locationList[0]
687  with pyfits.open(defectsFitsPath) as hduList:
688  for hdu in hduList[1:]:
689  if hdu.header["name"] != detectorName:
690  continue
691 
692  defectList = []
693  for data in hdu.data:
694  bbox = afwGeom.Box2I(
695  afwGeom.Point2I(int(data['x0']), int(data['y0'])),
696  afwGeom.Extent2I(int(data['width']), int(data['height'])),
697  )
698  defectList.append(afwImage.DefectBase(bbox))
699  return defectList
700 
701  raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
702 
703  def map_expIdInfo(self, dataId, write=False):
704  return dafPersist.ButlerLocation(
705  pythonType="lsst.obs.base.ExposureIdInfo",
706  cppType=None,
707  storageName="Internal",
708  locationList="ignored",
709  dataId=dataId,
710  mapper=self,
711  storage=self.rootStorage
712  )
713 
714  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
715  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
716  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
717  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
718  return ExposureIdInfo(expId=expId, expBits=expBits)
719 
720  def std_bfKernel(self, item, dataId):
721  """Disable standardization for bfKernel
722 
723  bfKernel is a calibration product that is numpy array,
724  unlike other calibration products that are all images;
725  all calibration images are sent through _standardizeExposure
726  due to CalibrationMapping, but we don't want that to happen to bfKernel
727  """
728  return item
729 
730  def std_raw(self, item, dataId):
731  """Standardize a raw dataset by converting it to an Exposure instead of an Image"""
732  return self._standardizeExposure(self.exposures['raw'], item, dataId,
733  trimmed=False, setVisitInfo=True)
734 
735  def map_skypolicy(self, dataId):
736  """Map a sky policy."""
737  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
738  "Internal", None, None, self,
739  storage=self.rootStorage)
740 
741  def std_skypolicy(self, item, dataId):
742  """Standardize a sky policy by returning the one we use."""
743  return self.skypolicy
744 
745 
750 
751  def _getCcdKeyVal(self, dataId):
752  """Return CCD key and value used to look a defect in the defect registry
753 
754  The default implementation simply returns ("ccd", full detector name)
755  """
756  return ("ccd", self._extractDetectorName(dataId))
757 
758  def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
759  posixIfNoSql=True):
760  """Set up a registry (usually SQLite3), trying a number of possible
761  paths.
762 
763  Parameters
764  ----------
765  name : string
766  Name of registry.
767  description: `str`
768  Description of registry (for log messages)
769  path : string
770  Path for registry.
771  policy : string
772  Policy that contains the registry name, used if path is None.
773  policyKey : string
774  Key in policy for registry path.
775  storage : Storage subclass
776  Repository Storage to look in.
777  searchParents : bool, optional
778  True if the search for a registry should follow any Butler v1
779  _parent symlinks.
780  posixIfNoSql : bool, optional
781  If an sqlite registry is not found, will create a posix registry if
782  this is True.
783 
784  Returns
785  -------
786  lsst.daf.persistence.Registry
787  Registry object
788  """
789  if path is None and policyKey in policy:
790  path = dafPersist.LogicalLocation(policy[policyKey]).locString()
791  if os.path.isabs(path):
792  raise RuntimeError("Policy should not indicate an absolute path for registry.")
793  if not storage.exists(path):
794  newPath = storage.instanceSearch(path)
795 
796  newPath = newPath[0] if newPath is not None and len(newPath) else None
797  if newPath is None:
798  self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
799  path)
800  path = newPath
801  else:
802  self.log.warn("Unable to locate registry at policy path: %s", path)
803  path = None
804 
805  # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
806  # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
807  # root from path. Currently only works with PosixStorage
808  try:
809  root = storage.root
810  if path and (path.startswith(root)):
811  path = path[len(root + '/'):]
812  except AttributeError:
813  pass
814 
815  # determine if there is an sqlite registry and if not, try the posix registry.
816  registry = None
817 
818  def search(filename, description):
819  """Search for file in storage
820 
821  Parameters
822  ----------
823  filename : `str`
824  Filename to search for
825  description : `str`
826  Description of file, for error message.
827 
828  Returns
829  -------
830  path : `str` or `None`
831  Path to file, or None
832  """
833  result = storage.instanceSearch(filename)
834  if result:
835  return result[0]
836  self.log.debug("Unable to locate %s: %s", description, filename)
837  return None
838 
839  # Search for a suitable registry database
840  if path is None:
841  path = search("%s.pgsql" % name, "%s in root" % description)
842  if path is None:
843  path = search("%s.sqlite3" % name, "%s in root" % description)
844  if path is None:
845  path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
846 
847  if path is not None:
848  if not storage.exists(path):
849  newPath = storage.instanceSearch(path)
850  newPath = newPath[0] if newPath is not None and len(newPath) else None
851  if newPath is not None:
852  path = newPath
853  localFileObj = storage.getLocalFile(path)
854  self.log.info("Loading %s registry from %s", description, localFileObj.name)
855  registry = dafPersist.Registry.create(localFileObj.name)
856  localFileObj.close()
857  elif not registry and posixIfNoSql:
858  try:
859  self.log.info("Loading Posix %s registry from %s", description, storage.root)
860  registry = dafPersist.PosixRegistry(storage.root)
861  except:
862  registry = None
863 
864  return registry
865 
866  def _transformId(self, dataId):
867  """Generate a standard ID dict from a camera-specific ID dict.
868 
869  Canonical keys include:
870  - amp: amplifier name
871  - ccd: CCD name (in LSST this is a combination of raft and sensor)
872  The default implementation returns a copy of its input.
873 
874  @param dataId[in] (dict) Dataset identifier; this must not be modified
875  @return (dict) Transformed dataset identifier"""
876 
877  return dataId.copy()
878 
879  def _mapActualToPath(self, template, actualId):
880  """Convert a template path to an actual path, using the actual data
881  identifier. This implementation is usually sufficient but can be
882  overridden by the subclass.
883  @param template (string) Template path
884  @param actualId (dict) Dataset identifier
885  @return (string) Pathname"""
886 
887  try:
888  transformedId = self._transformId(actualId)
889  return template % transformedId
890  except Exception as e:
891  raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
892 
893  @staticmethod
894  def getShortCcdName(ccdName):
895  """Convert a CCD name to a form useful as a filename
896 
897  The default implementation converts spaces to underscores.
898  """
899  return ccdName.replace(" ", "_")
900 
901  def _extractDetectorName(self, dataId):
902  """Extract the detector (CCD) name from the dataset identifier.
903 
904  The name in question is the detector name used by lsst.afw.cameraGeom.
905 
906  @param dataId (dict) Dataset identifier
907  @return (string) Detector name
908  """
909  raise NotImplementedError("No _extractDetectorName() function specified")
910 
911  def _extractAmpId(self, dataId):
912  """Extract the amplifier identifer from a dataset identifier.
913 
914  @warning this is deprecated; DO NOT USE IT
915 
916  amplifier identifier has two parts: the detector name for the CCD
917  containing the amplifier and index of the amplifier in the detector.
918  @param dataId (dict) Dataset identifer
919  @return (tuple) Amplifier identifier"""
920 
921  trDataId = self._transformId(dataId)
922  return (trDataId["ccd"], int(trDataId['amp']))
923 
924  def _setAmpDetector(self, item, dataId, trimmed=True):
925  """Set the detector object in an Exposure for an amplifier.
926  Defects are also added to the Exposure based on the detector object.
927  @param[in,out] item (lsst.afw.image.Exposure)
928  @param dataId (dict) Dataset identifier
929  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
930 
931  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
932 
933  def _setCcdDetector(self, item, dataId, trimmed=True):
934  """Set the detector object in an Exposure for a CCD.
935  @param[in,out] item (lsst.afw.image.Exposure)
936  @param dataId (dict) Dataset identifier
937  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
938 
939  if item.getDetector() is not None:
940  return
941 
942  detectorName = self._extractDetectorName(dataId)
943  detector = self.camera[detectorName]
944  item.setDetector(detector)
945 
946  def _setFilter(self, mapping, item, dataId):
947  """Set the filter object in an Exposure. If the Exposure had a FILTER
948  keyword, this was already processed during load. But if it didn't,
949  use the filter from the registry.
950  @param mapping (lsst.obs.base.Mapping)
951  @param[in,out] item (lsst.afw.image.Exposure)
952  @param dataId (dict) Dataset identifier"""
953 
954  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
955  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
956  return
957 
958  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
959  return
960 
961  actualId = mapping.need(['filter'], dataId)
962  filterName = actualId['filter']
963  if self.filters is not None and filterName in self.filters:
964  filterName = self.filters[filterName]
965  item.setFilter(afwImage.Filter(filterName))
966 
967  # Default standardization function for exposures
968  def _standardizeExposure(self, mapping, item, dataId, filter=True,
969  trimmed=True, setVisitInfo=True):
970  """Default standardization function for images.
971 
972  This sets the Detector from the camera geometry
973  and optionally set the Fiter. In both cases this saves
974  having to persist some data in each exposure (or image).
975 
976  @param mapping (lsst.obs.base.Mapping)
977  @param[in,out] item image-like object; any of lsst.afw.image.Exposure,
978  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
979  or lsst.afw.image.MaskedImage
980  @param dataId (dict) Dataset identifier
981  @param filter (bool) Set filter? Ignored if item is already an exposure
982  @param trimmed (bool) Should detector be marked as trimmed?
983  @param setVisitInfo (bool) Should Exposure have its VisitInfo filled out from the metadata?
984  @return (lsst.afw.image.Exposure) the standardized Exposure"""
985  try:
986  item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
987  except Exception as e:
988  self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
989  raise
990 
991  if mapping.level.lower() == "amp":
992  self._setAmpDetector(item, dataId, trimmed)
993  elif mapping.level.lower() == "ccd":
994  self._setCcdDetector(item, dataId, trimmed)
995 
996  if filter:
997  self._setFilter(mapping, item, dataId)
998 
999  return item
1000 
    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.

        @param dataId (dict) Dataset identifier
        @return (string) path to the defects file or None if not available"""
        # No defect registry configured: defects simply unavailable.
        if self.defectRegistry is None:
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup will fail in a posix registry because there is no template to provide.
        # NOTE(review): ('taiObs') and ('raw_visit') are plain parenthesized
        # strings, not one-element tuples (no trailing comma); Registry.lookup
        # evidently accepts strings here -- confirm before "fixing".
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        # A visit maps to exactly one raw_visit row.
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Lookup the defects for this CCD serial number that are valid at the exposure midpoint.
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            # Defect paths in the registry are relative to defectPath.
            return os.path.join(self.defectPath, rows[0][0])
        else:
            # Multiple matches indicate an inconsistent defect registry.
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
1032 
1033  def _makeCamera(self, policy, repositoryDir):
1034  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry
1035 
1036  Also set self.cameraDataLocation, if relevant (else it can be left None).
1037 
1038  This implementation assumes that policy contains an entry "camera" that points to the
1039  subdirectory in this package of camera data; specifically, that subdirectory must contain:
1040  - a file named `camera.py` that contains persisted camera config
1041  - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath
1042 
1043  @param policy (daf_persistence.Policy, or pexPolicy.Policy (only for backward compatibility))
1044  Policy with per-camera defaults already merged
1045  @param repositoryDir (string) Policy repository for the subclassing
1046  module (obtained with getRepositoryPath() on the
1047  per-camera default dictionary)
1048  """
1049  if isinstance(policy, pexPolicy.Policy):
1050  policy = dafPersist.Policy(pexPolicy=policy)
1051  if 'camera' not in policy:
1052  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1053  cameraDataSubdir = policy['camera']
1054  self.cameraDataLocation = os.path.normpath(
1055  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1056  cameraConfig = afwCameraGeom.CameraConfig()
1057  cameraConfig.load(self.cameraDataLocation)
1058  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1059  return afwCameraGeom.makeCameraFromPath(
1060  cameraConfig=cameraConfig,
1061  ampInfoPath=ampInfoPath,
1062  shortNameFunc=self.getShortCcdName,
1063  pupilFactoryClass=self.PupilFactoryClass
1064  )
1065 
1066  def getRegistry(self):
1067  """Get the registry used by this mapper.
1068 
1069  Returns
1070  -------
1071  Registry or None
1072  The registry used by this mapper for this mapper's repository.
1073  """
1074  return self.registry
1075 
1076  def getImageCompressionSettings(self, datasetType, dataId):
1077  """Stuff image compression settings into a daf.base.PropertySet
1078 
1079  This goes into the ButlerLocation's "additionalData", which gets
1080  passed into the boost::persistence framework.
1081 
1082  Parameters
1083  ----------
1084  datasetType : `str`
1085  Type of dataset for which to get the image compression settings.
1086  dataId : `dict`
1087  Dataset identifier.
1088 
1089  Returns
1090  -------
1091  additionalData : `lsst.daf.base.PropertySet`
1092  Image compression settings.
1093  """
1094  mapping = self.mappings[datasetType]
1095  recipeName = mapping.recipe
1096  storageType = mapping.storage
1097  if storageType not in self._writeRecipes:
1098  return dafBase.PropertySet()
1099  if recipeName not in self._writeRecipes[storageType]:
1100  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1101  (datasetType, storageType, recipeName))
1102  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1103  seed = hash(tuple(dataId.items())) % 2**31
1104  for plane in ("image", "mask", "variance"):
1105  if recipe.exists(plane + ".scaling.seed") and recipe.get(plane + ".scaling.seed") == 0:
1106  recipe.set(plane + ".scaling.seed", seed)
1107  return recipe
1108 
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which uses it to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
        """
        # Start from the obs_base defaults, then merge in any per-package
        # supplements found in the subclass's own package.
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            # NOTE(review): this intersects recipes[entry].names() with
            # supplements.names() (top-level names of the whole supplement,
            # not supplements[entry].names()) -- confirm it actually detects
            # overrides as intended.
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        # Every storage type must define a "default" recipe; each group is
        # validated (and filled with schema defaults) by its menu entry.
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1182 
1183 
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    @param[in] image Image-like object (lsst.afw.image.DecoratedImage, Image, MaskedImage or Exposure)
    @return (lsst.afw.image.Exposure) Exposure containing input image
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            exposure.setWcs(afwGeom.makeSkyWcs(metadata, strip=True))
        except pexExcept.TypeError as e:
            # raised on failure to create a wcs (and possibly others)
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.warn("wcs set to None; insufficient information found in metadata to create a valid wcs: "
                        "%s", e.args[0])
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Already an Exposure; just pick up its metadata for VisitInfo.
        exposure = image
        metadata = exposure.getMetadata()
    else:
        # Plain Image: wrap it in a MaskedImage and then an Exposure.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # Fill in the VisitInfo from metadata, if possible and not already set.
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None and metadata is not None:
        if mapper is None:
            if not logger:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.warn("I can only set the VisitInfo if you provide a mapper")
        else:
            exposureId = mapper._computeCcdExposureId(dataId)
            visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
            exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
1234 
1235 
1237  """Validate recipes for FitsStorage
1238 
1239  The recipes are supplemented with default values where appropriate.
1240 
1241  TODO: replace this custom validation code with Cerberus (DM-11846)
1242 
1243  Parameters
1244  ----------
1245  recipes : `lsst.daf.persistence.Policy`
1246  FitsStorage recipes to validate.
1247 
1248  Returns
1249  -------
1250  validated : `lsst.daf.base.PropertySet`
1251  Validated FitsStorage recipe.
1252 
1253  Raises
1254  ------
1255  `RuntimeError`
1256  If validation fails.
1257  """
1258  # Schemas define what should be there, and the default values (and by the default
1259  # value, the expected type).
1260  compressionSchema = {
1261  "algorithm": "NONE",
1262  "rows": 1,
1263  "columns": 0,
1264  "quantizeLevel": 0.0,
1265  }
1266  scalingSchema = {
1267  "algorithm": "NONE",
1268  "bitpix": 0,
1269  "maskPlanes": ["NO_DATA"],
1270  "seed": 0,
1271  "quantizeLevel": 4.0,
1272  "quantizePad": 5.0,
1273  "fuzz": True,
1274  "bscale": 1.0,
1275  "bzero": 0.0,
1276  }
1277 
1278  def checkUnrecognized(entry, allowed, description):
1279  """Check to see if the entry contains unrecognised keywords"""
1280  unrecognized = set(entry.keys()) - set(allowed)
1281  if unrecognized:
1282  raise RuntimeError(
1283  "Unrecognized entries when parsing image compression recipe %s: %s" %
1284  (description, unrecognized))
1285 
1286  validated = {}
1287  for name in recipes.names(True):
1288  checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
1289  rr = dafBase.PropertySet()
1290  validated[name] = rr
1291  for plane in ("image", "mask", "variance"):
1292  checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
1293  name + "->" + plane)
1294 
1295  for settings, schema in (("compression", compressionSchema),
1296  ("scaling", scalingSchema)):
1297  prefix = plane + "." + settings
1298  if settings not in recipes[name][plane]:
1299  for key in schema:
1300  rr.set(prefix + "." + key, schema[key])
1301  continue
1302  entry = recipes[name][plane][settings]
1303  checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
1304  for key in schema:
1305  value = type(schema[key])(entry[key]) if key in entry else schema[key]
1306  rr.set(prefix + "." + key, value)
1307  return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
Exposure ID and number of bits used.
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _getCcdKeyVal(self, dataId)
Utility functions.