lsst.obs.base  13.0-51-ga83c964+2
cameraMapper.py
Go to the documentation of this file.
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 from builtins import str
24 import copy
25 import os
26 import pyfits # required by _makeDefectsDict until defects are written as AFW tables
27 import re
28 import weakref
29 import yaml
30 import collections
31 import lsst.daf.persistence as dafPersist
32 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
33 import lsst.daf.base as dafBase
34 import lsst.afw.geom as afwGeom
35 import lsst.afw.image as afwImage
36 import lsst.afw.table as afwTable
37 import lsst.afw.cameraGeom as afwCameraGeom
38 import lsst.log as lsstLog
39 import lsst.pex.policy as pexPolicy
40 import lsst.pex.exceptions as pexExcept
41 from .exposureIdInfo import ExposureIdInfo
42 from .makeRawVisitInfo import MakeRawVisitInfo
43 from lsst.utils import getPackageDir
44 
45 """This module defines the CameraMapper base class."""
46 
47 
48 class CameraMapper(dafPersist.Mapper):
49 
50  """CameraMapper is a base class for mappers that handle images from a
51  camera and products derived from them. This provides an abstraction layer
52  between the data on disk and the code.
53 
54  Public methods: keys, queryMetadata, getDatasetTypes, map,
55  canStandardize, standardize
56 
57  Mappers for specific data sources (e.g., CFHT Megacam, LSST
58  simulations, etc.) should inherit this class.
59 
60  The CameraMapper manages datasets within a "root" directory. Note that
61  writing to a dataset present in the input root will hide the existing
62  dataset but not overwrite it. See #2160 for design discussion.
63 
64  A camera is assumed to consist of one or more rafts, each composed of
65  multiple CCDs. Each CCD is in turn composed of one or more amplifiers
66  (amps). A camera is also assumed to have a camera geometry description
67  (CameraGeom object) as a policy file, a filter description (Filter class
68  static configuration) as another policy file, and an optional defects
69  description directory.
70 
71  Information from the camera geometry and defects are inserted into all
72  Exposure objects returned.
73 
74  The mapper uses one or two registries to retrieve metadata about the
75  images. The first is a registry of all raw exposures. This must contain
76  the time of the observation. One or more tables (or the equivalent)
77  within the registry are used to look up data identifier components that
78  are not specified by the user (e.g. filter) and to return results for
79  metadata queries. The second is an optional registry of all calibration
80  data. This should contain validity start and end entries for each
81  calibration dataset in the same timescale as the observation time.
82 
83  Subclasses will typically set MakeRawVisitInfoClass:
84 
85  MakeRawVisitInfoClass: a class variable that points to a subclass of
86  MakeRawVisitInfo, a functor that creates an
87  lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
88 
89  Subclasses must provide the following methods:
90 
91  _extractDetectorName(self, dataId): returns the detector name for a CCD
92  (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
93  a dataset identifier referring to that CCD or a subcomponent of it.
94 
95  _computeCcdExposureId(self, dataId): see below
96 
97  _computeCoaddExposureId(self, dataId, singleFilter): see below
98 
99  Subclasses may also need to override the following methods:
100 
101  _transformId(self, dataId): transformation of a data identifier
102  from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"),
103  including making suitable for path expansion (e.g. removing commas).
104  The default implementation does nothing. Note that this
105  method should not modify its input parameter.
106 
107  getShortCcdName(self, ccdName): a static method that returns a shortened name
108  suitable for use as a filename. The default version converts spaces to underscores.
109 
110  _getCcdKeyVal(self, dataId): return a CCD key and value
111  by which to look up defects in the defects registry.
112  The default value returns ("ccd", detector name)
113 
114  _mapActualToPath(self, template, actualId): convert a template path to an
115  actual path, using the actual dataset identifier.
116 
117  The mapper's behaviors are largely specified by the policy file.
118  See the MapperDictionary.paf for descriptions of the available items.
119 
120  The 'exposures', 'calibrations', and 'datasets' subpolicies configure
121  mappings (see Mappings class).
122 
123  Common default mappings for all subclasses can be specified in the
124  "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides
125  a simple way to add a product to all camera mappers.
126 
127  Functions to map (provide a path to the data given a dataset
128  identifier dictionary) and standardize (convert data into some standard
129  format or type) may be provided in the subclass as "map_{dataset type}"
130  and "std_{dataset type}", respectively.
131 
132  If non-Exposure datasets cannot be retrieved using standard
133  daf_persistence methods alone, a "bypass_{dataset type}" function may be
134  provided in the subclass to return the dataset instead of using the
135  "datasets" subpolicy.
136 
137  Implementations of map_camera and bypass_camera that should typically be
138  sufficient are provided in this base class.
139 
140  @todo
141  * Handle defects the same was as all other calibration products, using the calibration registry
142  * Instead of auto-loading the camera at construction time, load it from the calibration registry
143  * Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention
144  of pyfits from this package.
145  """
146  packageName = None
147 
148  # a class or subclass of MakeRawVisitInfo, a functor that makes an
149  # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
150  MakeRawVisitInfoClass = MakeRawVisitInfo
151 
152  # a class or subclass of PupilFactory
153  PupilFactoryClass = afwCameraGeom.PupilFactory
154 
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Sets up, in order: logging, the merged policy, level definitions,
        root/calib storages, exposure and calibration registries, the
        per-dataset-type mappings, camera geometry, the defect registry,
        and the skytile policy. Later steps depend on earlier ones (e.g.
        ``_initMappings`` needs ``self.registry`` and the storages), so the
        statement order matters.

        Parameters
        ----------
        policy : daf_persistence.Policy,
            Can also be pexPolicy.Policy, only for backward compatibility.
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.

        Raises
        ------
        RuntimeError
            If the policy requires a calibration registry but no calib
            storage could be located.
        ValueError
            If the subclass did not set the ``packageName`` class variable.
        """

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        # Root resolution precedence: explicit argument, then repositoryCfg.
        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None
        # Backward compatibility: accept an old-style pexPolicy.Policy and
        # convert it to the daf_persistence flavor used everywhere below.
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        # A policy stored with the repository overrides the one passed in.
        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Merge in obs_base's dictionary of mapper-policy defaults.
        defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
                                                                "MapperDictionary.paf",
                                                                "policy")
        dictPolicy = dafPersist.Policy(defaultPolicyFile)
        policy.merge(dictPolicy)

        # Levels: map each level name to the set of keys it excludes.
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries: the exposure registry falls back to the parent's.
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage)
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        # NOTE(review): the `provided` constructor argument is not forwarded
        # here (`provided=None` is passed instead) — confirm intentional.
        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
        # camera package, which is on the local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # Filter translation table
        self.filters = None

        # Skytile policy
        self.skypolicy = policy['skytiles']

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')
296 
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there are
        methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g., reading
        the header of an image, retrieving only the size of a catalog.

        Methods are installed with setattr on the *instance*, and every
        installation is guarded by hasattr, so any map_/std_/bypass_/query_
        method defined by a subclass takes precedence over the generated one.

        @param policy (Policy) Policy with per-camera defaults already merged
        @param rootStorage (Storage subclass instance) Interface to persisted repository data
        @param calibStorage (Storage subclass instance) Interface to persisted calib repository data
        @param provided (list of strings) Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    # Composite datasets get only a map_ method that returns a
                    # ButlerComposite describing the components; no mapping
                    # object, std/query methods, or derived datasets are set up.
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            # subPolicy is captured via the default argument so
                            # each closure keeps its own loop iteration's value.
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    # Default map_/query_/std_ methods delegate to the Mapping;
                    # mapper/mapping are bound via default args (weakref.proxy
                    # avoids a reference cycle through the instance dict).
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            # Fall back to the plain bypass_<dataset> if one exists.
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(location.getLocationsWithRoot()[0]))

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            # Derived datasets reading pieces of an Exposure
                            # from its FITS header without loading the pixels.
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.makeWcs(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("filter",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Filter(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            # "detector" is served from the in-memory camera,
                            # so its map impl points at Internal storage.
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                       dafPersist.ButlerLocation(
                                           pythonType="lsst.afw.cameraGeom.CameraConfig",
                                           cppType="Config",
                                           storageName="Internal",
                                           locationList="ignored",
                                           dataId=dataId,
                                           mapper=self,
                                           storage=None,
                                       ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       self.camera[self._extractDetectorName(dataId)]
                                       )
                            # hdu=1: exposure pixel data live in the first extension
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0], hdu=1)))

                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            # Map the parent image (dataId minus 'bbox'), then
                            # attach the bbox as additionalData for the reader.
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]),
                                                         hdu=1).get("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
503 
504  def _computeCcdExposureId(self, dataId):
505  """Compute the 64-bit (long) identifier for a CCD exposure.
506 
507  Subclasses must override
508 
509  @param dataId (dict) Data identifier with visit, ccd
510  """
511  raise NotImplementedError()
512 
513  def _computeCoaddExposureId(self, dataId, singleFilter):
514  """Compute the 64-bit (long) identifier for a coadd.
515 
516  Subclasses must override
517 
518  @param dataId (dict) Data identifier with tract and patch.
519  @param singleFilter (bool) True means the desired ID is for a single-
520  filter coadd, in which case dataId
521  must contain filter.
522  """
523  raise NotImplementedError()
524 
525  def _search(self, path):
526  """Search for path in the associated repository's storage.
527 
528  Parameters
529  ----------
530  path : string
531  Path that describes an object in the repository associated with
532  this mapper.
533  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
534  indicator will be stripped when searching and so will match
535  filenames without the HDU indicator, e.g. 'foo.fits'. The path
536  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
537 
538  Returns
539  -------
540  string
541  The path for this object in the repository. Will return None if the
542  object can't be found. If the input argument path contained an HDU
543  indicator, the returned path will also contain the HDU indicator.
544  """
545  return self.rootStorage.search(path)
546 
547  def backup(self, datasetType, dataId):
548  """Rename any existing object with the given type and dataId.
549 
550  The CameraMapper implementation saves objects in a sequence of e.g.:
551  foo.fits
552  foo.fits~1
553  foo.fits~2
554  All of the backups will be placed in the output repo, however, and will
555  not be removed if they are found elsewhere in the _parent chain. This
556  means that the same file will be stored twice if the previous version was
557  found in an input repo.
558  """
559 
560  # Calling PosixStorage directly is not the long term solution in this
561  # function, this is work-in-progress on epic DM-6225. The plan is for
562  # parentSearch to be changed to 'search', and search only the storage
563  # associated with this mapper. All searching of parents will be handled
564  # by traversing the container of repositories in Butler.
565 
566  def firstElement(list):
567  """Get the first element in the list, or None if that can't be done.
568  """
569  return list[0] if list is not None and len(list) else None
570 
571  n = 0
572  newLocation = self.map(datasetType, dataId, write=True)
573  newPath = newLocation.getLocations()[0]
574  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
575  path = firstElement(path)
576  oldPaths = []
577  while path is not None:
578  n += 1
579  oldPaths.append((n, path))
580  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
581  path = firstElement(path)
582  for n, oldPath in reversed(oldPaths):
583  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
584 
585  def keys(self):
586  """Return supported keys.
587  @return (iterable) List of keys usable in a dataset identifier"""
588  return iter(self.keyDict.keys())
589 
590  def getKeys(self, datasetType, level):
591  """Return a dict of supported keys and their value types for a given dataset
592  type at a given level of the key hierarchy.
593 
594  @param datasetType (str) dataset type or None for all dataset types
595  @param level (str) level or None for all levels or '' for the default level for the camera
596  @return (dict) dict keys are strings usable in a dataset identifier; values are their value types"""
597 
598  # not sure if this is how we want to do this. what if None was intended?
599  if level == '':
600  level = self.getDefaultLevel()
601 
602  if datasetType is None:
603  keyDict = copy.copy(self.keyDict)
604  else:
605  keyDict = self.mappings[datasetType].keys()
606  if level is not None and level in self.levels:
607  keyDict = copy.copy(keyDict)
608  for l in self.levels[level]:
609  if l in keyDict:
610  del keyDict[l]
611  return keyDict
612 
613  def getDefaultLevel(self):
614  return self.defaultLevel
615 
616  def getDefaultSubLevel(self, level):
617  if level in self.defaultSubLevels:
618  return self.defaultSubLevels[level]
619  return None
620 
621  @classmethod
622  def getCameraName(cls):
623  """Return the name of the camera that this CameraMapper is for."""
624  className = str(cls)
625  className = className[className.find('.'):-1]
626  m = re.search(r'(\w+)Mapper', className)
627  if m is None:
628  m = re.search(r"class '[\w.]*?(\w+)'", className)
629  name = m.group(1)
630  return name[:1].lower() + name[1:] if name else ''
631 
632  @classmethod
633  def getPackageName(cls):
634  """Return the name of the package containing this CameraMapper."""
635  if cls.packageName is None:
636  raise ValueError('class variable packageName must not be None')
637  return cls.packageName
638 
639  @classmethod
640  def getPackageDir(cls):
641  """Return the base directory of this package"""
642  return getPackageDir(cls.getPackageName())
643 
644  def map_camera(self, dataId, write=False):
645  """Map a camera dataset."""
646  if self.camera is None:
647  raise RuntimeError("No camera dataset available.")
648  actualId = self._transformId(dataId)
649  return dafPersist.ButlerLocation(
650  pythonType="lsst.afw.cameraGeom.CameraConfig",
651  cppType="Config",
652  storageName="ConfigStorage",
653  locationList=self.cameraDataLocation or "ignored",
654  dataId=actualId,
655  mapper=self,
656  storage=self.rootStorage
657  )
658 
659  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
660  """Return the (preloaded) camera object.
661  """
662  if self.camera is None:
663  raise RuntimeError("No camera dataset available.")
664  return self.camera
665 
666  def map_defects(self, dataId, write=False):
667  """Map defects dataset.
668 
669  @return a very minimal ButlerLocation containing just the locationList field
670  (just enough information that bypass_defects can use it).
671  """
672  defectFitsPath = self._defectLookup(dataId=dataId)
673  if defectFitsPath is None:
674  raise RuntimeError("No defects available for dataId=%s" % (dataId,))
675 
676  return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
677  dataId, self,
678  storage=self.rootStorage)
679 
680  def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
681  """Return a defect based on the butler location returned by map_defects
682 
683  @param[in] butlerLocation: a ButlerLocation with locationList = path to defects FITS file
684  @param[in] dataId: the usual data ID; "ccd" must be set
685 
686  Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation
687  into an object, which is what we want for now, since that conversion is a bit tricky.
688  """
689  detectorName = self._extractDetectorName(dataId)
690  defectsFitsPath = butlerLocation.locationList[0]
691  with pyfits.open(defectsFitsPath) as hduList:
692  for hdu in hduList[1:]:
693  if hdu.header["name"] != detectorName:
694  continue
695 
696  defectList = []
697  for data in hdu.data:
698  bbox = afwGeom.Box2I(
699  afwGeom.Point2I(int(data['x0']), int(data['y0'])),
700  afwGeom.Extent2I(int(data['width']), int(data['height'])),
701  )
702  defectList.append(afwImage.DefectBase(bbox))
703  return defectList
704 
705  raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
706 
707  def map_expIdInfo(self, dataId, write=False):
708  return dafPersist.ButlerLocation(
709  pythonType="lsst.obs.base.ExposureIdInfo",
710  cppType=None,
711  storageName="Internal",
712  locationList="ignored",
713  dataId=dataId,
714  mapper=self,
715  storage=self.rootStorage
716  )
717 
718  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
719  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
720  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
721  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
722  return ExposureIdInfo(expId=expId, expBits=expBits)
723 
724  def std_bfKernel(self, item, dataId):
725  """Disable standardization for bfKernel
726 
727  bfKernel is a calibration product that is numpy array,
728  unlike other calibration products that are all images;
729  all calibration images are sent through _standardizeExposure
730  due to CalibrationMapping, but we don't want that to happen to bfKernel
731  """
732  return item
733 
734  def std_raw(self, item, dataId):
735  """Standardize a raw dataset by converting it to an Exposure instead of an Image"""
736  return self._standardizeExposure(self.exposures['raw'], item, dataId,
737  trimmed=False, setVisitInfo=True)
738 
739  def map_skypolicy(self, dataId):
740  """Map a sky policy."""
741  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
742  "Internal", None, None, self,
743  storage=self.rootStorage)
744 
745  def std_skypolicy(self, item, dataId):
746  """Standardize a sky policy by returning the one we use."""
747  return self.skypolicy
748 
749 
754 
755  def _getCcdKeyVal(self, dataId):
756  """Return CCD key and value used to look a defect in the defect registry
757 
758  The default implementation simply returns ("ccd", full detector name)
759  """
760  return ("ccd", self._extractDetectorName(dataId))
761 
762  def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
763  posixIfNoSql=True):
764  """Set up a registry (usually SQLite3), trying a number of possible
765  paths.
766 
767  Parameters
768  ----------
769  name : string
770  Name of registry.
771  description: `str`
772  Description of registry (for log messages)
773  path : string
774  Path for registry.
775  policy : string
776  Policy that contains the registry name, used if path is None.
777  policyKey : string
778  Key in policy for registry path.
779  storage : Storage subclass
780  Repository Storage to look in.
781  searchParents : bool, optional
782  True if the search for a registry should follow any Butler v1
783  _parent symlinks.
784  posixIfNoSql : bool, optional
785  If an sqlite registry is not found, will create a posix registry if
786  this is True.
787 
788  Returns
789  -------
790  lsst.daf.persistence.Registry
791  Registry object
792  """
793  if path is None and policyKey in policy:
794  path = dafPersist.LogicalLocation(policy[policyKey]).locString()
795  if os.path.isabs(path):
796  raise RuntimeError("Policy should not indicate an absolute path for registry.")
797  if not storage.exists(path):
798  newPath = storage.instanceSearch(path)
799 
800  newPath = newPath[0] if newPath is not None and len(newPath) else None
801  if newPath is None:
802  self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
803  path)
804  path = newPath
805  else:
806  self.log.warn("Unable to locate registry at policy path: %s", path)
807  path = None
808 
809  # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
810  # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
811  # root from path. Currently only works with PosixStorage
812  try:
813  root = storage.root
814  if path and (path.startswith(root)):
815  path = path[len(root + '/'):]
816  except AttributeError:
817  pass
818 
819  # determine if there is an sqlite registry and if not, try the posix registry.
820  registry = None
821 
822  def search(filename, description):
823  """Search for file in storage
824 
825  Parameters
826  ----------
827  filename : `str`
828  Filename to search for
829  description : `str`
830  Description of file, for error message.
831 
832  Returns
833  -------
834  path : `str` or `None`
835  Path to file, or None
836  """
837  result = storage.instanceSearch(filename)
838  if result:
839  return result[0]
840  self.log.debug("Unable to locate %s: %s", description, filename)
841  return None
842 
843  # Search for a suitable registry database
844  if path is None:
845  path = search("%s.pgsql" % name, "%s in root" % description)
846  if path is None:
847  path = search("%s.sqlite3" % name, "%s in root" % description)
848  if path is None:
849  path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
850 
851  if path is not None:
852  if not storage.exists(path):
853  newPath = storage.instanceSearch(path)
854  newPath = newPath[0] if newPath is not None and len(newPath) else None
855  if newPath is not None:
856  path = newPath
857  localFileObj = storage.getLocalFile(path)
858  self.log.info("Loading %s registry from %s", description, localFileObj.name)
859  registry = dafPersist.Registry.create(localFileObj.name)
860  localFileObj.close()
861  elif not registry and posixIfNoSql:
862  try:
863  self.log.info("Loading Posix %s registry from %s", description, storage.root)
864  registry = dafPersist.PosixRegistry(storage.root)
865  except:
866  registry = None
867 
868  return registry
869 
870  def _transformId(self, dataId):
871  """Generate a standard ID dict from a camera-specific ID dict.
872 
873  Canonical keys include:
874  - amp: amplifier name
875  - ccd: CCD name (in LSST this is a combination of raft and sensor)
876  The default implementation returns a copy of its input.
877 
878  @param dataId[in] (dict) Dataset identifier; this must not be modified
879  @return (dict) Transformed dataset identifier"""
880 
881  return dataId.copy()
882 
883  def _mapActualToPath(self, template, actualId):
884  """Convert a template path to an actual path, using the actual data
885  identifier. This implementation is usually sufficient but can be
886  overridden by the subclass.
887  @param template (string) Template path
888  @param actualId (dict) Dataset identifier
889  @return (string) Pathname"""
890 
891  try:
892  transformedId = self._transformId(actualId)
893  return template % transformedId
894  except Exception as e:
895  raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
896 
897  @staticmethod
898  def getShortCcdName(ccdName):
899  """Convert a CCD name to a form useful as a filename
900 
901  The default implementation converts spaces to underscores.
902  """
903  return ccdName.replace(" ", "_")
904 
905  def _extractDetectorName(self, dataId):
906  """Extract the detector (CCD) name from the dataset identifier.
907 
908  The name in question is the detector name used by lsst.afw.cameraGeom.
909 
910  @param dataId (dict) Dataset identifier
911  @return (string) Detector name
912  """
913  raise NotImplementedError("No _extractDetectorName() function specified")
914 
915  def _extractAmpId(self, dataId):
916  """Extract the amplifier identifer from a dataset identifier.
917 
918  @warning this is deprecated; DO NOT USE IT
919 
920  amplifier identifier has two parts: the detector name for the CCD
921  containing the amplifier and index of the amplifier in the detector.
922  @param dataId (dict) Dataset identifer
923  @return (tuple) Amplifier identifier"""
924 
925  trDataId = self._transformId(dataId)
926  return (trDataId["ccd"], int(trDataId['amp']))
927 
928  def _setAmpDetector(self, item, dataId, trimmed=True):
929  """Set the detector object in an Exposure for an amplifier.
930  Defects are also added to the Exposure based on the detector object.
931  @param[in,out] item (lsst.afw.image.Exposure)
932  @param dataId (dict) Dataset identifier
933  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
934 
935  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
936 
937  def _setCcdDetector(self, item, dataId, trimmed=True):
938  """Set the detector object in an Exposure for a CCD.
939  @param[in,out] item (lsst.afw.image.Exposure)
940  @param dataId (dict) Dataset identifier
941  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
942 
943  if item.getDetector() is not None:
944  return
945 
946  detectorName = self._extractDetectorName(dataId)
947  detector = self.camera[detectorName]
948  item.setDetector(detector)
949 
950  def _setFilter(self, mapping, item, dataId):
951  """Set the filter object in an Exposure. If the Exposure had a FILTER
952  keyword, this was already processed during load. But if it didn't,
953  use the filter from the registry.
954  @param mapping (lsst.obs.base.Mapping)
955  @param[in,out] item (lsst.afw.image.Exposure)
956  @param dataId (dict) Dataset identifier"""
957 
958  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
959  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
960  return
961 
962  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
963  return
964 
965  actualId = mapping.need(['filter'], dataId)
966  filterName = actualId['filter']
967  if self.filters is not None and filterName in self.filters:
968  filterName = self.filters[filterName]
969  item.setFilter(afwImage.Filter(filterName))
970 
971  # Default standardization function for exposures
972  def _standardizeExposure(self, mapping, item, dataId, filter=True,
973  trimmed=True, setVisitInfo=True):
974  """Default standardization function for images.
975 
976  This sets the Detector from the camera geometry
977  and optionally set the Fiter. In both cases this saves
978  having to persist some data in each exposure (or image).
979 
980  @param mapping (lsst.obs.base.Mapping)
981  @param[in,out] item image-like object; any of lsst.afw.image.Exposure,
982  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
983  or lsst.afw.image.MaskedImage
984  @param dataId (dict) Dataset identifier
985  @param filter (bool) Set filter? Ignored if item is already an exposure
986  @param trimmed (bool) Should detector be marked as trimmed?
987  @param setVisitInfo (bool) Should Exposure have its VisitInfo filled out from the metadata?
988  @return (lsst.afw.image.Exposure) the standardized Exposure"""
989  try:
990  item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
991  except Exception as e:
992  self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
993  raise
994 
995  if mapping.level.lower() == "amp":
996  self._setAmpDetector(item, dataId, trimmed)
997  elif mapping.level.lower() == "ccd":
998  self._setCcdDetector(item, dataId, trimmed)
999 
1000  if filter:
1001  self._setFilter(mapping, item, dataId)
1002 
1003  return item
1004 
    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.

        Resolves the exposure's observation time via the main registry, then
        queries the defect registry for the (single) defect file whose validity
        interval contains that time.

        @param dataId (dict) Dataset identifier; must contain 'visit'
        @return (string) path to the defects file or None if not available
        @raise RuntimeError if there is no main registry, or if more than one
               defect file matches"""
        if self.defectRegistry is None:
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup will fail in a posix registry because there is no template to provide.
        # NOTE(review): ('taiObs') is a parenthesized string, not a 1-tuple;
        # presumably Registry.lookup accepts a bare string here — confirm,
        # otherwise this should read ('taiObs',).
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Lookup the defects for this CCD serial number that are valid at the exposure midpoint.
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            # Paths in the registry are relative to the defect directory.
            return os.path.join(self.defectPath, rows[0][0])
        else:
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
1036 
1037  def _makeCamera(self, policy, repositoryDir):
1038  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry
1039 
1040  Also set self.cameraDataLocation, if relevant (else it can be left None).
1041 
1042  This implementation assumes that policy contains an entry "camera" that points to the
1043  subdirectory in this package of camera data; specifically, that subdirectory must contain:
1044  - a file named `camera.py` that contains persisted camera config
1045  - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath
1046 
1047  @param policy (daf_persistence.Policy, or pexPolicy.Policy (only for backward compatibility))
1048  Policy with per-camera defaults already merged
1049  @param repositoryDir (string) Policy repository for the subclassing
1050  module (obtained with getRepositoryPath() on the
1051  per-camera default dictionary)
1052  """
1053  if isinstance(policy, pexPolicy.Policy):
1054  policy = dafPersist.Policy(pexPolicy=policy)
1055  if 'camera' not in policy:
1056  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1057  cameraDataSubdir = policy['camera']
1058  self.cameraDataLocation = os.path.normpath(
1059  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1060  cameraConfig = afwCameraGeom.CameraConfig()
1061  cameraConfig.load(self.cameraDataLocation)
1062  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1063  return afwCameraGeom.makeCameraFromPath(
1064  cameraConfig=cameraConfig,
1065  ampInfoPath=ampInfoPath,
1066  shortNameFunc=self.getShortCcdName,
1067  pupilFactoryClass=self.PupilFactoryClass
1068  )
1069 
1070  def getRegistry(self):
1071  """Get the registry used by this mapper.
1072 
1073  Returns
1074  -------
1075  Registry or None
1076  The registry used by this mapper for this mapper's repository.
1077  """
1078  return self.registry
1079 
1080  def getImageCompressionSettings(self, datasetType, dataId):
1081  """Stuff image compression settings into a daf.base.PropertySet
1082 
1083  This goes into the ButlerLocation's "additionalData", which gets
1084  passed into the boost::persistence framework.
1085 
1086  Parameters
1087  ----------
1088  datasetType : `str`
1089  Type of dataset for which to get the image compression settings.
1090  dataId : `dict`
1091  Dataset identifier.
1092 
1093  Returns
1094  -------
1095  additionalData : `lsst.daf.base.PropertySet`
1096  Image compression settings.
1097  """
1098  mapping = self.mappings[datasetType]
1099  recipeName = mapping.recipe
1100  storageType = mapping.storage
1101  if storageType not in self._writeRecipes:
1102  return lsst.daf.base.PropertySet()
1103  if recipeName not in self._writeRecipes[storageType]:
1104  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1105  (datasetType, storageType, recipeName))
1106  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1107  seed = hash(tuple(dataId.items())) % 2**31
1108  for plane in ("image", "mask", "variance"):
1109  if recipe.exists(plane + ".scaling.seed") and recipe.get(plane + ".scaling.seed") == 0:
1110  recipe.set(plane + ".scaling.seed", seed)
1111  return recipe
1112 
1113  def _initWriteRecipes(self):
1114  """Read the recipes for writing files
1115 
1116  These recipes are currently used for configuring FITS compression,
1117  but they could have wider uses for configuring different flavors
1118  of the storage types. A recipe is referred to by a symbolic name,
1119  which has associated settings. These settings are stored as a
1120  `PropertySet` so they can easily be passed down to the
1121  boost::persistence framework as the "additionalData" parameter.
1122 
1123  The list of recipes is written in YAML. A default recipe and
1124  some other convenient recipes are in obs_base/policy/writeRecipes.yaml
1125  and these may be overridden or supplemented by the individual obs_*
1126  packages' own policy/writeRecipes.yaml files.
1127 
1128  Recipes are grouped by the storage type. Currently, only the
1129  ``FitsStorage`` storage type uses recipes, which uses it to
1130  configure FITS image compression.
1131 
1132  Each ``FitsStorage`` recipe for FITS compression should define
1133  "image", "mask" and "variance" entries, each of which may contain
1134  "compression" and "scaling" entries. Defaults will be provided for
1135  any missing elements under "compression" and "scaling".
1136 
1137  The allowed entries under "compression" are:
1138 
1139  * algorithm (string): compression algorithm to use
1140  * rows (int): number of rows per tile (0 = entire dimension)
1141  * columns (int): number of columns per tile (0 = entire dimension)
1142  * quantizeLevel (float): cfitsio quantization level
1143 
1144  The allowed entries under "scaling" are:
1145 
1146  * algorithm (string): scaling algorithm to use
1147  * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
1148  * fuzz (bool): fuzz the values when quantising floating-point values?
1149  * seed (long): seed for random number generator when fuzzing
1150  * maskPlanes (list of string): mask planes to ignore when doing statistics
1151  * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
1152  * quantizePad: number of stdev to allow on the low side (for STDEV_POSITIVE/NEGATIVE)
1153  * bscale: manually specified BSCALE (for MANUAL scaling)
1154  * bzero: manually specified BSCALE (for MANUAL scaling)
1155 
1156  A very simple example YAML recipe:
1157 
1158  FitsStorage:
1159  default:
1160  image: &default
1161  compression:
1162  algorithm: GZIP_SHUFFLE
1163  mask: *default
1164  variance: *default
1165  """
1166  recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
1167  recipes = dafPersist.Policy(recipesFile)
1168  supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
1169  if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
1170  supplements = dafPersist.Policy(supplementsFile)
1171  # Don't allow overrides, only supplements
1172  intersection = set(recipes.names()).intersection(set(supplements.names()))
1173  if intersection:
1174  raise RuntimeError("Recipes provided in %s may not override those in %s: %s" %
1175  (supplementsFile, recipesFile, intersection))
1176  recipes.update(overrides)
1177 
1178  self._writeRecipes = {}
1179  validationMenu = {'FitsStorage': validateRecipeFitsStorage,}
1180  for storageType in recipes.names(True):
1181  if "default" not in recipes[storageType]:
1182  raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
1183  (storageType, recipesFile))
1184  self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1185 
1186 
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    @param[in] image Image-like object (lsst.afw.image.DecoratedImage, Image, MaskedImage or Exposure)
    @return (lsst.afw.image.Exposure) Exposure containing input image
    """
    metadata = None
    if isinstance(image, afwImage.Exposure):
        # Already an Exposure; just pick up its metadata.
        exposure = image
        metadata = exposure.getMetadata()
    elif isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            exposure.setWcs(afwImage.makeWcs(metadata, True))
        except pexExcept.InvalidParameterError as e:
            # raised on failure to create a wcs (and possibly others)
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.warn("wcs set to None; insufficient information found in metadata to create a valid wcs: "
                        "%s", e.args[0])
        exposure.setMetadata(metadata)
    else:
        # Plain Image: wrap in a MaskedImage, then in an Exposure.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # Set VisitInfo if we can: we need metadata to build it and a mapper to
    # interpret that metadata.
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
1237 
1238 
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Fix: the "def validateRecipeFitsStorage(recipes):" header line was
    # missing in this rendering of the file; restored so the module parses.
    # Schemas define what should be there, and the default values (and by the default
    # value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # Nothing supplied: fill in every default.
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce supplied values to the schema's default type.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
Exposure ID and number of bits used.
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _getCcdKeyVal(self, dataId)
Utility functions.