lsst.obs.base  13.0-55-gb064ced+6
cameraMapper.py
Go to the documentation of this file.
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 from builtins import str
24 import copy
25 import os
26 import pyfits # required by _makeDefectsDict until defects are written as AFW tables
27 import re
28 import weakref
29 import lsst.daf.persistence as dafPersist
30 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
31 import lsst.daf.base as dafBase
32 import lsst.afw.geom as afwGeom
33 import lsst.afw.image as afwImage
34 import lsst.afw.table as afwTable
35 import lsst.afw.cameraGeom as afwCameraGeom
36 import lsst.log as lsstLog
37 import lsst.pex.policy as pexPolicy
38 import lsst.pex.exceptions as pexExcept
39 from .exposureIdInfo import ExposureIdInfo
40 from .makeRawVisitInfo import MakeRawVisitInfo
41 from lsst.utils import getPackageDir
42 
43 """This module defines the CameraMapper base class."""
44 
45 
class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file, a filter description (Filter class
    static configuration) as another policy file, and an optional defects
    description directory.

    Information from the camera geometry and defects are inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"),
    including making suitable for path expansion (e.g. removing commas).
    The default implementation does nothing. Note that this
    method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened name
    suitable for use as a filename. The default version converts spaces to underscores.

    _getCcdKeyVal(self, dataId): return a CCD key and value
    by which to look up defects in the defects registry.
    The default value returns ("ccd", detector name)

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides
    a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    @todo
    * Handle defects the same was as all other calibration products, using the calibration registry
    * Instead of auto-loading the camera at construction time, load it from the calibration registry
    * Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention
      of pyfits from this package.
    """
    # Name of the python package providing this mapper's camera description.
    # Subclasses must override; __init__ and getPackageName() raise if it is
    # still None.
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory
152 
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy,
            Can also be pexPolicy.Policy, only for backward compatibility.
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        # Provisional root: an explicit root argument wins over the
        # repository configuration. self.root is assigned again further down,
        # after root has been normalized through LogicalLocation.
        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        # Accept old-style pexPolicy input for backward compatibility.
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        # Per-repository policy overrides take precedence over the defaults
        # passed in by the subclass.
        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Merge in the obs_base dictionary defaults so that every expected
        # policy key is present.
        defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
                                                                "MapperDictionary.paf",
                                                                "policy")
        dictPolicy = dafPersist.Policy(defaultPolicyFile)
        policy.merge(dictPolicy)

        # Levels of the data-identifier key hierarchy (e.g. visit/sensor/amp)
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage)
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        # NOTE(review): the 'provided' constructor argument is discarded here;
        # _initMappings is always called with provided=None. Confirm whether
        # provided=provided was intended.
        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
        # camera package, which is on the local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # Filter translation table
        self.filters = None

        # Skytile policy
        self.skypolicy = policy['skytiles']

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')
292 
294 
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there are
        methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g., reading
        the header of an image, retrieving only the size of a catalog.

        @param policy (Policy) Policy with per-camera defaults already merged
        @param rootStorage (Storage subclass instance) Interface to persisted repository data
        @param calibStorage (Storage subclass instance) Interface to persisted calib repository data
        @param provided (list of strings) Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))

        # Mappings: one (policy section, defaults, Mapping class) triple per
        # category of dataset.
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                # Per-category dict of Mapping objects, also exposed as an
                # attribute (self.images, self.exposures, ...).
                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    # Composite datasets get a map_ method that returns a
                    # ButlerComposite describing their components, and no
                    # other handlers.
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            # subPolicy is bound as a default argument so each
                            # closure keeps its own copy (late-binding guard).
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping

                    # Install default map_/query_/std_ handlers unless the
                    # subclass already defines them. weakref.proxy avoids a
                    # reference cycle between the mapper and its closures;
                    # mapping is bound as a default argument (late-binding
                    # guard, as above).
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(location.getLocationsWithRoot()[0]))

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.makeWcs(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("filter",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Filter(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                       dafPersist.ButlerLocation(
                                           pythonType="lsst.afw.cameraGeom.CameraConfig",
                                           cppType="Config",
                                           storageName="Internal",
                                           locationList="ignored",
                                           dataId=dataId,
                                           mapper=self,
                                           storage=None,
                                       ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       self.camera[self._extractDetectorName(dataId)]
                                       )
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0], hdu=1)))

                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]),
                                                         hdu=1).get("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
501 
502  def _computeCcdExposureId(self, dataId):
503  """Compute the 64-bit (long) identifier for a CCD exposure.
504 
505  Subclasses must override
506 
507  @param dataId (dict) Data identifier with visit, ccd
508  """
509  raise NotImplementedError()
510 
511  def _computeCoaddExposureId(self, dataId, singleFilter):
512  """Compute the 64-bit (long) identifier for a coadd.
513 
514  Subclasses must override
515 
516  @param dataId (dict) Data identifier with tract and patch.
517  @param singleFilter (bool) True means the desired ID is for a single-
518  filter coadd, in which case dataId
519  must contain filter.
520  """
521  raise NotImplementedError()
522 
523  def _search(self, path):
524  """Search for path in the associated repository's storage.
525 
526  Parameters
527  ----------
528  path : string
529  Path that describes an object in the repository associated with
530  this mapper.
531  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
532  indicator will be stripped when searching and so will match
533  filenames without the HDU indicator, e.g. 'foo.fits'. The path
534  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
535 
536  Returns
537  -------
538  string
539  The path for this object in the repository. Will return None if the
540  object can't be found. If the input argument path contained an HDU
541  indicator, the returned path will also contain the HDU indicator.
542  """
543  return self.rootStorage.search(path)
544 
545  def backup(self, datasetType, dataId):
546  """Rename any existing object with the given type and dataId.
547 
548  The CameraMapper implementation saves objects in a sequence of e.g.:
549  foo.fits
550  foo.fits~1
551  foo.fits~2
552  All of the backups will be placed in the output repo, however, and will
553  not be removed if they are found elsewhere in the _parent chain. This
554  means that the same file will be stored twice if the previous version was
555  found in an input repo.
556  """
557 
558  # Calling PosixStorage directly is not the long term solution in this
559  # function, this is work-in-progress on epic DM-6225. The plan is for
560  # parentSearch to be changed to 'search', and search only the storage
561  # associated with this mapper. All searching of parents will be handled
562  # by traversing the container of repositories in Butler.
563 
564  def firstElement(list):
565  """Get the first element in the list, or None if that can't be done.
566  """
567  return list[0] if list is not None and len(list) else None
568 
569  n = 0
570  newLocation = self.map(datasetType, dataId, write=True)
571  newPath = newLocation.getLocations()[0]
572  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
573  path = firstElement(path)
574  oldPaths = []
575  while path is not None:
576  n += 1
577  oldPaths.append((n, path))
578  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
579  path = firstElement(path)
580  for n, oldPath in reversed(oldPaths):
581  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
582 
583  def keys(self):
584  """Return supported keys.
585  @return (iterable) List of keys usable in a dataset identifier"""
586  return iter(self.keyDict.keys())
587 
588  def getKeys(self, datasetType, level):
589  """Return a dict of supported keys and their value types for a given dataset
590  type at a given level of the key hierarchy.
591 
592  @param datasetType (str) dataset type or None for all dataset types
593  @param level (str) level or None for all levels or '' for the default level for the camera
594  @return (dict) dict keys are strings usable in a dataset identifier; values are their value types"""
595 
596  # not sure if this is how we want to do this. what if None was intended?
597  if level == '':
598  level = self.getDefaultLevel()
599 
600  if datasetType is None:
601  keyDict = copy.copy(self.keyDict)
602  else:
603  keyDict = self.mappings[datasetType].keys()
604  if level is not None and level in self.levels:
605  keyDict = copy.copy(keyDict)
606  for l in self.levels[level]:
607  if l in keyDict:
608  del keyDict[l]
609  return keyDict
610 
611  def getDefaultLevel(self):
612  return self.defaultLevel
613 
614  def getDefaultSubLevel(self, level):
615  if level in self.defaultSubLevels:
616  return self.defaultSubLevels[level]
617  return None
618 
    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        # str(cls) looks like "<class 'package.module.SomethingMapper'>";
        # trim everything up to the first '.' and the trailing ">".
        className = str(cls)
        className = className[className.find('.'):-1]
        # Prefer the conventional "<Name>Mapper" class name; fall back to the
        # last word of the dotted path otherwise.
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        # NOTE(review): if neither pattern matches, m is None and m.group(1)
        # raises AttributeError — confirm whether that is acceptable here.
        name = m.group(1)
        # Lower-case the first character only (the ternary covers the whole
        # expression, so an empty name yields '').
        return name[:1].lower() + name[1:] if name else ''
629 
630  @classmethod
631  def getPackageName(cls):
632  """Return the name of the package containing this CameraMapper."""
633  if cls.packageName is None:
634  raise ValueError('class variable packageName must not be None')
635  return cls.packageName
636 
637  @classmethod
638  def getPackageDir(cls):
639  """Return the base directory of this package"""
640  return getPackageDir(cls.getPackageName())
641 
642  def map_camera(self, dataId, write=False):
643  """Map a camera dataset."""
644  if self.camera is None:
645  raise RuntimeError("No camera dataset available.")
646  actualId = self._transformId(dataId)
647  return dafPersist.ButlerLocation(
648  pythonType="lsst.afw.cameraGeom.CameraConfig",
649  cppType="Config",
650  storageName="ConfigStorage",
651  locationList=self.cameraDataLocation or "ignored",
652  dataId=actualId,
653  mapper=self,
654  storage=self.rootStorage
655  )
656 
657  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
658  """Return the (preloaded) camera object.
659  """
660  if self.camera is None:
661  raise RuntimeError("No camera dataset available.")
662  return self.camera
663 
664  def map_defects(self, dataId, write=False):
665  """Map defects dataset.
666 
667  @return a very minimal ButlerLocation containing just the locationList field
668  (just enough information that bypass_defects can use it).
669  """
670  defectFitsPath = self._defectLookup(dataId=dataId)
671  if defectFitsPath is None:
672  raise RuntimeError("No defects available for dataId=%s" % (dataId,))
673 
674  return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
675  dataId, self,
676  storage=self.rootStorage)
677 
678  def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
679  """Return a defect based on the butler location returned by map_defects
680 
681  @param[in] butlerLocation: a ButlerLocation with locationList = path to defects FITS file
682  @param[in] dataId: the usual data ID; "ccd" must be set
683 
684  Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation
685  into an object, which is what we want for now, since that conversion is a bit tricky.
686  """
687  detectorName = self._extractDetectorName(dataId)
688  defectsFitsPath = butlerLocation.locationList[0]
689  with pyfits.open(defectsFitsPath) as hduList:
690  for hdu in hduList[1:]:
691  if hdu.header["name"] != detectorName:
692  continue
693 
694  defectList = []
695  for data in hdu.data:
696  bbox = afwGeom.Box2I(
697  afwGeom.Point2I(int(data['x0']), int(data['y0'])),
698  afwGeom.Extent2I(int(data['width']), int(data['height'])),
699  )
700  defectList.append(afwImage.DefectBase(bbox))
701  return defectList
702 
703  raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
704 
705  def map_expIdInfo(self, dataId, write=False):
706  return dafPersist.ButlerLocation(
707  pythonType="lsst.obs.base.ExposureIdInfo",
708  cppType=None,
709  storageName="Internal",
710  locationList="ignored",
711  dataId=dataId,
712  mapper=self,
713  storage=self.rootStorage
714  )
715 
716  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
717  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
718  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
719  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
720  return ExposureIdInfo(expId=expId, expBits=expBits)
721 
722  def std_bfKernel(self, item, dataId):
723  """Disable standardization for bfKernel
724 
725  bfKernel is a calibration product that is numpy array,
726  unlike other calibration products that are all images;
727  all calibration images are sent through _standardizeExposure
728  due to CalibrationMapping, but we don't want that to happen to bfKernel
729  """
730  return item
731 
732  def std_raw(self, item, dataId):
733  """Standardize a raw dataset by converting it to an Exposure instead of an Image"""
734  return self._standardizeExposure(self.exposures['raw'], item, dataId,
735  trimmed=False, setVisitInfo=True)
736 
737  def map_skypolicy(self, dataId):
738  """Map a sky policy."""
739  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
740  "Internal", None, None, self,
741  storage=self.rootStorage)
742 
743  def std_skypolicy(self, item, dataId):
744  """Standardize a sky policy by returning the one we use."""
745  return self.skypolicy
746 
747 
752 
753  def _getCcdKeyVal(self, dataId):
754  """Return CCD key and value used to look a defect in the defect registry
755 
756  The default implementation simply returns ("ccd", full detector name)
757  """
758  return ("ccd", self._extractDetectorName(dataId))
759 
760  def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
761  posixIfNoSql=True):
762  """Set up a registry (usually SQLite3), trying a number of possible
763  paths.
764 
765  Parameters
766  ----------
767  name : string
768  Name of registry.
769  description: `str`
770  Description of registry (for log messages)
771  path : string
772  Path for registry.
773  policy : string
774  Policy that contains the registry name, used if path is None.
775  policyKey : string
776  Key in policy for registry path.
777  storage : Storage subclass
778  Repository Storage to look in.
779  searchParents : bool, optional
780  True if the search for a registry should follow any Butler v1
781  _parent symlinks.
782  posixIfNoSql : bool, optional
783  If an sqlite registry is not found, will create a posix registry if
784  this is True.
785 
786  Returns
787  -------
788  lsst.daf.persistence.Registry
789  Registry object
790  """
791  if path is None and policyKey in policy:
792  path = dafPersist.LogicalLocation(policy[policyKey]).locString()
793  if os.path.isabs(path):
794  raise RuntimeError("Policy should not indicate an absolute path for registry.")
795  if not storage.exists(path):
796  newPath = storage.instanceSearch(path)
797 
798  newPath = newPath[0] if newPath is not None and len(newPath) else None
799  if newPath is None:
800  self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
801  path)
802  path = newPath
803  else:
804  self.log.warn("Unable to locate registry at policy path: %s", path)
805  path = None
806 
807  # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
808  # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
809  # root from path. Currently only works with PosixStorage
810  try:
811  root = storage.root
812  if path and (path.startswith(root)):
813  path = path[len(root + '/'):]
814  except AttributeError:
815  pass
816 
817  # determine if there is an sqlite registry and if not, try the posix registry.
818  registry = None
819 
820  def search(filename, description):
821  """Search for file in storage
822 
823  Parameters
824  ----------
825  filename : `str`
826  Filename to search for
827  description : `str`
828  Description of file, for error message.
829 
830  Returns
831  -------
832  path : `str` or `None`
833  Path to file, or None
834  """
835  result = storage.instanceSearch(filename)
836  if result:
837  return result[0]
838  self.log.debug("Unable to locate %s: %s", description, filename)
839  return None
840 
841  # Search for a suitable registry database
842  if path is None:
843  path = search("%s.pgsql" % name, "%s in root" % description)
844  if path is None:
845  path = search("%s.sqlite3" % name, "%s in root" % description)
846  if path is None:
847  path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
848 
849  if path is not None:
850  if not storage.exists(path):
851  newPath = storage.instanceSearch(path)
852  newPath = newPath[0] if newPath is not None and len(newPath) else None
853  if newPath is not None:
854  path = newPath
855  localFileObj = storage.getLocalFile(path)
856  self.log.info("Loading %s registry from %s", description, localFileObj.name)
857  registry = dafPersist.Registry.create(localFileObj.name)
858  localFileObj.close()
859  elif not registry and posixIfNoSql:
860  try:
861  self.log.info("Loading Posix %s registry from %s", description, storage.root)
862  registry = dafPersist.PosixRegistry(storage.root)
863  except:
864  registry = None
865 
866  return registry
867 
868  def _transformId(self, dataId):
869  """Generate a standard ID dict from a camera-specific ID dict.
870 
871  Canonical keys include:
872  - amp: amplifier name
873  - ccd: CCD name (in LSST this is a combination of raft and sensor)
874  The default implementation returns a copy of its input.
875 
876  @param dataId[in] (dict) Dataset identifier; this must not be modified
877  @return (dict) Transformed dataset identifier"""
878 
879  return dataId.copy()
880 
881  def _mapActualToPath(self, template, actualId):
882  """Convert a template path to an actual path, using the actual data
883  identifier. This implementation is usually sufficient but can be
884  overridden by the subclass.
885  @param template (string) Template path
886  @param actualId (dict) Dataset identifier
887  @return (string) Pathname"""
888 
889  try:
890  transformedId = self._transformId(actualId)
891  return template % transformedId
892  except Exception as e:
893  raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
894 
895  @staticmethod
896  def getShortCcdName(ccdName):
897  """Convert a CCD name to a form useful as a filename
898 
899  The default implementation converts spaces to underscores.
900  """
901  return ccdName.replace(" ", "_")
902 
903  def _extractDetectorName(self, dataId):
904  """Extract the detector (CCD) name from the dataset identifier.
905 
906  The name in question is the detector name used by lsst.afw.cameraGeom.
907 
908  @param dataId (dict) Dataset identifier
909  @return (string) Detector name
910  """
911  raise NotImplementedError("No _extractDetectorName() function specified")
912 
913  def _extractAmpId(self, dataId):
914  """Extract the amplifier identifer from a dataset identifier.
915 
916  @warning this is deprecated; DO NOT USE IT
917 
918  amplifier identifier has two parts: the detector name for the CCD
919  containing the amplifier and index of the amplifier in the detector.
920  @param dataId (dict) Dataset identifer
921  @return (tuple) Amplifier identifier"""
922 
923  trDataId = self._transformId(dataId)
924  return (trDataId["ccd"], int(trDataId['amp']))
925 
926  def _setAmpDetector(self, item, dataId, trimmed=True):
927  """Set the detector object in an Exposure for an amplifier.
928  Defects are also added to the Exposure based on the detector object.
929  @param[in,out] item (lsst.afw.image.Exposure)
930  @param dataId (dict) Dataset identifier
931  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
932 
933  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
934 
935  def _setCcdDetector(self, item, dataId, trimmed=True):
936  """Set the detector object in an Exposure for a CCD.
937  @param[in,out] item (lsst.afw.image.Exposure)
938  @param dataId (dict) Dataset identifier
939  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
940 
941  if item.getDetector() is not None:
942  return
943 
944  detectorName = self._extractDetectorName(dataId)
945  detector = self.camera[detectorName]
946  item.setDetector(detector)
947 
948  def _setFilter(self, mapping, item, dataId):
949  """Set the filter object in an Exposure. If the Exposure had a FILTER
950  keyword, this was already processed during load. But if it didn't,
951  use the filter from the registry.
952  @param mapping (lsst.obs.base.Mapping)
953  @param[in,out] item (lsst.afw.image.Exposure)
954  @param dataId (dict) Dataset identifier"""
955 
956  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
957  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
958  return
959 
960  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
961  return
962 
963  actualId = mapping.need(['filter'], dataId)
964  filterName = actualId['filter']
965  if self.filters is not None and filterName in self.filters:
966  filterName = self.filters[filterName]
967  item.setFilter(afwImage.Filter(filterName))
968 
969  # Default standardization function for exposures
970  def _standardizeExposure(self, mapping, item, dataId, filter=True,
971  trimmed=True, setVisitInfo=True):
972  """Default standardization function for images.
973 
974  This sets the Detector from the camera geometry
975  and optionally set the Fiter. In both cases this saves
976  having to persist some data in each exposure (or image).
977 
978  @param mapping (lsst.obs.base.Mapping)
979  @param[in,out] item image-like object; any of lsst.afw.image.Exposure,
980  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
981  or lsst.afw.image.MaskedImage
982  @param dataId (dict) Dataset identifier
983  @param filter (bool) Set filter? Ignored if item is already an exposure
984  @param trimmed (bool) Should detector be marked as trimmed?
985  @param setVisitInfo (bool) Should Exposure have its VisitInfo filled out from the metadata?
986  @return (lsst.afw.image.Exposure) the standardized Exposure"""
987  try:
988  item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
989  except Exception as e:
990  self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
991  raise
992 
993  if mapping.level.lower() == "amp":
994  self._setAmpDetector(item, dataId, trimmed)
995  elif mapping.level.lower() == "ccd":
996  self._setCcdDetector(item, dataId, trimmed)
997 
998  if filter:
999  self._setFilter(mapping, item, dataId)
1000 
1001  return item
1002 
1003  def _defectLookup(self, dataId):
1004  """Find the defects for a given CCD.
1005  @param dataId (dict) Dataset identifier
1006  @return (string) path to the defects file or None if not available"""
1007  if self.defectRegistry is None:
1008  return None
1009  if self.registry is None:
1010  raise RuntimeError("No registry for defect lookup")
1011 
1012  ccdKey, ccdVal = self._getCcdKeyVal(dataId)
1013 
1014  dataIdForLookup = {'visit': dataId['visit']}
1015  # .lookup will fail in a posix registry because there is no template to provide.
1016  rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
1017  if len(rows) == 0:
1018  return None
1019  assert len(rows) == 1
1020  taiObs = rows[0][0]
1021 
1022  # Lookup the defects for this CCD serial number that are valid at the exposure midpoint.
1023  rows = self.defectRegistry.executeQuery(("path",), ("defect",),
1024  [(ccdKey, "?")],
1025  ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
1026  (ccdVal, taiObs))
1027  if not rows or len(rows) == 0:
1028  return None
1029  if len(rows) == 1:
1030  return os.path.join(self.defectPath, rows[0][0])
1031  else:
1032  raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
1033  (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
1034 
1035  def _makeCamera(self, policy, repositoryDir):
1036  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry
1037 
1038  Also set self.cameraDataLocation, if relevant (else it can be left None).
1039 
1040  This implementation assumes that policy contains an entry "camera" that points to the
1041  subdirectory in this package of camera data; specifically, that subdirectory must contain:
1042  - a file named `camera.py` that contains persisted camera config
1043  - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath
1044 
1045  @param policy (daf_persistence.Policy, or pexPolicy.Policy (only for backward compatibility))
1046  Policy with per-camera defaults already merged
1047  @param repositoryDir (string) Policy repository for the subclassing
1048  module (obtained with getRepositoryPath() on the
1049  per-camera default dictionary)
1050  """
1051  if isinstance(policy, pexPolicy.Policy):
1052  policy = dafPersist.Policy(pexPolicy=policy)
1053  if 'camera' not in policy:
1054  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1055  cameraDataSubdir = policy['camera']
1056  self.cameraDataLocation = os.path.normpath(
1057  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1058  cameraConfig = afwCameraGeom.CameraConfig()
1059  cameraConfig.load(self.cameraDataLocation)
1060  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1061  return afwCameraGeom.makeCameraFromPath(
1062  cameraConfig=cameraConfig,
1063  ampInfoPath=ampInfoPath,
1064  shortNameFunc=self.getShortCcdName,
1065  pupilFactoryClass=self.PupilFactoryClass
1066  )
1067 
1068  def getRegistry(self):
1069  """Get the registry used by this mapper.
1070 
1071  Returns
1072  -------
1073  Registry or None
1074  The registry used by this mapper for this mapper's repository.
1075  """
1076  return self.registry
1077 
1078  def getImageCompressionSettings(self, datasetType, dataId):
1079  """Stuff image compression settings into a daf.base.PropertySet
1080 
1081  This goes into the ButlerLocation's "additionalData", which gets
1082  passed into the boost::persistence framework.
1083 
1084  Parameters
1085  ----------
1086  datasetType : `str`
1087  Type of dataset for which to get the image compression settings.
1088  dataId : `dict`
1089  Dataset identifier.
1090 
1091  Returns
1092  -------
1093  additionalData : `lsst.daf.base.PropertySet`
1094  Image compression settings.
1095  """
1096  mapping = self.mappings[datasetType]
1097  recipeName = mapping.recipe
1098  storageType = mapping.storage
1099  if storageType not in self._writeRecipes:
1100  return dafBase.PropertySet()
1101  if recipeName not in self._writeRecipes[storageType]:
1102  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1103  (datasetType, storageType, recipeName))
1104  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1105  seed = hash(tuple(dataId.items())) % 2**31
1106  for plane in ("image", "mask", "variance"):
1107  if recipe.exists(plane + ".scaling.seed") and recipe.get(plane + ".scaling.seed") == 0:
1108  recipe.set(plane + ".scaling.seed", seed)
1109  return recipe
1110 
1111  def _initWriteRecipes(self):
1112  """Read the recipes for writing files
1113 
1114  These recipes are currently used for configuring FITS compression,
1115  but they could have wider uses for configuring different flavors
1116  of the storage types. A recipe is referred to by a symbolic name,
1117  which has associated settings. These settings are stored as a
1118  `PropertySet` so they can easily be passed down to the
1119  boost::persistence framework as the "additionalData" parameter.
1120 
1121  The list of recipes is written in YAML. A default recipe and
1122  some other convenient recipes are in obs_base/policy/writeRecipes.yaml
1123  and these may be overridden or supplemented by the individual obs_*
1124  packages' own policy/writeRecipes.yaml files.
1125 
1126  Recipes are grouped by the storage type. Currently, only the
1127  ``FitsStorage`` storage type uses recipes, which uses it to
1128  configure FITS image compression.
1129 
1130  Each ``FitsStorage`` recipe for FITS compression should define
1131  "image", "mask" and "variance" entries, each of which may contain
1132  "compression" and "scaling" entries. Defaults will be provided for
1133  any missing elements under "compression" and "scaling".
1134 
1135  The allowed entries under "compression" are:
1136 
1137  * algorithm (string): compression algorithm to use
1138  * rows (int): number of rows per tile (0 = entire dimension)
1139  * columns (int): number of columns per tile (0 = entire dimension)
1140  * quantizeLevel (float): cfitsio quantization level
1141 
1142  The allowed entries under "scaling" are:
1143 
1144  * algorithm (string): scaling algorithm to use
1145  * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
1146  * fuzz (bool): fuzz the values when quantising floating-point values?
1147  * seed (long): seed for random number generator when fuzzing
1148  * maskPlanes (list of string): mask planes to ignore when doing statistics
1149  * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
1150  * quantizePad: number of stdev to allow on the low side (for STDEV_POSITIVE/NEGATIVE)
1151  * bscale: manually specified BSCALE (for MANUAL scaling)
1152  * bzero: manually specified BSCALE (for MANUAL scaling)
1153 
1154  A very simple example YAML recipe:
1155 
1156  FitsStorage:
1157  default:
1158  image: &default
1159  compression:
1160  algorithm: GZIP_SHUFFLE
1161  mask: *default
1162  variance: *default
1163  """
1164  recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
1165  recipes = dafPersist.Policy(recipesFile)
1166  supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
1167  if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
1168  supplements = dafPersist.Policy(supplementsFile)
1169  # Don't allow overrides, only supplements
1170  intersection = set(recipes.names()).intersection(set(supplements.names()))
1171  if intersection:
1172  raise RuntimeError("Recipes provided in %s may not override those in %s: %s" %
1173  (supplementsFile, recipesFile, intersection))
1174  recipes.update(overrides)
1175 
1176  self._writeRecipes = {}
1177  validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
1178  for storageType in recipes.names(True):
1179  if "default" not in recipes[storageType]:
1180  raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
1181  (storageType, recipesFile))
1182  self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1183 
1184 
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    @param[in] image Image-like object (lsst.afw.image.DecoratedImage, Image, MaskedImage or Exposure)
    @return (lsst.afw.image.Exposure) Exposure containing input image
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        # DecoratedImage carries metadata; use it for WCS and keep it on
        # the resulting Exposure.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            exposure.setWcs(afwImage.makeWcs(metadata, True))
        except pexExcept.InvalidParameterError as e:
            # raised on failure to create a wcs (and possibly others)
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.warn("wcs set to None; insufficient information found in metadata to create a valid wcs: "
                        "%s", e.args[0])
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Already an Exposure; reuse it and its metadata as-is.
        exposure = image
        metadata = exposure.getMetadata()
    else:
        # Plain Image: wrap it; no metadata available.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # Fill out the VisitInfo if it is missing and we have the pieces to do so.
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None and metadata is not None:
        if mapper is None:
            if not logger:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.warn("I can only set the VisitInfo if you provide a mapper")
        else:
            visitInfo = mapper.makeRawVisitInfo(md=metadata,
                                                exposureId=mapper._computeCcdExposureId(dataId))
            exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
1235 
1236 
def validateRecipeFitsStorage(recipes):
    # FIX: the ``def`` line was missing from this listing (line-number gap in
    # the rendering); restored from the documented signature so the module is
    # importable again.
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the default
    # value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # Section absent entirely: fill with schema defaults.
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce supplied values to the schema's type; fall back to the default.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
Exposure ID and number of bits used.
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _getCcdKeyVal(self, dataId)
Utility functions.