lsst.obs.base  14.0-22-gc48c03f+2
cameraMapper.py
Go to the documentation of this file.
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 from builtins import str
24 import copy
25 import os
26 import pyfits # required by _makeDefectsDict until defects are written as AFW tables
27 import re
28 import weakref
29 import lsst.daf.persistence as dafPersist
30 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
31 import lsst.daf.base as dafBase
32 import lsst.afw.geom as afwGeom
33 import lsst.afw.image as afwImage
34 import lsst.afw.table as afwTable
35 import lsst.afw.cameraGeom as afwCameraGeom
36 import lsst.log as lsstLog
37 import lsst.pex.policy as pexPolicy
38 import lsst.pex.exceptions as pexExcept
39 from .exposureIdInfo import ExposureIdInfo
40 from .makeRawVisitInfo import MakeRawVisitInfo
41 from lsst.utils import getPackageDir
42 
43 """This module defines the CameraMapper base class."""
44 
45 
46 class CameraMapper(dafPersist.Mapper):
47 
48  """CameraMapper is a base class for mappers that handle images from a
49  camera and products derived from them. This provides an abstraction layer
50  between the data on disk and the code.
51 
52  Public methods: keys, queryMetadata, getDatasetTypes, map,
53  canStandardize, standardize
54 
55  Mappers for specific data sources (e.g., CFHT Megacam, LSST
56  simulations, etc.) should inherit this class.
57 
58  The CameraMapper manages datasets within a "root" directory. Note that
59  writing to a dataset present in the input root will hide the existing
60  dataset but not overwrite it. See #2160 for design discussion.
61 
62  A camera is assumed to consist of one or more rafts, each composed of
63  multiple CCDs. Each CCD is in turn composed of one or more amplifiers
64  (amps). A camera is also assumed to have a camera geometry description
65  (CameraGeom object) as a policy file, a filter description (Filter class
66  static configuration) as another policy file, and an optional defects
67  description directory.
68 
69  Information from the camera geometry and defects are inserted into all
70  Exposure objects returned.
71 
72  The mapper uses one or two registries to retrieve metadata about the
73  images. The first is a registry of all raw exposures. This must contain
74  the time of the observation. One or more tables (or the equivalent)
75  within the registry are used to look up data identifier components that
76  are not specified by the user (e.g. filter) and to return results for
77  metadata queries. The second is an optional registry of all calibration
78  data. This should contain validity start and end entries for each
79  calibration dataset in the same timescale as the observation time.
80 
81  Subclasses will typically set MakeRawVisitInfoClass:
82 
83  MakeRawVisitInfoClass: a class variable that points to a subclass of
84  MakeRawVisitInfo, a functor that creates an
85  lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
86 
87  Subclasses must provide the following methods:
88 
89  _extractDetectorName(self, dataId): returns the detector name for a CCD
90  (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
91  a dataset identifier referring to that CCD or a subcomponent of it.
92 
93  _computeCcdExposureId(self, dataId): see below
94 
95  _computeCoaddExposureId(self, dataId, singleFilter): see below
96 
97  Subclasses may also need to override the following methods:
98 
99  _transformId(self, dataId): transformation of a data identifier
100  from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"),
101  including making suitable for path expansion (e.g. removing commas).
102  The default implementation does nothing. Note that this
103  method should not modify its input parameter.
104 
105  getShortCcdName(self, ccdName): a static method that returns a shortened name
106  suitable for use as a filename. The default version converts spaces to underscores.
107 
108  _getCcdKeyVal(self, dataId): return a CCD key and value
109  by which to look up defects in the defects registry.
110  The default value returns ("ccd", detector name)
111 
112  _mapActualToPath(self, template, actualId): convert a template path to an
113  actual path, using the actual dataset identifier.
114 
115  The mapper's behaviors are largely specified by the policy file.
116  See the MapperDictionary.paf for descriptions of the available items.
117 
118  The 'exposures', 'calibrations', and 'datasets' subpolicies configure
119  mappings (see Mappings class).
120 
121  Common default mappings for all subclasses can be specified in the
122  "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides
123  a simple way to add a product to all camera mappers.
124 
125  Functions to map (provide a path to the data given a dataset
126  identifier dictionary) and standardize (convert data into some standard
127  format or type) may be provided in the subclass as "map_{dataset type}"
128  and "std_{dataset type}", respectively.
129 
130  If non-Exposure datasets cannot be retrieved using standard
131  daf_persistence methods alone, a "bypass_{dataset type}" function may be
132  provided in the subclass to return the dataset instead of using the
133  "datasets" subpolicy.
134 
135  Implementations of map_camera and bypass_camera that should typically be
136  sufficient are provided in this base class.
137 
138  @todo
139  * Handle defects the same way as all other calibration products, using the calibration registry
140  * Instead of auto-loading the camera at construction time, load it from the calibration registry
141  * Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention
142  of pyfits from this package.
143  """
144  packageName = None
145 
146  # a class or subclass of MakeRawVisitInfo, a functor that makes an
147  # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
148  MakeRawVisitInfoClass = MakeRawVisitInfo
149 
150  # a class or subclass of PupilFactory
151  PupilFactoryClass = afwCameraGeom.PupilFactory
152 
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy,
            Can also be pexPolicy.Policy, only for backward compatibility.
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        # Establish self.root from the explicit argument first, then from the
        # repository configuration; may be overwritten again below once the
        # root has been normalized through LogicalLocation.
        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None
        # Accept an old-style pexPolicy.Policy for backward compatibility by
        # converting it to a daf_persistence Policy.
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        # Per-repository policy overrides take precedence over the merged
        # per-camera defaults.
        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Merge in the obs_base dictionary defaults (lowest precedence).
        defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
                                                                "MapperDictionary.paf",
                                                                "policy")
        dictPolicy = dafPersist.Policy(defaultPolicyFile)
        policy.merge(dictPolicy)

        # Levels: sets of data-ID keys dropped at each level of the hierarchy.
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        # NOTE(review): the `provided` constructor argument is not forwarded
        # here (a literal None is passed instead) -- confirm whether this is
        # intentional before relying on `provided`.
        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
        # camera package, which is on the local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # Filter translation table
        self.filters = None

        # Skytile policy
        self.skypolicy = policy['skytiles']

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')
295 
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there are
        methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g., reading
        the header of an image, retrieving only the size of a catalog.

        @param policy (Policy) Policy with per-camera defaults already merged
        @param rootStorage (Storage subclass instance) Interface to persisted repository data
        @param calibStorage (Storage subclass instance) Interface to persisted calib repository data
        @param provided (list of strings) Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))

        # Mappings: (policy section name, default sub-policy, Mapping class)
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    # Composite datasets are assembled from other datasets and
                    # get only a map_ method. The subPolicy=subPolicy default
                    # argument freezes the current loop value in the closure
                    # (avoids Python's late-binding of closed-over names).
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    # The default-argument bindings below (mapper=..., mapping=...)
                    # again freeze per-iteration values; weakref.proxy avoids a
                    # reference cycle between the mapper and its bound closures.
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            # Fall back to a whole-dataset bypass when no
                            # component-specific implementation is given.
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(location.getLocationsWithRoot()[0]))

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            # Derived components read from the FITS header.
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.makeWcs(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("filter",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Filter(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            # The detector comes from the in-memory camera, so
                            # the map_ implementation uses "Internal" storage.
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                       dafPersist.ButlerLocation(
                                           pythonType="lsst.afw.cameraGeom.CameraConfig",
                                           cppType="Config",
                                           storageName="Internal",
                                           locationList="ignored",
                                           dataId=dataId,
                                           mapper=self,
                                           storage=None,
                                       ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       self.camera[self._extractDetectorName(dataId)]
                                       )
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0], hdu=1)))

                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        # Catalog metadata lives in HDU 1, not the primary HDU.
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            # Map without the 'bbox' key, then attach the bbox
                            # parameters as additionalData for the reader.
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]),
                                                         hdu=1).get("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
502 
    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override

        @param dataId (dict) Data identifier with visit, ccd
        @raise NotImplementedError always, in this base class
        """
        raise NotImplementedError()
511 
    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override

        @param dataId (dict) Data identifier with tract and patch.
        @param singleFilter (bool) True means the desired ID is for a single-
                                   filter coadd, in which case dataId
                                   must contain filter.
        @raise NotImplementedError always, in this base class
        """
        raise NotImplementedError()
523 
    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if the
            object can't be found. If the input argument path contained an HDU
            indicator, the returned path will also contain the HDU indicator.
        """
        # Delegate entirely to this mapper's root storage.
        return self.rootStorage.search(path)
545 
546  def backup(self, datasetType, dataId):
547  """Rename any existing object with the given type and dataId.
548 
549  The CameraMapper implementation saves objects in a sequence of e.g.:
550  foo.fits
551  foo.fits~1
552  foo.fits~2
553  All of the backups will be placed in the output repo, however, and will
554  not be removed if they are found elsewhere in the _parent chain. This
555  means that the same file will be stored twice if the previous version was
556  found in an input repo.
557  """
558 
559  # Calling PosixStorage directly is not the long term solution in this
560  # function, this is work-in-progress on epic DM-6225. The plan is for
561  # parentSearch to be changed to 'search', and search only the storage
562  # associated with this mapper. All searching of parents will be handled
563  # by traversing the container of repositories in Butler.
564 
565  def firstElement(list):
566  """Get the first element in the list, or None if that can't be done.
567  """
568  return list[0] if list is not None and len(list) else None
569 
570  n = 0
571  newLocation = self.map(datasetType, dataId, write=True)
572  newPath = newLocation.getLocations()[0]
573  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
574  path = firstElement(path)
575  oldPaths = []
576  while path is not None:
577  n += 1
578  oldPaths.append((n, path))
579  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
580  path = firstElement(path)
581  for n, oldPath in reversed(oldPaths):
582  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
583 
584  def keys(self):
585  """Return supported keys.
586  @return (iterable) List of keys usable in a dataset identifier"""
587  return iter(self.keyDict.keys())
588 
589  def getKeys(self, datasetType, level):
590  """Return a dict of supported keys and their value types for a given dataset
591  type at a given level of the key hierarchy.
592 
593  @param datasetType (str) dataset type or None for all dataset types
594  @param level (str) level or None for all levels or '' for the default level for the camera
595  @return (dict) dict keys are strings usable in a dataset identifier; values are their value types"""
596 
597  # not sure if this is how we want to do this. what if None was intended?
598  if level == '':
599  level = self.getDefaultLevel()
600 
601  if datasetType is None:
602  keyDict = copy.copy(self.keyDict)
603  else:
604  keyDict = self.mappings[datasetType].keys()
605  if level is not None and level in self.levels:
606  keyDict = copy.copy(keyDict)
607  for l in self.levels[level]:
608  if l in keyDict:
609  del keyDict[l]
610  return keyDict
611 
    def getDefaultLevel(self):
        """Return the default level of the key hierarchy (from the policy's
        'defaultLevel' entry)."""
        return self.defaultLevel
614 
615  def getDefaultSubLevel(self, level):
616  if level in self.defaultSubLevels:
617  return self.defaultSubLevels[level]
618  return None
619 
620  @classmethod
621  def getCameraName(cls):
622  """Return the name of the camera that this CameraMapper is for."""
623  className = str(cls)
624  className = className[className.find('.'):-1]
625  m = re.search(r'(\w+)Mapper', className)
626  if m is None:
627  m = re.search(r"class '[\w.]*?(\w+)'", className)
628  name = m.group(1)
629  return name[:1].lower() + name[1:] if name else ''
630 
631  @classmethod
632  def getPackageName(cls):
633  """Return the name of the package containing this CameraMapper."""
634  if cls.packageName is None:
635  raise ValueError('class variable packageName must not be None')
636  return cls.packageName
637 
    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package"""
        # Calls the module-level lsst.utils.getPackageDir (imported at the top
        # of the file), not this classmethod of the same name.
        return getPackageDir(cls.getPackageName())
642 
643  def map_camera(self, dataId, write=False):
644  """Map a camera dataset."""
645  if self.camera is None:
646  raise RuntimeError("No camera dataset available.")
647  actualId = self._transformId(dataId)
648  return dafPersist.ButlerLocation(
649  pythonType="lsst.afw.cameraGeom.CameraConfig",
650  cppType="Config",
651  storageName="ConfigStorage",
652  locationList=self.cameraDataLocation or "ignored",
653  dataId=actualId,
654  mapper=self,
655  storage=self.rootStorage
656  )
657 
658  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
659  """Return the (preloaded) camera object.
660  """
661  if self.camera is None:
662  raise RuntimeError("No camera dataset available.")
663  return self.camera
664 
665  def map_defects(self, dataId, write=False):
666  """Map defects dataset.
667 
668  @return a very minimal ButlerLocation containing just the locationList field
669  (just enough information that bypass_defects can use it).
670  """
671  defectFitsPath = self._defectLookup(dataId=dataId)
672  if defectFitsPath is None:
673  raise RuntimeError("No defects available for dataId=%s" % (dataId,))
674 
675  return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
676  dataId, self,
677  storage=self.rootStorage)
678 
679  def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
680  """Return a defect based on the butler location returned by map_defects
681 
682  @param[in] butlerLocation: a ButlerLocation with locationList = path to defects FITS file
683  @param[in] dataId: the usual data ID; "ccd" must be set
684 
685  Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation
686  into an object, which is what we want for now, since that conversion is a bit tricky.
687  """
688  detectorName = self._extractDetectorName(dataId)
689  defectsFitsPath = butlerLocation.locationList[0]
690  with pyfits.open(defectsFitsPath) as hduList:
691  for hdu in hduList[1:]:
692  if hdu.header["name"] != detectorName:
693  continue
694 
695  defectList = []
696  for data in hdu.data:
697  bbox = afwGeom.Box2I(
698  afwGeom.Point2I(int(data['x0']), int(data['y0'])),
699  afwGeom.Extent2I(int(data['width']), int(data['height'])),
700  )
701  defectList.append(afwImage.DefectBase(bbox))
702  return defectList
703 
704  raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
705 
706  def map_expIdInfo(self, dataId, write=False):
707  return dafPersist.ButlerLocation(
708  pythonType="lsst.obs.base.ExposureIdInfo",
709  cppType=None,
710  storageName="Internal",
711  locationList="ignored",
712  dataId=dataId,
713  mapper=self,
714  storage=self.rootStorage
715  )
716 
717  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
718  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
719  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
720  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
721  return ExposureIdInfo(expId=expId, expBits=expBits)
722 
    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel

        bfKernel is a calibration product that is numpy array,
        unlike other calibration products that are all images;
        all calibration images are sent through _standardizeExposure
        due to CalibrationMapping, but we don't want that to happen to bfKernel
        """
        # Deliberately a no-op: return the item unchanged.
        return item
732 
    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead of an Image"""
        # Raw data is left untrimmed; VisitInfo is attached from the metadata.
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True)
737 
    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        # The sky policy is held in memory (returned by std_skypolicy), so
        # this location uses "Internal" storage with no real path.
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)
743 
    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        # `item` is ignored; the policy loaded at construction time is used.
        return self.skypolicy
747 
748 
753 
754  def _getCcdKeyVal(self, dataId):
755  """Return CCD key and value used to look a defect in the defect registry
756 
757  The default implementation simply returns ("ccd", full detector name)
758  """
759  return ("ccd", self._extractDetectorName(dataId))
760 
    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description: `str`
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry if
            this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        # If no explicit path was given, try to derive one from the policy.
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                # instanceSearch returns a (possibly empty) sequence of candidates.
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                # NOTE(review): this branch fires when storage.exists(path) is
                # True, yet it warns "Unable to locate" and discards the path;
                # the condition or the branch body looks inverted -- confirm
                # against daf_persistence Storage semantics before changing.
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
        # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            # storage has no .root attribute (non-Posix storage); leave path as-is.
            pass

        # determine if there is an sqlite registry and if not, try the posix registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database, in order of preference:
        # PostgreSQL dump, SQLite in root, SQLite in the current directory.
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            # Resolve the path through instanceSearch one more time if needed,
            # then load the SQL registry from a local copy of the file.
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            # Fall back to a filesystem-scanning registry; best-effort.
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            # NOTE(review): bare except silently swallows all errors (including
            # KeyboardInterrupt); should be `except Exception:` at minimum.
            except:
                registry = None

        return registry
868 
869  def _transformId(self, dataId):
870  """Generate a standard ID dict from a camera-specific ID dict.
871 
872  Canonical keys include:
873  - amp: amplifier name
874  - ccd: CCD name (in LSST this is a combination of raft and sensor)
875  The default implementation returns a copy of its input.
876 
877  @param dataId[in] (dict) Dataset identifier; this must not be modified
878  @return (dict) Transformed dataset identifier"""
879 
880  return dataId.copy()
881 
882  def _mapActualToPath(self, template, actualId):
883  """Convert a template path to an actual path, using the actual data
884  identifier. This implementation is usually sufficient but can be
885  overridden by the subclass.
886  @param template (string) Template path
887  @param actualId (dict) Dataset identifier
888  @return (string) Pathname"""
889 
890  try:
891  transformedId = self._transformId(actualId)
892  return template % transformedId
893  except Exception as e:
894  raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
895 
896  @staticmethod
897  def getShortCcdName(ccdName):
898  """Convert a CCD name to a form useful as a filename
899 
900  The default implementation converts spaces to underscores.
901  """
902  return ccdName.replace(" ", "_")
903 
904  def _extractDetectorName(self, dataId):
905  """Extract the detector (CCD) name from the dataset identifier.
906 
907  The name in question is the detector name used by lsst.afw.cameraGeom.
908 
909  @param dataId (dict) Dataset identifier
910  @return (string) Detector name
911  """
912  raise NotImplementedError("No _extractDetectorName() function specified")
913 
914  def _extractAmpId(self, dataId):
915  """Extract the amplifier identifer from a dataset identifier.
916 
917  @warning this is deprecated; DO NOT USE IT
918 
919  amplifier identifier has two parts: the detector name for the CCD
920  containing the amplifier and index of the amplifier in the detector.
921  @param dataId (dict) Dataset identifer
922  @return (tuple) Amplifier identifier"""
923 
924  trDataId = self._transformId(dataId)
925  return (trDataId["ccd"], int(trDataId['amp']))
926 
927  def _setAmpDetector(self, item, dataId, trimmed=True):
928  """Set the detector object in an Exposure for an amplifier.
929  Defects are also added to the Exposure based on the detector object.
930  @param[in,out] item (lsst.afw.image.Exposure)
931  @param dataId (dict) Dataset identifier
932  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
933 
934  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
935 
936  def _setCcdDetector(self, item, dataId, trimmed=True):
937  """Set the detector object in an Exposure for a CCD.
938  @param[in,out] item (lsst.afw.image.Exposure)
939  @param dataId (dict) Dataset identifier
940  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
941 
942  if item.getDetector() is not None:
943  return
944 
945  detectorName = self._extractDetectorName(dataId)
946  detector = self.camera[detectorName]
947  item.setDetector(detector)
948 
949  def _setFilter(self, mapping, item, dataId):
950  """Set the filter object in an Exposure. If the Exposure had a FILTER
951  keyword, this was already processed during load. But if it didn't,
952  use the filter from the registry.
953  @param mapping (lsst.obs.base.Mapping)
954  @param[in,out] item (lsst.afw.image.Exposure)
955  @param dataId (dict) Dataset identifier"""
956 
957  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
958  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
959  return
960 
961  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
962  return
963 
964  actualId = mapping.need(['filter'], dataId)
965  filterName = actualId['filter']
966  if self.filters is not None and filterName in self.filters:
967  filterName = self.filters[filterName]
968  item.setFilter(afwImage.Filter(filterName))
969 
970  # Default standardization function for exposures
971  def _standardizeExposure(self, mapping, item, dataId, filter=True,
972  trimmed=True, setVisitInfo=True):
973  """Default standardization function for images.
974 
975  This sets the Detector from the camera geometry
976  and optionally set the Fiter. In both cases this saves
977  having to persist some data in each exposure (or image).
978 
979  @param mapping (lsst.obs.base.Mapping)
980  @param[in,out] item image-like object; any of lsst.afw.image.Exposure,
981  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
982  or lsst.afw.image.MaskedImage
983  @param dataId (dict) Dataset identifier
984  @param filter (bool) Set filter? Ignored if item is already an exposure
985  @param trimmed (bool) Should detector be marked as trimmed?
986  @param setVisitInfo (bool) Should Exposure have its VisitInfo filled out from the metadata?
987  @return (lsst.afw.image.Exposure) the standardized Exposure"""
988  try:
989  item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
990  except Exception as e:
991  self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
992  raise
993 
994  if mapping.level.lower() == "amp":
995  self._setAmpDetector(item, dataId, trimmed)
996  elif mapping.level.lower() == "ccd":
997  self._setCcdDetector(item, dataId, trimmed)
998 
999  if filter:
1000  self._setFilter(mapping, item, dataId)
1001 
1002  return item
1003 
    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.

        @param dataId (dict) Dataset identifier; must contain "visit"
        @return (string) path to the defects file or None if not available
        @throws RuntimeError if there is no exposure registry, or if more than
            one defect file matches"""
        if self.defectRegistry is None:
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup will fail in a posix registry because there is no template to provide.
        # NOTE(review): ('taiObs') is a plain string, not a 1-tuple; Registry.lookup
        # apparently accepts that form -- confirm before "fixing" to ('taiObs',).
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        # A visit maps to exactly one observation time.
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Lookup the defects for this CCD serial number that are valid at the exposure midpoint.
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            # Defect paths are stored relative to the defect repository root.
            return os.path.join(self.defectPath, rows[0][0])
        else:
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
1035 
1036  def _makeCamera(self, policy, repositoryDir):
1037  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry
1038 
1039  Also set self.cameraDataLocation, if relevant (else it can be left None).
1040 
1041  This implementation assumes that policy contains an entry "camera" that points to the
1042  subdirectory in this package of camera data; specifically, that subdirectory must contain:
1043  - a file named `camera.py` that contains persisted camera config
1044  - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath
1045 
1046  @param policy (daf_persistence.Policy, or pexPolicy.Policy (only for backward compatibility))
1047  Policy with per-camera defaults already merged
1048  @param repositoryDir (string) Policy repository for the subclassing
1049  module (obtained with getRepositoryPath() on the
1050  per-camera default dictionary)
1051  """
1052  if isinstance(policy, pexPolicy.Policy):
1053  policy = dafPersist.Policy(pexPolicy=policy)
1054  if 'camera' not in policy:
1055  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1056  cameraDataSubdir = policy['camera']
1057  self.cameraDataLocation = os.path.normpath(
1058  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1059  cameraConfig = afwCameraGeom.CameraConfig()
1060  cameraConfig.load(self.cameraDataLocation)
1061  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1062  return afwCameraGeom.makeCameraFromPath(
1063  cameraConfig=cameraConfig,
1064  ampInfoPath=ampInfoPath,
1065  shortNameFunc=self.getShortCcdName,
1066  pupilFactoryClass=self.PupilFactoryClass
1067  )
1068 
1069  def getRegistry(self):
1070  """Get the registry used by this mapper.
1071 
1072  Returns
1073  -------
1074  Registry or None
1075  The registry used by this mapper for this mapper's repository.
1076  """
1077  return self.registry
1078 
1079  def getImageCompressionSettings(self, datasetType, dataId):
1080  """Stuff image compression settings into a daf.base.PropertySet
1081 
1082  This goes into the ButlerLocation's "additionalData", which gets
1083  passed into the boost::persistence framework.
1084 
1085  Parameters
1086  ----------
1087  datasetType : `str`
1088  Type of dataset for which to get the image compression settings.
1089  dataId : `dict`
1090  Dataset identifier.
1091 
1092  Returns
1093  -------
1094  additionalData : `lsst.daf.base.PropertySet`
1095  Image compression settings.
1096  """
1097  mapping = self.mappings[datasetType]
1098  recipeName = mapping.recipe
1099  storageType = mapping.storage
1100  if storageType not in self._writeRecipes:
1101  return dafBase.PropertySet()
1102  if recipeName not in self._writeRecipes[storageType]:
1103  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1104  (datasetType, storageType, recipeName))
1105  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1106  seed = hash(tuple(dataId.items())) % 2**31
1107  for plane in ("image", "mask", "variance"):
1108  if recipe.exists(plane + ".scaling.seed") and recipe.get(plane + ".scaling.seed") == 0:
1109  recipe.set(plane + ".scaling.seed", seed)
1110  return recipe
1111 
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which uses it to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BSCALE (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        # Base recipes shipped with obs_base.
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        # Optional per-camera supplements from the concrete obs_* package.
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        # Maps storage type to its validator; extend when new storage types gain recipes.
        validationMenu = {'FitsStorage': validateRecipeFitsStorage,}
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                # NOTE(review): this compares recipe names under `entry` with
                # supplements' *top-level* names; it looks like it was meant to
                # be supplements[entry].names() -- confirm against the Policy
                # API before changing.
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        # Validate every storage type's recipes and cache the results.
        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1185 
1186 
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    @param[in] image Image-like object (lsst.afw.image.DecoratedImage, Image, MaskedImage or Exposure)
    @param[in] dataId (dict) Dataset identifier, passed to the mapper to compute the exposure ID
    @param[in] mapper (CameraMapper) Mapper used to build the VisitInfo; without it none is set
    @param[in] logger (lsst.log.Log) Logger for warnings; a default is created when needed
    @param[in] setVisitInfo (bool) Attach a VisitInfo built from the metadata, if possible
    @return (lsst.afw.image.Exposure) Exposure containing input image
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        # DecoratedImage carries metadata from which a WCS may be constructed.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            wcs = afwImage.makeWcs(metadata, True)
            exposure.setWcs(wcs)
        except pexExcept.InvalidParameterError as e:
            # raised on failure to create a wcs (and possibly others)
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.warn("wcs set to None; insufficient information found in metadata to create a valid wcs: "
                        "%s", e.args[0])

        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Exposure: already has WCS and metadata; use as-is.
        exposure = image
        metadata = exposure.getMetadata()
    else:
        # Plain Image: no metadata, so no WCS or VisitInfo can be derived.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
    #
    # set VisitInfo if we can
    #
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
1237 
1238 
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the default
    # value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                # Keys are flattened to "<plane>.<settings>.<key>" in the PropertySet.
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # Section absent entirely: fill in every default.
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce supplied values to the schema's type; fall back to the default.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
Exposure ID and number of bits used.
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _getCcdKeyVal(self, dataId)
Utility functions.