lsst.obs.base  16.0-2-gfe708a8+2
cameraMapper.py
Go to the documentation of this file.
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 import copy
24 import os
25 import pyfits # required by _makeDefectsDict until defects are written as AFW tables
26 import re
27 import weakref
28 import lsst.daf.persistence as dafPersist
29 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
30 import lsst.daf.base as dafBase
31 import lsst.afw.geom as afwGeom
32 import lsst.afw.image as afwImage
33 import lsst.afw.table as afwTable
34 from lsst.afw.fits import readMetadata
35 import lsst.afw.cameraGeom as afwCameraGeom
36 import lsst.log as lsstLog
37 import lsst.pex.policy as pexPolicy
38 import lsst.pex.exceptions as pexExcept
39 from .exposureIdInfo import ExposureIdInfo
40 from .makeRawVisitInfo import MakeRawVisitInfo
41 from lsst.utils import getPackageDir
42 
43 __all__ = ["CameraMapper", "exposureFromImage"]
44 
45 
class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them.  This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it.  See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs.  Each CCD is in turn composed of one or more amplifiers
    (amps).  A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file, a filter description (Filter class
    static configuration) as another policy file, and an optional defects
    description directory.

    Information from the camera geometry and defects are inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images.  The first is a registry of all raw exposures.  This must contain
    the time of the observation.  One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries.  The second is an optional registry of all calibration
    data.  This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making suitable for path expansion
    (e.g. removing commas).  The default implementation does nothing.
    Note that this method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename.  The default version converts spaces
    to underscores.

    _getCcdKeyVal(self, dataId): return a CCD key and value
    by which to look up defects in the defects registry.
    The default value returns ("ccd", detector name)

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    TODO:

    - Handle defects the same way as all other calibration products, using
      the calibration registry
    - Instead of auto-loading the camera at construction time, load it from
      the calibration registry
    - Rewrite defects as AFW tables so we don't need pyfits to unpersist
      them; then remove all mention of pyfits from this package.
    """
    # Name of the obs package providing this mapper.  Subclasses must set
    # this to a non-None value; __init__ and getPackageName() raise
    # ValueError otherwise.
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory
155 
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy,
            Can also be pexPolicy.Policy, only for backward compatibility.
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.

        Raises
        ------
        ValueError
            If the class variable ``packageName`` was not set by the subclass.
        RuntimeError
            If the policy demands a calibration registry but no calibration
            repository could be located.
        """

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        # Precedence for self.root: explicit argument, then repositoryCfg.
        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None
        # Backward compatibility: accept an old-style pex Policy and convert.
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        # Per-repository policy overrides take precedence over the merged
        # per-camera defaults.
        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Fill in any keys missing from the policy with obs_base defaults.
        defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
                                                                "MapperDictionary.paf",
                                                                "policy")
        dictPolicy = dafPersist.Policy(defaultPolicyFile)
        policy.merge(dictPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        # NOTE(review): the ``provided`` constructor argument is not forwarded
        # here -- _initMappings always receives provided=None. Confirm whether
        # this is intentional.
        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
        # camera package, which is on the local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')
295 
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g., reading
        the header of an image, retrieving only the size of a catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))

        # Mappings: (policy section name, default policy, Mapping subclass)
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                # Expose this section's mappings as e.g. self.exposures, and
                # also collect all of them in self.mappings.
                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    # Composite datasets are assembled from other datasets;
                    # they only get a map_ method, which returns a
                    # ButlerComposite rather than a ButlerLocation.
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        # subPolicy=subPolicy binds the current loop value as
                        # a default argument (avoids late-binding closures).
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    # Calibration mappings need both registries and both roots.
                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    # Auto-generate map_/query_/std_ methods unless the
                    # subclass already defines them.  Default arguments
                    # capture the per-iteration mapping; weakref.proxy avoids
                    # a reference cycle between the mapper and its closures.
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        # Derived dataset names look like <datasetType>_<suffix>;
                        # explicit subclass definitions always win.
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(location.getLocationsWithRoot()[0]))

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("filter",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
                            # "detector" is served from the in-memory camera,
                            # not from disk, hence Internal storage.
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0], hdu=1)))

                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]),
                                                hdu=1).getScalar("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
504 
505  def _computeCcdExposureId(self, dataId):
506  """Compute the 64-bit (long) identifier for a CCD exposure.
507 
508  Subclasses must override
509 
510  Parameters
511  ----------
512  dataId : `dict`
513  Data identifier with visit, ccd.
514  """
515  raise NotImplementedError()
516 
517  def _computeCoaddExposureId(self, dataId, singleFilter):
518  """Compute the 64-bit (long) identifier for a coadd.
519 
520  Subclasses must override
521 
522  Parameters
523  ----------
524  dataId : `dict`
525  Data identifier with tract and patch.
526  singleFilter : `bool`
527  True means the desired ID is for a single-filter coadd, in which
528  case dataIdmust contain filter.
529  """
530  raise NotImplementedError()
531 
    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if the
            object can't be found. If the input argument path contained an HDU
            indicator, the returned path will also contain the HDU indicator.
        """
        # Delegate to this repository's own storage; parent-repository
        # searching is handled elsewhere (see backup()).
        return self.rootStorage.search(path)
553 
554  def backup(self, datasetType, dataId):
555  """Rename any existing object with the given type and dataId.
556 
557  The CameraMapper implementation saves objects in a sequence of e.g.:
558 
559  - foo.fits
560  - foo.fits~1
561  - foo.fits~2
562 
563  All of the backups will be placed in the output repo, however, and will
564  not be removed if they are found elsewhere in the _parent chain. This
565  means that the same file will be stored twice if the previous version was
566  found in an input repo.
567  """
568 
569  # Calling PosixStorage directly is not the long term solution in this
570  # function, this is work-in-progress on epic DM-6225. The plan is for
571  # parentSearch to be changed to 'search', and search only the storage
572  # associated with this mapper. All searching of parents will be handled
573  # by traversing the container of repositories in Butler.
574 
575  def firstElement(list):
576  """Get the first element in the list, or None if that can't be done.
577  """
578  return list[0] if list is not None and len(list) else None
579 
580  n = 0
581  newLocation = self.map(datasetType, dataId, write=True)
582  newPath = newLocation.getLocations()[0]
583  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
584  path = firstElement(path)
585  oldPaths = []
586  while path is not None:
587  n += 1
588  oldPaths.append((n, path))
589  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
590  path = firstElement(path)
591  for n, oldPath in reversed(oldPaths):
592  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
593 
594  def keys(self):
595  """Return supported keys.
596 
597  Returns
598  -------
599  iterable
600  List of keys usable in a dataset identifier
601  """
602  return iter(self.keyDict.keys())
603 
604  def getKeys(self, datasetType, level):
605  """Return a dict of supported keys and their value types for a given dataset
606  type at a given level of the key hierarchy.
607 
608  Parameters
609  ----------
610  datasetType : `str`
611  Dataset type or None for all dataset types.
612  level : `str` or None
613  Level or None for all levels or '' for the default level for the
614  camera.
615 
616  Returns
617  -------
618  `dict`
619  Keys are strings usable in a dataset identifier, values are their
620  value types.
621  """
622 
623  # not sure if this is how we want to do this. what if None was intended?
624  if level == '':
625  level = self.getDefaultLevel()
626 
627  if datasetType is None:
628  keyDict = copy.copy(self.keyDict)
629  else:
630  keyDict = self.mappings[datasetType].keys()
631  if level is not None and level in self.levels:
632  keyDict = copy.copy(keyDict)
633  for l in self.levels[level]:
634  if l in keyDict:
635  del keyDict[l]
636  return keyDict
637 
    def getDefaultLevel(self):
        """Return the default level of the key hierarchy for this camera.

        Returns
        -------
        `str`
            The ``defaultLevel`` value read from the mapper policy at
            construction time.
        """
        return self.defaultLevel
640 
641  def getDefaultSubLevel(self, level):
642  if level in self.defaultSubLevels:
643  return self.defaultSubLevels[level]
644  return None
645 
646  @classmethod
647  def getCameraName(cls):
648  """Return the name of the camera that this CameraMapper is for."""
649  className = str(cls)
650  className = className[className.find('.'):-1]
651  m = re.search(r'(\w+)Mapper', className)
652  if m is None:
653  m = re.search(r"class '[\w.]*?(\w+)'", className)
654  name = m.group(1)
655  return name[:1].lower() + name[1:] if name else ''
656 
657  @classmethod
658  def getPackageName(cls):
659  """Return the name of the package containing this CameraMapper."""
660  if cls.packageName is None:
661  raise ValueError('class variable packageName must not be None')
662  return cls.packageName
663 
664  @classmethod
665  def getPackageDir(cls):
666  """Return the base directory of this package"""
667  return getPackageDir(cls.getPackageName())
668 
669  def map_camera(self, dataId, write=False):
670  """Map a camera dataset."""
671  if self.camera is None:
672  raise RuntimeError("No camera dataset available.")
673  actualId = self._transformId(dataId)
674  return dafPersist.ButlerLocation(
675  pythonType="lsst.afw.cameraGeom.CameraConfig",
676  cppType="Config",
677  storageName="ConfigStorage",
678  locationList=self.cameraDataLocation or "ignored",
679  dataId=actualId,
680  mapper=self,
681  storage=self.rootStorage
682  )
683 
684  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
685  """Return the (preloaded) camera object.
686  """
687  if self.camera is None:
688  raise RuntimeError("No camera dataset available.")
689  return self.camera
690 
    def map_defects(self, dataId, write=False):
        """Map defects dataset.

        Returns
        -------
        `lsst.daf.persistence.ButlerLocation`
            Minimal ButlerLocation containing just the locationList field
            (just enough information that bypass_defects can use it).

        Raises
        ------
        RuntimeError
            If the defect registry has no entry for ``dataId``.
        """
        defectFitsPath = self._defectLookup(dataId=dataId)
        if defectFitsPath is None:
            raise RuntimeError("No defects available for dataId=%s" % (dataId,))

        # pythonType/cppType/storageName are None: bypass_defects reads the
        # file itself, so only locationList matters here.
        return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
                                         dataId, self,
                                         storage=self.rootStorage)
707 
    def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
        """Return a defect based on the butler location returned by map_defects

        Parameters
        ----------
        butlerLocation : `lsst.daf.persistence.ButlerLocation`
            locationList = path to defects FITS file
        dataId : `dict`
            Butler data ID; "ccd" must be set.

        Note: the name "bypass_XXX" means the butler makes no attempt to
        convert the ButlerLocation into an object, which is what we want for
        now, since that conversion is a bit tricky.
        """
        detectorName = self._extractDetectorName(dataId)
        defectsFitsPath = butlerLocation.locationList[0]
        with pyfits.open(defectsFitsPath) as hduList:
            # Skip the primary HDU; each extension carries defects for one
            # detector, identified by its "name" header card.
            for hdu in hduList[1:]:
                if hdu.header["name"] != detectorName:
                    continue

                defectList = []
                for data in hdu.data:
                    # Each table row is (x0, y0, width, height) describing one
                    # defect bounding box in pixel coordinates.
                    bbox = afwGeom.Box2I(
                        afwGeom.Point2I(int(data['x0']), int(data['y0'])),
                        afwGeom.Extent2I(int(data['width']), int(data['height'])),
                    )
                    defectList.append(afwImage.DefectBase(bbox))
                return defectList

        raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
738 
739  def map_expIdInfo(self, dataId, write=False):
740  return dafPersist.ButlerLocation(
741  pythonType="lsst.obs.base.ExposureIdInfo",
742  cppType=None,
743  storageName="Internal",
744  locationList="ignored",
745  dataId=dataId,
746  mapper=self,
747  storage=self.rootStorage
748  )
749 
750  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
751  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
752  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
753  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
754  return ExposureIdInfo(expId=expId, expBits=expBits)
755 
    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel

        bfKernel is a calibration product that is numpy array,
        unlike other calibration products that are all images;
        all calibration images are sent through _standardizeExposure
        due to CalibrationMapping, but we don't want that to happen to bfKernel

        Returns
        -------
        item
            The input ``item``, unchanged.
        """
        return item
765 
766  def std_raw(self, item, dataId):
767  """Standardize a raw dataset by converting it to an Exposure instead of an Image"""
768  return self._standardizeExposure(self.exposures['raw'], item, dataId,
769  trimmed=False, setVisitInfo=True)
770 
771  def map_skypolicy(self, dataId):
772  """Map a sky policy."""
773  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
774  "Internal", None, None, self,
775  storage=self.rootStorage)
776 
    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        # NOTE(review): self.skypolicy is not set anywhere in this class as
        # visible here -- presumably assigned by a subclass or externally;
        # confirm before relying on this method.
        return self.skypolicy
780 
781 
786 
787  def _getCcdKeyVal(self, dataId):
788  """Return CCD key and value used to look a defect in the defect registry
789 
790  The default implementation simply returns ("ccd", full detector name)
791  """
792  return ("ccd", self._extractDetectorName(dataId))
793 
    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description: `str`
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry if
            this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        # If the caller supplied no path, fall back to the policy-specified one.
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                # Not directly present in the repo; ask the storage to search
                # (this may follow _parent links into parent repositories).
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                # NOTE(review): this branch runs when the policy path *does*
                # exist in storage, yet it warns "unable to locate" and
                # discards the path (the standard-name search below usually
                # re-finds the registry file). The condition polarity looks
                # inverted -- confirm against upstream obs_base before
                # relying on it.
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
        # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            # storage has no 'root' attribute (non-posix storage); skip.
            pass

        # determine if there is an sqlite registry and if not, try the posix registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database under its standard names,
        # in order of preference: PostgreSQL dump, SQLite in root, SQLite in
        # the current directory.
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            # getLocalFile yields a local copy even for remote storages.
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            # No SQL registry found; fall back to scanning the filesystem.
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry
901 
902  def _transformId(self, dataId):
903  """Generate a standard ID dict from a camera-specific ID dict.
904 
905  Canonical keys include:
906  - amp: amplifier name
907  - ccd: CCD name (in LSST this is a combination of raft and sensor)
908  The default implementation returns a copy of its input.
909 
910  Parameters
911  ----------
912  dataId : `dict`
913  Dataset identifier; this must not be modified
914 
915  Returns
916  -------
917  `dict`
918  Transformed dataset identifier.
919  """
920 
921  return dataId.copy()
922 
923  def _mapActualToPath(self, template, actualId):
924  """Convert a template path to an actual path, using the actual data
925  identifier. This implementation is usually sufficient but can be
926  overridden by the subclass.
927 
928  Parameters
929  ----------
930  template : `str`
931  Template path
932  actualId : `dict`
933  Dataset identifier
934 
935  Returns
936  -------
937  `str`
938  Pathname
939  """
940 
941  try:
942  transformedId = self._transformId(actualId)
943  return template % transformedId
944  except Exception as e:
945  raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
946 
947  @staticmethod
948  def getShortCcdName(ccdName):
949  """Convert a CCD name to a form useful as a filename
950 
951  The default implementation converts spaces to underscores.
952  """
953  return ccdName.replace(" ", "_")
954 
955  def _extractDetectorName(self, dataId):
956  """Extract the detector (CCD) name from the dataset identifier.
957 
958  The name in question is the detector name used by lsst.afw.cameraGeom.
959 
960  Parameters
961  ----------
962  dataId : `dict`
963  Dataset identifier.
964 
965  Returns
966  -------
967  `str`
968  Detector name
969  """
970  raise NotImplementedError("No _extractDetectorName() function specified")
971 
972  def _extractAmpId(self, dataId):
973  """Extract the amplifier identifer from a dataset identifier.
974 
975  .. note:: Deprecated in 11_0
976 
977  amplifier identifier has two parts: the detector name for the CCD
978  containing the amplifier and index of the amplifier in the detector.
979 
980  Parameters
981  ----------
982  dataId : `dict`
983  Dataset identifer
984 
985  Returns
986  -------
987  `tuple`
988  Amplifier identifier
989  """
990 
991  trDataId = self._transformId(dataId)
992  return (trDataId["ccd"], int(trDataId['amp']))
993 
994  def _setAmpDetector(self, item, dataId, trimmed=True):
995  """Set the detector object in an Exposure for an amplifier.
996 
997  Defects are also added to the Exposure based on the detector object.
998 
999  Parameters
1000  ----------
1001  item : `lsst.afw.image.Exposure`
1002  Exposure to set the detector in.
1003  dataId : `dict`
1004  Dataset identifier
1005  trimmed : `bool`
1006  Should detector be marked as trimmed? (ignored)
1007  """
1008 
1009  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
1010 
1011  def _setCcdDetector(self, item, dataId, trimmed=True):
1012  """Set the detector object in an Exposure for a CCD.
1013 
1014  Parameters
1015  ----------
1016  item : `lsst.afw.image.Exposure`
1017  Exposure to set the detector in.
1018  dataId : `dict`
1019  Dataset identifier
1020  trimmed : `bool`
1021  Should detector be marked as trimmed? (ignored)
1022  """
1023  if item.getDetector() is not None:
1024  return
1025 
1026  detectorName = self._extractDetectorName(dataId)
1027  detector = self.camera[detectorName]
1028  item.setDetector(detector)
1029 
1030  def _setFilter(self, mapping, item, dataId):
1031  """Set the filter object in an Exposure. If the Exposure had a FILTER
1032  keyword, this was already processed during load. But if it didn't,
1033  use the filter from the registry.
1034 
1035  Parameters
1036  ----------
1037  mapping : `lsst.obs.base.Mapping`
1038  Where to get the filter from.
1039  item : `lsst.afw.image.Exposure`
1040  Exposure to set the filter in.
1041  dataId : `dict`
1042  Dataset identifier.
1043  """
1044 
1045  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
1046  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
1047  return
1048 
1049  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1050  return
1051 
1052  actualId = mapping.need(['filter'], dataId)
1053  filterName = actualId['filter']
1054  if self.filters is not None and filterName in self.filters:
1055  filterName = self.filters[filterName]
1056  item.setFilter(afwImage.Filter(filterName))
1057 
1058  # Default standardization function for exposures
1059  def _standardizeExposure(self, mapping, item, dataId, filter=True,
1060  trimmed=True, setVisitInfo=True):
1061  """Default standardization function for images.
1062 
1063  This sets the Detector from the camera geometry
1064  and optionally set the Fiter. In both cases this saves
1065  having to persist some data in each exposure (or image).
1066 
1067  Parameters
1068  ----------
1069  mapping : `lsst.obs.base.Mapping`
1070  Where to get the values from.
1071  item : image-like object
1072  Can be any of lsst.afw.image.Exposure,
1073  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
1074  or lsst.afw.image.MaskedImage
1075 
1076  dataId : `dict`
1077  Dataset identifier
1078  filter : `bool`
1079  Set filter? Ignored if item is already an exposure
1080  trimmed : `bool`
1081  Should detector be marked as trimmed?
1082  setVisitInfo : `bool`
1083  Should Exposure have its VisitInfo filled out from the metadata?
1084 
1085  Returns
1086  -------
1087  `lsst.afw.image.Exposure`
1088  The standardized Exposure.
1089  """
1090  try:
1091  item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
1092  except Exception as e:
1093  self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
1094  raise
1095 
1096  if mapping.level.lower() == "amp":
1097  self._setAmpDetector(item, dataId, trimmed)
1098  elif mapping.level.lower() == "ccd":
1099  self._setCcdDetector(item, dataId, trimmed)
1100 
1101  if filter:
1102  self._setFilter(mapping, item, dataId)
1103 
1104  return item
1105 
    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.

        Combines the visit's observation date (looked up in the main
        registry's raw_visit table) with the CCD identifier to select the
        single defect file whose validity interval covers the exposure.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; must contain ``visit``.

        Returns
        -------
        `str`
            Path to the defects file or None if not available.

        Raises
        ------
        RuntimeError
            If there is no main registry, or if more than one defect file
            matches.
        """
        if self.defectRegistry is None:
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup will fail in a posix registry because there is no template to provide.
        # NOTE(review): ('taiObs') is a parenthesized string, not a 1-tuple;
        # Registry.lookup evidently tolerates bare strings here -- confirm
        # before changing.
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        # visit is the raw_visit primary key, so at most one row can match.
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Lookup the defects for this CCD serial number that are valid at the exposure midpoint.
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            # Registry stores paths relative to the defect directory.
            return os.path.join(self.defectPath, rows[0][0])
        else:
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
1146 
1147  def _makeCamera(self, policy, repositoryDir):
1148  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry
1149 
1150  Also set self.cameraDataLocation, if relevant (else it can be left None).
1151 
1152  This implementation assumes that policy contains an entry "camera" that points to the
1153  subdirectory in this package of camera data; specifically, that subdirectory must contain:
1154  - a file named `camera.py` that contains persisted camera config
1155  - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath
1156 
1157  Parameters
1158  ----------
1159  policy : `lsst.daf.persistence.Policy` or `pexPolicy.Policy`
1160  Policy with per-camera defaults already merged
1161  (PexPolicy only for backward compatibility).
1162  repositoryDir : `str`
1163  Policy repository for the subclassing module (obtained with
1164  getRepositoryPath() on the per-camera default dictionary).
1165  """
1166  if isinstance(policy, pexPolicy.Policy):
1167  policy = dafPersist.Policy(pexPolicy=policy)
1168  if 'camera' not in policy:
1169  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1170  cameraDataSubdir = policy['camera']
1171  self.cameraDataLocation = os.path.normpath(
1172  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1173  cameraConfig = afwCameraGeom.CameraConfig()
1174  cameraConfig.load(self.cameraDataLocation)
1175  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1176  return afwCameraGeom.makeCameraFromPath(
1177  cameraConfig=cameraConfig,
1178  ampInfoPath=ampInfoPath,
1179  shortNameFunc=self.getShortCcdName,
1180  pupilFactoryClass=self.PupilFactoryClass
1181  )
1182 
1183  def getRegistry(self):
1184  """Get the registry used by this mapper.
1185 
1186  Returns
1187  -------
1188  Registry or None
1189  The registry used by this mapper for this mapper's repository.
1190  """
1191  return self.registry
1192 
1193  def getImageCompressionSettings(self, datasetType, dataId):
1194  """Stuff image compression settings into a daf.base.PropertySet
1195 
1196  This goes into the ButlerLocation's "additionalData", which gets
1197  passed into the boost::persistence framework.
1198 
1199  Parameters
1200  ----------
1201  datasetType : `str`
1202  Type of dataset for which to get the image compression settings.
1203  dataId : `dict`
1204  Dataset identifier.
1205 
1206  Returns
1207  -------
1208  additionalData : `lsst.daf.base.PropertySet`
1209  Image compression settings.
1210  """
1211  mapping = self.mappings[datasetType]
1212  recipeName = mapping.recipe
1213  storageType = mapping.storage
1214  if storageType not in self._writeRecipes:
1215  return dafBase.PropertySet()
1216  if recipeName not in self._writeRecipes[storageType]:
1217  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1218  (datasetType, storageType, recipeName))
1219  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1220  seed = hash(tuple(dataId.items())) % 2**31
1221  for plane in ("image", "mask", "variance"):
1222  if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
1223  recipe.set(plane + ".scaling.seed", seed)
1224  return recipe
1225 
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which uses it to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BSCALE (for MANUAL scaling)

        A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
        """
        # Base recipes ship with obs_base; each obs_* package may supplement.
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            # NOTE(review): this compares recipe names under recipes[entry]
            # against the *top-level* names of the supplements policy; it
            # looks as if supplements[entry].names() may have been intended.
            # Verify against upstream before changing.
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        # Validate each storage type's recipes, converting them to
        # PropertySets keyed by storage type then recipe name.
        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1300 
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Dataset identifier; only used (via the mapper) to compute the
        exposure ID for the VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper supplying _computeCcdExposureId and makeRawVisitInfo;
        required for the VisitInfo to be filled in.
    logger : `lsst.log.Log`, optional
        Logger for diagnostics; a default "CameraMapper" logger is created
        when needed.
    setVisitInfo : `bool`, optional
        Attempt to fill out the Exposure's VisitInfo from the metadata?

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        # Promote to an Exposure, then carry over WCS and metadata by hand.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            # strip=True removes the consumed WCS keywords from metadata.
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # raised on failure to create a wcs (and possibly others)
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.debug("wcs set to None; insufficient information found in metadata to create a valid wcs:"
                         " %s", e.args[0])

        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Exposure: WCS and detector are already attached; keep as-is.
        exposure = image
        metadata = exposure.getMetadata()
    else:
        # Plain Image: no metadata available, so no WCS or VisitInfo can
        # be derived below.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
    #
    # set VisitInfo if we can
    #
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
1359 
1360 
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # NOTE: the "def validateRecipeFitsStorage(recipes):" line was missing
    # from this chunk (extraction artifact); restored here. The name matches
    # the validationMenu reference in CameraMapper._initWriteRecipes.
    # Schemas define what should be there, and the default values (and by the default
    # value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # Section absent: install the schema defaults wholesale.
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce supplied values to the schema's type; missing
                    # keys fall back to the schema default.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _getCcdKeyVal(self, dataId)
Utility functions.