lsst.obs.base  15.0-12-g3681e7a+12
cameraMapper.py
Go to the documentation of this file.
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 from builtins import str
24 import copy
25 import os
26 import pyfits # required by _makeDefectsDict until defects are written as AFW tables
27 import re
28 import weakref
29 import lsst.daf.persistence as dafPersist
30 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
31 import lsst.daf.base as dafBase
32 import lsst.afw.geom as afwGeom
33 import lsst.afw.image as afwImage
34 import lsst.afw.table as afwTable
35 from lsst.afw.fits import readMetadata
36 import lsst.afw.cameraGeom as afwCameraGeom
37 import lsst.log as lsstLog
38 import lsst.pex.policy as pexPolicy
39 import lsst.pex.exceptions as pexExcept
40 from .exposureIdInfo import ExposureIdInfo
41 from .makeRawVisitInfo import MakeRawVisitInfo
42 from lsst.utils import getPackageDir
43 
44 __all__ = ["CameraMapper", "exposureFromImage"]
45 
46 
47 class CameraMapper(dafPersist.Mapper):
48 
49  """CameraMapper is a base class for mappers that handle images from a
50  camera and products derived from them. This provides an abstraction layer
51  between the data on disk and the code.
52 
53  Public methods: keys, queryMetadata, getDatasetTypes, map,
54  canStandardize, standardize
55 
56  Mappers for specific data sources (e.g., CFHT Megacam, LSST
57  simulations, etc.) should inherit this class.
58 
59  The CameraMapper manages datasets within a "root" directory. Note that
60  writing to a dataset present in the input root will hide the existing
61  dataset but not overwrite it. See #2160 for design discussion.
62 
63  A camera is assumed to consist of one or more rafts, each composed of
64  multiple CCDs. Each CCD is in turn composed of one or more amplifiers
65  (amps). A camera is also assumed to have a camera geometry description
66  (CameraGeom object) as a policy file, a filter description (Filter class
67  static configuration) as another policy file, and an optional defects
68  description directory.
69 
70  Information from the camera geometry and defects are inserted into all
71  Exposure objects returned.
72 
73  The mapper uses one or two registries to retrieve metadata about the
74  images. The first is a registry of all raw exposures. This must contain
75  the time of the observation. One or more tables (or the equivalent)
76  within the registry are used to look up data identifier components that
77  are not specified by the user (e.g. filter) and to return results for
78  metadata queries. The second is an optional registry of all calibration
79  data. This should contain validity start and end entries for each
80  calibration dataset in the same timescale as the observation time.
81 
82  Subclasses will typically set MakeRawVisitInfoClass:
83 
84  MakeRawVisitInfoClass: a class variable that points to a subclass of
85  MakeRawVisitInfo, a functor that creates an
86  lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
87 
88  Subclasses must provide the following methods:
89 
90  _extractDetectorName(self, dataId): returns the detector name for a CCD
91  (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
92  a dataset identifier referring to that CCD or a subcomponent of it.
93 
94  _computeCcdExposureId(self, dataId): see below
95 
96  _computeCoaddExposureId(self, dataId, singleFilter): see below
97 
98  Subclasses may also need to override the following methods:
99 
100  _transformId(self, dataId): transformation of a data identifier
101  from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"),
102  including making suitable for path expansion (e.g. removing commas).
103  The default implementation does nothing. Note that this
104  method should not modify its input parameter.
105 
106  getShortCcdName(self, ccdName): a static method that returns a shortened name
107  suitable for use as a filename. The default version converts spaces to underscores.
108 
109  _getCcdKeyVal(self, dataId): return a CCD key and value
110  by which to look up defects in the defects registry.
111  The default value returns ("ccd", detector name)
112 
113  _mapActualToPath(self, template, actualId): convert a template path to an
114  actual path, using the actual dataset identifier.
115 
116  The mapper's behaviors are largely specified by the policy file.
117  See the MapperDictionary.paf for descriptions of the available items.
118 
119  The 'exposures', 'calibrations', and 'datasets' subpolicies configure
120  mappings (see Mappings class).
121 
122  Common default mappings for all subclasses can be specified in the
123  "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides
124  a simple way to add a product to all camera mappers.
125 
126  Functions to map (provide a path to the data given a dataset
127  identifier dictionary) and standardize (convert data into some standard
128  format or type) may be provided in the subclass as "map_{dataset type}"
129  and "std_{dataset type}", respectively.
130 
131  If non-Exposure datasets cannot be retrieved using standard
132  daf_persistence methods alone, a "bypass_{dataset type}" function may be
133  provided in the subclass to return the dataset instead of using the
134  "datasets" subpolicy.
135 
136  Implementations of map_camera and bypass_camera that should typically be
137  sufficient are provided in this base class.
138 
139  Notes
140  -----
141  TODO:
142 
 143  - Handle defects the same way as all other calibration products, using the calibration registry
144  - Instead of auto-loading the camera at construction time, load it from the calibration registry
145  - Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention
146  of pyfits from this package.
147  """
148  packageName = None
149 
150  # a class or subclass of MakeRawVisitInfo, a functor that makes an
151  # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
152  MakeRawVisitInfoClass = MakeRawVisitInfo
153 
154  # a class or subclass of PupilFactory
155  PupilFactoryClass = afwCameraGeom.PupilFactory
156 
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy,
            Can also be pexPolicy.Policy, only for backward compatibility.
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        # Prefer an explicit root; fall back to the repository config's root.
        # (self.root is re-assigned below after LogicalLocation expansion.)
        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        # Accept old-style pex Policy objects for backward compatibility.
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        # Repository-level policy overrides entries in the camera policy.
        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Merge in defaults from the obs_base mapper dictionary so that any
        # keys the camera policy omits get their documented default values.
        defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
                                                                "MapperDictionary.paf",
                                                                "policy")
        dictPolicy = dafPersist.Policy(defaultPolicyFile)
        policy.merge(dictPolicy)

        # Levels: map each level name to the set of dataId keys it excludes.
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        # NOTE(review): `provided=None` is passed here even though this
        # constructor accepts a `provided` argument — the ctor argument is
        # effectively ignored by the mappings. Looks intentional upstream,
        # but verify before relying on `provided`.
        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
        # camera package, which is on the local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')
296 
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there are
        methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g., reading
        the header of an image, retrieving only the size of a catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))

        # Mappings: (policy section name, per-type default policy, Mapping class)
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    # Composite datasets get a map_ closure and nothing else;
                    # per-iteration values are bound via default arguments
                    # (subPolicy=subPolicy) to avoid late-binding of loop vars.
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    # Default map_/query_/std_ methods, unless the subclass
                    # already defined its own. weakref.proxy avoids a
                    # reference cycle between the mapper and its closures.
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(location.getLocationsWithRoot()[0]))

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("filter",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0], hdu=1)))

                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]),
                                                hdu=1).get("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
505 
506  def _computeCcdExposureId(self, dataId):
507  """Compute the 64-bit (long) identifier for a CCD exposure.
508 
509  Subclasses must override
510 
511  Parameters
512  ----------
513  dataId : `dict`
514  Data identifier with visit, ccd.
515  """
516  raise NotImplementedError()
517 
518  def _computeCoaddExposureId(self, dataId, singleFilter):
519  """Compute the 64-bit (long) identifier for a coadd.
520 
521  Subclasses must override
522 
523  Parameters
524  ----------
525  dataId : `dict`
526  Data identifier with tract and patch.
527  singleFilter : `bool`
528  True means the desired ID is for a single-filter coadd, in which
529  case dataIdmust contain filter.
530  """
531  raise NotImplementedError()
532 
533  def _search(self, path):
534  """Search for path in the associated repository's storage.
535 
536  Parameters
537  ----------
538  path : string
539  Path that describes an object in the repository associated with
540  this mapper.
541  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
542  indicator will be stripped when searching and so will match
543  filenames without the HDU indicator, e.g. 'foo.fits'. The path
544  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
545 
546  Returns
547  -------
548  string
549  The path for this object in the repository. Will return None if the
550  object can't be found. If the input argument path contained an HDU
551  indicator, the returned path will also contain the HDU indicator.
552  """
553  return self.rootStorage.search(path)
554 
555  def backup(self, datasetType, dataId):
556  """Rename any existing object with the given type and dataId.
557 
558  The CameraMapper implementation saves objects in a sequence of e.g.:
559 
560  - foo.fits
561  - foo.fits~1
562  - foo.fits~2
563 
564  All of the backups will be placed in the output repo, however, and will
565  not be removed if they are found elsewhere in the _parent chain. This
566  means that the same file will be stored twice if the previous version was
567  found in an input repo.
568  """
569 
570  # Calling PosixStorage directly is not the long term solution in this
571  # function, this is work-in-progress on epic DM-6225. The plan is for
572  # parentSearch to be changed to 'search', and search only the storage
573  # associated with this mapper. All searching of parents will be handled
574  # by traversing the container of repositories in Butler.
575 
576  def firstElement(list):
577  """Get the first element in the list, or None if that can't be done.
578  """
579  return list[0] if list is not None and len(list) else None
580 
581  n = 0
582  newLocation = self.map(datasetType, dataId, write=True)
583  newPath = newLocation.getLocations()[0]
584  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
585  path = firstElement(path)
586  oldPaths = []
587  while path is not None:
588  n += 1
589  oldPaths.append((n, path))
590  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
591  path = firstElement(path)
592  for n, oldPath in reversed(oldPaths):
593  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
594 
595  def keys(self):
596  """Return supported keys.
597 
598  Returns
599  -------
600  iterable
601  List of keys usable in a dataset identifier
602  """
603  return iter(self.keyDict.keys())
604 
605  def getKeys(self, datasetType, level):
606  """Return a dict of supported keys and their value types for a given dataset
607  type at a given level of the key hierarchy.
608 
609  Parameters
610  ----------
611  datasetType : `str`
612  Dataset type or None for all dataset types.
613  level : `str` or None
614  Level or None for all levels or '' for the default level for the
615  camera.
616 
617  Returns
618  -------
619  `dict`
620  Keys are strings usable in a dataset identifier, values are their
621  value types.
622  """
623 
624  # not sure if this is how we want to do this. what if None was intended?
625  if level == '':
626  level = self.getDefaultLevel()
627 
628  if datasetType is None:
629  keyDict = copy.copy(self.keyDict)
630  else:
631  keyDict = self.mappings[datasetType].keys()
632  if level is not None and level in self.levels:
633  keyDict = copy.copy(keyDict)
634  for l in self.levels[level]:
635  if l in keyDict:
636  del keyDict[l]
637  return keyDict
638 
639  def getDefaultLevel(self):
640  return self.defaultLevel
641 
642  def getDefaultSubLevel(self, level):
643  if level in self.defaultSubLevels:
644  return self.defaultSubLevels[level]
645  return None
646 
647  @classmethod
648  def getCameraName(cls):
649  """Return the name of the camera that this CameraMapper is for."""
650  className = str(cls)
651  className = className[className.find('.'):-1]
652  m = re.search(r'(\w+)Mapper', className)
653  if m is None:
654  m = re.search(r"class '[\w.]*?(\w+)'", className)
655  name = m.group(1)
656  return name[:1].lower() + name[1:] if name else ''
657 
658  @classmethod
659  def getPackageName(cls):
660  """Return the name of the package containing this CameraMapper."""
661  if cls.packageName is None:
662  raise ValueError('class variable packageName must not be None')
663  return cls.packageName
664 
665  @classmethod
666  def getPackageDir(cls):
667  """Return the base directory of this package"""
668  return getPackageDir(cls.getPackageName())
669 
670  def map_camera(self, dataId, write=False):
671  """Map a camera dataset."""
672  if self.camera is None:
673  raise RuntimeError("No camera dataset available.")
674  actualId = self._transformId(dataId)
675  return dafPersist.ButlerLocation(
676  pythonType="lsst.afw.cameraGeom.CameraConfig",
677  cppType="Config",
678  storageName="ConfigStorage",
679  locationList=self.cameraDataLocation or "ignored",
680  dataId=actualId,
681  mapper=self,
682  storage=self.rootStorage
683  )
684 
685  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
686  """Return the (preloaded) camera object.
687  """
688  if self.camera is None:
689  raise RuntimeError("No camera dataset available.")
690  return self.camera
691 
    def map_defects(self, dataId, write=False):
        """Map defects dataset.

        Parameters
        ----------
        dataId : `dict`
            Butler data ID used to look up the defects file.
        write : `bool`, optional
            Unused; defects are read-only.

        Returns
        -------
        `lsst.daf.persistence.ButlerLocation`
            Minimal ButlerLocation containing just the locationList field
            (just enough information that bypass_defects can use it).

        Raises
        ------
        RuntimeError
            If the defect lookup yields no file for this dataId.
        """
        defectFitsPath = self._defectLookup(dataId=dataId)
        if defectFitsPath is None:
            raise RuntimeError("No defects available for dataId=%s" % (dataId,))

        return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
                                         dataId, self,
                                         storage=self.rootStorage)
708 
    def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
        """Return a defect based on the butler location returned by map_defects

        Parameters
        ----------
        butlerLocation : `lsst.daf.persistence.ButlerLocation`
            locationList = path to defects FITS file
        dataId : `dict`
            Butler data ID; "ccd" must be set.

        Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation
        into an object, which is what we want for now, since that conversion is a bit tricky.
        """
        detectorName = self._extractDetectorName(dataId)
        defectsFitsPath = butlerLocation.locationList[0]
        with pyfits.open(defectsFitsPath) as hduList:
            # Skip the primary HDU; each subsequent HDU holds the defect
            # table for one detector, identified by its "name" header card.
            for hdu in hduList[1:]:
                if hdu.header["name"] != detectorName:
                    continue

                defectList = []
                for data in hdu.data:
                    # Each table row is one defect bounding box:
                    # lower-left corner (x0, y0) plus width/height.
                    bbox = afwGeom.Box2I(
                        afwGeom.Point2I(int(data['x0']), int(data['y0'])),
                        afwGeom.Extent2I(int(data['width']), int(data['height'])),
                    )
                    defectList.append(afwImage.DefectBase(bbox))
                # First matching HDU wins; later HDUs are never examined.
                return defectList

        raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
739 
740  def map_expIdInfo(self, dataId, write=False):
741  return dafPersist.ButlerLocation(
742  pythonType="lsst.obs.base.ExposureIdInfo",
743  cppType=None,
744  storageName="Internal",
745  locationList="ignored",
746  dataId=dataId,
747  mapper=self,
748  storage=self.rootStorage
749  )
750 
751  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
752  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
753  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
754  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
755  return ExposureIdInfo(expId=expId, expBits=expBits)
756 
757  def std_bfKernel(self, item, dataId):
758  """Disable standardization for bfKernel
759 
760  bfKernel is a calibration product that is numpy array,
761  unlike other calibration products that are all images;
762  all calibration images are sent through _standardizeExposure
763  due to CalibrationMapping, but we don't want that to happen to bfKernel
764  """
765  return item
766 
767  def std_raw(self, item, dataId):
768  """Standardize a raw dataset by converting it to an Exposure instead of an Image"""
769  return self._standardizeExposure(self.exposures['raw'], item, dataId,
770  trimmed=False, setVisitInfo=True)
771 
772  def map_skypolicy(self, dataId):
773  """Map a sky policy."""
774  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
775  "Internal", None, None, self,
776  storage=self.rootStorage)
777 
778  def std_skypolicy(self, item, dataId):
779  """Standardize a sky policy by returning the one we use."""
780  return self.skypolicy
781 
782 
787 
788  def _getCcdKeyVal(self, dataId):
789  """Return CCD key and value used to look a defect in the defect registry
790 
791  The default implementation simply returns ("ccd", full detector name)
792  """
793  return ("ccd", self._extractDetectorName(dataId))
794 
795  def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
796  posixIfNoSql=True):
797  """Set up a registry (usually SQLite3), trying a number of possible
798  paths.
799 
800  Parameters
801  ----------
802  name : string
803  Name of registry.
804  description: `str`
805  Description of registry (for log messages)
806  path : string
807  Path for registry.
808  policy : string
809  Policy that contains the registry name, used if path is None.
810  policyKey : string
811  Key in policy for registry path.
812  storage : Storage subclass
813  Repository Storage to look in.
814  searchParents : bool, optional
815  True if the search for a registry should follow any Butler v1
816  _parent symlinks.
817  posixIfNoSql : bool, optional
818  If an sqlite registry is not found, will create a posix registry if
819  this is True.
820 
821  Returns
822  -------
823  lsst.daf.persistence.Registry
824  Registry object
825  """
826  if path is None and policyKey in policy:
827  path = dafPersist.LogicalLocation(policy[policyKey]).locString()
828  if os.path.isabs(path):
829  raise RuntimeError("Policy should not indicate an absolute path for registry.")
830  if not storage.exists(path):
831  newPath = storage.instanceSearch(path)
832 
833  newPath = newPath[0] if newPath is not None and len(newPath) else None
834  if newPath is None:
835  self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
836  path)
837  path = newPath
838  else:
839  self.log.warn("Unable to locate registry at policy path: %s", path)
840  path = None
841 
842  # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
843  # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
844  # root from path. Currently only works with PosixStorage
845  try:
846  root = storage.root
847  if path and (path.startswith(root)):
848  path = path[len(root + '/'):]
849  except AttributeError:
850  pass
851 
852  # determine if there is an sqlite registry and if not, try the posix registry.
853  registry = None
854 
855  def search(filename, description):
856  """Search for file in storage
857 
858  Parameters
859  ----------
860  filename : `str`
861  Filename to search for
862  description : `str`
863  Description of file, for error message.
864 
865  Returns
866  -------
867  path : `str` or `None`
868  Path to file, or None
869  """
870  result = storage.instanceSearch(filename)
871  if result:
872  return result[0]
873  self.log.debug("Unable to locate %s: %s", description, filename)
874  return None
875 
876  # Search for a suitable registry database
877  if path is None:
878  path = search("%s.pgsql" % name, "%s in root" % description)
879  if path is None:
880  path = search("%s.sqlite3" % name, "%s in root" % description)
881  if path is None:
882  path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
883 
884  if path is not None:
885  if not storage.exists(path):
886  newPath = storage.instanceSearch(path)
887  newPath = newPath[0] if newPath is not None and len(newPath) else None
888  if newPath is not None:
889  path = newPath
890  localFileObj = storage.getLocalFile(path)
891  self.log.info("Loading %s registry from %s", description, localFileObj.name)
892  registry = dafPersist.Registry.create(localFileObj.name)
893  localFileObj.close()
894  elif not registry and posixIfNoSql:
895  try:
896  self.log.info("Loading Posix %s registry from %s", description, storage.root)
897  registry = dafPersist.PosixRegistry(storage.root)
898  except:
899  registry = None
900 
901  return registry
902 
903  def _transformId(self, dataId):
904  """Generate a standard ID dict from a camera-specific ID dict.
905 
906  Canonical keys include:
907  - amp: amplifier name
908  - ccd: CCD name (in LSST this is a combination of raft and sensor)
909  The default implementation returns a copy of its input.
910 
911  Parameters
912  ----------
913  dataId : `dict`
914  Dataset identifier; this must not be modified
915 
916  Returns
917  -------
918  `dict`
919  Transformed dataset identifier.
920  """
921 
922  return dataId.copy()
923 
924  def _mapActualToPath(self, template, actualId):
925  """Convert a template path to an actual path, using the actual data
926  identifier. This implementation is usually sufficient but can be
927  overridden by the subclass.
928 
929  Parameters
930  ----------
931  template : `str`
932  Template path
933  actualId : `dict`
934  Dataset identifier
935 
936  Returns
937  -------
938  `str`
939  Pathname
940  """
941 
942  try:
943  transformedId = self._transformId(actualId)
944  return template % transformedId
945  except Exception as e:
946  raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
947 
948  @staticmethod
949  def getShortCcdName(ccdName):
950  """Convert a CCD name to a form useful as a filename
951 
952  The default implementation converts spaces to underscores.
953  """
954  return ccdName.replace(" ", "_")
955 
956  def _extractDetectorName(self, dataId):
957  """Extract the detector (CCD) name from the dataset identifier.
958 
959  The name in question is the detector name used by lsst.afw.cameraGeom.
960 
961  Parameters
962  ----------
963  dataId : `dict`
964  Dataset identifier.
965 
966  Returns
967  -------
968  `str`
969  Detector name
970  """
971  raise NotImplementedError("No _extractDetectorName() function specified")
972 
973  def _extractAmpId(self, dataId):
974  """Extract the amplifier identifer from a dataset identifier.
975 
976  .. note:: Deprecated in 11_0
977 
978  amplifier identifier has two parts: the detector name for the CCD
979  containing the amplifier and index of the amplifier in the detector.
980 
981  Parameters
982  ----------
983  dataId : `dict`
984  Dataset identifer
985 
986  Returns
987  -------
988  `tuple`
989  Amplifier identifier
990  """
991 
992  trDataId = self._transformId(dataId)
993  return (trDataId["ccd"], int(trDataId['amp']))
994 
995  def _setAmpDetector(self, item, dataId, trimmed=True):
996  """Set the detector object in an Exposure for an amplifier.
997 
998  Defects are also added to the Exposure based on the detector object.
999 
1000  Parameters
1001  ----------
1002  item : `lsst.afw.image.Exposure`
1003  Exposure to set the detector in.
1004  dataId : `dict`
1005  Dataset identifier
1006  trimmed : `bool`
1007  Should detector be marked as trimmed? (ignored)
1008  """
1009 
1010  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
1011 
1012  def _setCcdDetector(self, item, dataId, trimmed=True):
1013  """Set the detector object in an Exposure for a CCD.
1014 
1015  Parameters
1016  ----------
1017  item : `lsst.afw.image.Exposure`
1018  Exposure to set the detector in.
1019  dataId : `dict`
1020  Dataset identifier
1021  trimmed : `bool`
1022  Should detector be marked as trimmed? (ignored)
1023  """
1024  if item.getDetector() is not None:
1025  return
1026 
1027  detectorName = self._extractDetectorName(dataId)
1028  detector = self.camera[detectorName]
1029  item.setDetector(detector)
1030 
1031  def _setFilter(self, mapping, item, dataId):
1032  """Set the filter object in an Exposure. If the Exposure had a FILTER
1033  keyword, this was already processed during load. But if it didn't,
1034  use the filter from the registry.
1035 
1036  Parameters
1037  ----------
1038  mapping : `lsst.obs.base.Mapping`
1039  Where to get the filter from.
1040  item : `lsst.afw.image.Exposure`
1041  Exposure to set the filter in.
1042  dataId : `dict`
1043  Dataset identifier.
1044  """
1045 
1046  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
1047  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
1048  return
1049 
1050  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1051  return
1052 
1053  actualId = mapping.need(['filter'], dataId)
1054  filterName = actualId['filter']
1055  if self.filters is not None and filterName in self.filters:
1056  filterName = self.filters[filterName]
1057  item.setFilter(afwImage.Filter(filterName))
1058 
1059  # Default standardization function for exposures
1060  def _standardizeExposure(self, mapping, item, dataId, filter=True,
1061  trimmed=True, setVisitInfo=True):
1062  """Default standardization function for images.
1063 
1064  This sets the Detector from the camera geometry
1065  and optionally set the Fiter. In both cases this saves
1066  having to persist some data in each exposure (or image).
1067 
1068  Parameters
1069  ----------
1070  mapping : `lsst.obs.base.Mapping`
1071  Where to get the values from.
1072  item : image-like object
1073  Can be any of lsst.afw.image.Exposure,
1074  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
1075  or lsst.afw.image.MaskedImage
1076 
1077  dataId : `dict`
1078  Dataset identifier
1079  filter : `bool`
1080  Set filter? Ignored if item is already an exposure
1081  trimmed : `bool`
1082  Should detector be marked as trimmed?
1083  setVisitInfo : `bool`
1084  Should Exposure have its VisitInfo filled out from the metadata?
1085 
1086  Returns
1087  -------
1088  `lsst.afw.image.Exposure`
1089  The standardized Exposure.
1090  """
1091  try:
1092  item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
1093  except Exception as e:
1094  self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
1095  raise
1096 
1097  if mapping.level.lower() == "amp":
1098  self._setAmpDetector(item, dataId, trimmed)
1099  elif mapping.level.lower() == "ccd":
1100  self._setCcdDetector(item, dataId, trimmed)
1101 
1102  if filter:
1103  self._setFilter(mapping, item, dataId)
1104 
1105  return item
1106 
    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.

        Looks up the exposure's observation time in the exposure registry,
        then queries the defect registry for the (single) defect file whose
        validity interval contains that time.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; must contain a "visit" key.

        Returns
        -------
        `str`
            Path to the defects file or None if not available.

        Raises
        ------
        RuntimeError
            If there is no exposure registry, or if more than one defect
            file matches the query.
        """
        if self.defectRegistry is None:
            # No defect registry configured for this mapper.
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup will fail in a posix registry because there is no template to provide.
        # NOTE(review): ('taiObs') and ('raw_visit') are plain parenthesized
        # strings, not 1-tuples (no trailing comma) -- presumably
        # Registry.lookup accepts bare strings; confirm against
        # lsst.daf.persistence.Registry.
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Lookup the defects for this CCD serial number that are valid at the exposure midpoint.
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            # Defect paths in the registry are relative to self.defectPath.
            return os.path.join(self.defectPath, rows[0][0])
        else:
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
1147 
1148  def _makeCamera(self, policy, repositoryDir):
1149  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry
1150 
1151  Also set self.cameraDataLocation, if relevant (else it can be left None).
1152 
1153  This implementation assumes that policy contains an entry "camera" that points to the
1154  subdirectory in this package of camera data; specifically, that subdirectory must contain:
1155  - a file named `camera.py` that contains persisted camera config
1156  - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath
1157 
1158  Parameters
1159  ----------
1160  policy : `lsst.daf.persistence.Policy` or `pexPolicy.Policy`
1161  Policy with per-camera defaults already merged
1162  (PexPolicy only for backward compatibility).
1163  repositoryDir : `str`
1164  Policy repository for the subclassing module (obtained with
1165  getRepositoryPath() on the per-camera default dictionary).
1166  """
1167  if isinstance(policy, pexPolicy.Policy):
1168  policy = dafPersist.Policy(pexPolicy=policy)
1169  if 'camera' not in policy:
1170  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1171  cameraDataSubdir = policy['camera']
1172  self.cameraDataLocation = os.path.normpath(
1173  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1174  cameraConfig = afwCameraGeom.CameraConfig()
1175  cameraConfig.load(self.cameraDataLocation)
1176  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1177  return afwCameraGeom.makeCameraFromPath(
1178  cameraConfig=cameraConfig,
1179  ampInfoPath=ampInfoPath,
1180  shortNameFunc=self.getShortCcdName,
1181  pupilFactoryClass=self.PupilFactoryClass
1182  )
1183 
1184  def getRegistry(self):
1185  """Get the registry used by this mapper.
1186 
1187  Returns
1188  -------
1189  Registry or None
1190  The registry used by this mapper for this mapper's repository.
1191  """
1192  return self.registry
1193 
1194  def getImageCompressionSettings(self, datasetType, dataId):
1195  """Stuff image compression settings into a daf.base.PropertySet
1196 
1197  This goes into the ButlerLocation's "additionalData", which gets
1198  passed into the boost::persistence framework.
1199 
1200  Parameters
1201  ----------
1202  datasetType : `str`
1203  Type of dataset for which to get the image compression settings.
1204  dataId : `dict`
1205  Dataset identifier.
1206 
1207  Returns
1208  -------
1209  additionalData : `lsst.daf.base.PropertySet`
1210  Image compression settings.
1211  """
1212  mapping = self.mappings[datasetType]
1213  recipeName = mapping.recipe
1214  storageType = mapping.storage
1215  if storageType not in self._writeRecipes:
1216  return dafBase.PropertySet()
1217  if recipeName not in self._writeRecipes[storageType]:
1218  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1219  (datasetType, storageType, recipeName))
1220  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1221  seed = hash(tuple(dataId.items())) % 2**31
1222  for plane in ("image", "mask", "variance"):
1223  if recipe.exists(plane + ".scaling.seed") and recipe.get(plane + ".scaling.seed") == 0:
1224  recipe.set(plane + ".scaling.seed", seed)
1225  return recipe
1226 
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which uses it to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BSCALE (for MANUAL scaling)

        A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
        """
        # Base recipes ship with obs_base; the concrete obs_* package may
        # supply its own policy/writeRecipes.yaml to supplement them.
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            # NOTE(review): this compares recipes[entry].names() against
            # supplements.names() at the TOP level, not
            # supplements[entry].names(); verify the intended level of the
            # overlap check.
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        # Validate each storage type's recipes and convert them to
        # PropertySets, keyed by storage type then recipe name.
        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1300 
1301 
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Dataset identifier, passed to the mapper to compute the exposure id
        used when building the VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper supplying _computeCcdExposureId and makeRawVisitInfo; without
        it the VisitInfo cannot be set (a warning is logged).
    logger : `lsst.log.Log`, optional
        Logger for diagnostics; a "CameraMapper" logger is created if needed.
    setVisitInfo : `bool`
        Attempt to fill out the exposure's VisitInfo from the metadata?

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        # Promote to Exposure, carrying over the WCS and metadata that a
        # DecoratedImage holds.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # raised on failure to create a wcs (and possibly others)
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.debug("wcs set to None; insufficient information found in metadata to create a valid wcs:"
                         " %s", e.args[0])

        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Exposure: WCS and metadata already attached.
        exposure = image
        metadata = exposure.getMetadata()
    else:
        # Plain Image: no metadata available.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
    #
    # set VisitInfo if we can
    #
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
1360 
1361 
1363  """Validate recipes for FitsStorage
1364 
1365  The recipes are supplemented with default values where appropriate.
1366 
1367  TODO: replace this custom validation code with Cerberus (DM-11846)
1368 
1369  Parameters
1370  ----------
1371  recipes : `lsst.daf.persistence.Policy`
1372  FitsStorage recipes to validate.
1373 
1374  Returns
1375  -------
1376  validated : `lsst.daf.base.PropertySet`
1377  Validated FitsStorage recipe.
1378 
1379  Raises
1380  ------
1381  `RuntimeError`
1382  If validation fails.
1383  """
1384  # Schemas define what should be there, and the default values (and by the default
1385  # value, the expected type).
1386  compressionSchema = {
1387  "algorithm": "NONE",
1388  "rows": 1,
1389  "columns": 0,
1390  "quantizeLevel": 0.0,
1391  }
1392  scalingSchema = {
1393  "algorithm": "NONE",
1394  "bitpix": 0,
1395  "maskPlanes": ["NO_DATA"],
1396  "seed": 0,
1397  "quantizeLevel": 4.0,
1398  "quantizePad": 5.0,
1399  "fuzz": True,
1400  "bscale": 1.0,
1401  "bzero": 0.0,
1402  }
1403 
1404  def checkUnrecognized(entry, allowed, description):
1405  """Check to see if the entry contains unrecognised keywords"""
1406  unrecognized = set(entry.keys()) - set(allowed)
1407  if unrecognized:
1408  raise RuntimeError(
1409  "Unrecognized entries when parsing image compression recipe %s: %s" %
1410  (description, unrecognized))
1411 
1412  validated = {}
1413  for name in recipes.names(True):
1414  checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
1415  rr = dafBase.PropertySet()
1416  validated[name] = rr
1417  for plane in ("image", "mask", "variance"):
1418  checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
1419  name + "->" + plane)
1420 
1421  for settings, schema in (("compression", compressionSchema),
1422  ("scaling", scalingSchema)):
1423  prefix = plane + "." + settings
1424  if settings not in recipes[name][plane]:
1425  for key in schema:
1426  rr.set(prefix + "." + key, schema[key])
1427  continue
1428  entry = recipes[name][plane][settings]
1429  checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
1430  for key in schema:
1431  value = type(schema[key])(entry[key]) if key in entry else schema[key]
1432  rr.set(prefix + "." + key, value)
1433  return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _getCcdKeyVal(self, dataId)
Utility functions.